id (stringlengths 2-8) | text (stringlengths 16-264k) | dataset_id (stringclasses 1 value)
---|---|---|
3512834
|
<gh_stars>0
n = int(input('Enter the number of Fibonacci terms: '))
l1 = [0, 1]
a = 0
b = 1
for x in range(n - 1):
    c = a + b
    a = b
    b = c
    l1.append(c)
print(l1)
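# Worked example, assuming the user enters 5: the loop runs four times and appends
# 1, 2, 3, 5 after the seed values [0, 1], so the script prints [0, 1, 1, 2, 3, 5].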
|
StarcoderdataPython
|
1716882
|
"""
"""
from harmoniccontext.harmonic_context import HarmonicContext
from tonalmodel.pitch_range import PitchRange


class PolicyContext(object):

    def __init__(self, harmonic_context, pitch_range):
        self._harmonic_context = harmonic_context
        self._pitch_range = pitch_range

    @property
    def harmonic_context(self):
        return self._harmonic_context

    @property
    def pitch_range(self):
        return self._pitch_range

    def __eq__(self, other):
        if other is None:
            return False
        return str(self) == str(other)

    def __hash__(self):
        # hash the string form; str objects have no .hash() method
        return hash(str(self))

    def __str__(self):
        return 'p.c.[{0}, {1}]'.format(self.harmonic_context, self.pitch_range)
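# Usage sketch (hc and pr are hypothetical HarmonicContext / PitchRange instances; their
# constructors are not shown in this file):
#   pc = PolicyContext(hc, pr)
#   str(pc)                      # -> 'p.c.[<hc>, <pr>]'
#   pc == PolicyContext(hc, pr)  # True: equality and hashing both go through str(self)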
|
StarcoderdataPython
|
1730903
|
import os
import matplotlib
matplotlib.use("agg")
from matplotlib import pyplot as plt
import seaborn as sns
import datetime
import pandas as pd
import matplotlib.dates as mdates
import common
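# Note: the `snakemake` object used below is injected by Snakemake when this file is run
# through a rule's `script:` directive; it is not importable when the file is run directly.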
infile = snakemake.input[0]
outfile = snakemake.output[0]
df = pd.read_table(infile)
df["time"] = pd.to_datetime(df["time"])
fig = plt.figure(figsize=(4,1))
plt.semilogy('time', 'cumulative_authors', data=df, label="contributors")
plt.semilogy('time', 'cumulative_recipes', data=df, label="recipes")
plt.legend()
plt.ylabel("count")
plt.xlabel("")
# hide the x-tick labels here; the plot placed below this one in the final figure shows them
plt.xticks([])
sns.despine()
fig.savefig(outfile, bbox_inches="tight")
|
StarcoderdataPython
|
6489720
|
<gh_stars>1-10
from turtle import *
speed(11)
shape("turtle")
sides = 12
angle = 360 / sides
for count in range(sides):
    forward(100)
    left(angle)
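# The loop above draws a regular 12-sided polygon (exterior angle 360 / 12 = 30 degrees) with
# 100-unit sides. Note that speed(11), set above, is outside turtle's documented 1-10 range,
# so turtle maps it to 0, i.e. the fastest drawing mode.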
|
StarcoderdataPython
|
5062309
|
<filename>eenlp/docs/generate_docs_models.py
import mdformat
import pandas as pd
import yaml
from pyprojroot import here
from eenlp.docs.languages import languages
from eenlp.docs.model_types import cases, corpora, types
columns = [
"name",
"description",
"type",
"cased",
"languages",
"pre-trained on",
"links",
]
# Columns for the index table on the top.
# There is filtering for *both* the number of languages and these categories, so that e.g. static word embeddings won't
# show up in the index table.
# currently:
# - single == 1
# - 1 < few < 10
# - 10 <= multi
multilang_models = [
"BERT",
"RoBERTa",
"DistilBERT",
"Electra",
"ALBERT",
"mBERT",
"XLM-RoBERTa",
]
few_language_models = [
"BERT",
"RoBERTa",
"DistilBERT",
"Electra",
"ALBERT",
"XLM-RoBERTa",
"BERT/Electra(?)",
]
single_language_models = [
"BERT",
"RoBERTa",
"DistilBERT",
"Electra",
"ALBERT",
]
def generate():
df = []
for dataset in here("docs/data/models/").glob("*.yml"):
df.append(yaml.safe_load(dataset.read_text()))
df = pd.DataFrame(df)
df_o = df
df = df.explode("languages")
few_langs_models = [
x[0]
for x in df.sort_values("name").groupby(df.index)["languages"].count().items()
if 1 < x[1] < 10
]
few_langs_models = [
x for x in few_langs_models if df.loc[x].iloc[0]["type"] in few_language_models
]
with open(here("docs/models.md"), "w") as f:
f.write("# Models\n\n")
f.write("<table><thead>")
f.write(
f'<tr><td rowspan="2"></td><td align="center" width="100"> multilingual (transformers) </td><td align="center" colspan="{len(few_langs_models)}">several languages (transformers)</td><td align="center" colspan="5">single-language models</td></tr>'
)
f.write("<tr><td></td>")
f.write(
"".join(
f'<td align="center">{x}</td>'
for x in df.loc[few_langs_models]["name"].unique()
)
)
f.write(
'<td align="center">BERT</td><td align="center">RoBERTa</td><td align="center">DistilBERT</td><td align="center">Electra</td><td align="center">ALBERT</td>'
)
f.write("</tr></thead><tbody>\n")
for i, language in enumerate(languages):
emoji_name = language["emoji_name"]
language = language["name"]
f.write(
f'<tr><td><a href="#{emoji_name}-{language.lower().replace("/", "")}"><b>:{emoji_name}: {language}</b></a></td>'
)
# multilang
f.write("<td>")
dff = df[
(
df.apply(
lambda x: x["languages"] == language
and 10 <= len(df[df.index == x.name])
and x["type"] in multilang_models,
axis=1,
)
)
]
f.write(
" / ".join(
sorted(
f'<a href="#{x.name.lower().replace(" ", "-")}-multilingual">{x.name}</a>'
for x in dff.itertuples()
)
)
)
f.write("</td>")
# few langs
for i in few_langs_models:
f.write('<td align="center">')
dff = df[(df.index == i) & (df["languages"] == language)]
if len(dff):
f.write(
f'<a href="#{dff["name"].item().lower().replace(" ", "-")}-{language}">{dff["name"].item()}</a>'
)
f.write("</td>")
for model_name in single_language_models:
f.write('<td align="center">')
dff = df[
df.apply(
lambda x: x["languages"] == language
and x["type"] == model_name
and len(df[df.index == x.name]) == 1,
axis=1,
)
]
if len(dff):
f.write(
" / ".join(
sorted(
f'<a href="#{x.lower().replace(" ", "-")}-{language}">{x}</a>'
for x in dff["name"]
)
)
)
f.write("</td>")
f.write("\n")
f.write("</tbody></table>\n\n")
f.write("// TODO add legend\n\n")
for language in ["Multilingual"] + languages:
if language == "Multilingual":
emoji_name = "earth_africa"
else:
emoji_name = language["emoji_name"]
language = language["name"]
f.write(f"## :{emoji_name}: {language}\n\n")
if language == "Multilingual":
dff = df_o[df_o["languages"].str.len() >= 10]
else:
dff = df_o[
(df_o["languages"].apply(lambda x: language in x))
& (df_o["languages"].str.len() < 10)
]
f.write(
'<table width="100%"><thead><tr>'
'<th width="66%">name</th>'
'<th width="33%">description</th>'
"<th>type</th>"
"<th>cased</th>"
"<th>languages</th>"
"<th>corpora</th>"
"<th>links</th>"
"</tr></thead><tbody>"
)
for _, row in sorted(dff.iterrows(), key=lambda x: x[1]["name"]):
for column in columns:
if column == "name":
f.write(f"<td")
f.write(
f' id="{row["name"].lower().replace(" ", "-")}-{language}"'
)
f.write(f">{row[column]}</td>")
elif column == "languages":
f.write("<td>")
for x in sorted(
df[(df["name"] == row["name"])]["languages"].unique()
):
f.write(
f'<span title="{x}">:{next(y["emoji_name"] for y in languages if y["name"] == x)}:</span> '
)
f.write("</td>")
elif column == "links":
f.write("<td>")
if row["paper"] and row["paper"] != "?":
f.write(
f'<div title="paper"><a href="{row["paper"]}">📄</a></div>'
)
if row["citation"] and row["citation"] != "?":
f.write(
f'<div title="citation"><a href="{row["citation"]}">❞</a></div>'
)
if not pd.isna(row["huggingface"]):
f.write(
f"<div>"
f'<a title="huggingface model card" href="{row["huggingface"]}">🤗️</a> '
f"</div>"
)
f.write("</td>")
elif column == "pre-trained on":
f.write("<td><ul>")
for x in sorted(row["pre-trained on"]):
if x != "" and x != "?":
if x in corpora:
f.write(
f'<li title="{x}">{corpora[x]["emoji"]}</li>'
)
else:
f.write(f'<li title="{x}">{x}</li>')
f.write("</ul></td>")
elif column == "type":
if row["type"] in types:
if "image" in types[row["type"]]:
f.write(
f'<td align="center"><img width="21px" height="21px" title="{row["type"]}" src="{types[row["type"]]["image"]}"></td>'
)
else:
f.write(
f'<td align="center"><div title="{row["type"]}">{types[row["type"]]["emoji"]}</div></td>'
)
else:
f.write(f"<td>{row['type']}</td>")
elif column == "cased":
if row["cased"] in cases:
f.write(
f'<td><div title="{row["cased"]}">{cases[row["cased"]]["emoji"]}</div></td>'
)
else:
f.write(f"<td>{row['cased']}</td>")
else:
f.write(f"<td>{row[column]}</td>")
f.write("</tr>\n")
f.write("</tbody></table>\n\n")
if language != "Multilingual":
f.write(
" [+ multilingual](#earth_africa-multilingual)"
)
f.write("\n\n")
mdformat.file(here("docs/models.md"), extensions={"gfm"})
if __name__ == "__main__":
generate()
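# Assumed usage sketch: run this module from the repository root so pyprojroot's here() can
# resolve docs/data/models/*.yml; it rebuilds docs/models.md and then normalizes the output
# with mdformat's GFM plugin, e.g.:
#   python -m eenlp.docs.generate_docs_models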
|
StarcoderdataPython
|
1959106
|
import re
from datetime import date
import dateparser
from dateutil.rrule import MONTHLY, rrule
from scrapy import Request
from gazette.items import Gazette
from gazette.spiders.base import BaseGazetteSpider
class CeSobralSpider(BaseGazetteSpider):
    name = "ce_sobral"
    TERRITORY_ID = "2312908"
    start_date = date(2017, 2, 6)
    end_date = date.today()

    def start_requests(self):
        for search_month in rrule(
            MONTHLY, dtstart=self.start_date.replace(day=1), until=self.end_date
        ):
            yield Request(
                url=f"https://www.sobral.ce.gov.br/diario/pesquisa/index/ano_da_publicacao:{search_month.year}/mes_da_publicacao:{search_month.month}",
                callback=self.parse_gazettes,
                meta={"search_month": search_month},
            )

    def parse_gazettes(self, response):
        total_gazettes = response.xpath("//div[@class = 'right']/text()").get()
        if int(total_gazettes) == 0:
            return
        gazette_results = response.xpath("//ul[@class = 'resultado-busca']//article")
        for gazette in gazette_results:
            # Extract attributes
            title = gazette.xpath("./a/h5/text()").get()
            edition_number = re.search(r"Diário Oficial Nº (\d+)", title).group(1)
            extra_edition = "Suplementar" in title
            link = response.urljoin(
                gazette.xpath("./a[contains(@href, '.pdf')]/@href").get()
            )
            gazette_content_sample = gazette.xpath(".//p/text()").get()
            date = dateparser.parse(
                re.search(r"^\d{2}/\d{2}/\d{4}", gazette_content_sample).group(0),
                date_formats=["%d/%m/%Y"],
            ).date()
            yield Gazette(
                date=date,
                file_urls=[link],
                edition_number=edition_number,
                is_extra_edition=extra_edition,
                power="executive_legislative",
            )

        # Go to next page
        current_page = response.xpath("//li[@class = 'active']/a/text()").get()
        if current_page:
            next_page = int(current_page) + 1
            if response.xpath(
                f"//li[@class = 'waves-effect']/a[contains(text(), {next_page})]"
            ):
                search_month = response.meta.get("search_month")
                yield Request(
                    url=f"https://www.sobral.ce.gov.br/diario/pesquisa/index/ano_da_publicacao:{search_month.year}/mes_da_publicacao:{search_month.month}/pg:{next_page}",
                    callback=self.parse_gazettes,
                    meta={"search_month": search_month},
                )
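# Usage sketch (assuming the standard Scrapy project layout this spider ships in):
#   scrapy crawl ce_sobral
# BaseGazetteSpider and the configured item pipelines are assumed to handle downloading the
# PDFs referenced in file_urls and persisting the yielded Gazette items.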
|
StarcoderdataPython
|
8147578
|
<gh_stars>1-10
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
from bs4 import BeautifulSoup as bs
import pandas as pd
import numpy as np
import sys
import pickle
def pull_page(url):
    chrome_options = Options()
    chrome_options.add_argument("--headless")
    driver = Chrome(ChromeDriverManager().install(), options=chrome_options)
    driver.get(url)
    soup = bs(driver.page_source)
    driver.close()
    return soup

def hemnet_pull_all(url):
    # first get how many pages there are
    soup = pull_page(url)
    key = "div.pagination__item"
    pages_num = int(soup.select(key)[-2].text)
    soup_list = [soup]
    for page in range(2, pages_num + 1):
        soup = pull_page(f"{url}&page={page}")
        soup_list.append(soup)
    return soup_list

def flatten(t):
    return [item for sublist in t for item in sublist]

def container_list_gen(container_key, soup_list):
    container_list = [soup.select(container_key) for soup in soup_list]
    container_list = flatten(container_list)
    container_list = [i for i in container_list if i != "\n"]
    return container_list

def container_scrape(container_list, column_keys):
    pd_dct = {i: [] for i in column_keys}
    for container in container_list:
        for cat, key in column_keys.items():
            try:
                pd_dct[cat].append(container.select(key)[0].text)
            except IndexError:
                # the selector matched nothing in this container
                pd_dct[cat].append(np.nan)
    return pd.DataFrame(pd_dct)

def save_object(obj, filename):
    with open(filename, 'wb') as outp:  # Overwrites any existing file.
        pickle.dump(obj, outp, pickle.HIGHEST_PROTOCOL)

def load_object(filename):
    with open(filename, "rb") as f:
        return pickle.load(f)
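# Putting the helpers together, a hedged end-to-end sketch (the URL and CSS selectors below
# are hypothetical placeholders, not taken from this file):
#   soups = hemnet_pull_all("https://www.hemnet.se/salda/bostader?location_ids%5B%5D=17744")
#   listings = container_list_gen("li.sold-results__normal-hit", soups)
#   df = container_scrape(listings, {"address": "h2.sold-property-listing__heading",
#                                    "price": "div.sold-property-listing__price"})
#   save_object(df, "hemnet.pkl")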
|
StarcoderdataPython
|
1789603
|
import os
import xlsxwriter
import time
import pickle
import random
import numpy as np
import matplotlib.pyplot as plt
from classes.quiz import Quiz
from classes.save import Save
from classes.result import Overall_Results, Result
from classes.answer import Picture_Answer, Text_Answer, Answer
from classes.school import School, Student, Year_Group
from classes.question import Picture_Question, Text_Question
LOAD_FILE = "data.quiz"
def clear_screen():
"""Clears the screen
"""
os.system('cls' if os.name == 'nt' else 'clear')
def save_data(save_file):
"""Saves the quiz data to a file
Arguments:
save_file {Save} -- A save object containing all of the quiz's data
"""
# Uses pickle to dump the object into a byte array and then into a file
pickle.dump(save_file, open(LOAD_FILE, "wb"))
def main():
"""The main function that is run when the file is run
"""
# If there is a load file
if os.path.exists(LOAD_FILE):
# Load it
save = pickle.load(open(LOAD_FILE, "rb"))
else:
# Otherwise create a new save object and make a new save file for it
save = Save()
pickle.dump(save, open(LOAD_FILE, "wb"))
clear_screen()
category = setup(save)
clear_screen()
quiz(category, save)
def quiz(category, save):
"""Allows the user to complete the quiz
Arguments:
category {str} -- The category that the questions shall be for
save {Save} -- The save file that shall be saved to disk
"""
while 1:
school = None
year = None
if save.schools:
school_choice = print_menu("Please choose a school", [
school.name for school in save.schools])
school = save.schools[school_choice]
else:
print("There are currently no schools to pick from. Please add a school to continue")
break
if school:
if school.year_groups:
yeargroup_choice = print_menu(
"Please choose a year-group", [year.year for year in school.year_groups])
year = school.year_groups[yeargroup_choice]
else:
print(
"There are currently no year-groups to pick from with your current choice of school. Please add a yeargroup to continue")
else:
print("Please set a school before setting a year-group")
questions = []
for question in save.questions:
if question.question_category == category:
questions.append(question)
if len(questions) < 10:
print("There are not enough questions for a quiz in this category")
break
else:
questions = random.sample(questions, 10)
student = Student(school, year)
random.shuffle(questions)
answers = []
for question in questions:
print()
index = random.randint(0, 3)
options = list(question.incorrect_answers)
options.insert(index, question.correct_answer)
choice = print_menu(question.question_text, options)
clear_screen()
if choice == index:
answers.append((question, Answer(True)))
print("\nCorrect!")
else:
answers.append((question, Answer(False)))
print("\nIncorrect...")
print("The correct answer is:", question.correct_answer)
result = Result(answers, student)
if save.results:
save.results = save.results + [result]
else:
save.results = [result]
print()
print("Congratulations! You scored: " + str(len(
[answer for answer in answers if answer[1].correct is True]
)) + "/" + str(len(answers)))
print()
save_data(save)
time.sleep(5)
clear_screen()
def setup(save):
"""The method run at startup to allow configuration of the quiz
Arguments:
save {Save} -- An object that holds all the data for the quiz so that
everything can be quickly saved
Returns:
str -- The category of questions selected for the quiz
"""
category = None
print("Config menu")
print("===========")
print("To return to this menu, please close the program and then reopen\n")
while 1:
print("\nCurrent config:")
if category:
print("Category: " + category)
else:
print("Category: Not Selected")
choice = print_menu("Please choose an option",
["Start Quiz",
"Add School",
"Add Year-group",
"Set Category",
"Edit Questions",
"View Statistics"])
print()
clear_screen()
if choice == 0:
if category:
return category
else:
print("Please ensure you have entered a category")
elif choice == 1:
name = input("Please enter the school's name: ")
school_ = School()
school_.name = name
if save.schools:
save.schools = save.schools + [school_]
else:
save.schools = [school_]
elif choice == 2:
if save.schools:
year_school_choice = print_menu("Please select a school to add a year-group to:", [school.name for school in save.schools])
school_to_add_year_to = save.schools[year_school_choice]
name = input("Please enter the year-group name: ")
year_ = Year_Group(name)
if school_to_add_year_to.year_groups:
school_to_add_year_to.year_groups = school_to_add_year_to.year_groups + [year_]
else:
school_to_add_year_to.year_groups = [year_]
else:
print("Please add a school before adding a year-group")
elif choice == 3:
if save.questions:
q = []
for question in save.questions:
q.append(question.question_category)
q = list(set(q))
cat = print_menu("Please select a category", q)
category = q[cat]
else:
print("Please add questions before selecting a category")
elif choice == 4:
save.questions = question_editor(save.questions)
elif choice == 5:
show_stats(save)
save_data(save)
def show_stats(save):
"""Displays and exports statistics
Arguments:
save {Save} -- Contains all application data
"""
while 1:
choice = print_menu("What would you like to do?", ["Compare year-groups from a school", "Compare schools", "Export to Excel", "Quit stats viewer"])
clear_screen()
if choice == 0:
years = {}
if save.schools:
school_choice = print_menu("Please select a school:", [school.name for school in save.schools])
school = save.schools[school_choice]
if school.year_groups:
for year_group in school.year_groups:
years[year_group.year] = []
for year in years:
if save.results:
for result in save.results:
if result.student.school == school and result.student.year_group.year == year:
answers = result.result
years[year].append(len(
[answer for answer in answers if answer[1].correct is True]
))
else:
print("Please complete at least one quiz")
year_names = []
year_averages = []
for year in years:
years[year] = sum(years[year])/len(years[year])
year_names.append(year)
year_averages.append(years[year])
index = np.arange(len(year_names))
plt.bar(index, year_averages)
plt.xlabel('Year-groups')
plt.ylabel('Average Score')
plt.xticks(index, year_names)
plt.title('Averages for year-groups in ' + school.name)
plt.show()
else:
print("This school has no year-groups")
else:
print("There are no schools to display")
elif choice == 1:
school_results = {}
if save.schools:
for school in save.schools:
if save.results:
for result in save.results:
if result.student.school.name == school.name:
if school.name in school_results:
school_results[school.name].append(len(
[answer for answer in result.result if answer[1].correct is True]
))
else:
school_results[school.name] = [(len(
[answer for answer in result.result if answer[1].correct is True]
))]
school_names = []
school_averages = []
for school in school_results:
school_results[school] = sum(school_results[school])/len(school_results[school])
school_names.append(school)
school_averages.append(school_results[school])
index = np.arange(len(school_names))
plt.bar(index, school_averages)
plt.xlabel('Schools')
plt.ylabel('Average Score')
plt.xticks(index, school_names)
plt.title('Averages for schools')
plt.show()
else:
print("There are no schools to compare")
elif choice == 2:
try:
workbook = xlsxwriter.Workbook('data.xlsx')
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': True})
worksheet.write('A1', 'School', bold)
worksheet.write('B1', 'Year', bold)
worksheet.write('C1', 'Category', bold)
worksheet.write('D1', 'Result', bold)
row = 1
col = 0
if save.results:
for result in save.results:
worksheet.write(row, col, result.student.school.name)
worksheet.write(row, col + 1, result.student.year_group.year)
worksheet.write(row, col + 2, result.result[0][0].question_category)
worksheet.write(row, col + 3, str(len([answer for answer in result.result if answer[1].correct is True])))
row += 1
workbook.close()
print("Data successfully exported to data.xlsx")
else:
print("There is no data to export")
except PermissionError:
print("Please close the file before attempting to write to it")
elif choice == 3:
return
def question_editor(questions):
"""Creates an easy interface to edit the questions with
Arguments:
questions {list} -- The questions to edit
Returns:
list -- The edited questions
"""
if questions:
pass
else:
questions = []
while 1:
choice = print_menu("Would you like to:", ["Add a question", "Delete a question", "Quit the question editor"])
if choice == 0:
text = input("Please enter the question: ")
correct = input("Please enter the correct answer: ")
incorrect = [input("Please enter an incorrect answer: ") for i in range(0, 3)]
cat = input("Please enter a category: ")
questions.append(Text_Question(text, correct, incorrect, cat))
elif choice == 1:
if len(questions) > 0:
choice = print_menu("Please select a question to delete:", [q.question_text for q in questions])
del questions[choice]
else:
print("There are no questions to delete")
else:
return questions
def print_menu(statement, options):
"""Presents the user with a choice of options and allows the user to pick one
Arguments:
statement {str} -- The description of the choice
options {list} -- The possible options the user can pick
Returns:
int -- The index of the option the user picked from the options
"""
print(statement)
for i, option in enumerate(options, 1):
print(str(i) + ". " + option)
while 1:
try:
value = int(input("Please choose an option: "))
if 0 < value <= len(options):
return value - 1
print("Invalid input")
except ValueError:
print("Invalid input")
if __name__ == "__main__":
main()
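# Persistence note: all schools, year-groups, questions and results live in the pickled Save
# object written to data.quiz next to this script, so deleting that file resets the quiz; the
# "Export to Excel" option additionally writes the collected results to data.xlsx via xlsxwriter.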
|
StarcoderdataPython
|
9704929
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heatclient import exc
from heat_integrationtests.functional import functional_base
class StackTemplateValidateTest(functional_base.FunctionalTestsBase):
random_template = '''
heat_template_version: 2014-10-16
description: the stack description
parameters:
aparam:
type: number
default: 10
description: the param description
resources:
myres:
type: OS::Heat::RandomString
properties:
length: {get_param: aparam}
'''
parent_template = '''
heat_template_version: 2014-10-16
description: the parent template
parameters:
pparam:
type: number
default: 5
description: the param description
resources:
nres:
type: mynested.yaml
properties:
aparam: {get_param: pparam}
'''
parent_template_noprop = '''
heat_template_version: 2014-10-16
description: the parent template
resources:
nres:
type: mynested.yaml
'''
random_template_groups = '''
heat_template_version: 2014-10-16
description: the stack description
parameters:
aparam:
type: number
default: 10
description: the param description
bparam:
type: string
default: foo
cparam:
type: string
default: secret
hidden: true
parameter_groups:
- label: str_params
description: The string params
parameters:
- bparam
- cparam
resources:
myres:
type: OS::Heat::RandomString
properties:
length: {get_param: aparam}
'''
def test_template_validate_basic(self):
ret = self.client.stacks.validate(template=self.random_template)
expected = {'Description': 'the stack description',
'Parameters': {
'aparam': {'Default': 10,
'Description': 'the param description',
'Label': 'aparam',
'NoEcho': 'false',
'Type': 'Number'}}}
self.assertEqual(expected, ret)
def test_template_validate_override_default(self):
env = {'parameters': {'aparam': 5}}
ret = self.client.stacks.validate(template=self.random_template,
environment=env)
expected = {'Description': 'the stack description',
'Parameters': {
'aparam': {'Default': 10,
'Value': 5,
'Description': 'the param description',
'Label': 'aparam',
'NoEcho': 'false',
'Type': 'Number'}}}
self.assertEqual(expected, ret)
def test_template_validate_override_none(self):
env = {'resource_registry': {
'OS::Heat::RandomString': 'OS::Heat::None'}}
ret = self.client.stacks.validate(template=self.random_template,
environment=env)
expected = {'Description': 'the stack description',
'Parameters': {
'aparam': {'Default': 10,
'Description': 'the param description',
'Label': 'aparam',
'NoEcho': 'false',
'Type': 'Number'}}}
self.assertEqual(expected, ret)
def test_template_validate_basic_required_param(self):
tmpl = self.random_template.replace('default: 10', '')
ret = self.client.stacks.validate(template=tmpl)
expected = {'Description': 'the stack description',
'Parameters': {
'aparam': {'Description': 'the param description',
'Label': 'aparam',
'NoEcho': 'false',
'Type': 'Number'}}}
self.assertEqual(expected, ret)
def test_template_validate_fail_version(self):
fail_template = self.random_template.replace('2014-10-16', 'invalid')
ex = self.assertRaises(exc.HTTPBadRequest,
self.client.stacks.validate,
template=fail_template)
self.assertIn('The template version is invalid', six.text_type(ex))
def test_template_validate_parameter_groups(self):
ret = self.client.stacks.validate(template=self.random_template_groups)
expected = {'Description': 'the stack description',
'ParameterGroups':
[{'description': 'The string params',
'label': 'str_params',
'parameters': ['bparam', 'cparam']}],
'Parameters':
{'aparam':
{'Default': 10,
'Description': 'the param description',
'Label': 'aparam',
'NoEcho': 'false',
'Type': 'Number'},
'bparam':
{'Default': 'foo',
'Description': '',
'Label': 'bparam',
'NoEcho': 'false',
'Type': 'String'},
'cparam':
{'Default': 'secret',
'Description': '',
'Label': 'cparam',
'NoEcho': 'true',
'Type': 'String'}}}
self.assertEqual(expected, ret)
def test_template_validate_nested_off(self):
files = {'mynested.yaml': self.random_template}
ret = self.client.stacks.validate(template=self.parent_template,
files=files)
expected = {'Description': 'the parent template',
'Parameters': {
'pparam': {'Default': 5,
'Description': 'the param description',
'Label': 'pparam',
'NoEcho': 'false',
'Type': 'Number'}}}
self.assertEqual(expected, ret)
def test_template_validate_nested_on(self):
files = {'mynested.yaml': self.random_template}
ret = self.client.stacks.validate(template=self.parent_template_noprop,
files=files,
show_nested=True)
expected = {'Description': 'the parent template',
'Parameters': {},
'NestedParameters': {
'nres': {'Description': 'the stack description',
'Parameters': {'aparam': {'Default': 10,
'Description':
'the param '
'description',
'Label': 'aparam',
'NoEcho': 'false',
'Type': 'Number'}},
'Type': 'mynested.yaml'}}}
self.assertEqual(expected, ret)
def test_template_validate_nested_on_multiple(self):
# parent_template -> nested_template -> random_template
nested_template = self.random_template.replace(
'OS::Heat::RandomString', 'mynested2.yaml')
files = {'mynested.yaml': nested_template,
'mynested2.yaml': self.random_template}
ret = self.client.stacks.validate(template=self.parent_template,
files=files,
show_nested=True)
n_param2 = {'myres': {'Description': 'the stack description',
'Parameters': {'aparam': {'Default': 10,
'Description':
'the param '
'description',
'Label': 'aparam',
'NoEcho': 'false',
'Type': 'Number'}},
'Type': 'mynested2.yaml'}}
expected = {'Description': 'the parent template',
'Parameters': {
'pparam': {'Default': 5,
'Description': 'the param description',
'Label': 'pparam',
'NoEcho': 'false',
'Type': 'Number'}},
'NestedParameters': {
'nres': {'Description': 'the stack description',
'Parameters': {'aparam': {'Default': 10,
'Description':
'the param '
'description',
'Label': 'aparam',
'Value': 5,
'NoEcho': 'false',
'Type': 'Number'}},
'NestedParameters': n_param2,
'Type': 'mynested.yaml'}}}
self.assertEqual(expected, ret)
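# These tests exercise heatclient's stacks.validate() against a live Heat API; they are assumed
# to run under the heat_integrationtests functional harness (which supplies self.client), not as
# standalone unit tests.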
|
StarcoderdataPython
|
9717434
|
<filename>simulation_control/src/scripts/detect_object_server.py
#!/usr/bin/env python
import rospy
import actionlib
import simulation_control.msg
'''Object detection for F651
Copyright (C) 2018, CPS2018 Challenge by <NAME>. All rights reserved.
'''
from geometry_msgs.msg import PoseStamped, Point
class detect_object_server():
    def __init__(self):
        # variables
        self.local_pose = PoseStamped()
        self.detected = False
        # publishers
        # subscribers
        rospy.Subscriber('/color_detection/cam_point', Point, self.get_cam_pos_callback)
        rospy.Subscriber('/mavros/local_position/pose', PoseStamped, self._local_pose_callback)
        self.rate = rospy.Rate(20)
        self.result = simulation_control.msg.detect_objectResult()
        self.action_server = actionlib.SimpleActionServer('detect_object', simulation_control.msg.detect_objectAction,
                                                          execute_cb=self.execute_cb, auto_start=False)
        self.action_server.start()

    def execute_cb(self, goal):
        rospy.sleep(0.1)
        while not self.detected:
            self.rate.sleep()
        rospy.loginfo("Target Detected")
        self.result.detected_position = self.local_pose
        self.action_server.set_succeeded(self.result)

    def get_cam_pos_callback(self, data):
        if data.x != float("inf"):
            self.detected = True
            self.object_pose = data
        else:
            self.detected = False

    def _local_pose_callback(self, data):
        self.local_pose = data


if __name__ == '__main__':
    try:
        rospy.init_node('detect_object_server')
        detect_object_server()
        rospy.spin()  # keep the node alive so the subscribers and action server keep serving callbacks
    except rospy.ROSInterruptException:
        pass
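# Hedged client-side sketch (action name and message types assumed from the server above):
#   client = actionlib.SimpleActionClient('detect_object', simulation_control.msg.detect_objectAction)
#   client.wait_for_server()
#   client.send_goal(simulation_control.msg.detect_objectGoal())
#   client.wait_for_result()
#   print(client.get_result().detected_position)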
|
StarcoderdataPython
|
5156161
|
<filename>src/posts/views.py
from django.shortcuts import render, get_object_or_404
from posts.models import Recipe
# Create your views here.
def index(request):
    recipes = Recipe.objects.all().order_by('-timestamp')
    context = {
        'recipes': recipes
    }
    return render(request, 'posts/index.html', context)

def detail(request, slug):
    recipe = get_object_or_404(Recipe, slug=slug)
    context = {
        'recipe': recipe,
        'ingredients': recipe.ingredients.all(),
        'techniques': recipe.techniques.all(),
        'categories': recipe.categories.all(),
    }
    return render(request, 'posts/detail.html', context)
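# A matching URLconf is assumed but not shown here; a minimal hypothetical posts/urls.py sketch:
#   from django.urls import path
#   from posts import views
#   urlpatterns = [
#       path('', views.index, name='index'),
#       path('<slug:slug>/', views.detail, name='detail'),
#   ]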
|
StarcoderdataPython
|
6588225
|
#! /usr/bin/env python3
import pycurl
import yaml
import argparse
import io
from extractor import ExtractorFactory
# We should ignore SIGPIPE when using pycurl.NOSIGNAL - see
# the libcurl tutorial for more info.
try:
    import signal
    signal.signal(signal.SIGPIPE, signal.SIG_IGN)
except ImportError:
    pass
parser = argparse.ArgumentParser()
parser.add_argument('urls_file', type=str,
help='the file with the urls to be fetched')
parser.add_argument('--num-conn', type=int, default=10,
help='number of connections to use (default: 10)')
args = parser.parse_args()
# yaml.load() without an explicit Loader warns in PyYAML 5 and errors in PyYAML 6;
# safe_load() is the standard choice for a plain config file.
with open('conf.yml') as conf_file:
    conf = yaml.safe_load(conf_file)
num_conn = args.num_conn
urls_file = args.urls_file
queue = []
with open(urls_file) as f:
    for url in f.readlines():
        url = url.strip()
        if not url or url[0] == "#":
            continue
        queue.append(url)
# Check args
assert queue, "no URLs given"
num_urls = len(queue)
num_conn = min(num_conn, num_urls)
assert 1 <= num_conn <= 10000, "invalid number of concurrent connections"
print("----- Getting", num_urls, "URLs using", num_conn, "connections -----")
# Pre-allocate a list of curl objects
m = pycurl.CurlMulti()
m.handles = []
for i in range(num_conn):
    c = pycurl.Curl()
    c.fp = None
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.MAXREDIRS, 5)
    c.setopt(pycurl.CONNECTTIMEOUT, 30)
    c.setopt(pycurl.TIMEOUT, 300)
    c.setopt(pycurl.NOSIGNAL, 1)
    # if conf['curl_http_headers']:
    #     c.setopt(pycurl.HTTPHEADER, conf['curl_http_headers'])
    if conf['curl_user_agent']:
        c.setopt(pycurl.USERAGENT, conf['curl_user_agent'])
    m.handles.append(c)
# Main loop
freelist = m.handles[:]
num_processed = 0
while num_processed < num_urls:
    # If there is an url to process and a free curl object, add to multi stack
    while queue and freelist:
        url = queue.pop(0)
        c = freelist.pop()
        c.fp = io.BytesIO()
        c.setopt(pycurl.URL, url)
        c.setopt(pycurl.WRITEDATA, c.fp)
        m.add_handle(c)
        c.url = url
    # Run the internal curl state machine for the multi stack
    while True:
        ret, num_handles = m.perform()
        if ret != pycurl.E_CALL_MULTI_PERFORM:
            break
    # Check for curl objects which have terminated, and add them to the freelist
    while True:
        num_q, ok_list, err_list = m.info_read()
        for c in ok_list:
            s = str(c.fp.getvalue())
            c.fp.close()
            c.fp = None
            m.remove_handle(c)
            print("Success:", c.url, c.getinfo(pycurl.EFFECTIVE_URL))
            ext = ExtractorFactory(c.url, s)
            if ext:
                articles = ext.extract()
                ext.insert_in_db(articles)
            else:
                print("No Extractor Found for URL")
            freelist.append(c)
        for c, errno, errmsg in err_list:
            c.fp.close()
            c.fp = None
            m.remove_handle(c)
            print("Failed: ", c.url, errno, errmsg)
            freelist.append(c)
        num_processed = num_processed + len(ok_list) + len(err_list)
        if num_q == 0:
            break
    # Currently no more I/O is pending, could do something in the meantime
    # (display a progress bar, etc.).
    # We just call select() to sleep until some more data is available.
    m.select(1.0)
# Cleanup
for c in m.handles:
    if c.fp is not None:
        c.fp.close()
        c.fp = None
    c.close()
m.close()
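# Invocation sketch based on the argparse definitions above (fetch.py is a hypothetical name
# for this script; conf.yml must define curl_user_agent, and may define curl_http_headers for
# the commented-out option):
#   ./fetch.py urls.txt --num-conn 20
# urls.txt lists one URL per line; blank lines and lines starting with '#' are skipped.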
|
StarcoderdataPython
|
11201811
|
import numpy as np
from petsc4py import PETSc
from src.geo import *
from src import stokes_flow as sf
from src.support_class import *
from src.StokesFlowMethod import *
__all__ = ['createEcoli_ellipse', 'createEcoliComp_ellipse', 'createEcoli_2tails',
'createEcoliComp_tunnel', 'createEcoli_tunnel', 'create_ecoli_dualTail',
'create_ecoli_2part', 'create_ecoli_tail', 'create_ecoli_tail_at',
'create_rotlets_tail_2part', 'create_selfRepeat_tail',
'create_ecoli_2part_at', 'create_ecoli_dualTail_at',
'get_tail_nodes_split_at', 'get_ecoli_nodes_split_at',
'get_ecoli_nodes_2part_at', 'get_tail_at', 'get_ellipsoid_at',
'create_diskVane_tail',
'create_capsule',
'create_rod',
'create_infHelix',
'create_helicoid_list', 'create_helicoid_comp',
'creat_dumb_obj',
'creat_helicoid_dumb', 'creat_helicoid_dumb_v2', 'creat_helicoid_dumb_selfRotate',
'obj2helicoid_list', 'obj2helicoid_list_v2', 'obj2helicoid_list_v3',
'obj2helicoid_comp', 'obj2helicoid_list_selfRotate',
'create_sphere', 'create_move_single_sphere',
'create_one_ellipse', 'create_one_ellipse_v2']
def create_capsule(rs1, rs2, ls, ds, node_dof=3):
lvs3 = ls - 2 * rs2
dth = ds / rs2
err_msg = 'geo parameter of create_capsule head is wrong. '
assert lvs3 >= 0, err_msg
vsgeo = base_geo()
vsgeo.set_dof(node_dof)
vsgeo1 = ellipse_base_geo() # velocity node geo of head
vsgeo1.create_half_delta(ds, rs1, rs2)
vsgeo2 = vsgeo1.copy()
vsgeo1.node_rotation(norm=np.array((0, 1, 0)), theta=-np.pi / 2)
vsgeo1.node_rotation(norm=np.array((0, 0, 1)), theta=-np.pi / 2)
vsgeo1.move((0, 0, +lvs3 / 2))
vsgeo2.node_rotation(norm=np.array((0, 1, 0)), theta=+np.pi / 2)
vsgeo2.node_rotation(norm=np.array((0, 0, 1)), theta=+np.pi / 2 - dth)
vsgeo2.move((0, 0, -lvs3 / 2))
vsgeo2.set_nodes(np.flipud(vsgeo2.get_nodes()), deltalength=vsgeo2.get_deltaLength())
if lvs3 > ds:
vsgeo3 = tunnel_geo()
vsgeo3.create_deltatheta(dth=dth, radius=rs2, length=lvs3)
vsgeo.combine([vsgeo1, vsgeo3, vsgeo2])
else:
vsgeo.combine([vsgeo1, vsgeo2])
return vsgeo
def create_ecoli_tail(moveh, **kwargs):
nth = kwargs['nth']
hfct = kwargs['hfct']
eh = kwargs['eh']
ch = kwargs['ch']
rh11 = kwargs['rh11']
rh12 = kwargs['rh12']
rh2 = kwargs['rh2']
ph = kwargs['ph']
n_tail = kwargs['n_tail']
with_cover = kwargs['with_cover']
with_T_geo = kwargs['with_T_geo'] if 'with_T_geo' in kwargs.keys() else 0
left_hand = kwargs['left_hand']
rT2 = kwargs['rT2']
center = kwargs['center']
matrix_method = kwargs['matrix_method']
zoom_factor = kwargs['zoom_factor']
obj_type = sf.obj_dic[matrix_method]
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires eh==0. '
assert np.isclose(eh, 0), err_msg
# create helix
vhobj0 = obj_type()
node_dof = vhobj0.get_n_unknown()
B = ph / (2 * np.pi)
vhgeo0 = FatHelix() # velocity node geo of helix
if 'dualPotential' in matrix_method:
vhgeo0.set_check_epsilon(False)
vhgeo0.set_dof(node_dof)
dth = 2 * np.pi / nth
fhgeo0 = vhgeo0.create_deltatheta(dth=dth, radius=rh2, R1=rh11, R2=rh12, B=B, n_c=ch,
epsilon=eh, with_cover=with_cover, factor=hfct,
left_hand=left_hand)
vhobj0.set_data(fhgeo0, vhgeo0, name='helix_0')
vhobj0.zoom(zoom_factor)
if with_T_geo:
# dbg
OptDB = PETSc.Options()
factor = OptDB.getReal('dbg_theta_factor', 1.5)
PETSc.Sys.Print('--------------------> DBG: dbg_theta_factor = %f' % factor)
theta = np.pi * ch + (rT2 + rh2 * factor) / (rh11 + rh2)
vhobj0.node_rotation(norm=np.array((0, 0, 1)), theta=theta)
vhobj0.move(moveh * zoom_factor)
tail_list = uniqueList()
for i0 in range(n_tail):
theta = 2 * np.pi / n_tail * i0
vhobj1 = vhobj0.copy()
vhobj1.node_rotation(norm=(0, 0, 1), theta=theta, rotation_origin=center.copy())
vhobj1.set_name('helix_%d' % i0)
tail_list.append(vhobj1)
return tail_list
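# Reading of the function above: it builds one FatHelix-based flow object from the kwargs,
# optionally rotates and shifts it when with_T_geo is set, then returns n_tail copies spaced
# evenly (2*pi/n_tail apart) around the z axis through `center`. The near-identical
# create_ecoli_tail_bck below appears to be an older backup of the same routine.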
def create_ecoli_tail_bck(moveh, **kwargs):
nth = kwargs['nth']
hfct = kwargs['hfct']
eh = kwargs['eh']
ch = kwargs['ch']
rh11 = kwargs['rh11']
rh12 = kwargs['rh12']
rh2 = kwargs['rh2']
ph = kwargs['ph']
n_tail = kwargs['n_tail']
with_cover = kwargs['with_cover']
left_hand = kwargs['left_hand']
rT2 = kwargs['rT2']
center = kwargs['center']
matrix_method = kwargs['matrix_method']
zoom_factor = kwargs['zoom_factor']
obj_type = sf.obj_dic[matrix_method]
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires eh==0. '
assert np.isclose(eh, 0), err_msg
# create helix
vhobj0 = obj_type()
node_dof = vhobj0.get_n_unknown()
B = ph / (2 * np.pi)
vhgeo0 = FatHelix() # velocity node geo of helix
if 'dualPotential' in matrix_method:
vhgeo0.set_check_epsilon(False)
vhgeo0.set_dof(node_dof)
dth = 2 * np.pi / nth
fhgeo0 = vhgeo0.create_deltatheta(dth=dth, radius=rh2, R1=rh11, R2=rh12, B=B, n_c=ch,
epsilon=eh, with_cover=with_cover, factor=hfct,
left_hand=left_hand)
vhobj0.set_data(fhgeo0, vhgeo0, name='helix_0')
vhobj0.zoom(zoom_factor)
# dbg
OptDB = PETSc.Options()
factor = OptDB.getReal('dbg_theta_factor', 1.5)
PETSc.Sys.Print('--------------------> DBG: dbg_theta_factor = %f' % factor)
theta = np.pi * ch + (rT2 + rh2 * factor) / (rh11 + rh2)
vhobj0.node_rotation(norm=np.array((0, 0, 1)), theta=theta)
vhobj0.move(moveh * zoom_factor)
tail_list = uniqueList()
for i0 in range(n_tail):
theta = 2 * np.pi / n_tail * i0
vhobj1 = vhobj0.copy()
vhobj1.node_rotation(norm=(0, 0, 1), theta=theta, rotation_origin=center.copy())
vhobj1.set_name('helix_%d' % i0)
tail_list.append(vhobj1)
return tail_list
def create_diskVane_tail(moveh, **kwargs):
r1 = kwargs['diskVane_r1']
r2 = kwargs['diskVane_r2']
rz = kwargs['diskVane_rz']
th_loc = kwargs['diskVane_th_loc']
# ph_loc = kwargs['diskVane_ph_loc']
ds = kwargs['diskVane_ds']
nr = kwargs['diskVane_nr']
nz = kwargs['diskVane_nz']
tgeo = regularizeDisk()
tgeo.create_ds(ds, r2)
tgeo.node_rotation(norm=np.array([1, 0, 0]), theta=np.pi / 2, rotation_origin=np.zeros(3))
tgeo.node_rotation(norm=np.array([0, 0, 1]), theta=th_loc, rotation_origin=np.zeros(3))
tgeo.move(np.array((r1, 0, moveh)))
tgeo_list0 = []
trot = 2 * np.pi / nr
for i0 in range(nr):
th = trot * i0
tgeo2 = tgeo.copy()
tgeo2.node_rotation(norm=np.array((0, 0, 1)), theta=th, rotation_origin=np.zeros(3))
tgeo_list0.append(tgeo2)
if np.isclose(nz, 1):
tgeo_list = tgeo_list0
else:
tgeo_list = []
tz = rz / (nz - 1)
for i0 in range(nz):
tmove = tz * i0
th = np.pi * i0
for tgeoi in tgeo_list0:
tgeoj = tgeoi.copy()
tgeoj.move(np.array((0, 0, tmove)))
tgeoj.node_rotation(norm=np.array((0, 0, 1)), theta=th, rotation_origin=np.zeros(3))
tgeo_list.append(tgeoj)
return tgeo_list
def create_selfRepeat_tail(moveh, **kwargs):
nth = kwargs['nth']
hfct = kwargs['hfct']
eh = kwargs['eh']
ch = kwargs['ch']
rh11 = kwargs['rh11']
rh12 = kwargs['rh12']
rh2 = kwargs['rh2']
ph = kwargs['ph']
n_tail = kwargs['n_tail']
with_cover = kwargs['with_cover']
with_T_geo = kwargs['with_T_geo'] if 'with_T_geo' in kwargs.keys() else 0
left_hand = kwargs['left_hand']
rT2 = kwargs['rT2']
repeat_n = kwargs['repeat_n']
center = kwargs['center']
matrix_method = kwargs['matrix_method']
zoom_factor = kwargs['zoom_factor']
obj_type = sf.obj_dic[matrix_method]
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires eh==0. '
assert np.isclose(eh, 0), err_msg
# create helix
vhobj0 = obj_type() # type: sf.StokesFlowObj
node_dof = vhobj0.get_n_unknown()
B = ph / (2 * np.pi)
vhgeo0 = SelfRepeat_FatHelix(repeat_n) # velocity node geo of helix
if 'dualPotential' in matrix_method:
vhgeo0.set_check_epsilon(False)
vhgeo0.set_dof(node_dof)
dth = 2 * np.pi / nth
fhgeo0 = vhgeo0.create_deltatheta(dth=dth, radius=rh2, R1=rh11, R2=rh12, B=B, n_c=ch,
epsilon=eh, with_cover=with_cover, factor=hfct,
left_hand=left_hand) # type: SelfRepeat_FatHelix
vhobj0.set_data(fhgeo0, vhgeo0, name='helix_0')
vhobj0.zoom(zoom_factor)
if with_T_geo:
# dbg
OptDB = PETSc.Options()
factor = OptDB.getReal('dbg_theta_factor', 1.5)
PETSc.Sys.Print('--------------------> DBG: dbg_theta_factor = %f' % factor)
theta = np.pi * ch + (rT2 + rh2 * factor) / (rh11 + rh2)
vhobj0.node_rotation(norm=np.array((0, 0, 1)), theta=theta)
vhobj0.move(moveh * zoom_factor)
tail_list = uniqueList()
for i0 in range(n_tail):
theta = 2 * np.pi / n_tail * i0
vhobj1 = vhobj0.copy()
vhobj1.node_rotation(norm=(0, 0, 1), theta=theta, rotation_origin=center.copy())
vhobj1.set_name('helix_%d' % i0)
tail_list.append(vhobj1)
tail_start_list = []
tail_body0_list = []
tail_end_list = []
for tobj in tail_list:
vhgeo0 = tobj.get_u_geo()
fhgeo0 = tobj.get_f_geo()
#
part_obj = obj_type()
part_ugeo = vhgeo0.get_start_geo()
part_fgeo = fhgeo0.get_start_geo()
part_obj.set_data(part_fgeo, part_ugeo, name='helix_0_start')
tail_start_list.append(part_obj)
#
part_obj = sf.SelfRepeatObj()
part_ugeo = vhgeo0.get_body_mid_geo()
part_fgeo = fhgeo0.get_body_mid_geo()
part_obj.set_data(part_fgeo, part_ugeo, name='helix_0_body0')
tail_body0_list.append(part_obj)
#
part_obj = obj_type()
part_ugeo = vhgeo0.get_end_geo()
part_fgeo = fhgeo0.get_end_geo()
part_obj.set_data(part_fgeo, part_ugeo, name='helix_0_end')
tail_end_list.append(part_obj)
return tail_list, tail_start_list, tail_body0_list, tail_end_list
def create_ecoli_tail_at(theta, phi, psi_tail, now_center=np.zeros(3), **problem_kwargs):
tail_list = create_ecoli_tail(np.zeros(3), **problem_kwargs)
tail_obj = sf.StokesFlowObj()
tail_obj.set_name('tail_obj')
tail_obj.combine(tail_list)
tail_obj.node_rotation(np.array((0, 1, 0)), theta)
tail_obj.node_rotation(np.array((0, 0, 1)), phi)
tail_obj.node_rotation(tail_obj.get_u_geo().get_geo_norm(), psi_tail)
tail_obj.move(now_center)
return tail_obj
def get_tail_nodes_split_at(theta, phi, psi_tail, now_center=np.zeros(3), **problem_kwargs):
tail_list = create_ecoli_tail(np.zeros(3), **problem_kwargs)
tail_obj = sf.StokesFlowObj()
tail_obj.set_name('tail_obj')
tail_obj.combine(tail_list)
tail_obj.node_rotation(np.array((0, 1, 0)), theta)
tail_obj.node_rotation(np.array((0, 0, 1)), phi)
tail_obj.node_rotation(tail_obj.get_u_geo().get_geo_norm(), psi_tail)
tail_obj.move(now_center)
n_tail = problem_kwargs['n_tail']
t0 = np.split(tail_obj.get_u_nodes(), 2 * n_tail)
t1 = np.vstack(t0[1::2])
t2 = np.vstack(t0[0::2])
return t1, t2
def get_tail_at(theta, phi, psi_tail, now_center=np.zeros(3), **problem_kwargs):
tail_list = create_ecoli_tail(np.zeros(3), **problem_kwargs)
tail_obj = sf.StokesFlowObj()
tail_obj.set_name('tail_obj')
tail_obj.combine(tail_list)
tail_obj.node_rotation(np.array((0, 1, 0)), theta)
tail_obj.node_rotation(np.array((0, 0, 1)), phi)
tail_obj.node_rotation(tail_obj.get_u_geo().get_geo_norm(), psi_tail)
tail_obj.move(now_center)
return [tail_obj.get_u_nodes(), ]
def createEcoli_ellipse(name='...', **kwargs):
ch = kwargs['ch']
ph = kwargs['ph']
ds = kwargs['ds']
rs1 = kwargs['rs1']
rs2 = kwargs['rs2']
es = kwargs['es']
# sphere_rotation = kwargs['sphere_rotation'] if 'sphere_rotation' in kwargs.keys() else 0
zoom_factor = kwargs['zoom_factor'] if 'zoom_factor' in kwargs.keys() else 1
dist_hs = kwargs['dist_hs']
center = kwargs['center']
matrix_method = kwargs['matrix_method']
lh = ph * ch # length of helix
movesz = 0.5 * (dist_hs - 2 * rs1 + lh) + rs1
movehz = 0.5 * (dist_hs + 2 * rs1 - lh) + lh / 2
moves = np.array((0, 0, movesz)) + center # move distance of sphere
moveh = np.array((0, 0, -movehz)) + center # move distance of helix
objtype = sf.obj_dic[matrix_method]
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires es==0. '
assert np.isclose(es, 0), err_msg
# create tail
tail_list = create_ecoli_tail(moveh, **kwargs)
# create head
vsgeo = ellipse_base_geo() # velocity node geo of sphere
vsgeo.create_delta(ds, rs1, rs2)
vsgeo.set_geo_norm(vsgeo.get_geo_norm() * -1)
vsgeo.node_rotation(norm=np.array((0, 1, 0)), theta=np.pi / 2)
fsgeo = vsgeo.copy() # force node geo of sphere
fsgeo.node_zoom(1 + ds / (0.5 * (rs1 + rs2)) * es)
vsobj = objtype()
vsobj.set_data(fsgeo, vsgeo, name='sphere_0')
vsobj.zoom(zoom_factor)
vsobj.move(moves * zoom_factor)
return vsobj, tail_list
def createEcoli_2tails(name='...', **kwargs):
ch = kwargs['ch']
ph = kwargs['ph']
ds = kwargs['ds']
rs1 = kwargs['rs1']
rs2 = kwargs['rs2']
es = kwargs['es']
# sphere_rotation = kwargs['sphere_rotation'] if 'sphere_rotation' in kwargs.keys() else 0
zoom_factor = kwargs['zoom_factor'] if 'zoom_factor' in kwargs.keys() else 1
dist_hs = kwargs['dist_hs']
center = kwargs['center']
matrix_method = kwargs['matrix_method']
lh = ph * ch # length of helix
objtype = sf.obj_dic[matrix_method]
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires es==0. '
assert np.isclose(es, 0), err_msg
# create tail
movez = np.array((0, 0, rs1 + dist_hs + lh / 2))
tkwargs = kwargs.copy()
tkwargs['left_hand'] = False
tail_list1 = create_ecoli_tail(-movez, **tkwargs)
tkwargs['left_hand'] = True
tail_list2 = create_ecoli_tail(movez, **tkwargs)
# create head
vsgeo = ellipse_base_geo() # velocity node geo of sphere
vsgeo.create_delta(ds, rs1, rs2)
vsgeo.node_rotation(norm=np.array((0, 1, 0)), theta=-np.pi / 2)
fsgeo = vsgeo.copy() # force node geo of sphere
fsgeo.node_zoom(1 + ds / (0.5 * (rs1 + rs2)) * es)
vsobj = objtype()
vsobj.set_data(fsgeo, vsgeo, name='sphere_0')
vsobj.zoom(zoom_factor)
return vsobj, tail_list1, tail_list2
def createEcoliComp_ellipse(name='...', **kwargs):
vsobj, tail_list = createEcoli_ellipse(name=name, **kwargs)
vsgeo = vsobj.get_u_geo()
center = kwargs['center']
rel_Us = kwargs['rel_Us']
rel_Uh = kwargs['rel_Uh']
ecoli_comp = sf.ForceFreeComposite(center=center.copy(), norm=vsgeo.get_geo_norm().copy(),
name=name)
ecoli_comp.add_obj(vsobj, rel_U=rel_Us)
for ti in tail_list:
ecoli_comp.add_obj(ti, rel_U=rel_Uh)
rot_norm = kwargs['rot_norm']
rot_theta = kwargs['rot_theta'] * np.pi
ecoli_comp.node_rotation(norm=rot_norm.copy(), theta=rot_theta, rotation_origin=center.copy())
return ecoli_comp
def createEcoli_tunnel(**kwargs):
ch = kwargs['ch']
rh1 = kwargs['rh1']
rh2 = kwargs['rh2']
ph = kwargs['ph']
ds = kwargs['ds']
rs1 = kwargs['rs1']
rs2 = kwargs['rs2']
ls = kwargs['ls']
es = kwargs['es']
# sphere_rotation = kwargs['sphere_rotation'] if 'sphere_rotation' in kwargs.keys() else 0
zoom_factor = kwargs['zoom_factor']
dist_hs = kwargs['dist_hs']
center = kwargs['center']
rT1 = kwargs['rT1']
rT2 = kwargs['rT2']
ntT = kwargs['ntT']
eT = kwargs['eT']
Tfct = kwargs['Tfct']
matrix_method = kwargs['matrix_method']
lh = ph * ch # length of helix
movesz = 0.5 * (dist_hs - ls + lh) + ls / 2
movehz = -1 * (0.5 * (dist_hs + ls - lh) + lh / 2)
# movesz = (ls + dist_hs) / 2
# movehz = (lh + dist_hs) / 2
moves = np.array((0, 0, movesz)) + center # move distance of sphere
moveh = np.array((rT1 - rh1, 0, movehz)) + center # move distance of helix
lT = (rT1 + rh2) * 2
objtype = sf.obj_dic[matrix_method]
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires es==0. '
assert np.isclose(es, 0), err_msg
err_msg = 'the regularized family methods requires eT==0. '
assert np.isclose(eT, 0), err_msg
# create helix
tail_list = create_ecoli_tail(moveh, **kwargs)
# create head
vsobj = objtype()
node_dof = vsobj.get_n_unknown()
vsgeo = create_capsule(rs1, rs2, ls, ds, node_dof)
fsgeo = vsgeo.copy() # force node geo of sphere
fsgeo.node_zoom(1 + ds / (0.5 * (rs1 + rs2)) * es)
fsgeo.node_zoom_z(1 - ds / (0.5 * (rs1 + rs2)) * es)
vsobj.set_data(fsgeo, vsgeo, name='sphere_0')
vsobj.zoom(zoom_factor)
vsobj.move(moves * zoom_factor)
# create T shape
dtT = 2 * np.pi / ntT
vTobj = objtype()
node_dof = vTobj.get_n_unknown()
# # dbg
# OptDB = PETSc.Options( )
# factor = OptDB.getReal('dbg_move_factor', 1)
# PETSc.Sys.Print('--------------------> DBG: dbg_move_factor = %f' % factor)
# moveT = np.array((0, 0, moveh[-1] + lh / 2 + rh2 * factor))
moveT = np.array((0, 0, movehz + lh / 2)) + center
vTgeo = tunnel_geo()
if 'dualPotential' in matrix_method:
vTgeo.set_check_epsilon(False)
vTgeo.set_dof(node_dof)
fTgeo = vTgeo.create_deltatheta(dth=dtT, radius=rT2, factor=Tfct, length=lT, epsilon=eT,
with_cover=1)
vTobj.set_data(fTgeo, vTgeo, name='T_shape_0')
theta = -np.pi / 2
vTobj.node_rotation(norm=np.array((0, 1, 0)), theta=theta)
vTobj.zoom(zoom_factor)
vTobj.move(moveT * zoom_factor)
theta = np.pi / 4 - ch * np.pi
vsobj.node_rotation(norm=np.array((0, 0, 1)), theta=theta, rotation_origin=center)
for ti in tail_list:
ti.node_rotation(norm=np.array((0, 0, 1)), theta=theta, rotation_origin=center)
vTobj.node_rotation(norm=np.array((0, 0, 1)), theta=theta, rotation_origin=center)
return vsobj, tail_list, vTobj
def createEcoliComp_tunnel(name='...', **kwargs):
with_T_geo = kwargs['with_T_geo'] if 'with_T_geo' in kwargs.keys() else 0
center = kwargs['center']
rel_Us = kwargs['rel_Us']
rel_Uh = kwargs['rel_Uh']
if not with_T_geo:
kwargs['rT1'] = kwargs['rh1']
vsobj, tail_list, vTobj = createEcoli_tunnel(**kwargs)
ecoli_comp = sf.ForceFreeComposite(center, norm=vsobj.get_u_geo().get_geo_norm(), name=name)
ecoli_comp.add_obj(vsobj, rel_U=rel_Us)
for ti in tail_list:
ecoli_comp.add_obj(ti, rel_U=rel_Uh)
if with_T_geo:
ecoli_comp.add_obj(vTobj, rel_U=rel_Uh)
return ecoli_comp
def create_ecoli_2part(**problem_kwargs):
# create an ecoli composed of two parts: a head and a tail.
rel_Us = problem_kwargs['rel_Us']
rel_Uh = problem_kwargs['rel_Uh']
center = problem_kwargs['center']
update_order = problem_kwargs['update_order'] if 'update_order' in problem_kwargs.keys() else 1
update_fun = problem_kwargs['update_fun'] if 'update_fun' in problem_kwargs.keys() \
else Adams_Bashforth_Methods
with_T_geo = problem_kwargs['with_T_geo']
err_msg = 'currently, do not support with_T_geo for this kind of ecoli. '
assert not with_T_geo, err_msg
head_obj, tail_obj_list = createEcoli_ellipse(name='ecoli0', **problem_kwargs)
head_obj.set_name('head_obj')
tail_obj = sf.StokesFlowObj()
tail_obj.set_name('tail_obj')
tail_obj.combine(tail_obj_list)
head_geo = head_obj.get_u_geo()
# ecoli_comp = sf.ForceFreeComposite(center=head_geo.get_center(), norm=head_geo.get_geo_norm(), name='ecoli_0')
ecoli_comp = sf.ForceFreeComposite(center=center, norm=head_geo.get_geo_norm(), name='ecoli_0')
ecoli_comp.add_obj(obj=head_obj, rel_U=rel_Us)
ecoli_comp.add_obj(obj=tail_obj, rel_U=rel_Uh)
ecoli_comp.set_update_para(fix_x=False, fix_y=False, fix_z=False,
update_fun=update_fun, update_order=update_order)
return ecoli_comp
def create_rotlets_tail_2part(rotlet_strength=0, **problem_kwargs):
# create a swimmer with an infinitesimally small head (the limit is a rotlet) and tail(s).
ch = problem_kwargs['ch']
ph = problem_kwargs['ph']
dist_hs = problem_kwargs['dist_hs']
lh = ph * ch # length of helix
with_T_geo = problem_kwargs['with_T_geo']
err_msg = 'currently, do not support with_T_geo for this kind of ecoli. '
assert not with_T_geo, err_msg
tail_list = create_ecoli_tail(np.zeros(3), **problem_kwargs)
tail_obj0 = sf.StokesFlowObj()
tail_obj0.combine(tail_list)
tail_obj = sf.FundSoltObj()
tail_obj.set_data(tail_obj0.get_u_geo(), tail_obj0.get_f_geo(), name='rotlets_tail_obj')
location = np.array((0, 0, lh / 2 + dist_hs))
tnorm = tail_obj0.get_u_geo().get_geo_norm()
torque = tnorm * rotlet_strength
tail_obj.add_point_force(location=location, force=torque,
StokesletsHandle=light_rotlets_matrix_3d)
givenT = np.hstack((np.zeros(3), -1 * torque))
ecoli_comp = sf.GivenForceComposite(center=np.zeros(3), norm=tnorm,
name='rotlets_tail_comp', givenF=givenT)
ecoli_comp.add_obj(obj=tail_obj, rel_U=np.zeros(6))
update_order = problem_kwargs['update_order'] \
if 'update_order' in problem_kwargs.keys() \
else 1
update_fun = problem_kwargs['update_fun'] \
if 'update_fun' in problem_kwargs.keys() \
else Adams_Bashforth_Methods
ecoli_comp.set_update_para(fix_x=False, fix_y=False, fix_z=False,
update_fun=update_fun, update_order=update_order)
return ecoli_comp
def create_ecoli_2part_at(theta, phi, psi_tail, now_center=np.zeros(3), **problem_kwargs):
ti = problem_kwargs['ti'] if 'ti' in problem_kwargs.keys() else 0
omega_tail = problem_kwargs['omega_tail'] if 'omega_tail' in problem_kwargs.keys() else 0
ecoli_comp = create_ecoli_2part(**problem_kwargs)
ecoli_comp.node_rotation(np.array((0, 1, 0)), theta)
ecoli_comp.node_rotation(np.array((0, 0, 1)), phi)
head_obj = ecoli_comp.get_obj_list()[0]
tail_obj = ecoli_comp.get_obj_list()[1]
head_obj.node_rotation(head_obj.get_u_geo().get_geo_norm(), psi_tail - omega_tail * ti)
tail_obj.node_rotation(tail_obj.get_u_geo().get_geo_norm(), psi_tail)
ecoli_comp.move(now_center)
return ecoli_comp
def get_ecoli_nodes_2part_at(*args, **kwargs):
ecoli_comp = create_ecoli_2part_at(*args, **kwargs)
return [i0.get_u_geo().get_nodes() for i0 in ecoli_comp.get_obj_list()]
def get_ecoli_nodes_split_at(theta, phi, psi_tail, now_center=np.zeros(3), **problem_kwargs):
n_tail = problem_kwargs['n_tail']
ti = problem_kwargs['ti'] if 'ti' in problem_kwargs.keys() else 0
omega_tail = problem_kwargs['omega_tail'] if 'omega_tail' in problem_kwargs.keys() else 0
ecoli_comp = create_ecoli_2part(**problem_kwargs)
ecoli_comp.node_rotation(np.array((0, 1, 0)), theta)
ecoli_comp.node_rotation(np.array((0, 0, 1)), phi)
head_obj = ecoli_comp.get_obj_list()[0]
tail_obj = ecoli_comp.get_obj_list()[1]
head_obj.node_rotation(head_obj.get_u_geo().get_geo_norm(), psi_tail - omega_tail * ti)
tail_obj.node_rotation(tail_obj.get_u_geo().get_geo_norm(), psi_tail)
ecoli_comp.move(now_center)
t0 = np.split(tail_obj.get_u_nodes(), 2 * n_tail)
t1 = np.vstack(t0[1::2])
t2 = np.vstack(t0[0::2])
t3 = ecoli_comp.get_obj_list()[0].get_u_nodes()
return t1, t2, t3
def get_ellipsoid_at(theta, phi, psi_tail, now_center=np.zeros(3), **problem_kwargs):
ds = problem_kwargs['ds']
rs1 = problem_kwargs['rs1']
rs2 = problem_kwargs['rs2']
vsgeo = ellipse_base_geo()
vsgeo.create_delta(ds, rs1, rs2)
vsgeo.set_geo_norm(vsgeo.get_geo_norm() * -1)
vsgeo.node_rotation(norm=np.array((0, 1, 0)), theta=np.pi / 2)
vsgeo.node_rotation(np.array((0, 1, 0)), theta)
vsgeo.node_rotation(np.array((0, 0, 1)), phi)
vsgeo.node_rotation(vsgeo.get_geo_norm(), psi_tail)
vsgeo.move(now_center - vsgeo.get_center())
return [vsgeo.get_nodes(), ]
def create_ecoli_dualTail(**problem_kwargs):
# create a swimmer with two tails, one at each end: one left-handed and one right-handed.
# the swimmer contains three parts: the head, the upper tail and the lower tail.
rel_Us = problem_kwargs['rel_Us']
rel_Uh = problem_kwargs['rel_Uh']
update_order = problem_kwargs['update_order'] if 'update_order' in problem_kwargs.keys() else 1
update_fun = problem_kwargs['update_fun'] if 'update_fun' in problem_kwargs.keys() \
else Adams_Bashforth_Methods
with_T_geo = problem_kwargs['with_T_geo']
err_msg = 'currently, do not support with_T_geo for this kind of ecoli. '
assert not with_T_geo, err_msg
head_obj, tail_obj_l1, tail_obj_l2 = createEcoli_2tails(name='ecoli0', **problem_kwargs)
head_obj.set_name('head_obj')
tail_obj1 = sf.StokesFlowObj()
tail_obj1.set_name('tail_obj1')
tail_obj1.combine(tail_obj_l1)
tail_obj2 = sf.StokesFlowObj()
tail_obj2.set_name('tail_obj2')
tail_obj2.combine(tail_obj_l2)
head_geo = head_obj.get_u_geo()
tnorm = head_geo.get_geo_norm()
ecoli_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=tnorm, name='ecoli_0')
ecoli_comp.add_obj(obj=head_obj, rel_U=rel_Us)
ecoli_comp.add_obj(obj=tail_obj1, rel_U=rel_Uh)
ecoli_comp.add_obj(obj=tail_obj2, rel_U=-rel_Uh)
ecoli_comp.set_update_para(fix_x=False, fix_y=False, fix_z=False,
update_fun=update_fun, update_order=update_order)
return ecoli_comp
def create_ecoli_dualTail_at(theta, phi, psi_tail1, psi_tail2, center=np.zeros(3),
**problem_kwargs):
assert 1 == 2
ecoli_comp = create_ecoli_dualTail(**problem_kwargs)
# ecoli_comp.node_rotation(np.array((0, 1, 0)), theta)
# ecoli_comp.node_rotation(np.array((0, 0, 1)), phi)
# tail_obj1 = ecoli_comp.get_obj_list()[1]
# tail_obj1.node_rotation(tail_obj1.get_u_geo().get_geo_norm(), psi_tail1)
# tail_obj2 = ecoli_comp.get_obj_list()[2]
# tail_obj2.node_rotation(tail_obj2.get_u_geo().get_geo_norm(), psi_tail2)
return ecoli_comp
def create_sphere(namehandle='sphereObj', **kwargs):
matrix_method = kwargs['matrix_method']
rs = kwargs['rs']
sphere_velocity = kwargs['sphere_velocity']
ds = kwargs['ds']
es = kwargs['es']
sphere_coord = kwargs['sphere_coord']
objtype = sf.obj_dic[matrix_method]
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires es==0. '
assert np.isclose(es, 0), err_msg
obj_sphere = objtype()
sphere_geo0 = sphere_geo() # force geo
sphere_geo0.set_dof(obj_sphere.get_n_unknown())
sphere_geo0.create_delta(ds, rs)
sphere_geo0.set_rigid_velocity([0, 0, 0, 0, 0, 0])
sphere_geo1 = sphere_geo0.copy()
if 'pf' in matrix_method:
sphere_geo1.node_zoom((rs + ds * es) / rs)
obj_sphere.set_data(sphere_geo1, sphere_geo0)
obj_list = []
for i0, (t_coord, t_velocity) in enumerate(zip(sphere_coord, sphere_velocity)):
obj2 = obj_sphere.copy()
obj2.set_name('%s_%d' % (namehandle, i0))
obj2.move(t_coord)
obj2.get_u_geo().set_rigid_velocity(t_velocity)
obj_list.append(obj2)
return obj_list
def create_one_ellipse(namehandle='ellipseObj', **kwargs):
matrix_method = kwargs['matrix_method']
rs1 = kwargs['rs1']
rs2 = kwargs['rs2']
sphere_velocity = kwargs['sphere_velocity']
ds = kwargs['ds']
es = kwargs['es']
sphere_coord = kwargs['sphere_coord']
objtype = sf.obj_dic[matrix_method]
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires es==0. '
assert np.isclose(es, 0), err_msg
obj_sphere = objtype() # type: sf.StokesFlowObj
sphere_geo0 = ellipse_base_geo() # force geo
sphere_geo0.set_dof(obj_sphere.get_n_unknown())
sphere_geo0.create_delta(ds, rs1, rs2)
sphere_geo0.set_rigid_velocity(sphere_velocity)
sphere_geo1 = sphere_geo0.copy()
if 'pf' in matrix_method:
sphere_geo1.node_zoom(1 + ds / (0.5 * (rs1 + rs2)) * es)
obj_sphere.set_data(sphere_geo1, sphere_geo0, name=namehandle)
obj_sphere.move(sphere_coord)
return obj_sphere
def create_one_ellipse_v2(namehandle='ellipseObj', **kwargs):
matrix_method = kwargs['matrix_method']
ellipse_rs1 = kwargs['ellipse_rs1']
ellipse_rs2 = kwargs['ellipse_rs2']
ellipse_rs3 = kwargs['ellipse_rs3']
ellipse_velocity = kwargs['ellipse_velocity']
ellipse_ds = kwargs['ellipse_ds']
ellipse_es = kwargs['ellipse_es']
ellipse_center = kwargs['ellipse_center']
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires ellipse_es==0. '
assert np.isclose(ellipse_es, 0), err_msg
obj_ellipse = sf.obj_dic[matrix_method]() # type: sf.StokesFlowObj
sphere_geo0 = ellipse_3d_geo() # force geo
sphere_geo0.set_dof(obj_ellipse.get_n_unknown())
sphere_geo0.create_delta(ellipse_ds, ellipse_rs1, ellipse_rs2, ellipse_rs3)
sphere_geo0.set_rigid_velocity(ellipse_velocity)
sphere_geo1 = sphere_geo0.copy()
if 'pf' in matrix_method:
t1 = np.mean((ellipse_rs1, ellipse_rs2, ellipse_rs3))
sphere_geo1.node_zoom(1 + ellipse_ds / t1 * ellipse_es)
obj_ellipse.set_data(sphere_geo1, sphere_geo0, name=namehandle)
obj_ellipse.move(ellipse_center)
return obj_ellipse
def create_move_single_sphere(namehandle='sphereObj', **kwargs):
movez = kwargs['movez']
obj_sphere = create_sphere(namehandle, **kwargs)[0]
displacement = np.array((0, 0, movez))
obj_sphere.move(displacement)
obj_list = (obj_sphere,)
return obj_list
def create_rod(namehandle='rod_obj', **problem_kwargs):
rRod = problem_kwargs['rRod']
lRod = problem_kwargs['lRod']
ntRod = problem_kwargs['ntRod']
eRod = problem_kwargs['eRod']
Rodfct = problem_kwargs['Rodfct']
RodThe = problem_kwargs['RodThe']
RodPhi = problem_kwargs['RodPhi']
rel_URod = problem_kwargs['rel_URod']
RodCenter = problem_kwargs['RodCenter']
zoom_factor = problem_kwargs['zoom_factor']
givenF = problem_kwargs['givenF']
matrix_method = problem_kwargs['matrix_method']
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires eRod==0. '
assert np.isclose(eRod, 0), err_msg
dth = 2 * np.pi / ntRod
rod_geo = tunnel_geo()
rod_geo.create_deltatheta(dth=dth, radius=rRod, length=lRod, epsilon=eRod,
with_cover=1, factor=Rodfct, left_hand=False)
# first displace the rod above the surface, rotate to horizon.
rod_geo.move(displacement=RodCenter)
rod_geo.node_zoom(factor=zoom_factor, zoom_origin=RodCenter)
norm = np.array((0, 1, 0))
theta = -np.pi / 2
rod_geo.node_rotation(norm=norm, theta=theta, rotation_origin=RodCenter)
# then the rod is rotated by an angle theta within a specified plane; that plane starts
# parallel to the XY plane (the wall) and is tilted by an angle phi.
norm = np.array((0, np.sin(RodPhi), np.cos(RodPhi)))
rod_geo.node_rotation(norm=norm, theta=-RodThe, rotation_origin=RodCenter)
rod_obj = sf.obj_dic[matrix_method]()
name = namehandle + '_obj_0'
rod_obj.set_data(f_geo=rod_geo, u_geo=rod_geo, name=name)
name = namehandle + '_0'
rod_comp = sf.GivenForceComposite(center=RodCenter, name=name, givenF=givenF.copy())
rod_comp.add_obj(obj=rod_obj, rel_U=rel_URod)
rod_list = (rod_comp,)
return rod_list
def create_infHelix(namehandle='infhelix', normalize=False, **problem_kwargs):
n_tail = problem_kwargs['n_tail']
eh = problem_kwargs['eh']
ch = problem_kwargs['ch']
rh1 = problem_kwargs['rh1']
rh2 = problem_kwargs['rh2']
ph = problem_kwargs['ph']
nth = problem_kwargs['nth']
zoom_factor = problem_kwargs['zoom_factor']
matrix_method = problem_kwargs['matrix_method']
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires eh==0. '
assert np.isclose(eh, 0), err_msg
if normalize:
rh2 = rh2 * zoom_factor
ph = ph * zoom_factor
rh1 = rh1 * zoom_factor
helix_list = []
for i0, theta0 in enumerate(np.linspace(0, 2 * np.pi, n_tail, endpoint=False)):
infhelix_ugeo = infHelix()
infhelix_ugeo.create_n(rh1, rh2, ph, ch, nth, theta0=theta0)
infhelix_fgeo = infhelix_ugeo.create_fgeo(epsilon=eh)
infhelix_obj = sf.StokesFlowObj()
infhelix_obj.set_data(f_geo=infhelix_fgeo, u_geo=infhelix_ugeo,
name=namehandle + '%02d' % i0)
helix_list.append(infhelix_obj)
return helix_list
def create_helicoid_list(namehandle='helicoid', **problem_kwargs):
r1 = problem_kwargs['helicoid_r1']
r2 = problem_kwargs['helicoid_r2']
ds = problem_kwargs['helicoid_ds']
th_loc = problem_kwargs['helicoid_th_loc']
ndsk_each = problem_kwargs['helicoid_ndsk_each']
matrix_method = problem_kwargs['matrix_method']
assert matrix_method in ('rs', 'lg_rs')
assert ndsk_each == 4
tgeo = regularizeDisk()
tgeo.create_ds(ds, r2)
tgeo.node_rotation(norm=np.array([1, 0, 0]), theta=th_loc)
tgeo.move(np.array((r1, 0, 0)))
# tgeo.show_nodes()
tgeo_list = []
rot_dth = 2 * np.pi / ndsk_each
for i0 in range(ndsk_each):
rot_th = i0 * rot_dth + np.pi / 4
# rot_th = i0 * rot_dth
tgeo21 = tgeo.copy()
tgeo21.node_rotation(norm=np.array([0, 0, 1]), theta=rot_th, rotation_origin=np.zeros(3))
tgeo22 = tgeo21.copy()
tgeo_list.append(tgeo21)
tgeo22.node_rotation(norm=np.array([1, 0, 0]), theta=np.pi / 2, rotation_origin=np.zeros(3))
tgeo23 = tgeo21.copy()
tgeo_list.append(tgeo22)
tgeo23.node_rotation(norm=np.array([0, 1, 0]), theta=np.pi / 2, rotation_origin=np.zeros(3))
tgeo_list.append(tgeo23)
# tgeo3 = base_geo()
# tgeo3.combine(tgeo_list)
# tgeo3.show_nodes(linestyle='')
tobj_list = []
for i0, tgeo in enumerate(tgeo_list):
tobj = sf.StokesFlowObj()
tobj.set_matrix_method(matrix_method) # the geo is regularizeDisk
tobj.set_data(f_geo=tgeo, u_geo=tgeo, name=namehandle + '%02d' % i0)
tobj_list.append(tobj)
return tobj_list
def create_helicoid_comp(*args, **kwargs):
update_order = kwargs['update_order'] if 'update_order' in kwargs.keys() else 1
update_fun = kwargs['update_fun'] if 'update_fun' in kwargs.keys() else Adams_Bashforth_Methods
helicoid_list = create_helicoid_list(*args, **kwargs)
helicoid_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=np.array((0, 0, 1)),
name='helicoid_comp')
for tobj in helicoid_list:
# print(tobj)
helicoid_comp.add_obj(obj=tobj, rel_U=np.zeros(6))
helicoid_comp.set_update_para(fix_x=False, fix_y=False, fix_z=False,
update_fun=update_fun, update_order=update_order)
return helicoid_comp
def obj2helicoid_list(tobj0, **problem_kwargs):
# assert 1 == 2
helicoid_r = problem_kwargs['helicoid_r']
ndsk_each = problem_kwargs['helicoid_ndsk_each']
assert ndsk_each == 4
tobj = tobj0.copy()
tobj.move(np.array((helicoid_r, 0, 0)))
tobj_list = []
rot_dth = 2 * np.pi / ndsk_each
namehandle = tobj.get_name()
for i0 in range(ndsk_each):
rot_th = i0 * rot_dth + np.pi / 4
# rot_th = i0 * rot_dth
tobj21 = tobj.copy()
tobj21.set_name('%s_%02d_%01d' % (namehandle, i0, 1))
tobj21.node_rotation(norm=np.array([0, 0, 1]), theta=rot_th, rotation_origin=np.zeros(3))
tobj_list.append(tobj21)
tobj22 = tobj21.copy()
tobj22.set_name('%s_%02d_%01d' % (namehandle, i0, 2))
tobj22.node_rotation(norm=np.array([1, 0, 0]), theta=np.pi / 2, rotation_origin=np.zeros(3))
tobj_list.append(tobj22)
tobj23 = tobj21.copy()
tobj23.set_name('%s_%02d_%01d' % (namehandle, i0, 3))
tobj23.node_rotation(norm=np.array([0, 1, 0]), theta=np.pi / 2, rotation_origin=np.zeros(3))
tobj_list.append(tobj23)
return tobj_list
def obj2helicoid_list_v2(tobj0, **problem_kwargs):
helicoid_r = problem_kwargs['helicoid_r']
ndsk_each = problem_kwargs['helicoid_ndsk_each']
assert ndsk_each == 4
helicoid_th0 = problem_kwargs['helicoid_th0'] if 'helicoid_th0' in problem_kwargs.keys() else 0
assert np.isclose(np.linalg.norm(tobj0.get_u_geo().get_center()), 0)
namehandle = tobj0.get_name()
t1 = helicoid_r / np.sqrt(2)
tobj0.move((t1, t1, 0))
tobj1 = tobj0.copy()
tobj1.node_rotation(np.array((1, 0, 0)), np.pi / 2, rotation_origin=np.zeros(3))
tobj2 = tobj0.copy()
tobj2.node_rotation(np.array((1, 0, 0)), -np.pi / 2, rotation_origin=np.zeros(3))
tobj_list = []
rot_dth = 2 * np.pi / ndsk_each
for i0 in range(ndsk_each):
rot_th = i0 * rot_dth + helicoid_th0
for i1, tobji in enumerate((tobj0, tobj1, tobj2)):
tobji_i0 = tobji.copy()
tobji_i0.set_name('%s_%02d_%01d' % (namehandle, i0, i1))
tobji_i0.node_rotation(np.array((0, 0, 1)), rot_th, rotation_origin=np.zeros(3))
tobj_list.append(tobji_i0)
return tobj_list
def obj2helicoid_list_v3(tobj, **problem_kwargs):
helicoid_r = problem_kwargs['helicoid_r']
ndsk_each = problem_kwargs['helicoid_ndsk_each']
assert ndsk_each == 4
helicoid_th0 = problem_kwargs['helicoid_th0'] if 'helicoid_th0' in problem_kwargs.keys() else 0
# assert np.isclose(np.linalg.norm(tobj.get_u_geo().get_center()), 0)
namehandle = tobj.get_name()
rot_dth = 2 * np.pi / ndsk_each
tobj.move((helicoid_r, 0, 0))
tobj0 = tobj.copy()
tobj0.node_rotation(np.array((0, 0, 1)), -rot_dth / 2, rotation_origin=np.zeros(3))
tobj1 = tobj.copy()
tobj1.node_rotation(np.array((1, 0, 0)), -np.pi / 2, rotation_origin=np.zeros(3))
tobj1.node_rotation(np.array((0, 1, 0)), rot_dth / 2, rotation_origin=np.zeros(3))
tobj2 = tobj.copy()
tobj2.node_rotation(np.array((1, 0, 0)), -np.pi / 2, rotation_origin=np.zeros(3))
tobj2.node_rotation(np.array((0, 1, 0)), -rot_dth / 2, rotation_origin=np.zeros(3))
# # dbg
# dbg_obj = sf.StokesFlowObj()
# dbg_obj.combine((tobj0, tobj1, tobj2))
# dbg_obj.show_u_nodes()
# assert 1 == 2
#
tobj_list = []
for i0 in range(ndsk_each):
rot_th = i0 * rot_dth + helicoid_th0
for i1, tobji in enumerate((tobj0, tobj1, tobj2)):
tobji_i0 = tobji.copy()
tobji_i0.set_name('%s_%02d_%01d' % (namehandle, i0, i1))
tobji_i0.node_rotation(np.array((0, 0, 1)), rot_th, rotation_origin=np.zeros(3))
tobj_list.append(tobji_i0)
return tobj_list
def obj2helicoid_list_selfRotate(tobj, **problem_kwargs):
helicoid_r = problem_kwargs['helicoid_r']
ndsk_each = problem_kwargs['helicoid_ndsk_each']
assert ndsk_each == 4
# helicoid_th0 = problem_kwargs['helicoid_th0'] if 'helicoid_th0' in problem_kwargs.keys() else 0
assert np.isclose(np.linalg.norm(tobj.get_u_geo().get_center()), 0)
# namehandle = tobj.get_name()
rot_dth = 2 * np.pi / ndsk_each
tobj.move((helicoid_r, 0, 0))
tobj0 = tobj.copy()
tobj0.node_rotation(np.array((0, 0, 1)), -rot_dth / 2, rotation_origin=np.zeros(3))
tobj1 = tobj.copy()
tobj1.node_rotation(np.array((1, 0, 0)), -np.pi / 2, rotation_origin=np.zeros(3))
tobj1.node_rotation(np.array((0, 1, 0)), rot_dth / 2, rotation_origin=np.zeros(3))
tobj2 = tobj.copy()
tobj2.node_rotation(np.array((1, 0, 0)), -np.pi / 2, rotation_origin=np.zeros(3))
tobj2.node_rotation(np.array((0, 1, 0)), -rot_dth / 2, rotation_origin=np.zeros(3))
tobj_list = [tobj0, tobj1, tobj2]
return tobj_list
def obj2helicoid_comp(tobj0, **kwargs):
update_order = kwargs['update_order'] if 'update_order' in kwargs.keys() else 1
update_fun = kwargs['update_fun'] if 'update_fun' in kwargs.keys() else Adams_Bashforth_Methods
# helicoid_list = obj2helicoid_list(tobj0, *args, **kwargs)
helicoid_list = obj2helicoid_list_v3(tobj0, **kwargs)
helicoid_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=np.array((0, 0, 1)),
name='helicoid_comp')
for tobj in helicoid_list:
helicoid_comp.add_obj(obj=tobj, rel_U=np.zeros(6))
helicoid_comp.set_update_para(fix_x=False, fix_y=False, fix_z=False,
update_fun=update_fun, update_order=update_order)
return helicoid_comp
def obj2helicoid_comp_selfRotate(tobj0, **kwargs):
update_order = kwargs['update_order'] if 'update_order' in kwargs.keys() else 1
update_fun = kwargs['update_fun'] if 'update_fun' in kwargs.keys() else Adams_Bashforth_Methods
# helicoid_list = obj2helicoid_list(tobj0, *args, **kwargs)
helicoid_list = obj2helicoid_list_selfRotate(tobj0, **kwargs)
helicoid_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=np.array((0, 0, 1)),
name='helicoid_comp')
for tobj in helicoid_list:
helicoid_comp.add_obj(obj=tobj, rel_U=np.zeros(6))
helicoid_comp.set_update_para(fix_x=False, fix_y=False, fix_z=False,
update_fun=update_fun, update_order=update_order)
return helicoid_comp
def creat_dumb_obj(name='helicoid_dumb', **problem_kwargs):
matrix_method = problem_kwargs['matrix_method']
dumb_d = problem_kwargs['dumb_d']
dumb_theta = problem_kwargs['dumb_theta']
ds = problem_kwargs['ds']
rs = problem_kwargs['rs']
es = problem_kwargs['es']
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires es==0. '
assert np.isclose(es, 0), err_msg
sphere_geo0 = sphere_geo()
sphere_geo0.create_delta(ds, rs)
sphere_geo0f = sphere_geo0.copy()
sphere_geo0f.node_zoom(1 + ds * es / rs)
sphere_geo1 = sphere_geo0.copy()
sphere_geo1f = sphere_geo0f.copy()
sphere_geo0.move(np.array((0, 0, dumb_d / 2)))
sphere_geo1.move(np.array((0, 0, -dumb_d / 2)))
sphere_geo0f.move(np.array((0, 0, dumb_d / 2)))
sphere_geo1f.move(np.array((0, 0, -dumb_d / 2)))
dumb_geo = base_geo()
dumb_geo.combine([sphere_geo0, sphere_geo1], origin=np.zeros(3), geo_norm=np.array((0, 0, 1)))
dumb_geo.node_rotation(norm=np.array((1, 0, 0)), theta=dumb_theta)
dumb_geof = base_geo()
dumb_geof.combine([sphere_geo0f, sphere_geo1f], origin=np.zeros(3),
geo_norm=np.array((0, 0, 1)))
dumb_geof.node_rotation(norm=np.array((1, 0, 0)), theta=dumb_theta)
tobj = sf.obj_dic[matrix_method]()
tobj.set_data(dumb_geof, dumb_geo, name=name)
return tobj
def creat_dumb_obj_v2(name='helicoid_dumb', **problem_kwargs):
matrix_method = problem_kwargs['matrix_method']
dumb_d = problem_kwargs['dumb_d']
dumb_theta = problem_kwargs['dumb_theta']
ds = problem_kwargs['ds']
rs = problem_kwargs['rs']
es = problem_kwargs['es']
if 'rs' in matrix_method:
err_msg = 'the regularized family methods requires es==0. '
assert np.isclose(es, 0), err_msg
sphere_geo0 = sphere_geo()
sphere_geo0.create_delta(ds, rs)
sphere_geo0f = sphere_geo0.copy()
sphere_geo0f.node_zoom(1 + ds * es / rs)
sphere_geo1 = sphere_geo0.copy()
sphere_geo1f = sphere_geo0f.copy()
# sphere_geo0.move(np.array((0, 0, dumb_d / 2)))
# sphere_geo1.move(np.array((0, 0, -dumb_d / 2)))
# sphere_geo0f.move(np.array((0, 0, dumb_d / 2)))
# sphere_geo1f.move(np.array((0, 0, -dumb_d / 2)))
# dumb_geo = base_geo()
# dumb_geo.combine([sphere_geo0, sphere_geo1], origin=np.zeros(3), geo_norm=np.array((0, 0, 1)))
# dumb_geo.node_rotation(norm=np.array((1, 0, 0)), theta=dumb_theta)
# dumb_geof = base_geo()
# dumb_geof.combine([sphere_geo0f, sphere_geo1f], origin=np.zeros(3),
# geo_norm=np.array((0, 0, 1)))
# dumb_geof.node_rotation(norm=np.array((1, 0, 0)), theta=dumb_theta)
tobj0 = sf.obj_dic[matrix_method]()
tobj0.set_data(sphere_geo0f, sphere_geo0, name='%s_0' % name)
tobj0.move(np.array((0, 0, dumb_d / 2)))
tobj0.node_rotation(norm=np.array((1, 0, 0)), theta=dumb_theta, rotation_origin=np.zeros(3))
# tobj0.get_u_geo().set_center(np.zeros(3))
# tobj0.get_f_geo().set_center(np.zeros(3))
# tobj0.show_u_nodes()
tobj1 = sf.obj_dic[matrix_method]()
tobj1.set_data(sphere_geo1f, sphere_geo1, name='%s_1' % name)
tobj1.move(np.array((0, 0, -dumb_d / 2)))
tobj1.node_rotation(norm=np.array((1, 0, 0)), theta=dumb_theta, rotation_origin=np.zeros(3))
# tobj1.get_u_geo().set_center(np.zeros(3))
# tobj1.get_f_geo().set_center(np.zeros(3))
return tobj0, tobj1
def creat_helicoid_dumb(**problem_kwargs):
tobj = creat_dumb_obj(**problem_kwargs)
helicoid_comp = obj2helicoid_comp(tobj, **problem_kwargs)
return helicoid_comp
def creat_helicoid_dumb_v2(**problem_kwargs):
update_order = problem_kwargs['update_order'] if 'update_order' in problem_kwargs.keys() \
else 1
update_fun = problem_kwargs['update_fun'] if 'update_fun' in problem_kwargs.keys() \
else Adams_Bashforth_Methods
tobj0, tobj1 = creat_dumb_obj_v2(**problem_kwargs)
helicoid_comp0 = obj2helicoid_comp(tobj0, **problem_kwargs)
helicoid_comp1 = obj2helicoid_comp(tobj1, **problem_kwargs)
# helicoid_comp0.show_u_nodes()
# helicoid_comp1.show_u_nodes()
helicoid_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=np.array((0, 0, 1)),
name='helicoid_comp')
for tobj in helicoid_comp0.get_obj_list():
helicoid_comp.add_obj(obj=tobj, rel_U=np.zeros(6))
for tobj in helicoid_comp1.get_obj_list():
helicoid_comp.add_obj(obj=tobj, rel_U=np.zeros(6))
helicoid_comp.set_update_para(fix_x=False, fix_y=False, fix_z=False,
update_fun=update_fun, update_order=update_order)
return helicoid_comp
def creat_helicoid_dumb_selfRotate(**problem_kwargs):
tobj = creat_dumb_obj(**problem_kwargs)
helicoid_comp = obj2helicoid_comp_selfRotate(tobj, **problem_kwargs)
return helicoid_comp
|
StarcoderdataPython
|
6569922
|
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import tensorflow as tf
import numpy as np
import gpflow
from dmbrl.misc.DotmapUtils import get_required_argument
class TFGP:
def __init__(self, params):
"""Initializes class instance.
Arguments:
params
.name (str): Model name
.kernel_class (class): Kernel class
.kernel_args (args): Kernel args
.num_inducing_points (int): Number of inducing points
.sess (tf.Session): Tensorflow session
"""
self.name = params.get("name", "GP")
self.kernel_class = get_required_argument(params, "kernel_class", "Must provide kernel class.")
self.kernel_args = params.get("kernel_args", {})
self.num_inducing_points = get_required_argument(
params, "num_inducing_points", "Must provide number of inducing points."
)
if params.get("sess", None) is None:
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
self._sess = tf.Session(config=config)
else:
self._sess = params.get("sess")
with self._sess.as_default():
with tf.variable_scope(self.name):
output_dim = self.kernel_args["output_dim"]
del self.kernel_args["output_dim"]
self.model = gpflow.models.SGPR(
np.zeros([1, self.kernel_args["input_dim"]]),
np.zeros([1, output_dim]),
kern=self.kernel_class(**self.kernel_args),
Z=np.zeros([self.num_inducing_points, self.kernel_args["input_dim"]])
)
self.model.initialize()
@property
def is_probabilistic(self):
return True
@property
def sess(self):
return self._sess
@property
def is_tf_model(self):
return True
def train(self, inputs, targets,
*args, **kwargs):
"""Optimizes the parameters of the internal GP model.
Arguments:
inputs: (np.ndarray) An array of inputs.
targets: (np.ndarray) An array of targets.
num_restarts: (int) The number of times that the optimization of
the GP will be restarted to obtain a good set of parameters.
Returns: None.
"""
perm = np.random.permutation(inputs.shape[0])
inputs, targets = inputs[perm], targets[perm]
Z = np.copy(inputs[:self.num_inducing_points])
if Z.shape[0] < self.num_inducing_points:
Z = np.concatenate([Z, np.zeros([self.num_inducing_points - Z.shape[0], Z.shape[1]])])
self.model.X = inputs
self.model.Y = targets
self.model.feature.Z = Z
with self.sess.as_default():
self.model.compile()
print("Optimizing model... ", end="")
gpflow.train.ScipyOptimizer().minimize(self.model)
print("Done.")
def predict(self, inputs, *args, **kwargs):
"""Returns the predictions of this model on inputs.
Arguments:
inputs: (np.ndarray) The inputs on which predictions will be returned.
ign_var: (bool) If True, only returns the mean prediction
Returns: (np.ndarrays) The mean and variance of the model on the new points.
"""
if self.model is None:
raise RuntimeError("Cannot make predictions without initial batch of data.")
with self.sess.as_default():
mean, var = self.model.predict_y(inputs)
return mean, var
def create_prediction_tensors(self, inputs, *args, **kwargs):
""
if self.model is None:
raise RuntimeError("Cannot make predictions without initial batch of data.")
inputs = tf.cast(inputs, tf.float64)
mean, var = self.model._build_predict(inputs, full_cov=False)
return tf.cast(mean, dtype=tf.float32), tf.cast(var, tf.float32)
def save(self, *args, **kwargs):
pass
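# A minimal, hypothetical construction sketch (commented out). The kernel choice and the
# dimensions below are assumptions for illustration only; they simply show the params
# fields read by __init__ above (name, kernel_class, kernel_args with input_dim and
# output_dim, num_inducing_points, optional sess) under the gpflow 1.x API used here.
#
# from dotmap import DotMap
# params = DotMap(
#     name="GP",
#     kernel_class=gpflow.kernels.RBF,
#     kernel_args={"input_dim": 4, "output_dim": 2},
#     num_inducing_points=50,
# )
# model = TFGP(params)
# model.train(train_inputs, train_targets)   # np.ndarrays of shape [N, 4] and [N, 2]
# mean, var = model.predict(test_inputs)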
|
StarcoderdataPython
|
9636920
|
import numpy as np
import os.path
def IPF(row_totals, col_totals, seed_values=None, random_fill=False):
"""
Take two inputs: row and column totals.
The inputs must be NumPy Arrays whose sums are equal.
Optional input: seed_values (CSV only).
Option: random_fill.
"""
# Initial conditions
assert type(row_totals) == np.ndarray
assert type(col_totals) == np.ndarray
assert sum(row_totals) == sum(col_totals)
assert random_fill in (True, False)
# Initialize the matrix
if seed_values is not None:
    assert os.path.isfile(seed_values)
    matrix = np.loadtxt(seed_values, delimiter=',')
    assert matrix.shape[0] == len(row_totals)
    assert matrix.shape[1] == len(col_totals)
else:
    if not random_fill:
        matrix = np.ones((len(row_totals), len(col_totals)))
    else:
        matrix = np.random.rand(len(row_totals), len(col_totals))
# Row update
row_scalars = matrix.sum(axis=1) / row_totals
matrix = (matrix.T / row_scalars).T
# Column update
col_scalars = matrix.sum(axis=0) / col_totals
matrix = (matrix / col_scalars)
# Return
return matrix
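if __name__ == '__main__':
    # Minimal usage sketch with made-up totals (assumed values, not from any real dataset).
    # A single call performs one row pass and one column pass, exactly as implemented above;
    # with the uniform seed this already reproduces both margins for this 2x2 example.
    rows = np.array([40.0, 60.0])
    cols = np.array([30.0, 70.0])
    fitted = IPF(rows, cols)
    print(fitted)               # expected: [[12. 28.] [18. 42.]]
    print(fitted.sum(axis=1))   # matches rows
    print(fitted.sum(axis=0))   # matches cols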
|
StarcoderdataPython
|
5143227
|
<gh_stars>1-10
"""Kata url: https://www.codewars.com/kata/55cb632c1a5d7b3ad0000145."""
def hoop_count(n: int) -> str:
return (
"Keep at it until you get it"
if n < 10 else "Great, now move on to tricks"
)
|
StarcoderdataPython
|
11361333
|
from discord.ext import commands
from .permissions import connected_to_channel
from .universal import execute_in_storage
def attach_unrecognized(bot, storage):
"""Add commands to regulate unsorted players."""
@bot.group(pass_context=True)
@commands.check_any(connected_to_channel(storage))
async def unrecognized(ctx):
"""Set alert level for unrecognized players."""
if ctx.invoked_subcommand is None:
await ctx.send('Invalid sub command passed...')
@unrecognized.command(name='alert', pass_context=True)
@execute_in_storage(storage)
async def unrecognized_alert(ctx, *args):
pass
return bot
|
StarcoderdataPython
|
4937405
|
import random
import logging
from pathlib import Path
from cdeid.utils.resources import PACKAGE_NAME
logger = logging.getLogger(PACKAGE_NAME)
def load_data(data_file):
data = open(data_file).readlines()
return data
def pass_bio_checking(lines):
for i, line in enumerate(lines):
if ('-DOCSTART-' in line) or line.strip() == '':
continue
# the NER tag in the last column
ner_tag = line.split()[-1]
if not (ner_tag.startswith('B-') or ner_tag.startswith('I-') or ner_tag.startswith('O')):
logger.error('Input file does not have correct BIO format at line {}.'.format(i))
return False
return True
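# Example (hypothetical lines): "John B-PER" passes the check above because its last column
# starts with "B-", while "John PER" would be reported as a BIO-format error.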
def parse_documents(content):
docs = []
doc = []
line = []
# convert doc to a list of lines
for item in content:
if '-DOCSTART-' in item:
if len(doc) != 0:
docs.append(doc)
doc = []
continue
if item == '' or item == '\n':
if len(line) != 0:
doc.append(line)
line = []
continue
line.append(item)
if len(doc) != 0:
docs.append(doc)
return docs
# proportion is a multiplier such as 1.0, 1.5, 2.0 or 4.0.
# If there are 10 text lines with PHI, a proportion of 1.0 samples 10 * 1.0 lines without PHI.
def sample_data_lines(docs, proportion, output_file, file_name):
all_lines = [line for doc in docs for line in doc]
lines_with_phi = [line for line in all_lines if sum('B-' in token for token in line) > 0]
lines_without_phi = [line for line in all_lines if sum('B-' in token for token in line) == 0]
logger.info('PHI lines: {}, No PHI lines: {}'.format(len(lines_with_phi), len(lines_without_phi)))
random.Random(2020).shuffle(lines_without_phi)
selected_lines_without_phi = random.sample(lines_without_phi, int(len(lines_with_phi) * proportion))
all_lines = lines_with_phi + selected_lines_without_phi
random.Random(2020).shuffle(all_lines)
if not Path(output_file).exists():
Path(output_file).mkdir()
with open(Path(output_file) / file_name, 'w', newline='\n') as f:
for line in all_lines:
for token in line:
f.write(token)
f.write('\n')
logger.info('Data file {} created.'.format(Path(output_file) / file_name))
# sampled_doc = []
# for line in all_lines:
# sampled_doc += line
# sampled_doc += ['\n']
return len(all_lines), len(lines_with_phi)
|
StarcoderdataPython
|
111885
|
<gh_stars>0
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Profile(models.Model):
image = models.ImageField(default = 'default.jpg',upload_to='images/')
bio = models.CharField(max_length=150)
email = models.EmailField()
phone_number = models.CharField(max_length = 10)
user = models.OneToOneField(User, on_delete=models.CASCADE,blank=True, related_name='profile')
def __str__(self):
return self.user.username
class Meta:
ordering =['bio']
def save_profile(self):
self.save()
def update_profile(self):
self.update()
def delete_profile(self):
self.delete()
class Project(models.Model):
title = models.CharField(max_length =30)
image = models.ImageField(default = 'default.jpg',upload_to='images/')
description = models.CharField(max_length =200)
link = models.URLField(blank=True, null=True)
ratings = models.IntegerField(blank=True, null=True)
profile = models.ForeignKey(User, on_delete=models.CASCADE)
pub_date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.title
class Meta:
ordering =['pub_date']
def save_project(self):
self.save()
def delete_project(self):
self.delete()
@classmethod
def get_projects(cls):
projects = cls.objects.all()
return projects
@classmethod
def search_project(cls, repos):
project = cls.objects.filter(title__icontains=repos)
return project
@classmethod
def filter_by_user_id(cls,user_id):
projects = Project.objects.filter(profile=user_id)
return projects
@classmethod
def filter_by_title(cls, title):
    projects = Project.objects.filter(title=title)
    return projects
|
StarcoderdataPython
|
5052450
|
<gh_stars>100-1000
"""
##################################################################################################
# Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved.
# Filename : text_spotting_dataset.py
# Abstract : Implementation of text spotting dataset evaluation.
# Current Version: 1.0.0
# Date : 2020-05-31
##################################################################################################
"""
from mmdet.datasets.builder import DATASETS
from mmcv.utils import print_log
from davarocr.davar_common.datasets.davar_custom import DavarCustomDataset
from davarocr.davar_spotting.core.evaluation.e2e_hmean import evaluate_method
@DATASETS.register_module()
class TextSpotDataset(DavarCustomDataset):
""" The format is the same as DavarCustomDataset. """
def __init__(self,
ann_file,
pipeline,
data_root=None,
img_prefix='',
seg_prefix=None,
proposal_file=None,
test_mode=False,
filter_empty_gt=False,
classes_config=None,
):
"""
Args:
ann_file (str): the path to datalist.
pipeline (list(dict)): the data-flow handling pipeline
data_root (str): the root path of the dataset
img_prefix (str): the image prefixes
seg_prefix (str): the segmentation maps prefixes
proposal_file (str): the path to the preset proposal files.
test_mode (boolean): whether in test mode
filter_empty_gt (boolean): whether to filter out image without ground-truthes.
classes_config (str): the path to classes config file, used to transfer 'str' labels into 'int'
"""
super().__init__(ann_file, pipeline, data_root, img_prefix, seg_prefix, proposal_file, test_mode,
filter_empty_gt, classes_config)
self.ignore = "###"
self.eval_func_params = {
"IOU_CONSTRAINT": 0.5, # IOU threshold for pred v.s. gt matching
"AREA_PRECISION_CONSTRAINT": 0.5, # IOU threshold for pred v.s. not-cared gt matching
"WORD_SPOTTING": True, # If it is True, words that not in dictionary will be not considered
"MIN_LENGTH_CARE_WORD": 3, # The words are shorter in length (<3) will be not considered
"SPECIAL_CHARACTERS": "[]+-#$()@=_!?,:;/.%&\'\">*|<`{~}^\ ",
"ONLY_REMOVE_FIRST_LAST_CHARACTER": True # Whether to only remove the first&last character
}
def evaluate(self,
results,
metric="hmean",
logger=None,
**eval_kwargs):
"""
Main process of evaluation.
Args:
results (list(dict)): formatted inference results,
e.g., [{'points': [x1, y2, ..., xn,yn],
'confidence':[1, 0,8,...],
'texts':['apple','banana',...]},{},{},...]
metric (str | list(str)): default "e2e_hmean"
logger (obj): obj to print/ write logs
**eval_kwargs: evaluation parameters, which stored in
eval_kwargs['eval_func_params']= dict(
"IOU_CONSTRAINT": 0.5 (default),
"AREA_PRECISION_CONSTRAINT": 0.5 (default),
"CONFIDENCES": FAlSE (default)).
Returns:
dict: evaluation results, e.g.,
dict(
"precision": 0.9,
"recall": 0.9,
"hmean": 0.9,
)
"""
if not isinstance(metric, str):
assert "hmean" in metric
else:
assert metric == "hmean"
assert len(results) == len(self)
eval_func_params = eval_kwargs["eval_func_params"]
if eval_func_params is not None and isinstance(eval_func_params, dict):
if "IOU_CONSTRAINT" in eval_func_params:
self.eval_func_params["IOU_CONSTRAINT"] = eval_func_params["IOU_CONSTRAINT"]
if "AREA_PRECISION_CONSTRAINT" in eval_func_params:
self.eval_func_params["AREA_PRECISION_CONSTRAINT"] = eval_func_params["AREA_PRECISION_CONSTRAINT"]
if "WORD_SPOTTING" in eval_func_params:
self.eval_func_params["WORD_SPOTTING"] = eval_func_params["WORD_SPOTTING"]
if "MIN_LENGTH_CARE_WORD" in eval_func_params:
self.eval_func_params["MIN_LENGTH_CARE_WORD"] = eval_func_params["MIN_LENGTH_CARE_WORD"]
if "SPECIAL_CHARACTERS" in eval_func_params:
self.eval_func_params["SPECIAL_CHARACTERS"] = eval_func_params["SPECIAL_CHARACTERS"]
if "ONLY_REMOVE_FIRST_LAST_CHARACTER" in eval_func_params:
self.eval_func_params["ONLY_REMOVE_FIRST_LAST_CHARACTER"] = eval_func_params[
"ONLY_REMOVE_FIRST_LAST_CHARACTER"]
print("\nDo {} evaluation with iou constraint {}...".format(
"Word Spotting" if self.eval_func_params["WORD_SPOTTING"] else "End-to-End",
self.eval_func_params["IOU_CONSTRAINT"]))
det_results = []
gt_results = []
output = {}
for i in range(len(self)):
ann = self.get_ann_info(i)
det_result = results[i]
assert 'points' in det_result
# Prepare predictions
formated_det_result = dict()
formated_det_result['points'] = det_result['points']
formated_det_result['confidence'] = det_result['confidence'] if 'confidence' in det_result \
else [1.0] * len(det_result['points'])
formated_det_result["texts"] = det_result['texts'] if 'texts' in det_result \
else ["*"] * len(det_result['points'])
# Prepare ground truth
formated_gt_result = dict()
gt_polys = ann.get('bboxes', [])
gt_texts = ann.get('texts', [])
cares = ann.get('cares', [1] * len(gt_polys))
assert len(cares) == len(gt_texts) == len(gt_polys)
gt_trans = [(gt_texts[i] if care == 1 else self.ignore) for i, care in enumerate(cares)]
formated_gt_result['gt_bboxes'] = gt_polys
formated_gt_result['gt_texts'] = gt_trans
det_results.append(formated_det_result)
gt_results.append(formated_gt_result)
evaluate_result = evaluate_method(det_results, gt_results, self.eval_func_params)
output['precision'] = evaluate_result['summary']['spot_precision']
output['recall'] = evaluate_result['summary']['spot_recall']
output['hmean'] = evaluate_result['summary']['spot_hmean']
print("Finish evaluation !")
print_log("Detection evaluation results: Precision: {}, Recall: {}, hmean: {}".
format(evaluate_result['summary']['det_precision'],
evaluate_result['summary']['det_recall'],
evaluate_result['summary']['det_hmean']), logger=logger)
print_log("Spotting evaluation results: Precision: {}, Recall: {}, hmean: {}".
format(evaluate_result['summary']['spot_precision'],
evaluate_result['summary']['spot_recall'],
evaluate_result['summary']['spot_hmean']), logger=logger)
return output
|
StarcoderdataPython
|
11352797
|
import os
import click
import shutil
import slackviewer
from jinja2 import \
Environment, \
PackageLoader, \
select_autoescape
from slackviewer.archive import \
extract_archive, \
get_users, \
get_channels, \
compile_channels
def envvar(name, default):
"""Create callable environment variable getter
:param str name: Name of environment variable
:param default: Default value to return in case it isn't defined
"""
return lambda: os.environ.get(name, default)
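# Example: envvar('SEV_ARCHIVE', '') returns a zero-argument callable; invoking it yields the
# current value of the SEV_ARCHIVE environment variable, or '' when it is unset. click
# evaluates such callable defaults lazily when the options below are parsed.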
def flag_ennvar(name):
return os.environ.get(name) == '1'
@click.command()
@click.option("-z", "--archive", type=click.Path(), required=True,
default=envvar('SEV_ARCHIVE', ''),
help="Path to your Slack export archive (.zip file or directory)")
@click.option("-d", "--destination", type=click.Path(), required=False,
default=envvar('SEV_DESTINATION', ''),
help="Path to export your archive web files to")
def main(archive, destination):
if not archive:
raise ValueError("Empty path provided for archive")
arch_path = extract_archive(archive)
user_data = get_users(arch_path)
channel_data = get_channels(arch_path)
channels = compile_channels(arch_path, user_data, channel_data)
path = os.path.join(os.path.split(arch_path)[0], 'archive-webview')
if destination:
path = destination
if not os.path.isdir(path):
os.makedirs(path)
css_src = os.path.join(slackviewer.__path__[0], "templates/viewer.css")
css_des = os.path.join(path, 'viewer.css')
shutil.copy2(css_src, css_des)
env = Environment(loader = PackageLoader('slackviewer', 'templates'),
autoescape=select_autoescape(['html', 'xml']))
template = env.get_template('viewer.html')
for name in sorted(channels):
page = template.render(messages=channels[name],
channels=sorted(channels.keys()),
name=name)
channel_file = "{}.html".format(os.path.join(path, name))
with open(channel_file, "w") as file:
file.write(page.encode('ascii', 'ignore'))
print ("Finished creating web files for archive")
|
StarcoderdataPython
|
6699026
|
<filename>test/generate_tokens/test_return.py
from src.token.token import Token
from test.util import add_token, wrap_with_main
def test_return():
test_str = wrap_with_main("int a = 5;return a;")
tokens = add_token(test_str)
token = tokens[-1]
assert isinstance(token, Token)
assert token.eval({'a': 5}) == 5
|
StarcoderdataPython
|
222127
|
<filename>web_assembly/_table.py
from __future__ import annotations
from typing import List, Union, Any
from dataclasses import dataclass, field
from utils import dbgprint, errprint
from web_assembly import ConstValue, Type
@dataclass
class FunRef:
__fidx: int = field(repr=False)
__original: Union[None, FunRef] = field(init=False, repr=False, default= None)
@property
def fidx(self) -> int:
return self.__fidx
@fidx.setter
def fidx(self, nidx) -> None:
if self.__original is None:
self.__original = self.copy()
self.__fidx = nidx
@property
def original(self) -> Union[None, FunRef]:
return self.__original
@property
def modified(self) -> bool:
return self.__original is not None
def get_latest(self) -> FunRef:
v = self.copy()
if not self.modified:
return v
self.fidx = self.__original.fidx
self.__original = None
return v
def copy(self) -> FunRef:
return FunRef(self.fidx)
def __repr__(self) -> str:
return f'FunRef(fidx={self.fidx})'
#TODO support remove or add elements?
class Table:
def __init__(self, _min: Union[None, int], _max: Union[int, None] , elements: List[FunRef]):
self.__min = _min
self.__max = _max
self.__elements = elements
self.__updates = {}
@property
def min(self) -> Union[int, None]:
return self.__min
@property
def max(self) -> Union[int, None]:
return self.__max
@property
def elements(self) -> List[FunRef]:
return self.__elements
@property
def modified(self) -> bool:
return any(e.modified for e in self.elements)
# return len(self.__updates) > 0
def copy(self) -> Table:
_elems= [ e.copy() for e in self.elements]
return Table(self.min, self.max, _elems)
def get_update(self, module:Any) -> Union[None, Table]:
if not self.modified:
return None
_elems = []
for e in self.elements:
if e.modified:
org_fidx = e.original.fidx
new_fidx = e.fidx
type1 = module.functions[org_fidx].signature
type2 = module.functions[new_fidx].signature
if not same_signature(type1, type2):
raise ValueError(f'invalid table element: signature change is unallowed. From {type1} to {type2}')
_elems.append(e.get_latest())
return Table(self.min, self.max, _elems)
def __getitem__(self, key: Any) -> FunRef:
if not isinstance(key, int):
raise ValueError(f'key error')
return self.__elements[key]
def __setitem__(self, key: Any, val: FunRef) -> None:
if not isinstance(key, int):
raise ValueError("incorrect key for table assignment")
v = val if isinstance(val, FunRef) else FunRef(val)
self.__updates[key] = v
def __len__(self) -> int:
return len(self.elements)
def as_dict(self):
d = {
'max': self.__max,
'elements': self.__elements
}
return d
def __str__(self):
d = {
'max': self.__max,
'elements': self.__elements
}
return str(d)
def __repr__(self):
return f'{self.as_dict()}'
def to_json(self) -> dict:
_json = { 'elements': [e.fidx for e in self.elements] }
if self.min is not None:
_json['min'] = self.min
if self.max is not None:
_json['max'] = self.max
return _json
@staticmethod
def from_json(_json: dict) -> Table:
_refs = [ FunRef(e) for e in _json['elements']]
return Table(_json.get('min', None), _json.get('max', None), _refs)
class Tables:
def __init__(self, tbls: List[Table]) -> None:
self.__tables = tbls
def __getitem__(self, key: Any) -> FunRef:
if not isinstance(key, int):
raise ValueError(f'key error')
return self.__tables[key]
def same_signature(type1: Type, type2: Type) -> bool:
dbgprint(f"comparing {type1} with {type2}")
if len(type1.parameters) != len(type2.parameters):
return False
for e1,e2 in zip(type1.parameters, type2.parameters):
if e1 != e2:
return False
if type1.results is None:
if type2.results is None:
return True
return False
elif type2.results is None:
return False
#both are lists
if len(type1.results) != len(type2.results):
return False
for r1,r2 in zip(type1.results, type2.results):
if r1 != r2:
return False
return True
|
StarcoderdataPython
|
6482439
|
"""CLI to run commands on MGS server."""
from sys import stderr
import click
from .utils import add_authorization
@click.group()
def run():
"""Run actions on the server."""
pass
@run.group()
def middleware():
"""Run middleware."""
pass
@middleware.command(name='group')
@add_authorization()
@click.argument('group_uuid')
def group_middleware(uploader, group_uuid):
"""Run middleware for a group."""
response = uploader.knex.post(f'/api/v1/sample_groups/{group_uuid}/middleware', {})
click.echo(response)
@middleware.command(name='sample')
@add_authorization()
@click.argument('sample_name')
def sample_middleware(uploader, sample_name):
"""Run middleware for a sample."""
response = uploader.knex.get(f'/api/v1/samples/getid/{sample_name}')
sample_uuid = response['data']['sample_uuid']
print(f'{sample_name} :: {sample_uuid}', file=stderr)
response = uploader.knex.post(f'/api/v1/samples/{sample_uuid}/middleware', {})
click.echo(response)
|
StarcoderdataPython
|
6704851
|
<filename>crafters/image/AlbumentationsCrafter/tests/test_albumentationscrafter.py<gh_stars>0
__copyright__ = "Copyright (c) 2021 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
import cv2
import numpy as np
import pytest
from jina.executors import BaseExecutor
from jina.executors.metas import get_default_metas
from .. import AlbumentationsCrafter as AC
@pytest.fixture(scope='module')
def test_img():
return cv2.imread('tests/rubi.png')[:, :, ::-1]
@pytest.fixture
def flip_img():
return cv2.imread('tests/rubi_flip.png')[:, :, ::-1]
@pytest.fixture
def crop_img():
return cv2.imread('tests/rubi_crop.png')[:, :, ::-1]
@pytest.fixture
def center_crop_img():
return cv2.imread('tests/rubi_center_crop.png')[:, :, ::-1]
@pytest.fixture
def resize_img():
return cv2.imread('tests/rubi_resize.png')[:, :, ::-1]
def test_normalize_transform(test_img):
transform = {
'Normalize': dict(mean=(0, 0, 0), std=(1, 1, 1), max_pixel_value=255)
}
crafter = AC([transform])
crafted_img = crafter.craft(test_img)
np.testing.assert_almost_equal(test_img / 255, crafted_img)
def test_flip_transform(test_img, flip_img):
crafter = AC(['VerticalFlip'])
crafted_img = crafter.craft(test_img)
np.testing.assert_almost_equal(flip_img, crafted_img)
def test_crop_transform(test_img, crop_img):
transform = {'Crop': dict(x_min=0, y_min=0, x_max=106, y_max=172)}
crafter = AC([transform])
crafted_img = crafter.craft(test_img)
np.testing.assert_almost_equal(crop_img, crafted_img)
def test_center_crop_transform(test_img, center_crop_img):
transform = {'CenterCrop': dict(height=100, width=100)}
crafter = AC([transform])
crafted_img = crafter.craft(test_img)
np.testing.assert_almost_equal(center_crop_img, crafted_img)
def test_resize_transform(test_img, resize_img):
transform = {'Resize': dict(height=100, width=200)}
crafter = AC([transform])
crafted_img = crafter.craft(test_img)
np.testing.assert_almost_equal(resize_img, crafted_img)
def test_wrong_transforms():
# Transforms not a list
with pytest.raises(ValueError):
AC('VerticalFlip')
# Item in transforms not a dict/str
with pytest.raises(ValueError):
AC([['VerticalFlip']])
# Transform not existing
with pytest.raises(ValueError):
AC(['FakeTransform'])
# Wrong args for transform
with pytest.raises(ValueError):
AC([{'VerticalFlip': {'width': 100}}])
def test_save_load_config(tmp_path):
transforms = ['VerticalFlip', {'Resize': {'width': 200, 'height': 300}}]
metas = get_default_metas()
metas['workspace'] = str(tmp_path)
orig_crafter = AC(transforms, metas=metas)
orig_crafter.save_config()
orig_trs = orig_crafter.transforms._to_dict()
load_crafter1 = BaseExecutor.load_config('tests/config.yaml')
load_crafter2 = BaseExecutor.load_config(orig_crafter.config_abspath)
assert orig_trs == load_crafter1.transforms._to_dict()
assert orig_trs == load_crafter2.transforms._to_dict()
|
StarcoderdataPython
|
6555086
|
from toee import *
import _include
from co8Util import size
from co8Util.PersistentData import *
from co8Util.ObjHandling import *
RM_KEY = "Sp404_RighteousMight_Activelist"
def OnBeginSpellCast( spell ):
print "Righteous Might OnBeginSpellCast"
print "spell.target_list=", spell.target_list
print "spell.caster=", spell.caster, " caster.level= ", spell.caster_level
game.particles( "sp-transmutation-conjure", spell.caster )
def OnSpellEffect( spell ):
print "Righteous Might OnSpellEffect"
spell.duration = 1 * spell.caster_level
target_item = spell.target_list[0]
#size mod
## print "Size:" + str(target_item.obj.obj_get_int(obj_f_size))
## print "Reach:" + str(target_item.obj.obj_get_int(obj_f_critter_reach))
size.incSizeCategory(target_item.obj)
#save target_list
activeList = Co8PersistentData.getData(RM_KEY)
if isNone(activeList): activeList = []
activeList.append([spell.id, derefHandle(target_item.obj)])
Co8PersistentData.setData(RM_KEY, activeList)
## print "new Size:" + str(target_item.obj.obj_get_int(obj_f_size))
## print "new Reach:" + str(target_item.obj.obj_get_int(obj_f_critter_reach))
target_item.obj.condition_add_with_args( 'sp-Righteous Might', spell.id, spell.duration, 0 )
target_item.partsys_id = game.particles( 'sp-Righteous Might', target_item.obj )
def OnBeginRound( spell ):
print "Righteous Might OnBeginRound"
def OnEndSpellCast( spell ):
print "Righteous Might OnEndSpellCast"
##print "spell.target_list=", spell.target_list
##print "spell.id=", spell.id
#size mod
activeList = Co8PersistentData.getData(RM_KEY)
if isNone(activeList):
print "ERROR! Active RM spell without activeList!"
return
for entry in activeList:
spellID, target = entry
targetObj = refHandle(target)
#print "activeLIst Entry:" + str(spellID)
if spellID == spell.id:
#print "Size:" + str(targetObj.obj_get_int(obj_f_size))
#print "Reach:" + str(targetObj.obj_get_int(obj_f_critter_reach))
size.resetSizeCategory(targetObj)
#print "resetting reach on", targetObj
#print "new Size:" + str(targetObj.obj_get_int(obj_f_size))
#print "new Reach:" + str(targetObj.obj_get_int(obj_f_critter_reach))
activeList.remove(entry)
#no more active spells
if len(activeList) == 0:
Co8PersistentData.removeData(RM_KEY)
break
Co8PersistentData.setData(RM_KEY, activeList)
break
else: print "ERROR! Active RM spell without entry in activeList!"
|
StarcoderdataPython
|
3571493
|
<filename>assets/code/create_casing_connections_graph.py
import numpy as np
import networkx as nx
import import_from_tenaris_catalogue # our code from part 1
class Counter:
def __init__(self, start=0):
"""
A simple counter class for storing a number that you want to increment
by one.
"""
self.counter = start
def count(self):
"""
Calling this method increments the counter by one and returns what was
the current count.
"""
self.counter += 1
return self.counter - 1
def current(self):
"""
If you just want to know the current count, use this method.
"""
return self.counter
def make_graph(catalogue):
# Now we want to initiate our graph - we'll make the edges directional, i.e.
# they will go from large to small
graph = nx.DiGraph()
return graph
def add_node(
counter, graph, manufacturer, type, type_name, tags, data
):
"""
Function for adding a node with attributes to a graph.
Parameters
----------
counter: Counter object
graph: (Di)Graph object
manufacturer: str
The manufacturer of the item being added as a node.
type: str
The type of item being added as a node.
type_name: str
The (product) name of the item being added as a node.
tags: list of str
The attribute tags (headers) of the attributes of the item.
data: list or array of floats
The data for each of the attributes of the item.
Returns
-------
graph: (Di)Graph object
Returns the (Di)Graph object with the addition of a new node.
"""
# we'll first create a dictionary with all the node attributes
node = {
'manufacturer': manufacturer,
'type': type,
'name': type_name
}
# loop through the tags and data
for t, d in zip(tags, data):
node[t] = d
# get a UID and assign it
uid = counter.count()
node['uid'] = uid
# overwrite the size - in case it didn't import properly from the pdf
node['size'] = (
    node['pipe_body_inside_diameter']
    + 2 * node['pipe_body_wall_thickness']
)
# use the class method to add the node, unpacking the node dictionary
# and naming the node with its UID
graph.add_node(uid, **node)
return graph
def check_connection_clearance(graph, node1, node2, cut_off=0.7):
"""
Function for checking if one component will pass through another.
Parameters
----------
graph: (Di)Graph object
node1: dict
A dictionary of node attributes.
node2: dict
A dictionary of node attributes.
cut_off: 0 < float < 1
A ration of the nominal component size used as a filter, e.g. if set
to 0.7, if node 1 has a size of 5, only a node with a size greater than
3.5 will be considered.
Returns
-------
graph: (Di)Graph object
Graph with an edge added if node2 will drift node1, with a `clearance`
attribute indicating the clearance between the critical outer diameter
of node2 and the drift of node1.
"""
try:
node2_connection_od = node2['coupling_outside_diameter']
except KeyError:
node2_connection_od = node2['box_outside_diameter']
clearance = min(
node1['pipe_body_drift'],
node1['connection_inside_diameter']
) - max(
node2_connection_od,
node2['pipe_body_inside_diameter']
+ 2 * node2['pipe_body_wall_thickness']
)
if all((
clearance > 0,
node2['size'] / node1['size'] > cut_off
)):
graph.add_edge(node1['uid'], node2['uid'], **{'clearance': clearance})
return graph
def add_connection_edges(graph, cut_off=0.7):
"""
Function to add edges between connection components in a network graph.
Parameters
----------
graph: (Di)Graph object
cut_off: 0 < float < 1
A ratio of the nominal component size used as a filter, e.g. if set
to 0.7, if node 1 has a size of 5, only a node with a size greater than
3.5 will be considered.
Returns
-------
graph: (Di)Graph object
Graph with edges added for connections that can drift through other
connections.
"""
for node_outer in graph.nodes:
# check if the node is a casing connection
if graph.nodes[node_outer]['type'] != 'casing_connection':
continue
for node_inner in graph.nodes:
if graph.nodes[node_inner]['type'] != 'casing_connection':
continue
graph = check_connection_clearance(
graph,
graph.nodes[node_outer],
graph.nodes[node_inner],
cut_off
)
return graph
def main():
# this runs the code from part 1 (make sure it's in your working directory)
# and assigns the data to the variable catalogue
catalogue = import_from_tenaris_catalogue.main()
# initiate our counter and graph
counter = Counter()
graph = make_graph(catalogue)
# add the casing connections from our catalogue to the network graph
for product, data in catalogue.items():
for row in data['data']:
graph = add_node(
counter,
graph,
manufacturer='Tenaris',
type='casing_connection',
type_name=product,
tags=data['headers'],
data=row
)
# determine which connections fit through which and add the appropriate
# edges to the graph.
graph = add_connection_edges(graph)
return graph
if __name__ == '__main__':
graph = main()
# as a QAQC step we can use matplotlib to draw the edges between connected
# casing connections
import matplotlib
import matplotlib.pyplot as plt
matplotlib.use("TKAgg") # Ubuntu sometimes needs some help
nx.draw_networkx(graph, pos=nx.circular_layout(graph))
plt.show()
print("Done")
|
StarcoderdataPython
|
3200256
|
import json
import re
from openstackinabox.services.cinder.v1 import base
class CinderV1Volumes(base.CinderV1ServiceBase):
# Note: OpenStackInABox's Keystone Service doesn't have support
# right now for inserting the tenant-id into the URL when
# generating the service catalog. So the service URL here
# can't search for it
ALL_VOLUMES = re.compile(r'^/volumes$')
ALL_VOLUMES_DETAILED = re.compile(r'^/volumes/detail$')
SPECIFIC_VOLUME = re.compile(r'^/volumes/[\w-]+$')
def __init__(self, model, keystone_service):
super(CinderV1Volumes, self).__init__(
keystone_service,
'cinder/v1/volumes'
)
self.model = model
self.__handlers = [
{
'verb': base.CinderV1ServiceBase.POST,
'path': self.ALL_VOLUMES,
'handler': CinderV1Volumes.handle_create_volume
},
{
'verb': base.CinderV1ServiceBase.GET,
'path': self.ALL_VOLUMES,
'handler': CinderV1Volumes.handle_retrieve_volumes
},
{
'verb': base.CinderV1ServiceBase.PUT,
'path': self.SPECIFIC_VOLUME,
'handler': CinderV1Volumes.handle_update_volume
},
{
'verb': base.CinderV1ServiceBase.DELETE,
'path': self.SPECIFIC_VOLUME,
'handler': CinderV1Volumes.handle_delete_volume
},
{
'verb': base.CinderV1ServiceBase.GET,
'path': self.SPECIFIC_VOLUME,
'handler': CinderV1Volumes.get_subroute
# NOTE: There is a conflict between SPECIFIC_VOLUME and
# ALL_VOLUMES when it comes to the GET verb. Therefore
# a special sub-router is required to propery direct
# the request as StackInABox doesn't allow two registrations
# on the same VERB where the path may match.
}
]
for handler in self.__handlers:
self.register(
handler['verb'],
handler['path'],
handler['handler']
)
def get_subroute(self, request, uri, headers):
uri_parts = uri.split('/')
if uri_parts[-1] == 'detail':
return self.handle_retrieve_volumes_detailed(
request, uri, headers
)
else:
return self.handle_retrieve_volume_details(
request, uri, headers
)
def handle_create_volume(self, request, uri, headers):
# https://developer.rackspace.com/docs/cloud-block-storage/v1/
# api-reference/cbs-volumes-operations/#create-a-volume
return (500, headers, 'Not yet implemented')
def handle_retrieve_volumes(self, request, uri, headers):
# https://developer.rackspace.com/docs/cloud-block-storage/v1/
# api-reference/cbs-volumes-operations/#retrieve-volumes
return (500, headers, 'Not yet implemented')
def handle_retrieve_volumes_detailed(self, request, uri, headers):
# https://developer.rackspace.com/docs/cloud-block-storage/v1/
# api-reference/cbs-volumes-operations/#retrieve-volumes-detailed
return (500, headers, 'Not yet implemented')
def handle_update_volume(self, request, uri, headers):
# https://developer.rackspace.com/docs/cloud-block-storage/v1/
# api-reference/cbs-volumes-operations/#update-a-volume
return (500, headers, 'Not yet implemented')
def handle_delete_volume(self, request, uri, headers):
# https://developer.rackspace.com/docs/cloud-block-storage/v1/
# api-reference/cbs-volumes-operations/#delete-a-volume
return (500, headers, 'Not yet implemented')
def handle_retrieve_volume_details(self, request, uri, headers):
# https://developer.rackspace.com/docs/cloud-block-storage/v1/
# api-reference/cbs-volumes-operations/#retrieve-details-for-a-volume
req_headers = request.headers
self.log_request(uri, request)
# Validate the token in the request headers
# - if it's invalid for some reason a tuple is returned
# - if all is good, then a dict with the user information is returned
user_data = self.helper_authenticate(
req_headers, headers, False, False
)
# user_data will be a tuple in the case of 401/403 errors
if isinstance(user_data, tuple):
return user_data
# volume id in the URI, nothing in the body
result = self.SPECIFIC_VOLUME.match(uri)
if result and not uri.split('/')[-1].startswith('_'):
volume_id = uri.split('/')[-1]  # the regex has no capture group, so take the id from the URI
# TODO: Mapping Tenant-ID in URL per OpenStack API norms
# OpenStackInABox Keystone Service doesn't support the insert
# of the tenant-id into the URL so the URL can't be used to
# validate the tenant-id of the request.
#
# tenant_id = result.group(0)
# volume_id = result.group(1)
# if tenant_id != user_data['tenantid']:
# return (400, headers, 'Invalid client request')
# technically the data should be looked up in the CinderModel
# and a result returned accordingly; but right now the goal
# is a very specific test so for MVP just return the result
response_body = {
'volume': {
'attachments': [],
'availability_zone': 'nova',
'bootable': 'false',
'created_at': '',
'display_description': 'clone in error state',
'display_name': 'clone_test',
'id': volume_id,
'image_id': None,
'metadata': {},
'size': 100,
'snapshot_id': None,
'source_volid': volume_id, # self-referential
'status': 'error',
'volume_type': 'SATA',
}
}
return (200, headers, json.dumps(response_body))
else:
return (400, headers, 'Invalid client request')
|
StarcoderdataPython
|
1957652
|
<gh_stars>10-100
"""Main conftest"""
import pytest
from asyncio import get_event_loop_policy
from pydantic_odm.db import MongoDBManager
pytestmark = pytest.mark.asyncio
DATABASE_SETTING = {"default": {"NAME": "test_mongo", "PORT": 37017}}
@pytest.yield_fixture(scope="session")
def event_loop():
loop = get_event_loop_policy().new_event_loop()
yield loop
loop.close()
@pytest.fixture()
async def init_test_db(event_loop):
dbm = await MongoDBManager(DATABASE_SETTING, event_loop).init_connections()
yield dbm
for db in dbm.databases.values():
await db.client.drop_database(db)
|
StarcoderdataPython
|
186528
|
from ij import IJ
from ij.plugin.frame import RoiManager
from ij.gui import Roi
from ij.gui import Overlay
from ij import ImagePlus
from ij.plugin import ChannelSplitter
from ij import measure
from ij.plugin.filter import Analyzer
from ij.measure import ResultsTable
from ij import WindowManager
from java.io import File
import os
import random
import datetime
#this function returns a list of all duplicates within an array. It only returns redundant values; if a list has two copies of the same value, this returns only one of them.
def getDuplicates (an_array):
#initialize an array to store values in.
duplicates = []
#iterates across a copy of the passed array (so removals do not skip elements), checking if an instance appears more than once.
for x in list(an_array):
if an_array.count(x) > 1:
#if a duplicate is found, add it to the duplicates list and remove it from the original one.
duplicates.append(x)
an_array.remove(x)
return duplicates
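# Example (hypothetical values): getDuplicates([1, 2, 2, 3, 3]) returns [2, 3] and leaves the
# passed list as [1, 2, 3], i.e. exactly one copy of every value stays in place.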
#This is a specific function for selecting only objects which are handdrawn unique cells (as opposed to rectangular ROIs and duplicates)
def uniqueCells(an_array):
cells = []
for item in an_array:
#check if the items in the passed array are drawn
if str(item.getClass()) == "<type 'ij.gui.PolygonRoi'>":
cells.append(item)
#this call uses getDuplicates() purely as a cleaner, removing any duplicate values from the array without keeping the returned list. This ensures that if any overlays were accidentally duplicated, later steps will not run twice on them.
getDuplicates(cells)
return cells
def ROI_gen (file_path):
# Global Variables
# This variable sets the width in pixels of the randomly placed ROIs. The height will be half of this value in pixels
roi_width = 20
# This variable sets the number of random ROIs to be generated
num_roi = 20
# Initializes roi_area as zero to be filled in later
roi_area = 0
# Initialises a boolean that tracks whether the images have background spots
has_background = False
# Image opening
imp = IJ.openImage(file_path)
imp.show()
# Initial image handling
# Saves the active image to the variable imp
# Saves the title of the active image as title
title = imp.getTitle()
# Saves the overlay of the active image as ove
ove = ImagePlus.getOverlay(imp)
#Stores the overlay into an array file for iterating over
ove_array = ove.toArray()
#creates an array containing only one copy of each cell, excluding any duplicates or the background boxes. Do not create freehand background samples if you want this to work.
unique_cells = uniqueCells(ove_array)
######### Creating instances of ImageJ tools ##########
# Activates/Opens an instance of the ROI manager and clears it
rm = RoiManager.getInstance()
if not rm:
rm = RoiManager()
rm.reset()
# Initializes a Results Table instance to interface with the results table and clears it
rt = ResultsTable.getResultsTable();
if (rt==None):
rt = ResultsTable();
rt.reset()
# Initializes an Analyzer for imp
alz = Analyzer(imp)
# Turns all measurments off
alz.setMeasurement(1043199, False)
# Sets Area measurement
alz.setMeasurement(1, True)
# Sets Mean measurement
alz.setMeasurement(2, True)
# Stes INTEGRATED_DENSITY measurement
alz.setMeasurement(32768, True)
######### Maybe make a method to do this ##########
######### Dealing with Background Spots ##########
# Gets the total number of shapes in the overlay
shapes_num = ove.size()
# Initializes an empty list for storing dimensions of the shapes
width_height = []
# Gets size information from each shape in the overlay
for cell in unique_cells:
# Gets a bounding box for each shape
box = Roi.getBounds(cell)
# Multiplies the width and height values of that box
active_wh = box.width*box.height
# Stores the resulting value in the width_height list
width_height.append(active_wh)
#create a list of all existing duplicates in width_height
dupes_list = getDuplicates(width_height)
# If there are duplicates, the program deals with the uniformly sized background spots
if not len(dupes_list) == 0:
# Initializes a list for storing the background spots
background_spots = []
# This segment of code finds the non-unique w*h area value
# This will hold a set of prevously encountered w*h areas
seen = set()
# This variable will be set equal to the repeated w*h value once it is found
repeat = 0
# This loop checks each area value to see if it has been seen before. If it has it stores the value and terminates, otherwise it adds the value to the 'seen' set
for area in width_height:
if area in seen:
repeat = area
break
seen.add(area)
# This loop adds all of the ROIs with the same repeated area to a list and then removes them from the overlay
for i in range (0, len(width_height),1):
if width_height[i] == repeat:
background_spots.append(ove.get(i))
ove.remove(i)
# Sets the has_background boolean to true for later reference
has_background = True
######### Generating ROIs for each cell ##########
# Should I make separate files for each channel for background spots, whole cells, and subROIs? Probably... Interface with R?
# Should I append data together for all images in the same trial?
# Initializes an array in which to store all of the cell outlines
cells = []
# Initializes an array in which to store final, approved ROIs
good_rois = []
# Asks for the number of outlines still present in ove and sets this as the number of cells in the image
num_cells = ove.size()
# Makes a new empty overlay for array -> shape purposes
ove1 = Overlay()
#turn the overlay into an iterable array
oveArray = ove.toArray()
#clean up the overlay array to ensure you only iterate over truly unique values of the cells
unique_cells = uniqueCells(oveArray)
# Master loop for looping through cells in the image
for cell in unique_cells:
# Saves an outline from the overlay as an ROI
active_cell = cell
# Adds the active cell to the cells array
cells.append(active_cell)
# Adds ROI active_cell to ROI manager
rm.addRoi(active_cell)
# Saves the bounding rectangle of the Roi as bound
bound = Roi.getBounds(active_cell)
#!!!!!!!??????!!!!!!! PROBLEM AREA !!!!!!!???????!!!!!!!
rm.setSelectedIndexes([0,1])
rm.runCommand(imp, "Measure")
rm.reset()
# This retrieves the area measurement of the cell and stores it as cell_area
cell_area = rt.getValueAsDouble(0, 0)
# Clears the results table
rt.reset()
# This variable keeps track of how many good ROIs have been found
found_roi = 0
# This variable keeps track of how many attempts have gone by without finding a good ROI
tries = 0
# Loop for generating many ROIs
while (found_roi < num_roi and tries < 20):
# This increases the tries count by one
tries = tries + 1
# Uses the cell ROI bounding box dimensions to get a 'good guess' putative roi placement
rand_x = bound.x + random.randint(0,bound.width-(roi_width))
rand_y = bound.y + random.randint(0,bound.height-(roi_width/2))
# Creates new ROI based on the guess
test_roi = Roi(rand_x, rand_y, roi_width, roi_width/2)
# Adds the new ROI to an overlay
ove1.add(test_roi)
# Pulls the ROI back out of the overlay as r3
r3 = ove1.get(0)
# Adds r3 to the ROI manager
rm.addRoi(r3)
# Clears ove1
ove1.clear()
# This section checks if the random ROI falls entirely within the cell outline
# If the roi_area variable had not been filled with a value yet, set it to the area of one rectangular ROI
if roi_area == 0:
rm.setSelectedIndexes([0])
rm.runCommand(imp, "Measure")
roi_area = rt.getValueAsDouble(0, 0)
rt.reset()
# Adds the active cell outline to the ROI manager
rm.addRoi(active_cell)
#!!!!!!!??????!!!!!!! PROBLEM AREA !!!!!!!???????!!!!!!!
rm.setSelectedIndexes([0,1])
# Uses the ROI manager OR command to make a composit of the rectangular ROI and the cell outline
rm.runCommand("OR")
# Adds the composite as a new ROI to the ROI manager
rm.runCommand("Add")
# Selects the composite and measures it to get the area
rm.runCommand(imp, "Measure")
new_area = rt.getValueAsDouble(0, 0)
rt.reset()
rm.reset()
# If the composite has the same area as the cell_area then the rectangular ROI is completely contained within the cell outline and is 'legal', thus we add it to the good_rois list
if new_area == cell_area:
good_rois.append(r3)
# This increases the found_roi count by 1
found_roi = found_roi + 1
# This resets the tries counter after a new good roi is found
tries = 0
print "found_roi"
print good_rois
######### Measuring all ROIs and Saving results ##########
# This splits the color channels in the image into the three components and saves the result as a list of images
imp_list = ChannelSplitter.split(imp)
# This makes the channel of interest in the list visible
imp_list[1].show()
imp_list[2].show()
# This block of code calculates background values
if has_background == True:
rm.reset()
# Loop that adds all spots to the ROI manager
for spot in background_spots:
rm.addRoi(spot)
# Make 1st channel active
# Measure for the first channel
# Initializes a variable for calculating the mean background in channel 1
mean_ch1 = 0
# Get average of mean intensity for channel 1 background
for i in range (0,rt.size(),1):
mean_ch1 = mean_ch1 + rt.getValue("Mean",i)
mean_ch1 = mean_ch1 / rt.size()
rt.reset()
# Make 2nd channel active
# Measure for second channel
# Initializes a variable for calculating the mean background in channel 2
mean_ch2 = 0
# Get average of mean intensity for channel 2 background
for i in range (0,rt.size(),1):
mean_ch2 = mean_ch2 + rt.getValue("Mean",i)
mean_ch2 = mean_ch2 / rt.size()
rt.reset()
rm.reset()
# Adds all of the cell outlines to the ROI manager
for cell in cells:
rm.addRoi(cell)
# This makes the first channel of interest in the list visible
imp_list[1].show()
# This measures all of the cells in this first channel
if has_background == True:
print "boo"
# Loop that subtracts the appropriate background value
# This makes the second channel of interest in the list visible
imp_list[2].show()
# This measures all of the cells in the second channel
# This region of the code extracts measurement information from the results table, formats it and saves it to an appropriately named file
# This saves the whole results table directly as a text file
rt.saveAs("C:\\Users\\<NAME>\\Desktop\\Orp1 Images\\Results"+ time + "__" + title + "results.txt")
# This can write a formated sting to a text file if we want that instead
#output = open("C:\\Users\\xenon\\Desktop\\BioFinalP\\Results\\"+ time + "__" + title + "output.txt", "w+")
#output.close()
# This splits the color channels in the image into the three components and saves the result as a list of images
imp_list = ChannelSplitter.split(imp)
# This makes the first image in the list visible
imp_list[1].show()
#!!!!!!!??????!!!!!!! Problem area !!!!!!!???????!!!!!!!
# This attempts to make that image the temporarily active image (for measuring off of)
WindowManager.setTempCurrentImage(imp_list[1])
rm.runCommand(imp_list[1], "Measure")
#Close image, clear things and return
return
# Folder
# Make list of files
# Initialize files to append to
# Loop to call ROI.gen
# Make the good ROIs list ordered 1)background 2)cells 3)rectangular ROIs
# Make all background spots exactly the same size
# Timestamp
# This segment of code retrieves the current time and makes it into a string that can be added as a time stamp to the files generated in the program
now = str(datetime.datetime.now())
nnow = now.split(" ")
time = nnow[1]
time = time.replace(":",".")
#____________________INPUT FILEPATH of tiff library as you would navigate to it. Make sure you double slash all the slashes in the filepath______________________
folder ="C:\\Users\\<NAME>\\Desktop\\Orp1 Images\\14B11SFT"
tifs = []
for file in os.listdir(folder):
filename = file
if filename.endswith(".tif"):
fullpath = folder + "\\" + filename
tifs.append(fullpath)
else:
continue
for x in tifs:
ROI_gen(x)
|
StarcoderdataPython
|
1676878
|
from __future__ import print_function, division
from pyscf.nao.m_libnao import libnao
from ctypes import POINTER, c_int64, byref
libnao.init_dens_libnao.argtypes = ( POINTER(c_int64), ) # info
def init_dens_libnao():
""" Initilize the auxiliary for computing the density on libnao site """
info = c_int64(-999)
libnao.init_dens_libnao( byref(info))
return info.value
|
StarcoderdataPython
|
6606181
|
<reponame>jfnavarro/st_ts<filename>scripts/tag_clusters_to_matrix.py<gh_stars>1-10
#! /usr/bin/env python
"""
Script that computes tag cluster counts (TTs)
for ST data. It takes the file generated with compute_st_tts.py
and computes a matrix of counts by intersecting the
TTs with the original ST BED file generated with the ST Pipeline.
The output will look like
TT1.....TTN
XxY
XxY
...
It needs the original BED file with ST data to extract the read counts
If no output file is given the output will be : output_table_ctts.txt
@Author <NAME> <<EMAIL>>
"""
import argparse
import sys
from collections import defaultdict
import os
def main(input_files, outfile):
if len(input_files) != 2:
sys.stderr.write("Error, input file not present or invalid format\n")
sys.exit(1)
st_bed_file = input_files[1]
tag_clusters_file = input_files[0]
if not os.path.isfile(st_bed_file) or not os.path.isfile(tag_clusters_file):
sys.stderr.write("Error, input file not present or invalid format\n")
sys.exit(1)
if outfile is None:
outfile = "output_table_ctts.txt"
# load all the original barcode - gene coordinates
map_original_clusters = defaultdict(list)
with open(st_bed_file, "r") as filehandler:
for line in filehandler.readlines():
if line.find("#") != -1:
continue
tokens = line.split()
assert(len(tokens) == 9)
chromosome = tokens[0]
start_site = int(tokens[1])
end_site = int(tokens[2])
strand = tokens[5]
# gene = tokens[6]
x = float(tokens[7])
y = float(tokens[8])
map_original_clusters[(chromosome,strand)].append((x,y,start_site,end_site))
# loads all the clusters
map_clusters = defaultdict(int)
clusters = set()
barcodes = set()
with open(tag_clusters_file, "r") as filehandler:
for line in filehandler.readlines():
if line.find("#") != -1:
continue
tokens = line.split()
assert(len(tokens) == 8)
chromosome = tokens[0]
start = int(tokens[2])
strand = tokens[1]
end = int(tokens[3])
# doing a full search of intersections over all barcodes (similar to bed intersect)
# If we could rely on no barcodes being missing after the clustering, we could use
# a faster approach that does not need to iterate over all the barcodes, only one.
# This intersection method is probably overcounting.
for x, y, start_orig, end_orig in map_original_clusters[chromosome, strand]:
if strand == "-": start_orig = (end_orig - 1)
if (start_orig >= start and start_orig < end):
map_clusters[(x,y,chromosome,strand,start,end)] += 1
barcodes.add((x,y))
clusters.add((chromosome,strand,start,end))
# write cluster count for each barcode
with open(outfile, "w") as filehandler:
clusters_string = "\t".join("%s:%s-%s,%s" % cluster for cluster in clusters)
filehandler.write(clusters_string + "\n")
for x,y in barcodes:
filehandler.write("{0}x{1}".format(x,y))
for chro,strand,star,end in clusters:
count = map_clusters[(x,y,chro,strand,star,end)]
filehandler.write("\t{}".format(count))
filehandler.write("\n")
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('input_files', nargs=2,
help="The tab delimited file containing the tag clusters and the ST original BED file")
parser.add_argument("--outfile", default=None, help="Name of the output file")
args = parser.parse_args()
main(args.input_files, args.outfile)
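# Hedged usage sketch (added comment; the file names below are assumptions):
# python tag_clusters_to_matrix.py tag_clusters.txt st_data.bed --outfile output_table_ctts.txt
# where tag_clusters.txt is the file produced by compute_st_tts.py and st_data.bed
# is the original BED file generated with the ST Pipeline.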
|
StarcoderdataPython
|
11352613
|
import shelve
import pickle
import os
import sharedFunctions
import info
class PersistentData:
"""
"""
def __init__(self, inputData=None):
"""
"""
self.inputData = inputData
self.dbPath = sharedFunctions.getDbRepoDir()
self.activeShelve = None
self.data = {}
def determineActiveShelve(self):
"""
"""
crackType = self.inputData.getCrackType()
self.activeShelve = self.shelves[crackType]
def createDbMetadata(self):
"""
"""
crackType = self.inputData.getCrackType()
dataStr = self.inputData.getDataStr()
historyOutputs = dataStr["input"]["interactionProperties"][
"crack"]["historyOutputs"].keys()
historyOutputs.sort()
loadType = dataStr["input"]["createPredefinedLoads"]
if loadType:
loads = "infiniteBody"
else:
loads = "custom"
self.metadata = {
"crackType": crackType,
"dbVersion": info.dbVersion,
"historyOutputs": historyOutputs,
"loads": loads,
"description": ""}
def verifyMetadataCompatibility(self):
"""
"""
db = shelve.open(self.activeShelve)
dbMeta = db["metadata"]
db.close()
dbMeta["historyOutputs"].sort()
if self.metadata["crackType"] != dbMeta["crackType"]:
return False
elif self.metadata["loads"] != dbMeta["loads"]:
return False
elif self.metadata["dbVersion"] != dbMeta["dbVersion"]:
return False
elif self.metadata["historyOutputs"] != dbMeta["historyOutputs"]:
return False
else:
return True
def verifyActiveShelve(self):
"""
"""
if (os.path.exists(self.activeShelve + ".dat") or
os.path.exists(self.activeShelve)):
if self.verifyMetadataCompatibility():
return True
else:
raise Exception(
"Output database is incompatible with the analysis. \
Specify a different output database.")
else:
sharedFunctions.createResultsDirStr(self.dbName)
db = shelve.open(self.activeShelve)
db["metadata"] = self.metadata
db.close()
return True
def specifyActiveShelve(self, dbName):
"""
"""
self.dbName = dbName
self.activeShelve = sharedFunctions.getDbPath(dbName)
def readAll(self):
"""
"""
data = {}
db = shelve.open(self.activeShelve)
for key in db.keys():
data[key] = db[key]
db.close()
return data
def writeToDb(self):
"""
"""
self.prepareDataForShelving()
key = self.inputData.getModelName()
db = shelve.open(self.activeShelve)
db[key] = self.picklingData
db.close()
def prepareDataForShelving(self):
"""
"""
self.picklingData = self.inputData.getDataForPickling()
def pickleConfFile(self):
"""
"""
self.prepareDataForShelving()
fileName = self.inputData.getOdbName() + ".pickle"
confFile = open(fileName, 'wb')
pickle.dump(self.picklingData, confFile)
confFile.close()
def checkForDuplicate(self):
"""
"""
db = shelve.open(self.activeShelve)
for key in db.keys():
if db[key] == self.inputData:
db.close()
return True
db.close()
return False
def getDuplicates(self):
"""
"""
duplicateKeys = ()
db = shelve.open(self.activeShelve)
for key in db.keys():
if db[key] == self.inputData:
duplicateKeys += key,
db.close()
return duplicateKeys
def readKey(self, key):
"""
"""
db = shelve.open(self.activeShelve)
data = db[key]
db.close()
return data
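# Hedged usage sketch (added comment; "inputData" stands for whatever object the
# calling code normally supplies, and the database name below is an assumption):
#
# store = PersistentData(inputData)
# store.createDbMetadata()
# store.specifyActiveShelve("crack_results")
# if store.verifyActiveShelve() and not store.checkForDuplicate():
#     store.writeToDb()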
|
StarcoderdataPython
|
9686818
|
### Item Serializer ###
from django.forms import widgets
from django.core.urlresolvers import reverse
from rest_framework import serializers
from item.models import Item, ItemBaseParam
class ItemParamSerializer(serializers.ModelSerializer):
""" Used to serialize ItemParamDirectory objects. """
name = serializers.CharField()
value = serializers.CharField()
class Meta:
model = ItemBaseParam
fields = ('name', 'value')
class ItemSerializer(serializers.ModelSerializer):
""" Used to serialize Items for API. """
name = serializers.SerializerMethodField(read_only=True)
amount = serializers.SerializerMethodField(read_only=True)
def get_name(self, object):
""" Return some readable name for Item. """
return "{0}".format(object.get_type().__str__())
def get_amount(self, object):
""" Return amount in Item pile. """
return object.amount
class Meta:
model = Item
fields = ('id', 'name', 'itype', 'amount', 'location')
class ItemDetailSerializer(ItemSerializer):
""" Used to serialize Items for API. """
params = serializers.SerializerMethodField(read_only=True)
def get_params(self, object):
""" Get ItemDirectory params. """
base_params_qs = object.itype.get_param().all()
serializer = ItemParamSerializer(base_params_qs, many=True)
return serializer.data
class Meta:
model = Item
fields = ('id', 'name', 'itype', 'amount', 'date_init', 'location', 'params')
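# Hedged usage sketch (added comment; assumes Item rows already exist in the database):
#
# serializer = ItemDetailSerializer(Item.objects.all(), many=True)
# serializer.data  # -> list of dicts with id, name, itype, amount, ... and params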
|
StarcoderdataPython
|
9600784
|
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# landbouwschade_QgistoArcMap.py
# Created on: 2018-08-27 10:11:14.00000
# (generated by ArcGIS/ModelBuilder)
# Usage: landbouwschade_QgistoArcMap <LBS_tussenstap> <landbouwschade2018>
# Description:
# ---------------------------------------------------------------------------
# Import arcpy module
import arcpy
# Script arguments
LBS_tussenstap = arcpy.GetParameterAsText(0)
if LBS_tussenstap == '#' or not LBS_tussenstap:
LBS_tussenstap = "R:\\GIS-CO\\QGIS-PROJECTEN\\Milieu\\geodata\\landbouwschade.gdb\\landbouwschade2018_tussenstap" # provide a default value if unspecified
landbouwschade2018 = arcpy.GetParameterAsText(1)
if landbouwschade2018 == '#' or not landbouwschade2018:
landbouwschade2018 = "landbouwschade2018" # provide a default value if unspecified
# Local variables:
LBS_percelen = landbouwschade2018
LBS_percelenWGS = "R:\\GIS-CO\\QGIS-PROJECTEN\\Milieu\\geodata\\landbouwschade.gdb\\landbouwschade2018_Project"
# Process: Select Layer By Attribute
arcpy.SelectLayerByAttribute_management(landbouwschade2018, "NEW_SELECTION", "\"controle\" = 'ja' OR \"controle\" = 'nee'OR \"controle\" = 'gecontroleerd'")
# Process: Project
arcpy.Project_management(LBS_percelen, LBS_percelenWGS, "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]", "Belge_1972_To_WGS_1984_3", "PROJCS['Belge_1972_Belgian_Lambert_72',GEOGCS['GCS_Belge 1972',DATUM['D_Belge_1972',SPHEROID['International_1924',6378388.0,297.0]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['false_easting',150000.013],PARAMETER['false_northing',5400088.438],PARAMETER['central_meridian',4.367486666666666],PARAMETER['standard_parallel_1',49.8333339],PARAMETER['standard_parallel_2',51.16666723333333],PARAMETER['latitude_of_origin',90.0],UNIT['Meter',1.0]]", "NO_PRESERVE_SHAPE", "", "NO_VERTICAL")
# Process: Copy Features
arcpy.CopyFeatures_management(LBS_percelenWGS, LBS_tussenstap, "DEFAULTS", "0", "0", "0")
|
StarcoderdataPython
|
126034
|
<filename>accounts/urls.py
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from django.views.generic.edit import CreateView
from .forms import CustomUserCreationForm
from .views import (
Registration,
Profile
)
urlpatterns = [
url(
r'^login/$',
auth_views.login,
{
'template_name': 'auth/login.html'
},
name='login'
),
url(
r'^register/$',
Registration.as_view(),
name='registration'
),
url(
r'^logout/$',
auth_views.logout,
name='logout'
),
url(
r'^password_change/$',
auth_views.password_change,
{
'template_name': 'auth/password_change.html'
},
name='password_change'
),
url(
r'^password_change/done/$',
auth_views.password_change_done,
{
'template_name': 'auth/password_change_done.html'
},
name='password_change_done'
),
url(
r'^password_reset/$',
auth_views.password_reset,
{
'template_name': 'auth/password_reset.html'
},
name='password_reset'
),
url(
r'^password_reset/done/$',
auth_views.password_reset_done,
{
'template_name': 'auth/password_reset_done.html'
},
name='password_reset_done'
),
url(
r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
auth_views.password_reset_confirm,
{
'template_name': 'auth/password_reset_confirm.html'
},
name='password_reset_confirm'
),
url(
r'^reset/done/$',
auth_views.password_reset_complete,
{
'template_name': 'auth/password_reset_complete.html'
},
name='password_reset_complete'
),
url(
r'^profile/(?P<pk>\d+)/$',
Profile.as_view(),
name='profile'
)
]
|
StarcoderdataPython
|
5150339
|
# __init__.py
from .withings import Withings
|
StarcoderdataPython
|
11361936
|
<gh_stars>0
import os
HOME = os.getenv('HOME')
MUSIC_FOLDER = 'Music/'
UPLOADS_FOLDER = os.path.join(HOME, MUSIC_FOLDER)
MAX_UPLOAD_SIZE = 7e+6
|
StarcoderdataPython
|
1811374
|
<reponame>monosloth/console<filename>app/provider/command.py
import yaml
from monosloth.provider import AbstractProvider
from app.singleton import Parser
class CommandProvider(AbstractProvider):
def register(self):
"""Register all commands with an argument parser.
"""
parser = Parser()
with open('./config/commands.yaml') as stream:
commands = yaml.load(stream)
parser.set_commands(commands)
for name, data in commands.items():
args = self.__get_args(data)
kwargs = self.__get_kwargs(data)
parser.attach(*args, **kwargs)
def __get_args(self, data):
"""Build command arguments.
:param data: The data to extract args from.
:return: A tuple of args.
"""
args = ()
if 'name' in data:
args += (data['name'],)
if 'alias' in data:
args += ('--{}'.format(data['alias']),)
return args
def __get_kwargs(self, data):
"""Build command keyword arguments.
:param data: The data to extract kwargs from.
:return: A dict of kwargs.
"""
kwargs = {}
if 'action' in data:
kwargs['action'] = data['action']
if 'type' in data:
kwargs['type'] = data['type']
return kwargs
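# Illustrative shape of ./config/commands.yaml as consumed above (an assumption,
# inferred from the keys read in __get_args/__get_kwargs; the real file may differ):
#
# build:
#   name: target
# serve:
#   alias: verbose
#   action: store_true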
|
StarcoderdataPython
|
1632559
|
#!/usr/bin/python3
# imports {{{
import logging
import threading
import time
import datetime
import sys
import tty, termios
import random
import argparse
import atexit
# }}}
################## Global stuff ################## {{{
x0 = 0
y0 = 0
sec = 0
sec_inc = 1
lock=threading.Lock()
# Set up unbuffered read from stdin
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
flog = open('history.txt','a')
# Set up logger output
logger = logging.getLogger()
fdbg=logging.FileHandler('debug.log')
fdbg.setLevel(logging.DEBUG)
fdbg.setFormatter(logging.Formatter("%(asctime)s: %(message)s",'%H:%M:%S'))
logger.addHandler(fdbg)
#}}}
################## Functions ################## {{{
def myclrline(y,x): #{{{
with lock:
sys.stdout.write ("\x1b[s\x1b[?25l")
sys.stdout.flush ()
sys.stdout.write ("\x1b["+str(y+y0)+";"+str(x+x0)+"H\x1b[K\x1b[u\x1b[?25h")
sys.stdout.flush ()
#}}}
def myaddstr(y,x,buf): #{{{
with lock:
sys.stdout.write ("\x1b[s\x1b[?25l")
sys.stdout.flush ()
sys.stdout.write ("\x1b["+str(y+y0)+";"+str(x+x0)+"H"+buf+"\x1b[u\x1b[?25h")
sys.stdout.flush ()
#}}}
def myaddstr_m(yxbuf): #{{{
with lock:
for i in yxbuf:
sys.stdout.write ("\x1b[s\x1b[?25l")
sys.stdout.flush ()
sys.stdout.write ("\x1b["+str(i[0]+y0)+";"+str(i[1]+x0)+"H"+i[2]+"\x1b[u\x1b[?25h")
sys.stdout.flush ()
#}}}
def timer_function(name): #{{{
global sec
global lock
logging.debug ("Thread %s: starting", name)
while sec<quiz_timeout:
time.sleep(1)
logging.debug (sec)
sec = sec + sec_inc
towrite = [(1-y0, 1-x0, "\x1b[2m"+str(sec)+"\x1b[m")];
if sec % 5 == 1:
towrite.append ((10,10,str(int((c_right+c_wrong)*60./sec))+" "));
myaddstr_m (towrite)
myaddstr (1-y0, 1-x0, "\x1b[2m"+str(sec)+"\x1b[m TIMEOUT!")
logging.debug ("Thread %s: finishing", name)
#}}}
def cleanup(): #{{{
sys.stdout.write("\x1bc\x1b[?25h\x1b[f\x1b[J") # clear screen
sys.stdout.flush ()
termios.tcsetattr (fd, termios.TCSADRAIN, old_settings)
logging.debug ("Main : all done")
flog.close ()
#}}}
def _get_termsize(): #{{{
import struct
import fcntl
cr = struct.unpack('hh',fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
return cr # (rows,columns)
#}}}
#}}}
################## Main program ##################
parser = argparse.ArgumentParser(description="Fun math quiz for kids!")
parser.add_argument('-T','--timeout', type=int, default=10, help='timeout in seconds (default=10)')
parser.add_argument('-t','--type', type=str, default=1, help='quiz type (1:add,2:sub,3:add+sub,default=1)')
parser.add_argument('-r1', '--x1range', type=str, default='0,10', help='x1 range')
parser.add_argument('-r2', '--x2range', type=str, default='0,10', help='x2 range')
parser.add_argument('--log', choices=['INFO','info','DEBUG','debug'], default='INFO', help='log level (default=INFO)')
try:
options = parser.parse_args(sys.argv[1:])
except:
print("Error parsing arguments!");
sys.exit()
numeric_level = getattr(logging, options.log.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % options.log)
logger.setLevel(numeric_level)
quiz_timeout = options.timeout
lower1,upper1 = options.x1range.split(',')
lower2,upper2 = options.x2range.split(',')
lower1 = int(lower1)
upper1 = int(upper1)
lower2 = int(lower2)
upper2 = int(upper2)
list_q_type = [int(i) for i in options.type.split(',')]
num_q_type = len(list_q_type)
print(num_q_type,list_q_type)
# 1: add
# 2: sub
# 3: add/sub
# 4: add/sub with r1 for result range, (r1 -+ r2) +- r2 = ?
# 5: r1 = r2 +- ?
# Proper TTY reset at exit
atexit.register (cleanup)
# TTY fullscreen and unbuffered input
tty.setraw (sys.stdin.fileno())
sys.stdout.write ("\x1b[f\x1b[J") # clear screen
sys.stdout.flush ()
(nrow,ncol) = _get_termsize ()
#flog.write(str(nrow/2-5)+" "+str(ncol))
x0 = max(int(ncol/2-8),0)
y0 = max(int(nrow/2-5),0)
# main quiz codes
flog.write("\n======== "+str(datetime.datetime.now())+" ========\n\n")
s = ""
sec = 0
c_right = 0
c_wrong = 0
signchar = ('-','+')
myaddstr_m ((( 1-y0,1-x0,"\x1b[2m0\x1b[m"),\
( 8,1,"Correct: 0"),\
( 9,1," Wrong: 0"),\
(10,1," APM: 0")))
timer_thread = threading.Thread(target=timer_function, args=(1,), daemon=True)
timer_thread.start()
p_m = 0
# Main loop over questions {{{
while sec < quiz_timeout:
inplen = 0
inpstr = [' ' for i in range(10)]
# question generation {{{
if num_q_type > 1:
q_type = list_q_type[random.randint(0,num_q_type-1)]
else:
q_type = list_q_type[0]
if q_type == 5 or q_type == 6:
x1 = random.randint(lower1,upper1)
x2 = random.randint(lower2,upper2)
if q_type == 6:
p_m = random.randint(0,1)
else:
p_m = 1
if p_m == 0:
result = x2
x2 = x1 + x2
else:
result = x1
x1 = x1 + x2
qstr0 = str(x1)+" = "+str(x2)+" "+signchar[p_m]+" "
qstr = "\x1b["+str(3+y0)+";"+str(3+x0)+"H"+ qstr0 + "\x1b[K\x1b[?25h"
elif q_type == 4:
result = random.randint(lower1,upper1)
p_m = random.randint(0,1)
if p_m == 0:
x2 = random.randint(lower2,upper2)
x1 = result + x2
else:
x2 = random.randint(lower2,result)
x1 = result - x2
qstr0 = str(x1) +" "+ signchar[p_m] +" "+ str(x2) +" = "
qstr = "\x1b["+str(3+y0)+";"+str(3+x0)+"H"+ qstr0 + "\x1b[K\x1b[?25h"
else:
x1 = random.randint(lower1,upper1)
x2 = random.randint(lower2,upper2)
if q_type == 1:
p_m = 1
elif q_type == 2:
p_m = 0
elif q_type == 3:
p_m = random.randint(0,1)
else:
p_m = 1 - p_m
if p_m == 0:
if x1 < x2:
tv = x1
x1 = x2
x2 = tv
result = x1 - x2
else:
result = x1 + x2
qstr0 = str(x1) +" "+ signchar[p_m] +" "+ str(x2) +" = "
qstr = "\x1b["+str(3+y0)+";"+str(3+x0)+"H"+ qstr0 + "\x1b[K\x1b[?25h"
# }}}
t0 = datetime.datetime.now ()
with lock:
sys.stdout.write (qstr) # clear line, show cursor
sys.stdout.flush ()
# Input processing loop {{{
while True:
# Read 1 character
newchar = sys.stdin.read(1)
if newchar == 'Q': # immediately quit
sys.exit ()
elif newchar == ' ': # toggle pause
if sec_inc == 0:
myclrline (1,5)
sec_inc = 1
else:
myaddstr (1,5,"PAUSED")
sec_inc = 0
elif inplen<8 and newchar>='0' and newchar<='9':
inpstr[inplen] = newchar
inplen = inplen + 1
with lock:
sys.stdout.write (newchar)
sys.stdout.flush ()
elif inplen>0:
#logging.debug("Main : unknown character"+str(ord(newchar)))
if ord(newchar) == 13: # ENTER
break
elif ord(newchar) == 127: # BACKSPACE
inplen = inplen - 1
with lock:
sys.stdout.write ("\x1b[D\x1b[K")
sys.stdout.flush ()
# END input processing loop}}}
logging.debug (inpstr)
ansstr = s.join(inpstr[0:inplen])
ans = int(ansstr)
if ans == result:
myaddstr(5, 3, "\x1b[32mCORRECT!\x1b[m");
c_right = c_right + 1
markchar = ' '
else:
myaddstr(5, 3, "\x1b[91mWRONG! \x1b[m");
c_wrong = c_wrong + 1
markchar = '@'
td = datetime.datetime.now() - t0
flog.write( "%1s %3d %s\n" % (markchar,int(td.total_seconds()),qstr0+ansstr) )
newchar = sys.stdin.read(1)
myclrline (5,3);
myaddstr_m ((( 8,10,str(c_right)),
( 9,10,str(c_wrong))));
# END question loop }}}
newchar = sys.stdin.read(1)
|
StarcoderdataPython
|
3244152
|
from .create_admin_user import (
create_admin_user
)
|
StarcoderdataPython
|
5046876
|
import math
import numpy as np
class Partitioner:
"""2-dimensional domain decomposition of a 3-dimensional computational
grid among MPI ranks on a communicator."""
def __init__(self, comm, domain, num_halo, periodic = (True, True)):
assert len(domain) == 3, \
"Must specify a three-dimensional domain"
assert domain[0] > 0 and domain[1] > 0 and domain[2] > 0, \
"Invalid domain specification (negative size)"
assert num_halo >= 0, "Number of halo points must be zero or positive"
self.__comm = comm
self.__num_halo = num_halo
self.__periodic = periodic
self.__rank = comm.Get_rank()
self.__num_ranks = comm.Get_size()
self.__global_shape = [domain[0], domain[1] + 2 * num_halo, domain[2] + 2 * num_halo]
size = self.__setup_grid()
assert domain[1] >= size[0] and domain[2] >= size[1], "Domain is too small for number of ranks"
self.__setup_domain(domain, num_halo)
def comm(self):
"""Returns the MPI communicator used to set up this partitioner"""
return self.__comm
def num_halo(self):
"""Returns the number of halo points"""
return self.__num_halo
def periodic(self, dim=None):
"""Returns the periodicity of individual or all dimensions"""
if dim is not None:
return self.__periodic[dim]
else:
return self.__periodic
def rank(self):
"""Returns the rank of the current MPI worker"""
return self.__rank
def num_ranks(self):
"""Returns the number of ranks that have been distributed by this partitioner"""
return self.__num_ranks
def shape(self):
"""Returns the shape of a local field (including halo points)"""
return self.__shape
def global_shape(self):
"""Returns the shape of the global field (including halo points)"""
return self.__global_shape
def size(self):
"""Dimensions of the two-dimensional worker grid"""
return self.__size
def position(self):
"""Position of current rank on two-dimensional worker grid"""
return self.__rank_to_position(self.__rank)
def left(self):
"""Returns the rank of the left neighbor"""
return self.__get_neighbor_rank( [0, -1] )
def right(self):
"""Returns the rank of the right neighbor"""
return self.__get_neighbor_rank( [0, +1] )
def top(self):
"""Returns the rank of the top neighbor"""
return self.__get_neighbor_rank( [+1, 0] )
def bottom(self):
"""Returns the rank of the bottom neighbor"""
return self.__get_neighbor_rank( [-1, 0] )
def scatter(self, field, root=0):
"""Scatter a global field from a root rank to the workers"""
if self.__rank == root:
assert np.any(field.shape[0] == np.array(self.__global_shape[0])), \
"Field does not have correct shape"
assert 0 <= root < self.__num_ranks, "Root processor must be a valid rank"
if self.__num_ranks == 1:
return field
sendbuf = None
if self.__rank == root:
sendbuf = np.empty( [self.__num_ranks,] + self.__max_shape, dtype=field.dtype )
for rank in range(self.__num_ranks):
j_start, i_start, j_end, i_end = self.__domains[rank]
sendbuf[rank, :, :j_end-j_start, :i_end-i_start] = field[:, j_start:j_end, i_start:i_end]
recvbuf = np.empty(self.__max_shape, dtype=field.dtype)
self.__comm.Scatter(sendbuf, recvbuf, root=root)
j_start, i_start, j_end, i_end = self.__domain
return recvbuf[:, :j_end-j_start, :i_end-i_start].copy()
def gather(self, field, root=0):
"""Gather distributed fields from the workers into a single global field on a root rank"""
assert np.any(field.shape == np.array(self.__shape)), "Field does not have correct shape"
assert -1 <= root < self.__num_ranks, "Root processor must be -1 (all) or a valid rank"
if self.__num_ranks == 1:
return field
j_start, i_start, j_end, i_end = self.__domain
sendbuf = np.empty( self.__max_shape, dtype=field.dtype )
sendbuf[:, :j_end-j_start, :i_end-i_start] = field
recvbuf = None
if self.__rank == root or root == -1:
recvbuf = np.empty( [self.__num_ranks,] + self.__max_shape, dtype=field.dtype )
if root > -1:
self.__comm.Gather(sendbuf, recvbuf, root=root)
else:
self.__comm.Allgather(sendbuf, recvbuf)
global_field = None
if self.__rank == root or root == -1:
global_field = np.empty(self.__global_shape, dtype=field.dtype)
for rank in range(self.__num_ranks):
j_start, i_start, j_end, i_end = self.__domains[rank]
global_field[:, j_start:j_end, i_start:i_end] = recvbuf[rank, :, :j_end-j_start, :i_end-i_start]
return global_field
def compute_domain(self):
"""Return position of subdomain without halo on the global domain"""
return [self.__domain[0] + self.__num_halo, self.__domain[1] + self.__num_halo, \
self.__domain[2] - self.__num_halo, self.__domain[3] - self.__num_halo]
def __setup_grid(self):
"""Distribute ranks onto a Cartesian grid of workers"""
for ranks_x in range(math.floor( math.sqrt(self.__num_ranks) ), 0, -1):
if self.__num_ranks % ranks_x == 0:
break
self.__size = (self.__num_ranks // ranks_x, ranks_x)
return self.__size
def __get_neighbor_rank(self, offset):
"""Get the rank ID of a neighboring rank at a certain offset relative to the current rank"""
position = self.__rank_to_position(self.__rank)
pos_y = self.__cyclic_offset(position[0], offset[0], self.__size[0], self.__periodic[0])
pos_x = self.__cyclic_offset(position[1], offset[1], self.__size[1], self.__periodic[1])
return self.__position_to_rank( [pos_y, pos_x] )
def __cyclic_offset(self, position, offset, size, periodic=True):
"""Add offset with cyclic boundary conditions"""
pos = position + offset
if periodic:
while pos < 0:
pos += size
while pos > size - 1:
pos -= size
return pos if -1 < pos < size else None
def __setup_domain(self, shape, num_halo):
"""Distribute the points of the computational grid onto the Cartesian grid of workers"""
assert len(shape) == 3, "Must pass a 3-dimensional shape"
size_z = shape[0]
size_y = self.__distribute_to_bins(shape[1], self.__size[0])
size_x = self.__distribute_to_bins(shape[2], self.__size[1])
pos_y = self.__cumsum(size_y, initial_value=num_halo)
pos_x = self.__cumsum(size_x, initial_value=num_halo)
domains = []
shapes = []
for rank in range(self.__num_ranks):
pos = self.__rank_to_position(rank)
domains += [[ pos_y[pos[0]] - num_halo, pos_x[pos[1]] - num_halo, \
pos_y[pos[0] + 1] + num_halo, pos_x[pos[1] + 1] + num_halo ]]
shapes += [[ size_z, domains[rank][2] - domains[rank][0], \
domains[rank][3] - domains[rank][1] ]]
self.__domains, self.__shapes = domains, shapes
self.__domain, self.__shape = domains[self.__rank], shapes[self.__rank]
self.__max_shape = self.__find_max_shape( self.__shapes )
def __distribute_to_bins(self, number, bins):
"""Distribute a number of elements to a number of bins"""
n = number // bins
bin_size = [n] * bins
# make bins in the middle slightly larger
extend = number - n * bins
if extend > 0:
start_extend = bins // 2 - extend // 2
bin_size[start_extend:start_extend + extend] = \
[ n + 1 for n in bin_size[start_extend:start_extend + extend] ]
return bin_size
def __cumsum(self, array, initial_value=0):
"""Cumulative sum with an optional initial value (default is zero)"""
cumsum = [initial_value]
for i in array:
cumsum += [ cumsum[-1] + i ]
return cumsum
def __find_max_shape(self, shapes):
max_shape = shapes[0]
for shape in shapes[1:]:
max_shape = list(map(max, zip(max_shape, shape)))
return max_shape
def __rank_to_position(self, rank):
"""Find position of rank on worker grid"""
return ( rank // self.__size[1], rank % self.__size[1] )
def __position_to_rank(self, position):
"""Find rank given a position on the worker grid"""
if position[0] is None or position[1] is None:
return None
else:
return position[0] * self.__size[1] + position[1]
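# Hedged usage sketch (added example; run with e.g. `mpirun -n 4 python this_file.py`,
# the domain size and the simple rank-local "work" below are assumptions for illustration):
if __name__ == "__main__":
    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    p = Partitioner(comm, domain=(8, 64, 64), num_halo=2)
    global_field = np.zeros(p.global_shape())  # allocated on every rank for simplicity
    local_field = p.scatter(global_field)      # each rank receives its subdomain
    local_field += p.rank()                    # some rank-local work
    gathered = p.gather(local_field)           # rank 0 reassembles the global field
    if p.rank() == 0:
        print(gathered.shape)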
|
StarcoderdataPython
|
5159367
|
<reponame>joscelino/Python_Colletions
from collections import namedtuple
naipes = 'P C O E'.split()
valores = list(range(2, 11)) + 'A J Q K'.split()
carta = namedtuple('Carta', 'naipe valor')
baralho = [carta(naipe, valor) for naipe in naipes for valor in valores]
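# Illustrative usage (added example): each card is a namedtuple, so its fields can
# be accessed by name; the index 0 below is arbitrary.
print(baralho[0].naipe, baralho[0].valor)  # first card of the deck, e.g. P 2
print(len(baralho))  # 52 cards in total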
|
StarcoderdataPython
|
3581681
|
<gh_stars>0
#!/usr/bin/env python3
# The line above specifies where the Python 3 interpreter for this file lives.
# In addition, if you give this file the execute permission on Linux/Mac (`chmod a+x simple.py`), you can launch it simply as `./simple.py ...`. On Windows, registering a default application for the `.py` extension may achieve something similar.
# This program displays the specified UI file.
import sys
from PyQt5 import QtWidgets, uic
'''
Load two packages.
- QtWidgets provides the PyQt GUI widgets.
- uic is the UI compiler for PyQt. It automatically converts UI specifications created with Qt Designer into Python code. If you are curious about the generated code, try the Python UI compiler, e.g. `pyuic5 01-default.ui`.
'''
# The customary line when starting a GUI application.
app = QtWidgets.QApplication(sys.argv)
# Build a window suitable for a typical application (see the figure at the link below)
# http://doc.qt.io/qt-5/qmainwindow.html#details
window = QtWidgets.QMainWindow()
# Automatically convert the GUI widget specification created with Qt Designer into Python objects
uic.loadUi('01-default.ui' if len(sys.argv) <= 1 else sys.argv[1], window)
# Show the window (use showFullScreen() below for full-screen display)
window.show()
# window.showFullScreen()
# The customary closing line.
# The infinite event-processing loop keeps running and stops when exit is called.
sys.exit(app.exec_())
|
StarcoderdataPython
|
1878039
|
<filename>lib/radiocorona/frontend/migrations/0001_initial.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-23 20:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
import django.utils.timezone
import mptt.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('users', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author_name', models.CharField(max_length=12)),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('ups', models.IntegerField(default=0)),
('downs', models.IntegerField(default=0)),
('score', models.IntegerField(default=0)),
('raw_comment', models.TextField(blank=True)),
('html_comment', models.TextField(blank=True)),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
to='users.RedditUser')),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='frontend.Comment')),
],
options={
'abstract': False,
},
managers=[
('objects', django.db.models.manager.Manager()),
],
),
migrations.CreateModel(
name='Submission',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author_name', models.CharField(max_length=12)),
('title', models.CharField(max_length=250)),
('url', models.URLField(blank=True, null=True)),
('text', models.TextField(blank=True, max_length=5000)),
('text_html', models.TextField(blank=True)),
('ups', models.IntegerField(default=0)),
('downs', models.IntegerField(default=0)),
('score', models.IntegerField(default=0)),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('comment_count', models.IntegerField(default=0)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
to='users.RedditUser')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='vote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('vote_object_id', models.PositiveIntegerField()),
('value', models.IntegerField(default=0)),
('submission', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
to='frontend.Submission')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
to='users.RedditUser')),
('vote_object_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
),
migrations.AddField(
model_name='comment',
name='submission',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='frontend.Submission'),
),
]
|
StarcoderdataPython
|
1757629
|
import setuptools
setuptools.setup(
name='interactive_widgets',
version='0.0.1',
packages=setuptools.find_packages(),
entry_points={
'console_scripts': [
'interactive-widgets-backend = interactive_widgets.backend.main:main',
'interactive-widgets-monitor = interactive_widgets.monitor.main:main',
],
},
install_requires=[
'aiodocker>=0.19.1',
'aiohttp>=3.6.2',
'click>=7.1.2',
'inotify>=0.2.10',
],
)
|
StarcoderdataPython
|
5030331
|
<gh_stars>0
#from dynamodb import dynamo_functions
#def test_scan_sheets():
# assert 1==1
|
StarcoderdataPython
|
3378793
|
<gh_stars>0
from nets.yolo3 import yolo_body
from keras.layers import Input
from yolo import YOLO
from PIL import Image
import numpy as np
from datetime import datetime
if __name__ == '__main__':
yolo = YOLO()
# x = 10
# photo = []
# with open('2007_test.txt') as f:
# file = f.readlines()
# # print(file[0])
# for line in file:
# photo.append(line.split()[0])
# np.random.seed(int(datetime.timestamp(datetime.now())))
# np.random.shuffle(photo)
# np.random.seed(None)
# for i in range(x):
if True:
# img = input('Input image filename:')
img ='E:/CMPE_master_project/photo/v1780.jpg'
# print(photo[i])
try:
image = Image.open(img)
except:
print('Open Error! Try again!')
# continue
else:
# [[type,[top,left,bottom,right],score]
boxes = yolo.detect_image_boxes(image)
print(boxes)
r_image = yolo.detect_image(image)
r_image.show()
yolo.close_session()
|
StarcoderdataPython
|
6444409
|
# Copyright 2011 <NAME> (<EMAIL>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tmapi.constants import XSD_ANY_URI, XSD_STRING
from tmapi.exceptions import IllegalArgumentException
from tmapi.indices.index import Index
from tmapi.models import Locator, Name, Occurrence
from tmapi.models.variant import Variant
class LiteralIndex (Index):
def get_names (self, value):
"""Retrieves the topic names in the topic map that have a
value equal to `value`.
The return value may be empty but must never be None.
:param value: the value of the `Name`s to be returned
:type value: string
:rtype: `QuerySet` of `Name`s
"""
if value is None:
raise IllegalArgumentException('value must not be None')
return Name.objects.filter(topic__topic_map=self.topic_map).filter(
value=value)
def get_occurrences (self, value, datatype=None):
"""Returns the `Occurrence`s in the topic map whose value
property matches `value` (or if `value` is a `Locator`, the
IRI represented by `value`).
If `value` is a string and `datatype` is None, the
`Occurrence`s' datatype property must be xsd:string.
If `value` is a `Locator`, the `Occurrence`s' datatype
property must be xsd:anyURI.
If `datatype` is not None, the `Occurrence`s returned must be
of that datatype.
The return value may be empty but must never be None.
:param value: the value of the `Occurrence`s to be returned
:type value: string or `Locator`
:param datatype: optional datatype of the `Occurrence`s to be returned
:type datatype: `Locator`
:rtype: `QuerySet` of `Occurrence`s
"""
if value is None:
raise IllegalArgumentException('value must not be None')
if isinstance(value, Locator):
value = value.get_reference()
datatype = XSD_ANY_URI
elif datatype is None:
datatype = XSD_STRING
else:
datatype = datatype.get_reference()
return Occurrence.objects.filter(topic__topic_map=self.topic_map).filter(value=value).filter(datatype=datatype)
def get_variants (self, value, datatype=None):
"""Returns the `Variant`s in the topic map whose value
property matches `value` (or if `value` is a `Locator`, the
IRI represented by `value`).
If `value` is a string and `datatype` is None, the
`Variant`s' datatype property must be xsd:string.
If `value` is a `Locator`, the `Variant`s' datatype
property must be xsd:anyURI.
If `datatype` is not None, the `Variant`s returned must be
of that datatype.
The return value may be empty but must never be None.
:param value: the value of the `Variant`s to be returned
:type value: string or `Locator`
:param datatype: optional datatype of the `Variant`s to be returned
:type datatype: `Locator`
:rtype: `QuerySet` of `Variant`s
"""
if value is None:
raise IllegalArgumentException('value must not be None')
if isinstance(value, Locator):
value = value.get_reference()
datatype = XSD_ANY_URI
elif datatype is None:
datatype = XSD_STRING
else:
datatype = datatype.get_reference()
return Variant.objects.filter(name__topic__topic_map=self.topic_map).filter(value=value).filter(datatype=datatype)
|
StarcoderdataPython
|
8194434
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import subprocess
def build_docs(input_dir: str, output_dir: str):
subprocess.call('python3 -m pip install --upgrade pip', shell=True, cwd=input_dir)
subprocess.call('python3 -m pip install mkdocs', shell=True, cwd=input_dir)
subprocess.call('python3 -m mkdocs build', shell=True, cwd=input_dir)
site_dir = os.path.join(input_dir, 'site')
shutil.copytree(site_dir, output_dir)
def main() -> None:
if os.path.exists('site'):
if os.path.isfile('site') or os.path.islink('site'):
os.unlink('site')
else:
shutil.rmtree('site')
os.mkdir('site')
# get the path of the current directory
docs_path = os.path.join(os.getcwd(), "docs/en")
print(docs_path)
build_docs(docs_path, output_dir='site/en')
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
5034404
|
# standard
import time
import socket
# pyqt
from PyQt5.QtCore import QObject, QRunnable, pyqtSignal
class WorkerSignals(QObject):
"""Worker Signals"""
progress = pyqtSignal(int)
error = pyqtSignal(object)
result = pyqtSignal(object)
done = pyqtSignal()
class Worker(QRunnable):
"""Worker"""
def __init__(self, fn, *args, **kwargs):
super().__init__()
self._fn = fn
self._args = args
self._kwargs = kwargs
# attach signals
self.signals = WorkerSignals()
# pass progress signal to fn
self._kwargs['progress_callback'] = self.signals.progress
def run(self):
try:
result = self._fn(*self._args, **self._kwargs)
except Exception as e:
self.signals.error.emit(e)
else:
self.signals.done.emit()
self.signals.result.emit(result)
class NetworkCheckerSignals(QObject):
"""NetworkChecker Signals"""
tik = pyqtSignal(int)
connected = pyqtSignal()
class NetworkChecker(QRunnable):
"""Network Checker"""
def __init__(self, ip, port, timeout, interval, jitter):
super().__init__()
self.ip = ip
self.port = port
self.timeout = timeout
self.interval = interval
self.jitter = jitter
# signals
self.signals = NetworkCheckerSignals()
def atempt(self):
try:
s = socket.create_connection((self.ip, self.port), self.timeout)
except OSError:
return False
else:
s.close()
return True
def run(self):
while True:
if self.atempt():
self.signals.connected.emit()
return
else:
for counter in range(self.interval, 0, -1):
time.sleep(1)
self.signals.tik.emit(counter)
self.interval += self.jitter
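# Hedged usage sketch (added example): dispatching a Worker on Qt's global thread
# pool. The long_task function and its arguments are assumptions for illustration.
def long_task(n, progress_callback):
    # progress_callback is the pyqtSignal(int) injected by Worker via kwargs
    for i in range(n):
        time.sleep(0.1)
        progress_callback.emit(int(100 * (i + 1) / n))
    return "done"

def run_worker_example():
    from PyQt5.QtCore import QThreadPool
    worker = Worker(long_task, 10)
    worker.signals.result.connect(print)
    worker.signals.error.connect(print)
    QThreadPool.globalInstance().start(worker)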
|
StarcoderdataPython
|
6507577
|
<gh_stars>0
# -*- coding: utf-8 -*-
# --------------------------------------
# @Time : 2020/11/01
# @Author : <NAME>
# @Email : <EMAIL>
# @File : dataset.py
# Description :preprocess data
# --------------------------------------
import sys
ros_path = '/opt/ros/kinetic/lib/python2.7/dist-packages'
if ros_path in sys.path:
sys.path.remove(ros_path)
import cv2
import os
from PIL import Image
import math
import numpy as np
import tensorflow as tf
from cfg.config import path_params, model_params, classes_map
from utils.process_utils import *
from data.augmentation import *
class Dataset(object):
def __init__(self):
self.data_path = path_params['data_path']
self.anchors = model_params['anchors']
self.num_classes = model_params['classes']
self.input_height = model_params['input_height']
self.input_width = model_params['input_width']
self.grid_height = model_params['grid_height']
self.grid_width = model_params['grid_width']
self.iou_threshold = model_params['iou_threshold']
def convert(self, data):
"""
transform [class_id, x, y, w, h] to [x1, y1, x2, y2, class_id]
:param data: label data shape is [5,]
:return: [x1, y1, x2, y2, class_id]
"""
x1 = data[1] - data[3] / 2.0
y1 = data[2] - data[4] / 2.0
x2 = data[1] + data[3] / 2.0
y2 = data[2] + data[4] / 2.0
class_id = data[0]
return [x1, y1, x2, y2, class_id]
def letterbox_resize(self, image, bboxes, new_height, new_width, interp=0):
"""
Resize the image and correct the bbox accordingly.
:param image: BGR image data shape is [height, width, channel]
:param bboxes: bounding box shape is [num, 4]
:param new_height: new image height
:param new_width: new image width
:param interp:
:return: image_padded, bboxes
"""
origin_height, origin_width = image.shape[:2]
resize_ratio = min(new_width / origin_width, new_height / origin_height)
resize_width = int(resize_ratio * origin_width)
resize_height = int(resize_ratio * origin_height)
image = cv2.resize(image, (resize_width, resize_height), interpolation=interp)
image_padded = np.full((new_height, new_width, 3), 128, np.uint8)
dw = int((new_width - resize_width) / 2)
dh = int((new_height - resize_height) / 2)
image_padded[dh:resize_height + dh, dw:resize_width + dw, :] = image
# xmin, xmax, ymin, ymax
bboxes[:, [0, 2]] = bboxes[:, [0, 2]] * resize_ratio + dw
bboxes[:, [1, 3]] = bboxes[:, [1, 3]] * resize_ratio + dh
return image_padded, bboxes
def load_data(self, filename):
"""
load image and label
:param filename: file name
:return: image_raw, bbox_raw, image_shape
"""
image_path = os.path.join(self.data_path, "images", filename+'.jpg')
image = cv2.imread(image_path)
image_shape = image.shape
label_path = os.path.join(self.data_path, "labels", filename+'.txt')
lines = [line.rstrip() for line in open(label_path)]
bboxes = []
for line in lines:
data = line.split(' ')
data[0:] = [float(t) for t in data[0:]]
box = self.convert(data)
bboxes.append(box)
while len(bboxes) < 150:
bboxes = np.append(bboxes, [[0.0, 0.0, 0.0, 0.0, 0.0]], axis=0)
bboxes = np.array(bboxes, dtype=np.float32)
image_raw = image.tobytes()
bbox_raw = bboxes.tobytes()
return image_raw, bbox_raw, image_shape
def preprocess_true_data(self, image, labels):
"""
preprocess true boxes to train input format
:param image: numpy.ndarray of shape [416, 416, 3]
:param labels: numpy.ndarray of shape [20, 5]
shape[0]: the number of labels in each image.
shape[1]: x_min, y_min, x_max, y_max, class_index, yaw
:return:
image_norm is normalized image[0~1]
y_true shape is [feature_height, feature_width, per_anchor_num, 5 + num_classes]
"""
# Data augmentation: random horizontal flip, random crop and random translation
image = np.array(image)
image, labels = random_horizontal_flip(image, labels)
image, labels = random_crop(image, labels)
image, labels = random_translate(image, labels)
# Resize the image to 416*416 (letterbox) and normalize it
image_rgb = cv2.cvtColor(np.copy(image), cv2.COLOR_BGR2RGB).astype(np.float32)
image_rgb, labels = letterbox_resize(image_rgb, (self.input_height, self.input_width), np.copy(labels), interp=0)
image_norm = image_rgb / 255.
input_shape = np.array([self.input_height, self.input_width], dtype=np.int32)
assert input_shape[0] % 32 == 0
assert input_shape[1] % 32 == 0
feature_sizes = input_shape // 32
# anchors normalized to image space 0~1
num_anchors = len(self.anchors)
anchor_array = np.array(model_params['anchors'])
# remove empty entries from labels
valid = (np.sum(labels, axis=-1) > 0).tolist()
labels = labels[valid]
y_true = np.zeros(shape=[feature_sizes[0], feature_sizes[1], num_anchors, 4 + 1 + len(self.num_classes)], dtype=np.float32)
boxes_xy = (labels[:, 0:2] + labels[:, 2:4]) / 2
boxes_wh = labels[:, 2:4] - labels[:, 0:2]
true_boxes = np.concatenate([boxes_xy, boxes_wh], axis=-1)
anchors_max = anchor_array / 2.
anchors_min = - anchor_array / 2.
valid_mask = boxes_wh[:, 0] > 0
wh = boxes_wh[valid_mask]
# [N, 1, 2]
wh = np.expand_dims(wh, -2)
boxes_max = wh / 2.
boxes_min = - wh / 2.
# [N, 1, 2] & [5, 2] ==> [N, 5, 2]
intersect_mins = np.maximum(boxes_min, anchors_min)
intersect_maxs = np.minimum(boxes_max, anchors_max)
# [N, 5, 2]
intersect_wh = np.maximum(intersect_maxs - intersect_mins, 0.)
intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
box_area = wh[..., 0] * wh[..., 1]
anchor_area = anchor_array[:, 0] * anchor_array[:, 1]
# [N, 5]
iou = intersect_area / (box_area + anchor_area - intersect_area + tf.keras.backend.epsilon())
# Find best anchor for each true box [N]
best_anchor = np.argmax(iou, axis=-1)
for t, k in enumerate(best_anchor):
i = int(np.floor(true_boxes[t, 0] / 32.))
j = int(np.floor(true_boxes[t, 1] / 32.))
c = labels[t, 4].astype('int32')
y_true[j, i, k, 0:4] = true_boxes[t, 0:4]
y_true[j, i, k, 4] = 1
y_true[j, i, k, 5 + c] = 1
return image_norm, y_true
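# Hedged usage sketch (added comment; the file stem "000001" is an assumption and
# must exist under data_path/images and data_path/labels for this to run):
#
# dataset = Dataset()
# image_raw, bbox_raw, shape = dataset.load_data("000001")
# image = np.frombuffer(image_raw, dtype=np.uint8).reshape(shape)
# bboxes = np.frombuffer(bbox_raw, dtype=np.float32).reshape(-1, 5)
# image_norm, y_true = dataset.preprocess_true_data(image, bboxes)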
|
StarcoderdataPython
|
5183592
|
import networkx as nx
from nose.tools import raises
from regraph.attribute_sets import FiniteSet
from regraph.rules import Rule
from regraph.utils import assert_graph_eq, normalize_attrs
from regraph.exceptions import RuleError
import regraph.primitives as prim
class TestRule(object):
"""Class for testing `regraph.rules` module."""
def __init__(self):
"""Initialize test."""
# Define the left hand side of the rule
self.pattern = nx.DiGraph()
self.pattern.add_node(1)
self.pattern.add_node(2)
self.pattern.add_node(3)
prim.add_node(self.pattern, 4, {'a': 1})
self.pattern.add_edges_from([
(1, 2),
(3, 2),
(4, 1)
])
prim.add_edge(self.pattern, 2, 3, {'a': {1}})
# Define preserved part of the rule
self.p = nx.DiGraph()
self.p.add_node('a')
self.p.add_node('b')
self.p.add_node('c')
prim.add_node(self.p, 'd', {'a': 1})
self.p.add_edges_from([
('a', 'b'),
('d', 'a')
])
prim.add_edge(self.p, 'b', 'c', {'a': {1}})
# Define the right hand side of the rule
self.rhs = nx.DiGraph()
self.rhs.add_node('x')
self.rhs.add_node('y')
self.rhs.add_node('z')
# self.rhs.add_node('s', {'a': 1})
prim.add_node(self.rhs, 's', {'a': 1})
self.rhs.add_node('t')
self.rhs.add_edges_from([
('x', 'y'),
# ('y', 'z', {'a': {1}}),
('s', 'x'),
('t', 'y')
])
prim.add_edge(self.rhs, 'y', 'z', {'a': {1}})
# Define mappings
self.p_lhs = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
self.p_rhs = {'a': 'x', 'b': 'y', 'c': 'z', 'd': 's'}
return
def test_add_node(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.add_node('g', {'a': 1})
assert_graph_eq(rule.p, self.p)
assert_graph_eq(rule.lhs, self.pattern)
assert('g' in rule.rhs)
t = {'a': set([1])}
normalize_attrs(t)
assert(rule.rhs.node['g'] == t)
return
def test_remove_node(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.remove_node(2)
assert_graph_eq(rule.lhs, self.pattern)
assert('b' not in rule.p.nodes())
assert(('a', 'b') not in rule.p.edges())
assert(('b', 'c') not in rule.p.edges())
assert('y' not in rule.rhs.nodes())
assert(('x', 'y') not in rule.rhs.edges())
assert(('t', 'y') not in rule.rhs.edges())
assert(('y', 'z') not in rule.rhs.edges())
return
def test_add_edge(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.add_edge(4, 2)
assert_graph_eq(rule.lhs, self.pattern)
assert_graph_eq(rule.p, self.p)
assert(('s', 'y') in rule.rhs.edges())
return
@raises(RuleError)
def test_remove_non_existing_edge(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.remove_edge(3, 2)
return
def test_remove_edge(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.remove_edge(1, 2)
assert_graph_eq(rule.lhs, self.pattern)
assert(('d', 'a') in rule.p.edges())
assert(('s', 'x') in rule.rhs.edges())
return
def test_clone_node(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.clone_node(2)
assert_graph_eq(rule.lhs, self.pattern)
assert('b1' in rule.p.nodes())
assert('y1' in rule.rhs.nodes())
assert(('a', 'b1') in rule.p.edges())
assert(('b1', 'c') in rule.p.edges())
assert(('x', 'y1') in rule.rhs.edges())
assert(('t', 'y1') in rule.rhs.edges())
return
def test_merge_nodes(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
new_name = rule.merge_nodes(1, 4)
assert_graph_eq(rule.lhs, self.pattern)
assert_graph_eq(rule.p, self.p)
assert(new_name in rule.rhs.nodes())
assert((new_name, new_name) in rule.rhs.edges())
assert((new_name, 'y') in rule.rhs.edges())
return
def test_add_node_attrs(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.add_node_attrs(1, {'a': 1})
t1 = {'a': {1}}
t2 = {'a': {1, 2}}
t3 = {'a': {1, 2}, 'b': {1}}
normalize_attrs(t1)
normalize_attrs(t2)
normalize_attrs(t3)
assert(rule.rhs.node['x'] == t1)
rule.add_node_attrs(4, {'a': 1})
assert(rule.rhs.node['s'] == t1)
rule.add_node_attrs(4, {'a': 2})
assert(rule.rhs.node['s'] == t2)
rule.add_node_attrs(4, {'b': 1})
assert(rule.rhs.node['s'] == t3)
return
def test_remove_node_attrs(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.add_node_attrs(4, {'a': 2})
rule.remove_node_attrs(4, {'a': 1})
t1 = {'a': set()}
t2 = {'a': set([2])}
normalize_attrs(t1)
normalize_attrs(t2)
assert(rule.p.node['d'] == t1)
assert(rule.rhs.node['s'] == t2)
return
def test_update_node_attrs(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.update_node_attrs(4, {'b': 2})
assert(rule.p.node['d'] is None)
test_dict = {'b': {2}}
normalize_attrs(test_dict)
assert(rule.rhs.node['s'] == test_dict)
return
def test_add_edge_attrs(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.add_edge_attrs(4, 1, {'amazing': True})
assert_graph_eq(rule.p, self.p)
t = {'amazing': {True}}
normalize_attrs(t)
assert(rule.rhs.edge['s']['x'] == t)
return
def test_remove_edge_attrs(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.remove_edge_attrs(2, 3, {'a': set()})
t1 = {'a': {1}}
normalize_attrs(t1)
assert(rule.p.edge['b']['c'] == t1)
assert(rule.rhs.edge['y']['z'] == t1)
rule.remove_edge_attrs(2, 3, {'a': {1}})
t2 = {'a': set()}
normalize_attrs(t2)
print(t2)
print(rule.p.edge['b']['c'])
assert(rule.p.edge['b']['c'] == t2)
assert(rule.rhs.edge['y']['z'] == t2)
return
def test_update_edge_attrs(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.update_edge_attrs(2, 3, {'b': 1})
assert(rule.p.edge['b']['c'] is None)
test_dict = {'b': FiniteSet({1})}
# normalize_attrs(test_dict)
assert(rule.rhs.edge['y']['z'] == test_dict)
return
def merge_node_list(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.merge_node_list([2, 3], 'wow_name')
assert(rule.lhs == self.pattern)
assert(rule.p == self.p)
assert('wow_name' in rule.rhs.nodes())
assert(('wow_name', 'wow_name') in rule.rhs.edges())
assert(('wow_name', 'y') in rule.rhs.edges())
def test_all(self):
rule = Rule(self.p, self.pattern, self.rhs,
self.p_lhs, self.p_rhs)
rule.clone_node(2)
rule.remove_node(1)
def test_from_script(self):
commands = "clone 2 as 21.\nadd_node 'a' {'a': 1}.\ndelete_node 3."
rule = Rule.from_transform(self.pattern, commands=commands)
assert('a' in rule.rhs.nodes())
assert('21' in rule.rhs.nodes())
assert(3 not in rule.rhs.nodes())
def test_component_getters(self):
pattern = nx.DiGraph()
prim.add_nodes_from(
pattern,
[(1, {"a1": {1}}), (2, {"a2": {2}}), (3, {"a3": {3}})]
)
prim.add_edges_from(
pattern,
[
(1, 2, {"a12": {12}}),
(2, 3),
(3, 2, {"a32": {32}})
]
)
rule = Rule.from_transform(pattern)
rule.remove_node(1)
rule.remove_edge(2, 3)
new_name, _ = rule.clone_node(2)
print(new_name)
rule.remove_node_attrs(3, {"a3": {3}})
rule.remove_edge_attrs(3, 2, {"a32": {32}})
rule.add_node_attrs(3, {"a3": {100}})
rule.add_node(4)
rule.add_edge_rhs(4, "21")
assert(rule.removed_nodes() == {1})
print(rule.removed_edges())
assert(rule.removed_edges() == {(2, 3), (new_name[0], 3)})
assert(len(rule.cloned_nodes()) == 1 and
2 in rule.cloned_nodes().keys())
assert(len(rule.removed_node_attrs()) == 1 and
3 in rule.removed_node_attrs()[3]["a3"])
assert(len(rule.removed_edge_attrs()) == 1 and
32 in rule.removed_edge_attrs()[(3, 2)]["a32"])
assert(rule.added_nodes() == {4})
assert(rule.added_edges() == {(4, "21")})
# rule.merged_nodes()
# rule.added_edge_attrs()
assert(len(rule.added_node_attrs()) == 1 and
100 in rule.added_node_attrs()[3]["a3"])
assert(rule.is_restrictive() and rule.is_relaxing())
def test_from_commands(self):
pattern = nx.DiGraph()
prim.add_nodes_from(
pattern,
[(1, {'state': 'p'}),
(2, {'name': 'BND'}),
3,
4]
)
prim.add_edges_from(
pattern,
[(1, 2, {'s': 'p'}),
(3, 2, {'s': 'u'}),
(3, 4)]
)
p = nx.DiGraph()
prim.add_nodes_from(
p,
[(1, {'state': 'p'}), ("1_clone", {'state': 'p'}), (2, {'name': 'BND'}), 3, 4])
prim.add_edges_from(
p, [(1, 2), ('1_clone', 2), (3, 4)])
rhs = nx.DiGraph()
prim.add_nodes_from(
rhs,
[(1, {'state': 'p'}), ("1_clone", {'state': 'p'}), (2, {'name': 'BND'}), 3, 4, 5])
prim.add_edges_from(
rhs, [(1, 2, {'s': 'u'}), ('1_clone', 2), (2, 4), (3, 4), (5, 3)])
p_lhs = {1: 1, '1_clone': 1, 2: 2, 3: 3, 4: 4}
p_rhs = {1: 1, '1_clone': '1_clone', 2: 2, 3: 3, 4: 4}
rule1 = Rule(p, pattern, rhs, p_lhs, p_rhs)
commands = "clone 1.\n" +\
"delete_edge 3 2.\n" +\
"add_node 5.\n" +\
"add_edge 2 4.\n" +\
"add_edge 5 3."
rule2 = Rule.from_transform(pattern, commands)
assert((5, 3) in rule2.rhs.edges())
assert(5 in rule2.rhs.nodes() and 5 not in rule2.p.nodes())
assert((2, 4) in rule2.rhs.edges())
|
StarcoderdataPython
|
4957976
|
import pytest
import discretisedfield as df
def test_instances():
p1 = (0, 0, 0)
p2 = (100, 200, 300)
cell = (1, 2, 3)
region = df.Region(p1=p1, p2=p2)
mesh = df.Mesh(region=region, cell=cell)
field = df.Field(mesh, dim=3, value=(1, 2, 3))
assert df.dx(field) == 1
assert df.dy(field) == 2
assert df.dz(field) == 3
assert df.dS(field.plane('z')).average == (0, 0, 2)
assert df.dV(field) == 6
def test_integral():
p1 = (0, 0, 0)
p2 = (10, 10, 10)
cell = (1, 1, 1)
region = df.Region(p1=p1, p2=p2)
mesh = df.Mesh(region=region, cell=cell)
field = df.Field(mesh, dim=3, value=(1, -2, 3))
for attr in ['dx', 'dy', 'dz', 'dV']:
assert df.integral(field*getattr(df, attr)) == (1000, -2000, 3000)
assert df.integral(getattr(df, attr)*field) == (1000, -2000, 3000)
assert df.integral(field*abs(getattr(df, attr))) == (1000, -2000, 3000)
assert df.integral(field * (2 * df.dx)) == (2000, -4000, 6000)
assert df.integral(field * (df.dx * 2)) == (2000, -4000, 6000)
assert df.integral(field.plane('z') @ df.dS) == 300
assert df.integral(df.dS @ field.plane('z')) == 300
assert df.integral(field.plane('z') * (df.dS @ df.dS)) == (100, -200, 300)
assert (field.plane('z') * (df.dS @ (0, 0, 1))).average == (1, -2, 3)
assert (field.plane('z') * ((0, 0, 1) @ df.dS)).average == (1, -2, 3)
dV = df.dx*df.dy*df.dz
assert df.integral(field * dV) == df.integral(field * df.dV)
with pytest.raises(TypeError):
res = df.dx * 'dy'
with pytest.raises(TypeError):
res = df.dS @ 'dy'
|
StarcoderdataPython
|
5008615
|
<filename>examples/framework_ideas/use_case.py
# TODO #18. clean up obsolete ideas and out-of-date patterns
from dataclasses import dataclass
import typing as t
from marshmallow import Schema
from pca.exceptions import ProcessError, ValidationError
from pca.utils.functools import reify
from pca.utils.dependency_injection import Container
@dataclass
class UseCaseInterface:
action: str
schema: Schema
@dataclass
class UseCaseInput:
data: t.Dict[str, t.Any]
action: t.Optional[str]
@dataclass
class UseCaseResult:
errors: t.Dict[str, ProcessError]
data: t.Dict[str, t.Any]
@property
def success(self):
return not self.errors
class UseCase:
"""
This is core object of the application. Its methods represent
application-specific actions that can be taken or queries to ask.
"""
    Input: t.ClassVar[t.Type[UseCaseInput]]
container: Container
@property
def interfaces(self) -> t.List[UseCaseInterface]:
raise NotImplementedError
def action(self, input: UseCaseInput):
if self.is_available(input): # TODO ToCToU problem?
            action_method = getattr(self, input.action or 'action')  # fall back to the default action method
action_method(input.data)
else:
raise ValueError(input) # TODO library-specific error class to throw
def execute(self, input: UseCaseInput) -> UseCaseResult:
"""Executes the operation defined by the use_case."""
try:
data_after_validation = self.validate(input)
except ValidationError as e:
return UseCaseResult(errors=e.errors, data={})
result = self.action(data_after_validation)
return UseCaseResult(errors={}, data=result)
def is_available(self, input: UseCaseInput):
raise NotImplementedError
# noinspection PyAbstractClass
class SimpleUseCase(UseCase):
    schema_class: t.ClassVar[t.Optional[t.Type[Schema]]] = None
def __init__(self, container: Container):
self.container = container
@reify
def interfaces(self) -> t.List[UseCaseInterface]:
return [UseCaseInterface(schema=self.schema_class, action='action')]
def validate(self, input: UseCaseInput):
context = self.get_context()
schema = self.schema_class(context=context)
return schema.load(input)
def execute(self, input: UseCaseInput) -> UseCaseResult:
"""Executes the operation defined by the use_case."""
try:
dto = self.validate(input)
except ValidationError as e:
return UseCaseResult(errors=e.errors, data={})
result = self.action(dto)
return UseCaseResult(errors={}, data=result)
def can_execute(self, input: UseCaseInput) -> UseCaseResult:
"""
Check whether the operation defined by the use_case can be executed.
Success with empty data by default.
"""
try:
self.validate(input)
except ValidationError as e:
return UseCaseResult(errors=e.errors, data={})
return UseCaseResult(errors={}, data={})
def get_context(self) -> dict:
raise NotImplementedError
def action(self, input) -> dict:
raise NotImplementedError
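
# Usage sketch (illustrative only; ``GreetSchema`` and the field names below are
# invented for this example and are not part of the module):
#
#     class GreetUseCase(SimpleUseCase):
#         schema_class = GreetSchema          # a marshmallow Schema subclass
#
#         def get_context(self) -> dict:
#             return {}
#
#         def action(self, dto) -> dict:
#             return {"greeting": "Hello, {}!".format(dto["name"])}
#
#     use_case = GreetUseCase(container)
#     result = use_case.execute(UseCaseInput(data={"name": "Ada"}, action=None))
#     assert result.success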
|
StarcoderdataPython
|
5049069
|
import websocket
try:
import thread
except ImportError:
import _thread as thread
import time
import json
from datetime import datetime
from pprint import pprint
# Function that is executed when the socket received a message
def on_message(ws, message):
# Parsing the received JSON
msg = json.loads(message)
if msg[0:3] == 'set':
print("Server Said: \n", msg[6:-2])
else:
print("Server Said: ", msg)
if msg == "Username already taken, connect again!\n":
ws.close()
# Function that is executed when the socket errors out
def on_error(ws, error):
print(error)
# Function that is executed when the socket closes
def on_close(ws):
print("### closed connection ###")
# Utility function to send content on the socket
def sender(ws, content):
    payload = json.dumps(content)  # use a local name rather than shadowing the built-in str
    ws.send(payload)
# ======================================= Utility Functions =======================================
# The following utility functions are used to send messages to the Server
# Each function serializes the data into the JSON expected by the Server
# And then uses the sender() utility to send the message on the socket
# Function that registers a client
def register(ws, id):
content = {
"id": id
}
message = {
'message': 'Register',
'content': json.dumps(content)
}
sender(ws, message)
# Function that sends a connect message to the Server
# This is the only option if the user is currently disconnected
# In our system this is analogous to a Login
def connect(ws, id):
content = {
"id": id
}
message = {
'message': "Connect",
'content': json.dumps(content)
}
sender(ws, message)
# Function that sends a connect message to the Server
# This can only be used if the user is currently connected
# In our system this is analogous to a Logout
def disconnect(ws, id):
content = {
"id": id
}
message = {
'message': 'Disconnect',
'content': json.dumps(content)
}
sender(ws, message)
# Function that sends a follow message for the client
# Requires the client to specify the userid of the person they want to follow
def follow(ws, id):
to_follow = input("Which user do you want to follow: ")
content = {
"myID": id,
"toFollow": to_follow
}
message = {
'message': 'Follow',
'content': json.dumps(content)
}
sender(ws, message)
# Function that sends a tweet message
def tweet(ws, id):
text = input("Enter tweet: ")
content = {
"userid": id,
"text": text,
"time_of_tweet": datetime.now(),
"retweet": False
}
message = {
'message': 'Tweets',
'content': json.dumps(content, default=str)
}
sender(ws, message)
# Function that sends a retweet message
# Retweeet requires the client to specify the ID of the original tweet
def retweet(ws, id):
tid = input("Enter tweet id you want to retweet: ")
content = {
"id": id,
"tid": tid
}
message = {
'message': 'Retweet',
'content': json.dumps(content)
}
sender(ws, message)
# Function that handles the 3 different types of queries
# Each query except for "Following" requires an input
def search(ws, id):
query = int(input("What search do you want to execute:\n1. Hashtag\n2. Mention\n3. Subscribed Tweets\n"))
switcher = {
1: "Hashtag",
2: "Mention",
3: "Following"
}
content = {
"queryType": switcher[query],
"query": input("Enter search term: ") if query != 3 else id
}
message = {
'message': 'Search',
'content': json.dumps(content)
}
sender(ws, message)
def remove(ws, id):
content = {
"id": id
}
message = {
'message': 'Remove',
'content': json.dumps(content)
}
sender(ws, message)
# =================================================================================================
# Function that is executed when the socket connection is connected
def on_open(ws):
def run(*args):
# A pseudo switch case that is used to easily call the proper function
switcher = {
1: disconnect,
2: follow,
3: tweet,
4: retweet,
5: search
}
uid = input("Enter Username for registration: ")
register(ws, uid)
flag = True
while(True):
if flag:
# Menu driven interface for the client
inp = int(input("What functionality do you want to execute:\n1. Disconnect\n2. Follow\n3. Tweet\n4. Retweet\n5. Search\n"))
if inp == 1:
flag = False
switcher[inp](ws, uid)
print("request complete!")
if not flag:
c = input("Press Y to connect again or E to exit: ")
if c.lower() == 'y':
connect(ws, uid)
flag = True
elif c.lower() == 'e':
remove(ws, uid)
ws.close()
thread.start_new_thread(run, ())
if __name__ == "__main__":
websocket.enableTrace(False)
ws = websocket.WebSocketApp("ws://twittersharp.azurewebsites.net:80/websocket",
on_message = on_message,
on_error = on_error,
on_close = on_close)
ws.on_open = on_open
ws.run_forever()
|
StarcoderdataPython
|
5034150
|
import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
# Constants
GITHUB_SLUG = "jacebrowning/memegen"
GITHUB_URL = "https://github.com/{}".format(GITHUB_SLUG)
GITHUB_BASE = "https://raw.githubusercontent.com/{}/master/".format(GITHUB_SLUG)
CONTRIBUTING_URL = GITHUB_BASE + "CONTRIBUTING.md"
CHANGES_URL = GITHUB_BASE + "CHANGELOG.md"
# Variables
FACEBOOK_APP_ID = 'localhost'
FACEBOOK_IMAGE_HEIGHT = os.getenv('FACEBOOK_IMAGE_HEIGHT', 402)
FACEBOOK_IMAGE_WIDTH = os.getenv('FACEBOOK_IMAGE_WIDTH', 802)
GOOGLE_ANALYTICS_TID = 'localhost'
REGENERATE_IMAGES = os.getenv('REGENERATE_IMAGES')
TWITTER_IMAGE_HEIGHT = os.getenv('TWITTER_IMAGE_HEIGHT', 401)
TWITTER_IMAGE_WIDTH = os.getenv('TWITTER_IMAGE_WIDTH', 801)
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
FACEBOOK_APP_ID = os.getenv('FACEBOOK_APP_ID')
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "No configuration specified"
for config in Config.__subclasses__():
if config.ENV == name:
return config
assert False, "No matching configuration: {}".format(name)
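
# Usage sketch (illustrative; the CONFIG environment-variable name is an
# assumption and not something this module defines):
if __name__ == '__main__':
    # e.g. CONFIG=prod python config.py
    config = get_config(os.getenv('CONFIG', 'dev'))
    print(config.ENV, config.DEBUG)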
|
StarcoderdataPython
|
7554
|
<gh_stars>0
from collections import Counter
from datetime import datetime
import os
import requests
from subprocess import Popen, PIPE
from pathlib import Path
import json
from typing import Dict, Union, TYPE_CHECKING
from kipoi_utils.external.torchvision.dataset_utils import download_url
if TYPE_CHECKING:
import zenodoclient
ZENODO_BASE = "https://zenodo.org"
ZENODO_DEPOSITION = f"{ZENODO_BASE}/api/deposit/depositions"
PathType = Union[str, Path]
def cleanup(singularity_file_path: PathType) -> None:
"""
Deletes the singularity image that was created by build_singularity_image
"""
if isinstance(singularity_file_path, str):
singularity_file_path = Path(singularity_file_path)
if singularity_file_path.exists():
singularity_file_path.unlink()
def build_singularity_image(
name_of_docker_image: str,
singularity_image_name: str,
singularity_image_folder: PathType,
) -> PathType:
"""
This function builds a singularity image from a dockerhub image
using singularity pull. The resulting .sif is stored in <singularity_image_folder> and
the filepath is returned.
"""
if isinstance(singularity_image_folder, Path):
singularity_image_folder = str(singularity_image_folder)
pull_cmd = [
"singularity",
"pull",
"--name",
f"{singularity_image_folder}/{singularity_image_name}",
"--force",
f"docker://{name_of_docker_image}",
]
print(f"Building {singularity_image_name} - {' '.join(pull_cmd)}")
process = Popen(pull_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print(stderr)
print(stdout)
raise ValueError(
f"Singularity image {singularity_image_name} can not be built"
)
singularity_image_path = (
f"{singularity_image_folder}/{singularity_image_name}"
)
return singularity_image_path
def test_singularity_image(
singularity_image_folder: PathType, singularity_image_name: str, model: str
) -> None:
"""Tests a singularity image residing in singularity_image_folder
with kipoi test <model> --source=kipoi
Raises:
        ValueError: raised if the test is not successful"""
print(
f"Testing {model} with {singularity_image_folder}/{singularity_image_name}"
)
if model == "Basenji":
test_cmd = [
"kipoi",
"test",
f"{model}",
"--source=kipoi",
"--batch_size=2",
]
else:
test_cmd = ["kipoi", "test", f"{model}", "--source=kipoi"]
if isinstance(singularity_image_folder, str):
singularity_image_folder = Path(singularity_image_folder)
if isinstance(singularity_image_name, str):
singularity_image_name = Path(singularity_image_name)
exec_cmd = [
"singularity",
"exec",
f"{singularity_image_folder}/{singularity_image_name}",
]
exec_cmd.extend(test_cmd)
process = Popen(exec_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print(stdout)
print(stderr)
raise ValueError(
f"Singularity image {singularity_image_name} for {model} did not pass relevant tests"
)
def create_new_deposition(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> str:
"""Creates a new version of an existing depsosition on zenodo and returns the
corresponding id"""
status_code, response = zenodo_client.post_content(
f"{ZENODO_DEPOSITION}/{deposition_id}/actions/newversion"
)
return response["links"]["latest_draft"].split("/")[-1]
def get_deposit(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> Dict:
"""Returns the response body of a get request for an existing deposition"""
response = zenodo_client.get_content(
f"{ZENODO_DEPOSITION}/{deposition_id}"
)
return response
def upload_file(
zenodo_client: "zenodoclient.Client",
url: str,
singularity_image_folder: PathType,
filename: str,
) -> None:
"""Upload singularity_image_folder/filename to a url"""
path = Path(singularity_image_folder) / Path(filename)
zenodo_client.put_content(url, data=path)
def upload_metadata(
zenodo_client: "zenodoclient.Client",
url: str,
model_group: str = "",
shared_env: str = "",
) -> None:
"""Upload metadata for a model group to a given url"""
if not model_group and not shared_env:
raise ValueError(
"You need to provide atlease a shared env name or a model group name"
)
if model_group:
data = {
"metadata": {
"title": f"{model_group} singularity container",
"upload_type": "physicalobject",
"description": "This is a singularity container for models "
f"under https://kipoi.org/models/{model_group}/",
"creators": [
{"name": "<NAME>", "affiliation": "EMBL"}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
elif shared_env:
if "shared" in shared_env:
data = {
"metadata": {
"title": f"{shared_env} singularity container",
"upload_type": "physicalobject",
"description": "Singularity container with conda environment "
f"https://github.com/kipoi/kipoi-containers/blob/main/envfiles/{shared_env}.yml",
"creators": [
{
"name": "<NAME>",
"affiliation": "EMBL",
}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
elif shared_env == "mmsplice":
data = {
"metadata": {
"title": "MMSplice singularity container except mtsplice",
"upload_type": "physicalobject",
"description": "Singularity container for MMSplice models except mtsplice "
"under http://kipoi.org/models/MMSplice/",
"creators": [
{
"name": "<NAME>",
"affiliation": "EMBL",
}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
else:
raise ValueError(
"Available options are - mmsplice, sharedpy3keras2tf1, sharedpy3keras2tf2, sharedpy3keras1.2"
)
zenodo_client.put_content(url, data=data)
def push_deposition(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> Dict:
"""Pushes a deposition to zenodo. An additional get request is made to the newy pushed
deposition and a response body is returned"""
status_code, response = zenodo_client.post_content(
f"{ZENODO_DEPOSITION}/{deposition_id}/actions/publish"
)
response = get_deposit(zenodo_client, deposition_id)
return response
def update_existing_singularity_container(
zenodo_client: "zenodoclient.Client",
singularity_dict: Dict,
singularity_image_folder: PathType,
model_group: str,
file_to_upload: str = "",
push: bool = True,
) -> Dict:
"""This function creates a new draft version of an existing image's zenodo entry with updated
metadata and file after deleting the old file. If push is True, the draft version is finalized
and the url, name and md5 fields are updated and the new deposition id and file id is added to
singularity dict which contains information about the existing image. Otherwise, only
the new deposotion id and file id is added to the dictionary. This modified dictionary is
returned"""
# Create a new version of an existing deposition
deposition_id = singularity_dict["url"].split("/")[4]
new_deposition_id = create_new_deposition(zenodo_client, deposition_id)
response = get_deposit(zenodo_client, new_deposition_id)
bucket_url = response["links"]["bucket"]
filename = (
file_to_upload if file_to_upload else f"{singularity_dict['name']}.sif"
)
file_id = ""
for fileobj in response["files"]:
if fileobj["filename"] == filename:
file_id = fileobj["id"] # Assuming only 1 version is added
# Delete existing file from this new version
if file_id:
zenodo_client.delete_content(
f"{ZENODO_DEPOSITION}/{new_deposition_id}/files/{file_id}"
)
# Add a new file to this new version
upload_file(
zenodo_client,
f"{bucket_url}/{filename}",
singularity_image_folder,
filename,
)
url = f"{ZENODO_DEPOSITION}/{new_deposition_id}"
if (
"shared" in singularity_dict["name"]
or singularity_dict["name"] == "kipoi-docker_mmsplice-slim"
):
shared_env_name = (
singularity_dict["name"]
.replace("kipoi-docker_", "")
.replace("-slim", "")
)
upload_metadata(zenodo_client, url, shared_env=shared_env_name)
else:
upload_metadata(zenodo_client, url, model_group=model_group)
# publish the newly created revision
if push:
response = push_deposition(zenodo_client, new_deposition_id)
record_id = response["metadata"]["prereserve_doi"]["recid"]
file_id, file_name, file_md5 = "", "", ""
for fileobj in response["files"]:
if fileobj["filename"] == filename:
file_id = fileobj["id"] # Assuming only 1 version is added
file_name = fileobj["filename"].replace(".sif", "")
file_md5 = fileobj["checksum"]
return {
"new_deposition_id": new_deposition_id,
"file_id": file_id,
"url": f"{ZENODO_BASE}/record/{record_id}/files/{filename}?download=1",
"name": file_name,
"md5": file_md5,
}
else:
return singularity_dict | {
"new_deposition_id": new_deposition_id,
"file_id": "",
}
def push_new_singularity_image(
zenodo_client: "zenodoclient.Client",
singularity_image_folder: PathType,
singularity_dict: Dict,
model_group: str,
file_to_upload: str = "",
path: str = "",
push: bool = True,
) -> Dict:
"""This function creates a draft version of a new zenodo entry with the
metadata and singularity image. If push is True, the draft version is finalized
and the url, name and md5 fields are updated and the new deposition id and file id is added to
singularity dict which contains empty strings as url and md5. Otherwise, only
the new deposotion id and file id is added to the dictionary. This modified dictionary is
returned"""
status_code, response = zenodo_client.post_content(f"{ZENODO_DEPOSITION}")
deposition_id = response["id"]
bucket_url = response["links"]["bucket"]
filename = (
file_to_upload if file_to_upload else f"{singularity_dict['name']}.sif"
)
upload_file(
zenodo_client,
f"{bucket_url}/{filename}",
singularity_image_folder,
filename,
)
url = f"{ZENODO_DEPOSITION}/{deposition_id}"
if "shared" in singularity_dict["name"]:
shared_env_name = (
singularity_dict["name"]
.replace("kipoi-docker_", "")
.replace("-slim", "")
)
upload_metadata(zenodo_client, url, shared_env=shared_env_name)
else:
upload_metadata(zenodo_client, url, model_group=model_group)
if push:
push_deposition(zenodo_client, deposition_id)
response = get_deposit(zenodo_client, deposition_id)
record_id = response["metadata"]["prereserve_doi"]["recid"]
return {
"new_deposition_id": deposition_id,
"file_id": response["files"][0]["id"],
"url": f"{ZENODO_BASE}/record/{record_id}/files/{filename}?download=1",
"name": response["files"][0]["filename"].replace(".sif", ""),
"md5": response["files"][0]["checksum"],
}
else:
return singularity_dict | {
"new_deposition_id": deposition_id,
"file_id": "",
}
def get_singularity_image(
singularity_image_folder: PathType,
singularity_image_dict: Dict,
model_or_model_group: str,
) -> PathType:
"""This function downloads the singularity image corresponding to the given model or
model group from zenodo to singularity_image_folder and returns the name of the image"""
if (
model_or_model_group in singularity_image_dict
): # Special case for MMSPlice/mtsplice, APARENT/veff
image_name = (
f"{singularity_image_dict[model_or_model_group]['name']}.sif"
)
image_url = f"{singularity_image_dict[model_or_model_group]['url']}"
image_md5 = f"{singularity_image_dict[model_or_model_group]['md5']}"
else:
model_group = model_or_model_group.split("/")[0]
image_name = f"{singularity_image_dict[model_group]['name']}.sif"
image_url = f"{singularity_image_dict[model_group]['url']}"
image_md5 = f"{singularity_image_dict[model_group]['md5']}"
if isinstance(singularity_image_folder, str):
singularity_image_folder = Path(singularity_image_folder)
if isinstance(image_name, str):
image_name = Path(image_name)
if not (singularity_image_folder / image_name).exists():
download_url(
url=image_url,
root=singularity_image_folder,
filename=image_name,
md5=image_md5,
)
return image_name
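
# Usage sketch (the folder, dictionary contents and model name below are
# assumptions for illustration only; a real call downloads the matching .sif
# from zenodo into the folder and returns its file name):
#
#     image_dict = {
#         "DeepSEA": {"name": "kipoi-docker_deepsea-slim",
#                     "url": "https://zenodo.org/record/<id>/files/<name>.sif?download=1",
#                     "md5": "<checksum>"},
#     }
#     sif = get_singularity_image("~/.kipoi/containers", image_dict, "DeepSEA/predict")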
|
StarcoderdataPython
|
104069
|
<reponame>zkscpqm/pyfilter<filename>pyfilter/src/filters/__init__.py<gh_stars>0
from pyfilter.src.filters.base_filter import _BaseFilter
from pyfilter.src.filters.any_match_filter import _AnyMatchFilter
from pyfilter.src.filters.all_match_filter import _AllMatchFilter
from pyfilter.src.filters.regex_match_filter import _RegexMatchFilter
BaseFilter = _BaseFilter
AnyMatchFilter = _AnyMatchFilter
AllMatchFilter = _AllMatchFilter
RegexMatchFilter = _RegexMatchFilter
|
StarcoderdataPython
|
6513700
|
<filename>src/logml/datasets/df/__init__.py<gh_stars>0
from .datasets_df import DatasetsDf
from .df_explore import DfExplore
|
StarcoderdataPython
|
6631245
|
<filename>tetravolume.py
"""
Euler volume, modified by <NAME>
http://www.grunch.net/synergetics/quadvols.html
<NAME> (c) MIT License
The tetravolume.py methods make_tet and make_tri
assume that volume and area use R-edge cubes and
triangles for XYZ units respectively, and D-edge
tetrahedrons and triangles for IVM units of volume
and area (D = 2R).
The tetrahedron of edges D has sqrt(8/9) the
volume of a cube of edges R, yet each is unit in
its respective matrix.
The triangle of edges D has an XYZ area of sqrt(3)
i.e. an equilateral triangle of edges 2 in R-square
units. The IVM area of the same triangle is simply 1.
The cube of edges sqrt(2) in R units, has volume
sqrt(2) to the 3rd power. One third of that volume
is our unit tetrahedron of edges D (cube face diagonals).
See:
http://mathforum.org/kb/thread.jspa?threadID=2836546
for explanation of quadrays, used for some unit tests
"""
from math import sqrt as rt2
from qrays import Qvector, Vector
import sys
R = 0.5
D = 1.0
S3 = pow(9/8, 0.5)
root2 = rt2(2)
root3 = rt2(3)
root5 = rt2(5)
root6 = rt2(6)
PHI = (1 + root5)/2.0
class Tetrahedron:
"""
Takes six edges of tetrahedron with faces
(a,b,d)(b,c,e)(c,a,f)(d,e,f) -- returns volume
if ivm and xyz
"""
def __init__(self, a, b, c, d, e, f):
# a,b,c,d,e,f = [Decimal(i) for i in (a,b,c,d,e,f)]
self.a, self.a2 = a, a**2
self.b, self.b2 = b, b**2
self.c, self.c2 = c, c**2
self.d, self.d2 = d, d**2
self.e, self.e2 = e, e**2
self.f, self.f2 = f, f**2
def ivm_volume(self):
ivmvol = ((self._addopen()
- self._addclosed()
- self._addopposite())/2) ** 0.5
return ivmvol
def xyz_volume(self):
xyzvol = 1/S3 * self.ivm_volume()
return xyzvol
def _addopen(self):
a2,b2,c2,d2,e2,f2 = self.a2, self.b2, self.c2, self.d2, self.e2, self.f2
sumval = f2*a2*b2
sumval += d2 * a2 * c2
sumval += a2 * b2 * e2
sumval += c2 * b2 * d2
sumval += e2 * c2 * a2
sumval += f2 * c2 * b2
sumval += e2 * d2 * a2
sumval += b2 * d2 * f2
sumval += b2 * e2 * f2
sumval += d2 * e2 * c2
sumval += a2 * f2 * e2
sumval += d2 * f2 * c2
return sumval
def _addclosed(self):
a2,b2,c2,d2,e2,f2 = self.a2, self.b2, self.c2, self.d2, self.e2, self.f2
sumval = a2 * b2 * d2
sumval += d2 * e2 * f2
sumval += b2 * c2 * e2
sumval += a2 * c2 * f2
return sumval
def _addopposite(self):
a2,b2,c2,d2,e2,f2 = self.a2, self.b2, self.c2, self.d2, self.e2, self.f2
sumval = a2 * e2 * (a2 + e2)
sumval += b2 * f2 * (b2 + f2)
sumval += c2 * d2 * (c2 + d2)
return sumval
def make_tet(v0,v1,v2):
"""
three edges from any corner, remaining three edges computed
"""
tet = Tetrahedron(v0.length(), v1.length(), v2.length(),
(v0-v1).length(), (v1-v2).length(), (v2-v0).length())
return tet.ivm_volume(), tet.xyz_volume()
class Triangle:
def __init__(self, a, b, c):
self.a = a
self.b = b
self.c = c
def ivm_area(self):
ivmarea = self.xyz_area() * 1/rt2(3)
return ivmarea
def xyz_area(self):
"""
Heron's Formula, without the 1/4
"""
a,b,c = self.a, self.b, self.c
xyzarea = rt2((a+b+c) * (-a+b+c) * (a-b+c) * (a+b-c))
return xyzarea
def make_tri(v0,v1):
"""
three edges from any corner, remaining three edges computed
"""
tri = Triangle(v0.length(), v1.length(), (v1-v0).length())
return tri.ivm_area(), tri.xyz_area()
R = 0.5
D = 1.0
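
# Example (illustrative): the regular tetrahedron of D edges is the unit of IVM
# volume; dividing by S3 gives its XYZ volume.
#
#     Tetrahedron(D, D, D, D, D, D).ivm_volume()   -> 1.0
#     Tetrahedron(D, D, D, D, D, D).xyz_volume()   -> 0.9428...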
import unittest
class Test_Tetrahedron(unittest.TestCase):
def test_unit_volume(self):
tet = Tetrahedron(D, D, D, D, D, D)
self.assertEqual(tet.ivm_volume(), 1, "Volume not 1")
def test_e_module(self):
e0 = D
e1 = root3 * PHI**-1
e2 = rt2((5 - root5)/2)
e3 = (3 - root5)/2
e4 = rt2(5 - 2*root5)
e5 = 1/PHI
tet = Tetrahedron(e0, e1, e2, e3, e4, e5)
self.assertTrue(1/23 > tet.ivm_volume()/8 > 1/24, "Wrong E-mod")
def test_unit_volume2(self):
tet = Tetrahedron(R, R, R, R, R, R)
self.assertAlmostEqual(tet.xyz_volume(), 0.117851130)
def test_unit_volume3(self):
tet = Tetrahedron(R, R, R, R, R, R)
self.assertAlmostEqual(tet.ivm_volume(), 0.125)
def test_phi_edge_tetra(self):
tet = Tetrahedron(D, D, D, D, D, PHI)
self.assertAlmostEqual(float(tet.ivm_volume()), 0.70710678)
def test_right_tetra(self):
e = pow((root3/2)**2 + (root3/2)**2, 0.5) # right tetrahedron
tet = Tetrahedron(D, D, D, D, D, e)
self.assertAlmostEqual(tet.xyz_volume(), 1)
def test_quadrant(self):
qA = Qvector((1,0,0,0))
qB = Qvector((0,1,0,0))
qC = Qvector((0,0,1,0))
tet = make_tet(qA, qB, qC)
self.assertAlmostEqual(tet[0], 0.25)
def test_octant(self):
x = Vector((R, 0, 0))
y = Vector((0, R, 0))
z = Vector((0, 0, R))
tet = make_tet(x,y,z)
self.assertAlmostEqual(tet[1], 1/6, 5) # good to 5 places
def test_quarter_octahedron(self):
a = Vector((D,0,0))
b = Vector((0,D,0))
c = Vector((R,R,root2/2))
tet = make_tet(a, b, c)
self.assertAlmostEqual(tet[0], 1, 5) # good to 5 places
def test_xyz_cube(self):
a = Vector((R, 0.0, 0.0))
b = Vector((0.0, R, 0.0))
c = Vector((0.0, 0.0, R))
R_octa = make_tet(a,b,c)
self.assertAlmostEqual(6 * R_octa[1], 1, 4) # good to 4 places
def test_s3(self):
D_tet = Tetrahedron(D, D, D, D, D, D)
a = Vector((R, 0.0, 0.0))
b = Vector((0.0, R, 0.0))
c = Vector((0.0, 0.0, R))
R_cube = 6 * make_tet(a,b,c)[1]
self.assertAlmostEqual(D_tet.xyz_volume() * S3, R_cube, 4)
def test_martian(self):
p = Qvector((2,1,0,1))
q = Qvector((2,1,1,0))
r = Qvector((2,0,1,1))
result = make_tet(5*q, 2*p, 2*r)
self.assertAlmostEqual(result[0], 20, 7)
def test_area_martian1(self):
p = Qvector((2,1,0,1))
q = Qvector((2,1,1,0))
result = p.area(q)
self.assertAlmostEqual(result, 1)
def test_area_martian2(self):
p = 3 * Qvector((2,1,0,1))
q = 4 * Qvector((2,1,1,0))
result = p.area(q)
self.assertAlmostEqual(result, 12)
def test_area_martian3(self):
qx = Vector((D,0,0)).quadray()
qy = Vector((R,rt2(3)/2,0)).quadray()
result = qx.area(qy)
self.assertAlmostEqual(result, 1, 7)
def test_area_earthling1(self):
vx = Vector((1,0,0))
vy = Vector((0,1,0))
result = vx.area(vy)
self.assertAlmostEqual(result, 1)
def test_area_earthling2(self):
vx = Vector((2,0,0))
vy = Vector((1,rt2(3),0))
result = vx.area(vy)
self.assertAlmostEqual(result, 2*rt2(3))
def test_phi_tet(self):
"edges from common vertex: phi, 1/phi, 1"
p = Vector((1, 0, 0))
q = Vector((1, 0, 0)).rotz(60) * PHI
r = Vector((0.5, root3/6, root6/3)) * 1/PHI
result = make_tet(p, q, r)
self.assertAlmostEqual(result[0], 1, 7)
def test_phi_tet_2(self):
p = Qvector((2,1,0,1))
q = Qvector((2,1,1,0))
r = Qvector((2,0,1,1))
result = make_tet(PHI*q, (1/PHI)*p, r)
self.assertAlmostEqual(result[0], 1, 7)
def test_phi_tet_3(self):
T = Tetrahedron(PHI, 1/PHI, 1.0,
root2, root2/PHI, root2)
result = T.ivm_volume()
self.assertAlmostEqual(result, 1, 7)
def test_koski(self):
a = 1
b = PHI ** -1
c = PHI ** -2
d = (root2) * PHI ** -1
e = (root2) * PHI ** -2
f = (root2) * PHI ** -1
T = Tetrahedron(a,b,c,d,e,f)
result = T.ivm_volume()
self.assertAlmostEqual(result, PHI ** -3, 7)
class Test_Triangle(unittest.TestCase):
def test_unit_area1(self):
tri = Triangle(D, D, D)
self.assertEqual(tri.ivm_area(), 1)
def test_unit_area2(self):
tri = Triangle(2, 2, 2)
self.assertEqual(tri.ivm_area(), 4)
def test_xyz_area3(self):
tri = Triangle(D, D, D)
self.assertEqual(tri.xyz_area(), rt2(3))
def test_xyz_area4(self):
v1 = Vector((D, 0, 0))
v2 = Vector((0, D, 0))
xyz_area = make_tri(v1, v2)[1]
self.assertAlmostEqual(xyz_area, 2)
def test_xyz_area5(self):
tri = Triangle(R, R, R)
self.assertAlmostEqual(tri.xyz_area(), (root3)/4)
def command_line():
args = sys.argv[1:]
try:
args = [float(x) for x in args] # floats
t = Tetrahedron(*args)
except TypeError:
t = Tetrahedron(1,1,1,1,1,1)
print("defaults used")
print(t.ivm_volume())
print(t.xyz_volume())
if __name__ == "__main__":
if len(sys.argv)==7:
command_line()
else:
unittest.main()
|
StarcoderdataPython
|
3219035
|
<reponame>ltonetwork/lto-api.python<filename>tests/Transactions/SetScriptTest.py
from LTO.Transactions.SetScript import SetScript
from LTO.Accounts.AccountFactoryED25519 import AccountED25519 as AccountFactory
from time import time
from unittest import mock
class TestSetScript:
ACCOUNT_SEED = "df3dd6d884714288a39af0bd973a1771c9f00f168cf040d6abb6a50dd5e055d8"
account = AccountFactory('T').createFromSeed(ACCOUNT_SEED)
def testConstruct(self):
transaction = SetScript(b'aGVsbG8=')
assert transaction.script == b'aGVsbG8='
assert transaction.txFee == 500000000
def testSignWith(self):
transaction = SetScript(b'aGVsbG8=')
assert transaction.isSigned() is False
transaction.signWith(self.account)
assert transaction.isSigned() is True
timestamp = int(time() * 1000)
assert str(transaction.timestamp)[:-3] == str(timestamp)[:-3]
assert transaction.sender == '<KEY>'
assert transaction.senderPublicKey == '<KEY>'
assert self.account.verifySignature(transaction.toBinary(), transaction.proofs[0])
def expectedV1(self):
return {
"type": 13,
"version": 1,
"sender": '<KEY>',
"senderPublicKey": '<KEY>',
"fee": 500000000,
"timestamp": 1609773456000,
"script": 'base64:' + str(b'aGVsbG8='),
"proofs": ['Z5dX5Upqq8ergHPhi4J2qLTroLKzUUdf3yR36Ns9oiASs6nWKdDHacD4W2WzweQczJaUCogrBZ6xMhMi1vKMXky']
}
def expectedV3(self):
return {
"type": 13,
"version": 3,
"sender": '3MtHYnCkd3oFZr21yb2vEdngcSG<KEY>',
"senderKeyType": "ed25519",
"senderPublicKey": '<KEY>',
"fee": 500000000,
"timestamp": 1609773456000,
"script": 'base64:' + str(b'aGVsbG8='),
"proofs": ['<KEY>']
}
def testToJson(self):
transaction = SetScript(b'aGVsbG8=')
transaction.timestamp = 1609773456000
transaction.signWith(self.account)
if transaction.version == 1:
expected = self.expectedV1()
elif transaction.version == 3:
expected = self.expectedV3()
else:
expected = ''
assert transaction.toJson() == expected
@mock.patch('src.LTO.PublicNode')
def testBroadcast(self, mock_Class):
transaction = SetScript(b'aGVsbG8=')
broadcastedTransaction = SetScript(b'aGVsbG8=')
broadcastedTransaction.id = '7cCeL1qwd9i6u8NgMNsQjBPxVhrME2BbfZMT1DF9p4Yi'
mc = mock_Class.return_value
mc.broadcast.return_value = broadcastedTransaction
assert mc.broadcast(transaction) == broadcastedTransaction
def testFromData(self):
data = {
"type": 13,
"version": 1,
"id": 'BG7MQF8KffVU6MMbJW5xPowVQsohwJhfEJ4wSF8cWdC2',
"sender": '3MtHYnCkd3oFZr21yb2vEdngcSGXvuNNCq2',
"senderKeyType": "ed25519",
"senderPublicKey": '<KEY>',
"fee": 500000000,
"timestamp": 1609773456000,
"script": b'aGVsbG8=',
"proofs": ['<KEY>']
}
transaction = SetScript(data['script']).fromData(data)
for key in data:
assert data[key] == transaction.__getattr__(key)
|
StarcoderdataPython
|
4863745
|
"""
MODIFIED Bluegiga BGAPI/BGLib implementation
============================================
Bluegiga BGLib Python interface library
2013-05-04 by <NAME> <<EMAIL>>
Updates should (hopefully) always be available at
https://github.com/jrowberg/bglib
Thanks to Masaaki Shibata for Python event handler code
http://www.emptypage.jp/notes/pyevent.en.html
============================================
BGLib Python interface library code is placed under the MIT license
Copyright (c) 2013 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
===============================================
"""
from __future__ import print_function
import logging
from struct import unpack
from enum import Enum
log = logging.getLogger(__name__)
ResponsePacketType = Enum('ResponsePacketType', [
'system_reset',
'system_hello',
'system_address_get',
'system_reg_write',
'system_reg_read',
'system_get_counters',
'system_get_connections',
'system_read_memory',
'system_get_info',
'system_endpoint_tx',
'system_whitelist_append',
'system_whitelist_remove',
'system_whitelist_clear',
'system_endpoint_rx',
'system_endpoint_set_watermarks',
'flash_ps_defrag',
'flash_ps_dump',
'flash_ps_erase_all',
'flash_ps_save',
'flash_ps_load',
'flash_ps_erase',
'flash_erase_page',
'flash_write_words',
'attributes_write',
'attributes_read',
'attributes_read_type',
'attributes_user_read_response',
'attributes_user_write_response',
'connection_disconnect',
'connection_get_rssi',
'connection_update',
'connection_version_update',
'connection_channel_map_get',
'connection_channel_map_set',
'connection_features_get',
'connection_get_status',
'connection_raw_tx',
'attclient_find_by_type_value',
'attclient_read_by_group_type',
'attclient_read_by_type',
'attclient_find_information',
'attclient_read_by_handle',
'attclient_attribute_write',
'attclient_write_command',
'attclient_indicate_confirm',
'attclient_read_long',
'attclient_prepare_write',
'attclient_execute_write',
'attclient_read_multiple',
'sm_encrypt_start',
'sm_set_bondable_mode',
'sm_delete_bonding',
'sm_set_parameters',
'sm_passkey_entry',
'sm_get_bonds',
'sm_set_oob_data',
'gap_set_privacy_flags',
'gap_set_mode',
'gap_discover',
'gap_connect_direct',
'gap_end_procedure',
'gap_connect_selective',
'gap_set_filtering',
'gap_set_scan_parameters',
'gap_set_adv_parameters',
'gap_set_adv_data',
'gap_set_directed_connectable_mode',
'hardware_io_port_config_irq',
'hardware_set_soft_timer',
'hardware_adc_read',
'hardware_io_port_config_direction',
'hardware_io_port_config_function',
'hardware_io_port_config_pull',
'hardware_io_port_write',
'hardware_io_port_read',
'hardware_spi_config',
'hardware_spi_transfer',
'hardware_i2c_read',
'hardware_i2c_write',
'hardware_set_txpower',
'hardware_timer_comparator',
'test_phy_tx',
'test_phy_rx',
'test_phy_end',
'test_phy_reset',
'test_get_channel_map',
'test_debug',
])
EventPacketType = Enum('EventPacketType', [
'system_boot',
'system_debug',
'system_endpoint_watermark_rx',
'system_endpoint_watermark_tx',
'system_script_failure',
'system_no_license_key',
'flash_ps_key',
'attributes_value',
'attributes_user_read_request',
'attributes_status',
'connection_status',
'connection_version_ind',
'connection_feature_ind',
'connection_raw_rx',
'connection_disconnected',
'attclient_indicated',
'attclient_procedure_completed',
'attclient_group_found',
'attclient_attribute_found',
'attclient_find_information_found',
'attclient_attribute_value',
'attclient_read_multiple_response',
'sm_smp_data',
'sm_bonding_fail',
'sm_passkey_display',
'sm_passkey_request',
'sm_bond_status',
'gap_scan_response',
'gap_mode_changed',
'hardware_io_port_status',
'hardware_soft_timer',
'hardware_adc_result',
])
# Map a tuple of (class, command) to an enum identifier for the packet
RESPONSE_PACKET_MAPPING = {
(0, 0): ResponsePacketType.system_reset,
(0, 1): ResponsePacketType.system_hello,
(0, 2): ResponsePacketType.system_address_get,
(0, 3): ResponsePacketType.system_reg_write,
(0, 4): ResponsePacketType.system_reg_read,
(0, 5): ResponsePacketType.system_get_counters,
(0, 6): ResponsePacketType.system_get_connections,
(0, 7): ResponsePacketType.system_read_memory,
(0, 8): ResponsePacketType.system_get_info,
(0, 9): ResponsePacketType.system_endpoint_tx,
(0, 10): ResponsePacketType.system_whitelist_append,
(0, 11): ResponsePacketType.system_whitelist_remove,
(0, 12): ResponsePacketType.system_whitelist_clear,
(0, 13): ResponsePacketType.system_endpoint_rx,
(0, 14): ResponsePacketType.system_endpoint_set_watermarks,
(1, 0): ResponsePacketType.flash_ps_defrag,
(1, 1): ResponsePacketType.flash_ps_dump,
(1, 2): ResponsePacketType.flash_ps_erase_all,
(1, 3): ResponsePacketType.flash_ps_save,
(1, 4): ResponsePacketType.flash_ps_load,
(1, 5): ResponsePacketType.flash_ps_erase,
(1, 6): ResponsePacketType.flash_erase_page,
(1, 7): ResponsePacketType.flash_write_words,
(2, 0): ResponsePacketType.attributes_write,
(2, 1): ResponsePacketType.attributes_read,
(2, 2): ResponsePacketType.attributes_read_type,
(2, 3): ResponsePacketType.attributes_user_read_response,
(2, 4): ResponsePacketType.attributes_user_write_response,
(3, 0): ResponsePacketType.connection_disconnect,
(3, 1): ResponsePacketType.connection_get_rssi,
(3, 2): ResponsePacketType.connection_update,
(3, 3): ResponsePacketType.connection_version_update,
(3, 4): ResponsePacketType.connection_channel_map_get,
(3, 5): ResponsePacketType.connection_channel_map_set,
(3, 6): ResponsePacketType.connection_features_get,
(3, 7): ResponsePacketType.connection_get_status,
(3, 8): ResponsePacketType.connection_raw_tx,
(4, 0): ResponsePacketType.attclient_find_by_type_value,
(4, 1): ResponsePacketType.attclient_read_by_group_type,
(4, 2): ResponsePacketType.attclient_read_by_type,
(4, 3): ResponsePacketType.attclient_find_information,
(4, 4): ResponsePacketType.attclient_read_by_handle,
(4, 5): ResponsePacketType.attclient_attribute_write,
(4, 6): ResponsePacketType.attclient_write_command,
(4, 7): ResponsePacketType.attclient_indicate_confirm,
(4, 8): ResponsePacketType.attclient_read_long,
(4, 9): ResponsePacketType.attclient_prepare_write,
    (4, 10): ResponsePacketType.attclient_execute_write,
    (4, 11): ResponsePacketType.attclient_read_multiple,
(5, 0): ResponsePacketType.sm_encrypt_start,
(5, 1): ResponsePacketType.sm_set_bondable_mode,
(5, 2): ResponsePacketType.sm_delete_bonding,
(5, 3): ResponsePacketType.sm_set_parameters,
(5, 4): ResponsePacketType.sm_passkey_entry,
(5, 5): ResponsePacketType.sm_get_bonds,
(5, 6): ResponsePacketType.sm_set_oob_data,
(6, 0): ResponsePacketType.gap_set_privacy_flags,
(6, 1): ResponsePacketType.gap_set_mode,
(6, 2): ResponsePacketType.gap_discover,
(6, 3): ResponsePacketType.gap_connect_direct,
(6, 4): ResponsePacketType.gap_end_procedure,
(6, 5): ResponsePacketType.gap_connect_selective,
(6, 6): ResponsePacketType.gap_set_filtering,
(6, 7): ResponsePacketType.gap_set_scan_parameters,
(6, 8): ResponsePacketType.gap_set_adv_parameters,
(6, 9): ResponsePacketType.gap_set_adv_data,
(6, 10): ResponsePacketType.gap_set_directed_connectable_mode,
(7, 0): ResponsePacketType.hardware_io_port_config_irq,
(7, 1): ResponsePacketType.hardware_set_soft_timer,
(7, 2): ResponsePacketType.hardware_adc_read,
(7, 3): ResponsePacketType.hardware_io_port_config_direction,
(7, 4): ResponsePacketType.hardware_io_port_config_function,
(7, 5): ResponsePacketType.hardware_io_port_config_pull,
(7, 6): ResponsePacketType.hardware_io_port_write,
(7, 7): ResponsePacketType.hardware_io_port_read,
(7, 8): ResponsePacketType.hardware_spi_config,
(7, 9): ResponsePacketType.hardware_spi_transfer,
(7, 10): ResponsePacketType.hardware_i2c_read,
(7, 11): ResponsePacketType.hardware_i2c_write,
(7, 12): ResponsePacketType.hardware_set_txpower,
(7, 13): ResponsePacketType.hardware_timer_comparator,
(8, 0): ResponsePacketType.test_phy_tx,
(8, 1): ResponsePacketType.test_phy_rx,
(8, 2): ResponsePacketType.test_phy_reset,
(8, 3): ResponsePacketType.test_get_channel_map,
(8, 4): ResponsePacketType.test_debug,
}
# TODO instead of this, have a different enum for each message type + class, and
# then just index into it
EVENT_PACKET_MAPPING = {
(0, 0): EventPacketType.system_boot,
(0, 1): EventPacketType.system_debug,
(0, 2): EventPacketType.system_endpoint_watermark_rx,
(0, 3): EventPacketType.system_endpoint_watermark_tx,
(0, 4): EventPacketType.system_script_failure,
(0, 5): EventPacketType.system_no_license_key,
(1, 0): EventPacketType.flash_ps_key,
(2, 0): EventPacketType.attributes_value,
(2, 1): EventPacketType.attributes_user_read_request,
(2, 2): EventPacketType.attributes_status,
(3, 0): EventPacketType.connection_status,
(3, 1): EventPacketType.connection_version_ind,
(3, 2): EventPacketType.connection_feature_ind,
(3, 3): EventPacketType.connection_raw_rx,
(3, 4): EventPacketType.connection_disconnected,
(4, 0): EventPacketType.attclient_indicated,
(4, 1): EventPacketType.attclient_procedure_completed,
(4, 2): EventPacketType.attclient_group_found,
(4, 3): EventPacketType.attclient_attribute_found,
(4, 4): EventPacketType.attclient_find_information_found,
(4, 5): EventPacketType.attclient_attribute_value,
(4, 6): EventPacketType.attclient_read_multiple_response,
(5, 0): EventPacketType.sm_smp_data,
(5, 1): EventPacketType.sm_bonding_fail,
(5, 2): EventPacketType.sm_passkey_display,
(5, 3): EventPacketType.sm_passkey_request,
(5, 4): EventPacketType.sm_bond_status,
(6, 0): EventPacketType.gap_scan_response,
(6, 1): EventPacketType.gap_mode_changed,
(7, 0): EventPacketType.hardware_io_port_status,
(7, 1): EventPacketType.hardware_soft_timer,
(7, 2): EventPacketType.hardware_adc_result,
}
class BGLib(object):
"""
Modified version of jrowberg's BGLib implementation.
"""
def __init__(self):
self.buffer = []
self.expected_length = 0
# Packet message types
self._ble_event = 0x80
self._ble_response = 0x00
self._wifi_event = 0x88
self._wifi_response = 0x08
def send_command(self, ser, packet):
"""
Send a packet to the BLED12 over serial.
ser -- The serial.Serial object to write to.
packet -- The packet to write.
"""
ser.write(packet)
def parse_byte(self, byte):
"""
Re-build packets read in from bytes over serial one byte at a time.
byte -- the next byte to add to the packet.
Returns a list of the bytes in the packet once a full packet is read.
Returns None otherwise.
"""
if (len(self.buffer) == 0 and
(byte == self._ble_event or byte == self._ble_response or
byte == self._wifi_event or byte == self._wifi_response)):
self.buffer.append(byte)
elif len(self.buffer) == 1:
self.buffer.append(byte)
self.expected_length = 4 +\
(self.buffer[0] & 0x07) + self.buffer[1]
elif len(self.buffer) > 1:
self.buffer.append(byte)
if (self.expected_length > 0 and
len(self.buffer) == self.expected_length):
packet = self.buffer
self.buffer = []
return packet
return None
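
    # Usage sketch (synthetic bytes, not a real BLE capture): feed bytes in one
    # at a time; parse_byte buffers them and returns the complete packet as a
    # list of ints once the expected length has been reached.
    #
    #     lib = BGLib()
    #     packet = None
    #     for b in [0x00, 0x02, 0x00, 0x00, 0x00, 0x00]:  # minimal 6-byte response frame
    #         packet = lib.parse_byte(b) or packet
    #     # packet == [0x00, 0x02, 0x00, 0x00, 0x00, 0x00]; the private
    #     # _decode_response_packet helper can then decode its payload.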
def _decode_response_packet(self, packet_class, packet_command, payload,
payload_length):
packet_type = RESPONSE_PACKET_MAPPING.get(
(packet_class, packet_command))
if packet_type is None:
# TODO unrecognized packet, log something?
return
response = {}
if packet_type == ResponsePacketType.system_address_get:
address = unpack('<6s', payload[:6])[0]
address = [ord(b) for b in address]
response = {
'address': address
}
elif packet_type == ResponsePacketType.system_reg_read:
address, value =\
unpack('<HB', payload[:3])
response = {
'address': address, 'value': value
}
elif packet_type == ResponsePacketType.system_get_counters:
txok, txretry, rxok, rxfail, mbuf =\
unpack('<BBBBB', payload[:5])
response = {
'txok': txok, 'txretry': txretry, 'rxok': rxok,
'rxfail': rxfail, 'mbuf': mbuf
}
elif packet_type == ResponsePacketType.system_get_connections:
maxconn = unpack('<B', payload[:1])[0]
response = {
'maxconn': maxconn
}
elif packet_type == ResponsePacketType.system_read_memory:
address, data_len =\
unpack('<IB', payload[:5])
data_data = [ord(b) for b in payload[5:]]
response = {
'address': address, 'data': data_data
}
elif packet_type == ResponsePacketType.system_get_info:
data = unpack('<HHHHHBB', payload[:12])
response = {
'major': data[0], 'minor': data[1],
'patch': data[2], 'build': data[3],
'll_version': data[4], 'protocol_version': data[5],
'hw': data[6]
}
elif packet_type in [
ResponsePacketType.system_endpoint_tx,
ResponsePacketType.system_whitelist_append,
ResponsePacketType.system_whitelist_remove,
ResponsePacketType.system_endpoint_set_watermarks,
ResponsePacketType.flash_ps_save,
ResponsePacketType.flash_erase_page,
ResponsePacketType.attributes_write,
ResponsePacketType.system_reg_write,
ResponsePacketType.attclient_indicate_confirm,
ResponsePacketType.sm_delete_bonding,
ResponsePacketType.sm_passkey_entry,
ResponsePacketType.gap_set_mode,
ResponsePacketType.gap_discover,
ResponsePacketType.gap_end_procedure,
ResponsePacketType.gap_set_filtering,
ResponsePacketType.hardware_timer_comparator,
ResponsePacketType.test_phy_end,
ResponsePacketType.hardware_spi_config,
ResponsePacketType.gap_set_scan_parameters,
ResponsePacketType.gap_set_adv_parameters,
ResponsePacketType.gap_set_adv_data,
ResponsePacketType.gap_set_directed_connectable_mode,
ResponsePacketType.hardware_io_port_config_irq,
ResponsePacketType.hardware_set_soft_timer,
ResponsePacketType.hardware_adc_read,
ResponsePacketType.hardware_io_port_config_direction,
ResponsePacketType.hardware_io_port_config_function,
ResponsePacketType.hardware_io_port_config_pull,
ResponsePacketType.hardware_io_port_write]:
result = unpack('<H', payload[:2])[0]
response = {
'result': result
}
elif packet_type == ResponsePacketType.system_endpoint_rx:
result, data_len =\
unpack('<HB', payload[:3])
data_data = [ord(b) for b in payload[3:]]
response = {
'result': result, 'data': data_data
}
elif packet_type == ResponsePacketType.flash_ps_load:
result, value_len = unpack('<HB',
payload[:3])
value_data = [ord(b) for b in payload[3:]]
response = {
'result': result, 'value': value_data
}
elif packet_type == ResponsePacketType.attributes_read:
handle, offset, result, value_len = unpack(
'<HHHB', payload[:7]
)
value_data = [ord(b) for b in payload[7:]]
response = {
'handle': handle, 'offset': offset,
'result': result, 'value': value_data
}
elif packet_type == ResponsePacketType.attributes_read_type:
handle, result, value_len = unpack(
'<HHB', payload[:5]
)
value_data = [ord(b) for b in payload[5:]]
response = {
'handle': handle, 'result': result,
'value': value_data
}
elif packet_type in [
ResponsePacketType.connection_disconnect,
ResponsePacketType.connection_update,
ResponsePacketType.connection_version_update,
ResponsePacketType.connection_channel_map_set,
ResponsePacketType.connection_features_get,
ResponsePacketType.attclient_find_by_type_value,
ResponsePacketType.attclient_read_by_group_type,
ResponsePacketType.attclient_read_by_type,
ResponsePacketType.attclient_find_information,
ResponsePacketType.attclient_read_by_handle,
ResponsePacketType.attclient_attribute_write,
ResponsePacketType.attclient_write_command,
ResponsePacketType.attclient_read_long,
ResponsePacketType.attclient_prepare_write,
ResponsePacketType.attclient_execute_write,
ResponsePacketType.attclient_read_multiple,
]:
connection, result = unpack(
'<BH', payload[:3]
)
response = {
'connection_handle': connection, 'result': result
}
elif packet_type == ResponsePacketType.connection_get_rssi:
connection, rssi = unpack(
'<Bb', payload[:2]
)
response = {
'connection_handle': connection, 'rssi': rssi
}
elif packet_type == ResponsePacketType.connection_channel_map_get:
connection, map_len = unpack(
'<BB', payload[:2]
)
map_data = [ord(b) for b in payload[2:]]
response = {
'connection_handle': connection, 'map': map_data
}
elif packet_type == ResponsePacketType.connection_get_status:
connection = unpack('<B', payload[:1])[0]
response = {
'connection_handle': connection
}
elif packet_type == ResponsePacketType.connection_raw_tx:
connection = unpack('<B', payload[:1])[0]
response = {
'connection_handle': connection
}
elif packet_type == ResponsePacketType.sm_encrypt_start:
handle, result = unpack(
'<BH', payload[:3]
)
response = {
'handle': handle, 'result': result
}
elif packet_type == ResponsePacketType.sm_get_bonds:
bonds = unpack('<B', payload[:1])[0]
response = {
'bonds': bonds
}
elif packet_type == ResponsePacketType.gap_connect_direct:
result, connection_handle = unpack(
'<HB', payload[:3]
)
response = {
'result': result,
'connection_handle': connection_handle
}
elif packet_type == ResponsePacketType.gap_connect_selective:
result, connection_handle = unpack(
'<HB', payload[:3]
)
response = {
'result': result,
'connection_handle': connection_handle
}
elif packet_type == ResponsePacketType.hardware_io_port_read:
result, port, data = unpack(
'<HBB', payload[:4]
)
response = {
'result': result, 'port': port, 'data': data
}
elif packet_type == ResponsePacketType.hardware_spi_transfer:
result, channel, data_len = unpack(
'<HBB', payload[:4]
)
data_data = [ord(b) for b in payload[4:]]
response = {
'result': result, 'channel': channel,
'data': data_data
}
elif packet_type == ResponsePacketType.hardware_i2c_read:
result, data_len = unpack(
'<HB', payload[:3]
)
data_data = [ord(b) for b in payload[3:]]
response = {
'result': result, 'data': data_data
}
elif packet_type == ResponsePacketType.hardware_i2c_write:
written = unpack('<B', payload[:1])[0]
response = {
'written': written
}
elif packet_type == ResponsePacketType.test_get_channel_map:
# channel_map_len = unpack(
# '<B', payload[:1]
# )[0]
channel_map_data =\
[ord(b) for b in payload[1:]]
response = {
'channel_map': channel_map_data
}
elif packet_type == ResponsePacketType.test_debug:
# output_len = unpack('<B',
# payload[:1])[0]
output_data =\
[ord(b) for b in payload[1:]]
response = {
'output': output_data
}
return packet_type, response
def _decode_event_packet(self, packet_class, packet_command, payload,
payload_length):
packet_type = EVENT_PACKET_MAPPING.get((packet_class, packet_command))
if packet_type is None:
# TODO unrecognized packet, log something?
return
response = {}
if packet_type == EventPacketType.system_boot:
data = unpack('<HHHHHBB', payload[:12])
response = {
'major': data[0], 'minor': data[1],
'patch': data[2], 'build': data[3],
'll_version': data[4], 'protocol_version': data[5],
'hw': data[6]
}
elif packet_type == EventPacketType.system_debug:
data_len = unpack('<B', payload[:1])[0]
data_data = [ord(b) for b in payload[1:]]
response = {
'data': data_data
}
elif packet_type in [EventPacketType.system_endpoint_watermark_rx,
EventPacketType.system_endpoint_watermark_tx
]:
endpoint, data = unpack(
'<BB', payload[:2]
)
response = {
'endpoint': endpoint, 'data': data
}
elif packet_type == EventPacketType.system_script_failure:
address, reason = unpack(
'<HH', payload[:4]
)
response = {
'address': address, 'reason': reason
}
elif packet_type == EventPacketType.flash_ps_key:
key, value_len = unpack(
'<HB', payload[:3]
)
value_data = [ord(b) for b in payload[3:]]
response = {
'key': key, 'value': value_data
}
elif packet_type == EventPacketType.attributes_value:
connection, reason, handle, offset, value_len = unpack(
'<BBHHB', payload[:7]
)
value_data = [ord(b) for b in payload[7:]]
response = {
'connection_handle': connection, 'reason': reason,
'handle': handle, 'offset': offset,
'value': value_data
}
elif packet_type == EventPacketType.attributes_user_read_request:
connection, handle, offset, maxsize = unpack(
'<BHHB', payload[:6]
)
response = {
'connection_handle': connection, 'handle': handle,
'offset': offset, 'maxsize': maxsize
}
elif packet_type == EventPacketType.attributes_status:
handle, flags = unpack('<HB', payload[:3])
response = {
'handle': handle, 'flags': flags
}
elif packet_type == EventPacketType.connection_status:
data = unpack('<BB6sBHHHB', payload[:16])
address = [ord(b) for b in data[2]]
response = {
'connection_handle': data[0], 'flags': data[1],
'address': address, 'address_type': data[3],
'conn_interval': data[4], 'timeout': data[5],
'latency': data[6], 'bonding': data[7]
}
elif packet_type == EventPacketType.connection_version_ind:
connection, vers_nr, comp_id, sub_vers_nr = unpack(
'<BBHH', payload[:6]
)
response = {
'connection_handle': connection, 'vers_nr': vers_nr,
'comp_id': comp_id, 'sub_vers_nr': sub_vers_nr
}
elif packet_type == EventPacketType.connection_feature_ind:
connection, features_len = unpack(
'<BB', payload[:2]
)
features_data =\
[ord(b) for b in payload[2:]]
response = {
'connection_handle': connection, 'features': features_data
}
elif packet_type == EventPacketType.connection_raw_rx:
connection, data_len = unpack(
'<BB', payload[:2]
)
data_data = [ord(b) for b in payload[2:]]
response = {
'connection_handle': connection, 'data': data_data
}
elif packet_type == EventPacketType.connection_disconnected:
connection, reason = unpack(
'<BH', payload[:3]
)
response = {
'connection_handle': connection, 'reason': reason
}
elif packet_type == EventPacketType.attclient_indicated:
connection, attrhandle = unpack(
'<BH', payload[:3]
)
response = {
'connection_handle': connection, 'attrhandle': attrhandle
}
elif packet_type == EventPacketType.attclient_procedure_completed:
connection, result, chrhandle = unpack(
'<BHH', payload[:5]
)
response = {
'connection_handle': connection, 'result': result,
'chrhandle': chrhandle
}
elif packet_type == EventPacketType.attclient_group_found:
connection, start, end, uuid_len = unpack(
'<BHHB', payload[:6]
)
uuid_data = [ord(b) for b in payload[6:]]
response = {
'connection_handle': connection, 'start': start,
'end': end, 'uuid': uuid_data
}
elif packet_type == EventPacketType.attclient_attribute_found:
data = unpack('<BHHBB', payload[:7])
uuid_data = [ord(b) for b in payload[7:]]
response = {
'connection_handle': data[0], 'chrdecl': data[1],
'value': data[2], 'properties': data[3],
'uuid': uuid_data
}
elif packet_type == EventPacketType.attclient_find_information_found:
connection, chrhandle, uuid_len = unpack(
'<BHB', payload[:4]
)
uuid_data = [ord(b) for b in payload[4:]]
response = {
'connection_handle': connection, 'chrhandle': chrhandle,
'uuid': uuid_data
}
elif packet_type == EventPacketType.attclient_attribute_value:
connection, atthandle, type, value_len = unpack(
'<BHBB', payload[:5]
)
value_data = [ord(b) for b in payload[5:]]
response = {
'connection_handle': connection, 'atthandle': atthandle,
'type': type, 'value': value_data
}
elif packet_type == EventPacketType.attclient_read_multiple_response:
connection, handles_len = unpack(
'<BB', payload[:2]
)
handles_data =\
[ord(b) for b in payload[2:]]
response = {
'connection_handle': connection, 'handles': handles_data
}
elif packet_type == EventPacketType.sm_smp_data:
handle, packet, data_len = unpack(
'<BBB', payload[:3]
)
data_data = [ord(b) for b in payload[3:]]
response = {
'handle': handle, 'packet': packet,
'data': data_data
}
elif packet_type == EventPacketType.sm_bonding_fail:
handle, result = unpack(
'<BH', payload[:3]
)
response = {
'handle': handle, 'result': result
}
elif packet_type == EventPacketType.sm_passkey_display:
handle, passkey = unpack(
'<BI', payload[:5]
)
response = {
'handle': handle, 'passkey': passkey
}
elif packet_type == EventPacketType.sm_passkey_request:
handle = unpack('<B', payload[:1])[0]
response = {
'handle': handle
}
elif packet_type == EventPacketType.sm_bond_status:
bond, keysize, mitm, keys = unpack(
'<BBBB', payload[:4]
)
response = {
'bond': bond, 'keysize': keysize, 'mitm': mitm,
'keys': keys
}
elif packet_type == EventPacketType.gap_scan_response:
data = unpack('<bB6sBBB', payload[:11])
sender = [ord(b) for b in data[2]]
data_data = [ord(b) for b in payload[11:]]
response = {
'rssi': data[0], 'packet_type': data[1],
'sender': sender, 'address_type': data[3],
'bond': data[4], 'data': data_data
}
elif packet_type == EventPacketType.gap_mode_changed:
discover, connect = unpack(
'<BB', payload[:2]
)
response = {
'discover': discover, 'connect': connect
}
elif packet_type == EventPacketType.hardware_io_port_status:
timestamp, port, irq, state = unpack(
'<IBBB', payload[:7]
)
response = {
'timestamp': timestamp, 'port': port, 'irq': irq,
'state': state
}
elif packet_type == EventPacketType.hardware_io_soft_timer:
handle = unpack('<B', payload[:1])[0]
response = {
'handle': handle
}
elif packet_type == EventPacketType.hardware_adc_result:
input, value = unpack('<Bh', payload[:3])
response = {
'input': input, 'value': value
}
return packet_type, response
def decode_packet(self, packet):
"""
Decode the packet and call the appropriate handler for the packet type.
packet -- a list of bytes in the packet to decode.
Returns a tuple of (PacketType, dict response data)
BGAPI packet structure (as of 2012-11-07):
Byte 0:
[7] - 1 bit, Message Type (MT) Command/Response, 1 = Event
[6:3] - 4 bits, Technology Type (TT) 0000 = BLE, 0001 = Wi-Fi
[2:0] - 3 bits, Length High (LH) Payload length (high bits)
Byte 1: 8 bits, Length Low (LL) Payload length (low bits)
Byte 2: 8 bits, Class ID (CID) Command class ID
Byte 3: 8 bits, Command ID (CMD) Command ID
Bytes 4-n: 0 - 2048 Bytes, Payload (PL) Up to 2048 bytes of payload
"""
packet_id, payload_length, packet_class, packet_command = packet[:4]
# TODO we are not parsing out the high bits of the payload length from
# the first byte
payload = b''.join(chr(i) for i in packet[4:])
message_type = packet_id & 0x88
if message_type == 0:
return self._decode_response_packet(
packet_class, packet_command, payload, payload_length)
elif message_type == 0x80:
return self._decode_event_packet(
packet_class, packet_command, payload, payload_length)
|
StarcoderdataPython
|
356491
|
from clickhouse_orm import migrations
from ..test_migrations import *
operations = [migrations.AlterTable(Model4_compressed), migrations.AlterTable(Model2LowCardinality)]
|
StarcoderdataPython
|
3392759
|
import argparse
import requests
import json
import urllib3
from bs4 import BeautifulSoup
from pathlib import Path
from pathvalidate import sanitize_filename
from tqdm import tqdm
from urllib.parse import urljoin
from exceptions import raise_if_redirect
from parse_tululu_category import parse_category
def get_book_info(book_url):
response = requests.get(book_url, allow_redirects=False, verify=False)
response.raise_for_status()
raise_if_redirect(response)
return BeautifulSoup(response.text, 'lxml')
def download_txt(book_url, filename, folder):
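    # Illustrative assumption: book URLs look like 'https://tululu.org/b239/',
    # so the slice below pulls out the numeric id, e.g. '239'.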
download_id = book_url[book_url.find('/b')+2:-1]
response = requests.get('https://tululu.org/txt.php', params={
"id": download_id, }, verify=False)
response.raise_for_status()
raise_if_redirect(response)
path_to_save_txt = Path('books').joinpath(f'{filename}.txt')
path_to_save = Path(folder).joinpath(path_to_save_txt)
with open(path_to_save, 'wb') as file:
file.write(response.content)
return str(path_to_save_txt)
def download_img(book_data, book_url, folder):
image_url = parse_img(book_data, book_url)
response = requests.get(image_url, allow_redirects=False, verify=False)
response.raise_for_status()
raise_if_redirect(response)
filename = sanitize_filename(image_url.split('/')[-1])
path_to_save_img = Path('images').joinpath(f'{filename}')
path_to_save = Path(folder).joinpath(path_to_save_img)
with open(path_to_save, 'wb') as image:
image.write(response.content)
return str(path_to_save_img)
def download_book(start_page, end_page, book_data, title, author, download_images, download_texts):
book_info = {
'title': title,
'author': author,
'img_path': download_images,
'book_path': download_texts,
'comments': parse_comments(book_data),
'genre': parse_genres(book_data)
}
return book_info
def create_json(filename, obj):
with open(filename, 'w', encoding='utf-8') as file:
json.dump(obj, file, ensure_ascii=False)
def parse_genres(book_data):
soup = book_data.select('span.d_book a')
genres = [genres.text for genres in soup]
return genres
def parse_comments(book_data):
title_tag = book_data.select("div.texts span.black")
comments = [comment.text for comment in title_tag]
return comments
def parse_img(book_data, book_url):
img_src = book_data.select_one('div.bookimage img')['src']
return urljoin(book_url, img_src)
def parse_title_and_author(book_data):
header = book_data.select_one("#content")
title_tag = header.h1
author, title = title_tag.text.split(' \xa0 :: \xa0 ')
return sanitize_filename(author), sanitize_filename(title)
def create_argparser():
parser = argparse.ArgumentParser(
description='Download book from tululu.org')
parser.add_argument(
'-sp', '--start_page', help='Which page to start downloading from', default=1, type=int)
parser.add_argument('-ep', '--end_page',
help='To which page to start downloading', type=int)
parser.add_argument('-df', '--dest_folder', default=Path.cwd(),
help='The path to the directory with the parsing results', type=str)
parser.add_argument('-si', '--skip_imgs',
help='Don\'t download pictures', action="store_true")
parser.add_argument('-st', '--skip_txts',
help='Don\'t download books', action="store_true")
parser.add_argument(
'-jn', '--json_name', default='books.json', help='Specify your *.json filename', type=str)
args = parser.parse_args()
    if args.end_page is None or not (0 < args.start_page < args.end_page):
        parser.error('Incorrect start_page or end_page arguments')
return parser
def main():
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
parser = create_argparser()
args = parser.parse_args()
Path(args.dest_folder, 'images').mkdir(parents=True, exist_ok=True)
Path(args.dest_folder, 'books').mkdir(parents=True, exist_ok=True)
all_books = []
book_urls = parse_category(args.start_page, args.end_page)
for book_url in tqdm(book_urls, desc='Progress:'):
try:
book_data = get_book_info(book_url)
title, author = parse_title_and_author(book_data)
download_texts = None
if not args.skip_txts:
download_texts = download_txt(
book_url, title, args.dest_folder)
download_images = None
if not args.skip_imgs:
download_images = download_img(
book_data, book_url, args.dest_folder)
book_info = download_book(
args.start_page, args.end_page, book_data, title, author, download_images, download_texts)
all_books.append(book_info)
except requests.exceptions.ConnectionError:
print('ConnectionError')
filename = Path(args.dest_folder).joinpath(args.json_name)
create_json(filename, all_books)
print('DONE!')
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
3490212
|
<reponame>PsiLupan/calcprogress
from os.path import basename
from enum import IntEnum
from re import search
from dataclasses import dataclass
from cw_map import Map
SECTION_REGEX = r"^\s*.section\s+(?P<Name>.[a-zA-Z0-9_$]+)"
class AsmSectionType(IntEnum):
CODE = 0
DATA = 1
def get_section_type(name: str) -> int:
code = [
".init", ".text"
]
data = [
"extab_", "extab", "._extab", "._exidx", "extabindex_", "extabindex", ".ctors", ".dtors", "._ctors",
"._dtors", ".file", ".rodata", ".data", ".bss", ".sdata", ".sbss", ".sdata2", ".sbss2"
]
if name in code:
return AsmSectionType.CODE
elif name in data:
return AsmSectionType.DATA
# As a failsafe, if the section is actually unknown,
# it is probably some unique data (like OGWS' ".file" section)
print(f"Unidentifiable section! ({name})")
print("Assuming this is a DATA section.")
return AsmSectionType.DATA
def get_obj_name(path: str) -> str:
# Get base file name
file_name = basename(path)
# Extract file extension/name
dot_idx = file_name.rfind(".")
file_ext = file_name[dot_idx:]
file_name = file_name[:dot_idx]
# Create object file name
return f"{file_name}.o"
@dataclass
class AsmSection:
start: int
size: int
type: int
@dataclass
class AsmSectionList:
sections: list[AsmSection]
def __init__(self, sources: list[str], dol_map: Map):
self.sections = []
for file in sources:
self.parse_file(file, dol_map)
def parse_file(self, path: str, dol_map: Map):
# Read asm
with open(path, "r") as f:
asm = f.readlines()
# Find sections in asm file by looking for .section directives
for i in range(len(asm)):
sect_match = search(SECTION_REGEX, asm[i])
            if sect_match is not None:
# Section name
sect_name = sect_match.group("Name")
# Header symbols in current object file
my_file_headers = dol_map.headers[get_obj_name(path)]
# Header symbol for current section
my_header = my_file_headers[sect_name]
# Create summable section object
section = AsmSection(my_header.virt_ofs, my_header.size, get_section_type(sect_name))
assert section.start > 0 and section.size >= 0
self.sections.append(section)
|
StarcoderdataPython
|
3220166
|
<filename>tools/XstreamDL_CLI/version.py<gh_stars>10-100
script_name = 'XstreamDL-CLI'
__version__ = '1.4.1'
|
StarcoderdataPython
|
3261925
|
from lib.input import read_lines
input = read_lines(5)
replace = {
'F': 0,
'B': 1,
'L': 0,
'R': 1
}
def seat_id(line):
for char, to in replace.items():
line = line.replace(char, str(to))
return int(line, 2)
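# Worked example: seat_id('FBFBBFFRLR') -> int('0101100101', 2) == 357
# (row 44, column 5, and 44 * 8 + 5 == 357)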
def seat_ids():
return [ seat_id(line) for line in input ]
def highest_seat_id():
return max(seat_ids())
def my_seat_id():
seats = set(seat_ids())
other = [
seat
for seat in seats
if seat - 1 not in seats and seat - 2 in seats
or seat + 1 not in seats and seat + 2 in seats
]
assert len(other) == 2, 'the condition is ambiguous'
return max(other) - 1
def my_seat_id_2():
seats = sorted(seat_ids())
for i in range(len(seats)):
if i == len(seats) - 1:
break
p1, n1 = seats[i:i + 2]
if n1 == p1 + 2:
return p1 + 1
solve_1 = lambda: highest_seat_id()
solve_2 = lambda: my_seat_id_2()
|
StarcoderdataPython
|
341894
|
"""dloud_ads - Abstract Data Structures commonly used in CS scenarios.
Implemented by Data Loud Labs!"""
__version__ = '0.0.2'
__author__ = '<NAME> <<EMAIL>>'
__all__ = []
|
StarcoderdataPython
|
3293586
|
from tir import Webapp
import unittest
from tir.technologies.apw_internal import ApwInternal
import datetime
import time
DateSystem = datetime.datetime.today().strftime('%d/%m/%Y')
DateVal = datetime.datetime(2120, 5, 17)
"""-------------------------------------------------------------------
/*/{Protheus.doc} PLSA298TestCase
TIR - Test cases for the Incoming Invoice X Guide routine (Nota Fiscal de Entrada X Guia)
@author <NAME>
@since 11/2020
@version 12
-------------------------------------------------------------------"""
class PLSA298(unittest.TestCase):
@classmethod
def setUpClass(inst):
inst.oHelper = Webapp()
inst.oHelper.Setup("SIGAPLS",DateSystem,"T1","M SP 01","33")
inst.oHelper.Program("PLSA298")
def test_PLSA298_001(self):
        # DELETE (Excluir)
self.oHelper.SearchBrowse(f'{"M SP 0001000100000240000000022002"}', key=2, index=True)
self.oHelper.SetButton("Outras Ações", sub_item='Excluir')
self.oHelper.SetButton("Confirmar")
        # ADD (Incluir)
self.oHelper.SetButton("Incluir")
self.oHelper.SetBranch("M SP 01 ")
self.oHelper.SetFocus('Fornecedor')
self.oHelper.SetKey("F3")
self.oHelper.SearchBrowse(f'{"000001"}', key=1, index=True)
self.oHelper.SetButton("Ok")
self.oHelper.SetValue("B19_LOJA","01")
self.oHelper.SetFocus('Doc')
time.sleep(5)
self.oHelper.SetKey("F3")
self.oHelper.SearchBrowse(f'{"111201 123"}', key=1, index=True)
self.oHelper.SetButton("Ok")
#self.oHelper.SetValue("B19_FORNEC","000001")
#self.oHelper.SetValue("B19_LOJA","01")
#self.oHelper.SetValue("B19_DOC","111201")
#self.oHelper.SetValue("B19_SERIE","123")
# Grid
self.oHelper.ClickGridCell("Guia",row=1, grid_number=1)
self.oHelper.SetKey("Enter", grid=True, grid_number=1)
self.oHelper.SetValue("B19_GUIA","0001000100000240000000022002", check_value = False)
#self.oHelper.SetKey("F3", grid=True, grid_number=1)
#self.oHelper.SetValue(field = "cChave", value = "ID 33", name_attr = True)
#self.oHelper.SetKey("ENTER")
self.oHelper.SetButton("Salvar")
self.oHelper.SetButton("Cancelar")
        # EDIT (Alterar)
self.oHelper.SearchBrowse(f'{"M SP 0001000100000240000000022002"}', key=2, index=True)
self.oHelper.SetButton("Alterar")
self.oHelper.ClickGridCell("Guia",row=1, grid_number=1)
self.oHelper.SetKey("Enter", grid=True, grid_number=1)
self.oHelper.SetValue("B19_GUIA","0001000100000240000000022003", check_value = False)
self.oHelper.SetButton("Salvar")
        # SEARCH (Pesquisar)
self.oHelper.SearchBrowse(f'{"M SP 0001000100000240000000022003"}', key=2, index=True)
self.oHelper.SetButton("Outras Ações", sub_item='Pesquisar')
#self.oHelper.SetButton("Parâmetros")
#self.oHelper.SetValue("Filial","M SP ") #B19_FILIAL
#self.oHelper.SetValue("Guia","0001000100000240000000022003", check_value = False) #B19_GUIA
self.oHelper.SetButton("Ok")
        # VIEW (Visualizar)
self.oHelper.SearchBrowse(f'{"M SP 0001000100000240000000022003"}', key=2, index=True)
self.oHelper.SetButton("Visualizar")
self.oHelper.CheckResult("B19_FORNEC","000001")
self.oHelper.CheckResult("B19_DOC","111201")
self.oHelper.SetButton("Confirmar")
        # DELETE (Excluir)
self.oHelper.SearchBrowse(f'{"M SP 0001000100000240000000022003"}', key=2, index=True)
self.oHelper.SetButton("Outras Ações", sub_item='Excluir')
self.oHelper.SetButton("Confirmar")
self.oHelper.SetButton('x')
self.oHelper.AssertTrue()
@classmethod
def tearDownClass(inst):
inst.oHelper.TearDown()
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
9683965
|
class Timer:
_timers = {}
@classmethod
def run_deferred(cls,elapsed):
timers = cls._timers.copy()
for timer in timers.keys():
if timer.interval < elapsed:
timer.run()
timer.clear()
def __init__(self,meth,interval):
self._timers[self] = (interval,meth)
self.interval = interval
self.meth = meth
def run(self):
self.meth()
def clear(self):
try:
del self._timers[self]
except:
pass
def set_interval(meth,msec):
meth()
return None
def clear_interval(timer):
return
|
StarcoderdataPython
|
9756367
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.functional import cached_property
from django.views.generic import TemplateView
from .models import Article
from django_attachments.forms import ImageUploadForm
from django_attachments.models import Library
from django_attachments.views import AttachmentEditableMixin
class IndexView(TemplateView):
template_name = 'index.html'
class GetOrCreateArticleMixin(object):
article_title = 'Test'
@cached_property
def article(self):
article = Article.objects.filter(title=self.article_title).first()
if article is None:
attachments = Library()
attachments.save()
gallery = Library()
gallery.save()
article = Article(title=self.article_title, attachments=attachments, gallery=gallery)
article.save()
return article
class LiveUploadAttachments(AttachmentEditableMixin, GetOrCreateArticleMixin, TemplateView):
template_name = 'live_upload_attachments.html'
def get_library(self):
return self.article.attachments
class OnSaveUploadAttachments(AttachmentEditableMixin, GetOrCreateArticleMixin, TemplateView):
article_title = 'Test 2'
template_name = 'on_save_upload_attachments.html'
def get_library(self):
return self.article.attachments
class GalleryUpload(AttachmentEditableMixin, GetOrCreateArticleMixin, TemplateView):
template_name = 'gallery_upload.html'
upload_form_class = ImageUploadForm
def get_library(self):
return self.article.gallery
|
StarcoderdataPython
|
3443200
|
from reliability.Probability_plotting import Weibull_probability_plot
from reliability.Distributions import Weibull_Distribution
import matplotlib.pyplot as plt
import numpy as np
dist_1 = Weibull_Distribution(alpha=200, beta=3)
dist_2 = Weibull_Distribution(alpha=900, beta=4)
plt.subplot(121) # this is for the PDFs of the 2 individual distributions
dist_1.PDF(label=dist_1.param_title_long)
dist_2.PDF(label=dist_2.param_title_long)
plt.legend()
plt.title(
'PDF of two different distributions\nthat are contributing the failure data')
plt.subplot(122) # this will be the probability plot
dist_1_data = dist_1.random_samples(50, seed=1)
dist_2_data = dist_2.random_samples(50, seed=1)
# combine the failure data into one array
all_data = np.hstack([dist_1_data, dist_2_data])
# plot each individual distribution for comparison
dist_1.CDF(label=dist_1.param_title_long)
dist_2.CDF(label=dist_2.param_title_long)
Weibull_probability_plot(failures=all_data) # do the probability plot
# adjust the figure size after creation. Necessary to do it afterwards as it is automatically adjusted within probability_plot
plt.gcf().set_size_inches(13, 7)
plt.subplots_adjust(left=0.08, right=0.96) # formatting the layout
plt.legend()
plt.show()
|
StarcoderdataPython
|
4817109
|
import sys
import subprocess
if len(sys.argv)<5:
print("usage: generaposter.py <title> <name> <date> <address>")
sys.exit(1)
title = sys.argv[1]
name = sys.argv[2]
date = sys.argv[3]
address = sys.argv[4]
f = open('BFSposter.svg')
filecontents = f.read()
filecontents=filecontents.replace('TITLE', title)
filecontents=filecontents.replace('NAME', name)
filecontents=filecontents.replace('TALKDATE', date)
filecontents=filecontents.replace('ADDRESS', address)
outputpath = 'generated-poster.svg'
towrite=open(outputpath, 'w')
towrite.write(filecontents)
towrite.close()
subprocess.call(["inkscape", outputpath, "--export-pdf=poster.pdf"])
|
StarcoderdataPython
|
4918453
|
from django.views.generic.edit import CreateView, UpdateView
from .models import TodoEntry
class TodoEntryCreate(CreateView):
model = TodoEntry
template_name = 'todo_create_form.html'
fields = ['name']
success_url = '/'
def form_valid(self, form):
obj = form.save(commit=False)
obj.user = self.request.user
return super().form_valid(form)
class TodoEntryUpdate(UpdateView):
model = TodoEntry
template_name = 'todo_update_form.html'
fields = ['name', 'done']
success_url = '/'
|
StarcoderdataPython
|
3532588
|
import os
from notes import note
if __name__ == "__main__":
aws_key_id = "AKIAIOSFODNN7EXAMPLE"
aws_key_secret = "<KEY>"
port = int(os.environ.get("PORT", 5000))
note.run(host='0.0.0.0', port=port, debug=True)
|
StarcoderdataPython
|
288664
|
# Generated by Django 2.2.13 on 2020-10-21 21:20
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=64)),
('last_modified', models.DateTimeField(auto_now=True)),
],
),
migrations.RemoveField(
model_name='account',
name='balance',
),
migrations.AddField(
model_name='account',
name='initial_balance',
field=models.DecimalField(decimal_places=2, default=0, max_digits=14),
),
migrations.AddField(
model_name='settings',
name='number_format',
field=models.CharField(choices=[('currency_standard', '1,234.56'), ('number_standard', '1234.56'), ('currency_rounded', '1,235'), ('number_rounded', '1235')], default='currency_standard', max_length=32),
),
migrations.AlterField(
model_name='account',
name='type',
field=models.IntegerField(choices=[(0, 'Bank Account'), (1, 'Savings Account'), (2, 'Pension'), (3, 'Cash'), (4, 'Asset'), (5, 'Liability')], default=0),
),
migrations.AlterField(
model_name='settings',
name='currency',
field=models.CharField(choices=[('GBP', 'GBP (£)'), ('USD', 'USD ($)'), ('EUR', 'EUR (€)')], default='GBP', max_length=3),
),
migrations.CreateModel(
name='Transaction',
fields=[
('id', models.CharField(max_length=32, primary_key=True, serialize=False)),
('date', models.DateField(default=datetime.date.today)),
('time', models.TimeField(default=datetime.time(0, 0))),
('title', models.CharField(max_length=64)),
('amount', models.DecimalField(decimal_places=2, default=0, max_digits=10)),
('notes', models.TextField(blank=True, null=True)),
('last_modified', models.DateTimeField(auto_now=True)),
('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='app.Category')),
('destination', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='destination', to='app.Account')),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='source', to='app.Account')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-date', 'title'],
},
),
]
|
StarcoderdataPython
|
9736591
|
from django.conf.urls import patterns, url
urlpatterns = patterns(
'kitsune.kpi.views',
url(r'^dashboard$', 'dashboard', name='kpi.dashboard'),
)
|
StarcoderdataPython
|
69146
|
<reponame>vvikramb/healthbot-rules
#! /usr/bin/env python3
import requests
from jnpr.junos import Device
from jnpr.junos.utils.config import Config
from jnpr.junos.op.ethport import EthPortTable
from collections import OrderedDict
good_history_uplink = []
def degradation_percent(total_interfaces, current_lldp_interfaces, **kwargs):
global good_history_uplink
good_history_uplink = good_history_uplink + total_interfaces
good_history_uplink = list(OrderedDict.fromkeys(good_history_uplink))
total_interfaces_len = len(good_history_uplink)
uplink_down_list = []
for intf in good_history_uplink:
if intf not in current_lldp_interfaces:
uplink_down_list.append(intf)
uplink_down_length = len(uplink_down_list)
if total_interfaces_len == 0:
return 0
else:
return int((uplink_down_length / total_interfaces_len ) * 100 )
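# Worked example (hypothetical interface names): if good_history_uplink has
# accumulated ['ge-0/0/0', 'ge-0/0/1', 'ge-0/0/2', 'ge-0/0/3'] and the current
# LLDP scan only reports three of them, one of four known uplinks is down and
# the function returns int((1 / 4) * 100) == 25.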
|
StarcoderdataPython
|
3288723
|
<reponame>yitsushi/pulumi-digitalocean
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from . import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .app import *
from .cdn import *
from .certificate import *
from .container_registry import *
from .container_registry_docker_credentials import *
from .custom_image import *
from .database_cluster import *
from .database_connection_pool import *
from .database_db import *
from .database_firewall import *
from .database_replica import *
from .database_user import *
from .dns_record import *
from .domain import *
from .droplet import *
from .droplet_snapshot import *
from .firewall import *
from .floating_ip import *
from .floating_ip_assignment import *
from .get_account import *
from .get_app import *
from .get_certificate import *
from .get_container_registry import *
from .get_database_cluster import *
from .get_database_replica import *
from .get_domain import *
from .get_domains import *
from .get_droplet import *
from .get_droplet_snapshot import *
from .get_droplets import *
from .get_firewall import *
from .get_floating_ip import *
from .get_image import *
from .get_images import *
from .get_kubernetes_cluster import *
from .get_kubernetes_versions import *
from .get_load_balancer import *
from .get_project import *
from .get_projects import *
from .get_record import *
from .get_records import *
from .get_region import *
from .get_regions import *
from .get_sizes import *
from .get_spaces_bucket import *
from .get_spaces_bucket_object import *
from .get_spaces_bucket_objects import *
from .get_spaces_buckets import *
from .get_ssh_key import *
from .get_ssh_keys import *
from .get_tag import *
from .get_tags import *
from .get_volume import *
from .get_volume_snapshot import *
from .get_vpc import *
from .kubernetes_cluster import *
from .kubernetes_node_pool import *
from .load_balancer import *
from .project import *
from .project_resources import *
from .provider import *
from .spaces_bucket import *
from .spaces_bucket_object import *
from .ssh_key import *
from .tag import *
from .volume import *
from .volume_attachment import *
from .volume_snapshot import *
from .vpc import *
from ._inputs import *
from . import outputs
# Make subpackages available:
if typing.TYPE_CHECKING:
import pulumi_digitalocean.config as config
else:
config = _utilities.lazy_import('pulumi_digitalocean.config')
_utilities.register(
resource_modules="""
[
{
"pkg": "digitalocean",
"mod": "index/app",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/app:App": "App"
}
},
{
"pkg": "digitalocean",
"mod": "index/cdn",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/cdn:Cdn": "Cdn"
}
},
{
"pkg": "digitalocean",
"mod": "index/certificate",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/certificate:Certificate": "Certificate"
}
},
{
"pkg": "digitalocean",
"mod": "index/containerRegistry",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/containerRegistry:ContainerRegistry": "ContainerRegistry"
}
},
{
"pkg": "digitalocean",
"mod": "index/containerRegistryDockerCredentials",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/containerRegistryDockerCredentials:ContainerRegistryDockerCredentials": "ContainerRegistryDockerCredentials"
}
},
{
"pkg": "digitalocean",
"mod": "index/customImage",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/customImage:CustomImage": "CustomImage"
}
},
{
"pkg": "digitalocean",
"mod": "index/databaseCluster",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/databaseCluster:DatabaseCluster": "DatabaseCluster"
}
},
{
"pkg": "digitalocean",
"mod": "index/databaseConnectionPool",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/databaseConnectionPool:DatabaseConnectionPool": "DatabaseConnectionPool"
}
},
{
"pkg": "digitalocean",
"mod": "index/databaseDb",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/databaseDb:DatabaseDb": "DatabaseDb"
}
},
{
"pkg": "digitalocean",
"mod": "index/databaseFirewall",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/databaseFirewall:DatabaseFirewall": "DatabaseFirewall"
}
},
{
"pkg": "digitalocean",
"mod": "index/databaseReplica",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/databaseReplica:DatabaseReplica": "DatabaseReplica"
}
},
{
"pkg": "digitalocean",
"mod": "index/databaseUser",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/databaseUser:DatabaseUser": "DatabaseUser"
}
},
{
"pkg": "digitalocean",
"mod": "index/dnsRecord",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/dnsRecord:DnsRecord": "DnsRecord"
}
},
{
"pkg": "digitalocean",
"mod": "index/domain",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/domain:Domain": "Domain"
}
},
{
"pkg": "digitalocean",
"mod": "index/droplet",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/droplet:Droplet": "Droplet"
}
},
{
"pkg": "digitalocean",
"mod": "index/dropletSnapshot",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/dropletSnapshot:DropletSnapshot": "DropletSnapshot"
}
},
{
"pkg": "digitalocean",
"mod": "index/firewall",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/firewall:Firewall": "Firewall"
}
},
{
"pkg": "digitalocean",
"mod": "index/floatingIp",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/floatingIp:FloatingIp": "FloatingIp"
}
},
{
"pkg": "digitalocean",
"mod": "index/floatingIpAssignment",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/floatingIpAssignment:FloatingIpAssignment": "FloatingIpAssignment"
}
},
{
"pkg": "digitalocean",
"mod": "index/kubernetesCluster",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/kubernetesCluster:KubernetesCluster": "KubernetesCluster"
}
},
{
"pkg": "digitalocean",
"mod": "index/kubernetesNodePool",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/kubernetesNodePool:KubernetesNodePool": "KubernetesNodePool"
}
},
{
"pkg": "digitalocean",
"mod": "index/loadBalancer",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/loadBalancer:LoadBalancer": "LoadBalancer"
}
},
{
"pkg": "digitalocean",
"mod": "index/project",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/project:Project": "Project"
}
},
{
"pkg": "digitalocean",
"mod": "index/projectResources",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/projectResources:ProjectResources": "ProjectResources"
}
},
{
"pkg": "digitalocean",
"mod": "index/spacesBucket",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/spacesBucket:SpacesBucket": "SpacesBucket"
}
},
{
"pkg": "digitalocean",
"mod": "index/spacesBucketObject",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/spacesBucketObject:SpacesBucketObject": "SpacesBucketObject"
}
},
{
"pkg": "digitalocean",
"mod": "index/sshKey",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/sshKey:SshKey": "SshKey"
}
},
{
"pkg": "digitalocean",
"mod": "index/tag",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/tag:Tag": "Tag"
}
},
{
"pkg": "digitalocean",
"mod": "index/volume",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/volume:Volume": "Volume"
}
},
{
"pkg": "digitalocean",
"mod": "index/volumeAttachment",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/volumeAttachment:VolumeAttachment": "VolumeAttachment"
}
},
{
"pkg": "digitalocean",
"mod": "index/volumeSnapshot",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/volumeSnapshot:VolumeSnapshot": "VolumeSnapshot"
}
},
{
"pkg": "digitalocean",
"mod": "index/vpc",
"fqn": "pulumi_digitalocean",
"classes": {
"digitalocean:index/vpc:Vpc": "Vpc"
}
}
]
""",
resource_packages="""
[
{
"pkg": "digitalocean",
"token": "<PASSWORD>",
"fqn": "pulumi_digitalocean",
"class": "Provider"
}
]
"""
)
|
StarcoderdataPython
|
5051126
|
#/usr/bin/python
import torch
import numpy as np
import random
from collections import deque
from model import LinearQNet, Qtrainer
from plotter import plot
import config
import os
# Constant parameters for DQN
MAX_MEMORY = 150_000
BATCH_SIZE = 1500
LEARNING_RATE = 0.001
class DQNAgent:
def __init__(self, hiddenLayers):
self.hiddenLayers = hiddenLayers
self.noOfGames = config.NO_OF_GAMES
#To controlling randomness of the game
self.epsilon = 0
#Discount rate for agent, needs to be smaller than 1
self.gamma = 0.9
#Using deque data structure for memory allocation
#If memory is exceeded, deque will pop front to conserve memory
self.memory = deque(maxlen=MAX_MEMORY)
self.model = LinearQNet(11, hiddenLayers, 3)
self.trainer = Qtrainer(self.model, LEARNING_RATE, self.gamma)
def getState(self, snakeGame):
head = snakeGame.snake[0]
pointLeft = config.Loc(head.x - config.BLOCK_SIZE, head.y)
pointRight = config.Loc(head.x + config.BLOCK_SIZE, head.y)
pointDown = config.Loc(head.x, head.y + config.BLOCK_SIZE)
pointUp = config.Loc(head.x, head.y - config.BLOCK_SIZE)
dirLeft = snakeGame.direction == config.Direction.LEFT
dirRight = snakeGame.direction == config.Direction.RIGHT
dirUp = snakeGame.direction == config.Direction.UP
dirDown = snakeGame.direction == config.Direction.DOWN
state = [
(dirRight and snakeGame.onCollisionEnter2D(pointRight)) or
(dirLeft and snakeGame.onCollisionEnter2D(pointLeft)) or
(dirUp and snakeGame.onCollisionEnter2D(pointUp)) or
(dirDown and snakeGame.onCollisionEnter2D(pointDown)),
#Danger from right
(dirRight and snakeGame.onCollisionEnter2D(pointDown)) or
(dirLeft and snakeGame.onCollisionEnter2D(pointUp)) or
(dirUp and snakeGame.onCollisionEnter2D(pointRight)) or
(dirDown and snakeGame.onCollisionEnter2D(pointLeft)),
#Danger from left
(dirRight and snakeGame.onCollisionEnter2D(pointUp)) or
(dirLeft and snakeGame.onCollisionEnter2D(pointDown)) or
(dirUp and snakeGame.onCollisionEnter2D(pointLeft)) or
(dirDown and snakeGame.onCollisionEnter2D(pointRight)),
#Move direction
dirLeft, dirRight, dirUp, dirDown,
#Food location
snakeGame.food.x < snakeGame.head.x, #Food left
snakeGame.food.x > snakeGame.head.x, #Food right
snakeGame.food.y < snakeGame.head.y, #Food up
snakeGame.food.y > snakeGame.head.y #Food down
]
return np.array(state, dtype=int)
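    # The 11 flags returned by getState above are, in order:
    #   [danger straight, danger right, danger left,
    #    moving left, moving right, moving up, moving down,
    #    food left, food right, food up, food down]
    # which matches the 11 input units of LinearQNet(11, hiddenLayers, 3).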
def remember(self, state, action, reward, nextState, gameOver):
#Store data as a tuple
self.memory.append((state, action, reward, nextState, gameOver)) # If max memory is reached or exceeded, pop front
def trainWithLongMemory(self):
if len(self.memory) > BATCH_SIZE:
miniSampleBatch = random.sample(self.memory, BATCH_SIZE) # List of tuples
else:
miniSampleBatch = self.memory
states, actions, rewards, nextStates, gameOvers = zip(*miniSampleBatch)
self.trainer.trainStep(states, actions, rewards, nextStates, gameOvers)
def trainWithShortMemory(self, state, action, reward, nextState, gameOver):
self.trainer.trainStep(state, action, reward, nextState, gameOver)
def getAction(self, state):
        #Make random moves (i.e. the exploration / exploitation tradeoff)
        #Random moves explore the current environment so the agent can learn
        #After learning, randomness decreases because the agent knows which move to choose
        #This can be thought of as generating random parameters in linear regression etc.
        #The more games the agent has trained, the lower the epsilon, decreasing randomness
        self.epsilon = 100 - self.noOfGames
finalMove = [0, 0, 0]
if np.random.randint(0, 200) < self.epsilon:
move = random.randint(0,2)
finalMove[move] = 1
else:
stateZ = torch.tensor(state, dtype=torch.float)
prediction = self.model(stateZ)
move = torch.argmax(prediction).item()
finalMove[move] = 1
return finalMove
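    # Rough exploration schedule implied by getAction above (a sketch):
    # epsilon = 100 - noOfGames and a random move is taken when
    # randint(0, 200) < epsilon, i.e. with probability ~epsilon/200:
    #   game 0    -> ~50% random moves
    #   game 60   -> ~20% random moves
    #   game 100+ -> purely model-chosen moves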
def train(snakeGame):
pltScores = list()
pltMeanScores = list()
totScore = 0
record = 0
agent = DQNAgent([256])
#snakeGame = SnakeGameAI()
while True:
#Get previous state
oldState = agent.getState(snakeGame)
#Get current move
finalMove = agent.getAction(oldState)
#Perform move on game, then get the new state
reward, gameOver, score = snakeGame.playStep(finalMove)
newState = agent.getState(snakeGame)
#Train with short memory
agent.trainWithShortMemory(oldState, finalMove, reward, newState, gameOver)
#Remembering the states
agent.remember(oldState, finalMove, reward, newState, gameOver)
if gameOver:
# Replay memory (experience replay) on long memory
# Plot initial result
snakeGame.reset()
agent.noOfGames += 1
agent.trainWithLongMemory()
            if score > config.RECORD:
                print("MODEL SAVED")
                config.SaveScores(agent.noOfGames, score, agent.model.model_layers)
                agent.model.saveModel()
            if score > record:
                record = score
            print(f"Game: {agent.noOfGames} | Score: {score} | Record: {record}")
pltScores.append(score)
totScore += score
meanScore = totScore / agent.noOfGames
pltMeanScores.append(meanScore)
plot(pltScores, pltMeanScores)
|
StarcoderdataPython
|
3382006
|
<gh_stars>0
#!/usr/bin/env python2
# copyright (c) 2014 the moorecoin core developers
# distributed under the mit software license, see the accompanying
# file copying or http://www.opensource.org/licenses/mit-license.php.
#
# test rpc http basics
#
from test_framework.test_framework import moorecointestframework
from test_framework.util import *
import base64
try:
import http.client as httplib
except ImportError:
import httplib
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
class httpbasicstest (moorecointestframework):
def setup_nodes(self):
return start_nodes(4, self.options.tmpdir, extra_args=[['-rpckeepalive=1'], ['-rpckeepalive=0'], [], []])
def run_test(self):
#################################################
# lowlevel check for http persistent connection #
#################################################
url = urlparse.urlparse(self.nodes[0].url)
authpair = url.username + ':' + url.password
headers = {"authorization": "basic " + base64.b64encode(authpair)}
        conn = httplib.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('post', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert_equal('"error":null' in out1, True)
        assert_equal(conn.sock != None, True) #according to http/1.1 connection must still be open!
        #send 2nd request without closing connection
        conn.request('post', '/', '{"method": "getchaintips"}', headers)
        out2 = conn.getresponse().read()
        assert_equal('"error":null' in out1, True) #must also respond with a correct json-rpc message
        assert_equal(conn.sock != None, True) #according to http/1.1 connection must still be open!
conn.close()
#same should be if we add keep-alive because this should be the std. behaviour
headers = {"authorization": "basic " + base64.b64encode(authpair), "connection": "keep-alive"}
        conn = httplib.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('post', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert_equal('"error":null' in out1, True)
        assert_equal(conn.sock != None, True) #according to http/1.1 connection must still be open!
        #send 2nd request without closing connection
        conn.request('post', '/', '{"method": "getchaintips"}', headers)
        out2 = conn.getresponse().read()
        assert_equal('"error":null' in out1, True) #must also respond with a correct json-rpc message
        assert_equal(conn.sock != None, True) #according to http/1.1 connection must still be open!
conn.close()
#now do the same with "connection: close"
headers = {"authorization": "basic " + base64.b64encode(authpair), "connection":"close"}
        conn = httplib.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('post', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert_equal('"error":null' in out1, True)
        assert_equal(conn.sock != None, False) #now the connection must be closed after the response
#node1 (2nd node) is running with disabled keep-alive option
urlnode1 = urlparse.urlparse(self.nodes[1].url)
authpair = urlnode1.username + ':' + urlnode1.password
headers = {"authorization": "basic " + base64.b64encode(authpair)}
        conn = httplib.HTTPConnection(urlnode1.hostname, urlnode1.port)
        conn.connect()
        conn.request('post', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert_equal('"error":null' in out1, True)
        assert_equal(conn.sock != None, False) #connection must be closed because keep-alive was set to false
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is off
urlnode2 = urlparse.urlparse(self.nodes[2].url)
authpair = urlnode2.username + ':' + urlnode2.password
headers = {"authorization": "basic " + base64.b64encode(authpair)}
        conn = httplib.HTTPConnection(urlnode2.hostname, urlnode2.port)
        conn.connect()
        conn.request('post', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert_equal('"error":null' in out1, True)
        assert_equal(conn.sock != None, True) #connection must be closed because moorecoind should use keep-alive by default
if __name__ == '__main__':
httpbasicstest ().main ()
|
StarcoderdataPython
|
1964888
|
<reponame>kellya/jopoin-jrnl
#!/bin/env python
import yaml
import requests
import json
import sys
from datetime import datetime
from pathlib import Path
import select
import click
__version__ = "0.3.2"
class Journal:
"""The basic object to track journaly things"""
def __init__(self, joplin_url=None, joplin_token=None, note_id=None):
self.joplin_url = joplin_url
self.joplin_token = joplin_token
self.note_id = note_id
def ping(self):
"""Ensure the clipper/api service is answering"""
try:
r = requests.get(f"{self.joplin_url}/ping")
if r.status_code == 200:
return True
else:
return False
except Exception:
# This is a broad exception clause, but ultimately whatever was
# put in as the base_url is not valid, so just return false and
# make them figure out what it is
return False
def get_journal(self):
"""Returns the journal json content"""
r = requests.get(
f"{self.joplin_url}/notes/{self.note_id}?token={self.joplin_token}&fields=body"
)
return r.content.decode()
def write_entry(self, entry=None):
###post the journal entry to joplin api###
startnote = json.loads(self.get_journal())
# There is probably a better way to do this, I don't want empty lines
# at the first entry, but every other entry should start with newlines
# to separate it from the other entries. This only matters for the
# first entry, seems stupid to check it every single time
if not startnote["body"] == "":
prefix = "\n\n"
else:
prefix = ""
postdata = {
"body": startnote["body"]
+ prefix
+ f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] {entry}"
}
r = requests.put(
f"{self.joplin_url}/notes/{self.note_id}?token={self.joplin_token}",
data=json.dumps(postdata),
)
if r.status_code == 200:
return True
else:
return False
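    # Illustrative note body produced by write_entry above after two entries
    # (the timestamps are made up):
    #   [2021-01-01 09:15:00] first entry
    #
    #   [2021-01-02 18:30:00] second entry
    # The "\n\n" prefix separates entries except before the very first one.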
def dump_journal(self):
return json.loads(self.get_journal())["body"]
@click.command()
@click.version_option(__version__, prog_name="joplin-jrnl")
@click.option("--dump", is_flag=True, help="Dump the contents of the journal")
@click.option("--quiet", is_flag=True, help="Do not emit the 'entry added' output")
@click.option("--edit", is_flag=True, help="Edit an entry with your default editor")
@click.option(
"--config",
type=click.Path(),
help="Specify an alternate configuration file location",
)
@click.argument("entry", nargs=-1)
def main(dump, quiet, edit, config, entry):
if not config:
home = str(Path.home())
config = f"{home}/.config/joplin-jrnl/conf.yaml"
with open(config) as file:
# I am about to use config as the data contents, wiping out the
# commandline option values for config.
config = yaml.safe_load(file)
# instantiate a journal
journal = Journal(config["base_url"], config["token"], config["note_id"])
# Test the URL and write what was given in argv if we get an OK
if dump and journal.ping():
print(journal.dump_journal())
sys.exit()
if edit and journal.ping():
MARKER = "###### Everything below is ignored ######\n"
entry = click.edit("\n" + MARKER)
if entry is not None:
entry_posted = journal.write_entry(entry.split(MARKER, 1)[0].rstrip("\n"))
else:
entry_posted = False
elif journal.ping():
if select.select(
[
sys.stdin,
],
[],
[],
0.0,
)[0]:
entry_posted = journal.write_entry(sys.stdin.readlines()[0].rstrip("\n"))
if entry_posted and entry_posted != "":
click.echo(click.style("[Entry added]", fg="green"))
sys.exit(0)
else:
click.echo(click.style("STDIN data not posted to journal"), fg="red")
# Since I want the whole line to be the args, handle the fact that
# specifying --quiet gives us one more argument to skip
clean_args = []
# Clean up the args to make sure we don't get the program name, or any --options
for arg in sys.argv[1:]:
if not arg.startswith("--"):
clean_args.append(arg)
if clean_args:
entry_posted = journal.write_entry(" ".join(clean_args))
else:
click.echo(click.style("- No entry added -", fg="yellow"))
sys.exit()
else:
click.echo(
click.style(
f"Error: did not get successful response from {journal.joplin_url}",
fg="red",
)
)
sys.exit(99)
if entry_posted and not quiet:
click.echo(click.style("[Entry added]", fg="green"))
elif not entry_posted:
click.echo(click.style("- No entry added -", fg="yellow"))
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
125461
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-09 10:06
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('threads', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='thread',
old_name='Subject',
new_name='subject',
),
]
|
StarcoderdataPython
|
3478656
|
<reponame>TinkoffCreditSystems/overhave<gh_stars>10-100
import logging
from contextlib import contextmanager
from typing import Any, Iterator, List
import sqlalchemy.orm as so
from sqlalchemy.exc import ProgrammingError
from overhave.db.base import Session
from overhave.db.tables import FeatureType
logger = logging.getLogger(__name__)
@contextmanager
def create_session(**kwargs: Any) -> Iterator[so.Session]:
"""Provide a transactional scope around a series of operations."""
new_session = Session(**kwargs)
try:
yield new_session
new_session.commit()
except Exception:
new_session.rollback()
raise
finally:
new_session.close()
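# Usage sketch for create_session above (FeatureType is already imported here):
#   with create_session() as session:
#       session.add(FeatureType(name="functional"))
# The commit happens on a clean exit and a rollback on any exception.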
def ensure_feature_types_exist(feature_types: List[str]) -> None:
with create_session() as session:
try:
for feature_type in feature_types:
existing_type = session.query(FeatureType).filter_by(name=feature_type).one_or_none()
if existing_type is not None:
continue
session.add(FeatureType(name=feature_type)) # type: ignore
logger.info("Created feature type '%s'", feature_type)
except ProgrammingError:
logger.exception("Could not create feature types dynamically!")
|
StarcoderdataPython
|
351519
|
<reponame>THM-MA/XSDATA-waypoint
from dataclasses import dataclass
from .t_correlation_property import TCorrelationProperty
__NAMESPACE__ = "http://www.omg.org/spec/BPMN/20100524/MODEL"
@dataclass
class CorrelationProperty(TCorrelationProperty):
class Meta:
name = "correlationProperty"
namespace = "http://www.omg.org/spec/BPMN/20100524/MODEL"
|
StarcoderdataPython
|
20281
|
import zipfile
zip_file = zipfile.ZipFile("zip_archive.zip", "w")
zip_file.write("textfile_for_zip_01")
zip_file.write("textfile_for_zip_02")
zip_file.write("textfile_for_zip_03")
# print(zipfile.is_zipfile("zip_archive.zip"))
# zip_file = zipfile.ZipFile("zip_archive.zip")
# print(zip_file.namelist())
# print(zip_file.infolist())
# zip_info = zip_file.getinfo("textfile_for_zip_02")
# print(zip_info.file_size)
# print(zip_file.read("textfile_for_zip_01"))
zip_file.extract("textfile_for_zip_02")
zip_file.extractall()
zip_file.close()
|
StarcoderdataPython
|
1604927
|
<gh_stars>1-10
#!/usr/bin/env python
import os.path
from gmprocess.io.fdsn.core import read_fdsn
from gmprocess.io.test_utils import read_data_dir
from gmprocess.streamcollection import StreamCollection
from gmprocess.processing import process_streams
def test():
datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'BK.CMB*.mseed')
streams = []
for datafile in datafiles:
streams += read_fdsn(datafile)
assert streams[0].get_id() == 'BK.CMB.HN'
datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'TA.M04C*.mseed')
streams = []
for datafile in datafiles:
streams += read_fdsn(datafile)
assert streams[0].get_id() == 'TA.M04C.HN'
# DEBUGGING
sc = StreamCollection(streams)
psc = process_streams(sc, origin)
if __name__ == '__main__':
os.environ['CALLED_FROM_PYTEST'] = 'True'
test()
|
StarcoderdataPython
|
159329
|
# -*- coding: utf8 -*-
from time import sleep
import gps
import gsm
import json
import sqlite3
db = sqlite3.connect('seaglass.sq3')
cur = db.cursor()
print 'IMEI =', gsm.getIMEI()
while True:
xs = gps.readGLL()
netscan = gsm.doCNETSCAN()
ceng = gsm.getCENG()
if netscan:
xs['netscan'] = netscan
if ceng:
xs['ceng'] = ceng
print json.dumps(xs, indent=4)
cur.execute("INSERT INTO log VALUES (datetime('now'), ?)", (json.dumps(xs),))
db.commit()
sleep(30)
|
StarcoderdataPython
|
8051032
|
from covid_test_1.get_covid_data import get_covid_data
import pandas as pd
import pytest
def test_get_covid_data():
"""Test the get_covid_data() function"""
# Tests that the function returns the correct data for given arguments
test_df = pd.DataFrame({'cases': [698], 'cumulative_cases': [161969], 'date_report': ['25-08-2021'], 'province': ['BC']})
assert get_covid_data('cases', 'BC', date = '25-08-2021').equals(test_df)
# Tests that the function returns a DataFrame of correct size
assert isinstance(get_covid_data(), pd.DataFrame)
assert get_covid_data('cases', date = '22-11-2021').shape == (14, 4)
def test_get_covid_data_errors():
"""Test that get_covid_data() raises the correct errors"""
# Tests that ValueErrors are raised when arguments are of right type but inappropriate value
    with pytest.raises(ValueError):
        get_covid_data(date = '33-22-1997')
    with pytest.raises(ValueError):
        get_covid_data('deaths')
    with pytest.raises(ValueError):
        get_covid_data(loc = "HK")
|
StarcoderdataPython
|
283250
|
<reponame>deifyed/kaex
from kaex.models.resource import Resource
class Service(Resource):
def __init__(self, app):
self.apiVersion = 'v1'
self.kind = 'Service'
self.metadata = {
'name': app.name
}
ports = [
{ 'port': app.service['port'], 'targetPort': app.service['targetPort'] }
]
selector = { 'app': app.name }
service_type = 'ClusterIP'
self.spec = {
'ports': ports,
'selector': selector,
'type': service_type
}
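# Illustrative result (a sketch; assumes an app named "web" with
# service = {"port": 80, "targetPort": 8080}):
#   metadata -> {"name": "web"}
#   spec     -> {"ports": [{"port": 80, "targetPort": 8080}],
#                "selector": {"app": "web"}, "type": "ClusterIP"}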
|
StarcoderdataPython
|
1676565
|
<filename>frappe/patches/v7_1/setup_integration_services.py
from __future__ import unicode_literals
import frappe
from frappe.exceptions import DataError
from frappe.utils.password import get_decrypted_password
import json
app_list = [
{"app_name": "razorpay_integration", "service_name": "Razorpay", "doctype": "Razorpay Settings", "remove": True},
{"app_name": "paypal_integration", "service_name": "PayPal", "doctype": "PayPal Settings", "remove": True},
{"app_name": "frappe", "service_name": "Dropbox Integration", "doctype": "Dropbox Backup", "remove": False}
]
def execute():
frappe.reload_doc("integration_broker", "doctype", "integration_service")
installed_apps = frappe.get_installed_apps()
for app_details in app_list:
if app_details["app_name"] in installed_apps:
try:
setup_integration_service(app_details)
except DataError:
pass
finally:
if app_details["remove"]:
uninstall_app(app_details["app_name"])
frappe.delete_doc("DocType", "Dropbox Backup")
def setup_integration_service(app_details):
settings = get_app_settings(app_details)
if not settings:
raise DataError
if frappe.db.exists("Integration Service", app_details["service_name"]):
integration_service = frappe.get_doc("Integration Service", app_details["service_name"])
else:
integration_service = frappe.new_doc("Integration Service")
integration_service.service = app_details["service_name"]
integration_service.enabled = 1
integration_service.custom_settings_json = json.dumps(settings) if settings else ''
integration_service.flags.ignore_mandatory = True
integration_service.save(ignore_permissions=True)
def get_app_settings(app_details):
from frappe.integration_broker.doctype.integration_service.integration_service import get_integration_controller
parameters = {}
doctype = docname = app_details["doctype"]
app_settings = get_parameters(app_details)
settings = app_settings["settings"]
controller = get_integration_controller(app_details["service_name"], setup=False)
for d in controller.parameters_template:
if settings.get(d.fieldname):
if ''.join(set(settings.get(d.fieldname))) == '*':
setattr(settings, d.fieldname, get_decrypted_password(doctype, docname, d.fieldname, raise_exception=True))
parameters.update({d.fieldname : settings.get(d.fieldname)})
return parameters
def uninstall_app(app_name):
from frappe.installer import remove_from_installed_apps
remove_from_installed_apps(app_name)
def get_parameters(app_details):
if app_details["service_name"] == "Razorpay":
return {"settings": frappe.get_doc(app_details["doctype"])}
elif app_details["service_name"] == "PayPal":
if frappe.conf.paypal_username and frappe.conf.paypal_password and frappe.conf.paypal_signature:
return {
"settings": {
"api_username": frappe.conf.paypal_username,
"api_password": <PASSWORD>,
"signature": frappe.conf.paypal_signature
}
}
else:
return {"settings": frappe.get_doc(app_details["doctype"])}
elif app_details["service_name"] == "Dropbox Integration":
doc = frappe.db.get_value(app_details["doctype"], None,
["dropbox_access_key", "dropbox_access_secret", "upload_backups_to_dropbox"], as_dict=1)
if not (frappe.conf.dropbox_access_key and frappe.conf.dropbox_secret_key):
raise DataError
return {
"settings": {
"app_access_key": frappe.conf.dropbox_access_key,
"app_secret_key": frappe.conf.dropbox_secret_key,
"dropbox_access_key": doc.dropbox_access_key,
"dropbox_access_secret": doc.dropbox_access_secret,
"backup_frequency": doc.upload_backups_to_dropbox
}
}
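
# Note (sketch of how this patch runs, not part of the patch itself): frappe imports
# patch modules listed in patches.txt during `bench migrate` and calls their execute()
# once per site, e.g. via a line like:
#   frappe.patches.v7_1.setup_integration_services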
|
StarcoderdataPython
|
5136641
|
import os
import re
import unicodedata
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "systori.settings")
import django
django.setup()
from systori.apps.task.models import *
from systori.apps.project.models import *
p22 = Project.objects.get(id=22)
for job in p22.jobs.all():
for taskgroup in job.taskgroups.all():
print("before name: {}".format(taskgroup.name))
print("before description: {}".format(taskgroup.description))
name_striped = taskgroup.name.strip()
description_striped = taskgroup.description.strip()
        taskgroup.name = re.sub(r"\s+", " ", name_striped)
        taskgroup.name = unicodedata.normalize("NFC", taskgroup.name)
        taskgroup.description = re.sub(r"\s+", " ", description_striped)
        taskgroup.description = unicodedata.normalize("NFC", taskgroup.description)
print("after name: {}".format(taskgroup.name))
print("after description: {}".format(taskgroup.description))
taskgroup.save()
for task in taskgroup.tasks.all():
print("before name: {}".format(task.name))
print("before description: {}".format(task.description))
name_striped = task.name.strip()
description_striped = task.description.strip()
            task.name = re.sub(r"\s+", " ", name_striped)
            task.name = unicodedata.normalize("NFC", task.name)
            task.description = re.sub(r"\s+", " ", description_striped)
            task.description = unicodedata.normalize("NFC", task.description)
print("after name: {}".format(task.name))
print("after description: {}".format(task.description))
print("\n\n\n")
task.save()
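
# A possible refactor (sketch only, not used above): the strip / collapse-whitespace /
# NFC-normalize steps are identical for task groups and tasks, so they could be pulled
# into a single helper like this one.
def normalize_text(value):
    """Strip, collapse runs of whitespace, and apply NFC unicode normalization."""
    collapsed = re.sub(r"\s+", " ", value.strip())
    return unicodedata.normalize("NFC", collapsed)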
|
StarcoderdataPython
|
8138895
|
from flask import Flask, render_template, redirect, request, flash, jsonify
from flask import session
from flask_session import Session
from celery import Celery
# from celery.utils.log import get_task_logger
from config import Config
from forms import UserNameForm
from forms import postUrlForm
from markupsafe import escape
import BlurtChain as BC
app = Flask(__name__)
app.config.from_object(Config)
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)
Session(app)
# logger = get_task_logger(__name__)
@app.errorhandler(404)
# This handles 404 error
def page_not_found(e):
return render_template('404.html')
@app.route('/', methods=['GET', 'POST'])
def blurt():
form = UserNameForm(request.form)
if request.method == 'POST':
if form.validate():
username = request.form['username'].lower()
return redirect(f'/{username}')
else:
flash('Username is Required')
return render_template('blurt/profile.html', form=form)
# @celery.task
# def reward_summary_task(username):
# blurt = BC.BlurtChain(username)
# duration = 30
# key_name = username + '_reward_' + str(duration)
# data = blurt.get_reward_summary(duration)
# # data['reward_data'] = reward_data
# # save reward_data in firbase
# blurt.set_data_fb("reward_summary", key_name, data)
# return data
# @app.route('/<username>')
# @app.route('/<username>/')
# def blurt_profile_data(username=None):
# data = {}
# if username:
# username = escape(username).lower()
# blurt = BC.BlurtChain(username)
# # celery background task
# data = reward_summary_task.delay(username)
# print("REWARD_SUMMARY_TASK_DELAY ", data)
# print(vars(data))
# # check session profile_data
# profile_data = username + '_profile_data'
# if session.get(profile_data):
# data = session[profile_data]
# else:
# data = blurt.get_account_info()
# vote_data = blurt.get_vote_history(username)
# data['labels'] = vote_data['labels']
# data['permlinks'] = vote_data['permlinks']
# data['upvotes'] = vote_data['upvotes']
# data['count_data'] = vote_data['count_data']
# data['weight_data'] = vote_data['weight_data']
# data['total_votes'] = vote_data['total_votes']
# session[profile_data] = data
# return render_template('blurt/profile_data.html',
# username=blurt.username, data=data)
# @app.route('/blurt/stats')
# @app.route('/blurt/stats/')
# def stats():
# blurt = BC.BlurtChain(username=None)
# stats_data = blurt.get_stats()
# return render_template('blurt/stats.html', data=stats_data)
# @app.route('/blurt/upvote', methods=['GET', 'POST'])
# @app.route('/blurt/upvote/', methods=['GET', 'POST'])
# def upvote():
# form = postUrlForm(request.form)
# if request.method == 'POST':
# if form.validate():
# url = request.form['url'].lower()
# blurt = BC.BlurtChain(username=None)
# result = blurt.process_upvote(url)
# flash(result['message'])
# else:
# # check empty url
# flash('Error: URL is required')
# return render_template('blurt/upvote.html', form=form)
# BLURT API
# @app.route('/api/blurt/follower/<username>')
# @app.route('/api/blurt/follower/<username>/')
# def blurt_follower(username=None):
# data = {}
# if username:
# blurt = BC.BlurtChain(username)
# data = blurt.get_follower()
# return jsonify(data)
# @app.route('/api/blurt/following/<username>')
# @app.route('/api/blurt/following/<username>/')
# def blurt_following(username=None):
# data = {}
# if username:
# blurt = BC.BlurtChain(username)
# data = blurt.get_following()
# return jsonify(data)
# @app.route('/api/blurt/votes/<username>')
# @app.route('/api/blurt/votes/<username>/')
# def blurt_votes(username=None):
# data = {}
# if username:
# blurt = BC.BlurtChain(username)
# data = blurt.get_vote_history()
# return jsonify(data)
# @app.route('/api/blurt/mute/<username>')
# @app.route('/api/blurt/mute/<username>/')
# def blurt_mute(username=None):
# data = {}
# if username:
# blurt = BC.BlurtChain(username)
# data = blurt.get_mute()
# return jsonify(data)
# @app.route('/api/blurt/delegation/<username>/<option>')
# @app.route('/api/blurt/delegation/<username>/<option>/')
# def blurt_delegation(username=None, option=None):
# delegation_type = ["in", "out", "exp"]
# data = {}
# if username and option in delegation_type:
# # check session delegation_data
# delegation_data = username + '_delegation_' + option
# if session.get(delegation_data):
# data = session[delegation_data]
# else:
# blurt = BC.BlurtChain(username)
# data = blurt.get_delegation_new(option)
# session[delegation_data] = data
# return jsonify(data)
# @app.route('/api/blurt/reward/<username>/<int:duration>')
# @app.route('/api/blurt/reward/<username>/<int:duration>/')
# @app.route('/api/blurt/reward/<username>/<int:duration>/<option>')
# @app.route('/api/blurt/reward/<username>/<int:duration>/<option>/')
# def blurt_reward(username=None, duration=1, option=None):
# data = {}
# if username:
# blurt = BC.BlurtChain(username)
# reward_data = username + '_reward_' + str(duration)
# print(reward_data)
# if duration == 30:
# key_data = blurt.get_key_data_fb("reward_summary", reward_data)
# data = key_data.val()
# print("key_data ", data)
# if data:
# session[reward_data] = data
# blurt.remove_key_data_fb("reward_summary", reward_data)
# # check session reward_data
# if session.get(reward_data):
# data = session[reward_data]
# print("SESSION_REWARD_DATA", duration, data)
# else:
# data = blurt.get_reward_summary(duration, option=option)
# session[reward_data] = data
# print("GET_REWARD_SUMMARY", duration, data)
# return jsonify(data)
# @app.route('/api/blurt/author_reward/<username>/<int:duration>')
# @app.route('/api/blurt/author_reward/<username>/<int:duration>/')
# def blurt_author(username=None, duration=1):
# data = None
# if username:
# blurt = BC.BlurtChain(username)
# # check session reward_data
# reward_data = username + '_author_reward_' + str(duration)
# if session.get(reward_data):
# data = session[reward_data]
# else:
# data = blurt.get_author_reward(duration)
# if data != "0.0":
# session[reward_data] = data
# return jsonify(data)
# @app.route('/api/blurt/curation_reward/<username>/<int:duration>')
# @app.route('/api/blurt/curation_reward/<username>/<int:duration>/')
# def blurt_curation(username=None, duration=1):
# data = None
# if username:
# blurt = BC.BlurtChain(username)
# # check session reward_data
# reward_data = username + '_curation_reward_' + str(duration)
# if session.get(reward_data):
# data = session[reward_data]
# else:
# data = blurt.get_curation_reward(duration)
# if data != "0.0":
# session[reward_data] = data
# return jsonify(data)
# @app.route('/api/blurt/producer_reward/<username>/<int:duration>')
# @app.route('/api/blurt/producer_reward/<username>/<int:duration>/')
# def blurt_producer(username=None, duration=1):
# data = None
# if username:
# blurt = BC.BlurtChain(username)
# # check session reward_data
# reward_data = username + '_producer_reward_' + str(duration)
# if session.get(reward_data):
# data = session[reward_data]
# else:
# data = blurt.get_producer_reward(duration)
# if data != "0.0":
# session[reward_data] = data
# return jsonify(data)
if __name__ == "__main__":
app.run()
|
StarcoderdataPython
|
3555563
|
<reponame>sjyttkl/query_completion
"""
This script is used to create a summary report of key metrics for each
experiment.
"""
import glob
import json
import numpy as np
import os
import pandas
import re
import code
regex_eval = re.compile(r"'(\w*)': '?([^,]+)'?[,}]")
def FastLoadDynamic(filename):
rows = []
with open(filename, 'r') as f:
for line in f:
matches = regex_eval.finditer(line)
d = dict([m.groups() for m in matches])
if len(d) > 0:
rows.append(d)
dynamic_df = pandas.DataFrame(rows)
if len(dynamic_df) > 0:
dynamic_df['cost'] = dynamic_df.cost.astype(float)
dynamic_df['length'] = dynamic_df['length'].astype(float)
if 'score' in dynamic_df.columns:
dynamic_df['score'] = dynamic_df['score'].astype(float)
return dynamic_df
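# Illustrative note (assumption about the file format): a dynamic.txt line such as
#   {'cost': 1.23, 'length': 4, 'score': 0.5}
# is picked apart by regex_eval one key/value pair at a time, so FastLoadDynamic
# returns a DataFrame with float-typed 'cost', 'length' and (if present) 'score'
# columns, e.g.:
#   example_df = FastLoadDynamic('/tmp/dynamic_example.txt')
#   example_df.cost.mean()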
results = []
for dirname in glob.glob('/s0/ajaech/aolexps/c*'):
if os.path.isfile(dirname):
continue
# load the params file
params_filename = os.path.join(dirname, 'params.json')
if not os.path.exists(params_filename):
continue # this is just an empty directory
with open(params_filename, 'r') as g:
params = json.load(g)
params['dir'] = dirname
model_filename = os.path.join(dirname, 'model.bin.index')
if os.path.exists(model_filename):
modtime = os.path.getmtime(model_filename)
params['finish_time'] = modtime
def GetPPL(name):
filename = os.path.join(dirname, name)
if os.path.exists(filename):
with open(filename, 'r') as f:
lines = f.readlines()
if len(lines):
fields = lines[-1].split()
if len(fields):
try:
ppl = float(fields[-1])
return ppl
except:
return None
return None
ppl = GetPPL('ppl.txt')
print(ppl, dirname)
params['ppl'] = ppl
ppl = GetPPL('pplfinal.txt')
params['pplfinal'] = ppl
rank = GetPPL('rank2.txt')
params['qrank'] = rank
print(rank)
filename = os.path.join(dirname, 'dynamic.txt')
if os.path.exists(filename):
dyn = FastLoadDynamic(filename)
if len(dyn) > 0:
if 'score' in dyn.columns:
z = np.array(dyn.score.values)
z[z < 0.1] = 0.0 # crop it at ten
params['test_mrr'] = 1.0 / max(0.00001, np.mean(dyn.score))
test_ppl = np.exp((dyn.cost * dyn.length).sum() / dyn.length.sum())
params['test_ppl'] = test_ppl
results.append(params)
df = pandas.DataFrame(results)
if 'acc' in df.columns:
df = df.sort_values('acc')
else:
df = df.sort_values('ppl')
# delete boring columns
for column in df.columns:
if df[column].dtype == list:
continue
if len(df[column].unique()) == 1:
del df[column]
df.sort_values('qrank').to_csv('results.csv', index=False, sep='\t')
|
StarcoderdataPython
|
383001
|
<reponame>zorua98741/PS4-Rich-Presence-for-Discord
from ftplib import FTP # used to establish connection between PC and PS4
from ftplib import error_temp # used for error 421 too many connections (when user connects to FTP server)
from os import path # used to test if external config file exists
from pypresence import Presence # used for sending data to Discord developer application
from pypresence import InvalidPipe # used for handling discord not found on system errors
from pypresence import InvalidID
from time import sleep # used for delaying certain functions
from re import search # used for regular expressions (finding substrings in data)
from time import time # used for time elapsed functionality
from hashlib import sha1 # used for getting tmdb hash
import hmac # used for getting tmdb hash
from requests import get # used for taking tmdb url and getting gameName and image
from bs4 import BeautifulSoup # used for fixing formatting of tmdb output
class ExternalFile(object): # perform all external file operations (get, normalise, separate)
def __init__(self):
self.data = [] # holds all external config values
self.section = [] # holds where different sections in external file are
self.s1configVariables = [] # holds config variables (section 1)
self.s2appIDVariables = [] # holds Discord dev app ID variables (section 2)
self.s2titleIDVariables = [] # holds titleID variables (section 2)
self.s3titleIDVariables = [] # holds titleID variables (section 3)
self.s3gameNameVariables = [] # holds game names (section 3)
self.s3imageVariables = [] # holds game images (section 3)
def getData(self): # load external text file and get values for persistent variables
try:
file = open("PS4RPDconfig.txt", "r") # open file read-only
lines = file.readlines() # create list, each item is 1 line from external file
file.close()
for i in range(len(lines)): # loop for number of items in variable
self.data.append(lines[i]) # make each line a new item in list
del lines # no longer needed
self.normaliseData() # remove unneeded formatting from data
prepWork.ip = self.s1configVariables[0] # set ip here since s1configVariables could be not used in isPS4()
prepWork.isPS4() # file has been successfully read, check if IP address belongs to PS4
except FileNotFoundError: # external config file does not exist, most likely first run of program
print("config file not found\n")
prepWork.getIP() # call PrepWork classes getIP() function
def normaliseData(self):
self.section = [] # ! reset because getNewData() will call this, needs to be revisited
for i in range(len(self.data)):
self.data[i] = self.data[i].rstrip("\n") # remove "\n" if present on every line
try:
self.data[i] = self.data[i].split(": ", 1) # split into [0]: [1] (specify to split only once)
self.data[i] = self.data[i][1] # makes data[i] the value, instead of "info: value"
except IndexError:
self.data[i] = self.data[i][0] # makes external config file more forgiving of format
while True: # has to be after removing "\n" for some reason, runs until "break is reached"
try:
self.data.remove('') # removes empty lines
except ValueError:
break
# for i in range(len(self.data)): # DEBUGGING
# print(self.data[i])
# print("\n") # DEBUGGING
for i in range(len(self.data)): # create list holding where different sections of data begin
if '=' in self.data[i]:
self.section.append(i)
self.variables()
self.devApps()
self.previouslyMapped()
def variables(self): # separate persistent variables from config file
self.s1configVariables = [] # ! reset because getNewData() will call this, needs to be revisited
for i in range(self.section[0], self.section[1]-1): # uses section identifiers for flexibility
self.s1configVariables.append(self.data[i+1]) # add value to list
if int(self.s1configVariables[2]) < 15: # minimum value of 15 seconds for refresh time
self.s1configVariables[2] = 15
# print("variables: ", self.s1configVariables) # DEBUGGING
def devApps(self): # separate titleID-appID from config file
self.s2appIDVariables = [] # ! reset because getNewData() will call this, needs to be revisited
self.s2titleIDVariables = [] # ! reset because getNewData() will call this, needs to be revisited
for i in range(self.section[1], self.section[2]-1):
if i % 2 == 1:
self.s2appIDVariables.append(self.data[i+1])
else:
self.s2titleIDVariables.append(self.data[i+1])
# print("devApps: ", self.s2appIDVariables, self.s2titleIDVariables) # DEBUGGING
def previouslyMapped(self): # separate previously mapped titleIDs from config file
self.s3titleIDVariables = [] # ! reset because getNewData() will call this, needs to be revisited
self.s3gameNameVariables = [] # ! reset because getNewData() will call this, needs to be revisited
self.s3imageVariables = [] # ! reset because getNewData() will call this, needs to be revisited
for i in range(self.section[2]+1, len(self.data)):
line = i # relevant line in data
i = i - self.section[2]-1 # since self.section[2] is variable, range will change and make modulus operations wrong, fix by bringing "i" back to 0
if i % 3 == 0:
self.s3titleIDVariables.append(self.data[line])
if i % 3 == 1:
self.s3gameNameVariables.append(self.data[line])
if i % 3 == 2:
self.s3imageVariables.append(self.data[line])
# self.previouslyMappedVariables.append(self.data[i])
# print("previouslyMapped: ", self.s3titleIDVariables, self.s3gameNameVariables, self.s3imageVariables) # DEBUGGING
def saveData(self): # creates and adds default data to external file
file = open("PS4RPDconfig.txt", "w+")
file.write("==========Persistent Variables==========")
file.write("\nIP: " + str(prepWork.ip))
file.write("\nID: " + "858345055966461973")
file.write("\nRefresh time(seconds): " + "120")
file.write("\nReset time elapsed on game change: " + "True")
file.write("\n")
file.write("\n==========Developer Application-to-title IDs==========")
file.write("\n")
file.write("\n==========Previously Resolved Games==========")
file.write("\n")
file.close()
self.getNewData()
def updateIP(self):
file = open("PS4RPDconfig.txt", "r") # open file in "read-only" mode
lines = file.readlines() # read in all lines from external file
lines[1] = "IP: " + str(prepWork.ip) + "\n" # update the "IP" variable with newly acquired
file = open("PS4RPDconfig.txt", "w") # open file in "write" mode
file.writelines(lines) # write all lines back into external file
file.close() # close the file
self.s1configVariables[0] = prepWork.ip # fixes old IP still being used after update
def addMappedGame(self): # adds titleID, game name, and image to end of external file
file = open("PS4RPDconfig.txt", "a") # open file in "append" mode
file.write("\ntitleID: " + gatherDetails.titleID)
file.write("\ngameName: " + gatherDetails.gameName)
file.write("\nimage: " + gatherDetails.gameImage)
file.write("\n")
file.close()
def getNewData(self): # updates data[] and also the three section lists
self.data = [] # reset list
file = open("PS4RPDconfig.txt", "r") # open file read-only
lines = file.readlines() # create list, each item is 1 line from external file
file.close()
for i in range(len(lines)): # loop for number of items in variable
self.data.append(lines[i]) # make each line a new item in list
del lines # no longer needed
self.normaliseData() # remove unneeded formatting from data
class PrepWork(object):
def __init__(self):
self.ip = None
self.ftp = FTP()
self.RPC = None
def getIP(self):
self.ip = input("Please enter the PS4's IP address: ")
self.isPS4()
def isPS4(self):
try:
self.ftp.connect(self.ip, 2121) # connect to FTP server on given IP address
self.ftp.login("", "") # login to FTP server
self.ftp.cwd("/mnt/sandbox") # change directory to one known to exist on PS4, but unlikely on other servers
self.ftp.quit() # if the code reaches here then the IP given definitely belongs to a PS4, close connection
if path.isfile('./PS4RPDconfig.txt') is False: # if the file does NOT exist, then it must be made with newly acquired PS4 IP address
externalFile.saveData()
else: # if it does exist, then only update the "IP" variable
externalFile.updateIP()
except Exception as e:
print("No FTP server found on ", self.ip, "error: ", e)
self.getIP() # no FTP server on input IP address, ask user for another IP
def findDiscord(self):
self.RPC = Presence(externalFile.s1configVariables[1]) # create pypresence class
try:
self.RPC.connect() # attempts to connect to open discord client on computer
print("findDiscord(): found")
except InvalidPipe:
print("findDiscord(): !not found!")
sleep(15) # sleep program for 15 seconds
self.findDiscord() # call findDiscord() until it is found open
def findPS4(self):
try:
self.ftp.connect(externalFile.s1configVariables[0], 2121) # connect to PS4's FTP server, port must be 2121
self.ftp.login("", "") # no default username or password
self.ftp.quit() # close FTP session
self.RPC.connect()
except (ConnectionRefusedError, TimeoutError, error_temp): # ConnectionRefused when PS4 on, but FTP server off, Timeout when PS4 off
print("findPS4(): !PS4 not found! Waiting 60 seconds and retrying")
sleep(60) # sleep program for 60 seconds
self.findPS4() # call findPS4() until it is found with FTP server enabled
class GatherDetails(object):
def __init__(self):
self.ftp = FTP()
self.titleID = None
self.gameType = None
self.PS1PS2gameIDs = ["SLPS", "SCAJ", "SLKA", "SLPM", "SCPS", "CF00", "SCKA", "ALCH", "CPCS", "SLAJ", "KOEI",
"ARZE", "TCPS", "SCCS", "PAPX", "SRPM", "GUST", "WLFD", "ULKS", "VUGJ", "HAKU", "ROSE",
"CZP2", "ARP2", "PKP2", "SLPN", "NMP2", "MTP2", "SCPM",
"SLUS", "SCUS", "PBPX",
"SLES", "SCES", "SCED"] # incomplete list of gameIDs for PS1 and PS2 games
self.tmdbKey = bytearray.fromhex('F5DE66D2680E255B2DF79E74F890EBF349262F618BCAE2A9ACCDEE5156CE8DF2CDF2D48C71173CDC2594465B87405D197CF1AED3B7E9671EEB56CA6753C2E6B0')
self.gameName = None
self.gameImage = None
self.appChanged = False
self.found = False
def getTitleID(self):
self.titleID = None # ! bandaid fix ! fixes crash of going from game to main menu
data = [] # variable to hold folders in PS4 folder
gameTypeFound = False
try:
self.ftp.connect(externalFile.s1configVariables[0], 2121) # connect to PS4's FTP server, post must be 2121
self.ftp.login() # no default username or password
self.ftp.cwd("/mnt/sandbox") # change active directory
self.ftp.dir(data.append) # get directory listing and add each item to to list with formatting similar to "ls -l"
self.ftp.quit() # close FTP connection
for i in range(len(data)):
if search('(?!NPXS)([a-zA-Z0-9]{4}[0-9]{5})', data[i]) is not None: # annoying that regex has to be done twice
self.titleID = search('(?!NPXS)([a-zA-Z0-9]{4}[0-9]{5})', data[i])
if self.titleID is not None:
self.titleID = self.titleID.group(0) # remove <re.Match object> etc> junk
if "CUSA" in self.titleID: # must be a PS4 game to be true
self.gameType = "PS4"
gameTypeFound = True
else:
for i in range(len(self.PS1PS2gameIDs)):
if self.PS1PS2gameIDs[i] in self.titleID: # must be a PS1/PS2 game
self.gameType = "PS1/PS2"
gameTypeFound = True
if gameTypeFound is False:
self.gameType = "Homebrew"
print("getTitleID(): ", self.titleID)
except (ConnectionRefusedError, TimeoutError, error_temp): # ConnectionRefused for PS4 on FTP server off, Timeout for PS4 off
prepWork.RPC.clear()
prepWork.findPS4() # call PrepWork's findPS4() function
def checkMappedGames(self):
found = False
if not externalFile.s3titleIDVariables:
print("checkMappedGames(): !list is empty!")
self.getGameInfo()
found = True # not actually found, but stops from running getGameInfo() twice
if self.titleID is not None:
for i in range(len(externalFile.s3titleIDVariables)):
if self.titleID == externalFile.s3titleIDVariables[i]: # check if titleID is in external file
found = True
self.gameName = externalFile.s3gameNameVariables[i]
self.gameImage = externalFile.s3imageVariables[i]
if found is not True:
print("checkMappedGames(): !game is not mapped!")
self.getGameInfo()
else:
print("checkMappedGames(): ", self.titleID, " : ", self.gameName, " : ", self.gameImage)
def getGameInfo(self): # ! SHOULD BE REWRITTEN INTO MULTIPLE FUNCTION !
if self.titleID is not None:
if self.gameType == "PS4":
modifiedTitleID = self.titleID + "_00" # tmdb titleID's add "_00" to the end for whatever reason
Hash = hmac.new(self.tmdbKey, bytes(modifiedTitleID, 'utf-8'), sha1) # get hash of tmdb key using sha1 encryption
Hash = Hash.hexdigest().upper()
url = "http://tmdb.np.dl.playstation.net/tmdb2/" + modifiedTitleID + "_" + Hash + "/" + modifiedTitleID + ".json" # url containing game name and image
response = get(url, headers={"User-Agent": "Mozilla/5.0"}) # get HTML of website
soup = BeautifulSoup(response.text, "html.parser") # use bs4 to make data readable (fix odd formatting)
try:
self.gameName = search('{"name\":\"(.*?)"', str(soup)) # get gameName from html
self.gameName = self.gameName.group(1) # remove regex junk
self.gameImage = search('{"icon":"(.*?)"', str(soup)) # get gameImage from html
self.gameImage = self.gameImage.group(1) # remove regex junk
externalFile.addMappedGame()
except AttributeError: # not all PS4 games have a tmdb page for some reason
print("getGameInfo(): !no game found!")
self.gameName = "Unknown"
self.gameImage = "none"
if self.gameType == "Homebrew" and self.titleID is not None:
self.gameName = "Homebrew" # unfortunately no way found to resolve homebrew ID to a name
self.gameImage = "none"
externalFile.addMappedGame()
if self.gameType == "PS1/PS2":
self.gameImage = "ps2ps1temp" # PS1 and PS2 games use shared cover unless otherwise specified
try:
quote_page = "https://raw.githubusercontent.com/zorua98741/PS4-Rich-Presence-for-Discord/main/PS1%20games.md" # url to github page containing list of PS1 game id's and the corresponding game name
response = get(quote_page, headers={"User-Agent": "Mozilla/5.0"}) # get HTML of page
soup = BeautifulSoup(response.text, "html.parser") # make HTML formatted correctly
self.gameName = search(self.titleID + '.*', str(soup)) # search for the open game's titleID in HTML document
if self.gameName is not None: # if its found remove formatting
self.gameName = self.gameName.group(0)
self.gameName = self.gameName.split(';')
                        self.gameName = self.gameName[1]  # keep only the game name portion after the ';' separator
else: # if its not found perhaps open game is a PS2 game
quote_page = "https://raw.githubusercontent.com/zorua98741/PS4-Rich-Presence-for-Discord/main/PS2%20games.md" # url to github page containing list of PS2 game id's and the corresponding game name
response = get(quote_page, headers={"User-Agent": "Mozilla/5.0"})
soup = BeautifulSoup(response.text, "html.parser")
self.gameName = search(self.titleID + '.*', str(soup))
if self.gameName is not None:
self.gameName = self.gameName.group(0)
self.gameName = self.gameName.split(';')
self.gameName = self.gameName[1]
except Exception as e: # if not found then game may be missing from list, or the github page is unavailable
print("Error: ", e, "\n")
self.gameName = "Unknown PS1/PS2 game"
externalFile.addMappedGame()
else:
self.gameName = "Playstation 4 Menu"
self.gameImage = "none"
print("getGameInfo(): ", self.gameName, " : ", self.gameImage)
def changeDevApp(self): # needs to be revised
for i in range(len(externalFile.s2titleIDVariables)):
if gatherDetails.titleID == externalFile.s2titleIDVariables[i]:
print("Developer Application found, modifying presence")
prepWork.RPC.close()
prepWork.RPC = Presence(externalFile.s2appIDVariables[i])
prepWork.RPC.connect()
self.appChanged = True
self.found = True
break
else:
self.found = False
if self.appChanged is True and self.found is False:
self.appChanged = False
self.found = True
print("Changing to default Application ID in config file")
prepWork.RPC.close()
prepWork.RPC = Presence(externalFile.s1configVariables[1])
prepWork.RPC.connect()
allowed = ["True", "true"]
externalFile = ExternalFile()
prepWork = PrepWork()
gatherDetails = GatherDetails()
externalFile.getData() # get data from external text file or create it, and verify it belongs to PS4
print("\n")
prepWork.findDiscord() # ensure discord is open
previousTitleID = ""
timer = time() # start timer for time elapsed functionality
while True:
gatherDetails.getTitleID() # get game's titleID from PS4 via FTP
if gatherDetails.titleID != previousTitleID: # used so webpage does not need to be contacted if the details will be the same
previousTitleID = gatherDetails.titleID # update previously opened game
gatherDetails.checkMappedGames()
externalFile.getNewData() # method to get new data should be revisited
gatherDetails.changeDevApp()
if externalFile.s1configVariables[3] in allowed:
timer = time()
else:
print("prevGetGameInfo(): ", gatherDetails.gameName, " : ", gatherDetails.gameImage)
try:
prepWork.RPC.update(details=gatherDetails.gameName, large_image=gatherDetails.gameImage, large_text=gatherDetails.titleID, start=timer)
except(InvalidPipe, InvalidID):
prepWork.findDiscord()
print("\n")
sleep(int(externalFile.s1configVariables[2]))
|
StarcoderdataPython
|
150026
|
from metrics import calc_stability, full_wer, latency
import sys
import json
import re
# Using the given swb code - all relevant files are gathered.
code = sys.argv[1]
prep = "sw0" + code + "-mono"
incsv = "./results/msoft/universal/msoft-" + prep + ".csv"
intext = "./results/msoft/system-trans-text/msoft-" + prep + "-trans.txt"
gold_text = "./results/gold-trans-text/" + code + "-full-joined-transcript.txt"
gold_original = "./results/gold-timings/" + code + "-full-transcript.json"
# Now we have all the files, the final metrics are calculated.
# Stability
stab_file = "./results/msoft/stability/" + prep + ".json"
stability = calc_stability(incsv)
with open(stab_file, 'w') as outfile:
json.dump(stability, outfile)
# WER
full_wer_file = "./results/msoft/full-wer/" + prep + ".txt"
word_error_rate = full_wer(gold_text, intext)
with open(full_wer_file, 'w') as wer_out:
wer_out.write(str(word_error_rate))
# Latencies
fo_latency_file = "./results/msoft/latency/fo/" + prep + ".json"
fd_latency_file = "./results/msoft/latency/fd/" + prep + ".json"
fo_latencies, fd_latencies = latency(intext, incsv, gold_original, 'microsoft')
with open(fo_latency_file, 'w') as fo_out:
json.dump(fo_latencies, fo_out)
with open(fd_latency_file, 'w') as fd_out:
json.dump(fd_latencies, fd_out)
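
# Example invocation (sketch; assumes the results/ directory layout referenced above
# already exists and that this file is run directly, with the swb code as the only
# command-line argument):
#   python <this_script>.py 4077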
|
StarcoderdataPython
|
9798314
|
<reponame>agilentia/gargoyle<gh_stars>1-10
# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
DEBUG = True
SECRET_KEY = 'NOTASECRET'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dev-database.sqlite3',
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
ALLOWED_HOSTS = []
INSTALLED_APPS = (
'testapp',
'gargoyle',
'nexus',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug': True,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
STATIC_URL = '/static/'
GARGOYLE_SWITCH_DEFAULTS = {
'active_by_default': {
'is_active': True,
'label': 'Default Active',
'description': 'When you want the newness',
},
'inactive_by_default': {
'is_active': False,
'label': 'Default Inactive',
'description': 'Controls the funkiness.',
},
}
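
# A minimal sketch (not part of these settings) of how the switch defaults above are
# typically consulted at runtime, assuming the gargoyle client API:
#
#   from gargoyle import gargoyle
#
#   def my_view(request):
#       if gargoyle.is_active('active_by_default', request):
#           ...  # behaviour gated behind the "Default Active" switch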
|
StarcoderdataPython
|
3405606
|
from django.urls import path
from . import views
urlpatterns = [
path('',views.allblogs, name='blogs'),
path('<int:blog_id>/',views.detail_blog, name='detail')
]
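
# A minimal sketch of the two views referenced above (illustrative only: the real
# implementations live in the app's views.py, and the Blog model / template names
# below are assumptions):
#
# from django.shortcuts import render, get_object_or_404
# from .models import Blog
#
# def allblogs(request):
#     return render(request, 'blog/allblogs.html', {'blogs': Blog.objects.all()})
#
# def detail_blog(request, blog_id):
#     blog = get_object_or_404(Blog, pk=blog_id)
#     return render(request, 'blog/detail.html', {'blog': blog})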
|
StarcoderdataPython
|
6612973
|
/home/runner/.cache/pip/pool/21/ba/d4/9081c03433cfa7a8c6f9469405b08168172c6eff9f6aae0bf3ab9ee7fb
|
StarcoderdataPython
|