ext | sha | content
---|---|---|
py | 1a466e027b10303286b1fdeeb226de1b78bfc9f0 | # kontonr.py - functions for handling Norwegian bank account numbers
# coding: utf-8
#
# Copyright (C) 2018 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Konto nr. (Norwegian bank account number)
Konto nr. is the country-specific part in Norwegian IBAN codes. The number
consists of 11 digits, of which the first 4 are the bank identifier and the
last is a check digit. This module does not check if the bank identifier exists.
More information:
* https://www.ecbs.org/iban/norway-bank-account-number.html
>>> validate('8601 11 17947')
'86011117947'
>>> validate('0000.4090403') # postgiro bank code
'4090403'
>>> validate('8601 11 17949') # invalid check digits
Traceback (most recent call last):
...
InvalidChecksum: ...
>>> format('86011117947')
'8601.11.17947'
>>> to_iban('8601 11 17947')
'NO93 8601 11 17947'
"""
from stdnum import luhn
from stdnum.exceptions import *
from stdnum.util import clean, isdigits
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any valid separators and removes surrounding whitespace."""
number = clean(number, ' .-').strip()
if number.startswith('0000'):
number = number[4:] # strip leading 0000 postgiro bank code
return number
def _calc_check_digit(number):
"""Calculate the check digit for the 11-digit number."""
weights = (6, 7, 8, 9, 4, 5, 6, 7, 8, 9)
return str(sum(w * int(n) for w, n in zip(weights, number)) % 11)
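# Illustrative check of the weighting above (comment only, not library code):
# for the doctest number '86011117947' the first ten digits give
#   8*6 + 6*7 + 0*8 + 1*9 + 1*4 + 1*5 + 1*6 + 7*7 + 9*8 + 4*9 = 271
# and 271 % 11 == 7, which matches the trailing check digit.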
def validate(number):
"""Check if the number provided is a valid bank account number."""
number = compact(number)
if not isdigits(number):
raise InvalidFormat()
if len(number) == 7:
luhn.validate(number)
elif len(number) == 11:
if _calc_check_digit(number) != number[-1]:
raise InvalidChecksum()
else:
raise InvalidLength()
return number
def is_valid(number):
"""Check if the number provided is a valid bank account number."""
try:
return bool(validate(number))
except ValidationError:
return False
def to_iban(number):
"""Convert the number to an IBAN."""
from stdnum import iban
separator = ' ' if ' ' in number else ''
return separator.join((
'NO' + iban.calc_check_digits('NO00' + number),
number))
def format(number):
"""Reformat the number to the standard presentation format."""
number = compact(number)
number = (11 - len(number)) * '0' + number
return '.'.join([
number[:4],
number[4:6],
number[6:],
])
|
py | 1a466e0fa530891508e19033958fd0617c8a96f8 | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import datetime, time, re, csv, sys
import config
RE_REMOVE_HTML = re.compile('<.+?>')
SLEEP_SECONDS = 3
class TuroHostAssistant:
def __init__(self):
self.driver = webdriver.Chrome(config.driverLocation)
self.driver.set_page_load_timeout(30)
def assist(self, outfile):
self.login()
time.sleep(SLEEP_SECONDS)
trips = self.get_trips()
self.write(trips, outfile)
self.stop()
def login(self):
self.driver.get('https://turo.com/login')
WebDriverWait(self.driver, 10).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[1]/div[3]/div[1]/div/div/div/div/div[1]/div/iframe")))
iframe = self.driver.find_element_by_xpath("/html/body/div[1]/div[3]/div[1]/div/div/div/div/div[1]/div/iframe")
self.driver.switch_to.frame(iframe)
WebDriverWait(self.driver, 10).until(EC.element_to_be_clickable((By.XPATH, "//input[@id='email']"))).send_keys(config.TURO_USERNAME)
WebDriverWait(self.driver, 10).until(EC.element_to_be_clickable((By.XPATH, "//input[@id='password']"))).send_keys(config.TURO_PASSWORD)
WebDriverWait(self.driver, 10).until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[1]/form/button"))).click()
def write(self, rows, out):
        print('Writing to out file', out)
        rows = [x for x in rows if x is not None]
with open(out, 'w') as f:
w = csv.DictWriter(f,fieldnames = config.fieldnames, delimiter=',')
w.writeheader()
w.writerows(rows)
def stop(self):
self.driver.close()
def get_datetime(self, raw_string):
# Remove the header text
cleaned_str = re.sub('.*\n', '',
raw_string, count = 1)
return datetime.datetime.strptime(cleaned_str, '%a, %b %d, %Y\n%I:%M %p')
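    # Illustrative example (the header text and date below are assumptions, not
    # real Turo data): for raw_string = 'PICKUP\nMon, Jan 7, 2019\n10:00 AM'
    # the first line is stripped and the remainder parses to
    # datetime.datetime(2019, 1, 7, 10, 0).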
def get_trip(self, reservation_url):
        print('Getting trip', reservation_url)
self.driver.get(reservation_url + '/receipt/')
anyerror = self.driver.find_elements(By.CLASS_NAME, 'error-page')
if anyerror:
return {}
pickup, dropoff = [self.get_datetime(x.text) for x in self.driver.find_elements_by_class_name('receiptSchedule')]
line_items = self.driver.find_elements_by_class_name('line-item')
results = {'URL': reservation_url,
'PICKUP': pickup,
'DROPOFF': dropoff}
for item in line_items:
name = item.find_element_by_class_name('label').text
if name == 'YOU PAID': # Ignore trips where I didn't host
return None
value = item.find_element_by_class_name('value').text
if name != 'GUEST':
value = float(re.search('[\d|\.]+', value).group())
if 'additional miles' in name.lower():
name = 'ADDITIONAL MILES DRIVEN'
elif 'tolls' in name.lower():
name = 'TOLLS'
elif 'total miles' in name.lower():
name = 'TOTAL MILES'
elif 'cleaning' in name.lower():
name = 'CLEANING'
elif 'discount' in name.lower():
name = 'DISCOUNT'
elif 'gas' in name.lower():
name = 'GAS'
elif 'smoking' in name.lower():
name = 'SMOKING'
results[name] = value
return results
def get_trips(self, page_slug = None):
with open('trips.txt') as f:
all_trips = f.readlines()
trip_links = set(all_trips)
        print('Trip Links', trip_links)
        trip_details = [self.get_trip(trip_link) for trip_link in trip_links]
        print(trip_details)
return trip_details
if __name__ == '__main__':
outfile = 'output.csv'
if len(sys.argv) > 1:
outfile = sys.argv[1]
assistant = TuroHostAssistant()
assistant.assist(outfile)
|
py | 1a466e103a151f44975696e84032187a5f1afcf2 | from __future__ import absolute_import
from compressor.filters import CallbackOutputFilter
class rJSMinFilter(CallbackOutputFilter):
callback = "rjsmin.jsmin"
dependencies = ["rjsmin"]
kwargs = {
"keep_bang_comments": True
}
# This is for backwards compatibility
JSMinFilter = rJSMinFilter
class SlimItFilter(CallbackOutputFilter):
dependencies = ["slimit"]
callback = "slimit.minify"
kwargs = {
"mangle": True,
}
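# Illustrative configuration sketch (comment only; the setting names below
# reflect common django-compressor usage and are assumptions, not part of
# this module): a filter class such as rJSMinFilter is typically enabled via
#   COMPRESS_FILTERS = {"js": ["compressor.filters.jsmin.rJSMinFilter"]}
# on recent releases, or via COMPRESS_JS_FILTERS = [...] on older ones.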
|
py | 1a466ea78b454f6c7d1d215a14d782bad6706b11 | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
import frappe
from frappe import _
from frappe.desk.doctype.notification_settings.notification_settings import (
is_email_notifications_enabled_for_type,
is_notifications_enabled,
set_seen_value,
)
from frappe.model.document import Document
class NotificationLog(Document):
def after_insert(self):
frappe.publish_realtime("notification", after_commit=True, user=self.for_user)
set_notifications_as_unseen(self.for_user)
if is_email_notifications_enabled_for_type(self.for_user, self.type):
try:
send_notification_email(self)
except frappe.OutgoingEmailError:
frappe.log_error(message=frappe.get_traceback(), title=_("Failed to send notification email"))
def get_permission_query_conditions(for_user):
if not for_user:
for_user = frappe.session.user
if for_user == "Administrator":
return
return """(`tabNotification Log`.for_user = '{user}')""".format(user=for_user)
def get_title(doctype, docname, title_field=None):
if not title_field:
title_field = frappe.get_meta(doctype).get_title_field()
title = docname if title_field == "name" else frappe.db.get_value(doctype, docname, title_field)
return title
def get_title_html(title):
return '<b class="subject-title">{0}</b>'.format(title)
def enqueue_create_notification(users, doc):
"""
During installation of new site, enqueue_create_notification tries to connect to Redis.
This breaks new site creation if Redis server is not running.
We do not need any notifications in fresh installation
"""
if frappe.flags.in_install:
return
doc = frappe._dict(doc)
if isinstance(users, str):
users = [user.strip() for user in users.split(",") if user.strip()]
users = list(set(users))
frappe.enqueue(
"frappe.desk.doctype.notification_log.notification_log.make_notification_logs",
doc=doc,
users=users,
now=frappe.flags.in_test,
)
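# Illustrative call (comment only; the users and field values are placeholders,
# but the field names match those read from `doc` elsewhere in this module):
#   enqueue_create_notification(
#       "jane@example.com, joe@example.com",
#       {"type": "Alert", "document_type": "ToDo", "document_name": "TD-0001",
#        "subject": "New ToDo assigned to you", "from_user": "admin@example.com"},
#   )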
def make_notification_logs(doc, users):
from frappe.social.doctype.energy_point_settings.energy_point_settings import (
is_energy_point_enabled,
)
for user in users:
if frappe.db.exists("User", {"email": user, "enabled": 1}):
if is_notifications_enabled(user):
if doc.type == "Energy Point" and not is_energy_point_enabled():
return
_doc = frappe.new_doc("Notification Log")
_doc.update(doc)
_doc.for_user = user
if _doc.for_user != _doc.from_user or doc.type == "Energy Point" or doc.type == "Alert":
_doc.insert(ignore_permissions=True)
def send_notification_email(doc):
if doc.type == "Energy Point" and doc.email_content is None:
return
from frappe.utils import get_url_to_form, strip_html
doc_link = get_url_to_form(doc.document_type, doc.document_name)
header = get_email_header(doc)
email_subject = strip_html(doc.subject)
frappe.sendmail(
recipients=doc.for_user,
subject=email_subject,
template="new_notification",
args={
"body_content": doc.subject,
"description": doc.email_content,
"document_type": doc.document_type,
"document_name": doc.document_name,
"doc_link": doc_link,
},
header=[header, "orange"],
now=frappe.flags.in_test,
)
def get_email_header(doc):
docname = doc.document_name
header_map = {
"Default": _("New Notification"),
"Mention": _("New Mention on {0}").format(docname),
"Assignment": _("Assignment Update on {0}").format(docname),
"Share": _("New Document Shared {0}").format(docname),
"Energy Point": _("Energy Point Update on {0}").format(docname),
}
return header_map[doc.type or "Default"]
@frappe.whitelist()
def mark_all_as_read():
unread_docs_list = frappe.db.get_all(
"Notification Log", filters={"read": 0, "for_user": frappe.session.user}
)
unread_docnames = [doc.name for doc in unread_docs_list]
if unread_docnames:
filters = {"name": ["in", unread_docnames]}
frappe.db.set_value("Notification Log", filters, "read", 1, update_modified=False)
@frappe.whitelist()
def mark_as_read(docname):
if docname:
frappe.db.set_value("Notification Log", docname, "read", 1, update_modified=False)
@frappe.whitelist()
def trigger_indicator_hide():
frappe.publish_realtime("indicator_hide", user=frappe.session.user)
def set_notifications_as_unseen(user):
try:
frappe.db.set_value("Notification Settings", user, "seen", 0)
except frappe.DoesNotExistError:
return
|
py | 1a466fad8bbf3024ef51c6243e99373c84183a72 | r"""
Query Builder Datalog
=====================
Complements QueryBuilderBase with query capabilities,
as well as Region and Neurosynth capabilities
"""
from collections import defaultdict
from typing import (
AbstractSet,
Dict,
Iterable,
List,
Optional,
Tuple,
Type,
Union,
)
from uuid import uuid1
from .. import datalog
from .. import expressions as ir
from ..datalog import aggregation
from ..datalog.constraints_representation import RightImplication
from ..datalog.expression_processing import (
TranslateToDatalogSemantics,
reachable_code,
)
from ..type_system import Unknown
from ..utils import NamedRelationalAlgebraFrozenSet, RelationalAlgebraFrozenSet
from .datalog.standard_syntax import parser as datalog_parser
from .datalog.natural_syntax import parser as nat_datalog_parser
from .query_resolution import NeuroSynthMixin, QueryBuilderBase, RegionMixin
from ..datalog import DatalogProgram
from . import query_resolution_expressions as fe
__all__ = ["QueryBuilderDatalog"]
class QueryBuilderDatalog(RegionMixin, NeuroSynthMixin, QueryBuilderBase):
"""
Complements QueryBuilderBase with query capabilities,
as well as Region and Neurosynth capabilities
"""
def __init__(
self,
program_ir: DatalogProgram,
chase_class: Type[aggregation.Chase] = aggregation.Chase,
) -> "QueryBuilderDatalog":
"""
Query builder with query, Region, Neurosynth capabilities
Parameters
----------
program_ir : DatalogProgram
Datalog program's intermediate representation,
usually blank
chase_class : Type[aggregation.Chase], optional
used to compute deterministic solutions,
by default aggregation.Chase
Returns
-------
QueryBuilderDatalog
see description
"""
super().__init__(program_ir, logic_programming=True)
self.chase_class = chase_class
self.frontend_translator = fe.TranslateExpressionToFrontEndExpression(
self
)
self.translate_expression_to_datalog = TranslateToDatalogSemantics()
self.datalog_parser = datalog_parser
self.nat_datalog_parser = nat_datalog_parser
@property
def current_program(self) -> List[fe.Expression]:
"""
Returns the list of expressions that have currently been
declared in the program
Returns
-------
List[fe.Expression]
see description
Example
-------
>>> p_ir = DatalogProgram()
>>> nl = QueryBuilderDatalog(program_ir=p_ir)
>>> nl.add_tuple_set([(1, 2), (2, 2)], name="l")
l: typing.AbstractSet[typing.Tuple[int, int]] = [(1, 2), (2, 2)]
>>> with nl.scope as e:
... e.l2[e.x] = e.l[e.x, e.y] & (e.x == e.y)
... cp = nl.current_program
>>> cp
[
l2(x) ← ( l(x, y) ) ∧ ( x eq y )
]
"""
cp = []
for rules in self.program_ir.intensional_database().values():
for rule in rules.formulas:
cp.append(self.frontend_translator.walk(rule))
return cp
def _declare_implication(
self, consequent: fe.Expression, antecedent: fe.Expression
) -> fe.Expression:
"""
Creates an implication of the consequent by the antecedent
and adds the rule to the current program:
consequent <- antecedent
Parameters
----------
consequent : fe.Expression
see description, will be processed to a logic form before
creating the implication rule
antecedent : fe.Expression
see description, will be processed to a logic form before
creating the implication rule
Returns
-------
fe.Expression
see description
Example
-------
>>> p_ir = DatalogProgram()
>>> nl = QueryBuilderDatalog(program_ir=p_ir)
>>> nl.add_tuple_set([(1, 2), (2, 2)], name="l")
l: typing.AbstractSet[typing.Tuple[int, int]] = [(1, 2), (2, 2)]
>>> with nl.scope as e:
        ... nl._declare_implication(e.l2[e.x], e.l[e.x, e.y])
... cp = nl.current_program
>>> cp
[
l2(x) ← l(x, y)
]
"""
consequent = self.translate_expression_to_datalog.walk(
consequent.expression
)
antecedent = self.translate_expression_to_datalog.walk(
antecedent.expression
)
rule = datalog.Implication(consequent, antecedent)
self.program_ir.walk(rule)
return rule
def add_constraint(
self, antecedent: fe.Expression, consequent: fe.Expression
) -> fe.Expression:
"""
        Creates a right implication of the consequent by the antecedent
and adds the rule to the current program:
antecedent -> consequent
Parameters
----------
antecedent : fe.Expression
see description, will be processed to a logic form before
creating the right implication rule
consequent : fe.Expression
see description, will be processed to a logic form before
creating the right implication rule
Returns
-------
fe.Expression
see description
Example
-------
>>> p_ir = DatalogProgram()
>>> nl = QueryBuilderDatalog(program_ir=p_ir)
>>> nl.add_tuple_set([(1, 2), (2, 2)], name="l")
l: typing.AbstractSet[typing.Tuple[int, int]] = [(1, 2), (2, 2)]
>>> with nl.scope as e:
... nl.add_constraint(e.l2[e.x, e.y], e.l2[e.x])
"""
consequent = self.translate_expression_to_datalog.walk(
consequent.expression
)
antecedent = self.translate_expression_to_datalog.walk(
antecedent.expression
)
rule = RightImplication(antecedent, consequent)
self.program_ir.walk(rule)
return rule
def execute_datalog_program(self, code: str) -> None:
"""
Execute a Datalog program in classical syntax
Parameters
----------
code : string
Datalog program.
"""
intermediate_representation = self.datalog_parser(code)
self.program_ir.walk(intermediate_representation)
def execute_nat_datalog_program(self, code: str) -> None:
"""Execute a natural language Datalog program in classical syntax
Parameters
----------
code : string
Datalog program.
"""
intermediate_representation = self.nat_datalog_parser(code)
self.program_ir.walk(intermediate_representation)
def query(
self, *args
) -> Union[bool, RelationalAlgebraFrozenSet, fe.Symbol]:
"""
Performs an inferential query on the database.
There are three modalities
1. If there is only one argument, the query returns `True` or `False`
        depending on whether the query could be inferred.
2. If there are two arguments and the first is a tuple of `fe.Symbol`,
it returns the set of results meeting the query in the second argument.
3. If the first argument is a predicate (e.g. `Q(x)`) it performs the
query, adds it to the engine memory, and returns the
corresponding symbol.
See example for 3 modalities
Returns
-------
Union[bool, RelationalAlgebraFrozenSet, fe.Symbol]
            read the description.
Example
-------
Note: example ran with pandas backend
>>> p_ir = DatalogProgram()
>>> nl = QueryBuilderDatalog(program_ir=p_ir)
>>> nl.add_tuple_set([(1, 2), (2, 2)], name="l")
l: typing.AbstractSet[typing.Tuple[int, int]] = [(1, 2), (2, 2)]
>>> with nl.environment as e:
... e.l2[e.x, e.y] = e.l[e.x, e.y] & (e.x == e.y)
... s1 = nl.query(e.l2[e.x, e.y])
... s2 = nl.query((e.x,), e.l2[e.x, e.y])
... s3 = nl.query(e.l3[e.x], e.l2[e.x, e.y])
>>> s1
True
>>> s2
x
0 2
>>> s3
l3: typing.AbstractSet[typing.Tuple[int]] = [(2,)]
"""
if len(args) == 1:
predicate = args[0]
head = tuple()
elif len(args) == 2:
head, predicate = args
if isinstance(head, fe.Symbol):
head = (head,)
else:
raise ValueError("query takes 1 or 2 arguments")
solution_set, functor_orig = self._execute_query(head, predicate)
if not isinstance(head, tuple):
out_symbol = ir.Symbol[solution_set.type](functor_orig.name)
self.add_tuple_set(solution_set.value, name=functor_orig.name)
return fe.Symbol(self, out_symbol.name)
elif len(head) == 0:
return len(solution_set.value) > 0
else:
return RelationalAlgebraFrozenSet(solution_set.value)
def _execute_query(
self,
head: Union[fe.Symbol, Tuple[fe.Expression, ...]],
predicate: fe.Expression,
) -> Tuple[AbstractSet, Optional[ir.Symbol]]:
"""
        [Internal usage - documentation for developers]
Performs an inferential query. Will return as first output
an AbstractSet with as many elements as solutions of the
predicate query. The AbstractSet's columns correspond to
the expressions in the head.
If head expressions are arguments of a functor, the latter will
be returned as the second output, defaulted as None.
Parameters
----------
head : Union[fe.Symbol, Tuple[fe.Expression, ...]]
see description
predicate : fe.Expression
see description
Returns
-------
Tuple[AbstractSet, Optional[fe.Symbol]]
see description
Examples
--------
Note: example ran with pandas backend
>>> p_ir = DatalogProgram()
>>> nl = QueryBuilderDatalog(program_ir=p_ir)
>>> nl.add_tuple_set([(1, 2), (2, 2)], name="l")
l: typing.AbstractSet[typing.Tuple[int, int]] = [(1, 2), (2, 2)]
>>> with nl.scope as e:
... e.l2[e.x, e.y] = e.l[e.x, e.y] & (e.x == e.y)
... s1 = nl._execute_query(tuple(), e.l2[e.x, e.y])
... s2 = nl._execute_query((e.x,), e.l2[e.x, e.y])
... s3 = nl._execute_query(e.l2[e.x, e.y], e.l2[e.x, e.y])
>>> s1
(
C{
Empty DataFrame
Columns: []
Index: [0]
: typing.AbstractSet
},
None
)
>>> s2
(
C{
x
0 2
: typing.AbstractSet
},
None
)
>>> s3
(
C{
x y
0 2 2
: typing.AbstractSet
},
S{
l2: Unknown
}
)
"""
functor_orig = None
self.program_ir.symbol_table = self.symbol_table.create_scope()
if isinstance(head, fe.Operation):
functor_orig = head.expression.functor
new_head = self.new_symbol()(*head.arguments)
functor = new_head.expression.functor
elif isinstance(head, tuple):
new_head = self.new_symbol()(*head)
functor = new_head.expression.functor
query_expression = self._declare_implication(new_head, predicate)
reachable_rules = reachable_code(query_expression, self.program_ir)
solution = self.chase_class(
self.program_ir, rules=reachable_rules
).build_chase_solution()
solution_set = solution.get(functor.name, ir.Constant(set()))
self.program_ir.symbol_table = self.symbol_table.enclosing_scope
return solution_set, functor_orig
def solve_all(self) -> Dict[str, NamedRelationalAlgebraFrozenSet]:
"""
Returns a dictionary of "predicate_name": "Content"
for all elements in the solution of the Datalog program.
Returns
-------
Dict[str, NamedRelationalAlgebraFrozenSet]
            extensional and intensional facts that have been derived
through the current program
Example
-------
Note: example ran with pandas backend
>>> p_ir = DatalogProgram()
>>> nl = QueryBuilderDatalog(program_ir=p_ir)
>>> nl.add_tuple_set([(1, 2), (2, 2)], name="l")
l: typing.AbstractSet[typing.Tuple[int, int]] = [(1, 2), (2, 2)]
>>> with nl.scope as e:
... e.l2[e.x] = e.l[e.x, e.y] & (e.x == e.y)
... solution = nl.solve_all()
>>> solution
{
'l':
0 1
0 1 2
1 2 2
'l2':
x
0 2
}
"""
solution_ir = self.chase_class(self.program_ir).build_chase_solution()
solution = {}
for k, v in solution_ir.items():
solution[k.name] = NamedRelationalAlgebraFrozenSet(
self.predicate_parameter_names(k.name), v.value.unwrap()
)
solution[k.name].row_type = v.value.row_type
return solution
def reset_program(self) -> None:
"""Clears current symbol table"""
self.symbol_table.clear()
def add_tuple_set(
self, iterable: Iterable, type_: Type = Unknown, name: str = None
) -> fe.Symbol:
"""
Creates an AbstractSet fe.Symbol containing the elements specified in
the iterable with a List[Tuple[Any, ...]] format (see examples).
        Typically used to create extensional facts from existing databases
Parameters
----------
iterable : Iterable
typically a list of tuples of values, other formats will
be interpreted as the latter
type_ : Type, optional
type of elements for the tuples, if not specified
will be inferred from the first element, by default Unknown
name : str, optional
name for the AbstractSet symbol, by default None
Returns
-------
fe.Symbol
see description
Examples
--------
>>> p_ir = DatalogProgram()
>>> nl = QueryBuilderDatalog(program_ir=p_ir)
>>> nl.add_tuple_set([(1, 2), (3, 4)], name="l1")
l1: typing.AbstractSet[typing.Tuple[int, int]] = \
[(1, 2), (3, 4)]
>>> nl.add_tuple_set([[1, 2, 3], (3, 4)], name="l2")
l2: typing.AbstractSet[typing.Tuple[int, int, float]] = \
[(1, 2, 3.0), (3, 4, nan)]
>>> nl.add_tuple_set((1, 2, 3), name="l3")
l3: typing.AbstractSet[typing.Tuple[int]] = \
[(1,), (2,), (3,)]
"""
if name is None:
name = str(uuid1())
if isinstance(type_, tuple):
type_ = Tuple[type_]
symbol = ir.Symbol[AbstractSet[type_]](name)
self.program_ir.add_extensional_predicate_from_tuples(
symbol, iterable, type_=type_
)
return fe.Symbol(self, name)
def predicate_parameter_names(
self, predicate_name: Union[str, fe.Symbol, fe.Expression]
) -> Tuple[str]:
"""
Get the names of the parameters for the given predicate
Parameters
----------
predicate_name : Union[str, fe.Symbol, fe.Expression]
predicate to obtain the names from
Returns
-------
tuple[str]
parameter names
"""
predicate_name = self._get_predicate_name(predicate_name)
parameter_names = []
pcount = defaultdict(lambda: 0)
for s in self.program_ir.predicate_terms(predicate_name):
param_name = self._obtain_parameter_name(s)
pcount[param_name] += 1
if pcount[param_name] > 1:
param_name = f"{param_name}_{pcount[param_name] - 1}"
parameter_names.append(param_name)
return tuple(parameter_names)
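    # Illustrative outcome: if a predicate's terms are named (x, x, y), the
    # counter above disambiguates duplicates and this returns ('x', 'x_1', 'y').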
def _obtain_parameter_name(self, parameter_expression):
if hasattr(parameter_expression, "name"):
param_name = parameter_expression.name
elif hasattr(parameter_expression, "functor") and hasattr(
parameter_expression.functor, "name"
):
param_name = parameter_expression.functor.name
else:
param_name = ir.Symbol.fresh().name
return param_name
def _get_predicate_name(self, predicate_name):
if isinstance(predicate_name, fe.Symbol):
predicate_name = predicate_name.neurolang_symbol
elif isinstance(predicate_name, fe.Expression) and isinstance(
predicate_name.expression, ir.Symbol
):
predicate_name = predicate_name.expression
elif not isinstance(predicate_name, str):
raise ValueError(f"{predicate_name} is not a string or symbol")
return predicate_name
|
py | 1a4670b929f89750d50f3382cd313c618812f303 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from happy_python import HappyPyException
class TestHappyPyException(unittest.TestCase):
def test_hpe(self):
try:
raise HappyPyException('自定义错误')
except HappyPyException as e:
self.assertEqual('自定义错误', str(e))
|
py | 1a4670bc90eceaa122738ab8e47135ede56e16d8 | from __future__ import division
import requests
import datetime as dt
import json
import sys
from functools import partial
# from multiprocessing.pool import Pool
from billiard.pool import Pool
from twitterscraper.tweet import Tweet
from twitterscraper.ts_logger import logger
from twitterscraper.user import User
from fake_useragent import UserAgent
import urllib
ua = UserAgent()
HEADER = {'User-Agent': ua.random}
logger.info(HEADER)
INIT_URL = 'https://twitter.com/search?f=tweets&vertical=default&q={q}&l={lang}'
RELOAD_URL = 'https://twitter.com/i/search/timeline?f=tweets&vertical=' \
'default&include_available_features=1&include_entities=1&' \
'reset_error_state=false&src=typd&max_position={pos}&q={q}&l={lang}'
INIT_URL_USER = 'https://twitter.com/{u}'
RELOAD_URL_USER = 'https://twitter.com/i/profiles/show/{u}/timeline/tweets?' \
'include_available_features=1&include_entities=1&' \
'max_position={pos}&reset_error_state=false'
def get_query_url(query, lang, pos, from_user = False):
if from_user:
if pos is None:
return INIT_URL_USER.format(u=query)
else:
return RELOAD_URL_USER.format(u=query, pos=pos)
if pos is None:
return INIT_URL.format(q=query, lang=lang)
else:
return RELOAD_URL.format(q=query, pos=pos, lang=lang)
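# Illustrative example: get_query_url('pizza', 'en', None) returns
# 'https://twitter.com/search?f=tweets&vertical=default&q=pizza&l=en'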
def linspace(start, stop, n):
if n == 1:
yield stop
return
h = (stop - start) / (n - 1)
for i in range(n):
yield start + h * i
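# Illustrative example: list(linspace(0, 10, 3)) yields [0.0, 5.0, 10.0]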
def query_single_page(query, lang, pos, retry=50, from_user=False):
"""
Returns tweets from the given URL.
:param query: The query parameter of the query url
:param lang: The language parameter of the query url
:param pos: The query url parameter that determines where to start looking
:param retry: Number of retries if something goes wrong.
:return: The list of tweets, the pos argument for getting the next page.
"""
url = get_query_url(query, lang, pos, from_user)
    logger.info('Scraping tweets from {}'.format(url))
try:
response = requests.get(url, headers=HEADER)
if pos is None: # html response
html = response.text or ''
json_resp = None
else:
html = ''
try:
json_resp = json.loads(response.text)
html = json_resp['items_html'] or ''
except ValueError as e:
logger.exception('Failed to parse JSON "{}" while requesting "{}"'.format(e, url))
tweets = list(Tweet.from_html(html))
if not tweets:
try:
if json_resp:
pos = json_resp['min_position']
has_more_items = json_resp['has_more_items']
if not has_more_items:
logger.info("Twitter returned : 'has_more_items' ")
return [], None
else:
pos = None
except:
pass
if retry > 0:
logger.info('Retrying... (Attempts left: {})'.format(retry))
return query_single_page(query, lang, pos, retry - 1, from_user)
else:
return [], pos
if json_resp:
return tweets, urllib.parse.quote(json_resp['min_position'])
if from_user:
return tweets, tweets[-1].tweet_id
return tweets, "TWEET-{}-{}".format(tweets[-1].tweet_id, tweets[0].tweet_id)
except requests.exceptions.HTTPError as e:
logger.exception('HTTPError {} while requesting "{}"'.format(
e, url))
except requests.exceptions.ConnectionError as e:
logger.exception('ConnectionError {} while requesting "{}"'.format(
e, url))
except requests.exceptions.Timeout as e:
logger.exception('TimeOut {} while requesting "{}"'.format(
e, url))
except json.decoder.JSONDecodeError as e:
logger.exception('Failed to parse JSON "{}" while requesting "{}".'.format(
e, url))
if retry > 0:
logger.info('Retrying... (Attempts left: {})'.format(retry))
return query_single_page(query, lang, pos, retry - 1)
logger.error('Giving up.')
return [], None
def query_tweets_once_generator(query, limit=None, lang='', pos=None):
"""
Queries twitter for all the tweets you want! It will load all pages it gets
from twitter. However, twitter might out of a sudden stop serving new pages,
in that case, use the `query_tweets` method.
Note that this function catches the KeyboardInterrupt so it can return
tweets on incomplete queries if the user decides to abort.
:param query: Any advanced query you want to do! Compile it at
https://twitter.com/search-advanced and just copy the query!
:param limit: Scraping will be stopped when at least ``limit`` number of
items are fetched.
:param pos: Field used as a "checkpoint" to continue where you left off in iteration
:return: A list of twitterscraper.Tweet objects. You will get at least
``limit`` number of items.
"""
logger.info('Querying {}'.format(query))
query = query.replace(' ', '%20').replace('#', '%23').replace(':', '%3A')
num_tweets = 0
try:
while True:
new_tweets, new_pos = query_single_page(query, lang, pos)
if len(new_tweets) == 0:
logger.info('Got {} tweets for {}.'.format(
num_tweets, query))
return
for t in new_tweets:
yield t, pos
# use new_pos only once you have iterated through all old tweets
pos = new_pos
num_tweets += len(new_tweets)
if limit and num_tweets >= limit:
logger.info('Got {} tweets for {}.'.format(
num_tweets, query))
return
except KeyboardInterrupt:
logger.info('Program interrupted by user. Returning tweets gathered '
'so far...')
except BaseException:
logger.exception('An unknown error occurred! Returning tweets '
'gathered so far.')
logger.info('Got {} tweets for {}.'.format(
num_tweets, query))
def query_tweets_once(*args, **kwargs):
res = list(query_tweets_once_generator(*args, **kwargs))
if res:
tweets, positions = zip(*res)
return tweets
else:
return []
def query_tweets(query, limit=None, begindate=dt.date(2006, 3, 21), enddate=dt.date.today(), poolsize=20, lang=''):
no_days = (enddate - begindate).days
if(no_days < 0):
sys.exit('Begin date must occur before end date.')
if poolsize > no_days:
# Since we are assigning each pool a range of dates to query,
# the number of pools should not exceed the number of dates.
poolsize = no_days
dateranges = [begindate + dt.timedelta(days=elem) for elem in linspace(0, no_days, poolsize+1)]
if limit and poolsize:
limit_per_pool = (limit // poolsize)+1
else:
limit_per_pool = None
queries = ['{} since:{} until:{}'.format(query, since, until)
for since, until in zip(dateranges[:-1], dateranges[1:])]
all_tweets = []
try:
pool = Pool(poolsize)
logger.info('queries: {}'.format(queries))
try:
for new_tweets in pool.imap_unordered(partial(query_tweets_once, limit=limit_per_pool, lang=lang), queries):
all_tweets.extend(new_tweets)
logger.info('Got {} tweets ({} new).'.format(
len(all_tweets), len(new_tweets)))
except KeyboardInterrupt:
logger.info('Program interrupted by user. Returning all tweets '
'gathered so far.')
finally:
pool.close()
pool.join()
return all_tweets
def query_tweets_from_user(user, limit=None):
pos = None
tweets = []
try:
while True:
new_tweets, pos = query_single_page(user, lang='', pos=pos, from_user=True)
if len(new_tweets) == 0:
logger.info("Got {} tweets from username {}".format(len(tweets), user))
return tweets
tweets += new_tweets
if limit and len(tweets) >= limit:
logger.info("Got {} tweets from username {}".format(len(tweets), user))
return tweets
except KeyboardInterrupt:
logger.info("Program interrupted by user. Returning tweets gathered "
"so far...")
except BaseException:
logger.exception("An unknown error occurred! Returning tweets "
"gathered so far.")
logger.info("Got {} tweets from username {}.".format(
len(tweets), user))
return tweets
def query_user_page(url, retry=10):
"""
Returns the scraped user data from a twitter user page.
:param url: The URL to get the twitter user info from (url contains the user page)
:param retry: Number of retries if something goes wrong.
:return: Returns the scraped user data from a twitter user page.
"""
try:
response = requests.get(url, headers=HEADER)
html = response.text or ''
user_info = User.from_html(html)
if not user_info:
return None
return user_info
except requests.exceptions.HTTPError as e:
logger.exception('HTTPError {} while requesting "{}"'.format(
e, url))
except requests.exceptions.ConnectionError as e:
logger.exception('ConnectionError {} while requesting "{}"'.format(
e, url))
except requests.exceptions.Timeout as e:
logger.exception('TimeOut {} while requesting "{}"'.format(
e, url))
if retry > 0:
logger.info('Retrying... (Attempts left: {})'.format(retry))
return query_user_page(url, retry-1)
logger.error('Giving up.')
return None
def query_user_info(user):
"""
Returns the scraped user data from a twitter user page.
:param user: the twitter user to web scrape its twitter page info
"""
try:
user_info = query_user_page(INIT_URL_USER.format(u=user))
if user_info:
logger.info("Got user information from username {}".format(user))
return user_info
except KeyboardInterrupt:
logger.info("Program interrupted by user. Returning user information gathered so far...")
except BaseException:
logger.exception("An unknown error occurred! Returning user information gathered so far...")
logger.info("Got user information from username {}".format(user))
return user_info
|
py | 1a46710122b0b1a544b4dde746ccaf170eca68f3 | # qubit number=4
# total number=47
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
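# Illustrative examples: bitwise_xor('110', '000') returns '011' (note the
# result is bit-reversed), and bitwise_dot('111', '101') returns '0'.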
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[3]) # number=16
prog.cz(input_qubit[0],input_qubit[3]) # number=17
prog.h(input_qubit[3]) # number=18
prog.x(input_qubit[3]) # number=13
prog.h(input_qubit[3]) # number=24
prog.cz(input_qubit[0],input_qubit[3]) # number=25
prog.h(input_qubit[3]) # number=26
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=30
prog.cz(input_qubit[0],input_qubit[2]) # number=31
prog.h(input_qubit[2]) # number=32
prog.x(input_qubit[2]) # number=28
prog.h(input_qubit[2]) # number=39
prog.cz(input_qubit[0],input_qubit[2]) # number=40
prog.h(input_qubit[2]) # number=41
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
prog.h(input_qubit[2]) # number=36
prog.cz(input_qubit[3],input_qubit[2]) # number=37
prog.h(input_qubit[2]) # number=38
prog.h(input_qubit[0]) # number=44
prog.cz(input_qubit[2],input_qubit[0]) # number=45
prog.h(input_qubit[0]) # number=46
prog.h(input_qubit[0]) # number=19
prog.cz(input_qubit[2],input_qubit[0]) # number=20
prog.h(input_qubit[0]) # number=21
prog.h(input_qubit[3]) # number=33
prog.cz(input_qubit[2],input_qubit[3]) # number=34
prog.h(input_qubit[3]) # number=35
prog.x(input_qubit[2]) # number=42
prog.x(input_qubit[2]) # number=43
# circuit end
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = BasicAer.get_backend('statevector_simulator')
sample_shot =8000
info = execute(prog, backend=backend).result().get_statevector()
qubits = round(log2(len(info)))
info = {
np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
for i in range(2 ** qubits)
}
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_Class3146.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
|
py | 1a467206dc11f27dad7b8f7df3d70147ce38c971 | import itertools
import operator
from collections import Counter
from .hk import bipartiteMatch
from .subgraphs import createSubGraph, createNotInConfSubGraph, createInConfSubGraph
from .sets import createNotInConfSet, createInConfSet
def computeControlConf(graphSet, nodesNo):
"""
Computation of the Control Configuration.
"""
output = {}
matching = {}
frequencies = {}
(controlSize, controlMatching) = bipartiteMatch(graphSet)
matchingSize = nodesNo - controlSize
if matchingSize == 0:
return (output, matching, frequencies)
# Start methodology.
# Find the nodes that are never in the control configuration.
nodesNotInConf = createNotInConfSet(graphSet, nodesNo, controlSize)
# Find the nodes that are always in the control configuration.
nodesInConf = createInConfSet(
graphSet, nodesNo, controlSize, nodesNotInConf)
# Find the nodes that are sometimes included in the control configuration.
freeNodes = set(range(nodesNo)) - nodesNotInConf - nodesInConf
freeControl = matchingSize - len(nodesInConf)
counter = 0
# Find all possible sets of control nodes.
for possibleSet in itertools.combinations(freeNodes, freeControl):
# Subgraph with no ingoing edges to possible control nodes.
subGraph = createSubGraph(graphSet, possibleSet, nodesInConf)
(subMatchingSize, subMatching) = bipartiteMatch(subGraph)
# Check whether the size of maximum matching remains
# the same in this subnetwork.
if subMatchingSize == controlSize:
# If true, nodes with no incoming edges form control
# configuration.
output[counter] = list(set(possibleSet) | nodesInConf)
matching[counter] = subMatching
counter += 1
# Compute node frequencies
number_of_configurations = len(output)
if number_of_configurations == 0:
return (output, matching, frequencies)
counter = dict(Counter(list(itertools.chain.from_iterable(output.values()))))
frequencies = {key:(value/number_of_configurations*100) for (key,value) in counter.items()}
# End of methodology.
return (output, matching, frequencies)
def computeAproxControlConf(graphSet, nodesNo):
"""
Computation of the approximated Control Configuration.
"""
output = {}
matching = {}
frequencies = {}
(controlSize, controlMatching) = bipartiteMatch(graphSet)
matchingSize = nodesNo - controlSize
if matchingSize == 0:
return (output, matching, frequencies)
# Start methodology.
# Find the nodes that are never in the control configuration.
nodesNotInConf = createNotInConfSet(graphSet, nodesNo, controlSize)
# Find the nodes that are always in the control configuration.
nodesInConf = createInConfSet(
graphSet, nodesNo, controlSize, nodesNotInConf)
# Find the nodes that are sometimes included in the control configuration.
freeNodes = set(range(nodesNo)) - nodesNotInConf - nodesInConf
output[0] = freeNodes
output[1] = nodesInConf
output[2] = nodesInConf
# Compute node frequencies
number_of_configurations = len(output)
if number_of_configurations == 0:
return (output, matching, frequencies)
counter = dict(Counter(list(itertools.chain.from_iterable(output.values()))))
frequencies = {key:(value/number_of_configurations*100) for (key,value) in counter.items()}
# End of methodology.
return (output, matching, frequencies) |
py | 1a4672a8b496cde0a7a76675188e5ee3633df126 | #!/usr/bin/env python
# Copyright 2020-2021 Axis Communications AB.
#
# For a full list of individual contributors, please see the commit history.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""ETOS suite runner module."""
import os
import logging
import traceback
import signal
from etos_lib import ETOS
from etos_lib.logging.logger import FORMAT_CONFIG
from etos_suite_runner.lib.runner import SuiteRunner
from etos_suite_runner.lib.esr_parameters import ESRParameters
# Remove spam from pika.
logging.getLogger("pika").setLevel(logging.WARNING)
LOGGER = logging.getLogger(__name__)
BASE_DIR = os.path.dirname(os.path.relpath(__file__))
class EnvironmentProviderException(Exception):
"""Exception from EnvironmentProvider."""
def __init__(self, msg, task_id):
"""Initialize with task_id."""
self.task_id = task_id
super().__init__(msg)
class ESR: # pylint:disable=too-many-instance-attributes
"""Suite runner for ETOS main program.
Run this as a daemon on your system in order to trigger test suites within
the eiffel event system.
"""
def __init__(self):
"""Initialize ESR by creating a rabbitmq publisher."""
self.logger = logging.getLogger("ESR")
self.etos = ETOS(
"ETOS Suite Runner", os.getenv("SOURCE_HOST"), "ETOS Suite Runner"
)
signal.signal(signal.SIGTERM, self.graceful_exit)
self.params = ESRParameters(self.etos)
FORMAT_CONFIG.identifier = self.params.tercc.meta.event_id
self.etos.config.rabbitmq_publisher_from_environment()
self.etos.start_publisher()
self.etos.config.set(
"WAIT_FOR_ENVIRONMENT_TIMEOUT",
int(os.getenv("ESR_WAIT_FOR_ENVIRONMENT_TIMEOUT")),
)
def _request_environment(self):
"""Request an environment from the environment provider.
:return: Task ID and an error message.
:rtype: tuple
"""
params = {"suite_id": self.params.tercc.meta.event_id}
wait_generator = self.etos.http.retry(
"POST", self.etos.debug.environment_provider, json=params
)
task_id = None
result = {}
try:
for response in wait_generator:
result = response.get("result", "")
if response and result and result.lower() == "success":
task_id = response.get("data", {}).get("id")
break
continue
else:
return None, "Did not retrieve an environment"
except ConnectionError as exception:
return None, str(exception)
return task_id, ""
def _wait_for_environment(self, task_id):
"""Wait for an environment being provided.
:param task_id: Task ID to wait for.
:type task_id: str
:return: Environment and an error message.
:rtype: tuple
"""
timeout = self.etos.config.get("WAIT_FOR_ENVIRONMENT_TIMEOUT")
wait_generator = self.etos.utils.wait(
self.etos.http.wait_for_request,
uri=self.etos.debug.environment_provider,
timeout=timeout,
params={"id": task_id},
)
environment = None
result = {}
response = None
for generator in wait_generator:
for response in generator:
result = response.get("result", {})
if response and result and result.get("error") is None:
environment = response
break
if result and result.get("error"):
return None, result.get("error")
if environment is not None:
break
else:
if result and result.get("error"):
return None, result.get("error")
return (
None,
(
"Unknown Error: Did not receive an environment "
f"within {self.etos.debug.default_http_timeout}s"
),
)
return environment, ""
def _release_environment(self, task_id):
"""Release an environment from the environment provider.
:param task_id: Task ID to release.
:type task_id: str
"""
wait_generator = self.etos.http.wait_for_request(
self.etos.debug.environment_provider, params={"release": task_id}
)
for response in wait_generator:
if response:
break
def _reserve_workers(self):
"""Reserve workers for test."""
LOGGER.info("Request environment from environment provider")
task_id, msg = self._request_environment()
if task_id is None:
raise EnvironmentProviderException(msg, task_id)
LOGGER.info("Wait for environment to become ready.")
environment, msg = self._wait_for_environment(task_id)
if environment is None:
raise EnvironmentProviderException(msg, task_id)
return environment, task_id
def run_suite(self, triggered):
"""Trigger an activity and starts the actual test runner.
Will only start the test activity if there's a 'slot' available.
:param triggered: Activity triggered.
:type triggered: :obj:`eiffel.events.EiffelActivityTriggeredEvent`
"""
context = triggered.meta.event_id
LOGGER.info("Sending ESR Docker environment event.")
self.etos.events.send_environment_defined(
"ESR Docker", {"CONTEXT": context}, image=os.getenv("SUITE_RUNNER")
)
runner = SuiteRunner(self.params, self.etos, context)
task_id = None
try:
LOGGER.info("Wait for test environment.")
environment, task_id = self._reserve_workers()
self.etos.events.send_activity_started(triggered, {"CONTEXT": context})
LOGGER.info("Starting ESR.")
runner.run(environment.get("result"))
except EnvironmentProviderException as exception:
task_id = exception.task_id
raise
finally:
LOGGER.info("Release test environment.")
if task_id is not None:
self._release_environment(task_id)
@staticmethod
def verify_input():
"""Verify that the data input to ESR are correct."""
assert os.getenv(
"SUITE_RUNNER"
), "SUITE_RUNNER enviroment variable not provided."
assert os.getenv(
"SOURCE_HOST"
), "SOURCE_HOST environment variable not provided."
assert os.getenv("TERCC"), "TERCC environment variable not provided."
def run(self):
"""Run the ESR main loop."""
tercc_id = None
try:
tercc_id = self.params.tercc.meta.event_id
self.etos.events.send_announcement_published(
"[ESR] Launching.",
"Starting up ESR. Waiting for tests to start.",
"MINOR",
{"CAUSE": tercc_id},
)
activity_name = "ETOS testrun"
links = {
"CAUSE": [
self.params.tercc.meta.event_id,
self.params.artifact_created["meta"]["id"],
]
}
triggered = self.etos.events.send_activity_triggered(
activity_name,
links,
executionType="AUTOMATED",
triggers=[{"type": "EIFFEL_EVENT"}],
)
self.verify_input()
context = triggered.meta.event_id
except: # noqa
self.etos.events.send_announcement_published(
"[ESR] Failed to start test execution",
traceback.format_exc(),
"CRITICAL",
{"CAUSE": tercc_id},
)
raise
try:
self.run_suite(triggered)
self.etos.events.send_activity_finished(
triggered, {"conclusion": "SUCCESSFUL"}, {"CONTEXT": context}
)
except Exception as exception: # pylint:disable=broad-except
reason = str(exception)
self.etos.events.send_activity_canceled(
triggered, {"CONTEXT": context}, reason=reason
)
self.etos.events.send_announcement_published(
"[ESR] Test suite execution failed",
traceback.format_exc(),
"MAJOR",
{"CONTEXT": context},
)
raise
def graceful_exit(self, *_):
"""Attempt to gracefully exit the running job."""
self.logger.info(
"Kill command received - Attempting to shut down all processes."
)
raise Exception("Terminate command received - Shutting down.")
def main():
"""Entry point allowing external calls."""
esr = ESR()
try:
esr.run() # Blocking
except:
with open("/dev/termination-log", "w", encoding="utf-8") as termination_log:
termination_log.write(traceback.format_exc())
raise
finally:
esr.etos.publisher.stop()
LOGGER.info("ESR Finished Executing.")
def run():
"""Entry point for console_scripts."""
main()
if __name__ == "__main__":
run()
|
py | 1a4672a9864c7653cd4ed3d3d945f1ede7668e32 | from random import choice
n1 = str(input('Primeiro aluno: '))
n2 = str(input('Segundo aluno: '))
n3 = str(input('Terceiro aluno: '))
n4 = str(input('Quarto aluno: '))
lista = [n1, n2, n3, n4]
escolhido = choice(lista)
print('O aluno escolhido foi {}'.format(escolhido)) |
py | 1a46731cd4ad177b412b8369420a827dea3a8f0e | # a cursor is the object we use to interact with the database
import pymysql.cursors
# this class will give us an instance of a connection to our database
class MySQLConnection:
def __init__(self, db):
# change the user and password as needed
connection = pymysql.connect(host = 'localhost',
user = 'root',
password = 'root', # CHANGE THIS IF YOU USE A DIFFERENT PASSWORD IN MySql Workbench!
db = db,
charset = 'utf8mb4',
cursorclass = pymysql.cursors.DictCursor,
autocommit = True)
# establish the connection to the database
self.connection = connection
# the method to query the database
def query_db(self, query, data=None):
with self.connection.cursor() as cursor:
try:
query = cursor.mogrify(query, data)
print("Running Query:", query)
cursor.execute(query, data)
if query.lower().find("insert") >= 0:
# INSERT queries will return the ID NUMBER of the row inserted
self.connection.commit()
return cursor.lastrowid
elif query.lower().find("select") >= 0:
# SELECT queries will return the data from the database as a LIST OF DICTIONARIES
result = cursor.fetchall()
return result
else:
# UPDATE and DELETE queries will return nothing
self.connection.commit()
except Exception as e:
# if the query fails the method will return FALSE
print("Something went wrong", e)
return False
finally:
# close the connection
self.connection.close()
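    # Illustrative usage via connectToMySQL below (comment only; the schema,
    # table and column names are placeholders):
    #   mysql = connectToMySQL("my_schema")
    #   rows = mysql.query_db("SELECT * FROM users;")
    #   new_id = connectToMySQL("my_schema").query_db(
    #       "INSERT INTO users (name) VALUES (%(name)s);", {"name": "Ada"})
    # Note: query_db closes the connection when it finishes, so create a new
    # connection with connectToMySQL for each query.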
# connectToMySQL receives the database we're using and uses it to create an instance of MySQLConnection
def connectToMySQL(db):
return MySQLConnection(db) |
py | 1a467357967128d5e183510f5370fec913eb579d | from collections import defaultdict
from typing import List
class Twitter:
    def __init__(self):
        """
        Initialize your data structure here.
        """
        self.time = 0                      # global counter used to order tweets
        self.tweets = defaultdict(list)    # userId -> list of (time, tweetId)
        self.followees = defaultdict(set)  # followerId -> set of followee ids
    def postTweet(self, userId: int, tweetId: int) -> None:
        """
        Compose a new tweet.
        """
        self.tweets[userId].append((self.time, tweetId))
        self.time += 1
    def getNewsFeed(self, userId: int) -> List[int]:
        """
        Retrieve the 10 most recent tweet ids in the user's news feed. Each item in the news feed must be posted by users who the user followed or by the user herself. Tweets must be ordered from most recent to least recent.
        """
        sources = self.followees[userId] | {userId}
        candidates = []
        for uid in sources:
            candidates.extend(self.tweets[uid][-10:])  # at most the 10 newest per user
        candidates.sort(reverse=True)                  # newest (largest time) first
        return [tweetId for _, tweetId in candidates[:10]]
    def follow(self, followerId: int, followeeId: int) -> None:
        """
        Follower follows a followee. If the operation is invalid, it should be a no-op.
        """
        if followerId != followeeId:
            self.followees[followerId].add(followeeId)
    def unfollow(self, followerId: int, followeeId: int) -> None:
        """
        Follower unfollows a followee. If the operation is invalid, it should be a no-op.
        """
        self.followees[followerId].discard(followeeId)
# Your Twitter object will be instantiated and called as such:
# obj = Twitter()
# obj.postTweet(userId,tweetId)
# param_2 = obj.getNewsFeed(userId)
# obj.follow(followerId,followeeId)
# obj.unfollow(followerId,followeeId)
|
py | 1a4673deeea7ae136547271a06a6fb27a9899c6b | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# xpaw documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 16 11:08:48 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import re
from os.path import join, dirname
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'xpaw'
copyright = '2016-2018, jadbin'
author = 'jadbin'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
def read_version():
p = join(dirname(dirname(__file__)), 'xpaw', '__init__.py')
with open(p, 'r', encoding='utf-8') as f:
return re.search(r"__version__ = '([^']+)'", f.read()).group(1)
version = read_version()
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'description': 'Async web scraping framework',
'github_user': 'jadbin',
'github_repo': 'xpaw',
'github_button': False,
'travis_button': True,
'font_family': '"Helvetica Neue", Helvetica, "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "微软雅黑", Arial, sans-serif',
'font_size': '14px',
'code_font_size': '12px',
'note_bg': '#E5ECD1',
'note_border': '#BFCF8C',
}
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html', 'navigation.html', 'searchbox.html',
]
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'xpawdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'xpaw.tex', 'xpaw Documentation',
'jadbin', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'xpaw', 'xpaw Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'xpaw', 'xpaw Documentation',
author, 'xpaw', 'One line description of project.',
'Miscellaneous'),
]
|
py | 1a467462938f02018538077a8b39ce90b46b9baf | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.mark import matrix, parametrize
from ducktape.mark.resource import cluster
from ducktape.utils.util import wait_until
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService, quorum
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int_with_prefix
from kafkatest.version import DEV_BRANCH, LATEST_0_10_0, LATEST_0_10_1, LATEST_0_10_2, LATEST_0_11_0, LATEST_1_0, LATEST_1_1, LATEST_2_0, LATEST_2_1, LATEST_2_2, LATEST_2_3, LATEST_2_4, LATEST_2_5, LATEST_2_6, LATEST_2_7, LATEST_2_8, KafkaVersion
class ClientCompatibilityProduceConsumeTest(ProduceConsumeValidateTest):
"""
These tests validate that we can use a new client to produce and consume from older brokers.
"""
def __init__(self, test_context):
""":type test_context: ducktape.tests.test.TestContext"""
super(ClientCompatibilityProduceConsumeTest, self).__init__(test_context=test_context)
self.topic = "test_topic"
self.zk = ZookeeperService(test_context, num_nodes=3) if quorum.for_test(test_context) == quorum.zk else None
self.kafka = KafkaService(test_context, num_nodes=3, zk=self.zk, topics={self.topic:{
"partitions": 10,
"replication-factor": 2}})
self.num_partitions = 10
self.timeout_sec = 60
self.producer_throughput = 1000
self.num_producers = 2
self.messages_per_producer = 1000
self.num_consumers = 1
def setUp(self):
if self.zk:
self.zk.start()
def min_cluster_size(self):
# Override this since we're adding services outside of the constructor
return super(ClientCompatibilityProduceConsumeTest, self).min_cluster_size() + self.num_producers + self.num_consumers
@cluster(num_nodes=9)
@matrix(broker_version=[str(DEV_BRANCH)], metadata_quorum=quorum.all_non_upgrade)
@parametrize(broker_version=str(LATEST_0_10_0))
@parametrize(broker_version=str(LATEST_0_10_1))
@parametrize(broker_version=str(LATEST_0_10_2))
@parametrize(broker_version=str(LATEST_0_11_0))
@parametrize(broker_version=str(LATEST_1_0))
@parametrize(broker_version=str(LATEST_1_1))
@parametrize(broker_version=str(LATEST_2_0))
@parametrize(broker_version=str(LATEST_2_1))
@parametrize(broker_version=str(LATEST_2_2))
@parametrize(broker_version=str(LATEST_2_3))
@parametrize(broker_version=str(LATEST_2_4))
@parametrize(broker_version=str(LATEST_2_5))
@parametrize(broker_version=str(LATEST_2_6))
@parametrize(broker_version=str(LATEST_2_7))
@parametrize(broker_version=str(LATEST_2_8))
def test_produce_consume(self, broker_version, metadata_quorum=quorum.zk):
print("running producer_consumer_compat with broker_version = %s" % broker_version, flush=True)
self.kafka.set_version(KafkaVersion(broker_version))
self.kafka.security_protocol = "PLAINTEXT"
self.kafka.interbroker_security_protocol = self.kafka.security_protocol
self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka,
self.topic, throughput=self.producer_throughput,
message_validator=is_int_with_prefix)
self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic,
consumer_timeout_ms=60000,
message_validator=is_int_with_prefix)
self.kafka.start()
self.run_produce_consume_validate(lambda: wait_until(
lambda: self.producer.each_produced_at_least(self.messages_per_producer) == True,
timeout_sec=120, backoff_sec=1,
err_msg="Producer did not produce all messages in reasonable amount of time"))
|
py | 1a467512fe700a0ec080bf7bea6c40da53405029 | import pytest
import numpy as np
from keras.utils import conv_utils
from keras import backend as K
def test_normalize_tuple():
assert conv_utils.normalize_tuple(5, 2, 'kernel_size') == (5, 5)
assert conv_utils.normalize_tuple([7, 9], 2, 'kernel_size') == (7, 9)
with pytest.raises(ValueError):
conv_utils.normalize_tuple(None, 2, 'kernel_size')
with pytest.raises(ValueError):
conv_utils.normalize_tuple([2, 3, 4], 2, 'kernel_size')
with pytest.raises(ValueError):
conv_utils.normalize_tuple(['str', 'impossible'], 2, 'kernel_size')
def test_invalid_data_format():
with pytest.raises(ValueError):
K.normalize_data_format('channels_middle')
def test_invalid_padding():
with pytest.raises(ValueError):
conv_utils.normalize_padding('diagonal')
def test_invalid_convert_kernel():
with pytest.raises(ValueError):
conv_utils.convert_kernel(np.zeros((10, 20)))
def test_conv_output_length():
assert conv_utils.conv_output_length(None, 7, 'same', 1) is None
assert conv_utils.conv_output_length(224, 7, 'same', 1) == 224
assert conv_utils.conv_output_length(224, 7, 'same', 2) == 112
assert conv_utils.conv_output_length(32, 5, 'valid', 1) == 28
assert conv_utils.conv_output_length(32, 5, 'valid', 2) == 14
assert conv_utils.conv_output_length(32, 5, 'causal', 1) == 32
assert conv_utils.conv_output_length(32, 5, 'causal', 2) == 16
assert conv_utils.conv_output_length(32, 5, 'full', 1) == 36
assert conv_utils.conv_output_length(32, 5, 'full', 2) == 18
with pytest.raises(AssertionError):
conv_utils.conv_output_length(32, 5, 'diagonal', 2)
def test_conv_input_length():
assert conv_utils.conv_input_length(None, 7, 'same', 1) is None
assert conv_utils.conv_input_length(112, 7, 'same', 1) == 112
assert conv_utils.conv_input_length(112, 7, 'same', 2) == 223
assert conv_utils.conv_input_length(28, 5, 'valid', 1) == 32
assert conv_utils.conv_input_length(14, 5, 'valid', 2) == 31
assert conv_utils.conv_input_length(36, 5, 'full', 1) == 32
assert conv_utils.conv_input_length(18, 5, 'full', 2) == 31
with pytest.raises(AssertionError):
conv_utils.conv_output_length(18, 5, 'diagonal', 2)
def test_deconv_length():
assert conv_utils.deconv_length(None, 1, 7, 'same', None) is None
assert conv_utils.deconv_length(224, 1, 7, 'same', None) == 224
assert conv_utils.deconv_length(224, 2, 7, 'same', None) == 448
assert conv_utils.deconv_length(32, 1, 5, 'valid', None) == 36
assert conv_utils.deconv_length(32, 2, 5, 'valid', None) == 67
assert conv_utils.deconv_length(32, 1, 5, 'full', None) == 28
assert conv_utils.deconv_length(32, 2, 5, 'full', None) == 59
assert conv_utils.deconv_length(224, 1, 7, 'same', 0) == 224
assert conv_utils.deconv_length(224, 2, 7, 'same', 0) == 447
assert conv_utils.deconv_length(224, 2, 7, 'same', 1) == 448
assert conv_utils.deconv_length(32, 1, 5, 'valid', 0) == 36
assert conv_utils.deconv_length(32, 2, 5, 'valid', 0) == 67
assert conv_utils.deconv_length(32, 2, 5, 'valid', 1) == 68
assert conv_utils.deconv_length(6, 1, 3, 'full', 0) == 4
assert conv_utils.deconv_length(6, 2, 3, 'full', 1) == 10
assert conv_utils.deconv_length(6, 2, 3, 'full', 2) == 11
if __name__ == '__main__':
pytest.main([__file__])
|
py | 1a4675888d03b1bb742f87b57010df5dcf4da8f2 | # This is a sample commands.py. You can add your own commands here.
#
# Please refer to commands_full.py for all the default commands and a complete
# documentation. Do NOT add them all here, or you may end up with defunct
# commands when upgrading ranger.
# A simple command for demonstration purposes follows.
# -----------------------------------------------------------------------------
from __future__ import (absolute_import, division, print_function)
# You can import any python module as needed.
import os
# You always need to import ranger.api.commands here to get the Command class:
from ranger.api.commands import Command
from ranger.core.loader import CommandLoader
# Any class that is a subclass of "Command" will be integrated into ranger as a
# command. Try typing ":my_edit<ENTER>" in ranger!
class my_edit(Command):
# The so-called doc-string of the class will be visible in the built-in
# help that is accessible by typing "?c" inside ranger.
""":my_edit <filename>
A sample command for demonstration purposes that opens a file in an editor.
"""
# The execute method is called when you run this command in ranger.
def execute(self):
# self.arg(1) is the first (space-separated) argument to the function.
# This way you can write ":my_edit somefilename<ENTER>".
if self.arg(1):
# self.rest(1) contains self.arg(1) and everything that follows
target_filename = self.rest(1)
else:
# self.fm is a ranger.core.filemanager.FileManager object and gives
# you access to internals of ranger.
# self.fm.thisfile is a ranger.container.file.File object and is a
# reference to the currently selected file.
target_filename = self.fm.thisfile.path
# This is a generic function to print text in ranger.
self.fm.notify("Let's edit the file " + target_filename + "!")
# Using bad=True in fm.notify allows you to print error messages:
if not os.path.exists(target_filename):
self.fm.notify("The given file does not exist!", bad=True)
return
# This executes a function from ranger.core.acitons, a module with a
# variety of subroutines that can help you construct commands.
# Check out the source, or run "pydoc ranger.core.actions" for a list.
self.fm.edit_file(target_filename)
# The tab method is called when you press tab, and should return a list of
# suggestions that the user will tab through.
# tabnum is 1 for <TAB> and -1 for <S-TAB> by default
def tab(self, tabnum):
# This is a generic tab-completion function that iterates through the
# content of the current directory.
return self._tab_directory_content()
class extracthere(Command):
def execute(self):
""" Extract copied files to current directory """
copied_files = tuple(self.fm.copy_buffer)
if not copied_files:
return
def refresh(_):
cwd = self.fm.get_directory(original_path)
cwd.load_content()
one_file = copied_files[0]
cwd = self.fm.thisdir
original_path = cwd.path
au_flags = ['-X', cwd.path]
au_flags += self.line.split()[1:]
au_flags += ['-e']
self.fm.copy_buffer.clear()
self.fm.cut_buffer = False
if len(copied_files) == 1:
descr = "extracting: " + os.path.basename(one_file.path)
else:
descr = "extracting files from: " + os.path.basename(one_file.dirname)
obj = CommandLoader(args=['aunpack'] + au_flags \
+ [f.path for f in copied_files], descr=descr, read=True)
obj.signal_bind('after', refresh)
self.fm.loader.add(obj)
class compress(Command):
def execute(self):
""" Compress marked files to current directory """
cwd = self.fm.thisdir
marked_files = cwd.get_selection()
if not marked_files:
return
def refresh(_):
cwd = self.fm.get_directory(original_path)
cwd.load_content()
original_path = cwd.path
parts = self.line.split()
au_flags = parts[1:]
descr = "compressing files in: " + os.path.basename(parts[1])
obj = CommandLoader(args=['apack'] + au_flags + \
[os.path.relpath(f.path, cwd.path) for f in marked_files], descr=descr, read=True)
obj.signal_bind('after', refresh)
self.fm.loader.add(obj)
def tab(self, tabnum):
""" Complete with current folder name """
extension = ['.zip', '.tar.gz', '.rar', '.7z']
return ['compress ' + os.path.basename(self.fm.thisdir.path) + ext for ext in extension]
|
py | 1a46766b841be5626e6862800916a6c29b7d6567 | from subprocess import run, CalledProcessError
from sys import exit
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography import x509
from cryptography.x509.oid import NameOID
from termcolor import cprint
def gen_csr_with_new_cert(fqdn, subject, password, altnames=None):
key = rsa.generate_private_key(public_exponent=65537,
key_size=4096,
backend=default_backend())
with open('{}.key'.format(fqdn), 'wb') as f:
if password:
f.write(
key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.BestAvailableEncryption(
password.encode()),
))
else:
f.write(
key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
))
return generate_csr(key, fqdn, subject, altnames)
def gen_csr_with_existing_cert(key_path,
fqdn,
subject,
additional=None,
password=None):
key = None
with open(key_path, 'rb') as f:
key = serialization.load_pem_private_key(f.read(), password,
default_backend())
return generate_csr(key, fqdn, subject, additional)
# Helper function
def generate_csr(key, fqdn, subject, altnames=None):
csr = x509.CertificateSigningRequestBuilder().subject_name(
x509.Name([
x509.NameAttribute(NameOID.COUNTRY_NAME, subject['country']),
x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME,
subject['state']),
x509.NameAttribute(NameOID.LOCALITY_NAME, subject['city']),
x509.NameAttribute(NameOID.ORGANIZATION_NAME, subject['org']),
x509.NameAttribute(NameOID.COMMON_NAME, subject['cn']),
]))
if altnames != None:
csr = csr.add_extension(
x509.SubjectAlternativeName(
[x509.DNSName(domain) for domain in altnames]),
critical=False,
)
csr = csr.sign(key, hashes.SHA256(), default_backend())
with open('{}.req'.format(fqdn), 'wb') as f:
f.write(csr.public_bytes(serialization.Encoding.PEM))
return csr.public_bytes(serialization.Encoding.PEM).decode()
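# Illustrative usage sketch (not part of the original module). The FQDN,
# subject values and SAN list below are placeholders, not real data.
if __name__ == '__main__':
    example_subject = {
        'country': 'US',
        'state': 'California',
        'city': 'San Francisco',
        'org': 'Example Org',
        'cn': 'www.example.com',
    }
    # Writes www.example.com.key and www.example.com.req to the working
    # directory and returns the CSR as a PEM string.
    pem_csr = gen_csr_with_new_cert('www.example.com', example_subject,
                                    password=None,
                                    altnames=['www.example.com', 'example.com'])
    print(pem_csr)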
|
py | 1a4676f67ad34ed98ad9a9e308ee0ece1fc39a6e | import string
import os
def clean_name(name):
name = name.lower()
name = name.strip()
name = name.replace('\'', '')
name = name.replace('-', ' ')
return name.translate(str.maketrans("", "", string.punctuation))
class NameConverter:
def __init__(self):
self.color_map = {}
location = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
with open(location + '/colors.csv') as colors:
for line in colors:
line = line.strip()
(name, r, g, b) = line.split(',')
name = clean_name(name)
self.color_map[name] = (int(r), int(g), int(b))
def convert(self, color):
key_name = clean_name(color)
return self.color_map.get(key_name, None)
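# Illustrative usage sketch (not part of the original module); it assumes
# colors.csv ships an entry such as "navy blue".
if __name__ == '__main__':
    converter = NameConverter()
    # clean_name() lower-cases, strips and drops punctuation, so hyphenated
    # or capitalised spellings resolve to the same entry.
    print(converter.convert("Navy-Blue"))  # -> (r, g, b) tuple, or None if unknown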
|
py | 1a46772301b86df410eb8645c1328c6438318e56 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.thread.messagepump import MessagePump, MessagePumpDelegate
from webkitpy.common.thread.threadedmessagequeue import ThreadedMessageQueue
class TestDelegate(MessagePumpDelegate):
def __init__(self):
self.log = []
def schedule(self, interval, callback):
self.callback = callback
self.log.append("schedule")
def message_available(self, message):
self.log.append("message_available: %s" % message)
def final_message_delivered(self):
self.log.append("final_message_delivered")
class MessagePumpTest(unittest.TestCase):
def test_basic(self):
queue = ThreadedMessageQueue()
delegate = TestDelegate()
pump = MessagePump(delegate, queue)
self.assertEqual(delegate.log, [
'schedule'
])
delegate.callback()
queue.post("Hello")
queue.post("There")
delegate.callback()
self.assertEqual(delegate.log, [
'schedule',
'schedule',
'message_available: Hello',
'message_available: There',
'schedule'
])
queue.post("More")
queue.post("Messages")
queue.stop()
delegate.callback()
self.assertEqual(delegate.log, [
'schedule',
'schedule',
'message_available: Hello',
'message_available: There',
'schedule',
'message_available: More',
'message_available: Messages',
'final_message_delivered'
])
|
py | 1a4677c0e701a6da0e80fe86a6aefb59b06ffa92 | from bench import bench
print(bench(10, '', '''
s = []
for i in range(100000): s.append(i)
for _ in range(100000): s.pop()
'''))
|
py | 1a4678cc6f77e4225c3751119a2bd428887d2507 | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'userextensions_tests.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
py | 1a467907bbbd19e836f93a5bbd3ffbee052d6545 | # -*- coding: utf-8 -*-
"""
Created on Mon Nov 15 01:18:23 2021
@author: Sharayu
"""
num = 11
if num < 10:
    if num == 1:
        print("The value is 1")
    else:
        print("The value is less than 10 but not 1")
else:
    print("The value is greater than or equal to 10") |
py | 1a467a134f5e7251a418ef5e116074419cafa9d0 | """sc-githooks - The base check class
Copyright (c) 2021 Scott Lau
Portions Copyright (c) 2021 InnoGames GmbH
Portions Copyright (c) 2021 Emre Hasegeli
"""
from enum import IntEnum
class CheckState(IntEnum):
NEW = 0
CLONED = 1
DONE = 2
FAILED = 3
class Severity(IntEnum):
# The numbers are selected to match the Syslog standard.
ERROR = 3
WARNING = 4
NOTICE = 5
NOTE = 5
INFO = 6
def translate(self):
if self.__eq__(Severity.ERROR):
return "错误"
elif self.__eq__(Severity.WARNING):
return "警告"
elif self.__eq__(Severity.NOTICE):
return "注意"
elif self.__eq__(Severity.NOTE):
return "注意"
elif self.__eq__(Severity.INFO):
return "信息"
else:
return "未知"
@classmethod
def split(cls, line):
"""Search the severities in the beginning of the string
It returns the highest severity when non match.
"""
for name, severity in cls._member_map_.items():
if line.upper().startswith(name):
line = line[len(name):].strip(' :-')
break
return severity, line
class BaseCheck:
"""The parent class of all checks
Checks are expanded to different objects by cloning. The subclasses
has to override prepare() method to clone the check at appropriate
stage.
"""
preferred_checks = []
state = CheckState.NEW
ERROR_MSG_PREFIX = "GL-HOOK-ERR:"
def __init__(self, **kwargs):
for key, value in kwargs.items():
# We expect all of the arguments to be initialized with defaults
# on the class.
assert hasattr(type(self), key)
if value:
setattr(self, key, value)
def clone(self):
new = type(self)(**vars(self))
new.state = CheckState.CLONED
return new
def set_state(self, state):
assert state > CheckState.CLONED
self.state = max(self.state, state)
def prepare(self, obj):
for check in self.preferred_checks:
if check.prepare(obj):
return None
return self
def print_problems(self):
header_printed = False
for severity, problem in self.evaluate_problems():
if not header_printed:
print('{} === {} ==='.format(BaseCheck.ERROR_MSG_PREFIX, self))
header_printed = True
print('{} {}: {}'.format(BaseCheck.ERROR_MSG_PREFIX, severity.translate(), problem))
# if header_printed:
# print('{}'.format(BaseCheck.ERROR_MSG_PREFIX))
self.set_state(CheckState.DONE)
def evaluate_problems(self):
assert self.state == CheckState.CLONED
for severity, problem in self.get_problems():
if severity <= Severity.ERROR:
self.set_state(CheckState.FAILED)
yield severity, problem
def __str__(self):
return type(self).__name__
def prepare_checks(checks, obj, next_checks=None):
"""Prepare the checks to the object
It yields the checks prepared and ready. The checks which are not
ready yet are going do be appended to the next_checks list.
"""
for check in checks:
prepared_check = check.prepare(obj)
if prepared_check:
cloned = prepared_check.state >= CheckState.CLONED
assert next_checks is not None or cloned
if cloned:
yield prepared_check
else:
next_checks.append(prepared_check)
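# Illustrative sketch, not part of the original module: a minimal concrete
# check built on BaseCheck, showing the clone-in-prepare pattern described in
# the docstring above. The commit object and its get_summary() method are
# assumptions made only for this example.
class ExampleCommitSummaryCheck(BaseCheck):
    commit = None
    def prepare(self, obj):
        new = super(ExampleCommitSummaryCheck, self).prepare(obj)
        if not new or not hasattr(obj, 'get_summary'):
            return new
        # Clone once we know which commit this check instance applies to.
        new = new.clone()
        new.commit = obj
        return new
    def get_problems(self):
        summary = self.commit.get_summary()
        if not summary:
            yield Severity.ERROR, 'commit summary is empty'
        elif len(summary) > 72:
            yield Severity.WARNING, 'commit summary longer than 72 characters'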
|
py | 1a467a78d04d8e739338c0f9faa53ecd40e77175 | # -*- test-case-name: twisted.conch.test.test_userauth -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementation of the ssh-userauth service.
Currently implemented authentication types are public-key and password.
Maintainer: Paul Swartz
"""
import struct
from twisted.conch import error, interfaces
from twisted.conch.ssh import keys, transport, service
from twisted.conch.ssh.common import NS, getNS
from twisted.cred import credentials
from twisted.cred.error import UnauthorizedLogin
from twisted.internet import defer, reactor
from twisted.python import failure, log
class SSHUserAuthServer(service.SSHService):
"""
A service implementing the server side of the 'ssh-userauth' service. It
is used to authenticate the user on the other side as being able to access
this server.
@ivar name: the name of this service: 'ssh-userauth'
@type name: C{str}
@ivar authenticatedWith: a list of authentication methods that have
already been used.
@type authenticatedWith: C{list}
@ivar loginTimeout: the number of seconds we wait before disconnecting
the user for taking too long to authenticate
@type loginTimeout: C{int}
@ivar attemptsBeforeDisconnect: the number of failed login attempts we
allow before disconnecting.
@type attemptsBeforeDisconnect: C{int}
@ivar loginAttempts: the number of login attempts that have been made
@type loginAttempts: C{int}
@ivar passwordDelay: the number of seconds to delay when the user gives
an incorrect password
@type passwordDelay: C{int}
@ivar interfaceToMethod: a C{dict} mapping credential interfaces to
authentication methods. The server checks to see which of the
cred interfaces have checkers and tells the client that those methods
are valid for authentication.
@type interfaceToMethod: C{dict}
@ivar supportedAuthentications: A list of the supported authentication
methods.
@type supportedAuthentications: C{list} of C{str}
@ivar user: the last username the client tried to authenticate with
@type user: C{str}
@ivar method: the current authentication method
@type method: C{str}
@ivar nextService: the service the user wants started after authentication
has been completed.
@type nextService: C{str}
@ivar portal: the L{twisted.cred.portal.Portal} we are using for
authentication
@type portal: L{twisted.cred.portal.Portal}
@ivar clock: an object with a callLater method. Stubbed out for testing.
"""
name = 'ssh-userauth'
loginTimeout = 10 * 60 * 60
# 10 minutes before we disconnect them
attemptsBeforeDisconnect = 20
# 20 login attempts before a disconnect
passwordDelay = 1 # number of seconds to delay on a failed password
clock = reactor
interfaceToMethod = {
credentials.ISSHPrivateKey : 'publickey',
credentials.IUsernamePassword : 'password',
}
def serviceStarted(self):
"""
Called when the userauth service is started. Set up instance
variables, check if we should allow password authentication (only
allow if the outgoing connection is encrypted) and set up a login
timeout.
"""
self.authenticatedWith = []
self.loginAttempts = 0
self.user = None
self.nextService = None
self.portal = self.transport.factory.portal
self.supportedAuthentications = []
for i in self.portal.listCredentialsInterfaces():
if i in self.interfaceToMethod:
self.supportedAuthentications.append(self.interfaceToMethod[i])
if not self.transport.isEncrypted('in'):
# don't let us transport password in plaintext
if 'password' in self.supportedAuthentications:
self.supportedAuthentications.remove('password')
self._cancelLoginTimeout = self.clock.callLater(
self.loginTimeout,
self.timeoutAuthentication)
def serviceStopped(self):
"""
Called when the userauth service is stopped. Cancel the login timeout
if it's still going.
"""
if self._cancelLoginTimeout:
self._cancelLoginTimeout.cancel()
self._cancelLoginTimeout = None
def timeoutAuthentication(self):
"""
Called when the user has timed out on authentication. Disconnect
with a DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE message.
"""
self._cancelLoginTimeout = None
self.transport.sendDisconnect(
transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
'you took too long')
def tryAuth(self, kind, user, data):
"""
Try to authenticate the user with the given method. Dispatches to a
auth_* method.
@param kind: the authentication method to try.
@type kind: C{str}
@param user: the username the client is authenticating with.
@type user: C{str}
@param data: authentication specific data sent by the client.
@type data: C{str}
@return: A Deferred called back if the method succeeded, or erred back
if it failed.
@rtype: C{defer.Deferred}
"""
log.msg('%s trying auth %s' % (user, kind))
if kind not in self.supportedAuthentications:
return defer.fail(
error.ConchError('unsupported authentication, failing'))
kind = kind.replace('-', '_')
f = getattr(self,'auth_%s'%kind, None)
if f:
ret = f(data)
if not ret:
return defer.fail(
error.ConchError('%s return None instead of a Deferred'
% kind))
else:
return ret
return defer.fail(error.ConchError('bad auth type: %s' % kind))
def ssh_USERAUTH_REQUEST(self, packet):
"""
The client has requested authentication. Payload::
string user
string next service
string method
<authentication specific data>
@type packet: C{str}
"""
user, nextService, method, rest = getNS(packet, 3)
if user != self.user or nextService != self.nextService:
self.authenticatedWith = [] # clear auth state
self.user = user
self.nextService = nextService
self.method = method
d = self.tryAuth(method, user, rest)
if not d:
self._ebBadAuth(
failure.Failure(error.ConchError('auth returned none')))
return
d.addCallback(self._cbFinishedAuth)
d.addErrback(self._ebMaybeBadAuth)
d.addErrback(self._ebBadAuth)
return d
def _cbFinishedAuth(self, (interface, avatar, logout)):
"""
The callback when user has successfully been authenticated. For a
description of the arguments, see L{twisted.cred.portal.Portal.login}.
We start the service requested by the user.
"""
self.transport.avatar = avatar
self.transport.logoutFunction = logout
service = self.transport.factory.getService(self.transport,
self.nextService)
if not service:
raise error.ConchError('could not get next service: %s'
% self.nextService)
log.msg('%s authenticated with %s' % (self.user, self.method))
self.transport.sendPacket(MSG_USERAUTH_SUCCESS, '')
self.transport.setService(service())
def _ebMaybeBadAuth(self, reason):
"""
An intermediate errback. If the reason is
error.NotEnoughAuthentication, we send a MSG_USERAUTH_FAILURE, but
with the partial success indicator set.
@type reason: L{twisted.python.failure.Failure}
"""
reason.trap(error.NotEnoughAuthentication)
self.transport.sendPacket(MSG_USERAUTH_FAILURE,
NS(','.join(self.supportedAuthentications)) + '\xff')
def _ebBadAuth(self, reason):
"""
The final errback in the authentication chain. If the reason is
error.IgnoreAuthentication, we simply return; the authentication
method has sent its own response. Otherwise, send a failure message
and (if the method is not 'none') increment the number of login
attempts.
@type reason: L{twisted.python.failure.Failure}
"""
if reason.check(error.IgnoreAuthentication):
return
if self.method != 'none':
log.msg('%s failed auth %s' % (self.user, self.method))
if reason.check(UnauthorizedLogin):
log.msg('unauthorized login: %s' % reason.getErrorMessage())
elif reason.check(error.ConchError):
log.msg('reason: %s' % reason.getErrorMessage())
else:
log.msg(reason.getTraceback())
self.loginAttempts += 1
if self.loginAttempts > self.attemptsBeforeDisconnect:
self.transport.sendDisconnect(
transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
'too many bad auths')
return
self.transport.sendPacket(
MSG_USERAUTH_FAILURE,
NS(','.join(self.supportedAuthentications)) + '\x00')
def auth_publickey(self, packet):
"""
Public key authentication. Payload::
byte has signature
string algorithm name
string key blob
[string signature] (if has signature is True)
Create a SSHPublicKey credential and verify it using our portal.
"""
hasSig = ord(packet[0])
algName, blob, rest = getNS(packet[1:], 2)
pubKey = keys.Key.fromString(blob)
signature = hasSig and getNS(rest)[0] or None
if hasSig:
b = (NS(self.transport.sessionID) + chr(MSG_USERAUTH_REQUEST) +
NS(self.user) + NS(self.nextService) + NS('publickey') +
chr(hasSig) + NS(pubKey.sshType()) + NS(blob))
c = credentials.SSHPrivateKey(self.user, algName, blob, b,
signature)
return self.portal.login(c, None, interfaces.IConchUser)
else:
c = credentials.SSHPrivateKey(self.user, algName, blob, None, None)
return self.portal.login(c, None,
interfaces.IConchUser).addErrback(self._ebCheckKey,
packet[1:])
def _ebCheckKey(self, reason, packet):
"""
Called back if the user did not sent a signature. If reason is
error.ValidPublicKey then this key is valid for the user to
authenticate with. Send MSG_USERAUTH_PK_OK.
"""
reason.trap(error.ValidPublicKey)
# if we make it here, it means that the publickey is valid
self.transport.sendPacket(MSG_USERAUTH_PK_OK, packet)
return failure.Failure(error.IgnoreAuthentication())
def auth_password(self, packet):
"""
Password authentication. Payload::
string password
Make a UsernamePassword credential and verify it with our portal.
"""
password = getNS(packet[1:])[0]
c = credentials.UsernamePassword(self.user, password)
return self.portal.login(c, None, interfaces.IConchUser).addErrback(
self._ebPassword)
def _ebPassword(self, f):
"""
If the password is invalid, wait before sending the failure in order
to delay brute-force password guessing.
"""
d = defer.Deferred()
self.clock.callLater(self.passwordDelay, d.callback, f)
return d
class SSHUserAuthClient(service.SSHService):
"""
A service implementing the client side of 'ssh-userauth'.
This service will try all authentication methods provided by the server,
making callbacks for more information when necessary.
@ivar name: the name of this service: 'ssh-userauth'
@type name: C{str}
@ivar preferredOrder: a list of authentication methods that should be used
first, in order of preference, if supported by the server
@type preferredOrder: C{list}
@ivar user: the name of the user to authenticate as
@type user: C{str}
@ivar instance: the service to start after authentication has finished
@type instance: L{service.SSHService}
@ivar authenticatedWith: a list of strings of authentication methods we've tried
@type authenticatedWith: C{list} of C{str}
@ivar triedPublicKeys: a list of public key objects that we've tried to
authenticate with
@type triedPublicKeys: C{list} of L{Key}
@ivar lastPublicKey: the last public key object we've tried to authenticate
with
@type lastPublicKey: L{Key}
"""
name = 'ssh-userauth'
preferredOrder = ['publickey', 'password', 'keyboard-interactive']
def __init__(self, user, instance):
self.user = user
self.instance = instance
def serviceStarted(self):
self.authenticatedWith = []
self.triedPublicKeys = []
self.lastPublicKey = None
self.askForAuth('none', '')
def askForAuth(self, kind, extraData):
"""
Send a MSG_USERAUTH_REQUEST.
@param kind: the authentication method to try.
@type kind: C{str}
@param extraData: method-specific data to go in the packet
@type extraData: C{str}
"""
self.lastAuth = kind
self.transport.sendPacket(MSG_USERAUTH_REQUEST, NS(self.user) +
NS(self.instance.name) + NS(kind) + extraData)
def tryAuth(self, kind):
"""
Dispatch to an authentication method.
@param kind: the authentication method
@type kind: C{str}
"""
kind = kind.replace('-', '_')
log.msg('trying to auth with %s' % (kind,))
f = getattr(self,'auth_%s' % (kind,), None)
if f:
return f()
def _ebAuth(self, ignored, *args):
"""
Generic callback for a failed authentication attempt. Respond by
asking for the list of accepted methods (the 'none' method)
"""
self.askForAuth('none', '')
def ssh_USERAUTH_SUCCESS(self, packet):
"""
We received a MSG_USERAUTH_SUCCESS. The server has accepted our
authentication, so start the next service.
"""
self.transport.setService(self.instance)
def ssh_USERAUTH_FAILURE(self, packet):
"""
We received a MSG_USERAUTH_FAILURE. Payload::
string methods
byte partial success
If partial success is C{True}, then the previous method succeeded but is
not sufficient for authentication. C{methods} is a comma-separated list
of accepted authentication methods.
We sort the list of methods by their position in C{self.preferredOrder},
removing methods that have already succeeded. We then call
C{self.tryAuth} with the most preferred method.
@param packet: the L{MSG_USERAUTH_FAILURE} payload.
@type packet: C{str}
@return: a L{defer.Deferred} that will be callbacked with C{None} as
soon as all authentication methods have been tried, or C{None} if no
more authentication methods are available.
@rtype: C{defer.Deferred} or C{None}
"""
canContinue, partial = getNS(packet)
partial = ord(partial)
if partial:
self.authenticatedWith.append(self.lastAuth)
def orderByPreference(meth):
"""
Invoked once per authentication method in order to extract a
comparison key which is then used for sorting.
@param meth: the authentication method.
@type meth: C{str}
@return: the comparison key for C{meth}.
@rtype: C{int}
"""
if meth in self.preferredOrder:
return self.preferredOrder.index(meth)
else:
# put the element at the end of the list.
return len(self.preferredOrder)
canContinue = sorted([meth for meth in canContinue.split(',')
if meth not in self.authenticatedWith],
key=orderByPreference)
log.msg('can continue with: %s' % canContinue)
return self._cbUserauthFailure(None, iter(canContinue))
def _cbUserauthFailure(self, result, iterator):
if result:
return
try:
method = iterator.next()
except StopIteration:
self.transport.sendDisconnect(
transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
'no more authentication methods available')
else:
d = defer.maybeDeferred(self.tryAuth, method)
d.addCallback(self._cbUserauthFailure, iterator)
return d
def ssh_USERAUTH_PK_OK(self, packet):
"""
This message (number 60) can mean several different messages depending
on the current authentication type. We dispatch to individual methods
in order to handle this request.
"""
func = getattr(self, 'ssh_USERAUTH_PK_OK_%s' %
self.lastAuth.replace('-', '_'), None)
if func is not None:
return func(packet)
else:
self.askForAuth('none', '')
def ssh_USERAUTH_PK_OK_publickey(self, packet):
"""
This is MSG_USERAUTH_PK. Our public key is valid, so we create a
signature and try to authenticate with it.
"""
publicKey = self.lastPublicKey
b = (NS(self.transport.sessionID) + chr(MSG_USERAUTH_REQUEST) +
NS(self.user) + NS(self.instance.name) + NS('publickey') +
'\x01' + NS(publicKey.sshType()) + NS(publicKey.blob()))
d = self.signData(publicKey, b)
if not d:
self.askForAuth('none', '')
# this will fail, we'll move on
return
d.addCallback(self._cbSignedData)
d.addErrback(self._ebAuth)
def ssh_USERAUTH_PK_OK_password(self, packet):
"""
This is MSG_USERAUTH_PASSWD_CHANGEREQ. The password given has expired.
We ask for an old password and a new password, then send both back to
the server.
"""
prompt, language, rest = getNS(packet, 2)
self._oldPass = self._newPass = None
d = self.getPassword('Old Password: ')
d = d.addCallbacks(self._setOldPass, self._ebAuth)
d.addCallback(lambda ignored: self.getPassword(prompt))
d.addCallbacks(self._setNewPass, self._ebAuth)
def ssh_USERAUTH_PK_OK_keyboard_interactive(self, packet):
"""
This is MSG_USERAUTH_INFO_RESPONSE. The server has sent us the
questions it wants us to answer, so we ask the user and sent the
responses.
"""
name, instruction, lang, data = getNS(packet, 3)
numPrompts = struct.unpack('!L', data[:4])[0]
data = data[4:]
prompts = []
for i in range(numPrompts):
prompt, data = getNS(data)
echo = bool(ord(data[0]))
data = data[1:]
prompts.append((prompt, echo))
d = self.getGenericAnswers(name, instruction, prompts)
d.addCallback(self._cbGenericAnswers)
d.addErrback(self._ebAuth)
def _cbSignedData(self, signedData):
"""
Called back out of self.signData with the signed data. Send the
authentication request with the signature.
@param signedData: the data signed by the user's private key.
@type signedData: C{str}
"""
publicKey = self.lastPublicKey
self.askForAuth('publickey', '\x01' + NS(publicKey.sshType()) +
NS(publicKey.blob()) + NS(signedData))
def _setOldPass(self, op):
"""
Called back when we are choosing a new password. Simply store the old
password for now.
@param op: the old password as entered by the user
@type op: C{str}
"""
self._oldPass = op
def _setNewPass(self, np):
"""
Called back when we are choosing a new password. Get the old password
and send the authentication message with both.
@param np: the new password as entered by the user
@type np: C{str}
"""
op = self._oldPass
self._oldPass = None
self.askForAuth('password', '\xff' + NS(op) + NS(np))
def _cbGenericAnswers(self, responses):
"""
Called back when we are finished answering keyboard-interactive
questions. Send the info back to the server in a
MSG_USERAUTH_INFO_RESPONSE.
@param responses: a list of C{str} responses
@type responses: C{list}
"""
data = struct.pack('!L', len(responses))
for r in responses:
data += NS(r.encode('UTF8'))
self.transport.sendPacket(MSG_USERAUTH_INFO_RESPONSE, data)
def auth_publickey(self):
"""
Try to authenticate with a public key. Ask the user for a public key;
if the user has one, send the request to the server and return True.
Otherwise, return False.
@rtype: C{bool}
"""
d = defer.maybeDeferred(self.getPublicKey)
d.addBoth(self._cbGetPublicKey)
return d
def _cbGetPublicKey(self, publicKey):
if not isinstance(publicKey, keys.Key): # failure or None
publicKey = None
if publicKey is not None:
self.lastPublicKey = publicKey
self.triedPublicKeys.append(publicKey)
log.msg('using key of type %s' % publicKey.type())
self.askForAuth('publickey', '\x00' + NS(publicKey.sshType()) +
NS(publicKey.blob()))
return True
else:
return False
def auth_password(self):
"""
Try to authenticate with a password. Ask the user for a password.
If the user will return a password, return True. Otherwise, return
False.
@rtype: C{bool}
"""
d = self.getPassword()
if d:
d.addCallbacks(self._cbPassword, self._ebAuth)
return True
else: # returned None, don't do password auth
return False
def auth_keyboard_interactive(self):
"""
Try to authenticate with keyboard-interactive authentication. Send
the request to the server and return True.
@rtype: C{bool}
"""
log.msg('authing with keyboard-interactive')
self.askForAuth('keyboard-interactive', NS('') + NS(''))
return True
def _cbPassword(self, password):
"""
Called back when the user gives a password. Send the request to the
server.
@param password: the password the user entered
@type password: C{str}
"""
self.askForAuth('password', '\x00' + NS(password))
def signData(self, publicKey, signData):
"""
Sign the given data with the given public key.
By default, this will call getPrivateKey to get the private key,
then sign the data using Key.sign().
This method is factored out so that it can be overridden to use
alternate methods, such as a key agent.
@param publicKey: The public key object returned from L{getPublicKey}
@type publicKey: L{keys.Key}
@param signData: the data to be signed by the private key.
@type signData: C{str}
@return: a Deferred that's called back with the signature
@rtype: L{defer.Deferred}
"""
key = self.getPrivateKey()
if not key:
return
return key.addCallback(self._cbSignData, signData)
def _cbSignData(self, privateKey, signData):
"""
Called back when the private key is returned. Sign the data and
return the signature.
@param privateKey: the private key object
@type publicKey: L{keys.Key}
@param signData: the data to be signed by the private key.
@type signData: C{str}
@return: the signature
@rtype: C{str}
"""
return privateKey.sign(signData)
def getPublicKey(self):
"""
Return a public key for the user. If no more public keys are
available, return C{None}.
This implementation always returns C{None}. Override it in a
subclass to actually find and return a public key object.
@rtype: L{Key} or L{NoneType}
"""
return None
def getPrivateKey(self):
"""
Return a L{Deferred} that will be called back with the private key
object corresponding to the last public key from getPublicKey().
If the private key is not available, errback on the Deferred.
@rtype: L{Deferred} called back with L{Key}
"""
return defer.fail(NotImplementedError())
def getPassword(self, prompt = None):
"""
Return a L{Deferred} that will be called back with a password.
prompt is a string to display for the password, or None for a generic
'user@hostname's password: '.
@type prompt: C{str}/C{None}
@rtype: L{defer.Deferred}
"""
return defer.fail(NotImplementedError())
def getGenericAnswers(self, name, instruction, prompts):
"""
        Returns a L{Deferred} with the responses to the prompts.
@param name: The name of the authentication currently in progress.
@param instruction: Describes what the authentication wants.
@param prompts: A list of (prompt, echo) pairs, where prompt is a
string to display and echo is a boolean indicating whether the
user's response should be echoed as they type it.
"""
return defer.fail(NotImplementedError())
MSG_USERAUTH_REQUEST = 50
MSG_USERAUTH_FAILURE = 51
MSG_USERAUTH_SUCCESS = 52
MSG_USERAUTH_BANNER = 53
MSG_USERAUTH_INFO_RESPONSE = 61
MSG_USERAUTH_PK_OK = 60
messages = {}
for k, v in locals().items():
if k[:4]=='MSG_':
messages[v] = k
SSHUserAuthServer.protocolMessages = messages
SSHUserAuthClient.protocolMessages = messages
del messages
del v
# Doubles, not included in the protocols' mappings
MSG_USERAUTH_PASSWD_CHANGEREQ = 60
MSG_USERAUTH_INFO_REQUEST = 60
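# Illustrative sketch, not part of Twisted itself: a minimal client subclass
# wiring up the overridable hooks documented above. The ~/.ssh key paths and
# the getpass-based prompt are assumptions made only for this example.
class ExampleSSHUserAuthClient(SSHUserAuthClient):
    def getPublicKey(self):
        import os
        path = os.path.expanduser('~/.ssh/id_rsa.pub')
        if not os.path.exists(path):
            return None
        key = keys.Key.fromFile(path)
        # Don't offer the same key twice.
        if any(key.blob() == tried.blob() for tried in self.triedPublicKeys):
            return None
        return key
    def getPrivateKey(self):
        import os
        path = os.path.expanduser('~/.ssh/id_rsa')
        return defer.succeed(keys.Key.fromFile(path))
    def getPassword(self, prompt=None):
        import getpass
        from twisted.internet import threads
        prompt = prompt or "%s's password: " % (self.user,)
        # Prompt in a worker thread so the reactor is not blocked while typing.
        return threads.deferToThread(getpass.getpass, prompt)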
|
py | 1a467adc286cf48fefbbe8aee1fc17ee83989923 | from django.test import TestCase
from .models import *
class ProfileTestClass(TestCase):
#Set up method
def setUp(self):
self.new_profile = Profile(user_id=2,hood_id=3,bio="just testing", email='[email protected]',name="Titus",profile_pic="image.jpeg")
# Testing instance
def test_instance(self):
self.assertTrue(isinstance(self.new_profile,Profile))
def test_save_method(self):
self.new_profile.save_profile()
profile = Profile.objects.all()
self.assertTrue(len(profile)>0)
def test_delete_method(self):
self.new_profile.save_profile()
self.new_profile.delete_profile()
profile = Profile.objects.all()
self.assertTrue(len(profile)==0)
def tearDown(self):
Profile.objects.all().delete()
class PostTestClass(TestCase):
def setUp(self):
self.new_user = User(username='Titus', email='[email protected]', password='1234')
self.new_user.save()
self.new_hood = Neighborhood(name="Lavington", location="Nairobi", occupants="333",health_contact="123", police_contact="444", hood_pic="me.png", admin=self.new_user)
self.new_hood.save()
self.new_post=Post(title="techs",content="test app stuff",image='image.png',user=self.new_user, hood=self.new_hood)
def test_instance(self):
self.assertTrue(isinstance(self.new_post,Post))
def test_save_post(self):
self.new_post.save_post()
post = Post.objects.all()
self.assertTrue(len(post)>0)
def test_delete_post(self):
self.new_post.save_post()
self.new_post.delete_post()
post = Post.objects.all()
self.assertTrue(len(post)==0)
def test_update_post_method(self):
self.new_post.save_post()
new_title = 'the cold'
update = self.new_post.update_post(self.new_post.id,new_title)
self.assertEqual(update,new_title)
def test_find_method(self):
self.new_post.save_post()
post = self.new_post.get_single_post(self.new_post.id)
self.assertEquals(post.title,'techs')
def tearDown(self):
Post.objects.all().delete()
class BusinessTestClass(TestCase):
def setUp(self):
self.new_business=Business(bName="techs",user_id=4, hood_id=2, bEmail="[email protected]")
def test_instance(self):
self.assertTrue(isinstance(self.new_business,Business))
def test_save_business(self):
self.new_business.create_business()
business = Business.objects.all()
self.assertTrue(len(business)>0)
def test_delete_method(self):
self.new_business.create_business()
self.new_business.delete_business()
business = Business.objects.all()
        self.assertTrue(len(business) == 0)
def test_update_bussiness_method(self):
self.new_business.create_business()
new_name = 'wannaa'
update = self.new_business.update_business(self.new_business.id,new_name)
self.assertEqual(update,new_name)
def test_find_method(self):
self.new_business.create_business()
bussiness = self.new_business.find_business(self.new_business.id)
self.assertEquals(bussiness.bName,'techs')
def tearDown(self):
Business.objects.all().delete()
class NeighborhoodTestClass(TestCase):
def setUp(self):
self.new_user = User(username='Titus', email='[email protected]', password='1234')
self.new_user.save()
self.new_hood = Neighborhood(name="Lavington", location="Nairobi", occupants="333",health_contact="123", police_contact="444", hood_pic="me.png", admin=self.new_user)
def test_instance(self):
self.assertTrue(isinstance(self.new_hood,Neighborhood))
def test_save_post(self):
self.new_hood.create_neigborhood()
hood = Neighborhood.objects.all()
self.assertTrue(len(hood)>0)
def test_delete_post(self):
self.new_hood.create_neigborhood()
self.new_hood.delete_neigborhood()
hood = Neighborhood.objects.all()
self.assertTrue(len(hood)==0)
def test_update_neigborhood_method(self):
self.new_hood.create_neigborhood()
new_name = 'Lavington'
update = self.new_hood.update_neighborhood(self.new_hood.id,new_name)
self.assertEqual(update,new_name)
def tearDown(self):
Neighborhood.objects.all().delete() |
py | 1a467bbe5366002272977eb8691d597be724d961 | import logging
import numpy as np
import pandas as pd
from random import shuffle
import models
from common.constant.df_from_csv import LISTENING_DF, SP_I_DF, SP_O_DF
from common.constant.message_type import MessageType
from core.nlp.response_generator.product.base.base_response_generator import BaseResponseGenerator
class ReactionResponseGenerator(BaseResponseGenerator):
def __call__(self):
try:
responses = self.generate_reaction_by_type(self.user.id, self.message_type, self.message.text_kw_df)
self.set_regular_response(responses)
return self.response_data
except:
return self.get_error_response_data()
@classmethod
def generate_reaction_by_type(cls, user_id, reaction_type, text_kw_df):
try:
used_reaction_numbers_list = models.Reaction.find_used_reaction_number(user_id, reaction_type)
if reaction_type == MessageType.SPECIAL.value:
responses = [
cls.__find_special_reaction(used_reaction_numbers_list, text_kw_df, user_id, reaction_type)
]
else:
responses = [
cls.__find_basic_reaction(used_reaction_numbers_list, user_id, reaction_type)]
return responses
except:
logging.exception('')
return []
@staticmethod
def generate_listening():
try:
listening = LISTENING_DF[LISTENING_DF.type == 1].text.values
response_list = [np.random.choice(listening, 1)[0]]
return response_list
except:
logging.exception('')
return []
@classmethod
def __find_special_reaction(cls, used_reaction_numbers_list, text_kw_df, user_id, reaction_type):
special_words = text_kw_df[text_kw_df.special != 'normal'].word.tolist()
special_word = special_words[-1]
# e.g. id = alone, cry, etc
special_word_id = SP_I_DF[SP_I_DF.word == special_word]['id'].values[0]
target_id_list = SP_O_DF[SP_O_DF['id'] == special_word_id].index.tolist()
if len(used_reaction_numbers_list) == len(target_id_list):
models.Reaction.enable_reaction_number(user_id, reaction_type)
sp_id_list = used_reaction_numbers_list
else:
sp_id_list = SP_O_DF[
(SP_O_DF.id == special_word_id)
& ~(SP_O_DF.index.isin(used_reaction_numbers_list))
].index.tolist()
shuffle(sp_id_list)
sp_id = sp_id_list[0]
models.Reaction.disable_reaction_number(user_id, sp_id, reaction_type)
sp_reaction = SP_O_DF[SP_O_DF.index == sp_id].output.values[0]
sp_reaction = sp_reaction.replace('\\n', '\n')
return sp_reaction
@classmethod
def __find_basic_reaction(cls, used_reaction_numbers_list, user_id, reaction_type):
try:
used_reaction_numbers_list = list(set(used_reaction_numbers_list))
rdf = pd.read_csv('./csv_files/reactions.csv')
target_id_list = rdf[rdf['type'] == reaction_type].index.tolist()
if any(i not in target_id_list for i in used_reaction_numbers_list):
                # In this case, reactions.csv has changed, so set all reactions status = 1
models.Reaction.enable_reaction_number(user_id, reaction_type, used_reaction_numbers_list)
candidate_id_list = target_id_list
elif len(used_reaction_numbers_list) == len(target_id_list):
models.Reaction.enable_reaction_number(user_id, reaction_type)
candidate_id_list = used_reaction_numbers_list
else:
candidate_id_list = rdf[
(rdf['type'] == reaction_type)
& ~(rdf.index.isin(used_reaction_numbers_list))
].index.tolist()
shuffle(candidate_id_list)
r_id = candidate_id_list[0]
models.Reaction.disable_reaction_number(user_id, r_id, reaction_type)
r = rdf[rdf.index == r_id].reaction.values[0]
r = r.replace('\\n', '\n')
return r
except:
logging.exception('')
return ''
|
py | 1a467c2d356ebfafea51681a173fed459a1b04ce | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 30, transform = "Logit", sigma = 0.0, exog_count = 20, ar_order = 12); |
py | 1a467cc6fe25a410c3c01e15afa7c220e90efdae | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from setuptools import find_packages, setup
import tensorboard.version
REQUIRED_PACKAGES = [
'numpy >= 1.12.0',
'six >= 1.10.0',
'protobuf >= 3.4.0',
'werkzeug >= 0.11.10',
'html5lib == 0.9999999', # identical to 1.0b8
'markdown >= 2.6.8',
'bleach == 1.5.0',
# futures is a backport of the python 3.2+ concurrent.futures module
'futures >= 3.1.1; python_version < "3"',
# python3 specifically requires wheel 0.26
'wheel; python_version < "3"',
'wheel >= 0.26; python_version >= "3"',
]
CONSOLE_SCRIPTS = [
'tensorboard = tensorboard.main:run_main',
]
def get_readme():
with open('tensorboard/pip_package/README.rst') as f:
return f.read()
setup(
name='tensorflow-tensorboard',
version=tensorboard.version.VERSION.replace('-', ''),
description='TensorBoard lets you watch Tensors Flow',
long_description=get_readme(),
url='https://github.com/tensorflow/tensorboard',
author='Google Inc.',
author_email='[email protected]',
# Contained modules and scripts.
packages=find_packages(),
entry_points={
'console_scripts': CONSOLE_SCRIPTS,
},
package_data={
'tensorboard': [
'webfiles.zip',
],
},
# Disallow python 3.0 and 3.1 which lack a 'futures' module (see above).
python_requires='>= 2.7, != 3.0.*, != 3.1.*',
install_requires=REQUIRED_PACKAGES,
tests_require=REQUIRED_PACKAGES,
# PyPI package information.
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Libraries',
],
license='Apache 2.0',
keywords='tensorflow tensorboard tensor machine learning visualizer',
)
|
py | 1a467cffc666f7d4209f357092ce125a04accdd5 | from bs4 import BeautifulSoup
import requests,datetime
top_news = {"world":[],"business":[],"technology":[],"sports":[],"entertainment":[]}
def Scraper_news():
new_dic = {}
URLS_of_menu = {"world":"http://www.newzcone.com/world/","business":"http://www.newzcone.com/business/","technology":"http://www.newzcone.com/technology/networking-telecom/","sports":"http://www.newzcone.com/sports/","entertainment":"http://www.newzcone.com/entertainment/"}
Today = datetime.date.today()
today = ""
for string in str(Today):
if string == "-":
today +="/"
else:
today+=string
for key in URLS_of_menu:
url = URLS_of_menu[key]
html = requests.get(url)
soup = BeautifulSoup(html.text,"html.parser")
findingUrl = soup.findAll("div",class_="news-entry")
for div in findingUrl:
a_tags = div.findAll("a")
count = 0
for a in a_tags[1:15]:
new_dic["Date"] = today
new_dic["Discription"] = a.get_text().strip()
new_dic["News_URL"] = a["href"]
html = requests.get(a["href"])
needsoup = BeautifulSoup(html.text,"html.parser")
get_title = needsoup.title.get_text().strip()
new_dic["Title"] = get_title
                count += 1
                # Append before checking the limit so the fifth item is kept too.
                top_news[key].append(new_dic.copy())
                if count == 5:
                    break
return(top_news)
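# Illustrative usage sketch (not part of the original module): fetch the
# latest headlines and print the "world" section.
if __name__ == '__main__':
    news = Scraper_news()
    for item in news["world"]:
        print(item["Date"], "-", item["Title"], "-", item["News_URL"])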
|
py | 1a467daf1ebdaf7cf8e63a29b987dcc1e3af638c | from electrum_btcc.i18n import _
fullname = _('Revealer')
description = ''.join(["<br/>",
"<b>"+_("Do you have something to hide ?")+"</b>", '<br/>', '<br/>',
_("Revealer is a seed phrase back-up solution. It allows you to create a cold, analog, multi-factor backup of your wallet seeds, or of any arbitrary secret."), '<br/>', '<br/>',
_("Using a Revealer is better than writing your seed phrases on paper: a revealer is invulnerable to physical access and allows creation of trustless redundancy."), '<br/>', '<br/>',
_("This plug-in allows you to generate a pdf file of your secret phrase encrypted visually for your physical Revealer. You can print it trustlessly - it can only be decrypted optically with your Revealer."), '<br/>', '<br/>',
_("The plug-in also allows you to generate a digital Revealer file and print it yourself on a transparent overhead foil."), '<br/>', '<br/>',
_("Once activated you can access the plug-in through the icon at the seed dialog."), '<br/>', '<br/>',
_("For more information, visit"),
" <a href=\"https://revealer.cc\">https://revealer.cc</a>", '<br/>', '<br/>',
])
available_for = ['qt']
|
py | 1a467f3a725ac817ea0b64e2e15f9c5b9da7f5b6 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import wsgi
from nova import exception
from nova.image import glance
class Controller(object):
"""The image metadata API controller for the OpenStack API."""
def __init__(self):
self.image_service = glance.get_default_image_service()
def _get_image(self, context, image_id):
try:
return self.image_service.show(context, image_id)
except exception.NotFound:
msg = _("Image not found.")
raise exc.HTTPNotFound(explanation=msg)
@wsgi.serializers(xml=common.MetadataTemplate)
def index(self, req, image_id):
"""Returns the list of metadata for a given instance."""
context = req.environ['nova.context']
metadata = self._get_image(context, image_id)['properties']
return dict(metadata=metadata)
@wsgi.serializers(xml=common.MetaItemTemplate)
def show(self, req, image_id, id):
context = req.environ['nova.context']
metadata = self._get_image(context, image_id)['properties']
if id in metadata:
return {'meta': {id: metadata[id]}}
else:
raise exc.HTTPNotFound()
@wsgi.serializers(xml=common.MetadataTemplate)
@wsgi.deserializers(xml=common.MetadataDeserializer)
def create(self, req, image_id, body):
context = req.environ['nova.context']
image = self._get_image(context, image_id)
if 'metadata' in body:
for key, value in body['metadata'].iteritems():
image['properties'][key] = value
common.check_img_metadata_properties_quota(context,
image['properties'])
image = self.image_service.update(context, image_id, image, None)
return dict(metadata=image['properties'])
@wsgi.serializers(xml=common.MetaItemTemplate)
@wsgi.deserializers(xml=common.MetaItemDeserializer)
def update(self, req, image_id, id, body):
context = req.environ['nova.context']
try:
meta = body['meta']
except KeyError:
expl = _('Incorrect request body format')
raise exc.HTTPBadRequest(explanation=expl)
if not id in meta:
expl = _('Request body and URI mismatch')
raise exc.HTTPBadRequest(explanation=expl)
if len(meta) > 1:
expl = _('Request body contains too many items')
raise exc.HTTPBadRequest(explanation=expl)
image = self._get_image(context, image_id)
image['properties'][id] = meta[id]
common.check_img_metadata_properties_quota(context,
image['properties'])
self.image_service.update(context, image_id, image, None)
return dict(meta=meta)
@wsgi.serializers(xml=common.MetadataTemplate)
@wsgi.deserializers(xml=common.MetadataDeserializer)
def update_all(self, req, image_id, body):
context = req.environ['nova.context']
image = self._get_image(context, image_id)
metadata = body.get('metadata', {})
common.check_img_metadata_properties_quota(context, metadata)
image['properties'] = metadata
self.image_service.update(context, image_id, image, None)
return dict(metadata=metadata)
@wsgi.response(204)
def delete(self, req, image_id, id):
context = req.environ['nova.context']
image = self._get_image(context, image_id)
if not id in image['properties']:
msg = _("Invalid metadata key")
raise exc.HTTPNotFound(explanation=msg)
image['properties'].pop(id)
self.image_service.update(context, image_id, image, None)
def create_resource():
return wsgi.Resource(Controller())
|
py | 1a467f46acd5c23a9789210814ce38d008c7c934 | import contextlib
import re
import sys
class ColorContext(object):
"""
A context manager for terminal text colors.
Context usage:
with blue:
print 'this is blue'
with red:
print 'this is red'
print 'blue again!'
Callable usage that can break nested colors:
with purple:
print 'this is purple'
print yellow('this is yellow')
print 'this is not purple!'
"""
end = '\033[0m'
stack = [end]
def __init__(self, start):
self.start = start
def __call__(self, text):
"""Colorize some text. Cannot be nested; use as a context instead."""
return self.start + text + self.end
def __enter__(self):
code = self.start
sys.stdout.write(code)
sys.stderr.write(code)
self.stack.append(code)
def __exit__(self, type, value, traceback):
self.stack.pop()
sys.stdout.write(self.stack[-1])
sys.stderr.write(self.stack[-1])
blue = blue_text = ColorContext('\033[94m')
default = default_text_color = ColorContext(ColorContext.end)
green = green_text = ColorContext('\033[92m')
purple = purple_text = ColorContext('\033[95m')
red = red_text = ColorContext('\033[91m')
yellow = yellow_text = ColorContext('\033[93m')
class FilteredStdOut(object):
_re_type = type(re.compile(''))
def __init__(self, stdout, re_pattern):
self.stdout = stdout
if not isinstance(re_pattern, self._re_type):
re_pattern = re.compile(re_pattern)
self.pattern = re_pattern
self.blocked = False
def __getattr__(self, name):
return getattr(self.stdout, name)
def write(self, string):
if self.pattern.search(string):
self.blocked = True
elif self.blocked:
self.blocked = False
# The print statement writes the newline character afterwards,
# so this keeps track of what has been filtered out, and then
# avoids writing whitespace directly afterwards.
if string.strip():
self.stdout.write(string)
else:
self.stdout.write(string)
@contextlib.contextmanager
def do_not_print(re_pattern):
"""Stop certain messages from being printed to stdout."""
stdout = sys.stdout
sys.stdout = FilteredStdOut(stdout, re_pattern)
try:
yield
finally:
sys.stdout = stdout
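# Illustrative usage (an addition for clarity, not part of the original module):
# combining the color contexts defined above with the do_not_print() filter.
# Wrapped in a __main__ guard so importing this module stays side-effect free.
if __name__ == '__main__':
    with green:
        print('this prints in green')
        print(red('and this line in red'))
    with do_not_print(r'^noisy library message'):
        print('noisy library message: hidden from the terminal')
        print('this line still appears')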
|
py | 1a467f849b2be853b06d5e77e68a85cab5fb5b3c | #!/usr/bin/env python
from txros import util
from twisted.internet import defer
from navigator import Navigator
import numpy as np
from mil_tools import rosmsg_to_numpy
from geometry_msgs.msg import Vector3Stamped
class PingerAndy(Navigator):
'''
Mission to run sonar start gate challenge using Andy's sonar system, which produces a vector pointing towards the pinger.
'''
@classmethod
def init(cls):
cls.pinger_heading = cls.nh.subscribe("/hydrophones/ping_direction", Vector3Stamped)
@staticmethod
def line(p1, p2):
'''
Return equation of a line given two 2D points
https://stackoverflow.com/questions/20677795/how-do-i-compute-the-intersection-point-of-two-lines-in-python
'''
A = (p1[1] - p2[1])
B = (p2[0] - p1[0])
C = (p1[0] * p2[1] - p2[0] * p1[1])
return A, B, -C
@staticmethod
def intersection(L1, L2):
'''
Return point intersection (if it exists) of two lines given their equations obtained from the line method
https://stackoverflow.com/questions/20677795/how-do-i-compute-the-intersection-point-of-two-lines-in-python
'''
D = L1[0] * L2[1] - L1[1] * L2[0]
Dx = L1[2] * L2[1] - L1[1] * L2[2]
Dy = L1[0] * L2[2] - L1[2] * L2[0]
if D != 0:
x = Dx / D
y = Dy / D
return x, y
else:
return None
@util.cancellableInlineCallbacks
def get_gates(self):
totems = []
for i in range(4):
while True:
self.send_feedback('Click on totem {} in rviz'.format(i + 1))
point = yield self.rviz_point.get_next_message()
if point.header.frame_id != 'enu':
self.send_feedback('Point is not in ENU.\
Please switch rviz frame to ENU or tell kevin to support other frames.')
continue
break
self.send_feedback('Received point for totem {}'.format(i + 1))
point = rosmsg_to_numpy(point.point)
point[2] = 0.0
totems.append(np.array(point))
# Create list of gates halfway between each pair of totems
gates = []
for i in range(3):
gates.append((totems[i] + totems[i + 1]) / 2.0)
defer.returnValue(gates)
@util.cancellableInlineCallbacks
def run(self, args):
# Get position of 3 gates based on position of totems
gates = yield self.get_gates()
# Get heading towards pinger from Andy hydrophone system
self.send_feedback('All gates clicked on! Waiting for pinger heading...')
heading = yield self.pinger_heading.get_next_message()
self.send_feedback('Received pinger heading')
# Convert heading and hydrophones frame to ENU
hydrophones_to_enu = yield self.tf_listener.get_transform('enu', heading.header.frame_id)
hydrophones_origin = hydrophones_to_enu._p[0:2]
heading = rosmsg_to_numpy(heading.vector)
heading_enu = hydrophones_to_enu.transform_vector(heading)
heading_enu = heading_enu[0:2] / np.linalg.norm(heading_enu[0:2])
pinger_line = self.line(hydrophones_origin, hydrophones_origin + heading_enu)
gates_line = self.line(gates[0], gates[-1])
# Find intersection of these two lines. This is the approximate position of the pinger
intersection = self.intersection(pinger_line, gates_line)
if intersection is None:
raise Exception('No intersection')
self.send_feedback('Pinger is roughly at {}'.format(intersection))
distances = []
for gate in gates:
distances.append(np.linalg.norm(gate[0:2] - intersection))
argmin = np.argmin(np.array(distances))
self.send_feedback('Pinger is likely at gate {}'.format(argmin + 1))
gate = gates[argmin][:2]
between_vector = (gates[0] - gates[-1])[:2]
# Rotate that vector to point through the buoys
c = np.cos(np.radians(90))
s = np.sin(np.radians(90))
R = np.array([[c, -s], [s, c]])
direction_vector = R.dot(between_vector)
direction_vector /= np.linalg.norm(direction_vector)
position = self.pose[0][:2]
if np.linalg.norm(position - (gate + direction_vector)) > np.linalg.norm(position - (gate - direction_vector)):
direction_vector = -direction_vector
before_distance = 3.0
after_distance = 5.0
before = np.append(gate + direction_vector * before_distance, 0)
after = np.append(gate - direction_vector * after_distance, 0)
self.send_feedback('Moving in front of gate')
yield self.move.set_position(before).look_at(after).go()
self.send_feedback('Going through')
yield self.move.set_position(after).go()
defer.returnValue('My god it actually worked!')
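# Worked illustration (added for clarity, not part of the original mission file):
# the 2D line/intersection helpers above are plain static methods, so they can be
# exercised without ROS. The coordinates below are made-up example points.
if __name__ == '__main__':
    line_a = PingerAndy.line([0.0, 0.0], [1.0, 1.0])   # y = x
    line_b = PingerAndy.line([0.0, 2.0], [2.0, 0.0])   # x + y = 2
    print(PingerAndy.intersection(line_a, line_b))     # expected: (1.0, 1.0)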
|
py | 1a467fb91cf2066fd70da1bd39f98208784aad32 | from bitmovin.resources import AbstractIdResource
class EncodingStatus(AbstractIdResource):
def __init__(self, status, number_of_segments=None, id_=None, messages=None, subtasks=None,
created_at=None, queued_at=None, finished_at=None, error_at=None):
super().__init__(id_=id_)
self.status = status
self.numberOfSegments = number_of_segments
self.messages = messages
self.subtasks = subtasks
self.created_at = created_at
self.queued_at = queued_at
self.finished_at = finished_at
self.error_at = error_at
@classmethod
def parse_from_json_object(cls, json_object):
id_ = json_object.get('id')
status = json_object['status']
messages = json_object.get('messages')
subtasks = json_object.get('subtasks')
created_at = json_object.get('createdAt')
queued_at = json_object.get('queuedAt')
finished_at = json_object.get('finishedAt')
error_at = json_object.get('errorAt')
number_of_segments = json_object.get('numberOfSegments')
encoding_status = EncodingStatus(status=status, number_of_segments=number_of_segments, id_=id_,
messages=messages, subtasks=subtasks, created_at=created_at,
queued_at=queued_at, finished_at=finished_at, error_at=error_at)
return encoding_status
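# Illustrative sketch (not part of the original module): parsing a hypothetical
# status payload into an EncodingStatus instance; the field values are made up.
if __name__ == '__main__':
    sample = {
        'id': 'encoding-status-id',
        'status': 'FINISHED',
        'numberOfSegments': 42,
        'createdAt': '2018-01-01T00:00:00Z',
        'finishedAt': '2018-01-01T00:10:00Z'
    }
    parsed = EncodingStatus.parse_from_json_object(sample)
    print('{} ({} segments)'.format(parsed.status, parsed.numberOfSegments))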
|
py | 1a4680d3dc6982ba075ef806fcaf6b2fa9279c5a | """Tools for simulation of transients."""
from __future__ import print_function
import sys
import math
import copy
from collections import OrderedDict
import numpy as np
from numpy import random
from scipy.interpolate import InterpolatedUnivariateSpline as Spline1d
from astropy.table import Table
from astropy.cosmology import FlatLambdaCDM
from astropy.extern.six.moves import range
from .utils import alias_map
__all__ = ['zdist', 'realize_lcs']
WHOLESKY_SQDEG = 4. * np.pi * (180. / np.pi) ** 2
def zdist(zmin, zmax, time=365.25, area=1.,
ratefunc=lambda z: 1.e-4,
cosmo=FlatLambdaCDM(H0=70.0, Om0=0.3)):
"""Generate a distribution of redshifts.
Generates the correct redshift distribution and number of SNe, given
the input volumetric SN rate, the cosmology, and the observed area and
time.
Parameters
----------
zmin, zmax : float
Minimum and maximum redshift.
time : float, optional
Time in days (default is 1 year).
area : float, optional
Area in square degrees (default is 1 square degree). ``time`` and
``area`` are only used to determine the total number of SNe to
generate.
ratefunc : callable
A callable that accepts a single float (redshift) and returns the
comoving volumetric rate at each redshift in units of yr^-1 Mpc^-3.
The default is a function that returns ``1.e-4``.
cosmo : `~astropy.cosmology.Cosmology`, optional
Cosmology used to determine volume. The default is a FlatLambdaCDM
cosmology with ``Om0=0.3``, ``H0=70.0``.
Examples
--------
Loop over the generator:
>>> for z in zdist(0.0, 0.25):
... print(z)
...
0.151285827576
0.204078030595
0.201009196731
0.181635472172
0.17896188781
0.226561237264
0.192747368762
This tells us that in one observer-frame year, over 1 square
degree, 7 SNe occurred at redshifts below 0.25 (given the default
volumetric SN rate of 10^-4 SNe yr^-1 Mpc^-3). The exact number is
drawn from a Poisson distribution.
Generate the full list of redshifts immediately:
>>> zlist = list(zdist(0., 0.25))
Define a custom volumetric rate:
>>> def snrate(z):
... return 0.5e-4 * (1. + z)
...
>>> zlist = list(zdist(0., 0.25, ratefunc=snrate))
"""
# Get comoving volume in each redshift shell.
z_bins = 100 # Good enough for now.
z_binedges = np.linspace(zmin, zmax, z_bins + 1)
z_binctrs = 0.5 * (z_binedges[1:] + z_binedges[:-1])
sphere_vols = cosmo.comoving_volume(z_binedges).value
shell_vols = sphere_vols[1:] - sphere_vols[:-1]
# SN / (observer year) in shell
shell_snrate = np.array([shell_vols[i] *
ratefunc(z_binctrs[i]) / (1.+z_binctrs[i])
for i in range(z_bins)])
# SN / (observer year) within z_binedges
vol_snrate = np.zeros_like(z_binedges)
vol_snrate[1:] = np.add.accumulate(shell_snrate)
# Create a ppf (inverse cdf). We'll use this later to get
# a random SN redshift from the distribution.
snrate_cdf = vol_snrate / vol_snrate[-1]
snrate_ppf = Spline1d(snrate_cdf, z_binedges, k=1)
# Total number of SNe to simulate.
nsim = vol_snrate[-1] * (time/365.25) * (area/WHOLESKY_SQDEG)
for i in range(random.poisson(nsim)):
yield float(snrate_ppf(random.random()))
OBSERVATIONS_ALIASES = OrderedDict([
('time', set(['time', 'date', 'jd', 'mjd', 'mjdobs', 'mjd_obs'])),
('band', set(['band', 'bandpass', 'filter', 'flt'])),
('zp', set(['zp', 'zpt', 'zeropoint', 'zero_point'])),
('zpsys', set(['zpsys', 'zpmagsys', 'magsys'])),
('gain', set(['gain'])),
('skynoise', set(['skynoise']))
])
OBSERVATIONS_REQUIRED_ALIASES = ('time', 'band', 'zp', 'zpsys', 'gain',
'skynoise')
def realize_lcs(observations, model, params, thresh=None,
trim_observations=False, scatter=True):
"""Realize data for a set of SNe given a set of observations.
Parameters
----------
observations : `~astropy.table.Table` or `~numpy.ndarray`
Table of observations. Must contain the following column names:
``band``, ``time``, ``zp``, ``zpsys``, ``gain``, ``skynoise``.
model : `sncosmo.Model`
The model to use in the simulation.
params : list (or generator) of dict
List of parameters to feed to the model for realizing each light curve.
thresh : float, optional
If given, light curves are skipped (not returned) if none of the data
points have signal-to-noise greater than ``thresh``.
trim_observations : bool, optional
If True, only observations with times between
``model.mintime()`` and ``model.maxtime()`` are included in
result table for each SN. Default is False.
scatter : bool, optional
If True, the ``flux`` value of the realized data is calculated by
adding a random number drawn from a Normal Distribution with a
standard deviation equal to the ``fluxerror`` of the observation to
the bandflux value of the observation calculated from model. Default
is True.
Returns
-------
sne : list of `~astropy.table.Table`
Table of realized data for each item in ``params``.
Notes
-----
``skynoise`` is the image background contribution to the flux measurement
error (in units corresponding to the specified zeropoint and zeropoint
system). To get the error on a given measurement, ``skynoise`` is added
in quadrature to the photon noise from the source.
It is left up to the user to calculate ``skynoise`` as they see fit as the
details depend on how photometry is done and possibly how the PSF is
modeled. As a simple example, assuming a Gaussian PSF, and perfect
PSF photometry, ``skynoise`` would be ``4 * pi * sigma_PSF * sigma_pixel``
where ``sigma_PSF`` is the standard deviation of the PSF in pixels and
``sigma_pixel`` is the background noise in a single pixel in counts.
"""
RESULT_COLNAMES = ('time', 'band', 'flux', 'fluxerr', 'zp', 'zpsys')
lcs = []
# Copy model so we don't mess up the user's model.
model = copy.copy(model)
# get observations as a Table
if not isinstance(observations, Table):
if isinstance(observations, np.ndarray):
observations = Table(observations)
else:
raise ValueError("observations not understood")
# map column name aliases
colname = alias_map(observations.colnames, OBSERVATIONS_ALIASES,
required=OBSERVATIONS_REQUIRED_ALIASES)
# result dtype used when there are no observations
band_dtype = observations[colname['band']].dtype
zpsys_dtype = observations[colname['zpsys']].dtype
result_dtype = ('f8', band_dtype, 'f8', 'f8', 'f8', zpsys_dtype)
for p in params:
model.set(**p)
# Select times for output that fall within tmin and tmax of the model
if trim_observations:
mask = ((observations[colname['time']] > model.mintime()) &
(observations[colname['time']] < model.maxtime()))
snobs = observations[mask]
else:
snobs = observations
# explicitly detect no observations and add an empty table
if len(snobs) == 0:
if thresh is None:
lcs.append(Table(names=RESULT_COLNAMES,
dtype=result_dtype, meta=p))
continue
flux = model.bandflux(snobs[colname['band']],
snobs[colname['time']],
zp=snobs[colname['zp']],
zpsys=snobs[colname['zpsys']])
fluxerr = np.sqrt(snobs[colname['skynoise']]**2 +
np.abs(flux) / snobs[colname['gain']])
# Scatter fluxes by the fluxerr
# np.atleast_1d is necessary here because of an apparent bug in
# np.random.normal: when the inputs are both length 1 arrays,
# the output is a Python float!
if scatter:
flux = np.atleast_1d(np.random.normal(flux, fluxerr))
# Check if any of the fluxes are significant
if thresh is not None and not np.any(flux/fluxerr > thresh):
continue
data = [snobs[colname['time']], snobs[colname['band']], flux, fluxerr,
snobs[colname['zp']], snobs[colname['zpsys']]]
lcs.append(Table(data, names=RESULT_COLNAMES, meta=p))
return lcs
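# Minimal usage sketch (added for illustration; the band names, zeropoints and
# PSF numbers below are assumptions, not values from this module). ``model`` is
# any pre-configured model instance; the ``skynoise`` column follows the
# Gaussian-PSF rule of thumb from the realize_lcs notes,
# 4 * pi * sigma_PSF * sigma_pixel.
def _example_realize_lcs(model, sigma_psf=1.5, sigma_pixel=10.0):
    skynoise = 4. * np.pi * sigma_psf * sigma_pixel
    obs = Table({'time': [55070., 55075., 55080.],
                 'band': ['desg', 'desr', 'desi'],
                 'gain': [1., 1., 1.],
                 'skynoise': [skynoise, skynoise, skynoise],
                 'zp': [25., 25., 25.],
                 'zpsys': ['ab', 'ab', 'ab']})
    params = [{'z': 0.15, 't0': 55075.}]
    return realize_lcs(obs, model, params, trim_observations=True)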
|
py | 1a468111700d6df15ef5549d3e5faa6d7905990a | from selenium import webdriver
from fixture.session import SessionHelper
from fixture.group import GroupHelper
from fixture.contact import ContactHelper
class Application:
def __init__(self):
self.wd = webdriver.Firefox()
self.wd.implicitly_wait(5)
self.session = SessionHelper(self)
self.group = GroupHelper(self)
self.contact = ContactHelper(self)
def open_home_page(self):
wd = self.wd
wd.get("http://localhost/addressbook/")
def destroy(self):
self.wd.quit()
|
py | 1a4682becc7b77e3d07da2e8dee57ad3c607d60f |
''' AOS TEST
# -----------------------------------------------------------
def evaluate_illuminance_score_result_file_set_tune_weights( updateable_line, camerasVertices, triangles, shape_name ):
global cameraPos, scale, logged_score_to_file, loggable
best_LEDs = file_io.read_in_csv_file_to_list_of_lists(LED_SCORE_LOG_FILE, skip_header=False)
led_index_set = get_sorted_column_from_result_file( best_LEDs, CSV_METRIC_COLUMN_INDEX=3, QTY_OF_BEST_LEDS_REQUIRED=44 )
all_leds = draw_dome( scale , True )
print len(led_index_set)
led_vertex_weights = np.ones(len(all_leds))
faces = dome_obj_data.get_dome_faces()
led_vertex_set = []
for led_num in led_index_set:
led_vertex_set.append( all_leds[led_num] )
# aos test
print 'RUNNING FULL LAMBERTIAN EVALS'
multi_surfaces = get_full_set_surface_evaluations(triangles, all_leds)
#print(multi_surfaces)
# try to smooth the surface variations
for i in range(1000):
#sum the lambertian scores from each led that is selected
lam_scores = np.zeros(len(triangles));
for led_num in led_index_set:
for t in range(len(triangles)):
lam_scores[t] += multi_surfaces[led_num,t] * led_vertex_weights[led_num]
#get the mean value and find the face furthest from the mean
mean = np.mean(lam_scores)
worst_face=0
worst_face_delta=0
for t in range(len(triangles)):
delta = np.absolute(lam_scores[t] - mean)
if ( delta > worst_face_delta):
worst_face_delta = delta
worst_face = t
# process the 3 vertices on the worst face
delta = mean - lam_scores[worst_face]
vertices = faces[worst_face]
#for vertex in range (1,4):
#print vertices[vertex] - 1
# led_vertex_weights[vertices[vertex] -1] += 0.01 * np.sign(delta)
max = 0
max_led=0
for led_num in led_index_set:
if(multi_surfaces[led_num,worst_face] > max):
if(led_vertex_weights[led_num] > 0.6):
max = multi_surfaces[led_num,worst_face]
max_led = led_num
led_vertex_weights[max_led] += 0.01* np.sign(delta)
#print lam_scores
print np.std(lam_scores)
print np.mean(lam_scores)
print worst_face_delta
print worst_face
# end aos test
print led_vertex_weights
'''
# PDS7 REFACTORING
class MeasureIlluminanceTuneWeights_AOS(EvaluatorGeneric):
def evaluate( self, updateable_line, camerasVertices, triangles, shape_name ):
global cameraPos, scale, logged_score_to_file, loggable
best_LEDs = file_io.read_in_csv_file_to_list_of_lists(LED_SCORE_LOG_FILE, skip_header=False)
led_index_set = get_sorted_column_from_result_file( best_LEDs, CSV_METRIC_COLUMN_INDEX=3, QTY_OF_BEST_LEDS_REQUIRED=44 )
all_leds = draw_dome( scale , True )
print len(led_index_set)
print led_index_set
led_vertex_weights = np.zeros(len(all_leds))
for led in led_index_set:
# print led
led_vertex_weights[led] = 1.0
faces = dome_obj_data.get_dome_faces()
led_vertex_set = []
for led_num in led_index_set:
led_vertex_set.append( all_leds[led_num] )
# aos test
print 'RUNNING LED WEIGHT EVALS'
multi_surfaces = get_full_set_surface_evaluations(triangles, all_leds)
# print(multi_surfaces)
best_weighting = np.copy(led_vertex_weights)
best_std_dev = 10000
# try to smooth the surface variations
for i in range(1000):
# sum the lambertian scores from each led that is selected
lam_scores = np.zeros(len(triangles));
for led_num in led_index_set:
for t in range(len(triangles)):
lam_scores[t] += multi_surfaces[led_num ,t] * led_vertex_weights[led_num]
dev = np.std(lam_scores)
if(dev < best_std_dev):
best_std_dev = dev
best_weighting = np.copy(led_vertex_weights)
else:
led_vertex_weights = np.copy(best_weighting)
rand = rnd.randint(0,len(all_leds )/2)
led_vertex_weights[rand] -=0.01
led_vertex_weights[91 - rand] -=0.01
# print lam_scores
print np.std(lam_scores)
# print np.mean(lam_scores)
# end aos test
print best_weighting
'''
# -----------------------------------------------------------
def evaluate_illuminance_score_multiple_result_file_set( updateable_line, camerasVertices, triangles, shape_name , count, kwords):
"""
Based on an initial set of LED positions.
Randomly swap-in/out 1 from top-half and 1 from bottom-half of dome.
Continue until the count is exceeded.
Measure standard deviation of surface illuminance, while measuring lambertian score from current LED set.
Report lowest standard deviation score and its LED index set.
"""
global cameraPos, scale, logged_score_to_file, loggable
best_LEDs = file_io.read_in_csv_file_to_list_of_lists( kwords['LED_SCORE_LOG_FILE'], skip_header=False )
led_index_set = get_sorted_column_from_result_file( best_LEDs,
kwords['CSV_METRIC_COLUMN_INDEX'],
kwords['QTY_OF_BEST_LEDS_REQUIRED']
)
# store the active leds
selected_leds = np.zeros(len(kwords['all_leds']))
for led_num in led_index_set:
selected_leds[led_num] = 1
multi_surfaces = get_full_set_surface_evaluations(triangles, kwords['all_leds'])
best_stddev = 100
for i in range(count):
#sum the lambertian scores from each led that is selected
num_tri = len(triangles);
lam_scores = np.zeros(num_tri);
for led_num in range (len(selected_leds)):
if(selected_leds[led_num] == 1):
for t in range(num_tri):
lam_scores[t] += multi_surfaces[led_num,t]
stddev = np.std(lam_scores)
if(stddev < best_stddev):
best_stddev = stddev
active_leds = []
for led_num in range (len(selected_leds)):
if(selected_leds[led_num] == 1):
active_leds.append(led_num)
print str(stddev) + "for led set:"+str(active_leds) + ' (leds: ' + str(np.sum(selected_leds)) + ')'
else:
#print str(stddev) + "for led set:"+str(active_leds) + ' (leds: ' + str(np.sum(selected_leds)) + ')'
if(i%100 == 0): print '(' + str(i) + ')' + str(stddev)
#modify the led pattern
#get random positions of active and inactive leds, and toggle them (preserving total active count)
#mirror the top and bottom halves to preserve symmetry
active = rnd.randint(0,21)
inactive = rnd.randint(1,22)
jump_count = 0
inact_index =0
while (jump_count < inactive):
inact_index += 1
jump_count += 1 - selected_leds[inact_index]
#print 'inact ' + str(inact_index) + ' (' + str(inactive) + ')'
jump_count = -1
act_index =0
while (jump_count < active):
act_index += 1
jump_count += selected_leds[act_index]
#print 'act ' + str(act_index)
selected_leds[inact_index] = 1
selected_leds[91 - inact_index] = 1
selected_leds[act_index] = 0
selected_leds[91 - act_index] = 0
#print sum(selected_leds)
'''
# -----------------------------------------------------------
def evaluate_illuminance_score(updateable_line, camerasVertices, triangles, shape_name ):
"""
Measure illuminance of surfaces.
Create a set of randomly selected LED sets.
Ignore LED sets that do not illiminate all sufaces of the spherical model.
Report the total lambertian score, measured per LED per hit surface.
Report the standard deviation of lambertian scores over the set of spherical model surfaces.
"""
global cameraPos, scale, logged_score_to_file, loggable
# BEST_LED_DATAq = file_io.read_in_csv_file_to_list_of_lists(LED_SCORE_LOG_FILE, skip_header=False)
# #best_LEDs = get_best_leds_from_file()
# print(BEST_LED_DATAq)
# print(len(BEST_LED_DATAq))
# best_LEDsq = [x[0] for x in BEST_LED_DATAq if x[3] == '1']
# print(best_LEDsq)
# print(len(best_LEDsq))
string = []
drawTextString = []
l,r,d = 0,0,0
all_leds = draw_dome( scale , False )
print
sequence_size = 44
max_depth = 1
led_sequence = all_leds[ :91] # [4,7,10,11,12,69,72,81,86,87,14,15,16,17,19,21,22,23,30,33,54,56,57,59,62,65,66,68,75,78,26,29,32,35,38,39,40,42,43,46,49,51,52,55,58,60,61,63,64,67,24,25,27,28,31,34,36,37,41,44,45,47,48,50,53,70,73,76,79,82,8,9,13,18,20,71,74,77,80,83,1,2,3,5,6,84,85,88,90,91,0,89];
index_stop_search = 1
led_sets = monte_carlo_sequences.get_led_sets_selected(all_leds, led_sequence, sequence_size, max_depth, index_stop_search)
candidate_led_sets = []
led_sets_compute = 0
progress = 0
led_set = []
for index in led_sequence[:44]:
led_set.append(all_leds[index])
print(stdev_selected_set(triangles, led_set))
# ---------------------------------------------------------------
from service import GracefulShutdown
GracefulShutdown.do_shutdown()
# ---------------------------------------------------------------
startTime = currentMillis()
if not DO_EVALUATIONS:
leds = [updateable_line.get_point()] # Use reflections from single light selected by arrow-keys.
triangles = TARGET_TRIANGLES[:10]
shape_name = "Test Tri" led_sets = led_sets[0]
else:
file_io.write_to_csv(["led_set", "total_set_lambertian_score", "standard deviation"], "../",
"lambertian_led_sets_search.csv")
# Note: One led hits 50% of surfaces.
for leds in led_sets: # For all sets of LED positions with set magnitude 42.
surfaces = get_surface_evaluations(triangles, leds)
# print the progress of the total computation
led_sets_compute+=1
percent = int(led_sets_compute *100 / len(led_sets))
if( percent > progress):
progress = percent
print 'Progress : {} %'.format(progress)
if are_all_surfaces_hit(surfaces) == False:
break
else:
# if yes we can have the total lambertian score and standard deviation for this set and write it in the csv file
row = write_led_set_lambertian_scores_appended_result_file(all_leds, surfaces, leds)
candidate_led_sets.append(row)
print(str(len(
candidate_led_sets)) + " sequences computed")
candidate_led_sets = sorted(candidate_led_sets, key=lambda candidate_led_sets: candidate_led_sets[2])
best_candidate_leds_index_set = candidate_led_sets[0][0]
write_led_result_file(all_leds, best_candidate_leds_index_set)
'''
# calculate the target surface illumination for all led for all faces
def get_full_set_surface_evaluations(triangles, leds):
surfaces = np.zeros((len(leds),len(triangles)))
for led_num in range(0, len(leds)) : # For all of the leds:
for tri_num in range(0,len(triangles)):
tri = triangles[tri_num]
make_triangle_face( tri )
c = find_center_of_triangle( tri )
n1 = find_perpendicular_of_triangle( tri ) # Get normal of current tri plane.
l, r = reflect_no_rotate( c, leds[led_num], n1 )
""" usage of l and r require a prior-translate to c.
"""
if is_front_facing_reflection(tri, l, r): #Also see: __debug_is_cullable_reflection(tri, OTri, l, r, c )
draw_incident_ray(c, l)
lamb_diffuse = reflect_models.Lambert_diffuse( incident_vector=l, surface_norm=n1, intensity=1.0 )
score = lamb_diffuse # Get Lambertian intensity value (x1) per surface per led. --> [surface] = accumulated score.
surfaces[led_num][tri_num] += score
return surfaces
def stdev_selected_set(triangles, leds):
surfaces = get_surface_evaluations(triangles, leds)
stdev_set = 0
all_surfaces_hit = 1
for score in surfaces:
if (score == 0):
all_surfaces_hit = 0
break
#if yes we can have the total lambertian score and standard deviation for this set and write it in the csv file
if(all_surfaces_hit == 1):
total_set_lambertian_score = np.sum(surfaces)
stdev_set = np.std(surfaces)
return stdev_set
'''
|
py | 1a4683a53ed65389564185d852fc91c026dce5b1 | import os
import re
import sys
import glob
import json
import time
import logging
import threading
import subprocess
import six
import base64
from multiprocessing import Process, Queue
try:
from shlex import quote as cmd_quote
except ImportError:
from pipes import quote as cmd_quote # for Python 2.7
from localstack import config
from localstack.utils import bootstrap
from localstack.utils.aws import aws_stack
from localstack.utils.common import (
CaptureOutput, FuncThread, TMP_FILES, short_uid, save_file, rm_rf, in_docker,
to_str, to_bytes, run, cp_r, json_safe, get_free_tcp_port)
from localstack.services.install import INSTALL_PATH_LOCALSTACK_FAT_JAR
from localstack.utils.aws.dead_letter_queue import lambda_error_to_dead_letter_queue, sqs_error_to_dead_letter_queue
from localstack.utils.cloudwatch.cloudwatch_util import store_cloudwatch_logs, cloudwatched
# constants
LAMBDA_EXECUTOR_JAR = INSTALL_PATH_LOCALSTACK_FAT_JAR
LAMBDA_EXECUTOR_CLASS = 'cloud.localstack.LambdaExecutor'
EVENT_FILE_PATTERN = '%s/lambda.event.*.json' % config.TMP_FOLDER
LAMBDA_RUNTIME_PYTHON27 = 'python2.7'
LAMBDA_RUNTIME_PYTHON36 = 'python3.6'
LAMBDA_RUNTIME_PYTHON37 = 'python3.7'
LAMBDA_RUNTIME_PYTHON38 = 'python3.8'
LAMBDA_RUNTIME_NODEJS = 'nodejs'
LAMBDA_RUNTIME_NODEJS43 = 'nodejs4.3'
LAMBDA_RUNTIME_NODEJS610 = 'nodejs6.10'
LAMBDA_RUNTIME_NODEJS810 = 'nodejs8.10'
LAMBDA_RUNTIME_NODEJS10X = 'nodejs10.x'
LAMBDA_RUNTIME_NODEJS12X = 'nodejs12.x'
LAMBDA_RUNTIME_JAVA8 = 'java8'
LAMBDA_RUNTIME_JAVA11 = 'java11'
LAMBDA_RUNTIME_DOTNETCORE2 = 'dotnetcore2.0'
LAMBDA_RUNTIME_DOTNETCORE21 = 'dotnetcore2.1'
LAMBDA_RUNTIME_DOTNETCORE31 = 'dotnetcore3.1'
LAMBDA_RUNTIME_GOLANG = 'go1.x'
LAMBDA_RUNTIME_RUBY = 'ruby'
LAMBDA_RUNTIME_RUBY25 = 'ruby2.5'
LAMBDA_RUNTIME_PROVIDED = 'provided'
LAMBDA_SERVER_UNIQUE_PORTS = 500
LAMBDA_SERVER_PORT_OFFSET = 5000
LAMBDA_API_UNIQUE_PORTS = 500
LAMBDA_API_PORT_OFFSET = 9000
# logger
LOG = logging.getLogger(__name__)
# maximum time a pre-allocated container can sit idle before getting killed
MAX_CONTAINER_IDLE_TIME_MS = 600 * 1000
# SQS event source name
EVENT_SOURCE_SQS = 'aws:sqs'
# IP address of main Docker container (lazily initialized)
DOCKER_MAIN_CONTAINER_IP = None
# whether to use our custom Java executor, or the default from lambci
# TODO: deprecated, should be removed in the future
USE_CUSTOM_JAVA_EXECUTOR = False
def get_from_event(event, key):
try:
return event['Records'][0][key]
except KeyError:
return None
def is_java_lambda(lambda_details):
runtime = getattr(lambda_details, 'runtime', lambda_details)
return runtime in [LAMBDA_RUNTIME_JAVA8, LAMBDA_RUNTIME_JAVA11]
def is_nodejs_runtime(lambda_details):
runtime = getattr(lambda_details, 'runtime', lambda_details)
return runtime.startswith('nodejs')
def _store_logs(func_details, log_output, invocation_time=None, container_id=None):
log_group_name = '/aws/lambda/%s' % func_details.name()
container_id = container_id or short_uid()
invocation_time = invocation_time or int(time.time() * 1000)
invocation_time_secs = int(invocation_time / 1000)
time_str = time.strftime('%Y/%m/%d', time.gmtime(invocation_time_secs))
log_stream_name = '%s/[LATEST]%s' % (time_str, container_id)
return store_cloudwatch_logs(log_group_name, log_stream_name, log_output, invocation_time)
def get_main_endpoint_from_container():
global DOCKER_MAIN_CONTAINER_IP
if DOCKER_MAIN_CONTAINER_IP is None:
DOCKER_MAIN_CONTAINER_IP = False
try:
if in_docker():
DOCKER_MAIN_CONTAINER_IP = bootstrap.get_main_container_ip()
LOG.info('Determined main container target IP: %s' % DOCKER_MAIN_CONTAINER_IP)
except Exception as e:
container_name = bootstrap.get_main_container_name()
LOG.info('Unable to get IP address of main Docker container "%s": %s' %
(container_name, e))
# return main container IP, or fall back to Docker host (bridge IP, or host DNS address)
return DOCKER_MAIN_CONTAINER_IP or config.DOCKER_HOST_FROM_CONTAINER
class LambdaExecutor(object):
""" Base class for Lambda executors. Subclasses must overwrite the _execute method """
def __init__(self):
# keeps track of each function arn and the last time it was invoked
self.function_invoke_times = {}
def _prepare_environment(self, func_details):
# setup environment pre-defined variables for docker environment
result = func_details.envvars.copy()
# injecting aws credentials into docker environment if not provided
aws_stack.inject_test_credentials_into_env(result)
return result
def execute(self, func_arn, func_details, event, context=None, version=None,
asynchronous=False, callback=None):
def do_execute(*args):
@cloudwatched('lambda')
def _run(func_arn=None):
# set the invocation time in milliseconds
invocation_time = int(time.time() * 1000)
# start the execution
raised_error = None
result = None
dlq_sent = None
try:
result = self._execute(func_arn, func_details, event, context, version)
except Exception as e:
raised_error = e
if asynchronous:
if get_from_event(event, 'eventSource') == EVENT_SOURCE_SQS:
sqs_queue_arn = get_from_event(event, 'eventSourceARN')
if sqs_queue_arn:
# event source is SQS, send event back to dead letter queue
dlq_sent = sqs_error_to_dead_letter_queue(sqs_queue_arn, event, e)
else:
# event source is not SQS, send back to lambda dead letter queue
lambda_error_to_dead_letter_queue(func_details, event, e)
raise e
finally:
self.function_invoke_times[func_arn] = invocation_time
callback and callback(result, func_arn, event, error=raised_error, dlq_sent=dlq_sent)
# return final result
return result
return _run(func_arn=func_arn)
# Inform users about asynchronous mode of the lambda execution.
if asynchronous:
LOG.debug('Lambda executed in Event (asynchronous) mode, no response will be returned to caller')
FuncThread(do_execute).start()
return None, 'Lambda executed asynchronously.'
return do_execute()
def _execute(self, func_arn, func_details, event, context=None, version=None):
""" This method must be overwritten by subclasses. """
raise Exception('Not implemented.')
def startup(self):
pass
def cleanup(self, arn=None):
pass
def run_lambda_executor(self, cmd, event=None, func_details=None, env_vars={}):
process = run(cmd, asynchronous=True, stderr=subprocess.PIPE, outfile=subprocess.PIPE,
env_vars=env_vars, stdin=True)
result, log_output = process.communicate(input=event)
try:
result = to_str(result).strip()
except Exception:
pass
log_output = to_str(log_output).strip()
return_code = process.returncode
# Note: The user's code may have been logging to stderr, in which case the logs
# will be part of the "result" variable here. Hence, make sure that we extract
# only the *last* line of "result" and consider anything above that as log output.
if isinstance(result, six.string_types) and '\n' in result:
additional_logs, _, result = result.rpartition('\n')
log_output += '\n%s' % additional_logs
log_formatted = log_output.strip().replace('\n', '\n> ')
func_arn = func_details and func_details.arn()
LOG.debug('Lambda %s result / log output:\n%s\n> %s' % (func_arn, result.strip(), log_formatted))
# store log output - TODO get live logs from `process` above?
_store_logs(func_details, log_output)
if return_code != 0:
raise Exception('Lambda process returned error status code: %s. Result: %s. Output:\n%s' %
(return_code, result, log_output))
return result
class ContainerInfo:
""" Contains basic information about a docker container. """
def __init__(self, name, entry_point):
self.name = name
self.entry_point = entry_point
class LambdaExecutorContainers(LambdaExecutor):
""" Abstract executor class for executing Lambda functions in Docker containers """
def prepare_execution(self, func_arn, env_vars, runtime, command, handler, lambda_cwd):
raise Exception('Not implemented')
def _docker_cmd(self):
""" Return the string to be used for running Docker commands. """
return config.DOCKER_CMD
def prepare_event(self, environment, event_body):
""" Return the event as a stdin string. """
# amend the environment variables for execution
environment['AWS_LAMBDA_EVENT_BODY'] = event_body
return None
def _execute(self, func_arn, func_details, event, context=None, version=None):
lambda_cwd = func_details.cwd
runtime = func_details.runtime
handler = func_details.handler
environment = self._prepare_environment(func_details)
# configure USE_SSL in environment
if config.USE_SSL:
environment['USE_SSL'] = '1'
# prepare event body
if not event:
LOG.warning('Empty event body specified for invocation of Lambda "%s"' % func_arn)
event = {}
event_body = json.dumps(json_safe(event))
stdin = self.prepare_event(environment, event_body)
main_endpoint = get_main_endpoint_from_container()
environment['LOCALSTACK_HOSTNAME'] = main_endpoint
environment['_HANDLER'] = handler
if os.environ.get('HTTP_PROXY'):
environment['HTTP_PROXY'] = os.environ['HTTP_PROXY']
if func_details.timeout:
environment['AWS_LAMBDA_FUNCTION_TIMEOUT'] = str(func_details.timeout)
if context:
environment['AWS_LAMBDA_FUNCTION_NAME'] = context.function_name
environment['AWS_LAMBDA_FUNCTION_VERSION'] = context.function_version
environment['AWS_LAMBDA_FUNCTION_INVOKED_ARN'] = context.invoked_function_arn
environment['AWS_LAMBDA_COGNITO_IDENTITY'] = json.dumps(context.cognito_identity or {})
if context.client_context is not None:
environment['AWS_LAMBDA_CLIENT_CONTEXT'] = json.dumps(to_str(
base64.b64decode(to_bytes(context.client_context))))
# custom command to execute in the container
command = ''
events_file = ''
if USE_CUSTOM_JAVA_EXECUTOR and is_java_lambda(runtime):
# if running a Java Lambda with our custom executor, set up classpath arguments
java_opts = Util.get_java_opts()
stdin = None
# copy executor jar into temp directory
target_file = os.path.join(lambda_cwd, os.path.basename(LAMBDA_EXECUTOR_JAR))
if not os.path.exists(target_file):
cp_r(LAMBDA_EXECUTOR_JAR, target_file)
# TODO cleanup once we have custom Java Docker image
taskdir = '/var/task'
events_file = '_lambda.events.%s.json' % short_uid()
save_file(os.path.join(lambda_cwd, events_file), event_body)
classpath = Util.get_java_classpath(target_file)
command = ("bash -c 'cd %s; java %s -cp \"%s\" \"%s\" \"%s\" \"%s\"'" %
(taskdir, java_opts, classpath, LAMBDA_EXECUTOR_CLASS, handler, events_file))
# accept any self-signed certificates for outgoing calls from the Lambda
if is_nodejs_runtime(runtime):
environment['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'
# determine the command to be executed (implemented by subclasses)
cmd = self.prepare_execution(func_arn, environment, runtime, command, handler, lambda_cwd)
# lambci writes the Lambda result to stdout and logs to stderr, fetch it from there!
LOG.info('Running lambda cmd: %s' % cmd)
result = self.run_lambda_executor(cmd, stdin, env_vars=environment, func_details=func_details)
# clean up events file
events_file and os.path.exists(events_file) and rm_rf(events_file)
return result
class LambdaExecutorReuseContainers(LambdaExecutorContainers):
""" Executor class for executing Lambda functions in re-usable Docker containers """
def __init__(self):
super(LambdaExecutorReuseContainers, self).__init__()
# locking thread for creation/destruction of docker containers.
self.docker_container_lock = threading.RLock()
# On each invocation we try to construct a port unlikely to conflict
# with a previously invoked lambda function. This is a problem with at
# least the lambci/lambda:go1.x container, which execs a go program that
# attempts to bind to the same default port.
self.next_port = 0
self.max_port = LAMBDA_SERVER_UNIQUE_PORTS
self.port_offset = LAMBDA_SERVER_PORT_OFFSET
def prepare_execution(self, func_arn, env_vars, runtime, command, handler, lambda_cwd):
# check whether the Lambda has been invoked before
has_been_invoked_before = func_arn in self.function_invoke_times
# Choose a port for this invocation
with self.docker_container_lock:
env_vars['_LAMBDA_SERVER_PORT'] = str(self.next_port + self.port_offset)
self.next_port = (self.next_port + 1) % self.max_port
# create/verify the docker container is running.
LOG.debug('Priming docker container with runtime "%s" and arn "%s".', runtime, func_arn)
container_info = self.prime_docker_container(runtime, func_arn, env_vars.items(), lambda_cwd)
# Note: currently "docker exec" does not support --env-file, i.e., environment variables can only be
# passed directly on the command line, using "-e" below. TODO: Update this code once --env-file is
# available for docker exec, to better support very large Lambda events (very long environment values)
exec_env_vars = ' '.join(['-e {}="${}"'.format(k, k) for (k, v) in env_vars.items()])
if not command:
command = '%s %s' % (container_info.entry_point, handler)
# determine files to be copied into the container
copy_command = ''
docker_cmd = self._docker_cmd()
if not has_been_invoked_before and config.LAMBDA_REMOTE_DOCKER:
# if this is the first invocation: copy the entire folder into the container
copy_command = '%s cp "%s/." "%s:/var/task";' % (docker_cmd, lambda_cwd, container_info.name)
cmd = (
'%s'
' %s exec'
' %s' # env variables
' %s' # container name
' %s' # run cmd
) % (copy_command, docker_cmd, exec_env_vars, container_info.name, command)
LOG.debug('Command for docker-reuse Lambda executor: %s' % cmd)
return cmd
def startup(self):
self.cleanup()
# start a process to remove idle containers
if config.LAMBDA_REMOVE_CONTAINERS:
self.start_idle_container_destroyer_interval()
def cleanup(self, arn=None):
if arn:
self.function_invoke_times.pop(arn, None)
return self.destroy_docker_container(arn)
self.function_invoke_times = {}
return self.destroy_existing_docker_containers()
def prime_docker_container(self, runtime, func_arn, env_vars, lambda_cwd):
"""
Prepares a persistent docker container for a specific function.
:param runtime: Lambda runtime environment: python2.7, nodejs6.10, etc.
:param func_arn: The ARN of the lambda function.
:param env_vars: The environment variables for the lambda.
:param lambda_cwd: The local directory containing the code for the lambda function.
:return: ContainerInfo class containing the container name and default entry point.
"""
with self.docker_container_lock:
# Get the container name and id.
container_name = self.get_container_name(func_arn)
docker_cmd = self._docker_cmd()
status = self.get_docker_container_status(func_arn)
LOG.debug('Priming docker container (status "%s"): %s' % (status, container_name))
docker_image = Util.docker_image_for_runtime(runtime)
rm_flag = Util.get_docker_remove_flag()
# Container is not running or doesn't exist.
if status < 1:
# Make sure the container does not exist in any form/state.
self.destroy_docker_container(func_arn)
env_vars_str = ' '.join(['-e {}={}'.format(k, cmd_quote(v)) for (k, v) in env_vars])
network = config.LAMBDA_DOCKER_NETWORK
network_str = '--network="%s"' % network if network else ''
dns = config.LAMBDA_DOCKER_DNS
dns_str = '--dns="%s"' % dns if dns else ''
mount_volume = not config.LAMBDA_REMOTE_DOCKER
lambda_cwd_on_host = Util.get_host_path_for_path_in_docker(lambda_cwd)
if (':' in lambda_cwd and '\\' in lambda_cwd):
lambda_cwd_on_host = Util.format_windows_path(lambda_cwd_on_host)
mount_volume_str = '-v "%s":/var/task' % lambda_cwd_on_host if mount_volume else ''
# Create and start the container
LOG.debug('Creating container: %s' % container_name)
cmd = (
'%s create'
' %s' # --rm flag
' --name "%s"'
' --entrypoint /bin/bash' # Load bash when it starts.
' %s'
' --interactive' # Keeps the container running bash.
' -e AWS_LAMBDA_EVENT_BODY="$AWS_LAMBDA_EVENT_BODY"'
' -e HOSTNAME="$HOSTNAME"'
' -e LOCALSTACK_HOSTNAME="$LOCALSTACK_HOSTNAME"'
' %s' # env_vars
' %s' # network
' %s' # dns
' %s'
) % (docker_cmd, rm_flag, container_name, mount_volume_str,
env_vars_str, network_str, dns_str, docker_image)
LOG.debug(cmd)
run(cmd)
if not mount_volume:
LOG.debug('Copying files to container "%s" from "%s".' % (container_name, lambda_cwd))
cmd = (
'%s cp'
' "%s/." "%s:/var/task"'
) % (docker_cmd, lambda_cwd, container_name)
LOG.debug(cmd)
run(cmd)
LOG.debug('Starting container: %s' % container_name)
cmd = '%s start %s' % (docker_cmd, container_name)
LOG.debug(cmd)
run(cmd)
# give the container some time to start up
time.sleep(1)
# Get the entry point for the image.
LOG.debug('Getting the entrypoint for image: %s' % (docker_image))
cmd = (
'%s image inspect'
' --format="{{ .ContainerConfig.Entrypoint }}"'
' %s'
) % (docker_cmd, docker_image)
LOG.debug(cmd)
run_result = run(cmd)
entry_point = run_result.strip('[]\n\r ')
container_network = self.get_docker_container_network(func_arn)
LOG.debug('Using entrypoint "%s" for container "%s" on network "%s".'
% (entry_point, container_name, container_network))
return ContainerInfo(container_name, entry_point)
def destroy_docker_container(self, func_arn):
"""
Stops and/or removes a docker container for a specific lambda function ARN.
:param func_arn: The ARN of the lambda function.
:return: None
"""
with self.docker_container_lock:
status = self.get_docker_container_status(func_arn)
docker_cmd = self._docker_cmd()
# Get the container name and id.
container_name = self.get_container_name(func_arn)
if status == 1:
LOG.debug('Stopping container: %s' % container_name)
cmd = (
'%s stop -t0 %s'
) % (docker_cmd, container_name)
LOG.debug(cmd)
run(cmd, asynchronous=False, stderr=subprocess.PIPE, outfile=subprocess.PIPE)
status = self.get_docker_container_status(func_arn)
if status == -1:
LOG.debug('Removing container: %s' % container_name)
cmd = (
'%s rm %s'
) % (docker_cmd, container_name)
LOG.debug(cmd)
run(cmd, asynchronous=False, stderr=subprocess.PIPE, outfile=subprocess.PIPE)
def get_all_container_names(self):
"""
Returns a list of container names for lambda containers.
:return: A String[] localstack docker container names for each function.
"""
with self.docker_container_lock:
LOG.debug('Getting all lambda containers names.')
cmd = '%s ps -a --filter="name=localstack_lambda_*" --format "{{.Names}}"' % self._docker_cmd()
LOG.debug(cmd)
cmd_result = run(cmd, asynchronous=False, stderr=subprocess.PIPE, outfile=subprocess.PIPE).strip()
if len(cmd_result) > 0:
container_names = cmd_result.split('\n')
else:
container_names = []
return container_names
def destroy_existing_docker_containers(self):
"""
Stops and/or removes all lambda docker containers for localstack.
:return: None
"""
with self.docker_container_lock:
container_names = self.get_all_container_names()
LOG.debug('Removing %d containers.' % len(container_names))
for container_name in container_names:
cmd = '%s rm -f %s' % (self._docker_cmd(), container_name)
LOG.debug(cmd)
run(cmd, asynchronous=False, stderr=subprocess.PIPE, outfile=subprocess.PIPE)
def get_docker_container_status(self, func_arn):
"""
Determine the status of a docker container.
:param func_arn: The ARN of the lambda function.
:return: 1 If the container is running,
-1 if the container exists but is not running
0 if the container does not exist.
"""
with self.docker_container_lock:
# Get the container name and id.
container_name = self.get_container_name(func_arn)
# Check if the container is already running
# Note: filtering by *exact* name using regex filter '^...$' seems unstable on some
# systems. Therefore, we use a combination of filter and grep to get the results.
cmd = ("docker ps -a --filter name='%s' "
'--format "{{ .Status }} - {{ .Names }}" '
'| grep -w "%s" | cat') % (container_name, container_name)
LOG.debug('Getting status for container "%s": %s' % (container_name, cmd))
cmd_result = run(cmd)
# If the container doesn't exist. Create and start it.
container_status = cmd_result.strip()
if len(container_status) == 0:
return 0
if container_status.lower().startswith('up '):
return 1
return -1
def get_docker_container_network(self, func_arn):
"""
Determine the network of a docker container.
:param func_arn: The ARN of the lambda function.
:return: name of the container network
"""
with self.docker_container_lock:
status = self.get_docker_container_status(func_arn)
# container does not exist
if status == 0:
return ''
# Get the container name.
container_name = self.get_container_name(func_arn)
docker_cmd = self._docker_cmd()
# Get the container network
LOG.debug('Getting container network: %s' % container_name)
cmd = (
'%s inspect %s'
' --format "{{ .HostConfig.NetworkMode }}"'
) % (docker_cmd, container_name)
LOG.debug(cmd)
cmd_result = run(cmd, asynchronous=False, stderr=subprocess.PIPE, outfile=subprocess.PIPE)
container_network = cmd_result.strip()
return container_network
def idle_container_destroyer(self):
"""
Iterates though all the lambda containers and destroys any container that has
been inactive for longer than MAX_CONTAINER_IDLE_TIME_MS.
:return: None
"""
LOG.info('Checking if there are idle containers.')
current_time = int(time.time() * 1000)
for func_arn, last_run_time in dict(self.function_invoke_times).items():
duration = current_time - last_run_time
# not enough idle time has passed
if duration < MAX_CONTAINER_IDLE_TIME_MS:
continue
# container has been idle, destroy it.
self.destroy_docker_container(func_arn)
def start_idle_container_destroyer_interval(self):
"""
Starts a repeating timer that triggers start_idle_container_destroyer_interval every 60 seconds.
Thus checking for idle containers and destroying them.
:return: None
"""
self.idle_container_destroyer()
threading.Timer(60.0, self.start_idle_container_destroyer_interval).start()
def get_container_name(self, func_arn):
"""
Given a function ARN, returns a valid docker container name.
:param func_arn: The ARN of the lambda function.
:return: A docker compatible name for the arn.
"""
return 'localstack_lambda_' + re.sub(r'[^a-zA-Z0-9_.-]', '_', func_arn)
class LambdaExecutorSeparateContainers(LambdaExecutorContainers):
def __init__(self):
super(LambdaExecutorSeparateContainers, self).__init__()
self.max_port = LAMBDA_API_UNIQUE_PORTS
self.port_offset = LAMBDA_API_PORT_OFFSET
def prepare_event(self, environment, event_body):
# Tell Lambci to use STDIN for the event
environment['DOCKER_LAMBDA_USE_STDIN'] = '1'
return event_body.encode()
def prepare_execution(self, func_arn, env_vars, runtime, command, handler, lambda_cwd):
entrypoint = ''
if command:
entrypoint = ' --entrypoint ""'
else:
command = '"%s"' % handler
# add Docker Lambda env vars
network = config.LAMBDA_DOCKER_NETWORK
network_str = '--network="%s"' % network if network else ''
if network == 'host':
port = get_free_tcp_port()
env_vars['DOCKER_LAMBDA_API_PORT'] = port
env_vars['DOCKER_LAMBDA_RUNTIME_PORT'] = port
dns = config.LAMBDA_DOCKER_DNS
dns_str = '--dns="%s"' % dns if dns else ''
env_vars_string = ' '.join(['-e {}="${}"'.format(k, k) for (k, v) in env_vars.items()])
debug_docker_java_port = '-p {p}:{p}'.format(p=Util.debug_java_port) if Util.debug_java_port else ''
docker_cmd = self._docker_cmd()
docker_image = Util.docker_image_for_runtime(runtime)
rm_flag = Util.get_docker_remove_flag()
if config.LAMBDA_REMOTE_DOCKER:
cmd = (
'CONTAINER_ID="$(%s create -i'
' %s' # entrypoint
' %s' # debug_docker_java_port
' %s' # env
' %s' # network
' %s' # dns
' %s' # --rm flag
' %s %s' # image and command
')";'
'%s cp "%s/." "$CONTAINER_ID:/var/task"; '
'%s start -ai "$CONTAINER_ID";'
) % (docker_cmd, entrypoint, debug_docker_java_port,
env_vars_string, network_str, dns_str, rm_flag,
docker_image, command,
docker_cmd, lambda_cwd,
docker_cmd)
else:
lambda_cwd_on_host = Util.get_host_path_for_path_in_docker(lambda_cwd)
cmd = (
'%s run -i'
' %s -v "%s":/var/task'
' %s'
' %s' # network
' %s' # dns
' %s' # --rm flag
' %s %s'
) % (docker_cmd, entrypoint, lambda_cwd_on_host, env_vars_string,
network_str, dns_str, rm_flag, docker_image, command)
return cmd
class LambdaExecutorLocal(LambdaExecutor):
def _execute(self, func_arn, func_details, event, context=None, version=None):
lambda_cwd = func_details.cwd
environment = self._prepare_environment(func_details)
# execute the Lambda function in a forked sub-process, sync result via queue
queue = Queue()
lambda_function = func_details.function(version)
def do_execute():
# now we're executing in the child process, safe to change CWD and ENV
path_before = sys.path
try:
if lambda_cwd:
os.chdir(lambda_cwd)
sys.path = [lambda_cwd] + sys.path
if environment:
os.environ.update(environment)
result = lambda_function(event, context)
queue.put(result)
finally:
sys.path = path_before
process = Process(target=do_execute)
with CaptureOutput() as c:
process.run()
result = queue.get()
# Make sure to keep the log line below, to ensure the log stream gets created
log_output = 'START: Lambda %s started via "local" executor ...' % func_arn
# TODO: Interweaving stdout/stderr currently not supported
for stream in (c.stdout(), c.stderr()):
if stream:
log_output += ('\n' if log_output else '') + stream
# store logs to CloudWatch
_store_logs(func_details, log_output)
return result
def execute_java_lambda(self, event, context, main_file, func_details=None):
handler = func_details.handler
opts = config.LAMBDA_JAVA_OPTS if config.LAMBDA_JAVA_OPTS else ''
event_file = EVENT_FILE_PATTERN.replace('*', short_uid())
save_file(event_file, json.dumps(json_safe(event)))
TMP_FILES.append(event_file)
class_name = handler.split('::')[0]
classpath = '%s:%s:%s' % (main_file, Util.get_java_classpath(main_file), LAMBDA_EXECUTOR_JAR)
cmd = 'java %s -cp %s %s %s %s' % (opts, classpath, LAMBDA_EXECUTOR_CLASS, class_name, event_file)
LOG.warning(cmd)
result = self.run_lambda_executor(cmd, func_details=func_details)
return result
class Util:
debug_java_port = False
@classmethod
def get_java_opts(cls):
opts = config.LAMBDA_JAVA_OPTS or ''
# Replace _debug_port_ with a random free port
if '_debug_port_' in opts:
if not cls.debug_java_port:
cls.debug_java_port = get_free_tcp_port()
opts = opts.replace('_debug_port_', ('%s' % cls.debug_java_port))
else:
# Parse the debug port from opts
m = re.match('.*address=(\\d+).*', opts)
if m is not None:
cls.debug_java_port = m.groups()[0]
return opts
@classmethod
def get_host_path_for_path_in_docker(cls, path):
return re.sub(r'^%s/(.*)$' % config.TMP_FOLDER,
r'%s/\1' % config.HOST_TMP_FOLDER, path)
@classmethod
def format_windows_path(cls, path):
temp = path.replace(':', '').replace('\\', '/')
if len(temp) >= 1 and temp[:1] != '/':
temp = '/' + temp
temp = '%s%s' % (config.WINDOWS_DOCKER_MOUNT_PREFIX, temp)
return temp
@classmethod
def docker_image_for_runtime(cls, runtime):
docker_tag = runtime
docker_image = config.LAMBDA_CONTAINER_REGISTRY
# TODO: remove prefix once execution issues are fixed with dotnetcore/python lambdas
# See https://github.com/lambci/docker-lambda/pull/218
lambdas_to_add_prefix = ['dotnetcore2.0', 'dotnetcore2.1', 'python2.7', 'python3.6', 'python3.7']
if docker_image == 'lambci/lambda' and any(img in docker_tag for img in lambdas_to_add_prefix):
docker_tag = '20191117-%s' % docker_tag
return '"%s:%s"' % (docker_image, docker_tag)
@classmethod
def get_docker_remove_flag(cls):
return '--rm' if config.LAMBDA_REMOVE_CONTAINERS else ''
@classmethod
def get_java_classpath(cls, archive):
"""
Return the Java classpath, using the parent folder of the
given archive as the base folder.
The result contains any *.jar files in the base folder, as
well as any JAR files in the "lib/*" subfolder living
alongside the supplied java archive (.jar or .zip).
:param archive: an absolute path to a .jar or .zip Java archive
:return: the Java classpath, relative to the base dir of "archive"
"""
entries = ['.']
base_dir = os.path.dirname(archive)
for pattern in ['%s/*.jar', '%s/lib/*.jar', '%s/java/lib/*.jar', '%s/*.zip']:
for entry in glob.glob(pattern % base_dir):
if os.path.realpath(archive) != os.path.realpath(entry):
entries.append(os.path.relpath(entry, base_dir))
# make sure to append the localstack-utils.jar at the end of the classpath
# https://github.com/localstack/localstack/issues/1160
entries.append(os.path.relpath(archive, base_dir))
entries.append('*.jar')
entries.append('java/lib/*.jar')
result = ':'.join(entries)
return result
# --------------
# GLOBAL STATE
# --------------
EXECUTOR_LOCAL = LambdaExecutorLocal()
EXECUTOR_CONTAINERS_SEPARATE = LambdaExecutorSeparateContainers()
EXECUTOR_CONTAINERS_REUSE = LambdaExecutorReuseContainers()
DEFAULT_EXECUTOR = EXECUTOR_CONTAINERS_SEPARATE
# the keys of AVAILABLE_EXECUTORS map to the LAMBDA_EXECUTOR config variable
AVAILABLE_EXECUTORS = {
'local': EXECUTOR_LOCAL,
'docker': EXECUTOR_CONTAINERS_SEPARATE,
'docker-reuse': EXECUTOR_CONTAINERS_REUSE
}
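# Illustrative helper (an addition for clarity, not part of the original module):
# how a caller would resolve the configured executor from the mapping above,
# assuming config.LAMBDA_EXECUTOR holds the selected name as the comment
# suggests, and falling back to the default executor for unknown values.
def _example_resolve_executor(name=None):
    name = name or config.LAMBDA_EXECUTOR
    return AVAILABLE_EXECUTORS.get(name, DEFAULT_EXECUTOR)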
|
py | 1a46846a291bd9423d2100b3bb902dd5570ac730 |
from .render import call_to_action_email
|
py | 1a4684e56cb7a9841ce2faa9fb9afbf36ab46288 | import re
from collections.abc import Iterable
from functools import partial
from graphql_relay import connection_from_array
from ..types import Boolean, Enum, Int, Interface, List, NonNull, Scalar, String, Union
from ..types.field import Field
from ..types.objecttype import ObjectType, ObjectTypeOptions
from ..utils.thenables import maybe_thenable
from .node import is_node
class PageInfo(ObjectType):
class Meta:
description = (
"The Relay compliant `PageInfo` type, containing data necessary to"
" paginate this connection."
)
has_next_page = Boolean(
required=True,
name="hasNextPage",
description="When paginating forwards, are there more items?",
)
has_previous_page = Boolean(
required=True,
name="hasPreviousPage",
description="When paginating backwards, are there more items?",
)
start_cursor = String(
name="startCursor",
description="When paginating backwards, the cursor to continue.",
)
end_cursor = String(
name="endCursor",
description="When paginating forwards, the cursor to continue.",
)
# noinspection PyPep8Naming
def page_info_adapter(startCursor, endCursor, hasPreviousPage, hasNextPage):
"""Adapter for creating PageInfo instances"""
return PageInfo(
start_cursor=startCursor,
end_cursor=endCursor,
has_previous_page=hasPreviousPage,
has_next_page=hasNextPage,
)
class ConnectionOptions(ObjectTypeOptions):
node = None
class Connection(ObjectType):
class Meta:
abstract = True
@classmethod
def __init_subclass_with_meta__(cls, node=None, name=None, **options):
_meta = ConnectionOptions(cls)
assert node, f"You have to provide a node in {cls.__name__}.Meta"
assert isinstance(node, NonNull) or issubclass(
node, (Scalar, Enum, ObjectType, Interface, Union, NonNull)
), f'Received incompatible node "{node}" for Connection {cls.__name__}.'
base_name = re.sub("Connection$", "", name or cls.__name__) or node._meta.name
if not name:
name = f"{base_name}Connection"
edge_class = getattr(cls, "Edge", None)
_node = node
class EdgeBase:
node = Field(_node, description="The item at the end of the edge")
cursor = String(required=True, description="A cursor for use in pagination")
class EdgeMeta:
description = f"A Relay edge containing a `{base_name}` and its cursor."
edge_name = f"{base_name}Edge"
if edge_class:
edge_bases = (edge_class, EdgeBase, ObjectType)
else:
edge_bases = (EdgeBase, ObjectType)
edge = type(edge_name, edge_bases, {"Meta": EdgeMeta})
cls.Edge = edge
options["name"] = name
_meta.node = node
_meta.fields = {
"page_info": Field(
PageInfo,
name="pageInfo",
required=True,
description="Pagination data for this connection.",
),
"edges": Field(
NonNull(List(edge)),
description="Contains the nodes in this connection.",
),
}
return super(Connection, cls).__init_subclass_with_meta__(
_meta=_meta, **options
)
# noinspection PyPep8Naming
def connection_adapter(cls, edges, pageInfo):
"""Adapter for creating Connection instances"""
return cls(edges=edges, page_info=pageInfo)
class IterableConnectionField(Field):
def __init__(self, type_, *args, **kwargs):
kwargs.setdefault("before", String())
kwargs.setdefault("after", String())
kwargs.setdefault("first", Int())
kwargs.setdefault("last", Int())
super(IterableConnectionField, self).__init__(type_, *args, **kwargs)
@property
def type(self):
type_ = super(IterableConnectionField, self).type
connection_type = type_
if isinstance(type_, NonNull):
connection_type = type_.of_type
if is_node(connection_type):
raise Exception(
"ConnectionFields now need a explicit ConnectionType for Nodes.\n"
"Read more: https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#node-connections"
)
assert issubclass(
connection_type, Connection
), f'{self.__class__.__name__} type has to be a subclass of Connection. Received "{connection_type}".'
return type_
@classmethod
def resolve_connection(cls, connection_type, args, resolved):
if isinstance(resolved, connection_type):
return resolved
assert isinstance(resolved, Iterable), (
f"Resolved value from the connection field has to be an iterable or instance of {connection_type}. "
f'Received "{resolved}"'
)
connection = connection_from_array(
resolved,
args,
connection_type=partial(connection_adapter, connection_type),
edge_type=connection_type.Edge,
page_info_type=page_info_adapter,
)
connection.iterable = resolved
return connection
@classmethod
def connection_resolver(cls, resolver, connection_type, root, info, **args):
resolved = resolver(root, info, **args)
if isinstance(connection_type, NonNull):
connection_type = connection_type.of_type
on_resolve = partial(cls.resolve_connection, connection_type, args)
return maybe_thenable(resolved, on_resolve)
def wrap_resolve(self, parent_resolver):
resolver = super(IterableConnectionField, self).wrap_resolve(parent_resolver)
return partial(self.connection_resolver, resolver, self.type)
ConnectionField = IterableConnectionField
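# Illustrative sketch (hedged; `Ship` is a hypothetical ObjectType, not part of this
# module): a Relay connection is declared by pointing Meta.node at the node type and
# exposing it through a ConnectionField.
#
#     class Ship(ObjectType):
#         name = String()
#
#     class ShipConnection(Connection):
#         class Meta:
#             node = Ship
#
#     class Query(ObjectType):
#         ships = ConnectionField(ShipConnection)
#
#         def resolve_ships(root, info, **kwargs):
#             return [Ship(name="Odyssey")]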
|
py | 1a4685eff1f45b0a2d14c5471a375f9bee49855f | #!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 615801
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
""
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/LunariumCore:2.2.(0|1|99)/)$")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ipstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ipstr = m.group(1)
sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ipstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
if sline[1] == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
if len(sline) > 11:
agent = sline[11][1:] + sline[12][:-1]
else:
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ipstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
def filtermultiport(ips):
'''Filter out hosts with more than one node per IP'''
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
# Sift out ips by type
ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
# Filter IPv4 by ASN
result = []
asn_count = {}
for ip in ips_ipv4:
if len(result) == max_total:
break
try:
asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
except:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
# TODO: filter IPv6 by ASN
# Add back non-IPv4
result.extend(ips_ipv6)
result.extend(ips_onion)
return result
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
# Skip entries with invalid address.
ips = [ip for ip in ips if ip is not None]
# Skip entries from suspicious hosts.
ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
ips = [ip for ip in ips if ip['uptime'] > 50]
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(re.sub(' ', '-', ip['agent']))]
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
# Filter out hosts with multiple bitcoin ports, these are likely abusive
ips = filtermultiport(ips)
# Look up ASNs and limit results, both per ASN and globally.
ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
|
py | 1a46868393740617bf85d5a7efb3527613c1a82a | """Init file for Supervisor RESTful API."""
import logging
from pathlib import Path
from typing import Optional
from aiohttp import web
from ..coresys import CoreSys, CoreSysAttributes
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
from .cli import APICli
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .docker import APIDocker
from .hardware import APIHardware
from .homeassistant import APIHomeAssistant
from .host import APIHost
from .info import APIInfo
from .ingress import APIIngress
from .jobs import APIJobs
from .multicast import APIMulticast
from .network import APINetwork
from .observer import APIObserver
from .os import APIOS
from .proxy import APIProxy
from .resolution import APIResoulution
from .security import SecurityMiddleware
from .services import APIServices
from .snapshots import APISnapshots
from .store import APIStore
from .supervisor import APISupervisor
_LOGGER: logging.Logger = logging.getLogger(__name__)
MAX_CLIENT_SIZE: int = 1024 ** 2 * 16
class RestAPI(CoreSysAttributes):
"""Handle RESTful API for Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.security: SecurityMiddleware = SecurityMiddleware(coresys)
self.webapp: web.Application = web.Application(
client_max_size=MAX_CLIENT_SIZE,
middlewares=[
self.security.system_validation,
self.security.token_validation,
],
)
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp)
self._site: Optional[web.TCPSite] = None
async def load(self) -> None:
"""Register REST API Calls."""
self._register_addons()
self._register_audio()
self._register_auth()
self._register_cli()
self._register_discovery()
self._register_dns()
self._register_docker()
self._register_hardware()
self._register_homeassistant()
self._register_host()
self._register_info()
self._register_ingress()
self._register_multicast()
self._register_network()
self._register_observer()
self._register_os()
self._register_jobs()
self._register_panel()
self._register_proxy()
self._register_resolution()
self._register_services()
self._register_snapshots()
self._register_supervisor()
self._register_store()
await self.start()
def _register_host(self) -> None:
"""Register hostcontrol functions."""
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/host/info", api_host.info),
web.get("/host/logs", api_host.logs),
web.post("/host/reboot", api_host.reboot),
web.post("/host/shutdown", api_host.shutdown),
web.post("/host/reload", api_host.reload),
web.post("/host/options", api_host.options),
web.get("/host/services", api_host.services),
web.post("/host/services/{service}/stop", api_host.service_stop),
web.post("/host/services/{service}/start", api_host.service_start),
web.post("/host/services/{service}/restart", api_host.service_restart),
web.post("/host/services/{service}/reload", api_host.service_reload),
]
)
def _register_network(self) -> None:
"""Register network functions."""
api_network = APINetwork()
api_network.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/network/info", api_network.info),
web.post("/network/reload", api_network.reload),
web.get(
"/network/interface/{interface}/info", api_network.interface_info
),
web.post(
"/network/interface/{interface}/update",
api_network.interface_update,
),
web.get(
"/network/interface/{interface}/accesspoints",
api_network.scan_accesspoints,
),
web.post(
"/network/interface/{interface}/vlan/{vlan}",
api_network.create_vlan,
),
]
)
def _register_os(self) -> None:
"""Register OS functions."""
api_os = APIOS()
api_os.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/os/info", api_os.info),
web.post("/os/update", api_os.update),
web.post("/os/config/sync", api_os.config_sync),
]
)
def _register_jobs(self) -> None:
"""Register Jobs functions."""
api_jobs = APIJobs()
api_jobs.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/jobs/info", api_jobs.info),
web.post("/jobs/options", api_jobs.options),
web.post("/jobs/reset", api_jobs.reset),
]
)
def _register_cli(self) -> None:
"""Register HA cli functions."""
api_cli = APICli()
api_cli.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/cli/info", api_cli.info),
web.get("/cli/stats", api_cli.stats),
web.post("/cli/update", api_cli.update),
]
)
def _register_observer(self) -> None:
"""Register Observer functions."""
api_observer = APIObserver()
api_observer.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/observer/info", api_observer.info),
web.get("/observer/stats", api_observer.stats),
web.post("/observer/update", api_observer.update),
]
)
def _register_multicast(self) -> None:
"""Register Multicast functions."""
api_multicast = APIMulticast()
api_multicast.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/multicast/info", api_multicast.info),
web.get("/multicast/stats", api_multicast.stats),
web.get("/multicast/logs", api_multicast.logs),
web.post("/multicast/update", api_multicast.update),
web.post("/multicast/restart", api_multicast.restart),
]
)
def _register_hardware(self) -> None:
"""Register hardware functions."""
api_hardware = APIHardware()
api_hardware.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/hardware/info", api_hardware.info),
web.get("/hardware/audio", api_hardware.audio),
web.post("/hardware/trigger", api_hardware.trigger),
]
)
def _register_info(self) -> None:
"""Register info functions."""
api_info = APIInfo()
api_info.coresys = self.coresys
self.webapp.add_routes([web.get("/info", api_info.info)])
def _register_resolution(self) -> None:
"""Register info functions."""
api_resolution = APIResoulution()
api_resolution.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/resolution/info", api_resolution.info),
web.post(
"/resolution/check/{check}/options", api_resolution.options_check
),
web.post("/resolution/check/{check}/run", api_resolution.run_check),
web.post(
"/resolution/suggestion/{suggestion}",
api_resolution.apply_suggestion,
),
web.delete(
"/resolution/suggestion/{suggestion}",
api_resolution.dismiss_suggestion,
),
web.delete(
"/resolution/issue/{issue}",
api_resolution.dismiss_issue,
),
web.post("/resolution/healthcheck", api_resolution.healthcheck),
]
)
def _register_auth(self) -> None:
"""Register auth functions."""
api_auth = APIAuth()
api_auth.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/auth", api_auth.auth),
web.post("/auth", api_auth.auth),
web.post("/auth/reset", api_auth.reset),
web.delete("/auth/cache", api_auth.cache),
]
)
def _register_supervisor(self) -> None:
"""Register Supervisor functions."""
api_supervisor = APISupervisor()
api_supervisor.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/supervisor/ping", api_supervisor.ping),
web.get("/supervisor/info", api_supervisor.info),
web.get("/supervisor/stats", api_supervisor.stats),
web.get("/supervisor/logs", api_supervisor.logs),
web.post("/supervisor/update", api_supervisor.update),
web.post("/supervisor/reload", api_supervisor.reload),
web.post("/supervisor/restart", api_supervisor.restart),
web.post("/supervisor/options", api_supervisor.options),
web.post("/supervisor/repair", api_supervisor.repair),
]
)
def _register_homeassistant(self) -> None:
"""Register Home Assistant functions."""
api_hass = APIHomeAssistant()
api_hass.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/core/info", api_hass.info),
web.get("/core/logs", api_hass.logs),
web.get("/core/stats", api_hass.stats),
web.post("/core/options", api_hass.options),
web.post("/core/update", api_hass.update),
web.post("/core/restart", api_hass.restart),
web.post("/core/stop", api_hass.stop),
web.post("/core/start", api_hass.start),
web.post("/core/check", api_hass.check),
web.post("/core/rebuild", api_hass.rebuild),
# Remove with old Supervisor fallback
web.get("/homeassistant/info", api_hass.info),
web.get("/homeassistant/logs", api_hass.logs),
web.get("/homeassistant/stats", api_hass.stats),
web.post("/homeassistant/options", api_hass.options),
web.post("/homeassistant/update", api_hass.update),
web.post("/homeassistant/restart", api_hass.restart),
web.post("/homeassistant/stop", api_hass.stop),
web.post("/homeassistant/start", api_hass.start),
web.post("/homeassistant/check", api_hass.check),
web.post("/homeassistant/rebuild", api_hass.rebuild),
]
)
def _register_proxy(self) -> None:
"""Register Home Assistant API Proxy."""
api_proxy = APIProxy()
api_proxy.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/core/api/websocket", api_proxy.websocket),
web.get("/core/websocket", api_proxy.websocket),
web.get("/core/api/stream", api_proxy.stream),
web.post("/core/api/{path:.+}", api_proxy.api),
web.get("/core/api/{path:.+}", api_proxy.api),
web.get("/core/api/", api_proxy.api),
# Remove with old Supervisor fallback
web.get("/homeassistant/api/websocket", api_proxy.websocket),
web.get("/homeassistant/websocket", api_proxy.websocket),
web.get("/homeassistant/api/stream", api_proxy.stream),
web.post("/homeassistant/api/{path:.+}", api_proxy.api),
web.get("/homeassistant/api/{path:.+}", api_proxy.api),
web.get("/homeassistant/api/", api_proxy.api),
]
)
def _register_addons(self) -> None:
"""Register Add-on functions."""
api_addons = APIAddons()
api_addons.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/addons", api_addons.list),
web.post("/addons/reload", api_addons.reload),
web.get("/addons/{addon}/info", api_addons.info),
web.post("/addons/{addon}/uninstall", api_addons.uninstall),
web.post("/addons/{addon}/start", api_addons.start),
web.post("/addons/{addon}/stop", api_addons.stop),
web.post("/addons/{addon}/restart", api_addons.restart),
web.post("/addons/{addon}/options", api_addons.options),
web.post(
"/addons/{addon}/options/validate", api_addons.options_validate
),
web.get("/addons/{addon}/options/config", api_addons.options_config),
web.post("/addons/{addon}/rebuild", api_addons.rebuild),
web.get("/addons/{addon}/logs", api_addons.logs),
web.get("/addons/{addon}/icon", api_addons.icon),
web.get("/addons/{addon}/logo", api_addons.logo),
web.get("/addons/{addon}/changelog", api_addons.changelog),
web.get("/addons/{addon}/documentation", api_addons.documentation),
web.post("/addons/{addon}/stdin", api_addons.stdin),
web.post("/addons/{addon}/security", api_addons.security),
web.get("/addons/{addon}/stats", api_addons.stats),
]
)
def _register_ingress(self) -> None:
"""Register Ingress functions."""
api_ingress = APIIngress()
api_ingress.coresys = self.coresys
self.webapp.add_routes(
[
web.post("/ingress/session", api_ingress.create_session),
web.post("/ingress/validate_session", api_ingress.validate_session),
web.get("/ingress/panels", api_ingress.panels),
web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
]
)
def _register_snapshots(self) -> None:
"""Register snapshots functions."""
api_snapshots = APISnapshots()
api_snapshots.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/snapshots", api_snapshots.list),
web.post("/snapshots/reload", api_snapshots.reload),
web.post("/snapshots/new/full", api_snapshots.snapshot_full),
web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
web.post("/snapshots/new/upload", api_snapshots.upload),
web.get("/snapshots/{snapshot}/info", api_snapshots.info),
web.delete("/snapshots/{snapshot}", api_snapshots.remove),
web.post(
"/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
),
web.post(
"/snapshots/{snapshot}/restore/partial",
api_snapshots.restore_partial,
),
web.get("/snapshots/{snapshot}/download", api_snapshots.download),
# Old, remove at end of 2020
web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
]
)
def _register_services(self) -> None:
"""Register services functions."""
api_services = APIServices()
api_services.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/services", api_services.list),
web.get("/services/{service}", api_services.get_service),
web.post("/services/{service}", api_services.set_service),
web.delete("/services/{service}", api_services.del_service),
]
)
def _register_discovery(self) -> None:
"""Register discovery functions."""
api_discovery = APIDiscovery()
api_discovery.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/discovery", api_discovery.list),
web.get("/discovery/{uuid}", api_discovery.get_discovery),
web.delete("/discovery/{uuid}", api_discovery.del_discovery),
web.post("/discovery", api_discovery.set_discovery),
]
)
def _register_dns(self) -> None:
"""Register DNS functions."""
api_dns = APICoreDNS()
api_dns.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/dns/info", api_dns.info),
web.get("/dns/stats", api_dns.stats),
web.get("/dns/logs", api_dns.logs),
web.post("/dns/update", api_dns.update),
web.post("/dns/options", api_dns.options),
web.post("/dns/restart", api_dns.restart),
web.post("/dns/reset", api_dns.reset),
]
)
def _register_audio(self) -> None:
"""Register Audio functions."""
api_audio = APIAudio()
api_audio.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/audio/info", api_audio.info),
web.get("/audio/stats", api_audio.stats),
web.get("/audio/logs", api_audio.logs),
web.post("/audio/update", api_audio.update),
web.post("/audio/restart", api_audio.restart),
web.post("/audio/reload", api_audio.reload),
web.post("/audio/profile", api_audio.set_profile),
web.post("/audio/volume/{source}/application", api_audio.set_volume),
web.post("/audio/volume/{source}", api_audio.set_volume),
web.post("/audio/mute/{source}/application", api_audio.set_mute),
web.post("/audio/mute/{source}", api_audio.set_mute),
web.post("/audio/default/{source}", api_audio.set_default),
]
)
def _register_store(self) -> None:
"""Register store endpoints."""
api_store = APIStore()
api_store.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/store", api_store.store_info),
web.get("/store/addons", api_store.addons_list),
web.get("/store/addons/{addon}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.post(
"/store/addons/{addon}/install", api_store.addons_addon_install
),
web.post(
"/store/addons/{addon}/install/{version}",
api_store.addons_addon_install,
),
web.post("/store/addons/{addon}/update", api_store.addons_addon_update),
web.post(
"/store/addons/{addon}/update/{version}",
api_store.addons_addon_update,
),
web.post("/store/reload", api_store.reload),
web.get("/store/repositories", api_store.repositories_list),
web.get(
"/store/repositories/{repository}",
api_store.repositories_repository_info,
),
]
)
# Reroute from legacy
self.webapp.add_routes(
[
web.post("/addons/{addon}/install", api_store.addons_addon_install),
web.post("/addons/{addon}/update", api_store.addons_addon_update),
]
)
def _register_panel(self) -> None:
"""Register panel for Home Assistant."""
panel_dir = Path(__file__).parent.joinpath("panel")
self.webapp.add_routes([web.static("/app", panel_dir)])
def _register_docker(self) -> None:
"""Register docker configuration functions."""
api_docker = APIDocker()
api_docker.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/docker/info", api_docker.info),
web.get("/docker/registries", api_docker.registries),
web.post("/docker/registries", api_docker.create_registry),
web.delete("/docker/registries/{hostname}", api_docker.remove_registry),
]
)
async def start(self) -> None:
"""Run RESTful API webserver."""
await self._runner.setup()
self._site = web.TCPSite(
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
)
try:
await self._site.start()
except OSError as err:
_LOGGER.critical("Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
else:
_LOGGER.info("Starting API on %s", self.sys_docker.network.supervisor)
async def stop(self) -> None:
"""Stop RESTful API webserver."""
if not self._site:
return
# Shutdown running API
await self._site.stop()
await self._runner.cleanup()
_LOGGER.info("Stopping API on %s", self.sys_docker.network.supervisor)
|
py | 1a4686c016ffadc6d70ce0f8f644675e7cd74734 | #!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2020 FABRIC Testbed
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#
# Author: Komal Thareja ([email protected])
from __future__ import annotations
from abc import abstractmethod
from typing import TYPE_CHECKING
from fabric_cf.actor.core.apis.abc_client_policy import ABCClientPolicy
if TYPE_CHECKING:
from fabric_cf.actor.core.kernel.resource_set import ResourceSet
from fabric_cf.actor.core.time.term import Term
from fabric_cf.actor.core.util.reservation_set import ReservationSet
class ABCControllerPolicy(ABCClientPolicy):
"""
IControllerPolicy defines the policy interface for an actor acting in the orchestrator role.
"""
@abstractmethod
def get_redeeming(self, *, cycle: int) -> ReservationSet:
"""
Returns a set of reservations that must be redeemed.
@params cycle the current cycle
@returns reservations to redeem
"""
@abstractmethod
def lease_satisfies(self, *, request_resources: ResourceSet, actual_resources: ResourceSet, requested_term: Term,
actual_term: Term):
"""
Checks if the resources and term received in a lease are in compliance
with what was initially requested. The policy can prevent the application
of the incoming update if it disagrees with it.
@param request_resources
resources requested from site authority
@param actual_resources
resources received from site authority
@param requested_term
term requested from site authority
@param actual_term
term received from site authority
@raises Exception in case of error
"""
|
py | 1a468780b8abf3eef8a78caf73ce4a0c1bf8fb1b | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
import asyncio
import os
from datetime import datetime
from msrest.serialization import TZ_UTC
from azure.communication.identity import CommunicationIdentityClient
from azure.communication.chat.aio import (
ChatClient,
CommunicationTokenCredential
)
from azure.communication.chat import (
ChatParticipant,
ChatMessageType
)
from azure.communication.identity._shared.utils import parse_connection_str
from azure_devtools.scenario_tests import RecordingProcessor
from helper import URIIdentityReplacer
from chat_e2e_helper import ChatURIReplacer
from _shared.asynctestcase import AsyncCommunicationTestCase
from _shared.testcase import BodyReplacerProcessor, ResponseReplacerProcessor
from _shared.utils import get_http_logging_policy
class ChatThreadClientTestAsync(AsyncCommunicationTestCase):
def setUp(self):
super(ChatThreadClientTestAsync, self).setUp()
self.recording_processors.extend([
BodyReplacerProcessor(keys=["id", "token", "senderId", "chatMessageId", "nextLink", "participants", "multipleStatus", "value"]),
URIIdentityReplacer(),
ResponseReplacerProcessor(keys=[self._resource_name]),
ChatURIReplacer()])
endpoint, _ = parse_connection_str(self.connection_str)
self.endpoint = endpoint
self.identity_client = CommunicationIdentityClient.from_connection_string(self.connection_str)
self.users = []
self.user_tokens = []
self.chat_clients = []
# create user 1
self.user = self.identity_client.create_user()
token_response = self.identity_client.get_token(self.user, scopes=["chat"])
self.token = token_response.token
# create user 2
self.new_user = self.identity_client.create_user()
token_response = self.identity_client.get_token(self.new_user, scopes=["chat"])
self.token_new_user = token_response.token
# create ChatClient
self.chat_client = ChatClient(
self.endpoint,
CommunicationTokenCredential(self.token),
http_logging_policy=get_http_logging_policy()
)
self.chat_client_new_user = ChatClient(
self.endpoint,
CommunicationTokenCredential(self.token_new_user),
http_logging_policy=get_http_logging_policy()
)
def tearDown(self):
super(ChatThreadClientTestAsync, self).tearDown()
# delete created users
if not self.is_playback():
self.identity_client.delete_user(self.user)
self.identity_client.delete_user(self.new_user)
async def _create_thread(self):
# create chat thread
topic = "test topic"
share_history_time = datetime.utcnow()
share_history_time = share_history_time.replace(tzinfo=TZ_UTC)
participants = [ChatParticipant(
identifier=self.user,
display_name='name',
share_history_time=share_history_time
)]
create_chat_thread_result = await self.chat_client.create_chat_thread(topic, thread_participants=participants)
self.chat_thread_client = self.chat_client.get_chat_thread_client(create_chat_thread_result.chat_thread.id)
self.thread_id = self.chat_thread_client.thread_id
async def _create_thread_w_two_users(self):
# create chat thread
topic = "test topic"
share_history_time = datetime.utcnow()
share_history_time = share_history_time.replace(tzinfo=TZ_UTC)
participants = [
ChatParticipant(
identifier=self.user,
display_name='name',
share_history_time=share_history_time
),
ChatParticipant(
identifier=self.new_user,
display_name='name',
share_history_time=share_history_time
)
]
create_chat_thread_result = await self.chat_client.create_chat_thread(topic, thread_participants=participants)
self.chat_thread_client = self.chat_client.get_chat_thread_client(create_chat_thread_result.chat_thread.id)
self.thread_id = self.chat_thread_client.thread_id
async def _send_message(self):
# send a message
content = 'hello world'
sender_display_name = 'sender name'
create_message_result = await self.chat_thread_client.send_message(
content,
sender_display_name=sender_display_name)
message_id = create_message_result.id
return message_id
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_update_topic(self):
async with self.chat_client:
await self._create_thread()
topic = "update topic"
async with self.chat_thread_client:
await self.chat_thread_client.update_topic(topic=topic)
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_send_message(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
content = 'hello world'
sender_display_name = 'sender name'
create_message_result = await self.chat_thread_client.send_message(
content,
sender_display_name=sender_display_name)
create_message_result_id = create_message_result.id
self.assertTrue(create_message_result_id)
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_get_message(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
message_id = await self._send_message()
message = await self.chat_thread_client.get_message(message_id)
assert message.id == message_id
assert message.type == ChatMessageType.TEXT
assert message.content.message == 'hello world'
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_list_messages(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self._send_message()
chat_messages = self.chat_thread_client.list_messages(results_per_page=1)
items = []
async for item in chat_messages:
items.append(item)
assert len(items) > 0
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_update_message(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
message_id = await self._send_message()
content = "updated message content"
await self.chat_thread_client.update_message(message_id, content=content)
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_delete_message(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
message_id = await self._send_message()
await self.chat_thread_client.delete_message(message_id)
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_list_participants(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
# add another participant
share_history_time = datetime.utcnow()
share_history_time = share_history_time.replace(tzinfo=TZ_UTC)
new_participant = ChatParticipant(
identifier=self.new_user,
display_name='name',
share_history_time=share_history_time)
await self.chat_thread_client.add_participants([new_participant])
chat_thread_participants = self.chat_thread_client.list_participants(results_per_page=1, skip=1)
items = []
async for item in chat_thread_participants:
items.append(item)
assert len(items) == 1
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_add_participants(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
share_history_time = datetime.utcnow()
share_history_time = share_history_time.replace(tzinfo=TZ_UTC)
new_participant = ChatParticipant(
identifier=self.new_user,
display_name='name',
share_history_time=share_history_time)
participants = [new_participant]
failed_participants = await self.chat_thread_client.add_participants(participants)
# no error occurred while adding participants
assert len(failed_participants) == 0
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_remove_participant(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
# add participant first
share_history_time = datetime.utcnow()
share_history_time = share_history_time.replace(tzinfo=TZ_UTC)
new_participant = ChatParticipant(
identifier=self.new_user,
display_name='name',
share_history_time=share_history_time)
participants = [new_participant]
await self.chat_thread_client.add_participants(participants)
# test remove participant
await self.chat_thread_client.remove_participant(self.new_user)
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_send_typing_notification(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self.chat_thread_client.send_typing_notification()
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_send_typing_notification_with_sender_display_name(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self.chat_thread_client.send_typing_notification(sender_display_name="John")
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_send_read_receipt(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
message_id = await self._send_message()
await self.chat_thread_client.send_read_receipt(message_id)
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
async def _wait_on_thread(self, chat_client, thread_id, message_id):
# print("Read Receipts Sent: ", read_receipts_sent)
chat_thread_client = chat_client.get_chat_thread_client(thread_id)
for _ in range(10):
read_receipts_paged = chat_thread_client.list_read_receipts()
chat_message_ids = []
async for page in read_receipts_paged.by_page():
async for item in page:
chat_message_ids.append(item.chat_message_id)
if message_id in chat_message_ids:
return
else:
print("Sleeping for additional 2 secs")
await asyncio.sleep(2)
raise Exception("Read receipts not updated in 20 seconds. Failing.")
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_list_read_receipts(self):
async with self.chat_client:
await self._create_thread_w_two_users()
async with self.chat_thread_client:
# first user sends 2 messages
for i in range(2):
message_id = await self._send_message()
# send read receipts first
await self.chat_thread_client.send_read_receipt(message_id)
if self.is_live:
await self._wait_on_thread(chat_client=self.chat_client, thread_id=self.thread_id, message_id=message_id)
# get chat thread client for second user
chat_thread_client_new_user = self.chat_client_new_user.get_chat_thread_client(self.thread_id)
# second user sends 1 message
message_result_new_user = await chat_thread_client_new_user.send_message(
"content",
sender_display_name="sender_display_name")
message_id_new_user = message_result_new_user.id
# send read receipt
await chat_thread_client_new_user.send_read_receipt(message_id_new_user)
if self.is_live:
await self._wait_on_thread(chat_client=self.chat_client_new_user, thread_id=self.thread_id, message_id=message_id_new_user)
# list read receipts
read_receipts = self.chat_thread_client.list_read_receipts(results_per_page=2, skip=0)
items = []
async for page in read_receipts.by_page():
async for item in page:
items.append(item)
assert len(items) == 2
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_get_properties(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
get_thread_result = await self.chat_thread_client.get_properties()
assert get_thread_result.id == self.thread_id
# delete created users and chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
|
py | 1a4687a5fe9b50d5609924c2996ddf12ac8f4387 | from No import No
from Estado import Estado
estadoInicial = Estado('/home/ec2-user/environment/DiretorioInicial')
raiz = No(estadoInicial)
estadosFilhos = estadoInicial.funcaoSucessora()
for estadoFilho in estadosFilhos:
noFilho = No(Estado(estadoFilho))
raiz.addFilho(noFilho)
raiz.printArvore()
|
py | 1a4687b70da72cc10cde66a5d116d998a45541cb | # Copyright 2018 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mathematical operations used to build up expressions for printing.
We can't use sympy because sympy will automatically simplify many types of
expressions, even with `evaluate=False` passed in. For example:
* Mul(-2, -3, evaluate=False) gives -(-6), not (-2) x (-3).
* Add(2, 1, evaluate=False) gives 1 + 2, because the terms are sorted.
As such, it's easier just to work with our own op classes that display precisely
as we created them. This also allows us to use custom symbols for the
expressions, such as the multiplication symbol.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
# Dependency imports
from absl import logging
from mathematics_dataset.sample import number
from mathematics_dataset.util import display
import numpy as np
import six
from six.moves import zip
import sympy
MUL_SYMBOL = '*'
DIV_SYMBOL = '/'
POW_SYMBOL = '**'
GT_SYMBOL = '>'
LT_SYMBOL = '<'
GE_SYMBOL = '>='
LE_SYMBOL = '<='
EQ_SYMBOL = '='
NE_SYMBOL = '!='
# Operator precedence levels. Used to insert brackets if necessary.
_EQ_PRECEDENCE = 0
_CONSTANT_PRECEDENCE = 1
_POW_PRECEDENCE = 2
_SQRT_PRECEDENCE = 3
_MUL_PRECEDENCE = 4
_ADD_PRECEDENCE = 5
def bracketed(child, parent, bracket_if_same_precedence):
"""Returns string representation of `child`, possibly bracketed.
Args:
child: Instance of `Op` or a valid value for `ConstantOp`.
parent: Instance of `Op`. Used to determine whether `child` needs to be
bracketed first before appearing in the parent op's expression.
bracket_if_same_precedence: Whether to bracket if the child has the same
operator precedence as the parent.
Returns:
String representation of `child`.
"""
if not isinstance(child, Op):
child = Constant(child)
child_precedence = child.precedence
parent_precedence = parent.precedence
if (parent_precedence > child_precedence
or (parent_precedence == child_precedence
and not bracket_if_same_precedence)):
return str(child)
else:
return '({})'.format(child)
def _flatten(iterable):
"""Returns list."""
if isinstance(iterable, (list, tuple)):
result = list(iterable)
else:
assert isinstance(iterable, dict)
keys = sorted(six.iterkeys(iterable))
result = [iterable[key] for key in keys]
# Check we don't have any hierarchy in the structure (otherwise would need
# to use something recursive like tf.contrib.framework.nest.flatten).
for item in result:
assert not isinstance(item, (list, tuple, dict))
return result
def _pack_sequence_as(example, flat):
if isinstance(example, list) or isinstance(example, tuple):
return flat
else:
assert isinstance(example, dict)
keys = sorted(six.iterkeys(example))
return {key: value for key, value in zip(keys, flat)}
@six.add_metaclass(abc.ABCMeta)
class Op(object):
"""An operation.
This needs to support being transformed into sympy (and possibly in the future
other types such as an appropriately formatted string), when given the op
arguments.
"""
def __init__(self, children):
"""Initialize this `Op` base class.
Args:
children: Iterable structure containing child ops.
"""
assert isinstance(children, (list, dict, tuple))
flat_children = _flatten(children)
flat_children = [child if isinstance(child, Op) else Constant(child)
for child in flat_children]
children = _pack_sequence_as(children, flat_children)
self._children = children
@property
def children(self):
"""Returns iterable or dict over immediate children."""
return self._children
def descendants(self):
"""Returns list of all descendants (self, children, grandchildren, etc)."""
descendants = [self]
flat_children = _flatten(self._children)
for child in flat_children:
descendants += child.descendants()
return descendants
@abc.abstractmethod
def __str__(self):
"""Returns a string format of this op."""
@abc.abstractmethod
def sympy(self):
"""Returns the sympifcation of this op."""
def _sympy_(self):
"""Convenience method to automatically sympify this object."""
try:
return self.sympy()
except AttributeError as e:
# Note: we print this error here, before raising it again, because sympy
# will think `AttributeError` refers to this object not having a `_sympy_`
# method, rather than having it, which leads to otherwise confusing error
# messages.
logging.error(
'Encountered attribute error while trying to sympify: %s', e)
raise e
@abc.abstractproperty
def precedence(self):
"""Returns the precedence (integer) of this op."""
class Constant(Op):
"""Returns a constant value; a nullary op."""
def __init__(self, value):
super(Constant, self).__init__([])
if isinstance(value, six.integer_types):
value = sympy.Integer(value)
self._value = value
def __str__(self):
return str(self._value)
def sympy(self):
return self._value
@property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = value
def _is_simple(self):
"""Returns whether it's a simple number, rather than a division or neg."""
if isinstance(self._value, sympy.Symbol):
return True
elif (isinstance(self._value, int)
or isinstance(self._value, sympy.Integer)
or isinstance(self._value, display.Decimal)
or isinstance(self._value, np.int64)
or isinstance(self._value, np.int32)):
return self._value >= 0
elif isinstance(self._value, sympy.Rational):
return False
elif isinstance(self._value, sympy.Function):
return True
else:
raise ValueError('Unknown type {}'.format(type(self._value)))
@property
def precedence(self):
if self._is_simple():
return _CONSTANT_PRECEDENCE
else:
return _MUL_PRECEDENCE
class _SumLikeOp(Op):
"""Abstract op for sum-like terms which may contain negative entries."""
@abc.abstractmethod
def expanded_signs_and_terms(self):
"""Returns a list of arguments, plus any sub-arguments from sub-adds.
E.g., if this op is `Add(Add(2, Neg(3)), Mul(4, 5), 1)`, then will return
`[(True, 2), (False, 3), (True, Mul(4, 5)), (True, 1)]` (the arguments of
the inner add have been extracted).
"""
def __str__(self):
signs_and_terms = self.expanded_signs_and_terms()
if not signs_and_terms:
return '0'
for i, (sign, term) in enumerate(signs_and_terms):
if i == 0:
if sign:
expression = bracketed(term, self, True)
else:
expression = '-' + bracketed(term, self, True)
else:
if sign:
expression += ' + ' + bracketed(term, self, True)
else:
expression += ' - ' + bracketed(term, self, True)
return expression
class Identity(_SumLikeOp):
"""The identity op (a unitary op)."""
def __init__(self, input_):
super(Identity, self).__init__({'input': input_})
def expanded_signs_and_terms(self):
if isinstance(self.children['input'], _SumLikeOp):
return self.children['input'].expanded_signs_and_terms()
else:
return [(True, self.children['input'])]
def __str__(self):
return str(self.children['input'])
def sympy(self):
return self.children['input'].sympy()
@property
def precedence(self):
return self.children['input'].precedence
class Neg(_SumLikeOp):
"""Negation, a unary op. Also has special display when appearing in a sum."""
def __init__(self, arg):
super(Neg, self).__init__({'input': arg})
def expanded_signs_and_terms(self):
if isinstance(self.children['input'], _SumLikeOp):
inner_signs_and_terms = self.children['input'].expanded_signs_and_terms()
return [(not sign, term) for (sign, term) in inner_signs_and_terms]
else:
return [(False, self.children['input'])]
def sympy(self):
return -sympy.sympify(self.children['input'])
def inner(self):
return self.children['input']
@property
def precedence(self):
return _ADD_PRECEDENCE
class Add(_SumLikeOp):
"""Addition."""
def __init__(self, *args):
super(Add, self).__init__(args)
def expanded_signs_and_terms(self):
"""Returns a list of arguments, plus any sub-arguments from sub-adds.
E.g., if this op is `Add(Add(2, 3), Mul(4, 5), 1)`, then will return
`[2, 3, Mul(4, 5), 1]` (the arguments of the inner add have been extracted).
"""
expanded = []
for arg in self.children:
if isinstance(arg, _SumLikeOp):
expanded += arg.expanded_signs_and_terms()
else:
expanded.append((True, arg))
return expanded
def sympy(self):
return sympy.Add(*[sympy.sympify(arg) for arg in self.children])
@property
def precedence(self):
return _ADD_PRECEDENCE
class Sub(Op):
"""Subtraction."""
def __init__(self, left, right):
super(Sub, self).__init__({'left': left, 'right': right})
def __str__(self):
return (bracketed(self.children['left'], self, False) + ' - '
+ bracketed(self.children['right'], self, True))
def sympy(self):
return sympy.Add(
self.children['left'], sympy.Mul(-1, self.children['right']))
@property
def precedence(self):
return _ADD_PRECEDENCE
class Mul(Op):
"""Multiplication."""
def __init__(self, *args):
super(Mul, self).__init__(args)
def __str__(self):
if not self.children:
return '1'
else:
args = [bracketed(arg, self, False) for arg in self.children]
return MUL_SYMBOL.join(args)
def sympy(self):
return sympy.Mul(*[sympy.sympify(arg) for arg in self.children])
@property
def precedence(self):
return _MUL_PRECEDENCE
class Div(Op):
"""Division."""
def __init__(self, numer, denom):
super(Div, self).__init__({'numer': numer, 'denom': denom})
def __str__(self):
return u'{}{}{}'.format(
bracketed(self.children['numer'], self, True), DIV_SYMBOL,
bracketed(self.children['denom'], self, True))
def sympy(self):
return sympy.Mul(
self.children['numer'], sympy.Pow(self.children['denom'], -1))
@property
def precedence(self):
return _MUL_PRECEDENCE
class Pow(Op):
"""Power a to the power b."""
def __init__(self, a, b):
super(Pow, self).__init__({'a': a, 'b': b})
def __str__(self):
return u'{}{}{}'.format(
bracketed(self.children['a'], self, True), POW_SYMBOL,
bracketed(self.children['b'], self, True))
def sympy(self):
return sympy.Pow(
sympy.sympify(self.children['a']), sympy.sympify(self.children['b']))
@property
def precedence(self):
return _POW_PRECEDENCE
class Sqrt(Op):
"""Square root of a value."""
def __init__(self, a):
super(Sqrt, self).__init__({'a': a})
def __str__(self):
return 'sqrt({})'.format(self.children['a'])
def sympy(self):
return sympy.sqrt(self.children['a'])
@property
def precedence(self):
return _POW_PRECEDENCE
class Eq(Op):
"""Equality."""
def __init__(self, left, right):
super(Eq, self).__init__({'left': left, 'right': right})
def __str__(self):
return '{} = {}'.format(self.children['left'], self.children['right'])
def sympy(self):
return sympy.Eq(self.children['left'], self.children['right'])
@property
def precedence(self):
return _EQ_PRECEDENCE
def number_constants(expressions):
"""Returns list of integer, rational, decimal constants in the expressions."""
if isinstance(expressions, Op):
expressions = [expressions]
descendants = []
for expression in expressions:
descendants += expression.descendants()
candidate_constants = [op for op in descendants if isinstance(op, Constant)]
return [constant for constant in candidate_constants
if number.is_integer_or_rational_or_decimal(constant.value)]
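# A minimal, hedged demo of how these ops display and sympify (illustrative only):
#
#     expr = Add(Constant(2), Neg(Constant(3)))
#     str(expr)       # -> '2 - 3'   (kept exactly as constructed, no simplification)
#     expr.sympy()    # -> -1
#     nested = Sub(1, Add(2, 3))
#     str(nested)     # -> '1 - (2 + 3)'   (brackets follow operator precedence)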
|
py | 1a4688285e394cba5eb0b109a1d3c0ceef22eac0 | from __future__ import absolute_import, division, print_function
from oem.core.providers.base import Provider
from oem.version import __version__
from oem_core.core.plugin import PluginManager
import inspect
import logging
import six
log = logging.getLogger(__name__)
class Client(object):
version = __version__
def __init__(self, services, provider, formats=None):
"""Client for OpenEntityMap.
:param services: List of services to load (e.g. "anidb")
:type services: list
:param provider: Provider to use for databases (e.g. "package", "release/incremental")
:type provider: str or oem.core.providers.base.Base
:param formats: List of formats to use, or `None` for any
:type formats: list or None
"""
self._formats = formats
# Discover available plugins
self._plugins = PluginManager
self._plugins.discover()
# Construct plugins
self._services = self._construct_services(services)
self._provider = self._construct_provider(provider)
# Build database + package tables
self._databases = {}
self._packages = {}
for _, cls in self._load_plugins('client', services, construct=False):
# Merge service databases into client
if cls.__databases__:
self._databases.update(cls.__databases__)
else:
log.warn('Service %r has no "__databases__" defined', cls.__key__)
# Merge service packages into client
if cls.__packages__:
self._packages.update(cls.__packages__)
else:
log.warn('Service %r has no "__packages__" defined', cls.__key__)
@property
def formats(self):
return self._formats
@property
def plugins(self):
return self._plugins
@property
def provider(self):
return self._provider
def load_all(self):
for service in six.itervalues(self._services):
service.load()
def database_name(self, source, target):
return self._databases.get((source, target))
def package_name(self, source, target):
return self._packages.get((source, target))
def __getitem__(self, source):
return ServiceInterface(self, source)
#
# Private methods
#
def _construct_services(self, services):
result = {}
for _, cls in self._load_plugins('client', services, construct=False):
# Add supported service conversions
for source, targets in cls.__services__.items():
for target in targets:
# Construct service
result[(source, target)] = cls(self, source, target)
return result
def _construct_provider(self, provider_or_key):
if isinstance(provider_or_key, Provider):
# Class
provider = provider_or_key
elif isinstance(provider_or_key, six.string_types):
# Identifier
provider = PluginManager.get('client-provider', provider_or_key)
if provider is None:
raise ValueError('Unable to find provider: %r' % provider_or_key)
else:
raise ValueError('Unknown provider: %r' % provider_or_key)
# Ensure provider has been constructed
if inspect.isclass(provider):
provider = provider()
# Initialize provider
provider.initialize(self)
return provider
@staticmethod
def _load_plugins(kind, keys, construct=True):
if not keys:
return
for name in keys:
cls = PluginManager.get(kind, name)
if cls is None:
log.warn('Unable to find plugin: %r', name)
continue
if not cls.available:
log.warn('Plugin %r is not available', name)
continue
if construct:
yield cls.__key__, cls()
else:
yield cls.__key__, cls
class ServiceInterface(object):
def __init__(self, client, source):
self.client = client
self.source = source
def to(self, target):
try:
return self.client._services[(self.source, target)]
except KeyError:
raise KeyError('Unknown service: %s -> %s' % (self.source, target))
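# Illustrative usage (hedged sketch; 'anidb'/'tvdb' availability depends on which
# service plugins and database packages are actually installed):
#
#     client = Client(services=['anidb'], provider='package')
#     client.load_all()
#     anidb_to_tvdb = client['anidb'].to('tvdb')   # raises KeyError if the pair is unknown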
|
py | 1a468a2b28f101a4cdaf02935262fa315c505f37 | #
# Copyright 2020 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"Retrieve remote schema file."
from pathlib import Path
import re
from typing import Union
from urllib.parse import urlparse
from urllib.request import urlopen
import requests
import requests.exceptions
from . import typing as typing_
from .exceptions import InsecureConnectionError
# Semantically, typing_.PathLike doesn't cover strings that represent URLs
def retrieve_schema_file(url_or_path: Union[typing_.PathLike, str], *,
encoding: str = 'utf-8',
tls_verification: Union[bool, typing_.PathLike] = True) -> str:
"""Retrieve a single schema file.
:param url_or_path: URL or path to the schema file.
:param encoding: The encoding of the text in ``url_or_path``.
:param tls_verification: Same as ``tls_verification`` in :class:`pydax.Schema`.
:raises ValueError: See :class:`pydax.Schema`.
:raises InsecureConnectionError: See :class:`pydax.Schema`.
:return: A string of the content.
"""
url_or_path = str(url_or_path)
# We don't detect fully whether the input is a URL or a file path because I couldn't find a reliable way. Almost any
# string with no backslash can be a file name on Linux. URL detection often involves either giant dependencies such
# as Django, or tediously long regular expressions that we can't be sure would work. Here, we detect the
# beginning of the string. If it doesn't look like a URL, treat it as a file path.
if re.match(r'[a-zA-Z0-9]+:\/\/', url_or_path):
parse_result = urlparse(url_or_path)
scheme = parse_result.scheme
if scheme in ('http', 'https'):
if scheme == 'http' and tls_verification:
raise InsecureConnectionError((f'{url_or_path} is a http link and insecure. '
'Set tls_verification=False to accept http links.'))
try:
content = requests.get(url_or_path, allow_redirects=True, verify=tls_verification).content
except requests.exceptions.SSLError as e:
raise InsecureConnectionError((f'Failed to securely connect to {url_or_path}. Caused by:\n{e}'))
# We don't use requests.Response.encoding and requests.Response.text because it is always silent when
# there's an encoding error
return content.decode(encoding)
elif scheme == 'file':
with urlopen(url_or_path) as f: # nosec: bandit will always complain but we know it points to a local file
return f.read().decode(encoding)
else:
raise ValueError(f'Unknown scheme in "{url_or_path}": "{scheme}"')
else:
# Not a URL, treated as a local file path
return Path(url_or_path).read_text(encoding)
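# Illustrative usage (a sketch; the URLs and paths below are hypothetical):
#
#     retrieve_schema_file('https://example.org/schemata/datasets.yaml')
#         # fetched over HTTPS with certificate verification (the default)
#     retrieve_schema_file('http://example.org/schemata/datasets.yaml',
#                          tls_verification=False)
#         # plain-http links raise InsecureConnectionError unless
#         # tls_verification is disabled
#     retrieve_schema_file('./schemata/datasets.yaml')
#         # no URL scheme, so the file is read from the local filesystem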
|
py | 1a468a95c24059d3247776b5a633ef870721fefa | """Dense univariate polynomials with coefficients in Galois fields. """
from random import uniform
from math import ceil, sqrt, log
from sympy.polys.polyutils import (
_sort_factors
)
from sympy.polys.polyerrors import (
ExactQuotientFailed
)
from sympy.utilities import (
any, all, cythonized
)
from sympy.ntheory import factorint
def gf_crt(U, M, K):
"""Chinese Remainder Theorem.
Given a set of integer residues `u_0,...,u_n` and a set of
co-prime integer moduli `m_0,...,m_n`, returns an integer
`u`, such that `u = u_i mod m_i` for `i = 0,...,n`.
As an example consider a set of residues `U = [49, 76, 65]`
and a set of moduli `M = [99, 97, 95]`. Then we have::
>>> from sympy.polys.galoistools import gf_crt
>>> from sympy.polys.algebratools import ZZ
>>> gf_crt([49, 76, 65], [99, 97, 95], ZZ)
639985
This is the correct result because::
>>> 639985 % 99
49
>>> 639985 % 97
76
>>> 639985 % 95
65
"""
p, v = K.one, K.zero
for m in M:
p *= m
for u, m in zip(U, M):
e = p // m
s, _, _ = K.gcdex(e, m)
v += e*(u*s % m)
return v % p
def gf_crt1(M, K):
"""First part of Chines Remainder Theorem. """
p, E, S = K.one, [], []
for m in M:
p *= m
for m in M:
E.append(p // m)
S.append(K.gcdex(E[-1], m)[0] % m)
return p, E, S
def gf_crt2(U, M, p, E, S, K):
"""Second part of Chinese Remainder Theorem. """
v = K.zero
for u, m, e, s in zip(U, M, E, S):
v += e*(u*s % m)
return v % p
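# Illustrative sketch (not part of the original module): gf_crt1/gf_crt2 split
# the CRT so that the moduli-only precomputation can be reused for many
# residue sets sharing the same moduli. Reusing the gf_crt example above:
#
#     >>> from sympy.polys.algebratools import ZZ
#     >>> p, E, S = gf_crt1([99, 97, 95], ZZ)   # depends only on the moduli
#     >>> gf_crt2([49, 76, 65], [99, 97, 95], p, E, S, ZZ)
#     639985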
def gf_int(a, p):
"""Coerce `a mod p` to an integer in `[-p/2, p/2]` range. """
if a <= p // 2:
return a
else:
return a - p
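# Illustrative sketch: gf_int maps the least non-negative residue to the
# symmetric representation, e.g. with p = 7 we have p//2 == 3, so
# gf_int(2, 7) == 2 while gf_int(5, 7) == 5 - 7 == -2.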
def gf_degree(f):
"""Returns leading degree of `f`. """
return len(f)-1
def gf_LC(f, K):
"""Returns leading coefficient of `f`. """
if not f:
return K.zero
else:
return f[0]
def gf_TC(f, K):
"""Returns trailing coefficient of `f`. """
if not f:
return K.zero
else:
return f[-1]
@cythonized("k")
def gf_strip(f):
"""Remove leading zeros from `f`. """
if not f or f[0]:
return f
k = 0
for coeff in f:
if coeff:
break
else:
k += 1
return f[k:]
def gf_trunc(f, p):
"""Reduce all coefficients modulo `p`. """
return gf_strip([ a % p for a in f ])
def gf_normal(f, p, K):
"""Normalize all coefficients in `K`. """
return gf_trunc(map(K, f), p)
def gf_convert(f, p, K0, K1):
"""Normalize all coefficients in `K`. """
return gf_trunc([ K1.convert(c, K0) for c in f ], p)
@cythonized("k,n")
def gf_from_dict(f, p, K):
"""Create `GF(p)[x]` polynomial from a dict. """
n, h = max(f.iterkeys()), []
if type(n) is int:
for k in xrange(n, -1, -1):
h.append(f.get(k, K.zero) % p)
else:
(n,) = n
for k in xrange(n, -1, -1):
h.append(f.get((k,), K.zero) % p)
return gf_trunc(h, p)
@cythonized("k,n")
def gf_to_dict(f, p, symmetric=True):
"""Convert `GF(p)[x]` polynomial to a dict. """
n, result = gf_degree(f), {}
for k in xrange(0, n+1):
if symmetric:
a = gf_int(f[n-k], p)
else:
a = f[n-k]
if a: result[k] = a
return result
def gf_from_int_poly(f, p):
"""Create `GF(p)[x]` polynomial from `Z[x]`. """
return gf_trunc(f, p)
def gf_to_int_poly(f, p, symmetric=True):
"""Convert `GF(p)[x]` polynomial to `Z[x]`. """
if symmetric:
return [ gf_int(c, p) for c in f ]
else:
return f
def gf_neg(f, p, K):
"""Negate a polynomial in `GF(p)[x]`. """
return [ -coeff % p for coeff in f ]
def gf_add_ground(f, a, p, K):
"""Returns `f + a` where `f` in `GF(p)[x]` and `a` in `GF(p)`. """
if not f:
a = a % p
else:
a = (f[-1] + a) % p
if len(f) > 1:
return f[:-1] + [a]
if not a:
return []
else:
return [a]
def gf_sub_ground(f, a, p, K):
"""Returns `f - a` where `f` in `GF(p)[x]` and `a` in `GF(p)`. """
if not f:
a = -a % p
else:
a = (f[-1] - a) % p
if len(f) > 1:
return f[:-1] + [a]
if not a:
return []
else:
return [a]
def gf_mul_ground(f, a, p, K):
"""Returns `f * a` where `f` in `GF(p)[x]` and `a` in `GF(p)`. """
if not a:
return []
else:
return [ (a*b) % p for b in f ]
def gf_exquo_ground(f, a, p, K):
"""Returns `f / a` where `f` in `GF(p)[x]` and `a` in `GF(p)`. """
return gf_mul_ground(f, K.invert(a, p), p, K)
@cythonized("df,dg,k")
def gf_add(f, g, p, K):
"""Add polynomials in `GF(p)[x]`. """
if not f:
return g
if not g:
return f
df = gf_degree(f)
dg = gf_degree(g)
if df == dg:
return gf_strip([ (a + b) % p for a, b in zip(f, g) ])
else:
k = abs(df - dg)
if df > dg:
h, f = f[:k], f[k:]
else:
h, g = g[:k], g[k:]
return h + [ (a + b) % p for a, b in zip(f, g) ]
@cythonized("df,dg,k")
def gf_sub(f, g, p, K):
"""Subtract polynomials in `GF(p)[x]`. """
if not g:
return f
if not f:
return gf_neg(g, p, K)
df = gf_degree(f)
dg = gf_degree(g)
if df == dg:
return gf_strip([ (a - b) % p for a, b in zip(f, g) ])
else:
k = abs(df - dg)
if df > dg:
h, f = f[:k], f[k:]
else:
h, g = gf_neg(g[:k], p, K), g[k:]
return h + [ (a - b) % p for a, b in zip(f, g) ]
@cythonized("df,dg,dh,i,j")
def gf_mul(f, g, p, K):
"""Multiply polynomials in `GF(p)[x]`. """
df = gf_degree(f)
dg = gf_degree(g)
dh = df + dg
h = [0]*(dh+1)
for i in xrange(0, dh+1):
coeff = K.zero
for j in xrange(max(0, i-dg), min(i, df)+1):
coeff += f[j]*g[i-j]
h[i] = coeff % p
return gf_strip(h)
@cythonized("df,dh,i,j,jmin,jmax,n")
def gf_sqr(f, p, K):
"""Square polynomials in `GF(p)[x]`. """
df = gf_degree(f)
dh = 2*df
h = [0]*(dh+1)
for i in xrange(0, dh+1):
coeff = K.zero
jmin = max(0, i-df)
jmax = min(i, df)
n = jmax - jmin + 1
jmax = jmin + n // 2 - 1
for j in xrange(jmin, jmax+1):
coeff += f[j]*f[i-j]
coeff += coeff
if n & 1:
elem = f[jmax+1]
coeff += elem**2
h[i] = coeff % p
return gf_strip(h)
def gf_add_mul(f, g, h, p, K):
"""Returns `f + g*h` where `f`, `g`, `h` in `GF(p)[x]`. """
return gf_add(f, gf_mul(g, h, p, K), p, K)
def gf_sub_mul(f, g, h, p, K):
"""Returns `f - g*h` where `f`, `g`, `h` in `GF(p)[x]`. """
return gf_sub(f, gf_mul(g, h, p, K), p, K)
@cythonized("k")
def gf_expand(F, p, K):
"""Expand results of `factor()` in `GF(p)[x]`. """
if type(F) is tuple:
lc, F = F
else:
lc = K.one
g = [lc]
for f, k in F:
f = gf_pow(f, k, p, K)
g = gf_mul(g, f, p, K)
return g
@cythonized("df,dg,dq,dr,i,j")
def gf_div(f, g, p, K):
"""Division with remainder in `GF(p)[x]`.
Given univariate polynomials `f` and `g` with coefficients in a
finite field with `p` elements, returns polynomials `q` and `r`
(quotient and remainder) such that `f = q*g + r`.
Consider polynomials `x**3 + x + 1` and `x**2 + x` in GF(2)::
>>> from sympy.polys.galoistools import gf_div, gf_add_mul
>>> from sympy.polys.algebratools import ZZ
>>> gf_div([1, 0, 1, 1], [1, 1, 0], 2, ZZ)
([1, 1], [1])
As a result we obtained the quotient `x + 1` and remainder `1`, thus::
>>> gf_add_mul([1], [1, 1], [1, 1, 0], 2, ZZ)
[1, 0, 1, 1]
References
==========
.. [Monagan93] Michael Monagan, In-place Arithmetic for Polynomials
over Z_n, Proceedings of DISCO '92, Springer-Verlag LNCS, 721,
1993, pp. 22-34
.. [Gathen99] J. von zur Gathen, J. Gerhard, Modern Computer Algebra,
First Edition, Cambridge University Press, 1999, pp. 247
"""
df = gf_degree(f)
dg = gf_degree(g)
if not g:
raise ZeroDivisionError("polynomial division")
elif df < dg:
return [], f
inv = K.invert(g[0], p)
h, dq, dr = list(f), df-dg, dg-1
for i in xrange(0, df+1):
coeff = h[i]
for j in xrange(max(0, dg-i), min(df-i, dr)+1):
coeff -= h[i+j-dg] * g[dg-j]
if i <= dq:
coeff *= inv
h[i] = coeff % p
return h[:dq+1], gf_strip(h[dq+1:])
def gf_rem(f, g, p, K):
"""Returns polynomial remainder in `GF(p)[x]`. """
return gf_div(f, g, p, K)[1]
def gf_quo(f, g, p, K):
"""Returns polynomial quotient in `GF(p)[x]`. """
q, r = gf_div(f, g, p, K)
if not r:
return q
else:
raise ExactQuotientFailed('%s does not divide %s' % (g, f))
@cythonized("df,dg,dq,dr,i,j")
def gf_exquo(f, g, p, K):
"""Computes exact quotient in `GF(p)[x]`. """
df = gf_degree(f)
dg = gf_degree(g)
if not g:
raise ZeroDivisionError("polynomial division")
elif df < dg:
return []
inv = K.invert(g[0], p)
h, dq, dr = f[:], df-dg, dg-1
for i in xrange(0, dq+1):
coeff = h[i]
for j in xrange(max(0, dg-i), min(df-i, dr)+1):
coeff -= h[i+j-dg] * g[dg-j]
h[i] = (coeff * inv) % p
return h[:dq+1]
@cythonized("n")
def gf_lshift(f, n, K):
"""Efficiently multiply `f` by `x**n`. """
if not f:
return f
else:
return f + [K.zero]*n
@cythonized("n")
def gf_rshift(f, n, K):
"""Efficiently divide `f` by `x**n`. """
if not n:
return f, []
else:
return f[:-n], f[-n:]
def gf_pow(f, n, p, K):
"""Computes `f**n` in `GF(p)[x]` using repeated squaring. """
if not n:
return [K.one]
elif n == 1:
return f
elif n == 2:
return gf_sqr(f, p, K)
h = [K.one]
while True:
if n & 1:
h = gf_mul(h, f, p, K)
n -= 1
n >>= 1
if not n:
break
f = gf_sqr(f, p, K)
return h
def gf_pow_mod(f, n, g, p, K):
"""Computes `f**n` in `GF(p)[x]/(g)` using repeated squaring.
Given polynomials `f` and `g` in `GF(p)[x]` and a non-negative
integer `n`, efficiently computes `f**n (mod g)` i.e. remainder
from division `f**n` by `g` using repeated squaring algorithm.
References
==========
.. [Gathen99] J. von zur Gathen, J. Gerhard, Modern Computer Algebra,
First Edition, Cambridge University Press, 1999, pp. 69
"""
if not n:
return [K.one]
elif n == 1:
return gf_rem(f, g, p, K)
elif n == 2:
return gf_rem(gf_sqr(f, p, K), g, p, K)
h = [K.one]
while True:
if n & 1:
h = gf_mul(h, f, p, K)
h = gf_rem(h, g, p, K)
n -= 1
n >>= 1
if not n:
break
f = gf_sqr(f, p, K)
f = gf_rem(f, g, p, K)
return h
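# Illustrative sketch (values worked out by hand): computing x**5 in
# GF(3)[x]/(x**2 + 1); since x**2 = -1 there, x**5 = x*(x**2)**2 = x:
#
#     >>> from sympy.polys.algebratools import ZZ
#     >>> gf_pow_mod([1, 0], 5, [1, 0, 1], 3, ZZ)
#     [1, 0]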
def gf_gcd(f, g, p, K):
"""Euclidean Algorithm in `GF(p)[x]`. """
while g:
f, g = g, gf_rem(f, g, p, K)
return gf_monic(f, p, K)[1]
def gf_gcdex(f, g, p, K):
"""Extended Euclidean Algorithm in `GF(p)[x]`.
Given polynomials `f` and `g` in `GF(p)[x]`, computes polynomials
`s`, `t` and `h`, such that `h = gcd(f, g)` and `s*f + t*g = h`. The
typical application of EEA is solving polynomial diophantine equations.
Consider polynomials `f = (x + 7) (x + 1)`, `g = (x + 7) (x**2 + 1)`
in `GF(11)[x]`. Application of Extended Euclidean Algorithm gives::
>>> from sympy.polys.galoistools import gf_gcdex, gf_mul, gf_add
>>> from sympy.polys.algebratools import ZZ
>>> s, t, g = gf_gcdex([1,8,7], [1,7,1,7], 11, ZZ)
>>> s, t, g
([5, 6], [6], [1, 7])
As a result we obtained the polynomials `s = 5*x + 6` and `t = 6`, and
additionally `gcd(f, g) = x + 7`. This is correct because::
>>> S = gf_mul(s, [1,8,7], 11, ZZ)
>>> T = gf_mul(t, [1,7,1,7], 11, ZZ)
>>> gf_add(S, T, 11, ZZ) == [1, 7]
True
References
==========
.. [Gathen99] J. von zur Gathen, J. Gerhard, Modern Computer Algebra,
First Edition, Cambridge University Press, 1999, pp. 46
"""
if not (f or g):
return [K.one], [], []
p0, r0 = gf_monic(f, p, K)
p1, r1 = gf_monic(g, p, K)
if not f:
return [], [K.invert(p1, p)], r1
if not g:
return [K.invert(p0, p)], [], r0
s0, s1 = [K.invert(p0, p)], []
t0, t1 = [], [K.invert(p1, p)]
while True:
Q, R = gf_div(r0, r1, p, K)
if not R:
break
(lc, r1), r0 = gf_monic(R, p, K), r1
inv = K.invert(lc, p)
s = gf_sub_mul(s0, s1, Q, p, K)
t = gf_sub_mul(t0, t1, Q, p, K)
s1, s0 = gf_mul_ground(s, inv, p, K), s1
t1, t0 = gf_mul_ground(t, inv, p, K), t1
return s1, t1, r1
def gf_monic(f, p, K):
"""Returns LC and a monic polynomial in `GF(p)[x]`."""
if not f:
return K.zero, []
else:
lc = f[0]
if K.is_one(lc):
return lc, list(f)
else:
return lc, gf_exquo_ground(f, lc, p, K)
@cythonized("df,n")
def gf_diff(f, p, K):
"""Differentiate polynomial in `GF(p)[x]`. """
df = gf_degree(f)
h, n = [K.zero]*df, df
for coeff in f[:-1]:
coeff *= K(n)
coeff %= p
if coeff:
h[df-n] = coeff
n -= 1
return gf_strip(h)
def gf_eval(f, a, p, K):
"""Evaluate `f(a)` in `GF(p)` using Horner scheme. """
result = K.zero
for c in f:
result *= a
result += c
result %= p
return result
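# Illustrative sketch: Horner evaluation of x**2 + 1 at x = 2 over GF(5),
# i.e. ((0*2 + 1)*2 + 0)*2 + 1 = 5 = 0 (mod 5):
#
#     >>> from sympy.polys.algebratools import ZZ
#     >>> gf_eval([1, 0, 1], 2, 5, ZZ)
#     0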
def gf_multi_eval(f, A, p, K):
"""Evaluate `f(a)` for `a` in `[a_1, ..., a_n]`. """
return [ gf_eval(f, a, p, K) for a in A ]
def gf_compose(f, g, p, K):
"""Compute polynomial composition `f(g)` in `GF(p)[x]`. """
if len(g) <= 1:
return gf_strip([gf_eval(f, gf_LC(g, K), p, K)])
if not f:
return []
h = [f[0]]
for c in f[1:]:
h = gf_mul(h, g, p, K)
h = gf_add_ground(h, c, p, K)
return h
def gf_compose_mod(g, h, f, p, K):
"""Compute polynomial composition `g(h)` in `GF(p)[x]/(f)`. """
if not g:
return []
comp = [g[0]]
for a in g[1:]:
comp = gf_mul(comp, h, p, K)
comp = gf_add_ground(comp, a, p, K)
comp = gf_rem(comp, f, p, K)
return comp
@cythonized("n")
def gf_trace_map(a, b, c, n, f, p, K):
"""Compute polynomial trace map in `GF(p)[x]/(f)`.
Given polynomial `f` in `GF(p)[x]`, polynomials `a`, `b`, `c`
in quotient ring `GF(p)[x]/(f)` such that `b = c**t (mod f)`
for some positive power `t` of `p` and a positive integer `n`,
returns a mapping::
a -> a**t**n, a + a**t + a**t**2 + ... + a**t**n (mod f)
In factorization context, `b = x**p mod f` and `c = x mod f`.
This way we can efficiently compute trace polynomials in the equal
degree factorization routine, much faster than with other methods
like the iterated Frobenius algorithm, for large degrees.
References
==========
.. [Gathen92] J. von zur Gathen, V. Shoup, Computing Frobenius Maps
and Factoring Polynomials, ACM Symposium on Theory of Computing,
1992, pp. 187-224
"""
u = gf_compose_mod(a, b, f, p, K)
v = b
if n & 1:
U = gf_add(a, u, p, K)
V = b
else:
U = a
V = c
n >>= 1
while n:
u = gf_add(u, gf_compose_mod(u, v, f, p, K), p, K)
v = gf_compose_mod(v, v, f, p, K)
if n & 1:
U = gf_add(U, gf_compose_mod(u, V, f, p, K), p, K)
V = gf_compose_mod(v, V, f, p, K)
n >>= 1
return gf_compose_mod(a, V, f, p, K), U
@cythonized("i,n")
def gf_random(n, p, K):
"""Generate a random polynomial in `GF(p)[x]` of degree `n`. """
return [K.one] + [ K(int(uniform(0, p))) for i in xrange(0, n) ]
@cythonized("i,n")
def gf_irreducible(n, p, K):
"""Generate random irreducible polynomial of degree `n` in `GF(p)[x]`. """
while True:
f = gf_random(n, p, K)
if gf_irreducible_p(f, p, K):
return f
@cythonized("i,n")
def gf_irred_p_ben_or(f, p, K):
"""Ben-Or's polynomial irreducibility test over finite fields. """
n = gf_degree(f)
if n <= 1:
return True
_, f = gf_monic(f, p, K)
H = h = gf_pow_mod([K.one, K.zero], p, f, p, K)
for i in xrange(0, n//2):
g = gf_sub(h, [K.one, K.zero], p, K)
if gf_gcd(f, g, p, K) == [K.one]:
h = gf_compose_mod(h, H, f, p, K)
else:
return False
return True
@cythonized("i,n,d")
def gf_irred_p_rabin(f, p, K):
"""Rabin's polynomial irreducibility test over finite fields. """
n = gf_degree(f)
if n <= 1:
return True
_, f = gf_monic(f, p, K)
x = [K.one, K.zero]
H = h = gf_pow_mod(x, p, f, p, K)
indices = set([ n//d for d in factorint(n) ])
for i in xrange(1, n):
if i in indices:
g = gf_sub(h, x, p, K)
if gf_gcd(f, g, p, K) != [K.one]:
return False
h = gf_compose_mod(h, H, f, p, K)
return h == x
_irred_methods = {
'ben-or' : gf_irred_p_ben_or,
'rabin' : gf_irred_p_rabin,
}
def gf_irreducible_p(f, p, K, **args):
"""Test irreducibility of a polynomial `f` in `GF(p)[x]`. """
method = args.get('method')
if method is not None:
irred = _irred_methods[method](f, p, K)
else:
irred = gf_irred_p_rabin(f, p, K)
return irred
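# Illustrative sketch (worked out by hand over GF(2)): x**2 + x + 1 is
# irreducible, while x**2 + 1 = (x + 1)**2 is not:
#
#     >>> from sympy.polys.algebratools import ZZ
#     >>> gf_irreducible_p([1, 1, 1], 2, ZZ)
#     True
#     >>> gf_irreducible_p([1, 0, 1], 2, ZZ)
#     False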
def gf_sqf_p(f, p, K):
"""Returns `True` if `f` is square-free in `GF(p)[x]`. """
_, f = gf_monic(f, p, K)
if not f:
return True
else:
return gf_gcd(f, gf_diff(f, p, K), p, K) == [K.one]
def gf_sqf_part(f, p, K):
"""Returns square-free part of a `GF(p)[x]` polynomial. """
_, sqf = gf_sqf_list(f, p, K)
g = [K.one]
for f, _ in sqf:
g = gf_mul(g, f, p, K)
return g
@cythonized("i,n,d,r")
def gf_sqf_list(f, p, K):
"""Returns square-free decomposition of a `GF(p)[x]` polynomial.
Given a polynomial `f` in `GF(p)[x]`, returns the leading coefficient
of `f` and a square-free decomposition `f_1**e_1 f_2**e_2 ... f_k**e_k`
such that all `f_i` are monic polynomials and `(f_i, f_j)` for `i != j`
are co-prime and `e_1 ... e_k` are given in increasing order. All
trivial terms (i.e. `f_i = 1`) aren't included in the output.
Consider polynomial `f = x**11 + 1` over `GF(11)[x]`::
>>> from sympy.polys.galoistools import (
... gf_from_dict, gf_diff, gf_sqf_list, gf_pow,
... )
... # doctest: +NORMALIZE_WHITESPACE
>>> from sympy.polys.algebratools import ZZ
>>> f = gf_from_dict({11: 1, 0: 1}, 11, ZZ)
Note that `f'(x) = 0`::
>>> gf_diff(f, 11, ZZ)
[]
This phenomenon doesn't happen in characteristic zero. However, we can
still compute the square-free decomposition of `f` using `gf_sqf_list()`::
>>> gf_sqf_list(f, 11, ZZ)
(1, [([1, 1], 11)])
We obtained the factorization `f = (x + 1)**11`. This is correct because::
>>> gf_pow([1, 1], 11, 11, ZZ) == f
True
References
==========
.. [Geddes92] K. Geddes, S. Czapor, G. Labahn, Algorithms for
Computer Algebra, First Edition, Springer, 1992, pp. 343-347
"""
n, sqf, factors, r = 1, False, [], int(p)
lc, f = gf_monic(f, p, K)
if gf_degree(f) < 1:
return lc, []
while True:
F = gf_diff(f, p, K)
if F != []:
g = gf_gcd(f, F, p, K)
h = gf_exquo(f, g, p, K)
i = 1
while h != [K.one]:
G = gf_gcd(g, h, p, K)
H = gf_exquo(h, G, p, K)
if gf_degree(H) > 0:
factors.append((H, i*n))
g, h, i = gf_exquo(g, G, p, K), G, i+1
if g == [K.one]:
sqf = True
else:
f = g
if not sqf:
d = gf_degree(f) // r
for i in xrange(0, d+1):
f[i] = f[i*r]
f, n = f[:d+1], n*r
else:
break
return lc, factors
@cythonized("n,i,j,r")
def gf_Qmatrix(f, p, K):
"""Calculate Berlekamp's `Q` matrix. """
n, r = gf_degree(f), int(p)
q = [K.one] + [K.zero]*(n-1)
Q = [list(q)] + [[]]*(n-1)
for i in xrange(1, (n-1)*r + 1):
qq, c = [(-q[-1]*f[-1]) % p], q[-1]
for j in xrange(1, n):
qq.append((q[j-1] - c*f[-j-1]) % p)
if not (i % r):
Q[i//r] = list(qq)
q = qq
return Q
@cythonized("n,i,j,k")
def gf_Qbasis(Q, p, K):
"""Compute a basis of the kernel of `Q`. """
Q, n = [ list(q) for q in Q ], len(Q)
for k in xrange(0, n):
Q[k][k] = (Q[k][k] - K.one) % p
for k in xrange(0, n):
for i in xrange(k, n):
if Q[k][i]:
break
else:
continue
inv = K.invert(Q[k][i], p)
for j in xrange(0, n):
Q[j][i] = (Q[j][i]*inv) % p
for j in xrange(0, n):
t = Q[j][k]
Q[j][k] = Q[j][i]
Q[j][i] = t
for i in xrange(0, n):
if i != k:
q = Q[k][i]
for j in xrange(0, n):
Q[j][i] = (Q[j][i] - Q[j][k]*q) % p
for i in xrange(0, n):
for j in xrange(0, n):
if i == j:
Q[i][j] = (K.one - Q[i][j]) % p
else:
Q[i][j] = (-Q[i][j]) % p
basis = []
for q in Q:
if any(q):
basis.append(q)
return basis
@cythonized("i,k")
def gf_berlekamp(f, p, K):
"""Factor a square-free `f` in `GF(p)[x]` for small `p`. """
Q = gf_Qmatrix(f, p, K)
V = gf_Qbasis(Q, p, K)
for i, v in enumerate(V):
V[i] = gf_strip(list(reversed(v)))
factors = [f]
for k in xrange(1, len(V)):
for f in list(factors):
s = K.zero
while s < p:
g = gf_sub_ground(V[k], s, p, K)
h = gf_gcd(f, g, p, K)
if h != [K.one] and h != f:
factors.remove(f)
f = gf_exquo(f, h, p, K)
factors.extend([f, h])
if len(factors) == len(V):
return _sort_factors(factors, multiple=False)
s += K.one
return _sort_factors(factors, multiple=False)
@cythonized("i")
def gf_ddf_zassenhaus(f, p, K):
"""Cantor-Zassenhaus: Deterministic Distinct Degree Factorization
Given a monic square-free polynomial `f` in `GF(p)[x]`, computes
partial distinct degree factorization `f_1 ... f_d` of `f` where
`deg(f_i) != deg(f_j)` for `i != j`. The result is returned as a
list of pairs `(f_i, e_i)` where `deg(f_i) > 0` and `e_i > 0` is
an argument to the equal degree factorization routine.
Consider polynomial `x**15 - 1` in `GF(11)[x]`::
>>> from sympy.polys.galoistools import gf_from_dict
>>> from sympy.polys.algebratools import ZZ
>>> f = gf_from_dict({15: 1, 0: -1}, 11, ZZ)
Distinct degree factorization gives::
>>> from sympy.polys.galoistools import gf_ddf_zassenhaus
>>> gf_ddf_zassenhaus(f, 11, ZZ)
[([1, 0, 0, 0, 0, 10], 1), ([1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1], 2)]
which means `x**15 - 1 = (x**5 - 1) (x**10 + x**5 + 1)`. To obtain
factorization into irreducibles, use equal degree factorization
procedure (EDF) with each of the factors.
References
==========
.. [Gathen99] J. von zur Gathen, J. Gerhard, Modern Computer Algebra,
First Edition, Cambridge University Press, 1999, pp. 356
.. [Geddes92] K. Geddes, S. Czapor, G. Labahn, Algorithms for Computer
Algebra, First Edition, Springer, 1992, pp. 368-371
"""
i, g, factors = 1, [K.one, K.zero], []
while 2*i <= gf_degree(f):
g = gf_pow_mod(g, int(p), f, p, K)
h = gf_gcd(f, gf_sub(g, [K.one, K.zero], p, K), p, K)
if h != [K.one]:
factors.append((h, i))
f = gf_exquo(f, h, p, K)
g = gf_rem(g, f, p, K)
i += 1
if f != [K.one]:
return factors + [(f, gf_degree(f))]
else:
return factors
@cythonized("n,N,i")
def gf_edf_zassenhaus(f, n, p, K):
"""Cantor-Zassenhaus: Probabilistic Equal Degree Factorization
Given a monic square-free polynomial `f` in `GF(p)[x]` and integer
`n` such that `n` divides `deg(f)`, returns all irreducible factors
`f_1 ... f_d` of `f`, each of degree `n`. This is a complete
factorization in Galois fields.
Consider square-free polynomial `f = x**3 + x**2 + x + 1` in
`GF(5)[x]`. Lets compute its irreducible factors of degree one::
>>> from sympy.polys.galoistools import gf_edf_zassenhaus
>>> from sympy.polys.algebratools import ZZ
>>> gf_edf_zassenhaus([1,1,1,1], 1, 5, ZZ)
[[1, 1], [1, 2], [1, 3]]
References
==========
.. [Gathen99] J. von zur Gathen, J. Gerhard, Modern Computer Algebra,
First Edition, Cambridge University Press, 1999, pp. 358
.. [Geddes92] K. Geddes, S. Czapor, G. Labahn, Algorithms for Computer
Algebra, First Edition, Springer, 1992, pp. 371-373
"""
factors, q = [f], int(p)
if gf_degree(f) <= n:
return factors
N = gf_degree(f) // n
while len(factors) < N:
r = gf_random(2*n-1, p, K)
if p == 2:
h = r
for i in xrange(0, 2**(n*N-1)):
r = gf_pow_mod(r, 2, f, p, K)
h = gf_add(h, r, p, K)
g = gf_gcd(f, h, p, K)
else:
h = gf_pow_mod(r, (q**n-1) // 2, f, p, K)
g = gf_gcd(f, gf_sub_ground(h, K.one, p, K), p, K)
if g != [K.one] and g != f:
factors = gf_edf_zassenhaus(g, n, p, K) \
+ gf_edf_zassenhaus(gf_exquo(f, g, p, K), n, p, K)
return _sort_factors(factors, multiple=False)
@cythonized("n,k,i,j")
def gf_ddf_shoup(f, p, K):
"""Kaltofen-Shoup: Deterministic Distinct Degree Factorization
Given a monic square-free polynomial `f` in `GF(p)[x]`, computes
partial distinct degree factorization `f_1 ... f_d` of `f` where
`deg(f_i) != deg(f_j)` for `i != j`. The result is returned as a
list of pairs `(f_i, e_i)` where `deg(f_i) > 0` and `e_i > 0` is
an argument to the equal degree factorization routine.
This algorithm is an improved version of Zassenhaus algorithm for
large `deg(f)` and modulus `p` (especially for `deg(f) ~ lg(p)`).
References
==========
.. [Kaltofen98] E. Kaltofen, V. Shoup, Subquadratic-time Factoring
of Polynomials over Finite Fields, Mathematics of Computation,
Volume 67, Issue 223, 1998, pp. 1179-1197
.. [Shoup95] V. Shoup, A New Polynomial Factorization Algorithm and
its Implementation, Journal of Symbolic Computation, Volume 20,
Issue 4, 1995, pp. 363-397
.. [Gathen92] J. von zur Gathen, V. Shoup, Computing Frobenius Maps
and Factoring Polynomials, ACM Symposium on Theory of Computing,
1992, pp. 187-224
"""
n = gf_degree(f)
k = int(ceil(sqrt(n//2)))
h = gf_pow_mod([K.one, K.zero], int(p), f, p, K)
U = [[K.one,K.zero], h] + [K.zero]*(k-1)
for i in xrange(2, k+1):
U[i] = gf_compose_mod(U[i-1], h, f, p, K)
h, U = U[k], U[:k]
V = [h] + [K.zero]*(k-1)
for i in xrange(1, k):
V[i] = gf_compose_mod(V[i-1], h, f, p, K)
factors = []
for i, v in enumerate(V):
h, j = [K.one], k-1
for u in U:
g = gf_sub(v, u, p, K)
h = gf_mul(h, g, p, K)
h = gf_rem(h, f, p, K)
g = gf_gcd(f, h, p, K)
f = gf_exquo(f, g, p, K)
for u in reversed(U):
h = gf_sub(v, u, p, K)
F = gf_gcd(g, h, p, K)
if F != [K.one]:
factors.append((F, k*(i+1)-j))
g, j = gf_exquo(g, F, p, K), j-1
if f != [K.one]:
factors.append((f, gf_degree(f)))
return factors
@cythonized("n,N,q")
def gf_edf_shoup(f, n, p, K):
"""Gathen-Shoup: Probabilistic Equal Degree Factorization
Given a monic square-free polynomial `f` in `GF(p)[x]` and integer
`n` such that `n` divides `deg(f)`, returns all irreducible factors
`f_1 ... f_d` of `f`, each of degree `n`. This is a complete
factorization over Galois fields.
This algorithm is an improved version of Zassenhaus algorithm for
large `deg(f)` and modulus `p` (especially for `deg(f) ~ lg(p)`).
References
==========
.. [Shoup91] V. Shoup, A Fast Deterministic Algorithm for Factoring
Polynomials over Finite Fields of Small Characteristic, In
Proceedings of International Symposium on Symbolic and
Algebraic Computation, 1991, pp. 14-21
.. [Gathen92] J. von zur Gathen, V. Shoup, Computing Frobenius Maps
and Factoring Polynomials, ACM Symposium on Theory of Computing,
1992, pp. 187-224
"""
N, q = gf_degree(f), int(p)
if not N:
return []
if N <= n:
return [f]
factors, x = [f], [K.one, K.zero]
r = gf_random(N-1, p, K)
h = gf_pow_mod(x, q, f, p, K)
H = gf_trace_map(r, h, x, n-1, f, p, K)[1]
if p == 2:
h1 = gf_gcd(f, H, p, K)
h2 = gf_exquo(f, h1, p, K)
factors = gf_edf_shoup(h1, n, p, K) \
+ gf_edf_shoup(h2, n, p, K)
else:
h = gf_pow_mod(H, (q-1)//2, f, p, K)
h1 = gf_gcd(f, h, p, K)
h2 = gf_gcd(f, gf_sub_ground(h, K.one, p, K), p, K)
h3 = gf_exquo(f, gf_mul(h1, h2, p, K), p, K)
factors = gf_edf_shoup(h1, n, p, K) \
+ gf_edf_shoup(h2, n, p, K) \
+ gf_edf_shoup(h3, n, p, K)
return _sort_factors(factors, multiple=False)
@cythonized("n")
def gf_zassenhaus(f, p, K):
"""Factor a square-free `f` in `GF(p)[x]` for medium `p`. """
factors = []
for factor, n in gf_ddf_zassenhaus(f, p, K):
factors += gf_edf_zassenhaus(factor, n, p, K)
return _sort_factors(factors, multiple=False)
@cythonized("n")
def gf_shoup(f, p, K):
"""Factor a square-free `f` in `GF(p)[x]` for large `p`. """
factors = []
for factor, n in gf_ddf_shoup(f, p, K):
factors += gf_edf_shoup(factor, n, p, K)
return _sort_factors(factors, multiple=False)
_factor_methods = {
'berlekamp' : gf_berlekamp, # `p` : small
'zassenhaus' : gf_zassenhaus, # `p` : medium
'shoup' : gf_shoup, # `p` : large
}
def gf_factor_sqf(f, p, K, **args):
"""Factor a square-free polynomial `f` in `GF(p)[x]`. """
lc, f = gf_monic(f, p, K)
if gf_degree(f) < 1:
return lc, []
method = args.get('method')
if method is not None:
factors = _factor_methods[method](f, p, K)
else:
factors = gf_zassenhaus(f, p, K)
return lc, factors
@cythonized("n")
def gf_factor(f, p, K, **args):
"""Factor (non square-free) polynomials in `GF(p)[x]`.
Given a possibly non square-free polynomial `f` in `GF(p)[x]`, returns
its complete factorization into irreducibles::
f_1(x)**e_1 f_2(x)**e_2 ... f_d(x)**e_d
where each `f_i` is a monic polynomial and `gcd(f_i, f_j) == 1`, for
`i != j`. The result is given as a tuple consisting of the leading
coefficient of `f` and a list of factors with their multiplicities.
The algorithm proceeds by first computing square-free decomposition
of `f` and then iteratively factoring each of the square-free factors.
Consider a non square-free polynomial `f = (7*x + 1) (x + 2)**2` in
`GF(11)[x]`. We obtain its factorization into irreducibles as follows::
>>> from sympy.polys.galoistools import gf_factor
>>> from sympy.polys.algebratools import ZZ
>>> gf_factor([5, 2, 7, 2], 11, ZZ)
(5, [([1, 2], 1), ([1, 8], 2)])
We arrived at the factorization `f = 5 (x + 2) (x + 8)**2`. We didn't
recover the exact form of the input polynomial because we requested to
get monic factors of `f` and its leading coefficient separately.
Square-free factors of `f` can be factored into irreducibles over
`GF(p)` using three very different methods:
1. Berlekamp - efficient for very small values of `p` (usually `p < 25`)
2. Cantor-Zassenhaus - efficient on average input and with "typical" `p`
3. Shoup-Kaltofen-Gathen - efficient with very large inputs and modulus
If you want to use a specific factorization method, instead of relying
on the algorithm to choose one for you, specify the `method` keyword and
set it to one of the `berlekamp`, `zassenhaus` or `shoup` values.
References
==========
.. [Gathen99] J. von zur Gathen, J. Gerhard, Modern Computer Algebra,
First Edition, Cambridge University Press, 1999, pp. 365
"""
lc, f = gf_monic(f, p, K)
if gf_degree(f) < 1:
return lc, []
factors = []
for g, n in gf_sqf_list(f, p, K)[1]:
for h in gf_factor_sqf(g, p, K, **args)[1]:
factors.append((h, n))
return lc, _sort_factors(factors)
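# Illustrative sketch: the `method` keyword documented above lets the caller
# force a particular square-free factorization backend. Since the monic
# irreducible factors are unique, continuing the docstring example,
#
#     >>> gf_factor([5, 2, 7, 2], 11, ZZ, method='berlekamp')
#
# is expected to return the same result as the default Zassenhaus route,
# namely (5, [([1, 2], 1), ([1, 8], 2)]).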
|
py | 1a468af42d9560b2a1031b8a6b0dd03ca3fb5286 | from contextlib import contextmanager
# A workaround for segfaults when using OE on GHA...
try:
from openeye import oechem
if (
oechem.OEChemIsLicensed()
and oechem.OEGetMemPoolMode() == oechem.OEMemPoolMode_Default
):
oechem.OESetMemPoolMode(
oechem.OEMemPoolMode_Mutexed | oechem.OEMemPoolMode_UnboundedCache
)
except (ImportError, ModuleNotFoundError):
pass
@contextmanager
def does_not_raise():
"""A helpful context manager to use inplace of a pytest raise statement
when no exception is expected."""
yield
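# Illustrative usage (a sketch; check_positive is a hypothetical function
# under test), following the usual pytest parametrization idiom:
#
#     @pytest.mark.parametrize("value, expectation", [
#         (1, does_not_raise()),
#         (-1, pytest.raises(ValueError)),
#     ])
#     def test_check_positive(value, expectation):
#         with expectation:
#             check_positive(value)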
|
py | 1a468b3688ecd9790c23a21838d84f8ecade7205 | # flake8: noqa: F811, F401
import asyncio
import sys
from typing import Dict, List, Optional, Tuple
import aiosqlite
import pytest
from taco.consensus.block_header_validation import validate_finished_header_block
from taco.consensus.block_record import BlockRecord
from taco.consensus.blockchain import Blockchain
from taco.consensus.default_constants import DEFAULT_CONSTANTS
from taco.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty
from taco.consensus.full_block_to_block_record import block_to_block_record
from taco.full_node.block_store import BlockStore
from taco.full_node.coin_store import CoinStore
from taco.server.start_full_node import SERVICE_NAME
from taco.types.blockchain_format.sized_bytes import bytes32
from taco.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from taco.util.block_cache import BlockCache
from tests.block_tools import test_constants
from taco.util.config import load_config
from taco.util.default_root import DEFAULT_ROOT_PATH
from taco.util.generator_tools import get_block_header
from tests.setup_nodes import bt
try:
from reprlib import repr
except ImportError:
pass
from taco.consensus.pot_iterations import calculate_iterations_quality
from taco.full_node.weight_proof import ( # type: ignore
WeightProofHandler,
_map_sub_epoch_summaries,
_validate_sub_epoch_segments,
_validate_summaries_weight,
)
from taco.types.full_block import FullBlock
from taco.types.header_block import HeaderBlock
from taco.util.ints import uint32, uint64
from tests.core.fixtures import (
default_400_blocks,
default_1000_blocks,
default_10000_blocks,
default_10000_blocks_compact,
pre_genesis_empty_slots_1000_blocks,
)
@pytest.fixture(scope="session")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
def count_sub_epochs(blockchain, last_hash) -> int:
curr = blockchain._sub_blocks[last_hash]
count = 0
while True:
if curr.height == 0:
break
# next sub block
curr = blockchain._sub_blocks[curr.prev_hash]
# if end of sub-epoch
if curr.sub_epoch_summary_included is not None:
count += 1
return count
def get_prev_ses_block(sub_blocks, last_hash) -> Tuple[BlockRecord, int]:
curr = sub_blocks[last_hash]
blocks = 1
while curr.height != 0:
# next sub block
curr = sub_blocks[curr.prev_hash]
# if end of sub-epoch
if curr.sub_epoch_summary_included is not None:
return curr, blocks
blocks += 1
assert False
async def load_blocks_dont_validate(
blocks,
) -> Tuple[
Dict[bytes32, HeaderBlock], Dict[uint32, bytes32], Dict[bytes32, BlockRecord], Dict[bytes32, SubEpochSummary]
]:
header_cache: Dict[bytes32, HeaderBlock] = {}
height_to_hash: Dict[uint32, bytes32] = {}
sub_blocks: Dict[bytes32, BlockRecord] = {}
sub_epoch_summaries: Dict[bytes32, SubEpochSummary] = {}
prev_block = None
difficulty = test_constants.DIFFICULTY_STARTING
block: FullBlock
for block in blocks:
if block.height > 0:
assert prev_block is not None
difficulty = block.reward_chain_block.weight - prev_block.weight
if block.reward_chain_block.challenge_chain_sp_vdf is None:
assert block.reward_chain_block.signage_point_index == 0
cc_sp: bytes32 = block.reward_chain_block.pos_ss_cc_challenge_hash
else:
cc_sp = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
quality_string: Optional[bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
test_constants,
block.reward_chain_block.pos_ss_cc_challenge_hash,
cc_sp,
)
assert quality_string is not None
required_iters: uint64 = calculate_iterations_quality(
test_constants.DIFFICULTY_CONSTANT_FACTOR,
quality_string,
block.reward_chain_block.proof_of_space.size,
difficulty,
cc_sp,
)
sub_block = block_to_block_record(
test_constants, BlockCache(sub_blocks, height_to_hash), required_iters, block, None
)
sub_blocks[block.header_hash] = sub_block
height_to_hash[block.height] = block.header_hash
header_cache[block.header_hash] = get_block_header(block, [], [])
if sub_block.sub_epoch_summary_included is not None:
sub_epoch_summaries[block.height] = sub_block.sub_epoch_summary_included
prev_block = block
return header_cache, height_to_hash, sub_blocks, sub_epoch_summaries
async def _test_map_summaries(blocks, header_cache, height_to_hash, sub_blocks, summaries):
curr = sub_blocks[blocks[-1].header_hash]
orig_summaries: Dict[int, SubEpochSummary] = {}
while curr.height > 0:
if curr.sub_epoch_summary_included is not None:
orig_summaries[curr.height] = curr.sub_epoch_summary_included
# next sub block
curr = sub_blocks[curr.prev_hash]
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
# sub epoch summaries validate hashes
summaries, sub_epoch_data_weight, _ = _map_sub_epoch_summaries(
test_constants.SUB_EPOCH_BLOCKS,
test_constants.GENESIS_CHALLENGE,
wp.sub_epochs,
test_constants.DIFFICULTY_STARTING,
)
assert len(summaries) == len(orig_summaries)
class TestWeightProof:
@pytest.mark.asyncio
async def test_weight_proof_map_summaries_1(self, default_400_blocks):
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(default_400_blocks)
await _test_map_summaries(default_400_blocks, header_cache, height_to_hash, sub_blocks, summaries)
@pytest.mark.asyncio
async def test_weight_proof_map_summaries_2(self, default_1000_blocks):
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(default_1000_blocks)
await _test_map_summaries(default_1000_blocks, header_cache, height_to_hash, sub_blocks, summaries)
@pytest.mark.asyncio
async def test_weight_proof_summaries_1000_blocks(self, default_1000_blocks):
blocks = default_1000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
summaries, sub_epoch_data_weight, _ = _map_sub_epoch_summaries(
wpf.constants.SUB_EPOCH_BLOCKS,
wpf.constants.GENESIS_CHALLENGE,
wp.sub_epochs,
wpf.constants.DIFFICULTY_STARTING,
)
assert _validate_summaries_weight(test_constants, sub_epoch_data_weight, summaries, wp)
# assert res is not None
@pytest.mark.asyncio
async def test_weight_proof_bad_peak_hash(self, default_1000_blocks):
blocks = default_1000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(b"sadgfhjhgdgsfadfgh")
assert wp is None
@pytest.mark.asyncio
@pytest.mark.skip(reason="broken")
async def test_weight_proof_from_genesis(self, default_400_blocks):
blocks = default_400_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
@pytest.mark.asyncio
async def test_weight_proof_edge_cases(self, default_400_blocks):
blocks: List[FullBlock] = default_400_blocks
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1, block_list_input=blocks, seed=b"asdfghjkl", force_overflow=True, skip_slots=2
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1, block_list_input=blocks, seed=b"asdfghjkl", force_overflow=True, skip_slots=1
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1, block_list_input=blocks, seed=b"asdfghjkl", force_overflow=True, skip_slots=2
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
skip_slots=4,
normalized_to_identity_cc_eos=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
10,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
skip_slots=4,
normalized_to_identity_icc_eos=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
10,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
skip_slots=4,
normalized_to_identity_cc_ip=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
10,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
skip_slots=4,
normalized_to_identity_cc_sp=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
1, block_list_input=blocks, seed=b"asdfghjkl", force_overflow=True, skip_slots=4
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
10,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=True,
)
blocks: List[FullBlock] = bt.get_consecutive_blocks(
300,
block_list_input=blocks,
seed=b"asdfghjkl",
force_overflow=False,
)
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, {}))
valid, fork_point = wpf.validate_weight_proof_single_proc(wp)
assert valid
assert fork_point == 0
@pytest.mark.asyncio
async def test_weight_proof1000(self, default_1000_blocks):
blocks = default_1000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, {}))
valid, fork_point = wpf.validate_weight_proof_single_proc(wp)
assert valid
assert fork_point == 0
@pytest.mark.asyncio
async def test_weight_proof1000_pre_genesis_empty_slots(self, pre_genesis_empty_slots_1000_blocks):
blocks = pre_genesis_empty_slots_1000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, {}))
valid, fork_point = wpf.validate_weight_proof_single_proc(wp)
assert valid
assert fork_point == 0
@pytest.mark.asyncio
async def test_weight_proof10000__blocks_compact(self, default_10000_blocks_compact):
blocks = default_10000_blocks_compact
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, {}))
valid, fork_point = wpf.validate_weight_proof_single_proc(wp)
assert valid
assert fork_point == 0
@pytest.mark.asyncio
async def test_weight_proof1000_partial_blocks_compact(self, default_10000_blocks_compact):
blocks: List[FullBlock] = bt.get_consecutive_blocks(
100,
block_list_input=default_10000_blocks_compact,
seed=b"asdfghjkl",
normalized_to_identity_cc_ip=True,
normalized_to_identity_cc_eos=True,
normalized_to_identity_icc_eos=True,
)
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, {}))
valid, fork_point = wpf.validate_weight_proof_single_proc(wp)
assert valid
assert fork_point == 0
@pytest.mark.asyncio
async def test_weight_proof10000(self, default_10000_blocks):
blocks = default_10000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
assert wp is not None
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, {}, height_to_hash, {}))
valid, fork_point = wpf.validate_weight_proof_single_proc(wp)
assert valid
assert fork_point == 0
@pytest.mark.asyncio
async def test_check_num_of_samples(self, default_10000_blocks):
blocks = default_10000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf.get_proof_of_weight(blocks[-1].header_hash)
curr = -1
samples = 0
for sub_epoch_segment in wp.sub_epoch_segments:
if sub_epoch_segment.sub_epoch_n > curr:
curr = sub_epoch_segment.sub_epoch_n
samples += 1
assert samples <= wpf.MAX_SAMPLES
@pytest.mark.asyncio
async def test_weight_proof_extend_no_ses(self, default_1000_blocks):
blocks = default_1000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
last_ses_height = sorted(summaries.keys())[-1]
wpf_synced = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf_synced.get_proof_of_weight(blocks[last_ses_height].header_hash)
assert wp is not None
# todo for each sampled sub epoch, validate number of segments
wpf_not_synced = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, {}))
valid, fork_point, _ = await wpf_not_synced.validate_weight_proof(wp)
assert valid
assert fork_point == 0
# extend proof with 100 blocks
new_wp = await wpf_synced._create_proof_of_weight(blocks[-1].header_hash)
valid, fork_point, _ = await wpf_not_synced.validate_weight_proof(new_wp)
assert valid
assert fork_point == 0
@pytest.mark.asyncio
async def test_weight_proof_extend_new_ses(self, default_1000_blocks):
blocks = default_1000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
# delete last summary
last_ses_height = sorted(summaries.keys())[-1]
last_ses = summaries[last_ses_height]
del summaries[last_ses_height]
wpf_synced = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wp = await wpf_synced.get_proof_of_weight(blocks[last_ses_height - 10].header_hash)
assert wp is not None
wpf_not_synced = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, {}))
valid, fork_point, _ = await wpf_not_synced.validate_weight_proof(wp)
assert valid
assert fork_point == 0
# extend proof with 100 blocks
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
summaries[last_ses_height] = last_ses
wpf_synced.blockchain = BlockCache(sub_blocks, header_cache, height_to_hash, summaries)
new_wp = await wpf_synced._create_proof_of_weight(blocks[-1].header_hash)
valid, fork_point, _ = await wpf_not_synced.validate_weight_proof(new_wp)
assert valid
assert fork_point == 0
wpf_synced.blockchain = BlockCache(sub_blocks, header_cache, height_to_hash, summaries)
new_wp = await wpf_synced._create_proof_of_weight(blocks[last_ses_height].header_hash)
valid, fork_point, _ = await wpf_not_synced.validate_weight_proof(new_wp)
assert valid
assert fork_point == 0
valid, fork_point, _ = await wpf.validate_weight_proof(new_wp)
assert valid
assert fork_point != 0
@pytest.mark.asyncio
async def test_weight_proof_extend_multiple_ses(self, default_1000_blocks):
blocks = default_1000_blocks
header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks)
last_ses_height = sorted(summaries.keys())[-1]
last_ses = summaries[last_ses_height]
before_last_ses_height = sorted(summaries.keys())[-2]
before_last_ses = summaries[before_last_ses_height]
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
wpf_verify = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, {}))
for x in range(10, -1, -1):
wp = await wpf.get_proof_of_weight(blocks[before_last_ses_height - x].header_hash)
assert wp is not None
valid, fork_point, _ = await wpf_verify.validate_weight_proof(wp)
assert valid
assert fork_point == 0
# extend proof with 100 blocks
summaries[last_ses_height] = last_ses
summaries[before_last_ses_height] = before_last_ses
wpf = WeightProofHandler(test_constants, BlockCache(sub_blocks, header_cache, height_to_hash, summaries))
new_wp = await wpf._create_proof_of_weight(blocks[-1].header_hash)
valid, fork_point, _ = await wpf.validate_weight_proof(new_wp)
assert valid
assert fork_point != 0
@pytest.mark.skip("used for debugging")
@pytest.mark.asyncio
async def test_weight_proof_from_database(self):
connection = await aiosqlite.connect("path to db")
block_store: BlockStore = await BlockStore.create(connection)
blocks = await block_store.get_block_records_in_range(0, 0xFFFFFFFF)
peak = len(blocks) - 1
peak_height = blocks[peak].height
headers = await block_store.get_header_blocks_in_range(0, peak_height)
sub_height_to_hash = {}
sub_epoch_summaries = {}
# peak_header = await block_store.get_full_blocks_at([peak_height])
if len(blocks) == 0:
return None, None
assert peak is not None
# Sets the other state variables (peak_height and height_to_hash)
curr: BlockRecord = blocks[peak]
while True:
sub_height_to_hash[curr.height] = curr.header_hash
if curr.sub_epoch_summary_included is not None:
sub_epoch_summaries[curr.height] = curr.sub_epoch_summary_included
if curr.height == 0:
break
curr = blocks[curr.prev_hash]
assert len(sub_height_to_hash) == peak_height + 1
block_cache = BlockCache(blocks, headers, sub_height_to_hash, sub_epoch_summaries)
wpf = WeightProofHandler(DEFAULT_CONSTANTS, block_cache)
wp = await wpf._create_proof_of_weight(sub_height_to_hash[peak_height - 50])
valid, fork_point = wpf.validate_weight_proof_single_proc(wp)
await connection.close()
assert valid
print(f"size of proof is {get_size(wp)}")
def get_size(obj, seen=None):
"""Recursively finds size of objects"""
size = sys.getsizeof(obj)
if seen is None:
seen = set()
obj_id = id(obj)
if obj_id in seen:
return 0
# Important mark as seen *before* entering recursion to gracefully handle
# self-referential objects
seen.add(obj_id)
if isinstance(obj, dict):
size += sum([get_size(v, seen) for v in obj.values()])
size += sum([get_size(k, seen) for k in obj.keys()])
elif hasattr(obj, "__dict__"):
size += get_size(obj.__dict__, seen)
elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes, bytearray)):
size += sum([get_size(i, seen) for i in obj])
return size
|
py | 1a468cccc8d020383eb48ea5306760d540181234 |
def send(identity,message):
''' Dummy Send Function '''
print( 'Default Send: {0} {1}'.format(identity,message) )
return 'Default Transport', True, {}
|
py | 1a468d38a50f18e91d0abd1bf04b5b35fd5e5fe9 | import os
from setuptools import setup, Extension
try:
from Cython.Distutils import build_ext
except:
from setuptools.command.build_ext import build_ext
ext_modules = [Extension('gamtools.cosegregation_internal',
["lib/gamtools/cosegregation_internal.c"])]
else:
ext_modules = [Extension('gamtools.cosegregation_internal',
["lib/gamtools/cosegregation_internal.pyx"])]
class CustomBuildExtCommand(build_ext):
"""Customized setuptools build_ext command - checks numpy is installed."""
def run(self):
# Check numpy is installed before trying to find the location
# of numpy headers
try:
import numpy
except ImportError:
raise ImportError('numpy needs to be installed before GAMtools can be '
'compiled. Try installing with "pip install numpy" '
'before installing GAMtools.')
self.include_dirs.append(numpy.get_include())
build_ext.run(self)
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "gamtools",
version = "1.1.0",
author = "Rob Beagrie",
author_email = "[email protected]",
url = "http://gam.tools",
description = ("A package containing some utilities for analyzing GAM data."),
license = "Apache2.0",
package_dir = {'': 'lib'},
packages=['gamtools', 'gamtools.qc'],
ext_modules = ext_modules,
install_requires=[
'cython',
'numpy',
'scipy',
'pandas',
'wrapit',
'pytest'],
extras_require={
':python_version<"3.0"': ['doit==0.29.0'],
':python_version>="3.0"': ['doit==0.30.0'],
':python_version<"3.0"': ['mock'],
},
# Set include_dirs in a custom build_ext class so that numpy is only
# required if we are compiling C files
cmdclass={
'build_ext': CustomBuildExtCommand,
},
entry_points = {
# TODO: make new EIYBrowse filetypes using IO functions in gamtools.matrix
#'EIYBrowse.filetypes': [
# 'gam_segmentation_file = gamtools.segmentation:GamSegmentationFile',
#],
'console_scripts': [
'gamtools = gamtools.main:main',
'create_empty_bedgraph = gamtools.utils:empty_bedgraph_from_cmdline',
]
},
long_description=read('README.md'),
include_package_data=True,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"License :: OSI Approved :: Apache Software License",
],
)
|
py | 1a468df18fb845d0756256e5508b7eba8259c109 | # -*- coding: utf-8 -*-
import logging
from abc import abstractmethod
import numpy as np
import tensorflow as tf
from jack.readers.multiple_choice.shared import AbstractSingleSupportMCModel
from jack.tfutil.attention import attention_softmax3d
from jack.tfutil.masking import mask_3d
logger = logging.getLogger(__name__)
class DecomposableAttentionModel(AbstractSingleSupportMCModel):
def forward_pass(self, shared_resources, embedded_question, embedded_support, num_classes, tensors):
# final states_fw_bw dimensions:
# [[[batch, output dim], [batch, output_dim]]
model_kwargs = {
'sequence1': embedded_question,
'sequence1_length': tensors.question_length,
'sequence2': embedded_support,
'sequence2_length': tensors.support_length,
'representation_size': shared_resources.config['repr_dim'],
'dropout_keep_prob': 1.0 - shared_resources.config.get('dropout', 0),
'use_masking': True,
}
model = FeedForwardDAM(**model_kwargs)
logits = model()
return logits
class BaseDecomposableAttentionModel:
@abstractmethod
def _transform_input(self, sequence, reuse=False):
raise NotImplementedError
@abstractmethod
def _transform_attend(self, sequence, reuse=False):
raise NotImplementedError
@abstractmethod
def _transform_compare(self, sequence, reuse=False):
raise NotImplementedError
@abstractmethod
def _transform_aggregate(self, v1_v2, reuse=False):
raise NotImplementedError
def __init__(self, sequence1, sequence1_length, sequence2, sequence2_length,
nb_classes=3, reuse=False, use_masking=False, init_std_dev=0.01, *args, **kwargs):
self.init_std_dev = init_std_dev
self.nb_classes = nb_classes
self.sequence1 = sequence1
self.sequence1_length = sequence1_length
self.sequence2 = sequence2
self.sequence2_length = sequence2_length
self.reuse = reuse
embedding1_size = self.sequence1.get_shape()[-1].value
embedding2_size = self.sequence2.get_shape()[-1].value
assert embedding1_size == embedding2_size
# [batch_size, time_steps, embedding_size] -> [batch_size, time_steps, representation_size]
self.transformed_sequence1 = self._transform_input(self.sequence1, reuse=self.reuse)
# [batch_size, time_steps, embedding_size] -> [batch_size, time_steps, representation_size]
self.transformed_sequence2 = self._transform_input(self.sequence2, reuse=True)
self.transformed_sequence1_length = self.sequence1_length
self.transformed_sequence2_length = self.sequence2_length
logger.info('Building the Attend graph ..')
self.raw_attentions = None
self.attention_sentence1 = self.attention_sentence2 = None
# tensors with shape (batch_size, time_steps, num_units)
self.alpha, self.beta = self.attend(self.transformed_sequence1, self.transformed_sequence2,
sequence1_lengths=self.transformed_sequence1_length,
sequence2_lengths=self.transformed_sequence2_length,
use_masking=use_masking, reuse=self.reuse)
logger.info('Building the Compare graph ..')
# tensor with shape (batch_size, time_steps, num_units)
self.v1 = self.compare(self.transformed_sequence1, self.beta, reuse=self.reuse)
# tensor with shape (batch_size, time_steps, num_units)
self.v2 = self.compare(self.transformed_sequence2, self.alpha, reuse=True)
logger.info('Building the Aggregate graph ..')
self.logits = self.aggregate(self.v1, self.v2, self.nb_classes,
v1_lengths=self.transformed_sequence1_length,
v2_lengths=self.transformed_sequence2_length,
use_masking=use_masking, reuse=self.reuse)
def __call__(self):
return self.logits
def attend(self, sequence1, sequence2,
sequence1_lengths=None, sequence2_lengths=None, use_masking=False, reuse=False):
"""
Attend phase.
Args:
sequence1: tensor with shape (batch_size, time_steps, num_units)
sequence2: tensor with shape (batch_size, time_steps, num_units)
sequence1_lengths: time_steps in sequence1
sequence2_lengths: time_steps in sequence2
use_masking: use masking
reuse: reuse variables
Returns:
two tensors with shape (batch_size, time_steps, num_units)
"""
with tf.variable_scope('attend') as _:
# tensor with shape (batch_size, time_steps, num_units)
transformed_sequence1 = self._transform_attend(sequence1, reuse)
# tensor with shape (batch_size, time_steps, num_units)
transformed_sequence2 = self._transform_attend(sequence2, True)
# tensor with shape (batch_size, time_steps, time_steps)
self.raw_attentions = tf.matmul(transformed_sequence1, tf.transpose(transformed_sequence2, [0, 2, 1]))
masked_raw_attentions = self.raw_attentions
if use_masking:
masked_raw_attentions = mask_3d(sequences=masked_raw_attentions,
sequence_lengths=sequence2_lengths,
mask_value=- np.inf, dimension=2)
self.attention_sentence1 = attention_softmax3d(masked_raw_attentions)
# tensor with shape (batch_size, time_steps, time_steps)
attention_transposed = tf.transpose(self.raw_attentions, [0, 2, 1])
masked_attention_transposed = attention_transposed
if use_masking:
masked_attention_transposed = mask_3d(sequences=masked_attention_transposed,
sequence_lengths=sequence1_lengths,
mask_value=- np.inf, dimension=2)
self.attention_sentence2 = attention_softmax3d(masked_attention_transposed)
# tensors with shape (batch_size, time_steps, num_units)
alpha = tf.matmul(self.attention_sentence2, sequence1, name='alpha')
beta = tf.matmul(self.attention_sentence1, sequence2, name='beta')
return alpha, beta
def compare(self, sentence, soft_alignment, reuse=False):
"""
Compare phase.
Args:
sentence: tensor with shape (batch_size, time_steps, num_units)
soft_alignment: tensor with shape (batch_size, time_steps, num_units)
reuse: reuse variables
Returns:
tensor with shape (batch_size, time_steps, num_units)
"""
# tensor with shape (batch, time_steps, num_units)
sentence_and_alignment = tf.concat(axis=2, values=[sentence, soft_alignment])
transformed_sentence_and_alignment = self._transform_compare(sentence_and_alignment, reuse=reuse)
return transformed_sentence_and_alignment
def aggregate(self, v1, v2, num_classes,
v1_lengths=None, v2_lengths=None, use_masking=False, reuse=False):
"""
Aggregate phase.
Args:
v1: tensor with shape (batch_size, time_steps, num_units)
v2: tensor with shape (batch_size, time_steps, num_units)
num_classes: number of output units
v1_lengths: time_steps in v1
v2_lengths: time_steps in v2
use_masking: use masking
reuse: reuse variables
"""
with tf.variable_scope('aggregate', reuse=reuse) as _:
if use_masking:
v1 = mask_3d(sequences=v1, sequence_lengths=v1_lengths, mask_value=0, dimension=1)
v2 = mask_3d(sequences=v2, sequence_lengths=v2_lengths, mask_value=0, dimension=1)
v1_sum, v2_sum = tf.reduce_sum(v1, [1]), tf.reduce_sum(v2, [1])
v1_v2 = tf.concat(axis=1, values=[v1_sum, v2_sum])
transformed_v1_v2 = self._transform_aggregate(v1_v2, reuse=reuse)
logits = tf.contrib.layers.fully_connected(inputs=transformed_v1_v2,
num_outputs=num_classes,
weights_initializer=tf.random_normal_initializer(0.0, 0.01),
biases_initializer=tf.zeros_initializer(),
activation_fn=None)
return logits
class FeedForwardDAM(BaseDecomposableAttentionModel):
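    # feed-forward instantiation of the decomposable attention model: the attend,
    # compare and aggregate transforms below are two-layer ReLU MLPs with dropout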
def __init__(self, representation_size=200, dropout_keep_prob=1.0, *args, **kwargs):
self.representation_size = representation_size
self.dropout_keep_prob = dropout_keep_prob
super().__init__(*args, **kwargs)
def _transform_input(self, sequence, reuse=False):
with tf.variable_scope('transform_embeddings', reuse=reuse) as _:
projection = tf.contrib.layers.fully_connected(inputs=sequence, num_outputs=self.representation_size,
weights_initializer=tf.random_normal_initializer(0.0, self.init_std_dev),
biases_initializer=None, activation_fn=None)
return projection
def _transform_attend(self, sequence, reuse=False):
with tf.variable_scope('transform_attend', reuse=reuse) as _:
projection = tf.nn.dropout(sequence, keep_prob=self.dropout_keep_prob)
projection = tf.contrib.layers.fully_connected(inputs=projection, num_outputs=self.representation_size,
weights_initializer=tf.random_normal_initializer(0.0, self.init_std_dev),
biases_initializer=tf.zeros_initializer(),
activation_fn=tf.nn.relu)
projection = tf.nn.dropout(projection, keep_prob=self.dropout_keep_prob)
projection = tf.contrib.layers.fully_connected(inputs=projection, num_outputs=self.representation_size,
weights_initializer=tf.random_normal_initializer(0.0, self.init_std_dev),
biases_initializer=tf.zeros_initializer(),
activation_fn=tf.nn.relu)
return projection
def _transform_compare(self, sequence, reuse=False):
with tf.variable_scope('transform_compare', reuse=reuse) as _:
projection = tf.nn.dropout(sequence, keep_prob=self.dropout_keep_prob)
projection = tf.contrib.layers.fully_connected(inputs=projection, num_outputs=self.representation_size,
weights_initializer=tf.random_normal_initializer(0.0, self.init_std_dev),
biases_initializer=tf.zeros_initializer(),
activation_fn=tf.nn.relu)
projection = tf.nn.dropout(projection, keep_prob=self.dropout_keep_prob)
projection = tf.contrib.layers.fully_connected(inputs=projection, num_outputs=self.representation_size,
weights_initializer=tf.random_normal_initializer(0.0, self.init_std_dev),
biases_initializer=tf.zeros_initializer(),
activation_fn=tf.nn.relu)
return projection
def _transform_aggregate(self, v1_v2, reuse=False):
with tf.variable_scope('transform_aggregate', reuse=reuse) as _:
projection = tf.nn.dropout(v1_v2, keep_prob=self.dropout_keep_prob)
projection = tf.contrib.layers.fully_connected(inputs=projection, num_outputs=self.representation_size,
weights_initializer=tf.random_normal_initializer(0.0, self.init_std_dev),
biases_initializer=tf.zeros_initializer(),
activation_fn=tf.nn.relu)
projection = tf.nn.dropout(projection, keep_prob=self.dropout_keep_prob)
projection = tf.contrib.layers.fully_connected(inputs=projection, num_outputs=self.representation_size,
weights_initializer=tf.random_normal_initializer(0.0, self.init_std_dev),
biases_initializer=tf.zeros_initializer(),
activation_fn=tf.nn.relu)
return projection
|
py | 1a468f63b82c7db53a1d06fd3129fc8f2a78e6ef | import os
import base64
import binascii
from collections import namedtuple
import hexdump
import intervaltree
from PyQt5.QtGui import QIcon
from PyQt5.QtGui import QBrush
from PyQt5.QtGui import QPixmap
from PyQt5.QtGui import QMouseEvent
from PyQt5.QtGui import QKeySequence
from PyQt5.QtGui import QFontDatabase
import PyQt5.QtCore as QtCore
from PyQt5.QtCore import Qt
from PyQt5.QtCore import QSize
from PyQt5.QtCore import QMimeData
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtCore import QModelIndex
from PyQt5.QtCore import QItemSelection
from PyQt5.QtCore import QItemSelectionModel
from PyQt5.QtCore import QAbstractTableModel
from PyQt5.QtWidgets import QMenu
from PyQt5.QtWidgets import QAction
from PyQt5.QtWidgets import QWidget
from PyQt5.QtWidgets import QTableView
from PyQt5.QtWidgets import QSizePolicy
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWidgets import QInputDialog
from PyQt5.QtWidgets import QItemDelegate
from PyQt5.QtWidgets import QAbstractItemView
from .hexview_auto import Ui_Form as HexViewBase
from .common import h
from .common import LoggingObject
from .tablecellstylemodels import row_start_index
from .tablecellstylemodels import row_end_index
from .tablecellstylemodels import row_number
from .tablecellstylemodels import ROLE_BORDER
from .tablecellstylemodels import ColorModel
from .tablecellstylemodels import BorderModel
NamedColor = namedtuple("NamedColor", ["name", "qcolor"])
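# named colors offered in the hex view's "Color selection..." context menu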
QT_COLORS = (
NamedColor("red", Qt.red),
NamedColor("green", Qt.green),
NamedColor("blue", Qt.blue),
NamedColor("black", Qt.black),
NamedColor("dark red", Qt.darkRed),
NamedColor("dark green", Qt.darkGreen),
NamedColor("dark blue", Qt.darkBlue),
NamedColor("cyan", Qt.cyan),
NamedColor("magenta", Qt.magenta),
NamedColor("yellow", Qt.yellow),
NamedColor("gray", Qt.gray),
NamedColor("dark cyan", Qt.darkCyan),
NamedColor("dark magenta", Qt.darkMagenta),
NamedColor("dark yellow", Qt.darkYellow),
NamedColor("dark gray", Qt.darkGray),
NamedColor("light gray", Qt.lightGray),
)
def make_color_icon(color):
pixmap = QPixmap(10, 10)
pixmap.fill(color)
return QIcon(pixmap)
class HexItemDelegate(QItemDelegate):
def __init__(self, model, parent, *args):
super(HexItemDelegate, self).__init__(parent)
self._model = model
def paint(self, qpainter, option, qindex):
super(HexItemDelegate, self).paint(qpainter, option, qindex)
border = self._model.data(qindex, ROLE_BORDER)
if border is None:
return
qpainter.setPen(border.theme.color)
r = option.rect
if border.top:
qpainter.drawLine(r.topLeft(), r.topRight())
if border.bottom:
qpainter.drawLine(r.bottomLeft(), r.bottomRight())
if border.left:
qpainter.drawLine(r.topLeft(), r.bottomLeft())
if border.right:
qpainter.drawLine(r.topRight(), r.bottomRight())
class HexTableModel(QAbstractTableModel):
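    # translation table for the character pane: printable ASCII bytes map to
    # themselves, every other byte value is rendered as '.'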
FILTER = ''.join([(len(repr(chr(x)))==3 or chr(x) == "\\") and chr(x) or '.' for x in range(256)])
def __init__(self, buf, parent=None, *args):
super(HexTableModel, self).__init__(parent, *args)
self._buf = buf
self._colors = ColorModel(self)
self._borders = BorderModel(self)
self._colors.rangeChanged.connect(self._handle_color_range_changed)
self._borders.rangeChanged.connect(self._handle_border_range_changed)
def getColorModel(self):
return self._colors
def setColorModel(self, color_model):
self._colors.rangeChanged.disconnect(self._handle_color_range_changed)
self._colors = color_model
self._colors.rangeChanged.connect(self._handle_color_range_changed)
# TODO: re-render all cells
def getBorderModel(self):
return self._borders
def setBorderModel(self, color_model):
self._borders.rangeChanged.disconnect(self._handle_border_range_changed)
self._borders = color_model
self._borders.rangeChanged.connect(self._handle_border_range_changed)
# TODO: re-render all cells
@staticmethod
def qindex2index(index):
""" from a QIndex (row/column coordinate system), get the buffer index of the byte """
r = index.row()
c = index.column()
if c > 0x10:
return (0x10 * r) + c - 0x11
else:
return (0x10 * r) + c
def index2qindexb(self, index):
""" from a buffer index, get the QIndex (row/column coordinate system) of the byte pane """
r = index // 0x10
c = index % 0x10
return self.index(r, c)
def index2qindexc(self, index):
""" from a buffer index, get the QIndex (row/column coordinate system) of the char pane """
r = (index // 0x10)
c = index % 0x10 + 0x11
return self.index(r, c)
def rowCount(self, parent):
if len(self._buf) % 0x10 != 0:
return (len(self._buf) // 0x10) + 1
else:
return len(self._buf) // 0x10
def columnCount(self, parent):
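        # 0x10 hex byte columns + 1 divider column + 0x10 character columns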
return 0x21
def data(self, index, role):
if not index.isValid():
return None
elif self.qindex2index(index) >= len(self._buf):
return None
col = index.column()
bindex = self.qindex2index(index)
if role == Qt.DisplayRole:
if col == 0x10:
return ""
c = self._buf[bindex]
if isinstance(c, str):
# python2.7 mmap is a str interface, not bytearray
c = ord(c)
if col > 0x10:
return chr(c).translate(HexTableModel.FILTER)
else:
return "%02x" % (c)
elif role == Qt.BackgroundRole:
# don't color the divider column
if col == 0x10:
return None
color = self._colors.get_color(bindex)
if color is not None:
return QBrush(color)
return None
elif role == ROLE_BORDER:
if col == 0x10:
return None
return self._borders.get_border(bindex)
else:
return None
@property
def data_length(self):
return len(self._buf)
def headerData(self, section, orientation, role):
if role != Qt.DisplayRole:
return None
elif orientation == Qt.Horizontal:
if section < 0x10:
return "%01X" % (section)
else:
return ""
elif orientation == Qt.Vertical:
return "%04X" % (section * 0x10)
else:
return None
def _emit_data_changed(self, start_bindex, end_bindex):
for i in range(start_bindex, end_bindex):
# mark data changed to encourage re-rendering of cell
qib = self.index2qindexb(i)
qic = self.index2qindexc(i)
self.dataChanged.emit(qib, qib)
self.dataChanged.emit(qic, qic)
def _handle_color_range_changed(self, range):
self._emit_data_changed(range.begin, range.end + 1)
def _handle_border_range_changed(self, range):
self._emit_data_changed(range.begin, range.end + 1)
class HexItemSelectionModel(QItemSelectionModel):
selectionRangeChanged = pyqtSignal([int])
def __init__(self, model, view):
"""
:type view: HexTableView
"""
super(HexItemSelectionModel, self).__init__(model)
self._model = model
self._view = view
self._start_qindex = None
self._view.leftMousePressedIndex.connect(self._handle_mouse_pressed)
self._view.leftMouseMovedIndex.connect(self._handle_mouse_moved)
self._view.leftMouseReleasedIndex.connect(self._handle_mouse_released)
self.start = None
self.end = None
def _bselect(self, selection, start_bindex, end_bindex):
""" add the given buffer indices to the given QItemSelection, both byte and char panes """
selection.select(self._model.index2qindexb(start_bindex), self._model.index2qindexb(end_bindex))
selection.select(self._model.index2qindexc(start_bindex), self._model.index2qindexc(end_bindex))
def _do_select(self, start_bindex, end_bindex):
"""
select the given range by buffer indices
selects items like this:
..................
......xxxxxxxxxxxx
xxxxxxxxxxxxxxxxxx
xxxxxxxxxxxxxxxxxx
xxxxxxxxxxxx......
..................
*not* like this:
..................
......xxxxxx......
......xxxxxx......
......xxxxxx......
......xxxxxx......
..................
"""
self.select(QItemSelection(), QItemSelectionModel.Clear)
if start_bindex > end_bindex:
start_bindex, end_bindex = end_bindex, start_bindex
selection = QItemSelection()
if row_number(end_bindex) - row_number(start_bindex) == 0:
# all on one line
self._bselect(selection, start_bindex, end_bindex)
elif row_number(end_bindex) - row_number(start_bindex) == 1:
# two lines
self._bselect(selection, start_bindex, row_end_index(start_bindex))
self._bselect(selection, row_start_index(end_bindex), end_bindex)
else:
# many lines
self._bselect(selection, start_bindex, row_end_index(start_bindex))
self._bselect(selection, row_start_index(start_bindex) + 0x10, row_end_index(end_bindex) - 0x10)
self._bselect(selection, row_start_index(end_bindex), end_bindex)
self.select(selection, QItemSelectionModel.SelectCurrent)
self.start = start_bindex
self.end = end_bindex
self.selectionRangeChanged.emit(end_bindex)
def bselect(self, start_bindex, end_bindex):
""" the public interface to _do_select """
return self._do_select(start_bindex, end_bindex)
def handle_move_key(self, key):
if self._start_qindex == self._model.index2qindexc(self.start) or \
self._start_qindex == self._model.index2qindexb(self.start):
i = self.end
else:
i = self.start
if key == QKeySequence.MoveToEndOfDocument:
i = self._model.data_length - 1
elif key == QKeySequence.MoveToEndOfLine:
i = row_end_index(i)
elif key == QKeySequence.MoveToNextChar:
i += 1
elif key == QKeySequence.MoveToNextLine:
i += 0x10
elif key == QKeySequence.MoveToNextPage:
i += 0x40
elif key == QKeySequence.MoveToNextWord:
i += 1
elif key == QKeySequence.MoveToPreviousChar:
i -= 1
elif key == QKeySequence.MoveToPreviousLine:
i -= 0x10
elif key == QKeySequence.MoveToPreviousPage:
i -= 0x40
elif key == QKeySequence.MoveToPreviousWord:
i -= 1
elif key == QKeySequence.MoveToStartOfDocument:
i = 0x0
elif key == QKeySequence.MoveToStartOfLine:
i = row_start_index(i)
else:
raise RuntimeError("Unexpected movement key: %s" % (key))
# this behavior selects the smallest or largest cell in the
# same column as the out-of-bounds index
if i < 0:
i %= 0x10
if i > self._model.data_length:
i %= 0x10
i = self._model.data_length - 0x10 + i
self.bselect(i, i)
def handle_select_key(self, key):
i = None
j = None
if self._start_qindex == self._model.index2qindexc(self.start) or \
self._start_qindex == self._model.index2qindexb(self.start):
i = self.end
j = self.start
else:
i = self.start
j = self.end
if key == QKeySequence.SelectEndOfDocument:
i = self._model.data_length - 1
elif key == QKeySequence.SelectEndOfLine:
i = row_end_index(i)
elif key == QKeySequence.SelectNextChar:
i += 1
elif key == QKeySequence.SelectNextLine:
i += 0x10
elif key == QKeySequence.SelectNextPage:
i += 0x40
elif key == QKeySequence.SelectNextWord:
i += 1
elif key == QKeySequence.SelectPreviousChar:
i -= 1
elif key == QKeySequence.SelectPreviousLine:
i -= 0x10
elif key == QKeySequence.SelectPreviousPage:
i -= 0x40
elif key == QKeySequence.SelectPreviousWord:
i -= 1
elif key == QKeySequence.SelectStartOfDocument:
i = 0x0
elif key == QKeySequence.SelectStartOfLine:
i = row_start_index(i)
else:
raise RuntimeError("Unexpected select key: %s" % (key))
# this behavior selects the smallest or largest cell in the
# same column as the out-of-bounds index
if i < 0:
i %= 0x10
if i > self._model.data_length:
i %= 0x10
i = self._model.data_length - 0x10 + i
# need to explicitly reset start_qindex so that the current index
# doesn't get confused when coming from a selection of a single cell
# (in the check at the start of this function to decide which end of
# the selection was most recently active)
self._start_qindex = self._model.index2qindexc(j)
self.bselect(i, j)
def _update_selection(self, qindex1, qindex2):
""" select the given range by qmodel indices """
m = self.model()
self._do_select(m.qindex2index(qindex1), m.qindex2index(qindex2))
def _handle_mouse_pressed(self, qindex):
self._start_qindex = qindex
self._update_selection(qindex, qindex)
def _handle_mouse_moved(self, qindex):
self._update_selection(self._start_qindex, qindex)
def _handle_mouse_released(self, qindex):
self._update_selection(self._start_qindex, qindex)
self._start_qindex = None
class HexTableView(QTableView, LoggingObject):
""" table view that handles click events for better selection handling """
leftMousePressed = pyqtSignal([QMouseEvent])
leftMousePressedIndex = pyqtSignal([QModelIndex])
leftMouseMoved = pyqtSignal([QMouseEvent])
leftMouseMovedIndex = pyqtSignal([QModelIndex])
leftMouseReleased = pyqtSignal([QMouseEvent])
leftMouseReleasedIndex = pyqtSignal([QModelIndex])
moveKeyPressed = pyqtSignal([QKeySequence])
selectKeyPressed = pyqtSignal([QKeySequence])
def __init__(self, *args, **kwargs):
super(HexTableView, self).__init__(*args, **kwargs)
self.leftMousePressed.connect(self._handle_mouse_press)
self.leftMouseMoved.connect(self._handle_mouse_move)
self.leftMouseReleased.connect(self._handle_mouse_release)
self._press_start_index = None
self._press_current_index = None
self._press_end_index = None
self._is_tracking_mouse = False
def _reset_press_state(self):
self._press_start_index = None
self._press_current_index = None
self._press_end_index = None
def mousePressEvent(self, event):
super(HexTableView, self).mousePressEvent(event)
if event.buttons() & Qt.LeftButton:
self.leftMousePressed.emit(event)
def mouseMoveEvent(self, event):
super(HexTableView, self).mouseMoveEvent(event)
if event.buttons() & Qt.LeftButton:
self.leftMouseMoved.emit(event)
def mouseReleaseEvent(self, event):
        super(HexTableView, self).mouseReleaseEvent(event)
        if event.button() == Qt.LeftButton:
self.leftMouseReleased.emit(event)
def keyPressEvent(self, event):
move_keys = (
QKeySequence.MoveToEndOfDocument,
QKeySequence.MoveToEndOfLine,
QKeySequence.MoveToNextChar,
QKeySequence.MoveToNextLine,
QKeySequence.MoveToNextPage,
QKeySequence.MoveToNextWord,
QKeySequence.MoveToPreviousChar,
QKeySequence.MoveToPreviousLine,
QKeySequence.MoveToPreviousPage,
QKeySequence.MoveToPreviousWord,
QKeySequence.MoveToStartOfDocument,
QKeySequence.MoveToStartOfLine,
)
for move_key in move_keys:
if event.matches(move_key):
self.moveKeyPressed.emit(move_key)
return
t = event.text()
KeyMapping = namedtuple("KeyMapping", ["source", "destination"])
vim_move_mappings = (
KeyMapping("j", QKeySequence.MoveToNextLine),
KeyMapping("k", QKeySequence.MoveToPreviousLine),
KeyMapping("h", QKeySequence.MoveToPreviousChar),
KeyMapping("l", QKeySequence.MoveToNextChar),
KeyMapping("^", QKeySequence.MoveToStartOfLine),
KeyMapping("$", QKeySequence.MoveToEndOfLine),
)
for vim_mapping in vim_move_mappings:
if vim_mapping.source == t:
self.moveKeyPressed.emit(vim_mapping.destination)
return
select_keys = (
QKeySequence.SelectAll,
QKeySequence.SelectEndOfDocument,
QKeySequence.SelectEndOfLine,
QKeySequence.SelectNextChar,
QKeySequence.SelectNextLine,
QKeySequence.SelectNextPage,
QKeySequence.SelectNextWord,
QKeySequence.SelectPreviousChar,
QKeySequence.SelectPreviousLine,
QKeySequence.SelectPreviousPage,
QKeySequence.SelectPreviousWord,
QKeySequence.SelectStartOfDocument,
QKeySequence.SelectStartOfLine,
)
for select_key in select_keys:
if event.matches(select_key):
self.selectKeyPressed.emit(select_key)
return
t = event.text()
KeyMapping = namedtuple("KeyMapping", ["source", "destination"])
vim_select_mappings = (
KeyMapping("J", QKeySequence.SelectNextLine),
KeyMapping("K", QKeySequence.SelectPreviousLine),
KeyMapping("H", QKeySequence.SelectPreviousChar),
KeyMapping("L", QKeySequence.SelectNextChar),
)
for vim_mapping in vim_select_mappings:
if vim_mapping.source == t:
self.selectKeyPressed.emit(vim_mapping.destination)
return
def _handle_mouse_press(self, key_event):
self._reset_press_state()
self._press_start_index = self.indexAt(key_event.pos())
self._is_tracking_mouse = True
self.leftMousePressedIndex.emit(self._press_start_index)
def _handle_mouse_move(self, key_event):
if self._is_tracking_mouse:
i = self.indexAt(key_event.pos())
if i != self._press_current_index:
self._press_current_index = i
self.leftMouseMovedIndex.emit(i)
def _handle_mouse_release(self, key_event):
self._press_end_index = self.indexAt(key_event.pos())
self._is_tracking_mouse = False
self.leftMouseReleasedIndex.emit(self._press_end_index)
Origin = namedtuple("Origin", ["offset", "name"])
class HexViewWidget(QWidget, HexViewBase, LoggingObject):
originsChanged = pyqtSignal()
def __init__(self, buf, parent=None):
super(HexViewWidget, self).__init__()
self.setupUi(self)
self._buf = buf
self._model = HexTableModel(self._buf)
self._colored_regions = intervaltree.IntervalTree()
self._origins = []
# ripped from pyuic5 ui/hexview.ui
# at commit 6c9edffd32706097d7eba8814d306ea1d997b25a
# so we can add our custom HexTableView instance
self.view = HexTableView(self)
sizePolicy = QSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.view.sizePolicy().hasHeightForWidth())
self.view.setSizePolicy(sizePolicy)
self.view.setMinimumSize(QSize(660, 0))
self.view.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.view.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.view.setSelectionMode(QAbstractItemView.NoSelection)
self.view.setShowGrid(False)
self.view.setWordWrap(False)
self.view.setObjectName("view")
self.view.horizontalHeader().setDefaultSectionSize(25)
self.view.horizontalHeader().setMinimumSectionSize(25)
self.view.verticalHeader().setDefaultSectionSize(21)
self.mainLayout.insertWidget(0, self.view)
# end rip
# TODO: provide a HexViewWidget.setModel method, and don't build it ourselves
self.view.setModel(self._model)
for i in range(0x10):
self.view.setColumnWidth(i, 23)
self.view.setColumnWidth(0x10, 12)
for i in range(0x11, 0x22):
self.view.setColumnWidth(i, 11)
self._hsm = HexItemSelectionModel(self._model, self.view)
self.view.setSelectionModel(self._hsm)
self.view.setContextMenuPolicy(Qt.CustomContextMenu)
self.view.customContextMenuRequested.connect(self._handle_context_menu_requested)
self._hsm.selectionRangeChanged.connect(self._handle_selection_range_changed)
self.originsChanged.connect(self._handle_origins_changed)
self.view.moveKeyPressed.connect(self._hsm.handle_move_key)
self.view.selectKeyPressed.connect(self._hsm.handle_select_key)
f = QFontDatabase.systemFont(QFontDatabase.FixedFont)
self.view.setFont(f)
self.statusLabel.setFont(f)
self.view.setItemDelegate(HexItemDelegate(self._model, self))
self.statusLabel.setText("")
def getModel(self):
return self._model
def getColorModel(self):
""" this is a shortcut, to make it easy to add/remove colored ranges """
return self.getModel().getColorModel()
def getBorderModel(self):
""" this is a shortcut, to make it easy to add/remove bordered ranges """
return self.getModel().getBorderModel()
def getSelectionModel(self):
return self._hsm
def scrollTo(self, index):
qi = self._model.index2qindexb(index)
self.view.scrollTo(qi)
def _render_status_text(self):
txt = []
start = self._hsm.start
end = self._hsm.end
if start not in (None, -1) and end not in (None, -1):
txt.append("sel: [{:s}, {:s}]".format(hex(start), hex(end)))
txt.append("len: {:s}".format(hex(end - start + 1)))
for origin in self._origins:
txt.append("from '{:s}': {:s}".format(
origin.name, hex(start - origin.offset)))
self.statusLabel.setText(" ".join(txt))
def _handle_selection_range_changed(self, end_bindex):
self._render_status_text()
self.scrollTo(end_bindex)
def _handle_origins_changed(self):
self._render_status_text()
def get_context_menu(self, qpoint):
""" override this method to customize the context menu """
menu = QMenu(self)
index = self.view.indexAt(qpoint)
def add_action(menu, text, handler, icon=None):
a = None
if icon is None:
a = QAction(text, self)
else:
a = QAction(icon, text, self)
a.triggered.connect(handler)
menu.addAction(a)
add_action(menu, "Color selection", self._handle_color_selection)
# duplication here with vstructui
color_menu = menu.addMenu("Color selection...")
# need to escape the closure capture on the color loop variable below
# hint from: http://stackoverflow.com/a/6035865/87207
def make_color_selection_handler(color):
return lambda: self._handle_color_selection(color=color)
for color in QT_COLORS:
add_action(color_menu, "{:s}".format(color.name),
make_color_selection_handler(color.qcolor), make_color_icon(color.qcolor))
start = self._hsm.start
end = self._hsm.end
cm = self.getColorModel()
if (start == end and cm.is_index_colored(start)) or cm.is_region_colored(start, end):
def make_remove_color_handler(r):
return lambda: self._handle_remove_color_range(r)
remove_color_menu = menu.addMenu("Remove color...")
for cr in cm.get_region_colors(start, end):
pixmap = QPixmap(10, 10)
pixmap.fill(cr.color)
icon = QIcon(pixmap)
add_action(remove_color_menu,
"Remove color [{:s}, {:s}], len: {:s}".format(h(cr.begin), h(cr.end), h(cr.end - cr.begin)),
make_remove_color_handler(cr), make_color_icon(cr.color))
menu.addSeparator() # -----------------------------------------------------------------
add_action(menu, "Copy selection (binary)", self._handle_copy_binary)
copy_menu = menu.addMenu("Copy...")
add_action(copy_menu, "Copy selection (binary)", self._handle_copy_binary)
add_action(copy_menu, "Copy selection (text)", self._handle_copy_text)
add_action(copy_menu, "Copy selection (hex)", self._handle_copy_hex)
add_action(copy_menu, "Copy selection (hexdump)", self._handle_copy_hexdump)
add_action(copy_menu, "Copy selection (base64)", self._handle_copy_base64)
menu.addSeparator() # -----------------------------------------------------------------
add_action(menu, "Add origin", lambda: self._handle_add_origin(index))
return menu
def _handle_context_menu_requested(self, qpoint):
self.get_context_menu(qpoint).exec_(self.view.mapToGlobal(qpoint))
def _handle_color_selection(self, color=None):
        # Qt seems to set non-existent keyword args to False, so we manually reset to None
if not color:
color = None
s = self._hsm.start
e = self._hsm.end + 1
range = self.getColorModel().color_region(s, e, color=color)
self._hsm.bselect(-1, -1)
# seems to be a bit of duplication here and in the ColorModel?
self._colored_regions.addi(s, e, range)
def _handle_remove_color_range(self, range):
self.getColorModel().clear_range(range)
@property
def _selected_data(self):
start = self._hsm.start
end = self._hsm.end
return self._buf[start:end]
def _handle_copy_binary(self):
mime = QMimeData()
# mime type suggested here: http://stackoverflow.com/a/6783972/87207
mime.setData("application/octet-stream", self._selected_data)
QApplication.clipboard().setMimeData(mime)
def _handle_copy_text(self):
mime = QMimeData()
mime.setText(self._selected_data)
QApplication.clipboard().setMimeData(mime)
def _handle_copy_hex(self):
mime = QMimeData()
mime.setText(binascii.b2a_hex(self._selected_data))
QApplication.clipboard().setMimeData(mime)
def _handle_copy_hexdump(self):
mime = QMimeData()
t = hexdump.hexdump(self._selected_data, result="return")
mime.setText(t)
QApplication.clipboard().setMimeData(mime)
def _handle_copy_base64(self):
mime = QMimeData()
mime.setText(base64.b64encode(self._selected_data))
QApplication.clipboard().setMimeData(mime)
def add_origin(self, origin):
self._origins.append(origin)
self.originsChanged.emit()
def remove_origin(self, origin):
self._origins.remove(origin)
self.originsChanged.emit()
def _handle_add_origin(self, qindex):
index = self.getModel().qindex2index(qindex)
name, ok = QInputDialog.getText(self, "Add origin...", "Origin name:")
if ok and name:
self.add_origin(Origin(index, name))
|
py | 1a468f9b1ca385fe12094e9658be5ec514f8240d | # Copyright 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [ ]
|
py | 1a46903224019b77dde0d08c94c17b677ed346b2 | # Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
# useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter
class PubsMoverSpiderMiddleware:
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, or item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Request or item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
class PubsMoverDownloaderMiddleware:
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the downloader middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_request(self, request, spider):
# Called for each request that goes through the downloader
# middleware.
# Must either:
# - return None: continue processing this request
# - or return a Response object
# - or return a Request object
# - or raise IgnoreRequest: process_exception() methods of
# installed downloader middleware will be called
return None
def process_response(self, request, response, spider):
# Called with the response returned from the downloader.
# Must either;
# - return a Response object
# - return a Request object
# - or raise IgnoreRequest
return response
def process_exception(self, request, exception, spider):
# Called when a download handler or a process_request()
# (from other downloader middleware) raises an exception.
# Must either:
# - return None: continue processing this exception
# - return a Response object: stops process_exception() chain
# - return a Request object: stops process_exception() chain
pass
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
|
py | 1a4690f25b44a3a10bb58eb77173e33e6b40e3d3 | import turtle
import time
import random
delay = 0.1
score = 0
high_score = 0
wn = turtle.Screen()
wn.title("Snake")
wn.bgcolor("green")
wn.setup(width=600, height=600)
wn.tracer(0)
head = turtle.Turtle()
head.speed(0)
head.shape("square")
head.color("black")
head.penup()
head.goto(0,0)
head.direction = "stop"
food = turtle.Turtle()
food.speed(0)
food.shape("circle")
food.color("red")
food.penup()
food.goto(0,100)
segments = []
pen = turtle.Turtle()
pen.speed(0)
pen.shape("square")
pen.color("white")
pen.penup()
pen.hideturtle()
pen.goto(0, 260)
pen.write("Score: 0 High Score: 0", align="center", font=("Courier", 24, "normal"))
def go_up():
if head.direction != "down":
head.direction = "up"
def go_down():
if head.direction != "up":
head.direction = "down"
def go_left():
if head.direction != "right":
head.direction = "left"
def go_right():
if head.direction != "left":
head.direction = "right"
def move():
if head.direction == "up":
y = head.ycor()
head.sety(y + 20)
if head.direction == "down":
y = head.ycor()
head.sety(y - 20)
if head.direction == "left":
x = head.xcor()
head.setx(x - 20)
if head.direction == "right":
x = head.xcor()
head.setx(x + 20)
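# keyboard bindings (WASD)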
wn.listen()
wn.onkeypress(go_up, "w")
wn.onkeypress(go_down, "s")
wn.onkeypress(go_left, "a")
wn.onkeypress(go_right, "d")
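# main game loop: redraw the screen, handle wall/body collisions, grow on food, move the snake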
while True:
wn.update()
if head.xcor()>290 or head.xcor()<-290 or head.ycor()>290 or head.ycor()<-290:
time.sleep(1)
head.goto(0,0)
head.direction = "stop"
for segment in segments:
segment.goto(1000, 1000)
segments.clear()
score = 0
delay = 0.1
pen.clear()
pen.write("Score: {} High Score: {}".format(score, high_score), align="center", font=("Courier", 24, "normal"))
if head.distance(food) < 20:
x = random.randint(-290, 290)
y = random.randint(-290, 290)
food.goto(x,y)
new_segment = turtle.Turtle()
new_segment.speed(0)
new_segment.shape("square")
new_segment.color("grey")
new_segment.penup()
segments.append(new_segment)
delay -= 0.001
score += 10
if score > high_score:
high_score = score
pen.clear()
pen.write("Score: {} High Score: {}".format(score, high_score), align="center", font=("Courier", 24, "normal"))
for index in range(len(segments)-1, 0, -1):
x = segments[index-1].xcor()
y = segments[index-1].ycor()
segments[index].goto(x, y)
if len(segments) > 0:
x = head.xcor()
y = head.ycor()
segments[0].goto(x,y)
move()
for segment in segments:
if segment.distance(head) < 20:
time.sleep(1)
head.goto(0,0)
head.direction = "stop"
for segment in segments:
segment.goto(1000, 1000)
segments.clear()
score = 0
delay = 0.1
pen.clear()
pen.write("Score: {} High Score: {}".format(score, high_score), align="center", font=("Courier", 24, "normal"))
time.sleep(delay)
wn.mainloop()
|
py | 1a46912a2cc3a3218aa8f44cfec1d0306b6bac5a | """
inorder: [LEFT]root[RIGHT]
postorder: [LEFT][RIGHT]root
The first thing we know is the value of the root, which is the last element of `postorder`.
Find the index of the root in `inorder`; that splits `inorder` into the [LEFT] and [RIGHT] intervals.
The lengths of [LEFT] and [RIGHT] in `inorder` equal the lengths of [LEFT] and [RIGHT] in `postorder`.
"""
class Solution(object):
def buildTree(self, inorder, postorder):
if not inorder or not postorder: return None
root = TreeNode(postorder[-1])
if len(inorder)==1: return root
r = inorder.index(root.val)
leftInOrder = inorder[:r]
leftPostOrder = postorder[:r]
rightInOrder = inorder[r+1:]
rightPostOrder = postorder[r:len(postorder)-1]
root.left = self.buildTree(leftInOrder, leftPostOrder)
root.right = self.buildTree(rightInOrder, rightPostOrder)
return root
"""
Time: O(N log N) for a balanced tree (O(N^2) in the worst case), because for each node we
scan and slice the elements of its subtree. To be precise, for a balanced tree:
O(N) for constructing root.
O(N/2) for constructing root.left
O(N/2) for constructing root.right
O(N/4) for constructing root.left.left
O(N/4) for constructing root.left.right
O(N/4) for constructing root.right.left
O(N/4) for constructing root.right.right
...
To improve this, we can use a hash table (`index` below) to look up the root's position in `inorder` in O(1).
Space: O(NLogN).
For each node, we need to construct inorder/postorder arrays of its children.
We can improve this by using pointers.
"""
"""
Improved version.
Time: O(N).
Space: O(N). For `index`.
"""
class Solution(object):
def buildTree(self, inorder, postorder):
def helper(i, j, k, l):
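            # [i, j) is the current window into `inorder`, [k, l) the window into
            # `postorder`; both windows cover the same subtree and have equal length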
if j-i<=0: return None
if l-k<=0: return None
root = TreeNode(postorder[l-1])
if j-i==1: return root
r = index[root.val]
root.left = helper(i, r, k, k+r-i)
root.right = helper(r+1, j, k+r-i, l-1)
return root
index = {} #the index of inorder
for i, n in enumerate(inorder): index[n] = i
return helper(0, len(inorder), 0, len(postorder)) |
py | 1a4692ed46d6a37a570dbc3717750926c79c5153 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2015, PHYTEC Messtechnik GmbH
# Author: Stefan Müller-Klieser <[email protected]>
import sys
import argparse
import os
import shutil
from phylib import *
class BSP_SiteConfLoader(BoardSupportPackage):
"""Extends the BoardSupportPackage class with functionalty to
manage a site.conf. This file is Host or user specific and defines
settings very specific to the location where the bsp is getting built
"""
def __init__(self):
super(BSP_SiteConfLoader, self).__init__()
def copy_site_conf(self, arg=None):
if arg is None:
arg = self.probe_for_siteconf()
if arg is None:
print('No site.conf found on host.')
return False
target = os.path.join(self.build_dir, 'conf/site.conf')
print("site.conf setup: Copying " + arg + " to " + target)
shutil.copyfile(arg, target)
return True
def probe_for_siteconf(self):
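        # look for a site.conf in the usual per-user, shared and system-wide locations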
locations = ["~/.site.conf",
"/home/share/tools/yocto/site.conf",
"/etc/yocto/site.conf"]
for l in locations:
if os.path.isfile(os.path.expanduser(l)):
return os.path.expanduser(l)
return None
##############
# Executable #
##############
def main():
"""This script starts the site.conf mechanism and copies the choosen site.conf
in your build/conf directory
"""
parser = argparse.ArgumentParser(description='copy a site.conf into your conf dir')
parser.add_argument('-f', dest='filename', help='set the site.conf file location manually')
args = parser.parse_args()
bsp = BSP_SiteConfLoader()
if not bsp.copy_site_conf(args.filename):
# An error has happened. Report it back to calling program.
sys.exit(1)
if __name__ == "__main__":
main()
|
py | 1a46952f891af1fcc801aa531cdb7316b0e97a08 | import pytest
from apps.gdpr.utils import account_info_handler
pytestmark = pytest.mark.django_db
def test_account_info_handler(user):
needed_data = {
"email": user.email,
"username": user.username,
"first_name": user.first_name,
"last_name": user.last_name,
"privacy_policy": user.privacy_policy,
"warning_sent_email": user.warning_sent_email,
"account_info_link": user.account_info_link,
"last_account_info_created": None,
"is_staff": user.is_staff,
"is_active": user.is_active,
"date_joined": user.date_joined.strftime("%d/%m/%Y %H:%m:%S"),
"last_login": None,
"last_password_change_date": user.last_password_change_date.strftime(
"%d/%m/%Y %H:%m:%S"
),
}
assert account_info_handler(user) == needed_data
|
py | 1a46954f3e0170a579a7cc9b36f4fcd60cbb03ff | # (C) Datadog, Inc. 2019
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import mock
import pytest
from datadog_checks.ibm_db2 import IbmDb2Check
from datadog_checks.ibm_db2.utils import scrub_connection_string
pytestmark = pytest.mark.unit
class TestPasswordScrubber:
def test_start(self):
s = 'pwd=password;...'
assert scrub_connection_string(s) == 'pwd=********;...'
def test_end(self):
s = '...;pwd=password'
assert scrub_connection_string(s) == '...;pwd=********'
def test_no_match_within_value(self):
s = '...pwd=password;...'
assert scrub_connection_string(s) == s
def test_retry_connection(aggregator, instance):
ibmdb2 = IbmDb2Check('ibm_db2', {}, [instance])
conn1 = mock.MagicMock()
ibmdb2._conn = conn1
ibmdb2.get_connection = mock.MagicMock()
exception_msg = "[IBM][CLI Driver] CLI0106E Connection is closed. SQLSTATE=08003"
def mock_exception(*args, **kwargs):
raise Exception(exception_msg)
with mock.patch('ibm_db.exec_immediate', side_effect=mock_exception):
with pytest.raises(Exception, match='CLI0106E Connection is closed. SQLSTATE=08003'):
ibmdb2.check(instance)
# new connection made
assert ibmdb2._conn != conn1
|
py | 1a46955d8224e348f0fcb112267d5d36d3c9eb8d | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from typing import TYPE_CHECKING
from .._internal.client_credential_base import ClientCredentialBase
if TYPE_CHECKING:
# pylint:disable=unused-import,ungrouped-imports
from typing import Any
class ClientSecretCredential(ClientCredentialBase):
"""Authenticates as a service principal using a client secret.
:param str tenant_id: ID of the service principal's tenant. Also called its 'directory' ID.
:param str client_id: the service principal's client ID
:param str client_secret: one of the service principal's client secrets
:keyword str authority: Authority of an Azure Active Directory endpoint, for example 'login.microsoftonline.com',
the authority for Azure Public Cloud (which is the default). :class:`~azure.identity.AzureAuthorityHosts`
defines authorities for other clouds.
:keyword cache_persistence_options: configuration for persistent token caching. If unspecified, the credential
will cache tokens in memory.
:paramtype cache_persistence_options: ~azure.identity.TokenCachePersistenceOptions
:keyword ~azure.identity.RegionalAuthority regional_authority: a :class:`~azure.identity.RegionalAuthority` to
which the credential will authenticate. This argument should be used only by applications deployed to Azure
VMs.
"""
def __init__(self, tenant_id, client_id, client_secret, **kwargs):
# type: (str, str, str, **Any) -> None
if not client_id:
raise ValueError("client_id should be the id of an Azure Active Directory application")
if not client_secret:
raise ValueError("secret should be an Azure Active Directory application's client secret")
if not tenant_id:
raise ValueError(
"tenant_id should be an Azure Active Directory tenant's id (also called its 'directory id')"
)
super(ClientSecretCredential, self).__init__(
client_id=client_id, client_credential=client_secret, tenant_id=tenant_id, **kwargs
)
|
py | 1a469568bfd3898615f7f98000d9d7149149c09b | import os
MILVUS_TB = "Tables"
MILVUS_TBF = "TableFiles"
METRIC_DIC = {
1: "L2",
2: "IP",
3: "HAMMING",
4: "JACCARD",
5: "TANIMOTO",
6: "SUBSTRUCTURE",
7: "SUPERSTRUCTURE"
}
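# default parameters for an HDF5-to-Milvus (h2m) migration job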
H2M_YAML = {
'milvus-version': '0.10.5',
'data_path': ['/home/data/data1.hdf5', '/home/data/fdata2.hdf5'],
'dest_host': '127.0.0.1',
'dest_port': 19530,
'mode': 'append',
'dest_collection_name': 'test02',
'dest_partition_name': 'partition_01',
'collection_parameter': {'dimension': 128, 'index_file_size': 1024, 'metric_type': 'IP'},
}
WORK_PATH = os.getenv("MILVUSDM_PATH", (os.path.join(os.environ['HOME'], 'milvusdm')))
IDS_PATH = WORK_PATH + os.sep + 'ids'
LOGS_NUM = os.getenv("logs_num", 0)
|
py | 1a4698489843f08801eb80bc71ca7a0f4d4f368f | from django.shortcuts import render
from django.views.generic import (DetailView,
CreateView,
UpdateView,
DeleteView)
from django.contrib.auth.mixins import (LoginRequiredMixin, #The user can only create a post if logged in
UserPassesTestMixin) #Only the author can update the post
from django.views.generic import ListView
from django.apps import apps
from django.contrib import messages
from django.shortcuts import render, redirect
from .filters import JobFilter  # assumed: a django-filter FilterSet for Job defined in this app's filters.py
Job = apps.get_model('users', 'Job')
Type = apps.get_model('users', 'Type')
Pub_or_priv = apps.get_model('users', 'Pub_or_priv')
Field = apps.get_model('users', 'Field')
def browse(request):
jobs = Job.objects.all()
all_types = Type.objects.all()
all_pop = Pub_or_priv.objects.all()
all_fields = Field.objects.all()
job_filter = JobFilter(request.GET, queryset=jobs)
return render(request, 'jobs/browse.html', {'jobs':jobs, 'all_types':all_types, 'all_pop':all_pop,
'all_fields':all_fields, 'filter': job_filter})
class JobDetailView(DetailView): #view for detail of each job
model=Job
template_name = 'jobs/job_detail.html'
class JobDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView): #view for deleting a specific job
model=Job
success_url = '/jobs/browse/'
template_name = 'jobs/job_confirm_delete.html'
def test_func(self):
post=self.get_object()
if self.request.user==post.employer: #checks if the user trying to delete the post is the author of the post
return True
return False
class JobCreateView(LoginRequiredMixin, CreateView): #view for creating a new job publication
model=Job
fields = ['email','position', 'description','deadline','type','start']
template_name = 'jobs/job_form.html'
    def form_valid(self, form):
        form.instance.employer = self.request.user # set the employer of the post to the logged-in user
        return super().form_valid(form)
class JobUpdateView(LoginRequiredMixin, UserPassesTestMixin, UpdateView): #view for updating an existing job post
model=Job
fields = ['email','position', 'description','deadline','type','start']
template_name = 'jobs/job_form.html'
def form_valid(self, form):
form.instance.employer = self.request.user #setting the employer of the post to user
return super().form_valid(form) #returns the form
def test_func(self):
post=self.get_object()
if self.request.user==post.employer: #checks if the user trying to update the post is the author of the post
return True
return False
|
py | 1a46994ad1d8eabc28aadad8f33823e5d1ddeb2f | from functions_recorder import load_csv, plot_inputs_vr, plot_inputs_miniscope
import tkFileDialog
from paths import sync_path
from Tkinter import Tk
def get_tk_file(initial_path):
root = Tk()
root.withdraw()
return tkFileDialog.askopenfilenames(initialdir=initial_path, filetypes=(("csv files", "*.csv"),))[0]
# select the sync file to visualize
file_path = get_tk_file(sync_path)
# load the data in the file
sync_data = load_csv(file_path)
# determine whether it's a miniscope or VR file and plot accordingly
if 'syncVR' in file_path:
plot_inputs_vr(sync_data)
else:
plot_inputs_miniscope(sync_data)
# root.destroy()
|
py | 1a469a16b95f70ea787b97e7bf6fe8a0a2fa3ee5 | import sys
import os
import time
import subprocess
import codecs
# Change these to match your own environment
# Do not make watchfolder = outputfolder
path_to_watch = "\path\of\watchfolder"
path_to_send = "\path\of\outputfolder"
script_to_run = "\path\of\script"
def __main__():
# Create a dictionary of all the files in the watchfolder (a.k.a. path_to_watch)
before = dict([(f, None) for f in os.listdir(path_to_watch)])
while True:
# How many seconds to wait between folder checks - be careful about making this less than 2
time.sleep(5)
# Create a dictionary of all the files in the watchfolder
after = dict([(f, None) for f in os.listdir(path_to_watch)])
# Compare the two lists to find new files
added = [f for f in after if f not in before]
if added:
# print "Added: ", ", ".join(added)
for f in added:
# Create a new deadline job for each new file
CreateAndSubmitJobs(f)
# Here you can add any code to move/delete/etc. the file you just made a job out of
before = after
def CreateAndSubmitJobs(newFile):
"""
Creates a Draft job, using a file named newFile.
"""
# These values are all rough defaults, you may need to change them to match your farm
# Creating the job file programmatically
# http://docs.thinkboxsoftware.com/products/deadline/7.0/1_User%20Manual/manual/manual-submission.html#job-info-file
# This is where your temp files will be placed. You may want to change
# this, as this is assuming a default Windows 10 install of deadline
temp_path = os.path.join(GetCurrentUserHomeDirectory(), "temp")
jobInfoFilename = os.path.join(temp_path,
"draft_job_info.job") # This can be named whatever you wish
writer = open(jobInfoFilename, 'w')
try:
writer.write("Plugin=Draft\n")
writer.write("Name=WatchfolderJob-" + newFile + "\n")
writer.write("Comment=Created automatically by watchfolder.py\n")
# If you've got a specific machine you want to test this locally on,
# set this to that machine
# writer.write("Whitelist=mobile-010\n")
writer.write("OutputDirectory0=%s\n" % path_to_send)
finally:
writer.close()
# Create plugin info file programmatically
# http://docs.thinkboxsoftware.com/products/deadline/7.0/1_User%20Manual/manual/manual-submission.html#plug-in-info-file
# This can be named whatever you wish
pluginInfoFilename = os.path.join(temp_path, "draft_plugin_info.job")
writer = open(pluginInfoFilename, 'w')
try:
# Lots of these are required values, and I've left them blank. They can be
# populated if you choose
writer.write("scriptFile=%s\n" % script_to_run)
writer.write("ScriptArg0=username=\"\"\n")
writer.write("ScriptArg1=entity=\"\"\n")
writer.write("ScriptArg2=version=\"\"\n")
writer.write("ScriptArg3=frameList=\n")
writer.write("ScriptArg4=outFolder=%s\n" % path_to_send)
writer.write("ScriptArg5=outFile=%s\n" % os.path.join(path_to_send, newFile))
writer.write("ScriptArg6=inFile=%s\n" % os.path.join(path_to_watch, newFile))
finally:
writer.close()
# Setup the command line arguments.
SubmitJobs(jobInfoFilename, pluginInfoFilename)
def SubmitJobs(file1, file2):
"""
Wrapper for CallDeadlineCommand to make creating jobs simpler
"""
print(CallDeadlineCommand([file1, file2]))
def GetCurrentUserHomeDirectory():
output = CallDeadlineCommand(["-GetCurrentUserHomeDirectory"])
return output.replace("\r", "").replace("\n", "").replace("\\", os.sep)
def GetRepositoryRoot():
output = CallDeadlineCommand(['-root'])
return output.replace("\r", "").replace("\n", "").replace("\\", os.sep)
def CallDeadlineCommand(args):
"""
Calls deadlinecommand with arguments as passed args with 'deadlinecommand' as the first argument
"""
# On OSX, we look for the DEADLINE_PATH file. On other platforms, we use
# the environment variable.
if os.path.exists("/Users/Shared/Thinkbox/DEADLINE_PATH"):
with open("/Users/Shared/Thinkbox/DEADLINE_PATH") as f:
deadlineBin = f.read().strip()
deadlineCommand = "%s/deadlinecommand" % deadlineBin
else:
try:
deadlineBin = os.environ['DEADLINE_PATH']
except KeyError:
return ""
if os.name == 'nt':
deadlineCommand = "%s\\deadlinecommand.exe" % deadlineBin
else:
deadlineCommand = "%s/deadlinecommand" % deadlineBin
# insert deadlineCommand as the first argument
args.insert(0, deadlineCommand)
# Specifying PIPE for all handles to workaround a Python bug on Windows.
# The unused handles are then closed immediatley afterwards.
proc = subprocess.Popen(
args,
cwd=deadlineBin,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
startupinfo=None)
proc.stdin.close()
proc.stderr.close()
output = proc.stdout.read()
output = output.decode("utf_8")
return output
if __name__ == "__main__":
__main__()
|
py | 1a469b2cc715f536c54cf110cc455a337aadf108 | # Copyright (c) 2020 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import os.path
import shutil
import subprocess
import tempfile
import pytest
import yaml
from ludwig.constants import TRAINER
from tests.integration_tests.utils import category_feature, generate_data, sequence_feature
def _run_commands(commands, **ludwig_kwargs):
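    # each keyword argument is appended to the command line as "--name value"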
for arg_name, value in ludwig_kwargs.items():
commands += ["--" + arg_name, value]
cmdline = " ".join(commands)
print(cmdline)
completed_process = subprocess.run(cmdline, shell=True, stdout=subprocess.PIPE, env=os.environ.copy())
assert completed_process.returncode == 0
return completed_process
def _run_ludwig(command, **ludwig_kwargs):
commands = ["ludwig", command]
return _run_commands(commands, **ludwig_kwargs)
def _run_ludwig_horovod(command, **ludwig_kwargs):
commands = ["horovodrun", "-np", "2", "ludwig", command]
return _run_commands(commands, **ludwig_kwargs)
def _prepare_data(csv_filename, config_filename):
# Single sequence input, single category output
input_features = [sequence_feature(reduce_output="sum")]
output_features = [category_feature(vocab_size=2, reduce_input="sum")]
# Generate test data
dataset_filename = generate_data(input_features, output_features, csv_filename)
# generate config file
config = {
"input_features": input_features,
"output_features": output_features,
"combiner": {"type": "concat", "output_size": 14},
TRAINER: {"epochs": 2},
}
with open(config_filename, "w") as f:
yaml.dump(config, f)
return dataset_filename
def _prepare_hyperopt_data(csv_filename, config_filename):
# Single sequence input, single category output
input_features = [sequence_feature(reduce_output="sum")]
output_features = [category_feature(vocab_size=2, reduce_input="sum")]
# Generate test data
dataset_filename = generate_data(input_features, output_features, csv_filename)
# generate config file
config = {
"input_features": input_features,
"output_features": output_features,
"combiner": {"type": "concat", "output_size": 4},
TRAINER: {"epochs": 2},
"hyperopt": {
"parameters": {
"trainer.learning_rate": {
"type": "float",
"low": 0.0001,
"high": 0.01,
"space": "log",
"steps": 3,
}
},
"goal": "minimize",
"output_feature": output_features[0]["name"],
"validation_metrics": "loss",
"executor": {"type": "serial"},
"sampler": {"type": "random", "num_samples": 2},
},
}
with open(config_filename, "w") as f:
yaml.dump(config, f)
return dataset_filename
@pytest.mark.distributed
def test_train_cli_dataset(csv_filename):
"""Test training using `ludwig train --dataset`."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("train", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
@pytest.mark.distributed
def test_train_cli_training_set(csv_filename):
"""Test training using `ludwig train --training_set`."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
validation_filename = shutil.copyfile(dataset_filename, os.path.join(tmpdir, "validation.csv"))
test_filename = shutil.copyfile(dataset_filename, os.path.join(tmpdir, "test.csv"))
_run_ludwig(
"train",
training_set=dataset_filename,
validation_set=validation_filename,
test_set=test_filename,
config=config_filename,
output_directory=tmpdir,
)
@pytest.mark.distributed
def test_train_cli_horovod(csv_filename):
"""Test training using `horovodrun -np 2 ludwig train --dataset`."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig_horovod(
"train",
dataset=dataset_filename,
config=config_filename,
output_directory=tmpdir,
experiment_name="horovod_experiment",
)
# Check that `model_load_path` works correctly
_run_ludwig_horovod(
"train",
dataset=dataset_filename,
config=config_filename,
output_directory=tmpdir,
model_load_path=os.path.join(tmpdir, "horovod_experiment_run", "model"),
)
@pytest.mark.skip(reason="Issue #1451: Use torchscript.")
@pytest.mark.distributed
def test_export_savedmodel_cli(csv_filename):
"""Test exporting Ludwig model to Tensorflows savedmodel format."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("train", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
_run_ludwig(
"export_savedmodel",
model=os.path.join(tmpdir, "experiment_run", "model"),
output_path=os.path.join(tmpdir, "savedmodel"),
)
@pytest.mark.skip(reason="Issue #1451: Use torchscript.")
@pytest.mark.distributed
def test_export_neuropod_cli(csv_filename):
"""Test exporting Ludwig model to neuropod format."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("train", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
_run_ludwig(
"export_neuropod",
model=os.path.join(tmpdir, "experiment_run", "model"),
output_path=os.path.join(tmpdir, "neuropod"),
)
@pytest.mark.distributed
def test_experiment_cli(csv_filename):
"""Test experiment cli."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("experiment", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
@pytest.mark.distributed
def test_predict_cli(csv_filename):
"""Test predict cli."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("train", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
_run_ludwig(
"predict",
dataset=dataset_filename,
model=os.path.join(tmpdir, "experiment_run", "model"),
output_directory=os.path.join(tmpdir, "predictions"),
)
@pytest.mark.distributed
def test_evaluate_cli(csv_filename):
"""Test evaluate cli."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("train", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
_run_ludwig(
"evaluate",
dataset=dataset_filename,
model=os.path.join(tmpdir, "experiment_run", "model"),
output_directory=os.path.join(tmpdir, "predictions"),
)
@pytest.mark.distributed
def test_hyperopt_cli(csv_filename):
"""Test hyperopt cli."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_hyperopt_data(csv_filename, config_filename)
_run_ludwig("hyperopt", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
@pytest.mark.distributed
def test_visualize_cli(csv_filename):
"""Test Ludwig 'visualize' cli."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("train", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
_run_ludwig(
"visualize",
visualization="learning_curves",
model_names="run",
training_statistics=os.path.join(tmpdir, "experiment_run", "training_statistics.json"),
output_directory=os.path.join(tmpdir, "visualizations"),
)
@pytest.mark.distributed
def test_collect_summary_activations_weights_cli(csv_filename):
"""Test collect_summary cli."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("train", dataset=dataset_filename, config=config_filename, output_directory=tmpdir)
completed_process = _run_ludwig("collect_summary", model=os.path.join(tmpdir, "experiment_run", "model"))
stdout = completed_process.stdout.decode("utf-8")
assert "Modules" in stdout
assert "Parameters" in stdout
@pytest.mark.distributed
def test_synthesize_dataset_cli(csv_filename):
"""Test synthesize_data cli."""
with tempfile.TemporaryDirectory() as tmpdir:
# test depends on default setting of --dataset_size
# if this parameter is specified, _run_ludwig fails when
# attempting to build the cli parameter structure
_run_ludwig(
"synthesize_dataset",
output_path=os.path.join(tmpdir, csv_filename),
features="'[ \
{name: text, type: text}, \
{name: category, type: category}, \
{name: number, type: number}, \
{name: binary, type: binary}, \
{name: set, type: set}, \
{name: bag, type: bag}, \
{name: sequence, type: sequence}, \
{name: timeseries, type: timeseries}, \
{name: date, type: date}, \
{name: h3, type: h3}, \
{name: vector, type: vector}, \
{name: audio, type: audio}, \
{name: image, type: image} \
]'",
)
@pytest.mark.distributed
def test_preprocess_cli(csv_filename):
"""Test preprocess `ludwig preprocess."""
with tempfile.TemporaryDirectory() as tmpdir:
config_filename = os.path.join(tmpdir, "config.yaml")
dataset_filename = _prepare_data(csv_filename, config_filename)
_run_ludwig("preprocess", dataset=dataset_filename, preprocessing_config=config_filename)
|
py | 1a469b3634c4ea884271aa34f7b9c85d293c5df0 | import sys
from datetime import datetime
import math
def extract(start_date, end_date,fname):
count = 1
with open(fname,'r') as f:
for line in f:
l = line.rstrip().split(',')
line_date = datetime.fromtimestamp(math.floor(float(l[0])))
if start_date <= line_date and line_date <= end_date:
l[0] = str(line_date)
print(l[1:])
#get python args
cmdargs = str(sys.argv)
try:
start_date = datetime.strptime(str(sys.argv[1]), '%b %d %Y %I:%M%p')
end_date = datetime.strptime(str(sys.argv[2]), '%b %d %Y %I:%M%p')
extract(start_date,end_date,str(sys.argv[3]))
except Exception as ex:
print('there was a problem', str(ex))
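# A minimal usage sketch (file name and argument values are hypothetical): the
# two dates must match the '%b %d %Y %I:%M%p' format expected above, e.g.
#
#     python extract.py "Jun 1 2017 1:33PM" "Jun 2 2017 9:00AM" events.csv
#
# which prints the non-timestamp fields of every line whose first column
# (a unix timestamp) falls inside the given range.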
|
py | 1a469c23e7d3454a873451906470aa2297194eca | """
File: anagram.py
Name: Jason Huang
----------------------------------
This program recursively finds all the anagram(s)
for the word input by user and terminates when the
input string matches the EXIT constant defined
at line 19
If you correctly implement this program, you should see the
number of anagrams for each word listed below:
* arm -> 3 anagrams
* contains -> 5 anagrams
* stop -> 6 anagrams
* tesla -> 10 anagrams
* spear -> 12 anagrams
"""
# Constants
FILE = 'dictionary.txt' # This is the filename of an English dictionary
EXIT = '-1' # Controls when to stop the loop
result_list = []
dictionary = []
def main():
# Main loop: repeatedly ask for a word and find all of its anagrams in the dictionary.
global result_list
while True:
result_list = []
print(f'Welcome to stanCode \"Anagram Generator\" (or {EXIT} to quit)')
s = input(str('Find anagrams for:'))
if s == EXIT:
break
else:
read_dictionary()
find_anagrams(s)
def read_dictionary():
# Load the dictionary words into the list (skip the read if it is already
# loaded, so repeated queries do not append duplicate entries).
if dictionary:
    return
with open(FILE, 'r') as f:
for line in f:
line = line.strip()
dictionary.append(line)
def find_anagrams(s):
"""
:param s: the word for which the user wants to find anagrams in the dictionary
:return: None; the anagrams found are printed and collected in result_list
"""
word = []
find_anagrams_helper(s, word)
print(f'{len(result_list)} anagrams: {result_list}')
def find_anagrams_helper(s, word):
"""
This is the helper function supporting find_anagrams(s).
:param s: the word for which the user wants to find anagrams in the dictionary
:param word: list collecting the indices of the letters of s chosen so far
:return: None; matching anagrams are appended to result_list
"""
if len(word) == len(s):
result = ''
for index in word:
result += s[index]
if result in dictionary:
if result not in result_list:
print('Searching...')
print(f'Found: \'{result}\' in dictionary..')
result_list.append(result)
else:
for i in range(len(s)):
if i not in word:
# choose
word.append(i)
# explore
find_anagrams_helper(s, word)
# un-choose
word.pop()
def has_prefix(sub_s):
"""
Pre-check whether any word in the dictionary starts with the given prefix.
:param sub_s: the prefix string built from the letters chosen so far
:return: boolean, True or False
"""
read_dictionary()
bool_list = []
for word in dictionary:
if word.startswith(sub_s):
bool_list.append(1)
else:
bool_list.append(0)
if 1 in bool_list:
return True
return False
if __name__ == '__main__':
main()
|
py | 1a469d3e3d88b316e683c60bc159ca6afcdeea69 | import operator
import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Series
import pandas._testing as tm
class TestSeriesAnalytics:
def test_matmul(self):
# matmul test is for GH #10259
a = Series(np.random.randn(4), index=["p", "q", "r", "s"])
b = DataFrame(
np.random.randn(3, 4), index=["1", "2", "3"], columns=["p", "q", "r", "s"]
).T
# Series @ DataFrame -> Series
result = operator.matmul(a, b)
expected = Series(np.dot(a.values, b.values), index=["1", "2", "3"])
tm.assert_series_equal(result, expected)
# DataFrame @ Series -> Series
result = operator.matmul(b.T, a)
expected = Series(np.dot(b.T.values, a.T.values), index=["1", "2", "3"])
tm.assert_series_equal(result, expected)
# Series @ Series -> scalar
result = operator.matmul(a, a)
expected = np.dot(a.values, a.values)
tm.assert_almost_equal(result, expected)
# GH 21530
# vector (1D np.array) @ Series (__rmatmul__)
result = operator.matmul(a.values, a)
expected = np.dot(a.values, a.values)
tm.assert_almost_equal(result, expected)
# GH 21530
# vector (1D list) @ Series (__rmatmul__)
result = operator.matmul(a.values.tolist(), a)
expected = np.dot(a.values, a.values)
tm.assert_almost_equal(result, expected)
# GH 21530
# matrix (2D np.array) @ Series (__rmatmul__)
result = operator.matmul(b.T.values, a)
expected = np.dot(b.T.values, a.values)
tm.assert_almost_equal(result, expected)
# GH 21530
# matrix (2D nested lists) @ Series (__rmatmul__)
result = operator.matmul(b.T.values.tolist(), a)
expected = np.dot(b.T.values, a.values)
tm.assert_almost_equal(result, expected)
# mixed dtype DataFrame @ Series
a["p"] = int(a.p)
result = operator.matmul(b.T, a)
expected = Series(np.dot(b.T.values, a.T.values), index=["1", "2", "3"])
tm.assert_series_equal(result, expected)
# different dtypes DataFrame @ Series
a = a.astype(int)
result = operator.matmul(b.T, a)
expected = Series(np.dot(b.T.values, a.T.values), index=["1", "2", "3"])
tm.assert_series_equal(result, expected)
msg = r"Dot product shape mismatch, \(4,\) vs \(3,\)"
# exception raised is of type Exception
with pytest.raises(Exception, match=msg):
a.dot(a.values[:3])
msg = "matrices are not aligned"
with pytest.raises(ValueError, match=msg):
a.dot(b.T)
def test_ptp(self):
# GH21614
N = 1000
arr = np.random.randn(N)
ser = Series(arr)
assert np.ptp(ser) == np.ptp(arr)
def test_repeat(self):
s = Series(np.random.randn(3), index=["a", "b", "c"])
reps = s.repeat(5)
exp = Series(s.values.repeat(5), index=s.index.values.repeat(5))
tm.assert_series_equal(reps, exp)
to_rep = [2, 3, 4]
reps = s.repeat(to_rep)
exp = Series(s.values.repeat(to_rep), index=s.index.values.repeat(to_rep))
tm.assert_series_equal(reps, exp)
def test_numpy_repeat(self):
s = Series(np.arange(3), name="x")
expected = Series(s.values.repeat(2), name="x", index=s.index.values.repeat(2))
tm.assert_series_equal(np.repeat(s, 2), expected)
msg = "the 'axis' parameter is not supported"
with pytest.raises(ValueError, match=msg):
np.repeat(s, 2, axis=0)
def test_is_monotonic(self):
s = Series(np.random.randint(0, 10, size=1000))
assert not s.is_monotonic
s = Series(np.arange(1000))
assert s.is_monotonic is True
assert s.is_monotonic_increasing is True
s = Series(np.arange(1000, 0, -1))
assert s.is_monotonic_decreasing is True
s = Series(pd.date_range("20130101", periods=10))
assert s.is_monotonic is True
assert s.is_monotonic_increasing is True
s = Series(list(reversed(s.tolist())))
assert s.is_monotonic is False
assert s.is_monotonic_decreasing is True
|
py | 1a469e08ae5dda2de63579ace17d8a8be65733ea | #!/usr/bin/env python
# This example demonstrates the use of multiline 2D text using
# vtkTextMappers. It shows several justifications as well as
# single-line and multiple-line text inputs.
import vtk
font_size = 14
# Create the text mappers and the associated Actor2Ds.
# The font and text properties (except justification) are the same for
# each single line mapper. Let's create a common text property object
singleLineTextProp = vtk.vtkTextProperty()
singleLineTextProp.SetFontSize(font_size)
singleLineTextProp.SetFontFamilyToArial()
singleLineTextProp.BoldOff()
singleLineTextProp.ItalicOff()
singleLineTextProp.ShadowOff()
# The font and text properties (except justification) are the same for
# each multi line mapper. Let's create a common text property object
multiLineTextProp = vtk.vtkTextProperty()
multiLineTextProp.ShallowCopy(singleLineTextProp)
multiLineTextProp.BoldOn()
multiLineTextProp.ItalicOn()
multiLineTextProp.ShadowOn()
multiLineTextProp.SetLineSpacing(0.8)
# The text is on a single line and bottom-justified.
singleLineTextB = vtk.vtkTextMapper()
singleLineTextB.SetInput("Single line (bottom)")
tprop = singleLineTextB.GetTextProperty()
tprop.ShallowCopy(singleLineTextProp)
tprop.SetVerticalJustificationToBottom()
tprop.SetColor(1, 0, 0)
singleLineTextActorB = vtk.vtkActor2D()
singleLineTextActorB.SetMapper(singleLineTextB)
singleLineTextActorB.GetPositionCoordinate().SetCoordinateSystemToNormalizedDisplay()
singleLineTextActorB.GetPositionCoordinate().SetValue(0.05, 0.85)
# The text is on a single line and center-justified (vertical
# justification).
singleLineTextC = vtk.vtkTextMapper()
singleLineTextC.SetInput("Single line (centered)")
tprop = singleLineTextC.GetTextProperty()
tprop.ShallowCopy(singleLineTextProp)
tprop.SetVerticalJustificationToCentered()
tprop.SetColor(0, 1, 0)
singleLineTextActorC = vtk.vtkActor2D()
singleLineTextActorC.SetMapper(singleLineTextC)
singleLineTextActorC.GetPositionCoordinate().SetCoordinateSystemToNormalizedDisplay()
singleLineTextActorC.GetPositionCoordinate().SetValue(0.05, 0.75)
# The text is on a single line and top-justified.
singleLineTextT = vtk.vtkTextMapper()
singleLineTextT.SetInput("Single line (top)")
tprop = singleLineTextT.GetTextProperty()
tprop.ShallowCopy(singleLineTextProp)
tprop.SetVerticalJustificationToTop()
tprop.SetColor(0, 0, 1)
singleLineTextActorT = vtk.vtkActor2D()
singleLineTextActorT.SetMapper(singleLineTextT)
singleLineTextActorT.GetPositionCoordinate().SetCoordinateSystemToNormalizedDisplay()
singleLineTextActorT.GetPositionCoordinate().SetValue(0.05, 0.65)
# The text is on multiple lines and left- and top-justified.
textMapperL = vtk.vtkTextMapper()
textMapperL.SetInput("This is\nmulti-line\ntext output\n(left-top)")
tprop = textMapperL.GetTextProperty()
tprop.ShallowCopy(multiLineTextProp)
tprop.SetJustificationToLeft()
tprop.SetVerticalJustificationToTop()
tprop.SetColor(1, 0, 0)
textActorL = vtk.vtkActor2D()
textActorL.SetMapper(textMapperL)
textActorL.GetPositionCoordinate().SetCoordinateSystemToNormalizedDisplay()
textActorL.GetPositionCoordinate().SetValue(0.05, 0.5)
# The text is on multiple lines and center-justified (both horizontal and
# vertical).
textMapperC = vtk.vtkTextMapper()
textMapperC.SetInput("This is\nmulti-line\ntext output\n(centered)")
tprop = textMapperC.GetTextProperty()
tprop.ShallowCopy(multiLineTextProp)
tprop.SetJustificationToCentered()
tprop.SetVerticalJustificationToCentered()
tprop.SetColor(0, 1, 0)
textActorC = vtk.vtkActor2D()
textActorC.SetMapper(textMapperC)
textActorC.GetPositionCoordinate().SetCoordinateSystemToNormalizedDisplay()
textActorC.GetPositionCoordinate().SetValue(0.5, 0.5)
# The text is on multiple lines and right- and bottom-justified.
textMapperR = vtk.vtkTextMapper()
textMapperR.SetInput("This is\nmulti-line\ntext output\n(right-bottom)")
tprop = textMapperR.GetTextProperty()
tprop.ShallowCopy(multiLineTextProp)
tprop.SetJustificationToRight()
tprop.SetVerticalJustificationToBottom()
tprop.SetColor(0, 0, 1)
textActorR = vtk.vtkActor2D()
textActorR.SetMapper(textMapperR)
textActorR.GetPositionCoordinate().SetCoordinateSystemToNormalizedDisplay()
textActorR.GetPositionCoordinate().SetValue(0.95, 0.5)
# Draw the grid to demonstrate the placement of the text.
# Set up the necessary points.
Pts = vtk.vtkPoints()
Pts.InsertNextPoint(0.05, 0.0, 0.0)
Pts.InsertNextPoint(0.05, 1.0, 0.0)
Pts.InsertNextPoint(0.5, 0.0, 0.0)
Pts.InsertNextPoint(0.5, 1.0, 0.0)
Pts.InsertNextPoint(0.95, 0.0, 0.0)
Pts.InsertNextPoint(0.95, 1.0, 0.0)
Pts.InsertNextPoint(0.0, 0.5, 0.0)
Pts.InsertNextPoint(1.0, 0.5, 0.0)
Pts.InsertNextPoint(0.00, 0.85, 0.0)
Pts.InsertNextPoint(0.50, 0.85, 0.0)
Pts.InsertNextPoint(0.00, 0.75, 0.0)
Pts.InsertNextPoint(0.50, 0.75, 0.0)
Pts.InsertNextPoint(0.00, 0.65, 0.0)
Pts.InsertNextPoint(0.50, 0.65, 0.0)
# Set up the lines that use these points.
Lines = vtk.vtkCellArray()
Lines.InsertNextCell(2)
Lines.InsertCellPoint(0)
Lines.InsertCellPoint(1)
Lines.InsertNextCell(2)
Lines.InsertCellPoint(2)
Lines.InsertCellPoint(3)
Lines.InsertNextCell(2)
Lines.InsertCellPoint(4)
Lines.InsertCellPoint(5)
Lines.InsertNextCell(2)
Lines.InsertCellPoint(6)
Lines.InsertCellPoint(7)
Lines.InsertNextCell(2)
Lines.InsertCellPoint(8)
Lines.InsertCellPoint(9)
Lines.InsertNextCell(2)
Lines.InsertCellPoint(10)
Lines.InsertCellPoint(11)
Lines.InsertNextCell(2)
Lines.InsertCellPoint(12)
Lines.InsertCellPoint(13)
# Create a grid that uses these points and lines.
Grid = vtk.vtkPolyData()
Grid.SetPoints(Pts)
Grid.SetLines(Lines)
# Set up the coordinate system.
normCoords = vtk.vtkCoordinate()
normCoords.SetCoordinateSystemToNormalizedViewport()
# Set up the mapper and actor (2D) for the grid.
mapper = vtk.vtkPolyDataMapper2D()
mapper.SetInputData(Grid)
mapper.SetTransformCoordinate(normCoords)
gridActor = vtk.vtkActor2D()
gridActor.SetMapper(mapper)
gridActor.GetProperty().SetColor(0.1, 0.1, 0.1)
# Create the Renderer, RenderWindow, and RenderWindowInteractor
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer; set the background and size; zoom in
# closer to the image; render
ren.AddActor2D(textActorL)
ren.AddActor2D(textActorC)
ren.AddActor2D(textActorR)
ren.AddActor2D(singleLineTextActorB)
ren.AddActor2D(singleLineTextActorC)
ren.AddActor2D(singleLineTextActorT)
ren.AddActor2D(gridActor)
ren.SetBackground(1, 1, 1)
renWin.SetSize(500, 300)
ren.GetActiveCamera().Zoom(1.5)
iren.Initialize()
renWin.Render()
iren.Start()
|
py | 1a469e3b0cdaba34275f0633e12b5a78ebbc25c1 | from chainer.dataset import convert
from chainer.dataset import dataset_mixin
from chainer.dataset import download
from chainer.dataset import iterator
DatasetMixin = dataset_mixin.DatasetMixin
Iterator = iterator.Iterator
concat_examples = convert.concat_examples
get_dataset_root = download.get_dataset_root
set_dataset_root = download.set_dataset_root
get_dataset_directory = download.get_dataset_directory
cached_download = download.cached_download
cache_or_load_file = download.cache_or_load_file
|
py | 1a469efe32009b013ddd803c4c3181ec5bbc8a83 | # -*- coding: utf-8 -*-
"""
Created on Thu Oct 21 10:09:24 2021
@author: jbt5jf
TESTING SCRIPT for the neural network
"""
import matplotlib.pyplot as plt
import numpy as np
import imageio
from skimage.transform import resize
import tqdm
import cv2
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras.layers import *
import tensorflow.keras as keras
from videos import sepVideos
import os
from Dataset import Dataset
#CHECK IF model is actaully saving correctly
def testModel(model, path = "./mouse_heart/"):
input_folder = path
test = Dataset('.')
videos = [f for f in os.listdir(input_folder) if os.path.isfile(input_folder+f) and f[-3:]=='avi']
#for video in videos:
video = videos[3]
print(video)
if not os.path.exists(input_folder+video): os.makedirs(input_folder+video)
print('Splitting', video, '...')
x = sepVideos(video, save=False, resize=(128,128))
print(x.shape)
segnet = tf.keras.models.load_model(model)  # load the model passed in rather than a hardcoded checkpoint
for i in range(test.shape[0]):
img, mask = test[i]
pred = segnet.predict(img.reshape(128,128,1)[tf.newaxis,...])
fig, (ax1, ax2) = plt.subplots(1, 2)
ax1.imshow(img)
ax2.imshow(pred.reshape(128,128))
plt.show()
break
#Try the network on the video
def testVideo(model, path = "./mouse_heart/"):
input_folder = path
test = Dataset('.')
videos = [f for f in os.listdir(input_folder) if os.path.isfile(input_folder+f) and f[-3:]=='avi']
for video in videos:
print(video)
if not os.path.exists(input_folder+video): os.makedirs(input_folder+video)
print('Splitting', video, '...')
x = sepVideos(video, save=False, resize=(128,128))
#print(x.shape)
segnet = tf.keras.models.load_model(model)
pred = segnet.predict(x.reshape(-1,128,128,1)).reshape(-1,128,128)
""" DEBUG STUFF
pred = segnet.predict(img.reshape(128,128,1)[tf.newaxis,...])
fig, (ax1, ax2) = plt.subplots(1, 2)
ax1.imshow(img)
ax2.imshow(pred.reshape(128,128))
"""
size = 128, 128*2 # Height, Width
fps = 10
print(pred.shape)
out = cv2.VideoWriter(f"{video.split('.')[0]}_segmented.mp4", cv2.VideoWriter_fourcc(*'mp4v'), fps, (size[1], size[0]), False)
for i in range(pred.shape[0]):
test = np.concatenate([x[i], pred[i]*255], axis=1).astype('uint8')
out.write(test)
out.release()
break
if __name__ == "__main__":
keras.backend.clear_session()
model = '2021-10-26_12-18-12model'+'.h5'
testModel(model)
testVideo(model)
|
py | 1a469f9589ad440655ac3090e606acf503e6badf | """
This module contains helper functions for controlling caching. It does so by
managing the "Vary" header of responses. It includes functions to patch the
header of response objects directly and decorators that change functions to do
that header-patching themselves.
For information on the Vary header, see:
https://tools.ietf.org/html/rfc7231#section-7.1.4
Essentially, the "Vary" HTTP header defines which headers a cache should take
into account when building its cache key. Requests with the same path but
different header content for headers named in "Vary" need to get different
cache keys to prevent delivery of wrong content.
An example: i18n middleware would need to distinguish caches by the
"Accept-language" header.
"""
import hashlib
import re
import time
from django.conf import settings
from django.core.cache import caches
from django.http import HttpResponse, HttpResponseNotModified
from django.utils.encoding import iri_to_uri
from django.utils.http import (
http_date, parse_etags, parse_http_date_safe, quote_etag,
)
from django.utils.log import log_response
from django.utils.timezone import get_current_timezone_name
from django.utils.translation import get_language
cc_delim_re = re.compile(r'\s*,\s*')
def patch_cache_control(response, **kwargs):
"""
Patch the Cache-Control header by adding all keyword arguments to it.
The transformation is as follows:
* All keyword parameter names are turned to lowercase, and underscores
are converted to hyphens.
* If the value of a parameter is True (exactly True, not just a
true value), only the parameter name is added to the header.
* All other parameters are added with their value, after applying
str() to it.
"""
def dictitem(s):
t = s.split('=', 1)
if len(t) > 1:
return (t[0].lower(), t[1])
else:
return (t[0].lower(), True)
def dictvalue(t):
if t[1] is True:
return t[0]
else:
return '%s=%s' % (t[0], t[1])
if response.get('Cache-Control'):
cc = cc_delim_re.split(response['Cache-Control'])
cc = dict(dictitem(el) for el in cc)
else:
cc = {}
# If there's already a max-age header but we're being asked to set a new
# max-age, use the minimum of the two ages. In practice this happens when
# a decorator and a piece of middleware both operate on a given view.
if 'max-age' in cc and 'max_age' in kwargs:
kwargs['max_age'] = min(int(cc['max-age']), kwargs['max_age'])
# Allow overriding private caching and vice versa
if 'private' in cc and 'public' in kwargs:
del cc['private']
elif 'public' in cc and 'private' in kwargs:
del cc['public']
for (k, v) in kwargs.items():
cc[k.replace('_', '-')] = v
cc = ', '.join(dictvalue(el) for el in cc.items())
response['Cache-Control'] = cc
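# A small sketch of the keyword transformation described in the docstring
# (values are illustrative):
#
#     patch_cache_control(response, max_age=3600, no_cache=True)
#     # underscores become hyphens and True-valued flags drop their value,
#     # so the header becomes "Cache-Control: max-age=3600, no-cache".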
def get_max_age(response):
"""
Return the max-age from the response Cache-Control header as an integer,
or None if it wasn't found or wasn't an integer.
"""
if not response.has_header('Cache-Control'):
return
cc = dict(_to_tuple(el) for el in cc_delim_re.split(response['Cache-Control']))
try:
return int(cc['max-age'])
except (ValueError, TypeError, KeyError):
pass
def set_response_etag(response):
if not response.streaming:
response['ETag'] = quote_etag(hashlib.md5(response.content).hexdigest())
return response
def _precondition_failed(request):
response = HttpResponse(status=412)
log_response(
'Precondition Failed: %s', request.path,
response=response,
request=request,
)
return response
def _not_modified(request, response=None):
new_response = HttpResponseNotModified()
if response:
# Preserve the headers required by Section 4.1 of RFC 7232, as well as
# Last-Modified.
for header in ('Cache-Control', 'Content-Location', 'Date', 'ETag', 'Expires', 'Last-Modified', 'Vary'):
if header in response:
new_response[header] = response[header]
# Preserve cookies as per the cookie specification: "If a proxy server
# receives a response which contains a Set-cookie header, it should
# propagate the Set-cookie header to the client, regardless of whether
# the response was 304 (Not Modified) or 200 (OK).
# https://curl.haxx.se/rfc/cookie_spec.html
new_response.cookies = response.cookies
return new_response
def get_conditional_response(request, etag=None, last_modified=None, response=None):
# Only return conditional responses on successful requests.
if response and not (200 <= response.status_code < 300):
return response
# Get HTTP request headers.
if_match_etags = parse_etags(request.META.get('HTTP_IF_MATCH', ''))
if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
if_unmodified_since = if_unmodified_since and parse_http_date_safe(if_unmodified_since)
if_none_match_etags = parse_etags(request.META.get('HTTP_IF_NONE_MATCH', ''))
if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
if_modified_since = if_modified_since and parse_http_date_safe(if_modified_since)
# Step 1 of section 6 of RFC 7232: Test the If-Match precondition.
if if_match_etags and not _if_match_passes(etag, if_match_etags):
return _precondition_failed(request)
# Step 2: Test the If-Unmodified-Since precondition.
if (not if_match_etags and if_unmodified_since and
not _if_unmodified_since_passes(last_modified, if_unmodified_since)):
return _precondition_failed(request)
# Step 3: Test the If-None-Match precondition.
if if_none_match_etags and not _if_none_match_passes(etag, if_none_match_etags):
if request.method in ('GET', 'HEAD'):
return _not_modified(request, response)
else:
return _precondition_failed(request)
# Step 4: Test the If-Modified-Since precondition.
if (not if_none_match_etags and if_modified_since and
not _if_modified_since_passes(last_modified, if_modified_since)):
if request.method in ('GET', 'HEAD'):
return _not_modified(request, response)
# Step 5: Test the If-Range precondition (not supported).
# Step 6: Return original response since there isn't a conditional response.
return response
def _if_match_passes(target_etag, etags):
"""
Test the If-Match comparison as defined in section 3.1 of RFC 7232.
"""
if not target_etag:
# If there isn't an ETag, then there can't be a match.
return False
elif etags == ['*']:
# The existence of an ETag means that there is "a current
# representation for the target resource", even if the ETag is weak,
# so there is a match to '*'.
return True
elif target_etag.startswith('W/'):
# A weak ETag can never strongly match another ETag.
return False
else:
# Since the ETag is strong, this will only return True if there's a
# strong match.
return target_etag in etags
def _if_unmodified_since_passes(last_modified, if_unmodified_since):
"""
Test the If-Unmodified-Since comparison as defined in section 3.4 of
RFC 7232.
"""
return last_modified and last_modified <= if_unmodified_since
def _if_none_match_passes(target_etag, etags):
"""
Test the If-None-Match comparison as defined in section 3.2 of RFC 7232.
"""
if not target_etag:
# If there isn't an ETag, then there isn't a match.
return True
elif etags == ['*']:
# The existence of an ETag means that there is "a current
# representation for the target resource", so there is a match to '*'.
return False
else:
# The comparison should be weak, so look for a match after stripping
# off any weak indicators.
target_etag = target_etag.strip('W/')
etags = (etag.strip('W/') for etag in etags)
return target_etag not in etags
def _if_modified_since_passes(last_modified, if_modified_since):
"""
Test the If-Modified-Since comparison as defined in section 3.3 of RFC 7232.
"""
return not last_modified or last_modified > if_modified_since
def patch_response_headers(response, cache_timeout=None):
"""
Add HTTP caching headers to the given HttpResponse: Expires and
Cache-Control.
Each header is only added if it isn't already set.
cache_timeout is in seconds. The CACHE_MIDDLEWARE_SECONDS setting is used
by default.
"""
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
if cache_timeout < 0:
cache_timeout = 0 # Can't have max-age negative
if not response.has_header('Expires'):
response['Expires'] = http_date(time.time() + cache_timeout)
patch_cache_control(response, max_age=cache_timeout)
def add_never_cache_headers(response):
"""
Add headers to a response to indicate that a page should never be cached.
"""
patch_response_headers(response, cache_timeout=-1)
patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True)
def patch_vary_headers(response, newheaders):
"""
Add (or update) the "Vary" header in the given HttpResponse object.
newheaders is a list of header names that should be in "Vary". Existing
headers in "Vary" aren't removed.
"""
# Note that we need to keep the original order intact, because cache
# implementations may rely on the order of the Vary contents in, say,
# computing an MD5 hash.
if response.has_header('Vary'):
vary_headers = cc_delim_re.split(response['Vary'])
else:
vary_headers = []
# Use .lower() here so we treat headers as case-insensitive.
existing_headers = {header.lower() for header in vary_headers}
additional_headers = [newheader for newheader in newheaders
if newheader.lower() not in existing_headers]
response['Vary'] = ', '.join(vary_headers + additional_headers)
def has_vary_header(response, header_query):
"""
Check to see if the response has a given header name in its Vary header.
"""
if not response.has_header('Vary'):
return False
vary_headers = cc_delim_re.split(response['Vary'])
existing_headers = {header.lower() for header in vary_headers}
return header_query.lower() in existing_headers
def _i18n_cache_key_suffix(request, cache_key):
"""If necessary, add the current locale or time zone to the cache key."""
if settings.USE_I18N or settings.USE_L10N:
# first check if LocaleMiddleware or another middleware added
# LANGUAGE_CODE to request, then fall back to the active language
# which in turn can also fall back to settings.LANGUAGE_CODE
cache_key += '.%s' % getattr(request, 'LANGUAGE_CODE', get_language())
if settings.USE_TZ:
cache_key += '.%s' % get_current_timezone_name()
return cache_key
def _generate_cache_key(request, method, headerlist, key_prefix):
"""Return a cache key from the headers given in the header list."""
ctx = hashlib.md5()
for header in headerlist:
value = request.META.get(header)
if value is not None:
ctx.update(value.encode())
url = hashlib.md5(iri_to_uri(request.build_absolute_uri()).encode('ascii'))
cache_key = 'views.decorators.cache.cache_page.%s.%s.%s.%s' % (
key_prefix, method, url.hexdigest(), ctx.hexdigest())
return _i18n_cache_key_suffix(request, cache_key)
def _generate_cache_header_key(key_prefix, request):
"""Return a cache key for the header cache."""
url = hashlib.md5(iri_to_uri(request.build_absolute_uri()).encode('ascii'))
cache_key = 'views.decorators.cache.cache_header.%s.%s' % (
key_prefix, url.hexdigest())
return _i18n_cache_key_suffix(request, cache_key)
def get_cache_key(request, key_prefix=None, method='GET', cache=None):
"""
Return a cache key based on the request URL and query. It can be used
in the request phase because it pulls the list of headers to take into
account from the global URL registry and uses those to build a cache key
to check against.
If there isn't a headerlist stored, return None, indicating that the page
needs to be rebuilt.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
headerlist = cache.get(cache_key)
if headerlist is not None:
return _generate_cache_key(request, method, headerlist, key_prefix)
else:
return None
def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cache=None):
"""
Learn what headers to take into account for some request URL from the
response object. Store those headers in a global URL registry so that
later access to that URL will know what headers to take into account
without building the response object itself. The headers are named in the
Vary header of the response, but we want to prevent response generation.
The list of headers to use for cache key generation is stored in the same
cache as the pages themselves. If the cache ages some data out of the
cache, this just means that we have to build the response once to get at
the Vary header and so at the list of headers to use for the cache key.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
if response.has_header('Vary'):
is_accept_language_redundant = settings.USE_I18N or settings.USE_L10N
# If i18n or l10n are used, the generated cache key will be suffixed
# with the current locale. Adding the raw value of Accept-Language is
# redundant in that case and would result in storing the same content
# under multiple keys in the cache. See #18191 for details.
headerlist = []
for header in cc_delim_re.split(response['Vary']):
header = header.upper().replace('-', '_')
if header != 'ACCEPT_LANGUAGE' or not is_accept_language_redundant:
headerlist.append('HTTP_' + header)
headerlist.sort()
cache.set(cache_key, headerlist, cache_timeout)
return _generate_cache_key(request, request.method, headerlist, key_prefix)
else:
# if there is no Vary header, we still need a cache key
# for the request.build_absolute_uri()
cache.set(cache_key, [], cache_timeout)
return _generate_cache_key(request, request.method, [], key_prefix)
def _to_tuple(s):
t = s.split('=', 1)
if len(t) == 2:
return t[0].lower(), t[1]
return t[0].lower(), True
|
wsgi | 1a46a08c0e282e3484c0db08ba717ccb3ed0167d | # Set this to fbarc viewers's virtual env
activate_this = '/opt/fbarc/ENV/bin/activate_this.py'
with open(activate_this) as file_:
exec(file_.read(), dict(__file__=activate_this))
import sys
# Set this to the path of topic tracker
sys.path.insert(0, '/opt/fbarc')
# Configure fbarc
import os
os.environ['FBARC_INDEX']='true'
os.environ['FBARC_FILES']='/path/to/file1.jsonl,/path/to/dir'
from fbarc_viewer import app as application |
py | 1a46a0ddbb67b9b67f8ccd1382900c60ee500f92 | from django.core.management.base import BaseCommand, CommandError
from ark.transactions import TxBroadcaster
class Command(BaseCommand):
help = 'start/stop a TxBroadcaster'
def add_arguments(self, parser):
parser.add_argument('uid', nargs=1, type=int)
parser.add_argument('network', nargs=1, type=str)
def handle(self, *args, **options):
self.stdout.write('creating TxBroadcaster: {uid}, network: {network}'.format(
uid=options['uid'][0],
network=options['network'][0]))
caster = TxBroadcaster(uid=options['uid'][0], network=options['network'][0])
self.stdout.write('created successfully')
self.stdout.write('starting TxBroadcaster: {}'.format(options['uid'][0]))
caster.run()
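# A hedged usage sketch: Django exposes this command under the module's file
# name, so assuming the file is saved as start_txbroadcaster.py it could be
# invoked as (uid and network values below are hypothetical):
#
#     python manage.py start_txbroadcaster 1 mainnet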
|
py | 1a46a187c5fbe22cbdacd9173813d6bce40a0031 | from NIENV import *
import cv2
# USEFUL
# self.input(index) <- access to input data
# self.outputs[index].set_val(val) <- set output data port value
# self.main_widget <- access to main widget
class AdjustBrightness_NodeInstance(NodeInstance):
def __init__(self, params):
super(AdjustBrightness_NodeInstance, self).__init__(params)
# self.special_actions['action name'] = {'method': M(self.action_method)}
self.img_unbright = None
self.img_bright= None
def update_event(self, input_called=-1):
self.img_unbright = self.input(0)
alpha = int(self.input(1))  # contrast / gain factor
beta = int(self.input(2))  # brightness offset
# Pass alpha and beta as keyword arguments: the second positional
# parameter of cv2.convertScaleAbs is dst, not alpha.
self.img_bright = cv2.convertScaleAbs(self.img_unbright, alpha=alpha, beta=beta)
self.main_widget.show_image(self.img_bright)
self.set_output_val(0, self.img_bright)
def get_data(self):
data = {}
# ...
return data
def set_data(self, data):
pass
# ...
def remove_event(self):
pass
|
py | 1a46a1da1b161d84052f982e6638474e3d76151f | import numpy as np
import ccobra
class UniformModel(ccobra.CCobraModel):
def __init__(self, name='UniformModel'):
super(UniformModel, self).__init__(name, ["nonmonotonic"], ["single-choice"])
def predict(self, item, **kwargs):
return item.choices[np.random.randint(0, len(item.choices))]
|
py | 1a46a1fe8082b00993757e988547f4f0ac645327 | #!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_data_safe_user_assessment_facts
short_description: Fetches details about one or multiple UserAssessment resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple UserAssessment resources in Oracle Cloud Infrastructure
- Gets a list of user assessments.
- The ListUserAssessments operation returns only the assessments in the specified `compartmentId`.
The list does not include any subcompartments of the compartmentId passed.
- The parameter `accessLevel` specifies whether to return only those compartments for which the
requestor has INSPECT permissions on at least one resource directly
or indirectly (ACCESSIBLE) (the resource can be in a subcompartment) or to return Not Authorized if
Principal doesn't have access to even one of the child compartments. This is valid only when
`compartmentIdInSubtree` is set to `true`.
- The parameter `compartmentIdInSubtree` applies when you perform ListUserAssessments on the
`compartmentId` passed and when it is set to true, the entire hierarchy of compartments can be returned.
To get a full list of all compartments and subcompartments in the tenancy (root compartment),
set the parameter `compartmentIdInSubtree` to true and `accessLevel` to ACCESSIBLE.
- If I(user_assessment_id) is specified, the details of a single UserAssessment will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
user_assessment_id:
description:
- The OCID of the user assessment.
- Required to get a specific user_assessment.
type: str
aliases: ["id"]
compartment_id:
description:
- A filter to return only resources that match the specified compartment OCID.
- Required to list multiple user_assessments.
type: str
compartment_id_in_subtree:
description:
- Default is false.
When set to true, the hierarchy of compartments is traversed and all compartments and subcompartments in the tenancy are returned. Depends on the
'accessLevel' setting.
type: bool
access_level:
description:
- Valid values are RESTRICTED and ACCESSIBLE. Default is RESTRICTED.
Setting this to ACCESSIBLE returns only those compartments for which the
user has INSPECT permissions directly or indirectly (permissions can be on a
resource in a subcompartment). When set to RESTRICTED permissions are checked and no partial results are displayed.
type: str
choices:
- "RESTRICTED"
- "ACCESSIBLE"
display_name:
description:
- A filter to return only resources that match the specified display name.
type: str
aliases: ["name"]
schedule_user_assessment_id:
description:
- The OCID of the user assessment of type SAVE_SCHEDULE.
type: str
is_schedule_assessment:
description:
- A filter to return only user assessments of type SAVE_SCHEDULE.
type: bool
is_baseline:
description:
- A filter to return only user assessments that are set as baseline.
type: bool
target_id:
description:
- A filter to return only items that match the specified target.
type: str
type:
description:
- A filter to return only items that match the specified assessment type.
type: str
choices:
- "LATEST"
- "SAVED"
- "COMPARTMENT"
- "SAVE_SCHEDULE"
triggered_by:
description:
- A filter to return user assessments that were created by either the system or by a user only.
type: str
choices:
- "USER"
- "SYSTEM"
time_created_greater_than_or_equal_to:
description:
- A filter to return only user assessments that were created after the specified date and time, as defined by
L(RFC3339,https://tools.ietf.org/html/rfc3339).
Using timeCreatedGreaterThanOrEqualTo parameter retrieves all assessments created after that date.
- "**Example:** 2016-12-19T16:39:57.600Z"
type: str
time_created_less_than:
description:
- "Search for items that were created before a specific date.
Specifying this parameter corresponding `timeCreatedLessThan`
parameter will retrieve all items created before the
specified created date, in \\"YYYY-MM-ddThh:mmZ\\" format with a Z offset, as
defined by RFC 3339."
- "**Example:** 2016-12-19T16:39:57.600Z"
type: str
lifecycle_state:
description:
- The current state of the user assessment.
type: str
choices:
- "CREATING"
- "SUCCEEDED"
- "UPDATING"
- "DELETING"
- "FAILED"
sort_order:
description:
- The sort order to use, either ascending (ASC) or descending (DESC).
type: str
choices:
- "ASC"
- "DESC"
sort_by:
description:
- The field to sort by. You can specify only one sort order (sortOrder). The default order for timeCreated is descending.
type: str
choices:
- "timeCreated"
- "displayName"
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: Get a specific user_assessment
oci_data_safe_user_assessment_facts:
# required
user_assessment_id: "ocid1.userassessment.oc1..xxxxxxEXAMPLExxxxxx"
- name: List user_assessments
oci_data_safe_user_assessment_facts:
# required
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
# optional
compartment_id_in_subtree: true
access_level: RESTRICTED
display_name: display_name_example
schedule_user_assessment_id: "ocid1.scheduleuserassessment.oc1..xxxxxxEXAMPLExxxxxx"
is_schedule_assessment: true
is_baseline: true
target_id: "ocid1.target.oc1..xxxxxxEXAMPLExxxxxx"
type: LATEST
triggered_by: USER
time_created_greater_than_or_equal_to: 2013-10-20T19:20:30+01:00
time_created_less_than: 2013-10-20T19:20:30+01:00
lifecycle_state: CREATING
sort_order: ASC
sort_by: timeCreated
"""
RETURN = """
user_assessments:
description:
- List of UserAssessment resources
returned: on success
type: complex
contains:
compartment_id:
description:
- The OCID of the compartment that contains the user assessment.
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
description:
description:
- The description of the user assessment.
returned: on success
type: str
sample: description_example
display_name:
description:
- The display name of the user assessment.
returned: on success
type: str
sample: display_name_example
id:
description:
- The OCID of the user assessment.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
ignored_targets:
description:
- "List containing maps as values.
Example: `{\\"Operations\\": [ {\\"CostCenter\\": \\"42\\"} ] }`"
returned: on success
type: list
sample: []
ignored_assessment_ids:
description:
- "List containing maps as values.
Example: `{\\"Operations\\": [ {\\"CostCenter\\": \\"42\\"} ] }`"
returned: on success
type: list
sample: []
is_baseline:
description:
- Indicates if the user assessment is set as a baseline. This is applicable only to saved user assessments.
returned: on success
type: bool
sample: true
is_deviated_from_baseline:
description:
- Indicates if the user assessment deviates from the baseline.
returned: on success
type: bool
sample: true
last_compared_baseline_id:
description:
- The OCID of the last user assessment baseline against which the latest assessment was compared.
returned: on success
type: str
sample: "ocid1.lastcomparedbaseline.oc1..xxxxxxEXAMPLExxxxxx"
lifecycle_state:
description:
- The current state of the user assessment.
returned: on success
type: str
sample: CREATING
lifecycle_details:
description:
- Details about the current state of the user assessment.
returned: on success
type: str
sample: lifecycle_details_example
schedule_assessment_id:
description:
- The OCID of the user assessment that is responsible for creating this scheduled save assessment.
returned: on success
type: str
sample: "ocid1.scheduleassessment.oc1..xxxxxxEXAMPLExxxxxx"
schedule:
description:
- "Schedule of the assessment that runs periodically in this specified format:
<version-string>;<version-specific-schedule>"
- " Allowed version strings - \\"v1\\"
v1's version specific schedule -<ss> <mm> <hh> <day-of-week> <day-of-month>
Each of the above fields potentially introduce constraints. A workrequest is created only
when clock time satisfies all the constraints. Constraints introduced:
1. seconds = <ss> (So, the allowed range for <ss> is [0, 59])
2. minutes = <mm> (So, the allowed range for <mm> is [0, 59])
3. hours = <hh> (So, the allowed range for <hh> is [0, 23])
<day-of-week> can be either '*' (without quotes or a number between 1(Monday) and 7(Sunday))
4. No constraint introduced when it is '*'. When not, day of week must equal the given value
<day-of-month> can be either '*' (without quotes or a number between 1 and 28)
5. No constraint introduced when it is '*'. When not, day of month must equal the given value"
returned: on success
type: str
sample: schedule_example
statistics:
description:
- "Map that contains maps of values.
Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {}
target_ids:
description:
- Array of database target OCIDs.
returned: on success
type: list
sample: []
time_created:
description:
- The date and time the user assessment was created, in the format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the user assessment was last updated, in the format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
triggered_by:
description:
- Indicates whether the user assessment was created by system or user.
returned: on success
type: str
sample: USER
type:
description:
- "Type of user assessment. Type can be:"
- "LATEST: The most up-to-date assessment that is running automatically for a target. It is system generated.
SAVED: A saved user assessment. LATEST assessments will always be saved to maintain the history of runs. A SAVED assessment is also generated
by a 'refresh' action (triggered by the user).
SAVE_SCHEDULE: A schedule to periodically save LATEST assessments.
COMPARTMENT: An automatic managed assessment type that stores all details of targets in one compartment. This will keep an up-to-date status
of all database risks in one compartment.
It is automatically updated once the latest assessment or refresh action is executed, as well as when a target is deleted or move to a
different compartment."
returned: on success
type: str
sample: LATEST
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see
L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm)
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see L(Resource
Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm)
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
system_tags:
description:
- "System tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags.
Example: `{\\"orcl-cloud\\": {\\"free-tier-retained\\": \\"true\\"}}`"
- Returned for get operation
returned: on success
type: dict
sample: {}
sample: [{
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"description": "description_example",
"display_name": "display_name_example",
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"ignored_targets": [],
"ignored_assessment_ids": [],
"is_baseline": true,
"is_deviated_from_baseline": true,
"last_compared_baseline_id": "ocid1.lastcomparedbaseline.oc1..xxxxxxEXAMPLExxxxxx",
"lifecycle_state": "CREATING",
"lifecycle_details": "lifecycle_details_example",
"schedule_assessment_id": "ocid1.scheduleassessment.oc1..xxxxxxEXAMPLExxxxxx",
"schedule": "schedule_example",
"statistics": {},
"target_ids": [],
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"triggered_by": "USER",
"type": "LATEST",
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"system_tags": {}
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.data_safe import DataSafeClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class DataSafeUserAssessmentFactsHelperGen(OCIResourceFactsHelperBase):
"""Supported operations: get, list"""
def get_required_params_for_get(self):
return [
"user_assessment_id",
]
def get_required_params_for_list(self):
return [
"compartment_id",
]
def get_resource(self):
return oci_common_utils.call_with_backoff(
self.client.get_user_assessment,
user_assessment_id=self.module.params.get("user_assessment_id"),
)
def list_resources(self):
optional_list_method_params = [
"compartment_id_in_subtree",
"access_level",
"display_name",
"schedule_user_assessment_id",
"is_schedule_assessment",
"is_baseline",
"target_id",
"type",
"triggered_by",
"time_created_greater_than_or_equal_to",
"time_created_less_than",
"lifecycle_state",
"sort_order",
"sort_by",
]
optional_kwargs = dict(
(param, self.module.params[param])
for param in optional_list_method_params
if self.module.params.get(param) is not None
)
return oci_common_utils.list_all_resources(
self.client.list_user_assessments,
compartment_id=self.module.params.get("compartment_id"),
**optional_kwargs
)
DataSafeUserAssessmentFactsHelperCustom = get_custom_class(
"DataSafeUserAssessmentFactsHelperCustom"
)
class ResourceFactsHelper(
DataSafeUserAssessmentFactsHelperCustom, DataSafeUserAssessmentFactsHelperGen
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec()
module_args.update(
dict(
user_assessment_id=dict(aliases=["id"], type="str"),
compartment_id=dict(type="str"),
compartment_id_in_subtree=dict(type="bool"),
access_level=dict(type="str", choices=["RESTRICTED", "ACCESSIBLE"]),
display_name=dict(aliases=["name"], type="str"),
schedule_user_assessment_id=dict(type="str"),
is_schedule_assessment=dict(type="bool"),
is_baseline=dict(type="bool"),
target_id=dict(type="str"),
type=dict(
type="str", choices=["LATEST", "SAVED", "COMPARTMENT", "SAVE_SCHEDULE"]
),
triggered_by=dict(type="str", choices=["USER", "SYSTEM"]),
time_created_greater_than_or_equal_to=dict(type="str"),
time_created_less_than=dict(type="str"),
lifecycle_state=dict(
type="str",
choices=["CREATING", "SUCCEEDED", "UPDATING", "DELETING", "FAILED"],
),
sort_order=dict(type="str", choices=["ASC", "DESC"]),
sort_by=dict(type="str", choices=["timeCreated", "displayName"]),
)
)
module = AnsibleModule(argument_spec=module_args)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_facts_helper = ResourceFactsHelper(
module=module,
resource_type="user_assessment",
service_client_class=DataSafeClient,
namespace="data_safe",
)
result = []
if resource_facts_helper.is_get():
result = [resource_facts_helper.get()]
elif resource_facts_helper.is_list():
result = resource_facts_helper.list()
else:
resource_facts_helper.fail()
module.exit_json(user_assessments=result)
if __name__ == "__main__":
main()
|
py | 1a46a2c4337e873b47a702eadcb84111a82b7ae4 | #lists and strings are different as lists are mutable while strings are immutable
#create new strings by creating slices
name = 'Zophie a cat'
print(name)
print('let"s slice now')
newName = name[0 : 7] + 'the' +name[8 : 12]
print(newName)
#example of splicing a string vs splicing a list
#reassigning string
spam = 42
cheese = spam
spam = 100
print(spam)
print(cheese)
#Now lets reassign a list
spam = [0, 1, 2, 3, 4, 5]
print(spam)
cheese = spam
cheese[1] = 'Hello'
print(cheese)
print("spam should also be changed")
print(spam)
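# The aliasing above happens because `cheese = spam` makes both names refer to
# the same list object. A short sketch of getting an independent copy instead
# (either a slice copy or the copy module works for flat lists):
import copy
spam = [0, 1, 2, 3, 4, 5]
cheese = copy.copy(spam)  # equivalent to spam[:] for a shallow copy
cheese[1] = 'Hello'
print(cheese)  # [0, 'Hello', 2, 3, 4, 5]
print(spam)    # unchanged: [0, 1, 2, 3, 4, 5]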
|
py | 1a46a30bf3ee0852dab62729fb80ebb241f59973 | from pathlib import Path
from inxs.cli import main as _main
from tests import equal_documents
def main(*args):
_args = ()
for arg in args:
if isinstance(arg, Path):
_args += (str(arg),)
else:
_args += (arg,)
_main(_args)
# TODO case-study with this use-case
def test_mods_to_tei(datadir):
main('--inplace', datadir / 'mods_to_tei.py', datadir / 'mods_to_tei.xml')
assert equal_documents(datadir / 'mods_to_tei.xml', datadir / 'mods_to_tei_exp.xml')
|
py | 1a46a34c1a899f8ef37c3398f75f137c84bf1b82 | """Logging utilities."""
import asyncio
from asyncio.events import AbstractEventLoop
from functools import partial, wraps
import inspect
import logging
import threading
import traceback
from typing import Any, Callable, Coroutine, Optional
class HideSensitiveDataFilter(logging.Filter):
"""Filter API password calls."""
def __init__(self, text: str) -> None:
"""Initialize sensitive data filter."""
super().__init__()
self.text = text
def filter(self, record: logging.LogRecord) -> bool:
"""Hide sensitive data in messages."""
record.msg = record.msg.replace(self.text, "*******")
return True
# pylint: disable=invalid-name
class AsyncHandler:
"""Logging handler wrapper to add an async layer."""
def __init__(self, loop: AbstractEventLoop, handler: logging.Handler) -> None:
"""Initialize async logging handler wrapper."""
self.handler = handler
self.loop = loop
self._queue: asyncio.Queue = asyncio.Queue(loop=loop)
self._thread = threading.Thread(target=self._process)
# Delegate from handler
self.setLevel = handler.setLevel
self.setFormatter = handler.setFormatter
self.addFilter = handler.addFilter
self.removeFilter = handler.removeFilter
self.filter = handler.filter
self.flush = handler.flush
self.handle = handler.handle
self.handleError = handler.handleError
self.format = handler.format
self._thread.start()
def close(self) -> None:
"""Wrap close to handler."""
self.emit(None)
async def async_close(self, blocking: bool = False) -> None:
"""Close the handler.
When blocking=True, will wait till closed.
"""
await self._queue.put(None)
if blocking:
while self._thread.is_alive():
await asyncio.sleep(0)
def emit(self, record: Optional[logging.LogRecord]) -> None:
"""Process a record."""
ident = self.loop.__dict__.get("_thread_ident")
# inside eventloop
if ident is not None and ident == threading.get_ident():
self._queue.put_nowait(record)
# from a thread/executor
else:
self.loop.call_soon_threadsafe(self._queue.put_nowait, record)
def __repr__(self) -> str:
"""Return the string names."""
return str(self.handler)
def _process(self) -> None:
"""Process log in a thread."""
try:
while True:
record = asyncio.run_coroutine_threadsafe(
self._queue.get(), self.loop
).result()
if record is None:
self.handler.close()
return
self.handler.emit(record)
except asyncio.CancelledError:
self.handler.close()
def createLock(self) -> None:
"""Ignore lock stuff."""
pass
def acquire(self) -> None:
"""Ignore lock stuff."""
pass
def release(self) -> None:
"""Ignore lock stuff."""
pass
@property
def level(self) -> int:
"""Wrap property level to handler."""
return self.handler.level
@property
def formatter(self) -> Optional[logging.Formatter]:
"""Wrap property formatter to handler."""
return self.handler.formatter
@property
def name(self) -> str:
"""Wrap property set_name to handler."""
return self.handler.get_name() # type: ignore
@name.setter
def name(self, name: str) -> None:
"""Wrap property get_name to handler."""
self.handler.set_name(name) # type: ignore
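# Usage sketch (illustrative; assumes a running event loop is available as ``loop``):
# wrap a standard handler so emit() calls are queued and written on the worker thread.
#
#     handler = AsyncHandler(loop, logging.StreamHandler())
#     logging.getLogger().addHandler(handler)  # type: ignore[arg-type]
#     ...
#     await handler.async_close(blocking=True)  # drain the queue and stop the thread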
def catch_log_exception(
func: Callable[..., Any], format_err: Callable[..., Any], *args: Any
) -> Callable[[], None]:
"""Decorate a callback to catch and log exceptions."""
def log_exception(*args: Any) -> None:
module = inspect.getmodule(inspect.stack()[1][0])
if module is not None:
module_name = module.__name__
else:
            # If Python is unable to access the source files, the call stack frame
# will be missing information, so let's guard.
# https://github.com/home-assistant/home-assistant/issues/24982
module_name = __name__
# Do not print the wrapper in the traceback
frames = len(inspect.trace()) - 1
exc_msg = traceback.format_exc(-frames)
friendly_msg = format_err(*args)
logging.getLogger(module_name).error("%s\n%s", friendly_msg, exc_msg)
# Check for partials to properly determine if coroutine function
check_func = func
while isinstance(check_func, partial):
check_func = check_func.func
wrapper_func = None
if asyncio.iscoroutinefunction(check_func):
@wraps(func)
async def async_wrapper(*args: Any) -> None:
"""Catch and log exception."""
try:
await func(*args)
except Exception: # pylint: disable=broad-except
log_exception(*args)
wrapper_func = async_wrapper
else:
@wraps(func)
def wrapper(*args: Any) -> None:
"""Catch and log exception."""
try:
func(*args)
except Exception: # pylint: disable=broad-except
log_exception(*args)
wrapper_func = wrapper
return wrapper_func
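# Usage sketch (illustrative; ``update_state`` is a placeholder callback): wrap a
# callback so exceptions are logged with a friendly message instead of propagating.
#
#     def update_state(value):
#         ...
#
#     safe_update = catch_log_exception(
#         update_state, lambda value: f"Error while updating state with {value}"
#     )
#     safe_update(42)  # any exception inside update_state is logged, not raised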
def catch_log_coro_exception(
target: Coroutine[Any, Any, Any], format_err: Callable[..., Any], *args: Any
) -> Coroutine[Any, Any, Any]:
"""Decorate a coroutine to catch and log exceptions."""
async def coro_wrapper(*args: Any) -> Any:
"""Catch and log exception."""
try:
return await target
except Exception: # pylint: disable=broad-except
module = inspect.getmodule(inspect.stack()[1][0])
if module is not None:
module_name = module.__name__
else:
                # If Python is unable to access the source files, the frame
# will be missing information, so let's guard.
# https://github.com/home-assistant/home-assistant/issues/24982
module_name = __name__
# Do not print the wrapper in the traceback
frames = len(inspect.trace()) - 1
exc_msg = traceback.format_exc(-frames)
friendly_msg = format_err(*args)
logging.getLogger(module_name).error("%s\n%s", friendly_msg, exc_msg)
return None
return coro_wrapper()
def async_create_catching_coro(target: Coroutine) -> Coroutine:
"""Wrap a coroutine to catch and log exceptions.
The exception will be logged together with a stacktrace of where the
coroutine was wrapped.
target: target coroutine.
"""
trace = traceback.extract_stack()
wrapped_target = catch_log_coro_exception(
target,
lambda *args: "Exception in {} called from\n {}".format(
target.__name__, # type: ignore
"".join(traceback.format_list(trace[:-1])),
),
)
return wrapped_target
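# Usage sketch (illustrative; assumes a running event loop is available as ``loop``):
# schedule a coroutine so exceptions are logged with the stack of the wrapping call site.
#
#     async def refresh():
#         ...
#
#     loop.create_task(async_create_catching_coro(refresh()))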
|
py | 1a46a3543f0ed0292303fbe10e7946c18425aeb1 | from netaddr import IPAddress
__all__ = [
'to_server_dict',
'to_dns_zone_dict',
'to_dns_record_dict'
]
def to_server_dict(server):
public_ips = [ip['addr'] for ip in server.addresses['public']]
private_ips = [ip['addr'] for ip in server.addresses['private']]
    # Pick out the first public IPv4 and IPv6 address found
    public_ipv4 = None
    public_ipv6 = None
    for ip in public_ips:
        try:
            ip_obj = IPAddress(ip)
        except Exception:
            continue
        if not ip_obj.is_private():
            if ip_obj.version == 4 and public_ipv4 is None:
                public_ipv4 = ip
            elif ip_obj.version == 6 and public_ipv6 is None:
                public_ipv6 = ip
result = {
'id': server.id,
'name': server.name,
'status': server.status,
'image_id': server.image['id'],
'flavor_id': server.flavor['id'],
'public_ips': public_ips,
'private_ips': private_ips,
'public_ipv4': public_ipv4,
'public_ipv6': public_ipv6,
'key_name': server.key_name,
'metadata': server.metadata
}
return result
def to_dns_zone_dict(zone):
result = {
'id': zone.id,
'name': zone.name,
'email_address': zone.emailAddress,
'ttl': zone.ttl
}
return result
def to_dns_record_dict(record):
result = {
'id': record.id,
'name': record.name,
'type': record.type,
'data': record.data,
'ttl': record.ttl
}
return result
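# Usage sketch (illustrative; ``server``, ``zone`` and ``record`` stand in for the
# SDK objects these helpers normally receive):
#
#     info = to_server_dict(server)
#     print(info['public_ipv4'], info['public_ipv6'])
#     zone_info = to_dns_zone_dict(zone)
#     record_info = to_dns_record_dict(record)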
|
py | 1a46a3fcbe194b272fe25efd8bf400d808ad2632 | import json
import base64
import aiohttp
import threading
from uuid import uuid4
from time import timezone, sleep
from typing import BinaryIO, Union
from time import time as timestamp
from locale import getdefaultlocale as locale
import asyncio
from .lib.util import exceptions, headers, device, objects, helpers
from .socket import Callbacks, SocketHandler
device = device.DeviceGenerator()
class Client(Callbacks, SocketHandler):
def __init__(self, deviceId: str = None, socketDebugging = False):
self.api = "https://service.narvii.com/api/v1"
self.authenticated = False
self.configured = False
self.user_agent = device.user_agent
if deviceId is not None: self.device_id = deviceId
else: self.device_id = device.device_id
SocketHandler.__init__(self, self, debug=socketDebugging)
Callbacks.__init__(self, self)
self.json = None
self.sid = None
self.userId = None
self.account: objects.UserProfile = objects.UserProfile(None)
self.profile: objects.UserProfile = objects.UserProfile(None)
self.session = aiohttp.ClientSession()
def __del__(self):
try:
loop = asyncio.get_event_loop()
loop.create_task(self._close_session())
except RuntimeError:
loop = asyncio.new_event_loop()
loop.run_until_complete(self._close_session())
async def _close_session(self):
        if not self.session.closed: await self.session.close()
def parse_headers(self, data = None):
        if data is not None:
            return headers.Headers(data=data, deviceId=self.device_id).headers
        else:
            return headers.Headers(deviceId=self.device_id).headers
async def join_voice_chat(self, comId: str, chatId: str, joinType: int = 1):
"""
Joins a Voice Chat
**Parameters**
- **comId** : ID of the Community
- **chatId** : ID of the Chat
"""
# Made by Light, Ley and Phoenix
data = {
"o": {
"ndcId": int(comId),
"threadId": chatId,
"joinRole": joinType,
"id": "2154531" # Need to change?
},
"t": 112
}
data = json.dumps(data)
await self.send(data)
async def join_video_chat(self, comId: str, chatId: str, joinType: int = 1):
"""
Joins a Video Chat
**Parameters**
- **comId** : ID of the Community
- **chatId** : ID of the Chat
"""
# Made by Light, Ley and Phoenix
data = {
"o": {
"ndcId": int(comId),
"threadId": chatId,
"joinRole": joinType,
"channelType": 5,
"id": "2154531" # Need to change?
},
"t": 108
}
data = json.dumps(data)
await self.send(data)
async def join_video_chat_as_viewer(self, comId: str, chatId: str):
data = {
"o":
{
"ndcId": int(comId),
"threadId": chatId,
"joinRole": 2,
"id": "72446"
},
"t": 112
}
data = json.dumps(data)
await self.send(data)
async def run_vc(self, comId: str, chatId: str, joinType: str):
while self.active:
data = {
"o": {
"ndcId": comId,
"threadId": chatId,
"joinRole": joinType,
"id": "2154531" # Need to change?
},
"t": 112
}
data = json.dumps(data)
await self.send(data)
            await asyncio.sleep(1)
async def start_vc(self, comId: str, chatId: str, joinType: int = 1):
data = {
"o": {
"ndcId": comId,
"threadId": chatId,
"joinRole": joinType,
"id": "2154531" # Need to change?
},
"t": 112
}
data = json.dumps(data)
await self.send(data)
data = {
"o": {
"ndcId": comId,
"threadId": chatId,
"channelType": 1,
"id": "2154531" # Need to change?
},
"t": 108
}
data = json.dumps(data)
await self.send(data)
self.active = True
        asyncio.create_task(self.run_vc(comId, chatId, joinType))
async def end_vc(self, comId: str, chatId: str, joinType: int = 2):
self.active = False
data = {
"o": {
"ndcId": comId,
"threadId": chatId,
"joinRole": joinType,
"id": "2154531" # Need to change?
},
"t": 112
}
data = json.dumps(data)
await self.send(data)
async def login_sid(self, SID: str):
"""
Login into an account with an SID
**Parameters**
- **SID** : SID of the account
"""
uId = helpers.sid_to_uid(SID)
self.authenticated = True
self.sid = SID
self.userId = uId
self.account: objects.UserProfile = await self.get_user_info(uId)
self.profile: objects.UserProfile = await self.get_user_info(uId)
headers.sid = self.sid
await self.startup()
async def login(self, email: str, password: str):
"""
Login into an account.
**Parameters**
- **email** : Email of the account.
- **password** : Password of the account.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"email": email,
"v": 2,
"secret": f"0 {password}",
"deviceID": self.device_id,
"clientType": 100,
"action": "normal",
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/auth/login", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else:
self.authenticated = True
self.json = json.loads(await response.text())
self.sid = self.json["sid"]
self.userId = self.json["account"]["uid"]
self.account: objects.UserProfile = objects.UserProfile(self.json["account"]).UserProfile
self.profile: objects.UserProfile = objects.UserProfile(self.json["userProfile"]).UserProfile
headers.sid = self.sid
await self.startup()
return response.status
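    # Usage sketch (illustrative; the email and password are placeholders):
    #
    #     client = Client()
    #     await client.login("[email protected]", "password")
    #     print(client.profile.nickname)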
async def register(self, nickname: str, email: str, password: str, verificationCode: str, deviceId: str = device.device_id):
"""
Register an account.
**Parameters**
- **nickname** : Nickname of the account.
- **email** : Email of the account.
- **password** : Password of the account.
- **verificationCode** : Verification code.
- **deviceId** : The device id being registered to.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"secret": f"0 {password}",
"deviceID": deviceId,
"email": email,
"clientType": 100,
"nickname": nickname,
"latitude": 0,
"longitude": 0,
"address": None,
"clientCallbackURL": "narviiapp://relogin",
"validationContext": {
"data": {
"code": verificationCode
},
"type": 1,
"identity": email
},
"type": 1,
"identity": email,
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/auth/register", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def restore(self, email: str, password: str):
"""
Restore a deleted account.
**Parameters**
- **email** : Email of the account.
- **password** : Password of the account.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"secret": f"0 {password}",
"deviceID": device.device_id,
"email": email,
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/account/delete-request/cancel", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def logout(self):
"""
Logout from an account.
**Parameters**
- No parameters required.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"deviceID": self.device_id,
"clientType": 100,
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/auth/logout", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else:
self.authenticated = False
self.json = None
self.sid = None
self.userId = None
                self.account = None
                self.profile = None
headers.sid = None
await self.close()
await self.session.close()
return response.status
async def configure(self, age: int, gender: str):
"""
Configure the settings of an account.
**Parameters**
- **age** : Age of the account. Minimum is 13.
- **gender** : Gender of the account.
- ``Male``, ``Female`` or ``Non-Binary``
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if gender.lower() == "male": gender = 1
elif gender.lower() == "female": gender = 2
elif gender.lower() == "non-binary": gender = 255
else: raise exceptions.SpecifyType()
if age <= 12: raise exceptions.AgeTooLow()
data = json.dumps({
"age": age,
"gender": gender,
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/persona/profile/basic", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def verify(self, email: str, code: str):
"""
Verify an account.
**Parameters**
- **email** : Email of the account.
- **code** : Verification code.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"validationContext": {
"type": 1,
"identity": email,
"data": {"code": code}},
"deviceID": device.device_id,
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/auth/check-security-validation", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def request_verify_code(self, email: str, resetPassword: bool = False):
"""
        Request a verification code for the targeted email.
**Parameters**
- **email** : Email of the account.
- **resetPassword** : If the code should be for Password Reset.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = {
"identity": email,
"type": 1,
"deviceID": device.device_id
}
if resetPassword is True:
data["level"] = 2
data["purpose"] = "reset-password"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/auth/request-security-validation", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def activate_account(self, email: str, code: str):
"""
Activate an account.
**Parameters**
- **email** : Email of the account.
- **code** : Verification code.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"type": 1,
"identity": email,
"data": {"code": code},
"deviceID": device.device_id
})
async with self.session.post(f"{self.api}/g/s/auth/activate-email", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
# Provided by "𝑰 𝑵 𝑻 𝑬 𝑹 𝑳 𝑼 𝑫 𝑬#4082"
async def delete_account(self, password: str):
"""
Delete an account.
**Parameters**
- **password** : Password of the account.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"deviceID": device.device_id,
"secret": f"0 {password}"
})
async with self.session.post(f"{self.api}/g/s/account/delete-request", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def change_password(self, email: str, password: str, code: str):
"""
Change password of an account.
**Parameters**
- **email** : Email of the account.
- **password** : Password of the account.
- **code** : Verification code.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"updateSecret": f"0 {password}",
"emailValidationContext": {
"data": {
"code": code
},
"type": 1,
"identity": email,
"level": 2,
"deviceID": device.device_id
},
"phoneNumberValidationContext": None,
"deviceID": device.device_id
})
async with self.session.post(f"{self.api}/g/s/auth/reset-password", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def check_device(self, deviceId: str):
"""
Check if the Device ID is valid.
**Parameters**
- **deviceId** : ID of the Device.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"deviceID": deviceId,
"bundleID": "com.narvii.amino.master",
"clientType": 100,
"timezone": -timezone // 1000,
"systemPushEnabled": True,
"locale": locale()[0],
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/device", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def get_account_info(self):
async with self.session.get(f"{self.api}/g/s/account", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.UserProfile(json.loads(await response.text())["account"]).UserProfile
async def upload_media(self, file: BinaryIO, fileType: str):
"""
Upload file to the amino servers.
**Parameters**
            - **file** : File to be uploaded.
            - **fileType** : Type of the file, ``audio`` or ``image``.
**Returns**
- **Success** : Url of the file uploaded to the server.
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if fileType == "audio":
t = "audio/aac"
elif fileType == "image":
t = "image/jpg"
else: raise exceptions.SpecifyType(fileType)
data = file.read()
async with self.session.post(f"{self.api}/g/s/media/upload", headers=headers.Headers(type=t, data=data, deviceId=self.device_id).headers, data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return json.loads(await response.text())["mediaValue"]
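    # Usage sketch (illustrative; the file path is a placeholder): upload a local
    # image and reuse the returned media URL, e.g. for a chat background or embed.
    #
    #     with open("icon.jpg", "rb") as file:
    #         media_url = await client.upload_media(file, "image")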
def handle_socket_message(self, data):
return self.resolve(data)
async def get_eventlog(self, language: str = "en"):
async with self.session.get(f"{self.api}/g/s/eventlog/profile?language={language}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return json.loads(await response.text())
async def sub_clients(self, start: int = 0, size: int = 25):
"""
List of Communities the account is in.
**Parameters**
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`Community List <amino.lib.util.objects.CommunityList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if not self.authenticated: raise exceptions.NotLoggedIn()
async with self.session.get(f"{self.api}/g/s/community/joined?v=1&start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.CommunityList(json.loads(await response.text())["communityList"]).CommunityList
async def sub_clients_profile(self, start: int = 0, size: int = 25):
if not self.authenticated: raise exceptions.NotLoggedIn()
async with self.session.get(f"{self.api}/g/s/community/joined?v=1&start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return json.loads(await response.text())["communityList"]
async def get_user_info(self, userId: str):
"""
        Information of a User.
**Parameters**
- **userId** : ID of the User.
**Returns**
- **Success** : :meth:`User Object <amino.lib.util.objects.UserProfile>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/user-profile/{userId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.UserProfile(json.loads(await response.text())["userProfile"]).UserProfile
async def get_chat_threads(self, start: int = 0, size: int = 25):
"""
List of Chats the account is in.
**Parameters**
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`Chat List <amino.lib.util.objects.ThreadList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/chat/thread?type=joined-me&start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.ThreadList(json.loads(await response.text())["threadList"]).ThreadList
async def get_chat_thread(self, chatId: str):
"""
        Get the Chat Object from a Chat ID.
**Parameters**
- **chatId** : ID of the Chat.
**Returns**
- **Success** : :meth:`Chat Object <amino.lib.util.objects.Thread>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/chat/thread/{chatId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.Thread(json.loads(await response.text())["thread"]).Thread
async def get_chat_users(self, chatId: str, start: int = 0, size: int = 25):
async with self.session.get(f"{self.api}/g/s/chat/thread/{chatId}/member?start={start}&size={size}&type=default&cv=1.2", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.UserProfileList(json.loads(await response.text())["memberList"]).UserProfileList
async def join_chat(self, chatId: str):
"""
        Join a Chat.
**Parameters**
- **chatId** : ID of the Chat.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def leave_chat(self, chatId: str):
"""
        Leave a Chat.
**Parameters**
- **chatId** : ID of the Chat.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.delete(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def start_chat(self, userId: Union[str, list], message: str, title: str = None, content: str = None, isGlobal: bool = False, publishToGlobal: bool = False):
"""
        Start a Chat with a User or List of Users.
**Parameters**
- **userId** : ID of the User or List of User IDs.
- **message** : Starting Message.
- **title** : Title of Group Chat.
- **content** : Content of Group Chat.
- **isGlobal** : If Group Chat is Global.
- **publishToGlobal** : If Group Chat should show in Global.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if isinstance(userId, str): userIds = [userId]
elif isinstance(userId, list): userIds = userId
else: raise exceptions.WrongType()
data = {
"title": title,
"inviteeUids": userIds,
"initialMessageContent": message,
"content": content,
"timestamp": int(timestamp() * 1000)
}
if isGlobal is True: data["type"] = 2; data["eventSource"] = "GlobalComposeMenu"
else: data["type"] = 0
if publishToGlobal is True: data["publishToGlobal"] = 1
else: data["publishToGlobal"] = 0
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/chat/thread", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
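    # Usage sketch (illustrative; the user IDs are placeholders): start a group chat
    # with two users.
    #
    #     await client.start_chat(["userId1", "userId2"], message="Hello!", title="My chat")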
async def invite_to_chat(self, userId: Union[str, list], chatId: str):
"""
Invite a User or List of Users to a Chat.
**Parameters**
- **userId** : ID of the User or List of User IDs.
- **chatId** : ID of the Chat.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if isinstance(userId, str): userIds = [userId]
elif isinstance(userId, list): userIds = userId
else: raise exceptions.WrongType(type(userId))
data = json.dumps({
"uids": userIds,
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/member/invite", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def kick(self, userId: str, chatId: str, allowRejoin: bool = True):
        allowRejoin = 1 if allowRejoin else 0
async with self.session.delete(f"{self.api}/g/s/chat/thread/{chatId}/member/{userId}?allowRejoin={allowRejoin}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def get_chat_messages(self, chatId: str, size: int = 25, pageToken: str = None):
"""
        List of Messages from a Chat.
        **Parameters**
            - **chatId** : ID of the Chat.
            - *size* : Size of the list.
            - *pageToken* : Next Page Token.
**Returns**
- **Success** : :meth:`Message List <amino.lib.util.objects.MessageList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if pageToken is not None: url = f"{self.api}/g/s/chat/thread/{chatId}/message?v=2&pagingType=t&pageToken={pageToken}&size={size}"
else: url = f"{self.api}/g/s/chat/thread/{chatId}/message?v=2&pagingType=t&size={size}"
async with self.session.get(url, headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.GetMessages(json.loads(await response.text())).GetMessages
async def get_message_info(self, chatId: str, messageId: str):
"""
        Information of a Message from a Chat.
**Parameters**
- **chatId** : ID of the Chat.
- **messageId** : ID of the Message.
**Returns**
- **Success** : :meth:`Message Object <amino.lib.util.objects.Message>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/chat/thread/{chatId}/message/{messageId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.Message(json.loads(await response.text())["message"]).Message
async def get_community_info(self, comId: str):
"""
        Information of a Community.
**Parameters**
- **comId** : ID of the Community.
**Returns**
- **Success** : :meth:`Community Object <amino.lib.util.objects.Community>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s-x{comId}/community/info?withInfluencerList=1&withTopicList=true&influencerListOrderStrategy=fansCount", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.Community(json.loads(await response.text())["community"]).Community
async def search_community(self, aminoId: str):
"""
        Search a Community by its Amino ID.
**Parameters**
- **aminoId** : Amino ID of the Community.
**Returns**
- **Success** : :meth:`Community List <amino.lib.util.objects.CommunityList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/search/amino-id-and-link?q={aminoId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else:
response = json.loads(await response.text())["resultList"]
if len(response) == 0: raise exceptions.CommunityNotFound(aminoId)
else: return objects.CommunityList([com["refObject"] for com in response]).CommunityList
async def get_user_following(self, userId: str, start: int = 0, size: int = 25):
"""
List of Users that the User is Following.
**Parameters**
- **userId** : ID of the User.
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`User List <amino.lib.util.objects.UserProfileList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/user-profile/{userId}/joined?start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.UserProfileList(json.loads(await response.text())["userProfileList"]).UserProfileList
async def get_user_followers(self, userId: str, start: int = 0, size: int = 25):
"""
List of Users that are Following the User.
**Parameters**
- **userId** : ID of the User.
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`User List <amino.lib.util.objects.UserProfileList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/user-profile/{userId}/member?start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.UserProfileList(json.loads(await response.text())["userProfileList"]).UserProfileList
async def get_user_visitors(self, userId: str, start: int = 0, size: int = 25):
"""
List of Users that Visited the User.
**Parameters**
- **userId** : ID of the User.
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`Visitors List <amino.lib.util.objects.VisitorsList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/user-profile/{userId}/visitors?start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.VisitorsList(json.loads(await response.text())).VisitorsList
async def get_blocked_users(self, start: int = 0, size: int = 25):
"""
List of Users that the User Blocked.
**Parameters**
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`Users List <amino.lib.util.objects.UserProfileList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/block?start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.UserProfileList(json.loads(await response.text())["userProfileList"]).UserProfileList
async def get_blog_info(self, blogId: str = None, wikiId: str = None, quizId: str = None, fileId: str = None):
if blogId or quizId:
if quizId is not None: blogId = quizId
async with self.session.get(f"{self.api}/g/s/blog/{blogId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.GetBlogInfo(json.loads(await response.text())).GetBlogInfo
elif wikiId:
async with self.session.get(f"{self.api}/g/s/item/{wikiId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.GetWikiInfo(json.loads(await response.text())).GetWikiInfo
elif fileId:
async with self.session.get(f"{self.api}/g/s/shared-folder/files/{fileId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.SharedFolderFile(json.loads(await response.text())["file"]).SharedFolderFile
else: raise exceptions.SpecifyType()
async def get_blog_comments(self, blogId: str = None, wikiId: str = None, quizId: str = None, fileId: str = None, sorting: str = "newest", start: int = 0, size: int = 25):
if sorting == "newest": sorting = "newest"
elif sorting == "oldest": sorting = "oldest"
elif sorting == "top": sorting = "vote"
else: raise exceptions.WrongType(sorting)
if blogId or quizId:
if quizId is not None: blogId = quizId
url = f"{self.api}/g/s/blog/{blogId}/comment?sort={sorting}&start={start}&size={size}"
elif wikiId: url = f"{self.api}/g/s/item/{wikiId}/comment?sort={sorting}&start={start}&size={size}"
elif fileId: url = f"{self.api}/g/s/shared-folder/files/{fileId}/comment?sort={sorting}&start={start}&size={size}"
else: raise exceptions.SpecifyType()
async with self.session.get(url, headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.CommentList(json.loads(await response.text())["commentList"]).CommentList
async def get_blocker_users(self, start: int = 0, size: int = 25):
"""
List of Users that are Blocking the User.
**Parameters**
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`List of User IDs <None>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/block/full-list?start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return json.loads(await response.text())["blockerUidList"]
async def get_wall_comments(self, userId: str, sorting: str, start: int = 0, size: int = 25):
"""
        List of Wall Comments of a User.
**Parameters**
- **userId** : ID of the User.
- **sorting** : Order of the Comments.
- ``newest``, ``oldest``, ``top``
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`Comments List <amino.lib.util.objects.CommentList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if sorting.lower() == "newest": sorting = "newest"
elif sorting.lower() == "oldest": sorting = "oldest"
elif sorting.lower() == "top": sorting = "vote"
else: raise exceptions.WrongType(sorting)
async with self.session.get(f"{self.api}/g/s/user-profile/{userId}/g-comment?sort={sorting}&start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.CommentList(json.loads(await response.text())["commentList"]).CommentList
async def flag(self, reason: str, flagType: int, userId: str = None, blogId: str = None, wikiId: str = None, asGuest: bool = False):
"""
Flag a User, Blog or Wiki.
**Parameters**
- **reason** : Reason of the Flag.
- **flagType** : Type of the Flag.
- **userId** : ID of the User.
- **blogId** : ID of the Blog.
- **wikiId** : ID of the Wiki.
- *asGuest* : Execute as a Guest.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if reason is None: raise exceptions.ReasonNeeded
if flagType is None: raise exceptions.FlagTypeNeeded
data = {
"flagType": flagType,
"message": reason,
"timestamp": int(timestamp() * 1000)
}
if userId:
data["objectId"] = userId
data["objectType"] = 0
elif blogId:
data["objectId"] = blogId
data["objectType"] = 1
elif wikiId:
data["objectId"] = wikiId
data["objectType"] = 2
else: raise exceptions.SpecifyType
if asGuest: flg = "g-flag"
else: flg = "flag"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/{flg}", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def send_message(self, chatId: str, message: str = None, messageType: int = 0, file: BinaryIO = None, fileType: str = None, replyTo: str = None, mentionUserIds: list = None, stickerId: str = None, embedId: str = None, embedType: int = None, embedLink: str = None, embedTitle: str = None, embedContent: str = None, embedImage: BinaryIO = None):
"""
Send a Message to a Chat.
**Parameters**
- **message** : Message to be sent
- **chatId** : ID of the Chat.
- **file** : File to be sent.
- **fileType** : Type of the file.
- ``audio``, ``image``, ``gif``
- **messageType** : Type of the Message.
- **mentionUserIds** : List of User IDS to mention. '@' needed in the Message.
- **replyTo** : Message ID to reply to.
- **stickerId** : Sticker ID to be sent.
- **embedTitle** : Title of the Embed.
- **embedContent** : Content of the Embed.
- **embedLink** : Link of the Embed.
- **embedImage** : Image of the Embed.
- **embedId** : ID of the Embed.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if message is not None and file is None:
message = message.replace("<$", "").replace("$>", "")
mentions = []
if mentionUserIds:
for mention_uid in mentionUserIds:
mentions.append({"uid": mention_uid})
if embedImage:
embedImage = [[100, await self.upload_media(embedImage, "image"), None]]
data = {
"type": messageType,
"content": message,
"clientRefId": int(timestamp() / 10 % 1000000000),
"attachedObject": {
"objectId": embedId,
"objectType": embedType,
"link": embedLink,
"title": embedTitle,
"content": embedContent,
"mediaList": embedImage
},
"extensions": {"mentionedArray": mentions},
"timestamp": int(timestamp() * 1000)
}
if replyTo: data["replyMessageId"] = replyTo
if stickerId:
data["content"] = None
data["stickerId"] = stickerId
data["type"] = 3
if file:
data["content"] = None
if fileType == "audio":
data["type"] = 2
data["mediaType"] = 110
elif fileType == "image":
data["mediaType"] = 100
data["mediaUploadValueContentType"] = "image/jpg"
data["mediaUhqEnabled"] = True
elif fileType == "gif":
data["mediaType"] = 100
data["mediaUploadValueContentType"] = "image/gif"
data["mediaUhqEnabled"] = True
else: raise exceptions.SpecifyType()
data["mediaUploadValue"] = base64.b64encode(file.read()).decode()
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/message", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
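    # Usage sketch (illustrative; chatId, messageId and the file path are placeholders):
    # a plain text message, a reply and an image upload.
    #
    #     await client.send_message(chatId, "Hello!")
    #     await client.send_message(chatId, "Replying", replyTo=messageId)
    #     with open("photo.jpg", "rb") as file:
    #         await client.send_message(chatId, file=file, fileType="image")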
async def delete_message(self, chatId: str, messageId: str, asStaff: bool = False, reason: str = None):
"""
Delete a Message from a Chat.
**Parameters**
- **messageId** : ID of the Message.
- **chatId** : ID of the Chat.
            - **asStaff** : If executed as a Staff member (Leader or Curator).
- **reason** : Reason of the action to show on the Moderation History.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = {
"adminOpName": 102,
"adminOpNote": {"content": reason},
"timestamp": int(timestamp() * 1000)
}
data = json.dumps(data)
if not asStaff:
async with self.session.delete(f"{self.api}/g/s/chat/thread/{chatId}/message/{messageId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
else:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/message/{messageId}/admin", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def mark_as_read(self, chatId: str, messageId: str):
"""
Mark a Message from a Chat as Read.
**Parameters**
- **messageId** : ID of the Message.
- **chatId** : ID of the Chat.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"messageId": messageId,
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/mark-as-read", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def edit_chat(self, chatId: str, doNotDisturb: bool = None, pinChat: bool = None, title: str = None, icon: str = None, backgroundImage: BinaryIO = None, content: str = None, announcement: str = None, coHosts: list = None, keywords: list = None, pinAnnouncement: bool = None, publishToGlobal: bool = None, canTip: bool = None, viewOnly: bool = None, canInvite: bool = None, fansOnly: bool = None):
"""
        Edit a Chat.
**Parameters**
- **chatId** : ID of the Chat.
- **title** : Title of the Chat.
- **content** : Content of the Chat.
- **icon** : Icon of the Chat.
- **backgroundImage** : Background Image of the Chat.
- **announcement** : Announcement of the Chat.
            - **pinAnnouncement** : If the Chat Announcement should be Pinned or not.
- **coHosts** : List of User IDS to be Co-Host.
- **keywords** : List of Keywords of the Chat.
- **viewOnly** : If the Chat should be on View Only or not.
- **canTip** : If the Chat should be Tippable or not.
- **canInvite** : If the Chat should be Invitable or not.
- **fansOnly** : If the Chat should be Fans Only or not.
- **publishToGlobal** : If the Chat should show on Public Chats or not.
- **doNotDisturb** : If the Chat should Do Not Disturb or not.
            - **pinChat** : If the Chat should be Pinned or not.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = {"timestamp": int(timestamp() * 1000)}
if title: data["title"] = title
if content: data["content"] = content
if icon: data["icon"] = icon
if keywords: data["keywords"] = keywords
if announcement: data["extensions"] = {"announcement": announcement}
if pinAnnouncement: data["extensions"] = {"pinAnnouncement": pinAnnouncement}
if fansOnly: data["extensions"] = {"fansOnly": fansOnly}
if publishToGlobal: data["publishToGlobal"] = 0
if not publishToGlobal: data["publishToGlobal"] = 1
res = []
if doNotDisturb is not None:
if doNotDisturb:
data = json.dumps({"alertOption": 2, "timestamp": int(timestamp() * 1000)})
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}/alert", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if not doNotDisturb:
data = json.dumps({"alertOption": 1, "timestamp": int(timestamp() * 1000)})
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}/alert", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if pinChat is not None:
if pinChat:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/pin", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if not pinChat:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/unpin", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if backgroundImage is not None:
data = json.dumps({"media": [100, await self.upload_media(backgroundImage, "image"), None], "timestamp": int(timestamp() * 1000)})
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/member/{self.userId}/background", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if coHosts is not None:
data = json.dumps({"uidList": coHosts, "timestamp": int(timestamp() * 1000)})
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/co-host", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if viewOnly is not None:
if viewOnly:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/view-only/enable", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if not viewOnly:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/view-only/disable", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if canInvite is not None:
if canInvite:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/members-can-invite/enable", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if not canInvite:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/members-can-invite/disable", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if canTip is not None:
if canTip:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/tipping-perm-status/enable", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
if not canTip:
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/tipping-perm-status/disable", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: res.append(exceptions.CheckException(json.loads(await response.text())))
else: res.append(response.status)
return res
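    # Usage sketch (illustrative; chatId is a placeholder): rename a chat and enable
    # view-only mode; the method returns one status (or exception) per sub-request.
    #
    #     results = await client.edit_chat(chatId, title="New title", viewOnly=True)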
async def visit(self, userId: str):
"""
        Visit a User.
**Parameters**
- **userId** : ID of the User.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/user-profile/{userId}?action=visit", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def send_coins(self, coins: int, blogId: str = None, chatId: str = None, objectId: str = None, transactionId: str = None):
url = None
if transactionId is None: transactionId = str(uuid4())
data = {
"coins": coins,
"tippingContext": {"transactionId": transactionId},
"timestamp": int(timestamp() * 1000)
}
if blogId is not None: url = f"{self.api}/g/s/blog/{blogId}/tipping"
if chatId is not None: url = f"{self.api}/g/s/chat/thread/{chatId}/tipping"
if objectId is not None:
data["objectId"] = objectId
data["objectType"] = 2
url = f"{self.api}/g/s/tipping"
if url is None: raise exceptions.SpecifyType()
data = json.dumps(data)
async with self.session.post(url, headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def follow(self, userId: Union[str, list]):
"""
        Follow a User or Multiple Users.
**Parameters**
- **userId** : ID of the User or List of IDs of the Users.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if isinstance(userId, str):
async with self.session.post(f"{self.api}/g/s/user-profile/{userId}/member", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
elif isinstance(userId, list):
data = json.dumps({"targetUidList": userId, "timestamp": int(timestamp() * 1000)})
async with self.session.post(f"{self.api}/g/s/user-profile/{self.userId}/joined", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
else: raise exceptions.WrongType(type(userId))
async def unfollow(self, userId: str):
"""
        Unfollow a User.
**Parameters**
- **userId** : ID of the User.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.delete(f"{self.api}/g/s/user-profile/{userId}/member/{self.userId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def block(self, userId: str):
"""
        Block a User.
**Parameters**
- **userId** : ID of the User.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.post(f"{self.api}/g/s/block/{userId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def unblock(self, userId: str):
"""
        Unblock a User.
**Parameters**
- **userId** : ID of the User.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.delete(f"{self.api}/g/s/block/{userId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def join_community(self, comId: str, invitationCode: str = None):
"""
Join a Community.
**Parameters**
- **comId** : ID of the Community.
- **invitationCode** : Invitation Code.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = {"timestamp": int(timestamp() * 1000)}
if invitationCode: data["invitationId"] = await self.link_identify(invitationCode)
data = json.dumps(data)
async with self.session.post(f"{self.api}/x{comId}/s/community/join", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def request_join_community(self, comId: str, message: str = None):
"""
Request to join a Community.
**Parameters**
- **comId** : ID of the Community.
- **message** : Message to be sent.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({"message": message, "timestamp": int(timestamp() * 1000)})
async with self.session.post(f"{self.api}/x{comId}/s/community/membership-request", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def leave_community(self, comId: str):
"""
Leave a Community.
**Parameters**
- **comId** : ID of the Community.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.post(f"{self.api}/x{comId}/s/community/leave", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def flag_community(self, comId: str, reason: str, flagType: int, isGuest: bool = False):
"""
Flag a Community.
**Parameters**
- **comId** : ID of the Community.
- **reason** : Reason of the Flag.
- **flagType** : Type of Flag.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if reason is None: raise exceptions.ReasonNeeded()
if flagType is None: raise exceptions.FlagTypeNeeded()
data = json.dumps({
"objectId": comId,
"objectType": 16,
"flagType": flagType,
"message": reason,
"timestamp": int(timestamp() * 1000)
})
if isGuest: flg = "g-flag"
else: flg = "flag"
async with self.session.post(f"{self.api}/x{comId}/s/{flg}", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def edit_profile(self, nickname: str = None, content: str = None, icon: BinaryIO = None, backgroundColor: str = None, backgroundImage: str = None, defaultBubbleId: str = None):
"""
Edit account's Profile.
**Parameters**
- **nickname** : Nickname of the Profile.
- **content** : Biography of the Profile.
- **icon** : Icon of the Profile.
- **backgroundImage** : Url of the Background Picture of the Profile.
- **backgroundColor** : Hexadecimal Background Color of the Profile.
- **defaultBubbleId** : Chat bubble ID.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = {
"address": None,
"latitude": 0,
"longitude": 0,
"mediaList": None,
"eventSource": "UserProfileView",
"timestamp": int(timestamp() * 1000)
}
if nickname: data["nickname"] = nickname
if icon: data["icon"] = await self.upload_media(icon, "image")
if content: data["content"] = content
if backgroundColor: data["extensions"] = {"style": {"backgroundColor": backgroundColor}}
if backgroundImage: data["extensions"] = {"style": {"backgroundMediaList": [[100, backgroundImage, None, None, None]]}}
if defaultBubbleId: data["extensions"] = {"defaultBubbleId": defaultBubbleId}
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/user-profile/{self.userId}", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
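    # Usage sketch (illustrative; the file path is a placeholder): update the nickname,
    # bio and icon of the logged-in account.
    #
    #     with open("avatar.jpg", "rb") as file:
    #         await client.edit_profile(nickname="New name", content="New bio", icon=file)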
async def set_privacy_status(self, isAnonymous: bool = False, getNotifications: bool = False):
"""
Edit account's Privacy Status.
**Parameters**
- **isAnonymous** : If visibility should be Anonymous or not.
- **getNotifications** : If account should get new Visitors Notifications.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = {"timestamp": int(timestamp() * 1000)}
if not isAnonymous: data["privacyMode"] = 1
if isAnonymous: data["privacyMode"] = 2
if not getNotifications: data["notificationStatus"] = 2
        if getNotifications: data["notificationStatus"] = 1
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/account/visit-settings", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def set_amino_id(self, aminoId: str):
"""
Edit account's Amino ID.
**Parameters**
- **aminoId** : Amino ID of the Account.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({"aminoId": aminoId, "timestamp": int(timestamp() * 1000)})
async with self.session.post(f"{self.api}/g/s/account/change-amino-id", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def get_linked_communities(self, userId: str):
"""
        Get a List of Linked Communities of a User.
**Parameters**
- **userId** : ID of the User.
**Returns**
- **Success** : :meth:`Community List <amino.lib.util.objects.CommunityList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/user-profile/{userId}/linked-community", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.CommunityList(json.loads(await response.text())["linkedCommunityList"]).CommunityList
async def get_unlinked_communities(self, userId: str):
"""
        Get a List of Unlinked Communities of a User.
**Parameters**
- **userId** : ID of the User.
**Returns**
- **Success** : :meth:`Community List <amino.lib.util.objects.CommunityList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/user-profile/{userId}/linked-community", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.CommunityList(json.loads(await response.text())["unlinkedCommunityList"]).CommunityList
async def reorder_linked_communities(self, comIds: list):
"""
Reorder List of Linked Communities.
**Parameters**
- **comIds** : IDS of the Communities.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({"ndcIds": comIds, "timestamp": int(timestamp() * 1000)})
async with self.session.post(f"{self.api}/g/s/user-profile/{self.userId}/linked-community/reorder", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def add_linked_community(self, comId: str):
"""
Add a Linked Community to your profile.
**Parameters**
- **comId** : ID of the Community.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.post(f"{self.api}/g/s/user-profile/{self.userId}/linked-community/{comId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def remove_linked_community(self, comId: str):
"""
Remove a Linked Community from your profile.
**Parameters**
- **comId** : ID of the Community.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.delete(f"{self.api}/g/s/user-profile/{self.userId}/linked-community/{comId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def comment(self, message: str, userId: str = None, blogId: str = None, wikiId: str = None, replyTo: str = None):
"""
Comment on a User's Wall, Blog or Wiki.
**Parameters**
- **message** : Message to be sent.
- **userId** : ID of the User. (for Walls)
- **blogId** : ID of the Blog. (for Blogs)
- **wikiId** : ID of the Wiki. (for Wikis)
- **replyTo** : ID of the Comment to Reply to.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if message is None: raise exceptions.MessageNeeded
data = {
"content": message,
"stickerId": None,
"type": 0,
"timestamp": int(timestamp() * 1000)
}
if replyTo: data["respondTo"] = replyTo
if userId:
data["eventSource"] = "UserProfileView"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/user-profile/{userId}/g-comment", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
elif blogId:
data["eventSource"] = "PostDetailView"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/blog/{blogId}/g-comment", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
elif wikiId:
data["eventSource"] = "PostDetailView"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/item/{wikiId}/g-comment", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
else: raise exceptions.SpecifyType()
async def delete_comment(self, commentId: str, userId: str = None, blogId: str = None, wikiId: str = None):
"""
Delete a Comment on a User's Wall, Blog or Wiki.
**Parameters**
- **commentId** : ID of the Comment.
- **userId** : ID of the User. (for Walls)
- **blogId** : ID of the Blog. (for Blogs)
- **wikiId** : ID of the Wiki. (for Wikis)
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if userId: url = f"{self.api}/g/s/user-profile/{userId}/g-comment/{commentId}"
elif blogId: url = f"{self.api}/g/s/blog/{blogId}/g-comment/{commentId}"
elif wikiId: url = f"{self.api}/g/s/item/{wikiId}/g-comment/{commentId}"
else: raise exceptions.SpecifyType()
async with self.session.delete(url, headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def like_blog(self, blogId: Union[str, list] = None, wikiId: str = None):
"""
Like a Blog, Multiple Blogs or a Wiki.
**Parameters**
- **blogId** : ID of the Blog or List of IDs of the Blogs. (for Blogs)
- **wikiId** : ID of the Wiki. (for Wikis)
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = {
"value": 4,
"timestamp": int(timestamp() * 1000)
}
if blogId:
if isinstance(blogId, str):
data["eventSource"] = "UserProfileView"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/blog/{blogId}/g-vote?cv=1.2", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
elif isinstance(blogId, list):
data["targetIdList"] = blogId
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/feed/g-vote", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
else: raise exceptions.WrongType(type(blogId))
elif wikiId:
data["eventSource"] = "PostDetailView"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/item/{wikiId}/g-vote?cv=1.2", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
else: raise exceptions.SpecifyType()
async def unlike_blog(self, blogId: str = None, wikiId: str = None):
"""
Remove a like from a Blog or Wiki.
**Parameters**
- **blogId** : ID of the Blog. (for Blogs)
- **wikiId** : ID of the Wiki. (for Wikis)
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if blogId: url = f"{self.api}/g/s/blog/{blogId}/g-vote?eventSource=UserProfileView"
elif wikiId: url = f"{self.api}/g/s/item/{wikiId}/g-vote?eventSource=PostDetailView"
else: raise exceptions.SpecifyType()
async with self.session.delete(url, headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def like_comment(self, commentId: str, userId: str = None, blogId: str = None, wikiId: str = None):
"""
Like a Comment on a User's Wall, Blog or Wiki.
**Parameters**
- **commentId** : ID of the Comment.
- **userId** : ID of the User. (for Walls)
- **blogId** : ID of the Blog. (for Blogs)
- **wikiId** : ID of the Wiki. (for Wikis)
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = {
"value": 4,
"timestamp": int(timestamp() * 1000)
}
if userId:
data["eventSource"] = "UserProfileView"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/user-profile/{userId}/comment/{commentId}/g-vote?cv=1.2&value=1", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
elif blogId:
data["eventSource"] = "PostDetailView"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/blog/{blogId}/comment/{commentId}/g-vote?cv=1.2&value=1", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
elif wikiId:
data["eventSource"] = "PostDetailView"
data = json.dumps(data)
async with self.session.post(f"{self.api}/g/s/item/{wikiId}/comment/{commentId}/g-vote?cv=1.2&value=1", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
else: raise exceptions.SpecifyType()
async def unlike_comment(self, commentId: str, userId: str = None, blogId: str = None, wikiId: str = None):
"""
Remove a like from a Comment on a User's Wall, Blog or Wiki.
**Parameters**
- **commentId** : ID of the Comment.
- **userId** : ID of the User. (for Walls)
- **blogId** : ID of the Blog. (for Blogs)
- **wikiId** : ID of the Wiki. (for Wikis)
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if userId: url = f"{self.api}/g/s/user-profile/{userId}/comment/{commentId}/g-vote?eventSource=UserProfileView"
elif blogId: url = f"{self.api}/g/s/blog/{blogId}/comment/{commentId}/g-vote?eventSource=PostDetailView"
elif wikiId: url = f"{self.api}/g/s/item/{wikiId}/comment/{commentId}/g-vote?eventSource=PostDetailView"
else: raise exceptions.SpecifyType()
async with self.session.delete(url, headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def get_membership_info(self):
"""
Get Information about your Amino+ Membership.
**Parameters**
- No parameters required.
**Returns**
- **Success** : :meth:`Membership Object <amino.lib.util.objects.Membership>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/membership?force=true", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.Membership(json.loads(await response.text())).Membership
async def get_ta_announcements(self, language: str = "en", start: int = 0, size: int = 25):
"""
Get the list of Team Amino's Announcement Blogs.
**Parameters**
- **language** : Language of the Blogs.
- ``en``, ``es``, ``pt``, ``ar``, ``ru``, ``fr``, ``de``
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`Blogs List <amino.lib.util.objects.BlogList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
if language not in self.get_supported_languages(): raise exceptions.UnsupportedLanguage(language)
async with self.session.get(f"{self.api}/g/s/announcement?language={language}&start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.BlogList(json.loads(await response.text())["blogList"]).BlogList
async def get_wallet_info(self):
"""
Get Information about the account's Wallet.
**Parameters**
- No parameters required.
**Returns**
- **Success** : :meth:`Wallet Object <amino.lib.util.objects.WalletInfo>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/wallet", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.WalletInfo(json.loads(await response.text())["wallet"]).WalletInfo
async def get_wallet_history(self, start: int = 0, size: int = 25):
"""
Get the Wallet's History Information.
**Parameters**
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`Wallet Object <amino.lib.util.objects.WalletInfo>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/wallet/coin/history?start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.WalletHistory(json.loads(await response.text())["coinHistoryList"]).WalletHistory
async def get_from_deviceid(self, deviceId: str):
"""
Get the User ID from a Device ID.
**Parameters**
- **deviceID** : ID of the Device.
**Returns**
- **Success** : :meth:`User ID <amino.lib.util.objects.UserProfile.userId>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/auid?deviceId={deviceId}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return json.loads(await response.text())["auid"]
async def get_from_code(self, code: str):
"""
Get the Object Information from the Amino URL Code.
**Parameters**
- **code** : Code from the Amino URL.
**Returns**
- **Success** : :meth:`From Code Object <amino.lib.util.objects.FromCode>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/link-resolution?q={code}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.FromCode(json.loads(await response.text())["linkInfoV2"]).FromCode
async def get_from_id(self, objectId: str, objectType: int, comId: str = None):
"""
Get the Object Information from the Object ID and Type.
**Parameters**
- **objectID** : ID of the Object. User ID, Blog ID, etc.
- **objectType** : Type of the Object.
- *comId* : ID of the Community. Use if the Object is in a Community.
**Returns**
- **Success** : :meth:`From Code Object <amino.lib.util.objects.FromCode>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"objectId": objectId,
"targetCode": 1,
"objectType": objectType,
"timestamp": int(timestamp() * 1000)
})
if comId: url = f"{self.api}/g/s-x{comId}/link-resolution"
else: url = f"{self.api}/g/s/link-resolution"
async with self.session.post(url, headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.FromCode(json.loads(await response.text())["linkInfoV2"]).FromCode
async def get_supported_languages(self):
"""
Get the List of Supported Languages by Amino.
**Parameters**
- No parameters required.
**Returns**
- **Success** : :meth:`List of Supported Languages <List>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/community-collection/supported-languages?start=0&size=100", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return json.loads(await response.text())["supportedLanguages"]
async def claim_new_user_coupon(self):
"""
Claim the New User Coupon available when a new account is created.
**Parameters**
- No parameters required.
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.post(f"{self.api}/g/s/coupon/new-user-coupon/claim", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def get_subscriptions(self, start: int = 0, size: int = 25):
"""
Get Information about the account's Subscriptions.
**Parameters**
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`List <List>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/store/subscription?objectType=122&start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return json.loads(await response.text())["storeSubscriptionItemList"]
async def get_all_users(self, start: int = 0, size: int = 25):
"""
Get list of users of Amino.
**Parameters**
- *start* : Where to start the list.
- *size* : Size of the list.
**Returns**
- **Success** : :meth:`User Profile Count List Object <amino.lib.util.objects.UserProfileCountList>`
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
async with self.session.get(f"{self.api}/g/s/user-profile?type=recent&start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.UserProfileCountList(json.loads(await response.text())).UserProfileCountList
async def accept_host(self, chatId: str, requestId: str):
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/transfer-organizer/{requestId}/accept", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def accept_organizer(self, chatId: str, requestId: str):
await self.accept_host(chatId, requestId)
# Contributed by 'https://github.com/LynxN1'
async def link_identify(self, code: str):
async with self.session.get(f"{self.api}/g/s/community/link-identify?q=http%3A%2F%2Faminoapps.com%2Finvite%2F{code}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return json.loads(await response.text())
async def invite_to_vc(self, chatId: str, userId: str):
"""
Invite a User to a Voice Chat
**Parameters**
- **chatId** - ID of the Chat
- **userId** - ID of the User
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"uid": userId
})
async with self.session.post(f"{self.api}/g/s/chat/thread/{chatId}/vvchat-presenter/invite", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def wallet_config(self, level: int):
"""
Changes ads config
**Parameters**
- **level** - Level of the ads.
- ``1``, ``2``
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"adsLevel": level,
"timestamp": int(timestamp() * 1000)
})
async with self.session.post(f"{self.api}/g/s/wallet/ads/config", headers=self.parse_headers(data=data), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status
async def get_avatar_frames(self, start: int = 0, size: int = 25):
async with self.session.get(f"{self.api}/g/s/avatar-frame?start={start}&size={size}", headers=self.parse_headers()) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return objects.AvatarFrameList(json.loads(await response.text())["avatarFrameList"]).AvatarFrameList
async def subscribe_amino_plus(self, transactionId="", sku="d940cf4a-6cf2-4737-9f3d-655234a92ea5"):
"""
Subscribes to Amino+.
**Parameters**
- **transactionId** - The transaction Id as a uuid4
**Returns**
- **Success** : 200 (int)
- **Fail** : :meth:`Exceptions <amino.lib.util.exceptions>`
"""
data = json.dumps({
"sku": sku,
"packageName": "com.narvii.amino.master",
"paymentType": 1,
"paymentContext": {
"transactionId": (transactionId or str(uuid4())),
"isAutoRenew": True
},
"timestamp": timestamp()
})
async with self.session.post(f"{self.api}/g/s/membership/product/subscribe", headers=self.parse_headers(), data=data) as response:
if response.status != 200: return exceptions.CheckException(json.loads(await response.text()))
else: return response.status |
py | 1a46a549194c4fb7925bc9a321e174e7bd1ebf34 | #!/usr/bin/env python
# coding:utf-8
"""
@Time : 2021/10/7 15:40
@Author : harvey
@File : basic.py
@Software: PyCharm
@Desc:
@Module
"""
from rest_framework import permissions
class IsValidUser(permissions.IsAuthenticated, permissions.BasePermission):
"""Allows access to valid user, is active and not expired"""
def has_permission(self, request, view):
return super(IsValidUser, self).has_permission(request, view) \
and request.user.is_valid
class IsSuperUser(IsValidUser):
def has_permission(self, request, view):
return super(IsSuperUser, self).has_permission(request, view) \
and request.user.is_superuser
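# Illustrative usage sketch (added; the view below is a hypothetical example of
# wiring these permission classes into a DRF view, not part of the original module):
#
# from rest_framework.views import APIView
# from rest_framework.response import Response
#
# class AdminOnlyView(APIView):
#     permission_classes = (IsSuperUser,)
#
#     def get(self, request):
#         return Response({"detail": "superuser access granted"})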
|
py | 1a46a6cab818e7df2e213db7c70362b0847381a3 | import IPython
import numpy as np
import pandas as pd
def display(*dfs, head: bool = True):
"""Display the dataframes in _dfs_"""
for df in dfs:
IPython.display.display(df.head() if head else df)
def reduce_mem_usage(df: pd.DataFrame, verbose: bool = False) -> pd.DataFrame:
"""Efficiently manage the memory usage of _df_"""
if verbose:
start_mem = df.memory_usage().sum() / 1024 ** 2
print("~> Memory usage of dataframe is {:.3f} MG".format(start_mem))
for col in df.columns:
col_type = df[col].dtype
if col_type != object:
c_min = df[col].min()
c_max = df[col].max()
if str(col_type)[:3] == "int" or np.all(np.mod(df[col], 1) == 0):
# Booleans mapped to integers
if list(df[col].unique()) == [1, 0]:
df[col] = df[col].astype(bool)
elif c_min > np.iinfo(np.int8).min and c_max < np.iinfo(np.int8).max:
df[col] = df[col].astype(np.int8)
elif c_min > np.iinfo(np.uint8).min and c_max < np.iinfo(np.uint8).max:
df[col] = df[col].astype(np.uint8)
elif c_min > np.iinfo(np.int16).min and c_max < np.iinfo(np.int16).max:
df[col] = df[col].astype(np.int16)
elif (
c_min > np.iinfo(np.uint16).min and c_max < np.iinfo(np.uint16).max
):
df[col] = df[col].astype(np.uint16)
elif c_min > np.iinfo(np.int32).min and c_max < np.iinfo(np.int32).max:
df[col] = df[col].astype(np.int32)
elif (
c_min > np.iinfo(np.uint32).min and c_max < np.iinfo(np.uint32).max
):
df[col] = df[col].astype(np.uint32)
elif c_min > np.iinfo(np.int64).min and c_max < np.iinfo(np.int64).max:
df[col] = df[col].astype(np.int64)
elif (
c_min > np.iinfo(np.uint64).min and c_max < np.iinfo(np.uint64).max
):
df[col] = df[col].astype(np.uint64)
else:
if (
c_min > np.finfo(np.float16).min
and c_max < np.finfo(np.float16).max
):
df[col] = df[col].astype(np.float16)
elif (
c_min > np.finfo(np.float32).min
and c_max < np.finfo(np.float32).max
):
df[col] = df[col].astype(np.float32)
else:
df[col] = df[col].astype(np.float64)
else:
pass
if verbose:
end_mem = df.memory_usage().sum() / 1024 ** 2
print("~> Memory usage after optimization is: {:.3f} MG".format(end_mem))
print("~> Decreased by {:.1f}%".format(100 * (start_mem - end_mem) / start_mem))
print("---" * 20)
return df
def extract_num(ser: pd.Series) -> pd.Series:
"""Extract the numerical value from a string"""
return ser.str.extract(r"(\d+)").astype(np.int16)
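# Minimal usage sketch (added for illustration; the sample frame below is an
# assumption, not part of the original module):
if __name__ == "__main__":
    sample = pd.DataFrame({
        "flag": [1, 0, 1, 0],             # downcast to bool
        "count": [1, 2, 3, 4],            # downcast to a small integer dtype
        "price": [1.5, 2.25, 3.0, 4.75],  # downcast to float16
        "label": ["id_1", "id_2", "id_3", "id_4"],
    })
    sample = reduce_mem_usage(sample, verbose=True)
    print(extract_num(sample["label"]))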
|
py | 1a46a70f3c2dc6478615e5e355ed4713ad6f9a7f |
__version__ = "0.17.3"
__license__ = "MIT"
|
py | 1a46a83d4561af224bbcbf9dfa40e034cdb5e789 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import glob
import os
import pytest
import llnl.util.filesystem as fs
import spack.environment
import spack.repo
from spack.build_environment import ChildError, get_std_cmake_args, setup_package
from spack.spec import Spec
from spack.util.executable import which
DATA_PATH = os.path.join(spack.paths.test_path, 'data')
@pytest.mark.parametrize(
'directory',
glob.iglob(os.path.join(DATA_PATH, 'make', 'affirmative', '*'))
)
def test_affirmative_make_check(directory, config, mock_packages, working_env):
"""Tests that Spack correctly detects targets in a Makefile."""
# Get a fake package
s = Spec('mpich')
s.concretize()
pkg = spack.repo.get(s)
setup_package(pkg, False)
with fs.working_dir(directory):
assert pkg._has_make_target('check')
pkg._if_make_target_execute('check')
@pytest.mark.parametrize(
'directory',
glob.iglob(os.path.join(DATA_PATH, 'make', 'negative', '*'))
)
@pytest.mark.regression('9067')
def test_negative_make_check(directory, config, mock_packages, working_env):
"""Tests that Spack correctly ignores false positives in a Makefile."""
# Get a fake package
s = Spec('mpich')
s.concretize()
pkg = spack.repo.get(s)
setup_package(pkg, False)
with fs.working_dir(directory):
assert not pkg._has_make_target('check')
pkg._if_make_target_execute('check')
@pytest.mark.skipif(not which('ninja'), reason='ninja is not installed')
@pytest.mark.parametrize(
'directory',
glob.iglob(os.path.join(DATA_PATH, 'ninja', 'affirmative', '*'))
)
def test_affirmative_ninja_check(
directory, config, mock_packages, working_env):
"""Tests that Spack correctly detects targets in a Ninja build script."""
# Get a fake package
s = Spec('mpich')
s.concretize()
pkg = spack.repo.get(s)
setup_package(pkg, False)
with fs.working_dir(directory):
assert pkg._has_ninja_target('check')
pkg._if_ninja_target_execute('check')
# Clean up Ninja files
for filename in glob.iglob('.ninja_*'):
os.remove(filename)
@pytest.mark.skipif(not which('ninja'), reason='ninja is not installed')
@pytest.mark.parametrize(
'directory',
glob.iglob(os.path.join(DATA_PATH, 'ninja', 'negative', '*'))
)
def test_negative_ninja_check(directory, config, mock_packages, working_env):
"""Tests that Spack correctly ignores false positives in a Ninja
build script."""
# Get a fake package
s = Spec('mpich')
s.concretize()
pkg = spack.repo.get(s)
setup_package(pkg, False)
with fs.working_dir(directory):
assert not pkg._has_ninja_target('check')
pkg._if_ninja_target_execute('check')
def test_cmake_std_args(config, mock_packages):
# Call the function on a CMakePackage instance
s = Spec('cmake-client')
s.concretize()
pkg = spack.repo.get(s)
assert pkg.std_cmake_args == get_std_cmake_args(pkg)
# Call it on another kind of package
s = Spec('mpich')
s.concretize()
pkg = spack.repo.get(s)
assert get_std_cmake_args(pkg)
def test_cmake_bad_generator(config, mock_packages):
s = Spec('cmake-client')
s.concretize()
pkg = spack.repo.get(s)
pkg.generator = 'Yellow Sticky Notes'
with pytest.raises(spack.package.InstallError):
get_std_cmake_args(pkg)
def test_cmake_secondary_generator(config, mock_packages):
s = Spec('cmake-client')
s.concretize()
pkg = spack.repo.get(s)
pkg.generator = 'CodeBlocks - Unix Makefiles'
assert get_std_cmake_args(pkg)
@pytest.mark.usefixtures('config', 'mock_packages')
class TestAutotoolsPackage(object):
def test_with_or_without(self):
s = Spec('a')
s.concretize()
pkg = spack.repo.get(s)
options = pkg.with_or_without('foo')
# Ensure that values that are not representing a feature
# are not used by with_or_without
assert '--without-none' not in options
assert '--with-bar' in options
assert '--without-baz' in options
assert '--no-fee' in options
def activate(value):
return 'something'
options = pkg.with_or_without('foo', activation_value=activate)
assert '--without-none' not in options
assert '--with-bar=something' in options
assert '--without-baz' in options
assert '--no-fee' in options
options = pkg.enable_or_disable('foo')
assert '--disable-none' not in options
assert '--enable-bar' in options
assert '--disable-baz' in options
assert '--disable-fee' in options
options = pkg.with_or_without('bvv')
assert '--with-bvv' in options
options = pkg.with_or_without('lorem-ipsum', variant='lorem_ipsum')
assert '--without-lorem-ipsum' in options
def test_none_is_allowed(self):
s = Spec('a foo=none')
s.concretize()
pkg = spack.repo.get(s)
options = pkg.with_or_without('foo')
# Ensure that values that are not representing a feature
# are not used by with_or_without
assert '--with-none' not in options
assert '--without-bar' in options
assert '--without-baz' in options
assert '--no-fee' in options
def test_libtool_archive_files_are_deleted_by_default(
self, mutable_database
):
# Install a package that creates a mock libtool archive
s = Spec('libtool-deletion')
s.concretize()
s.package.do_install(explicit=True)
# Assert the libtool archive is not there and we have
# a log of removed files
assert not os.path.exists(s.package.libtool_archive_file)
search_directory = os.path.join(s.prefix, '.spack')
libtool_deletion_log = fs.find(
search_directory, 'removed_la_files.txt', recursive=True
)
assert libtool_deletion_log
def test_libtool_archive_files_might_be_installed_on_demand(
self, mutable_database, monkeypatch
):
# Install a package that creates a mock libtool archive,
# patch its package to preserve the installation
s = Spec('libtool-deletion')
s.concretize()
monkeypatch.setattr(s.package, 'install_libtool_archives', True)
s.package.do_install(explicit=True)
# Assert libtool archives are installed
assert os.path.exists(s.package.libtool_archive_file)
def test_autotools_gnuconfig_replacement(self, mutable_database):
"""
Tests whether only broken config.sub and config.guess are replaced with
files from working alternatives from the gnuconfig package.
"""
s = Spec('autotools-config-replacement +patch_config_files +gnuconfig')
s.concretize()
s.package.do_install()
with open(os.path.join(s.prefix.broken, 'config.sub')) as f:
assert "gnuconfig version of config.sub" in f.read()
with open(os.path.join(s.prefix.broken, 'config.guess')) as f:
assert "gnuconfig version of config.guess" in f.read()
with open(os.path.join(s.prefix.working, 'config.sub')) as f:
assert "gnuconfig version of config.sub" not in f.read()
with open(os.path.join(s.prefix.working, 'config.guess')) as f:
assert "gnuconfig version of config.guess" not in f.read()
def test_autotools_gnuconfig_replacement_disabled(self, mutable_database):
"""
Tests whether disabling patch_config_files leaves broken config.sub and
config.guess files untouched.
"""
s = Spec('autotools-config-replacement ~patch_config_files +gnuconfig')
s.concretize()
s.package.do_install()
with open(os.path.join(s.prefix.broken, 'config.sub')) as f:
assert "gnuconfig version of config.sub" not in f.read()
with open(os.path.join(s.prefix.broken, 'config.guess')) as f:
assert "gnuconfig version of config.guess" not in f.read()
with open(os.path.join(s.prefix.working, 'config.sub')) as f:
assert "gnuconfig version of config.sub" not in f.read()
with open(os.path.join(s.prefix.working, 'config.guess')) as f:
assert "gnuconfig version of config.guess" not in f.read()
@pytest.mark.disable_clean_stage_check
def test_autotools_gnuconfig_replacement_no_gnuconfig(self, mutable_database):
"""
Tests whether a useful error message is shown when patch_config_files is
enabled, but gnuconfig is not listed as a direct build dependency.
"""
s = Spec('autotools-config-replacement +patch_config_files ~gnuconfig')
s.concretize()
msg = "Cannot patch config files: missing dependencies: gnuconfig"
with pytest.raises(ChildError, match=msg):
s.package.do_install()
@pytest.mark.disable_clean_stage_check
def test_broken_external_gnuconfig(self, mutable_database, tmpdir):
"""
Tests whether we get a useful error message when gnuconfig is marked
external, but the install prefix is misconfigured and no config.guess
and config.sub substitute files are found in the provided prefix.
"""
env_dir = str(tmpdir.ensure('env', dir=True))
gnuconfig_dir = str(tmpdir.ensure('gnuconfig', dir=True)) # empty dir
with open(os.path.join(env_dir, 'spack.yaml'), 'w') as f:
f.write("""\
spack:
specs:
- 'autotools-config-replacement +patch_config_files +gnuconfig'
packages:
gnuconfig:
buildable: false
externals:
- spec: gnuconfig@1.0.0
prefix: {0}
""".format(gnuconfig_dir))
msg = ("Spack could not find `config.guess`.*misconfigured as an "
"external package")
with spack.environment.Environment(env_dir) as e:
e.concretize()
with pytest.raises(ChildError, match=msg):
e.install_all()
@pytest.mark.usefixtures('config', 'mock_packages')
class TestCMakePackage(object):
def test_define(self):
s = Spec('cmake-client')
s.concretize()
pkg = spack.repo.get(s)
for cls in (list, tuple):
arg = pkg.define('MULTI', cls(['right', 'up']))
assert arg == '-DMULTI:STRING=right;up'
arg = pkg.define('ENABLE_TRUTH', False)
assert arg == '-DENABLE_TRUTH:BOOL=OFF'
arg = pkg.define('ENABLE_TRUTH', True)
assert arg == '-DENABLE_TRUTH:BOOL=ON'
arg = pkg.define('SINGLE', 'red')
assert arg == '-DSINGLE:STRING=red'
def test_define_from_variant(self):
s = Spec('cmake-client multi=up,right ~truthy single=red')
s.concretize()
pkg = spack.repo.get(s)
arg = pkg.define_from_variant('MULTI')
assert arg == '-DMULTI:STRING=right;up'
arg = pkg.define_from_variant('ENABLE_TRUTH', 'truthy')
assert arg == '-DENABLE_TRUTH:BOOL=OFF'
arg = pkg.define_from_variant('SINGLE')
assert arg == '-DSINGLE:STRING=red'
with pytest.raises(KeyError, match="not a variant"):
pkg.define_from_variant('NONEXISTENT')
@pytest.mark.usefixtures('config', 'mock_packages')
class TestGNUMirrorPackage(object):
def test_define(self):
s = Spec('mirror-gnu')
s.concretize()
pkg = spack.repo.get(s)
s = Spec('mirror-gnu-broken')
s.concretize()
pkg_broken = spack.repo.get(s)
cls_name = type(pkg_broken).__name__
with pytest.raises(AttributeError,
match=r'{0} must define a `gnu_mirror_path` '
r'attribute \[none defined\]'
.format(cls_name)):
pkg_broken.urls
assert pkg.urls[0] == 'https://ftpmirror.gnu.org/' \
'make/make-4.2.1.tar.gz'
@pytest.mark.usefixtures('config', 'mock_packages')
class TestSourceforgePackage(object):
def test_define(self):
s = Spec('mirror-sourceforge')
s.concretize()
pkg = spack.repo.get(s)
s = Spec('mirror-sourceforge-broken')
s.concretize()
pkg_broken = spack.repo.get(s)
cls_name = type(pkg_broken).__name__
with pytest.raises(AttributeError,
match=r'{0} must define a `sourceforge_mirror_path`'
r' attribute \[none defined\]'
.format(cls_name)):
pkg_broken.urls
assert pkg.urls[0] == 'https://prdownloads.sourceforge.net/' \
'tcl/tcl8.6.5-src.tar.gz'
@pytest.mark.usefixtures('config', 'mock_packages')
class TestSourcewarePackage(object):
def test_define(self):
s = Spec('mirror-sourceware')
s.concretize()
pkg = spack.repo.get(s)
s = Spec('mirror-sourceware-broken')
s.concretize()
pkg_broken = spack.repo.get(s)
cls_name = type(pkg_broken).__name__
with pytest.raises(AttributeError,
match=r'{0} must define a `sourceware_mirror_path` '
r'attribute \[none defined\]'
.format(cls_name)):
pkg_broken.urls
assert pkg.urls[0] == 'https://sourceware.org/pub/' \
'bzip2/bzip2-1.0.8.tar.gz'
@pytest.mark.usefixtures('config', 'mock_packages')
class TestXorgPackage(object):
def test_define(self):
s = Spec('mirror-xorg')
s.concretize()
pkg = spack.repo.get(s)
s = Spec('mirror-xorg-broken')
s.concretize()
pkg_broken = spack.repo.get(s)
cls_name = type(pkg_broken).__name__
with pytest.raises(AttributeError,
match=r'{0} must define a `xorg_mirror_path` '
r'attribute \[none defined\]'
.format(cls_name)):
pkg_broken.urls
assert pkg.urls[0] == 'https://www.x.org/archive/individual/' \
'util/util-macros-1.19.1.tar.bz2'
def test_cmake_define_from_variant_conditional(config, mock_packages):
"""Test that define_from_variant returns empty string when a condition on a variant
is not met. When this is the case, the variant is not set in the spec."""
s = Spec('cmake-conditional-variants-test').concretized()
assert 'example' not in s.variants
assert s.package.define_from_variant('EXAMPLE', 'example') == ''
|
py | 1a46a8ab93c7f318bcc6fd69f90a58225bb1b7ae | '''
This script monitors chrome RAM activity and kills those tab that overcomes
defined memory limit.
This program has been tested on:
python 2.7.8
psutil 2.1.1
Original source for this program is located at:
http://superuser.com/questions/413349/limiting-use-of-ram-in-chrome
Modified by wanderlust
'''
import sys, os
try:
import psutil
except ImportError:
print "psutil are not installed or not visible for python"
exit
MEM_LIMIT = 800 # Size of allowed memory for chrome per page
VERBOSE = True # Show verbose messages for processes nearing the memory limit
uid = os.getuid()
print "System:", sys.platform
print "Allowed memory for chrome:", str(MEM_LIMIT) + "MB"
while True:
for p in psutil.get_process_list():
try:
if p.name() == 'chrome' and \
any("--type=renderer" in part for part in p.cmdline()) and \
p.uids().real == uid:
# Eliminated all interesting processes
if sys.platform == "win32" or sys.platform == "cygwin":
mem = p.memory_info_ex().num_page_faults / 1024 / 1024
else:
mem = p.memory_info_ex().rss/1024/1024
if mem > 0.75 * MEM_LIMIT and VERBOSE:
print "Proc: " + str(p.name()) + " Memory: " + str(mem)
if mem > MEM_LIMIT:
p.kill()
if not p.is_running():
print "Killed"
except (psutil.NoSuchProcess, psutil.AccessDenied):
pass
|
py | 1a46a96a303597a9bbff5c074adc5b4626042a28 | from sys import stdin, stdout
freq = {}
num_women = int(input())
for i in range(num_women):
line = stdin.readline().strip().split()
country = line[0]
if not country in freq:
freq[country] = 1
else:
freq[country] += 1
for pair in sorted(freq.items()):
stdout.write("{} {}\n".format(pair[0], pair[1])) |
py | 1a46ad073757c002170dbdbd349f949bc1528bb9 | """Snapcast group."""
import asyncio
import logging
_LOGGER = logging.getLogger(__name__)
# pylint: disable=too-many-public-methods
class Snapgroup(object):
"""Represents a snapcast group."""
def __init__(self, server, data):
"""Initialize."""
self._server = server
self._snapshot = None
self._callback_func = None
self.update(data)
def update(self, data):
"""Update group."""
self._group = data
@property
def identifier(self):
"""Get group identifier."""
return self._group.get('id')
@property
def name(self):
"""Get group name."""
return self._group.get('name')
@property
def stream(self):
"""Get stream identifier."""
return self._group.get('stream_id')
@asyncio.coroutine
def set_stream(self, stream_id):
"""Set group stream."""
self._group['stream_id'] = stream_id
yield from self._server.group_stream(self.identifier, stream_id)
_LOGGER.info('set stream to %s on %s', stream_id, self.friendly_name)
@property
def stream_status(self):
"""Get stream status."""
return self._server.stream(self.stream).status
@property
def muted(self):
"""Get mute status."""
return self._group.get('muted')
@asyncio.coroutine
def set_muted(self, status):
"""Set group mute status."""
self._group['muted'] = status
yield from self._server.group_mute(self.identifier, status)
_LOGGER.info('set muted to %s on %s', status, self.friendly_name)
@property
def volume(self):
"""Get volume."""
volume_sum = 0
for client in self._group.get('clients'):
volume_sum += self._server.client(client.get('id')).volume
return int(volume_sum / len(self._group.get('clients')))
@asyncio.coroutine
def set_volume(self, volume):
"""Set volume."""
if volume not in range(0, 101):
raise ValueError('Volume out of range')
current_volume = self.volume
if volume == current_volume:
_LOGGER.info('left volume at %s on group %s', volume, self.friendly_name)
return
delta = volume - current_volume
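# Proportional scaling (comment added): when lowering the group volume each client
# loses the same fraction of its current level; when raising, each client gains the
# same fraction of its remaining headroom (100 - current level), so the relative
# balance between clients is preserved.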
if delta < 0:
ratio = (current_volume - volume) / current_volume
else:
ratio = (volume - current_volume) / (100 - current_volume)
for data in self._group.get('clients'):
client = self._server.client(data.get('id'))
client_volume = client.volume
if delta < 0:
client_volume -= ratio * client_volume
else:
client_volume += ratio * (100 - client_volume)
client_volume = round(client_volume)
yield from client.set_volume(client_volume, update_group=False)
client.update_volume({
'volume': {
'percent': client_volume,
'muted': client.muted
}
})
_LOGGER.info('set volume to %s on group %s', volume, self.friendly_name)
@property
def friendly_name(self):
"""Get friendly name."""
return self.name if self.name != '' else self.stream
@property
def clients(self):
"""Get client identifiers."""
return [client.get('id') for client in self._group.get('clients')]
@asyncio.coroutine
def add_client(self, client_identifier):
"""Add a client."""
if client_identifier in self.clients:
_LOGGER.error('%s already in group %s', client_identifier, self.identifier)
return
new_clients = self.clients
new_clients.append(client_identifier)
yield from self._server.group_clients(self.identifier, new_clients)
_LOGGER.info('added %s to %s', client_identifier, self.identifier)
status = yield from self._server.status()
self._server.synchronize(status)
self._server.client(client_identifier).callback()
self.callback()
@asyncio.coroutine
def remove_client(self, client_identifier):
"""Remove a client."""
new_clients = self.clients
new_clients.remove(client_identifier)
yield from self._server.group_clients(self.identifier, new_clients)
_LOGGER.info('removed %s from %s', client_identifier, self.identifier)
status = yield from self._server.status()
self._server.synchronize(status)
self._server.client(client_identifier).callback()
self.callback()
def streams_by_name(self):
"""Get available stream objects by name."""
return {stream.friendly_name: stream for stream in self._server.streams}
def update_mute(self, data):
"""Update mute."""
self._group['muted'] = data['mute']
self.callback()
_LOGGER.info('updated mute on %s', self.friendly_name)
def update_stream(self, data):
"""Update stream."""
self._group['stream_id'] = data['stream_id']
self.callback()
_LOGGER.info('updated stream to %s on %s', self.stream, self.friendly_name)
def snapshot(self):
"""Snapshot current state."""
self._snapshot = {
'muted': self.muted,
'volume': self.volume,
'stream': self.stream
}
_LOGGER.info('took snapshot of current state of %s', self.friendly_name)
@asyncio.coroutine
def restore(self):
"""Restore snapshotted state."""
if not self._snapshot:
return
yield from self.set_muted(self._snapshot['muted'])
yield from self.set_volume(self._snapshot['volume'])
yield from self.set_stream(self._snapshot['stream'])
self.callback()
_LOGGER.info('restored snapshot of state of %s', self.friendly_name)
def callback(self):
"""Run callback."""
if self._callback_func and callable(self._callback_func):
self._callback_func(self)
def set_callback(self, func):
"""Set callback."""
self._callback_func = func
def __repr__(self):
"""String representation."""
return 'Snapgroup ({}, {})'.format(self.friendly_name, self.identifier)
|
py | 1a46adae6625fbb17a2b9a0a3015d0560cf59337 | # Time: ctor: O(n1 + n2)
# add: O(1)
# count: O(n1)
# Space: O(n1 + n2)
import collections
class FindSumPairs(object):
def __init__(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
"""
self.__nums2 = nums2
self.__count1 = collections.Counter(nums1)
self.__count2 = collections.Counter(nums2)
def add(self, index, val):
"""
:type index: int
:type val: int
:rtype: None
"""
self.__count2[self.__nums2[index]] -= 1
self.__nums2[index] += val
self.__count2[self.__nums2[index]] += 1
def count(self, tot):
"""
:type tot: int
:rtype: int
"""
return sum(cnt * self.__count2[tot-x] for x, cnt in self.__count1.iteritems())
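# Illustrative usage (added; values follow the classic example for this problem):
# obj = FindSumPairs([1, 1, 2, 2, 2, 3], [1, 4, 5, 2, 5, 4])
# print obj.count(7)   # 8 pairs of (nums1[i], nums2[j]) sum to 7
# obj.add(3, 2)        # nums2 becomes [1, 4, 5, 4, 5, 4]
# print obj.count(8)   # 2 pairs sum to 8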
|
py | 1a46adc983fafbf0acdb103bb1a062395d44d402 | from django.db import models
from django.conf import settings
from django.shortcuts import reverse
from django.contrib.auth.models import User
from django.core.validators import MinLengthValidator,MaxLengthValidator
from django.utils import timezone
# Create your models here.
CATEGORY_CHOICES=(
('S','Secondhand'),
('N','New')
)
class Item(models.Model):
title= models.CharField(max_length=100)
price=models.FloatField()
discount_price=models.FloatField(blank=True, null=True)
category= models.CharField(choices=CATEGORY_CHOICES, max_length=1)
description = models.TextField(max_length=250)
image=models.ImageField(upload_to='ecommerce/',null=True, blank=True)
user= models.ForeignKey(User,on_delete=models.CASCADE)
contact=models.CharField( max_length=12, validators=[MinLengthValidator(10)], verbose_name="Mobile No")
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse('product', kwargs={
'pk':self.pk
})
def get_add_to_cart_url(self):
return reverse('add_to_cart', kwargs={
'pk':self.pk
})
def get_remove_from_cart_url(self):
return reverse('remove_from_cart', kwargs={
'pk':self.pk
})
class OrderItem(models.Model):
user = models.ForeignKey(User,on_delete=models.CASCADE)
item=models.ForeignKey(Item, on_delete=models.CASCADE)
ordered=models.BooleanField(default=False)
def __str__(self):
return f"{ self.item.title }"
def item_price(self):
return self.item.price
def discount_item_price(self):
return self.item.discount_price
def get_amount_saved(self):
return self.item_price()-self.discount_item_price()
def get_final_price(self):
if self.item.discount_price:
return self.discount_item_price()
return self.item_price()
class Order(models.Model):
user = models.ForeignKey(User,on_delete=models.CASCADE)
items =models.ManyToManyField(OrderItem)
ordered_date=models.DateTimeField()
ordered=models.BooleanField(default=False)
def __str__(self):
return self.user.username
def get_total(self):
total=0
for order_item in self.items.all():
total += order_item.get_final_price()
return total
class Comment(models.Model):
item = models.ForeignKey(Item, on_delete=models.CASCADE, related_name='comment')
user = models.ForeignKey(User,on_delete=models.CASCADE)
comment = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
approved_comment = models.BooleanField(default=False)
def approve(self):
self.approved_comment = True
self.save()
def __str__(self):
return self.user.username
def get_absolute_url(self):
return reverse('product',kwargs={'pk': self.pk})
|
py | 1a46af1b5b27b56b348d3ffbbfcf846924e85d30 | def getAssaysFromGeneID(geneID):
"""
Returns a dictionary of pandas DataFrames containing the assay data for all
assays with AIDs associated to geneID mapped to their respective AIDs.
@param geneID The geneID to search on
"""
AIDs = getAIDsFromGeneID(geneID)
return getAssaysFromAIDs(AIDs)
|
py | 1a46af2a78b8e70c4e3a6e608aeac51deb625eb3 | import Adafruit_DHT
DHT_SENSOR = Adafruit_DHT.DHT22
DHT_PIN = 4
while True:
humidity, temperature = Adafruit_DHT.read_retry(DHT_SENSOR, DHT_PIN)
if humidity is not None and temperature is not None:
print("Temp={0:0.1f}*C Humidity={1:0.1f}%".format(temperature, humidity))
else:
print("DATA RETRIEVAL ERROR") |
py | 1a46b00cdda1a2250bb8edaab44ae42d57ccf937 | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import unittest
import time
import oneflow.unittest
import oneflow as flow
import oneflow.nn as nn
import oneflow.utils.vision.transforms as transforms
# reference: http://tangshusen.me/Dive-into-DL-PyTorch/#/chapter03_DL-basics/3.10_mlp-pytorch
def load_data_fashion_mnist(
batch_size, resize=None, root="./data/fashion-mnist", download=True, source_url=None
):
"""Download the Fashion-MNIST dataset and then load into memory."""
root = os.path.expanduser(root)
transformer = []
if resize:
transformer += [transforms.Resize(resize)]
transformer += [transforms.ToTensor()]
transformer = transforms.Compose(transformer)
mnist_train = flow.utils.vision.datasets.FashionMNIST(
root=root,
train=True,
transform=transformer,
download=download,
source_url=source_url,
)
mnist_test = flow.utils.vision.datasets.FashionMNIST(
root=root,
train=False,
transform=transformer,
download=download,
source_url=source_url,
)
num_workers = 0
train_iter = flow.utils.data.DataLoader(
mnist_train, batch_size, shuffle=True, num_workers=num_workers
)
test_iter = flow.utils.data.DataLoader(
mnist_test, batch_size, shuffle=False, num_workers=num_workers
)
return train_iter, test_iter
def get_fashion_mnist_labels(labels):
"""Get text labels for Fashion-MNIST."""
text_labels = [
"t-shirt",
"trouser",
"pullover",
"dress",
"coat",
"sandal",
"shirt",
"sneaker",
"bag",
"ankle boot",
]
return [text_labels[int(i)] for i in labels]
class FlattenLayer(nn.Module):
def __init__(self):
super(FlattenLayer, self).__init__()
def forward(self, x): # x shape: (batch, *, *, ...)
res = x.reshape(shape=[x.shape[0], -1])
return res
def evaluate_accuracy(data_iter, net, device=None):
if device is None and isinstance(net, nn.Module):
# using net device if not specified
device = list(net.parameters())[0].device
acc_sum, n = 0.0, 0
net.eval()
with flow.no_grad():
for X, y in data_iter:
X = X.to(device=device)
y = y.to(device=device)
acc_sum += (
net(X.to(device)).argmax(dim=1).numpy() == y.to(device).numpy()
).sum()
n += y.shape[0]
net.train()
return acc_sum / n
def test(test_case):
num_inputs, num_outputs, num_hiddens = 784, 10, 256
net = nn.Sequential(
FlattenLayer(),
nn.Linear(num_inputs, num_hiddens),
nn.ReLU(),
nn.Linear(num_hiddens, num_outputs),
)
if os.getenv("ONEFLOW_TEST_CPU_ONLY"):
device = flow.device("cpu")
else:
device = flow.device("cuda")
net.to(device)
batch_size = 256
num_epochs = 1
data_dir = os.path.join(
os.getenv("ONEFLOW_TEST_CACHE_DIR", "./data-test"), "fashion-mnist"
)
source_url = "https://oneflow-public.oss-cn-beijing.aliyuncs.com/datasets/mnist/Fashion-MNIST/"
train_iter, test_iter = load_data_fashion_mnist(
batch_size, root=data_dir, download=True, source_url=source_url
)
loss = nn.CrossEntropyLoss()
loss.to(device)
optimizer = flow.optim.SGD(net.parameters(), lr=0.1)
final_accuracy = 0
for epoch in range(num_epochs):
train_l_sum, train_acc_sum, n = 0.0, 0.0, 0
start = time.time()
for X, y in train_iter:
X = X.to(device=device)
y = y.to(device=device)
y_hat = net(X)
l = loss(y_hat, y).sum()
optimizer.zero_grad()
l.backward()
optimizer.step()
train_l_sum += l.numpy()
train_acc_sum += (y_hat.argmax(dim=1).numpy() == y.numpy()).sum()
n += y.shape[0]
test_acc = evaluate_accuracy(test_iter, net)
final_accuracy = train_acc_sum / n
print(
"epoch %d, loss %.4f, train acc %.3f, test acc %.3f, cost >>>>>>> %s(s)"
% (
epoch + 1,
train_l_sum / n,
final_accuracy,
test_acc,
str(time.time() - start),
)
)
final_accuracy = train_acc_sum / n
test_case.assertLess(0.70, final_accuracy)
@flow.unittest.skip_unless_1n1d()
class TestFashionMnistDataset(flow.unittest.TestCase):
def test_fashion_mnist_dataset(test_case):
test(test_case)
if __name__ == "__main__":
unittest.main()
# sample training log:
# epoch 1, loss 0.0034, train acc 0.718, test acc 0.771, cost >>>>>>> 158.32699990272522(s)
# epoch 2, loss 0.0022, train acc 0.807, test acc 0.726, cost >>>>>>> 159.64465260505676(s)
|
py | 1a46b00e406fa83893cc2161333b13e66fc5224d | """
This file offers the methods to automatically retrieve the graph Sphingomonas soli NBRC 100801.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def SphingomonasSoliNbrc100801(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Sphingomonas soli NBRC 100801 graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
Wether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
Wether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.5
- physical.links.v11.5
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
Instace of Sphingomonas soli NBRC 100801 graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="SphingomonasSoliNbrc100801",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
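# Illustrative usage (added as a hedged sketch; assumes this module is importable
# from the ensmallen string datasets package):
# graph = SphingomonasSoliNbrc100801(directed=False, verbose=2)
# print(graph)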
|
py | 1a46b17b34a0c1787a39799606fc8586a7752e4c | import base64
import requests
from constants import URL, IMAGE_EXTENSION
"""
http://<URL>/image/all (GET)
"""
def recover_images():
resource = f"{URL}image"
response = requests.get(url=resource)
for data in response.json():
image_64_encode = data['encoded_image']
image_64_encode = image_64_encode.encode("utf-8")
image_64_decode = base64.decodebytes(image_64_encode)
file_name = data['id'] + IMAGE_EXTENSION
with open(file_name, 'wb') as image_result:
image_result.write(image_64_decode)
print(response)
if __name__ == '__main__':
recover_images()
|