# -*- coding: utf-8 -*-
"""Application models."""
import copy
import csv
import os
import random
import re
import secrets
import string
import uuid
from collections import namedtuple
from datetime import datetime
from enum import IntEnum, IntFlag
from functools import lru_cache
from hashlib import md5
from io import StringIO
from itertools import groupby, zip_longest
from urllib.parse import urlencode
import chardet
import jsonschema
import validators
import yaml
from flask import json
from flask_login import UserMixin, current_user
from peewee import JOIN, BlobField
from peewee import BooleanField as BooleanField_
from peewee import (CharField, DateTimeField, DeferredForeignKey, Field,
FixedCharField, ForeignKeyField, IntegerField,
ManyToManyField, Model, OperationalError,
PostgresqlDatabase, SmallIntegerField, SqliteDatabase,
TextField, fn)
from peewee_validates import ModelValidator
# from playhouse.reflection import generate_models
from playhouse.shortcuts import model_to_dict
from pycountry import countries, currencies, languages
from pykwalify.core import Core
from pykwalify.errors import SchemaError
from . import app, cache, db, schemas
ENV = app.config["ENV"]
DEFAULT_COUNTRY = app.config["DEFAULT_COUNTRY"]
SCHEMA_DIR = os.path.normpath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "schemas")
)
ORCID_ID_REGEX = re.compile(r"^([X\d]{4}-?){3}[X\d]{4}$")
PARTIAL_DATE_REGEX = re.compile(r"\d+([/\-\.]\d+){,2}")
AFFILIATION_TYPES = [
"student",
"education",
"staff",
"employment",
"distinction",
"position",
"invited-position",
"qualification",
"membership",
"service",
]
DISAMBIGUATION_SOURCES = ["RINGGOLD", "GRID", "FUNDREF", "ISNI"]
VISIBILITIES = ["public", "private", "registered-only", "limited"]
visibility_choices = [(v, v.replace("-", " ").title()) for v in VISIBILITIES]
EXTERNAL_ID_TYPES = [
"agr",
"ark",
"arxiv",
"asin",
"asin-tld",
"authenticusid",
"bibcode",
"cba",
"cienciaiul",
"cit",
"ctx",
"dnb",
"doi",
"eid",
"ethos",
"grant_number",
"handle",
"hir",
"isbn",
"issn",
"jfm",
"jstor",
"kuid",
"lccn",
"lensid",
"mr",
"oclc",
"ol",
"osti",
"other-id",
"pat",
"pdb",
"pmc",
"pmid",
"proposal-id",
"rfc",
"rrid",
"source-work-id",
"ssrn",
"uri",
"urn",
"wosuid",
"zbl",
]
FUNDING_TYPES = ["award", "contract", "grant", "salary-award"]
SUBJECT_TYPES = [
"artistic-performance",
"book",
"book-chapter",
"book-review",
"conference-abstract",
"conference-paper",
"conference-poster",
"data-set",
"dictionary-entry",
"disclosure",
"dissertation",
"edited-book",
"encyclopedia-entry",
"invention",
"journal-article",
"journal-issue",
"lecture-speech",
"license",
"magazine-article",
"manual",
"newsletter-article",
"newspaper-article",
"online-resource",
"other",
"patent",
"registered-copyright",
"report",
"research-technique",
"research-tool",
"spin-off-company",
"standards-and-policy",
"supervised-student-publication",
"technical-standard",
"test",
"trademark",
"translation",
"undefined",
"website",
"working-paper",
]
REVIEWER_ROLES = ["chair", "editor", "member", "organizer", "reviewer"]
REVIEW_TYPES = ["evaluation", "review"]
review_type_choices = [(v, v.title()) for v in REVIEW_TYPES]
RELATIONSHIPS = ["part-of", "self", "version-of", "funded-by"]
WORK_TYPES = [
"artistic-performance",
"book",
"book-chapter",
"book-review",
"conference-abstract",
"conference-paper",
"conference-poster",
"data-set",
"dictionary-entry",
"disclosure",
"dissertation",
"edited-book",
"encyclopedia-entry",
"invention",
"journal-article",
"journal-issue",
"lecture-speech",
"license",
"magazine-article",
"manual",
"newsletter-article",
"newspaper-article",
"online-resource",
"other" "patent",
"registered-copyright",
"report",
"research-technique",
"research-tool",
"spin-off-company",
"standards-and-policy",
"supervised-student-publication",
"technical-standard",
"test",
"trademark",
"translation",
"undefined",
"website",
"working-paper",
]
work_type_choices = [(v, v.replace("-", " ").title()) for v in WORK_TYPES]
CITATION_TYPES = [
"bibtex",
"formatted-apa",
"formatted-chicago",
"formatted-harvard",
"formatted-ieee",
"formatted-mla",
"formatted-unspecified",
"formatted-vancouver",
"ris",
]
PROPERTY_TYPES = ["URL", "NAME", "KEYWORD", "COUNTRY"]
citation_type_choices = [(v, v.replace("-", " ").title()) for v in CITATION_TYPES]
country_choices = [(c.alpha_2, c.name) for c in countries]
country_choices.sort(key=lambda e: e[1])
language_choices = [(lang.alpha_2, lang.name) for lang in languages if hasattr(lang, "alpha_2")]
language_choices.sort(key=lambda e: e[1])
currency_choices = [(cur.alpha_3, cur.name) for cur in currencies]
currency_choices.sort(key=lambda e: e[1])
external_id_type_choices = [
(v, v.replace("_", " ").replace("-", " ").title()) for v in EXTERNAL_ID_TYPES
]
relationship_choices = [(v, v.replace("-", " ").title()) for v in RELATIONSHIPS]
disambiguation_source_choices = [(v, v) for v in DISAMBIGUATION_SOURCES]
property_type_choices = [(v, v) for v in PROPERTY_TYPES]
class ModelException(Exception):
"""Application model exception."""
pass
class NestedDict(dict):
"""Helper for traversing a nested dictionaries."""
def get(self, *keys, default=None):
"""To get the value from uploaded fields."""
d = self
for k in keys:
if d is default:
break
if not isinstance(d, dict):
return default
d = super(NestedDict, d).get(k, default)
return d
def get_orcid(self, *keys, default=None):
"""Get the ORCID iD value, sanize and validate it."""
return validate_orcid_id(self.get(*keys, default=default))
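# A usage sketch (assuming nested levels are themselves NestedDict instances,
# e.g. when built via json.loads(..., object_hook=NestedDict); keys and values
# here are illustrative):
#   >>> d = NestedDict({"invitees": NestedDict({"email": "jane@example.com"})})
#   >>> d.get("invitees", "email")
#   'jane@example.com'
#   >>> d.get("invitees", "orcid") is None
#   True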
def validate_orcid_id(value):
"""Sanitize and validate ORCID iD (both format and the check-sum)."""
if not value:
return
if "/" in value:
value = value.split("/")[-1]
if not ORCID_ID_REGEX.match(value):
raise ValueError(
f"Invalid ORCID iD {value}. It should be in the form of 'xxxx-xxxx-xxxx-xxxx' where x is a digit."
)
check = 0
for n in value:
if n == "-":
continue
check = (2 * check + int(10 if n == "X" else n)) % 11
if check != 1:
raise ValueError(
f"Invalid ORCID iD {value} checksum. Make sure you have entered correct ORCID iD."
)
return value
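# Validation sketch using ORCID's documented sample iD: the URL prefix is
# stripped off and the ISO 7064 mod 11-2 checksum is verified:
#   >>> validate_orcid_id("https://orcid.org/0000-0002-1825-0097")
#   '0000-0002-1825-0097'
# A malformed or checksum-failing value raises ValueError instead.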
def lazy_property(fn):
"""Make a property lazy-evaluated."""
attr_name = "_lazy_" + fn.__name__
@property
def _lazy_property(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
return _lazy_property
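# Caching sketch (Squares is a hypothetical class for illustration): the
# wrapped function runs once per instance and its result is memoised on the
# instance under "_lazy_<name>":
#   class Squares:
#       def __init__(self, n):
#           self.n = n
#       @lazy_property
#       def value(self):
#           return self.n ** 2  # evaluated on first access only
#   >>> s = Squares(3)
#   >>> s.value
#   9
#   >>> s._lazy_value  # cached attribute set by the first access
#   9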
def normalize_email(value):
"""Extact and normalize email value from the given raw data value, eg, 'Name <[email protected]>'."""
if value:
value = value.strip().lower()
return re.match(r"^(.*\<)?([^\>]*)\>?$", value).group(2) if "<" in value else value
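# Normalisation sketch (illustrative addresses): a display name and angle
# brackets are stripped and the result is lower-cased:
#   >>> normalize_email("Jane Doe <Jane.Doe@Example.COM>")
#   'jane.doe@example.com'
#   >>> normalize_email("  USER@EXAMPLE.COM ")
#   'user@example.com'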
class PartialDate(namedtuple("PartialDate", ["year", "month", "day"])):
"""Partial date (without month day or both month and month day."""
def as_orcid_dict(self):
"""Return ORCID dictionary representation of the partial date."""
if self.is_null:
return None
return dict(
(
(f, None if v is None else {"value": ("%04d" if f == "year" else "%02d") % v})
for (f, v) in zip(self._fields, self)
)
)
@property
def is_null(self):
"""Test if if the date is undefined."""
return self.year is None and self.month is None and self.day is None
@classmethod
def create(cls, value):
"""Create a partial date form ORCID dictionary representation or string.
>>> PartialDate.create({"year": {"value": "2003"}}).as_orcid_dict()
{'year': {'value': '2003'}, 'month': None, 'day': None}
>>> PartialDate.create({"year": {"value": "2003"}}).year
2003
>>> PartialDate.create("2003").year
2003
>>> PartialDate.create("2003-03")
2003-03
>>> PartialDate.create("2003-07-14")
2003-07-14
>>> PartialDate.create("2003/03")
2003-03
>>> PartialDate.create("2003/07/14")
2003-07-14
>>> PartialDate.create("03/2003")
2003-03
>>> PartialDate.create("14/07/2003")
2003-07-14
"""
if value is None or value == {}:
return None
if isinstance(value, str):
match = PARTIAL_DATE_REGEX.search(value)
if not match:
raise ModelException(f"Wrong partial date value '{value}'")
value0 = match[0]
for sep in ["/", "."]:
if sep in value0:
parts = value0.split(sep)
return cls(*[int(v) for v in (parts[::-1] if len(parts[-1]) > 2 else parts)])
return cls(*[int(v) for v in value0.split("-")])
return cls(
**{k: int(v.get("value")) if v and v.get("value") else None for k, v in value.items()}
)
def as_datetime(self):
"""Get 'datetime' data representation."""
return datetime(self.year, self.month, self.day)
def __str__(self):
"""Get string representation."""
if self.year is None:
return ""
else:
res = "%04d" % int(self.year)
if self.month:
res += "-%02d" % int(self.month)
return res + "-%02d" % int(self.day) if self.day else res
PartialDate.__new__.__defaults__ = (None,) * len(PartialDate._fields)
class OrcidIdField(FixedCharField):
"""ORCID iD value DB field."""
def __init__(self, *args, **kwargs):
"""Initialize ORCID iD data field."""
if "verbose_name" not in kwargs:
kwargs["verbose_name"] = "ORCID iD"
if "max_length" not in kwargs:
kwargs["max_length"] = 19
super().__init__(*args, **kwargs)
# TODO: figure out where to place the value validation...
# def coerce(self, value):
# validate_orcid_id(value)
# return super().coerce(value)
class BooleanField(BooleanField_):
"""BooleanField extension to support inversion in queries."""
def NOT(self): # noqa: N802
"""Negate logical value in SQL."""
return self.__invert__()
class PartialDateField(Field):
"""Partial date custom DB data field mapped to varchar(10)."""
field_type = "varchar(10)"
def db_value(self, value):
"""Convert into partial ISO date textual representation: YYYY-**-**, YYYY-MM-**, or YYYY-MM-DD."""
if isinstance(value, str):
value = PartialDate.create(value)
if value is None or not value.year:
return None
res = "%04d" % int(value.year)
if value.month:
res += "-%02d" % int(value.month)
else:
return res + "-**-**"
return res + "-%02d" % int(value.day) if value.day else res + "-**"
def python_value(self, value):
"""Parse partial ISO date textual representation."""
if value is None:
return None
parts = [int(p) for p in value.split("-") if "*" not in p]
return PartialDate(**dict(zip_longest(("year", "month", "day",), parts)))
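# Encoding sketch: unknown parts of the date are stored as "**" placeholders
# and skipped again on the way back:
#   PartialDateField().db_value(PartialDate(2003))         -> "2003-**-**"
#   PartialDateField().db_value(PartialDate(2003, 7))      -> "2003-07-**"
#   PartialDateField().db_value(PartialDate(2003, 7, 14))  -> "2003-07-14"
#   PartialDateField().python_value("2003-07-**")          -> PartialDate(year=2003, month=7, day=None)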
class UUIDField(Field):
"""UUID field using build-in DBMS data type."""
field_type = "uuid"
def db_value(self, value):
"""Return DB representation."""
return (
value.hex
if isinstance(value, uuid.UUID)
else (value.replace("-", "") if "-" in value else value)
)
def python_value(self, value):
"""Return Python representation."""
return uuid.UUID(value)
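# Round-trip sketch: values are stored as 32-character hex strings (dashes
# stripped) and parsed back into uuid.UUID instances:
#   >>> f = UUIDField()
#   >>> f.db_value(uuid.UUID("12345678-1234-5678-1234-567812345678"))
#   '12345678123456781234567812345678'
#   >>> f.python_value("12345678123456781234567812345678")
#   UUID('12345678-1234-5678-1234-567812345678')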
class TaskType(IntEnum):
"""Enum used to represent Task type."""
NONE = 0
    AFFILIATION = 4  # Affiliation of employment/education
FUNDING = 1 # Funding
WORK = 2
PEER_REVIEW = 3
OTHER_ID = 5
PROPERTY = 8
RESOURCE = 9
SYNC = 11
def __eq__(self, other):
if isinstance(other, TaskType):
return self.value == other.value
elif isinstance(other, int):
return self.value == other
return self.name == other or self.name == getattr(other, "name", None)
def __hash__(self):
return hash(self.name)
@classmethod
def options(cls):
"""Get list of all types for UI dropown option list."""
return [(e, e.name.replace("_", " ").title()) for e in cls]
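# Equality sketch: the custom __eq__ above lets TaskType compare loosely with
# raw values, names, and other members:
#   TaskType.WORK == 2                  # True (by value)
#   TaskType.WORK == "WORK"             # True (by name)
#   TaskType.WORK == TaskType.FUNDING   # False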
class TaskTypeField(SmallIntegerField):
"""Partial date custom DB data field mapped to varchar(10)."""
def db_value(self, value):
"""Change enum value to small int."""
if value is None:
return None
try:
if isinstance(value, TaskType):
return value.value
elif isinstance(value, int):
return value
elif isinstance(value, str):
if str.isdigit(value):
return int(value)
return TaskType[value.upper()].value
else:
raise ValueError("Unknow TaskType: '%s'", value)
except:
app.logger.exception("Failed to coerce the TaskType value, choosing NULL.")
return None
def python_value(self, value):
"""Parse partial ISO date textual representation."""
if value is None:
return None
try:
return TaskType(value)
        except Exception:
app.logger.exception(f"Failed to map DB value {value} to TaskType, choosing None.")
return None
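# Coercion sketch for db_value: members, ints, digit strings, and
# (case-insensitive) names all collapse onto the stored small integer:
#   TaskTypeField().db_value(TaskType.WORK)  -> 2
#   TaskTypeField().db_value("2")            -> 2
#   TaskTypeField().db_value("work")         -> 2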
class Role(IntFlag):
"""
Enum used to represent user role.
    The model provides multi-role support by representing role sets as bitmaps.
"""
NONE = 0 # NONE
SUPERUSER = 1 # SuperUser
ADMIN = 2 # Admin
RESEARCHER = 4 # Researcher
TECHNICAL = 8 # Technical contact
ANY = 255 # ANY
def __eq__(self, other):
if isinstance(other, Role):
return self.value == other.value
return self.name == other or self.name == getattr(other, "name", None)
def __hash__(self):
return hash(self.name)
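# Bitmap sketch: role sets combine with bitwise OR and are tested by masking,
# e.g. an organisation admin who is also the technical contact:
#   roles = Role.ADMIN | Role.TECHNICAL   # stored as the integer 10
#   bool(roles & Role.ADMIN)              # True
#   bool(roles & Role.SUPERUSER)          # False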
class Affiliation(IntFlag):
"""
Enum used to represent user affiliation (type) to the organisation.
    The model provides multiple-affiliation support by representing affiliation sets as bitmaps.
"""
NONE = 0 # NONE
EDU = 1 # Education
EMP = 2 # Employment
DST = 4 # Distinction
POS = 8 # Invited Position
QUA = 16 # Qualification
MEM = 32 # Membership
SER = 64 # Service
def __eq__(self, other):
if isinstance(other, Affiliation):
return self.value == other.value
return self.name == other or self.name == getattr(other, "name", None)
def __hash__(self):
return hash(self.name)
def __str__(self):
return ", ".join(
{
self.EDU: "Education",
self.EMP: "Employment",
self.DST: "Distinction",
self.POS: "Invited-Position",
self.QUA: "Qualification",
self.MEM: "Membership",
self.SER: "Service",
}[a]
for a in Affiliation
if a & self
)
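# Rendering sketch: a combined affiliation bitmap renders as a comma-separated
# label list via __str__ above:
#   str(Affiliation.EDU | Affiliation.EMP)  ->  "Education, Employment"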
class BaseModel(Model):
"""Encapsulate common bits and pieces of the model classes."""
def field_is_updated(self, field_name):
"""Test if field is 'dirty'."""
return any(field_name == f.name for f in self.dirty_fields)
def save(self, *args, **kwargs):
"""Consistency validation and saving."""
if self.is_dirty() and hasattr(self, "task") and self.task:
self.task.updated_at = datetime.utcnow()
self.task.save()
if self.is_dirty() and getattr(self, "email", False) and self.field_is_updated("email"):
self.email = self.email.lower()
return super().save(*args, **kwargs)
def add_status_line(self, line):
"""Add a text line to the status for logging processing progress."""
ts = datetime.utcnow().isoformat(timespec="seconds")
self.status = (self.status + "\n" if self.status else "") + ts + ": " + line
@classmethod
def get(cls, *query, **kwargs):
"""Get a single model instance."""
if query and not kwargs and len(query) == 1 and isinstance(query[0], (int, str,)):
return super().get(id=query[0])
elif not query and not kwargs:
return cls.select().limit(1).first()
return super().get(*query, **kwargs)
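    # Lookup sketch (illustrative, assuming a User model): get() accepts a bare
    # id or string key, a peewee query expression, or nothing at all:
    #   User.get(42)                    # by primary key
    #   User.get(User.email == "...")   # by query expression
    #   User.get()                      # first row of the table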
@classmethod
def last(cls):
"""Get last inserted entry."""
return cls.select().order_by(cls.id.desc()).limit(1).first()
@classmethod
def model_class_name(cls):
"""Get the class name of the model."""
return cls._meta.name
@classmethod
def underscore_name(cls):
"""Get the class underscore name of the model."""
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", cls.__name__)
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
def __to_dashes(self, o):
"""Replace '_' with '-' in the dict keys."""
if isinstance(o, dict):
return {k.replace("_", "-"): self.__to_dashes(v) for k, v in o.items()}
return o
def to_dict(
self,
to_dashes=False,
exclude_nulls=False,
recurse=True,
backrefs=False,
only=None,
exclude=None,
seen=None,
extra_attrs=None,
fields_from_query=None,
max_depth=None,
):
"""Get dictionary representation of the model."""
o = model_to_dict(
self,
recurse=recurse,
backrefs=backrefs,
only=only,
exclude=exclude,
seen=seen,
extra_attrs=extra_attrs,
fields_from_query=fields_from_query,
max_depth=max_depth,
)
if exclude_nulls:
o = {k: v for (k, v) in o.items() if v is not None}
for k, v in o.items():
if isinstance(v, PartialDate):
o[k] = str(v)
elif k == "task_type":
o[k] = v.name
if to_dashes:
return self.__to_dashes(o)
return o
def has_field(self, field_name):
"""Check if the model has a field."""
return field_name in self._meta.fields
class Meta: # noqa: D101,D106
database = db
only_save_dirty = True
legacy_table_names = False
# class ModelDeferredRelation(DeferredRelation):
# """Fixed DefferedRelation to allow inheritance and mixins."""
# def set_model(self, rel_model):
# """Include model in the generated "backref" to make it unique."""
# for model, field, name in self.fields:
# if isinstance(field, ForeignKeyField) and not field._backref:
# field._backref = "%s_%s_set" % (model.model_class_name(), name)
# super().set_model(rel_model)
# User = ModelDeferredRelation()
class AuditedModel(BaseModel):
"""Mixing for getting data necessary for data change audit trail maintenance."""
created_at = DateTimeField(default=datetime.utcnow)
updated_at = DateTimeField(null=True, default=None)
is_deleted = BooleanField(null=True, default=False)
# created_by = ForeignKeyField(User, on_delete="SET NULL", null=True, backref='+')
# updated_by = ForeignKeyField(User, on_delete="SET NULL", null=True, backref='+')
# created_by = IntegerField(null=True, index=True)
# updated_by = IntegerField(null=True, index=True)
created_by = DeferredForeignKey("User", on_delete="SET NULL", null=True, backref="+")
updated_by = DeferredForeignKey("User", on_delete="SET NULL", null=True, backref="+")
def save(self, *args, **kwargs): # noqa: D102
if self.is_dirty() and self._dirty != {"orcid_updated_at"}:
self.updated_at = datetime.utcnow()
if current_user and hasattr(current_user, "id"):
if self.created_by:
self.updated_by_id = current_user.id
elif hasattr(self, "created_by"):
self.created_by_id = current_user.id
return super().save(*args, **kwargs)
def delete_instance(self, *args, **kwargs): # noqa: D102
"""Mark the entry id_deleted and save (with the link to the user
that invoked the deletion) for audit trail.
"""
self.is_deleted = True
self.save()
return super().delete_instance(*args, **kwargs)
class File(BaseModel):
"""Uploaded image files."""
filename = CharField(max_length=100)
data = BlobField()
mimetype = CharField(max_length=30, column_name="mime_type")
token = FixedCharField(max_length=8, unique=True, default=lambda: secrets.token_urlsafe(8)[:8])
class Meta: # noqa: D101,D106
table_alias = "f"
class Organisation(AuditedModel):
"""Research organisation."""
country_choices = [(c.alpha_2, c.name) for c in countries]
country_choices.sort(key=lambda e: e[1])
country_choices.insert(0, ("", "Country"))
name = CharField(max_length=100, unique=True, null=True)
tuakiri_name = CharField(max_length=80, unique=True, null=True)
if ENV != "prod":
orcid_client_id = CharField(max_length=80, null=True)
orcid_secret = CharField(max_length=80, null=True)
else: # pragma: no cover
orcid_client_id = CharField(max_length=80, unique=True, null=True)
orcid_secret = CharField(max_length=80, unique=True, null=True)
confirmed = BooleanField(default=False)
city = CharField(null=True)
region = CharField(null=True, verbose_name="State/Region", max_length=100)
country = CharField(null=True, choices=country_choices, default=DEFAULT_COUNTRY)
disambiguated_id = CharField(null=True)
disambiguation_source = CharField(null=True, choices=disambiguation_source_choices)
is_email_sent = BooleanField(default=False)
tech_contact = DeferredForeignKey(
"User",
backref="tech_contact_of",
on_delete="SET NULL",
null=True,
help_text="Organisation technical contact",
)
# created_by = DeferredForeignKey("User", on_delete="SET NULL", null=True)
# updated_by = DeferredForeignKey("User", on_delete="SET NULL", null=True)
api_credentials_requested_at = DateTimeField(
null=True,
help_text="The time stamp when the user clicked on the button to register client API.",
)
api_credentials_entered_at = DateTimeField(
null=True, help_text="The time stamp when the user entered API Client ID and secret."
)
can_use_api = BooleanField(null=True, help_text="The organisation can access ORCID Hub API.")
logo = ForeignKeyField(
File, on_delete="CASCADE", null=True, help_text="The logo of the organisation"
)
email_template = TextField(null=True)
email_template_enabled = BooleanField(null=True, default=False)
webhook_enabled = BooleanField(default=False, null=True)
webhook_url = CharField(max_length=100, null=True)
webhook_append_orcid = BooleanField(
null=True,
verbose_name="Append ORCID iD",
help_text="Append the ORCID iD of the user the Webhook URL",
)
webhook_apikey = CharField(null=True, max_length=20)
email_notifications_enabled = BooleanField(default=False, null=True)
notification_email = CharField(
max_length=100, null=True, verbose_name="Notification Email Address"
)
@property
def invitation_sent_at(self):
"""Get the timestamp of the most recent invitation sent to the technical contact."""
row = (
self.org_invitations.select(fn.MAX(OrgInvitation.created_at).alias("last_sent_at"))
.where(OrgInvitation.invitee_id == self.tech_contact_id)
.first()
)
if row:
return row.last_sent_at
@property
def invitation_confirmed_at(self):
"""Get the timestamp when the invitation link was opened."""
row = (
self.org_invitations.select(
fn.MAX(OrgInvitation.created_at).alias("last_confirmed_at")
)
.where(OrgInvitation.invitee_id == self.tech_contact_id)
.where(OrgInvitation.confirmed_at.is_null(False))
.first()
)
if row:
return row.last_confirmed_at
@property
def users(self):
"""Get organisation's user query."""
return (
User.select().join(UserOrg, on=(UserOrg.user_id == User.id)).where(UserOrg.org == self)
)
@property
def admins(self):
"""Get organisation's administrator query."""
return self.users.where(UserOrg.is_admin)
def __str__(self):
return self.name or self.tuakiri_name
def save(self, *args, **kwargs):
"""Handle data consistency validation and saving."""
if self.is_dirty():
if self.name is None:
self.name = self.tuakiri_name
if self.field_is_updated("tech_contact") and self.tech_contact:
if not self.tech_contact.has_role(Role.TECHNICAL):
self.tech_contact.roles |= Role.TECHNICAL
self.tech_contact.save()
app.logger.info(f"Added TECHNICAL role to user {self.tech_contact}")
super().save(*args, **kwargs)
class Meta: # noqa: D101,D106
table_alias = "o"
class User(AuditedModel, UserMixin):
"""
    ORCID Hub user.
    A generic user type covering researchers, organisation administrators, hub administrators, etc.
"""
name = CharField(max_length=64, null=True)
first_name = CharField(null=True)
last_name = CharField(null=True)
email = CharField(max_length=120, unique=True, null=True, verbose_name="Email Address")
eppn = CharField(max_length=120, unique=True, null=True, verbose_name="EPPN")
orcid = OrcidIdField(null=True, help_text="User's ORCID iD")
confirmed = BooleanField(default=False)
# Role bit-map:
roles = SmallIntegerField(default=0)
is_locked = BooleanField(default=False)
webhook_enabled = BooleanField(default=False, null=True)
orcid_updated_at = DateTimeField(null=True, default=None)
# TODO: many-to-many
# NB! Deprecated!
# TODO: we still need to remember the organisation that last authenticated the user
organisation = ForeignKeyField(
Organisation, backref="members", on_delete="SET NULL", null=True
)
created_by = ForeignKeyField("self", on_delete="SET NULL", null=True, backref="+")
updated_by = ForeignKeyField("self", on_delete="SET NULL", null=True, backref="+")
def __str__(self):
if self.name and (self.eppn or self.email):
return f"{self.name} ({self.email or self.eppn})"
return self.name or self.email or self.orcid or super().__str__()
@property
def full_name(self):
"""Full name of the user"""
value = self.first_name or ''
if value:
value += " "
value += self.last_name or ''
if not value:
value = self.name or ''
return value
@property
def full_name_with_email(self):
"""Full name with the email address of the user"""
value = self.full_name
if value:
value += " "
return f"{value}({self.email or self.eppn})"
@property
def username(self):
"""Usename for comlying with Flask-Login API"""
return self.orcid or self.email
@property
def organisations(self):
"""Get all linked to the user organisation query."""
return (
Organisation.select(
Organisation,
(Organisation.tech_contact_id == self.id).alias("is_tech_contact"),
((UserOrg.is_admin.is_null(False)) & (UserOrg.is_admin)).alias("is_admin"),
)
.join(UserOrg, on=(UserOrg.org_id == Organisation.id))
.where(UserOrg.user_id == self.id)
)
@lazy_property
@cache.memoize(50)
def org_links(self):
"""Get all user organisation linked directly and indirectly."""
if self.orcid:
q = (
UserOrg.select()
.join(
User,
on=(
(User.id == UserOrg.user_id)
& ((User.email == self.email) | (User.orcid == self.orcid))
),
)
.where(
(UserOrg.user_id == self.id)
| (User.email == self.email)
| (User.orcid == self.orcid)
)
)
else:
q = self.userorg_set
return [
r
for r in q.select(
UserOrg,
Organisation.name.alias("org_name"),
(Organisation.id == self.organisation_id).alias("current_org"),
)
.join(Organisation, on=(Organisation.id == UserOrg.org_id))
.order_by(Organisation.name)
.objects()
]
@property
def available_organisations(self):
"""Get all not yet linked to the user organisation query."""
return (
Organisation.select(Organisation)
.where(UserOrg.id.is_null())
.join(
UserOrg,
JOIN.LEFT_OUTER,
on=((UserOrg.org_id == Organisation.id) & (UserOrg.user_id == self.id)),
)
)
@property
def admin_for(self):
"""Get organisations the user is admin for (query)."""
return self.organisations.where(UserOrg.is_admin)
@property
def is_active(self):
"""Get 'is_active' based on confirmed for Flask-Login.
TODO: confirmed - user that email is confirmed either by IdP or by confirmation email
isn't the same as "is active".
"""
return self.confirmed
def has_role(self, role):
"""Return `True` if the user identifies with the specified role.
:param role: A role name, `Role` instance, or integer value.
"""
if isinstance(role, Role):
return bool(role & Role(self.roles))
elif isinstance(role, str):
try:
return bool(Role[role.upper()] & Role(self.roles))
except Exception:
                return False
elif isinstance(role, int):
return bool(role & self.roles)
else:
return False
@property
def is_superuser(self):
"""Test if the user is a HUB admin."""
return bool(self.roles & Role.SUPERUSER)
@is_superuser.setter
def is_superuser(self, value): # noqa: D401
"""Sets user as a HUB admin."""
if value:
self.roles |= Role.SUPERUSER.value
else:
self.roles &= ~Role.SUPERUSER.value
@property
def is_admin(self):
"""Test if the user belongs to the organisation admin."""
return bool(self.roles & Role.ADMIN)
def avatar(self, size=40, default="identicon"):
"""Return Gravatar service user avatar URL."""
# TODO: default gravatar image
# default = "https://www.example.com/default.jpg"
gravatar_url = (
"https://www.gravatar.com/avatar/" + md5(self.email.lower().encode()).hexdigest() + "?"
)
gravatar_url += urlencode({"d": default, "s": str(size)})
return gravatar_url
@property
def gravatar_profile_url(self):
"""Return Gravatar service user profile URL."""
return "https://www.gravatar.com/" + md5(self.email.lower().encode()).hexdigest()
@property
def affiliations(self):
"""Return affiliations with the current organisation."""
try:
user_org = UserOrg.get(user=self, org=self.organisation)
return Affiliation(user_org.affiliations)
except UserOrg.DoesNotExist:
return Affiliation.NONE
def is_tech_contact_of(self, org=None):
"""Indicate if the user is the technical contact of the organisation."""
if org is None:
org = self.organisation
return org and org.tech_contact and org.tech_contact_id == self.id
def is_admin_of(self, org=None):
"""Indicate if the user is the technical contact of the organisation."""
if org is None:
org = self.organisation
return (
org
and UserOrg.select()
.where(UserOrg.user == self, UserOrg.org == org, UserOrg.is_admin)
.exists()
)
@property
def uuid(self):
"""Generate UUID for the user based on the primary email."""
return uuid.uuid5(uuid.NAMESPACE_URL, "mailto:" + (self.email or self.eppn))
class Meta: # noqa: D101,D106
table_alias = "u"
class OrgInfo(BaseModel):
"""Preloaded organisation data."""
name = CharField(max_length=100, unique=True, help_text="Organisation name")
tuakiri_name = CharField(max_length=100, unique=True, null=True, help_text="TUAKIRI Name")
title = CharField(null=True, help_text="Contact Person Tile")
first_name = CharField(null=True, help_text="Contact Person's First Name")
last_name = CharField(null=True, help_text="Contact Person's Last Name")
role = CharField(null=True, help_text="Contact Person's Role")
email = CharField(null=True, help_text="Contact Person's Email Address")
phone = CharField(null=True, help_text="Contact Person's Phone")
is_public = BooleanField(
null=True, default=False, help_text="Permission to post contact information to WEB"
)
country = CharField(null=True, help_text="Country Code", default=DEFAULT_COUNTRY)
city = CharField(null=True, help_text="City of Home Campus")
disambiguated_id = CharField(
null=True, verbose_name="Identifier", help_text="Organisation disambiguated identifier"
)
disambiguation_source = CharField(
null=True,
verbose_name="Source",
help_text="Organisation disambiguated ID source",
choices=disambiguation_source_choices,
)
def __str__(self):
return self.name or self.disambiguated_id or super().__str__()
class Meta: # noqa: D101,D106
table_alias = "oi"
@classmethod
def load_from_csv(cls, source):
"""Load data from CSV file or a string."""
if isinstance(source, str):
source = StringIO(source, newline="")
reader = csv.reader(source)
header = next(reader)
assert len(header) >= 3, (
"Wrong number of fields. Expected at least 3 fields "
"(name, disambiguated organisation ID, and disambiguation source). "
"Read header: %s" % header
)
header_rexs = [
re.compile(ex, re.I)
for ex in (
"organisation|name",
"title",
r"first\s*(name)?",
r"last\s*(name)?",
"role",
"email",
"phone",
"public|permission to post to web",
r"country\s*(code)?",
"city",
"(common:)?disambiguated.*identifier",
"(common:)?disambiguation.*source",
r"tuakiri\s*(name)?",
)
]
def index(rex):
"""Return first header column index matching the given regex."""
for i, column in enumerate(header):
if rex.match(column):
return i
else:
return None
idxs = [index(rex) for rex in header_rexs]
def val(row, i, default=None):
if idxs[i] is None:
return default
else:
v = row[idxs[i]].strip()
return None if v == "" else v
for row in reader:
# skip empty lines:
            if not row or (len(row) == 1 and row[0].strip() == ""):
continue
name = val(row, 0)
oi, _ = cls.get_or_create(name=name)
oi.title = val(row, 1)
oi.first_name = val(row, 2)
oi.last_name = val(row, 3)
oi.role = val(row, 4)
oi.email = normalize_email(val(row, 5))
oi.phone = val(row, 6)
oi.is_public = val(row, 7) and val(row, 7).upper() == "YES"
oi.country = val(row, 8) or DEFAULT_COUNTRY
oi.city = val(row, 9)
oi.disambiguated_id = val(row, 10)
oi.disambiguation_source = val(row, 11)
oi.tuakiri_name = val(row, 12)
oi.save()
return reader.line_num - 1
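# Loading sketch (illustrative data; requires an application/database context).
# Column headers are matched against the regexes above, and the return value is
# the number of data rows read:
#   OrgInfo.load_from_csv(
#       "Organisation,Disambiguated Identifier,Disambiguation Source\n"
#       "University of Examples,123456,RINGGOLD\n"
#   )  # -> 1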
class OrgInvitation(AuditedModel):
"""Organisation invitation to on-board the Hub."""
invitee = ForeignKeyField(
User, on_delete="CASCADE", null=True, backref="received_org_invitations"
)
inviter = ForeignKeyField(
User, on_delete="SET NULL", null=True, backref="sent_org_invitations"
)
org = ForeignKeyField(Organisation, on_delete="SET NULL", verbose_name="Organisation")
email = TextField(
help_text="The email address the invitation was sent to.",
verbose_name="Invitee Email Address",
)
token = TextField(unique=True)
confirmed_at = DateTimeField(null=True)
tech_contact = BooleanField(
null=True,
help_text="The invitee is the technical contact of the organisation.",
verbose_name="Is Tech.contact",
)
url = CharField(null=True)
@property
def sent_at(self):
"""Get the time the invitation was sent."""
return self.created_at
class Meta: # noqa: D101,D106
table_alias = "oi"
class UserOrg(AuditedModel):
"""Linking object for many-to-many relationship."""
user = ForeignKeyField(User, on_delete="CASCADE", index=True, backref="user_orgs")
org = ForeignKeyField(
Organisation,
on_delete="CASCADE",
index=True,
verbose_name="Organisation",
backref="user_orgs",
)
is_admin = BooleanField(
null=True, default=False, help_text="User is an administrator for the organisation"
)
# Affiliation bit-map:
affiliations = SmallIntegerField(default=0, null=True, verbose_name="EDU Person Affiliations")
# TODO: the access token should be either here or in a separate list
# access_token = CharField(max_length=120, unique=True, null=True)
def save(self, *args, **kwargs):
"""Enforce foreign key constraints and consolidate user roles with the linked organisations.
Enforce foreign key constraints and consolidate user roles with the linked organisations
before saving data.
"""
if self.is_dirty():
# if self.field_is_updated("org"):
# self.org # just enforce re-querying
user = self.user
if self.is_admin != user.is_admin:
if (
self.is_admin
or UserOrg.select()
.where(
(UserOrg.user_id == self.user_id)
& (UserOrg.org_id != self.org_id)
& UserOrg.is_admin
)
.exists()
): # noqa: E125
user.roles |= Role.ADMIN
app.logger.info(f"Added ADMIN role to user {user}")
else:
user.roles &= ~Role.ADMIN
app.logger.info(f"Revoked ADMIN role from user {user}")
user.save()
return super().save(*args, **kwargs)
class Meta: # noqa: D101,D106
table_alias = "uo"
indexes = ((("user", "org"), True),)
class OrcidToken(AuditedModel):
"""For Keeping ORCID token in the table."""
user = ForeignKeyField(
User, null=True, index=True, backref="orcid_tokens", on_delete="CASCADE"
) # TODO: add validation for 3-legged authorization tokens
org = ForeignKeyField(
Organisation, index=True, verbose_name="Organisation", backref="orcid_tokens"
)
scopes = TextField(null=True)
access_token = CharField(max_length=36, unique=True, null=True)
issue_time = DateTimeField(default=datetime.utcnow)
refresh_token = CharField(max_length=36, unique=True, null=True)
expires_in = IntegerField(default=0)
# created_by = ForeignKeyField(User, on_delete="SET NULL", null=True, backref='+')
# updated_by = ForeignKeyField(User, on_delete="SET NULL", null=True, backref='+')
class Meta: # noqa: D101,D106
table_alias = "ot"
class UserOrgAffiliation(AuditedModel):
"""For Keeping the information about the affiliation."""
user = ForeignKeyField(User, on_delete="CASCADE", backref="org_affiliations")
organisation = ForeignKeyField(
Organisation, index=True, on_delete="CASCADE", verbose_name="Organisation"
)
disambiguated_id = CharField(verbose_name="Disambiguation ORG Id", null=True)
disambiguation_source = CharField(
verbose_name="Disambiguation ORG Source", null=True, choices=disambiguation_source_choices
)
name = TextField(null=True, verbose_name="Institution/employer")
start_date = PartialDateField(null=True)
end_date = PartialDateField(null=True)
department_name = TextField(null=True)
department_city = TextField(null=True)
role_title = TextField(null=True)
put_code = IntegerField(null=True)
path = TextField(null=True)
# created_by = ForeignKeyField(User, on_delete="SET NULL", null=True, backref='+')
# updated_by = ForeignKeyField(User, on_delete="SET NULL", null=True, backref='+')
class Meta: # noqa: D101,D106
table_name = "user_organisation_affiliation"
table_alias = "oua"
class OrcidApiCall(BaseModel):
"""ORCID API call audit entry."""
called_at = DateTimeField(default=datetime.utcnow)
user = ForeignKeyField(User, null=True, on_delete="SET NULL", backref="orcid_api_calls")
method = CharField(max_length=6)
url = CharField()
query_params = TextField(null=True)
body = TextField(null=True)
put_code = IntegerField(null=True)
response = TextField(null=True)
response_time_ms = IntegerField(null=True)
status = IntegerField(null=True)
def set_response_time(self):
"""Calculate and set the response time assuming the call finished right now."""
        self.response_time_ms = round((datetime.utcnow() - self.called_at).total_seconds() * 1000)
class Meta: # noqa: D101,D106
table_alias = "oac"
class OrcidAuthorizeCall(BaseModel):
"""ORCID Authorize call audit entry."""
called_at = DateTimeField(default=datetime.utcnow)
user = ForeignKeyField(
User, null=True, default=None, on_delete="SET NULL", backref="orcid_auth_calls"
)
method = TextField(null=True, default="GET")
url = TextField(null=True)
token = TextField(null=True)
state = TextField(null=True)
response_time_ms = IntegerField(null=True)
class Meta: # noqa: D101,D106
table_alias = "oac"
class Task(AuditedModel):
"""Batch processing task created form CSV/TSV file."""
org = ForeignKeyField(
Organisation, index=True, verbose_name="Organisation", on_delete="CASCADE", backref="tasks"
)
completed_at = DateTimeField(null=True)
filename = TextField(null=True)
is_raw = BooleanField(null=True, default=False)
task_type = TaskTypeField(
default=TaskType.NONE, choices=[(tt.value, tt.name) for tt in TaskType if tt.value]
)
expires_at = DateTimeField(null=True)
expiry_email_sent_at = DateTimeField(null=True)
status = CharField(null=True, max_length=10, choices=[(v, v) for v in ["ACTIVE", "RESET"]])
def __str__(self):
return (
"Synchronization task"
if self.task_type == TaskType.SYNC
else (
self.filename
or f"{TaskType(self.task_type).name.capitalize()} record processing task #{self.id}"
)
)
@property
def is_expiry_email_sent(self):
"""Test if the expiry email is sent ot not."""
return bool(self.expiry_email_sent_at)
@lazy_property
def record_count(self):
"""Get count of the loaded recoreds."""
return 0 if self.records is None or not self.task_type else self.records.count()
@property
def record_model(self):
"""Get record model class."""
return self.records.model
@lazy_property
def records(self):
"""Get all task record query."""
if not self.task_type or self.task_type in [TaskType.SYNC, TaskType.NONE]:
return None
if self.is_raw:
return MessageRecord.select().where(MessageRecord.task == self)
return getattr(self, self.task_type.name.lower() + "_records")
@lazy_property
def completed_count(self):
"""Get number of completed rows."""
return self.records.where(self.record_model.processed_at.is_null(False)).count()
@lazy_property
def completed_percent(self):
"""Get the percentage of completed rows."""
return (100.0 * self.completed_count) / self.record_count if self.record_count else 0.0
@property
def error_count(self):
"""Get error count encountered during processing batch task."""
return self.records.where(self.record_model.status ** "%error%").count()
@property
def is_ready(self):
"""Indicate that the task is 'ready to go':
- the task is "ACTIVE"
or
- there is at least one activated record.
"""
return self.state == "ACTIVE" or self.records.whhere(self.record_model.is_active).exists()
def to_dict(self, to_dashes=True, recurse=None, exclude=None, include_records=None, only=None):
"""Create a dict represenatation of the task suitable for serialization into JSON or YAML."""
# TODO: expand for the other types of the tasks
task_dict = super().to_dict(
recurse=False,
to_dashes=to_dashes,
exclude=exclude,
only=only
or [
Task.id,
Task.filename,
Task.task_type,
Task.created_at,
Task.updated_at,
Task.status,
Task.is_raw,
],
)
# TODO: refactor for funding task to get records here not in API or export
if (recurse or include_records or recurse is None) and self.task_type not in [
TaskType.FUNDING,
TaskType.SYNC,
]:
if self.task_type == TaskType.AFFILIATION:
task_dict["records"] = [
r.to_dict(
external_id=[
ae.to_export_dict()
for ae in AffiliationExternalId.select().where(
AffiliationExternalId.record_id == r.id
)
],
to_dashes=to_dashes,
recurse=recurse,
exclude=[self.record_model.task],
)
for r in self.records
]
else:
task_dict["records"] = [
r.to_dict(
to_dashes=to_dashes, recurse=recurse, exclude=[self.record_model.task]
)
for r in self.records
]
return task_dict
def to_export_dict(self, include_records=True):
"""Create a dictionary representation for export."""
if self.task_type == TaskType.AFFILIATION:
task_dict = self.to_dict(recurse=include_records, include_records=include_records)
else:
task_dict = self.to_dict(
recurse=False,
to_dashes=True,
include_records=False,
exclude=[Task.created_by, Task.updated_by, Task.org, Task.task_type],
)
task_dict["task-type"] = self.task_type.name
if include_records:
task_dict["records"] = [r.to_export_dict() for r in self.records]
return task_dict
class Meta: # noqa: D101,D106
table_alias = "t"
class Log(BaseModel):
"""Task log entries."""
created_at = DateTimeField(default=datetime.utcnow)
created_by = ForeignKeyField(User, on_delete="SET NULL", null=True, backref="+")
task = ForeignKeyField(
Task,
on_delete="CASCADE",
null=True,
index=True,
verbose_name="Task",
backref="log_entries",
)
message = TextField(null=True)
class Meta: # noqa: D101,D106
table_alias = "l"
def save(self, *args, **kwargs): # noqa: D102
if self.is_dirty():
if current_user and hasattr(current_user, "id"):
if hasattr(self, "created_by"):
self.created_by_id = current_user.id
return super().save(*args, **kwargs)
class UserInvitation(AuditedModel):
"""Organisation invitation to on-board the Hub."""
invitee = ForeignKeyField(
User, on_delete="CASCADE", null=True, backref="received_user_invitations"
)
inviter = ForeignKeyField(
User, on_delete="SET NULL", null=True, backref="sent_user_invitations"
)
org = ForeignKeyField(
Organisation,
on_delete="CASCADE",
null=True,
verbose_name="Organisation",
backref="user_invitations",
)
task = ForeignKeyField(
Task,
on_delete="CASCADE",
null=True,
index=True,
verbose_name="Task",
backref="user_invitations",
)
email = CharField(
index=True,
null=True,
max_length=80,
help_text="The email address the invitation was sent to.",
)
first_name = TextField(null=True)
last_name = TextField(null=True)
orcid = OrcidIdField(null=True)
department = TextField(verbose_name="Campus/Department", null=True)
organisation = TextField(verbose_name="Organisation Name", null=True)
city = TextField(null=True)
region = TextField(verbose_name="State/Region", null=True)
country = CharField(verbose_name="Country", max_length=2, null=True)
course_or_role = TextField(verbose_name="Course or Job title", null=True)
start_date = PartialDateField(verbose_name="Start date", null=True)
end_date = PartialDateField(verbose_name="End date (leave blank if current)", null=True)
affiliations = SmallIntegerField(
verbose_name="User affiliations", null=True, default=Affiliation.NONE
)
disambiguated_id = TextField(verbose_name="Disambiguation ORG Id", null=True)
disambiguation_source = TextField(
verbose_name="Disambiguation ORG Source", null=True, choices=disambiguation_source_choices
)
token = TextField(unique=True)
confirmed_at = DateTimeField(null=True)
is_person_update_invite = BooleanField(
default=False,
verbose_name="'Person/Update' Invitation",
help_text="Invitation to grant 'Person/Update' scope",
)
@property
def sent_at(self):
"""Get the time the invitation was sent."""
return self.created_at
class Meta: # noqa: D101,D106
table_alias = "ui"
class RecordModel(BaseModel):
"""Common model bits of the task records."""
def key_name(self, name):
"""Map key-name to a model class key name for export."""
return name
@classmethod
def get_field_regxes(cls):
"""Return map of compiled field name regex to the model fields."""
        return {f: re.compile(e, re.I) for (f, e) in cls._regex_field_map}
@property
def invitee_model(self):
"""Get invitee model class."""
if hasattr(self, "invitees"):
return self.invitees.model
def to_export_dict(self):
"""Map the common record parts to dict for export into JSON/YAML."""
org = self.task.org
d = {"type": self.type} if self.has_field("type") else {}
if hasattr(self, "org_name"):
d["organization"] = {
"disambiguated-organization": {
"disambiguated-organization-identifier": self.disambiguated_id
or org.disambiguated_id,
"disambiguation-source": self.disambiguation_source
or org.disambiguation_source,
},
"name": self.org_name or org.name,
"address": {
"city": self.city or org.city,
"region": self.region or org.region,
"country": self.country or org.country,
},
}
if self.has_field("title"):
d["title"] = {
"title": {"value": self.title},
"translated-title": {
"value": self.translated_title,
"language-code": self.translated_title_language_code,
},
}
if hasattr(self, "invitees") and self.invitees:
d["invitees"] = [r.to_export_dict() for r in self.invitees]
if hasattr(self, "contributors") and self.contributors:
d["contributors"] = {"contributor": [r.to_export_dict() for r in self.contributors]}
if hasattr(self, "external_ids") and self.external_ids:
d[self.key_name("external-ids")] = {
"external-id": [r.to_export_dict() for r in self.external_ids]
}
if hasattr(self, "start_date") and self.start_date:
d["start-date"] = self.start_date.as_orcid_dict()
if hasattr(self, "end_date") and self.end_date:
d["end-date"] = self.end_date.as_orcid_dict()
return d
def orcid_external_id(self, type=None, value=None, url=None, relationship=None):
"""Get the object rendering into an ORCID API 3.x external-id."""
if (not type and not value) and (not self.external_id_type or not self.external_id_value):
return
ei = {
"external-id-type": type or self.external_id_type,
"external-id-value": value or self.external_id_value,
}
if self.external_id_relationship:
ei["external-id-relationship"] = relationship or self.external_id_relationship
if self.external_id_url:
ei["external-id-url"] = {"value": url or self.external_id_url}
return ei
class GroupIdRecord(RecordModel):
"""GroupID records."""
type_choices = [
("publisher", "publisher"),
("institution", "institution"),
("journal", "journal"),
("conference", "conference"),
("newspaper", "newspaper"),
("newsletter", "newsletter"),
("magazine", "magazine"),
("peer-review service", "peer-review service"),
]
type_choices.sort(key=lambda e: e[1])
type_choices.insert(0, ("", ""))
put_code = IntegerField(null=True)
processed_at = DateTimeField(null=True)
status = TextField(null=True, help_text="Record processing status.")
name = CharField(
max_length=120,
help_text="The name of the group. This can be the name of a journal (Journal of Criminal Justice),"
" a publisher (Society of Criminal Justice), or non-specific description (Legal Journal)"
" as required.",
)
group_id = CharField(
max_length=120,
help_text="The group's identifier, formatted as type:identifier, e.g. ringgold:12345678. "
"This can be as specific (e.g. the journal's ISSN) or vague as required. "
"Valid types include: ringgold:|issn:|orcid-generated:|fundref:|publons:",
)
description = CharField(
max_length=1000,
help_text="A brief textual description of the group. "
"This can be as specific or vague as required.",
)
type = CharField(
max_length=80,
choices=type_choices,
help_text="One of the specified types: publisher; institution; journal; conference; newspaper; "
"newsletter; magazine; peer-review service.",
)
organisation = ForeignKeyField(
Organisation, backref="group_id_records", on_delete="CASCADE", null=True
)
class Meta: # noqa: D101,D106
table_alias = "gid"
class AffiliationRecord(RecordModel):
"""Affiliation record loaded from CSV file for batch processing."""
is_active = BooleanField(
default=False, help_text="The record is marked 'active' for batch processing", null=True
)
task = ForeignKeyField(Task, backref="affiliation_records", on_delete="CASCADE")
put_code = IntegerField(null=True)
local_id = CharField(
max_length=100,
null=True,
verbose_name="Local ID",
help_text="Record identifier used in the data source system.",
)
processed_at = DateTimeField(null=True)
status = TextField(null=True, help_text="Record processing status.")
first_name = CharField(null=True, max_length=120)
last_name = CharField(null=True, max_length=120)
email = CharField(max_length=80, null=True)
orcid = OrcidIdField(null=True)
organisation = CharField(null=True, index=True, max_length=200)
affiliation_type = CharField(
null=True,
max_length=20,
choices=[(v, v.replace("-", " ").title()) for v in AFFILIATION_TYPES],
)
role = CharField(null=True, verbose_name="Role/Course", max_length=100)
department = CharField(null=True, max_length=200)
start_date = PartialDateField(null=True)
end_date = PartialDateField(null=True)
city = CharField(null=True, max_length=200)
region = CharField(null=True, verbose_name="State/Region", max_length=100)
country = CharField(null=True, max_length=2, choices=country_choices)
disambiguated_id = CharField(null=True, verbose_name="Disambiguated Organization Identifier")
disambiguation_source = CharField(
null=True, max_length=100, choices=disambiguation_source_choices
)
delete_record = BooleanField(null=True)
visibility = CharField(null=True, max_length=100, choices=visibility_choices)
url = CharField(max_length=200, null=True)
display_index = CharField(max_length=100, null=True)
class Meta: # noqa: D101,D106
table_alias = "ar"
_regex_field_map = [
("first_name", r"first\s*(name)?"),
("last_name", r"last\s*(name)?"),
("email", "email"),
("organisation", "organisation|^name"),
("department", "campus|department"),
("city", "city"),
("region", "state|region"),
("role", "course|title|role"),
("start_date", r"start\s*(date)?"),
("end_date", r"end\s*(date)?"),
("affiliation_type", r"affiliation(s)?\s*(type)?|student|staff"),
("country", "country"),
("disambiguated_id", r"disambiguat.*id"),
("disambiguation_source", r"disambiguat.*source"),
("put_code", r"put|code"),
("orcid", "orcid.*"),
("local_id", "local.*|.*identifier"),
]
    def to_dict(self, external_id=None, *args, **kwargs):
"""Create a dict and add external ids in affiliation records."""
rd = super().to_dict(*args, **kwargs)
if external_id:
rd["external-id"] = external_id
return rd
@classmethod
def load(
cls,
data,
task=None,
task_id=None,
filename=None,
override=True,
skip_schema_validation=False,
org=None,
):
"""Load afffiliation record task form JSON/YAML. Data shoud be already deserialize."""
if isinstance(data, str):
            data = json.loads(data) if filename.lower().endswith(".json") else yaml.safe_load(data)
if org is None:
org = current_user.organisation if current_user else None
if not skip_schema_validation:
jsonschema.validate(data, schemas.affiliation_task)
if not task and task_id:
task = Task.select().where(Task.id == task_id).first()
if not task and "id" in data:
task_id = int(data["id"])
task = Task.select().where(Task.id == task_id).first()
with db.atomic() as transaction:
try:
if not task:
filename = (
filename
or data.get("filename")
or datetime.utcnow().isoformat(timespec="seconds")
)
task = Task.create(org=org, filename=filename, task_type=TaskType.AFFILIATION)
elif override:
AffiliationRecord.delete().where(AffiliationRecord.task == task).execute()
record_fields = AffiliationRecord._meta.fields.keys()
is_enqueue = False
for r in data.get("records"):
if "id" in r and not override:
rec = AffiliationRecord.get(int(r["id"]))
else:
rec = AffiliationRecord(task=task)
for k, v in r.items():
if k == "id" or k.startswith(("external", "status", "processed")):
continue
k = k.replace("-", "_")
if k == "is_active" and v:
is_enqueue = v
if k in ["disambiguation_source"] and v:
v = v.upper()
if k in ["visibility", "affiliation_type"] and v:
v = v.replace("_", "-").lower()
if k in record_fields and rec.__data__.get(k) != v:
rec.__data__[k] = PartialDate.create(v) if k.endswith("date") else v
rec._dirty.add(k)
if rec.is_dirty():
validator = ModelValidator(rec)
if not validator.validate():
raise ModelException(f"Invalid record: {validator.errors}")
rec.save()
if r.get("external-id"):
for exi in r.get("external-id"):
ext_data = {
k.replace("-", "_").replace("external_id_", ""): v.lower()
if v
else None
for k, v in exi.items()
}
if ext_data.get("type") and ext_data.get("value"):
ext_id = AffiliationExternalId.create(record=rec, **ext_data)
                                ext_validator = ModelValidator(ext_id)
                                if not ext_validator.validate():
                                    raise ModelException(
                                        f"Invalid affiliation external-id: {ext_validator.errors}"
                                    )
ext_id.save()
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
            except Exception:
transaction.rollback()
app.logger.exception("Failed to load affiliation record task file.")
raise
return task
@classmethod
def load_from_csv(cls, source, filename=None, org=None):
"""Load affiliation record data from CSV/TSV file or a string."""
if isinstance(source, str):
source = StringIO(source, newline="")
reader = csv.reader(source)
header = next(reader)
if filename is None:
if hasattr(source, "name"):
filename = source.name
else:
filename = datetime.utcnow().isoformat(timespec="seconds")
if len(header) == 1 and "\t" in header[0]:
source.seek(0)
reader = csv.reader(source, delimiter="\t")
header = next(reader)
if len(header) < 2:
raise ModelException("Expected CSV or TSV format file.")
if len(header) < 3:
raise ModelException(
"Wrong number of fields. Expected at least 4 fields "
"(first name, last name, email address or another unique identifier, student/staff). "
f"Read header: {header}"
)
header_rexs = [
re.compile(ex, re.I)
for ex in [
r"first\s*(name)?",
r"last\s*(name)?",
"email",
"organisation|^name",
"campus|department",
"city",
"state|region",
"course|title|role",
r"start\s*(date)?",
r"end\s*(date)?",
r"affiliation(s)?\s*(type)?|student|staff",
"country",
r"disambiguat.*id",
r"disambiguat.*source",
r"put|code",
"orcid.*",
"local.*|.*identifier",
"delete(.*record)?",
r"(is)?\s*visib(bility|le)?",
r"url",
r"(display)?.*index",
r"(external)?\s*id(entifier)?\s+type$",
r"(external)?\s*id(entifier)?\s*(value)?$",
r"(external)?\s*id(entifier)?\s*url",
r"(external)?\s*id(entifier)?\s*rel(ationship)?",
r"(is)?\s*active$",
]
]
def index(rex):
"""Return first header column index matching the given regex."""
for i, column in enumerate(header):
if column and rex.match(column.strip()):
return i
else:
return None
idxs = [index(rex) for rex in header_rexs]
if all(idx is None for idx in idxs):
raise ModelException(f"Failed to map fields based on the header of the file: {header}")
if org is None:
org = current_user.organisation if current_user else None
def val(row, i, default=None):
if idxs[i] is None or idxs[i] >= len(row):
return default
else:
v = row[idxs[i]].strip()
return default if v == "" else v
with db.atomic() as transaction:
try:
task = Task.create(org=org, filename=filename, task_type=TaskType.AFFILIATION)
is_enqueue = False
for row_no, row in enumerate(reader):
# skip empty lines:
if len([item for item in row if item and item.strip()]) == 0:
continue
if len(row) == 1 and row[0].strip() == "":
continue
put_code = val(row, 14)
delete_record = val(row, 17)
delete_record = delete_record and delete_record.lower() in [
"y",
"yes",
"ok",
"delete",
"1",
]
if delete_record:
if not put_code:
raise ModelException(
f"Missing put-code. Cannot delete a record without put-code. "
f"#{row_no+2}: {row}. Header: {header}"
)
email = normalize_email(val(row, 2, ""))
orcid = validate_orcid_id(val(row, 15))
local_id = val(row, 16)
if not email and not orcid and local_id and validators.email(local_id):
# if email is missing and local ID is given as a valid email, use it:
email = local_id
# The uploaded country must be from ISO 3166-1 alpha-2
country = val(row, 11)
if country:
try:
country = countries.lookup(country).alpha_2
except Exception:
raise ModelException(
f" (Country must be 2 character from ISO 3166-1 alpha-2) in the row "
f"#{row_no+2}: {row}. Header: {header}"
)
if not delete_record and not (email or orcid):
raise ModelException(
f"Missing user identifier (email address or ORCID iD) in the row "
f"#{row_no+2}: {row}. Header: {header}"
)
if email and not validators.email(email):
raise ValueError(
f"Invalid email address '{email}' in the row #{row_no+2}: {row}"
)
affiliation_type = val(row, 10)
if affiliation_type:
affiliation_type = affiliation_type.replace("_", "-").lower()
if not delete_record and (
not affiliation_type or affiliation_type.lower() not in AFFILIATION_TYPES
):
raise ValueError(
f"Invalid affiliation type '{affiliation_type}' in the row #{row_no+2}: {row}. "
f"Expected values: {', '.join(at for at in AFFILIATION_TYPES)}."
)
first_name = val(row, 0)
last_name = val(row, 1)
if not delete_record and not (email or orcid):
raise ModelException(
"Wrong number of fields. Expected at least 4 fields "
"(first name, last name, email address or another unique identifier, "
f"student/staff): {row}"
)
disambiguation_source = val(row, 13)
if disambiguation_source:
disambiguation_source = disambiguation_source.upper()
visibility = val(row, 18)
if visibility:
visibility = visibility.replace("_", "-").lower()
is_active = val(row, 25, "").lower() in ["y", "yes", "1", "true"]
if is_active:
is_enqueue = is_active
af = cls(
task=task,
first_name=first_name,
last_name=last_name,
email=email,
organisation=val(row, 3),
department=val(row, 4),
city=val(row, 5),
region=val(row, 6),
role=val(row, 7),
start_date=PartialDate.create(val(row, 8)),
end_date=PartialDate.create(val(row, 9)),
affiliation_type=affiliation_type,
country=country,
disambiguated_id=val(row, 12),
disambiguation_source=disambiguation_source,
put_code=put_code,
orcid=orcid,
local_id=local_id,
delete_record=delete_record,
url=val(row, 19),
display_index=val(row, 20),
visibility=visibility,
is_active=is_active,
)
validator = ModelValidator(af)
if not validator.validate():
raise ModelException(f"Invalid record: {validator.errors}")
af.save()
external_id_type = val(row, 21, "").lower()
external_id_relationship = val(row, 24)
if external_id_relationship:
external_id_relationship = external_id_relationship.replace(
"_", "-"
).lower()
external_id_value = val(row, 22)
if external_id_type and external_id_value:
ae = AffiliationExternalId(
record=af,
type=external_id_type,
value=external_id_value,
url=val(row, 23),
relationship=external_id_relationship,
)
validator = ModelValidator(ae)
if not validator.validate():
raise ModelException(f"Invalid record: {validator.errors}")
ae.save()
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
except Exception:
transaction.rollback()
app.logger.exception("Failed to load affiliation file.")
raise
return task
class FundingRecord(RecordModel):
"""Funding record loaded from JSON file for batch processing."""
funding_type_choices = [(v, v.replace("-", " ").title()) for v in FUNDING_TYPES]
task = ForeignKeyField(Task, backref="funding_records", on_delete="CASCADE")
title = CharField(max_length=255)
translated_title = CharField(null=True, max_length=255)
translated_title_language_code = CharField(null=True, max_length=10, choices=language_choices)
type = CharField(max_length=255, choices=funding_type_choices)
organization_defined_type = CharField(null=True, max_length=255)
short_description = CharField(null=True, max_length=5000)
amount = CharField(null=True, max_length=255)
currency = CharField(null=True, max_length=3, choices=currency_choices)
start_date = PartialDateField(null=True)
end_date = PartialDateField(null=True)
org_name = CharField(null=True, max_length=255, verbose_name="Organisation Name")
city = CharField(null=True, max_length=255)
region = CharField(null=True, max_length=255)
country = CharField(null=True, max_length=255, choices=country_choices)
disambiguated_id = CharField(null=True)
disambiguation_source = CharField(
null=True, max_length=255, choices=disambiguation_source_choices
)
is_active = BooleanField(
default=False, help_text="The record is marked for batch processing", null=True
)
processed_at = DateTimeField(null=True)
url = CharField(max_length=200, null=True)
status = TextField(null=True, help_text="Record processing status.")
def to_export_dict(self):
"""Map the funding record to dict for export into JSON/YAML."""
d = super().to_export_dict()
d["amount"] = {
"currency-code": self.currency,
"value": self.amount,
}
return d
@classmethod
def load_from_csv(cls, source, filename=None, org=None):
"""Load data from CSV/TSV file or a string."""
if isinstance(source, str):
source = StringIO(source, newline="")
if filename is None:
filename = datetime.utcnow().isoformat(timespec="seconds")
reader = csv.reader(source)
header = next(reader)
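# If the header parsed as one tab-joined column, the file is TSV; re-read it with a tab delimiter: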
if len(header) == 1 and "\t" in header[0]:
source.seek(0)
reader = csv.reader(source, delimiter="\t")
header = next(reader)
if len(header) < 2:
raise ModelException("Expected CSV or TSV format file.")
header_rexs = [
re.compile(ex, re.I)
for ex in [
"title$",
r"translated\s+(title)?",
r"translat(ed)?(ion)?\s+(title)?\s*lang(uage)?.*(code)?",
"type$",
r"org(ani[sz]ation)?\s*(defined)?\s*type",
r"(short\s*|description\s*)+$",
"amount",
"currency",
r"start\s*(date)?",
r"end\s*(date)?",
r"(org(gani[zs]ation)?)?\s*name$",
"city",
"region|state",
"country",
r"disambiguated\s*(org(ani[zs]ation)?)?\s*id(entifier)?",
r"disambiguation\s+source$",
r"(is)?\s*active$",
r"orcid\s*(id)?$",
"email",
r"(external)?\s*id(entifier)?\s+type$",
r"((external)?\s*id(entifier)?\s+value|funding.*id)$",
r"(external)?\s*id(entifier)?\s*url",
r"(external)?\s*id(entifier)?\s*rel(ationship)?",
"put.*code",
r"(is)?\s*visib(bility|le)?",
r"first\s*(name)?",
r"(last|sur)\s*(name)?",
"local.*|.*identifier",
r"url",
]
]
def index(rex):
"""Return first header column index matching the given regex."""
for i, column in enumerate(header):
if rex.match(column.strip()):
return i
else:
return None
idxs = [index(rex) for rex in header_rexs]
if all(idx is None for idx in idxs):
raise ModelException(f"Failed to map fields based on the header of the file: {header}")
if org is None:
org = current_user.organisation if current_user else None
def val(row, i, default=None):
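"""Return the stripped value of the i-th mapped column, or the default if the column is absent or empty."""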
if len(idxs) <= i or idxs[i] is None or idxs[i] >= len(row):
return default
else:
v = row[idxs[i]].strip()
return default if v == "" else v
rows = []
cached_row = []
is_enqueue = False
for row_no, row in enumerate(reader):
# skip empty lines:
if len([item for item in row if item and item.strip()]) == 0:
continue
if len(row) == 1 and row[0].strip() == "":
continue
orcid, email = val(row, 17), normalize_email(val(row, 18, ""))
orcid = validate_orcid_id(orcid)
if email and not validators.email(email):
raise ValueError(f"Invalid email address '{email}' in the row #{row_no+2}: {row}")
visibility = val(row, 24)
if visibility:
visibility = visibility.replace("_", "-").lower()
invitee = dict(
identifier=val(row, 27),
email=email,
first_name=val(row, 25),
last_name=val(row, 26),
orcid=orcid,
put_code=val(row, 23),
visibility=visibility,
)
title = val(row, 0)
external_id_type = val(row, 19, "").lower()
external_id_value = val(row, 20)
external_id_relationship = val(row, 22, "").replace("_", "-").lower()
if external_id_type not in EXTERNAL_ID_TYPES:
raise ModelException(
f"Invalid External Id Type: '{external_id_type}', Use 'doi', 'issn' "
f"or one of the accepted types found here: https://pub.orcid.org/v3.0/identifiers"
)
if not external_id_value:
raise ModelException(
f"Invalid External Id Value or Funding Id: {external_id_value}, #{row_no+2}: {row}."
)
if not title:
raise ModelException(f"Title is mandatory, #{row_no+2}: {row}. Header: {header}")
if external_id_relationship not in RELATIONSHIPS:
raise ModelException(
f"Invalid External Id Relationship '{external_id_relationship}' as it is not one of the "
f"{RELATIONSHIPS}, #{row_no+2}: {row}."
)
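# Reuse the previous row when it describes the same funding (same title and external ID), so its invitees are grouped under one funding record: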
if (
cached_row
and title.lower() == val(cached_row, 0).lower()
and external_id_type.lower() == val(cached_row, 19).lower()
and external_id_value.lower() == val(cached_row, 20).lower()
and external_id_relationship.lower() == val(cached_row, 22).lower()
):
row = cached_row
else:
cached_row = row
is_active = val(row, 16, "").lower() in ["y", "yes", "1", "true"]
if is_active:
is_enqueue = is_active
funding_type = val(row, 3)
if not funding_type:
raise ModelException(
f"Funding type is mandatory, #{row_no+2}: {row}. Header: {header}"
)
else:
funding_type = funding_type.replace("_", "-").lower()
# The uploaded country must be from ISO 3166-1 alpha-2
country = val(row, 13)
if country:
try:
country = countries.lookup(country).alpha_2
except Exception:
raise ModelException(
f" (Country must be 2 character from ISO 3166-1 alpha-2) in the row "
f"#{row_no+2}: {row}. Header: {header}"
)
rows.append(
dict(
funding=dict(
title=title,
translated_title=val(row, 1),
translated_title_language_code=val(row, 2),
type=funding_type,
organization_defined_type=val(row, 4),
short_description=val(row, 5),
amount=val(row, 6),
currency=val(row, 7),
start_date=PartialDate.create(val(row, 8)),
end_date=PartialDate.create(val(row, 9)),
org_name=val(row, 10) or org.name,
city=val(row, 11) or org.city,
region=val(row, 12) or org.region,
country=country or org.country,
url=val(row, 28),
is_active=is_active,
disambiguated_id=val(row, 14) or org.disambiguated_id,
disambiguation_source=val(row, 15, "").upper()
or org.disambiguation_source,
),
invitee=invitee,
external_id=dict(
type=external_id_type,
value=external_id_value,
url=val(row, 21),
relationship=external_id_relationship,
),
)
)
with db.atomic() as transaction:
try:
task = Task.create(org=org, filename=filename, task_type=TaskType.FUNDING)
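# Rows describing the same funding are adjacent (see the row caching above), so groupby collapses them into one record with all of their external IDs and invitees: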
for funding, records in groupby(rows, key=lambda row: row["funding"].items()):
records = list(records)
fr = cls(task=task, **dict(funding))
validator = ModelValidator(fr)
if not validator.validate():
raise ModelException(f"Invalid record: {validator.errors}")
fr.save()
for external_id in set(
tuple(r["external_id"].items())
for r in records
if r["external_id"]["type"] and r["external_id"]["value"]
):
ei = ExternalId(record=fr, **dict(external_id))
ei.save()
for invitee in set(
tuple(r["invitee"].items()) for r in records if r["invitee"]["email"]
):
rec = FundingInvitee(record=fr, **dict(invitee))
validator = ModelValidator(rec)
if not validator.validate():
raise ModelException(f"Invalid invitee record: {validator.errors}")
rec.save()
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
return task
except Exception:
transaction.rollback()
app.logger.exception("Failed to load funding file.")
raise
@classmethod
def load_from_json(cls, source, filename=None, org=None, task=None):
"""Load data from JSON file or a string."""
# import data from file based on its extension; either it is YAML or JSON
data = load_yaml_json(filename=filename, source=source)
records = data["records"] if isinstance(data, dict) else data
for r in records:
validation_source_data = copy.deepcopy(r)
validation_source_data = del_none(validation_source_data)
# Adding schema validation for funding
validator = Core(
source_data=validation_source_data,
schema_files=[os.path.join(SCHEMA_DIR, "funding_schema.yaml")],
)
validator.validate(raise_exception=True)
with db.atomic() as transaction:
try:
if org is None:
org = current_user.organisation if current_user else None
if not task:
task = Task.create(org=org, filename=filename, task_type=TaskType.FUNDING)
else:
FundingRecord.delete().where(FundingRecord.task == task).execute()
is_enqueue = False
for r in records:
title = r.get("title", "title", "value")
translated_title = r.get("title", "translated-title", "value")
translated_title_language_code = r.get(
"title", "translated-title", "language-code"
)
rec_type = r.get("type")
if rec_type:
rec_type = rec_type.replace("_", "-").lower()
organization_defined_type = r.get("organization-defined-type", "value")
short_description = r.get("short-description")
amount = r.get("amount", "value")
url = r.get("url", "value")
currency = r.get("amount", "currency-code")
start_date = PartialDate.create(r.get("start-date"))
end_date = PartialDate.create(r.get("end-date"))
org_name = r.get("organization", "name")
city = r.get("organization", "address", "city")
region = r.get("organization", "address", "region")
country = r.get("organization", "address", "country")
disambiguated_id = r.get(
"organization",
"disambiguated-organization",
"disambiguated-organization-identifier",
)
disambiguation_source = r.get(
"organization", "disambiguated-organization", "disambiguation-source"
)
if disambiguation_source:
disambiguation_source = disambiguation_source.upper()
is_active = (
r.get("is-active").lower() in ["y", "yes", "1", "true"]
if r.get("is-active")
else False
)
if is_active:
is_enqueue = is_active
record = cls.create(
task=task,
title=title,
translated_title=translated_title,
translated_title_language_code=translated_title_language_code,
type=rec_type,
organization_defined_type=organization_defined_type,
short_description=short_description,
amount=amount,
currency=currency,
org_name=org_name,
city=city,
region=region,
country=country,
url=url,
is_active=is_active,
disambiguated_id=disambiguated_id,
disambiguation_source=disambiguation_source,
start_date=start_date,
end_date=end_date,
)
invitees = r.get("invitees", default=[])
if invitees:
for invitee in invitees:
identifier = invitee.get("local-identifier") or invitee.get(
"identifier"
)
email = normalize_email(invitee.get("email"))
first_name = invitee.get("first-name")
last_name = invitee.get("last-name")
orcid = invitee.get_orcid("ORCID-iD")
put_code = invitee.get("put-code")
visibility = invitee.get("visibility")
if visibility:
visibility = visibility.replace("_", "-").lower()
FundingInvitee.create(
record=record,
identifier=identifier,
email=email,
first_name=first_name,
last_name=last_name,
orcid=orcid,
visibility=visibility,
put_code=put_code,
)
else:
raise SchemaError(
"Schema validation failed:\n - "
"Expecting Invitees for which the funding record will be written"
)
contributors = r.get("contributors", "contributor", default=[])
if contributors:
for contributor in contributors:
orcid = contributor.get_orcid("contributor-orcid", "path")
name = contributor.get("credit-name", "value")
email = normalize_email(contributor.get("contributor-email", "value"))
role = contributor.get("contributor-attributes", "contributor-role")
FundingContributor.create(
record=record, orcid=orcid, name=name, email=email, role=role
)
external_ids = r.get("external-ids", "external-id", default=[])
if external_ids:
for external_id in external_ids:
id_type = external_id.get("external-id-type")
value = external_id.get("external-id-value")
url = external_id.get("external-id-url", "value")
relationship = external_id.get("external-id-relationship")
if id_type:
id_type = id_type.lower()
if relationship:
relationship = relationship.replace("_", "-").lower()
ExternalId.create(
record=record,
type=id_type,
value=value,
url=url,
relationship=relationship,
)
else:
raise SchemaError(
"Schema validation failed:\n - An external identifier is required"
)
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
return task
except Exception:
transaction.rollback()
app.logger.exception("Failed to load funding file.")
raise
class Meta: # noqa: D101,D106
table_alias = "fr"
class PeerReviewRecord(RecordModel):
"""Peer Review record loaded from Json file for batch processing."""
subject_type_choices = [(v, v.replace("-", " ").title()) for v in SUBJECT_TYPES]
reviewer_role_choices = [(v, v.title()) for v in REVIEWER_ROLES]
task = ForeignKeyField(Task, backref="peer_review_records", on_delete="CASCADE")
review_group_id = CharField(
max_length=255, verbose_name="Group ID", help_text="Review Group ID"
)
reviewer_role = CharField(
null=True,
max_length=255,
choices=reviewer_role_choices,
verbose_name="Role",
help_text="Reviewer Role",
)
review_url = CharField(null=True, max_length=255, verbose_name="URL", help_text="Review URL")
review_type = CharField(
null=True,
max_length=255,
choices=review_type_choices,
verbose_name="Type",
help_text="Review Type",
)
review_completion_date = PartialDateField(
null=True, verbose_name="Completed On", help_text="Review Completion Date"
)
subject_external_id_type = CharField(
null=True, max_length=255, verbose_name="Type", help_text="Subject External ID Type"
)
subject_external_id_value = CharField(
null=True, max_length=255, verbose_name="Value", help_text="Subject External ID Value"
)
subject_external_id_url = CharField(
null=True, max_length=255, verbose_name="URL", help_text="Subject External ID URL"
)
subject_external_id_relationship = CharField(
null=True,
max_length=255,
choices=relationship_choices,
verbose_name="Relationship",
help_text="Subject External ID Relationship",
)
subject_container_name = CharField(
null=True,
max_length=255,
verbose_name="Container Name",
help_text="Subject Container Name",
)
subject_type = CharField(
max_length=80,
choices=subject_type_choices,
null=True,
verbose_name="Type",
help_text="Subject Container Type",
)
subject_name_title = CharField(
null=True, max_length=255, verbose_name="Title", help_text="Subject Name Title"
)
subject_name_subtitle = CharField(
null=True, max_length=255, verbose_name="Subtitle", help_text="Subject Name Subtitle"
)
subject_name_translated_title_lang_code = CharField(
null=True,
max_length=10,
verbose_name="Language",
choices=language_choices,
help_text="Subject Name Translated Title Lang Code",
)
subject_name_translated_title = CharField(
null=True,
max_length=255,
verbose_name="Translated Title",
help_text="Subject Name Translated Title",
)
subject_url = CharField(null=True, max_length=255)
convening_org_name = CharField(
null=True, max_length=255, verbose_name="Name", help_text="Convening Organisation "
)
convening_org_city = CharField(
null=True, max_length=255, verbose_name="City", help_text="Convening Organisation City"
)
convening_org_region = CharField(
null=True, max_length=255, verbose_name="Region", help_text="Convening Organisation Region"
)
convening_org_country = CharField(
null=True,
max_length=255,
verbose_name="Country",
choices=country_choices,
help_text="Convening Organisation Country",
)
convening_org_disambiguated_identifier = CharField(
null=True,
max_length=255,
verbose_name="Disambiguated Identifier",
help_text="Convening Organisation Disambiguated Identifier",
)
convening_org_disambiguation_source = CharField(
null=True,
max_length=255,
verbose_name="Disambiguation Source",
help_text="Convening Organisation Disambiguation Source",
choices=disambiguation_source_choices,
)
is_active = BooleanField(
default=False, help_text="The record is marked for batch processing", null=True
)
processed_at = DateTimeField(null=True)
status = TextField(null=True, help_text="Record processing status.")
@property
def title(self):
"""Title of the record."""
return self.review_group_id
@property
def type(self):
"""Type of the record."""
return self.review_type or self.subject_type or self.subject_external_id_type
def key_name(self, name):
"""Map key-name to a model class key name for export."""
if name == "external-ids":
return "review-identifiers"
return name
@classmethod
def load_from_csv(cls, source, filename=None, org=None):
"""Load data from CSV/TSV file or a string."""
if isinstance(source, str):
source = StringIO(source, newline="")
if filename is None:
filename = datetime.utcnow().isoformat(timespec="seconds")
reader = csv.reader(source)
header = next(reader)
if len(header) == 1 and "\t" in header[0]:
source.seek(0)
reader = csv.reader(source, delimiter="\t")
header = next(reader)
if len(header) < 2:
raise ModelException("Expected CSV or TSV format file.")
header_rexs = [
re.compile(ex, re.I)
for ex in [
r"review\s*group\s*id(entifier)?$",
r"(reviewer)?\s*role$",
r"review\s*url$",
r"review\s*type$",
r"(review\s*completion)?.*date",
r"subject\s+external\s*id(entifier)?\s+type$",
r"subject\s+external\s*id(entifier)?\s+value$",
r"subject\s+external\s*id(entifier)?\s+url$",
r"subject\s+external\s*id(entifier)?\s+rel(ationship)?$",
r"subject\s+container\s+name$",
r"(subject)?\s*type$",
r"(subject)?\s*(name)?\s*title$",
r"(subject)?\s*(name)?\s*subtitle$",
r"(subject)?\s*(name)?\s*(translated)?\s*(title)?\s*lang(uage)?.*(code)?",
r"(subject)?\s*(name)?\s*translated\s*title$",
r"(subject)?\s*url$",
r"(convening)?\s*org(ani[zs]ation)?\s*name$",
r"(convening)?\s*org(ani[zs]ation)?\s*city",
r"(convening)?\s*org(ani[zs]ation)?\s*region$",
r"(convening)?\s*org(ani[zs]ation)?\s*country$",
r"(convening)?\s*(org(ani[zs]ation)?)?\s*disambiguated\s*id(entifier)?",
r"(convening)?\s*(org(ani[zs]ation)?)?\s*disambiguation\s*source$",
"email",
r"orcid\s*(id)?$",
"local.*|identifier",
r"first\s*(name)?",
r"(last|sur)\s*(name)?",
"put.*code",
r"(is)?\s*visib(ility|le)?",
r"(external)?\s*id(entifier)?\s+type$",
r"((external)?\s*id(entifier)?\s+value|peer\s*review.*id)$",
r"(external)?\s*id(entifier)?\s*url",
r"(external)?\s*id(entifier)?\s*rel(ationship)?",
r"(is)?\s*active$",
]
]
def index(rex):
"""Return first header column index matching the given regex."""
for i, column in enumerate(header):
if rex.match(column.strip()):
return i
else:
return None
idxs = [index(rex) for rex in header_rexs]
if all(idx is None for idx in idxs):
raise ModelException(f"Failed to map fields based on the header of the file: {header}")
if org is None:
org = current_user.organisation if current_user else None
def val(row, i, default=None):
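"""Return the stripped value of the i-th mapped column, or the default if the column is absent or empty."""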
if len(idxs) <= i or idxs[i] is None or idxs[i] >= len(row):
return default
else:
v = row[idxs[i]].strip()
return default if v == "" else v
rows = []
cached_row = []
is_enqueue = False
for row_no, row in enumerate(reader):
# skip empty lines:
if len([item for item in row if item and item.strip()]) == 0:
continue
if len(row) == 1 and row[0].strip() == "":
continue
orcid, email = val(row, 23), normalize_email(val(row, 22, ""))
orcid = validate_orcid_id(orcid)
if email and not validators.email(email):
raise ValueError(f"Invalid email address '{email}' in the row #{row_no+2}: {row}")
visibility = val(row, 28)
if visibility:
visibility = visibility.replace("_", "-").lower()
invitee = dict(
email=email,
orcid=orcid,
identifier=val(row, 24),
first_name=val(row, 25),
last_name=val(row, 26),
put_code=val(row, 27),
visibility=visibility,
)
review_group_id = val(row, 0)
if not review_group_id:
raise ModelException(
f"Review Group ID is mandatory, #{row_no+2}: {row}. Header: {header}"
)
external_id_type = val(row, 29, "").lower()
external_id_value = val(row, 30)
external_id_relationship = val(row, 32)
if external_id_relationship:
external_id_relationship = external_id_relationship.replace("_", "-").lower()
if external_id_relationship not in RELATIONSHIPS:
raise ModelException(
f"Invalid External Id Relationship '{external_id_relationship}' as it is not one of the "
f"{RELATIONSHIPS}, #{row_no+2}: {row}."
)
if external_id_type not in EXTERNAL_ID_TYPES:
raise ModelException(
f"Invalid External Id Type: '{external_id_type}', Use 'doi', 'issn' "
f"or one of the accepted types found here: https://pub.orcid.org/v3.0/identifiers"
)
if not external_id_value:
raise ModelException(
f"Invalid External Id Value or Peer Review Id: {external_id_value}, #{row_no+2}: {row}."
)
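# Reuse the previous row when it describes the same peer review (same group ID and external ID), so its invitees are grouped under one record: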
if (
cached_row
and review_group_id.lower() == val(cached_row, 0).lower()
and external_id_type.lower() == val(cached_row, 29).lower()
and external_id_value.lower() == val(cached_row, 30).lower()
and external_id_relationship.lower() == val(cached_row, 32).lower()
):
row = cached_row
else:
cached_row = row
is_active = val(row, 33, "").lower() in ["y", "yes", "1", "true"]
if is_active:
is_enqueue = is_active
convening_org_name = val(row, 16)
convening_org_city = val(row, 17)
convening_org_country = val(row, 19)
if not (convening_org_name and convening_org_city and convening_org_country):
raise ModelException(
f"Information about Convening Organisation (Name, City and Country) is mandatory, "
f"#{row_no+2}: {row}. Header: {header}"
)
# The uploaded country must be from ISO 3166-1 alpha-2
if convening_org_country:
try:
convening_org_country = countries.lookup(convening_org_country).alpha_2
except Exception:
raise ModelException(
f" (Convening Org Country must be 2 character from ISO 3166-1 alpha-2) in the row "
f"#{row_no+2}: {row}. Header: {header}"
)
reviewer_role = val(row, 1, "").replace("_", "-").lower() or None
review_type = val(row, 3, "").replace("_", "-").lower() or None
subject_type = val(row, 10, "").replace("_", "-").lower() or None
subject_external_id_relationship = val(row, 8, "").replace("_", "-").lower() or None
convening_org_disambiguation_source = val(row, 21, "").upper() or None
subject_external_id_type = val(row, 5, "").lower() or None
review_completion_date = val(row, 4) or None
if review_completion_date:
review_completion_date = PartialDate.create(review_completion_date)
rows.append(
dict(
peer_review=dict(
review_group_id=review_group_id,
reviewer_role=reviewer_role,
review_url=val(row, 2),
review_type=review_type,
review_completion_date=review_completion_date,
subject_external_id_type=subject_external_id_type,
subject_external_id_value=val(row, 6),
subject_external_id_url=val(row, 7),
subject_external_id_relationship=subject_external_id_relationship,
subject_container_name=val(row, 9),
subject_type=subject_type,
subject_name_title=val(row, 11),
subject_name_subtitle=val(row, 12),
subject_name_translated_title_lang_code=val(row, 13),
subject_name_translated_title=val(row, 14),
subject_url=val(row, 15),
convening_org_name=convening_org_name,
convening_org_city=convening_org_city,
convening_org_region=val(row, 18),
convening_org_country=convening_org_country,
convening_org_disambiguated_identifier=val(row, 20),
convening_org_disambiguation_source=convening_org_disambiguation_source,
is_active=is_active,
),
invitee=invitee,
external_id=dict(
type=external_id_type,
value=external_id_value,
url=val(row, 31),
relationship=external_id_relationship,
),
)
)
with db.atomic() as transaction:
try:
task = Task.create(org=org, filename=filename, task_type=TaskType.PEER_REVIEW)
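# Adjacent rows with identical peer-review details collapse into one record with all of their external IDs and invitees: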
for peer_review, records in groupby(
rows, key=lambda row: row["peer_review"].items()
):
records = list(records)
prr = cls(task=task, **dict(peer_review))
validator = ModelValidator(prr)
if not validator.validate():
raise ModelException(f"Invalid record: {validator.errors}")
prr.save()
for external_id in set(
tuple(r["external_id"].items())
for r in records
if r["external_id"]["type"] and r["external_id"]["value"]
):
ei = PeerReviewExternalId(record=prr, **dict(external_id))
ei.save()
for invitee in set(
tuple(r["invitee"].items()) for r in records if r["invitee"]["email"]
):
rec = PeerReviewInvitee(record=prr, **dict(invitee))
validator = ModelValidator(rec)
if not validator.validate():
raise ModelException(f"Invalid invitee record: {validator.errors}")
rec.save()
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
return task
except Exception:
transaction.rollback()
app.logger.exception("Failed to load peer review file.")
raise
@classmethod
def load_from_json(cls, source, filename=None, org=None, task=None, **kwargs):
"""Load data from JSON file or a string."""
# import data from file based on its extension; either it is YAML or JSON
data_list = load_yaml_json(filename=filename, source=source)
if not filename:
if isinstance(data_list, dict):
filename = data_list.get("filename")
else:
filename = (
"peer_review_" + datetime.utcnow().isoformat(timespec="seconds") + ".json"
)
if isinstance(data_list, dict):
data_list = data_list.get("records")
for data in data_list:
validation_source_data = copy.deepcopy(data)
validation_source_data = del_none(validation_source_data)
validator = Core(
source_data=validation_source_data,
schema_files=[os.path.join(SCHEMA_DIR, "peer_review_schema.yaml")],
)
validator.validate(raise_exception=True)
with db.atomic() as transaction:
try:
if org is None:
org = current_user.organisation if current_user else None
if task:
cls.delete().where(cls.task == task).execute()
else:
task = Task.create(org=org, filename=filename, task_type=TaskType.PEER_REVIEW)
is_enqueue = False
for data in data_list:
review_group_id = data.get("review-group-id")
reviewer_role = data.get("reviewer-role")
if reviewer_role:
reviewer_role = reviewer_role.strip().replace("_", "-").lower()
review_url = data.get("review-url", "value")
review_type = data.get("review-type")
if review_type:
review_type = review_type.strip().replace("_", "-").lower()
review_completion_date = PartialDate.create(data.get("review-completion-date"))
subject_external_id_type = data.get(
"subject-external-identifier", "external-id-type"
)
if subject_external_id_type:
subject_external_id_type = subject_external_id_type.strip().lower()
subject_external_id_value = data.get(
"subject-external-identifier", "external-id-value"
)
subject_external_id_url = data.get(
"subject-external-identifier", "external-id-url", "value"
)
subject_external_id_relationship = data.get(
"subject-external-identifier", "external-id-relationship"
)
if subject_external_id_relationship:
subject_external_id_relationship = subject_external_id_relationship.replace(
"_", "-"
).lower()
subject_container_name = data.get("subject-container-name", "value")
subject_type = data.get("subject-type")
if subject_type:
subject_type = subject_type.strip().replace("_", "-").lower()
subject_name_title = data.get("subject-name", "title", "value")
subject_name_subtitle = data.get("subject-name", "subtitle", "value")
subject_name_translated_title_lang_code = data.get(
"subject-name", "translated-title", "language-code"
)
subject_name_translated_title = data.get(
"subject-name", "translated-title", "value"
)
subject_url = data.get("subject-url", "value")
convening_org_name = data.get("convening-organization", "name")
convening_org_city = data.get("convening-organization", "address", "city")
convening_org_region = data.get("convening-organization", "address", "region")
convening_org_country = data.get(
"convening-organization", "address", "country"
)
convening_org_disambiguated_identifier = data.get(
"convening-organization",
"disambiguated-organization",
"disambiguated-organization-identifier",
)
convening_org_disambiguation_source = data.get(
"convening-organization",
"disambiguated-organization",
"disambiguation-source",
)
if convening_org_disambiguation_source:
convening_org_disambiguation_source = (
convening_org_disambiguation_source.upper()
)
is_active = (
data.get("is-active").lower() in ["y", "yes", "1", "true"]
if data.get("is-active")
else False
)
if is_active:
is_enqueue = is_active
record = cls.create(
task=task,
review_group_id=review_group_id,
reviewer_role=reviewer_role,
review_url=review_url,
review_type=review_type,
review_completion_date=review_completion_date,
subject_external_id_type=subject_external_id_type,
subject_external_id_value=subject_external_id_value,
subject_external_id_url=subject_external_id_url,
subject_external_id_relationship=subject_external_id_relationship,
subject_container_name=subject_container_name,
subject_type=subject_type,
subject_name_title=subject_name_title,
subject_name_subtitle=subject_name_subtitle,
subject_name_translated_title_lang_code=subject_name_translated_title_lang_code,
subject_name_translated_title=subject_name_translated_title,
subject_url=subject_url,
convening_org_name=convening_org_name,
convening_org_city=convening_org_city,
convening_org_region=convening_org_region,
convening_org_country=convening_org_country,
convening_org_disambiguated_identifier=convening_org_disambiguated_identifier,
convening_org_disambiguation_source=convening_org_disambiguation_source,
is_active=is_active,
)
invitee_list = data.get("invitees")
if invitee_list:
for invitee in invitee_list:
identifier = invitee.get("local-identifier") or invitee.get(
"identifier"
)
email = normalize_email(invitee.get("email"))
first_name = invitee.get("first-name")
last_name = invitee.get("last-name")
orcid_id = invitee.get("ORCID-iD")
put_code = invitee.get("put-code")
visibility = get_val(invitee, "visibility")
if visibility:
visibility = visibility.replace("_", "-").lower()
PeerReviewInvitee.create(
record=record,
identifier=identifier,
email=email,
first_name=first_name,
last_name=last_name,
orcid=orcid_id,
visibility=visibility,
put_code=put_code,
)
else:
raise SchemaError(
"Schema validation failed:\n - "
"Expecting Invitees for which the peer review record will be written"
)
external_ids_list = (
data.get("review-identifiers").get("external-id")
if data.get("review-identifiers")
else None
)
if external_ids_list:
for external_id in external_ids_list:
id_type = external_id.get("external-id-type")
if id_type:
id_type = id_type.lower()
value = external_id.get("external-id-value")
url = (
external_id.get("external-id-url").get("value")
if external_id.get("external-id-url")
else None
)
relationship = external_id.get("external-id-relationship")
if relationship:
relationship = relationship.replace("_", "-").lower()
PeerReviewExternalId.create(
record=record,
type=id_type,
value=value,
url=url,
relationship=relationship,
)
else:
raise SchemaError(
"Schema validation failed:\n - An external identifier is required"
)
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
return task
except Exception:
transaction.rollback()
app.logger.exception("Failed to load peer review file.")
raise
def to_export_dict(self):
"""Map the peer-review record to dict for export into JSON/YAML."""
d = super().to_export_dict()
d["review-type"] = self.review_type
d["reviewer-role"] = self.reviewer_role
if self.subject_external_id_relationship or self.subject_external_id_value:
d["subject-external-identifier"] = {
"external-id-type": self.subject_external_id_type,
"external-id-value": self.subject_external_id_value,
"external-id-url": {"value": self.subject_external_id_url},
"external-id-relationship": self.subject_external_id_relationship,
}
if self.subject_container_name:
d["subject-container-name"] = {"value": self.subject_container_name}
if self.subject_type:
d["subject-type"] = self.subject_type
if self.review_completion_date:
cd = self.review_completion_date.as_orcid_dict()
d["review-completion-date"] = cd
if self.review_url:
d["review-url"] = {"value": self.review_url}
if self.review_group_id:
d["review-group-id"] = self.review_group_id
if self.subject_name_title:
sn = {"title": {"value": self.subject_name_title}}
if self.subject_name_subtitle:
sn["subtitle"] = {"value": self.subject_name_subtitle}
if self.subject_name_translated_title:
sn["translated-title"] = {"value": self.subject_name_translated_title}
if self.subject_name_translated_title_lang_code:
sn["translated-title"][
"language-code"
] = self.subject_name_translated_title_lang_code
d["subject-name"] = sn
if self.subject_url:
d["subject-url"] = dict(value=self.subject_url)
if self.convening_org_name:
co = {
"name": self.convening_org_name,
"address": {
"city": self.convening_org_city,
"region": self.convening_org_region,
"country": self.convening_org_country,
},
}
if self.convening_org_disambiguated_identifier:
co["disambiguated-organization"] = {
"disambiguated-organization-identifier": self.convening_org_disambiguated_identifier,
"disambiguation-source": self.convening_org_disambiguation_source,
}
d["convening-organization"] = co
return d
class Meta: # noqa: D101,D106
table_alias = "pr"
class PropertyRecord(RecordModel):
"""Researcher Url record loaded from Json file for batch processing."""
task = ForeignKeyField(Task, backref="property_records", on_delete="CASCADE")
type = CharField(verbose_name="Property Type", choices=property_type_choices)
display_index = IntegerField(null=True)
name = CharField(
null=True, max_length=255, verbose_name="Property Name", help_text="Website name."
)
value = CharField(
max_length=255,
verbose_name="Property Value",
help_text="URL, Also known as, Keyword, Other ID, or Country value.",
)
email = CharField(max_length=120, null=True)
first_name = CharField(max_length=120, null=True)
last_name = CharField(max_length=120, null=True)
orcid = OrcidIdField(null=True)
put_code = IntegerField(null=True)
visibility = CharField(null=True, max_length=100, choices=visibility_choices)
is_active = BooleanField(
default=False, help_text="The record is marked for batch processing", null=True
)
processed_at = DateTimeField(null=True)
status = TextField(null=True, help_text="Record processing status.")
@classmethod
def load_from_csv(cls, source, filename=None, org=None, file_property_type=None):
"""Load data from CSV/TSV file or a string."""
if isinstance(source, str):
source = StringIO(source, newline="")
if filename is None:
if hasattr(source, "name"):
filename = source.name
else:
filename = datetime.utcnow().isoformat(timespec="seconds")
reader = csv.reader(source)
header = next(reader)
if len(header) == 1 and "\t" in header[0]:
source.seek(0)
reader = csv.reader(source, delimiter="\t")
header = next(reader)
if len(header) < 2:
raise ModelException("Expected CSV or TSV format file.")
if len(header) < 4:
raise ModelException(
"Wrong number of fields. Expected at least 3 fields "
"(email address or another unique identifier, name and/or value) "
f"and property type. Read header: {header}"
)
header_rexs = [
re.compile(ex, re.I)
for ex in [
r"(url)?.*name",
r".*value|.*content|.*country",
r"(display)?.*index",
"email",
r"first\s*(name)?",
r"(last|sur)\s*(name)?",
"orcid.*",
r"put|code",
r"(is)?\s*visib(bility|le)?",
"(propery)?.*type",
r"(is)?\s*active$",
]
]
def index(rex):
"""Return first header column index matching the given regex."""
for i, column in enumerate(header):
if rex.match(column.strip()):
return i
else:
return None
idxs = [index(rex) for rex in header_rexs]
if all(idx is None for idx in idxs):
raise ModelException(f"Failed to map fields based on the header of the file: {header}")
if org is None:
org = current_user.organisation if current_user else None
def val(row, i, default=None):
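"""Return the stripped value of the i-th mapped column, or the default if the column is absent or empty."""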
if len(idxs) <= i or idxs[i] is None or idxs[i] >= len(row):
return default
else:
v = row[idxs[i]].strip()
return default if v == "" else v
with db.atomic() as transaction:
try:
task = Task.create(org=org, filename=filename, task_type=TaskType.PROPERTY)
is_enqueue = False
for row_no, row in enumerate(reader):
# skip empty lines:
if len([item for item in row if item and item.strip()]) == 0:
continue
if len(row) == 1 and row[0].strip() == "":
continue
email = normalize_email(val(row, 3, ""))
orcid = validate_orcid_id(val(row, 6))
if not (email or orcid):
raise ModelException(
f"Missing user identifier (email address or ORCID iD) in the row "
f"#{row_no+2}: {row}. Header: {header}"
)
if email and not validators.email(email):
raise ValueError(
f"Invalid email address '{email}' in the row #{row_no+2}: {row}"
)
value = val(row, 1, "")
first_name = val(row, 4)
last_name = val(row, 5)
property_type = val(row, 9) or file_property_type
is_active = val(row, 10, "").lower() in ["y", "yes", "1", "true"]
if is_active:
is_enqueue = is_active
if property_type:
property_type = property_type.strip().upper()
if not property_type or property_type not in PROPERTY_TYPES:
raise ModelException(
"Missing or incorrect property type. "
f"(expected: {','.join(PROPERTY_TYPES)}: {row}"
)
name = None
if property_type == "URL":
name = val(row, 0, "")
if not name:
raise ModelException(
f"Missing URL Name. For Researcher URL Name is expected: {row}."
)
elif property_type == "COUNTRY":
# The uploaded country must be from ISO 3166-1 alpha-2
if value:
try:
value = countries.lookup(value).alpha_2
except Exception:
raise ModelException(
f" (Country must be 2 character from ISO 3166-1 alpha-2) in the row "
f"#{row_no+2}: {row}. Header: {header}"
)
if not value:
raise ModelException(
"Wrong number of fields. Expected at least fields ( content or value or country and "
f"email address or another unique identifier): {row}"
)
visibility = val(row, 8)
if visibility:
visibility = visibility.replace("_", "-").lower()
rr = cls(
task=task,
type=property_type,
is_active=is_active,
name=name,
value=value,
display_index=val(row, 2),
email=email,
first_name=first_name,
last_name=last_name,
orcid=orcid,
put_code=val(row, 7),
visibility=visibility,
)
validator = ModelValidator(rr)
if not validator.validate():
raise ModelException(f"Invalid record: {validator.errors}")
rr.save()
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
except Exception:
transaction.rollback()
app.logger.exception("Failed to load Researcher Url Record file.")
raise
return task
@classmethod
def load_from_json(
cls,
source,
filename=None,
org=None,
task=None,
skip_schema_validation=False,
file_property_type=None,
):
"""Load data from JSON file or a string."""
data = load_yaml_json(filename=filename, source=source)
if not skip_schema_validation:
if isinstance(data, dict):
jsonschema.validate(data, schemas.property_task)
else:
jsonschema.validate(data, schemas.property_record_list)
records = data["records"] if isinstance(data, dict) else data
if isinstance(data, dict):
records = data["records"]
if not filename:
filename = data.get("filename")
task_type = data.get("taks-type")
if not file_property_type and task_type:
file_property_type = {
"RESEARCHER_URL": "URL",
"OTHER_NAME": "NAME",
"KEYWORD": "KEYWORD",
"COUNTRY": "COUNTRY",
}.get(task_type)
else:
records = data
with db.atomic() as transaction:
try:
if org is None:
org = current_user.organisation if current_user else None
if not task:
task = Task.create(org=org, filename=filename, task_type=TaskType.PROPERTY)
else:
cls.delete().where(cls.task_id == task.id).execute()
is_enqueue = False
for r in records:
value = (
r.get("value")
or r.get("url", "value")
or r.get("url-value")
or r.get("content")
or r.get("country")
)
display_index = r.get("display-index")
property_type = r.get("type") or file_property_type
if property_type:
property_type = property_type.strip().upper()
email = normalize_email(r.get("email"))
first_name = r.get("first-name")
last_name = r.get("last-name")
orcid = r.get_orcid("ORCID-iD") or r.get_orcid("orcid")
put_code = r.get("put-code")
visibility = r.get("visibility")
if visibility:
visibility = visibility.replace("_", "-").lower()
is_active = bool(r.get("is-active"))
if is_active:
is_enqueue = is_active
if not property_type or property_type not in PROPERTY_TYPES:
raise ModelException(
"Missing or incorrect property type. "
f"(expected: {','.join(PROPERTY_TYPES)}: {r}"
)
name = None
if property_type == "URL":
name = r.get("name") or r.get("url-name")
if not name:
raise ModelException(
f"Missing URL Name. For Researcher URL Name is expected: {r}."
)
elif property_type == "COUNTRY":
# The uploaded country must be from ISO 3166-1 alpha-2
if value:
try:
value = countries.lookup(value).alpha_2
except Exception:
raise ModelException(
f"(Country {value} must be 2 character from ISO 3166-1 alpha-2): {r}."
)
cls.create(
task=task,
type=property_type,
is_active=is_active,
name=name,
value=value,
display_index=display_index,
email=email,
first_name=first_name,
last_name=last_name,
orcid=orcid,
visibility=visibility,
put_code=put_code,
)
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
return task
except Exception:
transaction.rollback()
app.logger.exception("Failed to load Researcher property file.")
raise
def to_export_dict(self):
"""Map the property record to dict for export into JSON/YAML."""
d = super().to_export_dict()
d.update(
self.to_dict(
recurse=False, to_dashes=True, exclude=[PropertyRecord.type, PropertyRecord.task]
)
)
return d
class Meta: # noqa: D101,D106
table_alias = "pr"
class WorkRecord(RecordModel):
"""Work record loaded from Json file for batch processing."""
task = ForeignKeyField(Task, backref="work_records", on_delete="CASCADE")
title = CharField(max_length=255)
subtitle = CharField(null=True, max_length=255)
translated_title = CharField(null=True, max_length=255)
translated_title_language_code = CharField(null=True, max_length=10, choices=language_choices)
journal_title = CharField(null=True, max_length=255)
short_description = CharField(null=True, max_length=5000)
citation_type = CharField(null=True, max_length=255, choices=citation_type_choices)
citation_value = CharField(null=True, max_length=32767)
type = CharField(null=True, max_length=255, choices=work_type_choices)
publication_date = PartialDateField(null=True)
url = CharField(null=True, max_length=255)
language_code = CharField(null=True, max_length=10, choices=language_choices)
country = CharField(null=True, max_length=255, choices=country_choices)
is_active = BooleanField(
default=False, help_text="The record is marked for batch processing", null=True
)
processed_at = DateTimeField(null=True)
status = TextField(null=True, help_text="Record processing status.")
@classmethod
def load_from_csv(cls, source, filename=None, org=None):
"""Load data from CSV/TSV file or a string."""
if isinstance(source, str):
source = StringIO(source, newline="")
if filename is None:
filename = datetime.utcnow().isoformat(timespec="seconds")
reader = csv.reader(source)
header = next(reader)
if len(header) == 1 and "\t" in header[0]:
source.seek(0)
reader = csv.reader(source, delimiter="\t")
header = next(reader)
if len(header) < 2:
raise ModelException("Expected CSV or TSV format file.")
header_rexs = [
re.compile(ex, re.I)
for ex in [
"title$",
r"sub.*(title)?$",
r"translated\s+(title)?",
r"translat(ed)?(ion)?\s+(title)?\s*lang(uage)?.*(code)?",
r"journal",
"type$",
r"(short\s*|description\s*)+$",
r"citat(ion)?.*type",
r"citat(ion)?.*value",
r"(publication)?.*date",
r"(publ(ication?))?.*media.*(type)?",
r"url",
r"lang(uage)?.*(code)?",
r"country",
r"(is)?\s*active$",
r"orcid\s*(id)?$",
"email",
r"(external)?\s*id(entifier)?\s+type$",
r"((external)?\s*id(entifier)?\s+value|work.*id)$",
r"(external)?\s*id(entifier)?\s*url",
r"(external)?\s*id(entifier)?\s*rel(ationship)?",
"put.*code",
r"(is)?\s*visib(bility|le)?",
r"first\s*(name)?",
r"(last|sur)\s*(name)?",
"local.*|.*identifier",
]
]
def index(rex):
"""Return first header column index matching the given regex."""
for i, column in enumerate(header):
if rex.match(column.strip()):
return i
else:
return None
idxs = [index(rex) for rex in header_rexs]
if all(idx is None for idx in idxs):
raise ModelException(f"Failed to map fields based on the header of the file: {header}")
if org is None:
org = current_user.organisation if current_user else None
def val(row, i, default=None):
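"""Return the stripped value of the i-th mapped column, or the default if the column is absent or empty."""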
if len(idxs) <= i or idxs[i] is None or idxs[i] >= len(row):
return default
else:
v = row[idxs[i]].strip()
return default if v == "" else v
rows = []
cached_row = []
is_enqueue = False
for row_no, row in enumerate(reader):
# skip empty lines:
if len([item for item in row if item and item.strip()]) == 0:
continue
if len(row) == 1 and row[0].strip() == "":
continue
orcid, email = val(row, 15), normalize_email(val(row, 16, ""))
if orcid:
orcid = validate_orcid_id(orcid)
if email and not validators.email(email):
raise ValueError(f"Invalid email address '{email}' in the row #{row_no+2}: {row}")
visibility = val(row, 22)
if visibility:
visibility = visibility.replace("_", "-").lower()
invitee = dict(
identifier=val(row, 25),
email=email,
first_name=val(row, 23),
last_name=val(row, 24),
orcid=orcid,
put_code=val(row, 21),
visibility=visibility,
)
title = val(row, 0)
external_id_type = val(row, 17, "").lower()
external_id_value = val(row, 18)
external_id_relationship = val(row, 20, "").replace("_", "-").lower()
if external_id_type not in EXTERNAL_ID_TYPES:
raise ModelException(
f"Invalid External Id Type: '{external_id_type}', Use 'doi', 'issn' "
f"or one of the accepted types found here: https://pub.orcid.org/v3.0/identifiers"
)
if not external_id_value:
raise ModelException(
f"Invalid External Id Value or Work Id: {external_id_value}, #{row_no+2}: {row}."
)
if not title:
raise ModelException(f"Title is mandatory, #{row_no+2}: {row}. Header: {header}")
if external_id_relationship not in RELATIONSHIPS:
raise ModelException(
f"Invalid External Id Relationship '{external_id_relationship}' as it is not one of the "
f"{RELATIONSHIPS}, #{row_no+2}: {row}."
)
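# Reuse the previous row when it describes the same work (same title and external ID), so its invitees are grouped under one work record: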
if (
cached_row
and title.lower() == val(cached_row, 0).lower()
and external_id_type.lower() == val(cached_row, 17).lower()
and external_id_value.lower() == val(cached_row, 18).lower()
and external_id_relationship.lower() == val(cached_row, 20).lower()
):
row = cached_row
else:
cached_row = row
is_active = val(row, 14, "").lower() in ["y", "yes", "1", "true"]
if is_active:
is_enqueue = is_active
work_type = val(row, 5, "").replace("_", "-").lower()
if not work_type:
raise ModelException(
f"Work type is mandatory, #{row_no+2}: {row}. Header: {header}"
)
# The uploaded country must be from ISO 3166-1 alpha-2
country = val(row, 13)
if country:
try:
country = countries.lookup(country).alpha_2
except Exception:
raise ModelException(
f" (Country must be 2 character from ISO 3166-1 alpha-2) in the row "
f"#{row_no+2}: {row}. Header: {header}"
)
publication_date = val(row, 9)
citation_type = val(row, 7)
if citation_type:
citation_type = citation_type.replace("_", "-").lower()
if publication_date:
publication_date = PartialDate.create(publication_date)
rows.append(
dict(
work=dict(
title=title,
subtitle=val(row, 1),
translated_title=val(row, 2),
translated_title_language_code=val(row, 3),
journal_title=val(row, 4),
type=work_type,
short_description=val(row, 6),
citation_type=citation_type,
citation_value=val(row, 8),
publication_date=publication_date,
url=val(row, 11),
language_code=val(row, 12),
country=country,
is_active=is_active,
),
invitee=invitee,
external_id=dict(
type=external_id_type,
value=external_id_value,
url=val(row, 19),
relationship=external_id_relationship,
),
)
)
with db.atomic() as transaction:
try:
task = Task.create(org=org, filename=filename, task_type=TaskType.WORK)
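# Adjacent rows with identical work details collapse into one record with all of their external IDs and invitees: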
for work, records in groupby(rows, key=lambda row: row["work"].items()):
records = list(records)
wr = cls(task=task, **dict(work))
validator = ModelValidator(wr)
if not validator.validate():
raise ModelException(f"Invalid record: {validator.errors}")
wr.save()
for external_id in set(
tuple(r["external_id"].items())
for r in records
if r["external_id"]["type"] and r["external_id"]["value"]
):
ei = WorkExternalId(record=wr, **dict(external_id))
ei.save()
for invitee in set(
tuple(r["invitee"].items()) for r in records if r["invitee"]["email"]
):
rec = WorkInvitee(record=wr, **dict(invitee))
validator = ModelValidator(rec)
if not validator.validate():
raise ModelException(f"Invalid invitee record: {validator.errors}")
rec.save()
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
return task
except Exception:
transaction.rollback()
app.logger.exception("Failed to load work file.")
raise
@classmethod
def load_from_json(cls, source, filename=None, org=None, task=None, **kwargs):
"""Load data from JSON file or a string."""
# import data from file based on its extension; either it is YAML or JSON
data = load_yaml_json(filename=filename, source=source)
if not filename and isinstance(data, dict):
filename = data.get("filename")
if isinstance(data, dict):
data = data.get("records")
# TODO: validation of uploaded work file
for r in data:
validation_source_data = copy.deepcopy(r)
validation_source_data = del_none(validation_source_data)
# Adding schema validation for Work
validator = Core(
source_data=validation_source_data,
schema_files=[os.path.join(SCHEMA_DIR, "work_schema.yaml")],
)
validator.validate(raise_exception=True)
with db.atomic() as transaction:
try:
if org is None:
org = current_user.organisation if current_user else None
if not task:
task = Task.create(org=org, filename=filename, task_type=TaskType.WORK)
is_enqueue = False
for r in data:
title = r.get("title", "title", "value")
subtitle = r.get("title", "subtitle", "value")
translated_title = r.get("title", "translated-title", "value")
translated_title_language_code = r.get(
"title", "translated-title", "language-code"
)
journal_title = r.get("journal-title", "value")
short_description = r.get("short-description")
citation_type = r.get("citation", "citation-type")
if citation_type:
citation_type = citation_type.strip().replace("_", "-").lower()
citation_value = r.get("citation", "citation-value")
rec_type = r.get("type")
if rec_type:
rec_type = rec_type.strip().replace("_", "-").lower()
url = r.get("url", "value")
language_code = r.get("language-code")
country = r.get("country", "value")
is_active = (
r.get("is-active").lower() in ["y", "yes", "1", "true"]
if r.get("is-active")
else False
)
if is_active:
is_enqueue = is_active
publication_date = PartialDate.create(r.get("publication-date"))
record = WorkRecord.create(
task=task,
title=title,
subtitle=subtitle,
translated_title=translated_title,
translated_title_language_code=translated_title_language_code,
journal_title=journal_title,
short_description=short_description,
citation_type=citation_type,
citation_value=citation_value,
type=rec_type,
publication_date=publication_date,
url=url,
is_active=is_active,
language_code=language_code,
country=country,
)
validator = ModelValidator(record)
if not validator.validate():
raise ModelException(f"Invalid Work record: {validator.errors}")
invitee_list = r.get("invitees")
if invitee_list:
for invitee in invitee_list:
identifier = invitee.get("local-identifier") or invitee.get(
"identifier"
)
email = normalize_email(invitee.get("email"))
first_name = invitee.get("first-name")
last_name = invitee.get("last-name")
orcid = invitee.get_orcid("ORCID-iD")
put_code = invitee.get("put-code")
visibility = get_val(invitee, "visibility")
if visibility:
visibility = visibility.replace("_", "-").lower()
WorkInvitee.create(
record=record,
identifier=identifier,
email=email,
first_name=first_name,
last_name=last_name,
orcid=orcid,
visibility=visibility,
put_code=put_code,
)
else:
raise SchemaError(
"Schema validation failed:\n - "
"Expecting Invitees for which the work record will be written"
)
contributor_list = r.get("contributors", "contributor")
if contributor_list:
for contributor in contributor_list:
orcid = contributor.get_orcid("contributor-orcid", "path")
name = get_val(contributor, "credit-name", "value")
email = normalize_email(
get_val(contributor, "contributor-email", "value")
)
role = get_val(
contributor, "contributor-attributes", "contributor-role"
)
contributor_sequence = get_val(
contributor, "contributor-attributes", "contributor-sequence"
)
WorkContributor.create(
record=record,
orcid=orcid,
name=name,
email=email,
role=role,
contributor_sequence=contributor_sequence,
)
external_ids_list = (
r.get("external-ids").get("external-id") if r.get("external-ids") else None
)
if external_ids_list:
for external_id in external_ids_list:
id_type = external_id.get("external-id-type")
if id_type:
id_type = id_type.lower()
value = external_id.get("external-id-value")
url = get_val(external_id, "external-id-url", "value")
relationship = external_id.get("external-id-relationship")
if relationship:
relationship = relationship.replace("_", "-").lower()
WorkExternalId.create(
record=record,
type=id_type,
value=value,
url=url,
relationship=relationship,
)
else:
raise SchemaError(
"Schema validation failed:\n - An external identifier is required"
)
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
return task
except Exception:
transaction.rollback()
app.logger.exception("Failed to load work record file.")
raise
def to_export_dict(self):
"""Map the work record to dict for export into JSON/YAML."""
d = super().to_export_dict()
if self.journal_title:
d["journal-title"] = dict(value=self.journal_title)
d["short-description"] = self.short_description
if self.publication_date:
pd = self.publication_date.as_orcid_dict()
d["publication-date"] = pd
if self.url:
d["url"] = self.url
if self.citation_type or self.citation_value:
d["citation"] = {
"citation-type": self.citation_type,
"citation-value": self.citation_value,
}
if self.country:
d["country"] = dict(value=self.country)
return d
class Meta: # noqa: D101,D106
table_alias = "wr"
class ContributorModel(BaseModel):
"""Common model bits of the contributor records."""
orcid = OrcidIdField(null=True)
name = CharField(max_length=120, null=True)
role = CharField(max_length=120, null=True)
email = CharField(max_length=120, null=True)
def to_export_dict(self):
"""Map the contributor record to dict for export into JSON/YAML."""
d = {
"contributor-email": dict(value=self.email),
"credit-name": dict(value=self.name),
"contributor-orcid": dict(path=self.orcid),
}
if self.role:
d["contributor-attributes"] = {"contributor-role": self.role}
return d
class WorkContributor(ContributorModel):
"""Researcher or contributor - related to work."""
record = ForeignKeyField(WorkRecord, backref="contributors", on_delete="CASCADE")
contributor_sequence = CharField(max_length=120, null=True)
class Meta: # noqa: D101,D106
table_alias = "wc"
def to_export_dict(self):
"""Map the contributor record to dict for export into JSON/YAML."""
d = super().to_export_dict()
if self.contributor_sequence:
if "contributor-attributes" in d:
d["contributor-attributes"].update(
{"contributor-sequence": self.contributor_sequence}
)
else:
d["contributor-attributes"] = {"contributor-sequence": self.contributor_sequence}
return d
class FundingContributor(ContributorModel):
"""Researcher or contributor - receiver of the funding."""
record = ForeignKeyField(FundingRecord, backref="contributors", on_delete="CASCADE")
class Meta: # noqa: D101,D106
table_alias = "fc"
class Invitee(BaseModel):
"""Common model bits of the invitees records."""
identifier = CharField(max_length=120, null=True, verbose_name="Local Identifier")
email = CharField(max_length=120, null=True)
orcid = OrcidIdField(null=True)
first_name = CharField(max_length=120, null=True)
last_name = CharField(max_length=120, null=True)
put_code = IntegerField(null=True)
visibility = CharField(null=True, max_length=100, choices=visibility_choices)
status = TextField(null=True, help_text="Record processing status.")
processed_at = DateTimeField(null=True)
def to_export_dict(self):
"""Get row representation suitable for export to JSON/YAML."""
c = self.__class__
d = self.to_dict(
to_dashes=True,
exclude_nulls=True,
only=[c.identifier, c.email, c.first_name, c.last_name, c.put_code, c.visibility],
recurse=False,
)
if self.orcid:
d["ORCID-iD"] = self.orcid
return d
class Meta: # noqa: D101,D106
table_alias = "i"
class PeerReviewInvitee(Invitee):
"""Researcher or Invitee - related to peer review."""
record = ForeignKeyField(PeerReviewRecord, backref="invitees", on_delete="CASCADE")
class Meta: # noqa: D101,D106
table_alias = "pi"
class WorkInvitee(Invitee):
"""Researcher or Invitee - related to work."""
record = ForeignKeyField(WorkRecord, backref="invitees", on_delete="CASCADE")
class Meta: # noqa: D101,D106
table_alias = "wi"
class FundingInvitee(Invitee):
"""Researcher or Invitee - related to funding."""
record = ForeignKeyField(FundingRecord, backref="invitees", on_delete="CASCADE")
class Meta: # noqa: D101,D106
table_alias = "fi"
class ExternalIdModel(BaseModel):
"""Common model bits of the ExternalId records."""
type = CharField(max_length=255, choices=external_id_type_choices)
value = CharField(max_length=255)
url = CharField(max_length=200, null=True)
relationship = CharField(null=True, max_length=255, choices=relationship_choices)
def to_export_dict(self):
"""Map the external ID record to dict for exprt into JSON/YAML."""
d = {
"external-id-type": self.type,
"external-id-value": self.value,
"external-id-relationship": self.relationship,
}
if self.url:
d["external-id-url"] = {"value": self.url}
return d
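# Illustrative sketch (not part of the original module): the mapping above yields
# ORCID-style external-id dicts; every value below is a made-up example.
def _example_external_id_export():
    """Show the dict shape ExternalIdModel.to_export_dict() produces (hypothetical values)."""
    ei = ExternalIdModel(type="doi", value="10.1000/demo",
                         url="https://doi.org/10.1000/demo", relationship="self")
    # Returns:
    # {"external-id-type": "doi", "external-id-value": "10.1000/demo",
    #  "external-id-relationship": "self",
    #  "external-id-url": {"value": "https://doi.org/10.1000/demo"}}
    return ei.to_export_dict()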
class WorkExternalId(ExternalIdModel):
"""Work ExternalId loaded for batch processing."""
record = ForeignKeyField(WorkRecord, backref="external_ids", on_delete="CASCADE")
class Meta: # noqa: D101,D106
table_alias = "wei"
class PeerReviewExternalId(ExternalIdModel):
"""Peer Review ExternalId loaded for batch processing."""
record = ForeignKeyField(PeerReviewRecord, backref="external_ids", on_delete="CASCADE")
class Meta: # noqa: D101,D106
table_alias = "pei"
# TODO: refactor to use a many-to-many relation
class ExternalId(ExternalIdModel):
"""Funding ExternalId loaded for batch processing."""
record = ForeignKeyField(FundingRecord, null=True, backref="external_ids", on_delete="CASCADE")
class Meta: # noqa: D101,D106
table_alias = "ei"
class Resource(BaseModel):
"""Research resource."""
title = CharField(max_length=1000)
display_index = IntegerField(null=True)
visibility = CharField(max_length=10, choices=visibility_choices)
class ResoureceExternalId(BaseModel):
"""Linkage between resoucrece and ExternalId."""
external_id = ForeignKeyField(ExternalId, index=True, on_delete="CASCADE")
resource = ForeignKeyField(Resource, index=True, on_delete="CASCADE")
class Meta: # noqa: D106
table_alias = "rei"
class AffiliationExternalId(ExternalIdModel):
"""Affiliation ExternalId loaded for batch processing."""
record = ForeignKeyField(AffiliationRecord, backref="external_ids", on_delete="CASCADE")
def to_export_dict(self):
"""Map the external ID record to dict for exprt into JSON/YAML."""
d = {
"external-id-type": self.type,
"external-id-value": self.value,
"external-id-relationship": self.relationship,
"external-id-url": self.url,
}
return d
class Meta: # noqa: D101,D106
table_alias = "aei"
class OtherIdRecord(ExternalIdModel):
"""Other ID record loaded from json/csv file for batch processing."""
task = ForeignKeyField(Task, backref="other_id_records", on_delete="CASCADE")
display_index = IntegerField(null=True)
email = CharField(max_length=120, null=True)
first_name = CharField(max_length=120, null=True)
last_name = CharField(max_length=120, null=True)
orcid = OrcidIdField(null=True)
put_code = IntegerField(null=True)
visibility = CharField(null=True, max_length=100, choices=visibility_choices)
is_active = BooleanField(
default=False, help_text="The record is marked for batch processing", null=True
)
processed_at = DateTimeField(null=True)
status = TextField(null=True, help_text="Record processing status.")
@classmethod
def load_from_csv(cls, source, filename=None, org=None):
"""Load data from CSV/TSV file or a string."""
if isinstance(source, str):
source = StringIO(source)
if filename is None:
if hasattr(source, "name"):
filename = source.name
else:
filename = datetime.utcnow().isoformat(timespec="seconds")
reader = csv.reader(source)
header = next(reader)
if len(header) == 1 and "\t" in header[0]:
source.seek(0)
reader = csv.reader(source, delimiter="\t")
header = next(reader)
if len(header) < 2:
raise ModelException("Expected CSV or TSV format file.")
if len(header) < 5:
raise ModelException(
"Wrong number of fields. Expected at least 5 fields "
"(email address or another unique identifier, External ID Type, External ID Value, External ID URL, "
f"External ID Relationship). Read header: {header}"
)
header_rexs = [
re.compile(ex, re.I)
for ex in (
r"(display)?.*index",
r"((external)?\s*id(entifier)?\s+type|.*type)$",
r"((external)?\s*id(entifier)?\s+value|.*value)$",
r"((external)?\s*id(entifier)?\s*url|.*url)$",
r"((external)?\s*id(entifier)?\s*rel(ationship)?|.*relationship)$",
"email",
r"first\s*(name)?",
r"(last|sur)\s*(name)?",
"orcid.*",
r"put|code",
r"(is)?\s*visib(bility|le)?",
r"(is)?\s*active$",
)
]
def index(rex):
"""Return first header column index matching the given regex."""
for i, column in enumerate(header):
if rex.match(column.strip()):
return i
else:
return None
idxs = [index(rex) for rex in header_rexs]
if all(idx is None for idx in idxs):
raise ModelException(f"Failed to map fields based on the header of the file: {header}")
if org is None:
org = current_user.organisation if current_user else None
def val(row, i, default=None):
if len(idxs) <= i or idxs[i] is None or idxs[i] >= len(row):
return default
else:
v = row[idxs[i]].strip()
return default if v == "" else v
with db.atomic() as transaction:
try:
task = Task.create(org=org, filename=filename, task_type=TaskType.OTHER_ID)
is_enqueue = False
for row_no, row in enumerate(reader):
# skip empty lines:
if len([item for item in row if item and item.strip()]) == 0:
continue
if len(row) == 1 and row[0].strip() == "":
continue
email = normalize_email(val(row, 5))
orcid = validate_orcid_id(val(row, 8))
if not (email or orcid):
raise ModelException(
f"Missing user identifier (email address or ORCID iD) in the row "
f"#{row_no+2}: {row}. Header: {header}"
)
if email and not validators.email(email):
raise ValueError(
f"Invalid email address '{email}' in the row #{row_no+2}: {row}"
)
rec_type = val(row, 1, "").lower()
value = val(row, 2)
url = val(row, 3)
relationship = val(row, 4)
if relationship:
relationship = relationship.replace("_", "-").lower()
first_name = val(row, 6)
last_name = val(row, 7)
is_active = val(row, 11, "").lower() in ["y", "yes", "1", "true"]
if is_active:
is_enqueue = is_active
if rec_type not in EXTERNAL_ID_TYPES:
raise ModelException(
f"Invalid External Id Type: '{rec_type}', Use 'doi', 'issn' "
f"or one of the accepted types found here: https://pub.orcid.org/v3.0/identifiers"
)
if not value:
raise ModelException(
f"Missing External Id Value: {value}, #{row_no+2}: {row}."
)
visibility = val(row, 10)
if visibility:
visibility = visibility.replace("_", "-").lower()
rr = cls(
task=task,
type=rec_type,
url=url,
relationship=relationship,
value=value,
display_index=val(row, 0),
email=email,
first_name=first_name,
last_name=last_name,
orcid=orcid,
put_code=val(row, 9),
visibility=visibility,
is_active=is_active,
)
validator = ModelValidator(rr)
if not validator.validate():
raise ModelException(f"Invalid record: {validator.errors}")
rr.save()
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
except Exception:
transaction.rollback()
app.logger.exception("Failed to load Other IDs Record file.")
raise
return task
@classmethod
def load_from_json(
cls, source, filename=None, org=None, task=None, skip_schema_validation=False
):
"""Load data from JSON file or a string."""
data = load_yaml_json(filename=filename, source=source)
if not skip_schema_validation:
if isinstance(data, dict):
jsonschema.validate(data, schemas.other_id_task)
else:
jsonschema.validate(data, schemas.other_id_record_list)
records = data["records"] if isinstance(data, dict) else data
with db.atomic() as transaction:
try:
if org is None:
org = current_user.organisation if current_user else None
if not task:
task = Task.create(org=org, filename=filename, task_type=TaskType.OTHER_ID)
is_enqueue = False
for r in records:
id_type = r.get("type") or r.get("external-id-type")
if id_type:
id_type = id_type.lower()
value = r.get("value") or r.get("external-id-value")
url = (
r.get("url")
or r.get("external-id-url", "value")
or r.get("external-id-url")
)
relationship = r.get("relationship") or r.get("external-id-relationship")
if relationship:
relationship = relationship.replace("_", "-").lower()
display_index = r.get("display-index")
email = normalize_email(r.get("email"))
first_name = r.get("first-name")
last_name = r.get("last-name")
orcid = r.get_orcid("ORCID-iD") or r.get_orcid("orcid")
put_code = r.get("put-code")
visibility = r.get("visibility")
if visibility:
visibility = visibility.replace("_", "-").lower()
is_active = bool(r.get("is-active"))
if is_active:
is_enqueue = is_active
cls.create(
task=task,
type=id_type,
value=value,
url=url,
relationship=relationship,
display_index=display_index,
email=email,
first_name=first_name,
last_name=last_name,
orcid=orcid,
visibility=visibility,
put_code=put_code,
is_active=is_active,
)
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
return task
except Exception:
transaction.rollback()
app.logger.exception("Failed to load Other IDs file.")
raise
class Meta: # noqa: D101,D106
table_alias = "oir"
class OrgRecord(RecordModel):
"""Common organisation record part of the batch processing reocords."""
name = CharField(max_length=1000)
city = TextField(null=True)
region = TextField(verbose_name="State/Region", null=True)
country = CharField(max_length=2, null=True, choices=country_choices)
disambiguated_id = TextField(verbose_name="Disambiguation ORG Id", null=True)
disambiguation_source = TextField(
verbose_name="Disambiguation ORG Source", null=True, choices=disambiguation_source_choices
)
class Meta: # noqa: D101,D106
table_alias = "or"
class ResourceRecord(RecordModel, Invitee):
"""Research resource record."""
display_index = IntegerField(null=True)
task = ForeignKeyField(Task, backref="resource_records", on_delete="CASCADE")
# Resource
name = CharField(max_length=1000)
type = CharField(max_length=1000, null=True)
start_date = PartialDateField(null=True)
end_date = PartialDateField(null=True)
url = CharField(max_length=200, null=True)
host_name = CharField(max_length=1000, verbose_name="Name", help_text="Resource Host Name")
host_city = CharField(null=True, verbose_name="City", help_text="Resource Host City")
host_region = CharField(
max_length=300, null=True, verbose_name="Region", help_text="Resource Host Region"
)
host_country = CharField(
max_length=2,
null=True,
choices=country_choices,
verbose_name="Country",
help_text="Resource Host Country",
)
host_disambiguated_id = CharField(
null=True, verbose_name="Disambiguated ID", help_text="Resource Host Disambiguated ID"
)
host_disambiguation_source = CharField(
null=True,
choices=disambiguation_source_choices,
verbose_name="Disambiguation Source",
help_text="Resource Host Disambiguation Source",
)
external_id_type = CharField(
max_length=255,
choices=external_id_type_choices,
verbose_name="Type",
help_text="External ID Type",
)
external_id_value = CharField(
max_length=255, verbose_name="Value", help_text="External ID Value"
)
external_id_url = CharField(
max_length=200, null=True, verbose_name="URL", help_text="External ID URL"
)
external_id_relationship = CharField(
null=True,
max_length=255,
choices=relationship_choices,
verbose_name="Relationship",
help_text="External ID Relationship",
)
# Proposal
proposal_title = CharField(max_length=1000, verbose_name="Title", help_text="Proposal Title")
proposal_start_date = PartialDateField(
null=True, verbose_name="Start Date", help_text="Proposal Start Date"
)
proposal_end_date = PartialDateField(
null=True, verbose_name="End Date", help_text="Proposal End Date"
)
proposal_url = CharField(
max_length=200, null=True, verbose_name="URL", help_text="Proposal URL"
)
proposal_host_name = CharField(
max_length=1000, verbose_name="Name", help_text="Proposal Host Name"
)
proposal_host_city = CharField(null=True, verbose_name="City", help_text="Proposal Host City")
proposal_host_region = CharField(
max_length=300, null=True, verbose_name="Region", help_text="Proposal Host Region"
)
    proposal_host_country = CharField(
        max_length=2,
        null=True,
        choices=country_choices,
        verbose_name="Country",
        help_text="Proposal Host Country",
    )
proposal_host_disambiguated_id = CharField(
null=True, verbose_name="Disambiguated ID", help_text="Proposal Host Disambiguated ID"
)
    proposal_host_disambiguation_source = CharField(
        null=True,
        choices=disambiguation_source_choices,
        verbose_name="Disambiguation Source",
        help_text="Proposal Host Disambiguation Source",
    )
    proposal_external_id_type = CharField(
        max_length=255,
        choices=external_id_type_choices,
        verbose_name="Type",
        help_text="Proposal External ID Type",
    )
proposal_external_id_value = CharField(
max_length=255, verbose_name="Value", help_text="Proposal External ID Value"
)
proposal_external_id_url = CharField(
max_length=200, null=True, verbose_name="URL", help_text="Proposal External ID URL"
)
proposal_external_id_relationship = CharField(
null=True,
max_length=255,
choices=relationship_choices,
verbose_name="Relationship",
help_text="Proposal External ID Relationship",
)
is_active = BooleanField(
default=False, help_text="The record is marked 'active' for batch processing", null=True
)
processed_at = DateTimeField(null=True)
local_id = CharField(
max_length=100,
null=True,
verbose_name="Local ID",
help_text="Record identifier used in the data source system.",
)
visibility = CharField(null=True, max_length=100, choices=visibility_choices)
status = TextField(null=True, help_text="Record processing status.")
class Meta: # noqa: D101,D106
table_alias = "rr"
@classmethod
def load_from_csv(cls, source, filename=None, org=None):
"""Load data from CSV/TSV file or a string."""
        if isinstance(source, str):
            # Sniff the delimiter from the raw text before wrapping it in a
            # StringIO; a membership test on the stream would consume it:
            if filename is None:
                filename = datetime.utcnow().isoformat(timespec="seconds") + (
                    ".tsv" if "\t" in source else ".csv"
                )
            source = StringIO(source, newline="")
        if filename is None:
            filename = getattr(source, "name", datetime.utcnow().isoformat(timespec="seconds"))
reader = csv.reader(source)
header = next(reader)
if len(header) == 1 and "\t" in header[0]:
source.seek(0)
reader = csv.reader(source, delimiter="\t")
header = next(reader)
if len(header) < 2:
raise ModelException("Expected CSV or TSV format file.")
header_rexs = [
(re.compile(ex, re.I), c)
for (ex, c) in [
(r"local.*|.*identifier", "identifier"),
(r"email", "email"),
(r"orcid\s*id", "orcid"),
(r"first\s*name", "first_name"),
(r"last\s*name", "last_name"),
(r"put\s*code", "put_code"),
(r"visibility", "visibility"),
(r"proposal\s*title", "proposal_title"),
(r"proposal\s*start\s*date", "proposal_start_date"),
(r"proposal\s*end\s*date", "proposal_end_date"),
(r"proposal\s*url", "proposal_url"),
(r"proposal\s*external\s*id\s*type", "proposal_external_id_type"),
(r"proposal\s*external\s*id\s*value", "proposal_external_id_value"),
(r"proposal\s*external\s*id\s*url", "proposal_external_id_url"),
(r"proposal\s*external\s*id\s*relationship", "proposal_external_id_relationship"),
(r"proposal\s*host\s*name", "proposal_host_name"),
(r"proposal\s*host\s*city", "proposal_host_city"),
(r"proposal\s*host\s*region", "proposal_host_region"),
(r"proposal\s*host\s*country", "proposal_host_country"),
(r"proposal\s*host\s*disambiguat.*id", "proposal_host_disambiguated_id"),
(r"proposal\s*host\s*disambiguat.*source", "proposal_host_disambiguation_source"),
(r"resource\s*name", "name"),
(r"resource\s*type", "type"),
(r"(resource\s*)?external\s*id\s*type", "external_id_type"),
(r"(resource\s*)?external\s*id\s*value", "external_id_value"),
(r"(resource\s*)?external\s*id\s*url", "external_id_url"),
(r"(resource\s*)?external\s*id\s*relationship", "external_id_relationship"),
(r"(resource\s*)?host\s*name", "host_name"),
(r"(resource\s*)?host\s*city", "host_city"),
(r"(resource\s*)?host\s*region", "host_region"),
(r"(resource\s*)?host\s*country", "host_country"),
(r"(resource\s*)?host\s*disambiguat.*id", "host_disambiguated_id"),
(r"(resource\s*)?host\s*disambiguat.*source", "host_disambiguation_source"),
]
]
def index(ex):
"""Return first header column index matching the given regex."""
for i, column in enumerate(header):
if ex.match(column.strip()):
return i
else:
return None
# model column -> file column map:
idxs = {column: index(ex) for ex, column in header_rexs}
        if all(idx is None for idx in idxs.values()):
raise ModelException(f"Failed to map fields based on the header of the file: {header}")
if org is None:
org = current_user.organisation if current_user else None
def val(row, column, default=None):
idx = idxs.get(column)
if idx is None or idx < 0 or idx >= len(row):
return default
return row[idx].strip() or default
def country_code(row, column):
country = val(row, column)
if country:
try:
country = countries.lookup(country).alpha_2
except Exception:
                    raise ModelException(
                        f"Invalid country '{country}' (must be a 2-character ISO 3166-1 "
                        f"alpha-2 code) in the row #{row_no+2}: {row}. Header: {header}"
                    )
return country
with db.atomic() as transaction:
try:
task = Task.create(org=org, filename=filename, task_type=TaskType.RESOURCE)
for row_no, row in enumerate(reader):
# skip empty lines:
if len([item for item in row if item and item.strip()]) == 0:
continue
if len(row) == 1 and row[0].strip() == "":
continue
orcid, email = val(row, "orcid"), normalize_email(val(row, "email"))
if orcid:
orcid = validate_orcid_id(orcid)
if email and not validators.email(email):
raise ValueError(
f"Invalid email address '{email}' in the row #{row_no+2}: {row}"
)
visibility = val(row, "visibility")
if visibility:
visibility = visibility.lower()
rec = cls.create(
task=task,
visibility=visibility,
email=email,
orcid=orcid,
**{
c: v
for c, v in (
(c, val(row, c))
for c in idxs
if c not in ["email", "orcid", "visibility"]
)
if v
},
)
validator = ModelValidator(rec)
                    # TODO: remove the exclude parameter after we sort out the
                    # valid domain values.
if not validator.validate(
exclude=[cls.external_id_relationship, cls.visibility]
):
raise ValueError(
f"Invalid data in the row #{row_no+2}: {validator.errors}"
)
                transaction.commit()
            except Exception:
                transaction.rollback()
                app.logger.exception("Failed to load resource record file.")
                raise
        if task.is_ready:
            from .utils import enqueue_task_records
            enqueue_task_records(task)
        return task
@property
def orcid_research_resource(self):
"""Map the common record parts to dict representation of ORCID API V3.x research resource."""
d = {}
# resource-item
host_org = {"name": self.host_name}
if self.host_city or self.host_region or self.host_country:
host_org["address"] = {
"city": self.host_city,
"region": self.host_region,
"country": self.host_country,
}
if self.host_disambiguated_id:
host_org["disambiguated-organization"] = {
"disambiguated-organization-identifier": self.host_disambiguated_id,
"disambiguation-source": self.host_disambiguation_source,
}
item = {
"resource-name": self.name,
"resource-type": self.type,
"hosts": {"organization": [host_org]},
"external-ids": {"external-id": [self.orcid_external_id()]},
}
if self.url:
item["url"] = dict(value=self.url)
d["resource-item"] = [item]
# proposal
host_org = {"name": self.proposal_host_name}
if self.proposal_host_city or self.proposal_host_region or self.proposal_host_country:
host_org["address"] = {
"city": self.proposal_host_city,
"region": self.proposal_host_region,
"country": self.proposal_host_country,
}
if self.proposal_host_disambiguated_id:
host_org["disambiguated-organization"] = {
"disambiguated-organization-identifier": self.proposal_host_disambiguated_id,
"disambiguation-source": self.proposal_host_disambiguation_source,
}
d["proposal"] = {
"title": {"title": {"value": self.proposal_title}},
"hosts": {"organization": [host_org]},
"external-ids": {
"external-id": [
self.orcid_external_id(
self.proposal_external_id_type,
self.proposal_external_id_value,
self.proposal_external_id_url,
self.proposal_external_id_relationship,
)
]
},
"start-date": self.proposal_start_date.as_orcid_dict(),
"end-date": self.proposal_end_date.as_orcid_dict(),
}
if self.proposal_url:
d["proposal"]["url"] = dict(value=self.proposal_url)
if self.display_index:
d["display-index"] = self.display_index
if self.visibility:
d["visibility"] = self.visibility.lower()
if self.put_code:
d["put-code"] = self.put_code
return d
def to_export_dict(self):
"""Map the funding record to dict for export into JSON/YAML."""
d = self.orcid_research_resource
d["invitees"] = [
Invitee.to_export_dict(self),
]
return d
@classmethod
def load(
cls,
data,
task=None,
task_id=None,
filename=None,
override=True,
skip_schema_validation=False,
org=None,
):
"""Load ORCID message record task form JSON/YAML."""
return MessageRecord.load(
data,
filename=filename,
override=True,
skip_schema_validation=True,
org=org or current_user.organisation,
task=task,
task_id=task_id,
task_type=TaskType.RESOURCE,
version="3.0",
)
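# Illustrative sketch (not part of the original module): ResourceRecord.load()
# above delegates raw research-resource tasks to MessageRecord.load() with
# task_type=TaskType.RESOURCE, so a task file can be loaded directly from text.
# The "records"/"invitees" file layout is an assumption based on that loader.
def _example_load_resource_task(yaml_or_json_text):
    """Illustrative only: create a RESOURCE task from a raw YAML/JSON task file."""
    return ResourceRecord.load(yaml_or_json_text, filename="resources.yaml")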
# ThroughDeferred = DeferredThroughModel()
class MessageRecord(RecordModel):
"""ORCID message loaded from structured batch task file."""
task = ForeignKeyField(Task, backref="message_records", on_delete="CASCADE")
version = CharField(null=True)
# type = CharField()
message = TextField()
# invitees = ManyToManyField(Invitee, backref="records", through_model=ThroughDeferred)
invitees = ManyToManyField(Invitee, backref="records", on_delete="CASCADE")
is_active = BooleanField(
default=False, help_text="The record is marked for batch processing", null=True
)
    # indicates that all invitees (user profiles) were processed
processed_at = DateTimeField(null=True)
status = TextField(null=True, help_text="Record processing status.")
@classmethod
def load(
cls,
data,
task=None,
task_id=None,
filename=None,
override=True,
skip_schema_validation=False,
org=None,
task_type=None,
version="3.0",
):
"""Load ORCID message record task form JSON/YAML."""
data = load_yaml_json(filename=filename, source=data)
if org is None:
org = current_user.organisation if current_user else None
# if not skip_schema_validation:
# jsonschema.validate(data, schemas.affiliation_task)
if not task and task_id:
task = Task.select().where(Task.id == task_id).first()
if not task and "id" in data and override and task_type:
task_id = int(data["id"])
task = (
Task.select()
.where(Task.id == task_id, Task.task_type == task_type, Task.is_raw)
.first()
)
if not filename:
if isinstance(data, dict):
filename = data.get("filename")
if not filename:
filename = datetime.utcnow().isoformat(timespec="seconds")
if task and not task_type:
task_type = task.task_type
if not task_type and isinstance(data, dict) and "type" in data:
task_type = TaskType[data["type"].upper()]
if isinstance(data, dict):
records = data.get("records")
else:
records = data
with db.atomic() as transaction:
try:
if not task:
task = Task.create(
org=org, filename=filename, task_type=task_type, is_raw=True
)
elif override:
task.record_model.delete().where(task.record_model.task == task).execute()
is_enqueue = False
for r in records:
invitees = r.get("invitees")
if not invitees:
raise ModelException(
f"Missing invitees, expected to have at lease one: {r}"
)
del r["invitees"]
rec_id = r.get("id")
if rec_id:
rec_id = int(rec_id)
del r["id"]
is_active = r.get("is-active")
if "is-active" in r:
del r["is-active"]
message = json.dumps(r, indent=2)
if rec_id and not override:
rec = cls.get(int(rec_id))
if rec.message != message:
rec.message = message
if rec.version != version:
rec.version = version
rec.is_active = is_active
else:
rec = cls.create(
task=task, version=version, message=message, is_active=is_active
)
rec.invitees.add(
[
Invitee.create(
orcid=i.get("ORCID-iD"),
email=i.get("email"),
first_name=i.get("first-name"),
last_name=i.get("last-name"),
put_code=i.get("put-code"),
visibility=i.get("visibility"),
)
for i in invitees
]
)
if is_enqueue:
from .utils import enqueue_task_records
enqueue_task_records(task)
            except Exception:
                transaction.rollback()
                app.logger.exception("Failed to load ORCID message record task file.")
raise
return task
def to_export_dict(self):
"""Map the common record parts to dict for export into JSON/YAML."""
return self.to_dict()
def to_dict(self, *args, **kwargs):
"""Map the common record parts to dict."""
d = json.loads(self.message)
d["id"] = self.id
d["invitees"] = [i.to_export_dict() for i in self.invitees]
return d
RecordInvitee = MessageRecord.invitees.get_through_model()
class Delegate(BaseModel):
"""External applications that can be redirected to."""
hostname = CharField()
class Url(AuditedModel):
"""Shortened URLs."""
short_id = CharField(unique=True, max_length=5)
url = TextField()
@classmethod
def shorten(cls, url):
"""Create a shorten URL or retrieves an exiting one."""
try:
u = cls.get(url=url)
except cls.DoesNotExist:
while True:
short_id = "".join(
random.choice(string.ascii_letters + string.digits) for _ in range(5)
)
if not cls.select().where(cls.short_id == short_id).exists():
break
            u = cls.create(short_id=short_id, url=url)
        return u
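# Illustrative sketch (not part of the original module): Url.shorten() reuses an
# existing row when the URL was shortened before, otherwise it mints a fresh
# 5-character id.  The "/url/<short_id>" path in the comment is an assumption.
def _example_short_link(long_url):
    """Illustrative only: return the short id for a (possibly new) shortened URL."""
    u = Url.shorten(long_url)
    return u.short_id  # e.g. link to "/url/" + u.short_id in an invitation email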
class Funding(BaseModel):
"""Uploaded research Funding record."""
short_id = CharField(unique=True, max_length=5)
url = TextField()
class Client(AuditedModel):
"""API Client Application/Consumer.
A client is the app which wants to use the resource of a user.
It is suggested that the client is registered by a user on your site,
but it is not required.
"""
name = CharField(null=True, max_length=40, help_text="human readable name, not required")
homepage_url = CharField(null=True, max_length=100)
description = CharField(
null=True, max_length=400, help_text="human readable description, not required"
)
user = ForeignKeyField(
User, null=True, on_delete="SET NULL", help_text="creator of the client, not required"
)
org = ForeignKeyField(Organisation, on_delete="CASCADE", backref="client_applications")
client_id = CharField(max_length=100, unique=True)
client_secret = CharField(max_length=55, unique=True)
is_confidential = BooleanField(null=True, help_text="public or confidential")
grant_type = CharField(max_length=18, default="client_credentials", null=True)
response_type = CharField(max_length=4, default="code", null=True)
_redirect_uris = TextField(null=True)
_default_scopes = TextField(null=True)
def save(self, *args, **kwargs): # noqa: D102
if self.is_dirty() and not getattr(self, "user_id") and current_user:
self.user_id = current_user.id
return super().save(*args, **kwargs)
@property
def client_type(self): # noqa: D102
if self.is_confidential:
return "confidential"
return "public"
@property
def redirect_uris(self): # noqa: D102
if self._redirect_uris:
return self._redirect_uris.split()
return []
@redirect_uris.setter
def redirect_uris(self, value):
if value and isinstance(value, str):
self._redirect_uris = value
@property
def callback_urls(self): # noqa: D102
return self._redirect_uris
@callback_urls.setter
def callback_urls(self, value):
self._redirect_uris = value
@property
def default_redirect_uri(self): # noqa: D102
ru = self.redirect_uris
if not ru:
return None
return self.redirect_uris[0]
@property
def default_scopes(self): # noqa: D102
if self._default_scopes:
return self._default_scopes.split()
return []
def validate_scopes(self, scopes):
"""Validate client requested scopes."""
return "/webhook" in scopes or not scopes
def __str__(self): # noqa: D102
return self.name or self.homepage_url or self.description
class Grant(BaseModel):
"""Grant Token / Authorization Code.
A grant token is created in the authorization flow, and will be destroyed when
the authorization is finished. In this case, it would be better to store the data
in a cache, which leads to better performance.
"""
user = ForeignKeyField(User, on_delete="CASCADE")
# client_id = db.Column(
# db.String(40), db.ForeignKey('client.client_id'),
# nullable=False,
# )
client = ForeignKeyField(Client, index=True, on_delete="CASCADE")
code = CharField(max_length=255, index=True)
redirect_uri = CharField(max_length=255, null=True)
expires = DateTimeField(null=True)
_scopes = TextField(null=True)
# def delete(self):
# super().delete().execute()
# return self
@property
def scopes(self): # noqa: D102
if self._scopes:
return self._scopes.split()
return []
@scopes.setter
def scopes(self, value): # noqa: D102
if isinstance(value, str):
self._scopes = value
else:
self._scopes = " ".join(value)
class Token(BaseModel):
"""Bearer Token.
A bearer token is the final token that could be used by the client.
There are other token types, but bearer token is widely used.
Flask-OAuthlib only comes with a bearer token.
"""
client = ForeignKeyField(Client, index=True, on_delete="CASCADE")
user = ForeignKeyField(User, null=True, index=True, on_delete="SET NULL")
token_type = CharField(max_length=40)
access_token = CharField(max_length=100, unique=True)
refresh_token = CharField(max_length=100, unique=True, null=True)
created_at = DateTimeField(default=datetime.utcnow, null=True)
expires_in = IntegerField(null=True)
expires = DateTimeField(null=True, index=True)
_scopes = TextField(null=True)
@property
def scopes(self): # noqa: D102
if self._scopes:
return self._scopes.split()
return []
@property
def expires_at(self): # noqa: D102
return self.expires
class AsyncOrcidResponse(BaseModel):
"""Asynchronouly invoked ORCID API calls."""
job_id = UUIDField(primary_key=True)
enqueued_at = DateTimeField(default=datetime.utcnow)
executed_at = DateTimeField(null=True)
method = CharField(max_length=10)
url = CharField(max_length=200)
status_code = SmallIntegerField(null=True)
headers = TextField(null=True)
body = TextField(null=True)
class MailLog(BaseModel):
"""Email log - the log of email sent from the Hub."""
sent_at = DateTimeField(default=datetime.utcnow)
org = ForeignKeyField(Organisation, null=True)
recipient = CharField()
sender = CharField()
subject = CharField()
was_sent_successfully = BooleanField(null=True)
error = TextField(null=True)
token = CharField(max_length=10)
DeferredForeignKey.resolve(User)
def readup_file(input_file):
"""Read up the whole content and decode it and return the whole content."""
raw = input_file.read()
detected_encoding = chardet.detect(raw).get("encoding")
encoding_list = ["utf-8", "utf-8-sig", "utf-16", "utf-16-le", "utf-16-be", "latin-1"]
if detected_encoding:
encoding_list.insert(0, detected_encoding)
for encoding in encoding_list:
try:
return raw.decode(encoding)
except UnicodeDecodeError:
continue
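# Illustrative sketch (not part of the original module): readup_file() tries the
# chardet-detected encoding first, then common fallbacks, and returns None only
# if every candidate fails.
def _example_read_upload(path):
    """Illustrative only: decode an uploaded file of unknown encoding to text."""
    with open(path, "rb") as input_file:
        return readup_file(input_file)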
def create_tables(safe=True, drop=False):
"""Create all DB tables."""
try:
db.connect()
except OperationalError:
pass
for model in [
File,
Organisation,
User,
UserOrg,
OrcidToken,
UserOrgAffiliation,
OrgInfo,
OrcidApiCall,
OrcidAuthorizeCall,
Task,
Log,
AffiliationRecord,
AffiliationExternalId,
GroupIdRecord,
OrgInvitation,
Url,
UserInvitation,
FundingRecord,
WorkRecord,
WorkContributor,
WorkExternalId,
WorkInvitee,
FundingContributor,
FundingInvitee,
ExternalId,
PeerReviewRecord,
PeerReviewInvitee,
PeerReviewExternalId,
OtherIdRecord,
PropertyRecord,
Client,
Grant,
Token,
Delegate,
AsyncOrcidResponse,
MessageRecord,
Invitee,
RecordInvitee,
ResourceRecord,
MailLog,
]:
model.bind(db)
if drop and model.table_exists():
model.drop_table()
if not model.table_exists():
model.create_table(safe=safe)
def create_audit_tables():
"""Create all DB audit tables for PostgreSQL DB."""
try:
db.connect()
except OperationalError:
pass
if isinstance(db, PostgresqlDatabase):
with open(
os.path.join(os.path.dirname(__file__), "sql", "auditing.sql"), "br"
) as input_file:
sql = readup_file(input_file)
db.commit()
with db.cursor() as cr:
cr.execute(sql)
db.commit()
def drop_tables():
"""Drop all model tables."""
if isinstance(db, SqliteDatabase):
foreign_keys = db.pragma("foreign_keys")
db.pragma("foreign_keys", 0)
for m in (
File,
User,
UserOrg,
OrcidToken,
UserOrgAffiliation,
OrgInfo,
OrgInvitation,
OrcidApiCall,
OrcidAuthorizeCall,
OtherIdRecord,
FundingContributor,
FundingInvitee,
FundingRecord,
PropertyRecord,
PeerReviewInvitee,
PeerReviewExternalId,
PeerReviewRecord,
WorkInvitee,
WorkExternalId,
WorkContributor,
WorkRecord,
AffiliationRecord,
AffiliationExternalId,
ExternalId,
Url,
UserInvitation,
Task,
Organisation,
):
m.bind(db)
if m.table_exists():
try:
m.drop_table(
fail_silently=True,
safe=True,
cascade=hasattr(db, "drop_cascade") and db.drop_cascade,
)
except OperationalError:
pass
if isinstance(db, SqliteDatabase):
db.pragma("foreign_keys", foreign_keys)
def load_yaml_json(filename=None, source=None, content_type=None):
"""Create a common way of loading JSON or YAML file."""
if not content_type:
_, ext = os.path.splitext(filename or "")
if not ext:
source = source.strip()
content_type = (
"json"
if ((not ext and (source.startswith("[") or source.startswith("{"))) or ext == ".json")
else "yaml"
)
if content_type == "yaml":
        data = json.loads(json.dumps(yaml.safe_load(source)), object_pairs_hook=NestedDict)
else:
data = json.loads(source, object_pairs_hook=NestedDict)
    # Expect either a bare list of records or a dict with a "records" key:
    if not isinstance(data, list) and not (isinstance(data, dict) and "records" in data):
raise SchemaError("Schema validation failed:\n - Expecting a list of Records")
return data
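# Illustrative sketch (not part of the original module): load_yaml_json() picks
# the parser from the file extension (or a leading "[" / "{" when there is no
# extension) and accepts either a bare record list or a {"records": [...]} dict.
def _example_load_records():
    """Illustrative only: both calls return an empty "records" container."""
    from_json = load_yaml_json(filename="task.json", source='{"records": []}')
    from_yaml = load_yaml_json(filename="task.yaml", source="records: []\n")
    return from_json, from_yaml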
def del_none(d):
"""
Delete keys with the value ``None`` in a dictionary, recursively.
So that the schema validation will not fail, for elements that are none
"""
for key, value in list(d.items()):
if value is None:
del d[key]
elif isinstance(value, list):
for item in value:
if isinstance(item, dict):
del_none(item)
elif isinstance(value, dict):
del_none(value)
return d
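# Illustrative sketch (not part of the original module): del_none() mutates its
# argument in place (and returns it), recursing into nested dicts and lists.
def _example_del_none():
    """Illustrative only: {"a": None, "b": {"c": None, "d": 1}} -> {"b": {"d": 1}}."""
    return del_none({"a": None, "b": {"c": None, "d": 1}})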
def get_val(d, *keys, default=None):
"""To get the value from uploaded fields."""
if isinstance(d, NestedDict):
return d.get(*keys, default=default)
for k in keys:
if not d:
break
d = d.get(k, default)
return d
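# Illustrative sketch (not part of the original module): get_val() walks plain
# nested dicts key by key; a NestedDict resolves the whole key path itself.
def _example_get_val():
    """Illustrative only: returns "CAPS" for the nested lookup below."""
    return get_val({"invitees": {"first": {"name": "CAPS"}}}, "invitees", "first", "name")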
@lru_cache()
def audit_models():
"""Inrospects the audit trail table models."""
# return generate_models(db, schema="audit") if isinstance(db, PostgresqlDatabase) else {}
return {}
| mit | -7,884,738,462,338,003,000 | 37.16707 | 117 | 0.521994 | false |
vlfedotov/django-business-logic | tests/test_reference.py | 1 | 2699 | # -*- coding: utf-8 -*-
from .common import *
class ReferenceDescriptorTest(TestCase):
def setUp(self):
self.content_type = ContentType.objects.get_for_model(Model)
def test_reference_descriptor_search_fields_empty(self):
reference_descriptor = ReferenceDescriptor.objects.create(content_type=self.content_type)
self.assertEqual([], reference_descriptor.get_search_fields())
def test_reference_descriptor_search_fields_split(self):
reference_descriptor = ReferenceDescriptor.objects.create(
content_type=self.content_type, search_fields='xxx, yyy zzz; aa_bb__cc')
self.assertEqual(['xxx', 'yyy', 'zzz', 'aa_bb__cc'], reference_descriptor.get_search_fields())
# see also tests.test_program.ProgramTest.test_program_version_execute_set_reference_variable
class ReferenceConstantTest(TestCase):
def test_interpret(self):
constant = ReferenceConstant.objects.create()
root = Node.add_root(content_object=constant)
test_model = Model.objects.create()
root.add_child(content_object=test_model)
context = Context()
        self.assertEqual(test_model, constant.interpret(context))
def test_operator_eq_equals(self):
root = Node.add_root(content_object=BinaryOperator(operator='=='))
constant1 = ReferenceConstant.objects.create()
test_model1 = Model.objects.create()
constant1_node = root.add_child(content_object=constant1)
constant1_node.add_child(content_object=test_model1)
root = Node.objects.get(id=root.id)
constant2 = ReferenceConstant.objects.create()
test_model2 = Model.objects.create()
constant2_node = root.add_child(content_object=constant2)
constant2_node.add_child(content_object=test_model1)
root = Node.objects.get(id=root.id)
context = Context(log=True)
self.assertTrue(root.interpret(context))
def test_operator_eq_not_equals(self):
root = Node.add_root(content_object=BinaryOperator(operator='=='))
constant1 = ReferenceConstant.objects.create()
test_model1 = Model.objects.create()
constant1_node = root.add_child(content_object=constant1)
constant1_node.add_child(content_object=test_model1)
root = Node.objects.get(id=root.id)
constant2 = ReferenceConstant.objects.create()
test_model2 = Model.objects.create()
constant2_node = root.add_child(content_object=constant2)
constant2_node.add_child(content_object=test_model2)
root = Node.objects.get(id=root.id)
context = Context(log=True)
self.assertFalse(root.interpret(context))
| mit | -730,975,050,206,971,100 | 36.486111 | 102 | 0.685069 | false |
fim/locksmith-agent | modules/locksmith/commands.py | 1 | 6091 | import os
import inspect
from datetime import datetime
from argparse import ArgumentParser
from locksmith.lock import *
from locksmith.log import logger
class CommandlineArgumentError(Exception):
pass
class CommandError(Exception):
pass
def cmd_list(conf, argv):
"""
    List all locks currently held for the registered user
"""
usage = "%(prog)s [options] list [list_options]"
description = inspect.getdoc(cmd_list)
parser = ArgumentParser(
usage = usage,
description = description)
args = parser.parse_args(argv)
try:
try:
lrpc = LockRPC(**conf)
except TypeError:
raise CommandError("Config file is missing or is invalid. Try re-registering your client")
for l in lrpc.list():
print l['stub']
except ListException,e:
raise CommandError("Couldn't list locks for user %s: %s" % (conf['username'], e))
def cmd_lock(conf, argv):
"""
Acquire a lock from the server
"""
usage = "%(prog)s [options] lock [lock_options] lock_name"
description = inspect.getdoc(cmd_lock)
parser = ArgumentParser(
usage = usage,
description = description)
parser.add_argument("-s", "--silent", action="store_true",
default=False, help="Fail silently on error",)
parser.add_argument("-x", "--exclusive", action="store_true",
default=False, help="Acquire exclusive lock",)
parser.add_argument("lock", help="lock name",)
args = parser.parse_args(argv)
try:
lrpc = LockRPC(**conf)
lrpc.lock(args.lock, args.exclusive)
except TypeError:
raise CommandError("Config file is missing or is invalid. Try re-registering your client")
except LockException,e:
if args.silent:
return
raise CommandError("Couldn't acquire lock %s: %s" % (args.lock, e))
def cmd_unlock(conf, argv):
"""
    Release a previously acquired lock
"""
description = inspect.getdoc(cmd_unlock)
usage = "%(prog)s [options] unlock [unlock_options] lock_name"
parser = ArgumentParser(
usage = usage,
description = description)
parser.add_argument("-s", "--silent", action="store_true",
default=False, help="Fail silently on error",)
parser.add_argument("lock", help="lock name",)
args = parser.parse_args(argv)
try:
lrpc = LockRPC(**conf)
lrpc.unlock(args.lock)
except TypeError:
raise CommandError("Config file is missing or is invalid. Try re-registering your client")
except UnlockException,e:
if args.silent:
return
raise CommandError("Couldn't release lock %s: %s" % (args.lock, e))
def cmd_register(conf, argv):
"""
Register with the lock server
"""
usage = "%(prog)s [options] register [register_options]"
description = inspect.getdoc(cmd_register)
parser = ArgumentParser(
usage=usage,
description = "Automatically register with the lock server and " \
"initialize the config file")
parser.add_argument("-k", "--insecure", action="store_true",
default=False, help="Do not verify server certificate",)
parser.add_argument("server", help="server url for RPC service",)
args = parser.parse_args(argv)
if not args.server:
raise CommandlineArgumentError("You need to define a server URL")
if os.path.exists(os.path.expanduser(conf.filename)):
raw_input("Config file already exists. If you want to abort press ^C otherwise press enter.")
try:
lrpc = LockRPC(server=args.server, https_verify=(not args.insecure))
u,p = lrpc.register()
except TypeError:
raise CommandError("Config file is missing or is invalid. Try re-registering your client")
except RegisterException,e:
raise CommandError("Couldn't register with server: %s" % e)
try:
conf['username'] = u
conf['password'] = p
conf['server'] = args.server
if args.insecure: conf['https_verify'] = 'False'
conf.save()
except Exception,e:
logger.error("Couldn't config %s: %s" % (conf.filename, e))
raise
def cmd_execute(conf, argv):
"""
Execute command if lock can be acquired
"""
usage = "%(prog)s [options] execute [execute_options] -l lock_name command"
description = inspect.getdoc(cmd_execute)
parser = ArgumentParser(
usage = usage,
description = description)
parser.add_argument("-s", "--silent", action="store_true",
default=False, help="Fail silently on error",)
parser.add_argument("-l", "--lock", dest="lock",
required=True, help="lock name",)
parser.add_argument("-x", "--exclusive", action="store_true",
default=False, help="Acquire exclusive lock",)
parser.add_argument("command", help="command to execute",)
args = parser.parse_args(argv)
try:
lrpc = LockRPC(**conf)
lrpc.lock(args.lock, exclusive=args.exclusive)
from subprocess import call
call(args.command.split(" "))
lrpc.unlock(args.lock)
except LockException,e:
if args.silent:
return
raise CommandError("Couldn't acquire lock %s: %s" % (args.lock, e))
except (Exception, SystemExit):
lrpc.unlock(args.lock)
raise
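# Illustrative sketch (not part of the original module): cmd_execute() can also
# be driven programmatically; ``conf`` must be the config mapping that
# cmd_register writes (server URL plus credentials).
def _example_cmd_execute(conf):
    """Illustrative only: run "sleep 1" while holding an exclusive lock "demo"."""
    cmd_execute(conf, ["--exclusive", "--lock", "demo", "sleep 1"])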
def cmd_help(conf, argv):
"""
List available commands
"""
usage = "%(prog)s [options] help [help_options] [command_name]"
description = inspect.getdoc(cmd_help)
parser = ArgumentParser(usage=usage,description=description)
parser.add_argument("command", nargs="?", default=None, help="command name")
args = parser.parse_args(argv)
import locksmith.util
cmds = locksmith.util.discover_commands()
if args.command:
try:
            cmds[args.command]({}, ['--help'])
except KeyError:
raise Exception("Command not found")
logger.info("Available commands:")
for k in sorted(cmds.keys()):
logger.info(" {:16}\t{}".format(k, inspect.getdoc(cmds[k])))
| bsd-2-clause | -2,406,269,660,180,101,600 | 31.747312 | 102 | 0.626991 | false |
krisawatson/plugin.video.genesis | modules/sources/xmovies8_mv.py | 1 | 3436 | # -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
import urllib
import urlparse
from modules.libraries import cleantitle
from modules.libraries import cloudflare
from modules.libraries import client
class source:
def __init__(self):
self.base_link = 'http://xmovies8.tv'
self.search_link = 'https://www.google.com/search?q=%s&sitesearch=xmovies8.co'
def get_movie(self, imdb, title, year):
try:
query = self.search_link % (urllib.quote_plus(title))
result = client.source(query)
title = cleantitle.movie(title)
years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
result = client.parseDOM(result, "h3", attrs = { "class": ".+?" })
result = [(client.parseDOM(i, "a", ret="href"), client.parseDOM(i, "a")) for i in result]
result = [(i[0][0], i[1][-1]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
result = [i for i in result if any(x in i[0] for x in years) or any(x in i[1] for x in years)]
result = [i[0] for i in result if title in cleantitle.movie(i[0]) or title in cleantitle.movie(i[1])][0]
try: url = re.compile('//.+?(/.+)').findall(result)[0]
except: url = result
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
result = cloudflare.source(url)
url = re.compile('(<a .+?</a>)').findall(result)
url = [(client.parseDOM(i, "a", ret="href"), client.parseDOM(i, "a")) for i in url]
url = [(i[0][0], i[1][0]) for i in url if len(i[0]) > 0 and len(i[1]) > 0]
try: sources.append({'source': 'GVideo', 'quality': '1080p', 'provider': 'Xmovies8', 'url': [i[0] for i in url if i[1].startswith('1920') and 'google' in i[0]][0]})
except: pass
try: sources.append({'source': 'GVideo', 'quality': 'HD', 'provider': 'Xmovies8', 'url': [i[0] for i in url if i[1].startswith('1280') and 'google' in i[0]][0]})
except: pass
return sources
except:
return sources
def resolve(self, url):
try:
if url.startswith('stack://'): return url
url = client.request(url, output='geturl')
if 'requiressl=yes' in url: url = url.replace('http://', 'https://')
else: url = url.replace('https://', 'http://')
return url
except:
return
| gpl-3.0 | -7,435,579,324,462,938,000 | 35.553191 | 176 | 0.575378 | false |
kobotoolbox/formpack | tests/test_kobo_locking.py | 1 | 12350 | # coding: utf-8
import base64
from io import BytesIO
from unittest import TestCase
import xlwt
from formpack.constants import KOBO_LOCK_SHEET
from formpack.utils.kobo_locking import (
get_kobo_locking_profiles,
revert_kobo_lock_structure,
strip_kobo_locking_profile,
)
from formpack.utils.exceptions import FormPackLibraryLockingError
class TestKoboLocking(TestCase):
def setUp(self):
self.locking_profiles = [
['restriction', 'core', 'flex', 'delete'],
['choice_add', 'locked', 'locked', ''],
['choice_delete', '', '', 'locked'],
['choice_value_edit', '', '', ''],
['choice_label_edit', '', '', ''],
['choice_order_edit', 'locked', '', ''],
['question_delete', 'locked', 'locked', 'locked'],
['question_label_edit', 'locked', 'locked', ''],
['question_settings_edit', 'locked', 'locked', ''],
['question_skip_logic_edit', 'locked', 'locked', ''],
['question_validation_edit', 'locked', 'locked', ''],
['group_delete', 'locked', '', 'locked'],
['group_label_edit', '', '', ''],
['group_question_add', 'locked', 'locked', ''],
['group_question_delete', 'locked', 'locked', 'locked'],
['group_question_order_edit', 'locked', 'locked', ''],
['group_settings_edit', 'locked', 'locked', ''],
['group_skip_logic_edit', 'locked', 'locked', ''],
['group_split', 'locked', 'locked', ''],
['form_replace', 'locked', '', ''],
['group_add', 'locked', '', ''],
['question_add', 'locked', '', ''],
['question_order_edit', 'locked', '', ''],
['language_edit', 'locked', '', ''],
['form_appearance', 'locked', '', ''],
['form_meta_edit', '', '', ''],
]
def _construct_xls_for_import(self, sheet_name, sheet_content):
workbook_to_import = xlwt.Workbook()
worksheet = workbook_to_import.add_sheet(sheet_name)
for row_num, row_list in enumerate(sheet_content):
for col_num, cell_value in enumerate(row_list):
worksheet.write(row_num, col_num, cell_value)
xls_import_io = BytesIO()
workbook_to_import.save(xls_import_io)
xls_import_io.seek(0)
return xls_import_io
def test_get_kobo_locking_profiles(self):
expected_locking_profiles = [
{
'name': 'core',
'restrictions': [
'choice_add',
'choice_order_edit',
'question_delete',
'question_label_edit',
'question_settings_edit',
'question_skip_logic_edit',
'question_validation_edit',
'group_delete',
'group_question_add',
'group_question_delete',
'group_question_order_edit',
'group_settings_edit',
'group_skip_logic_edit',
'group_split',
'form_replace',
'group_add',
'question_add',
'question_order_edit',
'language_edit',
'form_appearance',
],
},
{
'name': 'delete',
'restrictions': [
'choice_delete',
'question_delete',
'group_delete',
'group_question_delete',
],
},
{
'name': 'flex',
'restrictions': [
'choice_add',
'question_delete',
'question_label_edit',
'question_settings_edit',
'question_skip_logic_edit',
'question_validation_edit',
'group_question_add',
'group_question_delete',
'group_question_order_edit',
'group_settings_edit',
'group_skip_logic_edit',
'group_split',
],
},
]
xls = self._construct_xls_for_import(
KOBO_LOCK_SHEET, self.locking_profiles
)
actual_locking_profiles = get_kobo_locking_profiles(xls)
for profiles in expected_locking_profiles:
name = profiles['name']
expected_restrictions = profiles['restrictions']
actual_restrictions = [
val['restrictions']
for val in actual_locking_profiles
if val['name'] == name
][0]
assert expected_restrictions == actual_restrictions
def test_revert_kobo_lock_structure(self):
expected_reverted_locking_profiles = [
{'restriction': 'choice_add', 'core': 'locked', 'flex': 'locked'},
{'restriction': 'choice_delete', 'delete': 'locked'},
{'restriction': 'choice_label_edit'},
{'restriction': 'choice_value_edit'},
{'restriction': 'choice_order_edit', 'core': 'locked'},
{
'restriction': 'question_delete',
'core': 'locked',
'flex': 'locked',
'delete': 'locked',
},
{
'restriction': 'question_label_edit',
'core': 'locked',
'flex': 'locked',
},
{
'restriction': 'question_settings_edit',
'core': 'locked',
'flex': 'locked',
},
{
'restriction': 'question_skip_logic_edit',
'core': 'locked',
'flex': 'locked',
},
{
'restriction': 'question_validation_edit',
'core': 'locked',
'flex': 'locked',
},
{
'restriction': 'group_delete',
'core': 'locked',
'delete': 'locked',
},
{
'restriction': 'group_split',
'core': 'locked',
'flex': 'locked',
},
{'restriction': 'group_label_edit'},
{
'restriction': 'group_question_add',
'core': 'locked',
'flex': 'locked',
},
{
'restriction': 'group_question_delete',
'core': 'locked',
'flex': 'locked',
'delete': 'locked',
},
{
'restriction': 'group_question_order_edit',
'core': 'locked',
'flex': 'locked',
},
{
'restriction': 'group_settings_edit',
'core': 'locked',
'flex': 'locked',
},
{
'restriction': 'group_skip_logic_edit',
'core': 'locked',
'flex': 'locked',
},
{'restriction': 'form_replace', 'core': 'locked'},
{'restriction': 'group_add', 'core': 'locked'},
{'restriction': 'question_add', 'core': 'locked'},
{'restriction': 'question_order_edit', 'core': 'locked'},
{'restriction': 'language_edit', 'core': 'locked'},
{'restriction': 'form_appearance', 'core': 'locked'},
{'restriction': 'form_meta_edit'},
]
xls = self._construct_xls_for_import(
KOBO_LOCK_SHEET, self.locking_profiles
)
actual_reverted_locks = {
KOBO_LOCK_SHEET: get_kobo_locking_profiles(xls)
}
revert_kobo_lock_structure(actual_reverted_locks)
def _get_sorted_restrictions(restrictions):
return sorted(restrictions, key=lambda k: k['restriction'])
actual = _get_sorted_restrictions(
actual_reverted_locks[KOBO_LOCK_SHEET]
)
expected = _get_sorted_restrictions(expected_reverted_locking_profiles)
assert len(actual) == len(expected)
assert actual == expected
def test_strip_kobo_locks_from_survey_content(self):
content = {
'survey': [
{
'name': 'today',
'type': 'today',
'$kuid': 'pitYOxYwh',
'$autoname': 'today',
},
{
'name': 'gender',
'type': 'select_one',
'$kuid': '6bPK3a1G1',
'label': ["Respondent's gender?"],
'required': True,
'$autoname': 'gender',
'kobo--locking-profile': 'flex',
'select_from_list_name': 'gender',
},
{
'name': 'age',
'type': 'integer',
'$kuid': 'Ms8NYWNpT',
'label': ["Respondent's age?"],
'required': True,
'$autoname': 'age',
},
{
'name': 'confirm',
'type': 'select_one',
'$kuid': 'SBHBly6cC',
'label': ['Is your age really ${age}?'],
'relevant': '${age}!=' '',
'required': True,
'$autoname': 'confirm',
'kobo--locking-profile': 'delete',
'select_from_list_name': 'yesno',
},
{
'name': 'group_1',
'type': 'begin_group',
'$kuid': 'pUGHAi9Wv',
'label': ['A message from our sponsors'],
'$autoname': 'group_1',
'kobo--locking-profile': 'core',
},
{
'name': 'note_1',
'type': 'note',
'$kuid': 'KXV08ZVMS',
'label': ['Hi there 👋'],
'$autoname': 'note_1',
},
{'type': 'end_group', '$kuid': '04eEDul2R'},
]
}
strip_kobo_locking_profile(content)
for item in content['survey']:
assert 'kobo--locking-profile' not in item
def test_no_locking_profiles_raises_exception(self):
no_profiles = [[row[0]] for row in self.locking_profiles]
xls = self._construct_xls_for_import(
KOBO_LOCK_SHEET, no_profiles
)
try:
get_kobo_locking_profiles(xls)
except FormPackLibraryLockingError as e:
assert str(e) == 'At least one locking profile must be defined.'
def test_locking_profile_name_is_locked_raises_exception(self):
locking_profiles = self.locking_profiles
locking_profiles[0][1] = 'locked'
xls = self._construct_xls_for_import(
KOBO_LOCK_SHEET, locking_profiles
)
try:
get_kobo_locking_profiles(xls)
except FormPackLibraryLockingError as e:
assert str(e) == 'Locking profile name of "locked" cannot be used.'
def test_invalid_restriction_raises_exception(self):
locking_profiles = self.locking_profiles
locking_profiles.append(['invalid_restriction', 'locked', 'locked', 'locked'])
xls = self._construct_xls_for_import(
KOBO_LOCK_SHEET, locking_profiles
)
try:
get_kobo_locking_profiles(xls)
except FormPackLibraryLockingError as e:
assert str(e) == 'invalid_restriction is not a valid restriction.'
def test_restriction_column_missing_raises_exception(self):
locking_profiles = self.locking_profiles
locking_profiles[0][0] = 'something_other_than_restriction'
xls = self._construct_xls_for_import(
KOBO_LOCK_SHEET, locking_profiles
)
try:
get_kobo_locking_profiles(xls)
except FormPackLibraryLockingError as e:
assert str(e) == 'The column name `restriction` must be present.'
| gpl-3.0 | 556,275,671,662,753,500 | 36.75841 | 86 | 0.452499 | false |
snehasi/servo | components/script/dom/bindings/codegen/CodegenRust.py | 1 | 284971 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Common codegen classes.
from collections import defaultdict
from itertools import groupby
import operator
import os
import re
import string
import textwrap
import functools
from WebIDL import (
BuiltinTypes,
IDLBuiltinType,
IDLNullValue,
IDLNullableType,
IDLObject,
IDLType,
IDLInterfaceMember,
IDLUndefinedValue,
IDLWrapperType,
)
from Configuration import (
MakeNativeName,
MemberIsUnforgeable,
getModuleFromObject,
getTypesFromCallback,
getTypesFromDescriptor,
getTypesFromDictionary,
iteratorNativeType
)
AUTOGENERATED_WARNING_COMMENT = \
"/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n\n"
FINALIZE_HOOK_NAME = '_finalize'
TRACE_HOOK_NAME = '_trace'
CONSTRUCT_HOOK_NAME = '_constructor'
HASINSTANCE_HOOK_NAME = '_hasInstance'
RUST_KEYWORDS = {"abstract", "alignof", "as", "become", "box", "break", "const", "continue",
"else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in",
"let", "loop", "macro", "match", "mod", "move", "mut", "offsetof", "override",
"priv", "proc", "pub", "pure", "ref", "return", "static", "self", "sizeof",
"struct", "super", "true", "trait", "type", "typeof", "unsafe", "unsized",
"use", "virtual", "where", "while", "yield"}
def replaceFileIfChanged(filename, newContents):
"""
Read a copy of the old file, so that we don't touch it if it hasn't changed.
Returns True if the file was updated, false otherwise.
"""
# XXXjdm This doesn't play well with make right now.
# Force the file to always be updated, or else changing CodegenRust.py
# will cause many autogenerated bindings to be regenerated perpetually
# until the result is actually different.
# oldFileContents = ""
# try:
# with open(filename, 'rb') as oldFile:
# oldFileContents = ''.join(oldFile.readlines())
# except:
# pass
# if newContents == oldFileContents:
# return False
with open(filename, 'wb') as f:
f.write(newContents)
return True
def toStringBool(arg):
return str(not not arg).lower()
def toBindingNamespace(arg):
return re.sub("((_workers)?$)", "Binding\\1", MakeNativeName(arg))
def stripTrailingWhitespace(text):
tail = '\n' if text.endswith('\n') else ''
lines = text.splitlines()
for i in range(len(lines)):
lines[i] = lines[i].rstrip()
return '\n'.join(lines) + tail
def innerContainerType(type):
assert type.isSequence() or type.isMozMap()
return type.inner.inner if type.nullable() else type.inner
def wrapInNativeContainerType(type, inner):
if type.isSequence():
containerType = "Vec"
elif type.isMozMap():
containerType = "MozMap"
else:
raise TypeError("Unexpected container type %s", type)
return CGWrapper(inner, pre=containerType + "<", post=">")
builtinNames = {
IDLType.Tags.bool: 'bool',
IDLType.Tags.int8: 'i8',
IDLType.Tags.int16: 'i16',
IDLType.Tags.int32: 'i32',
IDLType.Tags.int64: 'i64',
IDLType.Tags.uint8: 'u8',
IDLType.Tags.uint16: 'u16',
IDLType.Tags.uint32: 'u32',
IDLType.Tags.uint64: 'u64',
IDLType.Tags.unrestricted_float: 'f32',
IDLType.Tags.float: 'Finite<f32>',
IDLType.Tags.unrestricted_double: 'f64',
IDLType.Tags.double: 'Finite<f64>'
}
numericTags = [
IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32, IDLType.Tags.uint32,
IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float,
IDLType.Tags.unrestricted_double
]
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them.
lineStartDetector = re.compile("^(?=[^\n#])", re.MULTILINE)
def indent(s, indentLevel=2):
"""
Indent C++ code.
Weird secret feature: this doesn't indent lines that start with # (such as
#include lines or #ifdef/#endif).
"""
if s == "":
return s
return re.sub(lineStartDetector, indentLevel * " ", s)
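# Illustrative sketch (not part of the original file): indent() prefixes every
# line except blank ones and preprocessor-style "#" lines.
def _example_indent():
    """Illustrative only: returns "  a\\n#include <x>\\n  b\\n"."""
    return indent("a\n#include <x>\nb\n")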
# dedent() and fill() are often called on the same string multiple
# times. We want to memoize their return values so we don't keep
# recomputing them all the time.
def memoize(fn):
"""
Decorator to memoize a function of one argument. The cache just
grows without bound.
"""
cache = {}
@functools.wraps(fn)
def wrapper(arg):
retval = cache.get(arg)
if retval is None:
retval = cache[arg] = fn(arg)
return retval
return wrapper
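# Usage sketch (hypothetical helper): any pure single-argument function can be
# wrapped, e.g.
#
#   @memoize
#   def native_name(s):
#       return s[0].upper() + s[1:]
#
# Repeated calls with the same argument hit the cache; nothing is ever
# evicted, which is fine here because the set of distinct inputs is small.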
@memoize
def dedent(s):
"""
Remove all leading whitespace from s, and remove a blank line
at the beginning.
"""
if s.startswith('\n'):
s = s[1:]
return textwrap.dedent(s)
# This works by transforming the fill()-template to an equivalent
# string.Template.
fill_multiline_substitution_re = re.compile(r"( *)\$\*{(\w+)}(\n)?")
@memoize
def compile_fill_template(template):
"""
Helper function for fill(). Given the template string passed to fill(),
do the reusable part of template processing and return a pair (t,
argModList) that can be used every time fill() is called with that
template argument.
    argModList is a list of tuples that represent modifications to be
made to args. Each modification has, in order: i) the arg name,
ii) the modified name, iii) the indent depth.
"""
t = dedent(template)
assert t.endswith("\n") or "\n" not in t
argModList = []
def replace(match):
"""
Replaces a line like ' $*{xyz}\n' with '${xyz_n}',
where n is the indent depth, and add a corresponding entry to
argModList.
Note that this needs to close over argModList, so it has to be
defined inside compile_fill_template().
"""
indentation, name, nl = match.groups()
depth = len(indentation)
# Check that $*{xyz} appears by itself on a line.
prev = match.string[:match.start()]
if (prev and not prev.endswith("\n")) or nl is None:
raise ValueError("Invalid fill() template: $*{%s} must appear by itself on a line" % name)
# Now replace this whole line of template with the indented equivalent.
modified_name = name + "_" + str(depth)
argModList.append((name, modified_name, depth))
return "${" + modified_name + "}"
t = re.sub(fill_multiline_substitution_re, replace, t)
return (string.Template(t), argModList)
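# For example (a sketch): the template line "    $*{body}\n" compiles to the
# plain substitution "${body_4}" plus the argModList entry
# ("body", "body_4", 4), recording that the caller's "body" argument must be
# re-indented by four spaces before substitution.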
def fill(template, **args):
"""
Convenience function for filling in a multiline template.
`fill(template, name1=v1, name2=v2)` is a lot like
`string.Template(template).substitute({"name1": v1, "name2": v2})`.
However, it's shorter, and has a few nice features:
* If `template` is indented, fill() automatically dedents it!
This makes code using fill() with Python's multiline strings
much nicer to look at.
* If `template` starts with a blank line, fill() strips it off.
(Again, convenient with multiline strings.)
* fill() recognizes a special kind of substitution
of the form `$*{name}`.
Use this to paste in, and automatically indent, multiple lines.
(Mnemonic: The `*` is for "multiple lines").
A `$*` substitution must appear by itself on a line, with optional
preceding indentation (spaces only). The whole line is replaced by the
corresponding keyword argument, indented appropriately. If the
argument is an empty string, no output is generated, not even a blank
line.
"""
t, argModList = compile_fill_template(template)
# Now apply argModList to args
for (name, modified_name, depth) in argModList:
if not (args[name] == "" or args[name].endswith("\n")):
raise ValueError("Argument %s with value %r is missing a newline" % (name, args[name]))
args[modified_name] = indent(args[name], depth)
return t.substitute(args)
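# Illustrative call (hypothetical values):
#
#   fill(
#       """
#       if ${cond} {
#           $*{body}
#       }
#       """,
#       cond="done", body="return;\n")
#
# dedents the template, substitutes ${cond} directly, and splices the
# re-indented multi-line "body" argument in place of the $*{body} line,
# producing "if done {\n    return;\n}\n".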
class CGThing():
"""
Abstract base class for things that spit out code.
"""
def __init__(self):
pass # Nothing for now
def define(self):
"""Produce code for a Rust file."""
raise NotImplementedError # Override me!
class CGMethodCall(CGThing):
"""
A class to generate selection of a method signature from a set of
signatures and generation of a call to that signature.
"""
def __init__(self, argsPre, nativeMethodName, static, descriptor, method):
CGThing.__init__(self)
methodName = '\\"%s.%s\\"' % (descriptor.interface.identifier.name, method.identifier.name)
def requiredArgCount(signature):
arguments = signature[1]
if len(arguments) == 0:
return 0
requiredArgs = len(arguments)
while requiredArgs and arguments[requiredArgs - 1].optional:
requiredArgs -= 1
return requiredArgs
signatures = method.signatures()
def getPerSignatureCall(signature, argConversionStartsAt=0):
signatureIndex = signatures.index(signature)
return CGPerSignatureCall(signature[0], argsPre, signature[1],
nativeMethodName + '_' * signatureIndex,
static, descriptor,
method, argConversionStartsAt)
if len(signatures) == 1:
# Special case: we can just do a per-signature method call
# here for our one signature and not worry about switching
# on anything.
signature = signatures[0]
self.cgRoot = CGList([getPerSignatureCall(signature)])
requiredArgs = requiredArgCount(signature)
if requiredArgs > 0:
code = (
"if argc < %d {\n"
" throw_type_error(cx, \"Not enough arguments to %s.\");\n"
" return false;\n"
"}" % (requiredArgs, methodName))
self.cgRoot.prepend(
CGWrapper(CGGeneric(code), pre="\n", post="\n"))
return
# Need to find the right overload
maxArgCount = method.maxArgCount
allowedArgCounts = method.allowedArgCounts
argCountCases = []
for argCount in allowedArgCounts:
possibleSignatures = method.signaturesForArgCount(argCount)
if len(possibleSignatures) == 1:
# easy case!
signature = possibleSignatures[0]
argCountCases.append(CGCase(str(argCount), getPerSignatureCall(signature)))
continue
distinguishingIndex = method.distinguishingIndexForArgCount(argCount)
# We can't handle unions at the distinguishing index.
for (returnType, args) in possibleSignatures:
if args[distinguishingIndex].type.isUnion():
raise TypeError("No support for unions as distinguishing "
"arguments yet: %s",
args[distinguishingIndex].location)
# Convert all our arguments up to the distinguishing index.
# Doesn't matter which of the possible signatures we use, since
# they all have the same types up to that point; just use
# possibleSignatures[0]
caseBody = [
CGArgumentConverter(possibleSignatures[0][1][i],
i, "args", "argc", descriptor)
for i in range(0, distinguishingIndex)]
# Select the right overload from our set.
distinguishingArg = "args.get(%d)" % distinguishingIndex
def pickFirstSignature(condition, filterLambda):
                sigs = list(filter(filterLambda, possibleSignatures))
assert len(sigs) < 2
if len(sigs) > 0:
call = getPerSignatureCall(sigs[0], distinguishingIndex)
if condition is None:
caseBody.append(call)
else:
caseBody.append(CGGeneric("if " + condition + " {"))
caseBody.append(CGIndenter(call))
caseBody.append(CGGeneric("}"))
return True
return False
# First check for null or undefined
pickFirstSignature("%s.get().is_null_or_undefined()" % distinguishingArg,
lambda s: (s[1][distinguishingIndex].type.nullable() or
s[1][distinguishingIndex].type.isDictionary()))
# Now check for distinguishingArg being an object that implements a
# non-callback interface. That includes typed arrays and
# arraybuffers.
interfacesSigs = [
s for s in possibleSignatures
if (s[1][distinguishingIndex].type.isObject() or
s[1][distinguishingIndex].type.isNonCallbackInterface())]
# There might be more than one of these; we need to check
# which ones we unwrap to.
if len(interfacesSigs) > 0:
# The spec says that we should check for "platform objects
# implementing an interface", but it's enough to guard on these
# being an object. The code for unwrapping non-callback
# interfaces and typed arrays will just bail out and move on to
# the next overload if the object fails to unwrap correctly. We
# could even not do the isObject() check up front here, but in
# cases where we have multiple object overloads it makes sense
# to do it only once instead of for each overload. That will
# also allow the unwrapping test to skip having to do codegen
# for the null-or-undefined case, which we already handled
# above.
caseBody.append(CGGeneric("if %s.get().is_object() {" %
(distinguishingArg)))
for idx, sig in enumerate(interfacesSigs):
caseBody.append(CGIndenter(CGGeneric("loop {")))
type = sig[1][distinguishingIndex].type
# The argument at index distinguishingIndex can't possibly
# be unset here, because we've already checked that argc is
# large enough that we can examine this argument.
info = getJSToNativeConversionInfo(
type, descriptor, failureCode="break;", isDefinitelyObject=True)
template = info.template
declType = info.declType
testCode = instantiateJSToNativeConversionTemplate(
template,
{"val": distinguishingArg},
declType,
"arg%d" % distinguishingIndex)
# Indent by 4, since we need to indent further than our "do" statement
caseBody.append(CGIndenter(testCode, 4))
# If we got this far, we know we unwrapped to the right
# interface, so just do the call. Start conversion with
# distinguishingIndex + 1, since we already converted
# distinguishingIndex.
caseBody.append(CGIndenter(
getPerSignatureCall(sig, distinguishingIndex + 1), 4))
caseBody.append(CGIndenter(CGGeneric("}")))
caseBody.append(CGGeneric("}"))
# XXXbz Now we're supposed to check for distinguishingArg being
# an array or a platform object that supports indexed
# properties... skip that last for now. It's a bit of a pain.
pickFirstSignature("%s.get().is_object() && is_array_like(cx, %s)" %
(distinguishingArg, distinguishingArg),
lambda s:
(s[1][distinguishingIndex].type.isSequence() or
s[1][distinguishingIndex].type.isObject()))
# Check for Date objects
# XXXbz Do we need to worry about security wrappers around the Date?
pickFirstSignature("%s.get().is_object() && "
"{ rooted!(in(cx) let obj = %s.get().to_object()); "
"let mut is_date = false; "
"assert!(JS_ObjectIsDate(cx, obj.handle(), &mut is_date)); "
"is_date }" %
(distinguishingArg, distinguishingArg),
lambda s: (s[1][distinguishingIndex].type.isDate() or
s[1][distinguishingIndex].type.isObject()))
# Check for vanilla JS objects
# XXXbz Do we need to worry about security wrappers?
pickFirstSignature("%s.get().is_object() && !is_platform_object(%s.get().to_object())" %
(distinguishingArg, distinguishingArg),
lambda s: (s[1][distinguishingIndex].type.isCallback() or
s[1][distinguishingIndex].type.isCallbackInterface() or
s[1][distinguishingIndex].type.isDictionary() or
s[1][distinguishingIndex].type.isObject()))
# The remaining cases are mutually exclusive. The
# pickFirstSignature calls are what change caseBody
# Check for strings or enums
if pickFirstSignature(None,
lambda s: (s[1][distinguishingIndex].type.isString() or
s[1][distinguishingIndex].type.isEnum())):
pass
# Check for primitives
elif pickFirstSignature(None,
lambda s: s[1][distinguishingIndex].type.isPrimitive()):
pass
# Check for "any"
elif pickFirstSignature(None,
lambda s: s[1][distinguishingIndex].type.isAny()):
pass
else:
# Just throw; we have no idea what we're supposed to
# do with this.
caseBody.append(CGGeneric("return Throw(cx, NS_ERROR_XPC_BAD_CONVERT_JS);"))
argCountCases.append(CGCase(str(argCount),
CGList(caseBody, "\n")))
overloadCGThings = []
overloadCGThings.append(
CGGeneric("let argcount = cmp::min(argc, %d);" %
maxArgCount))
overloadCGThings.append(
CGSwitch("argcount",
argCountCases,
CGGeneric("throw_type_error(cx, \"Not enough arguments to %s.\");\n"
"return false;" % methodName)))
# XXXjdm Avoid unreachable statement warnings
# overloadCGThings.append(
# CGGeneric('panic!("We have an always-returning default case");\n'
# 'return false;'))
self.cgRoot = CGWrapper(CGList(overloadCGThings, "\n"),
pre="\n")
def define(self):
return self.cgRoot.define()
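# For a hypothetical overloaded method f(long) / f(DOMString, long), the
# generated dispatch looks roughly like this sketch:
#
#   let argcount = cmp::min(argc, 2);
#   match argcount {
#       1 => { /* convert args.get(0), call the f(long) overload */ },
#       2 => { /* convert args, distinguish by type, call the right overload */ },
#       _ => {
#           throw_type_error(cx, "Not enough arguments to \"Interface.f\".");
#           return false;
#       }
#   }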
def dictionaryHasSequenceMember(dictionary):
return (any(typeIsSequenceOrHasSequenceMember(m.type) for m in
dictionary.members) or
(dictionary.parent and
dictionaryHasSequenceMember(dictionary.parent)))
def typeIsSequenceOrHasSequenceMember(type):
if type.nullable():
type = type.inner
if type.isSequence():
return True
if type.isDictionary():
return dictionaryHasSequenceMember(type.inner)
if type.isUnion():
return any(typeIsSequenceOrHasSequenceMember(m.type) for m in
type.flatMemberTypes)
return False
def union_native_type(t):
name = t.unroll().name
return 'UnionTypes::%s' % name
class JSToNativeConversionInfo():
"""
An object representing information about a JS-to-native conversion.
"""
def __init__(self, template, default=None, declType=None):
"""
        template: A string representing the conversion code. This will have
                  template substitution performed on it as follows:
                  ${val} is a handle to the JS::Value in question
        default: A string or None representing Rust code for the default
                 value (if any).
        declType: A CGThing representing the native Rust type we're converting
                  to. This is allowed to be None if the conversion code is
                  supposed to be used as-is.
"""
assert isinstance(template, str)
assert declType is None or isinstance(declType, CGThing)
self.template = template
self.default = default
self.declType = declType
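# A minimal sketch of the info getJSToNativeConversionInfo() below might
# return for a hypothetical "boolean" argument defaulting to false:
#
#   JSToNativeConversionInfo(
#       template="match FromJSValConvertible::from_jsval(cx, ${val}, ()) { ... }",
#       default="false",
#       declType=CGGeneric("bool"))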
def getJSToNativeConversionInfo(type, descriptorProvider, failureCode=None,
isDefinitelyObject=False,
isMember=False,
isArgument=False,
invalidEnumValueFatal=True,
defaultValue=None,
treatNullAs="Default",
isEnforceRange=False,
isClamp=False,
exceptionCode=None,
allowTreatNonObjectAsNull=False,
isCallbackReturnValue=False,
sourceDescription="value"):
"""
Get a template for converting a JS value to a native object based on the
given type and descriptor. If failureCode is given, then we're actually
testing whether we can convert the argument to the desired type. That
means that failures to convert due to the JS value being the wrong type of
value need to use failureCode instead of throwing exceptions. Failures to
convert that are due to JS exceptions (from toString or valueOf methods) or
out of memory conditions need to throw exceptions no matter what
failureCode is.
If isDefinitelyObject is True, that means we know the value
isObject() and we have no need to recheck that.
isMember is `False`, "Dictionary", "Union" or "Variadic", and affects
whether this function returns code suitable for an on-stack rooted binding
or suitable for storing in an appropriate larger structure.
invalidEnumValueFatal controls whether an invalid enum value conversion
attempt will throw (if true) or simply return without doing anything (if
false).
If defaultValue is not None, it's the IDL default value for this conversion
If isEnforceRange is true, we're converting an integer and throwing if the
value is out of range.
If isClamp is true, we're converting an integer and clamping if the
value is out of range.
If allowTreatNonObjectAsNull is true, then [TreatNonObjectAsNull]
extended attributes on nullable callback functions will be honored.
    The return value from this function is a JSToNativeConversionInfo
    consisting of three things:
    1)  A string representing the conversion code. This will have template
        substitution performed on it as follows:
        ${val} replaced by an expression for the JS::Value in question
    2)  A string or None representing Rust code for the default value (if any).
    3)  A CGThing representing the native Rust type we're converting to
        (declType). This is allowed to be None if the conversion code is
        supposed to be used as-is.
"""
# We should not have a defaultValue if we know we're an object
assert not isDefinitelyObject or defaultValue is None
# If exceptionCode is not set, we'll just rethrow the exception we got.
# Note that we can't just set failureCode to exceptionCode, because setting
# failureCode will prevent pending exceptions from being set in cases when
# they really should be!
if exceptionCode is None:
exceptionCode = "return false;\n"
if failureCode is None:
failOrPropagate = "throw_type_error(cx, &error);\n%s" % exceptionCode
else:
failOrPropagate = failureCode
def handleOptional(template, declType, default):
assert (defaultValue is None) == (default is None)
return JSToNativeConversionInfo(template, default, declType)
# Unfortunately, .capitalize() on a string will lowercase things inside the
# string, which we do not want.
def firstCap(string):
return string[0].upper() + string[1:]
# Helper functions for dealing with failures due to the JS value being the
# wrong type of value.
def onFailureNotAnObject(failureCode):
return CGWrapper(
CGGeneric(
failureCode or
('throw_type_error(cx, "%s is not an object.");\n'
'%s' % (firstCap(sourceDescription), exceptionCode))),
post="\n")
def onFailureInvalidEnumValue(failureCode, passedVarName):
return CGGeneric(
failureCode or
('throw_type_error(cx, &format!("\'{}\' is not a valid enum value for enumeration \'%s\'.", %s)); %s'
% (type.name, passedVarName, exceptionCode)))
def onFailureNotCallable(failureCode):
return CGGeneric(
failureCode or
('throw_type_error(cx, \"%s is not callable.\");\n'
'%s' % (firstCap(sourceDescription), exceptionCode)))
# A helper function for handling null default values. Checks that the
# default value, if it exists, is null.
def handleDefaultNull(nullValue):
if defaultValue is None:
return None
if not isinstance(defaultValue, IDLNullValue):
raise TypeError("Can't handle non-null default value here")
assert type.nullable() or type.isDictionary()
return nullValue
# A helper function for wrapping up the template body for
# possibly-nullable objecty stuff
def wrapObjectTemplate(templateBody, nullValue, isDefinitelyObject, type,
failureCode=None):
if not isDefinitelyObject:
# Handle the non-object cases by wrapping up the whole
# thing in an if cascade.
templateBody = (
"if ${val}.get().is_object() {\n" +
CGIndenter(CGGeneric(templateBody)).define() + "\n")
if type.nullable():
templateBody += (
"} else if ${val}.get().is_null_or_undefined() {\n"
" %s\n") % nullValue
templateBody += (
"} else {\n" +
CGIndenter(onFailureNotAnObject(failureCode)).define() +
"}")
return templateBody
assert not (isEnforceRange and isClamp) # These are mutually exclusive
if type.isSequence() or type.isMozMap():
innerInfo = getJSToNativeConversionInfo(innerContainerType(type),
descriptorProvider,
isMember=isMember)
declType = wrapInNativeContainerType(type, innerInfo.declType)
config = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=" >")
templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(ConversionResult::Success(value)) => value,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
"%s\n"
" }\n"
" _ => { %s },\n"
"}" % (config, indent(failOrPropagate, 8), exceptionCode))
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isUnion():
declType = CGGeneric(union_native_type(type))
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=" >")
if isMember != "Dictionary" and type_needs_tracing(type):
declType = CGTemplatedType("RootedTraceableBox", declType)
templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(ConversionResult::Success(value)) => value,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
"%s\n"
" }\n"
" _ => { %s },\n"
"}" % (indent(failOrPropagate, 8), exceptionCode))
dictionaries = [
memberType
for memberType in type.unroll().flatMemberTypes
if memberType.isDictionary()
]
if dictionaries:
if defaultValue:
assert isinstance(defaultValue, IDLNullValue)
dictionary, = dictionaries
default = "%s::%s(%s::%s::empty(cx))" % (
union_native_type(type),
dictionary.name,
CGDictionary.makeModuleName(dictionary.inner),
CGDictionary.makeDictionaryName(dictionary.inner))
else:
default = None
else:
default = handleDefaultNull("None")
return handleOptional(templateBody, declType, default)
if type.isGeckoInterface():
assert not isEnforceRange and not isClamp
descriptor = descriptorProvider.getDescriptor(
type.unroll().inner.identifier.name)
if descriptor.interface.isCallback():
name = descriptor.nativeType
declType = CGWrapper(CGGeneric(name), pre="Rc<", post=">")
template = "%s::new(cx, ${val}.get().to_object())" % name
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
template = wrapObjectTemplate("Some(%s)" % template, "None",
isDefinitelyObject, type,
failureCode)
return handleOptional(template, declType, handleDefaultNull("None"))
conversionFunction = "root_from_handlevalue"
descriptorType = descriptor.returnType
if isMember == "Variadic":
conversionFunction = "native_from_handlevalue"
descriptorType = descriptor.nativeType
elif isArgument:
descriptorType = descriptor.argumentType
templateBody = ""
isPromise = descriptor.interface.identifier.name == "Promise"
if isPromise:
# Per spec, what we're supposed to do is take the original
# Promise.resolve and call it with the original Promise as this
# value to make a Promise out of whatever value we actually have
# here. The question is which global we should use. There are
# a couple cases to consider:
#
# 1) Normal call to API with a Promise argument. This is a case the
# spec covers, and we should be using the current Realm's
# Promise. That means the current compartment.
# 2) Promise return value from a callback or callback interface.
# This is in theory a case the spec covers but in practice it
# really doesn't define behavior here because it doesn't define
# what Realm we're in after the callback returns, which is when
# the argument conversion happens. We will use the current
# compartment, which is the compartment of the callable (which
# may itself be a cross-compartment wrapper itself), which makes
# as much sense as anything else. In practice, such an API would
# once again be providing a Promise to signal completion of an
# operation, which would then not be exposed to anyone other than
# our own implementation code.
templateBody = fill(
"""
{ // Scope for our JSAutoCompartment.
rooted!(in(cx) let globalObj = CurrentGlobalOrNull(cx));
let promiseGlobal = GlobalScope::from_object_maybe_wrapped(globalObj.handle().get());
rooted!(in(cx) let mut valueToResolve = $${val}.get());
if !JS_WrapValue(cx, valueToResolve.handle_mut()) {
$*{exceptionCode}
}
match Promise::Resolve(&promiseGlobal, cx, valueToResolve.handle()) {
Ok(value) => value,
Err(error) => {
throw_dom_exception(cx, &promiseGlobal, error);
$*{exceptionCode}
}
}
}
""",
exceptionCode=exceptionCode)
else:
if descriptor.interface.isConsequential():
raise TypeError("Consequential interface %s being used as an "
"argument" % descriptor.interface.identifier.name)
if failureCode is None:
substitutions = {
"sourceDescription": sourceDescription,
"interface": descriptor.interface.identifier.name,
"exceptionCode": exceptionCode,
}
unwrapFailureCode = string.Template(
'throw_type_error(cx, "${sourceDescription} does not '
'implement interface ${interface}.");\n'
'${exceptionCode}').substitute(substitutions)
else:
unwrapFailureCode = failureCode
templateBody = fill(
"""
match ${function}($${val}) {
Ok(val) => val,
Err(()) => {
$*{failureCode}
}
}
""",
failureCode=unwrapFailureCode + "\n",
function=conversionFunction)
declType = CGGeneric(descriptorType)
if type.nullable():
templateBody = "Some(%s)" % templateBody
declType = CGWrapper(declType, pre="Option<", post=">")
templateBody = wrapObjectTemplate(templateBody, "None",
isDefinitelyObject, type, failureCode)
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isSpiderMonkeyInterface():
raise TypeError("Can't handle SpiderMonkey interface arguments yet")
if type.isDOMString():
nullBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(ConversionResult::Success(strval)) => strval,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
"%s\n"
" }\n"
" _ => { %s },\n"
"}" % (nullBehavior, indent(failOrPropagate, 8), exceptionCode))
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() == IDLType.Tags.domstring
default = 'DOMString::from("%s")' % defaultValue.value
if type.nullable():
default = "Some(%s)" % default
declType = "DOMString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isUSVString():
assert not isEnforceRange and not isClamp
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(ConversionResult::Success(strval)) => strval,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
"%s\n"
" }\n"
" _ => { %s },\n"
"}" % (indent(failOrPropagate, 8), exceptionCode))
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() in (IDLType.Tags.domstring, IDLType.Tags.usvstring)
default = 'USVString("%s".to_owned())' % defaultValue.value
if type.nullable():
default = "Some(%s)" % default
declType = "USVString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isByteString():
assert not isEnforceRange and not isClamp
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(ConversionResult::Success(strval)) => strval,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
"%s\n"
" }\n"
" _ => { %s },\n"
"}" % (indent(failOrPropagate, 8), exceptionCode))
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() in (IDLType.Tags.domstring, IDLType.Tags.bytestring)
default = 'ByteString::new(b"%s".to_vec())' % defaultValue.value
if type.nullable():
default = "Some(%s)" % default
declType = "ByteString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isEnum():
assert not isEnforceRange and not isClamp
if type.nullable():
raise TypeError("We don't support nullable enumerated arguments "
"yet")
enum = type.inner.identifier.name
if invalidEnumValueFatal:
handleInvalidEnumValueCode = onFailureInvalidEnumValue(failureCode, 'search').define()
else:
handleInvalidEnumValueCode = "return true;"
template = (
"match find_enum_value(cx, ${val}, %(pairs)s) {\n"
" Err(_) => { %(exceptionCode)s },\n"
" Ok((None, search)) => { %(handleInvalidEnumValueCode)s },\n"
" Ok((Some(&value), _)) => value,\n"
"}" % {"pairs": enum + "Values::pairs",
"exceptionCode": exceptionCode,
"handleInvalidEnumValueCode": handleInvalidEnumValueCode})
if defaultValue is not None:
assert defaultValue.type.tag() == IDLType.Tags.domstring
default = "%s::%s" % (enum, getEnumValueName(defaultValue.value))
else:
default = None
return handleOptional(template, CGGeneric(enum), default)
if type.isCallback():
assert not isEnforceRange and not isClamp
assert not type.treatNonCallableAsNull()
assert not type.treatNonObjectAsNull() or type.nullable()
assert not type.treatNonObjectAsNull() or not type.treatNonCallableAsNull()
callback = type.unroll().callback
declType = CGGeneric(callback.identifier.name)
finalDeclType = CGTemplatedType("Rc", declType)
conversion = CGCallbackTempRoot(declType.define())
if type.nullable():
declType = CGTemplatedType("Option", declType)
finalDeclType = CGTemplatedType("Option", finalDeclType)
conversion = CGWrapper(conversion, pre="Some(", post=")")
if allowTreatNonObjectAsNull and type.treatNonObjectAsNull():
if not isDefinitelyObject:
haveObject = "${val}.get().is_object()"
template = CGIfElseWrapper(haveObject,
conversion,
CGGeneric("None")).define()
else:
template = conversion
else:
template = CGIfElseWrapper("IsCallable(${val}.get().to_object())",
conversion,
onFailureNotCallable(failureCode)).define()
template = wrapObjectTemplate(
template,
"None",
isDefinitelyObject,
type,
failureCode)
if defaultValue is not None:
assert allowTreatNonObjectAsNull
assert type.treatNonObjectAsNull()
assert type.nullable()
assert isinstance(defaultValue, IDLNullValue)
default = "None"
else:
default = None
return JSToNativeConversionInfo(template, default, finalDeclType)
if type.isAny():
assert not isEnforceRange and not isClamp
assert isMember != "Union"
if isMember == "Dictionary":
# TODO: Need to properly root dictionaries
# https://github.com/servo/servo/issues/6381
declType = CGGeneric("Heap<JSVal>")
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
default = "Heap::new(NullValue())"
elif isinstance(defaultValue, IDLUndefinedValue):
default = "Heap::new(UndefinedValue())"
else:
raise TypeError("Can't handle non-null, non-undefined default value here")
return handleOptional("Heap::new(${val}.get())", declType, default)
declType = CGGeneric("HandleValue")
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
default = "HandleValue::null()"
elif isinstance(defaultValue, IDLUndefinedValue):
default = "HandleValue::undefined()"
else:
raise TypeError("Can't handle non-null, non-undefined default value here")
return handleOptional("${val}", declType, default)
if type.isObject():
assert not isEnforceRange and not isClamp
# TODO: Need to root somehow
# https://github.com/servo/servo/issues/6382
default = "ptr::null_mut()"
templateBody = wrapObjectTemplate("${val}.get().to_object()",
default,
isDefinitelyObject, type, failureCode)
if isMember in ("Dictionary", "Union"):
declType = CGGeneric("Heap<*mut JSObject>")
templateBody = "Heap::new(%s)" % templateBody
default = "Heap::new(%s)" % default
else:
# TODO: Need to root somehow
# https://github.com/servo/servo/issues/6382
declType = CGGeneric("*mut JSObject")
return handleOptional(templateBody, declType,
handleDefaultNull(default))
if type.isDictionary():
# There are no nullable dictionaries
assert not type.nullable()
typeName = "%s::%s" % (CGDictionary.makeModuleName(type.inner),
CGDictionary.makeDictionaryName(type.inner))
declType = CGGeneric(typeName)
empty = "%s::empty(cx)" % typeName
if isMember != "Dictionary" and type_needs_tracing(type):
declType = CGTemplatedType("RootedTraceableBox", declType)
empty = "RootedTraceableBox::new(%s)" % empty
template = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(ConversionResult::Success(dictionary)) => dictionary,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
"%s\n"
" }\n"
" _ => { %s },\n"
"}" % (indent(failOrPropagate, 8), exceptionCode))
return handleOptional(template, declType, handleDefaultNull(empty))
if type.isVoid():
        # This one only happens for return values, and it's easy: Just
# ignore the jsval.
return JSToNativeConversionInfo("", None, None)
if not type.isPrimitive():
raise TypeError("Need conversion for argument type '%s'" % str(type))
conversionBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
if failureCode is None:
failureCode = 'return false'
declType = CGGeneric(builtinNames[type.tag()])
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
template = (
"match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(ConversionResult::Success(v)) => v,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
"%s\n"
" }\n"
" _ => { %s }\n"
"}" % (conversionBehavior, indent(failOrPropagate, 8), exceptionCode))
if defaultValue is not None:
if isinstance(defaultValue, IDLNullValue):
assert type.nullable()
defaultStr = "None"
else:
tag = defaultValue.type.tag()
if tag in [IDLType.Tags.float, IDLType.Tags.double]:
defaultStr = "Finite::wrap(%s)" % defaultValue.value
elif tag in numericTags:
defaultStr = str(defaultValue.value)
else:
assert tag == IDLType.Tags.bool
defaultStr = toStringBool(defaultValue.value)
if type.nullable():
defaultStr = "Some(%s)" % defaultStr
else:
defaultStr = None
return handleOptional(template, declType, defaultStr)
def instantiateJSToNativeConversionTemplate(templateBody, replacements,
declType, declName):
"""
Take the templateBody and declType as returned by
getJSToNativeConversionInfo, a set of replacements as required by the
strings in such a templateBody, and a declName, and generate code to
convert into a stack Rust binding with that name.
"""
result = CGList([], "\n")
conversion = CGGeneric(string.Template(templateBody).substitute(replacements))
if declType is not None:
newDecl = [
CGGeneric("let "),
CGGeneric(declName),
CGGeneric(": "),
declType,
CGGeneric(" = "),
conversion,
CGGeneric(";"),
]
result.append(CGList(newDecl))
else:
result.append(conversion)
# Add an empty CGGeneric to get an extra newline after the argument
# conversion.
result.append(CGGeneric(""))
return result
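# For a declType of CGGeneric("i32") and declName "arg0", the emitted binding
# looks roughly like this sketch:
#
#   let arg0: i32 = match FromJSValConvertible::from_jsval(cx, args.get(0), ...) {
#       ...
#   };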
def convertConstIDLValueToJSVal(value):
if isinstance(value, IDLNullValue):
return "ConstantVal::NullVal"
tag = value.type.tag()
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16,
IDLType.Tags.uint16, IDLType.Tags.int32]:
return "ConstantVal::IntVal(%s)" % (value.value)
if tag == IDLType.Tags.uint32:
return "ConstantVal::UintVal(%s)" % (value.value)
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64]:
return "ConstantVal::DoubleVal(%s)" % (value.value)
if tag == IDLType.Tags.bool:
return "ConstantVal::BoolVal(true)" if value.value else "ConstantVal::BoolVal(false)"
if tag in [IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
return "ConstantVal::DoubleVal(%s)" % (value.value)
raise TypeError("Const value of unhandled type: " + value.type)
class CGArgumentConverter(CGThing):
"""
A class that takes an IDL argument object, its index in the
argument list, and the argv and argc strings and generates code to
unwrap the argument to the right native type.
"""
def __init__(self, argument, index, args, argc, descriptorProvider,
invalidEnumValueFatal=True):
CGThing.__init__(self)
assert not argument.defaultValue or argument.optional
replacer = {
"index": index,
"argc": argc,
"args": args
}
replacementVariables = {
"val": string.Template("${args}.get(${index})").substitute(replacer),
}
info = getJSToNativeConversionInfo(
argument.type,
descriptorProvider,
invalidEnumValueFatal=invalidEnumValueFatal,
defaultValue=argument.defaultValue,
treatNullAs=argument.treatNullAs,
isEnforceRange=argument.enforceRange,
isClamp=argument.clamp,
isMember="Variadic" if argument.variadic else False,
allowTreatNonObjectAsNull=argument.allowTreatNonCallableAsNull())
template = info.template
default = info.default
declType = info.declType
if not argument.variadic:
if argument.optional:
condition = "{args}.get({index}).is_undefined()".format(**replacer)
if argument.defaultValue:
assert default
template = CGIfElseWrapper(condition,
CGGeneric(default),
CGGeneric(template)).define()
else:
assert not default
declType = CGWrapper(declType, pre="Option<", post=">")
template = CGIfElseWrapper(condition,
CGGeneric("None"),
CGGeneric("Some(%s)" % template)).define()
else:
assert not default
self.converter = instantiateJSToNativeConversionTemplate(
template, replacementVariables, declType, "arg%d" % index)
else:
assert argument.optional
variadicConversion = {
"val": string.Template("${args}.get(variadicArg)").substitute(replacer),
}
innerConverter = [instantiateJSToNativeConversionTemplate(
template, variadicConversion, declType, "slot")]
arg = "arg%d" % index
if argument.type.isGeckoInterface():
init = "rooted_vec!(let mut %s)" % arg
innerConverter.append(CGGeneric("%s.push(JS::from_ref(&*slot));" % arg))
else:
init = "let mut %s = vec![]" % arg
innerConverter.append(CGGeneric("%s.push(slot);" % arg))
inner = CGIndenter(CGList(innerConverter, "\n"), 8).define()
self.converter = CGGeneric("""\
%(init)s;
if %(argc)s > %(index)s {
%(arg)s.reserve(%(argc)s as usize - %(index)s);
for variadicArg in %(index)s..%(argc)s {
%(inner)s
}
}""" % {'arg': arg, 'argc': argc, 'index': index, 'inner': inner, 'init': init})
def define(self):
return self.converter.define()
def wrapForType(jsvalRef, result='result', successCode='return true;', pre=''):
"""
Reflect a Rust value into JS.
    * 'jsvalRef': a MutableHandleValue in which to store the result
                  of the conversion;
    * 'result': the name of the variable in which the Rust value is stored;
    * 'successCode': the code to run once we have done the conversion;
    * 'pre': code to run before the conversion if rooting is necessary.
"""
wrap = "%s\n(%s).to_jsval(cx, %s);" % (pre, result, jsvalRef)
if successCode:
wrap += "\n%s" % successCode
return wrap
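# With the default arguments (result="result", successCode="return true;")
# and a jsvalRef of "rval", the generated snippet is simply:
#
#   (result).to_jsval(cx, rval);
#   return true;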
def typeNeedsCx(type, retVal=False):
if type is None:
return False
if type.nullable():
type = type.inner
if type.isSequence():
type = type.inner
if type.isUnion():
return any(typeNeedsCx(t) for t in type.unroll().flatMemberTypes)
if retVal and type.isSpiderMonkeyInterface():
return True
return type.isAny() or type.isObject()
# Returns a conversion behavior suitable for a type
def getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs):
if type.isSequence() or type.isMozMap():
return getConversionConfigForType(innerContainerType(type), isEnforceRange, isClamp, treatNullAs)
if type.isDOMString():
assert not isEnforceRange and not isClamp
treatAs = {
"Default": "StringificationBehavior::Default",
"EmptyString": "StringificationBehavior::Empty",
}
if treatNullAs not in treatAs:
raise TypeError("We don't support [TreatNullAs=%s]" % treatNullAs)
if type.nullable():
# Note: the actual behavior passed here doesn't matter for nullable
# strings.
return "StringificationBehavior::Default"
else:
return treatAs[treatNullAs]
if type.isPrimitive() and type.isInteger():
if isEnforceRange:
return "ConversionBehavior::EnforceRange"
elif isClamp:
return "ConversionBehavior::Clamp"
else:
return "ConversionBehavior::Default"
assert not isEnforceRange and not isClamp
return "()"
# Returns a CGThing containing the type of the return value.
def getRetvalDeclarationForType(returnType, descriptorProvider):
if returnType is None or returnType.isVoid():
# Nothing to declare
return CGGeneric("()")
if returnType.isPrimitive() and returnType.tag() in builtinNames:
result = CGGeneric(builtinNames[returnType.tag()])
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isDOMString():
result = CGGeneric("DOMString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isUSVString():
result = CGGeneric("USVString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isByteString():
result = CGGeneric("ByteString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isEnum():
result = CGGeneric(returnType.unroll().inner.identifier.name)
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isGeckoInterface():
descriptor = descriptorProvider.getDescriptor(
returnType.unroll().inner.identifier.name)
result = CGGeneric(descriptor.returnType)
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isCallback():
callback = returnType.unroll().callback
result = CGGeneric('Rc<%s::%s>' % (getModuleFromObject(callback), callback.identifier.name))
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isUnion():
result = CGGeneric(union_native_type(returnType))
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
# TODO: Return the value through a MutableHandleValue outparam
# https://github.com/servo/servo/issues/6307
if returnType.isAny():
return CGGeneric("JSVal")
if returnType.isObject() or returnType.isSpiderMonkeyInterface():
result = CGGeneric("NonZero<*mut JSObject>")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isSequence() or returnType.isMozMap():
result = getRetvalDeclarationForType(innerContainerType(returnType), descriptorProvider)
result = wrapInNativeContainerType(returnType, result)
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isDictionary():
nullable = returnType.nullable()
dictName = returnType.inner.name if nullable else returnType.name
result = CGGeneric(dictName)
if nullable:
result = CGWrapper(result, pre="Option<", post=">")
return result
raise TypeError("Don't know how to declare return value for %s" %
returnType)
def MemberCondition(pref, func):
"""
A string representing the condition for a member to actually be exposed.
Any of the arguments can be None. If not None, they should have the
following types:
pref: The name of the preference.
func: The name of the function.
"""
assert pref is None or isinstance(pref, str)
assert func is None or isinstance(func, str)
assert func is None or pref is None
if pref:
return 'Condition::Pref("%s")' % pref
if func:
return 'Condition::Func(%s)' % func
return "Condition::Satisfied"
class PropertyDefiner:
"""
A common superclass for defining things on prototype objects.
Subclasses should implement generateArray to generate the actual arrays of
things we're defining. They should also set self.regular to the list of
things exposed to web pages.
"""
def __init__(self, descriptor, name):
self.descriptor = descriptor
self.name = name
def variableName(self):
return "s" + self.name
def length(self):
return len(self.regular)
def __str__(self):
# We only need to generate id arrays for things that will end
# up used via ResolveProperty or EnumerateProperties.
return self.generateArray(self.regular, self.variableName())
@staticmethod
def getStringAttr(member, name):
attr = member.getExtendedAttribute(name)
if attr is None:
return None
# It's a list of strings
assert len(attr) == 1
assert attr[0] is not None
return attr[0]
@staticmethod
def getControllingCondition(interfaceMember, descriptor):
return MemberCondition(
PropertyDefiner.getStringAttr(interfaceMember,
"Pref"),
PropertyDefiner.getStringAttr(interfaceMember,
"Func"))
def generateGuardedArray(self, array, name, specTemplate, specTerminator,
specType, getCondition, getDataTuple):
"""
This method generates our various arrays.
array is an array of interface members as passed to generateArray
name is the name as passed to generateArray
specTemplate is a template for each entry of the spec array
specTerminator is a terminator for the spec array (inserted at the end
of the array), or None
specType is the actual typename of our spec
        getCondition is a callback function that takes an interface member and
        the descriptor and returns the condition under which the member is
        exposed
        getDataTuple is a callback function that takes an array entry and
        returns a tuple suitable for substitution into specTemplate.
"""
# We generate an all-encompassing list of lists of specs, with each sublist
# representing a group of members that share a common pref name. That will
# make sure the order of the properties as exposed on the interface and
# interface prototype objects does not change when pref control is added to
# members while still allowing us to define all the members in the smallest
# number of JSAPI calls.
assert len(array) != 0
specs = []
prefableSpecs = []
prefableTemplate = ' Guard::new(%s, %s[%d])'
for cond, members in groupby(array, lambda m: getCondition(m, self.descriptor)):
currentSpecs = [specTemplate % getDataTuple(m) for m in members]
if specTerminator:
currentSpecs.append(specTerminator)
specs.append("&[\n" + ",\n".join(currentSpecs) + "]\n")
prefableSpecs.append(
prefableTemplate % (cond, name + "_specs", len(specs) - 1))
specsArray = ("const %s_specs: &'static [&'static[%s]] = &[\n" +
",\n".join(specs) + "\n" +
"];\n") % (name, specType)
prefArray = ("const %s: &'static [Guard<&'static [%s]>] = &[\n" +
",\n".join(prefableSpecs) + "\n" +
"];\n") % (name, specType)
return specsArray + prefArray
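# The output of generateGuardedArray is a pair of parallel Rust arrays
# (a sketch, for a hypothetical name "sMethods"):
#
#   const sMethods_specs: &'static [&'static[JSFunctionSpec]] = &[ ... ];
#   const sMethods: &'static [Guard<&'static [JSFunctionSpec]>] = &[ ... ];
#
# with one spec sublist per distinct guard condition and one
# Guard::new(condition, sMethods_specs[i]) entry pointing at each sublist.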
# The length of a method is the minimum of the lengths of the
# argument lists of all its overloads.
def methodLength(method):
signatures = method.signatures()
return min(
len([arg for arg in arguments if not arg.optional and not arg.variadic])
for (_, arguments) in signatures)
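# For example, a method with overloads f(long a) and f(long a, optional long b)
# has length 1: only required, non-variadic arguments count, and the smallest
# overload wins.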
class MethodDefiner(PropertyDefiner):
"""
A class for defining methods on a prototype object.
"""
def __init__(self, descriptor, name, static, unforgeable):
assert not (static and unforgeable)
PropertyDefiner.__init__(self, descriptor, name)
# FIXME https://bugzilla.mozilla.org/show_bug.cgi?id=772822
# We should be able to check for special operations without an
# identifier. For now we check if the name starts with __
# Ignore non-static methods for callback interfaces
if not descriptor.interface.isCallback() or static:
methods = [m for m in descriptor.interface.members if
m.isMethod() and m.isStatic() == static and
not m.isIdentifierLess() and
MemberIsUnforgeable(m, descriptor) == unforgeable]
else:
methods = []
self.regular = [{"name": m.identifier.name,
"methodInfo": not m.isStatic(),
"length": methodLength(m),
"condition": PropertyDefiner.getControllingCondition(m, descriptor)}
for m in methods]
# FIXME Check for an existing iterator on the interface first.
if any(m.isGetter() and m.isIndexed() for m in methods):
self.regular.append({"name": '@@iterator',
"methodInfo": False,
"selfHostedName": "ArrayValues",
"length": 0,
"condition": "Condition::Satisfied"})
# Generate the keys/values/entries aliases for value iterables.
maplikeOrSetlikeOrIterable = descriptor.interface.maplikeOrSetlikeOrIterable
if (not static and not unforgeable and
(maplikeOrSetlikeOrIterable and
maplikeOrSetlikeOrIterable.isIterable() and
maplikeOrSetlikeOrIterable.isValueIterator())):
# Add our keys/values/entries/forEach
self.regular.append({
"name": "keys",
"methodInfo": False,
"selfHostedName": "ArrayKeys",
"length": 0,
"condition": PropertyDefiner.getControllingCondition(m,
descriptor)
})
self.regular.append({
"name": "values",
"methodInfo": False,
"selfHostedName": "ArrayValues",
"length": 0,
"condition": PropertyDefiner.getControllingCondition(m,
descriptor)
})
self.regular.append({
"name": "entries",
"methodInfo": False,
"selfHostedName": "ArrayEntries",
"length": 0,
"condition": PropertyDefiner.getControllingCondition(m,
descriptor)
})
self.regular.append({
"name": "forEach",
"methodInfo": False,
"selfHostedName": "ArrayForEach",
"length": 1,
"condition": PropertyDefiner.getControllingCondition(m,
descriptor)
})
isUnforgeableInterface = bool(descriptor.interface.getExtendedAttribute("Unforgeable"))
if not static and unforgeable == isUnforgeableInterface:
stringifier = descriptor.operations['Stringifier']
if stringifier:
self.regular.append({
"name": "toString",
"nativeName": stringifier.identifier.name,
"length": 0,
"condition": PropertyDefiner.getControllingCondition(stringifier, descriptor)
})
self.unforgeable = unforgeable
def generateArray(self, array, name):
if len(array) == 0:
return ""
def condition(m, d):
return m["condition"]
flags = "JSPROP_ENUMERATE"
if self.unforgeable:
flags += " | JSPROP_PERMANENT | JSPROP_READONLY"
def specData(m):
# TODO: Use something like JS_FNSPEC
# https://github.com/servo/servo/issues/6391
if "selfHostedName" in m:
selfHostedName = '%s as *const u8 as *const libc::c_char' % str_to_const_array(m["selfHostedName"])
assert not m.get("methodInfo", True)
accessor = "None"
jitinfo = "0 as *const JSJitInfo"
else:
selfHostedName = "0 as *const libc::c_char"
if m.get("methodInfo", True):
identifier = m.get("nativeName", m["name"])
# Go through an intermediate type here, because it's not
# easy to tell whether the methodinfo is a JSJitInfo or
# a JSTypedMethodJitInfo here. The compiler knows, though,
# so let it do the work.
jitinfo = "&%s_methodinfo as *const _ as *const JSJitInfo" % identifier
accessor = "Some(generic_method)"
else:
jitinfo = "0 as *const JSJitInfo"
accessor = 'Some(%s)' % m.get("nativeName", m["name"])
if m["name"].startswith("@@"):
return ('(SymbolCode::%s as i32 + 1)'
% m["name"][2:], accessor, jitinfo, m["length"], flags, selfHostedName)
return (str_to_const_array(m["name"]), accessor, jitinfo, m["length"], flags, selfHostedName)
return self.generateGuardedArray(
array, name,
' JSFunctionSpec {\n'
' name: %s as *const u8 as *const libc::c_char,\n'
' call: JSNativeWrapper { op: %s, info: %s },\n'
' nargs: %s,\n'
' flags: (%s) as u16,\n'
' selfHostedName: %s\n'
' }',
' JSFunctionSpec {\n'
' name: 0 as *const libc::c_char,\n'
' call: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n'
' nargs: 0,\n'
' flags: 0,\n'
' selfHostedName: 0 as *const libc::c_char\n'
' }',
'JSFunctionSpec',
condition, specData)
class AttrDefiner(PropertyDefiner):
def __init__(self, descriptor, name, static, unforgeable):
assert not (static and unforgeable)
PropertyDefiner.__init__(self, descriptor, name)
self.name = name
self.descriptor = descriptor
self.regular = [
m
for m in descriptor.interface.members if
m.isAttr() and m.isStatic() == static and
MemberIsUnforgeable(m, descriptor) == unforgeable
]
self.static = static
self.unforgeable = unforgeable
def generateArray(self, array, name):
if len(array) == 0:
return ""
flags = "JSPROP_ENUMERATE | JSPROP_SHARED"
if self.unforgeable:
flags += " | JSPROP_PERMANENT"
def getter(attr):
if self.static:
accessor = 'get_' + self.descriptor.internalNameFor(attr.identifier.name)
jitinfo = "0 as *const JSJitInfo"
else:
if attr.hasLenientThis():
accessor = "generic_lenient_getter"
else:
accessor = "generic_getter"
jitinfo = "&%s_getterinfo" % self.descriptor.internalNameFor(attr.identifier.name)
return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }"
% {"info": jitinfo,
"native": accessor})
def setter(attr):
if (attr.readonly and not attr.getExtendedAttribute("PutForwards")
and not attr.getExtendedAttribute("Replaceable")):
return "JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }"
if self.static:
accessor = 'set_' + self.descriptor.internalNameFor(attr.identifier.name)
jitinfo = "0 as *const JSJitInfo"
else:
if attr.hasLenientThis():
accessor = "generic_lenient_setter"
else:
accessor = "generic_setter"
jitinfo = "&%s_setterinfo" % self.descriptor.internalNameFor(attr.identifier.name)
return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }"
% {"info": jitinfo,
"native": accessor})
def specData(attr):
return (str_to_const_array(attr.identifier.name), flags, getter(attr),
setter(attr))
return self.generateGuardedArray(
array, name,
' JSPropertySpec {\n'
' name: %s as *const u8 as *const libc::c_char,\n'
' flags: (%s) as u8,\n'
' getter: %s,\n'
' setter: %s\n'
' }',
' JSPropertySpec {\n'
' name: 0 as *const libc::c_char,\n'
' flags: 0,\n'
' getter: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n'
' setter: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }\n'
' }',
'JSPropertySpec',
PropertyDefiner.getControllingCondition, specData)
class ConstDefiner(PropertyDefiner):
"""
    A class for defining constants on the interface object.
"""
def __init__(self, descriptor, name):
PropertyDefiner.__init__(self, descriptor, name)
self.name = name
self.regular = [m for m in descriptor.interface.members if m.isConst()]
def generateArray(self, array, name):
if len(array) == 0:
return ""
def specData(const):
return (str_to_const_array(const.identifier.name),
convertConstIDLValueToJSVal(const.value))
return self.generateGuardedArray(
array, name,
' ConstantSpec { name: %s, value: %s }',
None,
'ConstantSpec',
PropertyDefiner.getControllingCondition, specData)
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them.
lineStartDetector = re.compile("^(?=[^\n])", re.MULTILINE)
class CGIndenter(CGThing):
"""
A class that takes another CGThing and generates code that indents that
    CGThing by some number of spaces. The default indent is four spaces.
"""
def __init__(self, child, indentLevel=4):
CGThing.__init__(self)
self.child = child
self.indent = " " * indentLevel
def define(self):
defn = self.child.define()
if defn != "":
return re.sub(lineStartDetector, self.indent, defn)
else:
return defn
class CGWrapper(CGThing):
"""
Generic CGThing that wraps other CGThings with pre and post text.
"""
def __init__(self, child, pre="", post="", reindent=False):
CGThing.__init__(self)
self.child = child
self.pre = pre
self.post = post
self.reindent = reindent
def define(self):
defn = self.child.define()
if self.reindent:
# We don't use lineStartDetector because we don't want to
# insert whitespace at the beginning of our _first_ line.
defn = stripTrailingWhitespace(
defn.replace("\n", "\n" + (" " * len(self.pre))))
return self.pre + defn + self.post
class CGImports(CGWrapper):
"""
Generates the appropriate import/use statements.
"""
def __init__(self, child, descriptors, callbacks, dictionaries, enums, imports, config, ignored_warnings=None):
"""
Adds a set of imports.
"""
if ignored_warnings is None:
ignored_warnings = [
'non_camel_case_types',
'non_upper_case_globals',
'unused_imports',
'unused_variables',
'unused_assignments',
]
def componentTypes(type):
if type.isType() and type.nullable():
type = type.unroll()
if type.isUnion():
return type.flatMemberTypes
if type.isDictionary():
return [type] + getTypesFromDictionary(type)
if type.isSequence():
return componentTypes(type.inner)
return [type]
def isImportable(type):
if not type.isType():
assert (type.isInterface() or type.isDictionary() or
type.isEnum() or type.isNamespace())
return True
return not (type.builtin or type.isSequence() or type.isUnion())
def relatedTypesForSignatures(method):
types = []
for (returnType, arguments) in method.signatures():
types += componentTypes(returnType)
for arg in arguments:
types += componentTypes(arg.type)
return types
def getIdentifier(t):
if t.isType():
if t.nullable():
t = t.inner
if t.isCallback():
return t.callback.identifier
return t.identifier
assert t.isInterface() or t.isDictionary() or t.isEnum() or t.isNamespace()
return t.identifier
def removeWrapperAndNullableTypes(types):
normalized = []
for t in types:
while (t.isType() and t.nullable()) or isinstance(t, IDLWrapperType):
t = t.inner
if isImportable(t):
normalized += [t]
return normalized
types = []
for d in descriptors:
if not d.interface.isCallback():
types += [d.interface]
if d.interface.isIteratorInterface():
types += [d.interface.iterableInterface]
members = d.interface.members + d.interface.namedConstructors
constructor = d.interface.ctor()
if constructor:
members += [constructor]
if d.proxy:
members += [o for o in d.operations.values() if o]
for m in members:
if m.isMethod():
types += relatedTypesForSignatures(m)
elif m.isAttr():
types += componentTypes(m.type)
# Import the type names used in the callbacks that are being defined.
for c in callbacks:
types += relatedTypesForSignatures(c)
# Import the type names used in the dictionaries that are being defined.
for d in dictionaries:
types += componentTypes(d)
# Normalize the types we've collected and remove any ones which can't be imported.
types = removeWrapperAndNullableTypes(types)
descriptorProvider = config.getDescriptorProvider()
extras = []
for t in types:
# Importing these types in the same module that defines them is an error.
if t in dictionaries or t in enums:
continue
if t.isInterface() or t.isNamespace():
name = getIdentifier(t).name
descriptor = descriptorProvider.getDescriptor(name)
if name != 'GlobalScope':
extras += [descriptor.path]
parentName = descriptor.getParentName()
if parentName:
descriptor = descriptorProvider.getDescriptor(parentName)
extras += [descriptor.path, descriptor.bindingPath]
elif t.isType() and t.isMozMap():
extras += ['dom::bindings::mozmap::MozMap']
else:
if t.isEnum():
extras += [getModuleFromObject(t) + '::' + getIdentifier(t).name + 'Values']
extras += [getModuleFromObject(t) + '::' + getIdentifier(t).name]
statements = []
if len(ignored_warnings) > 0:
statements.append('#![allow(%s)]' % ','.join(ignored_warnings))
statements.extend('use %s;' % i for i in sorted(set(imports + extras)))
CGWrapper.__init__(self, child,
pre='\n'.join(statements) + '\n\n')
class CGIfWrapper(CGWrapper):
def __init__(self, condition, child):
pre = CGWrapper(CGGeneric(condition), pre="if ", post=" {\n",
reindent=True)
CGWrapper.__init__(self, CGIndenter(child), pre=pre.define(),
post="\n}")
class CGTemplatedType(CGWrapper):
def __init__(self, templateName, child):
CGWrapper.__init__(self, child, pre=templateName + "<", post=">")
class CGNamespace(CGWrapper):
def __init__(self, namespace, child, public=False):
pre = "%smod %s {\n" % ("pub " if public else "", namespace)
post = "} // mod %s" % namespace
CGWrapper.__init__(self, child, pre=pre, post=post)
@staticmethod
def build(namespaces, child, public=False):
"""
Static helper method to build multiple wrapped namespaces.
"""
if not namespaces:
return child
inner = CGNamespace.build(namespaces[1:], child, public=public)
return CGNamespace(namespaces[0], inner, public=public)
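# For example (a sketch), CGNamespace.build(["outer", "inner"], child,
# public=True) produces:
#
#   pub mod outer {
#   pub mod inner {
#   <child>
#   } // mod inner
#   } // mod outer
#
# (note that the child is not re-indented).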
def DOMClassTypeId(desc):
protochain = desc.prototypeChain
inner = ""
if desc.hasDescendants():
if desc.interface.getExtendedAttribute("Abstract"):
return "::dom::bindings::codegen::InheritTypes::TopTypeId { abstract_: () }"
name = desc.interface.identifier.name
inner = "(::dom::bindings::codegen::InheritTypes::%sTypeId::%s)" % (name, name)
elif len(protochain) == 1:
return "::dom::bindings::codegen::InheritTypes::TopTypeId { alone: () }"
reversed_protochain = list(reversed(protochain))
for (child, parent) in zip(reversed_protochain, reversed_protochain[1:]):
inner = "(::dom::bindings::codegen::InheritTypes::%sTypeId::%s%s)" % (parent, child, inner)
return "::dom::bindings::codegen::InheritTypes::TopTypeId { %s: %s }" % (protochain[0].lower(), inner)
def DOMClass(descriptor):
protoList = ['PrototypeList::ID::' + proto for proto in descriptor.prototypeChain]
# Pad out the list to the right length with ID::Last so we
# guarantee that all the lists are the same length. ID::Last
# is never the ID of any prototype, so it's safe to use as
# padding.
protoList.extend(['PrototypeList::ID::Last'] * (descriptor.config.maxProtoChainLength - len(protoList)))
prototypeChainString = ', '.join(protoList)
heapSizeOf = 'heap_size_of_raw_self_and_children::<%s>' % descriptor.concreteType
if descriptor.isGlobal():
globals_ = camel_to_upper_snake(descriptor.name)
else:
globals_ = 'EMPTY'
return """\
DOMClass {
interface_chain: [ %s ],
type_id: %s,
heap_size_of: %s as unsafe fn(_) -> _,
global: InterfaceObjectMap::%s,
}""" % (prototypeChainString, DOMClassTypeId(descriptor), heapSizeOf, globals_)
class CGDOMJSClass(CGThing):
"""
Generate a DOMJSClass for a given descriptor
"""
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
args = {
"domClass": DOMClass(self.descriptor),
"enumerateHook": "None",
"finalizeHook": FINALIZE_HOOK_NAME,
"flags": "0",
"name": str_to_const_array(self.descriptor.interface.identifier.name),
"resolveHook": "None",
"slots": "1",
"traceHook": TRACE_HOOK_NAME,
}
if self.descriptor.isGlobal():
assert not self.descriptor.weakReferenceable
args["enumerateHook"] = "Some(enumerate_global)"
args["flags"] = "JSCLASS_IS_GLOBAL | JSCLASS_DOM_GLOBAL"
args["slots"] = "JSCLASS_GLOBAL_SLOT_COUNT + 1"
args["resolveHook"] = "Some(resolve_global)"
args["traceHook"] = "js::jsapi::JS_GlobalObjectTraceHook"
elif self.descriptor.weakReferenceable:
args["slots"] = "2"
return """\
static CLASS_OPS: js::jsapi::JSClassOps = js::jsapi::JSClassOps {
addProperty: None,
delProperty: None,
getProperty: None,
setProperty: None,
enumerate: %(enumerateHook)s,
resolve: %(resolveHook)s,
mayResolve: None,
finalize: Some(%(finalizeHook)s),
call: None,
hasInstance: None,
construct: None,
trace: Some(%(traceHook)s),
};
static Class: DOMJSClass = DOMJSClass {
base: js::jsapi::JSClass {
name: %(name)s as *const u8 as *const libc::c_char,
flags: JSCLASS_IS_DOMJSCLASS | %(flags)s |
(((%(slots)s) & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT)
/* JSCLASS_HAS_RESERVED_SLOTS(%(slots)s) */,
cOps: &CLASS_OPS,
reserved: [0 as *mut _; 3],
},
dom_class: %(domClass)s
};""" % args
def str_to_const_array(s):
return "b\"%s\\0\"" % s
class CGPrototypeJSClass(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
name = str_to_const_array(self.descriptor.interface.identifier.name + "Prototype")
slotCount = 0
if self.descriptor.hasUnforgeableMembers:
slotCount += 1
return """\
static PrototypeClass: JSClass = JSClass {
name: %(name)s as *const u8 as *const libc::c_char,
flags:
// JSCLASS_HAS_RESERVED_SLOTS(%(slotCount)s)
(%(slotCount)s & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT,
cOps: 0 as *const _,
reserved: [0 as *mut os::raw::c_void; 3]
};
""" % {'name': name, 'slotCount': slotCount}
class CGInterfaceObjectJSClass(CGThing):
def __init__(self, descriptor):
assert descriptor.interface.hasInterfaceObject() and not descriptor.interface.isCallback()
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
if self.descriptor.interface.isNamespace():
classString = self.descriptor.interface.getExtendedAttribute("ClassString")
if classString:
classString = classString[0]
else:
classString = "Object"
return """\
static NAMESPACE_OBJECT_CLASS: NamespaceObjectClass = unsafe {
NamespaceObjectClass::new(%s)
};
""" % str_to_const_array(classString)
if self.descriptor.interface.ctor():
constructorBehavior = "InterfaceConstructorBehavior::call(%s)" % CONSTRUCT_HOOK_NAME
else:
constructorBehavior = "InterfaceConstructorBehavior::throw()"
name = self.descriptor.interface.identifier.name
args = {
"constructorBehavior": constructorBehavior,
"id": name,
"representation": 'b"function %s() {\\n [native code]\\n}"' % name,
"depth": self.descriptor.prototypeDepth
}
return """\
static INTERFACE_OBJECT_CLASS: NonCallbackInterfaceObjectClass =
NonCallbackInterfaceObjectClass::new(
&%(constructorBehavior)s,
%(representation)s,
PrototypeList::ID::%(id)s,
%(depth)s);
""" % args
class CGList(CGThing):
"""
Generate code for a list of CGThings. Just concatenates them together, with
an optional joiner string. "\n" is a common joiner.
"""
def __init__(self, children, joiner=""):
CGThing.__init__(self)
# Make a copy of the kids into a list, because if someone passes in a
# generator we won't be able to both declare and define ourselves, or
# define ourselves more than once!
self.children = list(children)
self.joiner = joiner
def append(self, child):
self.children.append(child)
def prepend(self, child):
self.children.insert(0, child)
def join(self, iterable):
return self.joiner.join(s for s in iterable if len(s) > 0)
def define(self):
return self.join(child.define() for child in self.children if child is not None)
def __len__(self):
return len(self.children)
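# Illustrative sketch (not part of the codegen): define() skips None
# children and join() drops empty strings, so
#
#     lst = CGList([CGGeneric("a"), None, CGGeneric(""), CGGeneric("b")], "\n")
#     # lst.define() == "a\nb"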
class CGIfElseWrapper(CGList):
def __init__(self, condition, ifTrue, ifFalse):
kids = [CGIfWrapper(condition, ifTrue),
CGWrapper(CGIndenter(ifFalse), pre=" else {\n", post="\n}")]
CGList.__init__(self, kids)
class CGGeneric(CGThing):
"""
A class that spits out a fixed string into the codegen.
"""
def __init__(self, text):
self.text = text
def define(self):
return self.text
class CGCallbackTempRoot(CGGeneric):
def __init__(self, name):
CGGeneric.__init__(self, "%s::new(cx, ${val}.get().to_object())" % name)
def getAllTypes(descriptors, dictionaries, callbacks, typedefs):
"""
Generate all the types we're dealing with. For each type, a tuple
containing type, descriptor, dictionary is yielded. The
descriptor and dictionary can be None if the type does not come
from a descriptor or dictionary; they will never both be non-None.
"""
for d in descriptors:
for t in getTypesFromDescriptor(d):
yield (t, d, None)
for dictionary in dictionaries:
for t in getTypesFromDictionary(dictionary):
yield (t, None, dictionary)
for callback in callbacks:
for t in getTypesFromCallback(callback):
yield (t, None, None)
for typedef in typedefs:
yield (typedef.innerType, None, None)
def UnionTypes(descriptors, dictionaries, callbacks, typedefs, config):
"""
Returns a CGImports wrapper around a CGList containing a CGUnionStruct and
a CGUnionConversionStruct for every union type encountered.
"""
imports = [
'dom',
'dom::bindings::codegen::PrototypeList',
'dom::bindings::conversions::ConversionResult',
'dom::bindings::conversions::FromJSValConvertible',
'dom::bindings::conversions::ToJSValConvertible',
'dom::bindings::conversions::ConversionBehavior',
'dom::bindings::conversions::StringificationBehavior',
'dom::bindings::conversions::root_from_handlevalue',
'dom::bindings::error::throw_not_in_union',
'dom::bindings::js::Root',
'dom::bindings::mozmap::MozMap',
'dom::bindings::str::ByteString',
'dom::bindings::str::DOMString',
'dom::bindings::str::USVString',
'dom::types::*',
'js::error::throw_type_error',
'js::jsapi::HandleValue',
'js::jsapi::Heap',
'js::jsapi::JSContext',
'js::jsapi::JSObject',
'js::jsapi::MutableHandleValue',
'js::jsval::JSVal',
]
# Now find all the things we'll need as arguments and return values because
# we need to wrap or unwrap them.
unionStructs = dict()
for (t, descriptor, dictionary) in getAllTypes(descriptors, dictionaries, callbacks, typedefs):
if dictionary:
imports.append("%s::%s" % (CGDictionary.makeModuleName(dictionary),
CGDictionary.makeDictionaryName(dictionary)))
t = t.unroll()
if not t.isUnion():
continue
name = str(t)
if name not in unionStructs:
provider = descriptor or config.getDescriptorProvider()
unionStructs[name] = CGList([
CGUnionStruct(t, provider),
CGUnionConversionStruct(t, provider)
])
# Sort unionStructs by key, retrieve value
unionStructs = (i[1] for i in sorted(unionStructs.items(), key=operator.itemgetter(0)))
return CGImports(CGList(unionStructs, "\n\n"),
descriptors=[],
callbacks=[],
dictionaries=[],
enums=[],
imports=imports,
config=config,
ignored_warnings=[])
class Argument():
"""
A class for outputting the type and name of an argument
"""
def __init__(self, argType, name, default=None, mutable=False):
self.argType = argType
self.name = name
self.default = default
self.mutable = mutable
def declare(self):
string = ('mut ' if self.mutable else '') + self.name + ((': ' + self.argType) if self.argType else '')
# XXXjdm Support default arguments somehow :/
# if self.default is not None:
# string += " = " + self.default
return string
def define(self):
return self.argType + ' ' + self.name
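# Illustrative sketch (not part of the codegen): declare() renders a Rust
# parameter, while define() keeps a C-style "type name" order:
#
#     arg = Argument("*mut JSContext", "cx", mutable=True)
#     # arg.declare() == "mut cx: *mut JSContext"
#     # arg.define() == "*mut JSContext cx"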
class CGAbstractMethod(CGThing):
"""
An abstract class for generating code for a method. Subclasses
should override definition_body to create the actual code.
descriptor is the descriptor for the interface the method is associated with
name is the name of the method as a string
returnType is the IDLType of the return value
args is a list of Argument objects
inline is accepted for parity with the Gecko codegen but is otherwise
unused here.
alwaysInline should be True to annotate the generated method with
#[inline].
If templateArgs is not None it should be a list of strings containing
template arguments, and the function will be templatized using those
arguments.
docs is None or documentation for the method in a string.
unsafe adds the 'unsafe' qualifier to the function, producing an
'unsafe fn()' declaration.
"""
def __init__(self, descriptor, name, returnType, args, inline=False,
alwaysInline=False, extern=False, unsafe=False, pub=False,
templateArgs=None, docs=None, doesNotPanic=False):
CGThing.__init__(self)
self.descriptor = descriptor
self.name = name
self.returnType = returnType
self.args = args
self.alwaysInline = alwaysInline
self.extern = extern
self.unsafe = extern or unsafe
self.templateArgs = templateArgs
self.pub = pub
self.docs = docs
self.catchPanic = self.extern and not doesNotPanic
def _argstring(self):
return ', '.join([a.declare() for a in self.args])
def _template(self):
if self.templateArgs is None:
return ''
return '<%s>\n' % ', '.join(self.templateArgs)
def _docs(self):
if self.docs is None:
return ''
lines = self.docs.splitlines()
return ''.join('/// %s\n' % line for line in lines)
def _decorators(self):
decorators = []
if self.alwaysInline:
decorators.append('#[inline]')
if self.pub:
decorators.append('pub')
if self.unsafe:
decorators.append('unsafe')
if self.extern:
decorators.append('extern')
if not decorators:
return ''
return ' '.join(decorators) + ' '
def _returnType(self):
return (" -> %s" % self.returnType) if self.returnType != "void" else ""
def define(self):
body = self.definition_body()
if self.catchPanic:
body = CGWrapper(CGIndenter(body),
pre="return wrap_panic(panic::AssertUnwindSafe(|| {\n",
post=("""\n}), %s);""" % ("()" if self.returnType == "void" else "false")))
return CGWrapper(CGIndenter(body),
pre=self.definition_prologue(),
post=self.definition_epilogue()).define()
def definition_prologue(self):
return "%s%sfn %s%s(%s)%s {\n" % (self._docs(), self._decorators(),
self.name, self._template(),
self._argstring(), self._returnType())
def definition_epilogue(self):
return "\n}\n"
def definition_body(self):
raise NotImplementedError # Override me!
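# Illustrative sketch (not part of the codegen): a minimal hypothetical
# subclass shows how the pieces above combine into a Rust signature:
#
#     class CGNoop(CGAbstractMethod):
#         def __init__(self, descriptor):
#             CGAbstractMethod.__init__(self, descriptor, "Noop", "bool",
#                                       [Argument("*mut JSContext", "cx")],
#                                       pub=True, unsafe=True)
#         def definition_body(self):
#             return CGGeneric("true")
#
# whose define() opens with `pub unsafe fn Noop(cx: *mut JSContext) -> bool {`.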
class CGConstructorEnabled(CGAbstractMethod):
"""
A method for testing whether we should be exposing this interface object.
This can perform various tests depending on what conditions are specified
on the interface.
"""
def __init__(self, descriptor):
CGAbstractMethod.__init__(self, descriptor,
'ConstructorEnabled', 'bool',
[Argument("*mut JSContext", "aCx"),
Argument("HandleObject", "aObj")],
unsafe=True)
def definition_body(self):
conditions = []
iface = self.descriptor.interface
bits = " | ".join(sorted(
"InterfaceObjectMap::" + camel_to_upper_snake(i) for i in iface.exposureSet
))
conditions.append("is_exposed_in(aObj, %s)" % bits)
pref = iface.getExtendedAttribute("Pref")
if pref:
assert isinstance(pref, list) and len(pref) == 1
conditions.append('PREFS.get("%s").as_boolean().unwrap_or(false)' % pref[0])
func = iface.getExtendedAttribute("Func")
if func:
assert isinstance(func, list) and len(func) == 1
conditions.append("%s(aCx, aObj)" % func[0])
return CGList((CGGeneric(cond) for cond in conditions), " &&\n")
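# Illustrative sketch (not part of the codegen; the pref name is made up):
# for a hypothetical interface exposed only to Window and gated on a pref,
# the body above evaluates to:
#
#     is_exposed_in(aObj, InterfaceObjectMap::WINDOW) &&
#     PREFS.get("dom.testbinding.enabled").as_boolean().unwrap_or(false)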
def CreateBindingJSObject(descriptor, parent=None):
assert not descriptor.isGlobal()
create = "let raw = Box::into_raw(object);\nlet _rt = RootedTraceable::new(&*raw);\n"
if descriptor.proxy:
create += """
let handler = RegisterBindings::PROXY_HANDLERS[PrototypeList::Proxies::%s as usize];
rooted!(in(cx) let private = PrivateValue(raw as *const libc::c_void));
let obj = NewProxyObject(cx, handler,
private.handle(),
proto.get(), %s.get(),
ptr::null_mut(), ptr::null_mut());
assert!(!obj.is_null());
rooted!(in(cx) let obj = obj);\
""" % (descriptor.name, parent)
else:
create += ("rooted!(in(cx) let obj = JS_NewObjectWithGivenProto(\n"
" cx, &Class.base as *const JSClass, proto.handle()));\n"
"assert!(!obj.is_null());\n"
"\n"
"JS_SetReservedSlot(obj.get(), DOM_OBJECT_SLOT,\n"
" PrivateValue(raw as *const libc::c_void));")
if descriptor.weakReferenceable:
create += """
JS_SetReservedSlot(obj.get(), DOM_WEAK_SLOT, PrivateValue(ptr::null()));"""
return create
def InitUnforgeablePropertiesOnHolder(descriptor, properties):
"""
Define the unforgeable properties on the unforgeable holder for
the interface represented by descriptor.
properties is a PropertyArrays instance.
"""
unforgeables = []
defineUnforgeableAttrs = "define_guarded_properties(cx, unforgeable_holder.handle(), %s);"
defineUnforgeableMethods = "define_guarded_methods(cx, unforgeable_holder.handle(), %s);"
unforgeableMembers = [
(defineUnforgeableAttrs, properties.unforgeable_attrs),
(defineUnforgeableMethods, properties.unforgeable_methods),
]
for template, array in unforgeableMembers:
if array.length() > 0:
unforgeables.append(CGGeneric(template % array.variableName()))
return CGList(unforgeables, "\n")
def CopyUnforgeablePropertiesToInstance(descriptor):
"""
Copy the unforgeable properties from the unforgeable holder for
this interface to the instance object we have.
"""
if not descriptor.hasUnforgeableMembers:
return ""
copyCode = ""
# For proxies, we want to define on the expando object, not directly on the
# reflector, so we can make sure we don't get confused by named getters.
if descriptor.proxy:
copyCode += """\
rooted!(in(cx) let mut expando = ptr::null_mut());
ensure_expando_object(cx, obj.handle(), expando.handle_mut());
"""
obj = "expando"
else:
obj = "obj"
# We can't do the fast copy for globals, because we can't allocate the
# unforgeable holder for those with the right JSClass. Luckily, there
# aren't too many globals being created.
if descriptor.isGlobal():
copyFunc = "JS_CopyPropertiesFrom"
else:
copyFunc = "JS_InitializePropertiesFromCompatibleNativeObject"
copyCode += """\
rooted!(in(cx) let mut unforgeable_holder = ptr::null_mut());
unforgeable_holder.handle_mut().set(
JS_GetReservedSlot(proto.get(), DOM_PROTO_UNFORGEABLE_HOLDER_SLOT).to_object());
assert!(%(copyFunc)s(cx, %(obj)s.handle(), unforgeable_holder.handle()));
""" % {'copyFunc': copyFunc, 'obj': obj}
return copyCode
class CGWrapMethod(CGAbstractMethod):
"""
Class that generates the FooBinding::Wrap function for non-callback
interfaces.
"""
def __init__(self, descriptor):
assert not descriptor.interface.isCallback()
assert not descriptor.isGlobal()
args = [Argument('*mut JSContext', 'cx'),
Argument('&GlobalScope', 'scope'),
Argument("Box<%s>" % descriptor.concreteType, 'object')]
retval = 'Root<%s>' % descriptor.concreteType
CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args,
pub=True, unsafe=True)
def definition_body(self):
unforgeable = CopyUnforgeablePropertiesToInstance(self.descriptor)
create = CreateBindingJSObject(self.descriptor, "scope")
return CGGeneric("""\
let scope = scope.reflector().get_jsobject();
assert!(!scope.get().is_null());
assert!(((*get_object_class(scope.get())).flags & JSCLASS_IS_GLOBAL) != 0);
rooted!(in(cx) let mut proto = ptr::null_mut());
let _ac = JSAutoCompartment::new(cx, scope.get());
GetProtoObject(cx, scope, proto.handle_mut());
assert!(!proto.is_null());
%(createObject)s
%(copyUnforgeable)s
(*raw).init_reflector(obj.get());
Root::from_ref(&*raw)""" % {'copyUnforgeable': unforgeable, 'createObject': create})
class CGWrapGlobalMethod(CGAbstractMethod):
"""
Class that generates the FooBinding::Wrap function for global interfaces.
"""
def __init__(self, descriptor, properties):
assert not descriptor.interface.isCallback()
assert descriptor.isGlobal()
args = [Argument('*mut JSContext', 'cx'),
Argument("Box<%s>" % descriptor.concreteType, 'object')]
retval = 'Root<%s>' % descriptor.concreteType
CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args,
pub=True, unsafe=True)
self.properties = properties
def definition_body(self):
values = {
"unforgeable": CopyUnforgeablePropertiesToInstance(self.descriptor)
}
pairs = [
("define_guarded_properties", self.properties.attrs),
("define_guarded_methods", self.properties.methods),
("define_guarded_constants", self.properties.consts)
]
members = ["%s(cx, obj.handle(), %s);" % (function, array.variableName())
for (function, array) in pairs if array.length() > 0]
values["members"] = "\n".join(members)
return CGGeneric("""\
let raw = Box::into_raw(object);
let _rt = RootedTraceable::new(&*raw);
rooted!(in(cx) let mut obj = ptr::null_mut());
create_global_object(
cx,
&Class.base,
raw as *const libc::c_void,
_trace,
obj.handle_mut());
assert!(!obj.is_null());
(*raw).init_reflector(obj.get());
let _ac = JSAutoCompartment::new(cx, obj.get());
rooted!(in(cx) let mut proto = ptr::null_mut());
GetProtoObject(cx, obj.handle(), proto.handle_mut());
assert!(JS_SplicePrototype(cx, obj.handle(), proto.handle()));
let mut immutable = false;
assert!(JS_SetImmutablePrototype(cx, obj.handle(), &mut immutable));
assert!(immutable);
%(members)s
%(unforgeable)s
Root::from_ref(&*raw)\
""" % values)
class CGIDLInterface(CGThing):
"""
Class for codegen of an implementation of the IDLInterface trait.
"""
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
interface = self.descriptor.interface
name = self.descriptor.concreteType
if (interface.getUserData("hasConcreteDescendant", False) or
interface.getUserData("hasProxyDescendant", False)):
depth = self.descriptor.prototypeDepth
check = "class.interface_chain[%s] == PrototypeList::ID::%s" % (depth, name)
elif self.descriptor.proxy:
check = "class as *const _ == &Class as *const _"
else:
check = "class as *const _ == &Class.dom_class as *const _"
return """\
impl IDLInterface for %(name)s {
#[inline]
fn derives(class: &'static DOMClass) -> bool {
%(check)s
}
}
impl PartialEq for %(name)s {
fn eq(&self, other: &%(name)s) -> bool {
self as *const %(name)s == &*other
}
}
""" % {'check': check, 'name': name}
class CGAbstractExternMethod(CGAbstractMethod):
"""
Abstract base class for codegen of implementation-only (no
declaration) static methods.
"""
def __init__(self, descriptor, name, returnType, args, doesNotPanic=False):
CGAbstractMethod.__init__(self, descriptor, name, returnType, args,
inline=False, extern=True, doesNotPanic=doesNotPanic)
class PropertyArrays():
def __init__(self, descriptor):
self.static_methods = MethodDefiner(descriptor, "StaticMethods",
static=True, unforgeable=False)
self.static_attrs = AttrDefiner(descriptor, "StaticAttributes",
static=True, unforgeable=False)
self.methods = MethodDefiner(descriptor, "Methods", static=False, unforgeable=False)
self.unforgeable_methods = MethodDefiner(descriptor, "UnforgeableMethods",
static=False, unforgeable=True)
self.attrs = AttrDefiner(descriptor, "Attributes", static=False, unforgeable=False)
self.unforgeable_attrs = AttrDefiner(descriptor, "UnforgeableAttributes",
static=False, unforgeable=True)
self.consts = ConstDefiner(descriptor, "Constants")
@staticmethod
def arrayNames():
return [
"static_methods",
"static_attrs",
"methods",
"unforgeable_methods",
"attrs",
"unforgeable_attrs",
"consts",
]
def variableNames(self):
names = {}
for array in self.arrayNames():
names[array] = getattr(self, array).variableName()
return names
def __str__(self):
define = ""
for array in self.arrayNames():
define += str(getattr(self, array))
return define
class CGCreateInterfaceObjectsMethod(CGAbstractMethod):
"""
Generate the CreateInterfaceObjects method for an interface descriptor.
properties should be a PropertyArrays instance.
"""
def __init__(self, descriptor, properties, haveUnscopables):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'global'),
Argument('*mut ProtoOrIfaceArray', 'cache')]
CGAbstractMethod.__init__(self, descriptor, 'CreateInterfaceObjects', 'void', args,
unsafe=True)
self.properties = properties
self.haveUnscopables = haveUnscopables
def definition_body(self):
name = self.descriptor.interface.identifier.name
if self.descriptor.interface.isNamespace():
if self.descriptor.interface.getExtendedAttribute("ProtoObjectHack"):
proto = "JS_GetObjectPrototype(cx, global)"
else:
proto = "JS_NewPlainObject(cx)"
if self.properties.static_methods.length():
methods = self.properties.static_methods.variableName()
else:
methods = "&[]"
return CGGeneric("""\
rooted!(in(cx) let proto = %(proto)s);
assert!(!proto.is_null());
rooted!(in(cx) let mut namespace = ptr::null_mut());
create_namespace_object(cx, global, proto.handle(), &NAMESPACE_OBJECT_CLASS,
%(methods)s, %(name)s, namespace.handle_mut());
assert!(!namespace.is_null());
assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null());
(*cache)[PrototypeList::Constructor::%(id)s as usize] = namespace.get();
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize),
ptr::null_mut(),
namespace.get());
""" % {"id": MakeNativeName(name), "methods": methods, "name": str_to_const_array(name), "proto": proto})
if self.descriptor.interface.isCallback():
assert not self.descriptor.interface.ctor() and self.descriptor.interface.hasConstants()
return CGGeneric("""\
rooted!(in(cx) let mut interface = ptr::null_mut());
create_callback_interface_object(cx, global, sConstants, %(name)s, interface.handle_mut());
assert!(!interface.is_null());
assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null());
(*cache)[PrototypeList::Constructor::%(id)s as usize] = interface.get();
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize),
ptr::null_mut(),
interface.get());
""" % {"id": name, "name": str_to_const_array(name)})
parentName = self.descriptor.getParentName()
if not parentName:
if self.descriptor.interface.getExtendedAttribute("ExceptionClass"):
getPrototypeProto = "prototype_proto.set(JS_GetErrorPrototype(cx))"
elif self.descriptor.interface.isIteratorInterface():
getPrototypeProto = "prototype_proto.set(JS_GetIteratorPrototype(cx))"
else:
getPrototypeProto = "prototype_proto.set(JS_GetObjectPrototype(cx, global))"
else:
getPrototypeProto = ("%s::GetProtoObject(cx, global, prototype_proto.handle_mut())" %
toBindingNamespace(parentName))
code = [CGGeneric("""\
rooted!(in(cx) let mut prototype_proto = ptr::null_mut());
%s;
assert!(!prototype_proto.is_null());""" % getPrototypeProto)]
properties = {
"id": name,
"unscopables": "unscopable_names" if self.haveUnscopables else "&[]"
}
for arrayName in self.properties.arrayNames():
array = getattr(self.properties, arrayName)
if array.length():
properties[arrayName] = array.variableName()
else:
properties[arrayName] = "&[]"
if self.descriptor.isGlobal():
assert not self.haveUnscopables
proto_properties = {
"attrs": "&[]",
"consts": "&[]",
"id": name,
"methods": "&[]",
"unscopables": "&[]",
}
else:
proto_properties = properties
code.append(CGGeneric("""
rooted!(in(cx) let mut prototype = ptr::null_mut());
create_interface_prototype_object(cx,
prototype_proto.handle(),
&PrototypeClass,
%(methods)s,
%(attrs)s,
%(consts)s,
%(unscopables)s,
prototype.handle_mut());
assert!(!prototype.is_null());
assert!((*cache)[PrototypeList::ID::%(id)s as usize].is_null());
(*cache)[PrototypeList::ID::%(id)s as usize] = prototype.get();
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::ID::%(id)s as isize),
ptr::null_mut(),
prototype.get());
""" % proto_properties))
if self.descriptor.interface.hasInterfaceObject():
properties["name"] = str_to_const_array(name)
if self.descriptor.interface.ctor():
properties["length"] = methodLength(self.descriptor.interface.ctor())
else:
properties["length"] = 0
parentName = self.descriptor.getParentName()
if parentName:
parentName = toBindingNamespace(parentName)
code.append(CGGeneric("""
rooted!(in(cx) let mut interface_proto = ptr::null_mut());
%s::GetConstructorObject(cx, global, interface_proto.handle_mut());""" % parentName))
else:
code.append(CGGeneric("""
rooted!(in(cx) let interface_proto = JS_GetFunctionPrototype(cx, global));"""))
code.append(CGGeneric("""\
assert!(!interface_proto.is_null());
rooted!(in(cx) let mut interface = ptr::null_mut());
create_noncallback_interface_object(cx,
global,
interface_proto.handle(),
&INTERFACE_OBJECT_CLASS,
%(static_methods)s,
%(static_attrs)s,
%(consts)s,
prototype.handle(),
%(name)s,
%(length)s,
interface.handle_mut());
assert!(!interface.is_null());""" % properties))
if self.descriptor.hasDescendants():
code.append(CGGeneric("""\
assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null());
(*cache)[PrototypeList::Constructor::%(id)s as usize] = interface.get();
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize),
ptr::null_mut(),
interface.get());
""" % properties))
aliasedMembers = [m for m in self.descriptor.interface.members if m.isMethod() and m.aliases]
if aliasedMembers:
def defineAlias(alias):
if alias == "@@iterator":
symbolJSID = "RUST_SYMBOL_TO_JSID(GetWellKnownSymbol(cx, SymbolCode::iterator))"
getSymbolJSID = CGGeneric(fill("rooted!(in(cx) let iteratorId = ${symbolJSID});",
symbolJSID=symbolJSID))
defineFn = "JS_DefinePropertyById2"
prop = "iteratorId.handle()"
elif alias.startswith("@@"):
raise TypeError("Can't handle any well-known Symbol other than @@iterator")
else:
getSymbolJSID = None
defineFn = "JS_DefineProperty"
prop = '"%s"' % alias
return CGList([
getSymbolJSID,
# XXX If we ever create non-enumerable properties that can
# be aliased, we should consider making the aliases
# match the enumerability of the property being aliased.
CGGeneric(fill(
"""
assert!(${defineFn}(cx, prototype.handle(), ${prop}, aliasedVal.handle(),
JSPROP_ENUMERATE, None, None));
""",
defineFn=defineFn,
prop=prop))
], "\n")
def defineAliasesFor(m):
return CGList([
CGGeneric(fill(
"""
assert!(JS_GetProperty(cx, prototype.handle(),
${prop} as *const u8 as *const _,
aliasedVal.handle_mut()));
""",
prop=str_to_const_array(m.identifier.name)))
] + [defineAlias(alias) for alias in sorted(m.aliases)])
defineAliases = CGList([
CGGeneric(fill("""
// Set up aliases on the interface prototype object we just created.
""")),
CGGeneric("rooted!(in(cx) let mut aliasedVal = UndefinedValue());\n\n")
] + [defineAliasesFor(m) for m in sorted(aliasedMembers)])
code.append(defineAliases)
constructors = self.descriptor.interface.namedConstructors
if constructors:
decl = "let named_constructors: [(ConstructorClassHook, &'static [u8], u32); %d]" % len(constructors)
specs = []
for constructor in constructors:
hook = CONSTRUCT_HOOK_NAME + "_" + constructor.identifier.name
name = str_to_const_array(constructor.identifier.name)
length = methodLength(constructor)
specs.append(CGGeneric("(%s as ConstructorClassHook, %s, %d)" % (hook, name, length)))
values = CGIndenter(CGList(specs, "\n"), 4)
code.append(CGWrapper(values, pre="%s = [\n" % decl, post="\n];"))
code.append(CGGeneric("create_named_constructors(cx, global, &named_constructors, prototype.handle());"))
if self.descriptor.hasUnforgeableMembers:
# We want to use the same JSClass and prototype as the object we'll
# end up defining the unforgeable properties on in the end, so that
# we can use JS_InitializePropertiesFromCompatibleNativeObject to do
# a fast copy. In the case of proxies that's null, because the
# expando object is a vanilla object, but in the case of other DOM
# objects it's whatever our class is.
#
# Also, for a global we can't use the global's class; just use
# null and when we do the copy off the holder we'll take a slower
# path. This also means that we don't need to worry about matching
# the prototype.
if self.descriptor.proxy or self.descriptor.isGlobal():
holderClass = "ptr::null()"
holderProto = "HandleObject::null()"
else:
holderClass = "&Class.base as *const JSClass"
holderProto = "prototype.handle()"
code.append(CGGeneric("""
rooted!(in(cx) let mut unforgeable_holder = ptr::null_mut());
unforgeable_holder.handle_mut().set(
JS_NewObjectWithoutMetadata(cx, %(holderClass)s, %(holderProto)s));
assert!(!unforgeable_holder.is_null());
""" % {'holderClass': holderClass, 'holderProto': holderProto}))
code.append(InitUnforgeablePropertiesOnHolder(self.descriptor, self.properties))
code.append(CGGeneric("""\
JS_SetReservedSlot(prototype.get(), DOM_PROTO_UNFORGEABLE_HOLDER_SLOT,
ObjectValue(unforgeable_holder.get()))"""))
return CGList(code, "\n")
class CGGetPerInterfaceObject(CGAbstractMethod):
"""
A method for getting a per-interface object (a prototype object or interface
constructor object).
"""
def __init__(self, descriptor, name, idPrefix="", pub=False):
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'global'),
Argument('MutableHandleObject', 'rval')]
CGAbstractMethod.__init__(self, descriptor, name,
'void', args, pub=pub, unsafe=True)
self.id = idPrefix + "::" + MakeNativeName(self.descriptor.name)
def definition_body(self):
return CGGeneric("""
assert!(((*get_object_class(global.get())).flags & JSCLASS_DOM_GLOBAL) != 0);
/* Check to see whether the interface objects are already installed */
let proto_or_iface_array = get_proto_or_iface_array(global.get());
rval.set((*proto_or_iface_array)[%(id)s as usize]);
if !rval.get().is_null() {
return;
}
CreateInterfaceObjects(cx, global, proto_or_iface_array);
rval.set((*proto_or_iface_array)[%(id)s as usize]);
assert!(!rval.get().is_null());
""" % {"id": self.id})
class CGGetProtoObjectMethod(CGGetPerInterfaceObject):
"""
A method for getting the interface prototype object.
"""
def __init__(self, descriptor):
CGGetPerInterfaceObject.__init__(self, descriptor, "GetProtoObject",
"PrototypeList::ID", pub=True)
def definition_body(self):
return CGList([
CGGeneric("""\
/* Get the interface prototype object for this class. This will create the
object as needed. */"""),
CGGetPerInterfaceObject.definition_body(self),
])
class CGGetConstructorObjectMethod(CGGetPerInterfaceObject):
"""
A method for getting the interface constructor object.
"""
def __init__(self, descriptor):
CGGetPerInterfaceObject.__init__(self, descriptor, "GetConstructorObject",
"PrototypeList::Constructor",
pub=True)
def definition_body(self):
return CGList([
CGGeneric("""\
/* Get the interface object for this class. This will create the object as
needed. */"""),
CGGetPerInterfaceObject.definition_body(self),
])
class CGDefineProxyHandler(CGAbstractMethod):
"""
A method to create and cache the proxy handler for a given interface.
"""
def __init__(self, descriptor):
assert descriptor.proxy
CGAbstractMethod.__init__(self, descriptor, 'DefineProxyHandler',
'*const libc::c_void', [],
pub=True, unsafe=True)
def definition_body(self):
customDefineProperty = 'proxyhandler::define_property'
if self.descriptor.operations['IndexedSetter'] or self.descriptor.operations['NamedSetter']:
customDefineProperty = 'defineProperty'
customDelete = 'proxyhandler::delete'
if self.descriptor.operations['NamedDeleter']:
customDelete = 'delete'
getOwnEnumerablePropertyKeys = "own_property_keys"
if self.descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
getOwnEnumerablePropertyKeys = "getOwnEnumerablePropertyKeys"
args = {
"defineProperty": customDefineProperty,
"delete": customDelete,
"getOwnEnumerablePropertyKeys": getOwnEnumerablePropertyKeys,
"trace": TRACE_HOOK_NAME,
"finalize": FINALIZE_HOOK_NAME,
}
return CGGeneric("""\
let traps = ProxyTraps {
enter: None,
getOwnPropertyDescriptor: Some(getOwnPropertyDescriptor),
defineProperty: Some(%(defineProperty)s),
ownPropertyKeys: Some(own_property_keys),
delete_: Some(%(delete)s),
enumerate: None,
getPrototypeIfOrdinary: Some(proxyhandler::get_prototype_if_ordinary),
preventExtensions: Some(proxyhandler::prevent_extensions),
isExtensible: Some(proxyhandler::is_extensible),
has: None,
get: Some(get),
set: None,
call: None,
construct: None,
getPropertyDescriptor: Some(get_property_descriptor),
hasOwn: Some(hasOwn),
getOwnEnumerablePropertyKeys: Some(%(getOwnEnumerablePropertyKeys)s),
nativeCall: None,
hasInstance: None,
objectClassIs: None,
className: Some(className),
fun_toString: None,
boxedValue_unbox: None,
defaultValue: None,
trace: Some(%(trace)s),
finalize: Some(%(finalize)s),
objectMoved: None,
isCallable: None,
isConstructor: None,
};
CreateProxyHandler(&traps, Class.as_void_ptr())\
""" % args)
class CGDefineDOMInterfaceMethod(CGAbstractMethod):
"""
A method for resolve hooks to try to lazily define the interface object for
a given interface.
"""
def __init__(self, descriptor):
assert descriptor.interface.hasInterfaceObject()
args = [
Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'global'),
]
CGAbstractMethod.__init__(self, descriptor, 'DefineDOMInterface',
'void', args, pub=True, unsafe=True)
def definition_body(self):
if self.descriptor.interface.isCallback() or self.descriptor.interface.isNamespace():
function = "GetConstructorObject"
else:
function = "GetProtoObject"
return CGGeneric("""\
assert!(!global.get().is_null());
if !ConstructorEnabled(cx, global) {
return;
}
rooted!(in(cx) let mut proto = ptr::null_mut());
%s(cx, global, proto.handle_mut());
assert!(!proto.is_null());""" % (function,))
def needCx(returnType, arguments, considerTypes):
return (considerTypes and
(typeNeedsCx(returnType, True) or
any(typeNeedsCx(a.type) for a in arguments)))
class CGCallGenerator(CGThing):
"""
A class to generate an actual call to a Rust object. Assumes that the
object is stored in a variable whose name is given by the |object| argument.
errorResult should be a string for the value to return in case of an
exception from the native code, or None if no error reporting is needed.
"""
def __init__(self, errorResult, arguments, argsPre, returnType,
extendedAttributes, descriptor, nativeMethodName,
static, object="this"):
CGThing.__init__(self)
assert errorResult is None or isinstance(errorResult, str)
isFallible = errorResult is not None
result = getRetvalDeclarationForType(returnType, descriptor)
if isFallible:
result = CGWrapper(result, pre="Result<", post=", Error>")
args = CGList([CGGeneric(arg) for arg in argsPre], ", ")
for (a, name) in arguments:
# XXXjdm Perhaps we should pass all nontrivial types by borrowed pointer
if a.type.isDictionary() and not type_needs_tracing(a.type):
name = "&" + name
args.append(CGGeneric(name))
needsCx = needCx(returnType, (a for (a, _) in arguments), True)
if "cx" not in argsPre and needsCx:
args.prepend(CGGeneric("cx"))
# Build up our actual call
self.cgRoot = CGList([], "\n")
call = CGGeneric(nativeMethodName)
if static:
call = CGWrapper(call, pre="%s::" % MakeNativeName(descriptor.interface.identifier.name))
else:
call = CGWrapper(call, pre="%s." % object)
call = CGList([call, CGWrapper(args, pre="(", post=")")])
self.cgRoot.append(CGList([
CGGeneric("let result: "),
result,
CGGeneric(" = "),
call,
CGGeneric(";"),
]))
if isFallible:
if static:
glob = "global.upcast::<GlobalScope>()"
else:
glob = "&this.global()"
self.cgRoot.append(CGGeneric(
"let result = match result {\n"
" Ok(result) => result,\n"
" Err(e) => {\n"
" throw_dom_exception(cx, %s, e);\n"
" return%s;\n"
" },\n"
"};" % (glob, errorResult)))
def define(self):
return self.cgRoot.define()
class CGPerSignatureCall(CGThing):
"""
This class handles the guts of generating code for a particular
call signature. A call signature consists of four things:
1) A return type, which can be None to indicate that there is no
actual return value (e.g. this is an attribute setter) or an
IDLType if there's an IDL type involved (including |void|).
2) An argument list, which is allowed to be empty.
3) A name of a native method to call.
4) Whether or not this method is static.
We also need to know whether this is a method or a getter/setter
to do error reporting correctly.
The idlNode parameter can be either a method or an attr. We can query
|idlNode.identifier| in both cases, so we can be agnostic between the two.
"""
# XXXbz For now each entry in the argument list is either an
# IDLArgument or a FakeArgument, but longer-term we may want to
# have ways of flagging things like JSContext* or optional_argc in
# there.
def __init__(self, returnType, argsPre, arguments, nativeMethodName, static,
descriptor, idlNode, argConversionStartsAt=0,
getter=False, setter=False):
CGThing.__init__(self)
self.returnType = returnType
self.descriptor = descriptor
self.idlNode = idlNode
self.extendedAttributes = descriptor.getExtendedAttributes(idlNode,
getter=getter,
setter=setter)
self.argsPre = argsPre
self.arguments = arguments
self.argCount = len(arguments)
cgThings = []
cgThings.extend([CGArgumentConverter(arguments[i], i, self.getArgs(),
self.getArgc(), self.descriptor,
invalidEnumValueFatal=not setter) for
i in range(argConversionStartsAt, self.argCount)])
errorResult = None
if self.isFallible():
errorResult = " false"
if idlNode.isMethod() and idlNode.isMaplikeOrSetlikeOrIterableMethod():
if idlNode.maplikeOrSetlikeOrIterable.isMaplike() or \
idlNode.maplikeOrSetlikeOrIterable.isSetlike():
raise TypeError('Maplike/Setlike methods are not supported yet')
else:
cgThings.append(CGIterableMethodGenerator(descriptor,
idlNode.maplikeOrSetlikeOrIterable,
idlNode.identifier.name))
else:
cgThings.append(CGCallGenerator(
errorResult,
self.getArguments(), self.argsPre, returnType,
self.extendedAttributes, descriptor, nativeMethodName,
static))
self.cgRoot = CGList(cgThings, "\n")
def getArgs(self):
return "args" if self.argCount > 0 else ""
def getArgc(self):
return "argc"
def getArguments(self):
return [(a, process_arg("arg" + str(i), a)) for (i, a) in enumerate(self.arguments)]
def isFallible(self):
return 'infallible' not in self.extendedAttributes
def wrap_return_value(self):
return wrapForType('args.rval()')
def define(self):
return (self.cgRoot.define() + "\n" + self.wrap_return_value())
class CGSwitch(CGList):
"""
A class to generate code for a Rust match expression (the moral
equivalent of a switch statement).
Takes three constructor arguments: an expression, a list of cases,
and an optional default.
Each case is a CGCase. The default is a CGThing for the body of
the default case, if any.
"""
def __init__(self, expression, cases, default=None):
CGList.__init__(self, [CGIndenter(c) for c in cases], "\n")
self.prepend(CGWrapper(CGGeneric(expression),
pre="match ", post=" {"))
if default is not None:
self.append(
CGIndenter(
CGWrapper(
CGIndenter(default),
pre="_ => {\n",
post="\n}"
)
)
)
self.append(CGGeneric("}"))
class CGCase(CGList):
"""
A class to generate code for a single match arm.
Takes three constructor arguments: an expression, a CGThing for
the body (allowed to be None if there is no body), and an optional
fallThrough argument (defaulting to False); fall-through is not
supported here and raises a TypeError if requested.
"""
def __init__(self, expression, body, fallThrough=False):
CGList.__init__(self, [], "\n")
self.append(CGWrapper(CGGeneric(expression), post=" => {"))
bodyList = CGList([body], "\n")
if fallThrough:
raise TypeError("fall through required but unsupported")
# bodyList.append(CGGeneric('panic!("fall through unsupported"); /* Fall through */'))
self.append(CGIndenter(bodyList))
self.append(CGGeneric("}"))
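# Illustrative sketch (not part of the codegen): combining the two classes
# above yields an ordinary Rust match:
#
#     sw = CGSwitch("tag",
#                   [CGCase("0", CGGeneric("Ok(())"))],
#                   default=CGGeneric("Err(())"))
#     # sw.define() yields:
#     #     match tag {
#     #         0 => {
#     #             Ok(())
#     #         }
#     #         _ => {
#     #             Err(())
#     #         }
#     #     }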
class CGGetterCall(CGPerSignatureCall):
"""
A class to generate a native object getter call for a particular IDL
getter.
"""
def __init__(self, argsPre, returnType, nativeMethodName, descriptor, attr):
CGPerSignatureCall.__init__(self, returnType, argsPre, [],
nativeMethodName, attr.isStatic(), descriptor,
attr, getter=True)
class FakeArgument():
"""
A class that quacks like an IDLArgument. This is used to make
setters look like method calls or for special operations.
"""
def __init__(self, type, interfaceMember, allowTreatNonObjectAsNull=False):
self.type = type
self.optional = False
self.variadic = False
self.defaultValue = None
self._allowTreatNonObjectAsNull = allowTreatNonObjectAsNull
self.treatNullAs = interfaceMember.treatNullAs
self.enforceRange = False
self.clamp = False
def allowTreatNonCallableAsNull(self):
return self._allowTreatNonObjectAsNull
class CGSetterCall(CGPerSignatureCall):
"""
A class to generate a native object setter call for a particular IDL
setter.
"""
def __init__(self, argsPre, argType, nativeMethodName, descriptor, attr):
CGPerSignatureCall.__init__(self, None, argsPre,
[FakeArgument(argType, attr, allowTreatNonObjectAsNull=True)],
nativeMethodName, attr.isStatic(), descriptor, attr,
setter=True)
def wrap_return_value(self):
# We have no return value
return "\nreturn true;"
def getArgc(self):
return "1"
class CGAbstractStaticBindingMethod(CGAbstractMethod):
"""
Common class to generate the JSNatives for all our static methods, getters
and setters. This will generate the function declaration and unwrap the
global object. Subclasses are expected to override the generate_code
function to do the rest of the work. This function should return a
CGThing which is already properly indented.
"""
def __init__(self, descriptor, name):
args = [
Argument('*mut JSContext', 'cx'),
Argument('libc::c_uint', 'argc'),
Argument('*mut JSVal', 'vp'),
]
CGAbstractMethod.__init__(self, descriptor, name, "bool", args, extern=True)
self.exposureSet = descriptor.interface.exposureSet
def definition_body(self):
preamble = "let global = GlobalScope::from_object(JS_CALLEE(cx, vp).to_object());\n"
if len(self.exposureSet) == 1:
preamble += "let global = Root::downcast::<dom::types::%s>(global).unwrap();\n" % list(self.exposureSet)[0]
return CGList([CGGeneric(preamble), self.generate_code()])
def generate_code(self):
raise NotImplementedError # Override me!
class CGSpecializedMethod(CGAbstractExternMethod):
"""
A class for generating the Rust code for a specialized method that the JIT
can call with lower overhead.
"""
def __init__(self, descriptor, method):
self.method = method
name = method.identifier.name
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('*const JSJitMethodCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args)
def definition_body(self):
nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
self.method)
return CGWrapper(CGMethodCall([], nativeName, self.method.isStatic(),
self.descriptor, self.method),
pre="let this = &*this;\n"
"let args = &*args;\n"
"let argc = args._base.argc_;\n")
@staticmethod
def makeNativeName(descriptor, method):
name = method.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
return MakeNativeName(nativeName)
class CGStaticMethod(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static method.
"""
def __init__(self, descriptor, method):
self.method = method
name = method.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
self.method)
setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n")
call = CGMethodCall(["&global"], nativeName, True, self.descriptor, self.method)
return CGList([setupArgs, call])
class CGSpecializedGetter(CGAbstractExternMethod):
"""
A class for generating the code for a specialized attribute getter
that the JIT can call with lower overhead.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'get_' + descriptor.internalNameFor(attr.identifier.name)
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', '_obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('JSJitGetterCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args)
def definition_body(self):
nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
self.attr)
return CGWrapper(CGGetterCall([], self.attr.type, nativeName,
self.descriptor, self.attr),
pre="let this = &*this;\n")
@staticmethod
def makeNativeName(descriptor, attr):
name = attr.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
nativeName = MakeNativeName(nativeName)
infallible = ('infallible' in
descriptor.getExtendedAttributes(attr, getter=True))
if attr.type.nullable() or not infallible:
return "Get" + nativeName
return nativeName
class CGStaticGetter(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static attribute getter.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'get_' + attr.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
self.attr)
setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n")
call = CGGetterCall(["&global"], self.attr.type, nativeName, self.descriptor,
self.attr)
return CGList([setupArgs, call])
class CGSpecializedSetter(CGAbstractExternMethod):
"""
A class for generating the code for a specialized attribute setter
that the JIT can call with lower overhead.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'set_' + descriptor.internalNameFor(attr.identifier.name)
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('JSJitSetterCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args)
def definition_body(self):
nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
self.attr)
return CGWrapper(CGSetterCall([], self.attr.type, nativeName,
self.descriptor, self.attr),
pre="let this = &*this;\n")
@staticmethod
def makeNativeName(descriptor, attr):
name = attr.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
return "Set" + MakeNativeName(nativeName)
class CGStaticSetter(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static attribute setter.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'set_' + attr.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
self.attr)
checkForArg = CGGeneric(
"let args = CallArgs::from_vp(vp, argc);\n"
"if argc == 0 {\n"
" throw_type_error(cx, \"Not enough arguments to %s setter.\");\n"
" return false;\n"
"}" % self.attr.identifier.name)
call = CGSetterCall(["&global"], self.attr.type, nativeName, self.descriptor,
self.attr)
return CGList([checkForArg, call])
class CGSpecializedForwardingSetter(CGSpecializedSetter):
"""
A class for generating the code for an IDL attribute forwarding setter.
"""
def __init__(self, descriptor, attr):
CGSpecializedSetter.__init__(self, descriptor, attr)
def definition_body(self):
attrName = self.attr.identifier.name
forwardToAttrName = self.attr.getExtendedAttribute("PutForwards")[0]
# JS_GetProperty and JS_SetProperty can only deal with ASCII
assert all(ord(c) < 128 for c in attrName)
assert all(ord(c) < 128 for c in forwardToAttrName)
return CGGeneric("""\
rooted!(in(cx) let mut v = UndefinedValue());
if !JS_GetProperty(cx, obj, %s as *const u8 as *const libc::c_char, v.handle_mut()) {
return false;
}
if !v.is_object() {
throw_type_error(cx, "Value.%s is not an object.");
return false;
}
rooted!(in(cx) let target_obj = v.to_object());
JS_SetProperty(cx, target_obj.handle(), %s as *const u8 as *const libc::c_char, args.get(0))
""" % (str_to_const_array(attrName), attrName, str_to_const_array(forwardToAttrName)))
class CGSpecializedReplaceableSetter(CGSpecializedSetter):
"""
A class for generating the code for an IDL replaceable attribute setter.
"""
def __init__(self, descriptor, attr):
CGSpecializedSetter.__init__(self, descriptor, attr)
def definition_body(self):
assert self.attr.readonly
name = str_to_const_array(self.attr.identifier.name)
# JS_DefineProperty can only deal with ASCII.
assert all(ord(c) < 128 for c in name)
return CGGeneric("""\
JS_DefineProperty(cx, obj, %s as *const u8 as *const libc::c_char,
args.get(0), JSPROP_ENUMERATE, None, None)""" % name)
class CGMemberJITInfo(CGThing):
"""
A class for generating the JITInfo for a property that points to
our specialized getter and setter.
"""
def __init__(self, descriptor, member):
self.member = member
self.descriptor = descriptor
def defineJitInfo(self, infoName, opName, opType, infallible, movable,
aliasSet, alwaysInSlot, lazilyInSlot, slotIndex,
returnTypes, args):
"""
aliasSet is a JSJitInfo_AliasSet value, without the "JSJitInfo_AliasSet::" bit.
args is None if we don't want to output argTypes for some
reason (e.g. we have overloads or we're not a method) and
otherwise an iterable of the arguments for this method.
"""
assert not movable or aliasSet != "AliasEverything" # Can't move write-aliasing things
assert not alwaysInSlot or movable # Things always in slots had better be movable
def jitInfoInitializer(isTypedMethod):
initializer = fill(
"""
JSJitInfo {
call: ${opName} as *const os::raw::c_void,
protoID: PrototypeList::ID::${name} as u16,
depth: ${depth},
_bitfield_1:
JSJitInfo::new_bitfield_1(
JSJitInfo_OpType::${opType} as u8,
JSJitInfo_AliasSet::${aliasSet} as u8,
JSValueType::${returnType} as u8,
${isInfallible},
${isMovable},
${isEliminatable},
${isAlwaysInSlot},
${isLazilyCachedInSlot},
${isTypedMethod},
${slotIndex} as u16,
)
}
""",
opName=opName,
name=self.descriptor.name,
depth=self.descriptor.interface.inheritanceDepth(),
opType=opType,
aliasSet=aliasSet,
returnType=reduce(CGMemberJITInfo.getSingleReturnType, returnTypes,
""),
isInfallible=toStringBool(infallible),
isMovable=toStringBool(movable),
# FIXME(nox): https://github.com/servo/servo/issues/10991
isEliminatable=toStringBool(False),
isAlwaysInSlot=toStringBool(alwaysInSlot),
isLazilyCachedInSlot=toStringBool(lazilyInSlot),
isTypedMethod=toStringBool(isTypedMethod),
slotIndex=slotIndex)
return initializer.rstrip()
if args is not None:
argTypes = "%s_argTypes" % infoName
args = [CGMemberJITInfo.getJSArgType(arg.type) for arg in args]
args.append("JSJitInfo_ArgType::ArgTypeListEnd as i32")
argTypesDecl = (
"const %s: [i32; %d] = [ %s ];\n" %
(argTypes, len(args), ", ".join(args)))
return fill(
"""
$*{argTypesDecl}
const ${infoName}: JSTypedMethodJitInfo = JSTypedMethodJitInfo {
base: ${jitInfo},
argTypes: &${argTypes} as *const _ as *const JSJitInfo_ArgType,
};
""",
argTypesDecl=argTypesDecl,
infoName=infoName,
jitInfo=indent(jitInfoInitializer(True)),
argTypes=argTypes)
return ("\n"
"const %s: JSJitInfo = %s;\n"
% (infoName, jitInfoInitializer(False)))
def define(self):
if self.member.isAttr():
internalMemberName = self.descriptor.internalNameFor(self.member.identifier.name)
getterinfo = ("%s_getterinfo" % internalMemberName)
getter = ("get_%s" % internalMemberName)
getterinfal = "infallible" in self.descriptor.getExtendedAttributes(self.member, getter=True)
movable = self.mayBeMovable() and getterinfal
aliasSet = self.aliasSet()
isAlwaysInSlot = self.member.getExtendedAttribute("StoreInSlot")
if self.member.slotIndices is not None:
assert isAlwaysInSlot or self.member.getExtendedAttribute("Cached")
isLazilyCachedInSlot = not isAlwaysInSlot
slotIndex = memberReservedSlot(self.member) # noqa:FIXME: memberReservedSlot is not defined
# We'll statically assert that this is not too big in
# CGUpdateMemberSlotsMethod, in the case when
# isAlwaysInSlot is true.
else:
isLazilyCachedInSlot = False
slotIndex = "0"
result = self.defineJitInfo(getterinfo, getter, "Getter",
getterinfal, movable, aliasSet,
isAlwaysInSlot, isLazilyCachedInSlot,
slotIndex,
[self.member.type], None)
if (not self.member.readonly or self.member.getExtendedAttribute("PutForwards")
or self.member.getExtendedAttribute("Replaceable")):
setterinfo = ("%s_setterinfo" % internalMemberName)
setter = ("set_%s" % internalMemberName)
# Setters are always fallible, since they have to do a typed unwrap.
result += self.defineJitInfo(setterinfo, setter, "Setter",
False, False, "AliasEverything",
False, False, "0",
[BuiltinTypes[IDLBuiltinType.Types.void]],
None)
return result
if self.member.isMethod():
methodinfo = ("%s_methodinfo" % self.member.identifier.name)
method = ("%s" % self.member.identifier.name)
# Methods are infallible if their native implementation is infallible,
# they have no arguments to unwrap, and their return type is infallible
# to wrap up for return.
sigs = self.member.signatures()
if len(sigs) != 1:
# Don't handle overloading. If there's more than one signature,
# one of them must take arguments.
methodInfal = False
args = None
movable = False
else:
sig = sigs[0]
# For methods that affect nothing, it's OK to set movable to our
# notion of infallible on the Rust side, without considering
# argument conversions, since argument conversions that can
# reliably throw would be effectful anyway and the jit doesn't
# move effectful things.
hasInfallibleImpl = "infallible" in self.descriptor.getExtendedAttributes(self.member)
movable = self.mayBeMovable() and hasInfallibleImpl
# XXXbz can we move the smarts about fallibility due to arg
# conversions into the JIT, using our new args stuff?
if (len(sig[1]) != 0):
# We have arguments or our return-value boxing can fail
methodInfal = False
else:
methodInfal = hasInfallibleImpl
# For now, only bother to output args if we're side-effect-free.
if self.member.affects == "Nothing":
args = sig[1]
else:
args = None
aliasSet = self.aliasSet()
result = self.defineJitInfo(methodinfo, method, "Method",
methodInfal, movable, aliasSet,
False, False, "0",
[s[0] for s in sigs], args)
return result
raise TypeError("Illegal member type to CGPropertyJITInfo")
def mayBeMovable(self):
"""
Returns whether this attribute or method may be movable, just
based on Affects/DependsOn annotations.
"""
affects = self.member.affects
dependsOn = self.member.dependsOn
assert affects in IDLInterfaceMember.AffectsValues
assert dependsOn in IDLInterfaceMember.DependsOnValues
# Things that are DependsOn=DeviceState are not movable, because we
# don't want them coalesced with each other or loop-hoisted, since
# their return value can change even if nothing is going on from our
# point of view.
return (affects == "Nothing" and
(dependsOn != "Everything" and dependsOn != "DeviceState"))
def aliasSet(self):
"""Returns the alias set to store in the jitinfo. This may not be the
effective alias set the JIT uses, depending on whether we have enough
information about our args to allow the JIT to prove that effectful
argument conversions won't happen.
"""
dependsOn = self.member.dependsOn
assert dependsOn in IDLInterfaceMember.DependsOnValues
if dependsOn == "Nothing" or dependsOn == "DeviceState":
assert self.member.affects == "Nothing"
return "AliasNone"
if dependsOn == "DOMState":
assert self.member.affects == "Nothing"
return "AliasDOMSets"
return "AliasEverything"
@staticmethod
def getJSReturnTypeTag(t):
if t.nullable():
# Sometimes it might return null, sometimes not
return "JSVAL_TYPE_UNKNOWN"
if t.isVoid():
# No return, every time
return "JSVAL_TYPE_UNDEFINED"
if t.isSequence():
return "JSVAL_TYPE_OBJECT"
if t.isMozMap():
return "JSVAL_TYPE_OBJECT"
if t.isGeckoInterface():
return "JSVAL_TYPE_OBJECT"
if t.isString():
return "JSVAL_TYPE_STRING"
if t.isEnum():
return "JSVAL_TYPE_STRING"
if t.isCallback():
return "JSVAL_TYPE_OBJECT"
if t.isAny():
# The whole point is to return various stuff
return "JSVAL_TYPE_UNKNOWN"
if t.isObject():
return "JSVAL_TYPE_OBJECT"
if t.isSpiderMonkeyInterface():
return "JSVAL_TYPE_OBJECT"
if t.isUnion():
u = t.unroll()
if u.hasNullableType:
# Might be null or not
return "JSVAL_TYPE_UNKNOWN"
return reduce(CGMemberJITInfo.getSingleReturnType,
u.flatMemberTypes, "")
if t.isDictionary():
return "JSVAL_TYPE_OBJECT"
if t.isDate():
return "JSVAL_TYPE_OBJECT"
if not t.isPrimitive():
raise TypeError("No idea what type " + str(t) + " is.")
tag = t.tag()
if tag == IDLType.Tags.bool:
return "JSVAL_TYPE_BOOLEAN"
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32]:
return "JSVAL_TYPE_INT32"
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
# These all use JS_NumberValue, which can return int or double.
# But TI treats "double" as meaning "int or double", so we're
# good to return JSVAL_TYPE_DOUBLE here.
return "JSVAL_TYPE_DOUBLE"
if tag != IDLType.Tags.uint32:
raise TypeError("No idea what type " + str(t) + " is.")
# uint32 is sometimes int and sometimes double.
return "JSVAL_TYPE_DOUBLE"
@staticmethod
def getSingleReturnType(existingType, t):
type = CGMemberJITInfo.getJSReturnTypeTag(t)
if existingType == "":
# First element of the list; just return its type
return type
if type == existingType:
return existingType
if ((type == "JSVAL_TYPE_DOUBLE" and
existingType == "JSVAL_TYPE_INT32") or
(existingType == "JSVAL_TYPE_DOUBLE" and
type == "JSVAL_TYPE_INT32")):
# Promote INT32 to DOUBLE as needed
return "JSVAL_TYPE_DOUBLE"
# Different types
return "JSVAL_TYPE_UNKNOWN"
@staticmethod
def getJSArgType(t):
assert not t.isVoid()
if t.nullable():
# Sometimes it might return null, sometimes not
return "JSJitInfo_ArgType::Null as i32 | %s" % CGMemberJITInfo.getJSArgType(t.inner)
if t.isSequence():
return "JSJitInfo_ArgType::Object as i32"
if t.isGeckoInterface():
return "JSJitInfo_ArgType::Object as i32"
if t.isString():
return "JSJitInfo_ArgType::String as i32"
if t.isEnum():
return "JSJitInfo_ArgType::String as i32"
if t.isCallback():
return "JSJitInfo_ArgType::Object as i32"
if t.isAny():
# The whole point is to return various stuff
return "JSJitInfo_ArgType::Any as i32"
if t.isObject():
return "JSJitInfo_ArgType::Object as i32"
if t.isSpiderMonkeyInterface():
return "JSJitInfo_ArgType::Object as i32"
if t.isUnion():
u = t.unroll()
type = "JSJitInfo::Null as i32" if u.hasNullableType else ""
return reduce(CGMemberJITInfo.getSingleArgType,
u.flatMemberTypes, type)
if t.isDictionary():
return "JSJitInfo_ArgType::Object as i32"
if t.isDate():
return "JSJitInfo_ArgType::Object as i32"
if not t.isPrimitive():
raise TypeError("No idea what type " + str(t) + " is.")
tag = t.tag()
if tag == IDLType.Tags.bool:
return "JSJitInfo_ArgType::Boolean as i32"
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32]:
return "JSJitInfo_ArgType::Integer as i32"
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
            # All of these can be represented as a JS number, and TI treats
            # "double" as meaning "int or double", so advertising
            # ArgType::Double here is correct.
return "JSJitInfo_ArgType::Double as i32"
if tag != IDLType.Tags.uint32:
raise TypeError("No idea what type " + str(t) + " is.")
# uint32 is sometimes int and sometimes double.
return "JSJitInfo_ArgType::Double as i32"
@staticmethod
def getSingleArgType(existingType, t):
type = CGMemberJITInfo.getJSArgType(t)
if existingType == "":
# First element of the list; just return its type
return type
if type == existingType:
return existingType
return "%s | %s" % (existingType, type)
def getEnumValueName(value):
# Some enum values can be empty strings. Others might have weird
# characters in them. Deal with the former by returning "_empty",
# deal with possible name collisions from that by throwing if the
# enum value is actually "_empty", and throw on any value
# containing non-ASCII chars for now. Replace all chars other than
# [0-9A-Za-z_] with '_'.
if re.match("[^\x20-\x7E]", value):
raise SyntaxError('Enum value "' + value + '" contains non-ASCII characters')
if re.match("^[0-9]", value):
raise SyntaxError('Enum value "' + value + '" starts with a digit')
value = re.sub(r'[^0-9A-Za-z_]', '_', value)
if re.match("^_[A-Z]|__", value):
raise SyntaxError('Enum value "' + value + '" is reserved by the C++ spec')
if value == "_empty":
raise SyntaxError('"_empty" is not an IDL enum value we support yet')
if value == "":
return "_empty"
return MakeNativeName(value)
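# Illustrative inputs for getEnumValueName (assuming MakeNativeName
# upper-cases the first character): "no-referrer" becomes "No_referrer" and
# "" becomes "_empty", while "2d" (leading digit) and any value containing
# a non-ASCII character such as "é" raise SyntaxError per the checks above.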
class CGEnum(CGThing):
def __init__(self, enum):
CGThing.__init__(self)
ident = enum.identifier.name
decl = """\
#[repr(usize)]
#[derive(JSTraceable, PartialEq, Copy, Clone, HeapSizeOf, Debug)]
pub enum %s {
%s
}
""" % (ident, ",\n ".join(map(getEnumValueName, enum.values())))
pairs = ",\n ".join(['("%s", super::%s::%s)' % (val, ident, getEnumValueName(val)) for val in enum.values()])
inner = """\
use dom::bindings::conversions::ToJSValConvertible;
use js::jsapi::{JSContext, MutableHandleValue};
use js::jsval::JSVal;
pub const pairs: &'static [(&'static str, super::%s)] = &[
%s,
];
impl super::%s {
pub fn as_str(&self) -> &'static str {
pairs[*self as usize].0
}
}
impl ToJSValConvertible for super::%s {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
pairs[*self as usize].0.to_jsval(cx, rval);
}
}
""" % (ident, pairs, ident, ident)
self.cgRoot = CGList([
CGGeneric(decl),
CGNamespace.build([ident + "Values"],
CGIndenter(CGGeneric(inner)), public=True),
])
def define(self):
return self.cgRoot.define()
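# A sketch of CGEnum output for a hypothetical
# `enum MediaKind { "audio", "video" };`:
#
#     #[repr(usize)]
#     #[derive(JSTraceable, PartialEq, Copy, Clone, HeapSizeOf, Debug)]
#     pub enum MediaKind {
#         Audio,
#         Video
#     }
#
# plus a MediaKindValues module whose `pairs` slice maps each variant back
# to its IDL string, which as_str() and to_jsval() index by discriminant.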
def convertConstIDLValueToRust(value):
tag = value.type.tag()
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32, IDLType.Tags.uint32,
IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
return str(value.value)
if tag == IDLType.Tags.bool:
return toStringBool(value.value)
raise TypeError("Const value of unhandled type: " + value.type)
class CGConstant(CGThing):
def __init__(self, constant):
CGThing.__init__(self)
self.constant = constant
def define(self):
name = self.constant.identifier.name
value = convertConstIDLValueToRust(self.constant.value)
return "pub const %s: %s = %s;\n" % (name, builtinNames[self.constant.value.type.tag()], value)
def getUnionTypeTemplateVars(type, descriptorProvider):
if type.isGeckoInterface():
name = type.inner.identifier.name
typeName = descriptorProvider.getDescriptor(name).returnType
elif type.isEnum():
name = type.inner.identifier.name
typeName = name
elif type.isDictionary():
name = type.name
typeName = name
elif type.isSequence() or type.isMozMap():
name = type.name
inner = getUnionTypeTemplateVars(innerContainerType(type), descriptorProvider)
typeName = wrapInNativeContainerType(type, CGGeneric(inner["typeName"])).define()
elif type.isByteString():
name = type.name
typeName = "ByteString"
elif type.isDOMString():
name = type.name
typeName = "DOMString"
elif type.isUSVString():
name = type.name
typeName = "USVString"
elif type.isPrimitive():
name = type.name
typeName = builtinNames[type.tag()]
elif type.isObject():
name = type.name
typeName = "Heap<*mut JSObject>"
else:
raise TypeError("Can't handle %s in unions yet" % type)
info = getJSToNativeConversionInfo(
type, descriptorProvider, failureCode="return Ok(None);",
exceptionCode='return Err(());',
isDefinitelyObject=True,
isMember="Union")
template = info.template
jsConversion = string.Template(template).substitute({
"val": "value",
})
jsConversion = CGWrapper(CGGeneric(jsConversion), pre="Ok(Some(", post="))")
return {
"name": name,
"typeName": typeName,
"jsConversion": jsConversion,
}
class CGUnionStruct(CGThing):
def __init__(self, type, descriptorProvider):
assert not type.nullable()
assert not type.hasNullableType
CGThing.__init__(self)
self.type = type
self.descriptorProvider = descriptorProvider
def define(self):
templateVars = map(lambda t: getUnionTypeTemplateVars(t, self.descriptorProvider),
self.type.flatMemberTypes)
enumValues = [
" %s(%s)," % (v["name"], v["typeName"]) for v in templateVars
]
enumConversions = [
" %s::%s(ref inner) => inner.to_jsval(cx, rval),"
% (self.type, v["name"]) for v in templateVars
]
return ("""\
#[derive(JSTraceable)]
pub enum %s {
%s
}
impl ToJSValConvertible for %s {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
match *self {
%s
}
}
}
""") % (self.type, "\n".join(enumValues), self.type, "\n".join(enumConversions))
class CGUnionConversionStruct(CGThing):
def __init__(self, type, descriptorProvider):
assert not type.nullable()
assert not type.hasNullableType
CGThing.__init__(self)
self.type = type
self.descriptorProvider = descriptorProvider
def from_jsval(self):
memberTypes = self.type.flatMemberTypes
names = []
conversions = []
        def get_name(memberType):
            if memberType.isGeckoInterface():
                return memberType.inner.identifier.name
            return memberType.name
def get_match(name):
return (
"match %s::TryConvertTo%s(cx, value) {\n"
" Err(_) => return Err(()),\n"
" Ok(Some(value)) => return Ok(ConversionResult::Success(%s::%s(value))),\n"
" Ok(None) => (),\n"
"}\n") % (self.type, name, self.type, name)
interfaceMemberTypes = filter(lambda t: t.isNonCallbackInterface(), memberTypes)
if len(interfaceMemberTypes) > 0:
typeNames = [get_name(memberType) for memberType in interfaceMemberTypes]
interfaceObject = CGList(CGGeneric(get_match(typeName)) for typeName in typeNames)
names.extend(typeNames)
else:
interfaceObject = None
arrayObjectMemberTypes = filter(lambda t: t.isSequence(), memberTypes)
if len(arrayObjectMemberTypes) > 0:
assert len(arrayObjectMemberTypes) == 1
typeName = arrayObjectMemberTypes[0].name
arrayObject = CGGeneric(get_match(typeName))
names.append(typeName)
else:
arrayObject = None
dateObjectMemberTypes = filter(lambda t: t.isDate(), memberTypes)
if len(dateObjectMemberTypes) > 0:
assert len(dateObjectMemberTypes) == 1
raise TypeError("Can't handle dates in unions.")
else:
dateObject = None
callbackMemberTypes = filter(lambda t: t.isCallback() or t.isCallbackInterface(), memberTypes)
if len(callbackMemberTypes) > 0:
assert len(callbackMemberTypes) == 1
raise TypeError("Can't handle callbacks in unions.")
else:
callbackObject = None
dictionaryMemberTypes = filter(lambda t: t.isDictionary(), memberTypes)
if len(dictionaryMemberTypes) > 0:
assert len(dictionaryMemberTypes) == 1
typeName = dictionaryMemberTypes[0].name
dictionaryObject = CGGeneric(get_match(typeName))
names.append(typeName)
else:
dictionaryObject = None
objectMemberTypes = filter(lambda t: t.isObject(), memberTypes)
if len(objectMemberTypes) > 0:
assert len(objectMemberTypes) == 1
typeName = objectMemberTypes[0].name
object = CGGeneric(get_match(typeName))
names.append(typeName)
else:
object = None
mozMapMemberTypes = filter(lambda t: t.isMozMap(), memberTypes)
if len(mozMapMemberTypes) > 0:
assert len(mozMapMemberTypes) == 1
typeName = mozMapMemberTypes[0].name
mozMapObject = CGGeneric(get_match(typeName))
names.append(typeName)
else:
mozMapObject = None
hasObjectTypes = interfaceObject or arrayObject or dateObject or object or mozMapObject
if hasObjectTypes:
# "object" is not distinguishable from other types
assert not object or not (interfaceObject or arrayObject or dateObject or callbackObject or mozMapObject)
templateBody = CGList([], "\n")
if interfaceObject:
templateBody.append(interfaceObject)
if arrayObject:
templateBody.append(arrayObject)
if mozMapObject:
templateBody.append(mozMapObject)
conversions.append(CGIfWrapper("value.get().is_object()", templateBody))
if dictionaryObject:
assert not hasObjectTypes
conversions.append(dictionaryObject)
stringTypes = [t for t in memberTypes if t.isString() or t.isEnum()]
numericTypes = [t for t in memberTypes if t.isNumeric()]
booleanTypes = [t for t in memberTypes if t.isBoolean()]
if stringTypes or numericTypes or booleanTypes:
assert len(stringTypes) <= 1
assert len(numericTypes) <= 1
assert len(booleanTypes) <= 1
def getStringOrPrimitiveConversion(memberType):
typename = get_name(memberType)
return CGGeneric(get_match(typename))
other = []
stringConversion = map(getStringOrPrimitiveConversion, stringTypes)
numericConversion = map(getStringOrPrimitiveConversion, numericTypes)
booleanConversion = map(getStringOrPrimitiveConversion, booleanTypes)
if stringConversion:
if booleanConversion:
other.append(CGIfWrapper("value.get().is_boolean()", booleanConversion[0]))
if numericConversion:
other.append(CGIfWrapper("value.get().is_number()", numericConversion[0]))
other.append(stringConversion[0])
elif numericConversion:
if booleanConversion:
other.append(CGIfWrapper("value.get().is_boolean()", booleanConversion[0]))
other.append(numericConversion[0])
else:
assert booleanConversion
other.append(booleanConversion[0])
conversions.append(CGList(other, "\n\n"))
conversions.append(CGGeneric(
"throw_not_in_union(cx, \"%s\");\n"
"Err(())" % ", ".join(names)))
method = CGWrapper(
CGIndenter(CGList(conversions, "\n\n")),
pre="unsafe fn from_jsval(cx: *mut JSContext,\n"
" value: HandleValue,\n"
" _option: ())\n"
" -> Result<ConversionResult<%s>, ()> {\n" % self.type,
post="\n}")
return CGWrapper(
CGIndenter(CGList([
CGGeneric("type Config = ();"),
method,
], "\n")),
pre="impl FromJSValConvertible for %s {\n" % self.type,
post="\n}")
def try_method(self, t):
templateVars = getUnionTypeTemplateVars(t, self.descriptorProvider)
returnType = "Result<Option<%s>, ()>" % templateVars["typeName"]
jsConversion = templateVars["jsConversion"]
return CGWrapper(
CGIndenter(jsConversion, 4),
# TryConvertToObject is unused, but not generating it while generating others is tricky.
pre="#[allow(dead_code)] unsafe fn TryConvertTo%s(cx: *mut JSContext, value: HandleValue) -> %s {\n"
% (t.name, returnType),
post="\n}")
def define(self):
from_jsval = self.from_jsval()
methods = CGIndenter(CGList([
self.try_method(t) for t in self.type.flatMemberTypes
], "\n\n"))
return """
%s
impl %s {
%s
}
""" % (from_jsval.define(), self.type, methods.define())
class ClassItem:
""" Use with CGClass """
def __init__(self, name, visibility):
self.name = name
self.visibility = visibility
def declare(self, cgClass):
assert False
def define(self, cgClass):
assert False
class ClassBase(ClassItem):
def __init__(self, name, visibility='pub'):
ClassItem.__init__(self, name, visibility)
def declare(self, cgClass):
return '%s %s' % (self.visibility, self.name)
def define(self, cgClass):
# Only in the header
return ''
class ClassMethod(ClassItem):
def __init__(self, name, returnType, args, inline=False, static=False,
virtual=False, const=False, bodyInHeader=False,
templateArgs=None, visibility='public', body=None,
breakAfterReturnDecl="\n",
breakAfterSelf="\n", override=False):
"""
override indicates whether to flag the method as MOZ_OVERRIDE
"""
assert not override or virtual
assert not (override and static)
self.returnType = returnType
self.args = args
        # The `inline` argument is accepted for interface compatibility but
        # deliberately ignored: generated Rust has no separate
        # declaration/definition split.
        self.inline = False
self.static = static
self.virtual = virtual
self.const = const
        # Likewise, bodies always live "in the header" for Rust output.
        self.bodyInHeader = True
self.templateArgs = templateArgs
self.body = body
self.breakAfterReturnDecl = breakAfterReturnDecl
self.breakAfterSelf = breakAfterSelf
self.override = override
ClassItem.__init__(self, name, visibility)
def getDecorators(self, declaring):
decorators = []
if self.inline:
decorators.append('inline')
if declaring:
if self.static:
decorators.append('static')
if self.virtual:
decorators.append('virtual')
if decorators:
return ' '.join(decorators) + ' '
return ''
def getBody(self):
# Override me or pass a string to constructor
assert self.body is not None
return self.body
def declare(self, cgClass):
templateClause = '<%s>' % ', '.join(self.templateArgs) \
if self.bodyInHeader and self.templateArgs else ''
args = ', '.join([a.declare() for a in self.args])
if self.bodyInHeader:
body = CGIndenter(CGGeneric(self.getBody())).define()
body = ' {\n' + body + '\n}'
else:
body = ';'
return string.Template(
"${decorators}%s"
"${visibility}fn ${name}${templateClause}(${args})${returnType}${const}${override}${body}%s" %
(self.breakAfterReturnDecl, self.breakAfterSelf)
).substitute({
'templateClause': templateClause,
'decorators': self.getDecorators(True),
'returnType': (" -> %s" % self.returnType) if self.returnType else "",
'name': self.name,
'const': ' const' if self.const else '',
'override': ' MOZ_OVERRIDE' if self.override else '',
'args': args,
'body': body,
'visibility': self.visibility + ' ' if self.visibility != 'priv' else ''
})
def define(self, cgClass):
pass
class ClassConstructor(ClassItem):
"""
Used for adding a constructor to a CGClass.
args is a list of Argument objects that are the arguments taken by the
constructor.
inline should be True if the constructor should be marked inline.
bodyInHeader should be True if the body should be placed in the class
declaration in the header.
visibility determines the visibility of the constructor (public,
protected, private), defaults to private.
explicit should be True if the constructor should be marked explicit.
baseConstructors is a list of strings containing calls to base constructors,
defaults to None.
body contains a string with the code for the constructor, defaults to empty.
"""
def __init__(self, args, inline=False, bodyInHeader=False,
visibility="priv", explicit=False, baseConstructors=None,
body=""):
self.args = args
        # As in ClassMethod, `inline` is deliberately ignored for Rust output.
        self.inline = False
self.bodyInHeader = bodyInHeader
self.explicit = explicit
self.baseConstructors = baseConstructors or []
self.body = body
ClassItem.__init__(self, None, visibility)
def getDecorators(self, declaring):
decorators = []
if self.explicit:
decorators.append('explicit')
if self.inline and declaring:
decorators.append('inline')
if decorators:
return ' '.join(decorators) + ' '
return ''
def getInitializationList(self, cgClass):
items = [str(c) for c in self.baseConstructors]
for m in cgClass.members:
if not m.static:
initialize = m.body
if initialize:
items.append(m.name + "(" + initialize + ")")
if len(items) > 0:
return '\n : ' + ',\n '.join(items)
return ''
def getBody(self, cgClass):
initializers = [" parent: %s" % str(self.baseConstructors[0])]
return (self.body + (
"let mut ret = Rc::new(%s {\n"
"%s\n"
"});\n"
"// Note: callback cannot be moved after calling init.\n"
"match Rc::get_mut(&mut ret) {\n"
" Some(ref mut callback) => unsafe { callback.parent.init(%s, %s) },\n"
" None => unreachable!(),\n"
"};\n"
"ret") % (cgClass.name, '\n'.join(initializers),
self.args[0].name, self.args[1].name))
def declare(self, cgClass):
args = ', '.join([a.declare() for a in self.args])
body = ' ' + self.getBody(cgClass)
body = stripTrailingWhitespace(body.replace('\n', '\n '))
if len(body) > 0:
body += '\n'
body = ' {\n' + body + '}'
return string.Template("""\
pub fn ${decorators}new(${args}) -> Rc<${className}>${body}
""").substitute({'decorators': self.getDecorators(True),
'className': cgClass.getNameString(),
'args': args,
'body': body})
def define(self, cgClass):
if self.bodyInHeader:
return ''
args = ', '.join([a.define() for a in self.args])
        body = ' ' + self.getBody(cgClass)
body = '\n' + stripTrailingWhitespace(body.replace('\n', '\n '))
if len(body) > 0:
body += '\n'
return string.Template("""\
${decorators}
${className}::${className}(${args})${initializationList}
{${body}}
""").substitute({'decorators': self.getDecorators(False),
'className': cgClass.getNameString(),
'args': args,
'initializationList': self.getInitializationList(cgClass),
'body': body})
class ClassMember(ClassItem):
def __init__(self, name, type, visibility="priv", static=False,
body=None):
self.type = type
self.static = static
self.body = body
ClassItem.__init__(self, name, visibility)
def declare(self, cgClass):
return '%s %s: %s,\n' % (self.visibility, self.name, self.type)
def define(self, cgClass):
if not self.static:
return ''
if self.body:
body = " = " + self.body
else:
body = ""
return '%s %s::%s%s;\n' % (self.type, cgClass.getNameString(),
self.name, body)
class CGClass(CGThing):
def __init__(self, name, bases=[], members=[], constructors=[],
destructor=None, methods=[],
typedefs=[], enums=[], unions=[], templateArgs=[],
templateSpecialization=[],
disallowCopyConstruction=False, indent='',
decorators='',
extradeclarations=''):
CGThing.__init__(self)
self.name = name
self.bases = bases
self.members = members
self.constructors = constructors
# We store our single destructor in a list, since all of our
# code wants lists of members.
self.destructors = [destructor] if destructor else []
self.methods = methods
self.typedefs = typedefs
self.enums = enums
self.unions = unions
self.templateArgs = templateArgs
self.templateSpecialization = templateSpecialization
self.disallowCopyConstruction = disallowCopyConstruction
self.indent = indent
self.decorators = decorators
self.extradeclarations = extradeclarations
def getNameString(self):
className = self.name
if self.templateSpecialization:
className = className + \
'<%s>' % ', '.join([str(a) for a
in self.templateSpecialization])
return className
def define(self):
result = ''
if self.templateArgs:
templateArgs = [a.declare() for a in self.templateArgs]
templateArgs = templateArgs[len(self.templateSpecialization):]
result = result + self.indent + 'template <%s>\n' % ','.join([str(a) for a in templateArgs])
if self.templateSpecialization:
specialization = \
'<%s>' % ', '.join([str(a) for a in self.templateSpecialization])
else:
specialization = ''
myself = ''
if self.decorators != '':
myself += self.decorators + '\n'
myself += '%spub struct %s%s' % (self.indent, self.name, specialization)
result += myself
assert len(self.bases) == 1 # XXjdm Can we support multiple inheritance?
result += ' {\n'
if self.bases:
self.members = [ClassMember("parent", self.bases[0].name, "pub")] + self.members
result += CGIndenter(CGGeneric(self.extradeclarations),
len(self.indent)).define()
def declareMembers(cgClass, memberList):
result = ''
for member in memberList:
declaration = member.declare(cgClass)
declaration = CGIndenter(CGGeneric(declaration)).define()
result = result + declaration
return result
if self.disallowCopyConstruction:
class DisallowedCopyConstructor(object):
def __init__(self):
self.visibility = "private"
def declare(self, cgClass):
name = cgClass.getNameString()
return ("%s(const %s&) MOZ_DELETE;\n"
"void operator=(const %s) MOZ_DELETE;\n" % (name, name, name))
disallowedCopyConstructors = [DisallowedCopyConstructor()]
else:
disallowedCopyConstructors = []
order = [(self.enums, ''), (self.unions, ''),
(self.typedefs, ''), (self.members, '')]
for (memberList, separator) in order:
memberString = declareMembers(self, memberList)
if self.indent:
memberString = CGIndenter(CGGeneric(memberString),
len(self.indent)).define()
result = result + memberString
result += self.indent + '}\n\n'
result += 'impl %s {\n' % self.name
order = [(self.constructors + disallowedCopyConstructors, '\n'),
                 (self.destructors, '\n'), (self.methods, '\n')]
for (memberList, separator) in order:
memberString = declareMembers(self, memberList)
if self.indent:
memberString = CGIndenter(CGGeneric(memberString),
len(self.indent)).define()
result = result + memberString
result += "}"
return result
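# Rough shape of CGClass output for a callback class with one base (names
# here are placeholders): a `pub struct Foo { pub parent: Base, ... }`
# block, with the parent field injected from bases above, followed by an
# `impl Foo { ... }` block holding the constructors and methods.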
class CGProxySpecialOperation(CGPerSignatureCall):
"""
Base class for classes for calling an indexed or named special operation
(don't use this directly, use the derived classes below).
"""
def __init__(self, descriptor, operation):
nativeName = MakeNativeName(descriptor.binaryNameFor(operation))
operation = descriptor.operations[operation]
assert len(operation.signatures()) == 1
signature = operation.signatures()[0]
(returnType, arguments) = signature
if operation.isGetter() and not returnType.nullable():
returnType = IDLNullableType(returnType.location, returnType)
# We pass len(arguments) as the final argument so that the
# CGPerSignatureCall won't do any argument conversion of its own.
CGPerSignatureCall.__init__(self, returnType, "", arguments, nativeName,
False, descriptor, operation,
len(arguments))
if operation.isSetter() or operation.isCreator():
# arguments[0] is the index or name of the item that we're setting.
argument = arguments[1]
info = getJSToNativeConversionInfo(
argument.type, descriptor, treatNullAs=argument.treatNullAs,
exceptionCode="return false;")
template = info.template
declType = info.declType
templateValues = {
"val": "value.handle()",
}
self.cgRoot.prepend(instantiateJSToNativeConversionTemplate(
template, templateValues, declType, argument.identifier.name))
self.cgRoot.prepend(CGGeneric("rooted!(in(cx) let value = desc.value);"))
def getArguments(self):
args = [(a, process_arg(a.identifier.name, a)) for a in self.arguments]
return args
def wrap_return_value(self):
if not self.idlNode.isGetter() or self.templateValues is None:
return ""
wrap = CGGeneric(wrapForType(**self.templateValues))
wrap = CGIfWrapper("let Some(result) = result", wrap)
return "\n" + wrap.define()
class CGProxyIndexedGetter(CGProxySpecialOperation):
"""
Class to generate a call to an indexed getter. If templateValues is not None
the returned value will be wrapped with wrapForType using templateValues.
"""
def __init__(self, descriptor, templateValues=None):
self.templateValues = templateValues
CGProxySpecialOperation.__init__(self, descriptor, 'IndexedGetter')
class CGProxyIndexedSetter(CGProxySpecialOperation):
"""
Class to generate a call to an indexed setter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'IndexedSetter')
class CGProxyNamedOperation(CGProxySpecialOperation):
"""
Class to generate a call to a named operation.
"""
def __init__(self, descriptor, name):
CGProxySpecialOperation.__init__(self, descriptor, name)
def define(self):
# Our first argument is the id we're getting.
argName = self.arguments[0].identifier.name
return ("let %s = string_jsid_to_string(cx, id);\n"
"let this = UnwrapProxy(proxy);\n"
"let this = &*this;\n" % argName +
CGProxySpecialOperation.define(self))
class CGProxyNamedGetter(CGProxyNamedOperation):
"""
    Class to generate a call to a named getter. If templateValues is not None
the returned value will be wrapped with wrapForType using templateValues.
"""
def __init__(self, descriptor, templateValues=None):
self.templateValues = templateValues
CGProxySpecialOperation.__init__(self, descriptor, 'NamedGetter')
class CGProxyNamedPresenceChecker(CGProxyNamedGetter):
"""
Class to generate a call that checks whether a named property exists.
For now, we just delegate to CGProxyNamedGetter
"""
def __init__(self, descriptor):
CGProxyNamedGetter.__init__(self, descriptor)
class CGProxyNamedSetter(CGProxyNamedOperation):
"""
Class to generate a call to a named setter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'NamedSetter')
class CGProxyNamedDeleter(CGProxyNamedOperation):
"""
Class to generate a call to a named deleter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'NamedDeleter')
class CGProxyUnwrap(CGAbstractMethod):
def __init__(self, descriptor):
args = [Argument('HandleObject', 'obj')]
CGAbstractMethod.__init__(self, descriptor, "UnwrapProxy",
'*const ' + descriptor.concreteType, args,
alwaysInline=True, unsafe=True)
def definition_body(self):
return CGGeneric("""\
/*if (xpc::WrapperFactory::IsXrayWrapper(obj)) {
obj = js::UnwrapObject(obj);
}*/
//MOZ_ASSERT(IsProxy(obj));
let box_ = GetProxyPrivate(obj.get()).to_private() as *const %s;
return box_;""" % self.descriptor.concreteType)
class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('MutableHandle<PropertyDescriptor>', 'desc')]
CGAbstractExternMethod.__init__(self, descriptor, "getOwnPropertyDescriptor",
"bool", args)
self.descriptor = descriptor
def getBody(self):
indexedGetter = self.descriptor.operations['IndexedGetter']
indexedSetter = self.descriptor.operations['IndexedSetter']
get = ""
if indexedGetter or indexedSetter:
get = "let index = get_array_index_from_id(cx, id);\n"
if indexedGetter:
attrs = "JSPROP_ENUMERATE"
if self.descriptor.operations['IndexedSetter'] is None:
attrs += " | JSPROP_READONLY"
# FIXME(#11868) Should assign to desc.value, desc.get() is a copy.
fillDescriptor = ("desc.get().value = result_root.get();\n"
"fill_property_descriptor(desc, proxy.get(), %s);\n"
"return true;" % attrs)
templateValues = {
'jsvalRef': 'result_root.handle_mut()',
'successCode': fillDescriptor,
'pre': 'rooted!(in(cx) let mut result_root = UndefinedValue());'
}
get += ("if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define() + "\n" +
"}\n")
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
attrs = []
if not self.descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
attrs.append("JSPROP_ENUMERATE")
if self.descriptor.operations['NamedSetter'] is None:
attrs.append("JSPROP_READONLY")
if attrs:
attrs = " | ".join(attrs)
else:
attrs = "0"
# FIXME(#11868) Should assign to desc.value, desc.get() is a copy.
fillDescriptor = ("desc.get().value = result_root.get();\n"
"fill_property_descriptor(desc, proxy.get(), %s);\n"
"return true;" % attrs)
templateValues = {
'jsvalRef': 'result_root.handle_mut()',
'successCode': fillDescriptor,
'pre': 'rooted!(in(cx) let mut result_root = UndefinedValue());'
}
# Once we start supporting OverrideBuiltins we need to make
# ResolveOwnProperty or EnumerateOwnProperties filter out named
# properties that shadow prototype properties.
namedGet = """
if RUST_JSID_IS_STRING(id) {
let mut has_on_proto = false;
if !has_property_on_prototype(cx, proxy, id, &mut has_on_proto) {
return false;
}
if !has_on_proto {
%s
}
}
""" % CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues), 8).define()
else:
namedGet = ""
# FIXME(#11868) Should assign to desc.obj, desc.get() is a copy.
return get + """\
rooted!(in(cx) let mut expando = ptr::null_mut());
get_expando_object(proxy, expando.handle_mut());
//if (!xpc::WrapperFactory::IsXrayWrapper(proxy) && (expando = GetExpandoObject(proxy))) {
if !expando.is_null() {
if !JS_GetPropertyDescriptorById(cx, expando.handle(), id, desc) {
return false;
}
if !desc.obj.is_null() {
// Pretend the property lives on the wrapper.
desc.get().obj = proxy.get();
return true;
}
}
""" + namedGet + """\
desc.get().obj = ptr::null_mut();
return true;"""
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('Handle<PropertyDescriptor>', 'desc'),
Argument('*mut ObjectOpResult', 'opresult')]
CGAbstractExternMethod.__init__(self, descriptor, "defineProperty", "bool", args)
self.descriptor = descriptor
def getBody(self):
set = ""
indexedSetter = self.descriptor.operations['IndexedSetter']
if indexedSetter:
set += ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedSetter(self.descriptor)).define() +
" return (*opresult).succeed();\n" +
"}\n")
elif self.descriptor.operations['IndexedGetter']:
set += ("if get_array_index_from_id(cx, id).is_some() {\n" +
" return (*opresult).failNoIndexedSetter();\n" +
"}\n")
namedSetter = self.descriptor.operations['NamedSetter']
if namedSetter:
if self.descriptor.hasUnforgeableMembers:
raise TypeError("Can't handle a named setter on an interface that has "
"unforgeables. Figure out how that should work!")
set += ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedSetter(self.descriptor)).define() +
" return (*opresult).succeed();\n" +
"} else {\n" +
" return false;\n" +
"}\n")
else:
set += ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedGetter(self.descriptor)).define() +
" if result.is_some() {\n"
" return (*opresult).failNoNamedSetter();\n"
" }\n"
"}\n")
set += "return proxyhandler::define_property(%s);" % ", ".join(a.name for a in self.args)
return set
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_delete(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('*mut ObjectOpResult', 'res')]
CGAbstractExternMethod.__init__(self, descriptor, "delete", "bool", args)
self.descriptor = descriptor
def getBody(self):
set = ""
if self.descriptor.operations['NamedDeleter']:
if self.descriptor.hasUnforgeableMembers:
raise TypeError("Can't handle a deleter on an interface that has "
"unforgeables. Figure out how that should work!")
set += CGProxyNamedDeleter(self.descriptor).define()
set += "return proxyhandler::delete(%s);" % ", ".join(a.name for a in self.args)
return set
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_ownPropertyKeys(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'proxy'),
Argument('*mut AutoIdVector', 'props')]
CGAbstractExternMethod.__init__(self, descriptor, "own_property_keys", "bool", args)
self.descriptor = descriptor
def getBody(self):
body = dedent(
"""
let unwrapped_proxy = UnwrapProxy(proxy);
""")
if self.descriptor.operations['IndexedGetter']:
body += dedent(
"""
for i in 0..(*unwrapped_proxy).Length() {
rooted!(in(cx) let rooted_jsid = int_to_jsid(i as i32));
AppendToAutoIdVector(props, rooted_jsid.handle().get());
}
""")
if self.descriptor.operations['NamedGetter']:
body += dedent(
"""
for name in (*unwrapped_proxy).SupportedPropertyNames() {
let cstring = CString::new(name).unwrap();
let jsstring = JS_AtomizeAndPinString(cx, cstring.as_ptr());
rooted!(in(cx) let rooted = jsstring);
let jsid = INTERNED_STRING_TO_JSID(cx, rooted.handle().get());
rooted!(in(cx) let rooted_jsid = jsid);
AppendToAutoIdVector(props, rooted_jsid.handle().get());
}
""")
body += dedent(
"""
rooted!(in(cx) let mut expando = ptr::null_mut());
get_expando_object(proxy, expando.handle_mut());
if !expando.is_null() {
GetPropertyKeys(cx, expando.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props);
}
return true;
""")
return body
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_getOwnEnumerablePropertyKeys(CGAbstractExternMethod):
def __init__(self, descriptor):
assert (descriptor.operations["IndexedGetter"] and
descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"))
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'proxy'),
Argument('*mut AutoIdVector', 'props')]
CGAbstractExternMethod.__init__(self, descriptor,
"getOwnEnumerablePropertyKeys", "bool", args)
self.descriptor = descriptor
def getBody(self):
body = dedent(
"""
let unwrapped_proxy = UnwrapProxy(proxy);
""")
if self.descriptor.operations['IndexedGetter']:
body += dedent(
"""
for i in 0..(*unwrapped_proxy).Length() {
rooted!(in(cx) let rooted_jsid = int_to_jsid(i as i32));
AppendToAutoIdVector(props, rooted_jsid.handle().get());
}
""")
body += dedent(
"""
rooted!(in(cx) let mut expando = ptr::null_mut());
get_expando_object(proxy, expando.handle_mut());
if !expando.is_null() {
GetPropertyKeys(cx, expando.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props);
}
return true;
""")
return body
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_hasOwn(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'), Argument('*mut bool', 'bp')]
CGAbstractExternMethod.__init__(self, descriptor, "hasOwn", "bool", args)
self.descriptor = descriptor
def getBody(self):
indexedGetter = self.descriptor.operations['IndexedGetter']
if indexedGetter:
indexed = ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor)).define() + "\n" +
" *bp = result.is_some();\n" +
" return true;\n" +
"}\n\n")
else:
indexed = ""
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
named = """\
if RUST_JSID_IS_STRING(id) {
let mut has_on_proto = false;
if !has_property_on_prototype(cx, proxy, id, &mut has_on_proto) {
return false;
}
if !has_on_proto {
%s
*bp = result.is_some();
return true;
}
}
""" % CGIndenter(CGProxyNamedGetter(self.descriptor), 8).define()
else:
named = ""
return indexed + """\
rooted!(in(cx) let mut expando = ptr::null_mut());
get_expando_object(proxy, expando.handle_mut());
if !expando.is_null() {
let ok = JS_HasPropertyById(cx, expando.handle(), id, bp);
if !ok || *bp {
return ok;
}
}
""" + named + """\
*bp = false;
return true;"""
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_get(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleValue', 'receiver'), Argument('HandleId', 'id'),
Argument('MutableHandleValue', 'vp')]
CGAbstractExternMethod.__init__(self, descriptor, "get", "bool", args)
self.descriptor = descriptor
def getBody(self):
getFromExpando = """\
rooted!(in(cx) let mut expando = ptr::null_mut());
get_expando_object(proxy, expando.handle_mut());
if !expando.is_null() {
let mut hasProp = false;
if !JS_HasPropertyById(cx, expando.handle(), id, &mut hasProp) {
return false;
}
if hasProp {
return JS_ForwardGetPropertyTo(cx, expando.handle(), id, receiver, vp);
}
}"""
templateValues = {
'jsvalRef': 'vp',
'successCode': 'return true;',
}
indexedGetter = self.descriptor.operations['IndexedGetter']
if indexedGetter:
getIndexedOrExpando = ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define())
getIndexedOrExpando += """\
// Even if we don't have this index, we don't forward the
// get on to our expando object.
} else {
%s
}
""" % (stripTrailingWhitespace(getFromExpando.replace('\n', '\n ')))
else:
getIndexedOrExpando = getFromExpando + "\n"
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
getNamed = ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() +
"}\n")
else:
getNamed = ""
return """\
//MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy),
//"Should not have a XrayWrapper here");
%s
let mut found = false;
if !get_property_on_prototype(cx, proxy, receiver, id, &mut found, vp) {
return false;
}
if found {
return true;
}
%s
vp.set(UndefinedValue());
return true;""" % (getIndexedOrExpando, getNamed)
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_className(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_proxy')]
CGAbstractExternMethod.__init__(self, descriptor, "className", "*const i8", args, doesNotPanic=True)
self.descriptor = descriptor
def getBody(self):
return '%s as *const u8 as *const i8' % str_to_const_array(self.descriptor.name)
def definition_body(self):
return CGGeneric(self.getBody())
class CGAbstractClassHook(CGAbstractExternMethod):
"""
Meant for implementing JSClass hooks, like Finalize or Trace. Does very raw
'this' unwrapping as it assumes that the unwrapped type is always known.
"""
def __init__(self, descriptor, name, returnType, args, doesNotPanic=False):
        CGAbstractExternMethod.__init__(self, descriptor, name, returnType,
                                        args, doesNotPanic=doesNotPanic)
def definition_body_prologue(self):
return CGGeneric("""
let this = native_from_object::<%s>(obj).unwrap();
""" % self.descriptor.concreteType)
def definition_body(self):
return CGList([
self.definition_body_prologue(),
self.generate_code(),
])
def generate_code(self):
raise NotImplementedError # Override me!
def finalizeHook(descriptor, hookName, context):
release = ""
if descriptor.isGlobal():
release += """\
finalize_global(obj);
"""
elif descriptor.weakReferenceable:
release += """\
let weak_box_ptr = JS_GetReservedSlot(obj, DOM_WEAK_SLOT).to_private() as *mut WeakBox<%s>;
if !weak_box_ptr.is_null() {
let count = {
let weak_box = &*weak_box_ptr;
assert!(weak_box.value.get().is_some());
assert!(weak_box.count.get() > 0);
weak_box.value.set(None);
let count = weak_box.count.get() - 1;
weak_box.count.set(count);
count
};
if count == 0 {
mem::drop(Box::from_raw(weak_box_ptr));
}
}
""" % descriptor.concreteType
release += """\
if !this.is_null() {
// The pointer can be null if the object is the unforgeable holder of that interface.
let _ = Box::from_raw(this as *mut %s);
}
debug!("%s finalize: {:p}", this);\
""" % (descriptor.concreteType, descriptor.concreteType)
return release
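# A sketch of the weak-box protocol the weakReferenceable branch above
# relies on: the DOM object and each WeakRef share one heap-allocated
# WeakBox whose `count` tracks outstanding references. Finalization clears
# `value` (so weak upgrades start failing) and decrements `count`;
# whichever side reaches zero last (here, or a WeakRef drop elsewhere)
# frees the box itself.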
class CGClassTraceHook(CGAbstractClassHook):
"""
A hook to trace through our native object; used for GC and CC
"""
def __init__(self, descriptor):
args = [Argument('*mut JSTracer', 'trc'), Argument('*mut JSObject', 'obj')]
CGAbstractClassHook.__init__(self, descriptor, TRACE_HOOK_NAME, 'void',
args, doesNotPanic=True)
self.traceGlobal = descriptor.isGlobal()
def generate_code(self):
body = [CGGeneric("if this.is_null() { return; } // GC during obj creation\n"
"(*this).trace(%s);" % self.args[0].name)]
if self.traceGlobal:
body += [CGGeneric("trace_global(trc, obj);")]
return CGList(body, "\n")
class CGClassConstructHook(CGAbstractExternMethod):
"""
JS-visible constructor for our objects
"""
def __init__(self, descriptor, constructor=None):
args = [Argument('*mut JSContext', 'cx'), Argument('u32', 'argc'), Argument('*mut JSVal', 'vp')]
name = CONSTRUCT_HOOK_NAME
if constructor:
name += "_" + constructor.identifier.name
else:
constructor = descriptor.interface.ctor()
assert constructor
CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args)
self.constructor = constructor
self.exposureSet = descriptor.interface.exposureSet
def definition_body(self):
preamble = """let global = GlobalScope::from_object(JS_CALLEE(cx, vp).to_object());\n"""
if len(self.exposureSet) == 1:
preamble += "let global = Root::downcast::<dom::types::%s>(global).unwrap();\n" % list(self.exposureSet)[0]
preamble += """let args = CallArgs::from_vp(vp, argc);\n"""
preamble = CGGeneric(preamble)
name = self.constructor.identifier.name
nativeName = MakeNativeName(self.descriptor.binaryNameFor(name))
callGenerator = CGMethodCall(["&global"], nativeName, True,
self.descriptor, self.constructor)
return CGList([preamble, callGenerator])
class CGClassFinalizeHook(CGAbstractClassHook):
"""
A hook for finalize, used to release our native object.
"""
def __init__(self, descriptor):
args = [Argument('*mut JSFreeOp', '_fop'), Argument('*mut JSObject', 'obj')]
CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME,
'void', args)
def generate_code(self):
return CGGeneric(finalizeHook(self.descriptor, self.name, self.args[0].name))
class CGDOMJSProxyHandlerDOMClass(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
return "static Class: DOMClass = " + DOMClass(self.descriptor) + ";\n"
class CGInterfaceTrait(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
def attribute_arguments(needCx, argument=None):
if needCx:
yield "cx", "*mut JSContext"
if argument:
yield "value", argument_type(descriptor, argument)
def members():
for m in descriptor.interface.members:
if (m.isMethod() and not m.isStatic() and
not m.isMaplikeOrSetlikeOrIterableMethod() and
(not m.isIdentifierLess() or m.isStringifier())):
name = CGSpecializedMethod.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m)
for idx, (rettype, arguments) in enumerate(m.signatures()):
arguments = method_arguments(descriptor, rettype, arguments)
rettype = return_type(descriptor, rettype, infallible)
yield name + ('_' * idx), arguments, rettype
elif m.isAttr() and not m.isStatic():
name = CGSpecializedGetter.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m, getter=True)
yield (name,
attribute_arguments(typeNeedsCx(m.type, True)),
return_type(descriptor, m.type, infallible))
if not m.readonly:
name = CGSpecializedSetter.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m, setter=True)
if infallible:
rettype = "()"
else:
rettype = "ErrorResult"
yield name, attribute_arguments(typeNeedsCx(m.type, False), m.type), rettype
if descriptor.proxy:
for name, operation in descriptor.operations.iteritems():
if not operation or operation.isStringifier():
continue
assert len(operation.signatures()) == 1
rettype, arguments = operation.signatures()[0]
infallible = 'infallible' in descriptor.getExtendedAttributes(operation)
if operation.isGetter():
if not rettype.nullable():
rettype = IDLNullableType(rettype.location, rettype)
arguments = method_arguments(descriptor, rettype, arguments)
# If this interface 'supports named properties', then we
# should be able to access 'supported property names'
#
# WebIDL, Second Draft, section 3.2.4.5
# https://heycam.github.io/webidl/#idl-named-properties
if operation.isNamed():
yield "SupportedPropertyNames", [], "Vec<DOMString>"
else:
arguments = method_arguments(descriptor, rettype, arguments)
rettype = return_type(descriptor, rettype, infallible)
yield name, arguments, rettype
def fmt(arguments):
return "".join(", %s: %s" % argument for argument in arguments)
        def contains_unsafe_arg(arguments):
            # An *mut JSContext parameter makes the trait method unsafe.
            return any(ty == '*mut JSContext' for _, ty in arguments)
methods = []
for name, arguments, rettype in members():
arguments = list(arguments)
methods.append(CGGeneric("%sfn %s(&self%s) -> %s;\n" % (
'unsafe ' if contains_unsafe_arg(arguments) else '',
name, fmt(arguments), rettype))
)
if methods:
self.cgRoot = CGWrapper(CGIndenter(CGList(methods, "")),
pre="pub trait %sMethods {\n" % descriptor.interface.identifier.name,
post="}")
else:
self.cgRoot = CGGeneric("")
self.empty = not methods
def define(self):
return self.cgRoot.define()
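# Illustrative trait emitted for a hypothetical
# `interface Counter { attribute unsigned long value; };`, assuming the
# usual MakeNativeName casing of member names:
#
#     pub trait CounterMethods {
#         fn Value(&self) -> u32;
#         fn SetValue(&self, value: u32) -> ();
#     }
#
# Setters return () when infallible and ErrorResult otherwise, per the
# members() generator above.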
class CGWeakReferenceableTrait(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
assert descriptor.weakReferenceable
self.code = "impl WeakReferenceable for %s {}" % descriptor.interface.identifier.name
def define(self):
return self.code
def generate_imports(config, cgthings, descriptors, callbacks=None, dictionaries=None, enums=None):
if not callbacks:
callbacks = []
if not dictionaries:
dictionaries = []
if not enums:
enums = []
return CGImports(cgthings, descriptors, callbacks, dictionaries, enums, [
'core::nonzero::NonZero',
'js',
'js::JSCLASS_GLOBAL_SLOT_COUNT',
'js::JSCLASS_IS_DOMJSCLASS',
'js::JSCLASS_IS_GLOBAL',
'js::JSCLASS_RESERVED_SLOTS_MASK',
'js::JS_CALLEE',
'js::error::throw_type_error',
'js::jsapi::AutoIdVector',
'js::jsapi::Call',
'js::jsapi::CallArgs',
'js::jsapi::CurrentGlobalOrNull',
'js::jsapi::FreeOp',
'js::jsapi::GetPropertyKeys',
'js::jsapi::GetWellKnownSymbol',
'js::jsapi::Handle',
'js::jsapi::HandleId',
'js::jsapi::HandleObject',
'js::jsapi::HandleValue',
'js::jsapi::HandleValueArray',
'js::jsapi::Heap',
'js::jsapi::INTERNED_STRING_TO_JSID',
'js::jsapi::IsCallable',
'js::jsapi::JSAutoCompartment',
'js::jsapi::JSCLASS_RESERVED_SLOTS_SHIFT',
'js::jsapi::JSClass',
'js::jsapi::JSContext',
'js::jsapi::JSFreeOp',
'js::jsapi::JSFunctionSpec',
'js::jsapi::JSITER_HIDDEN',
'js::jsapi::JSITER_OWNONLY',
'js::jsapi::JSITER_SYMBOLS',
'js::jsapi::JSJitGetterCallArgs',
'js::jsapi::JSJitInfo',
'js::jsapi::JSJitInfo_AliasSet',
'js::jsapi::JSJitInfo_ArgType',
'js::jsapi::JSJitInfo_OpType',
'js::jsapi::JSJitMethodCallArgs',
'js::jsapi::JSJitSetterCallArgs',
'js::jsapi::JSNative',
'js::jsapi::JSNativeWrapper',
'js::jsapi::JSObject',
'js::jsapi::JSPROP_ENUMERATE',
'js::jsapi::JSPROP_PERMANENT',
'js::jsapi::JSPROP_READONLY',
'js::jsapi::JSPROP_SHARED',
'js::jsapi::JSPropertySpec',
'js::jsapi::JSString',
'js::jsapi::JSTracer',
'js::jsapi::JSType',
'js::jsapi::JSTypedMethodJitInfo',
'js::jsapi::JSValueType',
'js::jsapi::JS_AtomizeAndPinString',
'js::jsapi::JS_CallFunctionValue',
'js::jsapi::JS_CopyPropertiesFrom',
'js::jsapi::JS_DefineProperty',
'js::jsapi::JS_DefinePropertyById2',
'js::jsapi::JS_ForwardGetPropertyTo',
'js::jsapi::JS_GetErrorPrototype',
'js::jsapi::JS_GetFunctionPrototype',
'js::jsapi::JS_GetGlobalForObject',
'js::jsapi::JS_GetIteratorPrototype',
'js::jsapi::JS_GetObjectPrototype',
'js::jsapi::JS_GetProperty',
'js::jsapi::JS_GetPropertyById',
'js::jsapi::JS_GetPropertyDescriptorById',
'js::jsapi::JS_GetReservedSlot',
'js::jsapi::JS_HasProperty',
'js::jsapi::JS_HasPropertyById',
'js::jsapi::JS_InitializePropertiesFromCompatibleNativeObject',
'js::jsapi::JS_NewObject',
'js::jsapi::JS_NewObjectWithGivenProto',
'js::jsapi::JS_NewObjectWithoutMetadata',
'js::jsapi::JS_ObjectIsDate',
'js::jsapi::JS_SetImmutablePrototype',
'js::jsapi::JS_SetProperty',
'js::jsapi::JS_SetReservedSlot',
'js::jsapi::JS_SplicePrototype',
'js::jsapi::JS_WrapValue',
'js::jsapi::MutableHandle',
'js::jsapi::MutableHandleObject',
'js::jsapi::MutableHandleValue',
'js::jsapi::ObjectOpResult',
'js::jsapi::PropertyDescriptor',
'js::jsapi::RootedId',
'js::jsapi::RootedObject',
'js::jsapi::RootedString',
'js::jsapi::SymbolCode',
'js::jsapi::jsid',
'js::jsval::JSVal',
'js::jsval::NullValue',
'js::jsval::ObjectValue',
'js::jsval::ObjectOrNullValue',
'js::jsval::PrivateValue',
'js::jsval::UndefinedValue',
'js::glue::AppendToAutoIdVector',
'js::glue::CallJitGetterOp',
'js::glue::CallJitMethodOp',
'js::glue::CallJitSetterOp',
'js::glue::CreateProxyHandler',
'js::glue::GetProxyPrivate',
'js::glue::NewProxyObject',
'js::glue::ProxyTraps',
'js::glue::RUST_JSID_IS_STRING',
'js::glue::RUST_SYMBOL_TO_JSID',
'js::glue::int_to_jsid',
'js::panic::maybe_resume_unwind',
'js::panic::wrap_panic',
'js::rust::GCMethods',
'js::rust::define_methods',
'js::rust::define_properties',
'js::rust::get_object_class',
'dom',
'dom::bindings',
'dom::bindings::codegen::InterfaceObjectMap',
'dom::bindings::constant::ConstantSpec',
'dom::bindings::constant::ConstantVal',
'dom::bindings::interface::ConstructorClassHook',
'dom::bindings::interface::InterfaceConstructorBehavior',
'dom::bindings::interface::NonCallbackInterfaceObjectClass',
'dom::bindings::interface::create_callback_interface_object',
'dom::bindings::interface::create_global_object',
'dom::bindings::interface::create_interface_prototype_object',
'dom::bindings::interface::create_named_constructors',
'dom::bindings::interface::create_noncallback_interface_object',
'dom::bindings::interface::define_guarded_constants',
'dom::bindings::interface::define_guarded_methods',
'dom::bindings::interface::define_guarded_properties',
'dom::bindings::interface::is_exposed_in',
'dom::bindings::iterable::Iterable',
'dom::bindings::iterable::IteratorType',
'dom::bindings::js::JS',
'dom::bindings::js::Root',
'dom::bindings::js::RootedReference',
'dom::bindings::namespace::NamespaceObjectClass',
'dom::bindings::namespace::create_namespace_object',
'dom::bindings::reflector::MutDomObject',
'dom::bindings::reflector::DomObject',
'dom::bindings::utils::AsVoidPtr',
'dom::bindings::utils::DOMClass',
'dom::bindings::utils::DOMJSClass',
'dom::bindings::utils::DOM_PROTO_UNFORGEABLE_HOLDER_SLOT',
'dom::bindings::utils::JSCLASS_DOM_GLOBAL',
'dom::bindings::utils::ProtoOrIfaceArray',
'dom::bindings::utils::enumerate_global',
'dom::bindings::utils::finalize_global',
'dom::bindings::utils::find_enum_value',
'dom::bindings::utils::generic_getter',
'dom::bindings::utils::generic_lenient_getter',
'dom::bindings::utils::generic_lenient_setter',
'dom::bindings::utils::generic_method',
'dom::bindings::utils::generic_setter',
'dom::bindings::utils::get_array_index_from_id',
'dom::bindings::utils::get_dictionary_property',
'dom::bindings::utils::get_property_on_prototype',
'dom::bindings::utils::get_proto_or_iface_array',
'dom::bindings::utils::has_property_on_prototype',
'dom::bindings::utils::is_platform_object',
'dom::bindings::utils::resolve_global',
'dom::bindings::utils::set_dictionary_property',
'dom::bindings::utils::trace_global',
'dom::bindings::trace::JSTraceable',
'dom::bindings::trace::RootedTraceable',
'dom::bindings::trace::RootedTraceableBox',
'dom::bindings::callback::CallSetup',
'dom::bindings::callback::CallbackContainer',
'dom::bindings::callback::CallbackInterface',
'dom::bindings::callback::CallbackFunction',
'dom::bindings::callback::CallbackObject',
'dom::bindings::callback::ExceptionHandling',
'dom::bindings::callback::wrap_call_this_object',
'dom::bindings::conversions::ConversionBehavior',
'dom::bindings::conversions::ConversionResult',
'dom::bindings::conversions::DOM_OBJECT_SLOT',
'dom::bindings::conversions::FromJSValConvertible',
'dom::bindings::conversions::IDLInterface',
'dom::bindings::conversions::StringificationBehavior',
'dom::bindings::conversions::ToJSValConvertible',
'dom::bindings::conversions::is_array_like',
'dom::bindings::conversions::native_from_handlevalue',
'dom::bindings::conversions::native_from_object',
'dom::bindings::conversions::private_from_object',
'dom::bindings::conversions::root_from_handleobject',
'dom::bindings::conversions::root_from_handlevalue',
'dom::bindings::conversions::root_from_object',
'dom::bindings::conversions::string_jsid_to_string',
'dom::bindings::codegen::PrototypeList',
'dom::bindings::codegen::RegisterBindings',
'dom::bindings::codegen::UnionTypes',
'dom::bindings::error::Error',
'dom::bindings::error::ErrorResult',
'dom::bindings::error::Fallible',
'dom::bindings::error::Error::JSFailed',
'dom::bindings::error::throw_dom_exception',
'dom::bindings::guard::Condition',
'dom::bindings::guard::Guard',
'dom::bindings::inheritance::Castable',
'dom::bindings::proxyhandler',
'dom::bindings::proxyhandler::ensure_expando_object',
'dom::bindings::proxyhandler::fill_property_descriptor',
'dom::bindings::proxyhandler::get_expando_object',
'dom::bindings::proxyhandler::get_property_descriptor',
'dom::bindings::mozmap::MozMap',
'dom::bindings::num::Finite',
'dom::bindings::str::ByteString',
'dom::bindings::str::DOMString',
'dom::bindings::str::USVString',
'dom::bindings::weakref::DOM_WEAK_SLOT',
'dom::bindings::weakref::WeakBox',
'dom::bindings::weakref::WeakReferenceable',
'dom::browsingcontext::BrowsingContext',
'dom::globalscope::GlobalScope',
'mem::heap_size_of_raw_self_and_children',
'libc',
'servo_config::prefs::PREFS',
'std::borrow::ToOwned',
'std::cmp',
'std::mem',
'std::num',
'std::os',
'std::panic',
'std::ptr',
'std::str',
'std::rc',
'std::rc::Rc',
'std::default::Default',
'std::ffi::CString',
], config)
class CGDescriptor(CGThing):
def __init__(self, descriptor, config, soleDescriptor):
CGThing.__init__(self)
assert not descriptor.concrete or not descriptor.interface.isCallback()
reexports = []
def reexportedName(name):
if name.startswith(descriptor.name):
return name
if not soleDescriptor:
return '%s as %s%s' % (name, descriptor.name, name)
return name
cgThings = []
unscopableNames = []
for m in descriptor.interface.members:
if (m.isMethod() and
(not m.isIdentifierLess() or m == descriptor.operations["Stringifier"])):
if m.getExtendedAttribute("Unscopable"):
assert not m.isStatic()
unscopableNames.append(m.identifier.name)
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticMethod(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedMethod(descriptor, m))
cgThings.append(CGMemberJITInfo(descriptor, m))
elif m.isAttr():
if m.stringifier:
raise TypeError("Stringifier attributes not supported yet. "
"See https://github.com/servo/servo/issues/7590\n"
"%s" % m.location)
if m.getExtendedAttribute("Unscopable"):
assert not m.isStatic()
unscopableNames.append(m.identifier.name)
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticGetter(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedGetter(descriptor, m))
if not m.readonly:
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticSetter(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedSetter(descriptor, m))
elif m.getExtendedAttribute("PutForwards"):
cgThings.append(CGSpecializedForwardingSetter(descriptor, m))
elif m.getExtendedAttribute("Replaceable"):
cgThings.append(CGSpecializedReplaceableSetter(descriptor, m))
if (not m.isStatic() and not descriptor.interface.isCallback()):
cgThings.append(CGMemberJITInfo(descriptor, m))
if descriptor.concrete:
cgThings.append(CGClassFinalizeHook(descriptor))
cgThings.append(CGClassTraceHook(descriptor))
# If there are no constant members, don't make a module for constants
constMembers = [CGConstant(m) for m in descriptor.interface.members if m.isConst()]
if constMembers:
cgThings.append(CGNamespace.build([descriptor.name + "Constants"],
CGIndenter(CGList(constMembers)),
public=True))
reexports.append(descriptor.name + 'Constants')
if descriptor.proxy:
cgThings.append(CGDefineProxyHandler(descriptor))
properties = PropertyArrays(descriptor)
if descriptor.concrete:
if descriptor.proxy:
# cgThings.append(CGProxyIsProxy(descriptor))
cgThings.append(CGProxyUnwrap(descriptor))
cgThings.append(CGDOMJSProxyHandlerDOMClass(descriptor))
cgThings.append(CGDOMJSProxyHandler_ownPropertyKeys(descriptor))
if descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
cgThings.append(CGDOMJSProxyHandler_getOwnEnumerablePropertyKeys(descriptor))
cgThings.append(CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor))
cgThings.append(CGDOMJSProxyHandler_className(descriptor))
cgThings.append(CGDOMJSProxyHandler_get(descriptor))
cgThings.append(CGDOMJSProxyHandler_hasOwn(descriptor))
if descriptor.operations['IndexedSetter'] or descriptor.operations['NamedSetter']:
cgThings.append(CGDOMJSProxyHandler_defineProperty(descriptor))
# We want to prevent indexed deleters from compiling at all.
assert not descriptor.operations['IndexedDeleter']
if descriptor.operations['NamedDeleter']:
cgThings.append(CGDOMJSProxyHandler_delete(descriptor))
# cgThings.append(CGDOMJSProxyHandler(descriptor))
# cgThings.append(CGIsMethod(descriptor))
pass
else:
cgThings.append(CGDOMJSClass(descriptor))
pass
if descriptor.isGlobal():
cgThings.append(CGWrapGlobalMethod(descriptor, properties))
else:
cgThings.append(CGWrapMethod(descriptor))
reexports.append('Wrap')
haveUnscopables = False
if not descriptor.interface.isCallback() and not descriptor.interface.isNamespace():
if unscopableNames:
haveUnscopables = True
cgThings.append(
CGList([CGGeneric("const unscopable_names: &'static [&'static [u8]] = &["),
CGIndenter(CGList([CGGeneric(str_to_const_array(name)) for
name in unscopableNames], ",\n")),
CGGeneric("];\n")], "\n"))
if descriptor.concrete or descriptor.hasDescendants():
cgThings.append(CGIDLInterface(descriptor))
interfaceTrait = CGInterfaceTrait(descriptor)
cgThings.append(interfaceTrait)
if not interfaceTrait.empty:
reexports.append('%sMethods' % descriptor.name)
if descriptor.weakReferenceable:
cgThings.append(CGWeakReferenceableTrait(descriptor))
cgThings.append(CGGeneric(str(properties)))
if not descriptor.interface.getExtendedAttribute("Inline"):
if not descriptor.interface.isCallback() and not descriptor.interface.isNamespace():
cgThings.append(CGGetProtoObjectMethod(descriptor))
reexports.append('GetProtoObject')
cgThings.append(CGPrototypeJSClass(descriptor))
if descriptor.interface.hasInterfaceObject():
if descriptor.interface.ctor():
cgThings.append(CGClassConstructHook(descriptor))
for ctor in descriptor.interface.namedConstructors:
cgThings.append(CGClassConstructHook(descriptor, ctor))
if not descriptor.interface.isCallback():
cgThings.append(CGInterfaceObjectJSClass(descriptor))
if descriptor.shouldHaveGetConstructorObjectMethod():
cgThings.append(CGGetConstructorObjectMethod(descriptor))
reexports.append('GetConstructorObject')
if descriptor.register:
cgThings.append(CGDefineDOMInterfaceMethod(descriptor))
reexports.append('DefineDOMInterface')
cgThings.append(CGConstructorEnabled(descriptor))
cgThings.append(CGCreateInterfaceObjectsMethod(descriptor, properties, haveUnscopables))
cgThings = generate_imports(config, CGList(cgThings, '\n'), [descriptor])
cgThings = CGWrapper(CGNamespace(toBindingNamespace(descriptor.name),
cgThings, public=True),
post='\n')
if reexports:
reexports = ', '.join(map(lambda name: reexportedName(name), reexports))
cgThings = CGList([CGGeneric('pub use self::%s::{%s};' % (toBindingNamespace(descriptor.name), reexports)),
cgThings], '\n')
self.cgRoot = cgThings
def define(self):
return self.cgRoot.define()
class CGNonNamespacedEnum(CGThing):
def __init__(self, enumName, names, first, comment="", deriving="", repr=""):
# Account for first value
entries = ["%s = %s" % (names[0], first)] + names[1:]
# Append a Last.
entries.append('#[allow(dead_code)] Last = ' + str(first + len(entries)))
# Indent.
entries = [' ' + e for e in entries]
# Build the enum body.
enumstr = comment + 'pub enum %s {\n%s\n}\n' % (enumName, ',\n'.join(entries))
if repr:
enumstr = ('#[repr(%s)]\n' % repr) + enumstr
if deriving:
enumstr = ('#[derive(%s)]\n' % deriving) + enumstr
curr = CGGeneric(enumstr)
# Add some whitespace padding.
curr = CGWrapper(curr, pre='\n', post='\n')
# Add the typedef
# typedef = '\ntypedef %s::%s %s;\n\n' % (namespace, enumName, enumName)
# curr = CGList([curr, CGGeneric(typedef)])
# Save the result.
self.node = curr
def define(self):
return self.node.define()
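# For example (sketch), CGNonNamespacedEnum('ID', ['Window', 'Node'], 0,
# deriving="PartialEq", repr="u16").define() yields roughly:
#
#   #[derive(PartialEq)]
#   #[repr(u16)]
#   pub enum ID {
#       Window = 0,
#       Node,
#       #[allow(dead_code)] Last = 2
#   }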
class CGDictionary(CGThing):
def __init__(self, dictionary, descriptorProvider):
self.dictionary = dictionary
if all(CGDictionary(d, descriptorProvider).generatable for
d in CGDictionary.getDictionaryDependencies(dictionary)):
self.generatable = True
else:
self.generatable = False
# Nothing else to do here
return
self.memberInfo = [
(member,
getJSToNativeConversionInfo(member.type,
descriptorProvider,
isMember="Dictionary",
defaultValue=member.defaultValue,
exceptionCode="return Err(());"))
for member in dictionary.members]
def define(self):
if not self.generatable:
return ""
return self.struct() + "\n" + self.impl()
def struct(self):
d = self.dictionary
if d.parent:
inheritance = " pub parent: %s::%s,\n" % (self.makeModuleName(d.parent),
self.makeClassName(d.parent))
else:
inheritance = ""
memberDecls = [" pub %s: %s," %
(self.makeMemberName(m[0].identifier.name), self.getMemberType(m))
for m in self.memberInfo]
return (string.Template(
"#[derive(JSTraceable)]\n"
"pub struct ${selfName} {\n" +
"${inheritance}" +
"\n".join(memberDecls) + "\n" +
"}").substitute({"selfName": self.makeClassName(d),
"inheritance": inheritance}))
def impl(self):
d = self.dictionary
if d.parent:
initParent = ("parent: {\n"
" match try!(%s::%s::new(cx, val)) {\n"
" ConversionResult::Success(v) => v,\n"
" ConversionResult::Failure(error) => {\n"
" throw_type_error(cx, &error);\n"
" return Err(());\n"
" }\n"
" }\n"
"},\n" % (self.makeModuleName(d.parent),
self.makeClassName(d.parent)))
else:
initParent = ""
def memberInit(memberInfo):
member, _ = memberInfo
name = self.makeMemberName(member.identifier.name)
conversion = self.getMemberConversion(memberInfo, member.type)
return CGGeneric("%s: %s,\n" % (name, conversion.define()))
def varInsert(varName, dictionaryName):
insertion = ("rooted!(in(cx) let mut %s_js = UndefinedValue());\n"
"%s.to_jsval(cx, %s_js.handle_mut());\n"
"set_dictionary_property(cx, obj.handle(), \"%s\", %s_js.handle()).unwrap();"
% (varName, varName, varName, dictionaryName, varName))
return CGGeneric(insertion)
def memberInsert(memberInfo):
member, _ = memberInfo
name = self.makeMemberName(member.identifier.name)
if member.optional and not member.defaultValue:
insertion = CGIfWrapper("let Some(ref %s) = self.%s" % (name, name),
varInsert(name, member.identifier.name))
else:
insertion = CGGeneric("let %s = &self.%s;\n%s" %
(name, name, varInsert(name, member.identifier.name).define()))
return CGGeneric("%s\n" % insertion.define())
memberInits = CGList([memberInit(m) for m in self.memberInfo])
memberInserts = CGList([memberInsert(m) for m in self.memberInfo])
return string.Template(
"impl ${selfName} {\n"
" pub unsafe fn empty(cx: *mut JSContext) -> ${selfName} {\n"
" match ${selfName}::new(cx, HandleValue::null()) {\n"
" Ok(ConversionResult::Success(v)) => v,\n"
" _ => unreachable!(),\n"
" }\n"
" }\n"
" pub unsafe fn new(cx: *mut JSContext, val: HandleValue) \n"
" -> Result<ConversionResult<${selfName}>, ()> {\n"
" let object = if val.get().is_null_or_undefined() {\n"
" ptr::null_mut()\n"
" } else if val.get().is_object() {\n"
" val.get().to_object()\n"
" } else {\n"
" throw_type_error(cx, \"Value not an object.\");\n"
" return Err(());\n"
" };\n"
" rooted!(in(cx) let object = object);\n"
" Ok(ConversionResult::Success(${selfName} {\n"
"${initParent}"
"${initMembers}"
" }))\n"
" }\n"
"}\n"
"\n"
"impl FromJSValConvertible for ${selfName} {\n"
" type Config = ();\n"
" unsafe fn from_jsval(cx: *mut JSContext, value: HandleValue, _option: ())\n"
" -> Result<ConversionResult<${selfName}>, ()> {\n"
" ${selfName}::new(cx, value)\n"
" }\n"
"}\n"
"\n"
"impl ToJSValConvertible for ${selfName} {\n"
" unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {\n"
" rooted!(in(cx) let obj = JS_NewObject(cx, ptr::null()));\n"
"${insertMembers}"
" rval.set(ObjectOrNullValue(obj.get()))\n"
" }\n"
"}\n").substitute({
"selfName": self.makeClassName(d),
"initParent": CGIndenter(CGGeneric(initParent), indentLevel=12).define(),
"initMembers": CGIndenter(memberInits, indentLevel=12).define(),
"insertMembers": CGIndenter(memberInserts, indentLevel=8).define(),
})
@staticmethod
def makeDictionaryName(dictionary):
return dictionary.identifier.name
def makeClassName(self, dictionary):
return self.makeDictionaryName(dictionary)
@staticmethod
def makeModuleName(dictionary):
return getModuleFromObject(dictionary)
def getMemberType(self, memberInfo):
member, info = memberInfo
declType = info.declType
if member.optional and not member.defaultValue:
declType = CGWrapper(info.declType, pre="Option<", post=">")
return declType.define()
def getMemberConversion(self, memberInfo, memberType):
def indent(s):
return CGIndenter(CGGeneric(s), 12).define()
member, info = memberInfo
templateBody = info.template
default = info.default
replacements = {"val": "rval.handle()"}
conversion = string.Template(templateBody).substitute(replacements)
assert (member.defaultValue is None) == (default is None)
if not member.optional:
assert default is None
default = ("throw_type_error(cx, \"Missing required member \\\"%s\\\".\");\n"
"return Err(());") % member.identifier.name
elif not default:
default = "None"
conversion = "Some(%s)" % conversion
conversion = (
"{\n"
" rooted!(in(cx) let mut rval = UndefinedValue());\n"
" match try!(get_dictionary_property(cx, object.handle(), \"%s\", rval.handle_mut())) {\n"
" true => {\n"
"%s\n"
" },\n"
" false => {\n"
"%s\n"
" },\n"
" }\n"
"}") % (member.identifier.name, indent(conversion), indent(default))
return CGGeneric(conversion)
@staticmethod
def makeMemberName(name):
# Can't use Rust keywords as member names.
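        # e.g. "type" -> "type_" (assuming "type" appears in RUST_KEYWORDS),
        # while a non-keyword such as "flags" is returned unchanged.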
if name in RUST_KEYWORDS:
return name + "_"
return name
@staticmethod
def getDictionaryDependencies(dictionary):
deps = set()
if dictionary.parent:
deps.add(dictionary.parent)
for member in dictionary.members:
if member.type.isDictionary():
deps.add(member.type.unroll().inner)
return deps
class CGRegisterProxyHandlersMethod(CGAbstractMethod):
def __init__(self, descriptors):
docs = "Create the global vtables used by the generated DOM bindings to implement JS proxies."
CGAbstractMethod.__init__(self, None, 'RegisterProxyHandlers', 'void', [],
unsafe=True, pub=True, docs=docs)
self.descriptors = descriptors
def definition_body(self):
return CGList([
CGGeneric("PROXY_HANDLERS[Proxies::%s as usize] = Bindings::%s::DefineProxyHandler();"
% (desc.name, '::'.join([desc.name + 'Binding'] * 2)))
for desc in self.descriptors
], "\n")
class CGRegisterProxyHandlers(CGThing):
def __init__(self, config):
descriptors = config.getDescriptors(proxy=True)
length = len(descriptors)
self.root = CGList([
CGGeneric("pub static mut PROXY_HANDLERS: [*const libc::c_void; %d] = [0 as *const libc::c_void; %d];"
% (length, length)),
CGRegisterProxyHandlersMethod(descriptors),
], "\n")
def define(self):
return self.root.define()
class CGBindingRoot(CGThing):
"""
    Root codegen class for binding generation. Instantiate the class, and call
    define() to generate the Rust binding code for a WebIDL file.
"""
def __init__(self, config, prefix, webIDLFile):
descriptors = config.getDescriptors(webIDLFile=webIDLFile,
hasInterfaceObject=True)
# We also want descriptors that have an interface prototype object
# (isCallback=False), but we don't want to include a second copy
# of descriptors that we also matched in the previous line
# (hence hasInterfaceObject=False).
descriptors.extend(config.getDescriptors(webIDLFile=webIDLFile,
hasInterfaceObject=False,
isCallback=False,
register=True))
dictionaries = config.getDictionaries(webIDLFile=webIDLFile)
mainCallbacks = config.getCallbacks(webIDLFile=webIDLFile)
callbackDescriptors = config.getDescriptors(webIDLFile=webIDLFile,
isCallback=True)
enums = config.getEnums(webIDLFile)
typedefs = config.getTypedefs(webIDLFile)
if not (descriptors or dictionaries or mainCallbacks or callbackDescriptors or enums):
self.root = None
return
# Do codegen for all the enums.
cgthings = [CGEnum(e) for e in enums]
        # Do codegen for all the typedefs
for t in typedefs:
typeName = getRetvalDeclarationForType(t.innerType, config.getDescriptorProvider())
substs = {
"name": t.identifier.name,
"type": typeName.define(),
}
if t.innerType.isUnion() and not t.innerType.nullable():
# Allow using the typedef's name for accessing variants.
template = "pub use self::%(type)s as %(name)s;"
else:
template = "pub type %(name)s = %(type)s;"
cgthings.append(CGGeneric(template % substs))
# Do codegen for all the dictionaries.
cgthings.extend([CGDictionary(d, config.getDescriptorProvider())
for d in dictionaries])
# Do codegen for all the callbacks.
cgthings.extend(CGList([CGCallbackFunction(c, config.getDescriptorProvider()),
CGCallbackFunctionImpl(c)], "\n")
for c in mainCallbacks)
# Do codegen for all the descriptors
cgthings.extend([CGDescriptor(x, config, len(descriptors) == 1) for x in descriptors])
# Do codegen for all the callback interfaces.
cgthings.extend(CGList([CGCallbackInterface(x),
CGCallbackFunctionImpl(x.interface)], "\n")
for x in callbackDescriptors)
# And make sure we have the right number of newlines at the end
curr = CGWrapper(CGList(cgthings, "\n\n"), post="\n\n")
# Add imports
curr = generate_imports(config, curr, callbackDescriptors, mainCallbacks,
dictionaries, enums)
# Add the auto-generated comment.
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
# Store the final result.
self.root = curr
def define(self):
if not self.root:
return None
return stripTrailingWhitespace(self.root.define())
def type_needs_tracing(t):
assert isinstance(t, IDLObject), (t, type(t))
if t.isType():
if isinstance(t, IDLWrapperType):
return type_needs_tracing(t.inner)
if t.nullable():
return type_needs_tracing(t.inner)
if t.isAny():
return True
if t.isObject():
return True
if t.isSequence():
return type_needs_tracing(t.inner)
if t.isUnion():
return any(type_needs_tracing(member) for member in t.flatMemberTypes)
return False
if t.isDictionary():
if t.parent and type_needs_tracing(t.parent):
return True
if any(type_needs_tracing(member.type) for member in t.members):
return True
return False
if t.isInterface():
return False
if t.isEnum():
return False
assert False, (t, type(t))
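# Summarising the cases above: `any` and `object` always need tracing;
# nullable, sequence and wrapper types need tracing iff their inner type does;
# a dictionary needs tracing iff its parent or any member type does; plain
# interface and enum types never do.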
def argument_type(descriptorProvider, ty, optional=False, defaultValue=None, variadic=False):
info = getJSToNativeConversionInfo(
ty, descriptorProvider, isArgument=True)
declType = info.declType
if variadic:
if ty.isGeckoInterface():
declType = CGWrapper(declType, pre="&[", post="]")
else:
declType = CGWrapper(declType, pre="Vec<", post=">")
elif optional and not defaultValue:
declType = CGWrapper(declType, pre="Option<", post=">")
if ty.isDictionary() and not type_needs_tracing(ty):
declType = CGWrapper(declType, pre="&")
return declType.define()
def method_arguments(descriptorProvider, returnType, arguments, passJSBits=True, trailing=None):
if needCx(returnType, arguments, passJSBits):
yield "cx", "*mut JSContext"
for argument in arguments:
ty = argument_type(descriptorProvider, argument.type, argument.optional,
argument.defaultValue, argument.variadic)
yield CGDictionary.makeMemberName(argument.identifier.name), ty
if trailing:
yield trailing
def return_type(descriptorProvider, rettype, infallible):
result = getRetvalDeclarationForType(rettype, descriptorProvider)
if not infallible:
result = CGWrapper(result, pre="Fallible<", post=">")
return result.define()
class CGNativeMember(ClassMethod):
def __init__(self, descriptorProvider, member, name, signature, extendedAttrs,
breakAfter=True, passJSBitsAsNeeded=True, visibility="public"):
"""
If passJSBitsAsNeeded is false, we don't automatically pass in a
JSContext* or a JSObject* based on the return and argument types.
"""
self.descriptorProvider = descriptorProvider
self.member = member
self.extendedAttrs = extendedAttrs
self.passJSBitsAsNeeded = passJSBitsAsNeeded
breakAfterSelf = "\n" if breakAfter else ""
ClassMethod.__init__(self, name,
self.getReturnType(signature[0]),
self.getArgs(signature[0], signature[1]),
static=member.isStatic(),
# Mark our getters, which are attrs that
# have a non-void return type, as const.
const=(not member.isStatic() and member.isAttr() and
not signature[0].isVoid()),
breakAfterSelf=breakAfterSelf,
visibility=visibility)
def getReturnType(self, type):
infallible = 'infallible' in self.extendedAttrs
typeDecl = return_type(self.descriptorProvider, type, infallible)
return typeDecl
def getArgs(self, returnType, argList):
return [Argument(arg[1], arg[0]) for arg in method_arguments(self.descriptorProvider,
returnType,
argList,
self.passJSBitsAsNeeded)]
class CGCallback(CGClass):
def __init__(self, idlObject, descriptorProvider, baseName, methods,
getters=[], setters=[]):
self.baseName = baseName
self._deps = idlObject.getDeps()
name = idlObject.identifier.name
# For our public methods that needThisHandling we want most of the
# same args and the same return type as what CallbackMember
# generates. So we want to take advantage of all its
# CGNativeMember infrastructure, but that infrastructure can't deal
# with templates and most especially template arguments. So just
# cheat and have CallbackMember compute all those things for us.
realMethods = []
for method in methods:
if not method.needThisHandling:
realMethods.append(method)
else:
realMethods.extend(self.getMethodImpls(method))
CGClass.__init__(self, name,
bases=[ClassBase(baseName)],
constructors=self.getConstructors(),
methods=realMethods + getters + setters,
decorators="#[derive(JSTraceable, PartialEq)]\n#[allow_unrooted_interior]")
def getConstructors(self):
return [ClassConstructor(
[Argument("*mut JSContext", "aCx"), Argument("*mut JSObject", "aCallback")],
bodyInHeader=True,
visibility="pub",
explicit=False,
baseConstructors=[
"%s::new()" % self.baseName
])]
def getMethodImpls(self, method):
assert method.needThisHandling
args = list(method.args)
# Strip out the JSContext*/JSObject* args
# that got added.
assert args[0].name == "cx" and args[0].argType == "*mut JSContext"
assert args[1].name == "aThisObj" and args[1].argType == "HandleObject"
args = args[2:]
# Record the names of all the arguments, so we can use them when we call
# the private method.
argnames = [arg.name for arg in args]
argnamesWithThis = ["s.get_context()", "thisObjJS.handle()"] + argnames
argnamesWithoutThis = ["s.get_context()", "thisObjJS.handle()"] + argnames
# Now that we've recorded the argnames for our call to our private
# method, insert our optional argument for deciding whether the
# CallSetup should re-throw exceptions on aRv.
args.append(Argument("ExceptionHandling", "aExceptionHandling",
"ReportExceptions"))
# And now insert our template argument.
argsWithoutThis = list(args)
args.insert(0, Argument("&T", "thisObj"))
# And the self argument
method.args.insert(0, Argument(None, "&self"))
args.insert(0, Argument(None, "&self"))
argsWithoutThis.insert(0, Argument(None, "&self"))
setupCall = "let s = CallSetup::new(self, aExceptionHandling);\n"
bodyWithThis = string.Template(
setupCall +
"rooted!(in(s.get_context()) let mut thisObjJS = ptr::null_mut());\n"
"wrap_call_this_object(s.get_context(), thisObj, thisObjJS.handle_mut());\n"
"if thisObjJS.is_null() {\n"
" return Err(JSFailed);\n"
"}\n"
"return ${methodName}(${callArgs});").substitute({
"callArgs": ", ".join(argnamesWithThis),
"methodName": 'self.' + method.name,
})
bodyWithoutThis = string.Template(
setupCall +
"rooted!(in(s.get_context()) let thisObjJS = ptr::null_mut());\n"
"return ${methodName}(${callArgs});").substitute({
"callArgs": ", ".join(argnamesWithoutThis),
"methodName": 'self.' + method.name,
})
return [ClassMethod(method.name + '_', method.returnType, args,
bodyInHeader=True,
templateArgs=["T: DomObject"],
body=bodyWithThis,
visibility='pub'),
ClassMethod(method.name + '__', method.returnType, argsWithoutThis,
bodyInHeader=True,
body=bodyWithoutThis,
visibility='pub'),
method]
def deps(self):
return self._deps
# We're always fallible
def callbackGetterName(attr, descriptor):
return "Get" + MakeNativeName(
descriptor.binaryNameFor(attr.identifier.name))
def callbackSetterName(attr, descriptor):
return "Set" + MakeNativeName(
descriptor.binaryNameFor(attr.identifier.name))
class CGCallbackFunction(CGCallback):
def __init__(self, callback, descriptorProvider):
CGCallback.__init__(self, callback, descriptorProvider,
"CallbackFunction",
methods=[CallCallback(callback, descriptorProvider)])
def getConstructors(self):
return CGCallback.getConstructors(self)
class CGCallbackFunctionImpl(CGGeneric):
def __init__(self, callback):
impl = string.Template("""\
impl CallbackContainer for ${type} {
unsafe fn new(cx: *mut JSContext, callback: *mut JSObject) -> Rc<${type}> {
${type}::new(cx, callback)
}
fn callback_holder(&self) -> &CallbackObject {
self.parent.callback_holder()
}
}
impl ToJSValConvertible for ${type} {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
self.callback().to_jsval(cx, rval);
}
}\
""").substitute({"type": callback.identifier.name})
CGGeneric.__init__(self, impl)
class CGCallbackInterface(CGCallback):
def __init__(self, descriptor):
iface = descriptor.interface
attrs = [m for m in iface.members if m.isAttr() and not m.isStatic()]
getters = [CallbackGetter(a, descriptor) for a in attrs]
setters = [CallbackSetter(a, descriptor) for a in attrs
if not a.readonly]
methods = [m for m in iface.members
if m.isMethod() and not m.isStatic() and not m.isIdentifierLess()]
methods = [CallbackOperation(m, sig, descriptor) for m in methods
for sig in m.signatures()]
assert not iface.isJSImplemented() or not iface.ctor()
CGCallback.__init__(self, iface, descriptor, "CallbackInterface",
methods, getters=getters, setters=setters)
class FakeMember():
def __init__(self):
self.treatNullAs = "Default"
def isStatic(self):
return False
def isAttr(self):
return False
def isMethod(self):
return False
def getExtendedAttribute(self, name):
return None
class CallbackMember(CGNativeMember):
def __init__(self, sig, name, descriptorProvider, needThisHandling):
"""
needThisHandling is True if we need to be able to accept a specified
thisObj, False otherwise.
"""
self.retvalType = sig[0]
self.originalSig = sig
args = sig[1]
self.argCount = len(args)
if self.argCount > 0:
# Check for variadic arguments
lastArg = args[self.argCount - 1]
if lastArg.variadic:
self.argCountStr = (
"(%d - 1) + %s.len()" % (self.argCount,
lastArg.identifier.name))
else:
self.argCountStr = "%d" % self.argCount
self.needThisHandling = needThisHandling
# If needThisHandling, we generate ourselves as private and the caller
# will handle generating public versions that handle the "this" stuff.
visibility = "priv" if needThisHandling else "pub"
# We don't care, for callback codegen, whether our original member was
# a method or attribute or whatnot. Just always pass FakeMember()
# here.
CGNativeMember.__init__(self, descriptorProvider, FakeMember(),
name, (self.retvalType, args),
extendedAttrs={},
passJSBitsAsNeeded=False,
visibility=visibility)
# We have to do all the generation of our body now, because
# the caller relies on us throwing if we can't manage it.
self.exceptionCode = "return Err(JSFailed);"
self.body = self.getImpl()
def getImpl(self):
replacements = {
"declRval": self.getRvalDecl(),
"returnResult": self.getResultConversion(),
"convertArgs": self.getArgConversions(),
"doCall": self.getCall(),
"setupCall": self.getCallSetup(),
}
if self.argCount > 0:
replacements["argCount"] = self.argCountStr
replacements["argvDecl"] = string.Template(
"rooted_vec!(let mut argv);\n"
"argv.extend((0..${argCount}).map(|_| Heap::new(UndefinedValue())));\n"
).substitute(replacements)
else:
# Avoid weird 0-sized arrays
replacements["argvDecl"] = ""
# Newlines and semicolons are in the values
pre = string.Template(
"${setupCall}"
"${declRval}"
"${argvDecl}").substitute(replacements)
body = string.Template(
"${convertArgs}"
"${doCall}"
"${returnResult}").substitute(replacements)
return CGWrapper(CGIndenter(CGList([
CGGeneric(pre),
CGGeneric(body),
], "\n"), 4), pre="unsafe {\n", post="\n}").define()
def getResultConversion(self):
replacements = {
"val": "rval.handle()",
}
info = getJSToNativeConversionInfo(
self.retvalType,
self.descriptorProvider,
exceptionCode=self.exceptionCode,
isCallbackReturnValue="Callback",
# XXXbz we should try to do better here
sourceDescription="return value")
template = info.template
declType = info.declType
convertType = instantiateJSToNativeConversionTemplate(
template, replacements, declType, "rvalDecl")
if self.retvalType is None or self.retvalType.isVoid():
retval = "()"
elif self.retvalType.isAny():
retval = "rvalDecl.get()"
else:
retval = "rvalDecl"
return "%s\nOk(%s)\n" % (convertType.define(), retval)
def getArgConversions(self):
# Just reget the arglist from self.originalSig, because our superclasses
        # just have way too many members they like to clobber, so I can't find a
# safe member name to store it in.
argConversions = [self.getArgConversion(i, arg) for (i, arg)
in enumerate(self.originalSig[1])]
# Do them back to front, so our argc modifications will work
# correctly, because we examine trailing arguments first.
argConversions.reverse()
argConversions = [CGGeneric(c) for c in argConversions]
if self.argCount > 0:
argConversions.insert(0, self.getArgcDecl())
# And slap them together.
return CGList(argConversions, "\n\n").define() + "\n\n"
def getArgConversion(self, i, arg):
argval = arg.identifier.name
if arg.variadic:
argval = argval + "[idx].get()"
jsvalIndex = "%d + idx" % i
else:
jsvalIndex = "%d" % i
if arg.optional and not arg.defaultValue:
argval += ".clone().unwrap()"
conversion = wrapForType(
"argv_root.handle_mut()", result=argval,
successCode="argv[%s] = Heap::new(argv_root.get());" % jsvalIndex,
pre="rooted!(in(cx) let mut argv_root = UndefinedValue());")
if arg.variadic:
conversion = string.Template(
"for idx in 0..${arg}.len() {\n" +
CGIndenter(CGGeneric(conversion)).define() + "\n"
"}"
).substitute({"arg": arg.identifier.name})
elif arg.optional and not arg.defaultValue:
conversion = (
CGIfWrapper("%s.is_some()" % arg.identifier.name,
CGGeneric(conversion)).define() +
" else if argc == %d {\n"
" // This is our current trailing argument; reduce argc\n"
" argc -= 1;\n"
"} else {\n"
" argv[%d] = Heap::new(UndefinedValue());\n"
"}" % (i + 1, i))
return conversion
def getArgs(self, returnType, argList):
args = CGNativeMember.getArgs(self, returnType, argList)
if not self.needThisHandling:
# Since we don't need this handling, we're the actual method that
# will be called, so we need an aRethrowExceptions argument.
args.append(Argument("ExceptionHandling", "aExceptionHandling",
"ReportExceptions"))
return args
# We want to allow the caller to pass in a "this" object, as
# well as a JSContext.
return [Argument("*mut JSContext", "cx"),
Argument("HandleObject", "aThisObj")] + args
def getCallSetup(self):
if self.needThisHandling:
# It's been done for us already
return ""
return (
"CallSetup s(CallbackPreserveColor(), aRv, aExceptionHandling);\n"
"JSContext* cx = s.get_context();\n"
"if (!cx) {\n"
" return Err(JSFailed);\n"
"}\n")
def getArgcDecl(self):
if self.argCount <= 1:
return CGGeneric("let argc = %s;" % self.argCountStr)
return CGGeneric("let mut argc = %s;" % self.argCountStr)
@staticmethod
def ensureASCIIName(idlObject):
type = "attribute" if idlObject.isAttr() else "operation"
if re.match("[^\x20-\x7E]", idlObject.identifier.name):
raise SyntaxError('Callback %s name "%s" contains non-ASCII '
"characters. We can't handle that. %s" %
(type, idlObject.identifier.name,
idlObject.location))
if re.match('"', idlObject.identifier.name):
raise SyntaxError("Callback %s name '%s' contains "
"double-quote character. We can't handle "
"that. %s" %
(type, idlObject.identifier.name,
idlObject.location))
class CallbackMethod(CallbackMember):
def __init__(self, sig, name, descriptorProvider, needThisHandling):
CallbackMember.__init__(self, sig, name, descriptorProvider,
needThisHandling)
def getRvalDecl(self):
return "rooted!(in(cx) let mut rval = UndefinedValue());\n"
def getCall(self):
replacements = {
"thisObj": self.getThisObj(),
"getCallable": self.getCallableDecl(),
"callGuard": self.getCallGuard(),
}
if self.argCount > 0:
replacements["argv"] = "argv.as_ptr() as *const JSVal"
replacements["argc"] = "argc"
else:
replacements["argv"] = "ptr::null_mut()"
replacements["argc"] = "0"
return string.Template(
"${getCallable}"
"rooted!(in(cx) let rootedThis = ${thisObj});\n"
"let ok = ${callGuard}JS_CallFunctionValue(\n"
" cx, rootedThis.handle(), callable.handle(),\n"
" &HandleValueArray {\n"
" length_: ${argc} as ::libc::size_t,\n"
" elements_: ${argv}\n"
" }, rval.handle_mut());\n"
"maybe_resume_unwind();\n"
"if !ok {\n"
" return Err(JSFailed);\n"
"}\n").substitute(replacements)
class CallCallback(CallbackMethod):
def __init__(self, callback, descriptorProvider):
self.callback = callback
CallbackMethod.__init__(self, callback.signatures()[0], "Call",
descriptorProvider, needThisHandling=True)
def getThisObj(self):
return "aThisObj.get()"
def getCallableDecl(self):
return "rooted!(in(cx) let callable = ObjectValue(self.callback()));\n"
def getCallGuard(self):
if self.callback._treatNonObjectAsNull:
return "!IsCallable(self.callback()) || "
return ""
class CallbackOperationBase(CallbackMethod):
"""
Common class for implementing various callback operations.
"""
def __init__(self, signature, jsName, nativeName, descriptor, singleOperation):
self.singleOperation = singleOperation
self.methodName = jsName
CallbackMethod.__init__(self, signature, nativeName, descriptor, singleOperation)
def getThisObj(self):
if not self.singleOperation:
return "self.callback()"
# This relies on getCallableDecl declaring a boolean
# isCallable in the case when we're a single-operation
# interface.
return "if isCallable { aThisObj.get() } else { self.callback() }"
def getCallableDecl(self):
replacements = {
"methodName": self.methodName
}
getCallableFromProp = string.Template(
'try!(self.parent.get_callable_property(cx, "${methodName}"))'
).substitute(replacements)
if not self.singleOperation:
return 'rooted!(in(cx) let callable =\n' + getCallableFromProp + ');\n'
return (
'let isCallable = IsCallable(self.callback());\n'
'rooted!(in(cx) let callable =\n' +
CGIndenter(
CGIfElseWrapper('isCallable',
CGGeneric('ObjectValue(self.callback())'),
CGGeneric(getCallableFromProp))).define() + ');\n')
def getCallGuard(self):
return ""
class CallbackOperation(CallbackOperationBase):
"""
Codegen actual WebIDL operations on callback interfaces.
"""
def __init__(self, method, signature, descriptor):
self.ensureASCIIName(method)
jsName = method.identifier.name
CallbackOperationBase.__init__(self, signature,
jsName,
MakeNativeName(descriptor.binaryNameFor(jsName)),
descriptor, descriptor.interface.isSingleOperationInterface())
class CallbackGetter(CallbackMember):
def __init__(self, attr, descriptor):
self.ensureASCIIName(attr)
self.attrName = attr.identifier.name
CallbackMember.__init__(self,
(attr.type, []),
callbackGetterName(attr),
descriptor,
needThisHandling=False)
def getRvalDecl(self):
return "JS::Rooted<JS::Value> rval(cx, JS::UndefinedValue());\n"
def getCall(self):
replacements = {
"attrName": self.attrName
}
return string.Template(
'if (!JS_GetProperty(cx, mCallback, "${attrName}", &rval)) {\n'
' return Err(JSFailed);\n'
'}\n').substitute(replacements)
class CallbackSetter(CallbackMember):
def __init__(self, attr, descriptor):
self.ensureASCIIName(attr)
self.attrName = attr.identifier.name
CallbackMember.__init__(self,
(BuiltinTypes[IDLBuiltinType.Types.void],
[FakeArgument(attr.type, attr)]),
callbackSetterName(attr),
descriptor,
needThisHandling=False)
def getRvalDecl(self):
# We don't need an rval
return ""
def getCall(self):
replacements = {
"attrName": self.attrName,
"argv": "argv.handleAt(0)",
}
return string.Template(
'MOZ_ASSERT(argv.length() == 1);\n'
'if (!JS_SetProperty(cx, mCallback, "${attrName}", ${argv})) {\n'
' return Err(JSFailed);\n'
'}\n').substitute(replacements)
def getArgcDecl(self):
return None
class CGIterableMethodGenerator(CGGeneric):
"""
Creates methods for iterable interfaces. Unwrapping/wrapping
will be taken care of by the usual method generation machinery in
CGMethodCall/CGPerSignatureCall. Functionality is filled in here instead of
using CGCallGenerator.
"""
def __init__(self, descriptor, iterable, methodName):
if methodName == "forEach":
CGGeneric.__init__(self, fill(
"""
if !IsCallable(arg0) {
throw_type_error(cx, "Argument 1 of ${ifaceName}.forEach is not callable.");
return false;
}
rooted!(in(cx) let arg0 = ObjectValue(arg0));
rooted!(in(cx) let mut call_arg1 = UndefinedValue());
rooted!(in(cx) let mut call_arg2 = UndefinedValue());
let mut call_args = vec![UndefinedValue(), UndefinedValue(), ObjectValue(*_obj)];
rooted!(in(cx) let mut ignoredReturnVal = UndefinedValue());
for i in 0..(*this).get_iterable_length() {
(*this).get_value_at_index(i).to_jsval(cx, call_arg1.handle_mut());
(*this).get_key_at_index(i).to_jsval(cx, call_arg2.handle_mut());
call_args[0] = call_arg1.handle().get();
call_args[1] = call_arg2.handle().get();
let call_args = HandleValueArray { length_: 3, elements_: call_args.as_ptr() };
if !Call(cx, arg1, arg0.handle(), &call_args,
ignoredReturnVal.handle_mut()) {
return false;
}
}
let result = ();
""",
ifaceName=descriptor.interface.identifier.name))
return
CGGeneric.__init__(self, fill(
"""
let result = ${iterClass}::new(&*this,
IteratorType::${itrMethod},
super::${ifaceName}IteratorBinding::Wrap);
""",
iterClass=iteratorNativeType(descriptor, True),
ifaceName=descriptor.interface.identifier.name,
itrMethod=methodName.title()))
def camel_to_upper_snake(s):
return "_".join(m.group(0).upper() for m in re.finditer("[A-Z][a-z]*", s))
def process_arg(expr, arg):
if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
if arg.type.nullable() or arg.type.isSequence() or arg.optional:
expr += ".r()"
else:
expr = "&" + expr
return expr
class GlobalGenRoots():
"""
Roots for global codegen.
To generate code, call the method associated with the target, and then
call the appropriate define/declare method.
"""
@staticmethod
def InterfaceObjectMap(config):
mods = [
"dom::bindings::codegen",
"js::jsapi::{HandleObject, JSContext}",
"phf",
]
imports = CGList([CGGeneric("use %s;" % mod) for mod in mods], "\n")
global_descriptors = config.getDescriptors(isGlobal=True)
flags = [("EMPTY", 0)]
flags.extend(
(camel_to_upper_snake(d.name), 2 ** idx)
for (idx, d) in enumerate(global_descriptors)
)
global_flags = CGWrapper(CGIndenter(CGList([
CGGeneric("const %s = %#x," % args)
for args in flags
], "\n")), pre="pub flags Globals: u8 {\n", post="\n}")
globals_ = CGWrapper(CGIndenter(global_flags), pre="bitflags! {\n", post="\n}")
phf = CGGeneric("include!(concat!(env!(\"OUT_DIR\"), \"/InterfaceObjectMapPhf.rs\"));")
return CGList([
CGGeneric(AUTOGENERATED_WARNING_COMMENT),
CGList([imports, globals_, phf], "\n\n")
])
@staticmethod
def InterfaceObjectMapData(config):
pairs = []
for d in config.getDescriptors(hasInterfaceObject=True, isInline=False):
binding = toBindingNamespace(d.name)
pairs.append((d.name, binding, binding))
for ctor in d.interface.namedConstructors:
pairs.append((ctor.identifier.name, binding, binding))
pairs.sort(key=operator.itemgetter(0))
mappings = [
CGGeneric('"%s": "codegen::Bindings::%s::%s::DefineDOMInterface as unsafe fn(_, _)"' % pair)
for pair in pairs
]
return CGWrapper(
CGList(mappings, ",\n"),
pre="{\n",
post="\n}\n")
@staticmethod
def PrototypeList(config):
# Prototype ID enum.
interfaces = config.getDescriptors(isCallback=False, isNamespace=False)
protos = [d.name for d in interfaces]
constructors = sorted([MakeNativeName(d.name)
for d in config.getDescriptors(hasInterfaceObject=True)
if d.shouldHaveGetConstructorObjectMethod()])
proxies = [d.name for d in config.getDescriptors(proxy=True)]
return CGList([
CGGeneric(AUTOGENERATED_WARNING_COMMENT),
CGGeneric("pub const PROTO_OR_IFACE_LENGTH: usize = %d;\n" % (len(protos) + len(constructors))),
CGGeneric("pub const MAX_PROTO_CHAIN_LENGTH: usize = %d;\n\n" % config.maxProtoChainLength),
CGNonNamespacedEnum('ID', protos, 0, deriving="PartialEq, Copy, Clone", repr="u16"),
CGNonNamespacedEnum('Constructor', constructors, len(protos),
deriving="PartialEq, Copy, Clone", repr="u16"),
CGWrapper(CGIndenter(CGList([CGGeneric('"' + name + '"') for name in protos],
",\n"),
indentLevel=4),
pre="static INTERFACES: [&'static str; %d] = [\n" % len(protos),
post="\n];\n\n"),
CGGeneric("pub fn proto_id_to_name(proto_id: u16) -> &'static str {\n"
" debug_assert!(proto_id < ID::Last as u16);\n"
" INTERFACES[proto_id as usize]\n"
"}\n\n"),
CGNonNamespacedEnum('Proxies', proxies, 0, deriving="PartialEq, Copy, Clone"),
])
@staticmethod
def RegisterBindings(config):
# TODO - Generate the methods we want
code = CGList([
CGRegisterProxyHandlers(config),
], "\n")
return CGImports(code, descriptors=[], callbacks=[], dictionaries=[], enums=[], imports=[
'dom::bindings::codegen::Bindings',
'dom::bindings::codegen::PrototypeList::Proxies',
'libc',
], config=config, ignored_warnings=[])
@staticmethod
def InterfaceTypes(config):
descriptors = sorted([MakeNativeName(d.name)
for d in config.getDescriptors(register=True,
isCallback=False,
isIteratorInterface=False)])
curr = CGList([CGGeneric("pub use dom::%s::%s;\n" % (name.lower(),
MakeNativeName(name)))
for name in descriptors])
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def Bindings(config):
def leafModule(d):
return getModuleFromObject(d).split('::')[-1]
descriptors = config.getDescriptors(register=True, isIteratorInterface=False)
descriptors = (set(toBindingNamespace(d.name) for d in descriptors) |
set(leafModule(d) for d in config.callbacks) |
set(leafModule(d) for d in config.getDictionaries()))
curr = CGList([CGGeneric("pub mod %s;\n" % name) for name in sorted(descriptors)])
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def InheritTypes(config):
descriptors = config.getDescriptors(register=True, isCallback=False)
imports = [CGGeneric("use dom::types::*;\n"),
CGGeneric("use dom::bindings::conversions::{DerivedFrom, get_dom_class};\n"),
CGGeneric("use dom::bindings::inheritance::Castable;\n"),
CGGeneric("use dom::bindings::js::{JS, LayoutJS, Root};\n"),
CGGeneric("use dom::bindings::trace::JSTraceable;\n"),
CGGeneric("use dom::bindings::reflector::DomObject;\n"),
CGGeneric("use js::jsapi::JSTracer;\n\n"),
CGGeneric("use std::mem;\n\n")]
allprotos = []
topTypes = []
hierarchy = defaultdict(list)
for descriptor in descriptors:
name = descriptor.name
chain = descriptor.prototypeChain
upcast = descriptor.hasDescendants()
downcast = len(chain) != 1
if upcast and not downcast:
topTypes.append(name)
if not upcast:
                # No other interface will implement DerivedFrom<Foo> for this Foo, so avoid
# implementing it for itself.
chain = chain[:-1]
# Implement `DerivedFrom<Bar>` for `Foo`, for all `Bar` that `Foo` inherits from.
if chain:
allprotos.append(CGGeneric("impl Castable for %s {}\n" % name))
for baseName in chain:
allprotos.append(CGGeneric("impl DerivedFrom<%s> for %s {}\n" % (baseName, name)))
if chain:
allprotos.append(CGGeneric("\n"))
if downcast:
hierarchy[descriptor.interface.parent.identifier.name].append(name)
typeIdCode = []
topTypeVariants = [
("ID used by abstract interfaces.", "pub abstract_: ()"),
("ID used by interfaces that are not castable.", "pub alone: ()"),
]
topTypeVariants += [
("ID used by interfaces that derive from %s." % typeName,
"pub %s: %sTypeId" % (typeName.lower(), typeName))
for typeName in topTypes
]
topTypeVariantsAsStrings = [CGGeneric("/// %s\n%s," % variant) for variant in topTypeVariants]
typeIdCode.append(CGWrapper(CGIndenter(CGList(topTypeVariantsAsStrings, "\n"), 4),
pre="#[derive(Copy)]\npub union TopTypeId {\n",
post="\n}\n\n"))
typeIdCode.append(CGGeneric("""\
impl Clone for TopTypeId {
fn clone(&self) -> Self { *self }
}
"""))
def type_id_variant(name):
            # If `name` is present in the hierarchy's keys, that means some other interfaces
# derive from it and this enum variant should have an argument with its own
# TypeId enum.
return "%s(%sTypeId)" % (name, name) if name in hierarchy else name
for base, derived in hierarchy.iteritems():
variants = []
if config.getDescriptor(base).concrete:
variants.append(CGGeneric(base))
variants += [CGGeneric(type_id_variant(derivedName)) for derivedName in derived]
derives = "Clone, Copy, Debug, PartialEq"
typeIdCode.append(CGWrapper(CGIndenter(CGList(variants, ",\n"), 4),
pre="#[derive(%s)]\npub enum %sTypeId {\n" % (derives, base),
post="\n}\n\n"))
if base in topTypes:
typeIdCode.append(CGGeneric("""\
impl %(base)s {
pub fn type_id(&self) -> &'static %(base)sTypeId {
unsafe {
&get_dom_class(self.reflector().get_jsobject().get())
.unwrap()
.type_id
.%(field)s
}
}
}
""" % {'base': base, 'field': base.lower()}))
curr = CGList(imports + typeIdCode + allprotos)
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def UnionTypes(config):
curr = UnionTypes(config.getDescriptors(),
config.getDictionaries(),
config.getCallbacks(),
config.typedefs,
config)
# Add the auto-generated comment.
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
# Done.
return curr
@staticmethod
def SupportedDomApis(config):
descriptors = config.getDescriptors(isExposedConditionally=False)
base_path = os.path.join('dom', 'bindings', 'codegen')
with open(os.path.join(base_path, 'apis.html.template')) as f:
base_template = f.read()
with open(os.path.join(base_path, 'api.html.template')) as f:
api_template = f.read()
with open(os.path.join(base_path, 'property.html.template')) as f:
property_template = f.read()
with open(os.path.join(base_path, 'interface.html.template')) as f:
interface_template = f.read()
apis = []
interfaces = []
for descriptor in descriptors:
props = []
for m in descriptor.interface.members:
if PropertyDefiner.getStringAttr(m, 'Pref') or \
PropertyDefiner.getStringAttr(m, 'Func') or \
(m.isMethod() and m.isIdentifierLess()):
continue
display = m.identifier.name + ('()' if m.isMethod() else '')
props += [property_template.replace('${name}', display)]
name = descriptor.interface.identifier.name
apis += [(api_template.replace('${interface}', name)
.replace('${properties}', '\n'.join(props)))]
interfaces += [interface_template.replace('${interface}', name)]
return CGGeneric((base_template.replace('${apis}', '\n'.join(apis))
.replace('${interfaces}', '\n'.join(interfaces))))
| mpl-2.0 | 6,758,460,289,241,190,000 | 38.911905 | 120 | 0.569563 | false |
john-tornblom/llvm-p86 | llvm_p86/compiler.py | 1 | 11818 | # encoding: utf-8
# Copyright (C) 2013 John Törnblom
#
# This file is part of LLVM-P86.
#
# LLVM-P86 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LLVM-P86 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LLVM-P86. If not, see <http://www.gnu.org/licenses/>.
'''
Pascal-86 compiler front end for LLVM with mutation capabilities.
'''
import datetime
import sys
import hashlib
import os
import shutil
from . import pre
from . import tokens
from . import grammar
from . import ast
from . import mutation
from . import typesys
from . import sourcegen
from . import log
try:
from llvm import ee
from llvm import passes
from llvm import target
import ctypes
from . import codegen
from . import fn
target.initialize_all()
except ImportError:
print('Cannot find llvmpy, code generation will not function')
pass
class PrintVisitor(ast.NodeVisitor):
def __init__(self):
self.level = 0
def visit_VarDeclListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_StatementListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_ConstListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_FunctionListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_ParameterListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_ArgumentListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_IdentifierListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_TypeDefListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_RecordSectionListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_CaseListElementListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_CaseConstListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_LabelListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_SetMemberListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def visit_VariantListNode(self, node, arg=None):
ast.NodeVisitor.default_visit(self, node)
def default_visit(self, node, arg=None):
self.print_node(node)
self.level += 1
for c in filter(None, node.children):
if isinstance(c, ast.Node):
c.accept(self)
else:
self.print_string(c)
self.level -= 1
def print_string(self, node, arg=None):
pos = '?'
prefix = pos + ' \t'
spacer = ''.join([' ' * (2 * self.level)])
print((prefix + spacer + str(node)))
def print_node(self, node, arg=None):
pos = node.position
if not pos:
pos = '?'
ty = node.type
if not ty:
ty = '(?)'
prefix = str(pos) + ' \t' + str(ty.__class__.__name__[0:-4]) + ' \t'
# prefix = str(pos) + ' \t' + str(ty) + ' \t'
spacer = ''.join([' ' * (2 * self.level)])
print((prefix + spacer + str(node)))
class Compiler(object):
def __init__(self, filename):
self.ctx = None
self.filename = filename
basename = os.path.basename(filename)
filename = os.path.splitext(basename)
self.name = filename[0]
self.chars = None
self.hash = None
self.mutants = []
self.defines = dict()
self.includes = ['.']
def define(self, d):
d = d.split('=')
if len(d) == 1:
key = d[0]
val = True
elif len(d) == 2:
try:
key = d[0]
val = eval(d[1])
except:
log.w("compiler", "Invalid define syntax '%s=%s'" %
(d[0], d[1]))
return
else:
log.w("compiler", "Invalid define syntax")
return
        key = key.lower()
self.defines[key] = val
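        # Example (sketch): define('DEBUG') stores {'debug': True}, while
        # define('LEVEL=2') eval()s the right-hand side and stores {'level': 2}.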
def include(self, path):
if os.path.isdir(path):
self.includes.append(path)
else:
log.w("compiler", "Invalid include '%s'", path)
def analyze(self):
log.d("compiler", "Parsing source code")
pre.pre_defines = self.defines
pre.pre_includes = self.includes
textRoot = pre.process(self.filename)
hash_code = hashlib.md5()
hash_code.update(str(textRoot).encode())
self.hash = hash_code.hexdigest()
self.mutants = []
scanner = tokens.scanner()
parser = grammar.parser()
# To preserve positional information between files,
# use a custom token generator
def token_generator():
for node in textRoot.nodes:
#print type(node), node.pos
if not isinstance(node, pre.TextNode):
continue
scanner.lineno = lineno = 0
scanner.lexpos = lexpos = 0
scanner.input(node.value)
while True:
t = scanner.token()
if t is None:
break
lineno = scanner.lineno
lexpos = scanner.lexpos
scanner.lineno = (node.pos[0],
scanner.lineno + node.pos[1])
scanner.lexpos = node.pos[2] + t.lexpos
t.lineno = (node.pos[0], t.lineno + node.pos[1])
t.lexpos = t.lexpos + node.pos[2]
t.endlexpos = t.endlexpos + node.pos[3]
yield t
scanner.lineno = lineno
scanner.lexpos = lexpos
gen = token_generator()
def next_token():
try:
t = next(gen)
return t
except:
return None
self.ast = parser.parse(lexer=scanner, tokenfunc=next_token,
tracking=True)
if not self.ast:
sys.exit(1)
v = typesys.TypeSetVisitor()
self.ast.accept(v)
v = typesys.CallByRefVisitor()
self.ast.accept(v)
def mutate(self, mop, rep_path):
if mop == 'sc':
mutator = mutation.SCMutationVisitor(self.filename, self.hash)
elif mop == 'dcc':
mutator = mutation.DCCMutationVisitor(self.filename, self.hash)
elif mop == 'ror':
mutator = mutation.RorMutationVisitor(self.filename, self.hash)
elif mop == 'cor':
mutator = mutation.CorMutationVisitor(self.filename, self.hash)
elif mop == 'aor':
mutator = mutation.AorMutationVisitor(self.filename, self.hash)
elif mop == 'sdl':
mutator = mutation.SdlMutationVisitor(self.filename, self.hash)
else:
log.e("compiler", "Unknown mutation operator %s" % mop)
return
self.ast.accept(mutator)
self.mutants = mutator.report.ids()
log.i("compiler", "Generated %d mutants" % len(self.mutants))
if rep_path:
mutator.report.save(rep_path + "/" + self.name + ".json")
shutil.copy2(self.filename, rep_path + "/" + self.name + ".p")
def synthesize(self):
log.d("compiler", "Generating code")
v = codegen.CodegenVisitor(self.mutants)
self.ast.accept(v)
self.ctx = v.ctx
# verify fails with goto-statements, but compile and run just fine
# self.ctx.module.verify()
def optimize(self, level=0):
log.i("compiler", "Optimizing code at level %d" % level)
pm = passes.PassManager.new()
pmb = passes.PassManagerBuilder.new()
pmb.opt_level = level
pmb.populate(pm)
pm.run(self.ctx.module)
def execute(self, args=''):
tm = ee.TargetMachine.new(opt=0, cm=ee.CM_JITDEFAULT)
engine = ee.EngineBuilder.new(self.ctx.module).create(tm)
func = fn.f_module_constructor(self.ctx.module)
engine.run_function(func, [])
func = fn.f_main(self.ctx.module)
func = engine.get_pointer_to_function(func)
if len(args):
args = args.split(' ')
args.insert(0, self.filename)
else:
args = [self.filename]
args = [x.encode() for x in args]
ret_ct = ctypes.c_int
argv_ct = ctypes.ARRAY(ctypes.c_char_p, len(args))
argc_ct = ctypes.c_int
FUNC_TYPE = ctypes.CFUNCTYPE(ret_ct, *[argc_ct, argv_ct])
py_main = FUNC_TYPE(func)
argc = argc_ct(len(args))
argv = argv_ct(*args)
py_main(argc, argv)
def _open_file(self, path):
basedir = os.path.dirname(path)
if not basedir:
path = os.getcwd() + os.sep + path
basedir = os.path.dirname(path)
if not os.path.exists(basedir):
os.makedirs(basedir)
return open(path, 'wb')
def save_source_code(self, out):
v = sourcegen.SourceVisitor(self.filename)
src = self.ast.accept(v)
src = sourcegen.split_long_lines(src, 120)
src = "(* Generated by llvm-p86 from %s at %s *)\n%s" % (
self.filename, datetime.datetime.now().strftime("%c"), src)
if out == '-':
print(src)
else:
f = self._open_file(out)
f.write(src.encode())
f.close()
def save_ir(self, out, triple=''):
if self.ctx is None or self.ctx.module is None:
return
tm = ee.TargetMachine.new(triple)
self.ctx.module.target = str(tm.triple)
self.ctx.module.data_layout = str(tm.target_data)
s = "; Generated by llvm-p86 from %s at %s\n%s" % (
self.filename, datetime.datetime.now().strftime("%c"),
self.ctx.module)
if out == '-':
print(s)
else:
f = self._open_file(out)
f.write(s.encode())
f.close()
def save_bit_code(self, out, triple=''):
if self.ctx is None or self.ctx.module is None:
return
tm = ee.TargetMachine.new(triple)
self.ctx.module.target = str(tm.triple)
self.ctx.module.data_layout = str(tm.target_data)
bc = self.ctx.module.to_bitcode()
if out == '-':
os.write(sys.stdout.fileno(), bc)
else:
f = self._open_file(out)
f.write(bc)
f.close()
def save_obj_code(self, out, triple='', cpu='', attrs=''):
if self.ctx is None or self.ctx.module is None:
return
if not cpu: cpu = 'generic'
tm = ee.TargetMachine.new(triple, cpu, attrs, 0)
obj = tm.emit_object(self.ctx.module)
if out == '-':
os.write(sys.stdout.fileno(), obj)
else:
f = self._open_file(out)
f.write(obj)
f.close()
def print_tree(self):
if self.ast is not None:
log.d("compiler", "Printing syntax tree for %s" % self.filename)
self.ast.accept(PrintVisitor())
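# Illustrative end-to-end usage (sketch; the file name, mutation operator and
# report path are hypothetical):
#
#   c = Compiler('fib.p')
#   c.analyze()
#   c.mutate('ror', None)
#   c.synthesize()
#   c.execute()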
| gpl-3.0 | -1,454,283,402,078,824,400 | 28.105911 | 78 | 0.556148 | false |
weblyzard/weblyzard_api | src/python/weblyzard_api/model/__init__.py | 1 | 18549 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on May 14, 2018
.. codeauthor: Max Göbel <[email protected]>
'''
from __future__ import print_function
from __future__ import unicode_literals
from builtins import map
from builtins import str
from builtins import object
import json
import hashlib
import logging
from collections import namedtuple
from weblyzard_api.model.parsers.xml_2005 import XML2005
from weblyzard_api.model.parsers.xml_2013 import XML2013
from weblyzard_api.model.parsers.xml_deprecated import XMLDeprecated
LabeledDependency = namedtuple("LabeledDependency", "parent pos label")
logger = logging.getLogger(__name__)
class CharSpan(object):
DICT_MAPPING = {'@type': 'span_type',
'start': 'start',
'end': 'end'}
def __init__(self, span_type, start, end):
self.span_type = span_type
self.start = start
self.end = end
def to_dict(self):
return {k: getattr(self, v) for k, v in self.DICT_MAPPING.items()}
@classmethod
def from_dict(cls, dict_):
# mismatched_keys = {k: v for k, v in dict_.items() if
# k not in cls.DICT_MAPPING}
# if mismatched_keys:
# pass # debugging
kwargs = {cls.DICT_MAPPING.get(k, k): v for k, v in dict_.items()}
try:
return cls(**kwargs)
except TypeError as e:
raise e
def to_tuple(self):
return (self.start, self.end)
def __repr__(self, *args, **kwargs):
return json.dumps(self.to_dict())
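# Illustrative round trip (sketch): from_dict() accepts what to_dict() emits,
# e.g. CharSpan.from_dict({'@type': 'CharSpan', 'start': 0, 'end': 5})
# yields a CharSpan whose to_tuple() is (0, 5).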
class TokenCharSpan(CharSpan):
DICT_MAPPING = {'@type': 'span_type',
'start': 'start',
'end': 'end',
'pos': 'pos',
'dependency': 'dependency'}
DEFAULT_POS = 'XY'
def __init__(self, span_type, start, end, pos=None, dependency=None):
CharSpan.__init__(self, span_type, start, end)
if pos is None:
pos = self.DEFAULT_POS
self.pos = pos
self.dependency = dependency
# def to_dict(self):
# return {'@type': self.span_type,
# 'start': self.start,
# 'end': self.end,
# 'pos': self.pos,
# 'dependency': self.dependency}
def __repr__(self, *args, **kwargs):
return json.dumps(self.to_dict())
class SentenceCharSpan(CharSpan):
DICT_MAPPING = {'@type': 'span_type',
'start': 'start',
'end': 'end',
'md5sum': 'md5sum',
'semOrient': 'sem_orient',
'significance': 'significance',
'emotions': 'emotions',
'id': 'md5sum'}
def __init__(self, span_type, start, end, md5sum=None, sem_orient=0.0,
significance=0.0, emotions=None, multimodal_sentiment=None):
CharSpan.__init__(self, span_type, start, end)
self.md5sum = md5sum
self.sem_orient = sem_orient
self.significance = significance
self.emotions = emotions or {}
if not emotions and multimodal_sentiment:
logger.warning('Deprecated parameter `multimodal_sentiment` '
'use `emotions` instead!')
self.emotions = multimodal_sentiment
def __repr__(self, *args, **kwargs):
return json.dumps(self.to_dict())
class MultiplierCharSpan(CharSpan):
DICT_MAPPING = {'@type': 'span_type',
'start': 'start',
'end': 'end',
'value': 'value'}
def __init__(self, span_type, start, end, value=None):
super(MultiplierCharSpan, self).__init__(span_type=span_type,
start=start,
end=end)
self.value = value
class SentimentCharSpan(CharSpan):
DICT_MAPPING = {'@type': 'span_type',
'start': 'start',
'end': 'end',
'value': 'value',
'modality': 'modality'}
def __init__(self, span_type, start, end, value, modality='polarity'):
super(SentimentCharSpan, self).__init__(span_type=span_type,
start=start, end=end)
self.value = value
self.modality = modality
class LayoutCharSpan(CharSpan):
DICT_MAPPING = {'@type': 'span_type',
'start': 'start',
'end': 'end',
'layout': 'layout',
'title': 'title',
'level': 'level'}
def __init__(self, span_type, start, end, layout, title, level):
CharSpan.__init__(self, span_type=span_type, start=start, end=end)
self.layout = layout
self.title = title
self.level = level
class SpanFactory(object):
SPAN_TYPE_TO_CLASS = {
'CharSpan': CharSpan,
'TokenCharSpan': TokenCharSpan,
'SentimentCharSpan': SentimentCharSpan,
'MultiplierCharSpan': MultiplierCharSpan,
'SentenceCharSpan': SentenceCharSpan,
'LayoutCharSpan': LayoutCharSpan
}
@classmethod
def new_span(cls, span):
if isinstance(span, CharSpan):
return span
span_type = None
if '@type' in span:
span_type = span['@type']
elif 'span_type' in span:
span_type = span['span_type']
if span_type is not None and span_type in cls.SPAN_TYPE_TO_CLASS:
try:
return cls.SPAN_TYPE_TO_CLASS[span_type].from_dict(span)
except Exception as e:
logger.warning("Unable to process span %s. Error was %s",
span, e, exc_info=True)
raise e
result = CharSpan.from_dict(span)
return result
# if '@type' in span:
# span['span_type'] = span['@type']
# del span['@type']
# if span['span_type'] == 'SentenceCharSpan':
# try:
# assert all(
# [k in list(SentenceCharSpan.DICT_MAPPING.values()) + ['id']
# for k in span.keys()])
# except AssertionError:
# logger.warning("Unable to process SentenceCharSpan for input "
# "span %s. Traceback: ", span,
# exc_info=True)
# raise TypeError(
# 'Unexpected parameters for SentenceCharSpan: {}')
# return SentenceCharSpan(span_type='SentenceCharSpan',
# start=span['start'],
# end=span['end'],
# sem_orient=span.get('sem_orient', None),
# md5sum=span.get('md5sum', span.get('id')),
# significance=span.get('significance', None))
# elif span['span_type'] in cls.SPAN_TYPE_TO_CLASS:
# try:
# return cls.SPAN_TYPE_TO_CLASS[span['span_type']](**span)
# except Exception as e:
# logger.warning("Unable to process span %s. Error was %s",
# span, e, exc_info=True)
# raise e
# raise Exception('Invalid Span Type: {}'.format(span['span_type']))
class Annotation(object):
def __init__(self, annotation_type=None, start=None, end=None, key=None,
sentence=None, surfaceForm=None, md5sum=None, sem_orient=None,
preferredName=None, confidence=None):
self.annotation_type = annotation_type
self.surfaceForm = surfaceForm
self.start = start
self.end = end
self.key = key
self.sentence = sentence
self.md5sum = md5sum
self.sem_orient = sem_orient
self.preferredName = preferredName
self.confidence = confidence
class Sentence(object):
'''
The sentence class used for accessing single sentences.
.. note::
the class provides convenient properties for accessing pos tags and tokens:
* s.sentence: sentence text
* s.tokens : provides a list of tokens (e.g. ['A', 'new', 'day'])
* s.pos_tags: provides a list of pos tags (e.g. ['DET', 'CC', 'NN'])
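
    Illustrative example (values assumed for demonstration):

    >>> s = Sentence(value='A new day', pos='DET JJ NN', token='0,1 2,5 6,9')
    >>> s.sentence
    'A new day'
    >>> s.pos_tags
    ['DET', 'JJ', 'NN']
    >>> list(s.tokens)
    ['A', 'new', 'day']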
'''
    #: Maps the keys of the attributes to the corresponding key for the API JSON
API_MAPPINGS = {
1.0: {
'md5sum': 'id',
'value': 'value',
'pos': 'pos_list',
'sem_orient': 'polarity',
'token': 'tok_list',
'is_title': 'is_title',
'dependency': 'dep_tree',
'significance': 'significance'
}
}
# Delimiter between items (POS, TOKEN, DEPENDENCY)
ITEM_DELIMITER = ' '
# Delimiter for a single token
TOKEN_DELIMITER = ','
# Delimiter for a single dependency
DEPENDENCY_DELIMITER = ':'
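    # e.g. pos='PRP ADV NN', token='0,5 6,11', dependency='1:SUB -1:ROOT'
    # (illustrative values only; see the doctests below)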
def __init__(self, md5sum=None, pos=None, sem_orient=None,
significance=None,
token=None, value=None, is_title=False, dependency=None,
emotions=None):
if not md5sum and value:
try:
m = hashlib.md5()
m.update(value.encode('utf-8')
if isinstance(value, str) else str(value).encode(
'utf-8'))
md5sum = m.hexdigest()
            except Exception as e:
                logger.warning('Unable to compute md5sum for sentence: %s', e)
self.md5sum = md5sum
self.pos = pos
self.sem_orient = sem_orient
self.significance = significance
self.token = token
self.value = value
self.is_title = is_title
self.dependency = dependency
self.emotions = emotions
def as_dict(self):
'''
:returns: a dictionary representation of the sentence object.
'''
return dict((k, v) for k, v in self.__dict__.items() if
not k.startswith('_'))
def get_sentence(self):
return self.value
def set_sentence(self, new_sentence):
self.value = new_sentence
def get_pos_tags(self):
'''
Get the POS Tags as list.
>>> sentence = Sentence(pos = 'PRP ADV NN')
>>> sentence.get_pos_tags()
['PRP', 'ADV', 'NN']
'''
if self.pos:
return self.pos.strip().split(self.ITEM_DELIMITER)
else:
return None
def set_pos_tags(self, new_pos_tags):
if isinstance(new_pos_tags, list):
new_pos_tags = self.ITEM_DELIMITER.join(new_pos_tags)
self.pos = new_pos_tags
def get_pos_tags_list(self):
'''
:returns: list of the sentence's POS tags
>>> sentence = Sentence(pos = 'PRP ADV NN')
>>> sentence.get_pos_tags_list()
['PRP', 'ADV', 'NN']
'''
return [] if not self.pos_tag_string else self.get_pos_tags()
def set_pos_tags_list(self, pos_tags_list):
self.set_pos_tags(pos_tags_list)
def get_pos_tags_string(self):
'''
:returns: String of the sentence's POS tags
>>> sentence = Sentence(pos = 'PRP ADV NN')
>>> sentence.get_pos_tags_string()
'PRP ADV NN'
'''
return self.pos
def set_pos_tags_string(self, new_value):
self.pos = new_value
def get_tokens(self):
'''
:returns: an iterator providing the sentence's tokens
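
        Example (illustrative; token offsets may be document-relative, hence
        the correction by the first token's start index):

        >>> s = Sentence(value='Hello world', token='10,15 16,21')
        >>> list(s.get_tokens())
        ['Hello', 'world']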
'''
        if not self.token:
            return  # PEP 479: do not raise StopIteration inside a generator
correction_offset = int(self.token.split(',')[0] or 0)
for token_pos in self.token.split(self.ITEM_DELIMITER):
token_indices = token_pos.split(self.TOKEN_DELIMITER)
try:
start, end = [int(i) - correction_offset for i \
in token_indices]
except ValueError as e:
# occasionally there appear to be missing spaces in token
# strings
                logger.warning('Error parsing tokens for sentence {}; token '
'string was {}; individual token identifier '
'was {}. Original error was: {}'.format(
self.value, self.token, token_pos, e
), exc_info=True)
token_indices = [int(tok) for tok in token_indices]
start, end = token_indices[0], token_indices[-1]
res = str(self.sentence)[start:end]
# de- and encoding sometimes leads to index errors with double-width
# characters - here we attempt to detect such cases and correct
if res.strip() != res:
correction_offset += len(res) - len(res.strip())
res = res.strip()
yield res
def is_digit(self, x):
"""built in is_digit rejects negative number strings like -1 (used for
root in dependency annotations"""
try:
_ = int(x)
return True
except ValueError:
return False
def get_dependency_list(self):
'''
:returns: the dependencies of the sentence as a list of \
`LabeledDependency` objects
:rtype: :py:class:`list` of :py:class:\
`weblyzard_api.model.xml_content.LabeledDependency` objects
>>> s = Sentence(pos='RB PRP MD', dependency='1:SUB -1:ROOT 1:OBJ')
>>> s.dependency_list
[
LabeledDependency(parent='1', pos='RB', label='SUB'),
LabeledDependency(parent='-1', pos='PRP', label='ROOT'),
LabeledDependency(parent='1', pos='MD', label='OBJ')
]
'''
if self.dependency:
result = []
deps = self.dependency.strip().split(self.ITEM_DELIMITER)
for index, dep in enumerate(deps):
if self.DEPENDENCY_DELIMITER in dep:
parent, label = dep.split(self.DEPENDENCY_DELIMITER, 1)
if not self.is_digit(parent):
try:
label, parent = parent, label
assert self.is_digit(parent)
except AssertionError:
logger.info(
                                'Unable to parse dependency annotation {} for sentence '
'{} with dependency string {} as tuple of '
'(parent index, dependency label), treating it as '
'parent index only'.format(dep, self.value,
self.dependency))
parent, label = -1, 'XX'
elif self.is_digit(dep):
parent, label = dep, None
logger.info(
                        'Unable to parse dependency annotation {} for sentence '
'{} with dependency string {} as tuple of '
'(parent index, dependency label), treating it as '
'parent index only'.format(dep, self.value,
self.dependency))
else:
                    parent, label = -1, dep
                    logger.info(
                        'Unable to parse dependency annotation {} for '
                        'sentence {} with dependency string {} as tuple of '
                        '(parent index, dependency label), treating it as '
                        'dependency label only'.format(
                            dep, self.value, self.dependency))
result.append(LabeledDependency(parent,
self.pos_tags_list[index],
label))
return result
else:
return None
def set_dependency_list(self, dependencies):
'''
Takes a list of :py:class:`weblyzard_api.model.xml_content.LabeledDependency`
:param dependencies: The dependencies to set for this sentence.
:type dependencies: list
.. note:: The list must contain items of the type \
:py:class:`weblyzard_api.model.xml_content.LabeledDependency`
>>> s = Sentence(pos='RB PRP MD', dependency='1:SUB -1:ROOT 1:OBJ')
>>> s.dependency_list
[LabeledDependency(parent='1', pos='RB', label='SUB'),
LabeledDependency(parent='-1', pos='PRP', label='ROOT'),
LabeledDependency(parent='1', pos='MD', label='OBJ')]
>>> s.dependency_list = [LabeledDependency(parent='-1', pos='MD', label='ROOT'), ]
>>> s.dependency_list
[LabeledDependency(parent='-1', pos='MD', label='ROOT')]
'''
if not dependencies:
return
deps = []
new_pos = []
for dependency in dependencies:
deps.append(self.DEPENDENCY_DELIMITER.join(
[dependency.parent, dependency.label]))
new_pos.append(dependency.pos)
self.pos = self.ITEM_DELIMITER.join(new_pos)
self.dependency = self.ITEM_DELIMITER.join(deps)
def to_json(self, version=1.0):
'''
Converts the Sentence object to the corresponding JSON string
according to the given API version (default 1.0).
:param version: The API version to target.
:type version: float
:returns: A JSON string.
:rtype: str
'''
return json.dumps(self.to_api_dict(version))
def to_api_dict(self, version=1.0):
'''
Serializes the Sentence object to a dict conforming to the
specified API version (default 1.0).
:param version: The API version to target.
:type version: float
:returns: A dict with the correct keys as defined in the API.
:rtype: dict
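
        Illustrative example (``None`` values and unmapped keys such as
        ``emotions`` are skipped):

        >>> s = Sentence(md5sum='abc', value='Ok!', pos='UH')
        >>> sorted(s.to_api_dict().items())
        [('id', 'abc'), ('is_title', False), ('pos_list', 'UH'), ('value', 'Ok!')]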
'''
key_map = self.API_MAPPINGS[version]
return {key_map[key]: value for key, value in
self.as_dict().items() if key in key_map and
value is not None}
sentence = property(get_sentence, set_sentence)
pos_tags = property(get_pos_tags, set_pos_tags)
tokens = property(get_tokens)
pos_tags_list = property(get_pos_tags_list, set_pos_tags_list)
pos_tag_string = property(get_pos_tags_string, set_pos_tags_string)
dependency_list = property(get_dependency_list, set_dependency_list)
| apache-2.0 | -1,612,660,601,922,884,600 | 35.511811 | 90 | 0.520649 | false |
jsirois/pants | src/python/pants/engine/unions.py | 1 | 4665 | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from collections import defaultdict
from dataclasses import dataclass
from typing import DefaultDict, Iterable, Mapping, Type, TypeVar
from pants.util.frozendict import FrozenDict
from pants.util.meta import decorated_type_checkable, frozen_after_init
from pants.util.ordered_set import FrozenOrderedSet, OrderedSet
@decorated_type_checkable
def union(cls):
"""A class decorator to allow a class to be a union base in the engine's mechanism for
polymorphism.
Annotating a class with @union allows other classes to register a `UnionRule(BaseClass,
MemberClass)`. Then, you can use `await Get(Output, UnionBase, concrete_union_member)`. This
would be similar to writing `UnionRule(Output, ConcreteUnionMember,
concrete_union_member_instance)`, but allows you to write generic code without knowing what
concrete classes might later implement that union.
Often, union bases are abstract classes, but they need not be.
See https://www.pantsbuild.org/docs/rules-api-unions.
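
    Minimal sketch (hypothetical classes, not part of this module):

        @union
        class Fruit:
            pass

        class Banana(Fruit):
            pass

        # registered via a rule set elsewhere: UnionRule(Fruit, Banana)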
"""
# TODO: Check that the union base type is used as a tag and nothing else (e.g. no attributes)!
assert isinstance(cls, type)
return union.define_instance_of(cls)
def is_union(input_type: type) -> bool:
"""Return whether or not a type has been annotated with `@union`."""
return union.is_instance(input_type)
@dataclass(frozen=True)
class UnionRule:
"""Specify that an instance of `union_member` can be substituted wherever `union_base` is
used."""
union_base: Type
union_member: Type
def __post_init__(self) -> None:
if not union.is_instance(self.union_base):
msg = (
f"The first argument must be a class annotated with @union "
f"(from pants.engine.unions), but was {self.union_base}."
)
if union.is_instance(self.union_member):
msg += (
"\n\nHowever, the second argument was annotated with `@union`. Did you "
"switch the first and second arguments to `UnionRule()`?"
)
raise ValueError(msg)
_T = TypeVar("_T")
@frozen_after_init
@dataclass(unsafe_hash=True)
class UnionMembership:
union_rules: FrozenDict[Type, FrozenOrderedSet[Type]]
@classmethod
def from_rules(cls, rules: Iterable[UnionRule]) -> UnionMembership:
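        """Build a membership mapping from ``UnionRule`` instances.

        Minimal sketch (``Fruit``/``Banana`` are hypothetical)::

            membership = UnionMembership.from_rules([UnionRule(Fruit, Banana)])
            membership[Fruit] == FrozenOrderedSet([Banana])
        """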
mapping: DefaultDict[Type, OrderedSet[Type]] = defaultdict(OrderedSet)
for rule in rules:
mapping[rule.union_base].add(rule.union_member)
return cls(mapping)
def __init__(self, union_rules: Mapping[Type, Iterable[Type]]) -> None:
self.union_rules = FrozenDict(
{base: FrozenOrderedSet(members) for base, members in union_rules.items()}
)
def __getitem__(self, union_type: Type[_T]) -> FrozenOrderedSet[Type[_T]]:
"""Get all members of this union type.
If the union type does not exist because it has no members registered, this will raise an
IndexError.
Note that the type hint assumes that all union members will have subclassed the union type
- this is only a convention and is not actually enforced. So, you may have inaccurate type
hints.
"""
return self.union_rules[union_type]
def get(self, union_type: Type[_T]) -> FrozenOrderedSet[Type[_T]]:
"""Get all members of this union type.
If the union type does not exist because it has no members registered, return an empty
FrozenOrderedSet.
Note that the type hint assumes that all union members will have subclassed the union type
- this is only a convention and is not actually enforced. So, you may have inaccurate type
hints.
"""
return self.union_rules.get(union_type, FrozenOrderedSet())
def is_member(self, union_type: Type, putative_member: Type) -> bool:
members = self.union_rules.get(union_type)
if members is None:
raise TypeError(f"Not a registered union type: {union_type}")
return type(putative_member) in members
def has_members(self, union_type: Type) -> bool:
"""Check whether the union has an implementation or not."""
return bool(self.union_rules.get(union_type))
def has_members_for_all(self, union_types: Iterable[Type]) -> bool:
"""Check whether every union given has an implementation or not."""
return all(self.has_members(union_type) for union_type in union_types)
| apache-2.0 | -1,717,259,079,218,027,300 | 38.533898 | 98 | 0.673955 | false |
ZeitOnline/zeit.content.article | src/zeit/content/article/interfaces.py | 1 | 7135 | from zeit.content.article.i18n import MessageFactory as _
import zeit.cms.content.contentsource
import zeit.cms.content.interfaces
import zeit.cms.section.interfaces
import zeit.content.article.source
import zeit.content.cp.source
import zeit.content.image.interfaces
import zope.schema
ARTICLE_NS = 'http://namespaces.zeit.de/CMS/Article'
class IArticleMetadata(zeit.cms.content.interfaces.ICommonMetadata):
"""Metadata of an article."""
    # bind() with a fresh throwaway object amounts to "clone".
keywords = zeit.cms.content.interfaces.ICommonMetadata['keywords'].bind(
object())
keywords.setTaggedValue('zeit.cms.tagging.updateable', True)
body = zope.interface.Attribute('Convenience access to IEditableBody')
header = zope.interface.Attribute('Convenience access to IHeaderArea')
paragraphs = zope.schema.Int(
title=_("Paragraphsamount"),
description=_("Amount of paragraphs in total."),
readonly=True,
required=False)
textLength = zope.schema.Int(
title=_('Textlength'),
required=False)
# DEPRECATED (xslt)
has_recensions = zope.schema.Bool(
title=_('Has recension content'),
default=False,
required=False)
# DEPRECATED (xslt)
artbox_thema = zope.schema.Bool(
title=_('First related as box'),
default=False,
required=False)
layout = zope.schema.Choice(
title=_("Layout"),
source=zeit.content.cp.source.centerPageSource,
required=False)
genre = zope.schema.Choice(
title=_("Genre"),
source=zeit.content.article.source.GenreSource(),
required=False)
main_image = zeit.cms.content.interfaces.ReferenceField(
title=_("Image"),
description=_("Drag an image group here"),
# BBB allow single images
source=zeit.content.image.interfaces.imageSource,
required=False)
main_image_variant_name = zope.schema.Choice(
title=_('Variant Name'),
source=zeit.content.article.source.MAIN_IMAGE_VARIANT_NAME_SOURCE,
required=False)
main_image_block = zope.interface.Attribute(
u'First block of the body if it is present and is an image block')
template = zope.schema.Choice(
title=_("Template"),
source=zeit.content.article.source.ARTICLE_TEMPLATE_SOURCE,
required=False)
header_layout = zope.schema.Choice(
title=_("Header layout"),
source=zeit.content.article.source.ArticleHeaderSource(),
required=False)
is_instant_article = zope.schema.Bool(
title=_('Is instant article'),
default=False,
required=False)
is_amp = zope.schema.Bool(
title=_('Is AMP'),
default=False,
required=False)
hide_ligatus_recommendations = zope.schema.Bool(
title=_('Hide Ligatus recommendations'),
default=False,
required=False)
recent_comments_first = zope.schema.Bool(
title=_('Recent comments first'),
default=False,
required=False)
has_audio = zope.schema.Bool(
title=_('Has audio file'),
default=False,
readonly=True)
class IArticle(IArticleMetadata, zeit.cms.content.interfaces.IXMLContent):
"""Article is the main content type in the Zeit CMS."""
def updateDAVFromXML():
"""Update the DAV properties based on the information in the XML.
This is useful when importing an article for instance from
the Content-Drehscheibe, where the only property information we have
is in the XML and there is no head section.
"""
class IZONArticle(IArticle, zeit.cms.section.interfaces.ISectionMarker):
pass
class ArticleSource(zeit.cms.content.contentsource.CMSContentSource):
name = 'article'
check_interfaces = (IArticle,)
articleSource = ArticleSource()
class IBookRecensionReadContainer(zope.interface.Interface):
"""Read interface for book recensions."""
def __getitem__(index):
"""Get recension with given `inded`."""
def __iter__():
"""Iterate over recensions."""
def __len__():
"""Return amount of items."""
class IBookRecensionWriteContainer(zope.interface.Interface):
"""Write interface for book recensions."""
def append(item):
"""Append item to container."""
def remove(name):
"""Remove recension with given name from container."""
class IBookRecensionContainer(IBookRecensionReadContainer,
IBookRecensionWriteContainer):
"""Book recensions."""
class IBookRecension(zope.interface.Interface):
"""A recension for a book."""
authors = zope.schema.Tuple(
title=_('Authors'),
min_length=1,
default=(None, ),
value_type=zope.schema.TextLine(
title=_('Author')))
title = zope.schema.TextLine(title=_('Title'))
info = zope.schema.Text(
title=_('Info'),
required=False)
genre = zope.schema.TextLine(
title=_('Genre'),
required=False)
category = zope.schema.Choice(
title=_('ZEIT category'),
source=zeit.content.article.source.BookRecensionCategories())
age_limit = zope.schema.Int(
title=_('Agelimit'),
required=False)
original_language = zope.schema.TextLine(
title=_('Original language'),
required=False)
translator = zope.schema.TextLine(
title=_('Translator'),
required=False)
publisher = zope.schema.TextLine(
title=_('Publisher'),
required=False)
location = zope.schema.TextLine(
title=_('book-location', default=u'Location'),
required=False)
year = zope.schema.Int(
title=_('Year'),
required=False)
media_type = zope.schema.TextLine(
title=_('Media type'),
required=False)
pages = zope.schema.Int(
title=_('Pages'),
required=False)
price = zope.schema.TextLine(
title=_('Price (EUR)'),
required=False)
raw_data = zope.schema.Text(
title=_('Raw data'),
required=False,
readonly=True)
class ITagesspiegelArticle(zope.interface.Interface):
"""Marker for articles imported from Tagesspiegel."""
class IBreakingNews(IArticle):
"""Breaking news are IArticles that receive special one-time treatment
on publishing.
"""
title = zope.schema.Text(
title=_("Title"), missing_value=u'')
title.setTaggedValue('zeit.cms.charlimit', 70)
is_breaking = zope.schema.Bool(
title=_('Breaking news article'),
default=False,
required=False)
def banner_matches(banner):
"""Returns True if the given banner content object refers to this
breaking news article."""
IBreakingNews.setTaggedValue(
'zeit.cms.addform', 'zeit.content.article.AddBreakingNews')
IBreakingNews.setTaggedValue(
'zeit.cms.title', _('Add breaking news'))
class IErrorPage(IArticle):
"""Marker interface for error pages, so zeit.web can render them
differently.
This interface is applied manually.
"""
| bsd-3-clause | 6,100,495,689,345,631,000 | 26.548263 | 76 | 0.643588 | false |
doge-search/webdoge | liqian/rice/scrap_professors.py | 1 | 2173 | #!/usr/bin/python
#coding=utf-8
import urllib2
import HTMLParser
import sys
import xml.dom.minidom as minidom
from htmlentitydefs import entitydefs
import glob
import requests.packages.urllib3.util.ssl_
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = 'ALL'
reload(sys)
sys.setdefaultencoding('utf-8')
class MyParser(HTMLParser.HTMLParser):
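    """Collect faculty names and titles from the page markup.
    Assumption (inferred from the handlers below): a name is the text of an
    <a> inside <div class="name">; a title is the text content of
    <div class="title">.
    """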
def __init__(self):
HTMLParser.HTMLParser.__init__(self)
self.hasname = False
self.hastitle = False
self.hasemail = False
self.hasphone = False
self.namelist = []
self.titlelist = []
self.phonelist = []
self.emaillist = []
self.tempname = []
self.temptitle = []
self.tempphone = []
self.tempemail = []
def handle_starttag(self, tag, attrs):
if tag == 'div':
for name, value in attrs:
if name == 'class':
if value == 'name':
self.hasname = True
if value == 'title':
self.hastitle = True
    def handle_data(self, text):
        if self.hasname and not text.isspace():
            self.tempname.append(text)
        if self.hastitle and not text.isspace():
            self.temptitle.append(text)
def handle_endtag(self, tag):
if tag == 'a':
if self.hasname:
self.namelist.append(self.tempname)
self.hasname = False
self.tempname = []
if tag == 'div':
if self.hastitle:
self.titlelist.append(self.temptitle)
self.hastitle = False
self.temptitle = []
fout_xml = file('rice.xml', 'w')
doc = minidom.Document()
institution = doc.createElement("institution")
doc.appendChild(institution)
if True:
rootUrl = 'http://www.cs.rice.edu/people/faculty/'
response = urllib2.urlopen(rootUrl)
html = response.read()
my = MyParser()
my.feed(html)
for i in range(len(my.namelist)):
professor = doc.createElement("professor")
name = my.namelist[i][0]
titles = my.titlelist[i]
namenode = doc.createElement("name")
namenode.appendChild(doc.createTextNode(name))
professor.appendChild(namenode)
for title in titles:
titlenode = doc.createElement("title")
titlenode.appendChild(doc.createTextNode(title))
professor.appendChild(titlenode)
institution.appendChild(professor)
doc.writexml(fout_xml, "\t", "\t", "\n")
fout_xml.close() | unlicense | 446,444,873,430,418,800 | 23.426966 | 59 | 0.69075 | false |
hivesolutions/appier | src/appier/test/util.py | 1 | 46210 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Hive Appier Framework
# Copyright (c) 2008-2021 Hive Solutions Lda.
#
# This file is part of Hive Appier Framework.
#
# Hive Appier Framework is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Appier Framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Appier Framework. If not, see <http://www.apache.org/licenses/>.
__author__ = "João Magalhães <[email protected]>"
""" The author(s) of the module """
__version__ = "1.0.0"
""" The version of the module """
__revision__ = "$LastChangedRevision$"
""" The revision number of the module """
__date__ = "$LastChangedDate$"
""" The last change date of the module """
__copyright__ = "Copyright (c) 2008-2021 Hive Solutions Lda."
""" The copyright for the module """
__license__ = "Apache License, Version 2.0"
""" The license for the module """
import unittest
import appier
class UtilTest(unittest.TestCase):
def test_is_mobile(self):
result = appier.is_mobile("Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19")
self.assertEqual(result, True)
result = appier.is_mobile("Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1")
self.assertEqual(result, True)
result = appier.is_mobile("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12")
self.assertEqual(result, False)
result = appier.is_mobile("Mozilla/5.0 (Linux; U; Android 4.1.1; en-gb; Build/KLP) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Safari/534.30")
self.assertEqual(result, False)
result = appier.is_mobile("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36")
self.assertEqual(result, False)
result = appier.is_mobile("Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13F69 Safari/601.1")
self.assertEqual(result, False)
result = appier.is_mobile("")
self.assertEqual(result, False)
result = appier.is_mobile(None)
self.assertEqual(result, False)
def test_is_tablet(self):
result = appier.is_tablet("Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13F69 Safari/601.1")
self.assertEqual(result, True)
result = appier.is_tablet("Mozilla/5.0 (iPad; CPU OS 6_1_3 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Mobile/10B329")
self.assertEqual(result, True)
result = appier.is_tablet("Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19")
self.assertEqual(result, True)
result = appier.is_tablet("Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1")
self.assertEqual(result, True)
result = appier.is_tablet("Mozilla/5.0 (Linux; U; Android 4.1.1; en-gb; Build/KLP) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Safari/534.30")
self.assertEqual(result, True)
result = appier.is_tablet("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12")
self.assertEqual(result, False)
result = appier.is_tablet("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36")
self.assertEqual(result, False)
result = appier.is_tablet("")
self.assertEqual(result, False)
result = appier.is_tablet(None)
self.assertEqual(result, False)
def test_is_browser(self):
result = appier.is_browser("Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13F69 Safari/601.1")
self.assertEqual(result, True)
result = appier.is_browser("Mozilla/5.0 (iPad; CPU OS 6_1_3 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Mobile/10B329")
self.assertEqual(result, True)
result = appier.is_browser("Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19")
self.assertEqual(result, True)
result = appier.is_browser("Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1")
self.assertEqual(result, True)
result = appier.is_browser("Mozilla/5.0 (Linux; U; Android 4.1.1; en-gb; Build/KLP) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Safari/534.30")
self.assertEqual(result, True)
result = appier.is_browser("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12")
self.assertEqual(result, True)
result = appier.is_browser("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36")
self.assertEqual(result, True)
result = appier.is_browser("Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10136")
self.assertEqual(result, True)
result = appier.is_browser("DuckDuckBot/1.0; (+http://duckduckgo.com/duckduckbot.html)")
self.assertEqual(result, False)
result = appier.is_browser("netius/1.1.10")
self.assertEqual(result, False)
result = appier.is_browser("netius/1.1b")
self.assertEqual(result, False)
result = appier.is_browser("")
self.assertEqual(result, False)
def test_is_bot(self):
result = appier.is_bot("Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13F69 Safari/601.1")
self.assertEqual(result, False)
result = appier.is_bot("Mozilla/5.0 (iPad; CPU OS 6_1_3 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Mobile/10B329")
self.assertEqual(result, False)
result = appier.is_bot("Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19")
self.assertEqual(result, False)
result = appier.is_bot("Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1")
self.assertEqual(result, False)
result = appier.is_bot("Mozilla/5.0 (Linux; U; Android 4.1.1; en-gb; Build/KLP) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Safari/534.30")
self.assertEqual(result, False)
result = appier.is_bot("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12")
self.assertEqual(result, False)
result = appier.is_bot("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36")
self.assertEqual(result, False)
result = appier.is_bot("Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10136")
self.assertEqual(result, False)
result = appier.is_bot("DuckDuckBot/1.0; (+http://duckduckgo.com/duckduckbot.html)")
self.assertEqual(result, True)
result = appier.is_bot("netius/1.1.10")
self.assertEqual(result, False)
result = appier.is_bot("netius/1.1b")
self.assertEqual(result, False)
result = appier.is_bot("")
self.assertEqual(result, False)
result = appier.is_bot(None)
self.assertEqual(result, False)
def test_browser_info(self):
result = appier.browser_info("Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10136")
self.assertEqual(result, dict(
name = "Edge",
version = "12.10136",
version_f = 12.10136,
version_i = 12,
interactive = True,
bot = False,
os = "Windows"
))
result = appier.browser_info("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36")
self.assertEqual(result, dict(
name = "Chrome",
version = "62.0.3202.75",
version_f = 62.0,
version_i = 62,
interactive = True,
bot = False,
os = "Windows"
))
result = appier.browser_info("Mozilla/5.0 (iPad; CPU OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13F69 Safari/601.1")
self.assertEqual(result, dict(
name = "Safari",
version = "601.1",
version_f = 601.1,
version_i = 601,
interactive = True,
bot = False,
os = "Mac"
))
result = appier.browser_info("Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:56.0) Gecko/20100101 Firefox/56.0")
self.assertEqual(result, dict(
name = "Firefox",
version = "56.0",
version_f = 56.0,
version_i = 56,
interactive = True,
bot = False,
os = "Windows"
))
result = appier.browser_info("Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)")
self.assertEqual(result, dict(
name = "Explorer",
version = "8.0",
version_f = 8.0,
version_i = 8,
interactive = True,
bot = False,
os = "Windows"
))
result = appier.browser_info("Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
self.assertEqual(result, dict(
name = "Googlebot",
version = "2.1",
version_f = 2.1,
version_i = 2,
interactive = False,
bot = True
))
result = appier.browser_info("Mozilla/5.0 (compatible; Bingbot/2.0; +http://www.bing.com/bingbot.htm)")
self.assertEqual(result, dict(
name = "Bingbot",
version = "2.0",
version_f = 2.0,
version_i = 2,
interactive = False,
bot = True
))
result = appier.browser_info("DuckDuckBot/1.0; (+http://duckduckgo.com/duckduckbot.html)")
self.assertEqual(result, dict(
name = "DuckDuckBot",
version = "1.0",
version_f = 1.0,
version_i = 1,
interactive = False,
bot = True
))
result = appier.browser_info("netius/1.1.10")
self.assertEqual(result, dict(
name = "netius",
version = "1.1.10",
version_f = 1.1,
version_i = 1,
interactive = False,
bot = False
))
result = appier.browser_info("netius/1.1b")
self.assertEqual(result, dict(
name = "netius",
version = "1.1b",
version_f = 0,
version_i = 0,
interactive = False,
bot = False
))
result = appier.browser_info("APIs-Google (+https://developers.google.com/webmasters/APIs-Google.html)")
self.assertEqual(result, None)
result = appier.browser_info(None)
self.assertEqual(result, None)
def test_obfuscate(self):
result = appier.obfuscate("hello world")
self.assertEqual(result, "hel********")
result = appier.obfuscate("hello world", display_l = 6)
self.assertEqual(result, "hello *****")
result = appier.obfuscate("hello world", display_l = 100)
self.assertEqual(result, "hello world")
result = appier.obfuscate("hello world", display_l = 6, token = "-")
self.assertEqual(result, "hello -----")
result = appier.obfuscate(appier.legacy.u("你好世界"), display_l = 3)
self.assertEqual(result, appier.legacy.u("你好世*"))
def test_email_parts(self):
name, email = appier.email_parts("João Magalhães <[email protected]>")
self.assertEqual(type(name), str)
self.assertEqual(type(email), str)
self.assertEqual(name, "João Magalhães")
self.assertEqual(email, "[email protected]")
name, email = appier.email_parts(appier.legacy.u("João Magalhães <[email protected]>"))
self.assertEqual(type(name), appier.legacy.UNICODE)
self.assertEqual(type(email), appier.legacy.UNICODE)
self.assertEqual(name, appier.legacy.u("João Magalhães"))
self.assertEqual(email, appier.legacy.u("[email protected]"))
name, email = appier.email_parts(appier.legacy.u("你好世界 <[email protected]>"))
self.assertEqual(type(name), appier.legacy.UNICODE)
self.assertEqual(type(email), appier.legacy.UNICODE)
self.assertEqual(name, appier.legacy.u("你好世界"))
self.assertEqual(email, appier.legacy.u("[email protected]"))
name, email = appier.email_parts(appier.legacy.u(" [email protected] "))
self.assertEqual(type(name), appier.legacy.UNICODE)
self.assertEqual(type(email), appier.legacy.UNICODE)
self.assertEqual(name, appier.legacy.u("[email protected]"))
self.assertEqual(email, appier.legacy.u("[email protected]"))
def test_email_mime(self):
result = appier.email_mime("João Magalhães <[email protected]>")
self.assertEqual(type(result), str)
self.assertEqual(result, "=?utf-8?q?Jo=C3=A3o_Magalh=C3=A3es?= <[email protected]>")
result = appier.email_mime(appier.legacy.u("João Magalhães <[email protected]>"))
self.assertEqual(type(result), appier.legacy.UNICODE)
self.assertEqual(result, appier.legacy.u("=?utf-8?q?Jo=C3=A3o_Magalh=C3=A3es?= <[email protected]>"))
result = appier.email_mime(appier.legacy.u(" [email protected] "))
self.assertEqual(type(result), appier.legacy.UNICODE)
self.assertEqual(result, appier.legacy.u("[email protected] <[email protected]>"))
result = appier.email_mime([
appier.legacy.u("João Magalhães <[email protected]>"),
appier.legacy.u(" [email protected] "),
None
])
self.assertEqual(type(result), list)
self.assertEqual(result, [
appier.legacy.u("=?utf-8?q?Jo=C3=A3o_Magalh=C3=A3es?= <[email protected]>"),
appier.legacy.u("[email protected] <[email protected]>")
])
def test_email_name(self):
result = appier.email_name("João Magalhães <[email protected]>")
self.assertEqual(type(result), str)
self.assertEqual(result, "João Magalhães")
result = appier.email_name(appier.legacy.u("João Magalhães <[email protected]>"))
self.assertEqual(type(result), appier.legacy.UNICODE)
self.assertEqual(result, appier.legacy.u("João Magalhães"))
result = appier.email_name(appier.legacy.u(" [email protected] "))
self.assertEqual(type(result), appier.legacy.UNICODE)
self.assertEqual(result, appier.legacy.u("[email protected]"))
result = appier.email_name(appier.legacy.u("joamag"))
self.assertEqual(result, None)
result = appier.email_name(appier.legacy.u(""))
self.assertEqual(result, None)
result = appier.email_name([
appier.legacy.u("[email protected]"),
appier.legacy.u("[email protected]"),
None
])
self.assertEqual(type(result), list)
self.assertEqual(result, [
appier.legacy.u("[email protected]"),
appier.legacy.u("[email protected]")
])
result = appier.email_name([
appier.legacy.u("joamag"),
appier.legacy.u("[email protected]"),
None
])
self.assertEqual(type(result), list)
self.assertEqual(result, [
appier.legacy.u("[email protected]")
])
result = appier.email_name([
appier.legacy.u(""),
None
])
self.assertEqual(type(result), list)
self.assertEqual(result, [])
result = appier.email_name([
appier.legacy.u("")
])
self.assertEqual(type(result), list)
self.assertEqual(result, [])
def test_email_base(self):
result = appier.email_base("João Magalhães <[email protected]>")
self.assertEqual(type(result), str)
self.assertEqual(result, "[email protected]")
result = appier.email_base(appier.legacy.u("João Magalhães <[email protected]>"))
self.assertEqual(type(result), appier.legacy.UNICODE)
self.assertEqual(result, appier.legacy.u("[email protected]"))
result = appier.email_base(appier.legacy.u(" [email protected] "))
self.assertEqual(type(result), appier.legacy.UNICODE)
self.assertEqual(result, appier.legacy.u("[email protected]"))
result = appier.email_base(appier.legacy.u("joamag"))
self.assertEqual(result, None)
result = appier.email_base(appier.legacy.u(""))
self.assertEqual(result, None)
result = appier.email_base([
appier.legacy.u("[email protected]"),
appier.legacy.u("[email protected]"),
None
])
self.assertEqual(type(result), list)
self.assertEqual(result, [
appier.legacy.u("[email protected]"),
appier.legacy.u("[email protected]")
])
result = appier.email_base([
appier.legacy.u("joamag"),
appier.legacy.u("[email protected]"),
None
])
self.assertEqual(type(result), list)
self.assertEqual(result, [
appier.legacy.u("[email protected]")
])
result = appier.email_base([
appier.legacy.u(""),
None
])
self.assertEqual(type(result), list)
self.assertEqual(result, [])
result = appier.email_base([
appier.legacy.u(""),
])
self.assertEqual(type(result), list)
self.assertEqual(result, [])
def test_date_to_timestamp(self):
result = appier.date_to_timestamp("29/06/1984")
self.assertEqual(type(result), int)
self.assertEqual(int(result), 457315200)
result = appier.date_to_timestamp("29/06/0000")
self.assertEqual(result, None)
result = appier.date_to_timestamp("1984-06-29", format = "%Y-%m-%d")
self.assertEqual(result, 457315200)
result = appier.date_to_timestamp("1984-13-29", format = "%Y-%m-%d")
self.assertEqual(result, None)
def test_gather_errors(self):
def raiser(): raise appier.OperationalError(message = "hello")
struct = appier.lazy_dict(
first = appier.lazy(lambda: raiser()),
second = appier.lazy(lambda: 2),
)
errors = appier.gather_errors(struct)
self.assertEqual(errors, dict(first = ["hello"]))
struct.__getitem__("first", force = True)._value = 1
errors = appier.gather_errors(struct)
self.assertEqual(errors, dict(first = ["hello"]))
struct.__getitem__("first", force = True)._value = 1
errors = appier.gather_errors(struct, resolve = False)
self.assertEqual(errors, dict())
def test_camel_to_underscore(self):
result = appier.camel_to_underscore(None)
self.assertEqual(result, None)
result = appier.camel_to_underscore("")
self.assertEqual(type(result), str)
self.assertEqual(result, "")
result = appier.camel_to_underscore("HelloWorld")
self.assertEqual(type(result), str)
self.assertEqual(result, "hello_world")
result = appier.camel_to_underscore("HELLOWorld")
self.assertEqual(type(result), str)
self.assertEqual(result, "hello_world")
result = appier.camel_to_underscore("HELLOWorldHELLOWorld")
self.assertEqual(type(result), str)
self.assertEqual(result, "hello_world_hello_world")
def test_camel_to_readable(self):
result = appier.camel_to_readable(None)
self.assertEqual(result, None)
result = appier.camel_to_readable("")
self.assertEqual(type(result), str)
self.assertEqual(result, "")
result = appier.camel_to_readable("HelloWorld")
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World")
result = appier.camel_to_readable("HelloWorld", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello world")
result = appier.camel_to_readable("HelloWorld", lower = True, capitalize = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World")
result = appier.camel_to_readable("HELLOWorld")
self.assertEqual(type(result), str)
self.assertEqual(result, "HELLO World")
result = appier.camel_to_readable("HELLOWorld", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello world")
result = appier.camel_to_readable(
"HELLOWorld",
lower = True,
capitalize = True
)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World")
result = appier.camel_to_readable("HELLOWorldHELLOWorld")
self.assertEqual(type(result), str)
self.assertEqual(result, "HELLO World HELLO World")
result = appier.camel_to_readable(
"HELLOWorldHELLOWorld",
lower = True
)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello world hello world")
result = appier.camel_to_readable(
"HELLOWorldHELLOWorld",
lower = True,
capitalize = True
)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World Hello World")
def test_underscore_to_camel(self):
result = appier.underscore_to_camel(None)
self.assertEqual(result, None)
result = appier.underscore_to_camel("")
self.assertEqual(type(result), str)
self.assertEqual(result, "")
result = appier.underscore_to_camel("hello_world")
self.assertEqual(type(result), str)
self.assertEqual(result, "HelloWorld")
result = appier.underscore_to_camel("hello_world", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "helloWorld")
result = appier.underscore_to_camel("hello_world_hello_world")
self.assertEqual(type(result), str)
self.assertEqual(result, "HelloWorldHelloWorld")
result = appier.underscore_to_camel("hello_world_hello_world", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "helloWorldHelloWorld")
result = appier.underscore_to_camel("hello_world_")
self.assertEqual(type(result), str)
self.assertEqual(result, "HelloWorld")
result = appier.underscore_to_camel("hello_world_", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "helloWorld")
result = appier.underscore_to_camel("__hello_world__")
self.assertEqual(type(result), str)
self.assertEqual(result, "HelloWorld")
result = appier.underscore_to_camel("__hello_world__", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "helloWorld")
result = appier.underscore_to_camel("__hello___world__")
self.assertEqual(type(result), str)
self.assertEqual(result, "HelloWorld")
result = appier.underscore_to_camel("__hello___world__", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "helloWorld")
result = appier.underscore_to_camel("__hello___WORLD__")
self.assertEqual(type(result), str)
self.assertEqual(result, "HelloWORLD")
result = appier.underscore_to_camel("__hello___WORLD__", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "helloWORLD")
result = appier.underscore_to_camel("HelloWorld")
self.assertEqual(type(result), str)
self.assertEqual(result, "HelloWorld")
result = appier.underscore_to_camel("HelloWorld", lower = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "helloWorld")
def test_underscore_to_readable(self):
result = appier.underscore_to_readable(None)
self.assertEqual(result, None)
result = appier.underscore_to_readable("")
self.assertEqual(type(result), str)
self.assertEqual(result, "")
result = appier.underscore_to_readable("hello_world")
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello world")
result = appier.underscore_to_readable("hello_world", capitalize = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World")
result = appier.underscore_to_readable("hello_world_hello_world")
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello world hello world")
result = appier.underscore_to_readable("hello_world_hello_world", capitalize = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World Hello World")
result = appier.underscore_to_readable("hello_world_")
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello world")
result = appier.underscore_to_readable("hello_world_", capitalize = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World")
result = appier.underscore_to_readable("__hello_world__")
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello world")
result = appier.underscore_to_readable("__hello_world__", capitalize = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World")
result = appier.underscore_to_readable("__hello___world__")
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello world")
result = appier.underscore_to_readable("__hello___world__", capitalize = True)
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello World")
result = appier.underscore_to_readable("__hello___world__", capitalize = True, separator = "-")
self.assertEqual(type(result), str)
self.assertEqual(result, "Hello-World")
def test_escape(self):
result = appier.escape("foo,bar", ",", escape = "$")
self.assertEqual(result, "foo$,bar")
result = appier.escape("foo$,bar", ",", escape = "$")
self.assertEqual(result, "foo$$$,bar")
def test_unescape(self):
result = appier.unescape("foo$,bar", escape = "$")
self.assertEqual(result, "foo,bar")
result = appier.unescape("foo$$,bar", escape = "$")
self.assertEqual(result, "foo$,bar")
result = appier.unescape("$$foo$,bar$$$$", escape = "$")
self.assertEqual(result, "$foo,bar$$")
def test_count_unescape(self):
result = appier.count_unescape("foo:bar", ":")
self.assertEqual(result, 1)
result = appier.count_unescape("foo:bar:hello:world", ":")
self.assertEqual(result, 3)
result = appier.count_unescape("foo,bar,hello,world", ":")
self.assertEqual(result, 0)
result = appier.count_unescape("foo:bar\\:hello:world", ":")
self.assertEqual(result, 2)
result = appier.count_unescape("foo:bar\\:hello\\:world", ":")
self.assertEqual(result, 1)
result = appier.count_unescape("foo:bar\\:hello\\\\:world", ":")
self.assertEqual(result, 2)
result = appier.count_unescape("foo\\:bar\\:hello\\:world", ":")
self.assertEqual(result, 0)
def test_split_unescape(self):
result = appier.split_unescape("foo bar")
self.assertEqual(result, ["foo", "bar"])
result = appier.split_unescape("foo bar hello world", max = 2)
self.assertEqual(result, ["foo", "bar", "hello world"])
result = appier.split_unescape("foo,bar", ",")
self.assertEqual(result, ["foo", "bar"])
result = appier.split_unescape("foo$,bar", ",", escape = "$")
self.assertEqual(result, ["foo,bar"])
result = appier.split_unescape("foo$$,bar", ",", escape = "$", unescape = True)
self.assertEqual(result, ["foo$", "bar"])
result = appier.split_unescape("foo$$,bar", ",", escape = "$", unescape = False)
self.assertEqual(result, ["foo$$", "bar"])
result = appier.split_unescape("foo$", ",", escape = "$", unescape = True)
self.assertEqual(result, ["foo$"])
result = appier.split_unescape("foo\\\\\\:bar", ":", unescape = True)
self.assertEqual(result, ["foo\\:bar"])
result = appier.split_unescape("foo\\\\:bar", ":", unescape = True)
self.assertEqual(result, ["foo\\", "bar"])
def test_is_content_type(self):
result = appier.is_content_type("text/plain", "text/plain")
self.assertEqual(result, True)
result = appier.is_content_type("text/plain", ("text/plain",))
self.assertEqual(result, True)
result = appier.is_content_type("text/plain", "text/html")
self.assertEqual(result, False)
result = appier.is_content_type("text/plain", ("text/html",))
self.assertEqual(result, False)
result = appier.is_content_type("text/plain", ("text/plain", "text/html"))
self.assertEqual(result, True)
result = appier.is_content_type("text/*", "text/plain")
self.assertEqual(result, True)
result = appier.is_content_type("text/*", "text/json")
self.assertEqual(result, True)
def test_parse_content_type(self):
result = appier.parse_content_type("text/plain")
self.assertEqual(type(result), tuple)
self.assertEqual(len(result), 2)
self.assertEqual(result[0], ["text/plain"])
self.assertEqual(result[1], dict())
result = appier.parse_content_type("text/plain+json")
self.assertEqual(type(result), tuple)
self.assertEqual(len(result), 2)
self.assertEqual(result[0], ["text/plain", "text/json"])
self.assertEqual(result[1], dict())
result = appier.parse_content_type("text/plain+json; charset=utf-8")
self.assertEqual(type(result), tuple)
self.assertEqual(len(result), 2)
self.assertEqual(result[0], ["text/plain", "text/json"])
self.assertEqual(result[1], dict(charset = "utf-8"))
result = appier.parse_content_type("text/plain+json ; charset=utf-8")
self.assertEqual(type(result), tuple)
self.assertEqual(len(result), 2)
self.assertEqual(result[0], ["text/plain", "text/json"])
self.assertEqual(result[1], dict(charset = "utf-8"))
result = appier.parse_content_type("text/plain+json; charset=utf-8; boundary=hello;")
self.assertEqual(type(result), tuple)
self.assertEqual(len(result), 2)
self.assertEqual(result[0], ["text/plain", "text/json"])
self.assertEqual(result[1], dict(charset = "utf-8", boundary = "hello"))
result = appier.parse_content_type("")
self.assertEqual(type(result), tuple)
self.assertEqual(len(result), 2)
self.assertEqual(result[0], [])
self.assertEqual(result[1], dict())
result = appier.parse_content_type("text")
self.assertEqual(type(result), tuple)
self.assertEqual(len(result), 2)
self.assertEqual(result[0], [])
self.assertEqual(result[1], dict())
result = appier.parse_content_type("text/plain+json; charset")
self.assertEqual(type(result), tuple)
self.assertEqual(len(result), 2)
self.assertEqual(result[0], ["text/plain", "text/json"])
self.assertEqual(result[1], dict())
def test_check_login(self):
request = appier.Request("GET", "/", session_c = appier.MemorySession)
request.session["tokens"] = ["*"]
result = appier.check_login(None, token = "admin", request = request)
self.assertEqual(result, True)
self.assertEqual(request.session["tokens"], {"*" : True})
request.session["tokens"] = []
result = appier.check_login(None, token = "admin", request = request)
self.assertEqual(result, False)
self.assertEqual(request.session["tokens"], {})
request.session["tokens"] = ["admin"]
result = appier.check_login(None, token = "admin", request = request)
self.assertEqual(result, True)
self.assertEqual(request.session["tokens"], {"admin" : True})
request.session["tokens"] = ["admin.read"]
result = appier.check_login(None, token = "admin", request = request)
self.assertEqual(result, False)
result = appier.check_login(None, token = "admin.read", request = request)
self.assertEqual(result, True)
self.assertEqual(request.session["tokens"], {
"admin" : {
"read" : True
}
})
request.session["tokens"] = ["admin.*"]
result = appier.check_login(None, token = "admin.read", request = request)
self.assertEqual(result, True)
self.assertEqual(request.session["tokens"], {
"admin" : {
"*" : True
}
})
request.session["tokens"] = ["admin", "admin.write"]
result = appier.check_login(None, token = "admin.read", request = request)
self.assertEqual(result, False)
self.assertEqual(request.session["tokens"], {
"admin" : {
"_" : True,
"write" : True
}
})
request.session["tokens"] = ["admin.write", "admin.*"]
result = appier.check_login(None, token = "admin.read", request = request)
self.assertEqual(result, True)
self.assertEqual(request.session["tokens"], {
"admin" : {
"write" : True,
"*" : True
}
})
del request.session["tokens"]
result = appier.check_login(None, token = "admin.read", request = request)
self.assertEqual(result, False)
self.assertEqual("tokens" in request.session, False)
def test_check_tokens(self):
result = appier.check_tokens(None, ("admin", "user"), tokens_m = {"*" : True})
self.assertEqual(result, True)
result = appier.check_tokens(None, ("admin", "user"), tokens_m = {})
self.assertEqual(result, False)
result = appier.check_tokens(None, ("admin", "user"), tokens_m = {"admin" : True})
self.assertEqual(result, False)
def test_check_token(self):
result = appier.check_token(None, "admin", tokens_m = {"*" : True})
self.assertEqual(result, True)
result = appier.check_token(None, "admin", tokens_m = {})
self.assertEqual(result, False)
result = appier.check_token(None, "admin", tokens_m = {"admin" : True})
self.assertEqual(result, True)
result = appier.check_token(None, "admin.read", tokens_m = {
"admin" : {
"read" : True
}
})
self.assertEqual(result, True)
result = appier.check_token(None, "admin", tokens_m = {
"admin" : {
"read" : True
}
})
self.assertEqual(result, False)
result = appier.check_token(None, "admin.read", tokens_m = {
"admin" : {
"*" : True
}
})
self.assertEqual(result, True)
result = appier.check_token(None, None, tokens_m = {})
self.assertEqual(result, True)
def test_to_tokens_m(self):
result = appier.to_tokens_m(["admin"])
self.assertEqual(result, {"admin" : True})
result = appier.to_tokens_m(["admin", "admin.read"])
self.assertEqual(result, {
"admin" : {
"_" : True,
"read" : True
}
})
result = appier.to_tokens_m(["admin.read", "admin"])
self.assertEqual(result, {
"admin" : {
"_" : True,
"read" : True
}
})
result = appier.to_tokens_m(["admin", "admin.*"])
self.assertEqual(result, {
"admin" : {
"_" : True,
"*" : True
}
})
def test_dict_merge(self):
first = dict(a = "hello", b = "world")
second = dict(a = "hello_new", b = "world_new", c = "other")
result = appier.dict_merge(first, second)
self.assertEqual(id(result) in (id(first), (id(second))), False)
self.assertEqual(result["a"], "hello_new")
self.assertEqual(result["b"], "world_new")
self.assertEqual(result["c"], "other")
result = appier.dict_merge(first, second, override = False)
self.assertEqual(id(result) in (id(first), (id(second))), False)
self.assertEqual(result["a"], "hello")
self.assertEqual(result["b"], "world")
self.assertEqual(result["c"], "other")
first = dict(a = dict(a = "hello", b = "world", d = "other", m = dict(a = "hello")))
second = dict(a = dict(a = "hello_new", b = "world_new", c = "other", m = dict(b = "world")))
result = appier.dict_merge(first, second)
self.assertEqual(id(result) in (id(first), (id(second))), False)
self.assertEqual(result["a"], dict(
a = "hello_new",
b = "world_new",
c = "other",
m = dict(b = "world")
))
result = appier.dict_merge(first, second, recursive = True)
self.assertEqual(id(result) in (id(first), (id(second))), False)
self.assertEqual(result["a"], dict(
a = "hello_new",
b = "world_new",
c = "other",
d = "other",
m = dict(
a = "hello",
b = "world"
)
))
result = appier.dict_merge(first, second, override = False, recursive = True)
self.assertEqual(id(result) in (id(first), (id(second))), False)
self.assertEqual(result["a"], dict(
a = "hello",
b = "world",
c = "other",
d = "other",
m = dict(
a = "hello",
b = "world"
)
))
first = {
"info" : {
"personal" : {
"general" : {
"kind" : "human",
}
}
}
}
second = {
"info" : {
"personal" : {
"general": {
"kind" : "cat",
"tail" : "long",
"meaw" : 12
}
},
"profile": "base"
}
}
result = appier.dict_merge(first, second, override = False, recursive = True)
self.assertEqual(id(result) in (id(first), (id(second))), False)
self.assertEqual(result, {
"info" : {
"personal" : {
"general" : {
"kind" : "human",
"tail" : "long",
"meaw" : 12
}
},
"profile": "base"
}
})
result = appier.dict_merge(first["info"], second["info"], override = False, recursive = True)
self.assertEqual(id(result) in (id(first), (id(second))), False)
self.assertEqual(result, {
"personal": {
"general": {
"kind" : "human",
"tail" : "long",
"meaw" : 12
}
},
"profile": "base"
})
def test_verify(self):
result = appier.verify(1 == 1)
self.assertEqual(result, None)
result = appier.verify("hello" == "hello")
self.assertEqual(result, None)
self.assertRaises(appier.AssertionError, lambda: appier.verify(1 == 2))
self.assertRaises(
appier.OperationalError,
lambda: appier.verify(1 == 2, exception = appier.OperationalError)
)
def test_verify_equal(self):
result = appier.verify_equal(1, 1)
self.assertEqual(result, None)
result = appier.verify_equal("hello", "hello")
self.assertEqual(result, None)
self.assertRaises(appier.AssertionError, lambda: appier.verify_equal(1, 2))
self.assertRaises(
appier.OperationalError,
lambda: appier.verify_equal(1, 2, exception = appier.OperationalError)
)
def test_verify_not_equal(self):
result = appier.verify_not_equal(1, 2)
self.assertEqual(result, None)
result = appier.verify_not_equal("hello", "world")
self.assertEqual(result, None)
self.assertRaises(appier.AssertionError, lambda: appier.verify_not_equal(1, 1))
self.assertRaises(
appier.OperationalError,
lambda: appier.verify_not_equal(1, 1, exception = appier.OperationalError)
)
def test_verify_type(self):
result = appier.verify_type("hello", str)
self.assertEqual(result, None)
result = appier.verify_type(1, int)
self.assertEqual(result, None)
result = appier.verify_type(None, int)
self.assertEqual(result, None)
self.assertRaises(appier.AssertionError, lambda: appier.verify_type(1, str))
self.assertRaises(
appier.OperationalError,
lambda: appier.verify_type(1, str, exception = appier.OperationalError)
)
self.assertRaises(appier.AssertionError, lambda: appier.verify_type(None, str, null = False))
self.assertRaises(
appier.OperationalError,
lambda: appier.verify_type(None, str, null = False, exception = appier.OperationalError)
)
def test_verify_many(self):
result = appier.verify_many((1 == 1, 2 == 2, 3 == 3))
self.assertEqual(result, None)
result = appier.verify_many(("hello" == "hello",))
self.assertEqual(result, None)
self.assertRaises(appier.AssertionError, lambda: appier.verify_many((1 == 2,)))
self.assertRaises(appier.AssertionError, lambda: appier.verify_many((1 == 1, 1 == 2)))
self.assertRaises(
appier.OperationalError,
lambda: appier.verify_many(
(1 == 1, 1 == 2),
exception = appier.OperationalError
)
)
class FileTupleTest(unittest.TestCase):
def test_basic(self):
file = appier.FileTuple.from_data(
b"hello world",
name = "hello",
mime = "text/plain"
)
self.assertEqual(file.read(), b"hello world")
self.assertEqual(file.name, "hello")
self.assertEqual(file.mime, "text/plain")
self.assertEqual(file.data, b"hello world")
def test_interface(self):
file = appier.FileTuple.from_data(
b"hello world",
name = "hello",
mime = "text/plain"
)
self.assertEqual(file.read(), b"hello world")
self.assertEqual(file.tell(), 11)
file.seek(0)
self.assertEqual(file.tell(), 0)
self.assertEqual(file.read(5), b"hello")
self.assertEqual(file.tell(), 5)
class BaseThreadTest(unittest.TestCase):
def test_basic(self):
thread = appier.BaseThread(
args = (),
daemon = True,
name = "Test"
)
self.assertEqual(thread.name, "Test")
| apache-2.0 | 8,092,827,500,867,253,000 | 36.913997 | 181 | 0.5725 | false |
kengz/Unity-Lab | slm_lab/agent/algorithm/hydra_dqn.py | 1 | 6241 | from slm_lab.agent import net
from slm_lab.agent.algorithm import policy_util
from slm_lab.agent.algorithm.sarsa import SARSA
from slm_lab.agent.algorithm.dqn import DQN
from slm_lab.lib import logger, util
from slm_lab.lib.decorator import lab_api
import numpy as np
import torch
logger = logger.get_logger(__name__)
class HydraDQN(DQN):
'''Multi-task DQN with separate state and action processors per environment'''
@lab_api
def init_nets(self, global_nets=None):
'''Initialize nets with multi-task dimensions, and set net params'''
# NOTE: Separate init from MultitaskDQN despite similarities so that this implementation can support arbitrary sized state and action heads (e.g. multiple layers)
self.state_dims = in_dims = [body.state_dim for body in self.agent.nanflat_body_a]
self.action_dims = out_dims = [body.action_dim for body in self.agent.nanflat_body_a]
if global_nets is None:
NetClass = getattr(net, self.net_spec['type'])
self.net = NetClass(self.net_spec, in_dims, out_dims)
self.target_net = NetClass(self.net_spec, in_dims, out_dims)
self.net_names = ['net', 'target_net']
else:
util.set_attr(self, global_nets)
self.net_names = list(global_nets.keys())
self.post_init_nets()
self.online_net = self.target_net
self.eval_net = self.target_net
@lab_api
def calc_pdparam(self, xs, evaluate=True, net=None):
'''
Calculate pdparams for multi-action by chunking the network logits output
'''
pdparam = SARSA.calc_pdparam(self, xs, evaluate=evaluate, net=net)
return pdparam
@lab_api
def space_act(self, state_a):
'''Non-atomizable act to override agent.act(), do a single pass on the entire state_a instead of composing act() via iteration'''
# gather and flatten
states = []
for eb, body in util.ndenumerate_nonan(self.agent.body_a):
state = state_a[eb]
if self.normalize_state:
state = policy_util.update_online_stats_and_normalize_state(body, state)
states.append(state)
xs = [torch.from_numpy(state).float() for state in states]
pdparam = self.calc_pdparam(xs, evaluate=False)
# use multi-policy. note arg change
action_a, action_pd_a = self.action_policy(states, self, self.agent.nanflat_body_a, pdparam)
for idx, body in enumerate(self.agent.nanflat_body_a):
body.action_tensor, body.action_pd = action_a[idx], action_pd_a[idx] # used for body.action_pd_update later
return action_a.cpu().numpy()
@lab_api
def space_sample(self):
'''Samples a batch per body, which may experience different environment'''
batch = {k: [] for k in self.body.memory.data_keys}
for body in self.agent.nanflat_body_a:
body_batch = body.memory.sample()
if self.normalize_state:
body_batch = policy_util.normalize_states_and_next_states(body, body_batch)
body_batch = util.to_torch_batch(body_batch, self.net.device, body.memory.is_episodic)
for k, arr in batch.items():
arr.append(body_batch[k])
return batch
def calc_q_loss(self, batch):
'''Compute the Q value loss for Hydra network by apply the singleton logic on generalized aggregate.'''
q_preds = torch.stack(self.net.wrap_eval(batch['states']))
act_q_preds = q_preds.gather(-1, torch.stack(batch['actions']).long().unsqueeze(-1)).squeeze(-1)
        # Estimate next-state Q values with both the online and eval nets
        online_next_q_preds = torch.stack(self.online_net.wrap_eval(batch['next_states']))
        next_q_preds = torch.stack(self.eval_net.wrap_eval(batch['next_states']))
        # Double-DQN: actions are chosen via eval_net's argmax and valued by online_net
        max_next_q_preds = online_next_q_preds.gather(-1, next_q_preds.argmax(dim=-1, keepdim=True)).squeeze(-1)
max_q_targets = torch.stack(batch['rewards']) + self.gamma * (1 - torch.stack(batch['dones'])) * max_next_q_preds
q_loss = self.net.loss_fn(act_q_preds, max_q_targets)
# TODO use the same loss_fn but do not reduce yet
for body in self.agent.nanflat_body_a:
if 'Prioritized' in util.get_class_name(body.memory): # PER
errors = torch.abs(max_q_targets - act_q_preds)
body.memory.update_priorities(errors)
return q_loss
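    # A minimal sketch (toy tensors, values invented) of the double-DQN
    # selection in calc_q_loss above: one net's argmax chooses the action,
    # the other net's estimate values it.
    #
    #   online_next_q = torch.tensor([[1.0, 3.0], [2.0, 0.5]])  # (batch, actions)
    #   eval_next_q   = torch.tensor([[0.9, 2.5], [1.8, 0.7]])
    #   best = eval_next_q.argmax(dim=-1, keepdim=True)    # [[1], [0]]
    #   online_next_q.gather(-1, best).squeeze(-1)         # tensor([3., 2.])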
@lab_api
def space_train(self):
'''
Completes one training step for the agent if it is time to train.
i.e. the environment timestep is greater than the minimum training timestep and a multiple of the training_frequency.
Each training step consists of sampling n batches from the agent's memory.
        For each of the batches, the target Q values (q_targets) are computed and a single training step is taken k times.
Otherwise this function does nothing.
'''
if util.in_eval_lab_modes():
self.body.flush()
return np.nan
clock = self.body.env.clock # main clock
tick = util.s_get(self, 'aeb_space.clock').get(clock.max_tick_unit)
self.to_train = (tick > self.training_start_step and tick % self.training_frequency == 0)
if self.to_train == 1:
total_loss = torch.tensor(0.0, device=self.net.device)
for _ in range(self.training_epoch):
batch = self.space_sample()
for _ in range(self.training_batch_epoch):
loss = self.calc_q_loss(batch)
self.net.training_step(loss=loss, lr_clock=clock)
total_loss += loss
loss = total_loss / (self.training_epoch * self.training_batch_epoch)
# reset
self.to_train = 0
for body in self.agent.nanflat_body_a:
body.flush()
logger.debug(f'Trained {self.name} at epi: {clock.epi}, total_t: {clock.total_t}, t: {clock.t}, total_reward so far: {self.body.memory.total_reward}, loss: {loss:g}')
return loss.item()
else:
return np.nan
| mit | -135,697,500,058,434,270 | 49.739837 | 178 | 0.627784 | false |
dpm76/eaglebone | drone/flight/stabilization/imu6050.py | 1 | 7844 | # -*- coding: utf-8 -*-
'''
Created on 23/10/2015
@author: david
'''
import logging
import math
import time
import imu6050_defs as reg
from sensors.I2CSensor import I2CSensor
from sensors.vector import Vector
from copy import deepcopy
from flight.stabilization.state import SensorState
try:
import smbus
except ImportError:
class smbus(object):
@staticmethod
def SMBus(channel):
raise Exception("smbus module not found!")
class Imu6050(I2CSensor):
'''
Gyro and accelerometer
'''
ADDRESS = 0x68
GYRO2DEG = 250.0 / 32767.0 # +/- 250º/s mode
ACCEL2G = 2.0 / 32767.0 # +/- 2g mode
GRAVITY = 9.807 #m/s²
PI2 = math.pi / 2.0
ACCEL2MS2 = GRAVITY * ACCEL2G
#CALIBRATION_FILE_PATH = "../calibration.config.json"
def __init__(self):
'''
Constructor
'''
self._setAddress(Imu6050.ADDRESS)
self._bus = smbus.SMBus(1)
self._gyroOffset = [0]*3
self._gyroReadTime = time.time()
self._previousAngles = [0.0]*3
self._accOffset = [0]*3
self._accAnglesOffset = [0.0]*2
self._lastReadAccRawData = [0]*3
self._angSpeed = [0.0]*2
self._localGravity = 0.0
self._state = SensorState()
def _readRawGyroX(self):
return self._readWordHL(reg.GYRO_XOUT)
def _readRawGyroY(self):
return self._readWordHL(reg.GYRO_YOUT)
def _readRawGyroZ(self):
return self._readWordHL(reg.GYRO_ZOUT)
def _readAngSpeed(self, reg, index):
data = (self._readWordHL(reg) - self._gyroOffset[index]) * Imu6050.GYRO2DEG
return data
def readAngleSpeeds(self):
return self._state.angleSpeeds
def _readAngleSpeeds(self):
speedAX = self._readAngSpeedX()
speedAY = self._readAngSpeedY()
speedAZ = self._readAngSpeedZ()
self._state.angleSpeeds = [speedAX, speedAY, speedAZ]
def _readAngSpeedX(self):
return self._readAngSpeed(reg.GYRO_XOUT, 0)
def _readAngSpeedY(self):
return self._readAngSpeed(reg.GYRO_YOUT, 1)
def _readAngSpeedZ(self):
return self._readAngSpeed(reg.GYRO_ZOUT, 2)
def _readAccAngles(self):
rawAccX = self._readRawAccelX()
rawAccY = self._readRawAccelY()
rawAccZ = self._readRawAccelZ()
accAngX = math.degrees(math.atan2(rawAccY, rawAccZ))
accAngY = -math.degrees(math.atan2(rawAccX, rawAccZ))
accAngles = [accAngX, accAngY]
return accAngles
def readAngles(self):
return self._state.angles
def _readAngles(self):
accAngles = self._readAccAngles()
previousAngSpeeds = self._angSpeed
self._angSpeed = [self._state.angleSpeeds[0],self._state.angleSpeeds[1]] #[self._readAngSpeedX(), self._readAngSpeedY()]
currentTime = time.time()
dt2 = (currentTime - self._gyroReadTime) / 2.0
currentAngles = [0.0]*3
for index in range(2):
expectedAngle = self._previousAngles[index] + \
(self._angSpeed[index] + previousAngSpeeds[index]) * dt2
currentAngles[index] = 0.2 * accAngles[index] + 0.8 * expectedAngle
self._gyroReadTime = currentTime
self._previousAngles = currentAngles
self._state.angles = deepcopy(currentAngles)
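    # A hedged worked example of the 0.2/0.8 complementary filter above
    # (numbers invented): with a previous fused angle of 8.0 deg, a
    # trapezoidal gyro estimate of 8.08 deg and an accelerometer angle of
    # 10.0 deg, the fused angle is 0.2 * 10.0 + 0.8 * 8.08 = 8.464 deg --
    # the gyro dominates short-term while the accelerometer corrects drift.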
def readDeviceAngles(self):
angles = self.readAngles()
angles[0] -= self._accAnglesOffset[0]
angles[1] -= self._accAnglesOffset[1]
return angles
def _readRawAccel(self, reg):
return self._readWordHL(reg)
def _readRawAccelX(self):
return self._readRawAccel(reg.ACC_XOUT)
def _readRawAccelY(self):
return self._readRawAccel(reg.ACC_YOUT)
def _readRawAccelZ(self):
return self._readRawAccel(reg.ACC_ZOUT)
def readAccels(self):
return self._state.accels
def _readAccels(self):
accelX = self._readRawAccelX() * Imu6050.ACCEL2MS2
accelY = self._readRawAccelY() * Imu6050.ACCEL2MS2
accelZ = self._readRawAccelZ() * Imu6050.ACCEL2MS2
angles = [math.radians(angle) for angle in self.readAngles()]
accels = Vector.rotateVector3D([accelX, accelY, accelZ], angles + [0.0])
#Eliminate gravity acceleration
accels[2] -= self._localGravity
self._state.accels = accels
def readQuaternions(self):
#TODO
pass
def resetGyroReadTime(self):
self._gyroReadTime = time.time()
def refreshState(self):
self._readAngleSpeeds()
self._readAngles()
self._readAccels()
def start(self):
'''
Initializes sensor
'''
startMessage = "Using IMU-6050."
print startMessage
logging.info(startMessage)
#Initializes gyro
self._bus.write_byte_data(self._address, reg.PWR_MGM1, reg.RESET)
self._bus.write_byte_data(self._address, reg.PWR_MGM1, reg.CLK_SEL_X)
#1kHz (as DPLF_CG_6) / (SMPLRT_DIV +1) => sample rate @50Hz)
self._bus.write_byte_data(self._address, reg.SMPRT_DIV, 19)
#DLPF_CFG_6: Low-pass filter @5Hz; analog sample rate @1kHz
self._bus.write_byte_data(self._address, reg.CONFIG, reg.DLPF_CFG_6)
self._bus.write_byte_data(self._address, reg.GYRO_CONFIG, reg.GFS_250)
self._bus.write_byte_data(self._address, reg.ACCEL_CONFIG, reg.AFS_2)
self._bus.write_byte_data(self._address, reg.PWR_MGM1, 0)
#TODO 20160202 DPM - Sample rate at least at 400Hz
#Wait for sensor stabilization
time.sleep(1)
self.calibrate()
def calibrate(self):
'''
Calibrates sensor
'''
print "Calibrating accelerometer..."
self._accOffset = [0.0]*3
i = 0
while i < 100:
self._accOffset[0] += self._readRawAccelX()
self._accOffset[1] += self._readRawAccelY()
self._accOffset[2] += self._readRawAccelZ()
time.sleep(0.02)
i+=1
for index in range(3):
self._accOffset[index] /= float(i)
#Calibrate gyro
print "Calibrating gyro..."
self._gyroOffset = [0.0]*3
i = 0
while i < 100:
self._gyroOffset[0] += self._readRawGyroX()
self._gyroOffset[1] += self._readRawGyroY()
self._gyroOffset[2] += self._readRawGyroZ()
time.sleep(0.02)
i += 1
for index in range(3):
self._gyroOffset[index] /= float(i)
#Calculate sensor installation angles
self._accAnglesOffset[0] = self._previousAngles[0] = math.degrees(math.atan2(self._accOffset[1], self._accOffset[2]))
self._accAnglesOffset[1] = self._previousAngles[1] = -math.degrees(math.atan2(self._accOffset[0], self._accOffset[2]))
#Calculate local gravity
angles = [math.radians(angle) for angle in self._accAnglesOffset]
accels = [accel * Imu6050.ACCEL2MS2 for accel in self._accOffset]
self._localGravity = Vector.rotateVector3D(accels, angles + [0.0])[2]
def getMaxErrorZ(self):
return 0.1
def stop(self):
pass
| isc | 3,956,320,715,344,091,600 | 23.974522 | 128 | 0.55394 | false |
slibby/machine | openaddr/ci/__init__.py | 1 | 28361 | import logging; _L = logging.getLogger('openaddr.ci')
from ..compat import standard_library, expand_uri
from .. import jobs, render
from .objects import (
add_job, write_job, read_job, complete_set, update_set_renders,
add_run, set_run, copy_run, read_completed_set_runs,
get_completed_file_run, get_completed_run, new_read_completed_set_runs
)
from os.path import relpath, splitext, join, basename
from datetime import timedelta
from uuid import uuid4, getnode
from base64 import b64decode
from tempfile import mkdtemp
from shutil import rmtree
from time import sleep
import json, os
from flask import Flask, request, Response, current_app, jsonify, render_template
from requests import get, post
from dateutil.tz import tzutc
from psycopg2 import connect
from boto import connect_sns
from pq import PQ
# Ask Python 2 to get real unicode from the database.
# http://initd.org/psycopg/docs/usage.html#unicode-handling
import psycopg2.extensions
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
def load_config():
def truthy(value):
return bool(value.lower() in ('yes', 'true'))
secrets_string = os.environ.get('WEBHOOK_SECRETS', u'').encode('utf8')
webhook_secrets = secrets_string.split(b',') if secrets_string else []
return dict(GAG_GITHUB_STATUS=truthy(os.environ.get('GAG_GITHUB_STATUS', '')),
GITHUB_AUTH=(os.environ['GITHUB_TOKEN'], 'x-oauth-basic'),
MEMCACHE_SERVER=os.environ.get('MEMCACHE_SERVER'),
DATABASE_URL=os.environ['DATABASE_URL'],
WEBHOOK_SECRETS=webhook_secrets)
MAGIC_OK_MESSAGE = 'Everything is fine'
TASK_QUEUE, DONE_QUEUE, DUE_QUEUE = 'tasks', 'finished', 'due'
# Additional delay after JOB_TIMEOUT for due tasks.
DUETASK_DELAY = timedelta(minutes=5)
# Amount of time to reuse run results.
RUN_REUSE_TIMEOUT = timedelta(days=5)
# Time to chill out in pop_task_from_taskqueue() after sending Done task.
WORKER_COOLDOWN = timedelta(seconds=5)
def td2str(td):
    ''' Convert a timedelta to a whole-seconds string like '10800s'.
    
        Will not be necessary when https://github.com/malthe/pq/pull/5 is released.
    '''
    return '{}s'.format(td.seconds + td.days * 86400)
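def _demo_td2str():
    ''' Hedged usage sketch for td2str(); not part of the original module.
    '''
    # Three hours come out as whole seconds, not as '3h'.
    assert td2str(timedelta(hours=3)) == '10800s'
    # Days are folded into the seconds total.
    assert td2str(timedelta(days=1, seconds=5)) == '86405s'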
def get_touched_payload_files(payload):
''' Return a set of files modified in payload commits.
'''
touched = set()
# Iterate over commits in chronological order.
for commit in payload['commits']:
for filelist in (commit['added'], commit['modified']):
# Include any potentially-new files.
touched.update(filelist)
for filename in commit['removed']:
# Skip files that no longer exist.
if filename in touched:
touched.remove(filename)
current_app.logger.debug(u'Touched files {}'.format(', '.join(touched)))
return touched
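# A hedged illustration of the rule above (paths invented): a file added in
# one commit and removed in a later commit of the same payload drops out, so
#   {'commits': [{'added': ['sources/us/ca.json'], 'modified': [], 'removed': []},
#                {'added': [], 'modified': [], 'removed': ['sources/us/ca.json']}]}
# yields an empty touched set.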
def get_touched_branch_files(payload, github_auth):
''' Return a set of files modified between master and payload head.
'''
branch_sha = payload['head_commit']['id']
compare1_url = payload['repository']['compare_url']
compare1_url = expand_uri(compare1_url, dict(base='master', head=branch_sha))
current_app.logger.debug('Compare URL 1 {}'.format(compare1_url))
compare1 = get(compare1_url, auth=github_auth).json()
merge_base_sha = compare1['merge_base_commit']['sha']
# That's no branch.
if merge_base_sha == branch_sha:
return set()
compare2_url = payload['repository']['compare_url']
compare2_url = expand_uri(compare2_url, dict(base=merge_base_sha, head=branch_sha))
current_app.logger.debug('Compare URL 2 {}'.format(compare2_url))
compare2 = get(compare2_url, auth=github_auth).json()
touched = set([file['filename'] for file in compare2['files']])
current_app.logger.debug(u'Touched files {}'.format(', '.join(touched)))
return touched
def get_touched_pullrequest_files(payload, github_auth):
''' Return a set of files modified between master and payload head.
'''
if payload['action'] == 'closed':
return set()
base_sha = payload['pull_request']['base']['sha']
head_sha = payload['pull_request']['head']['sha']
compare_url = payload['pull_request']['head']['repo']['compare_url']
compare_url = expand_uri(compare_url, dict(head=head_sha, base=base_sha))
current_app.logger.debug('Compare URL {}'.format(compare_url))
compare = get(compare_url, auth=github_auth).json()
touched = set([file['filename'] for file in compare['files']])
current_app.logger.debug(u'Touched files {}'.format(', '.join(touched)))
return touched
def skip_payload(payload):
''' Return True if this payload should not be processed.
'''
if 'action' in payload and 'pull_request' in payload:
return bool(payload['action'] == 'closed')
if 'commits' in payload and 'head_commit' in payload:
# Deleted refs will not have a status URL.
return bool(payload.get('deleted') == True)
return True
def process_payload_files(payload, github_auth):
''' Return a dictionary of file paths to raw JSON contents and file IDs.
'''
if 'action' in payload and 'pull_request' in payload:
return process_pullrequest_payload_files(payload, github_auth)
if 'commits' in payload and 'head_commit' in payload:
return process_pushevent_payload_files(payload, github_auth)
raise ValueError('Unintelligible webhook payload')
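# Minimal example payloads for the dispatch above (hedged, fields invented;
# real Github webhooks carry many more keys):
#
#   {'action': 'opened', 'pull_request': {...}}       -> pull-request handler
#   {'commits': [...], 'head_commit': {'id': '...'}}  -> push handler
#   {'zen': 'Keep it logically awesome.'}             -> ValueError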
def process_pullrequest_payload_files(payload, github_auth):
''' Return a dictionary of files paths from a pull request event payload.
https://developer.github.com/v3/activity/events/types/#pullrequestevent
'''
files = dict()
touched = get_touched_pullrequest_files(payload, github_auth)
commit_sha = payload['pull_request']['head']['sha']
for filename in touched:
if relpath(filename, 'sources').startswith('..'):
# Skip things outside of sources directory.
continue
if splitext(filename)[1] != '.json':
# Skip non-JSON files.
continue
contents_url = payload['pull_request']['head']['repo']['contents_url'] + '{?ref}'
contents_url = expand_uri(contents_url, dict(path=filename, ref=commit_sha))
current_app.logger.debug('Contents URL {}'.format(contents_url))
got = get(contents_url, auth=github_auth)
contents = got.json()
if got.status_code not in range(200, 299):
current_app.logger.warning('Skipping {} - {}'.format(filename, got.status_code))
continue
if contents['encoding'] != 'base64':
raise ValueError('Unrecognized encoding "{encoding}"'.format(**contents))
current_app.logger.debug('Contents SHA {sha}'.format(**contents))
files[filename] = contents['content'], contents['sha']
return files
def process_pushevent_payload_files(payload, github_auth):
''' Return a dictionary of files paths from a push event payload.
https://developer.github.com/v3/activity/events/types/#pushevent
'''
files = dict()
touched = get_touched_payload_files(payload)
touched |= get_touched_branch_files(payload, github_auth)
commit_sha = payload['head_commit']['id']
for filename in touched:
if relpath(filename, 'sources').startswith('..'):
# Skip things outside of sources directory.
continue
if splitext(filename)[1] != '.json':
# Skip non-JSON files.
continue
contents_url = payload['repository']['contents_url'] + '{?ref}'
contents_url = expand_uri(contents_url, dict(path=filename, ref=commit_sha))
current_app.logger.debug('Contents URL {}'.format(contents_url))
got = get(contents_url, auth=github_auth)
contents = got.json()
if got.status_code not in range(200, 299):
current_app.logger.warning('Skipping {} - {}'.format(filename, got.status_code))
continue
if contents['encoding'] != 'base64':
raise ValueError('Unrecognized encoding "{encoding}"'.format(**contents))
current_app.logger.debug('Contents SHA {sha}'.format(**contents))
files[filename] = contents['content'], contents['sha']
return files
def get_commit_info(payload):
''' Get commit SHA and Github status API URL from webhook payload.
'''
if 'pull_request' in payload:
commit_sha = payload['pull_request']['head']['sha']
status_url = payload['pull_request']['statuses_url']
elif 'head_commit' in payload:
commit_sha = payload['head_commit']['id']
status_url = payload['repository']['statuses_url']
status_url = expand_uri(status_url, dict(sha=commit_sha))
else:
raise ValueError('Unintelligible payload')
current_app.logger.debug('Status URL {}'.format(status_url))
return commit_sha, status_url
def post_github_status(status_url, status_json, github_auth):
''' POST status JSON to Github status API.
'''
if status_url is None:
return
# Github only wants 140 chars of description.
status_json['description'] = status_json['description'][:140]
posted = post(status_url, data=json.dumps(status_json), auth=github_auth,
headers={'Content-Type': 'application/json'})
if posted.status_code not in range(200, 299):
raise ValueError('Failed status post to {}'.format(status_url))
if posted.json()['state'] != status_json['state']:
raise ValueError('Mismatched status post to {}'.format(status_url))
def update_pending_status(status_url, job_url, filenames, github_auth):
''' Push pending status for head commit to Github status API.
'''
status = dict(context='openaddresses/hooked', state='pending',
description=u'Checking {}'.format(', '.join(filenames)),
target_url=job_url)
return post_github_status(status_url, status, github_auth)
def update_error_status(status_url, message, filenames, github_auth):
''' Push error status for head commit to Github status API.
'''
status = dict(context='openaddresses/hooked', state='error',
description=u'Errored on {}: {}'.format(', '.join(filenames), message))
return post_github_status(status_url, status, github_auth)
def update_failing_status(status_url, job_url, bad_files, filenames, github_auth):
''' Push failing status for head commit to Github status API.
'''
status = dict(context='openaddresses/hooked', state='failure',
description=u'Failed on {} from {}'.format(', '.join(bad_files), ', '.join(filenames)),
target_url=job_url)
return post_github_status(status_url, status, github_auth)
def update_empty_status(status_url, github_auth):
''' Push success status for head commit to Github status API.
'''
status = dict(context='openaddresses/hooked', state='success',
description='Nothing to check')
return post_github_status(status_url, status, github_auth)
def update_success_status(status_url, job_url, filenames, github_auth):
''' Push success status for head commit to Github status API.
'''
status = dict(context='openaddresses/hooked', state='success',
description=u'Succeeded on {}'.format(', '.join(filenames)),
target_url=job_url)
return post_github_status(status_url, status, github_auth)
def find_batch_sources(owner, repository, github_auth):
''' Starting with a Github repo API URL, generate a stream of master sources.
'''
resp = get('https://api.github.com/', auth=github_auth)
if resp.status_code >= 400:
raise Exception('Got status {} from Github API'.format(resp.status_code))
start_url = expand_uri(resp.json()['repository_url'], dict(owner=owner, repo=repository))
_L.info('Starting batch sources at {start_url}'.format(**locals()))
got = get(start_url, auth=github_auth).json()
contents_url, commits_url = got['contents_url'], got['commits_url']
master_url = expand_uri(commits_url, dict(sha=got['default_branch']))
_L.debug('Getting {ref} branch {master_url}'.format(ref=got['default_branch'], **locals()))
got = get(master_url, auth=github_auth).json()
commit_sha, commit_date = got['sha'], got['commit']['committer']['date']
contents_url += '{?ref}' # So that we are consistently at the same commit.
sources_urls = [expand_uri(contents_url, dict(path='sources', ref=commit_sha))]
sources_dict = dict()
for sources_url in sources_urls:
_L.debug('Getting sources {sources_url}'.format(**locals()))
sources = get(sources_url, auth=github_auth).json()
for source in sources:
if source['type'] == 'dir':
params = dict(path=source['path'], ref=commit_sha)
sources_urls.append(expand_uri(contents_url, params))
continue
if source['type'] != 'file':
continue
path_base, ext = splitext(source['path'])
if ext == '.json':
_L.debug('Getting source {url}'.format(**source))
more_source = get(source['url'], auth=github_auth).json()
yield dict(commit_sha=commit_sha, url=source['url'],
blob_sha=source['sha'], path=source['path'],
content=more_source['content'])
def enqueue_sources(queue, the_set, sources):
''' Batch task generator, yields counts of remaining expected paths.
'''
expected_paths = set()
commit_sha = None
#
# Enqueue each source if there is nothing else in the queue.
#
for source in sources:
while len(queue) >= 1:
yield len(expected_paths)
with queue as db:
_L.info(u'Sending {path} to task queue'.format(**source))
task_data = dict(job_id=None, url=None, set_id=the_set.id,
name=source['path'],
content_b64=source['content'],
commit_sha=source['commit_sha'],
file_id=source['blob_sha'])
task_id = queue.put(task_data)
expected_paths.add(source['path'])
commit_sha = source['commit_sha']
while len(expected_paths):
with queue as db:
_update_expected_paths(db, expected_paths, the_set)
yield len(expected_paths)
with queue as db:
complete_set(db, the_set.id, commit_sha)
yield 0
def _update_expected_paths(db, expected_paths, the_set):
''' Discard sources from expected_paths set as they appear in runs table.
'''
for (_, source_path, _, _) in read_completed_set_runs(db, the_set.id):
_L.debug(u'Discarding {}'.format(source_path))
expected_paths.discard(source_path)
def render_index_maps(s3, runs):
''' Render index maps and upload them to S3.
'''
dirname = mkdtemp(prefix='index-maps-')
try:
good_runs = [run for run in runs if (run.state or {}).get('processed')]
good_sources = _prepare_render_sources(good_runs, dirname)
_render_and_upload_maps(s3, good_sources, '/', dirname)
finally:
rmtree(dirname)
def render_set_maps(s3, db, the_set):
''' Render set maps, upload them to S3 and add to the database.
'''
dirname = mkdtemp(prefix='set-maps-')
try:
s3_prefix = join('/sets', str(the_set.id))
runs = new_read_completed_set_runs(db, the_set.id)
good_sources = _prepare_render_sources(runs, dirname)
s3_urls = _render_and_upload_maps(s3, good_sources, s3_prefix, dirname)
update_set_renders(db, the_set.id, *s3_urls)
finally:
rmtree(dirname)
def _render_and_upload_maps(s3, good_sources, s3_prefix, dirname):
''' Render set maps, upload them to S3 and return their URLs.
'''
urls = dict()
areas = (render.WORLD, 'world'), (render.USA, 'usa'), (render.EUROPE, 'europe')
key_kwargs = dict(policy='public-read', headers={'Content-Type': 'image/png'})
url_kwargs = dict(expires_in=0, query_auth=False, force_http=True)
for (area, area_name) in areas:
png_basename = 'render-{}.png'.format(area_name)
png_filename = join(dirname, png_basename)
render.render(dirname, good_sources, 960, 2, png_filename, area)
with open(png_filename, 'rb') as file:
render_key = s3.new_key(join(s3_prefix, png_basename))
render_key.set_contents_from_string(file.read(), **key_kwargs)
urls[area_name] = render_key.generate_url(**url_kwargs)
return urls['world'], urls['usa'], urls['europe']
def _prepare_render_sources(runs, dirname):
''' Dump all non-null set runs into a directory for rendering.
'''
good_sources = set()
for run in runs:
filename = '{source_id}.json'.format(**run.__dict__)
with open(join(dirname, filename), 'w+b') as file:
content = b64decode(run.source_data)
file.write(content)
if run.status is True:
good_sources.add(filename)
return good_sources
def calculate_job_id(files):
    ''' Return a random UUID string to use as a job ID.
    '''
    return str(uuid4())
    
    #
    # Previously, we created a deterministic hash of
    # the files, but for now that might be too optimistic.
    # The code below is unreachable, and would also need
    # sha1 imported from hashlib to run.
    #
    blob = json.dumps(files, ensure_ascii=True, sort_keys=True)
    job_id = sha1(blob).hexdigest()
    return job_id
def create_queued_job(queue, files, job_url_template, commit_sha, status_url):
''' Create a new job, and add its files to the queue.
'''
filenames = list(files.keys())
file_states = {name: None for name in filenames}
file_results = {name: None for name in filenames}
job_id = calculate_job_id(files)
job_url = job_url_template and expand_uri(job_url_template, dict(id=job_id))
job_status = None
with queue as db:
task_files = add_files_to_queue(queue, job_id, job_url, files, commit_sha)
add_job(db, job_id, None, task_files, file_states, file_results, status_url)
return job_id
def add_files_to_queue(queue, job_id, job_url, files, commit_sha):
''' Make a new task for each file, return dict of file IDs to file names.
'''
tasks = {}
for (file_name, (content_b64, file_id)) in files.items():
task_data = dict(job_id=job_id, url=job_url, name=file_name,
content_b64=content_b64, file_id=file_id,
commit_sha=commit_sha)
# Spread tasks out over time.
delay = timedelta(seconds=len(tasks))
queue.put(task_data, expected_at=td2str(delay))
tasks[file_id] = file_name
return tasks
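# A hedged illustration of the spreading above: with three files, the tasks
# are enqueued with expected_at values of '0s', '1s' and '2s', so workers
# pick them up staggered rather than all at once.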
def is_completed_run(db, run_id, min_datetime):
    ''' Return true if the given run has completed at or after min_datetime.
    '''
if min_datetime.tzinfo:
# Convert known time zones to UTC.
min_dtz = min_datetime.astimezone(tzutc())
else:
# Assume unspecified time zones are UTC.
min_dtz = min_datetime.replace(tzinfo=tzutc())
completed_run = get_completed_run(db, run_id, min_dtz)
if completed_run:
_L.debug('Found completed run {0} ({1}) since {min_datetime}'.format(*completed_run, **locals()))
else:
_L.debug('No completed run {run_id} since {min_datetime}'.format(**locals()))
return bool(completed_run is not None)
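def _demo_utc_normalization():
    ''' Hedged sketch of the timezone handling above; values are invented.
    '''
    from datetime import datetime
    naive = datetime(2015, 1, 1, 12, 0)
    aware = naive.replace(tzinfo=tzutc())
    # A naive datetime assumed to be UTC and an aware datetime converted
    # to UTC land on the same instant.
    assert naive.replace(tzinfo=tzutc()) == aware.astimezone(tzutc())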
def update_job_status(db, job_id, job_url, filename, run_status, results, github_auth):
    ''' Update one file's state within a job, then post job status to Github.
    '''
try:
job = read_job(db, job_id)
except TypeError:
raise Exception('Job {} not found'.format(job_id))
if filename not in job.states:
raise Exception('Unknown file from job {}: "{}"'.format(job.id, filename))
filenames = list(job.task_files.values())
job.states[filename] = run_status
job.file_results[filename] = results
# Update job status.
if False in job.states.values():
# Any task failure means the whole job has failed.
job.status = False
elif None in job.states.values():
job.status = None
else:
job.status = True
write_job(db, job.id, job.status, job.task_files, job.states, job.file_results, job.github_status_url)
if not job.github_status_url:
_L.warning('No status_url to tell about {} status of job {}'.format(job.status, job.id))
return
if job.status is False:
bad_files = [name for (name, state) in job.states.items() if state is False]
update_failing_status(job.github_status_url, job_url, bad_files, filenames, github_auth)
elif job.status is None:
update_pending_status(job.github_status_url, job_url, filenames, github_auth)
elif job.status is True:
update_success_status(job.github_status_url, job_url, filenames, github_auth)
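def _demo_job_status_aggregation():
    ''' Hedged sketch of the tri-state aggregation above; not original code.
    '''
    def aggregate(states):
        # Any failed file fails the job; otherwise any pending file keeps
        # the job pending; otherwise the job succeeds.
        if False in states.values():
            return False
        elif None in states.values():
            return None
        return True
    assert aggregate({'a.json': True, 'b.json': False}) is False
    assert aggregate({'a.json': True, 'b.json': None}) is None
    assert aggregate({'a.json': True, 'b.json': True}) is True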
def pop_task_from_taskqueue(s3, task_queue, done_queue, due_queue, output_dir):
    ''' Pop one task from the task queue, then reuse a recent run or do the work.
    '''
with task_queue as db:
task = task_queue.get()
        # PQ will return None after its one-second timeout if no task is available
if task is None:
return
_L.info(u'Got file {name} from task queue'.format(**task.data))
passed_on_keys = 'job_id', 'file_id', 'name', 'url', 'content_b64', 'commit_sha', 'set_id'
passed_on_kwargs = {k: task.data.get(k) for k in passed_on_keys}
passed_on_kwargs['worker_id'] = hex(getnode()).rstrip('L')
interval = '{} seconds'.format(RUN_REUSE_TIMEOUT.seconds + RUN_REUSE_TIMEOUT.days * 86400)
previous_run = get_completed_file_run(db, task.data.get('file_id'), interval)
if previous_run:
# Make a copy of the previous run.
previous_run_id, _, _ = previous_run
copy_args = (passed_on_kwargs[k] for k in ('job_id', 'commit_sha', 'set_id'))
passed_on_kwargs['run_id'] = copy_run(db, previous_run_id, *copy_args)
# Don't send a due task, since we will not be doing any actual work.
else:
# Reserve space for a new run.
passed_on_kwargs['run_id'] = add_run(db)
# Send a Due task, possibly for later.
due_task_data = dict(task_data=task.data, **passed_on_kwargs)
due_queue.put(due_task_data, schedule_at=td2str(jobs.JOB_TIMEOUT + DUETASK_DELAY))
if previous_run:
# Re-use result from the previous run.
run_id, state, status = previous_run
message = MAGIC_OK_MESSAGE if status else 'Re-using failed previous run'
result = dict(message=message, reused_run=run_id, output=state)
else:
# Run the task.
from . import worker # <-- TODO: un-suck this.
source_name, _ = splitext(relpath(passed_on_kwargs['name'], 'sources'))
result = worker.do_work(s3, passed_on_kwargs['run_id'], source_name,
passed_on_kwargs['content_b64'], output_dir)
# Send a Done task
done_task_data = dict(result=result, **passed_on_kwargs)
done_queue.put(done_task_data, expected_at=td2str(timedelta(0)))
_L.info('Done')
# Sleep a short time to allow done task to show up in runs table.
# In a one-worker situation with repetitive pull request jobs,
# this helps the next job take advantage of previous run results.
sleep(WORKER_COOLDOWN.seconds + WORKER_COOLDOWN.days * 86400)
def pop_task_from_donequeue(queue, github_auth):
''' Look for a completed job in the "done" task queue, update Github status.
'''
with queue as db:
task = queue.get()
if task is None:
return
_L.info(u'Got file {name} from done queue'.format(**task.data))
results = task.data['result']
message = results['message']
run_state = results.get('output', None)
content_b64 = task.data['content_b64']
commit_sha = task.data['commit_sha']
worker_id = task.data.get('worker_id')
set_id = task.data.get('set_id')
job_url = task.data['url']
filename = task.data['name']
file_id = task.data['file_id']
run_id = task.data['run_id']
job_id = task.data['job_id']
if is_completed_run(db, run_id, task.enqueued_at):
# We are too late, this got handled.
return
run_status = bool(message == MAGIC_OK_MESSAGE)
set_run(db, run_id, filename, file_id, content_b64, run_state,
run_status, job_id, worker_id, commit_sha, set_id)
if job_id:
update_job_status(db, job_id, job_url, filename, run_status, results, github_auth)
def pop_task_from_duequeue(queue, github_auth):
    ''' Look for an overdue task in the "due" queue, fail its run if unfinished.
    '''
with queue as db:
task = queue.get()
if task is None:
return
_L.info(u'Got file {name} from due queue'.format(**task.data))
original_task = task.data['task_data']
content_b64 = task.data['content_b64']
commit_sha = task.data['commit_sha']
worker_id = task.data.get('worker_id')
set_id = task.data.get('set_id')
job_url = task.data['url']
filename = task.data['name']
file_id = task.data['file_id']
run_id = task.data['run_id']
job_id = task.data['job_id']
if is_completed_run(db, run_id, task.enqueued_at):
# Everything's fine, this got handled.
return
run_status = False
set_run(db, run_id, filename, file_id, content_b64, None, run_status,
job_id, worker_id, commit_sha, set_id)
if job_id:
update_job_status(db, job_id, job_url, filename, run_status, False, github_auth)
def db_connect(dsn=None, user=None, password=None, host=None, database=None, sslmode=None):
''' Connect to database.
Use DSN string if given, but allow other calls for older systems.
'''
if dsn is None:
return connect(user=user, password=password, host=host, database=database, sslmode=sslmode)
return connect(dsn)
def db_queue(conn, name):
return PQ(conn, table='queue')[name]
def db_cursor(conn):
return conn.cursor()
class SnsHandler(logging.Handler):
''' Logs to the given Amazon SNS topic; meant for errors.
'''
def __init__(self, arn, *args, **kwargs):
super(SnsHandler, self).__init__(*args, **kwargs)
# Rely on boto AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY variables.
self.arn, self.sns = arn, connect_sns()
def emit(self, record):
subject = u'OpenAddr: {}: {}'.format(record.levelname, record.name)
if hasattr(record, 'request_info'):
subject = '{} - {}'.format(subject, record.request_info)
self.sns.publish(self.arn, self.format(record), subject[:79])
def setup_logger(sns_arn, log_level=logging.DEBUG):
''' Set up logging for openaddr code.
'''
# Get a handle for the openaddr logger and its children
openaddr_logger = logging.getLogger('openaddr')
# Default logging format.
log_format = '%(asctime)s %(levelname)07s: %(message)s'
# Set the logger level to show everything, and filter down in the handlers.
openaddr_logger.setLevel(log_level)
# Set up a logger to stderr
handler1 = logging.StreamHandler()
handler1.setLevel(log_level)
handler1.setFormatter(logging.Formatter(log_format))
openaddr_logger.addHandler(handler1)
# Set up a second logger to SNS
try:
handler2 = SnsHandler(sns_arn)
except:
openaddr_logger.warning('Failed to authenticate SNS handler')
else:
handler2.setLevel(logging.ERROR)
handler2.setFormatter(logging.Formatter(log_format))
openaddr_logger.addHandler(handler2)
if __name__ == '__main__':
app.run(debug=True)
| isc | -8,725,184,806,641,569,000 | 36.464993 | 106 | 0.618138 | false |
GoogleCloudPlatform/healthcare | imaging/ml/toolkit/hcls_imaging_ml_toolkit/test_pubsub_util.py | 1 | 1730 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility class for tests using Pub/Sub-related functionality."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Dict, Optional, Text
from google.cloud import pubsub_v1
class PubsubRunLoopExitError(BaseException):
"""Forces exit from the infinite PubsubListener run loop.
PubsubListener catches all exceptions inheriting from Exception within its
Run loop. Use this exceptions within tests to force exit.
"""
pass
def CreatePubsubReceivedMessage(
ack_id: Text,
data: Text,
message_id: Text,
attributes: Optional[Dict[Text, Text]] = None
) -> pubsub_v1.types.ReceivedMessage:
"""Creates a ReceivedMessage instance for testing.
Args:
ack_id: Pubsub ACK ID.
data: The payload of the Pubsub message.
message_id: Pubsub Message ID
attributes: Pubsub attributes.
Returns:
Instance of ReceivedMessage.
"""
return pubsub_v1.types.ReceivedMessage(
ack_id=ack_id,
message=pubsub_v1.types.PubsubMessage(
data=data.encode('utf8'),
message_id=message_id,
attributes=attributes))
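def _demo_create_pubsub_received_message():
  # Hedged usage sketch (values invented); not part of the original module.
  msg = CreatePubsubReceivedMessage(
      'ack-1', 'payload', 'msg-1', attributes={'key': 'value'})
  assert msg.ack_id == 'ack-1'
  assert msg.message.data == b'payload'
  assert msg.message.message_id == 'msg-1'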
| apache-2.0 | 4,498,637,888,027,374,600 | 30.454545 | 76 | 0.731792 | false |
transientlunatic/acreroad_1420 | acreroad_1420/__main__.py | 2 | 18434 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Main entry for SRT drive control.
Author: Ronnie Frith
Contact: [email protected]
"""
from . import CONFIGURATION as config
from . import CATALOGUE
from .drive import Drive
#from acreroad_1420 import CONFIGURATION as config
import numpy as np
import sys, argparse, time
from PyQt4 import QtGui, QtCore
from skymap import Skymap
from srt import SRT, Status, Mode
from radiosource import RadioSource,radec,galactic
from astropy.time import Time
from formlayout import fedit
import astropy
import astropy.units as u
from astropy.coordinates import SkyCoord, ICRS, EarthLocation, AltAz
from os.path import expanduser, isfile, join
import os.path
class SlewToggle:
ON = 0
OFF = 1
class TrackToggle:
ON = 0
OFF = 1
class mainWindow(QtGui.QMainWindow):
"""
Container class for the whole main window. Container classes for other widgets such as buttons and labels are constructed here.
"""
OFFSET_CHANGE = 1.1
cursorkeys = [QtCore.Qt.Key_Left, QtCore.Qt.Key_Right, QtCore.Qt.Key_Up, QtCore.Qt.Key_Down]
def __init__(self, drive, catalogue, parent=None):
super(mainWindow,self).__init__(parent=parent)
screen = QtGui.QDesktopWidget().screenGeometry()
#self.showMaximized()
self.setGeometry(50,50,700,450)
self.setWindowTitle("SRT Drive Control")
self.setFocus()
self.drive = drive
self.skymap = Skymap(self, time=self.drive.current_time, location=self.drive.location)
self.skymap.init_cat(CATALOGUE)
self.commandButtons = commandButtons(self)
self.antennaCoordsInfo = antennaCoordsInfo(self)
self.sourceInfo = sourceInfo(self)
self.infoTimer = QtCore.QTimer(self)
self.infoTimer.timeout.connect(self.skymap.updateSkymap)
self.infoTimer.start(100)
self.sourceTimer = QtCore.QTimer(self)
self.sourceTimer.timeout.connect(self.skymap.fetchRadioSourceCoordinates)
self.sourceTimer.start(60000)
def updateStatusBar(self,status):
"""
Update the text of the status bar with the string status.
"""
self.statusBar().showMessage(str(status))
def setMode(self,mode):
self.mode = mode
def getMode(self):
return self.mode
class antennaCoordsInfo(QtGui.QWidget):
"""
Container class for the widget which displays antenna coordinate information and offsets etc.
"""
def __init__(self,parent):
super(antennaCoordsInfo,self).__init__(parent)
screen = QtGui.QDesktopWidget().screenGeometry()
self.setGeometry(0,-8,700,38)
gb = QtGui.QGroupBox(self)
#gb.setTitle("Antenna Coordinates")
gb.setStyleSheet("QGroupBox {background: black; color: #ffffff; margin-top: 0.5em; margin-bottom: 0.5em; font-size: 10pt;}")
gb.setFixedSize(screen.width(),200)
layout = QtGui.QHBoxLayout(self)
#self.setLayout(layout)
position = self.parent().drive.current_position
self.posLabel = QtGui.QLabel(
"""<span style='font-family:mono,fixed;
background: black; font-size:8pt; font-weight:600;
color:#dddddd;'>
AltAz</span>: {0.alt:.2f} {0.az:.2f} """.format(self.parent().drive.current_position))
layout.addWidget(self.posLabel)
self.radecLabel = QtGui.QLabel("Ra Dec: {0.ra:.2f} {0.dec:.2f}".format( position.transform_to('icrs') ))
layout.addWidget(self.radecLabel)
self.galLabel = QtGui.QLabel("Gal: {0.l:.2f} {0.b:.2f}".format(position.transform_to('galactic')))
layout.addWidget(self.galLabel)
self.utcLabel = QtGui.QLabel("UTC: todo")
layout.addWidget(self.utcLabel)
#self.sidLabel = QtGui.QLabel("Sidereal: todo")
#layout.addWidget(self.sidLabel)
vbox = QtGui.QVBoxLayout()
#vbox.addStretch(1)
vbox.addLayout(layout)
def updateCoords(self):
"""
Update is called when the on screen antenna coordinate information should be updated to new values.
"""
currentPos = self.parent().drive.skycoord()
self.posLabel.setText("<span style='font-family:mono,fixed; background: black; font-size:12pt; font-weight:600; color:#ffffff;'>{0.az.value:.2f}</span> <span style='font-family:mono,fixed; background: black; font-size:8pt; font-weight:600; color:#dddddd; left: -5px;'>az</span> <span style='font-family:mono,fixed; background: black; font-size:12pt; font-weight:600; color:#ffffff;'>{0.alt.value:.2f}</span> <span style='font-family:mono,fixed; background: black; font-size:8pt; font-weight:600; color:#dddddd;'>alt</span>".format(currentPos))
self.radecLabel.setText("<span style='font-family:mono,fixed; background: black; font-size:12pt; font-weight:600; color:#ffffff;'>{0.ra.value:.2f}<span><span style='font-family:mono,fixed; background: black; font-size:8pt; font-weight:600; color:#dddddd;'>ra</span> <span style='font-family:mono,fixed; background: black; font-size:12pt; font-weight:600; color:#ffffff;'>{0.dec.value:.2f}</span><span style='font-family:mono,fixed; background: black; font-size:8pt; font-weight:600; color:#dddddd;'>dec</span>" .format(currentPos.transform_to('icrs')))
self.galLabel.setText("<span style='font-family:mono,fixed; background: black; font-size:12pt; font-weight:600; color:#ffffff;'>{0.l.value:.2f}<span><span style='font-family:mono,fixed; background: black; font-size:8pt; font-weight:600; color:#dddddd;'>lon</span> <span style='font-family:mono,fixed; background: black; font-size:12pt; font-weight:600; color:#ffffff;'>{0.b.value:.2f}</span><span style='font-family:mono,fixed; background: black; font-size:8pt; font-weight:600; color:#dddddd;'>lat</span>".format(currentPos.transform_to('galactic')))
def tick(self):
self.utcLabel.setText(" <span style='font-family:mono,fixed; background: black; font-size:12pt; font-weight:600; color:#ffffff;'>{0}</span> <span style='font-family:mono,fixed; background: black; font-size:8pt; font-weight:600; color:#dddddd;'>UTC</span>".format(time.strftime("%H:%M:%S",time.gmtime())))
#self.sidLabel.setText("Sidereal: {0.sidereal_time()}".format(self.parent().getSRT().drive.current_time_local))
class sourceInfo(QtGui.QWidget):
"""
A container class for displaying the information about a selected radio source on the skymap.
"""
def __init__(self,parent):
super(sourceInfo,self).__init__(parent)
screen = QtGui.QDesktopWidget().screenGeometry()
self.setGeometry(700-190,105,180,100)
gb = QtGui.QGroupBox(self)
#gb.setTitle("Source Information")
gb.setStyleSheet("QGroupBox {background: #dddddd; margin: 0.5em; } *[class=objectName]{font-size: 24pt;}")
gb.setFixedSize(600,100)
layout = QtGui.QVBoxLayout(self)
self.nameLabel = QtGui.QLabel("")
layout.addWidget(self.nameLabel)
self.posLabel = QtGui.QLabel("AzEl: ")
layout.addWidget(self.posLabel)
self.radecLabel = QtGui.QLabel("Ra Dec: ")
layout.addWidget(self.radecLabel)
self.galLabel = QtGui.QLabel("Gal: ")
layout.addWidget(self.galLabel)
gb.setLayout(layout)
def updateEphemLabel(self,src):
"""
Whenever it is required to update information about a radio source src.
"""
name = src.getName()
pos = src.getPos()
skycoord = src.skycoord
#radec = src.getRADEC()
#gal = src.getGal()
self.nameLabel.setText("<span style='font-weight: 600; color: blue;'>{}</span>".format(name))
self.posLabel.setText("AzEl: {0.az.value:.2f} az {0.alt.value:.2f} el".format(skycoord))
self.radecLabel.setText("{0.ra.value:.2f} {0.dec.value:.2f}".format(skycoord.transform_to('icrs')))
galco = skycoord.transform_to('galactic')
self.galLabel.setText(u"{0:.0f}°{1[2]:.0f}'{1[3]:.2f}\" l {2:.0f}°{3[2]:.0f}'{3[3]:.2f}\" b".format(galco.l.signed_dms[0]*galco.l.signed_dms[1], \
galco.l.signed_dms, \
galco.b.signed_dms[0]*galco.b.signed_dms[1],\
galco.b.signed_dms))
class commandButtons(QtGui.QWidget):
"""
Container class for the buttons on the main windows which (usually) instruct the SRT to do something.
"""
def __init__(self,parent):
super(commandButtons,self).__init__(parent)
#self.setGeometry(0,0,150,200)
screen = QtGui.QDesktopWidget().screenGeometry()
self.setGeometry(0,20,700,60)
gb = QtGui.QGroupBox(self)
#gb.setStyleSheet("QGroupBox {background: black; color: #ffffff; margin-top: 0.5em; margin-bottom: 0.5em;}")
gb.setFixedSize(screen.width(),200)
layout = QtGui.QHBoxLayout(self)
#gb = QtGui.QGroupBox(self)
#gb.setTitle("Control")
#gb.setStyleSheet("QGroupBox {border: 2px solid gray; border-radius: 5px; margin-top: 0.5em;} QGroupBox::title {subcontrol-origin: margin; left: 10px; padding: 0 3px 0 3px;}")
#gb.setFixedSize(150,200)
buttonWidth = 100
stowButton = QtGui.QPushButton("Stow")
#stowButton.setMinimumSize(20,50)
stowButton.setFixedWidth(buttonWidth)
layout.addWidget(stowButton)
stowButton.clicked.connect(self.handleStowButton)
homeButton = QtGui.QPushButton("Home")
homeButton.setFixedWidth(buttonWidth)
layout.addWidget(homeButton)
homeButton.clicked.connect(self.handleHomeButton)
self.slewToggle = SlewToggle.OFF
slewButton = QtGui.QPushButton("Slew Toggle")
slewButton.setFixedWidth(buttonWidth)
slewButton.setCheckable(True)
layout.addWidget(slewButton)
slewButton.clicked.connect(self.handleSlewButton)
slewToCoordButton = QtGui.QPushButton("Slew to coord")
slewToCoordButton.setFixedWidth(buttonWidth)
layout.addWidget(slewToCoordButton)
slewToCoordButton.clicked.connect(self.handleSlewToCoordButton)
self.trackToggle = TrackToggle.OFF
trackButton = QtGui.QPushButton("Track Toggle")
trackButton.setFixedWidth(buttonWidth)
trackButton.setCheckable(True)
layout.addWidget(trackButton)
trackButton.clicked.connect(self.handleTrackButton)
calibrateButton = QtGui.QPushButton("Calibrate")
calibrateButton.setFixedWidth(buttonWidth)
layout.addWidget(calibrateButton)
calibrateButton.clicked.connect(self.handleCalibrateButton)
layout = QtGui.QVBoxLayout(self)
gb.setLayout(layout)
self.trackSource = RadioSource("ts")
self.oldTrackSource = RadioSource("ots")
self.trackTimer = QtCore.QTimer()
self.trackTimer.timeout.connect(self.handleTrackButton)
self.trackTimer.setInterval(5000)
self.offset = (0,0) #azel
def setOffset(self,offset):
self.offset = offset
def getOffset(self):
return self.offset
def handleStowButton(self):
"""
Returns the SRT to its stow position.
"""
#current_az = self.srt.getCurrentPos()[0]
self.parent().skymap.setTargetPos((0,90))
self.parent().drive.stow()
self.parent().setFocus()
def handleHomeButton(self):
"""
Returns the SRT to its home position.
"""
#homeOffset = self.getOffset().split()
#self.parent().skymap.setTargetPos((float(homeOffset[0]),float(homeOffset[1])))
self.parent().skymap.setTargetPos((self.parent().drive.az_home,self.parent().drive.el_home))
self.parent().drive.home()
self.parent().setFocus()
def handleSlewButton(self):
"""
Turns slew capability on/off for selecting/slewing to source on the skymap.
"""
if self.slewToggle == SlewToggle.ON:
self.slewToggle = SlewToggle.OFF
#print("Slew toggle OFF")
elif self.slewToggle == SlewToggle.OFF:
self.slewToggle = SlewToggle.ON
#print("Slew toggle ON")
self.parent().setFocus()
def _parseInput(self, data):
print data
eq, ho, ga = data[0], data[1], data[2]
if (not eq[0] == '') and (not eq[1] == ''):
frame = 'ICRS'
# "Parsing an RA and Dec"
eq[0], eq[1] = np.float(eq[0]), np.float(eq[1])
c = SkyCoord(ra=eq[0]*u.deg, dec=eq[1]*u.deg, frame='icrs')
elif (not ho[0]=='') and (not ho[1]==''):
# Parsing a horizontal coordinate
ho[0], ho[1] = np.float(ho[0]), np.float(ho[1])
c = SkyCoord(AltAz(ho[0]*u.deg, ho[1]*u.deg, obstime=self.parent().drive.current_time, location=self.parent().drive.location))
elif (not ga[0]=='') and (not ga[1]==''):
# Parsing a galactic coordinate
ga[0], ga[1] = np.float(ga[0]), np.float(ga[1])
c = SkyCoord(l=ga[0]*u.deg, b=ga[1]*u.deg, frame='galactic')
else:
# No valid coordinates were passed
return None
return c
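    # Hedged example (values invented): _parseInput([['10.5', '41.2'],
    # ['', ''], ['', '']]) builds an ICRS SkyCoord at ra=10.5 deg,
    # dec=41.2 deg. The first group with both fields filled wins
    # (equatorial, then horizontal, then galactic), and all-empty
    # input returns None.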
def handleSlewToCoordButton(self):
"""
        An input window will be presented where equatorial, horizontal, or galactic coordinates can be entered. The SRT will then slew to those coordinates.
"""
# azel, ok = QtGui.QInputDialog.getText(self, 'Input',
# 'Enter Az Alt:')
# Use formlayout to make the form
equatorialgroup = ( [('Right Ascension', ''), ('Declination', '')], "Equatorial", "Input equatorial coordinates." )
horizontalgroup = ( [('Azimuth',''), ('Altitude','')], "Horizontal", "Input Horizontal coordinates." )
galacticgroup = ( [('Longitude', ''), ('Latitude', '')], "Galactic", "Input galactic coordinates." )
result = fedit([equatorialgroup, horizontalgroup, galacticgroup])
print result
if result:
# Need to parse the output of the form
skycoord = self._parseInput(result)
if skycoord:
#self.parent().srt.slew(self.parent().skymap,(azf,elf))
currentPos = self.parent().drive.skycoord()
targetPos = skycoord
print targetPos
if targetPos == currentPos:
print("Already at that position.")
#self.targetPos = currentPos
self.parent().skymap.setTargetPos(currentPos)
else:
print("Slewing to " + str(targetPos))
self.parent().skymap.setTargetPos(targetPos)
self.parent().drive.goto(targetPos)
#self.parent().skymap.setCurrentPos(targetPos)
#self.parent().updateStatusBar()
self.parent().setFocus()
def handleTrackButton(self):
"""
        Whenever the track button is pressed, the SRT will begin tracking whatever source is currently selected. If it is pressed again and the source hasn't changed, it'll stop tracking that source.
"""
if self.trackToggle == TrackToggle.OFF:
self.trackToggle = TrackToggle.ON
print("Track Toggle ON")
self.parent().drive.track()
elif self.trackToggle == TrackToggle.ON:
self.trackToggle = TrackToggle.OFF
print("Track Toggle OFF")
self.parent().drive.track(tracking=False)
self.parent().setFocus()
def handleCalibrateButton(self):
self.parent().drive.calibrate()
self.parent().setFocus()
def getSlewToggle(self):
return self.slewToggle
def setSlewToggle(self,st):
self.slewToggle = st
def readConfigFile():
pass
def writeConfigFile():
pass
def park():
"""
A simple script to park the telescope in the stow position.
"""
import sys
import time
import drive
device = config.get('arduino','dev')
print "Connecting to the drive"
connection = drive.Drive(device, config.get("arduino", "baud"), simulate=0, homeonstart=False)
print "Connected"
#connection.stow()
if sys.argv[1] == "snow":
# If we're parking due to snow we want to park in the home position
# to avoid the bowl filling with the white stuff
command = connection.home()
#while connection.homing ==True:
# print "Homing..."
# time.sleep(1)
elif sys.argv[1] == "wind":
# If we're parking due to the wind then park in the stow position
# pointing straight up
connection._command(connection.vocabulary['STOW'])
time.sleep(1)
#connection.stow()
def main():
app = QtGui.QApplication(sys.argv)
parser = argparse.ArgumentParser()
parser.add_argument('-live',dest='live',action='store_true',
help='Starts main in live mode.')
parser.add_argument('-sim',dest='sim',action='store_true',
help='Starts main in simulation mode.')
args = parser.parse_args()
if args.live == False and args.sim == True:
print("Simulation mode enabled.")
mode = Mode.SIM
else:
mode = Mode.LIVE
device = config.get('arduino','dev')
catalogue = config.get('catalogue','catfile')
calibrationSpeeds = config.get('calibration','speeds')
homeOffset = config.get('offsets','home')
#calibrationSpeeds = (cs.split()[0],cs.split()[1])
#print(calibrationSpeeds.split()[0],calibrationSpeeds.split()[1])
if mode == Mode.SIM:
drive = Drive(simulate=1,calibration=calibrationSpeeds)
elif mode == Mode.LIVE:
drive = Drive(simulate=0,calibration=calibrationSpeeds, persist=True)
main = mainWindow(drive,catalogue)
main.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
| bsd-3-clause | 1,118,366,716,028,535,700 | 40.327354 | 560 | 0.613932 | false |
Qwaz/solved-hacking-problem | DEFCON/2019 Finals/babi/solver.py | 1 | 1302 | from base64 import b64encode
from pwn import *
def s_array(*args):
assert len(args) % 2 == 0
return 'a:%d:{' % (len(args) // 2) + ''.join(args) + '}'
def s_bool(val):
return 'b:%d;' % val
def s_str(s):
return 's:%d:"%s";' % (len(s), s)
def s_ref(val):
return 'r:%d;' % val
def s_int(val):
return 'i:%d;' % val
def s_float(val):
return 'd:%f;' % val
def s_null():
return 'N;'
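# Hedged sanity check of the serializers above (example values invented):
# a one-entry PHP array with a string key and an integer value.
assert s_array(s_str("k"), s_int(1)) == 'a:1:{s:1:"k";i:1;}'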
host = "10.13.37.8"
host = "localhost"
r = remote(host, 47793)
def send_payload(r, path, payload):
http_payload = "GET %s HTTP/1.1\r\n" % path
http_payload += "Host: z\r\n"
http_payload += "Connection: keep-alive\r\n"
http_payload += "Cookie: session=%s\r\n" % b64encode(payload)
http_payload += "\r\n"
r.send(http_payload)
result = ''
try:
t = r.recv(timeout=0.5)
while t != '':
result += t
t = r.recv(timeout=0.5)
except EOFError:
pass
return result
spray = s_array(
*[s_int(0x01010101 * i) for i in range(32)]
)
print send_payload(r, "/info", spray)
payload = s_array(
s_str("aaaa"), s_ref(4),
s_str("bbbb"), s_int(0x70),
s_ref(2), s_str("cccc")
)
print send_payload(r, "/info", payload)
| gpl-2.0 | -9,006,028,544,528,207,000 | 15.835616 | 65 | 0.506144 | false |
WamanAvadhani/Rammbock | src/Rammbock/networking.py | 1 | 13800 | # Copyright 2014 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import time
from .logger import logger
from .synchronization import SynchronizedType
from .binary_tools import to_hex
try:
from sctp import sctpsocket_tcp
SCTP_ENABLED = True
except ImportError:
SCTP_ENABLED = False
UDP_BUFFER_SIZE = 65536
TCP_BUFFER_SIZE = 1000000
TCP_MAX_QUEUED_CONNECTIONS = 5
def get_family(family):
if not family:
family = 'ipv4'
return {'ipv4': socket.AF_INET, 'ipv6': socket.AF_INET6}[family.lower()]
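# Hedged usage sketch: get_family(None) and get_family('IPv4') both resolve
# to socket.AF_INET, while get_family('ipv6') resolves to socket.AF_INET6.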
class _WithTimeouts(object):
_default_timeout = 10
def _get_timeout(self, timeout):
if timeout in (None, '') or str(timeout).lower() == 'none':
return self._default_timeout
elif str(timeout).lower() == 'blocking':
return None
return float(timeout)
def _set_default_timeout(self, timeout):
self._default_timeout = self._get_timeout(timeout)
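    # Hedged examples of the timeout semantics above (values invented):
    # _get_timeout(None) and _get_timeout('none') return the 10-second
    # default, _get_timeout('blocking') returns None (block forever),
    # and _get_timeout('2.5') returns 2.5.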
class _NetworkNode(_WithTimeouts):
__metaclass__ = SynchronizedType
_message_stream = None
parent = None
name = '<not set>'
def set_handler(self, msg_template, handler_func, header_filter, alias=None, interval=None):
if alias:
raise AssertionError('Named connections not supported.')
self._message_stream.set_handler(msg_template, handler_func, header_filter, interval)
def get_own_address(self):
return self._socket.getsockname()[:2]
def get_peer_address(self, alias=None):
if alias:
raise AssertionError('Named connections not supported.')
return self._socket.getpeername()[:2]
def close(self):
if self._is_connected:
self._is_connected = False
self._socket.close()
if self._message_stream:
self._message_stream.close()
self._message_stream = None
# TODO: Rename to _get_new_message_stream
def _get_message_stream(self):
if not self._protocol:
return None
return self._protocol.get_message_stream(BufferedStream(self, self._default_timeout))
def get_message(self, message_template, timeout=None, header_filter=None, latest=None):
if not self._protocol:
raise AssertionError('Can not receive messages without protocol. Initialize network node with "protocol=<protocl name>"')
if self._protocol != message_template._protocol:
raise AssertionError('Template protocol does not match network node protocol %s!=%s' % (self.protocol_name, message_template._protocol.name))
return self._get_from_stream(message_template, self._message_stream, timeout=timeout, header_filter=header_filter, latest=latest)
def _get_from_stream(self, message_template, stream, timeout, header_filter, latest):
return stream.get(message_template, timeout=timeout, header_filter=header_filter, latest=latest)
def log_send(self, binary, ip, port):
logger.debug("Send %d bytes: %s to %s:%s over %s" % (len(binary), to_hex(binary), ip, port, self._transport_layer_name))
def log_receive(self, binary, ip, port):
logger.trace("Trying to read %d bytes: %s from %s:%s over %s" % (len(binary), to_hex(binary), ip, port, self._transport_layer_name))
def empty(self):
result = True
try:
while result:
result = self.receive(timeout=0.0)
except (socket.timeout, socket.error):
pass
if self._message_stream:
self._message_stream.empty()
def receive(self, timeout=None, alias=None):
return self.receive_from(timeout, alias)[0]
def receive_from(self, timeout=None, alias=None):
self._raise_error_if_alias_given(alias)
timeout = self._get_timeout(timeout)
self._socket.settimeout(timeout)
return self._receive_msg_ip_port()
def _receive_msg_ip_port(self):
msg = self._socket.recv(self._size_limit)
ip, port = self._socket.getpeername()[:2]
self.log_receive(msg, ip, port)
return msg, ip, port
def send(self, msg, alias=None):
self._raise_error_if_alias_given(alias)
ip, port = self.get_peer_address()
self.log_send(msg, ip, port)
self._sendall(msg)
def _sendall(self, msg):
self._socket.sendall(msg)
def _raise_error_if_alias_given(self, alias):
if alias:
raise AssertionError('Connection aliases not supported.')
@property
def protocol_name(self):
return self._protocol.name if self._protocol else None
def get_messages_count_in_buffer(self):
return self._message_stream.get_messages_count_in_cache()
class _TCPNode(object):
_transport_layer_name = 'TCP'
_size_limit = TCP_BUFFER_SIZE
def _init_socket(self, family):
self._socket = socket.socket(get_family(family), socket.SOCK_STREAM)
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
class _UDPNode(object):
_transport_layer_name = 'UDP'
_size_limit = UDP_BUFFER_SIZE
def _init_socket(self, family):
self._socket = socket.socket(get_family(family), socket.SOCK_DGRAM)
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
class _SCTPNode(object):
_transport_layer_name = 'SCTP'
_size_limit = TCP_BUFFER_SIZE
def _init_socket(self, family):
if not SCTP_ENABLED:
raise Exception("SCTP Not enabled. Is pysctp installed? https://github.com/philpraxis/pysctp")
self._socket = sctpsocket_tcp(get_family(family))
class _Server(_NetworkNode):
def __init__(self, ip, port, timeout=None):
self._ip = ip
self._port = int(port)
self._set_default_timeout(timeout)
_NetworkNode.__init__(self)
def _bind_socket(self):
try:
self._socket.bind((self._ip, self._port))
except socket.error, e:
raise Exception("error: [Errno %d] %s for address %s:%d" % (e[0], e[1], self._ip, self._port))
self._is_connected = True
class UDPServer(_Server, _UDPNode):
def __init__(self, ip, port, timeout=None, protocol=None, family=None):
_Server.__init__(self, ip, port, timeout)
self._protocol = protocol
self._last_client = None
self._init_socket(family)
self._bind_socket()
self._message_stream = self._get_message_stream()
def _receive_msg_ip_port(self):
msg, address = self._socket.recvfrom(self._size_limit)
ip, port = address[:2]
self.log_receive(msg, ip, port)
self._last_client = (ip, int(port))
return msg, ip, port
def _check_no_alias(self, alias):
if alias:
raise Exception('Connection aliases are not supported on UDP Servers')
def send_to(self, msg, ip, port):
self._last_client = (ip, int(port))
self.send(msg)
def _sendall(self, msg):
self._socket.sendto(msg, self.get_peer_address())
def get_peer_address(self, alias=None):
self._check_no_alias(alias)
if not self._last_client:
raise Exception('Server has no default client, because it has not received messages from clients yet.')
return self._last_client
class StreamServer(_Server):
def __init__(self, ip, port, timeout=None, protocol=None, family=None):
_Server.__init__(self, ip, port, timeout)
self._init_socket(family)
self._bind_socket()
self._socket.listen(TCP_MAX_QUEUED_CONNECTIONS)
self._protocol = protocol
self._init_connection_cache()
def _init_connection_cache(self):
self._connections = _NamedCache('connection', "No connections accepted!")
def set_handler(self, msg_template, handler_func, header_filter, alias=None, interval=None):
connection = self._connections.get(alias)
connection.set_handler(msg_template, handler_func, header_filter, interval=interval)
def receive_from(self, timeout=None, alias=None):
connection = self._connections.get(alias)
return connection.receive_from(timeout=timeout)
def accept_connection(self, alias=None, timeout=0):
timeout = self._get_timeout(timeout)
if timeout > 0:
self._socket.settimeout(timeout)
connection, client_address = self._socket.accept()
self._connections.add(_TCPConnection(self, connection, protocol=self._protocol), alias)
return client_address
def send(self, msg, alias=None):
connection = self._connections.get(alias)
connection.send(msg)
def send_to(self, *args):
raise Exception("Stream server cannot send to a specific address.")
def close(self):
if self._is_connected:
self._is_connected = False
for connection in self._connections:
connection.close()
self._socket.close()
self._init_connection_cache()
def close_connection(self, alias=None):
raise Exception("Not yet implemented")
def get_message(self, message_template, timeout=None, alias=None, header_filter=None):
connection = self._connections.get(alias)
return connection.get_message(message_template, timeout=timeout, header_filter=header_filter)
def empty(self):
for connection in self._connections:
connection.empty()
def get_peer_address(self, alias=None):
connection = self._connections.get(alias)
return connection.get_peer_address()
class _TCPConnection(_NetworkNode, _TCPNode):
def __init__(self, parent, socket, protocol=None):
self.parent = parent
self._socket = socket
self._protocol = protocol
self._message_stream = self._get_message_stream()
self._is_connected = True
_NetworkNode.__init__(self)
class SCTPServer(StreamServer, _SCTPNode):
pass
class TCPServer(StreamServer, _TCPNode):
pass
class _Client(_NetworkNode):
def __init__(self, timeout=None, protocol=None, family=None):
self._is_connected = False
self._init_socket(family)
self._set_default_timeout(timeout)
self._protocol = protocol
self._message_stream = None
_NetworkNode.__init__(self)
def set_own_ip_and_port(self, ip=None, port=None):
if ip and port:
self._socket.bind((ip, int(port)))
elif ip:
self._socket.bind((ip, 0))
elif port:
self._socket.bind(("", int(port)))
else:
raise Exception("You must specify host or port")
def connect_to(self, server_ip, server_port):
if self._is_connected:
raise Exception('Client already connected!')
self._server_ip = server_ip
self._socket.connect((server_ip, int(server_port)))
self._message_stream = self._get_message_stream()
self._is_connected = True
return self
class UDPClient(_Client, _UDPNode):
pass
class TCPClient(_Client, _TCPNode):
pass
class SCTPClient(_Client, _SCTPNode):
pass
class _NamedCache(object):
def __init__(self, basename, miss_error):
self._basename = basename
self._counter = 0
self._cache = {}
self._current = None
self._miss_error = miss_error
def add(self, value, name=None):
name = name or self._next_name()
self._cache[name] = value
value.name = name
self._current = name
def _next_name(self):
self._counter += 1
return self._basename + str(self._counter)
def get_with_name(self, name=None):
if not name:
name = self._current
if not name:
raise AssertionError(self._miss_error)
logger.debug("Choosing %s by default" % self._current)
return self._cache[name], name
def get(self, name=None):
return self.get_with_name(name)[0]
def __iter__(self):
return self._cache.itervalues()
class BufferedStream(_WithTimeouts):
def __init__(self, connection, default_timeout):
self._connection = connection
self._buffer = ''
self._default_timeout = default_timeout
def read(self, size, timeout=None):
result = ''
timeout = float(timeout if timeout else self._default_timeout)
cutoff = time.time() + timeout
while time.time() < cutoff:
result += self._get(size - len(result))
if self._size_full(result, size):
return result
self._fill_buffer(timeout)
raise AssertionError('Timeout %fs exceeded.' % timeout)
def _size_full(self, result, size):
return len(result) == size or (size == -1 and len(result))
def return_data(self, data):
if data:
self._buffer = data + self._buffer
def _get(self, size):
if size == -1:
size = len(self._buffer)
if not self._buffer:
return ''
result = self._buffer[:size]
self._buffer = self._buffer[size:]
return result
def _fill_buffer(self, timeout):
self._buffer += self._connection.receive(timeout=timeout)
def empty(self):
self._buffer = ''
| apache-2.0 | 5,183,357,354,468,281,000 | 31.394366 | 153 | 0.626304 | false |
jhanley634/testing-tools | problem/covid/sd_cases_deaths.py | 1 | 2150 | #! /usr/bin/env streamlit run
# Copyright 2020 John Hanley.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# The software is provided "AS IS", without warranty of any kind, express or
# implied, including but not limited to the warranties of merchantability,
# fitness for a particular purpose and noninfringement. In no event shall
# the authors or copyright holders be liable for any claim, damages or
# other liability, whether in an action of contract, tort or otherwise,
# arising from, out of or in connection with the software or the use or
# other dealings in the software.
import datetime as dt
from altair import datum
from covid.us_cases_deaths import delta, get_cases_and_deaths, get_chart, smooth
import altair as alt
import streamlit as st
def _get_annotation(df):
# https://en.wikipedia.org/wiki/Sturgis_Motorcycle_Rally
rally = 1e3 * dt.datetime.strptime('2020-08-07', '%Y-%m-%d').timestamp()
ten_days = 10 * 1e3 * 86400
annotation = alt.Chart(df).mark_text(
align='left',
baseline='middle',
fontSize=20,
dx=7
).encode(
x='date',
y='val',
text='label'
).transform_filter(
(rally <= datum.date) & (datum.date < rally + ten_days)
)
return annotation
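# Added note: the 1e3 factors above exist because Vega-Lite (which
# Altair compiles to) compares temporal fields as milliseconds since the
# Unix epoch, while datetime.timestamp() returns seconds -- so both the
# rally date and the ten-day window are scaled by 1000 before use in the
# transform_filter expression.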
def main():
df = get_cases_and_deaths('us-states.csv', 'South Dakota')
df['label'] = '.'
st.altair_chart(get_chart(df) + _get_annotation(df))
st.altair_chart(get_chart(df, 'log') + _get_annotation(df))
delta(df)
smooth(df, span=7)
st.altair_chart(get_chart(df) + _get_annotation(df))
if __name__ == '__main__':
main()
| mit | -4,698,819,997,443,697,000 | 35.440678 | 80 | 0.695349 | false |
spreeker/democracygame | democracy/issue/migrations/0002_auto__add_field_issue_slug.py | 1 | 4992 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Issue.slug'
db.add_column('issue_issue', 'slug', self.gf('django.db.models.fields.SlugField')(max_length=80, null=True, db_index=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Issue.slug'
db.delete_column('issue_issue', 'slug')
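    # Added note: South calls forwards()/backwards() with a frozen ORM
    # built from the 'models' snapshot below, so this migration keeps
    # working even after models.py changes; the self.gf(...) calls
    # resolve field classes by dotted path at run time.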
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'issue.issue': {
'Meta': {'object_name': 'Issue'},
'body': ('django.db.models.fields.TextField', [], {'max_length': '2000'}),
'hotness': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'offensiveness': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '80', 'null': 'True', 'db_index': 'True'}),
'source_type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'time_stamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 9, 16, 14, 17, 28, 118475)'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
}
}
complete_apps = ['issue']
| bsd-3-clause | -6,876,075,056,248,061,000 | 64.684211 | 182 | 0.553085 | false |
TheImagingSource/tiscamera | examples/python/00-list-devices.py | 1 | 2576 | #!/usr/bin/env python3
# Copyright 2017 The Imaging Source Europe GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example will show you how to list information about the available devices
#
import sys
import gi
gi.require_version("Tcam", "0.1")
gi.require_version("Gst", "1.0")
from gi.repository import Tcam, Gst
def list_devices():
"""
Print information about all available devices
"""
sample_pipeline = Gst.parse_launch("tcambin name=source ! fakesink")
if not sample_pipeline:
print("Unable to create pipeline")
sys.exit(1)
source = sample_pipeline.get_by_name("source")
serials = source.get_device_serials_backend()
for single_serial in serials:
        # This returns something like:
# (True,
# name='DFK Z12GP031',
# identifier='The Imaging Source Europe GmbH-11410533',
# connection_type='aravis')
# The identifier is the name given by the backend
# The connection_type identifies the backend that is used.
# Currently 'aravis', 'v4l2', 'libusb' and 'unknown' exist
(return_value, model,
identifier, connection_type) = source.get_device_info(single_serial)
# return value would be False when a non-existant serial is used
# since we are iterating get_device_serials this should not happen
if return_value:
print("Model: {} Serial: {} Type: {}".format(model,
single_serial,
connection_type))
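# Added note: with a camera attached, the loop above prints one line per
# device; using the sample values from the comment inside the loop, the
# output would look like:
#   Model: DFK Z12GP031 Serial: 11410533 Type: aravis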
if __name__ == "__main__":
Gst.init(sys.argv) # init gstreamer
# this line sets the gstreamer default logging level
# it can be removed in normal applications
    # gstreamer logging can contain very useful information
# when debugging your application
# see https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
# for further details
Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)
list_devices()
| apache-2.0 | -4,272,001,744,929,887,700 | 31.607595 | 94 | 0.660326 | false |
cfelton/minnesota | test/test_system/test_regfile.py | 1 | 7464 | #
# Copyright (c) 2006-2013 Christopher L. Felton
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
from random import randint
import traceback
from myhdl import *
from mn.system import Clock
from mn.system import Reset
from mn.system import Global
from mn.system import RegisterFile
from mn.system import Register
from mn.system import Wishbone
from mn.utils.test import *
regdef = None
regfile = None
def _create_mask(n):
m = 1
for _ in xrange(n):
m = (m << 1) | 1
return m
def _create_test_regfile():
global regdef
regdef = collections.OrderedDict()
# --register 0--
reg = Register('control', 0x0018, 8, 'rw', 0)
reg.comment = "register 0"
reg.add_named_bits('enable', slice(1, 0)) # read-only namedbit
reg.add_named_bits('loop', slice(2, 1)) # read-only namedbit
regdef[reg.name] = reg
# -- more registers register --
for addr,default in zip((0x20, 0x40, 0x80),
(0xDE, 0xCA, 0xFB)):
reg = Register('reg%s' % (addr,), addr, 8, 'rw', default)
regdef[reg.name] = reg
# -- read only register --
reg = Register('regro', 0x100, 8, 'ro', 0xAA)
regdef[reg.name] = reg
# another read only register, with named bits
reg = Register('status', 0x200, 8, 'ro', 0)
reg.add_named_bits('error', slice(1, 0)) # bit 0, read-write namedbit
reg.add_named_bits('ok', slice(2, 1)) # bit 1, read-write namedbit
reg.add_named_bits('cnt', slice(8, 2)) # bits 7-2, read-write namedbit
regdef[reg.name] = reg
regfile = RegisterFile(regdef)
return regfile
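# Added note: the register map built above, for reference:
#   0x018 control (rw, default 0, named bits: enable[0], loop[1])
#   0x020 reg32   (rw, default 0xDE)   <- names come from 'reg%s' % addr
#   0x040 reg64   (rw, default 0xCA)
#   0x080 reg128  (rw, default 0xFB)
#   0x100 regro   (ro, default 0xAA)
#   0x200 status  (ro, default 0, named bits: error[0], ok[1], cnt[7:2])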
def m_per_top(clock, reset, mon):
glbl = Global(clock, reset)
wb = Wishbone(glbl)
#gpm = wb.m_controller(wb)
gp1 = m_per(glbl, wb, mon)
return gp1
def m_per(glbl, regbus, mon):
global regfile
regfile = _create_test_regfile()
g_regfile = regbus.m_per_interface(glbl, regfile)
clock, reset = glbl.clock, glbl.reset
## all "read-only" (status) bits if needed
@always_seq(clock.posedge, reset=reset)
def rtl_roregs():
if regfile.regro.rd:
regfile.regro.next = mon
return g_regfile #, rtl_roregs
def m_per_bits(glbl, regbus, mon):
global regfile
regfile = _create_test_regfile()
g_regfile = regbus.m_per_interface(glbl, regfile)
count = modbv(0, min=0, max=1)
clock, reset = glbl.clock, glbl.reset
## all "read-only" (status) bits if needed
@always(clock.posedge)
def rtl_roregs():
count[:] = count + 1
# only 'ro' registers can have named bits that can
# be set
if count:
regfile.error.next = True
regfile.ok.next = False
else:
regfile.error.next = False
regfile.ok.next = True
if regfile.regro.rd:
regfile.regro.next = mon
regfile.cnt.next = count[5:]
return g_regfile, rtl_roregs
def test_register_def():
regfile = _create_test_regfile()
assert len(regfile._rwregs) == 4
assert len(regfile._roregs) == 2
def test_register_file():
global regfile
# top-level signals and interfaces
clock = Clock(0, frequency=50e6)
reset = Reset(0, active=1, async=False)
glbl = Global(clock, reset)
regbus = Wishbone(glbl)
def _test_rf():
tb_dut = m_per(glbl, regbus, 0xAA)
tb_or = regbus.m_per_outputs()
tb_mclk = clock.gen()
tb_rclk = regbus.clk_i.gen()
asserr = Signal(bool(0))
@instance
def tb_stim():
try:
yield delay(100)
yield reset.pulse(111)
for k,reg in regdef.iteritems():
if reg.access == 'ro':
yield regbus.read(reg.addr)
rval = regbus.readval
                        assert rval == reg.default, "ro: %02x != %02x" % (rval, reg.default)
else:
wval = randint(0,(2**reg.width)-1)
yield regbus.write(reg.addr, wval)
for _ in xrange(4):
yield clock.posedge
yield regbus.read(reg.addr)
rval = regbus.readval
                        assert rval == wval, "rw: %02x != %02x" % (rval, wval)
yield delay(100)
except AssertionError,err:
print("@E: %s" % (err,))
traceback.print_exc()
asserr.next = True
for _ in xrange(10):
yield clock.posedge
raise err
raise StopSimulation
return tb_mclk, tb_stim, tb_dut, tb_or, tb_rclk
vcd = tb_clean_vcd('_test_rf')
traceSignals.name = vcd
g = traceSignals(_test_rf)
Simulation(g).run()
def test_register_file_bits():
global regfile
# top-level signals and interfaces
clock = Clock(0, frequency=50e6)
reset = Reset(0, active=1, async=False)
glbl = Global(clock, reset)
regbus = Wishbone(glbl)
def _test():
tb_dut = m_per_bits(glbl, regbus, 0xAA)
tb_or = regbus.m_per_outputs()
tb_mclk = clock.gen()
tb_rclk = regbus.clk_i.gen()
asserr = Signal(bool(0))
@instance
def tb_stim():
regfile.ok.next = True
try:
yield reset.pulse(111)
yield clock.posedge
yield clock.posedge
truefalse = True
yield regbus.write(regfile.control.addr, 0x01)
for _ in xrange(100):
assert (regfile.enable, regfile.loop) == (truefalse, not truefalse)
yield regbus.read(regfile.control.addr)
yield regbus.write(regfile.control.addr,
~regbus.readval)
truefalse = not truefalse
yield clock.posedge
except AssertionError, err:
asserr.next = True
for _ in xrange(20):
yield clock.posedge
raise err
raise StopSimulation
return tb_mclk, tb_stim, tb_dut, tb_or, tb_rclk
vcd = tb_clean_vcd('_test')
traceSignals.name = vcd
g = traceSignals(_test)
Simulation(g).run()
def test_convert():
clock = Signal(bool(0))
reset = ResetSignal(0, active=0, async=True)
mon = Signal(intbv(0)[8:])
toVerilog(m_per_top, clock, reset, mon)
toVHDL(m_per_top, clock, reset, mon)
if __name__ == '__main__':
#parser = tb_arparser()
#args = parser.parse_args()
test_register_def()
test_register_file()
test_register_file_bits()
test_convert()
| gpl-3.0 | 6,406,166,799,250,013,000 | 28.975904 | 93 | 0.561763 | false |
BlackhatEspeed/electrum | gui/qt/seed_dialog.py | 1 | 3072 | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2013 ecdsa@github
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
from electrum.i18n import _
from electrum import mnemonic
from qrcodewidget import QRCodeWidget, QRDialog
from util import close_button
from qrtextedit import ShowQRTextEdit, ScanQRTextEdit
class SeedDialog(QDialog):
def __init__(self, parent, seed, imported_keys):
QDialog.__init__(self, parent)
self.setModal(1)
self.setMinimumWidth(400)
self.setWindowTitle('Electrum' + ' - ' + _('Seed'))
vbox = show_seed_box_msg(seed)
if imported_keys:
vbox.addWidget(QLabel("<b>"+_("WARNING")+":</b> " + _("Your wallet contains imported keys. These keys cannot be recovered from seed.") + "</b><p>"))
vbox.addLayout(close_button(self))
self.setLayout(vbox)
def icon_filename(sid):
if sid == 'cold':
return ":icons/cold_seed.png"
elif sid == 'hot':
return ":icons/hot_seed.png"
else:
return ":icons/seed.png"
def show_seed_box_msg(seedphrase, sid=None):
msg = _("Your wallet generation seed is") + ":"
vbox = show_seed_box(msg, seedphrase, sid)
save_msg = _("Please save these %d words on paper (order is important).")%len(seedphrase.split()) + " "
msg2 = save_msg + " " \
+ _("This seed will allow you to recover your wallet in case of computer failure.") + "<br/>" \
+ "<b>"+_("WARNING")+":</b> " + _("Never disclose your seed. Never type it on a website.") + "</b><p>"
label2 = QLabel(msg2)
label2.setWordWrap(True)
vbox.addWidget(label2)
vbox.addStretch(1)
return vbox
def show_seed_box(msg, seed, sid):
vbox, seed_e = enter_seed_box(msg, None, sid=sid, text=seed)
return vbox
def enter_seed_box(msg, window, sid=None, text=None):
vbox = QVBoxLayout()
logo = QLabel()
logo.setPixmap(QPixmap(icon_filename(sid)).scaledToWidth(56))
logo.setMaximumWidth(60)
label = QLabel(msg)
label.setWordWrap(True)
if not text:
seed_e = ScanQRTextEdit(win=window)
seed_e.setTabChangesFocus(True)
else:
seed_e = ShowQRTextEdit(text=text)
seed_e.setMaximumHeight(130)
vbox.addWidget(label)
grid = QGridLayout()
grid.addWidget(logo, 0, 0)
grid.addWidget(seed_e, 0, 1)
vbox.addLayout(grid)
return vbox, seed_e
| gpl-3.0 | 2,860,938,589,312,174,000 | 35.141176 | 160 | 0.66569 | false |
crdoconnor/dumbyaml | tests/engine.py | 1 | 4347 | from subprocess import check_call, call, PIPE, CalledProcessError
from os import path, system, chdir
import hitchpython
import hitchserve
import hitchtest
import hitchcli
from commandlib import Command, run
from pathlib import Path
# Get directory above this file
PROJECT_DIRECTORY = path.abspath(path.join(path.dirname(__file__), '..'))
class ExecutionEngine(hitchtest.ExecutionEngine):
"""Python engine for running tests."""
def set_up(self):
"""Set up your applications and the test environment."""
self.python_package = hitchpython.PythonPackage(
self.preconditions.get('python_version', '3.5.0')
)
self.python_package.build()
# Uninstall and reinstall
call([self.python_package.pip, "install", "ipython==1.2.1", ], stdout=PIPE)
call([self.python_package.pip, "install", "pyzmq", ], stdout=PIPE)
call([self.python_package.pip, "install", "flake8", ], stdout=PIPE)
call([self.python_package.pip, "uninstall", "dumbyaml", "-y"], stdout=PIPE)
#chdir(PROJECT_DIRECTORY)
#check_call([self.python_package.python, "setup.py", "install"], stdout=PIPE)
run(Command([self.python_package.python, "setup.py", "install"]).in_dir(PROJECT_DIRECTORY))
#print(Command([self.python_package.python, "setup.py", "install"]).arguments)
self.services = hitchserve.ServiceBundle(
PROJECT_DIRECTORY,
startup_timeout=8.0,
shutdown_timeout=1.0
)
self.services['IPython'] = hitchpython.IPythonKernelService(self.python_package)
self.services.startup(interactive=False)
self.ipython_kernel_filename = self.services['IPython'].wait_and_get_ipykernel_filename()
self.ipython_step_library = hitchpython.IPythonStepLibrary()
self.ipython_step_library.startup_connection(self.ipython_kernel_filename)
self.run_command = self.ipython_step_library.run
self.assert_true = self.ipython_step_library.assert_true
self.assert_exception = self.ipython_step_library.assert_exception
self.shutdown_connection = self.ipython_step_library.shutdown_connection
self.run_command("import dumbyaml")
self.run_command("import yaml")
def on_failure(self):
if self.settings.get("pause_on_failure", True):
if hasattr(self.settings, "services"):
import sys
self.services.log(message=self.stacktrace.to_template())
self.services.start_interactive_mode()
if path.exists(path.join(
path.expanduser("~"), ".ipython/profile_default/security/",
self.ipython_kernel_filename)
):
call([
sys.executable, "-m", "IPython", "console",
"--existing",
path.join(
path.expanduser("~"),
".ipython/profile_default/security/",
self.ipython_kernel_filename
)
])
else:
call([
sys.executable, "-m", "IPython", "console",
"--existing", self.ipython_kernel_filename
])
self.services.stop_interactive_mode()
def flake8(self, directory):
# Silently install flake8
chdir(PROJECT_DIRECTORY)
try:
check_call([
path.join(self.python_package.bin_directory, "flake8"),
directory
])
except CalledProcessError:
raise RuntimeError("flake8 failure")
def run_unit_tests(self, directory):
chdir(PROJECT_DIRECTORY)
try:
check_call([
path.join(self.python_package.bin_directory, "py.test"),
"--maxfail=1",
"-s",
directory
])
except CalledProcessError:
raise RuntimeError("py.test failure")
def tear_down(self):
if hasattr(self, 'services'):
self.services.shutdown()
try:
self.end_python_interpreter()
except:
pass
| mit | -51,328,045,312,967,176 | 37.8125 | 99 | 0.566598 | false |
tleonhardt/CodingPlayground | python/cffi/fibonacci/test_cffi.py | 1 | 1755 | #!/usr/bin/env python
""" Python wrapper to time the CFFI wrapper for computing the nth fibonacci number
in a non-recursive fashion and compare it to the pure Python implementation.
"""
import cffi
import fib_python
if __name__ == '__main__':
import sys
import timeit
n = 20
try:
n = int(sys.argv[1])
except Exception:
pass
number_of_times = 100000
try:
number_of_times = int(sys.argv[2])
except Exception:
pass
# The main top-level CFFI class that you instantiate once
ffi = cffi.FFI()
# Parses the given C source. This registers all declared functions.
ffi.cdef('int compute_fibonacci(int n);')
# Load and return a dynamic library. The standard C library can be loaded by passing None.
libfib = ffi.dlopen('./libfibonacci.so')
fib_py = fib_python.compute_fibonacci(n)
fib_cffi = libfib.compute_fibonacci(n)
if fib_py != fib_cffi:
raise(ValueError(fib_cffi))
py_tot = timeit.timeit("compute_fibonacci({})".format(n),
setup="from fib_python import compute_fibonacci",
number=number_of_times)
cffi_tot = timeit.timeit("libfib.compute_fibonacci({})".format(n),
setup="""import cffi; ffi = cffi.FFI(); ffi.cdef('int compute_fibonacci(int n);'); libfib = ffi.dlopen('./libfibonacci.so')""",
number=number_of_times)
py_avg = py_tot / number_of_times
cffi_avg = cffi_tot / number_of_times
print("fib({}) = {}".format(n, fib_py))
print("Python average time: {0:.2g}".format(py_avg))
print("CFFI/C average time: {0:.2g}".format(cffi_avg))
print("CFFI/C speedup: {0:.2g} times".format(py_avg/cffi_avg))
| mit | -701,642,183,918,904,800 | 34.1 | 154 | 0.612536 | false |
openstack/octavia | octavia/tests/functional/api/v2/test_availability_zone_profiles.py | 1 | 29649 | # Copyright 2019 Verizon Media
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_db import exception as odb_exceptions
from oslo_utils import uuidutils
from octavia.common import constants
import octavia.common.context
from octavia.tests.functional.api.v2 import base
class TestAvailabilityZoneProfiles(base.BaseAPITest):
root_tag = 'availability_zone_profile'
root_tag_list = 'availability_zone_profiles'
root_tag_links = 'availability_zone_profile_links'
def _assert_request_matches_response(self, req, resp, **optionals):
self.assertTrue(uuidutils.is_uuid_like(resp.get('id')))
self.assertEqual(req.get('name'), resp.get('name'))
self.assertEqual(req.get(constants.PROVIDER_NAME),
resp.get(constants.PROVIDER_NAME))
self.assertEqual(req.get(constants.AVAILABILITY_ZONE_DATA),
resp.get(constants.AVAILABILITY_ZONE_DATA))
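    # Added note: the root_tag attributes above reflect the API's JSON
    # envelope -- _build_body() (from the base test class) wraps request
    # dicts as {'availability_zone_profile': {...}}, and responses are
    # unwrapped the same way via response.json.get(self.root_tag).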
def test_empty_list(self):
response = self.get(self.AZPS_PATH)
api_list = response.json.get(self.root_tag_list)
self.assertEqual([], api_list)
def test_create(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body)
api_azp = response.json.get(self.root_tag)
self._assert_request_matches_response(az_json, api_azp)
def test_create_with_missing_name(self):
az_json = {constants.PROVIDER_NAME: 'pr1',
constants.AVAILABILITY_ZONE_DATA: '{"x": "y"}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body, status=400)
err_msg = ("Invalid input for field/attribute name. Value: "
"'None'. Mandatory field missing.")
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_create_with_missing_provider(self):
az_json = {'name': 'xyz',
constants.AVAILABILITY_ZONE_DATA: '{"x": "y"}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body, status=400)
err_msg = ("Invalid input for field/attribute provider_name. "
"Value: 'None'. Mandatory field missing.")
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_create_with_missing_availability_zone_data(self):
az_json = {'name': 'xyz', constants.PROVIDER_NAME: 'pr1'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body, status=400)
err_msg = ("Invalid input for field/attribute availability_zone_data. "
"Value: 'None'. Mandatory field missing.")
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_create_with_empty_availability_zone_data(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body)
api_azp = response.json.get(self.root_tag)
self._assert_request_matches_response(az_json, api_azp)
def test_create_with_long_name(self):
az_json = {'name': 'n' * 256, constants.PROVIDER_NAME: 'test1',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
self.post(self.AZPS_PATH, body, status=400)
def test_create_with_long_provider(self):
az_json = {'name': 'name1', constants.PROVIDER_NAME: 'n' * 256,
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
self.post(self.AZPS_PATH, body, status=400)
def test_create_with_long_availability_zone_data(self):
az_json = {'name': 'name1', constants.PROVIDER_NAME: 'amp',
constants.AVAILABILITY_ZONE_DATA: 'n' * 4097}
body = self._build_body(az_json)
self.post(self.AZPS_PATH, body, status=400)
def test_create_authorized(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
response = self.post(self.AZPS_PATH, body)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
api_azp = response.json.get(self.root_tag)
self._assert_request_matches_response(az_json, api_azp)
def test_create_not_authorized(self):
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
az_json = {'name': 'name',
constants.PROVIDER_NAME: 'xyz',
constants.AVAILABILITY_ZONE_DATA: '{"x": "y"}'}
body = self._build_body(az_json)
response = self.post(self.AZPS_PATH, body, status=403)
api_azp = response.json
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
self.assertEqual(self.NOT_AUTHORIZED_BODY, api_azp)
def test_create_db_failure(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(az_json)
with mock.patch(
"octavia.db.repositories.AvailabilityZoneProfileRepository."
"create") as mock_create:
mock_create.side_effect = Exception
self.post(self.AZPS_PATH, body, status=500)
mock_create.side_effect = odb_exceptions.DBDuplicateEntry
self.post(self.AZPS_PATH, body, status=409)
def test_create_with_invalid_json(self):
az_json = {'name': 'test1', constants.PROVIDER_NAME: 'noop_driver',
constants.AVAILABILITY_ZONE_DATA: '{hello: "world"}'}
body = self._build_body(az_json)
self.post(self.AZPS_PATH, body, status=400)
def test_get(self):
azp = self.create_availability_zone_profile(
'name', 'noop_driver', '{"x": "y"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
response = self.get(
self.AZP_PATH.format(
azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('name', response.get('name'))
self.assertEqual(azp.get('id'), response.get('id'))
def test_get_one_deleted_id(self):
response = self.get(self.AZP_PATH.format(azp_id=constants.NIL_UUID),
status=404)
self.assertEqual('Availability Zone Profile {} not found.'.format(
constants.NIL_UUID), response.json.get('faultstring'))
def test_get_one_fields_filter(self):
azp = self.create_availability_zone_profile(
'name', 'noop_driver', '{"x": "y"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id')), params={
'fields': ['id', constants.PROVIDER_NAME]}
).json.get(self.root_tag)
self.assertEqual(azp.get('id'), response.get('id'))
self.assertIn(u'id', response)
self.assertIn(constants.PROVIDER_NAME, response)
self.assertNotIn(u'name', response)
self.assertNotIn(constants.AVAILABILITY_ZONE_DATA, response)
def test_get_authorized(self):
azp = self.create_availability_zone_profile(
'name', 'noop_driver', '{"x": "y"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
response = self.get(
self.AZP_PATH.format(
azp_id=azp.get('id'))).json.get(self.root_tag)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
self.assertEqual('name', response.get('name'))
self.assertEqual(azp.get('id'), response.get('id'))
def test_get_not_authorized(self):
azp = self.create_availability_zone_profile(
'name', 'noop_driver', '{"x": "y"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
self.get(self.AZP_PATH.format(azp_id=azp.get('id')), status=403)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
def test_get_all(self):
fp1 = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
ref_fp_1 = {u'availability_zone_data': u'{"compute_zone": "my_az_1"}',
u'id': fp1.get('id'), u'name': u'test1',
constants.PROVIDER_NAME: u'noop_driver'}
self.assertTrue(uuidutils.is_uuid_like(fp1.get('id')))
fp2 = self.create_availability_zone_profile(
'test2', 'noop_driver-alt', '{"compute_zone": "my_az_1"}')
ref_fp_2 = {u'availability_zone_data': u'{"compute_zone": "my_az_1"}',
u'id': fp2.get('id'), u'name': u'test2',
constants.PROVIDER_NAME: u'noop_driver-alt'}
self.assertTrue(uuidutils.is_uuid_like(fp2.get('id')))
response = self.get(self.AZPS_PATH)
api_list = response.json.get(self.root_tag_list)
self.assertEqual(2, len(api_list))
self.assertIn(ref_fp_1, api_list)
self.assertIn(ref_fp_2, api_list)
def test_get_all_fields_filter(self):
fp1 = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp1.get('id')))
fp2 = self.create_availability_zone_profile(
'test2', 'noop_driver-alt', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp2.get('id')))
response = self.get(self.AZPS_PATH, params={
'fields': ['id', 'name']})
api_list = response.json.get(self.root_tag_list)
self.assertEqual(2, len(api_list))
for profile in api_list:
self.assertIn(u'id', profile)
self.assertIn(u'name', profile)
self.assertNotIn(constants.PROVIDER_NAME, profile)
self.assertNotIn(constants.AVAILABILITY_ZONE_DATA, profile)
def test_get_all_authorized(self):
fp1 = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp1.get('id')))
fp2 = self.create_availability_zone_profile(
'test2', 'noop_driver-alt', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp2.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
response = self.get(self.AZPS_PATH)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
api_list = response.json.get(self.root_tag_list)
self.assertEqual(2, len(api_list))
def test_get_all_not_authorized(self):
fp1 = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp1.get('id')))
fp2 = self.create_availability_zone_profile(
'test2', 'noop_driver-alt', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(fp2.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
self.get(self.AZPS_PATH, status=403)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
def test_update(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
update_data = {'name': 'the_profile',
constants.PROVIDER_NAME: 'noop_driver-alt',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(update_data)
self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('the_profile', response.get('name'))
self.assertEqual('noop_driver-alt',
response.get(constants.PROVIDER_NAME))
self.assertEqual('{"hello": "world"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_deleted_id(self):
update_data = {'name': 'fake_profile'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=constants.NIL_UUID),
body, status=404)
self.assertEqual('Availability Zone Profile {} not found.'.format(
constants.NIL_UUID), response.json.get('faultstring'))
def test_update_nothing(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
body = self._build_body({})
self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_name_none(self):
self._test_update_param_none(constants.NAME)
def test_update_provider_name_none(self):
self._test_update_param_none(constants.PROVIDER_NAME)
def test_update_availability_zone_data_none(self):
self._test_update_param_none(constants.AVAILABILITY_ZONE_DATA)
def _test_update_param_none(self, param_name):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
expect_error_msg = ("None is not a valid option for %s" %
param_name)
body = self._build_body({param_name: None})
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body,
status=400)
self.assertEqual(expect_error_msg, response.json['faultstring'])
def test_update_no_availability_zone_data(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
update_data = {'name': 'the_profile',
constants.PROVIDER_NAME: 'noop_driver-alt'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('the_profile', response.get('name'))
self.assertEqual('noop_driver-alt',
response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_authorized(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
update_data = {'name': 'the_profile',
constants.PROVIDER_NAME: 'noop_driver-alt',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(update_data)
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')),
body)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('the_profile', response.get('name'))
self.assertEqual('noop_driver-alt',
response.get(constants.PROVIDER_NAME))
self.assertEqual('{"hello": "world"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_not_authorized(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
update_data = {'name': 'the_profile', constants.PROVIDER_NAME: 'amp',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(update_data)
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')),
body, status=403)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_update_in_use(self):
azp = self.create_availability_zone_profile(
'test_profile', 'noop_driver', '{"x": "y"}')
self.create_availability_zone(
'name1', 'description', azp.get('id'), True)
# Test updating provider while in use is not allowed
update_data = {'name': 'the_profile',
constants.PROVIDER_NAME: 'noop_driver-alt'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body,
status=409)
err_msg = ("Availability Zone Profile {} is in use and cannot be "
"modified.".format(azp.get('id')))
self.assertEqual(err_msg, response.json.get('faultstring'))
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
# Test updating availability zone data while in use is not allowed
update_data = {'name': 'the_profile',
constants.AVAILABILITY_ZONE_DATA: '{"hello": "world"}'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body,
status=409)
err_msg = ("Availability Zone Profile {} is in use and cannot be "
"modified.".format(azp.get('id')))
self.assertEqual(err_msg, response.json.get('faultstring'))
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
# Test that you can still update the name when in use
update_data = {'name': 'the_profile'}
body = self._build_body(update_data)
response = self.put(self.AZP_PATH.format(azp_id=azp.get('id')), body)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('the_profile', response.get('name'))
self.assertEqual('noop_driver', response.get(constants.PROVIDER_NAME))
self.assertEqual('{"x": "y"}',
response.get(constants.AVAILABILITY_ZONE_DATA))
def test_delete(self):
azp = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.delete(self.AZP_PATH.format(azp_id=azp.get('id')))
response = self.get(self.AZP_PATH.format(
azp_id=azp.get('id')), status=404)
err_msg = "Availability Zone Profile %s not found." % azp.get('id')
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_delete_deleted_id(self):
response = self.delete(self.AZP_PATH.format(azp_id=constants.NIL_UUID),
status=404)
self.assertEqual('Availability Zone Profile {} not found.'.format(
constants.NIL_UUID), response.json.get('faultstring'))
def test_delete_nonexistent_id(self):
response = self.delete(self.AZP_PATH.format(azp_id='bogus_id'),
status=404)
self.assertEqual('Availability Zone Profile bogus_id not found.',
response.json.get('faultstring'))
def test_delete_authorized(self):
azp = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
project_id = uuidutils.generate_uuid()
with mock.patch.object(octavia.common.context.Context, 'project_id',
project_id):
override_credentials = {
'service_user_id': None,
'user_domain_id': None,
'is_admin_project': True,
'service_project_domain_id': None,
'service_project_id': None,
'roles': ['load-balancer_member'],
'user_id': None,
'is_admin': True,
'service_user_domain_id': None,
'project_domain_id': None,
'service_roles': [],
'project_id': project_id}
with mock.patch(
"oslo_context.context.RequestContext.to_policy_values",
return_value=override_credentials):
self.delete(self.AZP_PATH.format(azp_id=azp.get('id')))
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
response = self.get(self.AZP_PATH.format(
azp_id=azp.get('id')), status=404)
err_msg = "Availability Zone Profile %s not found." % azp.get('id')
self.assertEqual(err_msg, response.json.get('faultstring'))
def test_delete_not_authorized(self):
azp = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.assertTrue(uuidutils.is_uuid_like(azp.get('id')))
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
response = self.delete(self.AZP_PATH.format(
azp_id=azp.get('id')), status=403)
api_azp = response.json
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
self.assertEqual(self.NOT_AUTHORIZED_BODY, api_azp)
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test1', response.get('name'))
def test_delete_in_use(self):
azp = self.create_availability_zone_profile(
'test1', 'noop_driver', '{"compute_zone": "my_az_1"}')
self.create_availability_zone(
'name1', 'description', azp.get('id'), True)
response = self.delete(self.AZP_PATH.format(azp_id=azp.get('id')),
status=409)
err_msg = ("Availability Zone Profile {} is in use and cannot be "
"modified.".format(azp.get('id')))
self.assertEqual(err_msg, response.json.get('faultstring'))
response = self.get(
self.AZP_PATH.format(azp_id=azp.get('id'))).json.get(self.root_tag)
self.assertEqual('test1', response.get('name'))
| apache-2.0 | 1,955,087,208,449,069,000 | 49.337861 | 79 | 0.58835 | false |
robertchase/spindrift | test/test_http.py | 1 | 2290 | import gzip
import pytest
import spindrift.http as http
import spindrift.network as network
PORT = 12345
class Context(object):
def __init__(self):
self.server = 0
self.client = 0
class Server(http.HTTPHandler):
def on_http_data(self):
self.context.server += 1
self.http_send_server()
class Client(http.HTTPHandler):
def on_ready(self):
self.http_send()
def on_http_data(self):
self.context.client += 1
self.close('done')
@pytest.fixture
def ctx():
return Context()
@pytest.fixture
def net(ctx):
n = network.Network()
n.add_server(PORT, Server, context=ctx)
yield n
n.close()
def test_basic(ctx, net):
c = net.add_connection('localhost', PORT, Client, context=ctx)
while c.is_open:
net.service()
assert ctx.server == 1
assert ctx.client == 1
class PipelineClient(http.HTTPHandler):
def on_ready(self):
self.http_send()
self.http_send()
self.http_send()
def on_http_data(self):
self.context.client += 1
if self.context.client == 3:
self.close()
def test_pipeline(ctx, net):
c = net.add_connection('localhost', PORT, PipelineClient, context=ctx)
while c.is_open:
net.service()
assert ctx.server == 3
assert ctx.client == 3
def test_gzip():
handler = http.HTTPHandler(0, network.Network())
data = b'This Is A Test'
zdata = gzip.compress(data)
handler.http_content = zdata
handler._on_http_data()
assert handler.http_content == zdata
handler.http_headers = {'content-encoding': 'gzip'}
handler._on_http_data()
assert handler.http_content == data
handler.http_headers['content-type'] = 'text/html; charset=utf-8'
handler.http_content = zdata
handler._on_http_data()
assert handler.http_content == data.decode()
def test_server_compress():
data = 'This is a TeSt'
class _handler(http.HTTPHandler):
def _send(self, headers, content):
print(headers)
self.tested = True
assert content == gzip.compress(data.encode())
handler = _handler(0, network.Network())
handler.tested = False
handler.http_send_server(data, gzip=True)
assert handler.tested
| mit | -7,030,067,683,979,909,000 | 20.203704 | 74 | 0.624017 | false |
RUBi-ZA/JMS | src/JMS/wsgi.py | 1 | 1589 | """
WSGI config for JMS project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "hvd.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "JMS.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
#from django.core.wsgi import get_wsgi_application
#application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
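# A further middleware sketch in the same commented-out style; it is an
# assumption for illustration, not part of this project, and would go after
# the "application = get_wsgi_application()" line below. It wraps the Django
# application and stamps an extra response header.
# class HeaderMiddleware(object):
#     def __init__(self, wrapped):
#         self.wrapped = wrapped
#     def __call__(self, environ, start_response):
#         def _start_response(status, headers, exc_info=None):
#             headers.append(('X-Served-By', 'JMS'))
#             return start_response(status, headers, exc_info)
#         return self.wrapped(environ, _start_response)
# application = HeaderMiddleware(application)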
import django
from django.core.handlers.wsgi import WSGIHandler
def get_wsgi_application():
django.setup()
return WSGIHandler()
application = get_wsgi_application()
| gpl-2.0 | 7,755,971,003,792,942,000 | 36.833333 | 79 | 0.786029 | false |
CityGenerator/Megacosm-Generator | tests/test_magicitem.py | 1 | 2982 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"Fully test this module's functionality through the use of fixtures."
from megacosm.generators import MagicItem
from megacosm.generators import NPC
from megacosm.generators import Curse
import unittest2 as unittest
import fixtures
import fakeredis
from config import TestConfiguration
class TestMagicItem(unittest.TestCase):
def setUp(self):
""" """
self.redis = fakeredis.FakeRedis()
fixtures.magicitem.import_fixtures(self)
fixtures.npc.import_fixtures(self)
fixtures.phobia.import_fixtures(self)
fixtures.motivation.import_fixtures(self)
fixtures.curse.import_fixtures(self)
self.redis.lpush('npc_race','gnome')
def tearDown(self):
self.redis.flushall()
def test_random_magicitem(self):
""" Test a "random" magicitem. """
magicitem = MagicItem(self.redis)
self.assertEqual('Muffled Leather Armor Of Shock', str(magicitem))
    def test_magicitem_static_npc(self):
        """ Pass in a pre-built NPC object. """
npc=NPC(self.redis)
magicitem = MagicItem(self.redis, { 'npc':npc })
self.assertEqual('Muffled Leather Armor Of Shock', str(magicitem))
    def test_magicitem_static_curse(self):
        """ Pass in a pre-built Curse object. """
curse=Curse(self.redis, {'text':'Buzzkill Curse'})
magicitem = MagicItem(self.redis, { 'curse':curse })
self.assertEqual('Muffled Leather Armor Of Shock', str(magicitem))
self.assertEqual('Buzzkill Curse', str(magicitem.curse))
    def test_magicitem_rolled_curse(self):
        """ Force a curse roll via a huge curse_chance. """
magicitem = MagicItem(self.redis, { 'curse_chance':1090 })
self.assertEqual('Muffled Leather Armor Of Shock', str(magicitem))
self.assertIn('The Bezerker Curse', str(magicitem.curse))
    def test_magicitem_static_text(self):
        """ Pass in the item text directly. """
magicitem = MagicItem(self.redis, {
'text': 'Tacos Tacos Tacos',
})
self.assertEqual('Tacos Tacos Tacos', magicitem.text)
    def test_magicitem_static_potion(self):
        """ Force the item kind to potion. """
self.redis.lpush('magicitem_kind', 'potion')
magicitem = MagicItem(self.redis, { 'kind':'potion'})
self.assertEqual('Powerful Accuracy Boost Potion', str(magicitem))
    def test_magicitem_static_scroll(self):
        """ Force the item kind to scroll. """
self.redis.lpush('magicitem_kind', 'scroll')
magicitem = MagicItem(self.redis, { 'kind':'scroll'})
self.assertEqual('Powerful Ad Nauseum Scroll', str(magicitem))
    def test_magicitem_static_weapon(self):
        """ Force the item kind to weapon. """
self.redis.lpush('magicitem_kind', 'weapon')
magicitem = MagicItem(self.redis, { 'kind':'weapon'})
self.assertEqual('Sharp Sword Of The Bull', str(magicitem))
| gpl-2.0 | 744,235,947,299,921,000 | 34.5 | 74 | 0.648893 | false |
jinhong666/Python | AvailabilityMonitor/DbAccess/dbaccesser.py | 1 | 1073 | #!/usr/bin/python
from Tools.datetimetool import DateTimeTool
import logging
from DbAccess import DBHelper
SOCKET_TIMEOUT = 1
class DbAccesser:
def __init__(self,host,user,pwd,db):
self._dbHelper = DBHelper(host,user,pwd,db)
self._logger = logging.getLogger("root")
def RecordMonitor(self,domain,url,ip,status,isVip):
sqlStr = 'insert into MonitorRecord(domain,WebIP,MonStatus,monTime,isVip,monUrl) VALUES (%s,%s,%s,%s,%s,%s)'
params = (domain,ip,status,DateTimeTool.GetCurrentTimeStr(),isVip,url)
try:
self._dbHelper.ExcuteNoneQuery(sqlStr,params)
except Exception as e:
            logging.error("Failed to record monitor data: %s", e.args[1])
def GetDayStat(self,domain,url,ip,isVip):
sqlStr = "select count(1) from DayStat where Domain=%s and ip=%s and isVip=%s and monUrl=%s"
params = (domain,ip,isVip,url)
        try:
            return self._dbHelper.ExcuteScalarQuery(sqlStr,params)
        except Exception as e:
            self._logger.error('Failed to get daily stats: %s', e.args[1])
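# Hedged usage sketch; host, credentials and values below are placeholders,
# not taken from this project:
#
#     accesser = DbAccesser('127.0.0.1', 'monitor', 'secret', 'monitor_db')
#     accesser.RecordMonitor('example.com', 'http://example.com/', '1.2.3.4',
#                            200, 0)
#     count = accesser.GetDayStat('example.com', 'http://example.com/',
#                                 '1.2.3.4', 0)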
| apache-2.0 | 5,166,579,525,175,662,000 | 33.633333 | 116 | 0.6564 | false |
googleapis/python-aiplatform | samples/snippets/create_hyperparameter_tuning_job_python_package_sample.py | 1 | 3771 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START aiplatform_create_hyperparameter_tuning_job_python_package_sample]
from google.cloud import aiplatform
def create_hyperparameter_tuning_job_python_package_sample(
project: str,
display_name: str,
executor_image_uri: str,
package_uri: str,
python_module: str,
location: str = "us-central1",
api_endpoint: str = "us-central1-aiplatform.googleapis.com",
):
# The AI Platform services require regional API endpoints.
client_options = {"api_endpoint": api_endpoint}
# Initialize client that will be used to create and send requests.
# This client only needs to be created once, and can be reused for multiple requests.
client = aiplatform.gapic.JobServiceClient(client_options=client_options)
# study_spec
metric = {
"metric_id": "val_rmse",
"goal": aiplatform.gapic.StudySpec.MetricSpec.GoalType.MINIMIZE,
}
conditional_parameter_decay = {
"parameter_spec": {
"parameter_id": "decay",
"double_value_spec": {"min_value": 1e-07, "max_value": 1},
"scale_type": aiplatform.gapic.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
},
"parent_discrete_values": {"values": [32, 64]},
}
conditional_parameter_learning_rate = {
"parameter_spec": {
"parameter_id": "learning_rate",
"double_value_spec": {"min_value": 1e-07, "max_value": 1},
"scale_type": aiplatform.gapic.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
},
"parent_discrete_values": {"values": [4, 8, 16]},
}
parameter = {
"parameter_id": "batch_size",
"discrete_value_spec": {"values": [4, 8, 16, 32, 64, 128]},
"scale_type": aiplatform.gapic.StudySpec.ParameterSpec.ScaleType.UNIT_LINEAR_SCALE,
"conditional_parameter_specs": [
conditional_parameter_decay,
conditional_parameter_learning_rate,
],
}
# trial_job_spec
machine_spec = {
"machine_type": "n1-standard-4",
"accelerator_type": aiplatform.gapic.AcceleratorType.NVIDIA_TESLA_K80,
"accelerator_count": 1,
}
worker_pool_spec = {
"machine_spec": machine_spec,
"replica_count": 1,
"python_package_spec": {
"executor_image_uri": executor_image_uri,
"package_uris": [package_uri],
"python_module": python_module,
"args": [],
},
}
# hyperparameter_tuning_job
hyperparameter_tuning_job = {
"display_name": display_name,
"max_trial_count": 4,
"parallel_trial_count": 2,
"study_spec": {
"metrics": [metric],
"parameters": [parameter],
"algorithm": aiplatform.gapic.StudySpec.Algorithm.RANDOM_SEARCH,
},
"trial_job_spec": {"worker_pool_specs": [worker_pool_spec]},
}
parent = f"projects/{project}/locations/{location}"
response = client.create_hyperparameter_tuning_job(
parent=parent, hyperparameter_tuning_job=hyperparameter_tuning_job
)
print("response:", response)
# [END aiplatform_create_hyperparameter_tuning_job_python_package_sample]
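# Hedged invocation sketch -- every value below is a placeholder for
# illustration, not taken from this repository:
#
#     create_hyperparameter_tuning_job_python_package_sample(
#         project="my-project",
#         display_name="hp-tuning-job",
#         executor_image_uri="us-docker.pkg.dev/vertex-ai/training/tf-cpu.2-1:latest",
#         package_uri="gs://my-bucket/trainer-0.1.tar.gz",
#         python_module="trainer.task",
#     )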
| apache-2.0 | 1,663,557,871,573,529,000 | 35.970588 | 95 | 0.638823 | false |
poffey21/edge | src/authentication/tests.py | 1 | 8664 | import re
from django.conf import settings
from django.contrib import auth
from django.contrib.auth import authenticate, get_user_model
from django.db.models.functions import Length
from django.test import Client
from django.test import TestCase
# Create your tests here.
from django.urls import reverse
from django.utils.crypto import get_random_string
from . import models
def special_match(strg, search=re.compile(r'[^a-zA-Z0-9!]').search):
return not bool(search(strg))
class TokenTestCase(TestCase):
"""Quick and simple unit tests for Token Model"""
def test_random_string(self):
self.assertEqual(128, len(models.unusable_string()))
def test_creation_of_token(self):
""" Let's ensure that the token is the correct length and that it only has one ! """
obj = models.Token.objects.create(
user=authenticate(username=settings.TEST_LDAP_USER)
)
self.assertEqual(obj.__unicode__(), u'Not yet generated.')
self.assertEqual(18, len(obj.hint))
self.assertEqual(128, len(obj.api_key_encoded))
self.assertEqual(obj.user, authenticate(username=settings.TEST_LDAP_USER))
for x in range(100):
api_key = obj.generate_api_key('d' * x)
self.assertEqual(16, len(obj.hint))
self.assertEqual(obj.__str__(), obj.hint)
self.assertEqual(81, len(api_key))
self.assertIn('$', obj.api_key_encoded)
self.assertEqual(2, len(api_key.split('!')))
r = special_match(api_key)
if not r:
print(api_key)
self.assertTrue(r)
    def test_multiple_random_keys_can_be_blank(self):
        """ Ensure all 100 freshly created tokens get a non-trivial random key. """
mgr = models.Token.objects
u_mgr = get_user_model().objects
for x in range(100):
mgr.create(user=u_mgr.create(username=get_random_string(16)), )
self.assertEqual(100, mgr.annotate(
text_len=Length('random_key')
).filter(text_len__gt=10).count())
class SubscriptionTestCase(TestCase):
def setUp(self):
self.username = unicode(settings.TEST_LDAP_USER)
self.password = unicode(settings.TEST_LDAP_PASS)
self.first_name = unicode(settings.TEST_LDAP_FIRST_NAME)
self.last_name = unicode(settings.TEST_LDAP_LAST_NAME)
self.email = unicode(settings.TEST_LDAP_EMAIL)
self.secondary_id = unicode(settings.TEST_LDAP_SECONDARY_USER)
def test_create_active_user_with_lowered_username(self):
user = authenticate(username=self.username.lower(), activate=True)
self.assertTrue(user.is_active)
self.assertEqual(self.username.upper(), user.username)
self.assertEqual(self.first_name, user.first_name)
self.assertEqual(self.last_name, user.last_name)
self.assertEqual(self.email, user.email)
def test_create_active_user(self):
user = authenticate(username=self.username, activate=True)
self.assertTrue(user.is_active)
def test_create_user_with_default_active_setting(self):
user = authenticate(username=self.username)
self.assertTrue(user.is_active)
def test_create_inactive_user_then_activate(self):
user = authenticate(username=self.username, activate=False)
self.assertFalse(user.is_active)
user = authenticate(username=self.username, activate=True)
self.assertTrue(user.is_active)
def test_create_active_user_and_pass_inactive(self):
user = authenticate(username=self.username, activate=True)
self.assertTrue(user.is_active)
user = authenticate(username=self.username, activate=False)
self.assertTrue(user.is_active)
def test_ability_to_login(self):
self.client = Client()
self.client.login(username=self.username)
user = auth.get_user(self.client)
self.assertTrue(user.is_authenticated())
########################
# LDAP LOGIN TIME
########################
def test_create_active_ldap_user_with_lowered_username(self):
user = authenticate(username=self.username, password=settings.TEST_LDAP_PASS, activate=True)
self.assertTrue(user.is_active)
self.assertEqual(self.username.upper(), user.username)
self.assertEqual(self.first_name, user.first_name)
self.assertEqual(self.last_name, user.last_name)
self.assertEqual(self.email, user.email)
def test_create_active_ldap_user(self):
user = authenticate(username=self.username, activate=True)
self.assertTrue(user.is_active)
def test_create_ldap_user_with_bad_password(self):
user = authenticate(username=self.username[:-1], password='bad')
self.assertIsNone(user)
def test_create_ldap_user_with_default_active_setting(self):
user = authenticate(username=self.username)
self.assertTrue(user.is_active)
def test_create_inactive_ldap_user_then_activate(self):
user = authenticate(username=self.username, activate=False)
self.assertFalse(user.is_active)
user = authenticate(username=self.username, activate=True)
self.assertTrue(user.is_active)
def test_create_active_ldap_user_and_pass_inactive(self):
user = authenticate(username=self.username, activate=True)
self.assertTrue(user.is_active)
user = authenticate(username=self.username, activate=False)
self.assertTrue(user.is_active)
def test_ability_to_login_ldap_user(self):
self.client = Client()
self.client.login(username=self.username)
user = auth.get_user(self.client)
self.assertTrue(user.is_authenticated())
def test_set_session_view_not_allowed(self):
self.client = Client()
self.client.login(username=self.username)
user = auth.get_user(self.client)
r = self.client.get(reverse('account:set_session') + '?user_id={}'.format(user.username), follow=True)
messages = list(r.context['messages']) if 'messages' in r.context else []
print(', '.join([str(x) for x in messages]))
self.assertTrue(messages)
        self.assertEqual('You are not allowed to use this feature.', str(messages[0]))
def test_set_session_view_authentication_failed(self):
self.client = Client()
self.client.login(username=self.username)
user = auth.get_user(self.client)
r = self.client.get(reverse('account:set_session') + '?user_id={}'.format('no_id_here'), follow=True)
messages = list(r.context['messages']) if 'messages' in r.context else []
print(', '.join([str(x) for x in messages]))
self.assertTrue(messages)
        self.assertEqual('Unable to login as different user. Authenticate stage failed', str(messages[0]))
def test_set_session_view_allowed(self):
self.client = Client()
self.client.login(username=self.username)
user = auth.get_user(self.client)
user.is_superuser = True
user.save()
r = self.client.get(reverse('account:set_session') +
'?user_id={}'.format(self.username) + '&next={}'.format(reverse('account:group-list')),
follow=True)
user = auth.get_user(self.client)
messages = list(r.context['messages']) if 'messages' in r.context else []
print(', '.join([str(x) for x in messages]))
self.assertFalse(messages)
self.assertEqual(str(self.username).upper(), user.username)
r = self.client.get(reverse('account:set_session') + '?user_id={}'.format(self.secondary_id), follow=True)
user = auth.get_user(self.client)
messages = list(r.context['messages']) if 'messages' in r.context else []
print(', '.join([str(x) for x in messages]))
self.assertFalse(messages)
self.assertEqual(self.secondary_id, user.username)
def test_logout_view_succeeds(self):
self.client = Client()
self.client.login(username=self.username)
r = self.client.get(reverse('account:logout'))
user = auth.get_user(self.client)
self.assertTrue(user.is_anonymous)
def test_profile_view_succeeds(self):
self.client = Client()
self.client.login(username=self.username)
r = self.client.get(reverse('account:profile'))
self.assertContains(r, self.last_name)
def test_group_list_view_succeeds(self):
self.client = Client()
self.client.login(username=self.username)
r = self.client.get(reverse('account:group-list'))
self.assertContains(r, self.last_name)
| mit | -395,900,838,223,951,100 | 41.679803 | 115 | 0.651662 | false |
dNG-git/pas_upnp | src/dNG/data/upnp/identifier_mixin.py | 1 | 5198 | # -*- coding: utf-8 -*-
"""
direct PAS
Python Application Services
----------------------------------------------------------------------------
(C) direct Netware Group - All rights reserved
https://www.direct-netware.de/redirect?pas;upnp
The following license agreement remains valid unless any additions or
changes are being made by direct Netware Group in a written form.
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
----------------------------------------------------------------------------
https://www.direct-netware.de/redirect?licenses;gpl
----------------------------------------------------------------------------
#echo(pasUPnPVersion)#
#echo(__FILEPATH__)#
"""
import re
from dNG.data.binary import Binary
class IdentifierMixin(object):
"""
"IdentifierMixin" implements methods to get UPnP identifier values.
:author: direct Netware Group et al.
:copyright: direct Netware Group - All rights reserved
:package: pas
:subpackage: upnp
:since: v0.2.00
:license: https://www.direct-netware.de/redirect?licenses;gpl
GNU General Public License 2
"""
RE_USN_URN = re.compile("^urn:(.+):(.+):(.*):(.*)$", re.I)
"""
URN RegExp
"""
def __init__(self):
"""
Constructor __init__(IdentifierMixin)
:since: v0.2.00
"""
self.identifier = None
"""
Parsed UPnP identifier
"""
#
def _get_identifier(self):
"""
Returns the UPnP USN string.
:return: (dict) Parsed UPnP identifier; None if not set
:since: v0.2.00
"""
return self.identifier
#
def get_type(self):
"""
Returns the UPnP service type.
:return: (str) Service type
:since: v0.2.00
"""
return self.identifier['type']
#
def get_udn(self):
"""
Returns the UPnP UDN value.
:return: (str) UPnP service UDN
:since: v0.2.00
"""
return self.identifier['uuid']
#
def get_upnp_domain(self):
"""
Returns the UPnP service specification domain.
:return: (str) UPnP service specification domain
:since: v0.2.00
"""
return self.identifier['domain']
#
def get_urn(self):
"""
Returns the UPnP serviceType value.
:return: (str) UPnP URN
:since: v0.2.00
"""
return self.identifier['urn']
#
def get_usn(self):
"""
Returns the UPnP USN value.
:return: (str) UPnP USN
:since: v0.2.00
"""
return "uuid:{0}::urn:{1}".format(self.get_udn(), self.get_urn())
#
def get_version(self):
"""
Returns the UPnP device type version.
:return: (str) Device type version; None if undefined
:since: v0.2.00
"""
return self.identifier.get("version")
#
def _set_identifier(self, identifier):
"""
Sets the UPnP USN identifier.
:param identifier: Parsed UPnP identifier
:since: v0.2.00
"""
self.identifier = identifier
#
@staticmethod
def get_identifier(usn, bootid = None, configid = None):
"""
Parses the given UPnP USN string.
:param usn: UPnP USN
:param bootid: UPnP bootId (bootid.upnp.org) if any
:param configid: UPnP configId (configid.upnp.org) if any
:return: (dict) Parsed UPnP identifier; None on error
:since: v0.2.00
"""
usn = Binary.str(usn)
if (type(usn) == str):
usn_data = usn.split("::", 1)
device_id = usn_data[0].lower().replace("-", "")
else: device_id = ""
if (device_id.startswith("uuid:")):
device_id = device_id[5:]
_return = { "device": device_id,
"bootid": None,
"configid": None,
"uuid": usn_data[0][5:],
"class": "unknown",
"usn": usn
}
if (bootid is not None and configid is not None):
_return['bootid'] = bootid
_return['configid'] = configid
#
re_result = (IdentifierMixin.RE_USN_URN.match(usn_data[1]) if (len(usn_data) > 1) else None)
if (re_result is not None):
_return['urn'] = usn_data[1][4:]
_return['domain'] = re_result.group(1)
_return['class'] = re_result.group(2)
_return['type'] = re_result.group(3)
_return['version'] = re_result.group(4)
elif (usn[-17:].lower() == "::upnp:rootdevice"): _return['class'] = "rootdevice"
else: _return = None
return _return
#
#
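# Hedged usage sketch for the parser above; the USN value is invented for
# illustration:
#
#     ident = IdentifierMixin.get_identifier(
#         "uuid:0e244c6f-a7e9-4e67-95f2-1d2417e2af41"
#         "::urn:schemas-upnp-org:service:ContentDirectory:1")
#     # ident['class'] == "service"; ident['type'] == "ContentDirectory";
#     # ident['version'] == "1"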
| gpl-2.0 | 28,591,505,664,425,940 | 24.111111 | 104 | 0.563678 | false |
JackyChou/SGRS | SGRS/common_settings.py | 1 | 3285 | """
Django settings for SGRS project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^2oeytt80lcv67-b7o3x4dav&x08ao&@d3k01-p8=s=ygbgz5u'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'GeneralReport',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'SGRS.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'SGRS.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'SGRS_db',
'USER': 'jacky',
'PASSWORD': 'jacky',
'HOST': '127.0.0.1',
'PORT': '',
}
}
DB_FOR_CHOICES = (
('default', u'test db',),
)
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'query_result_cache',
        'KEY_PREFIX': 'SGRS',
        'TIMEOUT': 60 * 30,
        # MAX_ENTRIES is a cache-backend option, so Django expects it
        # inside OPTIONS rather than at the top level.
        'OPTIONS': {'MAX_ENTRIES': 100},
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, "static")
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'GeneralReport.SGRSUser'
LOGIN_URL = '/sgrs/login/'
import time, datetime
CLEAN_TMP_FILE_TIMESTAMP = int(time.mktime(
(datetime.date.today() - datetime.timedelta(days=1)).timetuple()
))
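# Hedged illustration of the expression above: it is the Unix timestamp of
# local midnight at the start of yesterday, e.g. on 2015-06-02 it equals
# int(time.mktime(datetime.date(2015, 6, 1).timetuple())).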
| gpl-2.0 | 4,849,460,929,883,837,000 | 24.269231 | 71 | 0.666971 | false |
miguelgrinberg/microblog | migrations/versions/780739b227a7_posts_table.py | 1 | 1057 | """posts table
Revision ID: 780739b227a7
Revises: e517276bb1c2
Create Date: 2017-09-11 12:23:25.496587
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '780739b227a7'
down_revision = 'e517276bb1c2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('post',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('body', sa.String(length=140), nullable=True),
sa.Column('timestamp', sa.DateTime(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_post_timestamp'), 'post', ['timestamp'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_post_timestamp'), table_name='post')
op.drop_table('post')
# ### end Alembic commands ###
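# Typical invocation, assuming the Flask-Migrate layout this migrations/
# directory suggests (plain Alembic equivalents in parentheses):
#
#     flask db upgrade     (alembic upgrade 780739b227a7)
#     flask db downgrade   (alembic downgrade e517276bb1c2)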
| mit | -2,497,830,188,465,469,400 | 27.567568 | 83 | 0.663198 | false |
googleapis/googleapis-gen | google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/enums/types/merchant_center_link_status.py | 1 | 1291 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v8.enums',
marshal='google.ads.googleads.v8',
manifest={
'MerchantCenterLinkStatusEnum',
},
)
class MerchantCenterLinkStatusEnum(proto.Message):
r"""Container for enum describing possible statuses of a Google
Merchant Center link.
"""
class MerchantCenterLinkStatus(proto.Enum):
r"""Describes the possible statuses for a link between a Google
Ads customer and a Google Merchant Center account.
"""
UNSPECIFIED = 0
UNKNOWN = 1
ENABLED = 2
PENDING = 3
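# Hedged usage sketch; proto-plus enums behave like IntEnum members:
#
#     status = MerchantCenterLinkStatusEnum.MerchantCenterLinkStatus.ENABLED
#     assert status.value == 2 and status.name == 'ENABLED'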
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 245,458,820,869,564,640 | 29.738095 | 74 | 0.693261 | false |
ActiDoo/gamification-engine | setup.py | 1 | 2935 | import os
import re
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'pyramid_tm',
'SQLAlchemy',
'transaction',
'zope.sqlalchemy',
'waitress',
'pytz',
'dogpile.cache',
'pyramid_dogpile_cache',
'Flask>=0.10.1',
'flask-admin',
'psycopg2',
'pymemcache',
'mock',
'alembic',
'raven',
'jsl',
'jsonschema',
'pyparsing',
'python-crontab',
'croniter',
'zope.interface',
'zope.sqlalchemy',
'argon2'
]
version = ''
with open('gengine/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
setup(name='gamification-engine',
version=version,
description='The Gamification-Engine (gengine) provides an API for integrating any kinds of gamification features.',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"Topic :: Software Development :: Libraries",
"Programming Language :: Python :: 3.6",
"License :: OSI Approved :: MIT License"
],
author='Marcel Sander, Jens Janiuk, Matthias Feldotto',
author_email='[email protected]',
license='MIT',
url='https://www.gamification-software.com',
keywords='web wsgi bfg pylons pyramid gamification',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='gengine',
install_requires=requires,
extras_require={
"auth": [
'argon2'
],
"pushes": [
'tapns3',
'python-gcm',
],
"testing": [
'testing.postgresql',
'testing.redis',
'names'
]
},
entry_points="""\
[paste.app_factory]
main = gengine:main
[console_scripts]
initialize_gengine_db = gengine.maintenance.scripts.initializedb:main
generate_gengine_erd = gengine.maintenance.scripts.generate_erd:main
generate_gengine_revision = gengine.maintenance.scripts.generate_revision:main
gengine_push_messages = gengine.maintenance.scripts.push_messages:main
gengine_scheduler_beat = gengine.maintenance.scripts.scheduler_beat:main
gengine_scheduler_worker = gengine.maintenance.scripts.scheduler_worker:main
[redgalaxy.plugins]
gengine = gengine:redgalaxy
""",
)
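# Hedged install sketch (commands assumed from standard setuptools usage,
# not quoted from the repository docs):
#
#     pip install -e .[testing]            # editable install with test extras
#     initialize_gengine_db <config.ini>   # console script declared above;
#                                          # arguments depend on deployment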
| mit | -1,113,054,953,801,862,000 | 28.646465 | 122 | 0.600341 | false |
keseldude/brobot | brobot/core/bot.py | 1 | 10481 | #===============================================================================
# brobot
# Copyright (C) 2010 Michael Keselman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#===============================================================================
from irc.clients import Client
from irc.structures import Server
from irc.events import Events
from irc.connections import IRCError
from threading import Thread
import itertools
import logging
import os
log = logging.getLogger(__name__)
class Plugin(object):
"""Abstract class, which initializes every plugin to have the essentials:
* a name
* a link to the ircbot
* the path to the common shelf (for serialized objects).
"""
name = 'unnamed'
admin = False
def __init__(self, ircbot):
self.ircbot = ircbot
self.shelf_path = os.path.join(ircbot.data_path, 'shelf.db')
try:
self.load()
except NotImplementedError:
pass
def load(self):
raise NotImplementedError
class CommandPlugin(Plugin):
"""Abstract Plugin to be used for commands."""
class Action(object):
PRIVMSG = staticmethod(lambda bot: bot.privmsg)
NOTICE = staticmethod(lambda bot: bot.notice)
def _process(self, connection, source, target, args):
result = self.process(connection, source, target, args)
if not result:
return
try:
action = result['action'](self.ircbot)
target = result['target']
message = result['message']
except KeyError:
log.error(u'Invalid plugin response.')
else:
if isinstance(message, basestring):
message = (message,)
for line in message:
try:
action(connection, target, line)
except IRCError as e:
log.error(e)
except Exception as e:
log.error('Unexpected exception occurred: %s' % e)
def process(self, connection, source, target, args):
raise NotImplementedError
def privmsg(self, target, message):
return {'action': self.Action.PRIVMSG,
'target': target,
'message': message
}
def notice(self, target, message):
return {'action': self.Action.NOTICE,
'target': target,
'message': message
}
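# Minimal concrete command plugin, included only as a hedged sketch of the
# contract "_process" expects: "process" returns one of the dicts built by
# "privmsg" or "notice". "EchoPlugin" is illustrative, not an original
# plugin of this bot.
class EchoPlugin(CommandPlugin):
    name = 'echo'
    def process(self, connection, source, target, args):
        # Echo the command arguments back to where the command was issued.
        return self.privmsg(target, u' '.join(args) or u'(nothing to echo)')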
class EventPlugin(Plugin):
"""Abstract Plugin to be used for events."""
def process(self, connection, source='', target='', args=None, message=''):
raise NotImplementedError
class IRCBot(Client):
"""Functional implementation of Client, which serves as an IRC bot as
opposed to a fully function client."""
def __init__(self, settings):
self.settings = settings
self.data_path = os.path.join(settings['base_path'],
settings['data_path'])
if not os.path.exists(self.data_path):
try:
os.mkdir(self.data_path)
except OSError:
raise Exception('Unable to create data directory.')
self._register_loggers()
self.pid_path = os.path.join(self.data_path, settings['pid_filename'])
self._save_pid(self.pid_path)
self.admins = {}
self.initial_channels = {}
servers = []
for server in settings['servers']:
irc_server = Server(server['host'], server['port'], server['nick'],
owner=server['owner'], name=server['name'],
use_ssl=server['ssl'])
servers.append(irc_server)
self.admins[irc_server] = server['admins']
self.initial_channels[irc_server] = server['channels']
self.plugin_path = settings['plugin_path']
event_plugins = {}
for event in settings['event_plugins']:
if 'plugins' not in event:
continue
name = getattr(Events, event['name'])
plugins = []
for plugin in event['plugins']:
split_path = plugin.split('.')
plugin_name = split_path.pop()
module_path = '.'.join(split_path)
module = __import__('%s.%s' % (self.plugin_path, module_path))
for part in split_path:
module = getattr(module, part)
plugins.append(getattr(module, plugin_name)(self))
event_plugins[name] = plugins
super(IRCBot, self).__init__(servers, event_plugins)
self.command_plugins = {}
self.command_prefix = settings['command_prefix']
self.version = settings['version_string']
self._restart = False
def _register_loggers(self):
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(os.path.join(self.data_path,
self.settings['log_filename']),
encoding='utf-8')
fh_fmt = logging.Formatter("%(asctime)s - %(name)s - %(levelname)-8s: \
%(message)s")
fh.setFormatter(fh_fmt)
if self.settings['debug']:
ch = logging.StreamHandler()
ch_fmt = logging.Formatter("%(levelname)-8s - %(message)s")
ch.setFormatter(ch_fmt)
ch.setLevel(logging.DEBUG)
root_logger.addHandler(ch)
fh.setLevel(logging.DEBUG)
else:
fh.setLevel(logging.INFO)
root_logger.addHandler(fh)
def _save_pid(self, pid_path):
pid = os.getpid()
with open(pid_path, 'w') as pidfile:
pidfile.write(str(pid))
def _register_command_plugins(self):
items = self.settings['command_plugins'].iteritems()
for msg_type, command_plugins in items:
self.command_plugins[msg_type] = plugins = {}
if command_plugins is None:
continue
for command_plugin in command_plugins:
split_path = command_plugin['path'].split('.')
plugin_name = split_path.pop()
module_path = '.'.join(split_path)
module = __import__('%s.%s' % (self.plugin_path, module_path))
for part in split_path:
module = getattr(module, part)
commands = tuple(command_plugin['commands'])
plugins[commands] = getattr(module, plugin_name)(self)
log.debug('Loaded plugin "%s"!' % plugin_name)
def start(self):
super(IRCBot, self).start()
return self._restart
def restart(self):
self._restart = True
self.exit(message=u'Restarting!')
def register_command_plugin(self, command, plugin):
both = self.command_plugins['BOTH']
for commands in both.iterkeys():
if command in commands:
return False
self.command_plugins['BOTH'][(command,)] = plugin(self)
return True
def unregister_command_plugin(self, command):
commands = (command,)
both = self.command_plugins['BOTH']
for cmds in both.iterkeys():
if cmds == commands:
del both[cmds]
return True
return False
def on_connect(self, connection):
pass
def on_welcome(self, connection, source, target, message):
initial_channels = self.initial_channels[connection.server]
if initial_channels:
self.join(connection, *initial_channels)
def on_initial_connect(self):
self._register_command_plugins()
def is_admin(self, server, nick):
"""Returns whether a given nick is one of the administrators of the
bot."""
return nick in self.admins[server]
def get_version(self):
"""Returns the version of the bot."""
return self.version
def process_message(self, connection, source, target, message, is_pubmsg):
"""Processes a message, determining whether it is a bot command, and
taking action if it is."""
if message and message[0] == self.command_prefix:
if message[1:2] == u' ':
command = u' '
args = message[2:].strip().split(u' ')
else:
tokens = message[1:].strip().split(u' ')
command, args = tokens[0], tokens[1:]
both = self.command_plugins['BOTH'].iteritems()
if is_pubmsg:
either = self.command_plugins['PUBMSG'].iteritems()
else:
either = self.command_plugins['PRIVMSG'].iteritems()
for commands, plugin in itertools.chain(both, either):
if command in commands:
plugin._process(connection, source, target, args)
break
def _on_msg(self, connection, source, target, message, is_pubmsg):
process = Thread(target=self.process_message,
args=(connection, source, target, message,
is_pubmsg))
process.start()
def on_privmsg(self, connection, source, target, message):
self._on_msg(connection, source, source.nick, message, False)
def on_pubmsg(self, connection, source, target, message):
self._on_msg(connection, source, target, message, True)
| gpl-3.0 | 7,017,456,056,481,446,000 | 34.771331 | 80 | 0.539929 | false |
Southpaw-TACTIC/TACTIC | src/pyasm/prod/web/render_wdg.py | 1 | 6716 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = [
'RenderException',
'SObjectRenderCbk',
'RenderTableElementWdg',
'RenderSubmitInfoWdg',
]
from pyasm.common import Container, TacticException, Config, Common
from pyasm.command import Command, CommandExitException
from pyasm.command.remote_command import XmlRpcExec, TacticDispatcher
from pyasm.checkin import SnapshotBuilder
from pyasm.search import SearchType, Search
from pyasm.web import Widget, WebContainer, DivWdg
from pyasm.widget import FunctionalTableElement, SelectWdg, IconWdg, IconSubmitWdg, CheckboxWdg, BaseInputWdg, HiddenWdg, TableWdg
from pyasm.biz import Snapshot
from pyasm.prod.biz import Layer, FrameRange, RenderPolicy
from pyasm.prod.render import *
class RenderException(Exception):
pass
from pyasm.command import DatabaseAction
class SObjectRenderCbk(DatabaseAction):
'''initiates a render with properties'''
def get_title(self):
return "Render Submission"
def check(self):
web = WebContainer.get_web()
        if web.get_form_value("Render") == "" and not web.get_form_value("do_edit").startswith("Submit/"):
return False
else:
return True
def execute(self):
web = WebContainer.get_web()
search_keys = []
# discovery phase to find the sobject to be rendered. This can be
# either a snapshots or sobjects. If it is an sobject, then
# the latest snapshot will be rendered
search_type = web.get_form_value("parent_search_type")
search_id = web.get_form_value("parent_search_id")
if search_type:
search_keys = ["%s|%s" % (search_type, search_id)]
if not search_keys:
if self.sobject:
search_keys = [self.sobject.get_search_key()]
else:
search_keys = web.get_form_values("search_key")
# get the policy
policy = None
if self.sobject:
policy_code = self.sobject.get_value("policy_code")
if policy_code:
policy = RenderPolicy.get_by_code(policy_code)
# render options
options = {}
keys = web.get_form_keys()
for key in keys:
if key.startswith("edit|"):
value = web.get_form_value(key)
new_key = key.replace("edit|", "")
options[new_key] = value
# add the xmlrpc server to the package:
        # WARNING: note that there is no / separating the 2 %s.
client_api_url = web.get_client_api_url()
options['client_api_url'] = client_api_url
# go through each of the search keys found from the interface
for search_key in search_keys:
# find the sobject associates with this key
if not search_key:
continue
sobject = Search.get_by_search_key(search_key)
if not sobject:
raise TacticException("Search Key [%s] does not exist" % search_key)
# if the search_keys represented a snapshot, then use this as
# the snapshot and find the parent
if sobject.get_base_search_type() == "sthpw/snapshot":
snapshot = sobject
sobject = sobject.get_sobject()
else:
# else use the latest, assuming a context (really doesn't
# make much sense????!!!???
# FIXME: will deal with this later
context = "publish"
snapshot = Snapshot.get_latest_by_sobject(sobject, context)
if not snapshot:
                raise TacticException("No checkins of context '%s' exist for '%s'. Please look at the checkin history" % (context, sobject.get_code()) )
# We provide a render package with a bunch of necessary information
render_package = RenderPackage()
render_package.set_policy(policy)
render_package.set_snapshot(snapshot)
render_package.set_sobject(sobject)
render_package.set_options(options)
# submission class
submit_class = self.get_option("submit")
if not submit_class:
submit_class = Config.get_value("services", "render_submit_class", no_exception=True)
if not submit_class:
submit_class = "pyasm.prod.render.RenderSubmit"
# now we have an sobject and a snapshot, we initiate a job
submit = Common.create_from_class_path(submit_class, [render_package])
# if this is from the EditWdg for queues then use this queue
# entry instead
if self.sobject.get_base_search_type() == "sthpw/queue":
submit.set_queue(self.sobject)
submit.execute()
self.description = "Submitted: %s" % ", ".join(search_keys)
class RenderTableElementWdg(FunctionalTableElement):
'''presents a checkbox to select for each sobject and executes a render'''
def get_title(self):
WebContainer.register_cmd("pyasm.prod.web.SObjectRenderCbk")
render_button = IconSubmitWdg("Render", IconWdg.RENDER, False)
return render_button
def get_display(self):
sobject = self.get_current_sobject()
search_key = sobject.get_search_key()
div = DivWdg()
checkbox = CheckboxWdg("search_key")
checkbox.set_option("value", search_key)
div.add(checkbox)
return div
class RenderSubmitInfoWdg(BaseInputWdg):
'''presents information about the render'''
def get_display(self):
web = WebContainer.get_web()
widget = Widget()
search_type = web.get_form_value("parent_search_type")
search_id = web.get_form_value("parent_search_id")
if not search_type:
widget.add("RenderSubmitInfo: parent type not found")
return widget
hidden = HiddenWdg("parent_search_type", search_type)
widget.add(hidden)
hidden = HiddenWdg("parent_search_id", search_id)
widget.add(hidden)
sobject = Search.get_by_id(search_type, search_id)
table = TableWdg(search_type, css="embed")
table.set_show_property(False)
table.set_sobject(sobject)
table.remove_widget("render")
table.remove_widget("description")
widget.add(table)
return widget
| epl-1.0 | 7,091,345,297,028,031,000 | 31.444444 | 156 | 0.609738 | false |
Miserlou/Zappa | tests/test_app.py | 1 | 1055 | from zappa.asynchronous import task
try:
from urllib.parse import parse_qs
except ImportError:
from cgi import parse_qs
try:
from html import escape
except ImportError:
from cgi import escape
def hello_world(environ, start_response):
parameters = parse_qs(environ.get('QUERY_STRING', ''))
if 'subject' in parameters:
subject = escape(parameters['subject'][0])
else:
subject = 'World'
start_response('200 OK', [('Content-Type', 'text/html')])
return ['''Hello {subject!s}
Hello {subject!s}!
'''.format(**{'subject': subject})]
def schedule_me():
return "Hello!"
@task
def async_me(arg1, **kwargs):
return "run async when on lambda %s%s" % (arg1, kwargs.get('foo', ''))
@task(remote_aws_lambda_function_name='test-app-dev', remote_aws_region='us-east-1')
def remote_async_me(arg1, **kwargs):
return "run async always on lambda %s%s" % (arg1, kwargs.get('foo', ''))
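# Hedged usage sketch: under @task the calls below are dispatched to Lambda
# asynchronously when running there, and executed synchronously otherwise.
# Argument values are made up:
#
#     async_me('42', foo='!')
#     remote_async_me('42', foo='!')   # always targets test-app-dev/us-east-1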
def callback(self):
print("this is a callback")
def prebuild_me():
print("this is a prebuild script")
| mit | -5,456,621,769,303,471,000 | 22.444444 | 84 | 0.651185 | false |
awacha/cct | attic/gui/core/builderwidget.py | 1 | 2464 | import logging
from gi.repository import Gtk, Gdk, GdkPixbuf
from ...core.utils.callback import Callbacks, SignalFlags
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class BuilderWidget(Callbacks):
__signals__ = {'destroy': (SignalFlags.RUN_FIRST, None, ())}
def __init__(self, gladefile: str, mainwidget: str):
super().__init__()
self.gladefile = gladefile
self.builder = Gtk.Builder.new_from_file(gladefile)
assert isinstance(self.builder, Gtk.Builder)
self.builder.set_application(Gtk.Application.get_default())
self.widget = self.builder.get_object(mainwidget)
assert isinstance(self.widget, Gtk.Widget)
self.builder.connect_signals(self)
self._mainwidget_connections = [self.widget.connect('map', self.on_mainwidget_map),
self.widget.connect('unmap', self.on_mainwidget_unmap),
self.widget.connect('destroy', self.on_mainwidget_destroy)]
def on_mainwidget_destroy(self, widget: Gtk.Widget):
logger.debug('Destroying main widget: ' + self.gladefile)
self.emit('destroy')
logger.debug('Destroy signal emitted for BuilderWidget ' + self.gladefile)
self.cleanup()
return False
def on_mainwidget_map(self, widget: Gtk.Widget):
logger.debug('Mapping mainwidget for BuilderWidget ' + self.gladefile)
self.widget.foreach(lambda x: x.show_all())
return False
# noinspection PyMethodMayBeStatic
def on_mainwidget_unmap(self, widget: Gtk.Widget):
logger.debug('Unmapping mainwidget for BuilderWidget ' + self.gladefile)
return False
def cleanup(self):
for c in self._mainwidget_connections:
self.widget.disconnect(c)
self._mainwidget_connections = []
try:
self.widget = None
self.builder = None
except AttributeError:
pass
self.cleanup_callback_handlers()
def __del__(self):
logger.debug('Deleting a BuilderWidget.')
def on_close(self, widget, event=None):
self.widget.destroy()
def get_screenshot(self) -> GdkPixbuf.Pixbuf:
assert isinstance(self.widget, Gtk.Widget)
gdkwin = self.widget.get_window()
assert isinstance(gdkwin, Gdk.Window)
return Gdk.pixbuf_get_from_window(gdkwin, 0, 0, gdkwin.get_width(), gdkwin.get_height())
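    # Hedged example of consuming the screenshot above; the file name is
    # illustrative:
    #
    #     pixbuf = widget.get_screenshot()
    #     pixbuf.savev('screenshot.png', 'png', [], [])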
| bsd-3-clause | -4,639,427,528,054,207,000 | 36.907692 | 99 | 0.640016 | false |
google-code/acromania | amprotocol.py | 1 | 3608 | # Copyright 2009 Lee Harr
#
# This file is part of Acromania.
#
# Acromania is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Acromania is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Acromania. If not, see <http://www.gnu.org/licenses/>.
from twisted.protocols import basic
from twisted.internet import reactor
later = reactor.callLater
import amgame
import amplayer
import colors
import amdb
class AM(basic.LineReceiver, amgame.CMix):
'Twisted protocol. One is created for each client connection.'
delimiter = '\n'
def connectionMade(self):
'A new connection. Send out the MOTD.'
print "Got new client!"
player = self.gameserver.join(self)
self.player = player
self.motd()
def connectionLost(self, reason):
'Client has disconnected.'
print "Lost a client!"
self.gameserver.leave(self.player)
def leave(self):
self.transport.loseConnection()
def lineReceived(self, line):
'''Called each time a new line of input is received from the client.
'''
line = line.strip()
#print "received", repr(line)
if not line:
return
elif line.startswith('/'):
self.command(line[1:], self.player)
else:
msg = '%s: %s' % (colors.white(self.player.name), line)
self.gameserver.broadcast(msg)
def change_name(self, player, name):
if not name:
player.message('Usage: /nick <new name>')
return
elif amdb.exists(name) and not player.username==name:
msg = colors.red('Name "') + colors.white(name) + colors.red('" is reserved')
player.message(msg)
player.message('Login with /login <name> <password> if that is your account.')
return
if name not in self.gameserver.game.playernames():
orig = player.name
player.name = name
broadcast = self.gameserver.broadcast
broadcast('Player "%s" is now known as "%s"' % (colors.white(orig), colors.white(name)))
else:
player.message('Name "%s" already in use.' % colors.white(name))
def motd(self):
'Message of the day.'
lines = open('MOTD').readlines()
for line in lines:
self.message(line.rstrip())
def help(self, player):
'Show HELP file.'
lines = open('HELP.sa').readlines()
for line in lines:
self.message(line.rstrip())
def rules(self, player):
'Show RULES file.'
lines = open('RULES').readlines()
for line in lines:
player.message(line.rstrip())
def simessage(self, msg=''):
'Send simple line to client. Used before player has logged in.'
self.transport.write(msg + '\r\n')
def message(self, *args, **kw):
color = kw.get('color', True)
strs = map(str, args)
msg = ' '.join(strs)
msg = msg % colors.yes
self.transport.write(msg + '\r\n')
def game_over(self, player):
player.message('Game over. Type /new to start again.')
player.message()
| gpl-3.0 | 1,931,519,000,349,934,800 | 28.818182 | 100 | 0.613359 | false |
matthewbauer/Reggie | windows_build.py | 1 | 2846 | from distutils.core import setup
from py2exe.build_exe import py2exe
import os, os.path, shutil, sys
upxFlag = False
if '-upx' in sys.argv:
sys.argv.remove('-upx')
upxFlag = True
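# Usage sketch, assumed from the flag handling above rather than project docs:
#
#     python windows_build.py          # plain py2exe freeze
#     python windows_build.py -upx     # also compress binaries with upx.exe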
dir = 'distrib/windows'
print '[[ Freezing Reggie! ]]'
print '>> Destination directory: %s' % dir
sys.argv.append('py2exe')
if os.path.isdir(dir): shutil.rmtree(dir)
os.makedirs(dir)
# exclude QtWebKit to save space, plus Python stuff we don't use
excludes = ['encodings', 'doctest', 'pdb', 'unittest', 'difflib', 'inspect',
'os2emxpath', 'posixpath', 'optpath', 'locale', 'calendar',
'threading', 'select', 'socket', 'hashlib', 'multiprocessing', 'ssl',
'PyQt4.QtWebKit', 'PyQt4.QtNetwork']
# set it up
setup(
name='Reggie! Level Editor',
version='1.0',
description='Reggie! Level Editor',
windows=[
{'script': 'reggie.py',
'icon_resources': [(0,'reggiedata/win_icon.ico')]}
],
options={'py2exe':{
'includes': ['sip', 'encodings', 'encodings.hex_codec', 'encodings.utf_8'],
'compressed': 1,
'optimize': 2,
'ascii': True,
'excludes': excludes,
'bundle_files': 3,
'dist_dir': dir
}}
)
print '>> Built frozen executable!'
# now that it's built, configure everything
os.unlink(dir + '/w9xpopen.exe') # not needed
if upxFlag:
if os.path.isfile('upx.exe'):
print '>> Found UPX, using it to compress the executables!'
files = os.listdir(dir)
upx = []
for f in files:
if f.endswith('.exe') or f.endswith('.dll') or f.endswith('.pyd'):
upx.append('"%s/%s"' % (dir,f))
os.system('upx -9 ' + ' '.join(upx))
print '>> Compression complete.'
else:
print '>> UPX not found, binaries can\'t be compressed.'
print '>> In order to build Reggie! with UPX, place the upx.exe file into '\
'this folder.'
if os.path.isdir(dir + '/reggiedata'): shutil.rmtree(dir + '/reggiedata')
if os.path.isdir(dir + '/reggieextras'): shutil.rmtree(dir + '/reggieextras')
shutil.copytree('reggiedata', dir + '/reggiedata')
shutil.copytree('reggieextras', dir + '/reggieextras')
shutil.copy('license.txt', dir)
shutil.copy('readme.txt', dir)
print '>> Attempting to copy VC++2008 libraries...'
if os.path.isdir('Microsoft.VC90.CRT'):
shutil.copytree('Microsoft.VC90.CRT', dir + '/Microsoft.VC90.CRT')
print '>> Copied libraries!'
else:
print '>> Libraries not found! The frozen executable will require the '\
'Visual C++ 2008 runtimes to be installed in order to work.'
print '>> In order to automatically include the runtimes, place the '\
'Microsoft.VC90.CRT folder into this folder.'
print '>> Reggie has been frozen to %s!' % dir
| gpl-2.0 | 7,492,280,498,728,124,000 | 32.707317 | 84 | 0.603654 | false |
henrykironde/deletedret | docs/conf.py | 3 | 7970 | import sys
import sphinx_rtd_theme
from retriever.lib.defaults import ENCODING
encoding = ENCODING.lower()
from retriever.lib.defaults import VERSION, COPYRIGHT
from retriever.lib.scripts import SCRIPT_LIST, reload_scripts
from retriever.lib.tools import open_fw
from retriever.lib.repository import check_for_updates
def to_str(object, object_encoding=encoding):
    return str(object).encode('UTF-8').decode(object_encoding)
# Create the .rst file for the available datasets
datasetfile = open_fw("datasets_list.rst")
datasetfile_title = """==================
Datasets Available
==================
"""
check_for_updates()
reload_scripts()
script_list = SCRIPT_LIST()
# write the title of dataset rst file
# ref:http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
datasetfile.write(datasetfile_title)
# get info from the scripts using specified encoding
for script_num, script in enumerate(script_list, start=1):
reference_link = ''
if script.ref.strip():
reference_link = script.ref
elif hasattr(script, 'homepage'):
reference_link = script.homepage
elif not reference_link.strip():
if bool(script.urls.values()):
reference_link = list(script.urls.values())[0].rpartition('/')[0]
else:
reference_link = 'Not available'
title = str(script_num) + ". **{}**\n".format(to_str(script.title.strip(), encoding))
datasetfile.write(title)
datasetfile.write("-" * (len(title) - 1) + "\n\n")
# keep the gap between : {} standard as required by restructuredtext
datasetfile.write(":name: {}\n\n".format(script.name))
# Long urls can't render well, embed them in a text(home link)
if len(to_str(reference_link)) <= 85:
datasetfile.write(":reference: `{}`\n\n".format(reference_link))
else:
datasetfile.write(":reference: `{s}'s home link <{r}>`_.\n".format(
s=script.name, r=to_str(reference_link).rstrip("/")))
datasetfile.write(":citation: {}\n\n".format(to_str(script.citation, encoding)))
datasetfile.write(":description: {}\n\n".format(to_str(script.description, encoding)))
datasetfile.close()
needs_sphinx = '1.3'
# Add any Sphinx extension module names here, as strings.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Data Retriever'
copyright = COPYRIGHT
version = release = VERSION
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| mit | -5,806,422,816,114,753,000 | 30.88 | 90 | 0.692346 | false |
Pikecillo/genna | external/4Suite-XML-1.0.2/test/Xml/Xslt/Borrowed/mb_20030223.py | 1 | 2150 | # a pretty straightforward Muenchian grouping test
from Xml.Xslt import test_harness
sheet_1 = """<?xml version="1.0" encoding="utf-8"?>
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="html" indent="yes"/>
<xsl:key name="skills-by-mark" match="skill" use="@mark"/>
<xsl:template match="skills">
<table>
<!-- process a set consisting of the first skill element for each mark -->
<xsl:for-each select="skill[count(.|key('skills-by-mark',@mark)[1])=1]">
<tr>
<td><b><xsl:value-of select="concat(@mark,' skills:')"/></b></td>
<td>
<!-- process all skill elements having the current skill's mark -->
<xsl:for-each select="key('skills-by-mark',@mark)">
<xsl:value-of select="@name"/>
<xsl:if test="position()!=last()"><br/></xsl:if>
</xsl:for-each>
</td>
</tr>
</xsl:for-each>
</table>
</xsl:template>
</xsl:stylesheet>"""
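# The predicate skill[count(.|key('skills-by-mark',@mark)[1])=1] is the core
# of the Muenchian method: the union of the current node with the first node
# returned by the key lookup has count 1 only when they are the same node,
# so exactly one representative <skill> per distinct @mark survives.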
source_1 = """<skills>
<skill mark="excellent" name="excellentskill"/>
<skill mark="excellent" name="excellent skill"/>
<skill mark="good" name="goodskill"/>
<skill mark="good" name="goodskill"/>
<skill mark="basic" name="basicskill"/>
<skill mark="basic" name="basicskill"/>
<skill mark="excellent" name="excellentskill"/>
<skill mark="good" name="goodskill"/>
<skill mark="basic" name="basicskill"/>
</skills>"""
expected_1 = """<table>
<tr>
<td><b>excellent skills:</b></td>
<td>excellentskill
<br>excellent skill
<br>excellentskill
</td>
</tr>
<tr>
<td><b>good skills:</b></td>
<td>goodskill
<br>goodskill
<br>goodskill
</td>
</tr>
<tr>
<td><b>basic skills:</b></td>
<td>basicskill
<br>basicskill
<br>basicskill
</td>
</tr>
</table>"""
def Test(tester):
source = test_harness.FileInfo(string=source_1)
sheet = test_harness.FileInfo(string=sheet_1)
test_harness.XsltTest(tester, source, [sheet], expected_1,
title='ordinary Muenchian grouping with keys')
return
| gpl-2.0 | 4,578,644,373,810,415,600 | 28.452055 | 80 | 0.583256 | false |
AstroHuntsman/POCS | pocs/tests/test_focuser.py | 1 | 4299 | import pytest
from pocs.focuser.simulator import Focuser as SimFocuser
from pocs.focuser.birger import Focuser as BirgerFocuser
from pocs.camera.simulator import Camera
from pocs.utils.config import load_config
params = [SimFocuser, BirgerFocuser]
ids = ['simulator', 'birger']
# Ugly hack to access id inside fixture
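# (request.param is a (focuser class, id string) tuple, so the fixture body
# can branch on the id string without pytest exposing it directly.)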
@pytest.fixture(scope='module', params=zip(params, ids), ids=ids)
def focuser(request):
if request.param[0] == SimFocuser:
# Simulated focuser, just create one and return it
return request.param[0]()
else:
# Load the local config file and look for focuser configurations of the specified type
focuser_configs = []
local_config = load_config('pocs_local', ignore_local=True)
camera_info = local_config.get('cameras')
if camera_info:
# Local config file has a cameras section
camera_configs = camera_info.get('devices')
if camera_configs:
# Local config file camera section has a devices list
for camera_config in camera_configs:
focuser_config = camera_config.get('focuser', None)
if focuser_config and focuser_config['model'] == request.param[1]:
# Camera config has a focuser section, and it's the right type
focuser_configs.append(focuser_config)
if not focuser_configs:
pytest.skip(
"Found no {} configurations in pocs_local.yaml, skipping tests".format(
request.param[1]))
# Create and return a Focuser based on the first config
return request.param[0](**focuser_configs[0])
@pytest.fixture(scope='module')
def tolerance(focuser):
"""
Tolerance for confirming focuser has moved to the requested position. The Birger may be
1 or 2 encoder steps off.
"""
if isinstance(focuser, SimFocuser):
return 0
elif isinstance(focuser, BirgerFocuser):
return 2
def test_init(focuser):
"""
Confirm proper init & exercise some of the property getters
"""
assert focuser.is_connected
# Expect UID to be a string (or integer?) of non-zero length? Just assert its True
assert focuser.uid
def test_move_to(focuser, tolerance):
focuser.move_to(100)
assert focuser.position == pytest.approx(100, abs=tolerance)
def test_move_by(focuser, tolerance):
previous_position = focuser.position
increment = -13
focuser.move_by(increment)
assert focuser.position == pytest.approx((previous_position + increment), abs=tolerance)
def test_position_setter(focuser, tolerance):
"""
Can assign to position property as an alternative to move_to() method
"""
focuser.position = 75
assert focuser.position == pytest.approx(75, abs=tolerance)
def test_move_below_min_position(focuser, tolerance):
focuser.move_to(focuser.min_position - 100)
    assert focuser.position == pytest.approx(focuser.min_position, abs=tolerance)
def test_move_above_max_positions(focuser, tolerance):
    focuser.move_to(focuser.max_position + 100)
    assert focuser.position == pytest.approx(focuser.max_position, abs=tolerance)
def test_camera_association(focuser):
"""
Test association of Focuser with Camera after initialisation (getter, setter)
"""
sim_camera_1 = Camera()
sim_camera_2 = Camera()
# Cameras in the fixture haven't been associated with a Camera yet, this should work
focuser.camera = sim_camera_1
assert focuser.camera is sim_camera_1
# Attempting to associate with a second Camera should fail, though.
focuser.camera = sim_camera_2
assert focuser.camera is sim_camera_1
def test_camera_init():
"""
Test focuser init via Camera constructor/
"""
sim_camera = Camera(focuser={'model': 'simulator', 'focus_port': '/dev/ttyFAKE'})
assert isinstance(sim_camera.focuser, SimFocuser)
assert sim_camera.focuser.is_connected
assert sim_camera.focuser.uid
assert sim_camera.focuser.camera is sim_camera
def test_camera_association_on_init():
"""
Test association of Focuser with Camera during Focuser init
"""
sim_camera = Camera()
focuser = SimFocuser(camera=sim_camera)
assert focuser.camera is sim_camera
| mit | -8,882,572,169,062,581,000 | 33.392 | 94 | 0.676902 | false |
windmill/windmill | windmill/management/commands/test_windmill.py | 1 | 4314 | # Copyright (c) 2008-2009 Mikeal Rogers <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.core.management.base import BaseCommand
from windmill.authoring import djangotest
import sys, os
from time import sleep
import types
import logging
class ServerContainer(object):
start_test_server = djangotest.start_test_server
stop_test_server = djangotest.stop_test_server
def attempt_import(name, suffix):
try:
mod = __import__(name+'.'+suffix)
except ImportError:
mod = None
if mod is not None:
s = name.split('.')
mod = __import__(s.pop(0))
for x in s+[suffix]:
mod = getattr(mod, x)
return mod
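# e.g. attempt_import('myapp', 'wmtests') returns the myapp.wmtests module
# if it can be imported, else None; __import__('myapp.wmtests') returns the
# top-level package, hence the getattr walk down the dotted path above.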
class Command(BaseCommand):
help = "Run windmill tests. Specify a browser, if one is not passed Firefox will be used"
args = '<label label ...>'
label = 'label'
def handle(self, *labels, **options):
from windmill.conf import global_settings
from windmill.authoring.djangotest import WindmillDjangoUnitTest
if 'ie' in labels:
global_settings.START_IE = True
sys.argv.remove('ie')
elif 'safari' in labels:
global_settings.START_SAFARI = True
sys.argv.remove('safari')
elif 'chrome' in labels:
global_settings.START_CHROME = True
sys.argv.remove('chrome')
else:
global_settings.START_FIREFOX = True
if 'firefox' in labels:
sys.argv.remove('firefox')
if 'manage.py' in sys.argv:
sys.argv.remove('manage.py')
if 'test_windmill' in sys.argv:
sys.argv.remove('test_windmill')
server_container = ServerContainer()
server_container.start_test_server()
global_settings.TEST_URL = 'http://127.0.0.1:%d' % server_container.server_thread.port
# import windmill
# windmill.stdout, windmill.stdin = sys.stdout, sys.stdin
from windmill.authoring import setup_module, teardown_module
from django.conf import settings
tests = []
for name in settings.INSTALLED_APPS:
for suffix in ['tests', 'wmtests', 'windmilltests']:
x = attempt_import(name, suffix)
if x is not None: tests.append((suffix,x,));
wmtests = []
for (ttype, mod,) in tests:
if ttype == 'tests':
for ucls in [getattr(mod, x) for x in dir(mod)
if ( type(getattr(mod, x, None)) in (types.ClassType,
types.TypeType) ) and
issubclass(getattr(mod, x), WindmillDjangoUnitTest)
]:
wmtests.append(ucls.test_dir)
else:
if mod.__file__.endswith('__init__.py') or mod.__file__.endswith('__init__.pyc'):
wmtests.append(os.path.join(*os.path.split(os.path.abspath(mod.__file__))[:-1]))
else:
wmtests.append(os.path.abspath(mod.__file__))
        if len(wmtests) == 0:
print 'Sorry, no windmill tests found.'
else:
testtotals = {}
x = logging.getLogger()
x.setLevel(0)
from windmill.dep import functest
bin = functest.bin
runner = functest.runner
runner.CLIRunner.final = classmethod(lambda self, totals: testtotals.update(totals) )
setup_module(tests[0][1])
sys.argv = sys.argv + wmtests
bin.cli()
teardown_module(tests[0][1])
            if testtotals['fail'] != 0:
sleep(.5)
sys.exit(1)
| apache-2.0 | 8,694,265,997,949,678,000 | 36.513043 | 100 | 0.572323 | false |
datapythonista/pandas | pandas/tests/io/pytables/test_timezones.py | 2 | 11495 | from datetime import (
date,
timedelta,
)
import numpy as np
import pytest
from pandas._libs.tslibs.timezones import maybe_get_tz
import pandas.util._test_decorators as td
import pandas as pd
from pandas import (
DataFrame,
DatetimeIndex,
Series,
Timestamp,
date_range,
)
import pandas._testing as tm
from pandas.tests.io.pytables.common import (
_maybe_remove,
ensure_clean_path,
ensure_clean_store,
)
# TODO(ArrayManager) HDFStore relies on accessing the blocks
pytestmark = td.skip_array_manager_not_yet_implemented
def _compare_with_tz(a, b):
tm.assert_frame_equal(a, b)
# compare the zones on each element
for c in a.columns:
for i in a.index:
a_e = a.loc[i, c]
b_e = b.loc[i, c]
if not (a_e == b_e and a_e.tz == b_e.tz):
raise AssertionError(f"invalid tz comparison [{a_e}] [{b_e}]")
# use maybe_get_tz instead of dateutil.tz.gettz to handle the windows
# filename issues.
gettz_dateutil = lambda x: maybe_get_tz("dateutil/" + x)
gettz_pytz = lambda x: x
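# e.g. gettz_dateutil("US/Eastern") resolves to a dateutil tzfile object,
# while gettz_pytz("US/Eastern") leaves the bare string for pytz to resolve.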
@pytest.mark.parametrize("gettz", [gettz_dateutil, gettz_pytz])
def test_append_with_timezones(setup_path, gettz):
# as columns
# Single-tzinfo, no DST transition
df_est = DataFrame(
{
"A": [
Timestamp("20130102 2:00:00", tz=gettz("US/Eastern"))
+ timedelta(hours=1) * i
for i in range(5)
]
}
)
# frame with all columns having same tzinfo, but different sides
# of DST transition
df_crosses_dst = DataFrame(
{
"A": Timestamp("20130102", tz=gettz("US/Eastern")),
"B": Timestamp("20130603", tz=gettz("US/Eastern")),
},
index=range(5),
)
df_mixed_tz = DataFrame(
{
"A": Timestamp("20130102", tz=gettz("US/Eastern")),
"B": Timestamp("20130102", tz=gettz("EET")),
},
index=range(5),
)
df_different_tz = DataFrame(
{
"A": Timestamp("20130102", tz=gettz("US/Eastern")),
"B": Timestamp("20130102", tz=gettz("CET")),
},
index=range(5),
)
with ensure_clean_store(setup_path) as store:
_maybe_remove(store, "df_tz")
store.append("df_tz", df_est, data_columns=["A"])
result = store["df_tz"]
_compare_with_tz(result, df_est)
tm.assert_frame_equal(result, df_est)
# select with tz aware
expected = df_est[df_est.A >= df_est.A[3]]
result = store.select("df_tz", where="A>=df_est.A[3]")
_compare_with_tz(result, expected)
# ensure we include dates in DST and STD time here.
_maybe_remove(store, "df_tz")
store.append("df_tz", df_crosses_dst)
result = store["df_tz"]
_compare_with_tz(result, df_crosses_dst)
tm.assert_frame_equal(result, df_crosses_dst)
msg = (
r"invalid info for \[values_block_1\] for \[tz\], "
r"existing_value \[(dateutil/.*)?US/Eastern\] "
r"conflicts with new value \[(dateutil/.*)?EET\]"
)
with pytest.raises(ValueError, match=msg):
store.append("df_tz", df_mixed_tz)
# this is ok
_maybe_remove(store, "df_tz")
store.append("df_tz", df_mixed_tz, data_columns=["A", "B"])
result = store["df_tz"]
_compare_with_tz(result, df_mixed_tz)
tm.assert_frame_equal(result, df_mixed_tz)
# can't append with diff timezone
msg = (
r"invalid info for \[B\] for \[tz\], "
r"existing_value \[(dateutil/.*)?EET\] "
r"conflicts with new value \[(dateutil/.*)?CET\]"
)
with pytest.raises(ValueError, match=msg):
store.append("df_tz", df_different_tz)
@pytest.mark.parametrize("gettz", [gettz_dateutil, gettz_pytz])
def test_append_with_timezones_as_index(setup_path, gettz):
# GH#4098 example
dti = date_range("2000-1-1", periods=3, freq="H", tz=gettz("US/Eastern"))
dti = dti._with_freq(None) # freq doesn't round-trip
df = DataFrame({"A": Series(range(3), index=dti)})
with ensure_clean_store(setup_path) as store:
_maybe_remove(store, "df")
store.put("df", df)
result = store.select("df")
tm.assert_frame_equal(result, df)
_maybe_remove(store, "df")
store.append("df", df)
result = store.select("df")
tm.assert_frame_equal(result, df)
def test_roundtrip_tz_aware_index(setup_path):
# GH 17618
time = Timestamp("2000-01-01 01:00:00", tz="US/Eastern")
df = DataFrame(data=[0], index=[time])
with ensure_clean_store(setup_path) as store:
store.put("frame", df, format="fixed")
recons = store["frame"]
tm.assert_frame_equal(recons, df)
assert recons.index[0].value == 946706400000000000
def test_store_index_name_with_tz(setup_path):
# GH 13884
df = DataFrame({"A": [1, 2]})
df.index = DatetimeIndex([1234567890123456787, 1234567890123456788])
df.index = df.index.tz_localize("UTC")
df.index.name = "foo"
with ensure_clean_store(setup_path) as store:
store.put("frame", df, format="table")
recons = store["frame"]
tm.assert_frame_equal(recons, df)
def test_tseries_select_index_column(setup_path):
# GH7777
# selecting a UTC datetimeindex column did
# not preserve UTC tzinfo set before storing
# check that no tz still works
rng = date_range("1/1/2000", "1/30/2000")
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(setup_path) as store:
store.append("frame", frame)
result = store.select_column("frame", "index")
assert rng.tz == DatetimeIndex(result.values).tz
# check utc
rng = date_range("1/1/2000", "1/30/2000", tz="UTC")
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(setup_path) as store:
store.append("frame", frame)
result = store.select_column("frame", "index")
assert rng.tz == result.dt.tz
# double check non-utc
rng = date_range("1/1/2000", "1/30/2000", tz="US/Eastern")
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(setup_path) as store:
store.append("frame", frame)
result = store.select_column("frame", "index")
assert rng.tz == result.dt.tz
def test_timezones_fixed_format_frame_non_empty(setup_path):
with ensure_clean_store(setup_path) as store:
# index
rng = date_range("1/1/2000", "1/30/2000", tz="US/Eastern")
rng = rng._with_freq(None) # freq doesn't round-trip
df = DataFrame(np.random.randn(len(rng), 4), index=rng)
store["df"] = df
result = store["df"]
tm.assert_frame_equal(result, df)
# as data
# GH11411
_maybe_remove(store, "df")
df = DataFrame(
{
"A": rng,
"B": rng.tz_convert("UTC").tz_localize(None),
"C": rng.tz_convert("CET"),
"D": range(len(rng)),
},
index=rng,
)
store["df"] = df
result = store["df"]
tm.assert_frame_equal(result, df)
def test_timezones_fixed_format_empty(setup_path, tz_aware_fixture, frame_or_series):
# GH 20594
dtype = pd.DatetimeTZDtype(tz=tz_aware_fixture)
obj = Series(dtype=dtype, name="A")
if frame_or_series is DataFrame:
obj = obj.to_frame()
with ensure_clean_store(setup_path) as store:
store["obj"] = obj
result = store["obj"]
tm.assert_equal(result, obj)
def test_timezones_fixed_format_series_nonempty(setup_path, tz_aware_fixture):
# GH 20594
dtype = pd.DatetimeTZDtype(tz=tz_aware_fixture)
with ensure_clean_store(setup_path) as store:
s = Series([0], dtype=dtype)
store["s"] = s
result = store["s"]
tm.assert_series_equal(result, s)
def test_fixed_offset_tz(setup_path):
rng = date_range("1/1/2000 00:00:00-07:00", "1/30/2000 00:00:00-07:00")
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(setup_path) as store:
store["frame"] = frame
recons = store["frame"]
tm.assert_index_equal(recons.index, rng)
assert rng.tz == recons.index.tz
@td.skip_if_windows
def test_store_timezone(setup_path):
# GH2852
# issue storing datetime.date with a timezone as it resets when read
# back in a new timezone
# original method
with ensure_clean_store(setup_path) as store:
today = date(2013, 9, 10)
df = DataFrame([1, 2, 3], index=[today, today, today])
store["obj1"] = df
result = store["obj1"]
tm.assert_frame_equal(result, df)
# with tz setting
with ensure_clean_store(setup_path) as store:
with tm.set_timezone("EST5EDT"):
today = date(2013, 9, 10)
df = DataFrame([1, 2, 3], index=[today, today, today])
store["obj1"] = df
with tm.set_timezone("CST6CDT"):
result = store["obj1"]
tm.assert_frame_equal(result, df)
def test_legacy_datetimetz_object(datapath, setup_path):
# legacy from < 0.17.0
# 8260
expected = DataFrame(
{
"A": Timestamp("20130102", tz="US/Eastern"),
"B": Timestamp("20130603", tz="CET"),
},
index=range(5),
)
with ensure_clean_store(
datapath("io", "data", "legacy_hdf", "datetimetz_object.h5"), mode="r"
) as store:
result = store["df"]
tm.assert_frame_equal(result, expected)
def test_dst_transitions(setup_path):
# make sure we are not failing on transitions
with ensure_clean_store(setup_path) as store:
times = date_range(
"2013-10-26 23:00",
"2013-10-27 01:00",
tz="Europe/London",
freq="H",
ambiguous="infer",
)
times = times._with_freq(None) # freq doesn't round-trip
for i in [times, times + pd.Timedelta("10min")]:
_maybe_remove(store, "df")
df = DataFrame({"A": range(len(i)), "B": i}, index=i)
store.append("df", df)
result = store.select("df")
tm.assert_frame_equal(result, df)
def test_read_with_where_tz_aware_index(setup_path):
# GH 11926
periods = 10
dts = date_range("20151201", periods=periods, freq="D", tz="UTC")
mi = pd.MultiIndex.from_arrays([dts, range(periods)], names=["DATE", "NO"])
expected = DataFrame({"MYCOL": 0}, index=mi)
key = "mykey"
with ensure_clean_path(setup_path) as path:
with pd.HDFStore(path) as store:
store.append(key, expected, format="table", append=True)
result = pd.read_hdf(path, key, where="DATE > 20151130")
tm.assert_frame_equal(result, expected)
def test_py2_created_with_datetimez(datapath, setup_path):
# The test HDF5 file was created in Python 2, but could not be read in
# Python 3.
#
# GH26443
index = [Timestamp("2019-01-01T18:00").tz_localize("America/New_York")]
expected = DataFrame({"data": 123}, index=index)
with ensure_clean_store(
datapath("io", "data", "legacy_hdf", "gh26443.h5"), mode="r"
) as store:
result = store["key"]
tm.assert_frame_equal(result, expected)
| bsd-3-clause | -4,588,879,910,586,167,000 | 29.817694 | 85 | 0.582427 | false |
WarrenWeckesser/scikits-image | skimage/morphology/convex_hull.py | 2 | 3703 | __all__ = ['convex_hull_image', 'convex_hull_object']
import numpy as np
from ..measure._pnpoly import grid_points_in_poly
from ._convex_hull import possible_hull
from ..measure._label import label
from ..util import unique_rows
try:
from scipy.spatial import Delaunay
except ImportError:
Delaunay = None
def convex_hull_image(image):
"""Compute the convex hull image of a binary image.
The convex hull is the set of pixels included in the smallest convex
    polygon that surrounds all white pixels in the input image.
Parameters
----------
image : (M, N) array
Binary input image. This array is cast to bool before processing.
Returns
-------
hull : (M, N) array of bool
Binary image with pixels in convex hull set to True.
References
----------
.. [1] http://blogs.mathworks.com/steve/2011/10/04/binary-image-convex-hull-algorithm-notes/
"""
if Delaunay is None:
raise ImportError("Could not import scipy.spatial.Delaunay, "
"only available in scipy >= 0.9.")
# Here we do an optimisation by choosing only pixels that are
# the starting or ending pixel of a row or column. This vastly
# limits the number of coordinates to examine for the virtual hull.
coords = possible_hull(image.astype(np.uint8))
N = len(coords)
# Add a vertex for the middle of each pixel edge
coords_corners = np.empty((N * 4, 2))
for i, (x_offset, y_offset) in enumerate(zip((0, 0, -0.5, 0.5),
(-0.5, 0.5, 0, 0))):
coords_corners[i * N:(i + 1) * N] = coords + [x_offset, y_offset]
# repeated coordinates can *sometimes* cause problems in
# scipy.spatial.Delaunay, so we remove them.
coords = unique_rows(coords_corners)
# Subtract offset
offset = coords.mean(axis=0)
coords -= offset
# Find the convex hull
chull = Delaunay(coords).convex_hull
v = coords[np.unique(chull)]
# Sort vertices clock-wise
v_centred = v - v.mean(axis=0)
angles = np.arctan2(v_centred[:, 0], v_centred[:, 1])
v = v[np.argsort(angles)]
# Add back offset
v += offset
# For each pixel coordinate, check whether that pixel
# lies inside the convex hull
mask = grid_points_in_poly(image.shape[:2], v)
return mask
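# Minimal usage sketch (illustrative only, not part of the original module):
#
#   >>> import numpy as np
#   >>> img = np.zeros((8, 8), dtype=bool)
#   >>> img[2, 2] = img[2, 5] = img[5, 2] = img[5, 5] = True
#   >>> convex_hull_image(img)[3, 3]   # interior pixel of the hull is filled
#   True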
def convex_hull_object(image, neighbors=8):
"""Compute the convex hull image of individual objects in a binary image.
The convex hull is the set of pixels included in the smallest convex
    polygon that surrounds all white pixels in the input image.
Parameters
----------
image : ndarray
Binary input image.
neighbors : {4, 8}, int
Whether to use 4- or 8-connectivity.
Returns
-------
hull : ndarray of bool
Binary image with pixels in convex hull set to True.
Notes
-----
This function uses skimage.morphology.label to define unique objects,
finds the convex hull of each using convex_hull_image, and combines
these regions with logical OR. Be aware the convex hulls of unconnected
objects may overlap in the result. If this is suspected, consider using
convex_hull_image separately on each object.
"""
if neighbors != 4 and neighbors != 8:
raise ValueError('Neighbors must be either 4 or 8.')
labeled_im = label(image, neighbors, background=0)
convex_obj = np.zeros(image.shape, dtype=bool)
convex_img = np.zeros(image.shape, dtype=bool)
for i in range(0, labeled_im.max() + 1):
convex_obj = convex_hull_image(labeled_im == i)
convex_img = np.logical_or(convex_img, convex_obj)
return convex_img
| bsd-3-clause | 7,252,472,451,150,757,000 | 30.117647 | 96 | 0.644883 | false |
evancasey/startup-finder | lib/github_dump.py | 1 | 1658 | import urllib
import urllib2
import json
import pdb
import sys
import time
import csv
import tokens
from models import *
class GithubListener:
def get_all_repos(self,org):
url = "https://api.github.com/orgs/" + org + "/repos?client_id=" + tokens.GITHUB_ID + "&client_secret=" + tokens.GITHUB_SECRET
try:
resource = urllib2.urlopen(url)
pages = json.loads(resource.read())
return pages
except:
print("path not found")
pass
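    # Note: this fetches only the first page of results; the GitHub API
    # paginates (30 repos per page by default), so organisations with more
    # repos would need a ?page= parameter or the Link headers followed.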
def get_all_orgs_csv(self):
orgs = []
f = open('all_orgs.txt', 'rt')
reader = csv.reader(f)
for row in reader:
orgs += row
return orgs
if __name__ == "__main__":
gl = GithubListener()
orgs = gl.get_all_orgs_csv()
counter = 0
for org in orgs[100:]:
repos = gl.get_all_repos(org)
if repos:
for repo in repos:
                print(json.dumps(repo, indent=2))
                counter += 1
try:
github_data = Github(id = str(counter),
organization = org,
repos = json.dumps(repos))
Session.add(github_data)
print "Committing.."
Session.commit()
except Exception, e:
print >> sys.stderr, 'Encountered Exception: ', e
pass
| mit | -1,315,194,730,258,532,000 | 20.269231 | 142 | 0.431242 | false |
reed-college/lemur | lemur/utility_modify.py | 1 | 23527 | # Libraries
# Local
from lemur import models as m
from lemur import (app, db)
from lemur.utility_generate_and_convert import (check_existence,
generate_lab_id,
generate_experiment_id,
generate_observation_id,
generate_class_id,
generate_user_name,
decompose_lab_id,
tranlate_term_code_to_semester,
cleanup_class_data)
from lemur.utility_find_and_get import (lab_exists,
experiment_exists,
class_exists,
observation_exists,
user_exists,
get_lab,
get_observation,
get_user,
get_class,
get_role,
get_all_class,
get_all_user,
get_experiments_for_lab,
get_observations_for_experiment,
find_lab_copy_id)
ds = db.session
# --- Manage labs ---
# Delete a lab's basic info, experiments info and observations info
def delete_lab(lab_id):
ds.delete(get_lab(lab_id))
experiments_query = get_experiments_for_lab(lab_id)
for e in experiments_query:
ds.delete(e)
ds.commit()
# Modify a lab
def modify_lab(lab_json):
the_class = None
class_users = []
experiments_for_lab = []
lab_status = 'Unactivated'
lab_id = None
err_msg = check_existence(lab_json, 'labName', 'classId', 'labDescription',
'experiments', 'oldLabId')
if lab_exists(lab_json['oldLabId']):
lab_status = get_lab(lab_json['oldLabId']).status
delete_lab(lab_json['oldLabId'])
if not class_exists(lab_json['classId']):
        err_msg += 'class id: {0} doesn\'t exist in the database'.format(lab_json['classId'])
if err_msg != '':
return err_msg
the_class = get_class(lab_json['classId'])
# Build connection between the current lab and the existing users/class
if the_class is not None:
class_users = the_class.users
lab_id = generate_lab_id(lab_json['labName'], lab_json['classId'])
if lab_exists(lab_id):
return 'lab id:{0} already exists'.format(lab_id)
for e in lab_json['experiments']:
err_msg = check_existence(e, 'name', 'description', 'order',
'valueType', 'valueRange',
'valueCandidates')
if err_msg != '':
return err_msg
    seen_names = set()
    for e in lab_json['experiments']:
        experiment_name = e['name']
        # Check if the experiment name repeats among all the experiments
        # to be added into the current lab; keep only the first occurrence
        if experiment_name in seen_names:
            warning_msg = 'repeated experiment name:{} in this lab'.format(experiment_name)
            app.logger.warning(warning_msg)
            continue
        seen_names.add(experiment_name)
experiment_id = generate_experiment_id(lab_id, experiment_name)
if experiment_exists(experiment_id):
            warning_msg = 'The same experiment name already exists in the same lab'
app.logger.warning(warning_msg)
continue
else:
experiments_for_lab.append(m.Experiment(lab_id=lab_id,
id=experiment_id,
name=experiment_name,
description=e['description'],
order=e['order'],
value_type=e['valueType'],
value_range=e['valueRange'],
value_candidates=e['valueCandidates']))
the_lab = m.Lab(id=lab_id, name=lab_json['labName'],
description=lab_json['labDescription'],
status=lab_status,
the_class=the_class,
experiments=experiments_for_lab,
users=class_users)
ds.add(the_lab)
ds.commit()
return ''
# copy an old lab and rename the new lab with 'copy'+index+'-'+old_lab_name
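# e.g. an old lab named "titration" would yield "copy1-titration", then
# "copy2-titration" if that copy already exists (the exact id layout comes
# from find_lab_copy_id / decompose_lab_id).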
def duplicate_lab(old_lab_id):
# Find a new lab id according to the old lab id
new_lab_id = find_lab_copy_id(old_lab_id)
# Copy info from old lab and add to new lab
old_lab = get_lab(old_lab_id)
# A lab can only belong to one class at this point
old_class = get_class(old_lab.the_class.id)
new_lab = m.Lab(id=new_lab_id,
name=decompose_lab_id(new_lab_id)['lab_name'],
description=old_lab.description, status=old_lab.status,
the_class=old_class, users=old_class.users)
new_experiments = []
for e in old_lab.experiments:
experiment_name = e.name
new_experiment_id = generate_experiment_id(new_lab_id,
experiment_name)
new_experiment = m.Experiment(lab_id=new_lab_id,
id=new_experiment_id,
name=experiment_name,
description=e.description,
order=e.order,
value_type=e.value_type,
value_range=e.value_range,
value_candidates=e.value_candidates)
new_experiments.append(new_experiment)
new_lab.experiments = new_experiments
ds.add(new_lab)
ds.commit()
# Change a lab's status
def change_lab_status(lab_id, new_status):
lab_query = get_lab(lab_id)
lab_query.status = new_status
# Automatically delete all the data in the lab if it's made unavailable
if new_status == "Unactivated":
experiments_query = get_experiments_for_lab(lab_query.id)
for e in experiments_query:
for d in get_observations_for_experiment(e.id):
ds.delete(d)
ds.commit()
# --- Manage observations ---
# delete observation from a list of observation ids sent from client
def delete_observation(old_observation_ids_list):
err_msg = ''
# delete all the old data by old observation_id
for observation_id in old_observation_ids_list:
observations_query = get_observation(observation_id)
# Check the existence of the observation to be deleted
if observation_exists(observation_id):
ds.delete(observations_query)
# err_msg += ('To be deleted observation:' +
# '{} doesn\'t exits in db\n'.format(observations_query))
ds.commit()
return err_msg
# add observation from a list JSON format observations sent from client
# This function is invoked when admin edits data of a lab
def add_observation(new_observations_list):
warning_msg = ''
for d in new_observations_list:
err_msg = check_existence(d, 'studentName', 'observationData',
'experimentId', 'observationId')
if err_msg != '':
return err_msg
for d in new_observations_list:
        # Check if the observation name already repeats among all the
# observations to be added into the database and rename it if necessary
index = 1
tmp_student_name = d['studentName']
tmp_observation_id = d['observationId']
while observation_exists(tmp_observation_id):
tmp_student_name = d['studentName'] + '('+str(index)+')'
tmp_observation_id = generate_observation_id(d['experimentId'], tmp_student_name)
index += 1
# warning_msg = ('repeated observation id:{} in this lab so the ' +
# 'current, modified entry will be renamed to ' +
# '{}'.format(d['observationId'], tmp_observation_id))
# Capitalize every input
ds.add(m.Observation(experiment_id=d['experimentId'],
id=tmp_observation_id,
student_name=tmp_student_name,
datum=d['observationData'].upper()))
ds.commit()
return warning_msg
# add observations sent by students into the database
# This function is invoked when a student send a group of data
def add_observations_sent_by_students(observations_group_by_student):
# the data type of observations should be a list
if not(isinstance(observations_group_by_student, list)):
err_msg = 'The value of the key observations should be a list'
return err_msg
# check that all list elements have the right format
for student in observations_group_by_student:
err_msg = check_existence(student, 'studentName',
'observationsForOneExperiment')
if err_msg != '':
return err_msg
for ob in student['observationsForOneExperiment']:
err_msg = check_existence(ob, 'labId', 'experimentName',
'observation')
if err_msg != '':
return err_msg
# If everything is correct add the data to the database
experiment_id = generate_experiment_id(ob['labId'], ob['experimentName'])
# To avoid repetition in student name field since it's used as part
# of key for an input we add an unique index at the end of
# each student name
tmp_student_name = student['studentName']+'(1)'
observation_id = generate_observation_id(experiment_id,
tmp_student_name)
index = 2
while observation_exists(observation_id):
tmp_student_name = student['studentName'] + '('+str(index)+')'
observation_id = generate_observation_id(experiment_id,
tmp_student_name)
index += 1
# Capitalize every input
if not observation_exists(observation_id):
ds.add(m.Observation(experiment_id=experiment_id,
id=observation_id,
student_name=tmp_student_name,
datum=ob['observation'].upper()))
ds.commit()
return ''
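# For reference, a payload accepted above looks roughly like this (field
# names come from the check_existence calls; the values are invented):
# [{"studentName": "Ada",
#   "observationsForOneExperiment": [
#       {"labId": "<lab id>", "experimentName": "<name>", "observation": "42"}]}]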
# --- Manage admins ---
# add an admin into the database according to admin_info
def add_user(user_info):
# Get role object from table
user_role = get_role(user_info['role'])
    # username and role must be supplied; username uniqueness is checked below
err_msg = check_existence(user_info, 'username', 'role')
if err_msg != '':
return err_msg
classes = []
labs = []
if user_info['role'] == 'Student':
for class_id in user_info.getlist('classIds'):
if class_exists(class_id):
the_class = get_class(class_id)
classes.append(the_class)
for lab in the_class.labs:
labs.append(lab)
else:
return 'the class with id:{} doesn\'t exist.'.format(class_id)
if not user_exists(user_info['username']):
name = None
if 'name' in user_info:
name = user_info['name']
new_user = m.User(id=user_info['username'],
name=name,
role=user_role,
classes=classes,
labs=labs)
ds.add(new_user)
ds.commit()
else:
err_msg = 'The username:{} already exists'.format(user_info['username'])
return err_msg
# change the user's info(including role and classes)
def change_user_info(username, role, class_ids):
user = get_user(username)
classes = []
labs = []
if class_ids:
for c in class_ids:
the_class = get_class(c)
classes.append(the_class)
for lab in the_class.labs:
labs.append(lab)
user.role = get_role(role)
user.classes = classes
user.labs = labs
ds.commit()
# delete an admin from the database
def delete_user(username):
user_to_be_removed = get_user(username)
ds.delete(user_to_be_removed)
ds.commit()
# add a class into the database according to class_info
def add_class(class_info):
# Check the correctness of data format
# Note: students is optional i.e. it can be undefined
err_msg = check_existence(class_info, 'className', 'classTime')
if err_msg != '':
return err_msg
users = []
usernames = []
# create new class with data sent by client to be added to database
new_class_id = generate_class_id(
class_info['className'], class_info['classTime'])
if not class_exists(new_class_id):
if 'professors' in class_info:
for p in class_info.getlist('professors'):
if not user_exists(p):
err_msg = 'The professor with id:{} doesn\'t exist.'.format(p)
return err_msg
else:
usernames.append(p)
if 'students' in class_info:
for s in class_info.getlist('students'):
if not user_exists(s):
                    err_msg = 'The student with id:{} doesn\'t exist.'.format(s)
return err_msg
elif get_user(s).role_name != 'Student':
err_msg = s+(' already exists and is not a student.'
'You should not put their name into student name')
return err_msg
else:
usernames.append(s)
for username in usernames:
users.append(get_user(username))
new_class = m.Class(id=new_class_id,
name=class_info['className'],
time=class_info['classTime'],
users=users)
ds.add(new_class)
ds.commit()
else:
err_msg = "The class id already exists: {}".format(get_class(new_class_id))
return err_msg
# ---Manage classes---
# remove a class from the database according to class_id
def delete_class(class_id):
class_to_be_removed = get_class(class_id)
# discard users not enrolled in any other class with labs
# discard labs associated with the class to be deleted
for s in class_to_be_removed.users:
if s.role_name == 'Student' and len(s.classes) == 1:
ds.delete(s)
for l in class_to_be_removed.labs:
if lab_exists(l.id):
ds.delete(get_lab(l.id))
ds.delete(class_to_be_removed)
ds.commit()
# Change the users(both professors and students) in a class
def change_class_users(class_id, new_users):
if not class_exists(class_id):
return 'Class with id: {} doesn\'t exist'.format(class_id)
the_class = get_class(class_id)
old_users = the_class.users
# Add new users to the class;
# add the associated labs to these users lab list
for u in new_users:
if not user_exists(str(u)):
ds.rollback()
return 'User with username: {} doesn\'t exist'.format(u)
else:
user = get_user(u)
if not (u in the_class.users):
the_class.users.append(user)
user.labs = the_class.labs
# Delete the class and the associated labs from old users who
# are not in the class anymore
for u in old_users:
        if u.id not in [str(n) for n in new_users]:
u.classes = [c for c in u.classes if c.id != class_id]
new_lab_list = []
for lab in u.labs:
if lab.the_class.id != class_id:
new_lab_list.append(lab)
u.labs = new_lab_list
ds.commit()
return ''
# --- Initialize Classes and Users by getting data from Iris ---
# Populate the database with classes and their corresponding professors
# Note: This needs to be invoked before update_users_by_data_from_iris
# The existing professors will not be deleted even if they don't teach
# any class
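# A class_data element is expected to look roughly like this (values are
# illustrative; only the keys below are actually read):
# {"subject": "BIOL", "course_number": "101", "term_code": "201501",
#  "instructors": [{"username": "jdoe", "first_name": "Jane",
#                   "last_name": "Doe"}]}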
def populate_db_with_classes_and_professors(class_data):
class_data = cleanup_class_data(class_data)
for c in class_data:
class_name = c['subject'] + c['course_number']
class_time = tranlate_term_code_to_semester(c['term_code'])
class_professor_info_list = c['instructors']
class_professor_ids = [p['username'] for p in class_professor_info_list]
class_professors = []
for p in class_professor_info_list:
if not user_exists(p['username']):
name = generate_user_name(p['first_name'], p['last_name'])
ds.add(m.User(id=p['username'], name=name, role=get_role('Admin')))
ds.commit()
the_user = get_user(p['username'])
class_professors.append(the_user)
if class_name and class_time:
class_id = generate_class_id(class_name, class_time)
# If the class already exists, update the professors and keep
# the students
if class_exists(class_id):
the_class = get_class(class_id)
# handle the change of class and the labs associated with it
old_class_professors = [u for u in the_class.users if ((u.role_name == 'Admin') or (u.role_name == 'SuperAdmin'))]
for p in class_professors:
# Add the class to the professor's class list if it is not
# the list now.
if not (class_id in [c.id for c in p.classes]):
p.classes.append(the_class)
for lab in the_class.labs:
if not (lab in p.labs):
p.labs.append(lab)
ds.commit()
# Remove the class from the old professor's class list
# if the professor is no longer in the class's user list.
for p in old_class_professors:
if not (p.id in class_professor_ids):
p.classes = [c for c in p.classes if c.id != class_id]
p.labs = [lab for lab in p.labs if lab.class_id != class_id]
# otherwise create a class with the professors
else:
ds.add(m.Class(id=class_id, name=class_name, time=class_time,
users=class_professors))
else:
return 'class_time is not valid:{}'.format(class_time)
ds.commit()
return ''
# Update the users in the classes according to registration info
def update_students_by_data_from_iris(class_id_list, registration_data):
all_classes = get_all_class()
selected_classes = [c for c in all_classes if c.id in class_id_list]
registration_by_class = {}
warning_msg = ''
# A registration_object looks like
# {"user_name":"fake1","course_id":"10256","term_code":"201501",
# "subject":"BIOL","course_number":"101","section":"FTN",
# "first_name":"Fake", "last_name":"One"}
# Add the students in the received data into the database
for registration_object in registration_data:
username = registration_object['user_name']
invalid_list = [None, 'undefined', 'null', '']
# Since username is our key for User object, it cannot be empty
# If that happens, we skip the current user
if username in invalid_list:
continue
name = generate_user_name(registration_object['first_name'],
registration_object['last_name'])
class_id = generate_class_id((registration_object['subject'] +
registration_object['course_number']),
tranlate_term_code_to_semester(registration_object['term_code']))
# only students who registered courses in the list will be updated
if class_id not in class_id_list:
continue
# If the class exists in the database, update
if class_exists(class_id):
the_class = get_class(class_id)
# If user already exists, add the class into the class list of the
# user;
# otherwise, create a user with the class
if user_exists(username):
the_user = get_user(username)
if not (class_id in [c.id for c in the_user.classes]):
the_user.classes.append(the_class)
for lab in the_class.labs:
if not (lab in the_user.labs):
the_user.labs.append(lab)
else:
the_user = m.User(id=username, name=name, classes=[the_class],
role=get_role('Student'), labs=the_class.labs)
ds.add(the_user)
# else return a warning message to notify the user
else:
warning_msg += ('class_id: ' + class_id +
' doesn\'t exist in database\n')
# for efficiency: otherwise we have to loop through
# registration_data many times
if class_id in registration_by_class:
registration_by_class[class_id].append(username)
else:
registration_by_class[class_id] = []
# Check the students of the classes in the database and update them
# according to the received data
for c in selected_classes:
# If the class exists in the received data, compare
# the users of the class in database and data
if c.id in registration_by_class:
# Keep the admins/superadmins of the class
class_new_users = [u for u in c.users if ((u.role_name == 'Admin') or (u.role_name == 'SuperAdmin'))]
# Replace the students of the class with the students in the
# received data
for student_id in registration_by_class[c.id]:
class_new_users.append(get_user(student_id))
c.users = class_new_users
else:
            warning_msg += ('class_id: ' + c.id +
                            ' doesn\'t exist in received data\n')
ds.commit()
return warning_msg
# Delete all students in the database
# The current function will not report any warning messages
def delete_all_students():
for u in get_all_user():
if u.role_name == "Student":
ds.delete(u)
ds.commit()
return ''
| mit | -2,824,706,532,023,522,000 | 42.649351 | 130 | 0.538403 | false |
Egor-Krivov/pdp | tests/test_base.py | 1 | 3421 | import unittest
import time
from contextlib import suppress
from queue import Queue as ThreadQueue
from threading import Thread
from threading import Event as ThreadEvent
import numpy as np
from pdp.base import InterruptableQueue, StopEvent, start_one2one_transformer
DEFAULT_LOOP_TIMEOUT = 0.02
def set_event_after_timeout(event, timeout):
def target():
time.sleep(timeout)
event.set()
Thread(target=target).start()
class TestInterruptableQueue(unittest.TestCase):
def setUp(self):
self.maxsize = 10
self.loop_timeout = DEFAULT_LOOP_TIMEOUT
self.wait_timeout = 7.5 * self.loop_timeout
self.receive_timeout = 0.5 * self.loop_timeout
self.stop_event = ThreadEvent()
self.q = InterruptableQueue(ThreadQueue(self.maxsize), self.loop_timeout, self.stop_event)
def test_get(self):
def target():
with suppress(StopEvent):
self.q.get()
thread = Thread(target=target)
thread.start()
self.assertTrue(thread.is_alive())
set_event_after_timeout(event=self.stop_event, timeout=self.wait_timeout + self.receive_timeout)
self.assertTrue(thread.is_alive())
time.sleep(self.wait_timeout)
self.assertTrue(thread.is_alive())
time.sleep(self.receive_timeout * 2)
self.assertFalse(thread.is_alive())
def test_put(self):
for i in range(self.maxsize):
self.q.put(i)
def target():
with suppress(StopEvent):
self.q.put(-1)
thread = Thread(target=target)
thread.start()
self.assertTrue(thread.is_alive())
set_event_after_timeout(event=self.stop_event, timeout=self.wait_timeout + self.receive_timeout)
self.assertTrue(thread.is_alive())
time.sleep(self.wait_timeout)
self.assertTrue(thread.is_alive())
time.sleep(self.receive_timeout * 2)
self.assertFalse(thread.is_alive())
class testOne2One(unittest.TestCase):
def setUp(self):
self.buffer_size = 20
self.loop_timeout = DEFAULT_LOOP_TIMEOUT
self.stop_event = ThreadEvent()
self.q_in = InterruptableQueue(ThreadQueue(self.buffer_size), self.loop_timeout, self.stop_event)
self.q_out = InterruptableQueue(ThreadQueue(self.buffer_size), self.loop_timeout, self.stop_event)
def tearDown(self):
self.q_in.join()
self.q_out.join()
def data_pass(self, n_workers):
data_in = np.random.randn(self.buffer_size * 10)
def f(x):
return x ** 2
data_out_true = f(data_in)
start_one2one_transformer(f, q_in=self.q_in, q_out=self.q_out, stop_event=self.stop_event, n_workers=n_workers)
i = 0
data_out = []
for d in data_in:
self.q_in.put(d)
i += 1
if i == self.buffer_size:
for j in range(self.buffer_size):
data_out.append(self.q_out.get())
self.q_out.task_done()
i = 0
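        # With more than one worker the output order is nondeterministic,
        # so compare sorted sequences rather than positions.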
if n_workers > 1:
data_out_true = sorted(data_out_true)
data_out = sorted(data_out)
np.testing.assert_equal(data_out, data_out_true)
def test_data_pass(self):
for n_workers in (1, 4, 10):
with self.subTest(f'n_workers={n_workers}'):
self.data_pass(n_workers=n_workers)
| mit | -184,339,067,233,399,360 | 29.81982 | 119 | 0.609763 | false |
modoboa/modoboa-stats | modoboa_stats/forms.py | 1 | 1691 | """Modoboa stats forms."""
import rrdtool
from pkg_resources import parse_version
from django.conf import settings
from django.utils.translation import ugettext_lazy
from django import forms
from modoboa.lib import form_utils
from modoboa.parameters import forms as param_forms
class ParametersForm(param_forms.AdminParametersForm):
"""Stats global parameters."""
app = "modoboa_stats"
general_sep = form_utils.SeparatorField(label=ugettext_lazy("General"))
logfile = forms.CharField(
label=ugettext_lazy("Path to the log file"),
initial="/var/log/mail.log",
help_text=ugettext_lazy("Path to log file used to collect statistics"),
widget=forms.TextInput(attrs={"class": "form-control"})
)
rrd_rootdir = forms.CharField(
label=ugettext_lazy("Directory to store RRD files"),
initial="/tmp/modoboa",
help_text=ugettext_lazy(
"Path to directory where RRD files are stored"),
widget=forms.TextInput(attrs={"class": "form-control"})
)
greylist = form_utils.YesNoField(
label=ugettext_lazy("Show greylisted messages"),
initial=False,
help_text=ugettext_lazy(
"Differentiate between hard and soft rejects (greylisting)")
)
def __init__(self, *args, **kwargs):
"""Check RRDtool version."""
super(ParametersForm, self).__init__(*args, **kwargs)
rrd_version = parse_version(rrdtool.lib_version())
required_version = parse_version("1.6.0")
test_mode = getattr(settings, "RRDTOOL_TEST_MODE", False)
if rrd_version < required_version and not test_mode:
del self.fields["greylist"]
| mit | 6,087,176,765,779,094,000 | 32.156863 | 79 | 0.66233 | false |
danthedeckie/streetsign | tests/test_formgets.py | 1 | 7692 | '''
tests/test_formgets.py
Tests for the various getstr/getint/getbool helper functions.
Part of streetsign.
'''
import sys
import os
import unittest
sys.path.append(os.path.dirname(__file__) + '/..')
from streetsign_server.views.utils import getstr, getint, getbool, \
DATESTR, STRIPSTR
# pylint: disable=too-many-public-methods, too-few-public-methods
# pylint: disable=missing-docstring, invalid-name
class FakeResp(object):
' Mock the Response global object, for testing form stuff. '
def __init__(self):
self.form = {}
class FakeRespCase(unittest.TestCase):
'''
Testcase which hides the real request global, and replaces it with
a fake one
'''
def setUp(self):
self.resp = FakeResp()
self.realresp = getstr.__globals__['request']
getstr.__globals__['request'] = self.resp
getint.__globals__['request'] = self.resp
getbool.__globals__['request'] = self.resp
    def tearDown(self, *vargs):  # pylint: disable=unused-argument
        getstr.__globals__['request'] = self.realresp
        getint.__globals__['request'] = self.realresp
        getbool.__globals__['request'] = self.realresp
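    # Swapping getstr.__globals__['request'] replaces the module-level
    # `request` proxy the helpers close over, so they read FakeResp.form
    # without needing a real Flask request context.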
class TestGetStr(FakeRespCase):
' tests for the getstr helper method '
def test_not_there(self):
self.assertFalse('GETME' in self.resp.form)
self.assertEqual(getstr('GETME', 'default_value'), 'default_value')
def test_there(self):
self.resp.form['GETME'] = 'set thing'
self.assertEqual(getstr('GETME', 'default_value'), 'set thing')
def test_empty(self):
self.resp.form['GETME'] = ''
self.assertEqual(getstr('GETME', 'default'), '')
def test_validates(self):
self.resp.form['GETME'] = 'blah'
self.assertEqual(getstr('GETME', 'none', validate='.*@.*'), 'none')
def test_validates_date(self):
self.resp.form['GETME'] = 'blah'
dateformat = r'(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d)'
date = '2016-10-12 16:02:19'
self.assertEqual(getstr('GETME', 'none', validate=dateformat), 'none')
self.resp.form['GETME'] = date
self.assertEqual(getstr('GETME', 'none', validate=dateformat), date)
def test_validate_fails(self):
fallback = 42
self.resp.form['GETME'] = 'Not a valid date.'
self.assertEqual(getstr('GETME', fallback, validate=DATESTR), fallback)
def test_validates_DATESTR_valid(self):
date = '2016-10-12 16:02:19'
self.resp.form['GETME'] = date
self.assertEqual(getstr('GETME', 'none', validate=DATESTR), date)
def test_validates_DATESTR_strip(self):
date = '2016-10-12 16:02:19'
self.resp.form['GETME'] = ' ' + date + '.00 stuff'
self.assertEqual(getstr('GETME', 'none', validate=DATESTR), date)
def test_validates_STRIPSTR_all(self):
text = 'this is some text'
self.resp.form['GETME'] = text
self.assertEqual(getstr('GETME', 'none', validate=STRIPSTR), text)
def test_validates_STRIPSTR_lstrip(self):
text = 'this is some text'
self.resp.form['GETME'] = ' ' + text
self.assertEqual(getstr('GETME', 'none', validate=STRIPSTR), text)
def test_validates_STRIPSTR_rstrip(self):
text = 'this is some text'
self.resp.form['GETME'] = ' ' + text + '\t'
self.assertEqual(getstr('GETME', 'none', validate=STRIPSTR), text)
def test_validates_STRIPSTR_stripboth(self):
text = 'this is some text'
self.resp.form['GETME'] = ' ' + text + '\t '
self.assertEqual(getstr('GETME', 'none', validate=STRIPSTR), text)
def test_validates_STRIPSTR_number(self):
text = '2019'
self.resp.form['GETME'] = ' ' + text + '\t '
self.assertEqual(getstr('GETME', 'none', validate=STRIPSTR), text)
def test_empty_string(self):
self.resp.form['GETME'] = ''
self.assertEqual(getstr('GETME', 'none'), '')
class TestGetInt(FakeRespCase):
' tests for the getint helper '
def test_not_there(self):
self.assertFalse('GETME' in self.resp.form)
self.assertEqual(getint('GETME', 42), 42)
def test_there(self):
self.resp.form['GETME'] = 999
self.assertEqual(getint('GETME', 42), 999)
def test_empty(self):
self.resp.form['GETME'] = ''
self.assertEqual(getint('GETME', 42), 42)
def test_validate_min(self):
# input is big enough
self.resp.form['GETME'] = 120
self.assertEqual(getint('GETME', 42, minimum=99), 120)
# end up on default
del self.resp.form['GETME']
self.assertEqual(getint('GETME', 100, minimum=99), 100)
# fallback to minimum
self.resp.form['GETME'] = 80
self.assertEqual(getint('GETME', 42, minimum=99), 99)
def test_validate_max(self):
# input is small enough
self.resp.form['GETME'] = 80
self.assertEqual(getint('GETME', 42, maximum=99), 80)
# end up on default
del self.resp.form['GETME']
self.assertEqual(getint('GETME', 100, maximum=200), 100)
# fallback to maximum
self.resp.form['GETME'] = 80
self.assertEqual(getint('GETME', 42, maximum=25), 25)
def test_validate_minmax(self):
# input is small enough
self.resp.form['GETME'] = 80
self.assertEqual(getint('GETME', 42, minimum=20, maximum=99), 80)
# end up on default
del self.resp.form['GETME']
self.assertEqual(getint('GETME', 75, minimum=20, maximum=99), 75)
# fallback to maximum
self.resp.form['GETME'] = 9000
self.assertEqual(getint('GETME', 42, minimum=20, maximum=99), 99)
# fallback to minimum
self.resp.form['GETME'] = 9
self.assertEqual(getint('GETME', 42, minimum=20, maximum=99), 20)
class TestGetBool(FakeRespCase):
' tests for the getbool helper function '
def test_getbool_not_there(self):
self.assertFalse('GETME' in self.resp.form)
self.assertFalse(getbool('GETME', False))
self.assertTrue(getbool('GETME', True))
def test_getbool_True(self):
self.resp.form['GETME'] = True
self.assertTrue(getbool('GETME', True))
def test_getbool_TrueStr(self):
self.resp.form['GETME'] = 'True'
self.assertTrue(getbool('GETME', True))
def test_getbool_trueStr(self):
self.resp.form['GETME'] = 'true'
self.assertTrue(getbool('GETME', True))
def test_getbool_TRUEStr(self):
self.resp.form['GETME'] = 'TRUE'
self.assertTrue(getbool('GETME', True))
def test_getbool_1(self):
self.resp.form['GETME'] = 1
self.assertTrue(getbool('GETME', True))
def test_getbool_1Str(self):
self.resp.form['GETME'] = '1'
self.assertTrue(getbool('GETME', True))
def test_getbool_yesStr(self):
self.resp.form['GETME'] = 'yes'
self.assertTrue(getbool('GETME', True))
def test_getbool_YesStr(self):
self.resp.form['GETME'] = 'Yes'
self.assertTrue(getbool('GETME', True))
def test_getbool_YESStr(self):
self.resp.form['GETME'] = 'YES'
self.assertTrue(getbool('GETME', True))
def test_getbool_checkedStr(self):
self.resp.form['GETME'] = 'checked'
self.assertTrue(getbool('GETME', True))
def test_getbool_CheckedStr(self):
self.resp.form['GETME'] = 'Checked'
self.assertTrue(getbool('GETME', True))
def test_getbool_CHECKEDStr(self):
self.resp.form['GETME'] = 'CHECKED'
self.assertTrue(getbool('GETME', True))
| gpl-3.0 | 531,835,391,887,350,800 | 32.443478 | 79 | 0.606344 | false |
bigzhao/flask-projects-manage | app/auth/views.py | 1 | 2830 | # -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, current_user
from . import auth
from ..models import User
from .forms import RegisterForm, EditForm, ChangePasswdForm
from .. import db
@auth.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect(url_for("main.index"))
if request.method == 'POST':
user = User.query.filter_by(id=request.form.get('uid')).first()
if user is not None and user.verify_password(request.form.get('password')):
login_user(user, request.form.get('remember_me'))
return redirect(request.args.get('next') or url_for('main.index'))
        flash(u'Wrong username or password.')
return render_template('auth/login.html')
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['POST', 'GET'])
def register():
if current_user.is_authenticated:
return redirect(url_for("main.index"))
form = RegisterForm()
if form.validate_on_submit():
user = User(id=form.uid.data,
name=form.username.data.strip(),
password=form.password.data)
db.session.add(user)
db.session.commit()
        flash(u'Registration successful!')
return redirect(url_for(".login"))
return render_template('auth/register.html', form=form)
@auth.route('/edit_profile', methods=['POST', 'GET'])
@login_required
def edit_profile():
form = EditForm()
if form.validate_on_submit():
user = current_user._get_current_object()
user.name = form.username.data
db.session.add(user)
db.session.commit()
        flash(u'Username updated successfully.')
return redirect(url_for('main.index'))
form.uid.data = current_user.id
form.username.data = current_user.name
return render_template('auth/edit_profile.html', form=form)
@auth.route('/changepasswd', methods=['POST', 'GET'])
@login_required
def change_passwd():
form = ChangePasswdForm()
if form.validate_on_submit():
if current_user.verify_password(form.old_password.data):
user = current_user._get_current_object()
user.password = form.password.data
db.session.add(user)
db.session.commit()
return redirect(url_for('auth.login'))
else:
            flash(u'Wrong password.')
return render_template('auth/change_passwd.html', form=form)
def allowed_file(filename):
    '''
    Check that the filename has an allowed image extension.
    '''
    return '.' in filename and \
           filename.rsplit('.', 1)[1].lower() in set(['png', 'jpg', 'jpeg', 'gif'])
| mit | 6,027,085,668,962,736,000 | 29.43956 | 83 | 0.625632 | false |
Tamriel/wagtail_room_booking | account/models.py | 1 | 13705 | from __future__ import unicode_literals
import datetime
import operator
try:
from urllib.parse import urlencode
except ImportError: # python 2
from urllib import urlencode
from django.core.urlresolvers import reverse
from django.db import models, transaction
from django.db.models import Q
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone, translation, six
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import AnonymousUser
from django.contrib.sites.models import Site
import pytz
from account import signals
from account.conf import settings
from account.fields import TimeZoneField
from account.hooks import hookset
from account.managers import EmailAddressManager, EmailConfirmationManager
from account.signals import signup_code_sent, signup_code_used
@python_2_unicode_compatible
class Account(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, related_name="account", verbose_name=_("user"))
street = models.CharField(
_("street"),
max_length=100,
)
phone = models.CharField(
_("phone"),
max_length=100,
)
plz_city = models.CharField(
_("plz_city"),
max_length=100,
)
@classmethod
def for_request(cls, request):
user = getattr(request, "user", None)
if user and user.is_authenticated():
try:
return Account._default_manager.get(user=user)
except Account.DoesNotExist:
pass
return AnonymousAccount(request)
@classmethod
def create(cls, request=None, **kwargs):
create_email = kwargs.pop("create_email", True)
confirm_email = kwargs.pop("confirm_email", None)
account = cls(**kwargs)
if "language" not in kwargs:
if request is None:
account.language = settings.LANGUAGE_CODE
else:
account.language = translation.get_language_from_request(request, check_path=True)
account.save()
if create_email and account.user.email:
kwargs = {"primary": True}
if confirm_email is not None:
kwargs["confirm"] = confirm_email
EmailAddress.objects.add_email(account.user, account.user.email, **kwargs)
return account
def __str__(self):
return str(self.user)
def now(self):
"""
Returns a timezone aware datetime localized to the account's timezone.
"""
now = datetime.datetime.utcnow().replace(tzinfo=pytz.timezone("UTC"))
        timezone = getattr(self, "timezone", None) or settings.TIME_ZONE
return now.astimezone(pytz.timezone(timezone))
def localtime(self, value):
"""
Given a datetime object as value convert it to the timezone of
the account.
"""
        timezone = getattr(self, "timezone", None) or settings.TIME_ZONE
if value.tzinfo is None:
value = pytz.timezone(settings.TIME_ZONE).localize(value)
return value.astimezone(pytz.timezone(timezone))
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def user_post_save(sender, **kwargs):
"""
After User.save is called we check to see if it was a created user. If so,
we check if the User object wants account creation. If all passes we
create an Account object.
We only run on user creation to avoid having to check for existence on
each call to User.save.
"""
user, created = kwargs["instance"], kwargs["created"]
disabled = getattr(user, "_disable_account_creation", not settings.ACCOUNT_CREATE_ON_SAVE)
if created and not disabled:
Account.create(user=user)
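def create_user_without_account(user_model, **fields):
    """
    Convenience sketch: create a user while suppressing the automatic
    Account creation performed by user_post_save above, via the
    _disable_account_creation escape hatch it checks for.
    """
    user = user_model(**fields)
    user._disable_account_creation = True
    user.save()
    return user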
@python_2_unicode_compatible
class AnonymousAccount(object):
def __init__(self, request=None):
self.user = AnonymousUser()
self.timezone = settings.TIME_ZONE
if request is None:
self.language = settings.LANGUAGE_CODE
else:
self.language = translation.get_language_from_request(request, check_path=True)
def __str__(self):
return "AnonymousAccount"
@python_2_unicode_compatible
class SignupCode(models.Model):
class AlreadyExists(Exception):
pass
class InvalidCode(Exception):
pass
code = models.CharField(_("code"), max_length=64, unique=True)
max_uses = models.PositiveIntegerField(_("max uses"), default=0)
expiry = models.DateTimeField(_("expiry"), null=True, blank=True)
inviter = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True)
email = models.EmailField(max_length=254, blank=True)
notes = models.TextField(_("notes"), blank=True)
sent = models.DateTimeField(_("sent"), null=True, blank=True)
created = models.DateTimeField(_("created"), default=timezone.now, editable=False)
use_count = models.PositiveIntegerField(_("use count"), editable=False, default=0)
class Meta:
verbose_name = _("signup code")
verbose_name_plural = _("signup codes")
def __str__(self):
if self.email:
return "{0} [{1}]".format(self.email, self.code)
else:
return self.code
@classmethod
def exists(cls, code=None, email=None):
checks = []
if code:
checks.append(Q(code=code))
if email:
            checks.append(Q(email=email))
if not checks:
return False
return cls._default_manager.filter(six.moves.reduce(operator.or_, checks)).exists()
@classmethod
def create(cls, **kwargs):
email, code = kwargs.get("email"), kwargs.get("code")
if kwargs.get("check_exists", True) and cls.exists(code=code, email=email):
raise cls.AlreadyExists()
expiry = timezone.now() + datetime.timedelta(hours=kwargs.get("expiry", 24))
if not code:
code = hookset.generate_signup_code_token(email)
params = {
"code": code,
"max_uses": kwargs.get("max_uses", 0),
"expiry": expiry,
"inviter": kwargs.get("inviter"),
"notes": kwargs.get("notes", "")
}
if email:
params["email"] = email
return cls(**params)
@classmethod
def check_code(cls, code):
try:
signup_code = cls._default_manager.get(code=code)
except cls.DoesNotExist:
raise cls.InvalidCode()
else:
if signup_code.max_uses and signup_code.max_uses <= signup_code.use_count:
raise cls.InvalidCode()
else:
if signup_code.expiry and timezone.now() > signup_code.expiry:
raise cls.InvalidCode()
else:
return signup_code
def calculate_use_count(self):
self.use_count = self.signupcoderesult_set.count()
self.save()
def use(self, user):
"""
Add a SignupCode result attached to the given user.
"""
result = SignupCodeResult()
result.signup_code = self
result.user = user
result.save()
signup_code_used.send(sender=result.__class__, signup_code_result=result)
def send(self, **kwargs):
protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http")
current_site = kwargs["site"] if "site" in kwargs else Site.objects.get_current()
if "signup_url" not in kwargs:
signup_url = "{0}://{1}{2}?{3}".format(
protocol,
current_site.domain,
reverse("account_signup"),
urlencode({"code": self.code})
)
else:
signup_url = kwargs["signup_url"]
ctx = {
"signup_code": self,
"current_site": current_site,
"signup_url": signup_url,
}
ctx.update(kwargs.get("extra_ctx", {}))
hookset.send_invitation_email([self.email], ctx)
self.sent = timezone.now()
self.save()
signup_code_sent.send(sender=SignupCode, signup_code=self)
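def redeem_signup_code(code, user):
    """
    Usage sketch: validate a signup code and record its use for ``user``.
    Raises SignupCode.InvalidCode if the code is unknown, expired, or has
    hit its max_uses (see check_code above).
    """
    signup_code = SignupCode.check_code(code)
    signup_code.use(user)
    return signup_code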
class SignupCodeResult(models.Model):
signup_code = models.ForeignKey(SignupCode)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
timestamp = models.DateTimeField(default=timezone.now)
def save(self, **kwargs):
super(SignupCodeResult, self).save(**kwargs)
self.signup_code.calculate_use_count()
@python_2_unicode_compatible
class EmailAddress(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
email = models.EmailField(max_length=254, unique=settings.ACCOUNT_EMAIL_UNIQUE)
verified = models.BooleanField(_("verified"), default=False)
primary = models.BooleanField(_("primary"), default=False)
objects = EmailAddressManager()
class Meta:
verbose_name = _("email address")
verbose_name_plural = _("email addresses")
if not settings.ACCOUNT_EMAIL_UNIQUE:
unique_together = [("user", "email")]
def __str__(self):
return "{0} ({1})".format(self.email, self.user)
def set_as_primary(self, conditional=False):
old_primary = EmailAddress.objects.get_primary(self.user)
if old_primary:
if conditional:
return False
old_primary.primary = False
old_primary.save()
self.primary = True
self.save()
self.user.email = self.email
self.user.save()
return True
def send_confirmation(self, **kwargs):
confirmation = EmailConfirmation.create(self)
confirmation.send(**kwargs)
return confirmation
def change(self, new_email, confirm=True):
"""
Given a new email address, change self and re-confirm.
"""
with transaction.atomic():
self.user.email = new_email
self.user.save()
self.email = new_email
self.verified = False
self.save()
if confirm:
self.send_confirmation()
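def add_and_confirm_email(user, email):
    """
    Round-trip sketch (assumes a mail backend wired up via the hookset):
    add an address and send its confirmation link. Later, the confirming
    view would look the key up and call EmailConfirmation.confirm().
    """
    address = EmailAddress.objects.add_email(user, email, confirm=True)
    return address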
@python_2_unicode_compatible
class EmailConfirmation(models.Model):
email_address = models.ForeignKey(EmailAddress)
created = models.DateTimeField(default=timezone.now)
sent = models.DateTimeField(null=True)
key = models.CharField(max_length=64, unique=True)
objects = EmailConfirmationManager()
class Meta:
verbose_name = _("email confirmation")
verbose_name_plural = _("email confirmations")
def __str__(self):
return "confirmation for {0}".format(self.email_address)
@classmethod
def create(cls, email_address):
key = hookset.generate_email_confirmation_token(email_address.email)
return cls._default_manager.create(email_address=email_address, key=key)
def key_expired(self):
expiration_date = self.sent + datetime.timedelta(days=settings.ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS)
return expiration_date <= timezone.now()
key_expired.boolean = True
def confirm(self):
if not self.key_expired() and not self.email_address.verified:
email_address = self.email_address
email_address.verified = True
email_address.set_as_primary(conditional=True)
email_address.save()
signals.email_confirmed.send(sender=self.__class__, email_address=email_address)
return email_address
def send(self, **kwargs):
current_site = kwargs["site"] if "site" in kwargs else Site.objects.get_current()
protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http")
activate_url = "{0}://{1}{2}".format(
protocol,
current_site.domain,
reverse(settings.ACCOUNT_EMAIL_CONFIRMATION_URL, args=[self.key])
)
ctx = {
"email_address": self.email_address,
"user": self.email_address.user,
"activate_url": activate_url,
"current_site": current_site,
"key": self.key,
}
hookset.send_confirmation_email([self.email_address.email], ctx)
self.sent = timezone.now()
self.save()
signals.email_confirmation_sent.send(sender=self.__class__, confirmation=self)
class AccountDeletion(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.SET_NULL)
email = models.EmailField(max_length=254)
date_requested = models.DateTimeField(_("date requested"), default=timezone.now)
date_expunged = models.DateTimeField(_("date expunged"), null=True, blank=True)
class Meta:
verbose_name = _("account deletion")
verbose_name_plural = _("account deletions")
@classmethod
def expunge(cls, hours_ago=None):
if hours_ago is None:
hours_ago = settings.ACCOUNT_DELETION_EXPUNGE_HOURS
before = timezone.now() - datetime.timedelta(hours=hours_ago)
count = 0
for account_deletion in cls.objects.filter(date_requested__lt=before, user__isnull=False):
settings.ACCOUNT_DELETION_EXPUNGE_CALLBACK(account_deletion)
account_deletion.date_expunged = timezone.now()
account_deletion.save()
count += 1
return count
@classmethod
def mark(cls, user):
account_deletion, created = cls.objects.get_or_create(user=user)
account_deletion.email = user.email
account_deletion.save()
settings.ACCOUNT_DELETION_MARK_CALLBACK(account_deletion)
return account_deletion
| gpl-3.0 | 7,228,095,418,603,898,000 | 34.505181 | 110 | 0.624954 | false |
appleseedhq/gaffer | python/GafferUI/CompoundDataPlugValueWidget.py | 1 | 9030 | ##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import functools
import imath
import IECore
import Gaffer
import GafferUI
## Supported plug metadata :
#
# "compoundDataPlugValueWidget:editable"
class CompoundDataPlugValueWidget( GafferUI.PlugValueWidget ) :
def __init__( self, plug, **kw ) :
self.__column = GafferUI.ListContainer( spacing = 6 )
GafferUI.PlugValueWidget.__init__( self, self.__column, plug, **kw )
with self.__column :
self.__layout = GafferUI.PlugLayout( plug )
with GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Horizontal ) as self.__editRow :
GafferUI.Spacer( imath.V2i( GafferUI.PlugWidget.labelWidth(), 1 ) )
GafferUI.MenuButton(
image = "plus.png",
hasFrame = False,
menu = GafferUI.Menu( Gaffer.WeakMethod( self.__addMenuDefinition ) )
)
GafferUI.Spacer( imath.V2i( 1 ), imath.V2i( 999999, 1 ), parenting = { "expand" : True } )
self._updateFromPlug()
def hasLabel( self ) :
return True
def setPlug( self, plug ) :
GafferUI.PlugValueWidget.setPlug( self, plug )
self.__layout = GafferUI.PlugLayout( plug )
self.__column[0] = self.__layout
def setReadOnly( self, readOnly ) :
if readOnly == self.getReadOnly() :
return
GafferUI.PlugValueWidget.setReadOnly( self, readOnly )
self.__layout.setReadOnly( readOnly )
def childPlugValueWidget( self, childPlug, lazy=True ) :
return self.__layout.plugValueWidget( childPlug, lazy )
def _updateFromPlug( self ) :
editable = True
readOnly = False
if self.getPlug() is not None :
editable = Gaffer.Metadata.value( self.getPlug(), "compoundDataPlugValueWidget:editable" )
editable = editable if editable is not None else True
readOnly = Gaffer.MetadataAlgo.readOnly( self.getPlug() )
self.__editRow.setVisible( editable )
self.__editRow.setEnabled( not readOnly )
def __addMenuDefinition( self ) :
result = IECore.MenuDefinition()
result.append( "/Add/Bool", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.BoolData( False ) ) } )
result.append( "/Add/Float", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.FloatData( 0 ) ) } )
result.append( "/Add/Int", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.IntData( 0 ) ) } )
result.append( "/Add/NumericDivider", { "divider" : True } )
result.append( "/Add/String", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.StringData( "" ) ) } )
result.append( "/Add/StringDivider", { "divider" : True } )
result.append( "/Add/V2i/Vector", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V2iData( imath.V2i( 0 ), IECore.GeometricData.Interpretation.Vector ) ) } )
result.append( "/Add/V2i/Normal", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V2iData( imath.V2i( 0 ), IECore.GeometricData.Interpretation.Normal ) ) } )
result.append( "/Add/V2i/Point", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V2iData( imath.V2i( 0 ), IECore.GeometricData.Interpretation.Point ) ) } )
result.append( "/Add/V3i/Vector", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V3iData( imath.V3i( 0 ), IECore.GeometricData.Interpretation.Vector ) ) } )
result.append( "/Add/V3i/Normal", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V3iData( imath.V3i( 0 ), IECore.GeometricData.Interpretation.Normal ) ) } )
result.append( "/Add/V3i/Point", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V3iData( imath.V3i( 0 ), IECore.GeometricData.Interpretation.Point ) ) } )
result.append( "/Add/V2f/Vector", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V2fData( imath.V2f( 0 ), IECore.GeometricData.Interpretation.Vector ) ) } )
result.append( "/Add/V2f/Normal", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V2fData( imath.V2f( 0 ), IECore.GeometricData.Interpretation.Normal ) ) } )
result.append( "/Add/V2f/Point", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V2fData( imath.V2f( 0 ), IECore.GeometricData.Interpretation.Point ) ) } )
result.append( "/Add/V3f/Vector", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V3fData( imath.V3f( 0 ), IECore.GeometricData.Interpretation.Vector ) ) } )
result.append( "/Add/V3f/Normal", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V3fData( imath.V3f( 0 ), IECore.GeometricData.Interpretation.Normal ) ) } )
result.append( "/Add/V3f/Point", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.V3fData( imath.V3f( 0 ), IECore.GeometricData.Interpretation.Point ) ) } )
result.append( "/Add/VectorDivider", { "divider" : True } )
result.append( "/Add/Color3f", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.Color3fData( imath.Color3f( 0 ) ) ) } )
result.append( "/Add/Color4f", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.Color4fData( imath.Color4f( 0, 0, 0, 1 ) ) ) } )
result.append( "/Add/BoxDivider", { "divider" : True } )
result.append( "/Add/Box2i", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.Box2iData( imath.Box2i( imath.V2i( 0 ), imath.V2i( 1 ) ) ) ) } )
result.append( "/Add/Box2f", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.Box2fData( imath.Box2f( imath.V2f( 0 ), imath.V2f( 1 ) ) ) ) } )
result.append( "/Add/Box3i", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.Box3iData( imath.Box3i( imath.V3i( 0 ), imath.V3i( 1 ) ) ) ) } )
result.append( "/Add/Box3f", { "command" : functools.partial( Gaffer.WeakMethod( self.__addItem ), "", IECore.Box3fData( imath.Box3f( imath.V3f( 0 ), imath.V3f( 1 ) ) ) ) } )
result.append( "/Add/BoxDivider", { "divider" : True } )
for label, plugType in [
( "Float", Gaffer.FloatVectorDataPlug ),
( "Int", Gaffer.IntVectorDataPlug),
( "NumericDivider", None ),
( "String", Gaffer.StringVectorDataPlug ),
] :
if plugType is not None :
result.append( "/Add/Array/" + label, {"command" : IECore.curry( Gaffer.WeakMethod( self.__addItem ), "", plugType.ValueType() ) } )
else :
result.append( "/Add/Array/" + label, { "divider" : True } )
return result
def __addItem( self, name, value ) :
with Gaffer.UndoScope( self.getPlug().ancestor( Gaffer.ScriptNode ) ) :
self.getPlug().addChild( Gaffer.NameValuePlug( name, value, True, "member1", flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
GafferUI.PlugValueWidget.registerType( Gaffer.CompoundDataPlug, CompoundDataPlugValueWidget )
##########################################################################
# Plug metadata
##########################################################################
Gaffer.Metadata.registerValue( Gaffer.CompoundDataPlug, "*", "deletable", lambda plug : plug.getFlags( Gaffer.Plug.Flags.Dynamic ) )
| bsd-3-clause | 551,014,885,235,974,460 | 50.306818 | 192 | 0.670764 | false |
bobismijnnaam/bobe-euler | 48/Utils.py | 1 | 12422 | #!/usr/bin/env python3
import collections
# skim seems to be intensive and doing a lot of work even though in some
# cases it could do less. For example, in add, you can stop skimming if,
# after skimming the first two cells, the carrier is 0.
# We need more kinds of skimming, at least two (i.e. the skim as we know it
# now for extreme cases, and the skim just described, to cover cases where
# we know after topping off one cell that there are no other carries); a
# sketch of the latter follows.
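# A possible early-exit variant (standalone sketch, not wired into BigInt):
# it normalizes a little-endian digit list in place and bails out as soon as
# the carry dies, which is only safe when the higher cells were untouched,
# as is the case after add().
def skim_early_exit(number):
    carrier = 0
    for i in range(len(number)):
        number[i] += carrier
        head = number[i] % 10
        carrier = (number[i] - head) // 10
        number[i] = head
        if carrier == 0:
            break  # higher cells are already single digits
    while carrier != 0:
        head = carrier % 10
        carrier = (carrier - head) // 10
        number.append(head)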
class BigInt:
def __init__(self):
self.number = [0]
def skim(self):
carrier = 0
for i in range(len(self.number)):
self.number[i] += carrier
head = self.number[i] % 10
            carrier = (self.number[i] - head) // 10
self.number[i] = int(head)
while carrier != 0:
head = carrier % 10
            carrier = (carrier - head) // 10
self.number.append(int(head))
def add(self, factor):
self.number[0] += factor
self.skim();
def mul(self, factor):
carry = 0
for i in range(len(self.number)):
self.number[i] *= factor
self.number[i] += carry
carry = 0
if self.number[i] > 9:
head = int(self.number[i] % 10)
                carry = (self.number[i] - head) // 10
self.number[i] = head
while carry != 0:
head = carry % 10
            carry = (carry - head) // 10
self.number.append(int(head))
    def pow(self, factor):
        if factor < 0:
            raise NotImplementedError("Negative powers not supported")
        if isinstance(factor, float) and not factor.is_integer():
            raise NotImplementedError("Non-integer powers not supported")
        if factor == 0:
            self.number = [1]
            return
        oldSelf = self.clone()
        for _ in range(int(factor) - 1):
            self.bigMul(oldSelf)
    def smartPow(self, factor):
        # Inspired by: https://en.wikipedia.org/wiki/Exponentiation_by_squaring
        if factor < 0:
            raise NotImplementedError("Negative powers not supported")
        if isinstance(factor, float) and not factor.is_integer():
            raise NotImplementedError("Non-integer powers not supported")
        if factor == 0:
            self.number = [1]
            return
        if factor == 1:
            return
        if (factor % 2) == 0:
            # Even: x^n == (x*x)^(n/2)
            self.bigMul(self)
            self.smartPow(factor // 2)
        else:
            # Odd: x^n == x * (x*x)^((n-1)/2)
            oldSelf = self.clone()
            self.bigMul(self)
            self.smartPow((factor - 1) // 2)
            self.bigMul(oldSelf)
    def smartPowIt(self, factor):
        # Inspired by: https://en.wikipedia.org/wiki/Exponentiation_by_squaring
        if factor < 0:
            raise NotImplementedError("Negative powers not supported")
        if isinstance(factor, float) and not factor.is_integer():
            raise NotImplementedError("Non-integer powers not supported")
        if factor == 0:
            self.number = [1]
            return
        if factor == 1:
            return
        y = BigInt()
        y.add(1)
        while factor > 1:
            if (factor % 2) == 0:
                # Even
                self.bigMul(self)
                factor //= 2
            else:
                # Odd
                y.bigMul(self)
                self.bigMul(self)
                factor = (factor - 1) // 2
        self.bigMul(y)
    def skimOne(self, i):
        # Propagate the carry upward until every affected cell is one digit.
        while i < len(self.number) and self.number[i] > 9:
            old = self.number[i]
            self.number[i] = old % 10
            head = (old - (old % 10)) // 10
            if i + 1 < len(self.number):
                self.number[i + 1] += head
            else:
                self.number.append(head)
            i += 1
    def bigAdd(self, bigInt):
        if bigInt is self:
            bigInt = self.clone()  # guard: iterate over a snapshot when adding to self
        if len(self.number) < len(bigInt.number):
            self.number += [0] * (len(bigInt.number) - len(self.number))
        for (i, v) in enumerate(bigInt.number):
            self.number[i] += v
            self.skimOne(i)
# TODO: Bottleneck for smartpow is here!
# self.skim()
def bigMul(self, bigFactor):
# We can take the internal list because we construct a new list
# (in total)
# So even if we multiply with self this should still work out
total = BigInt()
# For each factor...
for (i, v) in enumerate(bigFactor.number):
# If v is zero, skip it, because then the order should be skipped
if v == 0:
continue
# Make a copy of the original
digitSelf = self.clone()
# Shift it the amount of places of the current digit
digitSelf.shift(i)
# If v is more than zero, multiply
if v > 1:
digitSelf.mul(v)
total.bigAdd(digitSelf)
# Set the end result
self.number = total.number
def getNumberArray(self):
return list(self.number)
def toString(self):
result = ""
for i in self.number:
result += str(i)
return result[::-1]
def clone(self):
newSelf = BigInt()
newSelf.number = self.getNumberArray()
return newSelf
def shift(self, n):
if n == 0:
return
if n < 0:
raise NotImplementedError("Negative shifts are not yet implemented")
oldLen = len(self.number)
self.number += [0] * n
for i in range(len(self.number) - 1, n - 1, -1):
self.number[i] = self.number[i - n]
self.number[i - n] = 0
    def take(self, n):
        if n == 0:
            self.number = [0]
            return
        if n < 0:
            raise ValueError("Negative takes are not supported")
        self.number = self.number[:n]
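def _bigint_demo():
    # Minimal usage sketch, mirroring the self-tests at the bottom of this
    # file: 2**10 computed via iterative exponentiation by squaring.
    b = BigInt()
    b.add(2)
    b.smartPowIt(10)
    assert b.toString() == str(2 ** 10)
    return b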
def generatePrimeTable(lim):
numbers = [True] * lim
numbers[0] = False
numbers[1] = False
currNum = 4
while currNum < lim:
numbers[currNum] = False
currNum += 2
prime = 3
while prime < lim:
if numbers[prime]:
currNum = prime
currNum += prime
while currNum < lim:
numbers[currNum] = False
currNum += prime
prime += 2
return numbers
class NumberJuggler:
def __init__(self, lim):
print("Generating prime lookup table")
self.primeTable = generatePrimeTable(lim)
print("Generating prime list")
self.primeList = [i for i, b in enumerate(self.primeTable) if b]
print("Finished initializing number juggler")
def getFactorization(self, num):
factorisation = collections.defaultdict(int)
countdown = num
for prime in self.primeList:
if countdown == 1: break
while countdown % prime == 0:
countdown = countdown // prime
factorisation[prime] += 1
return factorisation
def getFactors(self, num):
factorisation = self.getFactorization(num)
result = []
for k, v in factorisation.items():
result.extend([k] * v)
return result
def getPrimeFactors(self, num):
return list(self.getFactorization(num).keys())
def getDivisors(self, num):
if num == 1: return [1]
factorization = self.getFactorization(num)
factors = list(factorization.keys())
factorCounts = [0] * len(factors)
factorCounts[0] = 1
run = True
divisors = [1]
while run:
            divisor = 1
for j in range(0, len(factors)):
if factorCounts[j] != 0:
divisor *= factors[j]**factorCounts[j]
if divisor != num:
divisors.append(divisor)
factorCounts[0] += 1
for j in range(0, len(factorCounts)):
if factorCounts[j] == factorization[factors[j]] + 1:
if j == len(factorCounts) - 1:
run = False
break
else:
                        factorCounts[j] = 0
factorCounts[j + 1] += 1
return divisors
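def _number_juggler_demo():
    # Usage sketch with a modest sieve limit; sorted() avoids relying on
    # dict ordering inside getFactorization.
    nj = NumberJuggler(100)
    assert sorted(nj.getFactors(12)) == [2, 2, 3]
    assert sorted(nj.getDivisors(12)) == [1, 2, 3, 4, 6]
    return nj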
def mergeSort(array):
if len(array) <= 1:
return array[:]
else:
mid = len(array) // 2
left = mergeSort(array[:mid])
right = mergeSort(array[mid:])
result = []
while len(left) > 0 and len(right) > 0:
if left[0] < right[0]:
result.append(left.pop(0))
else:
result.append(right.pop(0))
if len(left) > 0:
result.extend(left)
elif len(right) > 0:
result.extend(right)
return result
def removeDupsOrdered(array):
prev = array[0]
result = [prev]
for e in array[1:]:
if e != prev:
prev = e
result.append(e)
return result
def simplifyFraction(nj, numerator, denominator):
if denominator == 0:
return (0, 0)
if numerator == 0:
return (0, 0)
numFactors = nj.getFactors(numerator)
denFactors = nj.getFactors(denominator)
i = 0
while i < len(denFactors):
currFactor = denFactors[i]
if currFactor in denFactors and currFactor in numFactors:
denFactors.remove(currFactor)
numFactors.remove(currFactor)
else:
i += 1
newNumerator = 1
for f in numFactors:
newNumerator *= f
newDenominator = 1
for f in denFactors:
newDenominator *= f
return (newNumerator, newDenominator)
def isPandigital(num):
numStr = str(num)
seen = [False] * len(numStr)
total = 0
for c in numStr:
cInt = int(c)
if cInt < 1 or cInt > len(numStr):
total = -1
break
if not seen[cInt - 1]:
total += 1
seen[cInt - 1] = True
else:
total = -1
break
return total == len(numStr)
def generatePermutations(elements):
allPerms = []
if len(elements) == 1:
return [elements]
for i in range(0, len(elements)):
lessElements = list(elements)
del lessElements[i]
partialPerms = generatePermutations(lessElements)
for perm in partialPerms:
allPerms.append([elements[i]] + perm)
return allPerms
if __name__ == "__main__":
print("Unit testing!")
print("Tests for BigInt")
bi = BigInt()
bi.add(123)
assert(bi.toString() == "123")
bi.shift(3)
assert(bi.toString() == "123000")
bi = BigInt()
bi.add(50)
bi.mul(5)
# print(bi.toString())
assert(bi.toString() == "250")
ba = BigInt()
ba.add(200)
bb = BigInt()
bb.add(12345)
bb.bigAdd(ba)
assert(bb.toString() == str(12345 + 200))
ba = BigInt()
ba.add(12345)
bb = BigInt()
bb.add(67890)
bb.bigMul(ba)
assert(bb.toString() == str(12345 * 67890))
ba = BigInt()
ba.add(3)
bb = BigInt()
bb.add(3)
ba.bigMul(bb)
ba.bigMul(bb)
assert(ba.toString() == "27")
bi = BigInt()
bi.add(3)
bi.pow(3)
assert(bi.toString() == "27")
bi = BigInt()
bi.add(80)
bi.pow(80)
assert(bi.toString() == str(80 ** 80))
bi = BigInt()
bi.add(3)
bi.smartPow(3)
assert(bi.toString() == "27")
bi = BigInt()
bi.add(80)
bi.smartPow(80)
assert(bi.toString() == str(80 ** 80))
bi = BigInt()
bi.add(3)
bi.smartPowIt(3)
assert(bi.toString() == "27")
bi = BigInt()
bi.add(80)
bi.smartPowIt(80)
assert(bi.toString() == str(80 ** 80))
| mit | 5,004,734,104,982,722,000 | 24.096639 | 80 | 0.495089 | false |
projectgus/yamdwe | mediawiki.py | 1 | 7964 | """
Methods for importing mediawiki pages and images via the simplemediawiki
wrapper to the MediaWiki API.
Copyright (C) 2014 Angus Gratton
Licensed under New BSD License as described in the file LICENSE.
"""
from __future__ import print_function, unicode_literals, absolute_import, division
import simplemediawiki, simplejson
import re
from pprint import pprint
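def example_full_import(api_url, wiki_user=None, wiki_pass=""):
    """
    Usage sketch (hypothetical endpoint and credentials) for the Importer
    defined below: pull pages, images and users in one pass. This hammers
    the API, so get the wiki operator's permission first.
    """
    importer = Importer(api_url, wiki_user=wiki_user, wiki_pass=wiki_pass)
    return (importer.get_all_pages(),
            importer.get_all_images(),
            importer.get_all_users())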
class Importer(object):
def __init__(self, api_url, http_user=None, http_pass="", wiki_user=None, wiki_pass="", wiki_domain=None, verbose=False):
self.verbose = verbose
if wiki_domain:
self.mw = simplemediawiki.MediaWiki(api_url, http_user=http_user, http_password=http_pass, domain=wiki_domain)
else:
self.mw = simplemediawiki.MediaWiki(api_url, http_user=http_user, http_password=http_pass)
# login if necessary
if wiki_user is not None:
print("Logging in as %s..." % wiki_user)
if not self.mw.login(wiki_user, wiki_pass):
raise RuntimeError("Mediawiki login failed. Wrong credentials?")
# version check
try:
self.need_rawcontinue = False
generator = "".join(self._query({'meta' : 'siteinfo'}, ['general', 'generator']))
version = [ int(x) for x in re.search(r'[0-9.]+', generator).group(0).split(".") ] # list of [ 1, 19, 1 ] or similar
if version[0] == 1 and version[1] < 13:
raise RuntimeError("Mediawiki version is too old. Yamdwe requires 1.13 or newer. This install is %s" % generator)
# check if the version is too old for the 'rawcontinue' parameter
# see https://www.mediawiki.org/wiki/API:Query#Backwards_compatibility_of_continue
self.need_rawcontinue = version[0] > 1 or (version[0] == 1 and version[1] >= 24)
print("%s meets version requirements." % generator)
except IndexError:
raise RuntimeError("Failed to read Mediawiki siteinfo/generator. Is version older than 1.8? Yamdwe requires 1.13 or greater.")
def verbose_print(self, msg):
if self.verbose:
print(msg)
def get_all_pages(self):
"""
Slurp all pages down from the mediawiki instance, together with all revisions including content.
WARNING: Hits API hard, don't do this without knowledge/permission of wiki operator!!
"""
query = {'list' : 'allpages'}
print("Getting list of pages...")
pages = self._query(query, [ 'allpages' ])
self.verbose_print("Got %d pages." % len(pages))
print("Query page revisions (this may take a while)...")
for page in pages:
self.verbose_print("Querying revisions for pageid %s (%s)..." % (page['pageid'], page['title']))
page["revisions"] = self._get_revisions(page)
self.verbose_print("Got %d revisions." % len(page["revisions"]))
return pages
def _get_revisions(self, page):
pageid = page['pageid']
query = { 'prop' : 'revisions',
'pageids' : pageid,
'rvprop' : 'timestamp|user|comment|content',
'rvlimit' : '5',
}
revisions = self._query(query, [ 'pages', str(pageid), 'revisions' ])
return revisions
def get_all_images(self):
"""
        Slurp all images down from the mediawiki instance (latest revision of each image only).
WARNING: Hits API hard, don't do this without knowledge/permission of wiki operator!!
"""
query = {'list' : 'allimages'}
return self._query(query, [ 'allimages' ])
def get_all_users(self):
"""
Slurp down all usernames from the mediawiki instance.
"""
query = {'list' : 'allusers'}
return self._query(query, [ 'allusers' ])
def _query(self, args, path_to_result):
"""
Make a Mediawiki API query that results a list of results,
handle the possibility of making a paginated query using query-continue
"""
query = { 'action' : 'query' }
if self.need_rawcontinue:
query["rawcontinue"] = ""
query.update(args)
result = []
continuations = 0
while True:
try:
response = self.mw.call(query)
except simplejson.scanner.JSONDecodeError as e:
if e.pos == 0:
if not self.verbose:
raise RuntimeError("Mediawiki gave us back a non-JSON response. You may need to double-check the Mediawiki API URL you are providing (it usually ends in api.php), and also your Mediawiki permissions. To see the response content, pass the --verbose flag to yamdwe.")
else:
raise RuntimeError("Mediawiki gave us back a non-JSON response:\n\n\nInvalid response follows (%d bytes):\n%s\n\n(End of content)\nFailed to parse. You may need to double-check the Mediawiki API URL you are providing (it usually ends in api.php), and also your Mediawiki permissions." % (len(e.doc), e.doc.decode("utf-8")))
raise
# fish around in the response for our actual data (location depends on query)
try:
inner = response['query']
for key in path_to_result:
inner = inner[key]
except KeyError:
raise RuntimeError("Mediawiki query '%s' returned unexpected response '%s' after %d continuations" % (args, response, continuations))
result += inner
# if there's a warning print it out (shouldn't need a debug flag since this is of interest to any user)
if 'warnings' in response:
for warnkey in response['warnings']:
print("WARNING: %s function throws the warning %s" % (warnkey, response['warnings'][warnkey]['*']))
# if there's a continuation, find the new arguments and follow them
try:
query.update(response['query-continue'][path_to_result[-1]])
continuations += 1
except KeyError:
return result
def get_file_namespaces(self):
"""
Return a tuple. First entry is the name used by default for the file namespace (which dokuwiki will also use.)
Second entry is a list of all aliases used for that namespace, and aliases used for the 'media' namespace.
"""
query = { 'action' : 'query', 'meta' : 'siteinfo', 'siprop' : 'namespaces|namespacealiases' }
result = self.mw.call(query)['query']
namespaces = result['namespaces'].values()
aliases = result.get('namespacealiases', {})
file_namespace = {'*' : 'Files', 'canonical' : 'File'}
media_namespace = {'*' : 'Media', 'canonical' : 'Media'}
# search for the File namespace
for namespace in namespaces:
if namespace.get('canonical', None) == 'File':
file_namespace = namespace
elif namespace.get('canonical', None) == 'Media':
media_namespace = namespace
# alias list starts with the file & media namespace canonical values, and the media "real" value
aliases_result = [ file_namespace['canonical'], media_namespace['canonical'], media_namespace['*'] ]
# look for any aliases by searching the file namespace id, add to the list
ids = [ file_namespace.get('id', None), media_namespace.get('id', None) ]
for alias in aliases:
if alias['id'] in ids:
aliases_result.append(alias['*'])
return file_namespace['*'], aliases_result
def get_main_pagetitle(self):
"""
Return the title of the main Mediawiki page
"""
query = { 'action' : 'query', 'meta' : 'siteinfo', 'siprop' : 'general' }
result = self.mw.call(query)['query']
return result['general'].get("mainpage", "Main")
| bsd-3-clause | 6,800,407,379,736,151,000 | 48.465839 | 345 | 0.596936 | false |
alerta/python-alerta-client | alertaclient/top.py | 1 | 5079 | import curses
import sys
import time
from curses import wrapper
from datetime import datetime
from alertaclient.models.alert import Alert
from alertaclient.utils import DateTime
class Screen:
ALIGN_RIGHT = 'R'
ALIGN_CENTRE = 'C'
def __init__(self, client, timezone):
self.client = client
self.timezone = timezone
self.screen = None
self.lines = None
self.cols = None
def run(self):
wrapper(self.main)
def main(self, stdscr):
self.screen = stdscr
curses.use_default_colors()
curses.init_pair(1, curses.COLOR_RED, -1)
curses.init_pair(2, curses.COLOR_MAGENTA, -1)
curses.init_pair(3, curses.COLOR_YELLOW, -1)
curses.init_pair(4, curses.COLOR_BLUE, -1)
curses.init_pair(5, curses.COLOR_CYAN, -1)
curses.init_pair(6, curses.COLOR_GREEN, -1)
curses.init_pair(7, curses.COLOR_WHITE, curses.COLOR_BLACK)
COLOR_RED = curses.color_pair(1)
COLOR_MAGENTA = curses.color_pair(2)
COLOR_YELLOW = curses.color_pair(3)
COLOR_BLUE = curses.color_pair(4)
COLOR_CYAN = curses.color_pair(5)
COLOR_GREEN = curses.color_pair(6)
COLOR_BLACK = curses.color_pair(7)
self.SEVERITY_MAP = {
'security': ['Sec', COLOR_BLACK],
'critical': ['Crit', COLOR_RED],
'major': ['Majr', COLOR_MAGENTA],
'minor': ['Minr', COLOR_YELLOW],
'warning': ['Warn', COLOR_BLUE],
'indeterminate': ['Ind ', COLOR_CYAN],
'cleared': ['Clr', COLOR_GREEN],
'normal': ['Norm', COLOR_GREEN],
'ok': ['Ok', COLOR_GREEN],
'informational': ['Info', COLOR_GREEN],
'debug': ['Dbug', COLOR_BLACK],
'trace': ['Trce', COLOR_BLACK],
'unknown': ['Unkn', COLOR_BLACK]
}
self.screen.keypad(1)
self.screen.nodelay(1)
while True:
self.update()
event = self.screen.getch()
if 0 < event < 256:
self._key_press(chr(event))
else:
if event == curses.KEY_RESIZE:
self.update()
time.sleep(2)
def update(self):
self.lines, self.cols = self.screen.getmaxyx()
self.screen.clear()
now = datetime.utcnow()
status = self.client.mgmt_status()
version = status['version']
# draw header
self._addstr(0, 0, self.client.endpoint, curses.A_BOLD)
self._addstr(0, 'C', f'alerta {version}', curses.A_BOLD)
self._addstr(0, 'R', '{}'.format(now.strftime('%H:%M:%S %d/%m/%y')), curses.A_BOLD)
# TODO - draw bars
# draw alerts
text_width = self.cols - 95 if self.cols >= 95 else 0
self._addstr(2, 1, 'Sev. Time Dupl. Customer Env. Service Resource Group Event'
+ ' Value Text' + ' ' * (text_width - 4), curses.A_UNDERLINE)
def short_sev(severity):
return self.SEVERITY_MAP.get(severity, self.SEVERITY_MAP['unknown'])[0]
def color(severity):
return self.SEVERITY_MAP.get(severity, self.SEVERITY_MAP['unknown'])[1]
r = self.client.http.get('/alerts')
alerts = [Alert.parse(a) for a in r['alerts']]
last_time = DateTime.parse(r['lastTime'])
for i, alert in enumerate(alerts):
row = i + 3
if row >= self.lines - 2: # leave room for footer
break
text = '{:<4} {} {:5d} {:8.8} {:<12} {:<12} {:<12.12} {:5.5} {:<12.12} {:<5.5} {:.{width}}'.format(
short_sev(alert.severity),
DateTime.localtime(alert.last_receive_time, self.timezone, fmt='%H:%M:%S'),
alert.duplicate_count,
alert.customer or '-',
alert.environment,
','.join(alert.service),
alert.resource,
alert.group,
alert.event,
alert.value or 'n/a',
alert.text,
width=text_width
)
# XXX - needed to support python2 and python3
if not isinstance(text, str):
text = text.encode('ascii', errors='replace')
self._addstr(row, 1, text, color(alert.severity))
# draw footer
self._addstr(self.lines - 1, 0, 'Last Update: {}'.format(last_time.strftime('%H:%M:%S')), curses.A_BOLD)
self._addstr(self.lines - 1, 'C', '{} - {}'.format(r['status'], r.get('message', 'no errors')), curses.A_BOLD)
self._addstr(self.lines - 1, 'R', 'Count: {}'.format(r['total']), curses.A_BOLD)
self.screen.refresh()
def _addstr(self, y, x, line, attr=0):
if x == self.ALIGN_RIGHT:
x = self.cols - len(line) - 1
if x == self.ALIGN_CENTRE:
x = int((self.cols / 2) - len(line) / 2)
self.screen.addstr(y, x, line, attr)
def _key_press(self, key):
if key in 'qQ':
sys.exit(0)
| apache-2.0 | 1,558,769,885,894,008,600 | 33.087248 | 118 | 0.525103 | false |
ikinsella/squall | flaskapp/squall/models.py | 1 | 32913 | import os
import shutil
import json
import yaml
import zipfile
import re
from flask import (render_template, current_app)
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import (UserMixin, AnonymousUserMixin)
from werkzeug.security import (generate_password_hash, check_password_hash)
from sqlalchemy.ext.hybrid import hybrid_property
from flask.ext.pymongo import PyMongo
db = SQLAlchemy()
mongo = PyMongo()
""" Tables For Many To Many Relationships """
""" TODO : Multi-User
users_tags = db.Table(
'users_tags',
db.Column('user_id', db.Integer, db.ForeignKey('user.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')))
"""
algorithms_experiments = db.Table(
'algorithms_experiments',
db.Column('experiment_id', db.Integer, db.ForeignKey('experiment.id')),
db.Column('algorithm_id', db.Integer, db.ForeignKey('algorithm.id')))
algorithms_tags = db.Table(
'algorithms_tags',
db.Column('algorithm_id', db.Integer, db.ForeignKey('algorithm.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')))
implementations_tags = db.Table(
'implementations_tags',
db.Column('implementation_id', db.Integer,
db.ForeignKey('implementation.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')))
collections_experiments = db.Table(
'collections_experiments',
db.Column('experiment_id', db.Integer, db.ForeignKey('experiment.id')),
db.Column('collection_id', db.Integer,
db.ForeignKey('data_collection.id')))
collections_tags = db.Table(
'collections_tags',
db.Column('data_collection_id', db.Integer,
db.ForeignKey('data_collection.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')))
data_sets_tags = db.Table(
'data_sets_tags',
db.Column('data_set_id', db.Integer, db.ForeignKey('data_set.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')))
experiments_tags = db.Table(
'experiments_tags',
db.Column('experiment_id', db.Integer, db.ForeignKey('experiment.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')))
batches_tags = db.Table(
'batches_tags',
db.Column('batch_id', db.Integer, db.ForeignKey('batch.id')),
db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')))
""" Entities """
class User(db.Model, UserMixin):
"""Represents a single User who has access to the application"""
# Fields
id = db.Column(db.Integer(), primary_key=True)
_launch_directory = db.Column(db.String(128))
username = db.Column(db.String(64))
    password = db.Column(db.String(128))
""" TODO: Multi-User
_tags = db.relationship('Tag', secondary=users_tags,
backref=db.backref('users',
lazy='dynamic'))
"""
# relationships
""" TODO: Multi-User
algorithms = db.relationship()
datacollections = db.relationship()
experiments = db.relationship()
batches = db.relationship()
tags = db.relationship()
"""
def __init__(self, username, launch_directory, password):
self.username = username
self._launch_directory = launch_directory
self.set_password(password)
def __repr__(self):
return '<User {username}>'.format(username=self.username)
def set_password(self, password):
self.password = generate_password_hash(password)
def check_password(self, value):
return check_password_hash(self.password, value)
def get_id(self):
return unicode(self.id)
@property
def is_authenticated(self):
if isinstance(self, AnonymousUserMixin):
return False
else:
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
if isinstance(self, AnonymousUserMixin):
return True
else:
return False
@hybrid_property
def launch_directory(self):
return self._launch_directory
@launch_directory.setter
def launch_directory(self, value):
self._launch_directory = value
class Tag(db.Model):
"""Represents a tag which is used to add query-able meta data
to experiments, batches, data collections, data sets, algorithms,
and implementations. A User defines tags in a view and each collected
job is associated with all the tags contained in its hierarchy."""
# Fields
id = db.Column(db.Integer, primary_key=True)
_name = db.Column(db.String(64), index=True, unique=True)
# Relationships
""" TODO: Multi-User
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
"""
def __init__(self, name):
super(Tag, self).__init__()
self._name = name
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
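def get_or_create_tag(name):
    """Convenience sketch (assumes an app context and configured session):
    fetch a Tag by name, creating and committing it if missing."""
    tag = Tag.query.filter(Tag.name == name).first()
    if tag is None:
        tag = Tag(name)
        db.session.add(tag)
        db.session.commit()
    return tag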
class Algorithm(db.Model):
""" Entity representing a single algorithm used in a an experiment """
# Fields
id = db.Column(db.Integer, primary_key=True)
_name = db.Column(db.String(64), index=True, unique=True)
_description = db.Column(db.String(512), index=False, unique=False)
# Relationships
""" TODO: Multi-User
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
"""
_tags = db.relationship('Tag', secondary=algorithms_tags,
backref=db.backref('algorithms', lazy='dynamic'))
_implementations = db.relationship('Implementation', backref='algorithm',
lazy='dynamic')
def __init__(self, name, description, tags):
self._name = name
self._description = description
self._tags = tags
@hybrid_property
def serialize(self):
return {'Name': self.name,
'Tags': [tag.name for tag in self.tags]}
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@hybrid_property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@hybrid_property
def tags(self):
return self._tags
@tags.setter
def tags(self, value):
self._tags.append(value)
@hybrid_property
def implementations(self):
return self._implementations
@implementations.setter
def implementations(self, value):
self._implementations.append(value)
class Implementation(db.Model):
"""Represents a single implementation of an algorithm"""
# Fields
id = db.Column(db.Integer, primary_key=True)
_name = db.Column(db.String(64), index=True, unique=True)
_description = db.Column(db.String(512), index=False, unique=False)
_setup_scripts = db.Column(db.PickleType(), index=False, unique=False)
_executable = db.Column(db.String(64), index=False, unique=False)
# Relationships
_algorithm_id = db.Column(db.Integer, db.ForeignKey('algorithm.id'))
""" TODO: Multi-User
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
"""
_tags = db.relationship('Tag', secondary=implementations_tags,
backref=db.backref('implementations',
lazy='dynamic'))
_urls = db.relationship('URL', backref='implementation', lazy='select')
_batches = db.relationship('Batch', backref='implementation',
lazy='dynamic')
""" TODO: Parameter Validation
_arguments = db.relationship('Argument',
backref='implementation',
lazy='dynamic')
"""
def __init__(self,
algorithm_id,
name,
description,
tags,
urls,
setup_scripts,
executable):
self._algorithm_id = algorithm_id
self._name = name
self._description = description
self._tags = tags
self._urls = [URL(url, implementation_id=self.id) for url in urls]
self._setup_scripts = setup_scripts
self._executable = executable
# self._arguments = arguments # TODO: Parameter Validation
@hybrid_property
def serialize(self):
return {'Name': self.name,
'Tags': [tag.name for tag in self.tags]}
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@hybrid_property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@hybrid_property
def tags(self):
return self._tags
@tags.setter
def tags(self, value):
self._tags.append(value)
@hybrid_property
def urls(self):
return [url.url for url in self._urls]
@urls.setter
def urls(self, value):
self._urls.append(URL(value, implementation_id=self.id))
@hybrid_property
def setup_scripts(self):
return self._setup_scripts
@setup_scripts.setter
def setup_scripts(self, value):
self._setup_scripts.append(value)
@hybrid_property
def executable(self):
return self._executable
@executable.setter
def executable(self, value):
self._executable = value
@hybrid_property
def batches(self):
return self._batches
@batches.setter
def batches(self, value):
self._batches.append(value)
""" TODO: Parameter Validation
@hybrid_property
def arguments(self):
return self._arguments
@arguments.setter
def arguments(self, value):
self._arguments.append(value)
"""
class DataCollection(db.Model):
"""Represents a collection of datasets derived from a common source"""
# Fields
id = db.Column(db.Integer, primary_key=True)
_name = db.Column(db.String(64), index=True, unique=True)
_description = db.Column(db.String(512), index=False, unique=False)
# Relationships
""" TODO: Moving To Multi-User
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
"""
_tags = db.relationship('Tag', secondary=collections_tags,
backref=db.backref('data_collections',
lazy='dynamic'))
_data_sets = db.relationship('DataSet', backref='data_collection',
lazy='dynamic')
def __init__(self, name, description, tags):
super(DataCollection, self).__init__()
self._name = name
self._description = description
self._tags = tags
@hybrid_property
def serialize(self):
return {'Name': self.name,
'Tags': [tag.name for tag in self.tags]}
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@hybrid_property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@hybrid_property
def tags(self):
return self._tags
@tags.setter
def tags(self, value):
self._tags.append(value)
@hybrid_property
def data_sets(self):
return self._data_sets
@data_sets.setter
def data_sets(self, value):
self._data_sets.append(value)
class DataSet(db.Model):
"""Represents a single dataset belonging to a data collection"""
# Fields
id = db.Column(db.Integer, primary_key=True)
_name = db.Column(db.String(64), index=True, unique=True)
_description = db.Column(db.String(512), index=False, unique=False)
# Relationships
""" TODO: Moving To Multi-User
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
"""
data_collection_id = db.Column(
db.Integer, db.ForeignKey('data_collection.id'))
_tags = db.relationship('Tag', secondary=data_sets_tags,
backref=db.backref('data_sets', lazy='dynamic'))
_urls = db.relationship('URL', backref='data_set', lazy='select')
_batches = db.relationship('Batch', backref='data_set', lazy='dynamic')
def __init__(self, data_collection_id, name, description, tags, urls):
super(DataSet, self).__init__()
self.data_collection_id = data_collection_id
self._name = name
self._description = description
self._tags = tags
self._urls = [URL(url, data_set_id=self.id) for url in urls]
@hybrid_property
def serialize(self):
return {'Name': self.name,
'Tags': [tag.name for tag in self.tags]}
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@hybrid_property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@hybrid_property
def tags(self):
return self._tags
@tags.setter
def tags(self, value):
self._tags.append(value)
@hybrid_property
def urls(self):
return [url.url for url in self._urls]
@urls.setter
def urls(self, value):
self._urls.append(URL(value, data_set_id=self.id))
@hybrid_property
def batches(self):
return self._batches
@batches.setter
def batches(self, value):
self._batches.append(value)
class Experiment(db.Model):
"""Represents an experiment composed of data collections and algorithms"""
# Fields
id = db.Column(db.Integer, primary_key=True)
_name = db.Column(db.String(64), index=True, unique=True)
_description = db.Column(db.String(512), index=False, unique=False)
# Relationships
"""
Moving To Multi-User TODO:
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
"""
_tags = db.relationship('Tag', secondary=experiments_tags,
backref=db.backref('experiments', lazy='dynamic'))
_algorithms = db.relationship('Algorithm',
secondary=algorithms_experiments,
backref=db.backref('experiments',
lazy='dynamic'))
_collections = db.relationship('DataCollection',
secondary=collections_experiments,
backref=db.backref('experiments',
lazy='dynamic'))
_batches = db.relationship('Batch', backref='experiment', lazy='dynamic')
def __init__(self, name, description, tags, algorithms, collections):
super(Experiment, self).__init__()
self._name = name
self._description = description
self._tags = tags
self._algorithms = algorithms
self._collections = collections
@hybrid_property
def serialize(self):
return {'Name': self.name,
'Tags': [tag.name for tag in self.tags]}
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@hybrid_property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@hybrid_property
def tags(self):
return self._tags
@tags.setter
def tags(self, value):
self._tags.append(value)
@hybrid_property
def algorithms(self):
return self._algorithms
@algorithms.setter
def algorithms(self, value):
self._algorithms.append(value)
@hybrid_property
def collections(self):
return self._collections
@collections.setter
def collections(self, value):
self._collections.append(value)
@hybrid_property
def batches(self):
return self._batches
@batches.setter
def batches(self, value):
self._batches.append(value)
class Batch(db.Model):
"""Represents a batch of jobs to be deployed on HTCondor"""
# Fields
id = db.Column(db.Integer, primary_key=True)
_name = db.Column(db.String(64), index=True, unique=True)
_description = db.Column(db.String(512), index=False, unique=False)
_params = db.Column(db.PickleType(), index=False, unique=False)
_memory = db.Column(db.Integer, index=False, unique=False)
_disk = db.Column(db.Integer, index=False, unique=False)
_flock = db.Column(db.Boolean(), index=False)
_glide = db.Column(db.Boolean(), index=False)
_arguments = db.Column(db.PickleType(), index=False, unique=False)
_kwargs = db.Column(db.PickleType(), index=False, unique=False)
_sweep = db.Column(db.String(64), index=False, unique=False)
_wrapper = db.Column(db.String(64), index=False, unique=False)
_submit_file = db.Column(db.String(64), index=False, unique=False)
_params_file = db.Column(db.String(64), index=False, unique=False)
_share_dir = db.Column(db.String(64), index=False, unique=False)
_results_dir = db.Column(db.String(64), index=False, unique=False)
_pre = db.Column(db.String(64), index=False, unique=False)
_post = db.Column(db.String(64), index=False, unique=False)
_job_pre = db.Column(db.String(64), index=False, unique=False)
_job_post = db.Column(db.String(64), index=False, unique=False)
_completed = db.Column(db.Boolean(), index=False)
# Relationships
""" TODO: Multi-User
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
"""
experiment_id = db.Column(db.Integer, db.ForeignKey('experiment.id'))
data_set_id = db.Column(db.Integer, db.ForeignKey('data_set.id'))
implementation_id = db.Column(db.Integer,
db.ForeignKey('implementation.id'))
_tags = db.relationship('Tag', secondary=batches_tags,
backref=db.backref('batches', lazy='dynamic'))
_jobs = db.relationship('Job', backref='batch', lazy='select')
def __init__(self,
experiment_id,
data_set_id,
implementation_id,
name,
description,
tags,
params,
memory,
disk,
flock,
glide,
arguments=None,
keyword_arguments=None,
sweep='sweep.dag',
wrapper='wrapper.sh',
submit_file='process.sub',
params_file='params.json',
share_directory='share',
results_directory='results',
pre_script=None,
job_pre_script=None,
post_script='batch_post.py',
job_post_script='job_post.py'):
super(Batch, self).__init__()
# Relationships
self.experiment_id = experiment_id
self.data_set_id = data_set_id
self.implementation_id = implementation_id
# Mandatory
self._name = name
self._description = description
self._tags = tags
self._params = params
self._jobs = [Job(batch_id=self.id, uid=uid, params=job_params)
for uid, job_params in enumerate(params)]
self._memory = memory
self._disk = disk
self._flock = flock
self._glide = glide
# Optional Arguments
self._pre = pre_script
self._post = post_script
self._job_pre = job_pre_script
self._job_post = job_post_script
self._arguments = arguments
self._kwargs = keyword_arguments
self._sweep = sweep
self._wrapper = wrapper
self._submit_file = submit_file
self._params_file = params_file
self._share_dir = share_directory
self._results_dir = results_directory
self._completed = False
    def package(self):  # TODO: replace the zip if it already exists
"""Packages the files to run a batch of jobs into a directory"""
rootdir = makedir(
os.path.join(current_app.config['STAGING_AREA'], self.safe_name))
makedir(os.path.join(rootdir, self.results_dir))
sharedir = makedir(os.path.join(rootdir, self.share_dir))
self.write_template('sweep', os.path.join(rootdir, self.sweep))
self.write_params(rootdir)
self.write_template('wrapper', os.path.join(sharedir, self.wrapper))
for job in self.jobs: # Setup Job Directories
job.package(rootdir)
# self.write_template('batch_pre', os.path.join(sharedir, self.pre))
self.write_template('batch_post', os.path.join(sharedir, self.post))
# self.write_template('job_pre', os.path.join(sharedir, self.job_pre))
self.write_template('job_post', os.path.join(sharedir, self.job_post))
self.write_template('hack', os.path.join(sharedir, 'hack.sub'))
shutil.copy(os.path.join(current_app.config['STAGING_AREA'], 'hack'),
sharedir) # Copy fragile hack executable to share_dir
zipfile = rootdir + '.zip'
make_zipfile(zipfile, rootdir)
shutil.rmtree(rootdir) # clean up for next package
return os.path.basename(zipfile)
def write_params(self, rootdir):
""" Writes a dictionary to a json file """
filename = os.path.join(rootdir, self.params_file)
with open(filename, 'w') as writefile:
json.dump(self.params, writefile, sort_keys=True, indent=4)
def write_template(self, template, filename):
""" Renders a batch level tempalte and writes it to filename """
if filename:
with open(filename, 'w') as writefile:
writefile.write(render_template(template, batch=self))
@hybrid_property
def serialize(self):
return {'Name': self.name,
'Tags': [tag.name for tag in self.tags]}
@hybrid_property
def mongoize(self):
imp = Implementation.query.filter_by(id=self.implementation_id).first()
exp = Experiment.query.filter_by(id=self.experiment_id).first()
ds = DataSet.query.filter_by(id=self.data_set_id).first()
dc = DataCollection.query.filter_by(id=ds.data_collection_id).first()
alg = Algorithm.query.filter_by(id=imp._algorithm_id).first()
return {'Batch': self.serialize,
'Tags': [tag.name for tag in self.tags], # TODO: conglomerate
'Experiment': exp.serialize,
'DataSet': ds.serialize,
'DataCollection': dc.serialize,
'Algorithm': alg.serialize,
'Implementation': imp.serialize}
@hybrid_property
def safe_name(self):
"""Remove non-word characters & replace whitespace with underscore"""
return re.sub(r"\s+", '_', re.sub(r"[^\w\s]", '', self.name))
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@hybrid_property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@hybrid_property
def tags(self):
return self._tags
@tags.setter
def tags(self, value):
self._tags.append(value)
@hybrid_property
def jobs(self):
return self._jobs
@jobs.setter
def jobs(self, value):
self._jobs.append(value)
@hybrid_property
def params(self):
return self._params
@params.setter
def params(self, value):
        self._params = yaml.safe_load(value)  # TODO: Validate
@hybrid_property
def memory(self):
return self._memory
@memory.setter
def memory(self, value):
self._memory = value
@hybrid_property
def disk(self):
return self._disk
@disk.setter
def disk(self, value):
self._disk = value
@hybrid_property
def flock(self):
return self._flock
@flock.setter
def flock(self, value):
self._flock = value
    @hybrid_property
    def glide(self):
        return self._glide
    @glide.setter
    def glide(self, value):
        self._glide = value
@hybrid_property
def pre(self):
return self._pre
@pre.setter
def pre(self, value):
self._pre = value
@hybrid_property
def post(self):
return self._post
@post.setter
def post(self, value):
self._post = value
@hybrid_property
def job_pre(self):
return self._job_pre
@job_pre.setter
def job_pre(self, value):
self._job_pre = value
@hybrid_property
def job_post(self):
return self._job_post
@job_post.setter
def job_post(self, value):
self._job_post = value
@hybrid_property
def args(self):
return self._arguments
@args.setter
def args(self, value):
self._arguments = value
@hybrid_property
def kwargs(self):
return self._kwargs
@kwargs.setter
def kwargs(self, value):
self._kwargs = value
@hybrid_property
def sweep(self):
return self._sweep
@sweep.setter
def sweep(self, value):
self._sweep = value
@hybrid_property
def wrapper(self):
return self._wrapper
@wrapper.setter
def wrapper(self, value):
self._wrapper = value
@hybrid_property
def submit_file(self):
return self._submit_file
@submit_file.setter
def submit_file(self, value):
self._submit_file = value
@hybrid_property
def params_file(self):
return self._params_file
@params_file.setter
def params_file(self, value):
self._params_file = value
@hybrid_property
def share_dir(self):
return self._share_dir
@share_dir.setter
def share_dir(self, value):
self._share_dir = value
@hybrid_property
def results_dir(self):
return self._results_dir
@results_dir.setter
def results_dir(self, value):
self._results_dir = value
@hybrid_property
def size(self):
return len(self._jobs)
@hybrid_property
def completed(self):
return self._completed
@completed.setter
def completed(self, value):
self._completed = value
class Job(db.Model):
"""Represents a single job, belonging to a Batch"""
# Fields
id = db.Column(db.Integer, primary_key=True)
_uid = db.Column(db.Integer, index=True, unique=False)
_params = db.Column(db.PickleType(), index=True, unique=False)
# Relationships
batch_id = db.Column(db.Integer, db.ForeignKey('batch.id'))
""" TODO: Multi-User
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
"""
def __init__(self, batch_id, uid, params):
super(Job, self).__init__()
self.batch_id = batch_id
self._uid = uid
self._params = params
def package(self, rootdir):
"""Packages files to run a job into a directory in rootdir"""
jobdir = makedir(os.path.join(rootdir, self.uid))
self.write_params(jobdir)
self.write_template('process', os.path.join(jobdir, self.submit_file))
self.write_template('subdag', os.path.join(jobdir, self.subdag))
def write_params(self, jobdir):
""" Writes a dictionary to a json file """
filename = os.path.join(jobdir, self.params_file)
with open(filename, 'w') as writefile:
json.dump(self.params, writefile, sort_keys=True, indent=4)
def write_template(self, template, filename):
""" Renders a batch level tempalte and writes it to filename """
if filename:
with open(filename, 'w') as writefile:
writefile.write(render_template(template, job=self))
@hybrid_property
def uid(self):
return str(self._uid).zfill(len(str(self.batch.size-1)))
@uid.setter
def uid(self, value):
self._uid = value
@hybrid_property
def params(self):
return self._params
@params.setter
def params(self, value):
self._params = value
@hybrid_property
def params_file(self):
return self.batch.params_file
@hybrid_property
def memory(self):
return self.batch.memory
@hybrid_property
def disk(self):
return self.batch.disk
@hybrid_property
def flock(self):
return self.batch.flock
@hybrid_property
def glide(self):
return self.batch.glide
@hybrid_property
def args(self):
return self.batch.args
@hybrid_property
def kwargs(self):
return self.batch.kwargs
@hybrid_property
def wrapper(self):
return self.batch.wrapper
@hybrid_property
def submit_file(self):
return self.batch.submit_file
@hybrid_property
def subdag(self):
return self.uid + '.dag'
@hybrid_property
def share_dir(self):
return self.batch.share_dir
@hybrid_property
def results_dir(self):
return self.batch.results_dir
@hybrid_property
def pre(self):
return self.batch.job_pre
@hybrid_property
def post(self):
return self.batch.job_post
@hybrid_property
def batch_name(self):
return self.batch.safe_name
@hybrid_property
def tags(self):
return self.batch.tags
""" TODO: Parameter Validation
class Argument(db.Model):
Entity representing a single valid argument
belonging to an implementation of an algorithm
# Fields
id = db.Column(db.Integer,
primary_key=True)
_name = db.Column(db.String(64),
index=True,
unique=True)
_data_type = db.Column(db.Enum('int', 'float', 'string', 'enum'),
index=True,
unique=False)
_optional = db.Column(db.Boolean(),
index=True)
# Relationships
implementation_id = db.Column(db.Integer,
db.ForeignKey('implementation.id'))
def __init__(self, implementation_id, name, data_type, optional):
super(Argument, self).__init__()
self.implementation_id = implementation_id
self._name = name
self._data_type = data_type
self._optional = optional
@hybrid_property
def serialize(self):
return {'id': self.id,
'name': self.name,
'data type': self.data_type,
'optional': self.optional}
@hybrid_property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@hybrid_property
def data_type(self):
return self._data_type
@data_type.setter
def data_type(self, value):
self._data_type = value
@hybrid_property
def optional(self):
return self._optional
@optional.setter
def optional(self, value):
self._optional = value
"""
class URL(db.Model):
# Fields
id = db.Column(db.Integer, primary_key=True)
_url = db.Column(db.String(124), index=False, unique=False)
# Relationships
data_set_id = db.Column(db.Integer, db.ForeignKey('data_set.id'))
implementation_id = db.Column(db.Integer,
db.ForeignKey('implementation.id'))
def __init__(self, url, data_set_id=None, implementation_id=None):
self._url = url
self.data_set_id = data_set_id
self.implementation_id = implementation_id
@hybrid_property
def url(self):
return self._url
@url.setter
def url(self, value):
self._url = value
# Universal Functions
def makedir(dirname):
""" Creates a directory if it doesn't already exist """
if not os.path.isdir(dirname):
os.makedirs(dirname)
return dirname
def make_zipfile(output_filename, source_dir):
"""http://stackoverflow.com/questions/1855095/"""
relroot = os.path.abspath(os.path.join(source_dir, os.pardir))
with zipfile.ZipFile(output_filename, "w",
zipfile.ZIP_DEFLATED, True) as zip:
for root, dirs, files in os.walk(source_dir):
# add directory (needed for empty dirs)
zip.write(root, os.path.relpath(root, relroot))
for file in files:
filename = os.path.join(root, file)
os.chmod(filename, 0755)
if os.path.isfile(filename): # regular files only
arcname = os.path.join(os.path.relpath(root, relroot), file)
zip.write(filename, arcname)
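# Usage sketch (illustrative only; not part of the original module, and the
# paths below are hypothetical). Archive members are stored relative to the
# parent of source_dir, so the zip unpacks into a single top-level directory:
#
#     staging = makedir('/tmp/staging/my_batch')  # created only if missing
#     open(os.path.join(staging, 'params.json'), 'w').close()
#     make_zipfile('/tmp/staging/my_batch.zip', staging)
#     # the archive now contains 'my_batch/params.json'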
| bsd-3-clause | -587,099,930,290,131,300 | 28.360393 | 80 | 0.599824 | false |
digwanderlust/pants | src/python/pants/base/target_addressable.py | 1 | 2168 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from six import string_types
from pants.base.addressable import Addressable
from pants.base.deprecated import deprecated
from pants.base.exceptions import TargetDefinitionException
class TargetAddressable(Addressable):
@classmethod
def get_target_type(cls):
    raise NotImplementedError
def __init__(self, *args, **kwargs):
self.target_type = self.get_target_type()
if 'name' not in kwargs:
raise Addressable.AddressableInitError(
'name is a required parameter to all Targets specified within a BUILD file.'
' Target type was: {target_type}.'
.format(target_type=self.target_type))
if args:
raise Addressable.AddressableInitError(
'All arguments passed to Targets within BUILD files must use explicit keyword syntax.'
' Target type was: {target_type}.'
' Arguments passed were: {args}'
.format(target_type=self.target_type, args=args))
self.kwargs = kwargs
self.name = kwargs['name']
self.dependency_specs = self.kwargs.pop('dependencies', [])
for dep_spec in self.dependency_specs:
if not isinstance(dep_spec, string_types):
msg = ('dependencies passed to Target constructors must be strings. {dep_spec} is not'
' a string. Target type was: {target_type}.'
.format(target_type=self.target_type, dep_spec=dep_spec))
raise TargetDefinitionException(target=self, msg=msg)
@property
def addressable_name(self):
return self.name
def __str__(self):
format_str = 'TargetAddressable(target_type={target_type}, name={name}, **kwargs=...)'
return format_str.format(target_type=self.target_type, name=self.name)
def __repr__(self):
format_str = 'TargetAddressable(target_type={target_type}, kwargs={kwargs})'
return format_str.format(target_type=self.target_type, kwargs=self.kwargs)
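# Illustrative sketch (not part of the pants source): a minimal concrete
# subclass showing the intended usage. ``fake_target`` is a hypothetical
# target type used purely for demonstration.
#
#     class FakeTargetAddressable(TargetAddressable):
#         @classmethod
#         def get_target_type(cls):
#             return 'fake_target'
#
#     addressable = FakeTargetAddressable(name='lib', dependencies=['a/b:c'])
#     addressable.addressable_name   # -> 'lib'
#     addressable.dependency_specs   # -> ['a/b:c']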
| apache-2.0 | 9,125,159,257,805,332,000 | 37.035088 | 95 | 0.690959 | false |
cryptapus/electrum-myr | plugins/ledger/qt.py | 1 | 2181 | import threading
from PyQt4.Qt import (QDialog, QInputDialog, QLineEdit,
QVBoxLayout, QLabel, SIGNAL)
import PyQt4.QtCore as QtCore
from electrum.i18n import _
from .ledger import LedgerPlugin
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from electrum_gui.qt.util import *
from btchip.btchipPersoWizard import StartBTChipPersoDialog
class Plugin(LedgerPlugin, QtPluginBase):
icon_unpaired = ":icons/ledger_unpaired.png"
icon_paired = ":icons/ledger.png"
def create_handler(self, window):
return Ledger_Handler(window)
class Ledger_Handler(QtHandlerBase):
setup_signal = pyqtSignal()
auth_signal = pyqtSignal(object)
def __init__(self, win):
super(Ledger_Handler, self).__init__(win, 'Ledger')
self.setup_signal.connect(self.setup_dialog)
self.auth_signal.connect(self.auth_dialog)
def word_dialog(self, msg):
response = QInputDialog.getText(self.top_level_window(), "Ledger Wallet Authentication", msg, QLineEdit.Password)
if not response[1]:
self.word = None
else:
self.word = str(response[0])
self.done.set()
def message_dialog(self, msg):
self.clear_dialog()
self.dialog = dialog = WindowModalDialog(self.top_level_window(), _("Ledger Status"))
l = QLabel(msg)
vbox = QVBoxLayout(dialog)
vbox.addWidget(l)
dialog.show()
def auth_dialog(self, data):
try:
from .auth2fa import LedgerAuthDialog
except ImportError as e:
            self.message_dialog(str(e))
return
dialog = LedgerAuthDialog(self, data)
dialog.exec_()
self.word = dialog.pin
self.done.set()
def get_auth(self, data):
self.done.clear()
self.auth_signal.emit(data)
self.done.wait()
return self.word
def get_setup(self):
self.done.clear()
self.setup_signal.emit()
self.done.wait()
return
def setup_dialog(self):
dialog = StartBTChipPersoDialog()
dialog.exec_()
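# Illustrative note (not from the original plugin): get_auth() is intended to
# be called from a non-GUI (worker) thread. It clears ``self.done`` (a
# threading.Event provided by QtHandlerBase), emits a Qt signal so the dialog
# runs on the GUI thread, then blocks on ``self.done.wait()`` until
# auth_dialog() stores the entered PIN in ``self.word`` and sets the event.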
| mit | 870,677,349,423,049,600 | 27.324675 | 121 | 0.607519 | false |
beni55/sentry | src/sentry/interfaces.py | 1 | 35445 | """
sentry.interfaces
~~~~~~~~~~~~~~~~~
Interfaces provide an abstraction for how structured data should be
validated and rendered.
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import itertools
import urlparse
import warnings
from pygments import highlight
# from pygments.lexers import get_lexer_for_filename, TextLexer, ClassNotFound
from pygments.lexers import TextLexer
from pygments.formatters import HtmlFormatter
from django.http import QueryDict
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from sentry.app import env
from sentry.models import UserOption
from sentry.utils.strings import strip
from sentry.web.helpers import render_to_string
_Exception = Exception
def unserialize(klass, data):
value = object.__new__(klass)
value.__setstate__(data)
return value
def is_url(filename):
return filename.startswith(('http:', 'https:', 'file:'))
def get_context(lineno, context_line, pre_context=None, post_context=None, filename=None,
format=False):
lineno = int(lineno)
context = []
if pre_context:
start_lineno = lineno - len(pre_context)
at_lineno = start_lineno
for line in pre_context:
context.append((at_lineno, line))
at_lineno += 1
else:
start_lineno = lineno
at_lineno = lineno
if start_lineno < 0:
start_lineno = 0
context.append((at_lineno, context_line))
at_lineno += 1
if post_context:
for line in post_context:
context.append((at_lineno, line))
at_lineno += 1
# HACK:
if filename and is_url(filename) and '.' not in filename.rsplit('/', 1)[-1]:
filename = 'index.html'
if format:
# try:
# lexer = get_lexer_for_filename(filename)
# except ClassNotFound:
# lexer = TextLexer()
lexer = TextLexer()
formatter = HtmlFormatter()
        def format_line(line):
            if not line:
                return mark_safe('<pre></pre>')
            return mark_safe(highlight(line, lexer, formatter))
        context = tuple((n, format_line(l)) for n, l in context)
return context
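# Illustrative example (not part of the original module): with format=False,
# get_context returns (lineno, line) pairs centred on ``context_line``.
#
#     get_context(lineno=3, context_line='c = a + b',
#                 pre_context=['a = 1', 'b = 2'], post_context=['print(c)'])
#     # -> [(1, 'a = 1'), (2, 'b = 2'), (3, 'c = a + b'), (4, 'print(c)')]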
def is_newest_frame_first(event):
newest_first = event.platform not in ('python', None)
if env.request and env.request.user.is_authenticated():
display = UserOption.objects.get_value(
user=env.request.user,
project=None,
key='stacktrace_order',
default=None,
)
if display == '1':
newest_first = False
elif display == '2':
newest_first = True
return newest_first
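# Illustrative behaviour (assuming no authenticated user overrides the
# 'stacktrace_order' option): Python events, and events with no platform,
# default to oldest-frame-first (returns False); every other platform,
# e.g. 'javascript', defaults to newest-frame-first (returns True).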
class Interface(object):
"""
An interface is a structured representation of data, which may
render differently than the default ``extra`` metadata in an event.
"""
score = 0
display_score = None
def __init__(self, **kwargs):
self.attrs = kwargs.keys()
self.__dict__.update(kwargs)
def __eq__(self, other):
if type(self) != type(other):
return False
return self.serialize() == other.serialize()
def __setstate__(self, data):
kwargs = self.unserialize(data)
self.attrs = kwargs.keys()
self.__dict__.update(kwargs)
def __getstate__(self):
return self.serialize()
def validate(self):
pass
def unserialize(self, data):
return data
def serialize(self):
return dict((k, self.__dict__[k]) for k in self.attrs)
def get_composite_hash(self, interfaces):
return self.get_hash()
def get_hash(self):
return []
def to_html(self, event, is_public=False, **kwargs):
return ''
def to_string(self, event, is_public=False, **kwargs):
return ''
def get_slug(self):
return type(self).__name__.lower()
def get_title(self):
return _(type(self).__name__)
def get_display_score(self):
return self.display_score or self.score
def get_score(self):
return self.score
def get_search_context(self, event):
"""
Returns a dictionary describing the data that should be indexed
by the search engine. Several fields are accepted:
- text: a list of text items to index as part of the generic query
- filters: a map of fields which are used for precise matching
"""
return {
# 'text': ['...'],
# 'filters': {
# 'field": ['...'],
# },
}
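# Illustrative sketch (not part of the original module): the minimal surface a
# concrete interface implements on top of this base class. ``Pet`` is a
# hypothetical interface used only to show the contract.
#
#     class Pet(Interface):
#         attrs = ('name',)
#
#         def __init__(self, name, **kwargs):
#             self.name = name
#
#         def get_hash(self):
#             return [self.name]
#
#     Pet(name='rex').serialize()  # -> {'name': 'rex'}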
class Message(Interface):
"""
A standard message consisting of a ``message`` arg, and an optional
``params`` arg for formatting.
If your message cannot be parameterized, then the message interface
will serve no benefit.
    - ``message`` must be no more than 5000 characters in length.
>>> {
>>> "message": "My raw message with interpreted strings like %s",
>>> "params": ["this"]
>>> }
"""
attrs = ('message', 'params')
def __init__(self, message, params=(), **kwargs):
self.message = message
self.params = params
def validate(self):
assert len(self.message) <= 5000
def serialize(self):
return {
'message': self.message,
'params': self.params,
}
def get_hash(self):
return [self.message]
def get_search_context(self, event):
if isinstance(self.params, (list, tuple)):
params = list(self.params)
elif isinstance(self.params, dict):
params = self.params.values()
else:
params = []
return {
'text': [self.message] + params,
}
class Query(Interface):
"""
A SQL query with an optional string describing the SQL driver, ``engine``.
>>> {
>>> "query": "SELECT 1"
>>> "engine": "psycopg2"
>>> }
"""
attrs = ('query', 'engine')
def __init__(self, query, engine=None, **kwargs):
self.query = query
self.engine = engine
def get_hash(self):
return [self.query]
def serialize(self):
return {
'query': self.query,
'engine': self.engine,
}
def get_search_context(self, event):
return {
'text': [self.query],
}
class Frame(object):
attrs = ('abs_path', 'filename', 'lineno', 'colno', 'in_app', 'context_line',
'pre_context', 'post_context', 'vars', 'module', 'function', 'data')
def __init__(self, abs_path=None, filename=None, lineno=None, colno=None,
in_app=None, context_line=None, pre_context=(),
post_context=(), vars=None, module=None, function=None,
data=None, **kwargs):
self.abs_path = abs_path or filename
self.filename = filename or abs_path
if self.is_url():
urlparts = urlparse.urlparse(self.abs_path)
if urlparts.path:
self.filename = urlparts.path
self.module = module
self.function = function
if lineno is not None:
self.lineno = int(lineno)
else:
self.lineno = None
if colno is not None:
self.colno = int(colno)
else:
self.colno = None
self.in_app = in_app
self.context_line = context_line
self.pre_context = pre_context
self.post_context = post_context
self.vars = vars or {}
self.data = data or {}
def __getitem__(self, key):
warnings.warn('Frame[key] is deprecated. Use Frame.key instead.', DeprecationWarning)
return getattr(self, key)
def is_url(self):
if not self.abs_path:
return False
return is_url(self.abs_path)
def is_valid(self):
if self.in_app not in (False, True, None):
return False
if type(self.vars) != dict:
return False
if type(self.data) != dict:
return False
return self.filename or self.function or self.module
def get_hash(self):
output = []
if self.module:
output.append(self.module)
elif self.filename and not self.is_url():
output.append(self.filename)
if self.context_line is not None:
output.append(self.context_line)
elif not output:
# If we were unable to achieve any context at this point
# (likely due to a bad JavaScript error) we should just
# bail on recording this frame
return output
elif self.function:
output.append(self.function)
elif self.lineno is not None:
output.append(self.lineno)
return output
def get_context(self, event, is_public=False, **kwargs):
if (self.context_line and self.lineno is not None
and (self.pre_context or self.post_context)):
context = get_context(
lineno=self.lineno,
context_line=self.context_line,
pre_context=self.pre_context,
post_context=self.post_context,
filename=self.filename or self.module,
format=True,
)
start_lineno = context[0][0]
else:
context = []
start_lineno = None
frame_data = {
'abs_path': self.abs_path,
'filename': self.filename,
'module': self.module,
'function': self.function,
'start_lineno': start_lineno,
'lineno': self.lineno,
'context': context,
'context_line': self.context_line,
'in_app': self.in_app,
'is_url': self.is_url(),
}
if not is_public:
frame_data['vars'] = self.vars or {}
if event.platform == 'javascript' and self.data:
frame_data.update({
'sourcemap': self.data['sourcemap'].rsplit('/', 1)[-1],
'sourcemap_url': urlparse.urljoin(self.abs_path, self.data['sourcemap']),
'orig_function': self.data['orig_function'],
'orig_filename': self.data['orig_filename'],
'orig_lineno': self.data['orig_lineno'],
'orig_colno': self.data['orig_colno'],
})
return frame_data
def to_string(self, event):
if event.platform is not None:
choices = [event.platform]
else:
choices = []
choices.append('default')
templates = [
'sentry/partial/frames/%s.txt' % choice
for choice in choices
]
return render_to_string(templates, {
'abs_path': self.abs_path,
'filename': self.filename,
'function': self.function,
'module': self.module,
'lineno': self.lineno,
'colno': self.colno,
'context_line': self.context_line,
}).strip('\n')
class Stacktrace(Interface):
"""
A stacktrace contains a list of frames, each with various bits (most optional)
describing the context of that frame. Frames should be sorted from oldest
to newest.
The stacktrace contains one element, ``frames``, which is a list of hashes. Each
hash must contain **at least** the ``filename`` attribute. The rest of the values
are optional, but recommended.
The list of frames should be ordered by the oldest call first.
Each frame must contain the following attributes:
``filename``
The relative filepath to the call
OR
``function``
The name of the function being called
OR
``module``
Platform-specific module path (e.g. sentry.interfaces.Stacktrace)
The following additional attributes are supported:
``lineno``
The line number of the call
``colno``
The column number of the call
``abs_path``
The absolute path to filename
``context_line``
Source code in filename at lineno
``pre_context``
A list of source code lines before context_line (in order) -- usually [lineno - 5:lineno]
``post_context``
A list of source code lines after context_line (in order) -- usually [lineno + 1:lineno + 5]
``in_app``
        Signifies whether this frame is related to the execution of the relevant code in this stacktrace. For example,
        the frames that power the framework's web server are probably not relevant; however, calls into the
        framework's library once your app starts handling the request likely are.
``vars``
A mapping of variables which were available within this frame (usually context-locals).
>>> {
>>> "frames": [{
>>> "abs_path": "/real/file/name.py"
>>> "filename": "file/name.py",
>>> "function": "myfunction",
>>> "vars": {
>>> "key": "value"
>>> },
>>> "pre_context": [
>>> "line1",
>>> "line2"
>>> ],
>>> "context_line": "line3",
>>> "lineno": 3,
>>> "in_app": true,
>>> "post_context": [
>>> "line4",
>>> "line5"
>>> ],
>>> }]
>>> }
.. note:: This interface can be passed as the 'stacktrace' key in addition
to the full interface path.
"""
attrs = ('frames',)
score = 1000
def __init__(self, frames, **kwargs):
self.frames = [Frame(**f) for f in frames]
def __iter__(self):
return iter(self.frames)
def validate(self):
for frame in self.frames:
# ensure we've got the correct required values
assert frame.is_valid()
def serialize(self):
frames = []
for f in self.frames:
# compatibility with old serialization
if isinstance(f, Frame):
frames.append(vars(f))
else:
frames.append(f)
return {
'frames': frames,
}
def has_app_frames(self):
return any(f.in_app is not None for f in self.frames)
def unserialize(self, data):
data['frames'] = [Frame(**f) for f in data.pop('frames', [])]
return data
def get_composite_hash(self, interfaces):
output = self.get_hash()
if 'sentry.interfaces.Exception' in interfaces:
exc = interfaces['sentry.interfaces.Exception'][0]
if exc.type:
output.append(exc.type)
elif not output:
output = exc.get_hash()
return output
def get_hash(self):
output = []
for frame in self.frames:
output.extend(frame.get_hash())
return output
def get_context(self, event, is_public=False, newest_first=None,
with_stacktrace=True, **kwargs):
system_frames = 0
frames = []
for frame in self.frames:
frames.append(frame.get_context(event=event, is_public=is_public))
if not frame.in_app:
system_frames += 1
if len(frames) == system_frames:
system_frames = 0
# if theres no system frames, pretend they're all part of the app
if not system_frames:
for frame in frames:
frame['in_app'] = True
if newest_first is None:
newest_first = is_newest_frame_first(event)
if newest_first:
frames = frames[::-1]
context = {
'is_public': is_public,
'newest_first': newest_first,
'system_frames': system_frames,
'event': event,
'frames': frames,
'stack_id': 'stacktrace_1',
}
if with_stacktrace:
context['stacktrace'] = self.get_traceback(event, newest_first=newest_first)
return context
def to_html(self, event, **kwargs):
context = self.get_context(
event=event,
**kwargs
)
return render_to_string('sentry/partial/interfaces/stacktrace.html', context)
def to_string(self, event, is_public=False, **kwargs):
return self.get_stacktrace(event, system_frames=False, max_frames=5)
def get_stacktrace(self, event, system_frames=True, newest_first=None, max_frames=None):
if newest_first is None:
newest_first = is_newest_frame_first(event)
result = []
if newest_first:
result.append(_('Stacktrace (most recent call first):'))
else:
result.append(_('Stacktrace (most recent call last):'))
result.append('')
frames = self.frames
num_frames = len(frames)
if not system_frames:
frames = [f for f in frames if f.in_app is not False]
if not frames:
frames = self.frames
if newest_first:
frames = frames[::-1]
if max_frames:
visible_frames = max_frames
if newest_first:
start, stop = None, max_frames
else:
start, stop = -max_frames, None
else:
visible_frames = len(frames)
start, stop = None, None
if not newest_first and visible_frames < num_frames:
result.extend(('(%d additional frame(s) were not displayed)' % (num_frames - visible_frames,), '...'))
for frame in frames[start:stop]:
result.append(frame.to_string(event))
if newest_first and visible_frames < num_frames:
result.extend(('...', '(%d additional frame(s) were not displayed)' % (num_frames - visible_frames,)))
return '\n'.join(result)
def get_traceback(self, event, newest_first=None):
result = [
event.message, '',
self.get_stacktrace(event, newest_first=newest_first),
]
return '\n'.join(result)
def get_search_context(self, event):
return {
'text': list(itertools.chain(*[[f.filename, f.function, f.context_line] for f in self.frames])),
}
class SingleException(Interface):
"""
A standard exception with a mandatory ``value`` argument, and optional
    ``type`` and ``module`` arguments describing the exception class type and
module namespace.
You can also optionally bind a stacktrace interface to an exception. The
spec is identical to ``sentry.interfaces.Stacktrace``.
>>> {
>>> "type": "ValueError",
>>> "value": "My exception value",
>>> "module": "__builtins__"
>>> "stacktrace": {
>>> # see sentry.interfaces.Stacktrace
>>> }
>>> }
"""
attrs = ('value', 'type', 'module', 'stacktrace')
score = 900
display_score = 1200
def __init__(self, value, type=None, module=None, stacktrace=None, **kwargs):
# A human readable value for the exception
self.value = value
# The exception type name (e.g. TypeError)
self.type = type
# Optional module of the exception type (e.g. __builtin__)
self.module = module
# Optional bound stacktrace interface
if stacktrace:
self.stacktrace = Stacktrace(**stacktrace)
else:
self.stacktrace = None
def validate(self):
if self.stacktrace:
return self.stacktrace.validate()
def serialize(self):
if self.stacktrace:
stacktrace = self.stacktrace.serialize()
else:
stacktrace = None
return {
'type': strip(self.type) or None,
'value': strip(self.value) or None,
'module': strip(self.module) or None,
'stacktrace': stacktrace,
}
def unserialize(self, data):
if data.get('stacktrace'):
data['stacktrace'] = unserialize(Stacktrace, data['stacktrace'])
else:
data['stacktrace'] = None
return data
def get_hash(self):
output = None
if self.stacktrace:
output = self.stacktrace.get_hash()
if output and self.type:
output.append(self.type)
if not output:
output = filter(bool, [self.type, self.value])
return output
def get_context(self, event, is_public=False, **kwargs):
last_frame = None
interface = event.interfaces.get('sentry.interfaces.Stacktrace')
if interface is not None and interface.frames:
last_frame = interface.frames[-1]
e_module = strip(self.module)
e_type = strip(self.type) or 'Exception'
e_value = strip(self.value)
if self.module:
fullname = '%s.%s' % (e_module, e_type)
else:
fullname = e_type
return {
'is_public': is_public,
'event': event,
'exception_value': e_value or e_type or '<empty value>',
'exception_type': e_type,
'exception_module': e_module,
'fullname': fullname,
'last_frame': last_frame
}
def get_search_context(self, event):
return {
'text': [self.value, self.type, self.module]
}
class Exception(Interface):
"""
An exception consists of a list of values. In most cases, this list
contains a single exception, with an optional stacktrace interface.
Each exception has a mandatory ``value`` argument and optional ``type`` and
``module`` arguments describing the exception class type and module
namespace.
You can also optionally bind a stacktrace interface to an exception. The
spec is identical to ``sentry.interfaces.Stacktrace``.
>>> [{
>>> "type": "ValueError",
>>> "value": "My exception value",
>>> "module": "__builtins__"
>>> "stacktrace": {
>>> # see sentry.interfaces.Stacktrace
>>> }
>>> }]
Values should be sent oldest to newest, this includes both the stacktrace
and the exception itself.
.. note:: This interface can be passed as the 'exception' key in addition
to the full interface path.
"""
attrs = ('values',)
score = 2000
def __init__(self, *args, **kwargs):
if 'values' in kwargs:
values = kwargs['values']
elif not kwargs and len(args) == 1 and isinstance(args[0], (list, tuple)):
values = args[0]
else:
values = [kwargs]
self.values = [SingleException(**e) for e in values]
def __getitem__(self, key):
return self.values[key]
def __iter__(self):
return iter(self.values)
def __len__(self):
return len(self.values)
def validate(self):
for exception in self.values:
# ensure we've got the correct required values
exception.validate()
def serialize(self):
return {
'values': [e.serialize() for e in self.values]
}
def unserialize(self, data):
if 'values' not in data:
data = {'values': [data]}
data['values'] = [unserialize(SingleException, v) for v in data['values']]
return data
def get_hash(self):
return self.values[0].get_hash()
def get_composite_hash(self, interfaces):
return self.values[0].get_composite_hash(interfaces)
def get_context(self, event, is_public=False, **kwargs):
newest_first = is_newest_frame_first(event)
context_kwargs = {
'event': event,
'is_public': is_public,
'newest_first': newest_first,
}
exceptions = []
last = len(self.values) - 1
for num, e in enumerate(self.values):
context = e.get_context(**context_kwargs)
if e.stacktrace:
context['stacktrace'] = e.stacktrace.get_context(
with_stacktrace=False, **context_kwargs)
else:
context['stacktrace'] = {}
context['stack_id'] = 'exception_%d' % (num,)
context['is_root'] = num == last
exceptions.append(context)
if newest_first:
exceptions.reverse()
return {
'newest_first': newest_first,
'system_frames': sum(e['stacktrace'].get('system_frames', 0) for e in exceptions),
'exceptions': exceptions,
'stacktrace': self.get_stacktrace(event, newest_first=newest_first)
}
def to_html(self, event, **kwargs):
if not self.values:
return ''
if len(self.values) == 1 and not self.values[0].stacktrace:
exception = self.values[0]
context = exception.get_context(event=event, **kwargs)
return render_to_string('sentry/partial/interfaces/exception.html', context)
context = self.get_context(event=event, **kwargs)
return render_to_string('sentry/partial/interfaces/chained_exception.html', context)
def to_string(self, event, is_public=False, **kwargs):
return self.get_stacktrace(event, system_frames=False, max_frames=5)
def get_search_context(self, event):
return self.values[0].get_search_context(event)
def get_stacktrace(self, *args, **kwargs):
exc = self.values[0]
if exc.stacktrace:
return exc.stacktrace.get_stacktrace(*args, **kwargs)
return ''
class Http(Interface):
"""
The Request information is stored in the Http interface. Two arguments
are required: ``url`` and ``method``.
The ``env`` variable is a compounded dictionary of HTTP headers as well
as environment information passed from the webserver. Sentry will explicitly
look for ``REMOTE_ADDR`` in ``env`` for things which require an IP address.
The ``data`` variable should only contain the request body (not the query
string). It can either be a dictionary (for standard HTTP requests) or a
raw request body.
>>> {
>>> "url": "http://absolute.uri/foo",
>>> "method": "POST",
>>> "data": {
>>> "foo": "bar"
>>> },
>>> "query_string": "hello=world",
>>> "cookies": "foo=bar",
>>> "headers": {
>>> "Content-Type": "text/html"
>>> },
>>> "env": {
>>> "REMOTE_ADDR": "192.168.0.1"
>>> }
>>> }
.. note:: This interface can be passed as the 'request' key in addition
to the full interface path.
"""
attrs = ('url', 'method', 'data', 'query_string', 'cookies', 'headers',
'env')
display_score = 1000
score = 800
# methods as defined by http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html + PATCH
METHODS = ('GET', 'POST', 'PUT', 'OPTIONS', 'HEAD', 'DELETE', 'TRACE', 'CONNECT', 'PATCH')
def __init__(self, url, method=None, data=None, query_string=None, cookies=None, headers=None, env=None, **kwargs):
if data is None:
data = {}
if method:
method = method.upper()
urlparts = urlparse.urlsplit(url)
if not query_string:
# define querystring from url
query_string = urlparts.query
elif query_string.startswith('?'):
# remove '?' prefix
query_string = query_string[1:]
self.url = '%s://%s%s' % (urlparts.scheme, urlparts.netloc, urlparts.path)
self.method = method
self.data = data
self.query_string = query_string
if cookies:
self.cookies = cookies
else:
self.cookies = {}
# if cookies were a string, convert to a dict
# parse_qsl will parse both acceptable formats:
# a=b&c=d
# and
# a=b; c=d
if isinstance(self.cookies, basestring):
self.cookies = dict(urlparse.parse_qsl(self.cookies, keep_blank_values=True))
# if cookies were [also] included in headers we
# strip them out
if headers and 'Cookie' in headers:
cookies = headers.pop('Cookie')
if cookies:
self.cookies = cookies
self.headers = headers or {}
self.env = env or {}
def serialize(self):
return {
'url': self.url,
'method': self.method,
'data': self.data,
'query_string': self.query_string,
'cookies': self.cookies,
'headers': self.headers,
'env': self.env,
}
def to_string(self, event, is_public=False, **kwargs):
return render_to_string('sentry/partial/interfaces/http.txt', {
'event': event,
'full_url': '?'.join(filter(bool, [self.url, self.query_string])),
'url': self.url,
'method': self.method,
'query_string': self.query_string,
})
def _to_dict(self, value):
if value is None:
value = {}
if isinstance(value, dict):
return True, value
try:
value = QueryDict(value)
except _Exception:
return False, value
else:
return True, value
def to_html(self, event, is_public=False, **kwargs):
data = self.data
headers_is_dict, headers = self._to_dict(self.headers)
# educated guess as to whether the body is normal POST data
if headers_is_dict and headers.get('Content-Type') == 'application/x-www-form-urlencoded' and '=' in data:
_, data = self._to_dict(data)
context = {
'is_public': is_public,
'event': event,
'full_url': '?'.join(filter(bool, [self.url, self.query_string])),
'url': self.url,
'method': self.method,
'data': data,
'query_string': self.query_string,
'headers': self.headers,
}
if not is_public:
# It's kind of silly we store this twice
_, cookies = self._to_dict(self.cookies)
context.update({
'cookies': cookies,
'env': self.env,
})
return render_to_string('sentry/partial/interfaces/http.html', context)
def get_title(self):
return _('Request')
def get_search_context(self, event):
return {
'filters': {
'url': [self.url],
}
}
class Template(Interface):
"""
A rendered template (generally used like a single frame in a stacktrace).
The attributes ``filename``, ``context_line``, and ``lineno`` are required.
>>> {
>>> "abs_path": "/real/file/name.html"
>>> "filename": "file/name.html",
>>> "pre_context": [
>>> "line1",
>>> "line2"
>>> ],
>>> "context_line": "line3",
>>> "lineno": 3,
>>> "post_context": [
>>> "line4",
>>> "line5"
>>> ],
>>> }
.. note:: This interface can be passed as the 'template' key in addition
to the full interface path.
"""
attrs = ('filename', 'context_line', 'lineno', 'pre_context', 'post_context',
'abs_path')
score = 1100
def __init__(self, filename, context_line, lineno, pre_context=None, post_context=None,
abs_path=None, **kwargs):
self.abs_path = abs_path
self.filename = filename
self.context_line = context_line
self.lineno = int(lineno)
self.pre_context = pre_context
self.post_context = post_context
def serialize(self):
return {
'abs_path': self.abs_path,
'filename': self.filename,
'context_line': self.context_line,
'lineno': self.lineno,
'pre_context': self.pre_context,
'post_context': self.post_context,
}
def get_hash(self):
return [self.filename, self.context_line]
def to_string(self, event, is_public=False, **kwargs):
context = get_context(
lineno=self.lineno,
context_line=self.context_line,
pre_context=self.pre_context,
post_context=self.post_context,
filename=self.filename,
format=False,
)
result = [
'Stacktrace (most recent call last):', '',
self.get_traceback(event, context)
]
return '\n'.join(result)
def to_html(self, event, is_public=False, **kwargs):
context = get_context(
lineno=self.lineno,
context_line=self.context_line,
pre_context=self.pre_context,
post_context=self.post_context,
filename=self.filename,
format=True,
)
return render_to_string('sentry/partial/interfaces/template.html', {
'event': event,
'abs_path': self.abs_path,
'filename': self.filename,
'lineno': int(self.lineno),
'start_lineno': context[0][0],
'context': context,
'template': self.get_traceback(event, context),
'is_public': is_public,
})
def get_traceback(self, event, context):
result = [
event.message, '',
'File "%s", line %s' % (self.filename, self.lineno), '',
]
result.extend([n[1].strip('\n') for n in context])
return '\n'.join(result)
def get_search_context(self, event):
return {
'text': [self.abs_path, self.filename, self.context_line],
}
class User(Interface):
"""
An interface which describes the authenticated User for a request.
All data is arbitrary and optional other than the ``id``
field which should be a string representing the user's unique identifier.
>>> {
>>> "id": "unique_id",
>>> "username": "my_user",
>>> "email": "[email protected]"
>>> }
"""
attrs = ('id', 'email', 'username', 'data')
def __init__(self, id=None, email=None, username=None, **kwargs):
self.id = id
self.email = email
self.username = username
self.data = kwargs
def serialize(self):
# XXX: legacy -- delete
if hasattr(self, 'is_authenticated'):
self.data['is_authenticated'] = self.is_authenticated
return {
'id': self.id,
'username': self.username,
'email': self.email,
'data': self.data,
}
def get_hash(self):
return []
def to_html(self, event, is_public=False, **kwargs):
if is_public:
return ''
return render_to_string('sentry/partial/interfaces/user.html', {
'is_public': is_public,
'event': event,
'user_id': self.id,
'user_username': self.username,
'user_email': self.email,
'user_data': self.data,
})
def get_search_context(self, event):
tokens = filter(bool, [self.id, self.username, self.email])
if not tokens:
return {}
return {
'text': tokens
}
| bsd-3-clause | 8,498,770,027,748,643,000 | 29.424893 | 119 | 0.551136 | false |
blag/django-watchman | tests/test_views.py | 1 | 13325 | # -*- coding: utf-8 -*-
"""
test_django-watchman
--------------------
Tests for `django-watchman` views module.
"""
from __future__ import unicode_literals
import json
try:
from importlib import reload
except ImportError: # Python < 3
pass
import sys
import unittest
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core import mail
from django.test import TestCase as DjangoTestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from mock import patch
from watchman import checks, views
PYTHON_VERSION = sys.version_info[0]
if django.VERSION >= (1, 7):
# Initialize Django
django.setup()
# Silence MIDDLEWARE_CLASSES warning as this is not an actual Django project
settings.SILENCED_SYSTEM_CHECKS = ['1_7.W001']
def reload_settings():
# Reload settings - and all dependent modules - from scratch
reload(sys.modules['watchman.settings'])
reload(sys.modules['watchman.decorators'])
reload(sys.modules['watchman.views'])
class TestWatchman(unittest.TestCase):
def setUp(self):
# Ensure that every test executes with separate settings
reload_settings()
def test_response_content_type_json(self):
request = RequestFactory().get('/')
response = views.status(request)
self.assertEqual(response['Content-Type'], 'application/json')
def test_response_contains_expected_checks(self):
expected_checks = ['caches', 'databases', 'storage', ]
request = RequestFactory().get('/')
response = views.status(request)
if PYTHON_VERSION == 2:
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
else:
content = json.loads(response.content.decode('utf-8'))
self.assertCountEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = checks._check_database('foo')
self.assertFalse(response['foo']['ok'])
self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
def test_check_cache_handles_exception(self):
if django.VERSION < (1, 7):
expected_error = "Could not find backend 'foo': Could not find backend 'foo': foo doesn't look like a module path"
else:
expected_error = "Could not find config for 'foo' in settings.CACHES"
response = checks._check_cache('foo')
self.assertFalse(response['foo']['ok'])
self.assertIn(response['foo']['error'], expected_error)
def test_response_skipped_checks(self):
expected_checks = ['caches', 'storage', ]
request = RequestFactory().get('/', data={
'skip': 'watchman.checks.databases',
})
response = views.status(request)
if PYTHON_VERSION == 2:
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
else:
content = json.loads(response.content.decode('utf-8'))
self.assertCountEqual(expected_checks, content.keys())
def test_response_is_404_for_checked_and_skipped_check(self):
# This is a bit of a weird one, basically if you explicitly include and
# skip the same check, you should get back a 404 as they cancel each
# other out
request = RequestFactory().get('/', data={
'check': 'watchman.checks.email',
'skip': 'watchman.checks.email',
})
response = views.status(request)
self.assertEqual(response.status_code, 404)
@patch('watchman.checks._check_databases')
def test_response_only_single_check(self, patched_check_databases):
patched_check_databases.return_value = []
request = RequestFactory().get('/', data={
'check': 'watchman.checks.databases',
})
response = views.status(request)
self.assertEqual(response.status_code, 200)
if PYTHON_VERSION == 2:
content = json.loads(response.content)
self.assertItemsEqual({'databases': []}, content)
else:
content = json.loads(response.content.decode('utf-8'))
self.assertCountEqual({'databases': []}, content)
def test_response_404_when_none_specified(self):
request = RequestFactory().get('/', data={
'check': '',
})
response = views.status(request)
self.assertEqual(response.status_code, 404)
if PYTHON_VERSION == 2:
content = json.loads(response.content)
self.assertItemsEqual({'message': 'No checks found', 'error': 404}, content)
else:
content = json.loads(response.content.decode('utf-8'))
self.assertCountEqual({'message': 'No checks found', 'error': 404}, content)
@override_settings(WATCHMAN_TOKEN='ABCDE')
@override_settings(WATCHMAN_AUTH_DECORATOR='watchman.decorators.token_required')
def test_login_not_required_with_get_param(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get('/', data={
'watchman-token': 'ABCDE',
})
response = views.status(request)
self.assertEqual(response.status_code, 200)
@override_settings(WATCHMAN_TOKEN='ABCDE')
@override_settings(WATCHMAN_AUTH_DECORATOR='watchman.decorators.token_required')
def test_login_not_required_with_authorization_header(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get('/', HTTP_AUTHORIZATION='WATCHMAN-TOKEN Token="ABCDE"')
response = views.status(request)
self.assertEqual(response.status_code, 200)
@override_settings(WATCHMAN_TOKEN='123-456-ABCD')
@override_settings(WATCHMAN_AUTH_DECORATOR='watchman.decorators.token_required')
def test_login_not_required_with_authorization_header_dashes_in_token(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get('/', HTTP_AUTHORIZATION='WATCHMAN-TOKEN Token="123-456-ABCD"')
response = views.status(request)
self.assertEqual(response.status_code, 200)
@override_settings(WATCHMAN_TOKEN='ABCDE')
@override_settings(WATCHMAN_AUTH_DECORATOR='watchman.decorators.token_required')
def test_login_fails_with_invalid_get_param(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get('/', data={
'watchman-token': '12345',
})
response = views.status(request)
self.assertEqual(response.status_code, 403)
@override_settings(WATCHMAN_TOKEN='ABCDE')
@override_settings(WATCHMAN_AUTH_DECORATOR='watchman.decorators.token_required')
def test_login_fails_with_invalid_authorization_header(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp(), but before self.tearDown()
reload_settings()
request = RequestFactory().get('/', HTTP_AUTHORIZATION='WATCHMAN-TOKEN Token="12345"')
response = views.status(request)
self.assertEqual(response.status_code, 403)
@override_settings(WATCHMAN_AUTH_DECORATOR='django.contrib.auth.decorators.login_required')
def test_response_when_login_required_is_redirect(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
request = RequestFactory().get('/')
request.user = AnonymousUser()
response = views.status(request)
self.assertEqual(response.status_code, 302)
@override_settings(WATCHMAN_AUTH_DECORATOR='django.contrib.auth.decorators.login_required')
def test_response_when_login_required(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
request = RequestFactory().get('/')
request.user = AnonymousUser()
# Fake logging the user in
request.user.is_authenticated = lambda: True
response = views.status(request)
self.assertEqual(response.status_code, 200)
def test_response_version_header(self):
request = RequestFactory().get('/')
response = views.status(request)
self.assertTrue(response.has_header('X-Watchman-Version'))
@patch('watchman.checks._check_databases')
@override_settings(WATCHMAN_ERROR_CODE=503)
def test_custom_error_code(self, patched_check_databases):
reload_settings()
# Fake a DB error, ensure we get our error code
patched_check_databases.return_value = [{
"foo": {
"ok": False,
"error": "Fake DB Error",
"stacktrace": "Fake DB Stack Trace",
},
}]
request = RequestFactory().get('/', data={
'check': 'watchman.checks.databases',
})
response = views.status(request)
self.assertEqual(response.status_code, 503)
@patch('watchman.checks._check_databases')
def test_default_error_code(self, patched_check_databases):
reload_settings()
# Fake a DB error, ensure we get our error code
patched_check_databases.return_value = [{
"foo": {
"ok": False,
"error": "Fake DB Error",
"stacktrace": "Fake DB Stack Trace",
},
}]
request = RequestFactory().get('/', data={
'check': 'watchman.checks.databases',
})
response = views.status(request)
self.assertEqual(response.status_code, 500)
def tearDown(self):
pass
class TestWatchmanDashboard(unittest.TestCase):
def setUp(self):
# Ensure that every test executes with separate settings
reload_settings()
def test_dashboard_response_code(self):
request = RequestFactory().get('/')
response = views.dashboard(request)
self.assertEqual(response.status_code, 200)
def test_response_version_header(self):
request = RequestFactory().get('/')
response = views.dashboard(request)
self.assertTrue(response.has_header('X-Watchman-Version'))
class TestEmailCheck(DjangoTestCase):
def setUp(self):
# Ensure that every test executes with separate settings
reload_settings()
    def test_email_with_default_recipient(self):
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_recipients = ['[email protected]']
self.assertEqual(sent_email.to, expected_recipients)
@override_settings(WATCHMAN_EMAIL_RECIPIENTS=['[email protected]'])
    def test_email_with_custom_recipient(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_recipients = ['[email protected]']
self.assertEqual(sent_email.to, expected_recipients)
@override_settings(WATCHMAN_EMAIL_RECIPIENTS=['[email protected]', '[email protected]'])
    def test_email_with_multiple_recipients(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_recipients = ['[email protected]', '[email protected]']
self.assertEqual(sent_email.to, expected_recipients)
def test_email_check_with_default_headers(self):
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_headers = {
'X-DJANGO-WATCHMAN': True,
}
self.assertEqual(sent_email.extra_headers, expected_headers)
@override_settings(WATCHMAN_EMAIL_HEADERS={'foo': 'bar'})
def test_email_check_with_custom_headers(self):
# Have to manually reload settings here because override_settings
# happens after self.setUp()
reload_settings()
checks._check_email()
# Test that one message has been sent.
self.assertEqual(len(mail.outbox), 1)
sent_email = mail.outbox[0]
expected_headers = {
'X-DJANGO-WATCHMAN': True,
'foo': 'bar',
}
self.assertEqual(sent_email.extra_headers, expected_headers)
| bsd-3-clause | 8,361,702,971,815,497,000 | 36.641243 | 126 | 0.644428 | false |
xcgspring/XSTAF | XSTAF/ui/ui_confirmDialog.py | 1 | 2148 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'confirmDialog.ui'
#
# Created: Fri Jun 05 09:54:07 2015
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_confirmDialog(object):
def setupUi(self, confirmDialog):
confirmDialog.setObjectName(_fromUtf8("confirmDialog"))
confirmDialog.resize(398, 60)
self.gridLayout = QtGui.QGridLayout(confirmDialog)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.noButton = QtGui.QPushButton(confirmDialog)
self.noButton.setObjectName(_fromUtf8("noButton"))
self.gridLayout.addWidget(self.noButton, 1, 2, 1, 1)
self.yesButton = QtGui.QPushButton(confirmDialog)
self.yesButton.setObjectName(_fromUtf8("yesButton"))
self.gridLayout.addWidget(self.yesButton, 1, 1, 1, 1)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 1, 0, 1, 1)
self.messageLabel = QtGui.QLabel(confirmDialog)
self.messageLabel.setObjectName(_fromUtf8("messageLabel"))
self.gridLayout.addWidget(self.messageLabel, 0, 0, 1, 3)
self.retranslateUi(confirmDialog)
QtCore.QMetaObject.connectSlotsByName(confirmDialog)
def retranslateUi(self, confirmDialog):
confirmDialog.setWindowTitle(_translate("confirmDialog", "Dialog", None))
self.noButton.setText(_translate("confirmDialog", "No", None))
self.yesButton.setText(_translate("confirmDialog", "Yes", None))
self.messageLabel.setText(_translate("confirmDialog", "TextLabel", None))
| apache-2.0 | -5,367,792,330,494,818,000 | 40.307692 | 102 | 0.705773 | false |
deannariddlespur/django-baker | django_baker/management/commands/bake.py | 1 | 2636 | from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ImproperlyConfigured
from django.db.models import get_app, get_models
from django.db.models.loading import get_model
from ...bakery import Baker
class Command(BaseCommand):
args = "appname:modelname,modelname2,modelname3"
help = ("Generates generic views (create, update, detail, list, and delete), urls, forms, and admin for model in an"
"app. Optionally can restrict which apps are generated on a per app basis.\n\nexample: python manage.py "
"bake bread:Sesame,Pumpkernickel donut:Glazed,Chocolate")
def handle(self, *args, **options):
ingredients = self.parse_bake_options(*args)
baker = Baker()
baker.bake(ingredients)
def parse_bake_options(self, *args):
"""
Parses command line options to determine what apps and models for those apps we should bake.
"""
apps_and_models_to_bake = {}
for arg in args:
app_and_model_names = arg.split(':')
app_label = app_and_model_names[0]
if len(app_and_model_names) == 2:
selected_model_names = app_and_model_names[1].split(",")
else:
selected_model_names = None
app, models = self.get_app_and_models(app_label, selected_model_names)
apps_and_models_to_bake[app_label] = models
return apps_and_models_to_bake
def get_app_and_models(self, app_label, model_names):
"""
Gets the app and models when given app_label and model names
"""
try:
app = get_app(app_label)
except ImproperlyConfigured:
raise CommandError("%s is ImproperlyConfigured - did you remember to add %s to settings.INSTALLED_APPS?" %
(app_label, app_label))
models = self.get_selected_models(app, app_label, model_names)
return (app, models)
def get_selected_models(self, app, app_label, model_names):
"""
Returns the model for a given app. If given model_names, returns those so long as the model names are
actually models in the given app.
"""
if model_names:
try:
print(app_label, model_names)
return [get_model(app_label, model_name) for model_name in model_names]
except:
raise CommandError("One or more of the models you entered for %s are incorrect." % app_label)
else:
return get_models(app)
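# Hypothetical helper (not part of django-baker) showing in isolation how a
# single "app:Model1,Model2" argument is split; handy for testing the parsing
# without a configured Django project.
def _split_bake_arg(arg):
    """'bread:Sesame,Pumpernickel' -> ('bread', ['Sesame', 'Pumpernickel'])"""
    app_label, _, names = arg.partition(':')
    return app_label, (names.split(',') if names else None)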
| bsd-3-clause | 8,704,876,466,907,126,000 | 42.933333 | 120 | 0.616844 | false |
noironetworks/neutron | neutron/agent/common/resource_processing_queue.py | 1 | 6194 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import datetime
from oslo_utils import timeutils
from six.moves import queue as Queue
class ResourceUpdate(object):
"""Encapsulates a resource update
An instance of this object carries the information necessary to prioritize
and process a request to update a resource.
Priority values are ordered from higher (0) to lower (>0) by the caller,
and are therefore not defined here, but must be done by the consumer.
"""
def __init__(self, id, priority,
action=None, resource=None, timestamp=None, tries=5):
self.priority = priority
self.timestamp = timestamp
if not timestamp:
self.timestamp = timeutils.utcnow()
self.id = id
self.action = action
self.resource = resource
self.tries = tries
def __lt__(self, other):
"""Implements priority among updates
Lower numerical priority always gets precedence. When comparing two
updates of the same priority then the one with the earlier timestamp
gets precedence. In the unlikely event that the timestamps are also
equal it falls back to a simple comparison of ids meaning the
precedence is essentially random.
"""
if self.priority != other.priority:
return self.priority < other.priority
if self.timestamp != other.timestamp:
return self.timestamp < other.timestamp
return self.id < other.id
def hit_retry_limit(self):
return self.tries < 0
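# Illustrative sketch (not part of the original module): ResourceUpdate
# ordering in practice. Lower priority numbers sort first; ties fall back to
# the earlier timestamp, then to the id. The resource ids are hypothetical.
def _demo_update_ordering():
    now = timeutils.utcnow()
    urgent = ResourceUpdate('router-a', priority=0, timestamp=now)
    routine = ResourceUpdate('router-b', priority=1, timestamp=now)
    assert sorted([routine, urgent])[0] is urgent
    return [u.id for u in sorted([routine, urgent])]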
class ExclusiveResourceProcessor(object):
"""Manager for access to a resource for processing
This class controls access to a resource in a non-blocking way. The first
instance to be created for a given ID is granted exclusive access to
the resource.
Other instances may be created for the same ID while the first
instance has exclusive access. If that happens then it doesn't block and
wait for access. Instead, it signals to the master instance that an update
came in with the timestamp.
This way, a thread will not block to wait for access to a resource.
Instead it effectively signals to the thread that is working on the
resource that something has changed since it started working on it.
That thread will simply finish its current iteration and then repeat.
This class keeps track of the last time that resource data was fetched and
processed. The timestamp that it keeps must be before when the data used
to process the resource last was fetched from the database. But, as close
as possible. The timestamp should not be recorded, however, until the
resource has been processed using the fetch data.
"""
_masters = {}
_resource_timestamps = {}
def __init__(self, id):
self._id = id
if id not in self._masters:
self._masters[id] = self
self._queue = []
self._master = self._masters[id]
def _i_am_master(self):
return self == self._master
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if self._i_am_master():
del self._masters[self._id]
def _get_resource_data_timestamp(self):
return self._resource_timestamps.get(self._id,
datetime.datetime.min)
def fetched_and_processed(self, timestamp):
"""Records the timestamp after it is used to update the resource"""
new_timestamp = max(timestamp, self._get_resource_data_timestamp())
self._resource_timestamps[self._id] = new_timestamp
def queue_update(self, update):
"""Queues an update from a worker
This is the queue used to keep new updates that come in while a
resource is being processed. These updates have already bubbled to
the front of the ResourceProcessingQueue.
"""
self._master._queue.append(update)
def updates(self):
"""Processes the resource until updates stop coming
Only the master instance will process the resource. However, updates
may come in from other workers while it is in progress. This method
loops until they stop coming.
"""
if self._i_am_master():
while self._queue:
# Remove the update from the queue even if it is old.
update = self._queue.pop(0)
# Process the update only if it is fresh.
if self._get_resource_data_timestamp() < update.timestamp:
yield update
class ResourceProcessingQueue(object):
"""Manager of the queue of resources to process."""
def __init__(self):
self._queue = Queue.PriorityQueue()
def add(self, update):
update.tries -= 1
self._queue.put(update)
def each_update_to_next_resource(self):
"""Grabs the next resource from the queue and processes
This method uses a for loop to process the resource repeatedly until
updates stop bubbling to the front of the queue.
"""
next_update = self._queue.get()
with ExclusiveResourceProcessor(next_update.id) as rp:
# Queue the update whether this worker is the master or not.
rp.queue_update(next_update)
# Here, if the current worker is not the master, the call to
# rp.updates() will not yield and so this will essentially be a
# noop.
for update in rp.updates():
yield (rp, update)
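# Illustrative sketch (not part of the original module) of the consumer
# pattern: each call to each_update_to_next_resource() drains the updates for
# the single highest-priority resource, so a worker calls it in a loop.
# Resource ids here are hypothetical.
def _demo_queue_consumption():
    queue = ResourceProcessingQueue()
    queue.add(ResourceUpdate('router-a', priority=0))
    queue.add(ResourceUpdate('router-b', priority=1))
    for _ in range(2):
        for rp, update in queue.each_update_to_next_resource():
            # Record the timestamp only after the update has been processed.
            rp.fetched_and_processed(update.timestamp)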
| apache-2.0 | 4,223,049,414,664,777,700 | 36.539394 | 79 | 0.652406 | false |
open-synergy/opnsynid-partner-contact | partner_app/models/res_partner.py | 1 | 1107 | # -*- coding: utf-8 -*-
# Copyright 2017 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import models, api
class ResPartner(models.Model):
_inherit = "res.partner"
@api.multi
def button_toggle_customer(self):
for partner in self:
partner._toggle_customer()
@api.multi
def button_toggle_supplier(self):
for partner in self:
partner._toggle_supplier()
@api.multi
def _toggle_customer(self):
self.ensure_one()
criteria = [
"|",
("id", "=", self.id),
("commercial_partner_id", "=", self.id),
]
self.env["res.partner"].search(criteria).write({
"customer": not self.customer,
})
@api.multi
def _toggle_supplier(self):
self.ensure_one()
criteria = [
"|",
("id", "=", self.id),
("commercial_partner_id", "=", self.id),
]
self.env["res.partner"].search(criteria).write({
"supplier": not self.supplier,
})
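# Usage sketch (assumes an Odoo shell / server environment; not part of this
# module). The toggle is applied to the whole commercial entity:
#
#   partners = env["res.partner"].search([("name", "ilike", "acme")])
#   partners.button_toggle_customer()   # flips the `customer` flag
#   partners.button_toggle_supplier()   # flips the `supplier` flag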
| agpl-3.0 | -3,164,423,185,243,750,400 | 24.744186 | 63 | 0.528455 | false |
GroestlCoin/electrum-grs | electrum_grs/plugins/trustedcoin/trustedcoin.py | 1 | 31657 | #!/usr/bin/env python
#
# Electrum - Lightweight Bitcoin Client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import socket
import json
import base64
import time
import hashlib
from collections import defaultdict
from typing import Dict, Union
from urllib.parse import urljoin
from urllib.parse import quote
from aiohttp import ClientResponse
from electrum_grs import ecc, constants, keystore, version, bip32, bitcoin
from electrum_grs.bitcoin import TYPE_ADDRESS
from electrum_grs.bip32 import BIP32Node, xpub_type
from electrum_grs.crypto import sha256
from electrum_grs.transaction import TxOutput
from electrum_grs.mnemonic import Mnemonic, seed_type, is_any_2fa_seed_type
from electrum_grs.wallet import Multisig_Wallet, Deterministic_Wallet
from electrum_grs.i18n import _
from electrum_grs.plugin import BasePlugin, hook
from electrum_grs.util import NotEnoughFunds, UserFacingException
from electrum_grs.storage import STO_EV_USER_PW
from electrum_grs.network import Network
from electrum_grs.base_wizard import BaseWizard
from electrum_grs.logging import Logger
def get_signing_xpub(xtype):
if not constants.net.TESTNET:
xpub = "xpub661MyMwAqRbcGnMkaTx2594P9EDuiEqMq25PM2aeG6UmwzaohgA6uDmNsvSUV8ubqwA3Wpste1hg69XHgjUuCD5HLcEp2QPzyV1HMrPppsL"
else:
xpub = "tpubD6NzVbkrYhZ4XdmyJQcCPjQfg6RXVUzGFhPjZ7uvRC8JLcS7Hw1i7UTpyhp9grHpak4TyK2hzBJrujDVLXQ6qB5tNpVx9rC6ixijUXadnmY"
if xtype not in ('standard', 'p2wsh'):
raise NotImplementedError('xtype: {}'.format(xtype))
if xtype == 'standard':
return xpub
node = BIP32Node.from_xkey(xpub)
return node._replace(xtype=xtype).to_xpub()
def get_billing_xpub():
if constants.net.TESTNET:
return "tpubD6NzVbkrYhZ4X11EJFTJujsYbUmVASAYY7gXsEt4sL97AMBdypiH1E9ZVTpdXXEy3Kj9Eqd1UkxdGtvDt5z23DKsh6211CfNJo8bLLyem5r"
else:
return "xpub6DTBdtBB8qUmH5c77v8qVGVoYk7WjJNpGvutqjLasNG1mbux6KsojaLrYf2sRhXAVU4NaFuHhbD9SvVPRt1MB1MaMooRuhHcAZH1yhQ1qDU"
DISCLAIMER = [
_("Two-factor authentication is a service provided by TrustedCoin. "
"It uses a multi-signature wallet, where you own 2 of 3 keys. "
"The third key is stored on a remote server that signs transactions on "
"your behalf. To use this service, you will need a smartphone with "
"Google Authenticator installed."),
_("A small fee will be charged on each transaction that uses the "
"remote server. You may check and modify your billing preferences "
"once the installation is complete."),
_("Note that your coins are not locked in this service. You may withdraw "
"your funds at any time and at no cost, without the remote server, by "
"using the 'restore wallet' option with your wallet seed."),
_("The next step will generate the seed of your wallet. This seed will "
"NOT be saved in your computer, and it must be stored on paper. "
"To be safe from malware, you may want to do this on an offline "
"computer, and move your wallet later to an online computer."),
]
KIVY_DISCLAIMER = [
_("Two-factor authentication is a service provided by TrustedCoin. "
"To use it, you must have a separate device with Google Authenticator."),
_("This service uses a multi-signature wallet, where you own 2 of 3 keys. "
"The third key is stored on a remote server that signs transactions on "
"your behalf. A small fee will be charged on each transaction that uses the "
"remote server."),
_("Note that your coins are not locked in this service. You may withdraw "
"your funds at any time and at no cost, without the remote server, by "
"using the 'restore wallet' option with your wallet seed."),
]
RESTORE_MSG = _("Enter the seed for your 2-factor wallet:")
class TrustedCoinException(Exception):
def __init__(self, message, status_code=0):
Exception.__init__(self, message)
self.status_code = status_code
class ErrorConnectingServer(Exception):
def __init__(self, reason: Union[str, Exception] = None):
self.reason = reason
def __str__(self):
header = _("Error connecting to {} server").format('TrustedCoin')
reason = self.reason
if isinstance(reason, BaseException):
reason = repr(reason)
return f"{header}:\n{reason}" if reason else header
class TrustedCoinCosignerClient(Logger):
def __init__(self, user_agent=None, base_url='https://api.trustedcoin.com/2/'):
self.base_url = base_url
self.debug = False
self.user_agent = user_agent
Logger.__init__(self)
async def handle_response(self, resp: ClientResponse):
if resp.status != 200:
try:
r = await resp.json()
message = r['message']
except:
message = await resp.text()
raise TrustedCoinException(message, resp.status)
try:
return await resp.json()
except:
return await resp.text()
def send_request(self, method, relative_url, data=None, *, timeout=None):
network = Network.get_instance()
if not network:
raise ErrorConnectingServer('You are offline.')
url = urljoin(self.base_url, relative_url)
if self.debug:
self.logger.debug(f'<-- {method} {url} {data}')
headers = {}
if self.user_agent:
headers['user-agent'] = self.user_agent
try:
if method == 'get':
response = Network.send_http_on_proxy(method, url,
params=data,
headers=headers,
on_finish=self.handle_response,
timeout=timeout)
elif method == 'post':
response = Network.send_http_on_proxy(method, url,
json=data,
headers=headers,
on_finish=self.handle_response,
timeout=timeout)
else:
assert False
except TrustedCoinException:
raise
except Exception as e:
raise ErrorConnectingServer(e)
else:
if self.debug:
self.logger.debug(f'--> {response}')
return response
def get_terms_of_service(self, billing_plan='electrum-per-tx-otp'):
"""
Returns the TOS for the given billing plan as a plain/text unicode string.
:param billing_plan: the plan to return the terms for
"""
payload = {'billing_plan': billing_plan}
return self.send_request('get', 'tos', payload)
def create(self, xpubkey1, xpubkey2, email, billing_plan='electrum-per-tx-otp'):
"""
Creates a new cosigner resource.
:param xpubkey1: a bip32 extended public key (customarily the hot key)
:param xpubkey2: a bip32 extended public key (customarily the cold key)
:param email: a contact email
:param billing_plan: the billing plan for the cosigner
"""
payload = {
'email': email,
'xpubkey1': xpubkey1,
'xpubkey2': xpubkey2,
'billing_plan': billing_plan,
}
return self.send_request('post', 'cosigner', payload)
def auth(self, id, otp):
"""
Attempt to authenticate for a particular cosigner.
:param id: the id of the cosigner
:param otp: the one time password
"""
payload = {'otp': otp}
return self.send_request('post', 'cosigner/%s/auth' % quote(id), payload)
def get(self, id):
""" Get billing info """
return self.send_request('get', 'cosigner/%s' % quote(id))
def get_challenge(self, id):
""" Get challenge to reset Google Auth secret """
return self.send_request('get', 'cosigner/%s/otp_secret' % quote(id))
def reset_auth(self, id, challenge, signatures):
""" Reset Google Auth secret """
payload = {'challenge':challenge, 'signatures':signatures}
return self.send_request('post', 'cosigner/%s/otp_secret' % quote(id), payload)
def sign(self, id, transaction, otp):
"""
Attempt to authenticate for a particular cosigner.
:param id: the id of the cosigner
:param transaction: the hex encoded [partially signed] compact transaction to sign
:param otp: the one time password
"""
payload = {
'otp': otp,
'transaction': transaction
}
return self.send_request('post', 'cosigner/%s/sign' % quote(id), payload,
timeout=60)
def transfer_credit(self, id, recipient, otp, signature_callback):
"""
Transfer a cosigner's credits to another cosigner.
:param id: the id of the sending cosigner
:param recipient: the id of the recipient cosigner
:param otp: the one time password (of the sender)
:param signature_callback: a callback that signs a text message using xpubkey1/0/0 returning a compact sig
"""
payload = {
'otp': otp,
'recipient': recipient,
'timestamp': int(time.time()),
}
relative_url = 'cosigner/%s/transfer' % quote(id)
full_url = urljoin(self.base_url, relative_url)
headers = {
'x-signature': signature_callback(full_url + '\n' + json.dumps(payload))
}
return self.send_request('post', relative_url, payload, headers)
server = TrustedCoinCosignerClient(user_agent="Electrum/" + version.ELECTRUM_VERSION)
class Wallet_2fa(Multisig_Wallet):
wallet_type = '2fa'
def __init__(self, storage):
self.m, self.n = 2, 3
Deterministic_Wallet.__init__(self, storage)
self.is_billing = False
self.billing_info = None
self._load_billing_addresses()
def _load_billing_addresses(self):
billing_addresses = {
'legacy': self.storage.get('trustedcoin_billing_addresses', {}),
'segwit': self.storage.get('trustedcoin_billing_addresses_segwit', {})
}
self._billing_addresses = {} # type: Dict[str, Dict[int, str]] # addr_type -> index -> addr
self._billing_addresses_set = set() # set of addrs
for addr_type, d in list(billing_addresses.items()):
self._billing_addresses[addr_type] = {}
# convert keys from str to int
for index, addr in d.items():
self._billing_addresses[addr_type][int(index)] = addr
self._billing_addresses_set.add(addr)
def can_sign_without_server(self):
return not self.keystores['x2/'].is_watching_only()
def get_user_id(self):
return get_user_id(self.storage)
def min_prepay(self):
return min(self.price_per_tx.keys())
def num_prepay(self, config):
default = self.min_prepay()
n = config.get('trustedcoin_prepay', default)
if n not in self.price_per_tx:
n = default
return n
def extra_fee(self, config):
if self.can_sign_without_server():
return 0
if self.billing_info is None:
self.plugin.start_request_thread(self)
return 0
if self.billing_info.get('tx_remaining'):
return 0
if self.is_billing:
return 0
n = self.num_prepay(config)
price = int(self.price_per_tx[n])
if price > 100000 * n:
raise Exception('too high trustedcoin fee ({} for {} txns)'.format(price, n))
return price
def make_unsigned_transaction(self, coins, outputs, config, fixed_fee=None,
change_addr=None, is_sweep=False):
mk_tx = lambda o: Multisig_Wallet.make_unsigned_transaction(
self, coins, o, config, fixed_fee, change_addr)
fee = self.extra_fee(config) if not is_sweep else 0
if fee:
address = self.billing_info['billing_address_segwit']
fee_output = TxOutput(TYPE_ADDRESS, address, fee)
try:
tx = mk_tx(outputs + [fee_output])
except NotEnoughFunds:
# TrustedCoin won't charge if the total inputs is
# lower than their fee
tx = mk_tx(outputs)
if tx.input_value() >= fee:
raise
self.logger.info("not charging for this tx")
else:
tx = mk_tx(outputs)
return tx
def on_otp(self, tx, otp):
if not otp:
self.logger.info("sign_transaction: no auth code")
return
otp = int(otp)
long_user_id, short_id = self.get_user_id()
raw_tx = tx.serialize()
try:
r = server.sign(short_id, raw_tx, otp)
except TrustedCoinException as e:
if e.status_code == 400: # invalid OTP
raise UserFacingException(_('Invalid one-time password.')) from e
else:
raise
if r:
raw_tx = r.get('transaction')
tx.update(raw_tx)
self.logger.info(f"twofactor: is complete {tx.is_complete()}")
# reset billing_info
self.billing_info = None
self.plugin.start_request_thread(self)
def add_new_billing_address(self, billing_index: int, address: str, addr_type: str):
billing_addresses_of_this_type = self._billing_addresses[addr_type]
saved_addr = billing_addresses_of_this_type.get(billing_index)
if saved_addr is not None:
if saved_addr == address:
return # already saved this address
else:
raise Exception('trustedcoin billing address inconsistency.. '
'for index {}, already saved {}, now got {}'
.format(billing_index, saved_addr, address))
# do we have all prior indices? (are we synced?)
largest_index_we_have = max(billing_addresses_of_this_type) if billing_addresses_of_this_type else -1
if largest_index_we_have + 1 < billing_index: # need to sync
for i in range(largest_index_we_have + 1, billing_index):
addr = make_billing_address(self, i, addr_type=addr_type)
billing_addresses_of_this_type[i] = addr
self._billing_addresses_set.add(addr)
# save this address; and persist to disk
billing_addresses_of_this_type[billing_index] = address
self._billing_addresses_set.add(address)
self._billing_addresses[addr_type] = billing_addresses_of_this_type
self.storage.put('trustedcoin_billing_addresses', self._billing_addresses['legacy'])
self.storage.put('trustedcoin_billing_addresses_segwit', self._billing_addresses['segwit'])
# FIXME this often runs in a daemon thread, where storage.write will fail
self.storage.write()
def is_billing_address(self, addr: str) -> bool:
return addr in self._billing_addresses_set
# Utility functions
def get_user_id(storage):
def make_long_id(xpub_hot, xpub_cold):
return sha256(''.join(sorted([xpub_hot, xpub_cold])))
xpub1 = storage.get('x1/')['xpub']
xpub2 = storage.get('x2/')['xpub']
long_id = make_long_id(xpub1, xpub2)
short_id = hashlib.sha256(long_id).hexdigest()
return long_id, short_id
def make_xpub(xpub, s) -> str:
rootnode = BIP32Node.from_xkey(xpub)
child_pubkey, child_chaincode = bip32._CKD_pub(parent_pubkey=rootnode.eckey.get_public_key_bytes(compressed=True),
parent_chaincode=rootnode.chaincode,
child_index=s)
child_node = BIP32Node(xtype=rootnode.xtype,
eckey=ecc.ECPubkey(child_pubkey),
chaincode=child_chaincode)
return child_node.to_xpub()
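# Illustrative sketch (not used by the plugin): how the per-user cosigner
# xpub is derived deterministically from the two wallet xpubs. The xpub
# strings below are placeholders, not real keys.
def _demo_server_key_derivation():
    long_user_id = sha256(''.join(sorted(['xpub-hot-placeholder',
                                          'xpub-cold-placeholder'])))
    return make_xpub(get_signing_xpub('standard'), long_user_id)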
def make_billing_address(wallet, num, addr_type):
long_id, short_id = wallet.get_user_id()
xpub = make_xpub(get_billing_xpub(), long_id)
usernode = BIP32Node.from_xkey(xpub)
child_node = usernode.subkey_at_public_derivation([num])
pubkey = child_node.eckey.get_public_key_bytes(compressed=True)
if addr_type == 'legacy':
return bitcoin.public_key_to_p2pkh(pubkey)
elif addr_type == 'segwit':
return bitcoin.public_key_to_p2wpkh(pubkey)
else:
raise ValueError(f'unexpected billing type: {addr_type}')
class TrustedCoinPlugin(BasePlugin):
wallet_class = Wallet_2fa
disclaimer_msg = DISCLAIMER
def __init__(self, parent, config, name):
BasePlugin.__init__(self, parent, config, name)
self.wallet_class.plugin = self
self.requesting = False
@staticmethod
def is_valid_seed(seed):
t = seed_type(seed)
return is_any_2fa_seed_type(t)
def is_available(self):
return True
def is_enabled(self):
# Not available for GRS.
return False
def can_user_disable(self):
return False
@hook
def tc_sign_wrapper(self, wallet, tx, on_success, on_failure):
if not isinstance(wallet, self.wallet_class):
return
if tx.is_complete():
return
if wallet.can_sign_without_server():
return
if not wallet.keystores['x3/'].get_tx_derivations(tx):
self.logger.info("twofactor: xpub3 not needed")
return
def wrapper(tx):
self.prompt_user_for_otp(wallet, tx, on_success, on_failure)
return wrapper
@hook
def get_tx_extra_fee(self, wallet, tx):
if type(wallet) != Wallet_2fa:
return
for o in tx.outputs():
if o.type == TYPE_ADDRESS and wallet.is_billing_address(o.address):
return o.address, o.value
def finish_requesting(func):
def f(self, *args, **kwargs):
try:
return func(self, *args, **kwargs)
finally:
self.requesting = False
return f
@finish_requesting
def request_billing_info(self, wallet: 'Wallet_2fa', *, suppress_connection_error=True):
if wallet.can_sign_without_server():
return
self.logger.info("request billing info")
try:
billing_info = server.get(wallet.get_user_id()[1])
except ErrorConnectingServer as e:
if suppress_connection_error:
self.logger.info(str(e))
return
raise
billing_index = billing_info['billing_index']
# add segwit billing address; this will be used for actual billing
billing_address = make_billing_address(wallet, billing_index, addr_type='segwit')
if billing_address != billing_info['billing_address_segwit']:
raise Exception(f'unexpected trustedcoin billing address: '
f'calculated {billing_address}, received {billing_info["billing_address_segwit"]}')
wallet.add_new_billing_address(billing_index, billing_address, addr_type='segwit')
# also add legacy billing address; only used for detecting past payments in GUI
billing_address = make_billing_address(wallet, billing_index, addr_type='legacy')
wallet.add_new_billing_address(billing_index, billing_address, addr_type='legacy')
wallet.billing_info = billing_info
wallet.price_per_tx = dict(billing_info['price_per_tx'])
wallet.price_per_tx.pop(1, None)
return True
def start_request_thread(self, wallet):
from threading import Thread
if self.requesting is False:
self.requesting = True
t = Thread(target=self.request_billing_info, args=(wallet,))
t.setDaemon(True)
t.start()
return t
def make_seed(self, seed_type):
if not is_any_2fa_seed_type(seed_type):
raise Exception(f'unexpected seed type: {seed_type}')
return Mnemonic('english').make_seed(seed_type=seed_type, num_bits=128)
@hook
def do_clear(self, window):
window.wallet.is_billing = False
def show_disclaimer(self, wizard: BaseWizard):
wizard.set_icon('trustedcoin-wizard.png')
wizard.reset_stack()
wizard.confirm_dialog(title='Disclaimer', message='\n\n'.join(self.disclaimer_msg), run_next = lambda x: wizard.run('choose_seed'))
def choose_seed(self, wizard):
title = _('Create or restore')
message = _('Do you want to create a new seed, or to restore a wallet using an existing seed?')
choices = [
('choose_seed_type', _('Create a new seed')),
('restore_wallet', _('I already have a seed')),
]
wizard.choice_dialog(title=title, message=message, choices=choices, run_next=wizard.run)
def choose_seed_type(self, wizard):
choices = [
('create_2fa_segwit_seed', _('Segwit 2FA')),
('create_2fa_seed', _('Legacy 2FA')),
]
wizard.choose_seed_type(choices=choices)
def create_2fa_seed(self, wizard): self.create_seed(wizard, '2fa')
def create_2fa_segwit_seed(self, wizard): self.create_seed(wizard, '2fa_segwit')
def create_seed(self, wizard, seed_type):
seed = self.make_seed(seed_type)
f = lambda x: wizard.request_passphrase(seed, x)
wizard.show_seed_dialog(run_next=f, seed_text=seed)
@classmethod
def get_xkeys(self, seed, t, passphrase, derivation):
assert is_any_2fa_seed_type(t)
xtype = 'standard' if t == '2fa' else 'p2wsh'
bip32_seed = Mnemonic.mnemonic_to_seed(seed, passphrase)
rootnode = BIP32Node.from_rootseed(bip32_seed, xtype=xtype)
child_node = rootnode.subkey_at_private_derivation(derivation)
return child_node.to_xprv(), child_node.to_xpub()
@classmethod
def xkeys_from_seed(self, seed, passphrase):
t = seed_type(seed)
if not is_any_2fa_seed_type(t):
raise Exception(f'unexpected seed type: {t}')
words = seed.split()
n = len(words)
# old versions used long seed phrases
if n >= 20:
# note: pre-2.7 2fa seeds were typically 24-25 words, however they
# could probabilistically be arbitrarily shorter due to a bug. (see #3611)
# the probability of it being < 20 words is about 2^(-(256+12-19*11)) = 2^(-59)
if passphrase != '':
raise Exception('old 2fa seed cannot have passphrase')
xprv1, xpub1 = self.get_xkeys(' '.join(words[0:12]), t, '', "m/")
xprv2, xpub2 = self.get_xkeys(' '.join(words[12:]), t, '', "m/")
elif t != '2fa' or n == 12:
xprv1, xpub1 = self.get_xkeys(seed, t, passphrase, "m/0'/")
xprv2, xpub2 = self.get_xkeys(seed, t, passphrase, "m/1'/")
else:
raise Exception('unrecognized seed length: {} words'.format(n))
return xprv1, xpub1, xprv2, xpub2
def create_keystore(self, wizard, seed, passphrase):
# this overloads the wizard's method
xprv1, xpub1, xprv2, xpub2 = self.xkeys_from_seed(seed, passphrase)
k1 = keystore.from_xprv(xprv1)
k2 = keystore.from_xpub(xpub2)
wizard.request_password(run_next=lambda pw, encrypt: self.on_password(wizard, pw, encrypt, k1, k2))
def on_password(self, wizard, password, encrypt_storage, k1, k2):
k1.update_password(None, password)
wizard.data['x1/'] = k1.dump()
wizard.data['x2/'] = k2.dump()
wizard.pw_args = password, encrypt_storage, STO_EV_USER_PW
self.go_online_dialog(wizard)
def restore_wallet(self, wizard):
wizard.opt_bip39 = False
wizard.opt_ext = True
title = _("Restore two-factor Wallet")
f = lambda seed, is_bip39, is_ext: wizard.run('on_restore_seed', seed, is_ext)
wizard.restore_seed_dialog(run_next=f, test=self.is_valid_seed)
def on_restore_seed(self, wizard, seed, is_ext):
f = lambda x: self.restore_choice(wizard, seed, x)
wizard.passphrase_dialog(run_next=f) if is_ext else f('')
def restore_choice(self, wizard: BaseWizard, seed, passphrase):
wizard.set_icon('trustedcoin-wizard.png')
wizard.reset_stack()
title = _('Restore 2FA wallet')
msg = ' '.join([
'You are going to restore a wallet protected with two-factor authentication.',
'Do you want to keep using two-factor authentication with this wallet,',
'or do you want to disable it, and have two master private keys in your wallet?'
])
choices = [('keep', 'Keep'), ('disable', 'Disable')]
f = lambda x: self.on_choice(wizard, seed, passphrase, x)
wizard.choice_dialog(choices=choices, message=msg, title=title, run_next=f)
def on_choice(self, wizard, seed, passphrase, x):
if x == 'disable':
f = lambda pw, encrypt: wizard.run('on_restore_pw', seed, passphrase, pw, encrypt)
wizard.request_password(run_next=f)
else:
self.create_keystore(wizard, seed, passphrase)
def on_restore_pw(self, wizard, seed, passphrase, password, encrypt_storage):
xprv1, xpub1, xprv2, xpub2 = self.xkeys_from_seed(seed, passphrase)
k1 = keystore.from_xprv(xprv1)
k2 = keystore.from_xprv(xprv2)
k1.add_seed(seed)
k1.update_password(None, password)
k2.update_password(None, password)
wizard.data['x1/'] = k1.dump()
wizard.data['x2/'] = k2.dump()
long_user_id, short_id = get_user_id(wizard.data)
xtype = xpub_type(xpub1)
xpub3 = make_xpub(get_signing_xpub(xtype), long_user_id)
k3 = keystore.from_xpub(xpub3)
wizard.data['x3/'] = k3.dump()
wizard.pw_args = password, encrypt_storage, STO_EV_USER_PW
wizard.terminate()
def create_remote_key(self, email, wizard):
xpub1 = wizard.data['x1/']['xpub']
xpub2 = wizard.data['x2/']['xpub']
# Generate third key deterministically.
long_user_id, short_id = get_user_id(wizard.data)
xtype = xpub_type(xpub1)
xpub3 = make_xpub(get_signing_xpub(xtype), long_user_id)
# secret must be sent by the server
try:
r = server.create(xpub1, xpub2, email)
except (socket.error, ErrorConnectingServer):
wizard.show_message('Server not reachable, aborting')
wizard.terminate()
return
except TrustedCoinException as e:
if e.status_code == 409:
r = None
else:
wizard.show_message(str(e))
return
if r is None:
otp_secret = None
else:
otp_secret = r.get('otp_secret')
if not otp_secret:
wizard.show_message(_('Error'))
return
_xpub3 = r['xpubkey_cosigner']
_id = r['id']
if short_id != _id:
wizard.show_message("unexpected trustedcoin short_id: expected {}, received {}"
.format(short_id, _id))
return
if xpub3 != _xpub3:
wizard.show_message("unexpected trustedcoin xpub3: expected {}, received {}"
.format(xpub3, _xpub3))
return
self.request_otp_dialog(wizard, short_id, otp_secret, xpub3)
def check_otp(self, wizard, short_id, otp_secret, xpub3, otp, reset):
if otp:
self.do_auth(wizard, short_id, otp, xpub3)
elif reset:
wizard.opt_bip39 = False
wizard.opt_ext = True
f = lambda seed, is_bip39, is_ext: wizard.run('on_reset_seed', short_id, seed, is_ext, xpub3)
wizard.restore_seed_dialog(run_next=f, test=self.is_valid_seed)
def on_reset_seed(self, wizard, short_id, seed, is_ext, xpub3):
f = lambda passphrase: wizard.run('on_reset_auth', short_id, seed, passphrase, xpub3)
wizard.passphrase_dialog(run_next=f) if is_ext else f('')
def do_auth(self, wizard, short_id, otp, xpub3):
try:
server.auth(short_id, otp)
except TrustedCoinException as e:
if e.status_code == 400: # invalid OTP
wizard.show_message(_('Invalid one-time password.'))
# ask again for otp
self.request_otp_dialog(wizard, short_id, None, xpub3)
else:
wizard.show_message(str(e))
wizard.terminate()
except Exception as e:
wizard.show_message(str(e))
wizard.terminate()
else:
k3 = keystore.from_xpub(xpub3)
wizard.data['x3/'] = k3.dump()
wizard.data['use_trustedcoin'] = True
wizard.terminate()
def on_reset_auth(self, wizard, short_id, seed, passphrase, xpub3):
xprv1, xpub1, xprv2, xpub2 = self.xkeys_from_seed(seed, passphrase)
if (wizard.data['x1/']['xpub'] != xpub1 or
wizard.data['x2/']['xpub'] != xpub2):
wizard.show_message(_('Incorrect seed'))
return
r = server.get_challenge(short_id)
challenge = r.get('challenge')
message = 'TRUSTEDCOIN CHALLENGE: ' + challenge
def f(xprv):
rootnode = BIP32Node.from_xkey(xprv)
key = rootnode.subkey_at_private_derivation((0, 0)).eckey
sig = key.sign_message(message, True)
return base64.b64encode(sig).decode()
signatures = [f(x) for x in [xprv1, xprv2]]
r = server.reset_auth(short_id, challenge, signatures)
new_secret = r.get('otp_secret')
if not new_secret:
wizard.show_message(_('Request rejected by server'))
return
self.request_otp_dialog(wizard, short_id, new_secret, xpub3)
@hook
def get_action(self, storage):
if storage.get('wallet_type') != '2fa':
return
if not storage.get('x1/'):
return self, 'show_disclaimer'
if not storage.get('x2/'):
return self, 'show_disclaimer'
if not storage.get('x3/'):
return self, 'accept_terms_of_use'
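# Illustrative sketch (not part of the plugin): querying the TrustedCoin API
# through the module-level client. This performs network I/O and requires a
# running electrum Network instance, so it is wrapped in a function rather
# than executed at import time.
def _demo_fetch_terms_of_service():
    return server.get_terms_of_service(billing_plan='electrum-per-tx-otp')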
| gpl-3.0 | -3,658,117,888,799,464,400 | 41.041169 | 139 | 0.603468 | false |
jkibele/benthic_photo_survey | bps_package/photo_tagging.py | 1 | 24681 | import pyexiv2 as exiv # see note about pyexiv2 in notes.txt
import json
from ast import literal_eval
from depth_temp_log_io import *
from configuration import *
from gps_log_io import *
from common import *
# That namespace url doesn't really exist. The custom tags seem to work
# without it. Perhaps I should figure out if I really need it or not.
exiv.xmp.register_namespace('http://svarchiteuthis.com/benthicphoto/', 'BenthicPhoto')
class image_directory(object):
def __init__(self, dir_path):
if os.path.isdir(dir_path):
jpgs = [ os.path.join(dir_path,f) for f in os.listdir(dir_path) if f.lower().endswith('.jpg') and not f.startswith('.') ]
else:
raise ValueError("%s is not a directory." % dir_path)
self.path = dir_path
self.images = [ image_file(img) for img in jpgs ]
self.images.sort(key=lambda i: i.datetime) # sort the images by datetime of the image
self.image_count = len( self.images )
def __shift_datetimes__(self, time_delta_obj, verbose=True):
"""
Shift the 'date original' values of all photos in the directory. See the
warnings in the image_file.__set_datetime__ method doc string. You should
be careful about using this method.
"""
for img in self.images:
new_dt = img.__shift_datetime__( time_delta_obj, verbose=verbose )
@property
def local_datetimes(self):
return [ x.datetime for x in self.images ]
@property
def utc_datetimes(self):
return [ x.utc_datetime for x in self.images ]
@property
def exif_depths(self):
d_list = []
for img in self.images:
if img.exif_depth:
d_list.append(img.exif_depth * -1)
else:
d_list.append(0.0)
return np.array(d_list)
@property
def fuzzy_habitat_dict(self):
d = {}
for img in self.images:
for hab in img.xmp_fuzzy_habitats:
try:
d[hab] += 1
except KeyError:
d[hab] = 1
return d
def dive_record_set(self,db_path):
return dive_record_set( min(self.local_datetimes), max(self.local_datetimes), db_path )
def depth_plot(self, db_path, depth_time_offset=None):
"""
Create a plot of the depth profile with photo times and depths marked.
db_path: A string of the path to the sqlite database.
depth_time_offset: An int in seconds to offset x values by. This only
changes the plot. It does not alter any of the values or change
what gets exported to shapefile.
"""
drs = self.dive_record_set(db_path)
y = -1 * drs.depth_time_array[:,0] # depths * -1 to make negative values
x = drs.depth_time_array[:,1] # datetimes
if depth_time_offset:
x = x + td(seconds=depth_time_offset)
fig = plt.figure() # imported from matplotlib
ax = fig.add_subplot(111)
ax.plot_date(x, y, marker='.', linestyle='-', tz=pytz.timezone(LOCAL_TIME_ZONE))  # LOCAL_TIME_ZONE from configuration.py
ax.plot(self.local_datetimes,self.exif_depths,'r*',markersize=10,picker=5)
plt.xlabel('Date and Time')
plt.ylabel('Depth (meters)')
fig.suptitle('Photos with Depth and Time')
#print "Before def onpick"
def onpick(event):
global ann
try:
ann.remove()
except NameError:
pass
ind = event.ind[0]
fname = os.path.basename( self.images[ind].file_path )
ann_text = "Photo: %s\ndepth: %g\ndate: %s" % ( fname, self.exif_depths[ind], self.local_datetimes[ind].strftime('%Y/%m/%d %H:%M:%S') )
ann = ax.annotate(ann_text, xy=(self.local_datetimes[ind], self.exif_depths[ind]), xytext=(-20,-20),
textcoords='offset points', ha='center', va='top',
bbox=dict(boxstyle='round,pad=0.2', fc='yellow', alpha=0.3),
arrowprops=dict(arrowstyle='->', connectionstyle='arc3,rad=0.5',
color='red'))
plt.draw()
print "Photo: %s, index: %i, depth: %g, date: %s" % ( fname, ind, self.exif_depths[ind], self.local_datetimes[ind].strftime('%Y/%m/%d %H:%M:%S') )
#print "Before mpl_connect"
fig.canvas.mpl_connect('pick_event', onpick)
plt.show()
#print "after plt show"
def depth_temp_tag(self,db_path,verbose=False):
"""
Depth tag all the photos in the directory.
"""
for img in self.images:
img.depth_temp_tag(db_path,verbose)
class image_file(object):
"""
An object to make accessing image files and metadata easier.
"""
def __init__(self,img_path):
if os.path.exists(img_path):
self.file_path = img_path
md = exiv.ImageMetadata(img_path)
md.read()
self.md = md
else:
raise ValueError( "The file %s does not exist." % (img_path,) )
def __repr__(self):
return "Image file: %s" % (self.file_path,)
def __get_exiv_tag(self,tag_string):
"""
Try to get a pyexiv2 tag. If the tag doesn't exist, return None.
"""
try:
return self.md[tag_string]
except KeyError:
return None
def __get_exiv_tag_value(self,tag_string):
"""
Try to get a pyexiv2 tag value. If the tag doesn't exist, return None.
"""
try:
return self.md[tag_string].value
except KeyError:
return None
def __get_exiv_tag_human_value(self,tag_string):
"""
Try to get a pyexiv2 tag human value. If the tag doesn't exist, return None.
"""
try:
return self.md[tag_string].human_value
except KeyError:
return None
def exif_dict(self, exclude_panasonic_keys=True):
"""
Return a dict with all exif and xmp keys and values.
"""
exif_dict = {}
for key in self.md.xmp_keys:
if self.__get_exiv_tag_value(key):
exif_dict.update( { key : self.__get_exiv_tag_value(key) } )
for key in self.md.exif_keys:
if not ( exclude_panasonic_keys and 'Panasonic' in key.split('.') ):
if self.__get_exiv_tag_human_value(key):
exif_dict.update( { key : self.__get_exiv_tag_human_value(key)[:100] } )
return exif_dict
@property
def file_name(self):
return os.path.basename(self.file_path)
@property
def datetime(self):
"""
Try to get a datetime object for the image's creation from the
Exif.Photo.DateTimeOriginal value via pyexiv2.
"""
dt = self.__get_exiv_tag_value('Exif.Photo.DateTimeOriginal')
if dt is None:
    return None
if dt.tzname():
    return dt
else:
    return make_aware_of_local_tz(dt)
@property
def utc_datetime(self):
if self.datetime:
return utc_from_local(self.datetime)
else:
return None
@property
def exif_direction(self):
if self.__get_exiv_tag_value('Exif.GPSInfo.GPSImgDirection'):
return float( self.__get_exiv_tag_value('Exif.GPSInfo.GPSImgDirection') )
@property
def exif_lat_tag(self):
return self.__get_exiv_tag('Exif.GPSInfo.GPSLatitude')
@property
def exif_latref_tag(self):
return self.__get_exiv_tag('Exif.GPSInfo.GPSLatitudeRef')
@property
def exif_lon_tag(self):
return self.__get_exiv_tag('Exif.GPSInfo.GPSLongitude')
@property
def exif_lonref_tag(self):
return self.__get_exiv_tag('Exif.GPSInfo.GPSLongitudeRef')
@property
def exif_depth_tag(self):
return self.__get_exiv_tag('Exif.GPSInfo.GPSAltitude')
@property
def exif_depth(self):
try:
ret_val = float( self.__get_exiv_tag_value('Exif.GPSInfo.GPSAltitude') )
except TypeError:
try:
ret_val = self.__get_exiv_tag_value('Exif.GPSInfo.GPSAltitude').to_float()
except AttributeError:
ret_val = None
return ret_val
@property
def __exif_depth_temp_dict(self):
"""
This is a bit of a hack. I couldn't find a good place to store temperature
data in the exif so I went with storing a python dictionary as a string
in Exif.Photo.UserComment. I think I'm going to stop using this and store
this stuff in custom xmp tags instead. UserComment is accessible to many
photo management apps so it seems likely to get corrupted. I made it a
private method but maybe I should have just deleted it.
"""
try:
dstr = self.md['Exif.Photo.UserComment'].value
return literal_eval(dstr)
except KeyError:
return None
@property
def __exif_temperature(self):
"""
This just exposes the temperature value from the hack mentioned in the
doc string for exif_depth_temp_dict. I'm going to stop writing to this
tag so don't be surprised if this returns nothing. Actually, I think I
may just make it a private method because I don't want to delete it.
"""
if self.__exif_depth_temp_dict:
    return self.__exif_depth_temp_dict['temp']
else:
return None
@property
def xmp_temperature(self):
return self.__get_exiv_tag_value('Xmp.BenthicPhoto.temperature')
@property
def xmp_temp_units(self):
return self.__get_exiv_tag_value('Xmp.BenthicPhoto.temp_units')
@property
def xmp_substrate(self):
return self.__get_exiv_tag_value('Xmp.BenthicPhoto.substrate')
@property
def xmp_habitat(self):
"""
xmp_habitat will be set to the dominant habitat type of all the fuzzy
habitats. Specifically, the fuzzy habitat with the highest proportion.
In the event of a tie (multiple fuzzy habitats with the same proportion)
one of the tied habitats will be chosen at random. Assignment happens
in the setHabitat method of the MainWindow in bps_gui.py.
"""
return self.__get_exiv_tag_value('Xmp.BenthicPhoto.habitat')
@property
def xmp_fuzzy_hab_dict(self):
hd_json = self.__get_exiv_tag_value('Xmp.BenthicPhoto.fuzzy_hab_dict')
if hd_json:
return json.loads(hd_json)
else:
return None
@property
def xmp_fuzzy_habitats(self):
habdict = self.xmp_fuzzy_hab_dict
if habdict:
return habdict.keys()
else:
return []
@property
def position(self):
"""
Look at the exif data and return a position object (as defined in
gps_log_io). Return None if there's no GPSInfo in the exif.
"""
if self.exif_lat_tag and self.exif_lon_tag and self.exif_latref_tag and self.exif_lonref_tag:
lat = latitude.from_exif_coord(self.exif_lat_tag.value,self.exif_latref_tag.value)
lon = longitude.from_exif_coord(self.exif_lon_tag.value,self.exif_lonref_tag.value)
return position(lat,lon)
else:
return None
def __set_datetime__(self,dt_obj):
"""
Set the date original in the exif. I don't think you want to do this
but I did want to once. If you lose the origination time for your
image you can not sync it to your gps track or your depth log so
leave this alone unless you're sure you know what you're doing.
If you screw up your data don't come crying to me. I tried to warn
you.
"""
key = 'Exif.Photo.DateTimeOriginal'
self.md[key] = exiv.ExifTag(key,dt_obj)
self.md.write()
return self.datetime
def __shift_datetime__(self,time_delta_obj,verbose=True):
"""
Shift the 'date original' in the exif by the given time delta. See the
warnings in the doc string of __set_datetime__ method. You should be
careful with this.
"""
current_dt = self.datetime
self.__set_datetime__( current_dt + time_delta_obj )
if verbose:
print "datetime of %s changed from %s to %s." % ( self.file_name, current_dt.strftime('%X, %x'), self.datetime.strftime('%X, %x') )
return self.datetime
def __set_exif_position(self,pos,verbose=False):
"""
Set the relevant exif tags to match the position object handed in.
The position object is defined over in gps_log_io.py
"""
pre = 'Exif.GPSInfo.GPS'
add_dict = {pre+'Latitude': pos.lat.exif_coord,
pre+'LatitudeRef': pos.lat.hemisphere,
pre+'Longitude': pos.lon.exif_coord,
pre+'LongitudeRef': pos.lon.hemisphere }
for k,v in add_dict.iteritems():
if verbose:
print "%s = %s" % (str(k),str(v))
self.md[k] = exiv.ExifTag(k,v)
self.md.write()
return True
def __set_exif_depth_temp(self,depth,temp,verbose=False):
from pyexiv2.utils import Rational
if depth < 0: # This can happen because there's a bit of slop in the conversion from pressure to depth
if verbose:
print "Given depth was a negative value."
depth = 0
if not depth:
return None
if not temp:
temp = 0.0 # temperature isn't important at this point so if it's not there we'll just call it zero
pre = 'Exif.GPSInfo.GPS'
#dt_str = "{'depth':%g,'temp':%g}" % (depth,temp)
dfrac = Fraction.from_float(depth).limit_denominator()
add_dict = {pre+'Altitude': Rational(dfrac.numerator,dfrac.denominator),
pre+'AltitudeRef': bytes(1),
}
#'Exif.Photo.UserComment': dt_str }
for k,v in add_dict.iteritems():
if verbose:
print "%s = %s" % (str(k),str(v))
self.md[k] = exiv.ExifTag(k,v)
self.md.write()
return True
def __set_xmp_depth_temp(self,depth,temp):
if not depth:
return None
if not temp:
temp = 0.0 # temperature isn't important at this point so if it's not there we'll just call it zero
pre = 'Xmp.BenthicPhoto.'
self.md[pre+'depth'] = str(depth)
self.md[pre+'depth_units'] = 'meters'
self.md[pre+'temperature'] = str(temp)
self.md[pre+'temp_units'] = 'celsius'
self.md.write()
def set_xmp_substrate(self, subst_str):
pre = 'Xmp.BenthicPhoto.'
self.md[pre+'substrate'] = subst_str
self.md.write()
def set_xmp_habitat(self, subst_str):
pre = 'Xmp.BenthicPhoto.'
self.md[pre+'habitat'] = subst_str
self.md.write()
def set_xmp_fuzzy_habitats(self, habdict):
habdict_json_str = json.dumps(habdict)
pre = 'Xmp.BenthicPhoto.'
self.md[pre+'fuzzy_hab_dict'] = habdict_json_str
self.md.write()
def logger_depth(self,db_path):
"""
Get the logged depth out of the db that matches the photo's timestamp.
"""
if self.utc_datetime:
depth = get_depth_for_time(self.utc_datetime, db_path, reject_threshold=30)
return depth
else:
return None
def logger_temp(self, db_path):
"""
Get the logged temperature out of the db that matches the photo's timestamp.
"""
if self.utc_datetime:
temp = get_temp_for_time(self.utc_datetime, db_path, reject_threshold=30)
return temp
else:
return None
def depth_temp_tag(self,db_path,verbose=False):
"""
Get the depth and temp readings out of the db that match the photo's origination
time (considering that the photo's time stamp is in the local timezone and the
logs are in UTC) and write those values to the image's exif data.
"""
self.__set_exif_depth_temp(self.logger_depth(db_path),self.logger_temp(db_path),verbose=verbose)
self.__set_xmp_depth_temp(self.logger_depth(db_path),self.logger_temp(db_path))
if self.exif_depth_tag:
return self.exif_depth_tag.value
else:
return None
def geotag(self,db_path,verbose=True):
"""
Get a position that matches the time of creation for the image out
of the database and set the exif data accordingly. We assume that
the photo timestamp is local and the gps position is utc.
"""
pos = get_position_for_time(self.utc_datetime,db_path,verbose=verbose)
if verbose and pos:
print "-------------------GeoTagg--------------------------------"
print "%s is going to get set to %s as %s, %s" % ( os.path.basename( self.file_path ), unicode( pos ), str(pos.lat.exif_coord), str(pos.lon.exif_coord) )
print "%s, %s in dms" % ( str(pos.lat.dms), str(pos.lon.dms) )
if pos:
self.__set_exif_position(pos,verbose)
return self.position
def __compare_position__(self,db_path):
"""
This is just for testing. Check to see if the value stored in the db
matches what we display after conversion. I want to make sure I'm not
throwing away precision in coordinate conversions.
"""
pos = get_position_for_time(self.utc_datetime,db_path,verbose=True)
print " db says: %s, %s \nexif says: %s, %s" % ( pos.lat.nmea_string, pos.lon.nmea_string, self.position.lat.nmea_string, self.position.lon.nmea_string )
if pos.lat.nmea_string == self.position.lat.nmea_string:
print "Latitudes match"
if pos.lon.nmea_string == self.position.lon.nmea_string:
print "Longitudes match"
def remove_geotagging(self):
"""
You probably won't need to do this but I did a few times during testing.
"""
geokeys = ['Latitude','LatitudeRef','Longitude','LongitudeRef']
pre = 'Exif.GPSInfo.GPS'
for key in [pre+gk for gk in geokeys]:
if self.md.__contains__(key):
self.md.__delitem__(key)
self.md.write()
def remove_depthtagging(self):
"""
You probably won't need to do this but I did a few times during testing.
"""
geokeys = ['Altitude','AltitudeRef']
pre = 'Exif.GPSInfo.GPS'
for key in [pre+gk for gk in geokeys]:
if self.md.__contains__(key):
self.md.__delitem__(key)
self.md.write()
def remove_temptagging(self):
"""
You probably won't need to do this but I did a few times during testing.
"""
geokeys = ['depth','depth_units','temperature','temp_units']
pre = 'Xmp.BenthicPhoto.'
for key in [pre+gk for gk in geokeys]:
if self.md.__contains__(key):
self.md.__delitem__(key)
self.md.write()
def remove_substratetagging(self):
"""
You probably won't need to do this but I did a few times during testing.
"""
key = 'Xmp.BenthicPhoto.substrate'
if self.md.__contains__(key):
self.md.__delitem__(key)
self.md.write()
def remove_habitattag(self):
"""
You probably won't need to do this but I did a few times during testing.
"""
key = 'Xmp.BenthicPhoto.habitat'
if self.md.__contains__(key):
self.md.__delitem__(key)
self.md.write()
def remove_fuzzyhabitats(self):
"""
You probably won't need to do this but I did a few times during testing.
"""
key = 'Xmp.BenthicPhoto.fuzzy_hab_dict'
if self.md.__contains__(key):
self.md.__delitem__(key)
self.md.write()
def remove_habitattagging(self):
"""
You probably won't need to do this but I did a few times during testing.
"""
self.remove_habitattag()
self.remove_fuzzyhabitats()
def remove_all_tagging(self):
"""
You probably won't need to do this but I did a few times during testing.
"""
self.remove_geotagging()
self.remove_depthtagging()
self.remove_temptagging()
self.remove_substratetagging()
self.remove_habitattagging()
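# Illustrative sketch (not part of the module): the typical end-to-end
# tagging workflow built from the classes above. Both paths below are
# hypothetical placeholders.
def _demo_tag_workflow(photo_dir='/photos/dive1', db_path='/logs/bps.db'):
    imgdir = image_directory(photo_dir)
    imgdir.depth_temp_tag(db_path, verbose=True)   # write depth/temp exif+xmp
    for img in imgdir.images:
        img.geotag(db_path, verbose=False)         # write GPS exif from the track
        print "%s -> %s, depth %s m" % (img.file_name, img.position, img.exif_depth)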
def exif_tag_jpegs(photo_dir,db_path):
for fname in os.listdir(photo_dir):
if fname.lower().endswith('.jpg'):
imf = image_file( os.path.join(photo_dir,fname) )
imf.depth_temp_tag(db_path)
imf.geotag(db_path)
if imf.exif_depth_tag:
dstr = imf.exif_depth_tag.human_value
else:
dstr = 'None'
if imf.exif_temperature:
tstr = "%g C" % imf.exif_temperature
else:
tstr = 'None'
print "Image: %s - Depth: %s, Temp %s, Position: %s" % (fname,dstr,tstr,imf.position)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Tag photos with position, depth, and temperature from a GPS and a Sensus Ultra depth and temperature logger.')
parser.add_argument('photo_dir', nargs='?', type=str, help='The directory that contains photos you would like tagged.')
parser.add_argument('db_path', nargs='?', type=str, help='The database that contains the depth and location information you want to tag the photos with.')
args = parser.parse_args()
exif_tag_jpegs(args.photo_dir, args.db_path)
#### Pretty much just testing garbage below here #######
#### Why don't I delete it? Good question. #######
def check_time_tags(img):
md = get_photo_metadata(img)
timetags = [tag for tag in md.exif_keys if tag.find('Time') != -1]
for t in timetags:
print "%s: %s" % (t,md[t])
def read_gps_crap(img):
md = get_photo_metadata(img_path)
try:
gpstag = md['Exif.Image.GPSTag'].human_value
except KeyError:
gpstag = 'not set'
try:
lat = md['Exif.GPSInfo.GPSLatitude'].human_value
except KeyError:
lat = 'not set'
try:
lon = md['Exif.GPSInfo.GPSLongitude'].human_value
except KeyError:
lon = 'not set'
print "GPSTag: %s, Lat: %s, Lon: %s" % ( str(gpstag), str(lat), str(lon) )
def read_gps_crap_from_dir(dir):
for fname in os.listdir(dir):
if fname.lower().endswith('.jpg'):
read_gps_crap(os.path.join(dir,fname))
def shift_time_for_photos(direc,time_delta):
for fname in os.listdir(direc):
if fname.lower().endswith('.jpg'):
imf = image_file( os.path.join( direc,fname ) )
orig_time = imf.datetime
imf.__set_datetime__( orig_time + time_delta )
print "Changed %s from %s to %s." % ( fname, orig_time.strftime('%H:%M'), imf.datetime.strftime('%H:%M') )
def photo_times_for_dir(dir):
for fname in os.listdir(dir):
if fname.lower().endswith('.jpg'):
img = os.path.join(dir,fname)
md = get_photo_metadata(img)
ptime = get_photo_datetime(md)
if ptime:
ending = ptime.strftime('%Y-%m-%d %H:%M:%S')
else:
ending = 'no time tag'
print "%s: %s" % (fname,ending)
def get_photo_metadata(img_path):
md = exiv.ImageMetadata(img_path)
md.read()
return md
def get_photo_datetime(md):
"""If I find inconsistency in exif tags, I may have to get a little more creative
here."""
try:
ptime = md['Exif.Photo.DateTimeOriginal'].value
except KeyError:
ptime = False
return ptime
| bsd-3-clause | 3,740,261,318,711,634,000 | 37.146832 | 165 | 0.565658 | false |
rpavlik/jhbuild-vrjuggler | jhbuild/frontends/buildscript.py | 1 | 11073 | # jhbuild - a build script for GNOME 1.x and 2.x
# Copyright (C) 2001-2006 James Henstridge
# Copyright (C) 2003-2004 Seth Nickell
#
# buildscript.py: base class of the various interface types
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
from jhbuild.utils import packagedb
from jhbuild.errors import FatalError, CommandError, SkipToPhase, SkipToEnd
class BuildScript:
def __init__(self, config, module_list=None):
if self.__class__ is BuildScript:
raise NotImplementedError('BuildScript is an abstract base class')
self.modulelist = module_list
self.module_num = 0
self.config = config
# the existence of self.config.prefix is checked in config.py
if not os.access(self.config.prefix, os.R_OK|os.W_OK|os.X_OK):
raise FatalError(_('install prefix (%s) must be writable') % self.config.prefix)
if not os.path.exists(self.config.checkoutroot):
try:
os.makedirs(self.config.checkoutroot)
except OSError:
raise FatalError(
_('checkout root (%s) can not be created') % self.config.checkoutroot)
if not os.access(self.config.checkoutroot, os.R_OK|os.W_OK|os.X_OK):
raise FatalError(_('checkout root (%s) must be writable') % self.config.checkoutroot)
if self.config.copy_dir and not os.path.exists(self.config.copy_dir):
try:
os.makedirs(self.config.copy_dir)
except OSError:
raise FatalError(
_('checkout copy dir (%s) can not be created') % self.config.copy_dir)
if not os.access(self.config.copy_dir, os.R_OK|os.W_OK|os.X_OK):
raise FatalError(_('checkout copy dir (%s) must be writable') % self.config.copy_dir)
packagedbdir = os.path.join(self.config.prefix, 'share', 'jhbuild')
try:
if not os.path.isdir(packagedbdir):
os.makedirs(packagedbdir)
except OSError:
raise FatalError(_('could not create directory %s') % packagedbdir)
self.packagedb = packagedb.PackageDB(os.path.join(packagedbdir,
'packagedb.xml'))
def execute(self, command, hint=None, cwd=None, extra_env=None):
'''Executes the given command.
If an error occurs, CommandError is raised. The hint argument
gives a hint about the type of output to expect.
'''
raise NotImplementedError
def build(self, phases=None):
'''start the build of the current configuration'''
self.start_build()
failures = [] # list of modules that couldn't be built
self.module_num = 0
for module in self.modulelist:
self.module_num = self.module_num + 1
if self.config.min_age is not None:
installdate = self.packagedb.installdate(module.name)
if installdate > self.config.min_age:
self.message(_('Skipping %s (installed recently)') % module.name)
continue
self.start_module(module.name)
failed = False
for dep in module.dependencies:
if dep in failures:
if self.config.module_nopoison.get(dep,
self.config.nopoison):
self.message(_('module %(mod)s will be built even though %(dep)s failed')
% { 'mod':module.name, 'dep':dep })
else:
self.message(_('module %(mod)s not built due to non buildable %(dep)s')
% { 'mod':module.name, 'dep':dep })
failed = True
if failed:
failures.append(module.name)
self.end_module(module.name, failed)
continue
if not phases:
build_phases = self.get_build_phases(module)
else:
build_phases = phases
phase = None
num_phase = 0
# if there is an error and a new phase is selected (be it by the
# user or an automatic system), the chosen phase must absolutely
# be executed, it should in no condition be skipped automatically.
# The force_phase variable flags that condition.
force_phase = False
while num_phase < len(build_phases):
last_phase, phase = phase, build_phases[num_phase]
try:
if not force_phase and module.skip_phase(self, phase, last_phase):
num_phase += 1
continue
except SkipToEnd:
break
if not module.has_phase(phase):
                # skip phases that do not exist; this can happen when
                # phases were explicitly passed to this method.
num_phase += 1
continue
self.start_phase(module.name, phase)
error = None
try:
try:
error, altphases = module.run_phase(self, phase)
except SkipToPhase, e:
try:
num_phase = build_phases.index(e.phase)
except ValueError:
break
continue
except SkipToEnd:
break
finally:
self.end_phase(module.name, phase, error)
if error:
try:
nextphase = build_phases[num_phase+1]
except IndexError:
nextphase = None
newphase = self.handle_error(module, phase,
nextphase, error,
altphases)
force_phase = True
if newphase == 'fail':
failures.append(module.name)
failed = True
break
if newphase is None:
break
if newphase in build_phases:
num_phase = build_phases.index(newphase)
else:
# requested phase is not part of the plan, we insert
# it, then fill with necessary phases to get back to
# the current one.
filling_phases = self.get_build_phases(module, targets=[phase])
canonical_new_phase = newphase
if canonical_new_phase.startswith('force_'):
# the force_ phases won't appear in normal build
# phases, so get the non-forced phase
canonical_new_phase = canonical_new_phase[6:]
if canonical_new_phase in filling_phases:
filling_phases = filling_phases[
filling_phases.index(canonical_new_phase)+1:-1]
build_phases[num_phase:num_phase] = [newphase] + filling_phases
if build_phases[num_phase+1] == canonical_new_phase:
# remove next phase if it would just be a repeat of
# the inserted one
del build_phases[num_phase+1]
else:
force_phase = False
num_phase += 1
self.end_module(module.name, failed)
self.end_build(failures)
if failures:
return 1
return 0
def get_build_phases(self, module, targets=None):
'''returns the list of required phases'''
if targets:
tmp_phases = targets[:]
else:
tmp_phases = self.config.build_targets[:]
i = 0
while i < len(tmp_phases):
phase = tmp_phases[i]
depadd = []
try:
phase_method = getattr(module, 'do_' + phase)
except AttributeError:
# unknown phase for this module type, simply skip
del tmp_phases[i]
continue
if hasattr(phase_method, 'depends'):
for subphase in phase_method.depends:
if subphase not in tmp_phases[:i+1]:
depadd.append(subphase)
if depadd:
tmp_phases[i:i] = depadd
else:
i += 1
# remove duplicates
phases = []
for phase in tmp_phases:
if not phase in phases:
phases.append(phase)
return phases
def start_build(self):
'''Hook to perform actions at start of build.'''
pass
def end_build(self, failures):
'''Hook to perform actions at end of build.
The argument is a list of modules that were not buildable.'''
pass
def start_module(self, module):
'''Hook to perform actions before starting a build of a module.'''
pass
def end_module(self, module, failed):
'''Hook to perform actions after finishing a build of a module.
The argument is true if the module failed to build.'''
pass
def start_phase(self, module, phase):
'''Hook to perform actions before starting a particular build phase.'''
pass
def end_phase(self, module, phase, error):
'''Hook to perform actions after finishing a particular build phase.
The argument is a string containing the error text if something
went wrong.'''
pass
def message(self, msg, module_num=-1):
'''Display a message to the user'''
raise NotImplementedError
def set_action(self, action, module, module_num=-1, action_target=None):
'''inform the buildscript of a new stage of the build'''
raise NotImplementedError
def handle_error(self, module, phase, nextphase, error, altphases):
'''handle error during build'''
raise NotImplementedError
| gpl-2.0 | 3,736,484,522,379,380,700 | 40.943182 | 101 | 0.530118 | false |
aoyono/sicpy | Chapter2/exercises/exercise2_37.py | 1 | 1735 | # -*- coding: utf-8 -*-
"""
https://mitpress.mit.edu/sicp/full-text/book/book-Z-H-15.html#%_thm_2.37
"""
from operator import add, mul
from Chapter2.exercises.exercise2_36 import accumulate_n
from Chapter2.themes.lisp_list_structured_data import cons, lisp_list, print_lisp_list
from Chapter2.sequences_as_conventional_interfaces import accumulate, map
from utils import let
def map_n(op, init, *seqs):
return map(
lambda l: accumulate(op, init, l),
accumulate_n(
cons,
lisp_list(),
lisp_list(*seqs)
)
)
def dot_product(v, w):
return accumulate(
add,
0,
map_n(mul, 1, v, w)
)
def matrix_dot_vector(m, v):
return map(
lambda row: dot_product(v, row),
m
)
def transpose(mat):
return accumulate_n(
cons,
lisp_list(),
mat
)
def matrix_dot_matrix(m, n):
with let(transpose(n)) as (cols,):
return map(
lambda row: matrix_dot_vector(cols, row),
m
)
def run_the_magic():
with let(
lisp_list(
lisp_list(1, 2, 3),
lisp_list(4, 5, 6),
lisp_list(6, 7, 8),
),
lisp_list(
lisp_list(1, 0, 0),
lisp_list(0, 8, 0),
lisp_list(0, 0, 1),
)
) as (m, n):
print('(define m (list (list 1 2 3) (list 4 5 6) (list 6 7 8))')
print_lisp_list(m)
print('(define n (list (list 1 0 0) (list 0 1 0) (list 0 0 1))')
print_lisp_list(n)
print('(matrix-*-matrix m n)')
print_lisp_list(matrix_dot_matrix(m, n))
if __name__ == '__main__':
run_the_magic()
| mit | 4,179,315,568,607,372,300 | 21.532468 | 86 | 0.508357 | false |
Bioto/Huuey-python | huuey/hue/scenes/scene.py | 1 | 1071 | from huuey.paths import Paths
class Scene:
name = None
lights = []
owner = None
recycle = None
locked = None
appdata = None
picture = None
lastupdated = None
version = None
_id = None
_parent = None
def __init__(self, obj, parent, _id):
self._parent = parent
self._id = _id
self._map(obj)
def get_id(self):
return self._id
def _map(self, obj):
for key in obj:
            setattr(self, key, obj[key])
@staticmethod
def create(name, lights, controller, recycle=False):
request = controller.request(Paths.SceneCREATE, data={
'name': name,
'lights': lights,
'recycle': recycle
})
return request[0]['success']['id']
def activate(self):
return self._parent.request(Paths.SceneGroup, data={
'scene': self._id
})
def delete(self):
self._parent.request(Paths.SceneDEL, additional={
'id': self._id
})
self._parent.remove_scene(self._id)
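
# A brief usage sketch, assuming `hue` is a Huuey controller exposing request()
# and that the bridge has lights "1" and "2" (all names here are assumptions):
#
#     scene_id = Scene.create("movie night", ["1", "2"], hue)
#     scene = Scene({"name": "movie night"}, hue, scene_id)
#     scene.activate()
#     scene.delete()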
| mit | 6,638,197,520,970,899,000 | 20 | 62 | 0.535948 | false |
openstack/vitrage | tools/datasource-scaffold/sample/__init__.py | 1 | 1820 | # Copyright 2018 - Vitrage team
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from vitrage.common.constants import DatasourceOpts as DSOpts
from vitrage.common.constants import UpdateMethod
SAMPLE_DATASOURCE = 'sample'
OPTS = [
cfg.StrOpt(DSOpts.TRANSFORMER,
default='vitrage.datasources.sample.transformer.'
'SampleTransformer',
help='Sample transformer class path',
required=True),
cfg.StrOpt(DSOpts.DRIVER,
default='vitrage.datasources.sample.driver.'
'SampleDriver',
help='Sample driver class path',
required=True),
cfg.StrOpt(DSOpts.UPDATE_METHOD,
default=UpdateMethod.PULL,
               help='None: updates only via Vitrage periodic snapshots. '
                    'Pull: updates periodically. '
                    'Push: updates by getting notifications from the'
                    ' datasource itself.',
required=True),
cfg.IntOpt(DSOpts.CHANGES_INTERVAL,
default=30,
min=10,
               help='interval in seconds between checking changes in the '
                    'sample configuration files')]
class SampleFields(object):
TYPE = 'type'
ID = 'id'
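
# A minimal sketch of how these options would be registered under the
# datasource's config group (the group name here is an assumption; Vitrage
# normally performs this registration itself when loading datasources):
#
#     from oslo_config import cfg
#     cfg.CONF.register_opts(OPTS, group=SAMPLE_DATASOURCE)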
| apache-2.0 | 5,598,332,246,566,623,000 | 36.916667 | 75 | 0.636813 | false |
paultag/monomoy | monomoy/__init__.py | 1 | 1167 | # Copyright (c) 2012 Paul Tagliamonte <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
__appname__ = "monomoy"
__version__ = "0.1~pre1"
| mit | -483,813,172,240,003,100 | 52.045455 | 76 | 0.769494 | false |
angloc/savutil | csv2json.py | 1 | 3749 | # Convert a CSV file into a JSON object with distribution
import classifiedunicodevalue
from classifiedunicodevalue import ClassifiedUnicodeValue
from datautil import compressedValueSequence, compressedValues
import unicodecsv
from version import savutilName, savutilVersion
def blankNone (x):
if x is None:
return u""
else:
return unicode (x)
def objectify (x):
if x == "":
return None
else:
try:
i = int (x)
return i
except:
try:
f = float (x)
				return f
except:
return x
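
# Illustrative conversions (a sketch of the intended behaviour, not doctests):
#
#     objectify("")    -> None
#     objectify("3")   -> 3
#     objectify("2.5") -> 2.5
#     objectify("abc") -> "abc"
#     blankNone(None)  -> u""
#     blankNone(5)     -> u"5"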
if __name__ == "__main__":
import getopt
import json
import os
import sys
import xlrd
optlist, args = getopt.getopt(sys.argv[1:], 'ad:h:s:e:o:w:')
delimiter = ","
headerIndex = None
skipLines = None
encoding = "cp1252"
outputPath = ""
worksheetName = None
for (option, value) in optlist:
if option == "-d":
delimiter = value
if option == "-e":
encoding = value
if option == "-h":
headerIndex = int (value)
if option == "-o":
outputPath = value
if option == "-s":
skipLines = int (value)
if option == "-w":
worksheetName = value
if skipLines is None:
if headerIndex is None:
headerIndex = 1
skipLines = headerIndex
if len (args) < 1 or\
headerIndex > skipLines:
print "--Usage: [-d,] [-ecp1252] [-h1] [-s1] <inputFile> [<outputFile>]"
sys.exit (0)
(root, csvExt) = os.path.splitext (args [0])
if not csvExt:
if worksheetName:
csvExt = ".xlsx"
else:
csvExt = ".csv"
inputFilename = root + csvExt
if len (args) > 1:
outputFilename = args [1]
else:
outputFilename = os.path.join (outputPath, root + ".json")
if headerIndex:
print "..Using line %d for headers" % headerIndex
if not (skipLines == 1 and headerIndex == 1):
print "..Taking data from line %d onwards" % skipLines
if worksheetName:
print "..Looking for worksheet '%s' in workbook %s" %\
(worksheetName, inputFilename)
wb = xlrd.open_workbook (inputFilename)
ws = wb.sheet_by_name (worksheetName)
print ws.ncols, ws.nrows
csvRows = [
[ws.cell_value (rowx, colx) for colx in xrange (ws.ncols)]
for rowx in xrange (ws.nrows)
]
else:
csvFile = open (inputFilename)
csv = unicodecsv.UnicodeReader (csvFile, encoding=encoding, delimiter=delimiter)
csvRows = list (csv)
csvFile.close ()
if skipLines > len (csvRows):
print "--Only %d row(s) found in CSV file, %d required for header" %\
(len (csvRows), skipLines)
sys.exit (0)
if headerIndex:
headers = csvRows [headerIndex-1]
csvRows = csvRows [skipLines:]
print "..%d row(s) found in input" % len (csvRows)
jsonObject = {
"origin": "csv2json %s from %s" %
(savutilVersion, inputFilename),
"code_lists": {},
"variable_sequence": headers,
"total_count": len (csvRows),
"variables": {},
"data": {}
}
variables = jsonObject ["variables"]
data = jsonObject ["data"]
for index, variableName in enumerate (headers):
values = [ClassifiedUnicodeValue (row [index]).value for row in csvRows]
distribution = {}
for value in values:
if distribution.has_key (value):
distribution [value] += 1
else:
distribution [value] = 1
cd = classifiedunicodevalue.ClassifiedDistribution (distribution)
if cd.dataType == "integer":
jsonType = "integer"
elif cd.dataType == "decimal":
jsonType = "decimal"
elif cd.dataType == "text":
jsonType = "string"
else:
jsonType = "null"
variables [variableName] = {
"sequence": index + 1,
"name": variableName,
"json_type": jsonType,
"distribution": cd.toObject (includeTotal=False)
}
data [variableName] = compressedValues (values, jsonType)
jsonFile = open (outputFilename, 'wb')
json.dump (jsonObject, jsonFile,
sort_keys=True,
indent=4,
separators=(',', ': ')
)
jsonFile.close ()
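
# Example invocations (a sketch; the filenames and worksheet name are
# illustrative, not part of the original):
#
#     python csv2json.py -e utf-8 -h 1 -s 2 survey.csv
#     python csv2json.py -w "Sheet1" workbook.xlsx survey.json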
| gpl-2.0 | 4,556,128,825,799,290,000 | 23.503268 | 82 | 0.660443 | false |
ideascube/ideascube | ideascube/management/commands/tags.py | 1 | 7027 | import sys
from itertools import groupby
from operator import itemgetter
from django.utils.termcolors import colorize
from taggit.models import Tag, TaggedItem
from ideascube.management.base import BaseCommandWithSubcommands
from ideascube.utils import sanitize_tag_name
def log(text, **kwargs):
sys.stdout.write(colorize(str(text), **kwargs) + '\n')
def notice(text, **kwargs):
log(text, fg='blue')
def exit(text, **kwargs):
log(text, fg='red')
sys.exit(1)
class Command(BaseCommandWithSubcommands):
help = "Manage tags"
def add_arguments(self, parser):
super().add_arguments(parser)
count = self.subs.add_parser('count', help='Count tag usage')
count.add_argument('name', help='Tag name we want to count.')
count.set_defaults(func=self.count)
delete = self.subs.add_parser('delete', help='Delete tag')
delete.add_argument('name', help='Tag name we want to delete.')
delete.add_argument('--force', action='store_true',
help='Force delete even if tag is still used.')
delete.set_defaults(func=self.delete)
rename = self.subs.add_parser('rename', help='Rename a tag')
rename.add_argument('old', help='Old name.')
rename.add_argument('new', help='New name.')
rename.set_defaults(func=self.rename)
replace = self.subs.add_parser('replace',
help='Replace tag by another and delete it')
replace.add_argument('old', help='Old tag name.')
replace.add_argument('new', help='New tag name.')
replace.set_defaults(func=self.replace)
sanitize = self.subs.add_parser('sanitize',
help=('Sanitize existing tags.\n'
'Remove duplicates, clean characters...'))
sanitize.set_defaults(func=self.sanitize)
list_ = self.subs.add_parser('list', help='List tags')
list_.set_defaults(func=self.list)
def handle(self, *args, **options):
log('-'*80, fg='white')
return super().handle(*args, **options)
def _count(self, name):
return TaggedItem.objects.filter(tag__name=name).count()
def get_tag_or_exit(self, name):
tag = Tag.objects.filter(name=name).first()
if not tag:
exit('No tag found with name "{}"'.format(name))
return tag
def count(self, options):
count = self._count(options['name'])
notice('{count} object(s) tagged with "{name}"'.format(count=count,
**options))
def delete(self, options):
name = options['name']
tag = self.get_tag_or_exit(name)
count = self._count(name)
force = options.get('force')
if count and not force:
confirm = input('Tag "{}" is still linked to {} items.\n'
'Type "yes" to confirm delete or "no" to '
'cancel: '.format(name, count))
if confirm != 'yes':
exit("Delete cancelled.")
tag.delete()
notice('Deleted tag "{name}".'.format(**options))
def rename(self, options):
if options['old'] == options['new']:
exit('Nothing to rename, tags are equal.')
tag = self.get_tag_or_exit(options['old'])
if Tag.objects.filter(name=options['new']).exclude(pk=tag.pk).exists():
exit('Tag "{new}" already exists. Aborting.'.format(**options))
tag.name = options['new']
tag.save()
notice('Renamed "{old}" to "{new}".'.format(**options))
def replace(self, options):
if options['old'] == options['new']:
exit('Nothing to rename, tags are equal.')
old = self.get_tag_or_exit(options['old'])
new, created = Tag.objects.get_or_create(name=options['new'])
if created:
notice('Created tag "{new}"'.format(**options))
relations = TaggedItem.objects.filter(tag=old)
for relation in relations:
content = relation.content_object
notice('Processing "{}"'.format(repr(content)))
relation.delete()
content.tags.add(new)
old.delete()
notice('Deleted "{}"'.format(old))
def list(self, options):
row = '{:<40}{:<40}{}'
print(row.format('name', 'slug', 'count'))
print(row.format('.' * 40, '.' * 40, '.' * 40))
for tag in Tag.objects.order_by('slug'):
print(row.format(tag.name, tag.slug, self._count(tag.name)))
def sanitize(self, options):
all_tags = ((sanitize_tag_name(t.name), t) for t in Tag.objects.all())
all_tags = sorted(all_tags, key=itemgetter(0))
all_tags = groupby(all_tags, key=itemgetter(0))
for new_tag_name, tags in all_tags:
tags = (t[1] for t in tags)
if not new_tag_name:
# No need to delete relation, happy cascading !
for tag in tags:
tag.delete()
continue
tag = next(tags)
other_equivalent_tags = list(tags)
# All the relations we need to redirect to `tag`
other_relations = TaggedItem.objects.filter(
tag__in=other_equivalent_tags)
for relation in other_relations:
# if an object `o` is tagged with tag `foo` and `Foo`, the
# relation `o-Foo` must be change to `o-foo`. But this relation
# already exists, so, instead, we must delete `o-Foo`,
# not change it.
existing_relations = TaggedItem.objects.filter(
tag=tag,
object_id=relation.content_object.id,
                    content_type=relation.content_type)
if not existing_relations.exists():
# We must change the relation
relation.tag = tag
relation.save()
else:
# We have existing relation(s).
# We should not have more than one because we cannot have
# the *exact* same relation twice but let's be safe :
# delete any extra relation.
extra_relations = list(existing_relations)[1:]
for rel in extra_relations:
rel.delete()
# Then delete the current relation because we know we have
# an existing relation.
relation.delete()
# There is no relation to other tags left, delete them.
for t in other_equivalent_tags:
t.delete()
# Be sure our tag is correctly renamed.
# We do it at the end because the tag's name is unique and so,
# we want to be sure that all potential duplicates have been
# deleted/changed.
tag.name = new_tag_name
tag.save()
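
# A quick sketch of invoking the subcommands (assumes a standard Django
# project layout; the tag names are illustrative):
#
#     python manage.py tags list
#     python manage.py tags count mountains
#     python manage.py tags rename mountains mountain
#     python manage.py tags sanitize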
| agpl-3.0 | -4,190,601,327,684,026,000 | 38.477528 | 79 | 0.552156 | false |
hanteng/country-groups | scripts/_construct_data_EU-Asia.py | 1 | 4189 | # -*- coding: utf-8 -*-
# Discrimination is boundless; turning back is the shore. Keystrokes rise and fall; feelings true and illusory.
# Correction
import os.path, glob
import requests
from lxml.html import fromstring, tostring, parse
from io import StringIO, BytesIO
import codecs
import pandas as pd
import json
XML_encoding="utf-8"
# Data source
URL_ = "https://ec.europa.eu/europeaid/regions/asia-0_en"
URL_country_names_template = "https://raw.githubusercontent.com/hanteng/country-names/master/data/CLDR_country_name_{locale}.tsv"
URL_country_names = URL_country_names_template.format(locale= 'en')
# Xpath extraction
_xpath = '//*[@id="block-views-8eba70350aa66960065a1bb4224c751a"]/div/div/div/div/ul/li/a/text()'
## Outpuing Lists
PE = 'EU-Asia'
path_data = u'../data'
outputfn1 = os.path.join(path_data, "PE_org.json")
outputfn2 = os.path.join(path_data, "CLDR_UN_region.tsv")
def url_request (url):
r = requests.get(url)
if r.status_code == 200:
#r.raw.decode_content = True
return r
else:
print ("Downloading the data from {0} failed. Plese check Internet connections.".format(XML_src_url))
return None
def url_local_request (url):
fn_local = os.path.join(path_data, PE+ ".htm")
print (fn_local) #debug
try:
tree = parse(fn_local)
except:
r = url_request (url)
XML_src=r.content
with codecs.open(fn_local, "w", XML_encoding) as file:
file.write(XML_src.decode(XML_encoding))
#from lxml.html.clean import clean_html
#XML_src = clean_html(XML_src)
tree = fromstring(XML_src)
return tree
t = url_local_request(URL_)
list_country_names_Web = t.xpath(_xpath) # Based on the network map http://www.tein.asia/tein4/network/maps.do TW is included and 24 listed
print (list_country_names_Web)
## Retrive data directly from unicode-cldr project hosted at github
print ("Retrieve country names data now ...")
locale = "en"
url = URL_country_names_template.format(locale=locale)
df_results = pd.read_csv(url, sep='\t', encoding='utf-8',
na_values=[], keep_default_na = False,
names = ['c','n'] , index_col='c',
)
## Construct dictionary for country/region names
c_names = df_results.to_dict()['n'] #http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_dict.html
c_names_inv = {v: k for k, v in c_names.items()}
## Country names fuzzy match
from fuzzywuzzy import process
choice=[]
for i, c_name_Web in enumerate(list_country_names_Web):
#found_candidates = [x for x in c_names_inv.keys() if fuzzy_match(x,c_name_Web)==True]
found_candidate = process.extract(c_name_Web, c_names_inv.keys(), limit=1)
found_candidate_c = c_names_inv[found_candidate[0][0]]
choice_item = [i, c_name_Web, found_candidate, found_candidate_c]
#print (choice_item)
choice.append(choice_item)
import ast
done = False
while not(done):
try:
# Note: Python 2.x users should use raw_input, the equivalent of 3.x's input
prn= [repr(x) for x in choice]
print ("\n\r".join(prn))
i = int(input("Please enter your corrections: Serial no (-1:None): "))
if i==-1:
print ("Done!")
            done = True
break
else:
if i in range(len(choice)):
c = input("Please enter your corrections: Country code (ISO-alpha2): ")
choice[i][3] = c
else:
print("Sorry, Please revise your input.")
except ValueError:
print("Sorry, I didn't understand that.")
#better try again... Return to the start of the loop
continue
list_country_codes_Web = [x[3] for x in choice]
print (list_country_codes_Web)
print (list_country_names_Web)
print ("==========")
PE_org = dict()
with codecs.open(outputfn1, encoding='utf-8', mode='r+') as fp:
lines=fp.readlines()
PE_org = json.loads(u"".join(lines))
print ("Before:", PE_org.keys())
d={PE: list_country_codes_Web}
print("Adding:",d)
PE_org.update(d)
print ("After:", PE_org.keys())
with codecs.open(outputfn1, encoding='utf-8', mode='w') as fp:
json.dump(PE_org, fp)
| gpl-3.0 | 8,520,566,600,957,169,000 | 29.962687 | 140 | 0.633888 | false |
eckardm/archivematica | src/dashboard/src/components/archival_storage/forms.py | 1 | 1245 | # This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
from django import forms
class CreateAICForm(forms.Form):
results = forms.CharField(label=None, required=True, widget=forms.widgets.HiddenInput())
class ReingestAIPForm(forms.Form):
METADATA_ONLY = 'metadata'
OBJECTS = 'objects'
REINGEST_CHOICES = (
(METADATA_ONLY, 'Re-ingest metadata only'),
(OBJECTS, 'Re-ingest metadata and objects')
)
reingest_type = forms.ChoiceField(choices=REINGEST_CHOICES, widget=forms.RadioSelect, required=True)
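
# A minimal sketch of binding the form in a view (the payload is illustrative):
#
#     form = ReingestAIPForm(data={'reingest_type': ReingestAIPForm.METADATA_ONLY})
#     assert form.is_valid()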
| agpl-3.0 | -1,230,661,360,885,088,800 | 37.90625 | 105 | 0.746185 | false |
petropavel13/2photo-api | utils.py | 1 | 3700 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals # py2
from datetime import datetime
from django.utils import timezone as tz
from django.utils.timezone import is_naive, make_aware
msk_tz = tz.pytz.timezone('Europe/Moscow')
date_mapping = {
'января': '1',
'февраля': '2',
'марта': '3',
'апреля': '4',
'мая': '5',
'июня': '6',
'июля': '7',
'августа': '8',
'сентября': '9',
'октября': '10',
'ноября': '11',
'декабря': '12',
}
def to_msk_datetime(datetime):
if is_naive(datetime):
return datetime.replace(tzinfo=msk_tz)
elif datetime.tzinfo == msk_tz:
return datetime
else:
return tz.localtime(datetime, msk_tz)
def if_not_none(func):
return lambda arg: None if arg is None else func(arg)
@if_not_none
def prct_to_int(percent):
return int(percent[:percent.index('%')])
@if_not_none
def url_to_id(url):
str_id = url[url.rindex('/') + 1:]
return int(str_id) if len(str_id) > 0 else None
def ru_str_date_to_date_stream(ru_date):
new_date = ru_date
for ru, en in date_mapping.items():
new_date = new_date.replace(ru, en)
py2_date = new_date.encode('utf-8')
py3_format = '%d %m %Y г. %H:%M'
py2_format = py3_format.encode('utf-8')
date = datetime.strptime(py2_date if isinstance(py2_date, str) else new_date,
py2_format if isinstance(py2_format, str) else py3_format)
return to_msk_datetime(date)
def ru_str_date_to_date_comment(ru_date):
new_date = ru_date
for ru, en in date_mapping.items():
new_date = new_date.replace(ru, en)
str_date = new_date.replace('\n ', '').replace(' ', '')
py3_date = '0' + str_date if str_date.index(':') == 1 else str_date
py2_date = py3_date.encode('utf-8')
py3_format = '%H:%M,%d %m %Y г.'
py2_format = py3_format.encode('utf-8')
date = datetime.strptime(py2_date if isinstance(py2_date, str) else py3_date,
py2_format if isinstance(py2_format, str) else py3_format)
return to_msk_datetime(date)
def ru_str_date_to_date_reg(ru_date):
new_date = ru_date
for ru, en in date_mapping.items():
new_date = new_date.replace(ru, en)
return to_msk_datetime(datetime.strptime(new_date, '%d %m %Y'))
def ru_str_date_to_date_last_visit(ru_date):
new_date = ru_date
for ru, en in date_mapping.items():
new_date = new_date.replace(ru, en)
date = datetime.strptime(new_date, '%d %m %Y, %H:%M')
return to_msk_datetime(date)
def clean_dict_for_model(dict_obj, dj_model):
return { f.name : dict_obj[f.name] for f in dj_model._meta.fields }
def dict_to_model_instance(dict_obj, dj_model):
return dj_model( **clean_dict_for_model(dict_obj, dj_model) )
def bulk_create_by_chunks(iterable_objects, dj_model, chunk_size=1024):
buffer = [None] * chunk_size
next_idx = 0
for obj in iterable_objects:
buffer[next_idx] = obj
next_idx += 1
if next_idx % chunk_size == 0:
dj_model.objects.bulk_create(buffer)
next_idx = 0
dj_model.objects.bulk_create(buffer[0:next_idx])
def namedtuples_to_model_instances_generator(namedtuples, dj_model):
for namedtuple in namedtuples:
yield dict_to_model_instance(namedtuple._asdict(), dj_model)
def bulk_save_namedtuples(namedtuples, dj_model, chunk_size=1024):
model_instances_generator = namedtuples_to_model_instances_generator(namedtuples, dj_model)
bulk_create_by_chunks(model_instances_generator, dj_model, chunk_size)
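
# A short usage sketch, assuming `Photo` is a Django model whose field names
# match the namedtuple fields (all names below are assumptions):
#
#     from collections import namedtuple
#     Row = namedtuple('Row', ['title', 'url'])
#     rows = (Row(title=t, url=u) for t, u in source)
#     bulk_save_namedtuples(rows, Photo, chunk_size=512)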
| mit | -4,624,639,881,835,006,000 | 24.549296 | 95 | 0.617971 | false |
shibanis1/spark-tk | python/sparktk/frame/ops/dot_product.py | 1 | 3552 | def dot_product(self, left_column_names,right_column_names,dot_product_column_name,default_left_values=None,default_right_values=None):
"""
Calculate dot product for each row in current frame.
Parameters
----------
:param left_column_names: (List[str]) Names of columns used to create the left vector (A) for each row.
Names should refer to a single column of type vector, or two or more columns of numeric scalars.
:param right_column_names: (List[str]) Names of columns used to create right vector (B) for each row.
Names should refer to a single column of type vector, or two or more columns of numeric scalars.
:param dot_product_column_name: (str) Name of column used to store the dot product.
    :param default_left_values: (Optional[List[float]]) Default values used to substitute null values in left vector. Default is None.
    :param default_right_values: (Optional[List[float]]) Default values used to substitute null values in right vector. Default is None.
:return: (Frame) returns a frame with give "dot_product" column name
Calculate the dot product for each row in a frame using values from two equal-length sequences of columns.
Dot product is computed by the following formula:
    The dot product of two vectors :math:`A = [a_1, a_2, ..., a_n]` and :math:`B = [b_1, b_2, ..., b_n]` is :math:`a_1*b_1 + a_2*b_2 + ... + a_n*b_n`.
The dot product for each row is stored in a new column in the existing frame.
Notes
-----
* If default_left_values or default_right_values are not specified, any null values will be replaced by zeros.
* This method applies only to columns containing numerical data.
Examples
--------
>>> data = [[1, 0.2, -2, 5], [2, 0.4, -1, 6], [3, 0.6, 0, 7], [4, 0.8, 1, 8]]
>>> schema = [('col_0', int), ('col_1', float),('col_2', int) ,('col_3', int)]
>>> my_frame = tc.frame.create(data, schema)
<progress>
Calculate the dot product for a sequence of columns in Frame object *my_frame*:
>>> my_frame.inspect()
[#] col_0 col_1 col_2 col_3
===============================
[0] 1 0.2 -2 5
[1] 2 0.4 -1 6
[2] 3 0.6 0 7
[3] 4 0.8 1 8
Modify the frame by computing the dot product for a sequence of columns:
>>> my_frame.dot_product(['col_0','col_1'], ['col_2', 'col_3'], 'dot_product')
<progress>
>>> my_frame.inspect()
[#] col_0 col_1 col_2 col_3 dot_product
============================================
[0] 1 0.2 -2 5 -1.0
[1] 2 0.4 -1 6 0.4
[2] 3 0.6 0 7 4.2
[3] 4 0.8 1 8 10.4
"""
if not isinstance(left_column_names, list):
left_column_names = [left_column_names]
if not isinstance(right_column_names, list):
right_column_names = [right_column_names]
self._scala.dotProduct(self._tc.jutils.convert.to_scala_list_string(left_column_names),
self._tc.jutils.convert.to_scala_list_string(right_column_names),
dot_product_column_name,
self._tc.jutils.convert.to_scala_option_list_double(default_left_values),
self._tc.jutils.convert.to_scala_option_list_double(default_right_values))
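
# A short hedged sketch of a call, mirroring the docstring example (the
# TkContext `tc` and the column names are assumptions):
#
#     frame = tc.frame.create([[1, 2.0, 3, 4]],
#                             [('a', int), ('b', float), ('c', int), ('d', int)])
#     frame.dot_product(['a', 'b'], ['c', 'd'], 'dp')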
| apache-2.0 | -7,716,103,984,630,105,000 | 46.36 | 146 | 0.561937 | false |
bjoernmainz/simple-contact-form | images/bin/rename_speech.py | 1 | 1168 | #!/usr/bin/python
import os
import glob
import shutil
def rename(files):
new_files = []
for f in files:
dirname = os.path.dirname(f)
basename = os.path.basename(f)
filetype = ""
number = ""
print("------------")
print("Basename: %s") % basename
if not basename.find("ogg") == -1:
file_type = "ogg";
number = basename.replace(".ogg", "")
elif not basename.find("mp3") == -1:
file_type = "mp3";
number = basename.replace(".mp3", "")
else:
raise("Not found")
print "Number: %s" % number
new_number = blist[number]
new_filename = "%s/%s_new.%s" % (dirname, blist[number], file_type)
print("%s -> %s") % (f, new_filename)
shutil.move(f, new_filename)
new_files.append(new_filename)
#print blist
fileh = open("../../config/list.txt.php")
blist = {}
for f in fileh:
f = f.rstrip()
f = f.split("|")
f[1] = f[1].rstrip()
blist[f[1]] = f[0]
globber = glob.glob("../captcha/speech/de/*.ogg")
rename(globber)
globber = glob.glob("../captcha/speech/de/*.mp3")
rename(globber)
globber = glob.glob("../captcha/speech/en/*.ogg")
rename(globber)
globber = glob.glob("../captcha/speech/en/*.mp3")
rename(globber)
| gpl-2.0 | 4,740,280,325,139,808,000 | 20.236364 | 69 | 0.605308 | false |
openstack/designate | designate/rpc.py | 1 | 8026 | # Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import threading
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_messaging.rpc import dispatcher as rpc_dispatcher
from oslo_serialization import jsonutils
from designate import objects
import designate.context
import designate.exceptions
__all__ = [
'init',
'cleanup',
'set_defaults',
'add_extra_exmods',
'clear_extra_exmods',
'get_allowed_exmods',
'RequestContextSerializer',
'get_client',
'get_server',
'get_notifier',
]
CONF = cfg.CONF
EXPECTED_EXCEPTION = threading.local()
NOTIFICATION_TRANSPORT = None
NOTIFIER = None
TRANSPORT = None
# NOTE: Additional entries to designate.exceptions goes here.
ALLOWED_EXMODS = [
designate.exceptions.__name__,
'designate.backend.impl_dynect'
]
EXTRA_EXMODS = []
def init(conf):
global TRANSPORT, NOTIFIER, NOTIFICATION_TRANSPORT
exmods = get_allowed_exmods()
TRANSPORT = create_transport(get_transport_url())
NOTIFICATION_TRANSPORT = messaging.get_notification_transport(
conf, allowed_remote_exmods=exmods)
serializer = RequestContextSerializer(JsonPayloadSerializer())
NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
serializer=serializer)
def initialized():
return None not in [TRANSPORT, NOTIFIER, NOTIFICATION_TRANSPORT]
def cleanup():
global TRANSPORT, NOTIFIER, NOTIFICATION_TRANSPORT
if TRANSPORT is None:
raise AssertionError("'TRANSPORT' must not be None")
if NOTIFICATION_TRANSPORT is None:
raise AssertionError("'NOTIFICATION_TRANSPORT' must not be None")
if NOTIFIER is None:
raise AssertionError("'NOTIFIER' must not be None")
TRANSPORT.cleanup()
NOTIFICATION_TRANSPORT.cleanup()
TRANSPORT = NOTIFICATION_TRANSPORT = NOTIFIER = None
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def add_extra_exmods(*args):
EXTRA_EXMODS.extend(args)
def clear_extra_exmods():
del EXTRA_EXMODS[:]
def get_allowed_exmods():
return ALLOWED_EXMODS + EXTRA_EXMODS + CONF.allowed_remote_exmods
class JsonPayloadSerializer(messaging.NoOpSerializer):
@staticmethod
def serialize_entity(context, entity):
return jsonutils.to_primitive(entity, convert_instances=True)
class DesignateObjectSerializer(messaging.NoOpSerializer):
def _process_iterable(self, context, action_fn, values):
"""Process an iterable, taking an action on each value.
:param:context: Request context
:param:action_fn: Action to take on each item in values
:param:values: Iterable container of things to take action on
:returns: A new container of the same type (except set) with
items from values having had action applied.
"""
iterable = values.__class__
if iterable == set:
# NOTE: A set can't have an unhashable value inside, such as
# a dict. Convert sets to tuples, which is fine, since we can't
# send them over RPC anyway.
iterable = tuple
return iterable([action_fn(context, value) for value in values])
def serialize_entity(self, context, entity):
if isinstance(entity, (tuple, list, set)):
entity = self._process_iterable(context, self.serialize_entity,
entity)
elif hasattr(entity, 'to_primitive') and callable(entity.to_primitive):
entity = entity.to_primitive()
return jsonutils.to_primitive(entity, convert_instances=True)
def deserialize_entity(self, context, entity):
if isinstance(entity, dict) and 'designate_object.name' in entity:
entity = objects.DesignateObject.from_primitive(entity)
elif isinstance(entity, (tuple, list, set)):
entity = self._process_iterable(context, self.deserialize_entity,
entity)
return entity
class RequestContextSerializer(messaging.Serializer):
def __init__(self, base):
self._base = base
def serialize_entity(self, context, entity):
if not self._base:
return entity
return self._base.serialize_entity(context, entity)
def deserialize_entity(self, context, entity):
if not self._base:
return entity
return self._base.deserialize_entity(context, entity)
def serialize_context(self, context):
return context.to_dict()
def deserialize_context(self, context):
return designate.context.DesignateContext.from_dict(context)
def get_transport_url(url_str=None):
return messaging.TransportURL.parse(CONF, url_str)
def get_client(target, version_cap=None, serializer=None):
if TRANSPORT is None:
raise AssertionError("'TRANSPORT' must not be None")
if serializer is None:
serializer = DesignateObjectSerializer()
serializer = RequestContextSerializer(serializer)
return messaging.RPCClient(
TRANSPORT,
target,
version_cap=version_cap,
serializer=serializer
)
def get_server(target, endpoints, serializer=None):
if TRANSPORT is None:
raise AssertionError("'TRANSPORT' must not be None")
if serializer is None:
serializer = DesignateObjectSerializer()
serializer = RequestContextSerializer(serializer)
access_policy = rpc_dispatcher.DefaultRPCAccessPolicy
return messaging.get_rpc_server(
TRANSPORT,
target,
endpoints,
executor='eventlet',
serializer=serializer,
access_policy=access_policy
)
def get_notification_listener(targets, endpoints, serializer=None, pool=None):
if NOTIFICATION_TRANSPORT is None:
raise AssertionError("'NOTIFICATION_TRANSPORT' must not be None")
if serializer is None:
serializer = JsonPayloadSerializer()
return messaging.get_notification_listener(
NOTIFICATION_TRANSPORT,
targets,
endpoints,
executor='eventlet',
pool=pool,
serializer=serializer
)
def get_notifier(service=None, host=None, publisher_id=None):
if NOTIFIER is None:
raise AssertionError("'NOTIFIER' must not be None")
if not publisher_id:
publisher_id = "%s.%s" % (service, host or CONF.host)
return NOTIFIER.prepare(publisher_id=publisher_id)
def create_transport(url):
exmods = get_allowed_exmods()
return messaging.get_rpc_transport(CONF,
url=url,
allowed_remote_exmods=exmods)
def expected_exceptions():
def outer(f):
@functools.wraps(f)
def exception_wrapper(self, *args, **kwargs):
if not hasattr(EXPECTED_EXCEPTION, 'depth'):
EXPECTED_EXCEPTION.depth = 0
EXPECTED_EXCEPTION.depth += 1
# We only want to wrap the first function wrapped.
if EXPECTED_EXCEPTION.depth > 1:
return f(self, *args, **kwargs)
try:
return f(self, *args, **kwargs)
except designate.exceptions.DesignateException as e:
if e.expected:
raise rpc_dispatcher.ExpectedException()
raise
finally:
EXPECTED_EXCEPTION.depth = 0
return exception_wrapper
return outer
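
# A minimal sketch of how the decorator is meant to wrap an RPC endpoint
# method (the service class below is illustrative; any DesignateException
# raised with its `expected` attribute set to True is re-raised as an
# oslo.messaging ExpectedException instead of surfacing as a server error):
#
#     class CentralEndpoint(object):
#         @expected_exceptions()
#         def get_zone(self, context, zone_id):
#             raise designate.exceptions.DesignateException()  # expected=True case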
| apache-2.0 | -4,182,365,424,016,584,000 | 31.626016 | 79 | 0.661974 | false |
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_06_01/aio/operations/_dedicated_host_groups_operations.py | 1 | 21434 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DedicatedHostGroupsOperations:
"""DedicatedHostGroupsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def create_or_update(
self,
resource_group_name: str,
host_group_name: str,
parameters: "_models.DedicatedHostGroup",
**kwargs: Any
) -> "_models.DedicatedHostGroup":
"""Create or update a dedicated host group. For details of Dedicated Host and Dedicated Host
Groups please see [Dedicated Host Documentation]
(https://go.microsoft.com/fwlink/?linkid=2082596).
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param host_group_name: The name of the dedicated host group.
:type host_group_name: str
:param parameters: Parameters supplied to the Create Dedicated Host Group.
:type parameters: ~azure.mgmt.compute.v2020_06_01.models.DedicatedHostGroup
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DedicatedHostGroup, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_06_01.models.DedicatedHostGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DedicatedHostGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'hostGroupName': self._serialize.url("host_group_name", host_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'DedicatedHostGroup')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('DedicatedHostGroup', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('DedicatedHostGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/hostGroups/{hostGroupName}'} # type: ignore
async def update(
self,
resource_group_name: str,
host_group_name: str,
parameters: "_models.DedicatedHostGroupUpdate",
**kwargs: Any
) -> "_models.DedicatedHostGroup":
"""Update an dedicated host group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param host_group_name: The name of the dedicated host group.
:type host_group_name: str
:param parameters: Parameters supplied to the Update Dedicated Host Group operation.
:type parameters: ~azure.mgmt.compute.v2020_06_01.models.DedicatedHostGroupUpdate
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DedicatedHostGroup, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_06_01.models.DedicatedHostGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DedicatedHostGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'hostGroupName': self._serialize.url("host_group_name", host_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'DedicatedHostGroupUpdate')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DedicatedHostGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/hostGroups/{hostGroupName}'} # type: ignore
async def delete(
self,
resource_group_name: str,
host_group_name: str,
**kwargs: Any
) -> None:
"""Delete a dedicated host group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param host_group_name: The name of the dedicated host group.
:type host_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'hostGroupName': self._serialize.url("host_group_name", host_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/hostGroups/{hostGroupName}'} # type: ignore
async def get(
self,
resource_group_name: str,
host_group_name: str,
expand: Optional[str] = "instanceView",
**kwargs: Any
) -> "_models.DedicatedHostGroup":
"""Retrieves information about a dedicated host group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param host_group_name: The name of the dedicated host group.
:type host_group_name: str
:param expand: The expand expression to apply on the operation. The response shows the list of
instance view of the dedicated hosts under the dedicated host group.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DedicatedHostGroup, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_06_01.models.DedicatedHostGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DedicatedHostGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'hostGroupName': self._serialize.url("host_group_name", host_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DedicatedHostGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/hostGroups/{hostGroupName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.DedicatedHostGroupListResult"]:
"""Lists all of the dedicated host groups in the specified resource group. Use the nextLink
property in the response to get the next page of dedicated host groups.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DedicatedHostGroupListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2020_06_01.models.DedicatedHostGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DedicatedHostGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('DedicatedHostGroupListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/hostGroups'} # type: ignore
def list_by_subscription(
self,
**kwargs: Any
) -> AsyncIterable["_models.DedicatedHostGroupListResult"]:
"""Lists all of the dedicated host groups in the subscription. Use the nextLink property in the
response to get the next page of dedicated host groups.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DedicatedHostGroupListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2020_06_01.models.DedicatedHostGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DedicatedHostGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('DedicatedHostGroupListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/hostGroups'} # type: ignore
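# Illustrative usage (not part of the generated operations class): both list
# methods above return an AsyncItemPaged, so callers drive the
# prepare_request/extract_data/get_next trio lazily with ``async for``. The
# client and credential classes named below are assumptions for this sketch.
#
#     from azure.identity.aio import DefaultAzureCredential
#     from azure.mgmt.compute.aio import ComputeManagementClient
#
#     async def print_host_groups(subscription_id: str) -> None:
#         async with ComputeManagementClient(
#                 DefaultAzureCredential(), subscription_id) as client:
#             async for group in client.dedicated_host_groups.list_by_subscription():
#                 print(group.name)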
| mit | -3,765,282,343,104,155,000 | 48.273563 | 180 | 0.646776 | false |
ewandor/home-assistant | homeassistant/components/websocket_api.py | 1 | 14819 | """
Websocket based API for Home Assistant.
For more details about this component, please refer to the documentation at
https://home-assistant.io/developers/websocket_api/
"""
import asyncio
from contextlib import suppress
from functools import partial
import json
import logging
from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant.const import (
MATCH_ALL, EVENT_TIME_CHANGED, EVENT_HOMEASSISTANT_STOP,
__version__)
from homeassistant.components import frontend
from homeassistant.core import callback
from homeassistant.remote import JSONEncoder
from homeassistant.helpers import config_validation as cv
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.http.auth import validate_password
from homeassistant.components.http.const import KEY_AUTHENTICATED
from homeassistant.components.http.ban import process_wrong_login
DOMAIN = 'websocket_api'
URL = '/api/websocket'
DEPENDENCIES = ('http',)
MAX_PENDING_MSG = 512
ERR_ID_REUSE = 1
ERR_INVALID_FORMAT = 2
ERR_NOT_FOUND = 3
TYPE_AUTH = 'auth'
TYPE_AUTH_INVALID = 'auth_invalid'
TYPE_AUTH_OK = 'auth_ok'
TYPE_AUTH_REQUIRED = 'auth_required'
TYPE_CALL_SERVICE = 'call_service'
TYPE_EVENT = 'event'
TYPE_GET_CONFIG = 'get_config'
TYPE_GET_PANELS = 'get_panels'
TYPE_GET_SERVICES = 'get_services'
TYPE_GET_STATES = 'get_states'
TYPE_PING = 'ping'
TYPE_PONG = 'pong'
TYPE_RESULT = 'result'
TYPE_SUBSCRIBE_EVENTS = 'subscribe_events'
TYPE_UNSUBSCRIBE_EVENTS = 'unsubscribe_events'
_LOGGER = logging.getLogger(__name__)
JSON_DUMP = partial(json.dumps, cls=JSONEncoder)
AUTH_MESSAGE_SCHEMA = vol.Schema({
vol.Required('type'): TYPE_AUTH,
vol.Required('api_password'): str,
})
SUBSCRIBE_EVENTS_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_SUBSCRIBE_EVENTS,
vol.Optional('event_type', default=MATCH_ALL): str,
})
UNSUBSCRIBE_EVENTS_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_UNSUBSCRIBE_EVENTS,
vol.Required('subscription'): cv.positive_int,
})
CALL_SERVICE_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_CALL_SERVICE,
vol.Required('domain'): str,
vol.Required('service'): str,
vol.Optional('service_data', default=None): dict
})
GET_STATES_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_GET_STATES,
})
GET_SERVICES_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_GET_SERVICES,
})
GET_CONFIG_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_GET_CONFIG,
})
GET_PANELS_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_GET_PANELS,
})
PING_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_PING,
})
BASE_COMMAND_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): vol.Any(TYPE_CALL_SERVICE,
TYPE_SUBSCRIBE_EVENTS,
TYPE_UNSUBSCRIBE_EVENTS,
TYPE_GET_STATES,
TYPE_GET_SERVICES,
TYPE_GET_CONFIG,
TYPE_GET_PANELS,
TYPE_PING)
}, extra=vol.ALLOW_EXTRA)
def auth_ok_message():
"""Return an auth_ok message."""
return {
'type': TYPE_AUTH_OK,
'ha_version': __version__,
}
def auth_required_message():
"""Return an auth_required message."""
return {
'type': TYPE_AUTH_REQUIRED,
'ha_version': __version__,
}
def auth_invalid_message(message):
"""Return an auth_invalid message."""
return {
'type': TYPE_AUTH_INVALID,
'message': message,
}
def event_message(iden, event):
"""Return an event message."""
return {
'id': iden,
'type': TYPE_EVENT,
'event': event.as_dict(),
}
def error_message(iden, code, message):
"""Return an error result message."""
return {
'id': iden,
'type': TYPE_RESULT,
'success': False,
'error': {
'code': code,
'message': message,
},
}
def pong_message(iden):
"""Return a pong message."""
return {
'id': iden,
'type': TYPE_PONG,
}
def result_message(iden, result=None):
"""Return a success result message."""
return {
'id': iden,
'type': TYPE_RESULT,
'success': True,
'result': result,
}
@asyncio.coroutine
def async_setup(hass, config):
"""Initialize the websocket API."""
hass.http.register_view(WebsocketAPIView)
return True
class WebsocketAPIView(HomeAssistantView):
"""View to serve a websockets endpoint."""
name = "websocketapi"
url = URL
requires_auth = False
@asyncio.coroutine
def get(self, request):
"""Handle an incoming websocket connection."""
# pylint: disable=no-self-use
return ActiveConnection(request.app['hass'], request).handle()
class ActiveConnection:
"""Handle an active websocket client connection."""
def __init__(self, hass, request):
"""Initialize an active connection."""
self.hass = hass
self.request = request
self.wsock = None
self.event_listeners = {}
self.to_write = asyncio.Queue(maxsize=MAX_PENDING_MSG, loop=hass.loop)
self._handle_task = None
self._writer_task = None
def debug(self, message1, message2=''):
"""Print a debug message."""
_LOGGER.debug("WS %s: %s %s", id(self.wsock), message1, message2)
def log_error(self, message1, message2=''):
"""Print an error message."""
_LOGGER.error("WS %s: %s %s", id(self.wsock), message1, message2)
@asyncio.coroutine
def _writer(self):
"""Write outgoing messages."""
# Exceptions if Socket disconnected or cancelled by connection handler
with suppress(RuntimeError, asyncio.CancelledError):
while not self.wsock.closed:
message = yield from self.to_write.get()
if message is None:
break
self.debug("Sending", message)
yield from self.wsock.send_json(message, dumps=JSON_DUMP)
@callback
def send_message_outside(self, message):
"""Send a message to the client outside of the main task.
Closes connection if the client is not reading the messages.
Async friendly.
"""
try:
self.to_write.put_nowait(message)
except asyncio.QueueFull:
self.log_error("Client exceeded max pending messages [2]:",
MAX_PENDING_MSG)
self.cancel()
@callback
def cancel(self):
"""Cancel the connection."""
self._handle_task.cancel()
self._writer_task.cancel()
@asyncio.coroutine
def handle(self):
"""Handle the websocket connection."""
request = self.request
wsock = self.wsock = web.WebSocketResponse(heartbeat=55)
yield from wsock.prepare(request)
self.debug("Connected")
# Get a reference to current task so we can cancel our connection
self._handle_task = asyncio.Task.current_task(loop=self.hass.loop)
@callback
def handle_hass_stop(event):
"""Cancel this connection."""
self.cancel()
unsub_stop = self.hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP, handle_hass_stop)
self._writer_task = self.hass.async_add_job(self._writer())
final_message = None
msg = None
authenticated = False
try:
if request[KEY_AUTHENTICATED]:
authenticated = True
else:
yield from self.wsock.send_json(auth_required_message())
msg = yield from wsock.receive_json()
msg = AUTH_MESSAGE_SCHEMA(msg)
if validate_password(request, msg['api_password']):
authenticated = True
else:
self.debug("Invalid password")
yield from self.wsock.send_json(
auth_invalid_message('Invalid password'))
if not authenticated:
yield from process_wrong_login(request)
return wsock
yield from self.wsock.send_json(auth_ok_message())
# ---------- AUTH PHASE OVER ----------
msg = yield from wsock.receive_json()
last_id = 0
while msg:
self.debug("Received", msg)
msg = BASE_COMMAND_MESSAGE_SCHEMA(msg)
cur_id = msg['id']
if cur_id <= last_id:
self.to_write.put_nowait(error_message(
cur_id, ERR_ID_REUSE,
'Identifier values have to increase.'))
else:
handler_name = 'handle_{}'.format(msg['type'])
getattr(self, handler_name)(msg)
last_id = cur_id
msg = yield from wsock.receive_json()
except vol.Invalid as err:
error_msg = "Message incorrectly formatted: "
if msg:
error_msg += humanize_error(msg, err)
else:
error_msg += str(err)
self.log_error(error_msg)
if not authenticated:
final_message = auth_invalid_message(error_msg)
else:
if isinstance(msg, dict):
iden = msg.get('id')
else:
iden = None
final_message = error_message(
iden, ERR_INVALID_FORMAT, error_msg)
except TypeError as err:
if wsock.closed:
self.debug("Connection closed by client")
else:
_LOGGER.exception("Unexpected TypeError: %s", msg)
except ValueError as err:
msg = "Received invalid JSON"
value = getattr(err, 'doc', None) # Py3.5+ only
if value:
msg += ': {}'.format(value)
self.log_error(msg)
self._writer_task.cancel()
except asyncio.CancelledError:
self.debug("Connection cancelled by server")
except asyncio.QueueFull:
self.log_error("Client exceeded max pending messages [1]:",
MAX_PENDING_MSG)
self._writer_task.cancel()
except Exception: # pylint: disable=broad-except
error = "Unexpected error inside websocket API. "
if msg is not None:
error += str(msg)
_LOGGER.exception(error)
finally:
unsub_stop()
for unsub in self.event_listeners.values():
unsub()
try:
if final_message is not None:
self.to_write.put_nowait(final_message)
self.to_write.put_nowait(None)
# Make sure all error messages are written before closing
yield from self._writer_task
except asyncio.QueueFull:
self._writer_task.cancel()
yield from wsock.close()
self.debug("Closed connection")
return wsock
def handle_subscribe_events(self, msg):
"""Handle subscribe events command.
Async friendly.
"""
msg = SUBSCRIBE_EVENTS_MESSAGE_SCHEMA(msg)
@asyncio.coroutine
def forward_events(event):
"""Forward events to websocket."""
if event.event_type == EVENT_TIME_CHANGED:
return
self.send_message_outside(event_message(msg['id'], event))
self.event_listeners[msg['id']] = self.hass.bus.async_listen(
msg['event_type'], forward_events)
self.to_write.put_nowait(result_message(msg['id']))
def handle_unsubscribe_events(self, msg):
"""Handle unsubscribe events command.
Async friendly.
"""
msg = UNSUBSCRIBE_EVENTS_MESSAGE_SCHEMA(msg)
subscription = msg['subscription']
if subscription in self.event_listeners:
self.event_listeners.pop(subscription)()
self.to_write.put_nowait(result_message(msg['id']))
else:
self.to_write.put_nowait(error_message(
msg['id'], ERR_NOT_FOUND,
'Subscription not found.'))
def handle_call_service(self, msg):
"""Handle call service command.
        Async friendly.
"""
msg = CALL_SERVICE_MESSAGE_SCHEMA(msg)
@asyncio.coroutine
def call_service_helper(msg):
"""Call a service and fire complete message."""
yield from self.hass.services.async_call(
msg['domain'], msg['service'], msg['service_data'], True)
self.send_message_outside(result_message(msg['id']))
self.hass.async_add_job(call_service_helper(msg))
def handle_get_states(self, msg):
"""Handle get states command.
Async friendly.
"""
msg = GET_STATES_MESSAGE_SCHEMA(msg)
self.to_write.put_nowait(result_message(
msg['id'], self.hass.states.async_all()))
def handle_get_services(self, msg):
"""Handle get services command.
Async friendly.
"""
msg = GET_SERVICES_MESSAGE_SCHEMA(msg)
self.to_write.put_nowait(result_message(
msg['id'], self.hass.services.async_services()))
def handle_get_config(self, msg):
"""Handle get config command.
Async friendly.
"""
msg = GET_CONFIG_MESSAGE_SCHEMA(msg)
self.to_write.put_nowait(result_message(
msg['id'], self.hass.config.as_dict()))
def handle_get_panels(self, msg):
"""Handle get panels command.
Async friendly.
"""
msg = GET_PANELS_MESSAGE_SCHEMA(msg)
panels = {
panel:
self.hass.data[frontend.DATA_PANELS][panel].to_response(
self.hass, self.request)
for panel in self.hass.data[frontend.DATA_PANELS]}
self.to_write.put_nowait(result_message(
msg['id'], panels))
def handle_ping(self, msg):
"""Handle ping command.
Async friendly.
"""
self.to_write.put_nowait(pong_message(msg['id']))
| apache-2.0 | -6,186,071,054,472,008,000 | 28.51992 | 78 | 0.577569 | false |
dazhaoniel/1kg-more | js1kg/urls.py | 1 | 3537 | from django.conf.urls import url, include
from django.views.generic import TemplateView
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse_lazy
from rest_framework import routers
from js1kg.corporate import views as corporate_views
from js1kg.message import views as message_views
from js1kg.project import views as project_views
from js1kg.organization import views as organization_views
from js1kg.user import views as user_views
from js1kg.trip import views as trip_views
from js1kg.api import api
from . import views
# For Development Only
from django.conf import settings
from django.conf.urls.static import static
# REST API
router = routers.DefaultRouter()
router.register(r'^organizations', api.NonProfitOrganizationsViewSet)
router.register(r'^projects', api.ProjectsViewSet)
router.register(r'^users', api.UsersViewSet)
router.register(r'^threads', api.MessageThreadsViewSet)
router.register(r'^messages', api.MessagesViewSet)
router.register(r'^admins', api.OrgAdminsViewSet)
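# Illustrative note (assuming DRF's DefaultRouter conventions): each
# register() call above produces list and detail routes once the router is
# mounted under /api/ below, e.g.:
#     /api/projects/       -> ProjectsViewSet (list)
#     /api/projects/{pk}/  -> ProjectsViewSet (detail)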
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'user/login.html'}, name='js1kg_login'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': reverse_lazy('index')}, name='js1kg_logout'),
url(r'^register/$', user_views.register, name='user_register'),
url(r'^api/', include(router.urls)),
# Password Reset
# Organizations
# url(r'^organization/$', organization_views.organization_main, name='organization'),
url(r'^organization/$', TemplateView.as_view(template_name="organization/organization_main.html"), name='organization'),
url(r'^organization/create/$', login_required(organization_views.organization_create), name='organization_create'),
url(r'^organization/(?P<pk>[-\w]+)/edit/$', organization_views.OrganizationUpdate.as_view(), name='organization_update'),
url(r'^organization/(?P<slug>[-\w]+)/$', organization_views.organization_detail),
# url(r'^project/(?P<queryview>[\w-]+)/$', organization_views.organization_projects),
# Projects
# url(r'^project/$', project_views.project_main, name='project'),
url(r'^project/$', TemplateView.as_view(template_name="project/project_main.html"), name='project'),
url(r'^project/create/$', login_required(project_views.ProjectCreate.as_view()), name='project_create'),
url(r'^project/(?P<pk>[-\w]+)/edit/$', login_required(project_views.ProjectUpdate.as_view()), name='project_update'),
url(r'^project/(?P<pk>[-\w]+)/$', project_views.project_detail),
# User
url(r'^user/$', user_views.user_main, name='my_profile'),
url(r'^user/settings/$', user_views.user_settings, name='user_settings'),
url(r'^user/([a-zA-Z0-9-]{1,32})/$', user_views.other_profile, name='user_profile'),
# Messages
url(r'^messages/$', message_views.messages, name='my_inbox'),
url(r'^messages/(?P<pk>[-\w]+)/$', message_views.message_thread),
# Trips
url(r'^trip/$', trip_views.trip_main, name='my_trip'),
url(r'^trip/(?P<pk>[-\w]+)/$', trip_views.trip_detail),
# Corporate
url(r'^about/$', TemplateView.as_view(template_name="corporate/about.html"), name='about'),
# url(r'^contact-us/$', corporate_views.contact, name='contact_us'),
url(r'^style/$', TemplateView.as_view(template_name="corporate/style.html"), name='style_guide'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) # For Development Only
| apache-2.0 | 2,167,665,762,157,306,400 | 46.797297 | 125 | 0.704552 | false |
lissyx/build-mozharness | scripts/desktop_unittest.py | 1 | 28855 | #!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
"""desktop_unittest.py
The goal of this is to extract desktop unittesting from buildbot's factory.py
author: Jordan Lund
"""
import os
import re
import sys
import copy
import shutil
import glob
# load modules from parent dir
sys.path.insert(1, os.path.dirname(sys.path[0]))
from mozharness.base.errors import BaseErrorList
from mozharness.base.log import INFO, ERROR, WARNING
from mozharness.base.script import PreScriptAction
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
from mozharness.mozilla.mozbase import MozbaseMixin
from mozharness.mozilla.testing.codecoverage import (
CodeCoverageMixin,
code_coverage_config_options
)
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
from mozharness.mozilla.buildbot import TBPL_WARNING
SUITE_CATEGORIES = ['cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell', 'mozbase', 'mozmill', 'webapprt']
# DesktopUnittest {{{1
class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin, CodeCoverageMixin):
config_options = [
[['--mochitest-suite', ], {
"action": "extend",
"dest": "specified_mochitest_suites",
"type": "string",
"help": "Specify which mochi suite to run. "
"Suites are defined in the config file.\n"
"Examples: 'all', 'plain1', 'plain5', 'chrome', or 'a11y'"}
],
[['--webapprt-suite', ], {
"action": "extend",
"dest": "specified_webapprt_suites",
"type": "string",
"help": "Specify which webapprt suite to run. "
"Suites are defined in the config file.\n"
"Examples: 'content', 'chrome'"}
],
[['--reftest-suite', ], {
"action": "extend",
"dest": "specified_reftest_suites",
"type": "string",
"help": "Specify which reftest suite to run. "
"Suites are defined in the config file.\n"
"Examples: 'all', 'crashplan', or 'jsreftest'"}
],
[['--xpcshell-suite', ], {
"action": "extend",
"dest": "specified_xpcshell_suites",
"type": "string",
"help": "Specify which xpcshell suite to run. "
"Suites are defined in the config file\n."
"Examples: 'xpcshell'"}
],
[['--cppunittest-suite', ], {
"action": "extend",
"dest": "specified_cppunittest_suites",
"type": "string",
"help": "Specify which cpp unittest suite to run. "
"Suites are defined in the config file\n."
"Examples: 'cppunittest'"}
],
[['--jittest-suite', ], {
"action": "extend",
"dest": "specified_jittest_suites",
"type": "string",
"help": "Specify which jit-test suite to run. "
"Suites are defined in the config file\n."
"Examples: 'jittest'"}
],
[['--mozbase-suite', ], {
"action": "extend",
"dest": "specified_mozbase_suites",
"type": "string",
"help": "Specify which mozbase suite to run. "
"Suites are defined in the config file\n."
"Examples: 'mozbase'"}
],
[['--mozmill-suite', ], {
"action": "extend",
"dest": "specified_mozmill_suites",
"type": "string",
"help": "Specify which mozmill suite to run. "
"Suites are defined in the config file\n."
"Examples: 'mozmill'"}
],
[['--run-all-suites', ], {
"action": "store_true",
"dest": "run_all_suites",
"default": False,
"help": "This will run all suites that are specified "
"in the config file. You do not need to specify "
"any other suites.\nBeware, this may take a while ;)"}
],
[['--e10s', ], {
"action": "store_true",
"dest": "e10s",
"default": False,
"help": "Run tests with multiple processes."}
],
[['--strict-content-sandbox', ], {
"action": "store_true",
"dest": "strict_content_sandbox",
"default": False,
"help": "Run tests with a more strict content sandbox (Windows only)."}
],
[['--no-random', ], {
"action": "store_true",
"dest": "no_random",
"default": False,
"help": "Run tests with no random intermittents and bisect in case of real failure."}
],
[["--total-chunks"], {
"action": "store",
"dest": "total_chunks",
"help": "Number of total chunks"}
],
[["--this-chunk"], {
"action": "store",
"dest": "this_chunk",
"help": "Number of this chunk"}
],
] + copy.deepcopy(testing_config_options) + \
copy.deepcopy(blobupload_config_options) + \
copy.deepcopy(code_coverage_config_options)
def __init__(self, require_config_file=True):
# abs_dirs defined already in BaseScript but is here to make pylint happy
self.abs_dirs = None
super(DesktopUnittest, self).__init__(
config_options=self.config_options,
all_actions=[
'clobber',
'read-buildbot-config',
'download-and-extract',
'create-virtualenv',
'install',
'run-tests',
],
require_config_file=require_config_file,
config={'require_test_zip': True})
c = self.config
self.global_test_options = []
self.installer_url = c.get('installer_url')
self.test_url = c.get('test_url')
self.test_packages_url = c.get('test_packages_url')
self.symbols_url = c.get('symbols_url')
# this is so mozinstall in install() doesn't bug out if we don't run
# the download_and_extract action
self.installer_path = c.get('installer_path')
self.binary_path = c.get('binary_path')
self.abs_app_dir = None
self.abs_res_dir = None
# helper methods {{{2
def _pre_config_lock(self, rw_config):
super(DesktopUnittest, self)._pre_config_lock(rw_config)
c = self.config
if not c.get('run_all_suites'):
return # configs are valid
for category in SUITE_CATEGORIES:
specific_suites = c.get('specified_%s_suites' % (category))
if specific_suites:
if specific_suites != 'all':
self.fatal("Config options are not valid. Please ensure"
" that if the '--run-all-suites' flag was enabled,"
" then do not specify to run only specific suites "
"like:\n '--mochitest-suite browser-chrome'")
def query_abs_dirs(self):
if self.abs_dirs:
return self.abs_dirs
abs_dirs = super(DesktopUnittest, self).query_abs_dirs()
c = self.config
dirs = {}
dirs['abs_app_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'application')
dirs['abs_test_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tests')
dirs['abs_test_extensions_dir'] = os.path.join(dirs['abs_test_install_dir'], 'extensions')
dirs['abs_test_bin_dir'] = os.path.join(dirs['abs_test_install_dir'], 'bin')
dirs['abs_test_bin_plugins_dir'] = os.path.join(dirs['abs_test_bin_dir'],
'plugins')
dirs['abs_test_bin_components_dir'] = os.path.join(dirs['abs_test_bin_dir'],
'components')
dirs['abs_mochitest_dir'] = os.path.join(dirs['abs_test_install_dir'], "mochitest")
dirs['abs_webapprt_dir'] = os.path.join(dirs['abs_test_install_dir'], "mochitest")
dirs['abs_reftest_dir'] = os.path.join(dirs['abs_test_install_dir'], "reftest")
dirs['abs_xpcshell_dir'] = os.path.join(dirs['abs_test_install_dir'], "xpcshell")
dirs['abs_cppunittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "cppunittest")
dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "jit-test", "jit-test")
dirs['abs_mozbase_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozbase")
dirs['abs_mozmill_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozmill")
if os.path.isabs(c['virtualenv_path']):
dirs['abs_virtualenv_dir'] = c['virtualenv_path']
else:
dirs['abs_virtualenv_dir'] = os.path.join(abs_dirs['abs_work_dir'],
c['virtualenv_path'])
abs_dirs.update(dirs)
self.abs_dirs = abs_dirs
return self.abs_dirs
def query_abs_app_dir(self):
"""We can't set this in advance, because OSX install directories
change depending on branding and opt/debug.
"""
if self.abs_app_dir:
return self.abs_app_dir
if not self.binary_path:
self.fatal("Can't determine abs_app_dir (binary_path not set!)")
self.abs_app_dir = os.path.dirname(self.binary_path)
return self.abs_app_dir
def query_abs_res_dir(self):
"""The directory containing resources like plugins and extensions. On
OSX this is Contents/Resources, on all other platforms its the same as
the app dir.
As with the app dir, we can't set this in advance, because OSX install
directories change depending on branding and opt/debug.
"""
if self.abs_res_dir:
return self.abs_res_dir
abs_app_dir = self.query_abs_app_dir()
if self._is_darwin():
res_subdir = self.tree_config.get("mac_res_subdir", "Resources")
self.abs_res_dir = os.path.join(os.path.dirname(abs_app_dir), res_subdir)
else:
self.abs_res_dir = abs_app_dir
return self.abs_res_dir
@PreScriptAction('create-virtualenv')
def _pre_create_virtualenv(self, action):
dirs = self.query_abs_dirs()
self.register_virtualenv_module(name='mock')
self.register_virtualenv_module(name='simplejson')
requirements = os.path.join(dirs['abs_test_install_dir'],
'config',
'mozbase_requirements.txt')
if os.path.isfile(requirements):
self.register_virtualenv_module(requirements=[requirements],
two_pass=True)
return
def _query_symbols_url(self):
"""query the full symbols URL based upon binary URL"""
# may break with name convention changes but is one less 'input' for script
if self.symbols_url:
return self.symbols_url
symbols_url = None
self.info("finding symbols_url based upon self.installer_url")
if self.installer_url:
for ext in ['.zip', '.dmg', '.tar.bz2']:
if ext in self.installer_url:
symbols_url = self.installer_url.replace(
ext, '.crashreporter-symbols.zip')
if not symbols_url:
self.fatal("self.installer_url was found but symbols_url could \
not be determined")
else:
self.fatal("self.installer_url was not found in self.config")
self.info("setting symbols_url as %s" % (symbols_url))
self.symbols_url = symbols_url
return self.symbols_url
def get_webapprt_path(self, res_dir, mochitest_dir):
"""Get the path to the webapp runtime binary.
On Mac, we copy the stub from the resources dir to the test app bundle,
since we have to run it from the executable directory of a bundle
in order for its windows to appear. Ideally, the build system would do
this for us at build time, and we should find a way for it to do that.
"""
exe_suffix = self.config.get('exe_suffix', '')
app_name = 'webapprt-stub' + exe_suffix
app_path = os.path.join(res_dir, app_name)
if self._is_darwin():
mac_dir_name = os.path.join(
mochitest_dir,
'webapprtChrome',
'webapprt',
'test',
'chrome',
'TestApp.app',
'Contents',
'MacOS')
mac_app_name = 'webapprt' + exe_suffix
mac_app_path = os.path.join(mac_dir_name, mac_app_name)
self.copyfile(app_path, mac_app_path, copystat=True)
return mac_app_path
return app_path
def _query_abs_base_cmd(self, suite_category, suite):
if self.binary_path:
c = self.config
dirs = self.query_abs_dirs()
run_file = c['run_file_names'][suite_category]
base_cmd = [self.query_python_path('python'), '-u']
base_cmd.append(os.path.join(dirs["abs_%s_dir" % suite_category], run_file))
abs_app_dir = self.query_abs_app_dir()
abs_res_dir = self.query_abs_res_dir()
raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
'%s_raw.log' % suite)
error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
'%s_errorsummary.log' % suite)
str_format_values = {
'binary_path': self.binary_path,
'symbols_path': self._query_symbols_url(),
'abs_app_dir': abs_app_dir,
'abs_res_dir': abs_res_dir,
'raw_log_file': raw_log_file,
'error_summary_file': error_summary_file,
}
# TestingMixin._download_and_extract_symbols() will set
# self.symbols_path when downloading/extracting.
if self.symbols_path:
str_format_values['symbols_path'] = self.symbols_path
if suite_category == 'webapprt':
str_format_values['app_path'] = self.get_webapprt_path(abs_res_dir, dirs['abs_mochitest_dir'])
if c['e10s']:
base_cmd.append('--e10s')
if c.get('strict_content_sandbox'):
if suite_category == "mochitest":
base_cmd.append('--strict-content-sandbox')
else:
self.fatal("--strict-content-sandbox only works with mochitest suites.")
if c.get('total_chunks') and c.get('this_chunk'):
base_cmd.extend(['--total-chunks', c['total_chunks'],
'--this-chunk', c['this_chunk']])
if c['no_random']:
if suite_category == "mochitest":
base_cmd.append('--bisect-chunk=default')
else:
self.warning("--no-random does not currently work with suites other than mochitest.")
# set pluginsPath
abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
str_format_values['test_plugin_path'] = abs_res_plugins_dir
missing_key = True
if "suite_definitions" in self.tree_config: # new structure
if suite_category in self.tree_config["suite_definitions"]:
missing_key = False
options = self.tree_config["suite_definitions"][suite_category]["options"]
else:
suite_options = '%s_options' % suite_category
if suite_options in self.tree_config:
missing_key = False
options = self.tree_config[suite_options]
if missing_key:
self.fatal("'%s' not defined in the in-tree config! Please add it to '%s'. "
"See bug 981030 for more details." %
(suite_category,
os.path.join('gecko', 'testing', self.config['in_tree_config'])))
if options:
for option in options:
option = option % str_format_values
if not option.endswith('None'):
base_cmd.append(option)
return base_cmd
else:
self.warning("Suite options for %s could not be determined."
"\nIf you meant to have options for this suite, "
"please make sure they are specified in your "
"config under %s_options" %
(suite_category, suite_category))
return base_cmd
else:
self.fatal("'binary_path' could not be determined.\n This should "
"be like '/path/build/application/firefox/firefox'"
"\nIf you are running this script without the 'install' "
"action (where binary_path is set), please ensure you are"
" either:\n(1) specifying it in the config file under "
"binary_path\n(2) specifying it on command line with the"
" '--binary-path' flag")
def _query_specified_suites(self, category):
# logic goes: if at least one '--{category}-suite' was given,
# then run only that(those) given suite(s). Elif no suites were
# specified and the --run-all-suites flag was given,
# run all {category} suites. Anything else, run no suites.
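        # Illustrative example (hypothetical config values): with
        #     all_mochitest_suites = {'plain1': [...], 'chrome': [...]}
        #     specified_mochitest_suites = ['chrome']
        # this returns {'chrome': [...]}; with ['all'] (or with
        # run_all_suites set and nothing specified) it returns the full
        # all_mochitest_suites dict unchanged.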
c = self.config
all_suites = c.get('all_%s_suites' % (category))
specified_suites = c.get('specified_%s_suites' % (category)) # list
suites = None
if specified_suites:
if 'all' in specified_suites:
# useful if you want a quick way of saying run all suites
# of a specific category.
suites = all_suites
else:
# suites gets a dict of everything from all_suites where a key
# is also in specified_suites
suites = dict((key, all_suites.get(key)) for key in
specified_suites if key in all_suites.keys())
else:
if c.get('run_all_suites'): # needed if you dont specify any suites
suites = all_suites
return suites
# Actions {{{2
# clobber defined in BaseScript, deletes mozharness/build if exists
# read_buildbot_config is in BuildbotMixin.
# postflight_read_buildbot_config is in TestingMixin.
# preflight_download_and_extract is in TestingMixin.
# create_virtualenv is in VirtualenvMixin.
# preflight_install is in TestingMixin.
# install is in TestingMixin.
# upload_blobber_files is in BlobUploadMixin
def download_and_extract(self):
"""
download and extract test zip / download installer
optimizes which subfolders to extract from tests zip
"""
c = self.config
target_unzip_dirs = None
if c['specific_tests_zip_dirs']:
target_unzip_dirs = list(c['minimum_tests_zip_dirs'])
for category in c['specific_tests_zip_dirs'].keys():
if c['run_all_suites'] or self._query_specified_suites(category) \
or 'run-tests' not in self.actions:
target_unzip_dirs.extend(c['specific_tests_zip_dirs'][category])
if c.get('run_all_suites'):
target_categories = SUITE_CATEGORIES
else:
target_categories = [cat for cat in SUITE_CATEGORIES
if self._query_specified_suites(cat) is not None]
super(DesktopUnittest, self).download_and_extract(target_unzip_dirs=target_unzip_dirs,
suite_categories=target_categories)
# pull defined in VCSScript.
# preflight_run_tests defined in TestingMixin.
def run_tests(self):
self._run_category_suites('mochitest')
self._run_category_suites('reftest')
self._run_category_suites('webapprt')
self._run_category_suites('xpcshell',
preflight_run_method=self.preflight_xpcshell)
self._run_category_suites('cppunittest',
preflight_run_method=self.preflight_cppunittest)
self._run_category_suites('jittest')
self._run_category_suites('mozbase')
self._run_category_suites('mozmill',
preflight_run_method=self.preflight_mozmill)
def preflight_xpcshell(self, suites):
c = self.config
dirs = self.query_abs_dirs()
abs_app_dir = self.query_abs_app_dir()
# For mac these directories are in Contents/Resources, on other
# platforms abs_res_dir will point to abs_app_dir.
abs_res_dir = self.query_abs_res_dir()
abs_res_components_dir = os.path.join(abs_res_dir, 'components')
abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
abs_res_extensions_dir = os.path.join(abs_res_dir, 'extensions')
if suites: # there are xpcshell suites to run
self.mkdir_p(abs_res_plugins_dir)
self.info('copying %s to %s' % (os.path.join(dirs['abs_test_bin_dir'],
c['xpcshell_name']), os.path.join(abs_app_dir,
c['xpcshell_name'])))
shutil.copy2(os.path.join(dirs['abs_test_bin_dir'], c['xpcshell_name']),
os.path.join(abs_app_dir, c['xpcshell_name']))
self.copytree(dirs['abs_test_bin_components_dir'],
abs_res_components_dir,
overwrite='overwrite_if_exists')
self.copytree(dirs['abs_test_bin_plugins_dir'],
abs_res_plugins_dir,
overwrite='overwrite_if_exists')
if os.path.isdir(dirs['abs_test_extensions_dir']):
self.mkdir_p(abs_res_extensions_dir)
self.copytree(dirs['abs_test_extensions_dir'],
abs_res_extensions_dir,
overwrite='overwrite_if_exists')
def preflight_cppunittest(self, suites):
abs_res_dir = self.query_abs_res_dir()
dirs = self.query_abs_dirs()
abs_cppunittest_dir = dirs['abs_cppunittest_dir']
        # move manifest and js files to resources dir, where tests expect them
files = glob.glob(os.path.join(abs_cppunittest_dir, '*.js'))
files.extend(glob.glob(os.path.join(abs_cppunittest_dir, '*.manifest')))
for f in files:
self.move(f, abs_res_dir)
def preflight_mozmill(self, suites):
c = self.config
dirs = self.query_abs_dirs()
abs_app_dir = self.query_abs_app_dir()
abs_app_plugins_dir = os.path.join(abs_app_dir, 'plugins')
abs_app_extensions_dir = os.path.join(abs_app_dir, 'extensions')
if suites: # there are mozmill suites to run
self.mkdir_p(abs_app_plugins_dir)
self.copytree(dirs['abs_test_bin_plugins_dir'],
abs_app_plugins_dir,
overwrite='overwrite_if_exists')
if os.path.isdir(dirs['abs_test_extensions_dir']):
self.copytree(dirs['abs_test_extensions_dir'],
abs_app_extensions_dir,
overwrite='overwrite_if_exists')
modules = ['jsbridge', 'mozmill']
for module in modules:
self.install_module(module=os.path.join(dirs['abs_mozmill_dir'],
'resources',
module))
def _run_category_suites(self, suite_category, preflight_run_method=None):
"""run suite(s) to a specific category"""
c = self.config
dirs = self.query_abs_dirs()
suites = self._query_specified_suites(suite_category)
abs_app_dir = self.query_abs_app_dir()
abs_res_dir = self.query_abs_res_dir()
if preflight_run_method:
preflight_run_method(suites)
if suites:
self.info('#### Running %s suites' % suite_category)
for suite in suites:
abs_base_cmd = self._query_abs_base_cmd(suite_category, suite)
cmd = abs_base_cmd[:]
replace_dict = {
'abs_app_dir': abs_app_dir,
# Mac specific, but points to abs_app_dir on other
# platforms.
'abs_res_dir': abs_res_dir,
}
options_list = []
env = {}
if isinstance(suites[suite], dict):
options_list = suites[suite]['options']
env = copy.deepcopy(suites[suite]['env'])
else:
options_list = suites[suite]
for arg in options_list:
cmd.append(arg % replace_dict)
cmd = self.append_harness_extra_args(cmd)
suite_name = suite_category + '-' + suite
tbpl_status, log_level = None, None
error_list = BaseErrorList + [{
'regex': re.compile(r'''PROCESS-CRASH.*application crashed'''),
'level': ERROR,
}]
parser = self.get_test_output_parser(suite_category,
config=self.config,
error_list=error_list,
log_obj=self.log_obj)
if self.query_minidump_stackwalk():
env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
if not os.path.isdir(env['MOZ_UPLOAD_DIR']):
self.mkdir_p(env['MOZ_UPLOAD_DIR'])
env = self.query_env(partial_env=env, log_level=INFO)
return_code = self.run_command(cmd, cwd=dirs['abs_work_dir'],
output_timeout=1000,
output_parser=parser,
env=env)
# mochitest, reftest, and xpcshell suites do not return
# appropriate return codes. Therefore, we must parse the output
# to determine what the tbpl_status and worst_log_level must
# be. We do this by:
# 1) checking to see if our mozharness script ran into any
# errors itself with 'num_errors' <- OutputParser
# 2) if num_errors is 0 then we look in the subclassed 'parser'
# findings for harness/suite errors <- DesktopUnittestOutputParser
# 3) checking to see if the return code is in success_codes
success_codes = None
if self._is_windows():
# bug 1120644
success_codes = [0, 1]
tbpl_status, log_level = parser.evaluate_parser(return_code,
success_codes=success_codes)
parser.append_tinderboxprint_line(suite_name)
self.buildbot_status(tbpl_status, level=log_level)
self.log("The %s suite: %s ran with return status: %s" %
(suite_category, suite, tbpl_status), level=log_level)
else:
self.debug('There were no suites to run for %s' % suite_category)
# main {{{1
if __name__ == '__main__':
desktop_unittest = DesktopUnittest()
desktop_unittest.run_and_exit()
| mpl-2.0 | 5,648,094,374,295,425,000 | 44.369497 | 115 | 0.535089 | false |
rosedu/I.GameBot | tictactoe/game_engine/generate_leaderboard.py | 1 | 1523 | import sys
import json
import operator
def read_raw_json(file_handle):
f = open(file_handle, 'r')
try:
raw_json = f.read()
finally:
f.close()
return raw_json
def parse_raw_json(raw_json):
return json.loads(raw_json)
def add_contestant_info_to_players_dictionary(
contestant,
overall_score,
players_dictionary
):
if contestant in players_dictionary:
players_dictionary[contestant] += overall_score
else:
players_dictionary[contestant] = overall_score
return players_dictionary
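# Illustrative example (hypothetical data): repeated calls accumulate
# per-player totals:
#
#     players = {}
#     players = add_contestant_info_to_players_dictionary('ana', 3, players)
#     players = add_contestant_info_to_players_dictionary('ana', 2, players)
#     # players == {'ana': 5}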
players = {}
for json_file in sys.argv[1:]:
progress_log = parse_raw_json( read_raw_json(json_file) )
contestant_x = progress_log['contestant_x'].encode('ascii')
contestant_zero = progress_log['contestant_zero'].encode('ascii')
overall_score_x = progress_log['overall_score_x']
overall_score_zero = progress_log['overall_score_zero']
players = \
add_contestant_info_to_players_dictionary(
contestant_x,
overall_score_x,
players
)
players = \
add_contestant_info_to_players_dictionary(
contestant_zero,
overall_score_zero,
players
)
leaderboard = sorted(players.items(), key=operator.itemgetter(1), reverse=True)
print
print 'Leaderboard / Cumulative Scores for the OSSS TicTacToe++ competition'
print
for contestant_info in leaderboard:
print contestant_info[0], ':', contestant_info[1]
print
| agpl-3.0 | -8,578,562,874,529,528,000 | 21.397059 | 79 | 0.639527 | false |
kashif/chainer | tests/chainer_tests/functions_tests/activation_tests/test_tanh.py | 1 | 3973 | import unittest
import mock
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
@testing.parameterize(*testing.product({
'shape': [(3, 2), ()],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
class TestTanh(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-.5, .5, self.shape).astype(self.dtype)
self.gy = numpy.random.uniform(-.1, .1, self.shape).astype(self.dtype)
self.check_backward_options = {}
if self.dtype == numpy.float16:
self.check_backward_options = {
'dtype': numpy.float64, 'atol': 1e-4, 'rtol': 1e-3}
def check_forward(self, x_data, use_cudnn='always'):
x = chainer.Variable(x_data)
with chainer.using_config('use_cudnn', use_cudnn):
y = functions.tanh(x)
self.assertEqual(y.data.dtype, self.dtype)
y_expect = functions.tanh(chainer.Variable(self.x))
testing.assert_allclose(y_expect.data, y.data)
@attr.gpu
@condition.retry(3)
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x), 'always')
@attr.gpu
@condition.retry(3)
def test_forward_gpu_non_contiguous(self):
self.check_forward(cuda.cupy.asfortranarray(cuda.to_gpu(self.x)),
'always')
@attr.gpu
@condition.retry(3)
def test_forward_gpu_no_cudnn(self):
self.check_forward(cuda.to_gpu(self.x), 'never')
def check_backward(self, x_data, gy_data, use_cudnn='always'):
with chainer.using_config('use_cudnn', use_cudnn):
gradient_check.check_backward(
functions.Tanh(), x_data, gy_data,
**self.check_backward_options)
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
@attr.gpu
@condition.retry(3)
def test_backward_gpu_non_contiguous(self):
self.check_backward(cuda.cupy.asfortranarray(cuda.to_gpu(self.x)),
cuda.cupy.asfortranarray(cuda.to_gpu(self.gy)))
@attr.gpu
@condition.retry(3)
def test_backward_gpu_no_cudnn(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy), 'never')
@testing.parameterize(*testing.product({
'use_cudnn': ['always', 'auto', 'never'],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
@attr.cudnn
class TestTanhCudnnCall(unittest.TestCase):
def setUp(self):
self.x = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
self.gy = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
with chainer.using_config('use_cudnn', self.use_cudnn):
self.expect = chainer.should_use_cudnn('==always')
def forward(self):
x = chainer.Variable(self.x)
return functions.tanh(x)
def test_call_cudnn_forward(self):
with chainer.using_config('use_cudnn', self.use_cudnn):
default_func = cuda.cupy.cudnn.activation_forward
with mock.patch('cupy.cudnn.activation_forward') as func:
func.side_effect = default_func
self.forward()
self.assertEqual(func.called, self.expect)
def test_call_cudnn_backward(self):
with chainer.using_config('use_cudnn', self.use_cudnn):
y = self.forward()
y.grad = self.gy
default_func = cuda.cupy.cudnn.activation_backward
with mock.patch('cupy.cudnn.activation_backward') as func:
func.side_effect = default_func
y.backward()
self.assertEqual(func.called, self.expect)
testing.run_module(__name__, __file__)
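# Illustrative (runner assumed): execute directly with ``python test_tanh.py``
# or via pytest, e.g. ``pytest test_tanh.py -k forward``; the attr.gpu and
# attr.cudnn decorators mark the cases that need CUDA-capable hardware.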
| mit | -706,666,682,520,923,000 | 33.25 | 79 | 0.623207 | false |
jtaghiyar/single_cell_lims | elastidjango/celery.py | 1 | 1033 | """
Created on Oct 27, 2016
@author: Jafar Taghiyar ([email protected])
"""
from __future__ import absolute_import
import os
#============================
# Celery imports
#----------------------------
from celery import Celery
#============================
# Django imports
#----------------------------
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'elastidjango.settings')
from django.conf import settings # noqa
#============================
# main
#----------------------------
app = Celery('elastidjango')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
# A common practice for reusable apps is to define all tasks in a separate
# tasks.py module, and this is how Celery autodiscovers these modules
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
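# Illustrative (hypothetical app): with autodiscovery configured above, a
# task defined in an installed app's tasks.py is picked up automatically:
#
#     # myapp/tasks.py
#     from elastidjango.celery import app
#
#     @app.task
#     def ping():
#         return "pong"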
# @app.task(bind=True)
# def debug_task(self):
# print('Request: {0!r}'.format(self.request)) | mit | 4,008,024,806,926,323,000 | 25.512821 | 74 | 0.621491 | false |
alecthomas/voluptuous | voluptuous/util.py | 3 | 3150 | import sys
from voluptuous.error import LiteralInvalid, TypeInvalid, Invalid
from voluptuous.schema_builder import Schema, default_factory, raises
from voluptuous import validators
__author__ = 'tusharmakkar08'
def _fix_str(v):
if sys.version_info[0] == 2 and isinstance(v, unicode):
s = v
else:
s = str(v)
return s
def Lower(v):
"""Transform a string to lower case.
>>> s = Schema(Lower)
>>> s('HI')
'hi'
"""
return _fix_str(v).lower()
def Upper(v):
"""Transform a string to upper case.
>>> s = Schema(Upper)
>>> s('hi')
'HI'
"""
return _fix_str(v).upper()
def Capitalize(v):
"""Capitalise a string.
>>> s = Schema(Capitalize)
>>> s('hello world')
'Hello world'
"""
return _fix_str(v).capitalize()
def Title(v):
"""Title case a string.
>>> s = Schema(Title)
>>> s('hello world')
'Hello World'
"""
return _fix_str(v).title()
def Strip(v):
"""Strip whitespace from a string.
>>> s = Schema(Strip)
>>> s(' hello world ')
'hello world'
"""
return _fix_str(v).strip()
class DefaultTo(object):
"""Sets a value to default_value if none provided.
>>> s = Schema(DefaultTo(42))
>>> s(None)
42
>>> s = Schema(DefaultTo(list))
>>> s(None)
[]
"""
def __init__(self, default_value, msg=None):
self.default_value = default_factory(default_value)
self.msg = msg
def __call__(self, v):
if v is None:
v = self.default_value()
return v
def __repr__(self):
return 'DefaultTo(%s)' % (self.default_value(),)
class SetTo(object):
"""Set a value, ignoring any previous value.
>>> s = Schema(validators.Any(int, SetTo(42)))
>>> s(2)
2
>>> s("foo")
42
"""
def __init__(self, value):
self.value = default_factory(value)
def __call__(self, v):
return self.value()
def __repr__(self):
return 'SetTo(%s)' % (self.value(),)
class Set(object):
"""Convert a list into a set.
>>> s = Schema(Set())
>>> s([]) == set([])
True
>>> s([1, 2]) == set([1, 2])
True
>>> with raises(Invalid, regex="^cannot be presented as set: "):
... s([set([1, 2]), set([3, 4])])
"""
def __init__(self, msg=None):
self.msg = msg
def __call__(self, v):
try:
set_v = set(v)
except Exception as e:
raise TypeInvalid(
self.msg or 'cannot be presented as set: {0}'.format(e))
return set_v
def __repr__(self):
return 'Set()'
class Literal(object):
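    """Match a value against a fixed literal.

    Illustrative doctest (added here; mirrors __call__ below):

    >>> s = Schema(Literal('hi'))
    >>> s('hi')
    'hi'
    """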
def __init__(self, lit):
self.lit = lit
def __call__(self, value, msg=None):
if self.lit != value:
raise LiteralInvalid(
                msg or '%s does not match %s' % (value, self.lit)
)
else:
return self.lit
def __str__(self):
return str(self.lit)
def __repr__(self):
return repr(self.lit)
def u(x):
if sys.version_info < (3,):
return unicode(x)
else:
return x
| bsd-3-clause | 5,798,480,879,912,180,000 | 18.444444 | 72 | 0.51619 | false |
google/sample-sql-translator | sql_parser/func.py | 1 | 3624 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from dataclasses import dataclass
from typing import Optional
from typing import List
from rfmt.blocks import LineBlock as LB
from rfmt.blocks import IndentBlock as IB
from rfmt.blocks import TextBlock as TB
from rfmt.blocks import StackBlock as SB
from rfmt.blocks import WrapBlock as WB
from .utils import comments_sqlf
from .const import SQLString
from .ident import SQLIdentifier
from .node import SQLNode
from .node import SQLNodeList
from .expr import SQLExpr
from .types import SQLType
from .types import SQLNamedType
@dataclass(frozen=True)
class SQLFunction(SQLNode):
name: SQLIdentifier
params: SQLNodeList[SQLNode]
retval: Optional[SQLNode]
impl: SQLNode
comments: List[str]
def sqlf(self, compact):
# Start the stack with comments
stack = comments_sqlf(self.comments)
# Get params as a list of sqlf
paramf = []
for param in self.params[:-1]:
paramf.append(LB([param.sqlf(compact), TB(',')]))
if self.params:
paramf.append(self.params[-1].sqlf(compact))
stack.append(LB([
TB('CREATE TEMPORARY FUNCTION '),
self.name.sqlf(True),
TB('('),
WB(paramf, sep=' '),
TB(')')
]))
if self.retval:
stack.append(LB([TB('RETURNS '),
self.retval.sqlf(compact)]))
if isinstance(self.impl, SQLString):
stack.append(TB('LANGUAGE js AS'))
stack.append(IB(LB([
self.impl.sqlf(compact), TB(';')
])))
else:
stack.append(TB('AS'))
stack.append(IB(LB([self.impl.sqlf(compact),
TB(';')])))
stack.append(TB(''))
return SB(stack)
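    # Illustrative inputs accepted by consume() below (reconstructed from its
    # parsing steps; INT64 and the exact grammar are assumptions):
    #
    #     CREATE TEMP FUNCTION add_one(x INT64) AS (x + 1)
    #     CREATE TEMP FUNCTION add_one(x INT64)
    #         RETURNS INT64 LANGUAGE js AS "return x + 1;"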
@staticmethod
def consume(lex) -> 'Optional[SQLFunction]':
if not (lex.consume(['CREATE', 'TEMP']) or
lex.consume(['CREATE', 'TEMPORARY'])):
return None
comments = lex.get_comments()
lex.expect('FUNCTION')
name = (SQLIdentifier.consume(lex) or
lex.error('Expected UDF name'))
lex.expect('(')
params = []
while True:
var_name = SQLIdentifier.parse(lex)
ltype = SQLType.parse(lex)
params.append(SQLNamedType(var_name, ltype))
if not lex.consume(','):
break
lex.expect(')')
rtype = None
# Javascript function
if lex.consume('RETURNS'):
rtype = SQLType.parse(lex)
lex.expect('LANGUAGE')
lex.expect('JS')
lex.expect('AS')
impl = (SQLString.consume(lex) or
lex.error('Expected Javascript code'))
# SQL-expression
else:
lex.expect('AS')
impl = SQLExpr.parse(lex)
comments.extend(lex.get_comments())
return SQLFunction(name, SQLNodeList(params),
rtype, impl, comments)
| apache-2.0 | 3,897,709,106,737,068,000 | 27.992 | 78 | 0.586093 | false |
FrodeSolheim/fs-uae | docs/scripts/update.py | 1 | 17476 | #!/usr/bin/env python3
import os
import shutil
import sys
try:
import markdown
except ImportError:
markdown = None
if not os.path.exists("docs/scripts"):
print("Run this script from the project root directory")
sys.exit(1)
last_main_option = ""
last_main_option_added = False
option_data = {}
option_data_all = {}
options = {}
option_repl = {}
f = open("doc/options.html", "w", encoding="UTF-8")
if not os.path.exists("doc/dist"):
os.makedirs("doc/dist")
def handle_option_data(name, data, option):
global last_main_option, last_main_option_added
since = ""
see = ""
default = ""
example = ""
example2 = ""
option["type"] = ""
text = []
h_name = name.replace("-", "_")
name = name.replace("_", "-")
data = data.strip()
org_lines = data.split("\n")
lines = []
in_code = False
in_test = False
for line in org_lines:
if ":" in line or line.startswith("##"):
if line.startswith("##"):
key = line.strip("# ")
value = ""
else:
key, value = line.split(":", 1)
key = key.lower().strip()
value = value.strip()
if key == "since":
since = value
continue
elif key == "see":
see = value
continue
elif key == "default":
default = value
option["default"] = value
continue
elif key == "example":
# example = value
example2 = value
continue
elif key == "type":
option["type"] = value
continue
elif key == "range":
mi, ma = value.split(" - ")
option["min"] = mi.strip()
option["max"] = ma.strip()
continue
elif key == "description":
if value.startswith('"'):
value = value.strip('"')
option["summary_translation"] = True
else:
option["summary_translation"] = False
option["summary"] = value
continue
elif key == "summary":
if value.startswith('"'):
value = value.strip('"')
option["summary_translation"] = True
else:
option["summary_translation"] = False
option["summary"] = value
continue
elif key == "code":
in_code = True
continue
elif key == "tests":
in_code = False
elif key.startswith("test "):
in_code = False
in_test = 1
elif key == "input":
in_code = False
in_test = 1
elif key == "output":
in_test = 2
elif key == "value":
if " - " in value:
value, summary = value.split(" - ", 1)
else:
summary = ""
try:
n, desc = value.split("(", 1)
except ValueError:
n, desc = value, value
else:
n = n.strip()
desc = desc[:-1].strip()
# FIXME: use summary
option["values"].append((n, desc))
# continue
if in_code:
option["code"].append(line[4:])
elif in_test == 1:
pass
elif in_test == 2:
pass
else:
lines.append(line)
# text.append("\n<h2 id=\"{hname}\">"
# "{hname}<a name=\"{name}\"></a></h2>\n".format(
# name=name, hname=h_name))
text.append("<h1>{hname}</h1>\n".format(name=name, hname=h_name))
if since:
text.append("<p>")
text.append("<i>Since {since}</i>.".format(since=since))
if default:
if since:
text.append(" ")
else:
text.append("<p>")
text.append("Default value: {default}".format(default=default))
if example:
if default:
text.append(", ")
elif since:
text.append(" ")
else:
text.append("<p>")
text.append("Example:")
if since or default or example:
text.append("</p>\n")
if example2:
text.append(
"<pre>Example: {hname} = {value}</pre>\n".format(
name=name, hname=h_name, value=example2
)
)
in_list = False
# in_para = False
# in_value = False
last_was_line = False
has_started = False
for line in lines:
if not line.strip() and not has_started:
continue
has_started = True
if (line.startswith("*") or line.startswith("Value:")) or (
in_list and line.startswith(" ")
):
started = False
if not in_list:
started = True
text.append("<ul>\n")
in_list = True
if line.startswith("*") or line.startswith("Value:"):
if not started:
text.append("</li>\n")
text.append("<li>")
else:
text.append(" ")
if line.startswith("Value:"):
if "(" in line:
line = line.split("(")[0][6:].strip()
else:
line = line[6:].strip()
line += " - "
else:
line = line[1:].strip()
text.append(line)
last_was_line = False
else:
if in_list:
text.append("</li>\n</ul>\n")
in_list = False
# if not line.strip():
# text.append("<p>")
# else:
if last_was_line:
if text[-1] == "":
text.append("\n\n")
else:
text.append(" ")
if line.startswith(" "):
text.append("<pre>{0}</pre>".format(line.strip()))
last_was_line = False
else:
text.append(line)
last_was_line = True
if in_list:
text.append("</li>\n</ul>\n")
in_list = False
if see:
# text.append('\n<p>See <a href="#{see}">{see}</a></p>\n'.format(
# see=see))
text.append(
'\n<p>See <a href="{see_l}">{see}</a></p>\n'.format(
see=see, see_l=see.replace("_", "-")
)
)
t = "".join(text)
t = t.replace("\n\n\n", "\n\n")
while " " in t:
t = t.replace(" ", " ")
t = t.replace("</pre><pre>", "\n")
for key, value in option_repl.items():
t = t.replace(key, value)
if "</h2>\n\n<p>See" in t and last_main_option in t:
if last_main_option_added:
f.write(", ")
else:
f.write("\n\nSimilar options: ")
f.write(
'<a name="{name}"></a><a name="{hname}"></a>{hname}'.format(
name=name, hname=h_name
)
)
last_main_option_added = True
else:
last_main_option = name
last_main_option_added = False
f.write(t)
# f.write("\n")
codes = {}
class Code:
def __init__(self):
self.dependencies = None
self.marked = False
def handle_code(option, lines):
# print("handle_code", option)
inputs = set()
for line in lines:
line = line.replace("(", " ")
line = line.replace(")", " ")
line = line.replace(",", " ")
line = line.replace(":", " ")
words = line.split(" ")
for word in words:
word = word.strip()
if not word:
continue
# if word[-1] in "):":
# word = word[:-1]
if word.startswith("c."):
name = word[2:]
name = name.split(".")[0]
if name != option:
inputs.add(name)
inputs = sorted(inputs)
code = Code()
code.option = option
code.inputs = sorted(inputs)
code.lines = lines
codes[option] = code
print(" ", code.inputs)
def handle_option_file(name, path):
print(name)
if os.path.isfile(path):
with open(path, "r", encoding="UTF-8") as option_f:
original_name = name
if original_name.endswith(".md"):
name = name[:-3]
option = {"values": [], "default": "", "code": [], "tests": []}
option_text = option_f.read()
handle_option_data(name, option_text, option)
if option["code"]:
handle_code(name, option["code"])
if "summary" in option:
option_data[name] = option
option_data_all[name] = option
if original_name.endswith(".md") and markdown is not None:
if not os.path.exists("doc/html"):
os.makedirs("doc/html")
html_path = "doc/html/" + name + ".html"
with open(html_path, "w", encoding="UTF-8") as html_f:
html = markdown.markdown(option_text)
html_f.write(html)
def main():
global f
f.write("This page documents the options you can use in FS-UAE ")
f.write('<a href="/fs-uae/configuration-files">configuration files</a>. ')
f.write("The options are sorted in alphabetical order.")
# f.write(" Both hyphens and ")
# f.write("underscores can be used/mixed in option names.")
f.write("\n")
for name in os.listdir("docs/options"):
if name == ".gitignore":
continue
if name.endswith("~"):
continue
if os.path.isfile(os.path.join("docs/options", name)):
option_name = name
if name.endswith(".md"):
option_name, _ = os.path.splitext(name)
options[option_name] = os.path.join("docs/options", name)
# option_repl["[{0}]".format(name)] = "<a href=\"#{0}\">{0}</a>".format(name)
option_repl["[{0}]".format(name)] = '<a href="{0}">{1}</a>'.format(
name.replace("_", "-"), name
)
for name in os.listdir("docs/options/launcher"):
if name.endswith("~"):
continue
if os.path.isfile(os.path.join("docs/options/launcher", name)):
options[name] = os.path.join("docs/options", "launcher", name)
for name in os.listdir("docs/options/arcade"):
if name.endswith("~"):
continue
if os.path.isfile(os.path.join("docs/options/arcade", name)):
options[name] = os.path.join("docs/options", "arcade", name)
for name in os.listdir("docs/options/fsgs"):
if name.endswith("~"):
continue
if os.path.isfile(os.path.join("docs/options/fsgs", name)):
options[name] = os.path.join("docs/options", "fsgs", name)
for name in sorted(options.keys()):
if name == "Makefile":
continue
f = open("doc/dist/" + name, "w", encoding="UTF-8")
handle_option_file(name, options[name])
f.close()
with open(
"../fs-uae-launcher-private/fsgamesys/options/constants.py", "w", encoding="UTF-8"
) as f:
f.write(
"""\
# Automatically generated - do not edit by hand
# noinspection SpellCheckingInspection
"""
)
for key in sorted(option_data_all.keys()):
# Strip leading __ because that will invoke Python's
# name mangling feature
f.write('{} = "{}"\n'.format(key.upper().strip("__"), key))
with open(
"../fs-uae-launcher-private/fsgamesys/options/option.py", "w", encoding="UTF-8"
) as f:
f.write(
"""\
# Automatically generated - do not edit by hand
# noinspection SpellCheckingInspection
class Option(object):
\"\"\"Constants for option names.\"\"\"
"""
)
for key in sorted(option_data_all.keys()):
# Strip leading __ because that will invoke Python's
# name mangling feature
f.write(' {} = "{}"\n'.format(key.upper().strip("__"), key))
with open(
"../fs-uae-launcher-private/launcher/option.py", "w", encoding="UTF-8"
) as f:
f.write(
"""\
# Automatically generated - do not edit by hand
from fsgamesys.options.option import Option as BaseOption
# noinspection PyClassHasNoInit
class Option(BaseOption):
@staticmethod
def get(name):
return options[name]
# noinspection PyPep8Naming
def N_(x):
return x
options = {
"""
)
for key in sorted(option_data.keys()):
print(key)
option = option_data[key]
f.write(" Option.{0}: {{\n".format(key.upper()))
f.write(' "default": "{0}",\n'.format(option["default"]))
if len(option["summary"]) == 0:
f.write(' "description": "",\n')
else:
if key.startswith("uae_"):
f.write(' "description":')
if len(option["summary"]) < 50:
f.write(" ")
else:
if option["summary_translation"]:
f.write(' "description": N_(')
else:
f.write(' "description": (')
if len(option["summary"]) >= 50:
f.write("\n ")
if key.startswith("uae_"):
f.write('"{0}",\n'.format(option["summary"]))
else:
f.write('"{0}"),\n'.format(option["summary"]))
f.write(' "type": "{0}",\n'.format(option["type"]))
if len(option["values"]) > 0:
f.write(' "values": [\n')
for name, desc in option["values"]:
if desc.startswith('"'):
if key.startswith("uae_"):
desc = "{0}".format(desc)
else:
desc = "N_({0})".format(desc)
else:
desc = '"{0}"'.format(desc)
f.write(' ("{0}", {1}),\n'.format(name, desc))
f.write(" ]\n")
if "min" in option:
f.write(' "min": {0},\n'.format(option["min"]))
if "max" in option:
f.write(' "max": {0},\n'.format(option["max"]))
f.write(" },\n")
f.write("}\n")
update_codes()
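# Expand each option's direct inputs into the full transitive set of options
# it depends on, memoised through code.dependencies.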
def update_code_dependencies(code):
# print(sorted(codes.keys()))
if code.dependencies is not None:
return
code.dependencies = set()
code.dependencies.update(code.inputs)
for dependency in code.inputs:
code2 = codes[dependency]
update_code_dependencies(code2)
code.dependencies.update(code2.dependencies)
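# Emit the expand call for an option only after all of its dependencies have
# been emitted; code.marked prevents an option from being written twice.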
def write_option(f, option):
code = codes[option]
for dependency in sorted(code.dependencies):
write_option(f, dependency)
if not code.marked:
code.marked = True
f.write(" _{0}(c, f)\n".format(option))
def update_codes():
for option, code in codes.items():
update_code_dependencies(code)
with open("doc/options2.py", "w") as f:
f.write("# Automatically generated - do not edit by hand\n")
f.write("\n")
for option in sorted(codes.keys()):
code = codes[option]
f.write(
"\n# noinspection PyUnusedLocal,"
"SpellCheckingInspection,PyUnresolvedReferences\n"
)
f.write("def _{0}(c, f):\n".format(option))
if option.startswith("int_"):
f.write(" # noinspection PyUnresolvedReferences\n")
f.write(" if c.{0}.explicit:\n".format(option))
f.write(
' f.fail("{0} was set explicitly")\n'.format(option)
)
uses_value = False
for line in code.lines:
if not line.strip():
continue
if "value = " in line:
uses_value = True
f.write(" {0}\n".format(line))
if line.strip().startswith("f.fail("):
f.write(line.split("f.fail(")[0])
f.write(' raise Exception("Failed")\n')
if uses_value:
f.write(" c.{0} = value\n".format(option))
f.write("\n")
f.write(
"""\
class AbstractExpandFunctions:
@staticmethod
def matches(a, b):
pass
@staticmethod
def fail(message):
pass
@staticmethod
def warning(message):
pass
@staticmethod
def lower(s):
pass
"""
)
f.write("\ndef expand_config(c, f):\n")
f.write(" assert isinstance(f, AbstractExpandFunctions)\n")
for option in sorted(codes.keys()):
write_option(f, option)
shutil.move(
"doc/options2.py", "../fs-uae-launcher-private/launcher/ui/config/expand.py"
)
if __name__ == "__main__":
main()
| gpl-2.0 | -654,725,747,254,878,300 | 30.545126 | 90 | 0.463493 | false |
openstack/python-senlinclient | doc/source/conf.py | 1 | 2408 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# python-senlinclient documentation build configuration file
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'openstackdocstheme',
]
# The content that will be inserted into the main body of an autoclass
# directive.
autoclass_content = 'both'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/python-senlinclient'
openstackdocs_bug_project = 'python-senlinclient'
openstackdocs_bug_tag = ''
copyright = 'OpenStack Contributors'
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'openstackdocs'
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'senlin', 'OpenStack Senlin command line client',
['OpenStack Contributors'], 1),
]
| apache-2.0 | 1,665,903,313,054,131,200 | 33.4 | 79 | 0.69892 | false |
gregplaysguitar/django-trolley | cart/views.py | 1 | 17305 | # -*- coding: utf-8 -*-
import simplejson
from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseRedirect
from django.template import RequestContext
from django.shortcuts import get_object_or_404
from django.conf import settings
from django.template.loader import get_template
from django.template.loader import render_to_string
from django.template.loader import TemplateDoesNotExist
from django.core.mail import send_mail
from django.core.mail import EmailMultiAlternatives
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.utils import importlib
from django.views.decorators.cache import never_cache
from django.contrib.contenttypes.models import ContentType
from api import ItemAlreadyExists
from utils import form_errors_as_notification, get_current_site
import settings as cart_settings
from models import Order
from forms import AddToCartForm, OrderForm, shipping_options_form_factory, order_detail_form_factory, checkout_form_factory
import helpers
render_to_response = helpers.get_render_function()
def index(request):
"""Dummy view for backwards-compatibility - allows reversing of cart.view.index"""
pass
def validate_cart(request, view):
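    """Check whether the cart is ready for the given checkout step: 'delivery'
    requires a valid cart, 'payment' additionally requires that an order has
    already been created for it."""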
cart = helpers.get_cart()(request)
if view == 'delivery':
return cart.is_valid()
elif view == 'payment':
return bool(Order.objects.filter(pk=cart.data.get('order_pk', None)).count())
def steps(request):
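    """Return the checkout steps as (url, label) pairs, substituting None for
    the url of any step the cart is not yet valid for."""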
steps = []
if not cart_settings.SKIP_CHECKOUT:
steps.append((reverse('cart.views.checkout'), 'Review Order'))
for step in [
('delivery', 'Delivery Details'),
('payment', 'Payment Details')
]:
if validate_cart(request, step[0]):
steps.append((reverse('cart.views.%s' % step[0]), step[1]))
else:
steps.append((None, step[1]))
return steps
@never_cache
def checkout(request):
"""Display a list of cart items, quantities, total etc, with the option
to change quantities, specify shipping options etc."""
if cart_settings.SKIP_CHECKOUT:
return HttpResponseRedirect(reverse('cart.views.delivery'))
else:
cart = helpers.get_cart()(request)
shipping_options_form_cls = shipping_options_form_factory(cart)
checkout_form_cls = checkout_form_factory()
if request.method == 'POST':
checkout_form = checkout_form_cls(request.POST)
shipping_options_form = shipping_options_form_cls(request.POST, prefix='shipping')
valid = checkout_form.is_valid() and shipping_options_form.is_valid()
if valid:
cart.update_detail_data(checkout_form.cleaned_data)
cart.update_shipping_options(shipping_options_form.cleaned_data)
for item in cart:
# update quantities if changed
q = request.POST.get('quantity-%s' % item.formindex, None)
if q == 'remove':
quantity = 0
else:
try:
quantity = int(q)
except ValueError:
quantity = item['quantity']
if quantity != item['quantity']:
cart.update(item.product, quantity, item['options'])
if request.POST.get('next', False):
redirect_url = reverse(delivery)
else:
redirect_url = request.path_info
if request.is_ajax():
html = render_to_string(
'cart/checkout_ajax.html',
RequestContext(request, {
'cart': cart,
'steps': steps(request),
'current_step': 1,
'checkout_form': checkout_form,
'shipping_options_form': shipping_options_form,
})
)
return HttpResponse(simplejson.dumps({
'success': valid,
'cart': cart.as_dict(),
'redirect_url': redirect_url if valid else None,
'html': html,
}), mimetype='application/json')
elif valid:
return HttpResponseRedirect(redirect_url)
else:
checkout_form = checkout_form_cls(initial=cart.detail_data)
shipping_options_form = shipping_options_form_cls(prefix='shipping', initial=cart.shipping_options)
return render_to_response(
'cart/checkout.html',
RequestContext(request, {
'cart': cart,
'steps': steps(request),
'current_step': 1,
'checkout_form': checkout_form,
'shipping_options_form': shipping_options_form,
})
)
@never_cache
def delivery(request):
"""Collects standard delivery information, along with any extra information
from the order_detail model."""
cart = helpers.get_cart()(request)
order_form_cls = helpers.get_order_form()
detail_cls = helpers.get_order_detail()
if not validate_cart(request, 'delivery'):
return HttpResponseRedirect(reverse(checkout))
else:
try:
instance = Order.objects.get(pk=cart.data.get('order_pk', None))
if detail_cls:
try:
detail_instance = instance.get_detail()
except detail_cls.DoesNotExist:
detail_instance = None
else:
detail_instance = None
except Order.DoesNotExist:
instance = None
detail_instance = None
# get detail form, or dummy form if no ORDER_DETAIL_MODEL defined
detail_form_cls = order_detail_form_factory()
form_kwargs = {'label_suffix': '', 'instance': instance, 'initial': cart.data}
detail_form_kwargs = {'label_suffix': '', 'instance': detail_instance, 'initial': cart.detail_data, 'prefix': 'detail'}
if request.POST:
form = order_form_cls(request.POST, **form_kwargs)
detail_form = detail_form_cls(request.POST, **detail_form_kwargs)
valid = form.is_valid() and detail_form.is_valid()
if valid:
order = form.save(commit=False)
order.session_id = request.session.session_key
order.shipping_cost = cart.shipping_cost()
# save needed here to create the primary key
order.save()
for line in order.orderline_set.all():
line.delete()
for item in cart:
order.orderline_set.create(
product=item.product,
quantity=item['quantity'],
price=item.row_total(),
options=simplejson.dumps(item['options'])
)
# if the form has no 'save' method, assume it's the dummy form
if callable(getattr(detail_form, 'save', None)):
# the detail object may have been created on order save, so check for that
if detail_cls:
try:
detail_form.instance = order.get_detail()
except detail_cls.DoesNotExist:
pass
detail = detail_form.save(commit=False)
detail.order = order # in case it is being created for the first time
for field in cart_settings.CHECKOUT_FORM_FIELDS:
setattr(detail, field, cart.detail_data[field])
detail.save()
# confirmed status can trigger notifications etc, so don't set it until all
# order info is in the database
order.status = 'confirmed'
order.save()
cart.update_data({'order_pk': order.pk})
cart.modified()
redirect_url = reverse('cart.views.payment', args=(order.hash,))
else:
redirect_url = None
if request.is_ajax():
html = render_to_string(
'cart/delivery_ajax.html',
RequestContext(request, {
'cart': cart,
'form': form,
'detail_form': detail_form,
'steps': steps(request),
'current_step': 2,
})
)
return HttpResponse(simplejson.dumps({
'success': valid,
'cart': cart.as_dict(),
'redirect_url': redirect_url,
'hard_redirect': True,
'html': html,
}), mimetype='application/json')
elif valid:
return HttpResponseRedirect(redirect_url)
else:
form = order_form_cls(**form_kwargs)
detail_form = detail_form_cls(**detail_form_kwargs)
return render_to_response(
'cart/delivery.html',
RequestContext(request, {
'cart': cart,
'form': form,
'detail_form': detail_form,
'steps': steps(request),
'current_step': 2,
})
)
@never_cache
def payment(request, order_hash=None, param=None):
"""Handle payments using the specified backend."""
if order_hash:
order = get_object_or_404(Order, hash=order_hash)
else:
cart = helpers.get_cart()(request)
if not validate_cart(request, 'payment'):
return HttpResponseRedirect(reverse('cart.views.delivery'))
else:
# Assume this will work since validate_cart returned True
order = Order.objects.get(pk=cart.data['order_pk'])
return HttpResponseRedirect(reverse('cart.views.payment', args=(order.hash,)))
if order.total():
if cart_settings.PAYMENT_BACKEND:
try:
backend_module = importlib.import_module(cart_settings.PAYMENT_BACKEND)
except ImportError:
# Try old format for backwards-compatibility
backend_module = importlib.import_module('cart.payment.%s' % cart_settings.PAYMENT_BACKEND)
backend = backend_module.PaymentBackend()
return backend.paymentView(request, param, order)
else:
# If no payment backend, assume we're skipping this step
return HttpResponseRedirect(order.get_absolute_url())
else:
order.payment_successful = True
order.save()
return HttpResponseRedirect(order.get_absolute_url())
@never_cache
def complete(request, order_hash):
"""Display completed order information."""
cart = helpers.get_cart()(request)
cart.clear()
order = get_object_or_404(Order, hash=order_hash)
if not order.notification_sent:
notify_body = render_to_string(
'cart/email/order_notify.txt',
RequestContext(request, {
'order': order,
'site': get_current_site(),
})
)
send_mail(
"Order Received",
notify_body,
settings.DEFAULT_FROM_EMAIL,
[t[1] for t in cart_settings.MANAGERS]
)
order.notification_sent = True
order.save()
if order.email and not order.acknowledgement_sent:
acknowledge_body = render_to_string(
'cart/email/order_acknowledge.txt',
RequestContext(request, {
'order': order,
'site': get_current_site(),
})
)
acknowledge_subject = render_to_string(
'cart/email/order_acknowledge_subject.txt',
RequestContext(request, {
'order': order,
'site': get_current_site(),
})
)
try:
acknowledge_body_html = render_to_string('cart/email/order_acknowledge.html',
RequestContext(request, {'order': order, 'site': get_current_site()}))
except TemplateDoesNotExist:
acknowledge_body_html = None
msg = EmailMultiAlternatives(acknowledge_subject,
acknowledge_body,
settings.DEFAULT_FROM_EMAIL,
[order.email])
if acknowledge_body_html:
msg.attach_alternative(acknowledge_body_html, "text/html")
msg.send()
order.acknowledgement_sent = True
order.save()
return render_to_response(
'cart/complete.html',
RequestContext(request, {
'order': order,
})
)
def clear(request):
"""Remove all items from the cart."""
if request.method != 'POST':
return HttpResponseNotAllowed('GET not allowed; POST is required.')
else:
helpers.get_cart()(request).clear()
notification = (messages.SUCCESS, 'Your cart was emptied',)
if request.is_ajax():
response = HttpResponse()
response.write(simplejson.dumps({
'notification': notification
}))
return response
else:
messages.add_message(request, *notification)
return HttpResponseRedirect(request.POST.get('redirect_to', reverse(checkout)))
@never_cache
def update(request):
"""Update cart quantities."""
if request.method != 'POST':
return HttpResponseNotAllowed('GET not allowed; POST is required.')
else:
cart = helpers.get_cart()(request)
for item in cart:
index = 'quantity-%s' % unicode(item.formindex)
if index in request.POST:
try:
quantity = int(request.POST[index])
cart.update(item.product, quantity, item['options'])
except ValueError:
pass
notification = (messages.SUCCESS, 'Cart updated. <a href="%s">View cart</a>' % (reverse(checkout)))
if request.is_ajax():
response = HttpResponse()
data = {
'cart': cart.as_dict(),
'notification': notification,
}
response.write(simplejson.dumps(data))
return response
else:
messages.add_message(request, *notification)
return HttpResponseRedirect(request.POST.get('redirect_to', reverse(checkout)))
def add(request, content_type_id, product_id, form_class=None):
"""Add a product to the cart
POST data should include content_type_id,
"""
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
else:
ctype = get_object_or_404(ContentType, pk=content_type_id)
product = get_object_or_404(ctype.model_class(), pk=product_id)
if not form_class:
form_class = helpers.get_add_form(product)
form = form_class(request.POST, product=product)
cart = helpers.get_cart()(request)
if form.is_valid():
form.add(request)
notification = (messages.SUCCESS, 'Product was added to your cart. <a href="%s">View cart</a>' % (reverse(checkout)))
else:
notification = (messages.ERROR, 'Could not add product to cart. \r%s' % form_errors_as_notification(form))
if request.is_ajax():
data = {
'notification': notification,
'cart': cart.as_dict(),
'checkout_url': reverse('cart.views.checkout'),
'delivery_url': reverse('cart.views.delivery'),
}
if form.is_valid():
data.update({
'success': True,
'cart': cart.as_dict(),
'product_pk': product.pk,
'product_name': product.name,
'product_quantity_added': form.get_quantity(),
'product_quantity': cart.get(product, form.get_options())['quantity'],
'total_quantity': cart.quantity(),
})
return HttpResponse(simplejson.dumps(data), mimetype='application/json')
else:
messages.add_message(request, *notification)
if form.is_valid():
return HttpResponseRedirect(request.POST.get('redirect_to', reverse(checkout)))
else:
return HttpResponseRedirect(request.META.get('HTTP_REFERER', reverse(checkout)))
| bsd-3-clause | -3,236,693,349,976,724,500 | 35.976496 | 129 | 0.535452 | false |
dhermes/project-euler | python/complete/no335.py | 1 | 1809 | #!/usr/bin/env python
# Since M(2**n + 1) = 4**n - 3**n + 2**(n + 1) (empirically),
# we find sum_{n=0}^{P} M(2**n + 1) is equal to
# (4**(P + 1) - 1)/3 - (3**(P + 1) - 1)/2 + 2*(2**(P + 1) - 1)
# = (4*(4**P) - 1)*(3**(-1)) - (3*(3**P) - 1)*(2**(-1)) + 4*(2**P) - 2
# (This is because (r - 1)*(r**P + ... + r + 1) = r**(P + 1) - 1.)
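# For example, with P = 2 the geometric sums check out:
# 4**0 + 4**1 + 4**2 = 21 = (4**3 - 1)/3 and 3**0 + 3**1 + 3**2 = 13 = (3**3 - 1)/2.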
from python.decorators import euler_timer
from python.functions import inverse_mod_n
def moves(n):
if n < 3:
return n
goal_state = [1] * n
state = [0, 2] + [1] * (n - 2)
num_moves = 1
last_placed = 1
while state != goal_state:
beans = state[last_placed]
state[last_placed] = 0
for bean in range(1, beans + 1):
next_index = (last_placed + bean) % n
state[next_index] += 1
last_placed = (last_placed + beans) % n
num_moves += 1
return num_moves
def check_formula(n):
return (moves(2 ** n + 1) == 4 ** n - 3 ** n + 2 ** (n + 1))
# Since (a**(n**k))**n = a**(n*(n**k)) = a**(n**(k + 1)),
# we can easily compute X**(P + 1) = X*(X**P) for P = 10**18.
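# For example, modular_exponentiate(2, 10, 18, m) below computes 2**(10**18)
# mod m by raising to the 10th power eighteen times, so the huge exponent
# 10**18 is never materialised.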
def modular_exponentiate(val, exp_base, exp_power, modulus):
result = val
for i in xrange(exp_power):
result = (result ** exp_base) % modulus
return result
def main(verbose=False):
for n in range(10):
if not check_formula(n):
raise Exception("Proposed formula for M(2**k + 1) incorrect.")
modulus = 7 ** 9
p_2 = 4 * modular_exponentiate(2, 10, 18, modulus) - 2
p_3 = 3 * modular_exponentiate(3, 10, 18, modulus) - 1
p_4 = 4 * modular_exponentiate(4, 10, 18, modulus) - 1
return (p_4 * inverse_mod_n(3, modulus) -
p_3 * inverse_mod_n(2, modulus) + p_2) % (modulus)
if __name__ == '__main__':
print euler_timer(335)(main)(verbose=True)
| apache-2.0 | -1,711,806,754,531,232,000 | 29.15 | 74 | 0.512438 | false |
dionhaefner/veros | veros/core/streamfunction/island.py | 1 | 1983 | import numpy
import scipy.ndimage
from ... import veros_method, runtime_settings as rs
from .. import utilities
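# Note: `numpy` is used directly where scipy.ndimage requires plain arrays;
# the `np` used further down is presumably the backend-agnostic array module
# (numpy or Bohrium) made available by the veros framework via @veros_method.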
@veros_method
def isleperim(vs, kmt, verbose=False):
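    """Label the connected land masses in the topography mask ``kmt``, mark
    their perimeter cells with -1, merge masses that wrap across the cyclic
    x-boundary, and renumber the labels consecutively in scan order."""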
utilities.enforce_boundaries(vs, kmt)
if rs.backend == 'bohrium':
kmt = kmt.copy2numpy()
structure = numpy.ones((3, 3)) # merge diagonally connected land masses
# find all land masses
labelled, _ = scipy.ndimage.label(kmt == 0, structure=structure)
# find and set perimeter
land_masses = labelled > 0
inner = scipy.ndimage.binary_dilation(land_masses, structure=structure)
perimeter = numpy.logical_xor(inner, land_masses)
labelled[perimeter] = -1
# match wrapping periodic land masses
if vs.enable_cyclic_x:
west_slice = labelled[2]
east_slice = labelled[-2]
for west_label in numpy.unique(west_slice[west_slice > 0]):
east_labels = numpy.unique(east_slice[west_slice == west_label])
east_labels = east_labels[~numpy.isin(east_labels, [west_label, -1])]
if not east_labels.size:
# already labelled correctly
continue
assert len(numpy.unique(east_labels)) == 1, (west_label, east_labels)
labelled[labelled == east_labels[0]] = west_label
utilities.enforce_boundaries(vs, labelled)
# label landmasses in a way that is consistent with pyom
labels = numpy.unique(labelled[labelled > 0])
label_idx = {}
for label in labels:
# find index of first island cell, scanning west to east, north to south
label_idx[label] = np.argmax(labelled[:, ::-1].T == label)
sorted_labels = list(sorted(labels, key=lambda i: label_idx[i]))
# ensure labels are numbered consecutively
relabelled = labelled.copy()
for new_label, label in enumerate(sorted_labels, 1):
if label == new_label:
continue
relabelled[labelled == label] = new_label
return np.asarray(relabelled)
| mit | 4,557,765,566,113,674,000 | 32.610169 | 81 | 0.646495 | false |
AlessandroMinali/pyIRCbot | pyIRCbot.py | 1 | 2803 | #Alessandro Minali 2014
# www.alessandrom.me
#questions/suggestions/feedback to: [email protected]
##ONLY CHANGE VALUES THAT HAVE COMMENTS BESIDE THEM
import socket
import commands
import moderation
import time
class PyIRCBot():
def __init__(self):
HOST = "irc.twitch.tv"
PORT = 6667
REALNAME = "Bot"
data = self.config()
NICK = data[0] ##This has to be your bots username.
IDENT = data[0] ##Bot username again.
PASS = data[1] ##This has to be your oauth token.
self.CHANNEL = data[2] ##This is the channel your bot will be working on.
self.flag = data[3]
self.s = socket.socket()
self.s.connect((HOST, PORT))
self.s.send("PASS %s\r\n" % PASS)
self.s.send("NICK %s\r\n" % NICK)
self.s.send("USER %s %s bla :%s\r\n" % (IDENT, HOST, REALNAME))
self.s.send("JOIN %s\r\n" % self.CHANNEL)
def run(self):
LAST_MESSAGE = ""
readbuffer = ""
print "Running..."
while(1):
time.sleep(0.3)
readbuffer = readbuffer + self.s.recv(1024)
temp = readbuffer.split("\n")
readbuffer = temp.pop()
self.debug(temp)
for line in temp:
message = line.split(":")
name = message[1].split("!")[0]
ENTIRE_MESSAGE = message[-1]
if(ENTIRE_MESSAGE[0] == "!"):
self.command(ENTIRE_MESSAGE)
elif(line.split()[0].strip(":") == "PING"):
self.s.send("PONG %s\r\n" % line.split()[1])
else:
self.moderate(ENTIRE_MESSAGE, name)
def command(self, msg):
name = msg[1:-1:].split()[0]
try:
ans = eval("commands." + name + "." + name + "(\"" + msg[1:-1:] + "\")")
reply = "PRIVMSG "+ self.CHANNEL + " :" + str(ans) + "\r\n"
self.s.send(reply)
except:
pass
def moderate(self, msg, name):
if moderation.mod.scan(msg):
reply = "PRIVMSG "+ self.CHANNEL + " :" + moderation.mod.timeout(name,200) + "\r\n"
self.s.send(reply)
## reply = "PRIVMSG "+ self.CHANNEL + " :Bad boy, :( you know what you did!\r\n"
## self.s.send(reply)
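    # config.txt is read one value per line; only the last whitespace-separated
    # token on each line is kept, in this order: bot username, oauth token,
    # channel, debug flag. A hypothetical example:
    #     username mybot
    #     oauth oauth:abc123
    #     channel #mychannel
    #     debug 1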
def config(self):
f = open("config.txt", "r")
data = f.readlines()
j = 0
for i in data:
data[j] = i.split()[-1]
j = j + 1
return data
def debug(self, log):
if int(self.flag):
print log
else:
pass
if __name__ == "__main__":
bot = PyIRCBot()
bot.run()
| mit | -26,257,662,847,638,284 | 30.593023 | 95 | 0.475919 | false |