prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>lit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
<|fim▁hole|>
from lit.main import main
import os
if __name__ == '__main__':
if not os.path.exists(".cache/core.Item"):
print("Note that first run may take quite a while .cache/core.* is populated...")
main()<|fim▁end|> | # https://medium.com/@mshockwave/using-llvm-lit-out-of-tree-5cddada85a78
# To run lit-based test suite:
# cd xyz/qmlcore/test && ./lit.py -va . |
<|file_name|>bothLocust.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import csv
import os
import random
import re
import urllib3
from urllib.parse import parse_qs, urlparse, unquote, urlencode, quote
from locust import HttpLocust, TaskSet, task, TaskSequence, seq_task, between
# SAML IdP Host
HOST = "https://axman000.local:8443"
# Run weighting for each protocol
CAS_WEIGHT = 3
SAML_WEIGHT = 1
# Testing using locally generated cert, so turning off error messaging
IGNORE_SSL = True
# Search Patterns
EXECUTION_PAT = re.compile(r'<input type="hidden" name="execution" value="([^"]+)"')
EVENTID_PAT = re.compile(r'<input type="hidden" name="_eventId" value="([^"]+)"')
RELAY_STATE_PAT = re.compile(r'<input type="hidden" name="RelayState" value="([^"]+)"')
SAML_RESPONSE_PAT = re.compile(r'<input type="hidden" name="SAMLResponse" value="([^"]+)"')
SAML_SP_PAGE_PAT = re.compile(".*PHP Variables.*")
# Service Provider settings
CAS_SP = "https://castest.edu/"
# SAML Service Provider settings
SP = 'https://idptestbed'
SP_LOGIN = '/Shibboleth.sso/SAML2/POST'
SP_ENTITY_ID = 'https://sp.idptestbed/shibboleth'
SP_PROTECTED = '/php-shib-protected/'
sp = 'https://idptestbed'
sp_login = '/Shibboleth.sso/SAML2/POST'
sp_entity_id = 'https://sp.idptestbed/shibboleth'
sp_protected = '/php-shib-protected/'
class BasicTaskSet(TaskSet):
def on_start(self):
"""
LOCUST startup process
"""
if IGNORE_SSL:
urllib3.disable_warnings()
print("Start Locust Run!")
def on_stop(self):
"""
LOCUST shutdown process
"""
print("End of Locust Run")
@task(CAS_WEIGHT)
class CASTaskSet(TaskSequence):
@seq_task(1)
def login(self):
"""
Main script used to log in via CAS protocol
"""
print("CAS Login Process Starting ...")
client = self.client
cas_response = client.get("/cas/login",
params={'service': CAS_SP},
name="CAS 1. /cas/login - GET",
verify=False)
content = cas_response.text
found_exec = EXECUTION_PAT.search(content)
if found_exec is None:
print("CAS No Execution field found on login form!")
self.interrupt()
execution = found_exec.groups()[0]<|fim▁hole|> if found_eventid is None:
print("CAS No Event Id field found on login form!")
self.interrupt()
event_id = found_eventid.groups()[0]
creds = random.choice(self.locust.creds)
cas_user = creds[0]
cas_passwd = creds[1]
data = {
"username": cas_user,
"password": cas_passwd,
"execution": execution,
"_eventId": event_id,
"geolocation": "",
}
print("CAS Logging in User")
cas_login_response = client.post("/cas/login?service={}".format(CAS_SP),
data=data,
name="CAS 2. /cas/login - POST",
verify=False,
allow_redirects=False)
cas_response_url = cas_login_response.next.url
url_query = unquote(urlparse(cas_response_url).query)
cas_parsed_url = parse_qs(url_query)
if 'ticket' in cas_parsed_url:
cas_ticket = cas_parsed_url['ticket'][0]
else:
print("CAS No Ticket found in returned form!")
self.interrupt()
print("Validating service ticket ...")
ticket_response = client.get("/cas/serviceValidate",
params={'service': CAS_SP, 'ticket': cas_ticket},
name="CAS 3. /cas/serviceValidate - GET",
verify=False)
user_data = ticket_response.text
if "<cas:authenticationSuccess>" in user_data:
print("Succesful Run!")
else:
print("CAS No Event Id field found on login form!")
self.interrupt()
print("Validating service ticket ...")
@seq_task(2)
def logout(self):
"""
CAS User logout
"""
print("CAS Logged out of SSO.")
self.client.get("/cas/logout",
verify=False,
name="CAS 4. /cas/logout - GET")
self.interrupt()
@task(SAML_WEIGHT)
class SAMLTaskSet(TaskSequence):
@seq_task(1)
def login(self):
"""
Main script used to log in via SAML protocol
"""
client = self.client
print("SAML Go to SP and redirect to CAS")
sp_client = SP + SP_PROTECTED
client_response = client.get(sp_client,
verify=False,
name="SAML 1. {} - GET".format(SP_PROTECTED))
print("SAML Now at CAS Login page")
response_url = client_response.url
url_query = unquote(urlparse(response_url).query)
parsed_url = parse_qs(url_query)
print("SAML Grab data passed to CAS")
if 'RelayState' in parsed_url:
sp_relay_state = parsed_url['RelayState'][0]
else:
print("SAML No RelayState field found on login form!")
self.interrupt()
if 'SAMLRequest' in parsed_url:
sp_saml_request = parsed_url['SAMLRequest'][0]
else:
print("SAML No SAMLRequest field found on login form!")
self.interrupt()
content = client_response.text
found_exec = EXECUTION_PAT.search(content)
if found_exec is None:
print("SAML No Execution field found on login form!")
self.interrupt()
execution = found_exec.groups()[0]
found_eventid = EVENTID_PAT.search(content)
if found_eventid is None:
print("SAML No Event Id field found on login form!")
self.interrupt()
event_id = found_eventid.groups()[0]
print("SAML Get user login info")
creds = random.choice(self.locust.creds)
user = creds[0]
passwd = creds[1]
print("SAML Build Login parameters")
params = {
'SAMLRequest': sp_saml_request,
'RelayState': sp_relay_state
}
data = {
"username": user,
"password": passwd,
"execution": execution,
"_eventId": event_id,
"geolocation": '',
}
encoded_params = urlencode(params, quote_via=quote)
encoded_entityid = quote(SP_ENTITY_ID, safe='')
encoded_service = quote(
'{}/cas/idp/profile/SAML2/Callback?entityId={}&{}'.format(HOST,
encoded_entityid,
encoded_params), safe='')
print("SAML Submit User login credentials ...")
login_response = client.post("/cas/login?service=" + encoded_service,
data=data,
verify=False,
allow_redirects=True,
name="SAML 2. /cas/login?service= - POST")
login_content = login_response.text
found_relay = RELAY_STATE_PAT.search(login_content)
if found_relay is None:
print("SAML No Relay State field found!")
self.interrupt()
# Having issues with the relay coming back with hex code, adding this call to convert.
idp_relay_state = found_relay.groups()[0].replace(':', ':')
saml_response = SAML_RESPONSE_PAT.search(login_content)
if saml_response is None:
print("SAML No SAML Response field found!")
self.interrupt()
idp_saml_response = unquote(saml_response.groups()[0])
sp_url = SP + SP_LOGIN
data = {
"RelayState": idp_relay_state,
"SAMLResponse": idp_saml_response,
}
print("SAML Return call to SP with SAML info ...")
sp_response = client.post(sp_url,
data=data,
verify=False,
name="SAML 3. {} - POST".format(SP_LOGIN))
assert SAML_SP_PAGE_PAT.search(sp_response.text) is not None, "Expected title has not been found!"
print("SAML Successful Run!")
@seq_task(2)
def logout(self):
"""
SAML User logout
"""
print("SAML Logged out of SSO.")
self.client.get("/cas/logout",
verify=False,
name="SAML 4. /cas/logout - GET")
self.interrupt()
def load_creds():
"""
Load test user credentials.
"""
credpath = os.path.join(
os.path.dirname(__file__),
"credentials.csv")
creds = []
with open(credpath, "r") as f:
reader = csv.reader(f)
for row in reader:
creds.append((row[0], row[1]))
return creds
class BothLocust(HttpLocust):
task_set = BasicTaskSet
host = HOST
wait_time = between(2, 15)
creds = load_creds()<|fim▁end|> |
found_eventid = EVENTID_PAT.search(content) |
<|file_name|>stdafx.cpp<|end_file_name|><|fim▁begin|>// stdafx.cpp : Ç¥ÁØ Æ÷ÇÔ ÆÄÀϸ¸ µé¾î ÀÖ´Â ¼Ò½º ÆÄÀÏÀÔ´Ï´Ù.
// SampleClient.pch´Â ¹Ì¸® ÄÄÆÄÀÏµÈ Çì´õ°¡ µË´Ï´Ù.
// stdafx.obj¿¡´Â ¹Ì¸® ÄÄÆÄÀÏµÈ Çü½Ä Á¤º¸°¡ Æ÷ÇԵ˴ϴÙ.
#include "stdafx.h"<|fim▁hole|>// ÀÌ ÆÄÀÏÀÌ ¾Æ´Ñ STDAFX.H¿¡¼ ÂüÁ¶ÇÕ´Ï´Ù.<|fim▁end|> |
// TODO: ÇÊ¿äÇÑ Ãß°¡ Çì´õ´Â |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import division
import datetime
import re
import itertools
import random
from django.conf import settings
from django.core import exceptions
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.gis.db import models
from markitup.fields import MarkupField
from django_date_extensions.fields import ApproximateDateField, ApproximateDate
from tasks.models import Task
from images.models import HasImageMixin, Image
from scorecards.models import ScorecardMixin
from mapit import models as mapit_models
# tell South how to handle the custom fields
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^django_date_extensions\.fields\.ApproximateDateField"])
add_introspection_rules([], ["^django.contrib\.gis\.db\.models\.fields\.PointField"])
date_help_text = "Format: '2011-12-31', '31 Jan 2011', 'Jan 2011' or '2011' or 'future'"
class ModelBase(models.Model):
created = models.DateTimeField( auto_now_add=True, default=datetime.datetime.now(), )
updated = models.DateTimeField( auto_now=True, default=datetime.datetime.now(), )
def css_class(self):
return self._meta.module_name
def get_admin_url(self):
url = reverse(
'admin:%s_%s_change' % ( self._meta.app_label, self._meta.module_name),
args=[self.id]
)
return url
class Meta:
abstract = True
class ManagerBase(models.GeoManager):
def update_or_create(self, filter_attrs, attrs):
"""Given unique look-up attributes, and extra data attributes, either
updates the entry referred to if it exists, or creates it if it doesn't.
Returns the object updated or created, having saved the changes.
"""
try:
obj = self.get(**filter_attrs)
changed = False
for k, v in attrs.items():
if obj.__dict__[k] != v:
changed = True
obj.__dict__[k] = v
if changed:
obj.save()
except exceptions.ObjectDoesNotExist:
attrs.update(filter_attrs)
obj = self.create(**attrs)
obj.save()
return obj
class ContactKind(ModelBase):
name = models.CharField(max_length=200, unique=True)
slug = models.SlugField(max_length=200, unique=True,
help_text="created from name")
objects = ManagerBase()
def __unicode__(self):
return self.name
class Meta:
ordering = ["slug"]
class Contact(ModelBase):
kind = models.ForeignKey('ContactKind')
value = models.TextField()
note = models.TextField(blank=True,
help_text="publicly visible, use to clarify contact detail")
source = models.CharField(max_length=500, blank=True, default='',
help_text="where did this contact detail come from")
# link to other objects using the ContentType system
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
objects = ManagerBase()
def __unicode__(self):
return "%s (%s for %s)" % (self.value, self.kind, self.content_object)
def generate_tasks(self):
"""generate tasks for ourselves, and for the foreign object"""
Task.call_generate_tasks_on_if_possible(self.content_object)
return []
class Meta:
ordering = ["content_type", "object_id", "kind"]
class InformationSource(ModelBase):
source = models.CharField(max_length=500)
note = models.TextField(blank=True)
entered = models.BooleanField(default=False,
help_text="has the information in this source been entered into this system?")
# link to other objects using the ContentType system
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
objects = ManagerBase()
def __unicode__(self):
return "%s: %s" % (self.source, self.content_object)
class Meta:
ordering = ["content_type", "object_id", "source"]
class PersonQuerySet(models.query.GeoQuerySet):
def is_politician(self, when=None):
# FIXME - Don't like the look of this, rather a big subquery.
return self.filter(position__in=Position.objects.all().current_politician_positions(when))
class PersonManager(ManagerBase):
def get_query_set(self):
return PersonQuerySet(self.model)
def loose_match_name(self, name):
"""Search for a loose match on a name. May not be too reliable"""
# import here to avoid creating an import loop
from haystack.query import SearchQuerySet
# Try matching all the bits
results = SearchQuerySet().filter_and(content=name).models(self.model)
# if that fails try matching all the bits in any order
if not len(results):
results = SearchQuerySet().models(Person)
for bit in re.split(r'\s+', name):
results = results.filter_and(content=bit)
# If we have exactly one result return that
if len(results) == 1:
return results[0].object
else:
return None
def get_next_featured(self, current_slug, want_previous=False):
""" Returns the next featured person, in slug order: using slug order because it's unique and easy to
exclude the current person.
If no slug is provided, returns a random person.
If the slug is purely numeric (n), this consisently returns a person (actually the nth wrapping around
where necessary): this allows js to generate random calls that can nonetheless be served from the cache."""
all_results = self.filter(can_be_featured=True)
if not all_results.exists():
return None
sort_order = 'slug'
if not current_slug:
return random.choice(all_results)
elif current_slug.isdigit():
all_results = all_results.order_by(sort_order)
return all_results[int(current_slug) % len(all_results)] # ignore direction: just provide a person
else:
all_results = all_results.exclude(slug=current_slug)
if len(all_results) == 0: # special case: return the excluded person if they are the only one or nothing
all_results = self.filter(can_be_featured=True)
if all_results.exists():
return all_results[0]
else:
return None
if want_previous:
sort_order = '-slug'
results = all_results.order_by(sort_order).filter(slug__lt=current_slug)[:1]
else:
results = all_results.order_by(sort_order).filter(slug__gt=current_slug)[:1]
if len(results) == 1:
return results[0]
else: # we're at the start/end of the list, wrap round to the other end
results = all_results.order_by(sort_order)[:1]
if len(results) == 1:
return results[0]
else:
return None
class Person(ModelBase, HasImageMixin, ScorecardMixin):
title = models.CharField(max_length=100, blank=True)
legal_name = models.CharField(max_length=300)
other_names = models.TextField(blank=True, default='', help_text="other names the person might be known by - one per line")
slug = models.SlugField(max_length=200, unique=True, help_text="auto-created from first name and last name")
gender = models.CharField(max_length=1, choices=(('m','Male'),('f','Female')) )
date_of_birth = ApproximateDateField(blank=True, help_text=date_help_text)
date_of_death = ApproximateDateField(blank=True, help_text=date_help_text)
original_id = models.PositiveIntegerField(blank=True, null=True, help_text='temporary - used to link to members in original mzalendo.com db')
# religion
# tribe
summary = MarkupField(blank=True, default='')
contacts = generic.GenericRelation(Contact)
images = generic.GenericRelation(Image)
objects = PersonManager()
can_be_featured = models.BooleanField(default=False, help_text="can this person be featured on the home page (e.g., is their data appropriate and extant)?")
def clean(self):
# strip other_names and flatten multiple newlines
self.other_names = re.sub(r"\n+", "\n", self.other_names).strip()
@property
def name(self):
if self.other_names:
return self.other_names.split("\n")[0]
else:
return self.legal_name
def additional_names(self):
if self.other_names:
return self.other_names.split("\n")[1:]
else:
return []
def aspirant_positions(self):
return self.position_set.all().current_aspirant_positions()
def is_aspirant(self):
return self.aspirant_positions().exists()
def politician_positions(self):
return self.position_set.all().current_politician_positions()
def is_politician(self):
return self.politician_positions().exists()
def parties(self):
"""Return list of parties that this person is currently a member of"""
party_memberships = self.position_set.all().currently_active().filter(title__slug='member').filter(organisation__kind__slug='party')
return Organisation.objects.filter(position__in=party_memberships)
def constituencies(self):
"""Return list of constituencies that this person is currently an politician for"""
return Place.objects.filter(position__in=self.politician_positions())
def __unicode__(self):
return self.legal_name
@models.permalink
def get_absolute_url(self):
return ('person', [self.slug])
def generate_tasks(self):
"""Generate tasks for missing contact details etc"""
task_slugs = []
wanted_contact_slugs = ['phone','email','address']
have_contact_slugs = [c.kind.slug for c in self.contacts.all()]
for wanted in wanted_contact_slugs:
if wanted not in have_contact_slugs:
task_slugs.append("find-missing-" + wanted)
return task_slugs
def scorecard_overall(self):
total_count = super(Person, self).active_scorecards().count()
total_score = super(Person, self).active_scorecards().aggregate(models.Sum('score'))['score__sum']
for constituency in self.constituencies():
constituency_count = constituency.active_scorecards().count()
if constituency_count:
total_count += constituency_count
total_score += constituency.active_scorecards().aggregate(models.Sum('score'))['score__sum']
return total_score / total_count
def scorecards(self):
"""This is the list of scorecards that will actually be displayed on the site."""
scorecard_lists = []
# We're only showing scorecards for current MPs
if self.is_politician():
scorecard_lists.append(super(Person, self).scorecards())
scorecard_lists.extend([x.scorecards() for x in self.constituencies()])
return itertools.chain(*scorecard_lists)
def has_scorecards(self):
# We're only showing scorecards for current MPs
if self.is_politician():
return super(Person, self).has_scorecards() or any([x.has_scorecards() for x in self.constituencies()])
class Meta:
ordering = ["slug"]
class OrganisationKind(ModelBase):
name = models.CharField(max_length=200, unique=True)
slug = models.SlugField(max_length=200, unique=True, help_text="created from name")
summary = MarkupField(blank=True, default='')
objects = ManagerBase()
def __unicode__(self):
return self.name
class Meta:
ordering = ["slug"]
class OrganisationQuerySet(models.query.GeoQuerySet):
def parties(self):
return self.filter(kind__slug='party')
def active_parties(self):
# FIXME - What a lot of subqueries...
active_politician_positions = Position.objects.all().current_politician_positions()
active_member_positions = Position.objects.all().filter(title__slug='member').currently_active()
current_politicians = Person.objects.all().filter(position__in=active_politician_positions).distinct()
current_members = Person.objects.all().filter(position__in=active_member_positions).distinct()
return (
self
.parties()
.filter(position__person__in=current_politicians)
.filter(position__person__in=current_members)
.distinct()
)
class OrganisationManager(ManagerBase):
def get_query_set(self):
return OrganisationQuerySet(self.model)
class Organisation(ModelBase):
name = models.CharField(max_length=200)
slug = models.SlugField(max_length=200, unique=True, help_text="created from name")
summary = MarkupField(blank=True, default='')
kind = models.ForeignKey('OrganisationKind')
started = ApproximateDateField(blank=True, help_text=date_help_text)
ended = ApproximateDateField(blank=True, help_text=date_help_text)
original_id = models.PositiveIntegerField(blank=True, null=True, help_text='temporary - used to link to parties in original mzalendo.com db')
objects = OrganisationManager()
contacts = generic.GenericRelation(Contact)
def __unicode__(self):
return "%s (%s)" % (self.name, self.kind)
@models.permalink
def get_absolute_url(self):
return ('organisation', [self.slug])
class Meta:
ordering = ["slug"]
class PlaceKind(ModelBase):
name = models.CharField(max_length=200, unique=True)
plural_name = models.CharField(max_length=200, blank=True)
slug = models.SlugField(max_length=200, unique=True, help_text="created from name")
summary = MarkupField(blank=True, default='')
objects = ManagerBase()
def __unicode__(self):
return self.name
class Meta:
ordering = ["slug"]
class PlaceQuerySet(models.query.GeoQuerySet):
def constituencies(self):
return self.filter(kind__slug='constituency')
def counties(self):
return self.filter(kind__slug='county')
class PlaceManager(ManagerBase):
def get_query_set(self):
return PlaceQuerySet(self.model)
class Place(ModelBase, ScorecardMixin):
name = models.CharField(max_length=200)
slug = models.SlugField(max_length=100, unique=True, help_text="created from name")
kind = models.ForeignKey('PlaceKind')
summary = MarkupField(blank=True, default='')
shape_url = models.URLField(verify_exists=True, blank=True )
location = models.PointField(null=True, blank=True)
organisation = models.ForeignKey('Organisation', null=True, blank=True, help_text="use if the place uniquely belongs to an organisation - eg a field office" )
original_id = models.PositiveIntegerField(blank=True, null=True, help_text='temporary - used to link to constituencies in original mzalendo.com db')
mapit_area = models.ForeignKey( mapit_models.Area, null=True, blank=True )
parent_place = models.ForeignKey('self', blank=True, null=True, related_name='child_places')
objects = PlaceManager()
is_overall_scorecard_score_applicable = False
@property
def position_with_organisation_set(self):
return self.position_set.filter(organisation__isnull=False)
def __unicode__(self):
return "%s (%s)" % (self.name, self.kind)
def is_constituency(self):
return self.kind.slug == 'constituency'
def current_politician_position(self):
"""Return the current politician position, or None"""
qs = self.position_set.all().current_politician_positions()
try:
return qs[0]
except IndexError:
return None
def related_people(self):
# Can't order by the sorting_end_date_high of position
# because that ruins the distinct.
return Person.objects.filter(position__place=self).distinct()#.order_by('-position__sorting_end_date_high')
@models.permalink
def get_absolute_url(self):
return ('place', [self.slug])
class Meta:
ordering = ["slug"]
class PositionTitle(ModelBase):
name = models.CharField(max_length=200, unique=True)
slug = models.SlugField(max_length=200, unique=True, help_text="created from name")
summary = MarkupField(blank=True, default='')
original_id = models.PositiveIntegerField(blank=True, null=True,
help_text='temporary - used to link to data in original mzalendo.com db')
requires_place = models.BooleanField(default=False,
help_text="Does this job title require a place to complete the position?")
objects = ManagerBase()
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('position', [self.slug])
def organisations(self):
"""
Return a qs of organisations, with the most frequently related first.
Each organisation is also annotated with 'position_count' which might be
useful.
This is intended as an alternative to assigning a org to each
position_title. Instead we can deduce it from the postions.
"""
orgs = (
Organisation
.objects
.filter(position__title=self)
.annotate(position_count=models.Count('position'))
.order_by('-position_count')
)
return orgs
class Meta:
ordering = ["slug"]
class PositionQuerySet(models.query.GeoQuerySet):
def currently_active(self, when=None):
"""Filter on start and end dates to limit to currently active postitions"""
if when == None:
when = datetime.date.today()
now_approx = repr(ApproximateDate(year=when.year, month=when.month, day=when.day))
qs = (
self
.filter(start_date__lte=now_approx)
.filter(Q(sorting_end_date_high__gte=now_approx) | Q(end_date=''))
)
return qs
def currently_inactive(self, when=None):
"""Filter on start and end dates to limit to currently inactive postitions"""
if when == None:
when = datetime.date.today()
now_approx = repr(ApproximateDate(year=when.year, month=when.month, day=when.day))
start_criteria = Q(start_date__gt=now_approx)
end_criteria = Q(sorting_end_date_high__lt=now_approx) & ~Q(end_date='')
qs = self.filter(start_criteria | end_criteria)
return qs
def aspirant_positions(self):
"""
Filter down to only positions which are aspirant ones. This uses the
convention that the slugs always start with 'aspirant-'.
"""
return self.filter( title__slug__startswith='aspirant-' )
def current_aspirant_positions(self, when=None):
"""Filter down to only positions which are those of current aspirantsns."""
return self.aspirant_positions().currently_active(when)
def politician_positions(self):
"""Filter down to only positions which are one of the two kinds of
politician (those with constituencies, and nominated ones).
"""
return self.filter(title__slug__in=settings.POLITICIAN_TITLE_SLUGS)
def current_politician_positions(self, when=None):
"""Filter down to only positions which are those of current politicians."""
return self.politician_positions().currently_active(when)
def political(self):
"""Filter down to only the political category"""
return self.filter(category='political')
def education(self):
"""Filter down to only the education category"""
return self.filter(category='education')
def other(self):
"""Filter down to only the other category"""
return self.filter(category='other')
def order_by_place(self):
"""Sort by the place name"""
return self.order_by('place__name')
class PositionManager(ManagerBase):
def get_query_set(self):
return PositionQuerySet(self.model)
class Position(ModelBase):
category_choices = (
('political', 'Political'),
('education', 'Education (as a learner)'),
('other', 'Anything else'),
)
person = models.ForeignKey('Person')
organisation = models.ForeignKey('Organisation', null=True, blank=True)
place = models.ForeignKey('Place', null=True, blank=True, help_text="use if needed to identify the position - eg add constituency for a politician" )
title = models.ForeignKey('PositionTitle', null=True, blank=True)
subtitle = models.CharField(max_length=200, blank=True, default='')
category = models.CharField(max_length=20, choices=category_choices, default='other',
help_text="What sort of position was this?")
note = models.CharField(max_length=300, blank=True, default='')
start_date = ApproximateDateField(blank=True, help_text=date_help_text)
end_date = ApproximateDateField(blank=True, help_text=date_help_text, default="future")
# hidden fields that are only used to do sorting. Filled in by code.
sorting_start_date = models.CharField(editable=True, default='', max_length=10)
sorting_end_date = models.CharField(editable=True, default='', max_length=10)
sorting_start_date_high = models.CharField(editable=True, default='', max_length=10)
sorting_end_date_high = models.CharField(editable=True, default='', max_length=10)
objects = PositionManager()
def clean(self):
if not (self.organisation or self.title or self.place):
raise exceptions.ValidationError('Must have at least one of organisation, title or place.')
if self.title and self.title.requires_place and not self.place:
raise exceptions.ValidationError("The job title '%s' requires a place to be set" % self.title.name)
def display_dates(self):
"""Nice HTML for the display of dates"""
# no dates
if not (self.start_date or self.end_date):
return ''
# start but no end
if self.start_date and not self.end_date:
return "Started %s" % self.start_date
# both dates
if self.start_date and self.end_date:<|fim▁hole|>
# end but no start
if not self.start_date and self.end_date:
return 'ongoing'
def display_start_date(self):
"""Return text that represents the start date"""
if self.start_date:
return str(self.start_date)
return '?'
def display_end_date(self):
"""Return text that represents the end date"""
if self.end_date:
return str(self.end_date)
return '?'
def is_ongoing(self):
"""Return True or False for whether the position is currently ongoing"""
if not self.end_date:
return False
elif self.end_date.future:
return True
else:
# turn today's date into an ApproximateDate object and cmp to that
now = datetime.date.today()
now_approx = ApproximateDate(year=now.year, month=now.month, day=now.day)
return now_approx <= self.end_date
def has_known_dates(self):
"""Is there at least one known (not future) date?"""
return (self.start_date and not self.start_date.future) or \
(self.end_date and not self.end_date.future)
def _set_sorting_dates(self):
"""Set the sorting dates from the actual dates (does not call save())"""
# value can be yyyy-mm-dd, future or None
start = repr(self.start_date) if self.start_date else ''
end = repr(self.end_date) if self.end_date else ''
# set the value or default to something sane
sorting_start_date = start or '0000-00-00'
sorting_end_date = end or start or '0000-00-00'
# To make the sorting consistent special case some parts
if not end and start == 'future':
sorting_start_date = 'a-future' # come after 'future'
self.sorting_start_date = sorting_start_date
self.sorting_end_date = sorting_end_date
self.sorting_start_date_high = re.sub('-00', '-99', sorting_start_date)
self.sorting_end_date_high = re.sub('-00', '-99', sorting_end_date)
def is_nominated_politician(self):
return self.title.slug == 'nominated-member-parliament'
def save(self, *args, **kwargs):
self._set_sorting_dates()
super(Position, self).save(*args, **kwargs)
def __unicode__(self):
title = self.title or '???'
if self.organisation:
organisation = self.organisation.name
else:
organisation = '???'
return "%s (%s at %s)" % ( self.person.name, title, organisation)
class Meta:
ordering = ['-sorting_end_date', '-sorting_start_date']<|fim▁end|> | if self.end_date.future:
return "Started %s" % self.start_date
else:
return "%s → %s" % (self.start_date, self.end_date) |
<|file_name|>analysis.py<|end_file_name|><|fim▁begin|>"""
NeuroTools.analysis
==================
A collection of analysis functions that may be used by NeuroTools.signals or other packages.
.. currentmodule:: NeuroTools.analysis
Classes
-------
.. autosummary::
TuningCurve
Functions
---------
.. autosummary::
:nosignatures:
ccf
crosscorrelate
make_kernel
simple_frequency_spectrum
"""
import numpy as np
from NeuroTools import check_dependency
HAVE_MATPLOTLIB = check_dependency('matplotlib')
if HAVE_MATPLOTLIB:
import matplotlib
matplotlib.use('Agg')
else:
MATPLOTLIB_ERROR = "The matplotlib package was not detected"
HAVE_PYLAB = check_dependency('pylab')
if HAVE_PYLAB:
import pylab
else:
PYLAB_ERROR = "The pylab package was not detected"
def ccf(x, y, axis=None):
"""Fast cross correlation function based on fft.
Computes the cross-correlation function of two series.
Note that the computations are performed on anomalies (deviations from
average).
Returns the values of the cross-correlation at different lags.
Parameters
----------
x, y : 1D MaskedArrays
The two input arrays.
axis : integer, optional
Axis along which to compute (0 for rows, 1 for cols).
If `None`, the array is flattened first.
Examples
--------
>>> z = arange(5)
>>> ccf(z,z)
array([ 3.90798505e-16, -4.00000000e-01, -4.00000000e-01,
-1.00000000e-01, 4.00000000e-01, 1.00000000e+00,
4.00000000e-01, -1.00000000e-01, -4.00000000e-01,
-4.00000000e-01])
"""
assert x.ndim == y.ndim, "Inconsistent shape !"
# assert(x.shape == y.shape, "Inconsistent shape !")
if axis is None:
if x.ndim > 1:
x = x.ravel()
y = y.ravel()
npad = x.size + y.size
xanom = (x - x.mean(axis=None))
yanom = (y - y.mean(axis=None))
Fx = np.fft.fft(xanom, npad, )
Fy = np.fft.fft(yanom, npad, )
iFxy = np.fft.ifft(Fx.conj() * Fy).real
varxy = np.sqrt(np.inner(xanom, xanom) * np.inner(yanom, yanom))
else:
npad = x.shape[axis] + y.shape[axis]
if axis == 1:
if x.shape[0] != y.shape[0]:
raise ValueError("Arrays should have the same length!")
xanom = (x - x.mean(axis=1)[:, None])
yanom = (y - y.mean(axis=1)[:, None])
varxy = np.sqrt((xanom * xanom).sum(1) *
(yanom * yanom).sum(1))[:, None]
else:
if x.shape[1] != y.shape[1]:
raise ValueError("Arrays should have the same width!")
xanom = (x - x.mean(axis=0))
yanom = (y - y.mean(axis=0))
varxy = np.sqrt((xanom * xanom).sum(0) * (yanom * yanom).sum(0))
Fx = np.fft.fft(xanom, npad, axis=axis)
Fy = np.fft.fft(yanom, npad, axis=axis)
iFxy = np.fft.ifft(Fx.conj() * Fy, n=npad, axis=axis).real
# We just turn the lags into correct positions:
iFxy = np.concatenate((iFxy[len(iFxy) / 2:len(iFxy)],
iFxy[0:len(iFxy) / 2]))
return iFxy / varxy
from NeuroTools.plotting import get_display, set_labels
HAVE_PYLAB = check_dependency('pylab')
def crosscorrelate(sua1, sua2, lag=None, n_pred=1, predictor=None,
display=False, kwargs={}):
"""Cross-correlation between two series of discrete events (e.g. spikes).
Calculates the cross-correlation between
two vectors containing event times.
Returns ``(differeces, pred, norm)``. See below for details.
Adapted from original script written by Martin P. Nawrot for the
FIND MATLAB toolbox [1]_.
Parameters
----------
sua1, sua2 : 1D row or column `ndarray` or `SpikeTrain`
Event times. If sua2 == sua1, the result is the autocorrelogram.
lag : float
Lag for which relative event timing is considered
with a max difference of +/- lag. A default lag is computed
from the inter-event interval of the longer of the two sua
arrays.
n_pred : int
Number of surrogate compilations for the predictor. This
influences the total length of the predictor output array
predictor : {None, 'shuffle'}
Determines the type of bootstrap predictor to be used.
'shuffle' shuffles interevent intervals of the longer input array
and calculates relative differences with the shorter input array.
`n_pred` determines the number of repeated shufflings, resulting
differences are pooled from all repeated shufflings.
display : boolean
If True the corresponding plots will be displayed. If False,
int, int_ and norm will be returned.
kwargs : dict
Arguments to be passed to np.histogram.
Returns
-------
differences : np array
Accumulated differences of events in `sua1` minus the events in
`sua2`. Thus positive values relate to events of `sua2` that
lead events of `sua1`. Units are the same as the input arrays.
pred : np array
Accumulated differences based on the prediction method.
The length of `pred` is ``n_pred * length(differences)``. Units are
the same as the input arrays.
norm : float
Normalization factor used to scale the bin heights in `differences` and
`pred`. ``differences/norm`` and ``pred/norm`` correspond to the linear
correlation coefficient.
Examples
--------
>> crosscorrelate(np_array1, np_array2)
>> crosscorrelate(spike_train1, spike_train2)
>> crosscorrelate(spike_train1, spike_train2, lag = 150.0)
>> crosscorrelate(spike_train1, spike_train2, display=True,
kwargs={'bins':100})
See also
--------
ccf
.. [1] Meier R, Egert U, Aertsen A, Nawrot MP, "FIND - a unified framework
for neural data analysis"; Neural Netw. 2008 Oct; 21(8):1085-93.
"""
assert predictor is 'shuffle' or predictor is None, "predictor must be \
either None or 'shuffle'. Other predictors are not yet implemented."
#Check whether sua1 and sua2 are SpikeTrains or arrays
sua = []
for x in (sua1, sua2):
#if isinstance(x, SpikeTrain):
if hasattr(x, 'spike_times'):
sua.append(x.spike_times)
elif x.ndim == 1:
sua.append(x)
elif x.ndim == 2 and (x.shape[0] == 1 or x.shape[1] == 1):
sua.append(x.ravel())
else:
raise TypeError("sua1 and sua2 must be either instances of the" \
"SpikeTrain class or column/row vectors")
sua1 = sua[0]
sua2 = sua[1]
if sua1.size < sua2.size:
if lag is None:
lag = np.ceil(10*np.mean(np.diff(sua1)))
reverse = False
else:<|fim▁hole|> sua1, sua2 = sua2, sua1
reverse = True
#construct predictor
if predictor is 'shuffle':
isi = np.diff(sua2)
sua2_ = np.array([])
for ni in xrange(1,n_pred+1):
idx = np.random.permutation(isi.size-1)
sua2_ = np.append(sua2_, np.add(np.insert(
(np.cumsum(isi[idx])), 0, 0), sua2.min() + (
np.random.exponential(isi.mean()))))
#calculate cross differences in spike times
differences = np.array([])
pred = np.array([])
for k in xrange(0, sua1.size):
differences = np.append(differences, sua1[k] - sua2[np.nonzero(
(sua2 > sua1[k] - lag) & (sua2 < sua1[k] + lag))])
if predictor == 'shuffle':
for k in xrange(0, sua1.size):
pred = np.append(pred, sua1[k] - sua2_[np.nonzero(
(sua2_ > sua1[k] - lag) & (sua2_ < sua1[k] + lag))])
if reverse is True:
differences = -differences
pred = -pred
norm = np.sqrt(sua1.size * sua2.size)
# Plot the results if display=True
if display:
subplot = get_display(display)
if not subplot or not HAVE_PYLAB:
return differences, pred, norm
else:
# Plot the cross-correlation
try:
counts, bin_edges = np.histogram(differences, **kwargs)
edge_distances = np.diff(bin_edges)
bin_centers = bin_edges[1:] - edge_distances/2
counts = counts / norm
xlabel = "Time"
ylabel = "Cross-correlation coefficient"
#NOTE: the x axis corresponds to the upper edge of each bin
subplot.plot(bin_centers, counts, label='cross-correlation', color='b')
if predictor is None:
set_labels(subplot, xlabel, ylabel)
pylab.draw()
elif predictor is 'shuffle':
# Plot the predictor
norm_ = norm * n_pred
counts_, bin_edges_ = np.histogram(pred, **kwargs)
counts_ = counts_ / norm_
subplot.plot(bin_edges_[1:], counts_, label='predictor')
subplot.legend()
pylab.draw()
except ValueError:
print "There are no correlated events within the selected lag"\
" window of %s" % lag
else:
return differences, pred, norm
def _dict_max(D):
"""For a dict containing numerical values, return the key for the
highest value. If there is more than one item with the same highest
value, return one of them (arbitrary - depends on the order produced
by the iterator).
"""
max_val = max(D.values())
for k in D:
if D[k] == max_val:
return k
def make_kernel(form, sigma, time_stamp_resolution, direction=1):
"""Creates kernel functions for convolution.
Constructs a numeric linear convolution kernel of basic shape to be used
for data smoothing (linear low pass filtering) and firing rate estimation
from single trial or trial-averaged spike trains.
Exponential and alpha kernels may also be used to represent postynaptic
currents / potentials in a linear (current-based) model.
Adapted from original script written by Martin P. Nawrot for the
FIND MATLAB toolbox [1]_ [2]_.
Parameters
----------
form : {'BOX', 'TRI', 'GAU', 'EPA', 'EXP', 'ALP'}
Kernel form. Currently implemented forms are BOX (boxcar),
TRI (triangle), GAU (gaussian), EPA (epanechnikov), EXP (exponential),
ALP (alpha function). EXP and ALP are aymmetric kernel forms and
assume optional parameter `direction`.
sigma : float
Standard deviation of the distribution associated with kernel shape.
This parameter defines the time resolution (in ms) of the kernel estimate
and makes different kernels comparable (cf. [1] for symetric kernels).
This is used here as an alternative definition to the cut-off
frequency of the associated linear filter.
time_stamp_resolution : float
Temporal resolution of input and output in ms.
direction : {-1, 1}
Asymmetric kernels have two possible directions.
The values are -1 or 1, default is 1. The
definition here is that for direction = 1 the
kernel represents the impulse response function
of the linear filter. Default value is 1.
Returns
-------
kernel : array_like
Array of kernel. The length of this array is always an odd
number to represent symmetric kernels such that the center bin
coincides with the median of the numeric array, i.e for a
triangle, the maximum will be at the center bin with equal
number of bins to the right and to the left.
norm : float
For rate estimates. The kernel vector is normalized such that
the sum of all entries equals unity sum(kernel)=1. When
estimating rate functions from discrete spike data (0/1) the
additional parameter `norm` allows for the normalization to
rate in spikes per second.
For example:
``rate = norm * scipy.signal.lfilter(kernel, 1, spike_data)``
m_idx : int
Index of the numerically determined median (center of gravity)
of the kernel function.
Examples
--------
To obtain single trial rate function of trial one should use::
r = norm * scipy.signal.fftconvolve(sua, kernel)
To obtain trial-averaged spike train one should use::
r_avg = norm * scipy.signal.fftconvolve(sua, np.mean(X,1))
where `X` is an array of shape `(l,n)`, `n` is the number of trials and
`l` is the length of each trial.
See also
--------
SpikeTrain.instantaneous_rate
SpikeList.averaged_instantaneous_rate
.. [1] Meier R, Egert U, Aertsen A, Nawrot MP, "FIND - a unified framework
for neural data analysis"; Neural Netw. 2008 Oct; 21(8):1085-93.
.. [2] Nawrot M, Aertsen A, Rotter S, "Single-trial estimation of neuronal
firing rates - from single neuron spike trains to population activity";
J. Neurosci Meth 94: 81-92; 1999.
"""
assert form.upper() in ('BOX','TRI','GAU','EPA','EXP','ALP'), "form must \
be one of either 'BOX','TRI','GAU','EPA','EXP' or 'ALP'!"
assert direction in (1,-1), "direction must be either 1 or -1"
SI_sigma = sigma / 1000. #convert to SI units (ms -> s)
SI_time_stamp_resolution = time_stamp_resolution / 1000. #convert to SI units (ms -> s)
norm = 1./SI_time_stamp_resolution
if form.upper() == 'BOX':
w = 2.0 * SI_sigma * np.sqrt(3)
width = 2 * np.floor(w / 2.0 / SI_time_stamp_resolution) + 1 # always odd number of bins
height = 1. / width
kernel = np.ones((1, width)) * height # area = 1
elif form.upper() == 'TRI':
w = 2 * SI_sigma * np.sqrt(6)
halfwidth = np.floor(w / 2.0 / SI_time_stamp_resolution)
trileft = np.arange(1, halfwidth + 2)
triright = np.arange(halfwidth, 0, -1) # odd number of bins
triangle = np.append(trileft, triright)
kernel = triangle / triangle.sum() # area = 1
elif form.upper() == 'EPA':
w = 2.0 * SI_sigma * np.sqrt(5)
halfwidth = np.floor(w / 2.0 / SI_time_stamp_resolution)
base = np.arange(-halfwidth, halfwidth + 1)
parabula = base**2
epanech = parabula.max() - parabula # inverse parabula
kernel = epanech / epanech.sum() # area = 1
elif form.upper() == 'GAU':
w = 2.0 * SI_sigma * 2.7 # > 99% of distribution weight
halfwidth = np.floor(w / 2.0 / SI_time_stamp_resolution) # always odd
base = np.arange(-halfwidth, halfwidth + 1) * SI_time_stamp_resolution
g = np.exp(-(base**2) / 2.0 / SI_sigma**2) / SI_sigma / np.sqrt(2.0 * np.pi)
kernel = g / g.sum()
elif form.upper() == 'ALP':
w = 5.0 * SI_sigma
alpha = np.arange(1, (2.0 * np.floor(w / SI_time_stamp_resolution / 2.0) + 1) + 1) * SI_time_stamp_resolution
alpha = (2.0 / SI_sigma**2) * alpha * np.exp(-alpha * np.sqrt(2) / SI_sigma)
kernel = alpha / alpha.sum() # normalization
if direction == -1:
kernel = np.flipud(kernel)
elif form.upper() == 'EXP':
w = 5.0 * SI_sigma
expo = np.arange(1, (2.0 * np.floor(w / SI_time_stamp_resolution / 2.0) + 1) + 1) * SI_time_stamp_resolution
expo = np.exp(-expo / SI_sigma)
kernel = expo / expo.sum()
if direction == -1:
kernel = np.flipud(kernel)
kernel = kernel.ravel()
m_idx = np.nonzero(kernel.cumsum() >= 0.5)[0].min()
return kernel, norm, m_idx
def simple_frequency_spectrum(x):
"""Simple frequency spectrum.
Very simple calculation of frequency spectrum with no detrending,
windowing, etc, just the first half (positive frequency components) of
abs(fft(x))
Parameters
----------
x : array_like
The input array, in the time-domain.
Returns
-------
spec : array_like
The frequency spectrum of `x`.
"""
spec = np.absolute(np.fft.fft(x))
spec = spec[:len(x) / 2] # take positive frequency components
spec /= len(x) # normalize
spec *= 2.0 # to get amplitudes of sine components, need to multiply by 2
spec[0] /= 2.0 # except for the dc component
return spec
class TuningCurve(object):
"""Class to facilitate working with tuning curves."""
def __init__(self, D=None):
"""
If `D` is a dict, it is used to give initial values to the tuning curve.
"""
self._tuning_curves = {}
self._counts = {}
if D is not None:
for k,v in D.items():
self._tuning_curves[k] = [v]
self._counts[k] = 1
self.n = 1
else:
self.n = 0
def add(self, D):
for k,v in D.items():
self._tuning_curves[k].append(v)
self._counts[k] += 1
self.n += 1
def __getitem__(self, i):
D = {}
for k,v in self._tuning_curves[k].items():
D[k] = v[i]
return D
def __repr__(self):
return "TuningCurve: %s" % self._tuning_curves
def stats(self):
"""Return the mean tuning curve with stderrs."""
mean = {}
stderr = {}
n = self.n
for k in self._tuning_curves.keys():
arr = np.array(self._tuning_curves[k])
mean[k] = arr.mean()
stderr[k] = arr.std()*n/(n-1)/np.sqrt(n)
return mean, stderr
def max(self):
"""Return the key of the max value and the max value."""
k = _dict_max(self._tuning_curves)
return k, self._tuning_curves[k]<|fim▁end|> | if lag is None:
lag = np.ceil(20*np.mean(np.diff(sua2))) |
<|file_name|>cell.py<|end_file_name|><|fim▁begin|>from collections.abc import Mapping, Iterable
from ctypes import c_int, c_int32, c_double, c_char_p, POINTER
from weakref import WeakValueDictionary
import numpy as np
from numpy.ctypeslib import as_array
from openmc.exceptions import AllocationError, InvalidIDError
from . import _dll
from .core import _FortranObjectWithID
from .error import _error_handler
from .material import Material
__all__ = ['Cell', 'cells']
# Cell functions
_dll.openmc_extend_cells.argtypes = [c_int32, POINTER(c_int32), POINTER(c_int32)]
_dll.openmc_extend_cells.restype = c_int
_dll.openmc_extend_cells.errcheck = _error_handler
_dll.openmc_cell_get_id.argtypes = [c_int32, POINTER(c_int32)]
_dll.openmc_cell_get_id.restype = c_int
_dll.openmc_cell_get_id.errcheck = _error_handler
_dll.openmc_cell_get_fill.argtypes = [
c_int32, POINTER(c_int), POINTER(POINTER(c_int32)), POINTER(c_int32)]
_dll.openmc_cell_get_fill.restype = c_int
_dll.openmc_cell_get_fill.errcheck = _error_handler
_dll.openmc_cell_set_fill.argtypes = [
c_int32, c_int, c_int32, POINTER(c_int32)]
_dll.openmc_cell_set_fill.restype = c_int
_dll.openmc_cell_set_fill.errcheck = _error_handler
_dll.openmc_cell_set_id.argtypes = [c_int32, c_int32]
_dll.openmc_cell_set_id.restype = c_int
_dll.openmc_cell_set_id.errcheck = _error_handler
_dll.openmc_cell_set_temperature.argtypes = [
c_int32, c_double, POINTER(c_int32)]
_dll.openmc_cell_set_temperature.restype = c_int
_dll.openmc_cell_set_temperature.errcheck = _error_handler
_dll.openmc_get_cell_index.argtypes = [c_int32, POINTER(c_int32)]
_dll.openmc_get_cell_index.restype = c_int
_dll.openmc_get_cell_index.errcheck = _error_handler
_dll.cells_size.restype = c_int
class Cell(_FortranObjectWithID):
"""Cell stored internally.
This class exposes a cell that is stored internally in the OpenMC
library. To obtain a view of a cell with a given ID, use the
:data:`openmc.capi.cells` mapping.
Parameters
----------
index : int
Index in the `cells` array.
Attributes
----------
id : int
ID of the cell
"""
__instances = WeakValueDictionary()
def __new__(cls, uid=None, new=True, index=None):
mapping = cells
if index is None:
if new:
# Determine ID to assign
if uid is None:
uid = max(mapping, default=0) + 1
else:
if uid in mapping:
raise AllocationError('A cell with ID={} has already '
'been allocated.'.format(uid))
index = c_int32()
_dll.openmc_extend_cells(1, index, None)
index = index.value
else:
index = mapping[uid]._index
if index not in cls.__instances:
instance = super().__new__(cls)
instance._index = index
if uid is not None:
instance.id = uid
cls.__instances[index] = instance
return cls.__instances[index]
@property
def id(self):
cell_id = c_int32()
_dll.openmc_cell_get_id(self._index, cell_id)
return cell_id.value
@id.setter
def id(self, cell_id):
_dll.openmc_cell_set_id(self._index, cell_id)
@property
def fill(self):
fill_type = c_int()
indices = POINTER(c_int32)()
n = c_int32()
_dll.openmc_cell_get_fill(self._index, fill_type, indices, n)
if fill_type.value == 1:
if n.value > 1:
return [Material(index=i) for i in indices[:n.value]]
else:
index = indices[0]
return Material(index=index)
else:
raise NotImplementedError
@fill.setter
def fill(self, fill):
if isinstance(fill, Iterable):
n = len(fill)
indices = (c_int32*n)(*(m._index if m is not None else -1
for m in fill))
_dll.openmc_cell_set_fill(self._index, 1, n, indices)
elif isinstance(fill, Material):
indices = (c_int32*1)(fill._index)
_dll.openmc_cell_set_fill(self._index, 1, 1, indices)
elif fill is None:
indices = (c_int32*1)(-1)
_dll.openmc_cell_set_fill(self._index, 1, 1, indices)
def set_temperature(self, T, instance=None):
"""Set the temperature of a cell
Parameters
----------
T : float
Temperature in K
instance : int or None
Which instance of the cell
<|fim▁hole|>class _CellMapping(Mapping):
def __getitem__(self, key):
index = c_int32()
try:
_dll.openmc_get_cell_index(key, index)
except (AllocationError, InvalidIDError) as e:
# __contains__ expects a KeyError to work correctly
raise KeyError(str(e))
return Cell(index=index.value)
def __iter__(self):
for i in range(len(self)):
yield Cell(index=i).id
def __len__(self):
return _dll.cells_size()
def __repr__(self):
return repr(dict(self))
cells = _CellMapping()<|fim▁end|> | """
_dll.openmc_cell_set_temperature(self._index, T, c_int32(instance))
|
<|file_name|>BackpackOutlined.js<|end_file_name|><|fim▁begin|>import * as React from 'react';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(<|fim▁hole|><|fim▁end|> | <path d="M17 4.14V2h-3v2h-4V2H7v2.14c-1.72.45-3 2-3 3.86v12c0 1.1.9 2 2 2h12c1.1 0 2-.9 2-2V8c0-1.86-1.28-3.41-3-3.86zM18 20H6V8c0-1.1.9-2 2-2h8c1.1 0 2 .9 2 2v12zm-1.5-8v4h-2v-2h-7v-2h9z" />
, 'BackpackOutlined'); |
<|file_name|>string.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An owned, growable string that enforces that its contents are valid UTF-8.
#![stable(feature = "rust1", since = "1.0.0")]
use core::prelude::*;
use core::fmt;
use core::hash;
use core::iter::FromIterator;
use core::mem;
use core::ops::{self, Deref, Add, Index};
use core::ptr;
use core::slice;
use core::str::pattern::Pattern;
use rustc_unicode::str as unicode_str;
use rustc_unicode::str::Utf16Item;
use borrow::{Cow, IntoCow};
use range::RangeArgument;
use str::{self, FromStr, Utf8Error, Chars};
use vec::Vec;
use boxed::Box;
/// A growable string stored as a UTF-8 encoded buffer.
#[derive(Clone, PartialOrd, Eq, Ord)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct String {
vec: Vec<u8>,
}
/// A possible error value from the `String::from_utf8` function.
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct FromUtf8Error {
bytes: Vec<u8>,
error: Utf8Error,
}
/// A possible error value from the `String::from_utf16` function.
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct FromUtf16Error(());
impl String {
/// Creates a new string buffer initialized with the empty string.
///
/// # Examples
///
/// ```
/// let mut s = String::new();
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> String {
String {
vec: Vec::new(),
}
}
/// Creates a new string buffer with the given capacity.
/// The string will be able to hold exactly `capacity` bytes without
/// reallocating. If `capacity` is 0, the string will not allocate.
///
/// # Examples
///
/// ```
/// let mut s = String::with_capacity(10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> String {
String {
vec: Vec::with_capacity(capacity),
}
}
/// Creates a new string buffer from the given string.
///
/// # Examples
///
/// ```
/// # #![feature(collections)]
/// let s = String::from("hello");
/// assert_eq!(&s[..], "hello");
/// ```
#[inline]
#[unstable(feature = "collections", reason = "use `String::from` instead")]
#[deprecated(since = "1.2.0", reason = "use `String::from` instead")]
#[cfg(not(test))]
pub fn from_str(string: &str) -> String {
String { vec: <[_]>::to_vec(string.as_bytes()) }
}
// HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
// required for this method definition, is not available. Since we don't
// require this method for testing purposes, I'll just stub it
// NB see the slice::hack module in slice.rs for more information
#[inline]
#[cfg(test)]
pub fn from_str(_: &str) -> String {
panic!("not available with cfg(test)");
}
/// Returns the vector as a string buffer, if possible, taking care not to
/// copy it.
///
/// # Failure
///
/// If the given vector is not valid UTF-8, then the original vector and the
/// corresponding error is returned.
///
/// # Examples
///
/// ```
/// let hello_vec = vec![104, 101, 108, 108, 111];
/// let s = String::from_utf8(hello_vec).unwrap();
/// assert_eq!(s, "hello");
///
/// let invalid_vec = vec![240, 144, 128];
/// let s = String::from_utf8(invalid_vec).err().unwrap();
/// let err = s.utf8_error();
/// assert_eq!(s.into_bytes(), [240, 144, 128]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
match str::from_utf8(&vec) {
Ok(..) => Ok(String { vec: vec }),
Err(e) => Err(FromUtf8Error { bytes: vec, error: e })
}
}
/// Converts a vector of bytes to a new UTF-8 string.
/// Any invalid UTF-8 sequences are replaced with U+FFFD REPLACEMENT CHARACTER.
///
/// # Examples
///
/// ```
/// let input = b"Hello \xF0\x90\x80World";
/// let output = String::from_utf8_lossy(input);
/// assert_eq!(output, "Hello \u{FFFD}World");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf8_lossy<'a>(v: &'a [u8]) -> Cow<'a, str> {
let mut i;
match str::from_utf8(v) {
Ok(s) => return Cow::Borrowed(s),
Err(e) => i = e.valid_up_to(),
}
const TAG_CONT_U8: u8 = 128;
const REPLACEMENT: &'static [u8] = b"\xEF\xBF\xBD"; // U+FFFD in UTF-8
let total = v.len();
fn unsafe_get(xs: &[u8], i: usize) -> u8 {
unsafe { *xs.get_unchecked(i) }
}
fn safe_get(xs: &[u8], i: usize, total: usize) -> u8 {
if i >= total {
0
} else {
unsafe_get(xs, i)
}
}
let mut res = String::with_capacity(total);
if i > 0 {
unsafe {
res.as_mut_vec().push_all(&v[..i])
};
}
// subseqidx is the index of the first byte of the subsequence we're
// looking at. It's used to copy a bunch of contiguous good codepoints
// at once instead of copying them one by one.
let mut subseqidx = i;
while i < total {
let i_ = i;
let byte = unsafe_get(v, i);
i += 1;
macro_rules! error { () => ({
unsafe {
if subseqidx != i_ {
res.as_mut_vec().push_all(&v[subseqidx..i_]);
}
subseqidx = i;
res.as_mut_vec().push_all(REPLACEMENT);
}
})}
if byte < 128 {
// subseqidx handles this
} else {
let w = unicode_str::utf8_char_width(byte);
match w {
2 => {
if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
}
3 => {
match (byte, safe_get(v, i, total)) {
(0xE0 , 0xA0 ... 0xBF) => (),
(0xE1 ... 0xEC, 0x80 ... 0xBF) => (),
(0xED , 0x80 ... 0x9F) => (),
(0xEE ... 0xEF, 0x80 ... 0xBF) => (),
_ => {
error!();
continue;
}
}
i += 1;
if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
}
4 => {
match (byte, safe_get(v, i, total)) {
(0xF0 , 0x90 ... 0xBF) => (),
(0xF1 ... 0xF3, 0x80 ... 0xBF) => (),
(0xF4 , 0x80 ... 0x8F) => (),
_ => {
error!();
continue;
}
}
i += 1;
if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
}
_ => {
error!();
continue;
}
}
}
}
if subseqidx < total {
unsafe {
res.as_mut_vec().push_all(&v[subseqidx..total])
};
}
Cow::Owned(res)
}
/// Decode a UTF-16 encoded vector `v` into a `String`, returning `None`
/// if `v` contains any invalid data.
///
/// # Examples
///
/// ```
/// // 𝄞music
/// let mut v = &mut [0xD834, 0xDD1E, 0x006d, 0x0075,
/// 0x0073, 0x0069, 0x0063];
/// assert_eq!(String::from_utf16(v).unwrap(),
/// "𝄞music".to_string());
///
/// // 𝄞mu<invalid>ic
/// v[4] = 0xD800;
/// assert!(String::from_utf16(v).is_err());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf16(v: &[u16]) -> Result<String, FromUtf16Error> {
let mut s = String::with_capacity(v.len());
for c in unicode_str::utf16_items(v) {
match c {
Utf16Item::ScalarValue(c) => s.push(c),
Utf16Item::LoneSurrogate(_) => return Err(FromUtf16Error(())),
}
}
Ok(s)
}
/// Decode a UTF-16 encoded vector `v` into a string, replacing
/// invalid data with the replacement character (U+FFFD).
///
/// # Examples
///
/// ```
/// // 𝄞mus<invalid>ic<invalid>
/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
/// 0x0073, 0xDD1E, 0x0069, 0x0063,
/// 0xD834];
///
/// assert_eq!(String::from_utf16_lossy(v),
/// "𝄞mus\u{FFFD}ic\u{FFFD}".to_string());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf16_lossy(v: &[u16]) -> String {
unicode_str::utf16_items(v).map(|c| c.to_char_lossy()).collect()
}
/// Creates a new `String` from a length, capacity, and pointer.
///
/// # Unsafety
///
/// This is _very_ unsafe because:
///
/// * We call `Vec::from_raw_parts` to get a `Vec<u8>`. Therefore, this
/// function inherits all of its unsafety, see [its
/// documentation](../vec/struct.Vec.html#method.from_raw_parts)
/// for the invariants it expects, they also apply to this function.
/// * We assume that the `Vec` contains valid UTF-8.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
String {
vec: Vec::from_raw_parts(buf, length, capacity),
}
}
/// Converts a vector of bytes to a new `String` without checking if
/// it contains valid UTF-8. This is unsafe because it assumes that
/// the UTF-8-ness of the vector has already been validated.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_utf8_unchecked(bytes: Vec<u8>) -> String {
String { vec: bytes }
}
/// Returns the underlying byte buffer, encoded as UTF-8.
///
/// # Examples
///
/// ```
/// let s = String::from("hello");
/// let bytes = s.into_bytes();
/// assert_eq!(bytes, [104, 101, 108, 108, 111]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_bytes(self) -> Vec<u8> {
self.vec
}
/// Extracts a string slice containing the entire string.
#[inline]
#[unstable(feature = "convert",
reason = "waiting on RFC revision")]
pub fn as_str(&self) -> &str {
self
}
/// Pushes the given string onto this string buffer.
///
/// # Examples
///
/// ```
/// let mut s = String::from("foo");
/// s.push_str("bar");
/// assert_eq!(s, "foobar");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_str(&mut self, string: &str) {
self.vec.push_all(string.as_bytes())
}
/// Returns the number of bytes that this string buffer can hold without
/// reallocating.
///
/// # Examples
///
/// ```
/// let s = String::with_capacity(10);
/// assert!(s.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> usize {
self.vec.capacity()
}
<|fim▁hole|> ///
/// # Panics
///
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
/// ```
/// let mut s = String::new();
/// s.reserve(10);
/// assert!(s.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: usize) {
self.vec.reserve(additional)
}
/// Reserves the minimum capacity for exactly `additional` more bytes to be
/// inserted in the given `String`. Does nothing if the capacity is already
/// sufficient.
///
/// Note that the allocator may give the collection more space than it
/// requests. Therefore capacity can not be relied upon to be precisely
/// minimal. Prefer `reserve` if future insertions are expected.
///
/// # Panics
///
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
/// ```
/// let mut s = String::new();
/// s.reserve_exact(10);
/// assert!(s.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_exact(&mut self, additional: usize) {
self.vec.reserve_exact(additional)
}
/// Shrinks the capacity of this string buffer to match its length.
///
/// # Examples
///
/// ```
/// let mut s = String::from("foo");
/// s.reserve(100);
/// assert!(s.capacity() >= 100);
/// s.shrink_to_fit();
/// assert_eq!(s.capacity(), 3);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
self.vec.shrink_to_fit()
}
/// Adds the given character to the end of the string.
///
/// # Examples
///
/// ```
/// let mut s = String::from("abc");
/// s.push('1');
/// s.push('2');
/// s.push('3');
/// assert_eq!(s, "abc123");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push(&mut self, ch: char) {
match ch.len_utf8() {
1 => self.vec.push(ch as u8),
ch_len => {
let cur_len = self.len();
// This may use up to 4 bytes.
self.vec.reserve(ch_len);
unsafe {
// Attempt to not use an intermediate buffer by just pushing bytes
// directly onto this string.
let slice = slice::from_raw_parts_mut (
self.vec.as_mut_ptr().offset(cur_len as isize),
ch_len
);
let used = ch.encode_utf8(slice).unwrap_or(0);
self.vec.set_len(cur_len + used);
}
}
}
}
/// Works with the underlying buffer as a byte slice.
///
/// # Examples
///
/// ```
/// let s = String::from("hello");
/// assert_eq!(s.as_bytes(), [104, 101, 108, 108, 111]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_bytes(&self) -> &[u8] {
&self.vec
}
/// Shortens a string to the specified length.
///
/// # Panics
///
/// Panics if `new_len` > current length,
/// or if `new_len` is not a character boundary.
///
/// # Examples
///
/// ```
/// let mut s = String::from("hello");
/// s.truncate(2);
/// assert_eq!(s, "he");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn truncate(&mut self, new_len: usize) {
assert!(self.is_char_boundary(new_len));
self.vec.truncate(new_len)
}
/// Removes the last character from the string buffer and returns it.
/// Returns `None` if this string buffer is empty.
///
/// # Examples
///
/// ```
/// let mut s = String::from("foo");
/// assert_eq!(s.pop(), Some('o'));
/// assert_eq!(s.pop(), Some('o'));
/// assert_eq!(s.pop(), Some('f'));
/// assert_eq!(s.pop(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pop(&mut self) -> Option<char> {
let len = self.len();
if len == 0 {
return None
}
let ch = self.char_at_reverse(len);
unsafe {
self.vec.set_len(len - ch.len_utf8());
}
Some(ch)
}
/// Removes the character from the string buffer at byte position `idx` and
/// returns it.
///
/// # Warning
///
/// This is an O(n) operation as it requires copying every element in the
/// buffer.
///
/// # Panics
///
/// If `idx` does not lie on a character boundary, or if it is out of
/// bounds, then this function will panic.
///
/// # Examples
///
/// ```
/// let mut s = String::from("foo");
/// assert_eq!(s.remove(0), 'f');
/// assert_eq!(s.remove(1), 'o');
/// assert_eq!(s.remove(0), 'o');
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(&mut self, idx: usize) -> char {
let len = self.len();
assert!(idx <= len);
let ch = self.char_at(idx);
let next = idx + ch.len_utf8();
unsafe {
ptr::copy(self.vec.as_ptr().offset(next as isize),
self.vec.as_mut_ptr().offset(idx as isize),
len - next);
self.vec.set_len(len - (next - idx));
}
ch
}
/// Inserts a character into the string buffer at byte position `idx`.
///
/// # Warning
///
/// This is an O(n) operation as it requires copying every element in the
/// buffer.
///
/// # Panics
///
/// If `idx` does not lie on a character boundary or is out of bounds, then
/// this function will panic.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, idx: usize, ch: char) {
let len = self.len();
assert!(idx <= len);
assert!(self.is_char_boundary(idx));
self.vec.reserve(4);
let mut bits = [0; 4];
let amt = ch.encode_utf8(&mut bits).unwrap();
unsafe {
ptr::copy(self.vec.as_ptr().offset(idx as isize),
self.vec.as_mut_ptr().offset((idx + amt) as isize),
len - idx);
ptr::copy(bits.as_ptr(),
self.vec.as_mut_ptr().offset(idx as isize),
amt);
self.vec.set_len(len + amt);
}
}
/// Views the string buffer as a mutable sequence of bytes.
///
/// This is unsafe because it does not check
/// to ensure that the resulting string will be valid UTF-8.
///
/// # Examples
///
/// ```
/// let mut s = String::from("hello");
/// unsafe {
/// let vec = s.as_mut_vec();
/// assert!(vec == &[104, 101, 108, 108, 111]);
/// vec.reverse();
/// }
/// assert_eq!(s, "olleh");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<u8> {
&mut self.vec
}
/// Returns the number of bytes in this string.
///
/// # Examples
///
/// ```
/// let a = "foo".to_string();
/// assert_eq!(a.len(), 3);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize { self.vec.len() }
/// Returns true if the string contains no bytes
///
/// # Examples
///
/// ```
/// let mut v = String::new();
/// assert!(v.is_empty());
/// v.push('a');
/// assert!(!v.is_empty());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Truncates the string, returning it to 0 length.
///
/// # Examples
///
/// ```
/// let mut s = "foo".to_string();
/// s.clear();
/// assert!(s.is_empty());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
self.vec.clear()
}
    /// Create a draining iterator that removes the specified range in the string
    /// and yields the removed chars from start to end. The element range is
    /// removed even if the iterator is not consumed until the end.
    ///
    /// # Panics
    ///
    /// Panics if the starting point or end point are not on character boundaries,
    /// or if they are out of bounds.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(drain)]
    ///
    /// let mut s = String::from("α is alpha, β is beta");
    /// let beta_offset = s.find('β').unwrap_or(s.len());
    ///
    /// // Remove the range up until the β from the string
    /// let t: String = s.drain(..beta_offset).collect();
    /// assert_eq!(t, "α is alpha, ");
    /// assert_eq!(s, "β is beta");
    ///
    /// // A full range clears the string
    /// s.drain(..);
    /// assert_eq!(s, "");
    /// ```
    #[unstable(feature = "drain",
               reason = "recently added, matches RFC")]
    pub fn drain<R>(&mut self, range: R) -> Drain where R: RangeArgument<usize> {
        // Memory safety
        //
        // The String version of Drain does not have the memory safety issues
        // of the vector version. The data is just plain bytes.
        // Because the range removal happens in Drop, if the Drain iterator is leaked,
        // the removal will not happen.
        let len = self.len();
        // Open-ended bounds default to the whole string: `..x` starts at 0
        // and `x..` ends at `len`.
        let start = *range.start().unwrap_or(&0);
        let end = *range.end().unwrap_or(&len);
        // Take out two simultaneous borrows. The &mut String won't be accessed
        // until iteration is over, in Drop.
        let self_ptr = self as *mut _;
        // slicing does the appropriate bounds checks
        // (and panics on non-char-boundary indices, as documented above)
        let chars_iter = self[start..end].chars();
        Drain {
            start: start,
            end: end,
            iter: chars_iter,
            string: self_ptr,
        }
    }
/// Converts the string into `Box<str>`.
///
/// Note that this will drop any excess capacity.
#[unstable(feature = "box_str",
reason = "recently added, matches RFC")]
pub fn into_boxed_slice(self) -> Box<str> {
let slice = self.vec.into_boxed_slice();
unsafe { mem::transmute::<Box<[u8]>, Box<str>>(slice) }
}
}
impl FromUtf8Error {
/// Consumes this error, returning the bytes that were attempted to make a
/// `String` with.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_bytes(self) -> Vec<u8> { self.bytes }
/// Access the underlying UTF8-error that was the cause of this error.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn utf8_error(&self) -> Utf8Error { self.error }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for FromUtf8Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.error, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for FromUtf16Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt("invalid utf-16: lone surrogate found", f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl FromIterator<char> for String {
fn from_iter<I: IntoIterator<Item=char>>(iter: I) -> String {
let mut buf = String::new();
buf.extend(iter);
buf
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> FromIterator<&'a str> for String {
fn from_iter<I: IntoIterator<Item=&'a str>>(iter: I) -> String {
let mut buf = String::new();
buf.extend(iter);
buf
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Extend<char> for String {
fn extend<I: IntoIterator<Item=char>>(&mut self, iterable: I) {
let iterator = iterable.into_iter();
let (lower_bound, _) = iterator.size_hint();
self.reserve(lower_bound);
for ch in iterator {
self.push(ch)
}
}
}
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a> Extend<&'a char> for String {
fn extend<I: IntoIterator<Item=&'a char>>(&mut self, iter: I) {
self.extend(iter.into_iter().cloned());
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Extend<&'a str> for String {
fn extend<I: IntoIterator<Item=&'a str>>(&mut self, iterable: I) {
let iterator = iterable.into_iter();
// A guess that at least one byte per iterator element will be needed.
let (lower_bound, _) = iterator.size_hint();
self.reserve(lower_bound);
for s in iterator {
self.push_str(s)
}
}
}
/// A convenience impl that delegates to the impl for `&str`
impl<'a, 'b> Pattern<'a> for &'b String {
type Searcher = <&'b str as Pattern<'a>>::Searcher;
fn into_searcher(self, haystack: &'a str) -> <&'b str as Pattern<'a>>::Searcher {
self[..].into_searcher(haystack)
}
#[inline]
fn is_contained_in(self, haystack: &'a str) -> bool {
self[..].is_contained_in(haystack)
}
#[inline]
fn is_prefix_of(self, haystack: &'a str) -> bool {
self[..].is_prefix_of(haystack)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialEq for String {
#[inline]
fn eq(&self, other: &String) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &String) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
macro_rules! impl_eq {
($lhs:ty, $rhs: ty) => {
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> PartialEq<$rhs> for $lhs {
#[inline]
fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> PartialEq<$lhs> for $rhs {
#[inline]
fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
}
}
impl_eq! { String, str }
impl_eq! { String, &'a str }
impl_eq! { Cow<'a, str>, str }
impl_eq! { Cow<'a, str>, String }
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b> PartialEq<&'b str> for Cow<'a, str> {
#[inline]
fn eq(&self, other: &&'b str) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &&'b str) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b> PartialEq<Cow<'a, str>> for &'b str {
#[inline]
fn eq(&self, other: &Cow<'a, str>) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
fn ne(&self, other: &Cow<'a, str>) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Default for String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> String {
String::new()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for String {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for String {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl hash::Hash for String {
#[inline]
fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
(**self).hash(hasher)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Add<&'a str> for String {
type Output = String;
#[inline]
fn add(mut self, other: &str) -> String {
self.push_str(other);
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::Range<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: ops::Range<usize>) -> &str {
&self[..][index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeTo<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: ops::RangeTo<usize>) -> &str {
&self[..][index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFrom<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: ops::RangeFrom<usize>) -> &str {
&self[..][index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFull> for String {
type Output = str;
#[inline]
fn index(&self, _index: ops::RangeFull) -> &str {
unsafe { mem::transmute(&*self.vec) }
}
}
#[cfg(not(stage0))]
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::IndexMut<ops::Range<usize>> for String {
#[inline]
fn index_mut(&mut self, index: ops::Range<usize>) -> &mut str {
&mut self[..][index]
}
}
#[cfg(not(stage0))]
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::IndexMut<ops::RangeTo<usize>> for String {
#[inline]
fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut str {
&mut self[..][index]
}
}
#[cfg(not(stage0))]
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::IndexMut<ops::RangeFrom<usize>> for String {
#[inline]
fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut str {
&mut self[..][index]
}
}
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::IndexMut<ops::RangeFull> for String {
#[inline]
fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str {
unsafe { mem::transmute(&mut *self.vec) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Deref for String {
type Target = str;
#[inline]
fn deref(&self) -> &str {
unsafe { mem::transmute(&self.vec[..]) }
}
}
#[stable(feature = "derefmut_for_string", since = "1.2.0")]
impl ops::DerefMut for String {
#[inline]
fn deref_mut(&mut self) -> &mut str {
unsafe { mem::transmute(&mut self.vec[..]) }
}
}
/// Error returned from `String::from`
#[unstable(feature = "str_parse_error", reason = "may want to be replaced with \
Void if it ever exists")]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct ParseError(());
#[stable(feature = "rust1", since = "1.0.0")]
impl FromStr for String {
type Err = ParseError;
#[inline]
fn from_str(s: &str) -> Result<String, ParseError> {
Ok(String::from(s))
}
}
/// A generic trait for converting a value to a string
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ToString {
/// Converts the value of `self` to an owned string
#[stable(feature = "rust1", since = "1.0.0")]
fn to_string(&self) -> String;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display + ?Sized> ToString for T {
    // Blanket impl: anything printable with `Display` gets `to_string` for free.
    #[inline]
    fn to_string(&self) -> String {
        use core::fmt::Write;
        let mut buf = String::new();
        // Writing into a String cannot fail, so the fmt::Result is discarded.
        let _ = buf.write_fmt(format_args!("{}", self));
        // Formatting may over-reserve capacity; trim the excess since the
        // result is typically kept around as-is.
        buf.shrink_to_fit();
        buf
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<str> for String {
#[inline]
fn as_ref(&self) -> &str {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<[u8]> for String {
#[inline]
fn as_ref(&self) -> &[u8] {
self.as_bytes()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<&'a str> for String {
#[cfg(not(test))]
#[inline]
fn from(s: &'a str) -> String {
String { vec: <[_]>::to_vec(s.as_bytes()) }
}
// HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
// required for this method definition, is not available. Since we don't
// require this method for testing purposes, I'll just stub it
// NB see the slice::hack module in slice.rs for more information
#[inline]
#[cfg(test)]
fn from(_: &str) -> String {
panic!("not available with cfg(test)");
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<&'a str> for Cow<'a, str> {
#[inline]
fn from(s: &'a str) -> Cow<'a, str> {
Cow::Borrowed(s)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<String> for Cow<'a, str> {
#[inline]
fn from(s: String) -> Cow<'a, str> {
Cow::Owned(s)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Into<Vec<u8>> for String {
fn into(self) -> Vec<u8> {
self.into_bytes()
}
}
#[unstable(feature = "into_cow", reason = "may be replaced by `convert::Into`")]
impl IntoCow<'static, str> for String {
#[inline]
fn into_cow(self) -> Cow<'static, str> {
Cow::Owned(self)
}
}
#[unstable(feature = "into_cow", reason = "may be replaced by `convert::Into`")]
impl<'a> IntoCow<'a, str> for &'a str {
#[inline]
fn into_cow(self) -> Cow<'a, str> {
Cow::Borrowed(self)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Write for String {
#[inline]
fn write_str(&mut self, s: &str) -> fmt::Result {
self.push_str(s);
Ok(())
}
#[inline]
fn write_char(&mut self, c: char) -> fmt::Result {
self.push(c);
Ok(())
}
}
/// A draining iterator for `String`.
#[unstable(feature = "drain", reason = "recently added")]
pub struct Drain<'a> {
    /// Will be used as &'a mut String in the destructor
    string: *mut String,
    /// Start of part to remove
    start: usize,
    /// End of part to remove
    end: usize,
    /// Current remaining range to remove
    iter: Chars<'a>,
}
// The raw pointer makes `Drain` !Send/!Sync by default. It is safe to move
// and share across threads because the pointee is the `String` exclusively
// borrowed for the iterator's lifetime `'a`.
unsafe impl<'a> Sync for Drain<'a> {}
unsafe impl<'a> Send for Drain<'a> {}
#[unstable(feature = "drain", reason = "recently added")]
impl<'a> Drop for Drain<'a> {
    // The actual removal is deferred to Drop: leaking the iterator simply
    // leaves the string untouched, so there is no memory-safety hazard.
    fn drop(&mut self) {
        unsafe {
            // Use Vec::drain. "Reaffirm" the bounds checks to avoid
            // panic code being inserted again.
            let self_vec = (*self.string).as_mut_vec();
            if self.start <= self.end && self.end <= self_vec.len() {
                self_vec.drain(self.start..self.end);
            }
        }
    }
}
#[unstable(feature = "drain", reason = "recently added")]
impl<'a> Iterator for Drain<'a> {
    type Item = char;
    // Iteration just walks the borrowed char iterator; removal happens in Drop.
    #[inline]
    fn next(&mut self) -> Option<char> {
        self.iter.next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
#[unstable(feature = "drain", reason = "recently added")]
impl<'a> DoubleEndedIterator for Drain<'a> {
    #[inline]
    fn next_back(&mut self) -> Option<char> {
        self.iter.next_back()
    }
}
/// in the given `String`. The collection may reserve more space to avoid
/// frequent reallocations. |
<|file_name|>interwebs.py<|end_file_name|><|fim▁begin|>"""
A simple HTTP interface for making GET, PUT and POST requests.
"""
import http.client
import json
from urllib.parse import urlparse, urlencode # NOQA
from base64 import b64encode
from functools import partial
from collections import namedtuple
Response = namedtuple("Response", ("payload", "headers", "status", "is_json"))
def request(verb, host, port, path, payload=None, https=False, headers=None, auth=None, redirect=True):
    """
    Make an HTTP(S) request with the provided HTTP verb, host FQDN, port number, path,
    payload, protocol, headers, and auth information. Return a response object with
    payload, headers, JSON flag, and HTTP status number.

    Only GET requests are auto-redirected (on 301/302), and only when
    ``redirect`` is true. Note that a caller-supplied ``headers`` dict is
    mutated in place (User-Agent/Authorization are added to it).
    """
    if not headers:
        headers = {}
    headers["User-Agent"] = "GitSavvy Sublime Plug-in"
    if auth:
        username_password = "{}:{}".format(*auth).encode("ascii")
        headers["Authorization"] = "Basic {}".format(b64encode(username_password).decode("ascii"))
    connection = (http.client.HTTPSConnection(host, port)
                  if https
                  else http.client.HTTPConnection(host, port))
    connection.request(verb, path, body=payload, headers=headers)
    response = connection.getresponse()
    response_payload = response.read()
    response_headers = dict(response.getheaders())
    status = response.status
    # Content-Type may be absent (e.g. 204 No Content); treat that as
    # "not JSON" instead of raising KeyError.
    is_json = "application/json" in response_headers.get("Content-Type", "")
    if is_json:
        response_payload = json.loads(response_payload.decode("utf-8"))
    response.close()
    connection.close()
    # BUGFIX: the original condition
    #   `redirect and verb == "GET" and status == 301 or status == 302`
    # bound `or` loosely, so a 302 was followed for any verb even when
    # `redirect` was False.
    if redirect and verb == "GET" and status in (301, 302):
        return request_url(
            verb,
            response_headers["Location"],
            headers=headers,
            auth=auth
        )
    return Response(response_payload, response_headers, status, is_json)
def request_url(verb, url, payload=None, headers=None, auth=None):
    """
    Convenience wrapper around ``request`` that takes a full URL, deriving
    host, port, scheme, path/query and optional userinfo credentials from it.
    """
    parsed = urlparse(url)
    https = parsed.scheme == "https"
    # BUGFIX: `parsed.port or 443 if https else 80` parsed as
    # `(parsed.port or 443) if https else 80`, silently discarding any
    # explicit port on plain-HTTP URLs.
    port = parsed.port or (443 if https else 80)
    # Preserve the query string (previously dropped) and default a bare
    # authority URL like "http://host" to the root path.
    path = parsed.path or "/"
    if parsed.query:
        path = "{}?{}".format(path, parsed.query)
    return request(
        verb,
        parsed.hostname,
        port,
        path,
        payload=payload,
        https=https,
        headers=headers,
        auth=([parsed.username, parsed.password]
              if parsed.username and parsed.password
              else None)
    )
<|fim▁hole|>put = partial(request, "PUT")
get_url = partial(request_url, "GET")
post_url = partial(request_url, "POST")
put_url = partial(request_url, "PUT")<|fim▁end|> |
get = partial(request, "GET")
post = partial(request, "POST") |
<|file_name|>alpha-triangle.py<|end_file_name|><|fim▁begin|>alpha = "abcdefghijklmnopqrstuvwxyz"
# Ascending half: prefixes of length 1..26 ("a", "ab", ..., full alphabet).
for n in range(0, 26, 1):
    print alpha[0:n+1]
# Descending half: prefixes of length 25 down to 1, mirroring the triangle
# (the full 26-char line is printed only once).
for n in range(26, 1, -1):
    print alpha[0:n-1]
"""
alpha = "a"
m = ord(alpha)
n = 0<|fim▁hole|>while n < m:
print chr(m + 1) in range(65, 122)
m += 1
for i in range(ord('a'), 123, 1):
print chr(i[0:m+1])
while m < 123:
print chr(m[0:])
"""<|fim▁end|> | |
<|file_name|>inefficient_to_string.rs<|end_file_name|><|fim▁begin|>// run-rustfix
#![deny(clippy::inefficient_to_string)]
use std::borrow::Cow;
fn main() {
let rstr: &str = "hello";
let rrstr: &&str = &rstr;<|fim▁hole|> let _: String = rstr.to_string();
let _: String = rrstr.to_string();
let _: String = rrrstr.to_string();
let string: String = String::from("hello");
let rstring: &String = &string;
let rrstring: &&String = &rstring;
let rrrstring: &&&String = &rrstring;
let _: String = string.to_string();
let _: String = rstring.to_string();
let _: String = rrstring.to_string();
let _: String = rrrstring.to_string();
let cow: Cow<'_, str> = Cow::Borrowed("hello");
let rcow: &Cow<'_, str> = &cow;
let rrcow: &&Cow<'_, str> = &rcow;
let rrrcow: &&&Cow<'_, str> = &rrcow;
let _: String = cow.to_string();
let _: String = rcow.to_string();
let _: String = rrcow.to_string();
let _: String = rrrcow.to_string();
}<|fim▁end|> | let rrrstr: &&&str = &rrstr; |
<|file_name|>choice.ts<|end_file_name|><|fim▁begin|>export interface IChoice<T> {
value: T,
label?: string
}<|fim▁hole|> constructor(public value: T, label?: string) {
this._label = label;
}
get label(): string {
return this._label || String(this.value);
}
toJs(): object {
return {
label: this.label,
value: this.value
}
}
fromJs(obj): Choice<T> {
return new Choice(obj.value, obj.label);
}
}<|fim▁end|> |
export class Choice<T> implements IChoice<T> {
private _label;
|
<|file_name|>app.ts<|end_file_name|><|fim▁begin|>import application = require("application");
// Specify custom UIApplicationDelegate.
/*
class MyDelegate extends UIResponder implements UIApplicationDelegate {
public static ObjCProtocols = [UIApplicationDelegate];
applicationDidFinishLaunchingWithOptions(application: UIApplication, launchOptions: NSDictionary): boolean {
console.log("applicationWillFinishLaunchingWithOptions: " + launchOptions)
return true;
}
applicationDidBecomeActive(application: UIApplication): void {
console.log("applicationDidBecomeActive: " + application)
}
}
application.ios.delegate = MyDelegate;
*/
if (application.ios) {
// Observe application notifications.
application.ios.addNotificationObserver(UIApplicationDidFinishLaunchingNotification, (notification: NSNotification) => {
console.log("UIApplicationDidFinishLaunchingNotification: " + notification)
});
}
application.mainModule = "app/mainPage";
// Common events for both Android and iOS.
application.on(application.launchEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android.content.Intent class.
console.log("Launched Android application with the following intent: " + args.android + ".");
} else if (args.ios !== undefined) {
// For iOS applications, args.ios is NSDictionary (launchOptions).
console.log("Launched iOS application with options: " + args.ios);
}
});
application.on(application.suspendEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android activity class.
console.log("Activity: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is UIApplication.
console.log("UIApplication: " + args.ios);
}
});
application.on(application.resumeEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android activity class.
console.log("Activity: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is UIApplication.
console.log("UIApplication: " + args.ios);
}
});
application.on(application.exitEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android activity class.
console.log("Activity: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is UIApplication.
console.log("UIApplication: " + args.ios);
}
});
application.on(application.lowMemoryEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android activity class.
console.log("Activity: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is UIApplication.
console.log("UIApplication: " + args.ios);
}
});
application.on(application.uncaughtErrorEvent, function (args: application.ApplicationEventData) {
if (args.android) {<|fim▁hole|> // For Android applications, args.android is an NativeScriptError.
console.log("NativeScriptError: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is NativeScriptError.
console.log("NativeScriptError: " + args.ios);
}
});
// Android activity events
if (application.android) {
application.android.on(application.AndroidApplication.activityCreatedEvent, function (args: application.AndroidActivityBundleEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", Bundle: " + args.bundle);
});
application.android.on(application.AndroidApplication.activityDestroyedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.activityStartedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.activityPausedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.activityResumedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.activityStoppedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.saveActivityStateEvent, function (args: application.AndroidActivityBundleEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", Bundle: " + args.bundle);
});
application.android.on(application.AndroidApplication.activityResultEvent, function (args: application.AndroidActivityResultEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity +
", requestCode: " + args.requestCode + ", resultCode: " + args.resultCode + ", Intent: " + args.intent);
});
application.android.on(application.AndroidApplication.activityBackPressedEvent, function (args: application.AndroidActivityBackPressedEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
// Set args.cancel = true to cancel back navigation and do something custom.
});
}
application.start();<|fim▁end|> | |
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Control switches."""
from datetime import timedelta
import logging
from ProgettiHWSW.relay import Relay
import async_timeout
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from . import setup_switch
from .const import DEFAULT_POLLING_INTERVAL_SEC, DOMAIN
_LOGGER = logging.getLogger(DOMAIN)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the switches from a config entry."""
board_api = hass.data[DOMAIN][config_entry.entry_id]
relay_count = config_entry.data["relay_count"]
switches = []
async def async_update_data():
"""Fetch data from API endpoint of board."""
async with async_timeout.timeout(5):
return await board_api.get_switches()
coordinator = DataUpdateCoordinator(<|fim▁hole|> name="switch",
update_method=async_update_data,
update_interval=timedelta(seconds=DEFAULT_POLLING_INTERVAL_SEC),
)
await coordinator.async_refresh()
for i in range(1, int(relay_count) + 1):
switches.append(
ProgettihwswSwitch(
coordinator,
f"Relay #{i}",
setup_switch(board_api, i, config_entry.data[f"relay_{str(i)}"]),
)
)
async_add_entities(switches)
class ProgettihwswSwitch(CoordinatorEntity, SwitchEntity):
"""Represent a switch entity."""
def __init__(self, coordinator, name, switch: Relay):
"""Initialize the values."""
super().__init__(coordinator)
self._switch = switch
self._name = name
async def async_turn_on(self, **kwargs):
"""Turn the switch on."""
await self._switch.control(True)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs):
"""Turn the switch off."""
await self._switch.control(False)
await self.coordinator.async_request_refresh()
async def async_toggle(self, **kwargs):
"""Toggle the state of switch."""
await self._switch.toggle()
await self.coordinator.async_request_refresh()
@property
def name(self):
"""Return the switch name."""
return self._name
@property
def is_on(self):
"""Get switch state."""
return self.coordinator.data[self._switch.id]<|fim▁end|> | hass,
_LOGGER, |
<|file_name|>mod_pow.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::arithmetic::traits::ModMulAssign;
use malachite_base::num::basic::traits::{One, Zero};
use malachite_base::num::logic::traits::BitIterable;
use malachite_nz::natural::Natural;
pub fn simple_binary_mod_pow(x: &Natural, exp: &Natural, m: &Natural) -> Natural {
if *m == 1 {
return Natural::ZERO;
}
let mut out = Natural::ONE;<|fim▁hole|> if bit {
out.mod_mul_assign(x, m);
}
}
out
}<|fim▁end|> | for bit in exp.bits().rev() {
out.mod_mul_assign(out.clone(), m); |
<|file_name|>timer.js<|end_file_name|><|fim▁begin|>// var isWaiting = false;
// var isRunning = false;
// var seconds = 10;
// var countdownTimer;
// var finalCountdown = false;
function GameTimer(game) {
this.seconds = game.timelimit;
<|fim▁hole|> if (this.seconds === 0 && !game.gameOver) {
game.endGame();
} else if (!game.gameOver) {
this.seconds--;
$("#timer_num").html(this.seconds);
}
}
var countdownTimer = setInterval('t.secondPassed()', 1000);
}<|fim▁end|> | this.secondPassed = function() { |
<|file_name|>formations_test.go<|end_file_name|><|fim▁begin|>package api_test<|fim▁hole|> "testing"
"github.com/remind101/empire/pkg/heroku"
)
func TestFormationBatchUpdate(t *testing.T) {
c, s := NewTestClient(t)
defer s.Close()
mustDeploy(t, c, DefaultImage)
q := 2
f := mustFormationBatchUpdate(t, c, "acme-inc", []heroku.FormationBatchUpdateOpts{
{
Process: "web",
Quantity: &q,
},
})
if got, want := f[0].Quantity, 2; got != want {
t.Fatalf("Quantity => %d; want %d", got, want)
}
}
func mustFormationBatchUpdate(t testing.TB, c *heroku.Client, appName string, updates []heroku.FormationBatchUpdateOpts) []heroku.Formation {
f, err := c.FormationBatchUpdate(appName, updates, "")
if err != nil {
t.Fatal(err)
}
return f
}<|fim▁end|> |
import ( |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Secrets framework provides means of getting connection objects from various sources, e.g. the following:
* Environment variables
* Metastore database<|fim▁hole|>__all__ = ['BaseSecretsBackend', 'DEFAULT_SECRETS_SEARCH_PATH']
from airflow.secrets.base_secrets import BaseSecretsBackend
DEFAULT_SECRETS_SEARCH_PATH = [
"airflow.secrets.environment_variables.EnvironmentVariablesBackend",
"airflow.secrets.metastore.MetastoreBackend",
]<|fim▁end|> | * AWS SSM Parameter store
""" |
<|file_name|>ServerScheduler.hpp<|end_file_name|><|fim▁begin|>/*
* ServerScheduler.hpp
*
* Copyright (C) 2009-12 by RStudio, Inc.
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
#ifndef SERVER_SCHEDULER_HPP
#define SERVER_SCHEDULER_HPP
#include <string>
#include <core/ScheduledCommand.hpp>
namespace rstudio {
namespace server {
namespace scheduler {
// add a scheduled command to the server
//<|fim▁hole|>
} // namespace scheduler
} // namespace server
} // namespace rstudio
#endif // SERVER_SCHEDULER_HPP<|fim▁end|> | // note that this function does not synchronize access to the list of
// scheduled commands so it should ONLY be called during server init
void addCommand(boost::shared_ptr<core::ScheduledCommand> pCmd); |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//!
//! Module which provide handlers to send the log records to the appropriate destination.
//!
pub mod streams;
use handlers::streams::file::FileHandler;
use handlers::streams::net::TCPHandler;
use handlers::streams::stdout::StdoutHandler;
use log::LogLevelFilter;
use ExtendedLogRecord;
use std::sync::Mutex;
/// A trait encapsulating the filtering operation of the handler.
pub trait Filter {
/// determines if a log message would be logged by the handler.
fn filter(&self, record: &ExtendedLogRecord) -> bool;<|fim▁hole|>lazy_static! {
/// We define handlers as static to be executed at runtime.
pub static ref HANDLERS: Mutex<Vec<Handler>> = Mutex::new(vec![]);
}
/// A trait encapsulating the operations required of a handler
pub trait Handle {
/// Determines if a log record may be handled by the handler.
fn handle(&mut self, record: &ExtendedLogRecord);
/// Emit the log record.
fn emit(&mut self, record: &ExtendedLogRecord);
}
/// Available handlers
pub enum Handler {
/// A dummy handler use to do nothing.
Null(NullHandler),
/// A handler to send the log record into stdout.
Stdout(StdoutHandler),
/// A handler to send the log record into a file.
File(FileHandler),
/// A handler to send the log record into a TCP socket.
TCP(TCPHandler)
}
impl Handler {
pub fn handle(&mut self, record: &ExtendedLogRecord) {
match *self {
Handler::Null(ref mut hdlr) => hdlr.handle(record),
Handler::Stdout(ref mut hdlr) => hdlr.handle(record),
Handler::File(ref mut hdlr) => hdlr.handle(record),
Handler::TCP(ref mut hdlr) => hdlr.handle(record),
};
}
}
impl From<StdoutHandler> for Handler {
fn from(hdlr: StdoutHandler) -> Handler {
Handler::Stdout(hdlr)
}
}
impl From<NullHandler> for Handler {
fn from(hdlr: NullHandler) -> Handler {
Handler::Null(hdlr)
}
}
impl From<FileHandler> for Handler {
fn from(hdlr: FileHandler) -> Handler {
Handler::File(hdlr)
}
}
impl From<TCPHandler> for Handler {
fn from(hdlr: TCPHandler) -> Handler {
Handler::TCP(hdlr)
}
}
///
/// A dummy handler which does nothing.
///
/// Useful as a sink when logging must be disabled without removing handlers.
///
pub struct NullHandler;

impl Filter for NullHandler {
    /// Always accept the record (the record itself is intentionally unused).
    fn filter(&self, record: &ExtendedLogRecord) -> bool { true }
}
impl Handle for NullHandler {
fn handle(&mut self, record: &ExtendedLogRecord) {}
fn emit(&mut self, record: &ExtendedLogRecord) {}
}<|fim▁end|> | }
|
<|file_name|>bip65-cltv-p2p.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import start_nodes
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_NOP2, OP_DROP
from binascii import unhexlify
import cStringIO
'''
This test is meant to exercise BIP65 (CHECKLOCKTIMEVERIFY).
Connect to a single node.
Mine a coinbase block, and then ...
Mine 1 version 4 block.
Check that the CLTV rules are enforced.
TODO: factor out common code from {bipdersig-p2p,bip65-cltv-p2p}.py.
'''
class BIP65Test(ComparisonTestFramework):
def __init__(self):
    # Single-node test; the comparison framework (comptool) drives the node.
    self.num_nodes = 1
def setup_network(self):
    """Start one node; -whitelist keeps our local P2P peer from being banned."""
    self.nodes = start_nodes(1, self.options.tmpdir,
                             extra_args=[['-debug', '-whitelist=127.0.0.1']],
                             binary=[self.options.testbinary])
    # Single node, so the network is never split.
    self.is_network_split = False
def run_test(self):
    """Wire the comparison TestManager to our node and run all test instances."""
    test = TestManager(self, self.options.tmpdir)
    test.add_all_connections(self.nodes)
    NetworkThread().start()  # Start up network handling in another thread
    test.run()
def create_transaction(self, node, coinbase, to_address, amount):
    """Build and sign a transaction spending output 0 of the given coinbase.

    Returns a mininode CTransaction deserialized from the node-signed hex.
    """
    # The coinbase transaction is always the first tx of its block.
    coinbase_txid = node.getblock(coinbase)['tx'][0]
    raw_tx = node.createrawtransaction(
        [{"txid": coinbase_txid, "vout": 0}],
        {to_address: amount})
    signed = node.signrawtransaction(raw_tx)
    # Parse the signed hex back into an object we can mutate locally.
    result = CTransaction()
    result.deserialize(cStringIO.StringIO(unhexlify(signed['hex'])))
    return result
def invalidate_transaction(self, tx):
    """Make vin 0 of *tx* fail CLTV.

    Prepends '-1 CHECKLOCKTIMEVERIFY DROP' to the scriptSig so script
    evaluation rejects the input under BIP65 rules.
    """
    prefix = [OP_1NEGATE, OP_NOP2, OP_DROP]
    original_ops = list(CScript(tx.vin[0].scriptSig))
    tx.vin[0].scriptSig = CScript(prefix + original_ops)
def get_tests(self):
self.coinbase_blocks = self.nodes[0].generate(1)
self.nodes[0].generate(100)
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.nodeaddress = self.nodes[0].getnewaddress()
'''Check that the rules are enforced.'''
for valid in (True, False):
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[0],
self.nodeaddress, 1.0)
if not valid:
self.invalidate_transaction(spendtx)
spendtx.rehash()
gbt = self.nodes[0].getblocktemplate()
self.block_time = gbt["mintime"] + 1
self.block_bits = int("0x" + gbt["bits"], 0)
block = create_block(self.tip, create_coinbase(101),
self.block_time, self.block_bits)<|fim▁hole|> block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.block_time += 1
self.tip = block.sha256
yield TestInstance([[block, valid]])
if __name__ == '__main__':
BIP65Test().main()<|fim▁end|> | block.nVersion = 4 |
<|file_name|>texture_compat.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gles
import (
"context"
"github.com/google/gapid/core/image"
"github.com/google/gapid/core/log"
"github.com/google/gapid/gapis/atom"
"github.com/google/gapid/gapis/atom/transform"
"github.com/google/gapid/gapis/gfxapi"
"github.com/google/gapid/gapis/memory"
)
// luminanceSwizzle emulates GL_LUMINANCE on top of a single-channel (R)
// texture: every color channel samples red, alpha reads constant one.
var luminanceSwizzle = map[GLenum]GLenum{
	GLenum_GL_RED:   GLenum_GL_RED,
	GLenum_GL_GREEN: GLenum_GL_RED,
	GLenum_GL_BLUE:  GLenum_GL_RED,
	GLenum_GL_ALPHA: GLenum_GL_ONE,
	GLenum_GL_ONE:   GLenum_GL_ONE,
	GLenum_GL_ZERO:  GLenum_GL_ZERO,
}

// alphaSwizzle emulates GL_ALPHA: the alpha value lives in the R channel,
// while the color channels read constant zero.
var alphaSwizzle = map[GLenum]GLenum{
	GLenum_GL_RED:   GLenum_GL_ZERO,
	GLenum_GL_GREEN: GLenum_GL_ZERO,
	GLenum_GL_BLUE:  GLenum_GL_ZERO,
	GLenum_GL_ALPHA: GLenum_GL_RED,
	GLenum_GL_ONE:   GLenum_GL_ONE,
	GLenum_GL_ZERO:  GLenum_GL_ZERO,
}

// luminanceAlphaSwizzle emulates GL_LUMINANCE_ALPHA on a two-channel (RG)
// texture: luminance is stored in R, alpha in G.
var luminanceAlphaSwizzle = map[GLenum]GLenum{
	GLenum_GL_RED:   GLenum_GL_RED,
	GLenum_GL_GREEN: GLenum_GL_RED,
	GLenum_GL_BLUE:  GLenum_GL_RED,
	GLenum_GL_ALPHA: GLenum_GL_GREEN,
	GLenum_GL_ONE:   GLenum_GL_ONE,
	GLenum_GL_ZERO:  GLenum_GL_ZERO,
}

// noSwizzle is the identity mapping, used for formats needing no emulation.
var noSwizzle = map[GLenum]GLenum{
	GLenum_GL_RED:   GLenum_GL_RED,
	GLenum_GL_GREEN: GLenum_GL_GREEN,
	GLenum_GL_BLUE:  GLenum_GL_BLUE,
	GLenum_GL_ALPHA: GLenum_GL_ALPHA,
	GLenum_GL_ONE:   GLenum_GL_ONE,
	GLenum_GL_ZERO:  GLenum_GL_ZERO,
}
// getLuminanceAlphaSwizzle picks the compatibility swizzle table used to
// emulate luminance/alpha texture formats by storing them in the R/G channels.
func getLuminanceAlphaSwizzle(internalformat GLenum) map[GLenum]GLenum {
	switch internalformat {
	case GLenum_GL_LUMINANCE, GLenum_GL_LUMINANCE8_EXT,
		GLenum_GL_LUMINANCE16F_EXT, GLenum_GL_LUMINANCE32F_EXT:
		// Single-channel luminance: all color channels replicate R.
		return luminanceSwizzle
	case GLenum_GL_ALPHA, GLenum_GL_ALPHA8_EXT,
		GLenum_GL_ALPHA16F_EXT, GLenum_GL_ALPHA32F_EXT:
		// Alpha-only: alpha is sampled from R, color channels read zero.
		return alphaSwizzle
	case GLenum_GL_LUMINANCE_ALPHA, GLenum_GL_LUMINANCE8_ALPHA8_EXT,
		GLenum_GL_LUMINANCE_ALPHA16F_EXT, GLenum_GL_LUMINANCE_ALPHA32F_EXT:
		// Two-channel: luminance in R, alpha in G.
		return luminanceAlphaSwizzle
	}
	// Every other format needs no channel remapping.
	return noSwizzle
}
// textureCompat holds the state needed to rewrite texture commands for
// desktop-GL replay (see convertFormat's early-out when the version is ES).
type textureCompat struct {
	f   features
	v   *Version
	ctx context.Context

	// Original user-defined swizzle which would be used without compatibility layer.
	// (GL_TEXTURE_SWIZZLE_{R,G,B,A}, Texture) -> GL_{RED,GREEN,BLUE,ALPHA,ONE,ZERO}
	origSwizzle map[GLenum]map[*Texture]GLenum

	// Compatibility component remapping needed to support luminance/alpha formats.
	// Texture -> (GL_{RED,GREEN,BLUE,ALPHA,ONE,ZERO} -> GL_{RED,GREEN,BLUE,ALPHA,ONE,ZERO})
	compatSwizzle map[*Texture]map[GLenum]GLenum
}
// getSwizzle returns the original user-defined swizzle and the current
// swizzle from state for the given texture/parameter pair. When the user has
// never set the swizzle, the GL default for that channel is returned as orig.
func (tc *textureCompat) getSwizzle(t *Texture, parameter GLenum) (orig, curr GLenum) {
	var init GLenum
	switch parameter {
	case GLenum_GL_TEXTURE_SWIZZLE_R:
		init, curr = GLenum_GL_RED, t.SwizzleR
	case GLenum_GL_TEXTURE_SWIZZLE_G:
		init, curr = GLenum_GL_GREEN, t.SwizzleG
	case GLenum_GL_TEXTURE_SWIZZLE_B:
		init, curr = GLenum_GL_BLUE, t.SwizzleB
	case GLenum_GL_TEXTURE_SWIZZLE_A:
		init, curr = GLenum_GL_ALPHA, t.SwizzleA
	}
	// Prefer the value the application itself requested, if recorded.
	if orig, ok := tc.origSwizzle[parameter][t]; ok {
		return orig, curr
	}
	return init, curr
}
// writeCompatSwizzle emits a glTexParameteri applying the combination of the
// user-requested swizzle and the compatibility remapping for texture t.
func (tc *textureCompat) writeCompatSwizzle(ctx context.Context, t *Texture, parameter GLenum, out transform.Writer) {
	target := t.Kind
	orig, curr := tc.getSwizzle(t, parameter)
	compat := orig
	if compatSwizzle, ok := tc.compatSwizzle[t]; ok {
		compat = compatSwizzle[compat]
	}
	// Only emit a command when the effective swizzle differs from state.
	if compat != curr {
		out.MutateAndWrite(ctx, atom.NoID, NewGlTexParameteri(target, parameter, GLint(compat)))
	}
}
// Common handler for all glTex* methods: rewrites internalformat/format/type
// in place so ES texture uploads are acceptable to a desktop GL driver.
// Arguments may be null if the given method does not use them.
func (tc *textureCompat) convertFormat(target GLenum, internalformat, format, componentType *GLenum, out transform.Writer) {
	// Nothing to convert when the replay version is ES.
	if tc.v.IsES {
		return
	}

	if internalformat != nil {
		s := out.State()

		// Cube-map faces share the texture object bound to GL_TEXTURE_CUBE_MAP.
		switch target {
		case GLenum_GL_TEXTURE_CUBE_MAP_POSITIVE_X, GLenum_GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
			GLenum_GL_TEXTURE_CUBE_MAP_POSITIVE_Y, GLenum_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
			GLenum_GL_TEXTURE_CUBE_MAP_POSITIVE_Z, GLenum_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z:
			target = GLenum_GL_TEXTURE_CUBE_MAP
		}

		// Set swizzles to emulate luminance/alpha formats. We need to do this before we convert the format.
		if t, err := subGetBoundTextureOrErrorInvalidEnum(tc.ctx, nil, nil, s, GetState(s), nil, target); err == nil {
			tc.compatSwizzle[t] = getLuminanceAlphaSwizzle(*internalformat)
			tc.writeCompatSwizzle(tc.ctx, t, GLenum_GL_TEXTURE_SWIZZLE_R, out)
			tc.writeCompatSwizzle(tc.ctx, t, GLenum_GL_TEXTURE_SWIZZLE_G, out)
			tc.writeCompatSwizzle(tc.ctx, t, GLenum_GL_TEXTURE_SWIZZLE_B, out)
			tc.writeCompatSwizzle(tc.ctx, t, GLenum_GL_TEXTURE_SWIZZLE_A, out)
		}

		// Replace unsized internal formats with sized equivalents, defaulting
		// the component type to unsigned byte when none was supplied.
		if componentType != nil {
			*internalformat = getSizedInternalFormat(*internalformat, *componentType)
		} else {
			*internalformat = getSizedInternalFormat(*internalformat, GLenum_GL_UNSIGNED_BYTE)
		}

		// Compressed formats are replaced by RGBA8
		// TODO: What about SRGB?
		if isCompressedFormat(*internalformat) {
			if _, supported := tc.f.compressedTextureFormats[*internalformat]; !supported {
				*internalformat = GLenum_GL_RGBA8
			}
		}
	}

	if format != nil {
		// Luminance/Alpha is not supported on desktop so convert it to R/G.
		switch *format {
		case GLenum_GL_LUMINANCE, GLenum_GL_ALPHA:
			*format = GLenum_GL_RED
		case GLenum_GL_LUMINANCE_ALPHA:
			*format = GLenum_GL_RG
		}
	}

	if componentType != nil {
		// Half-float is a core feature on desktop (with different enum value)
		if *componentType == GLenum_GL_HALF_FLOAT_OES {
			*componentType = GLenum_GL_HALF_FLOAT
		}
	}
}
// postTexParameter runs after a glTexParameter* command has been mutated. For
// swizzle parameters it records the value the app requested, then re-emits
// the combined (user + compat) swizzle so luminance/alpha emulation survives.
func (tc *textureCompat) postTexParameter(target, parameter GLenum, out transform.Writer) {
	if tc.v.IsES {
		return
	}

	s := out.State()
	switch parameter {
	case GLenum_GL_TEXTURE_SWIZZLE_R, GLenum_GL_TEXTURE_SWIZZLE_G, GLenum_GL_TEXTURE_SWIZZLE_B, GLenum_GL_TEXTURE_SWIZZLE_A:
		if t, err := subGetBoundTextureOrErrorInvalidEnum(tc.ctx, nil, nil, s, GetState(s), nil, target); err == nil {
			_, curr := tc.getSwizzle(t, parameter)
			// The tex parameter was recently mutated, so set the original swizzle from current state.
			tc.origSwizzle[parameter][t] = curr
			// Combine the original and compat swizzles and write out the commands to set it.
			tc.writeCompatSwizzle(tc.ctx, t, parameter, out)
		}
	case GLenum_GL_TEXTURE_SWIZZLE_RGBA:
		// The API layer is expected to split RGBA swizzles into per-channel ones.
		log.E(tc.ctx, "Unexpected GL_TEXTURE_SWIZZLE_RGBA")
	}
}
// decompressTexImage2D writes a glTexImage2D using the decompressed data for
// the given glCompressedTexImage2D.
func decompressTexImage2D(ctx context.Context, i atom.ID, a *GlCompressedTexImage2D, s *gfxapi.State, out transform.Writer) error {
ctx = log.Enter(ctx, "decompressTexImage2D")
c := GetContext(s)
data := a.Data
if pb := c.BoundBuffers.PixelUnpackBuffer; pb != 0 {
base := a.Data.Address
data = TexturePointer(c.Instances.Buffers[pb].Data.Index(base, s))
out.MutateAndWrite(ctx, atom.NoID, NewGlBindBuffer(GLenum_GL_PIXEL_UNPACK_BUFFER, 0))
defer out.MutateAndWrite(ctx, atom.NoID, NewGlBindBuffer(GLenum_GL_PIXEL_UNPACK_BUFFER, pb))
} else {
a.Extras().Observations().ApplyReads(s.Memory[memory.ApplicationPool])
}
src := image.Info2D{
Data: image.NewID(data.Slice(0, uint64(a.ImageSize), s).ResourceID(ctx, s)),
Width: uint32(a.Width),
Height: uint32(a.Height),
Format: newImgfmt(a.Format, 0).asImageOrPanic(),
}
dst, err := src.ConvertTo(ctx, image.RGBA_U8_NORM)
if err != nil {
return err<|fim▁hole|> tmp := atom.Must(atom.Alloc(ctx, s, uint64(dstSize)))
out.MutateAndWrite(ctx, i, NewGlTexImage2D(
a.Target,
a.Level,
GLint(GLenum_GL_RGBA8),
a.Width,
a.Height,
a.Border,
GLenum_GL_RGBA,
GLenum_GL_UNSIGNED_BYTE,
tmp.Ptr(),
).AddRead(tmp.Range(), dst.Data.ID()))
tmp.Free()
return nil
}
// decompressTexSubImage2D writes a glTexSubImage2D using the decompressed data for
// the given glCompressedTexSubImage2D.
func decompressTexSubImage2D(ctx context.Context, i atom.ID, a *GlCompressedTexSubImage2D, s *gfxapi.State, out transform.Writer) error {
	ctx = log.Enter(ctx, "decompressTexSubImage2D")
	c := GetContext(s)

	data := a.Data
	if pb := c.BoundBuffers.PixelUnpackBuffer; pb != 0 {
		// A pixel-unpack buffer is bound, so a.Data is an offset into that
		// buffer rather than a client pointer. Source the bytes from the
		// buffer object, unbind it while we upload from client memory, and
		// rebind it on exit so replay state matches the capture.
		base := a.Data.Address
		data = TexturePointer(c.Instances.Buffers[pb].Data.Index(base, s))
		out.MutateAndWrite(ctx, atom.NoID, NewGlBindBuffer(GLenum_GL_PIXEL_UNPACK_BUFFER, 0))
		defer out.MutateAndWrite(ctx, atom.NoID, NewGlBindBuffer(GLenum_GL_PIXEL_UNPACK_BUFFER, pb))
	} else {
		a.Extras().Observations().ApplyReads(s.Memory[memory.ApplicationPool])
	}

	src := image.Info2D{
		Data:   image.NewID(data.Slice(0, uint64(a.ImageSize), s).ResourceID(ctx, s)),
		Width:  uint32(a.Width),
		Height: uint32(a.Height),
		Format: newImgfmtFromSIF(a.Format).asImageOrPanic(),
	}
	// Decompress to 8-bit normalized RGBA before re-uploading.
	dst, err := src.ConvertTo(ctx, image.RGBA_U8_NORM)
	if err != nil {
		return err
	}

	dstSize := a.Width * a.Height * 4 // 4 bytes per RGBA8 texel

	tmp := atom.Must(atom.Alloc(ctx, s, uint64(dstSize)))
	out.MutateAndWrite(ctx, i, NewGlTexSubImage2D(
		a.Target,
		a.Level,
		a.Xoffset,
		a.Yoffset,
		a.Width,
		a.Height,
		GLenum_GL_RGBA,
		GLenum_GL_UNSIGNED_BYTE,
		tmp.Ptr(),
	).AddRead(tmp.Range(), dst.Data.ID()))
	tmp.Free()

	return nil
}
// getSupportedCompressedTextureFormats returns the union of compressed
// texture formats enabled by any extension in the given list.
func getSupportedCompressedTextureFormats(extensions extensions) map[GLenum]struct{} {
	out := make(map[GLenum]struct{})
	for ext := range extensions {
		for _, enum := range getExtensionTextureFormats(ext) {
			out[enum] = struct{}{}
		}
	}
	return out
}
// getExtensionTextureFormats returns the list of compressed texture formats
// enabled by a given extension. Unknown extensions yield an empty list.
func getExtensionTextureFormats(extension string) []GLenum {
	switch extension {
	case "GL_AMD_compressed_ATC_texture":
		return []GLenum{
			GLenum_GL_ATC_RGB_AMD,
			GLenum_GL_ATC_RGBA_EXPLICIT_ALPHA_AMD,
			GLenum_GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD,
		}
	case "GL_OES_compressed_ETC1_RGB8_texture":
		return []GLenum{
			GLenum_GL_ETC1_RGB8_OES,
		}
	case "GL_EXT_texture_compression_dxt1":
		return []GLenum{
			GLenum_GL_COMPRESSED_RGB_S3TC_DXT1_EXT,
			GLenum_GL_COMPRESSED_RGBA_S3TC_DXT1_EXT,
		}
	case "GL_EXT_texture_compression_s3tc", "GL_NV_texture_compression_s3tc":
		return []GLenum{
			GLenum_GL_COMPRESSED_RGB_S3TC_DXT1_EXT,
			GLenum_GL_COMPRESSED_RGBA_S3TC_DXT1_EXT,
			GLenum_GL_COMPRESSED_RGBA_S3TC_DXT3_EXT,
			GLenum_GL_COMPRESSED_RGBA_S3TC_DXT5_EXT,
		}
	case "GL_KHR_texture_compression_astc_ldr":
		// All LDR ASTC block sizes, in both linear and sRGB variants.
		return []GLenum{
			GLenum_GL_COMPRESSED_RGBA_ASTC_4x4_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_5x4_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_5x5_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_6x5_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_6x6_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_8x5_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_8x6_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_8x8_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_10x5_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_10x6_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_10x8_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_10x10_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_12x10_KHR,
			GLenum_GL_COMPRESSED_RGBA_ASTC_12x12_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR,
			GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR,
		}
	case "GL_EXT_texture_compression_latc", "GL_NV_texture_compression_latc":
		return []GLenum{
			GLenum_GL_COMPRESSED_LUMINANCE_LATC1_EXT,
			GLenum_GL_COMPRESSED_SIGNED_LUMINANCE_LATC1_EXT,
			GLenum_GL_COMPRESSED_LUMINANCE_ALPHA_LATC2_EXT,
			GLenum_GL_COMPRESSED_SIGNED_LUMINANCE_ALPHA_LATC2_EXT,
		}
	default:
		return []GLenum{}
	}
}
func isCompressedFormat(internalformat GLenum) bool {
switch internalformat {
case
GLenum_GL_ATC_RGBA_EXPLICIT_ALPHA_AMD,
GLenum_GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD,
GLenum_GL_ATC_RGB_AMD,
GLenum_GL_COMPRESSED_LUMINANCE_ALPHA_LATC2_EXT,
GLenum_GL_COMPRESSED_LUMINANCE_LATC1_EXT,
GLenum_GL_COMPRESSED_RG11_EAC,
GLenum_GL_COMPRESSED_RGB8_ETC2,
GLenum_GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2,
GLenum_GL_COMPRESSED_RGBA8_ETC2_EAC,
GLenum_GL_COMPRESSED_RGBA_ASTC_10x10_KHR,
GLenum_GL_COMPRESSED_RGBA_ASTC_10x5,
GLenum_GL_COMPRESSED_RGBA_ASTC_10x6,
GLenum_GL_COMPRESSED_RGBA_ASTC_10x8,
GLenum_GL_COMPRESSED_RGBA_ASTC_12x10,
GLenum_GL_COMPRESSED_RGBA_ASTC_12x12,
GLenum_GL_COMPRESSED_RGBA_ASTC_4x4,
GLenum_GL_COMPRESSED_RGBA_ASTC_5x4,
GLenum_GL_COMPRESSED_RGBA_ASTC_5x5,
GLenum_GL_COMPRESSED_RGBA_ASTC_6x5,
GLenum_GL_COMPRESSED_RGBA_ASTC_6x6,
GLenum_GL_COMPRESSED_RGBA_ASTC_8x5,
GLenum_GL_COMPRESSED_RGBA_ASTC_8x6,
GLenum_GL_COMPRESSED_RGBA_ASTC_8x8,
GLenum_GL_COMPRESSED_RGBA_S3TC_DXT1_EXT,
GLenum_GL_COMPRESSED_RGBA_S3TC_DXT3_EXT,
GLenum_GL_COMPRESSED_RGBA_S3TC_DXT5_EXT,
GLenum_GL_COMPRESSED_RGB_S3TC_DXT1_EXT,
GLenum_GL_COMPRESSED_SIGNED_LUMINANCE_ALPHA_LATC2_EXT,
GLenum_GL_COMPRESSED_SIGNED_LUMINANCE_LATC1_EXT,
GLenum_GL_COMPRESSED_SIGNED_R11_EAC,
GLenum_GL_COMPRESSED_SIGNED_RG11_EAC,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8,
GLenum_GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC,
GLenum_GL_COMPRESSED_SRGB8_ETC2,
GLenum_GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2,
GLenum_GL_ETC1_RGB8_OES:
return true
}
return false
}<|fim▁end|> | }
dstSize := a.Width * a.Height * 4
|
<|file_name|>firewalld.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2013, Adam Miller <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: firewalld
short_description: Manage arbitrary ports/services with firewalld
description:
- This module allows for addition or deletion of services and ports either tcp or udp in either running or permanent firewalld rules.
version_added: "1.4"
options:
service:
description:
- Name of a service to add/remove to/from firewalld.
- The service must be listed in output of firewall-cmd --get-services.
type: str
port:
description:
- Name of a port or port range to add/remove to/from firewalld.
- Must be in the form PORT/PROTOCOL or PORT-PORT/PROTOCOL for port ranges.
type: str
rich_rule:
description:
- Rich rule to add/remove to/from firewalld.
type: str
source:
description:
- The source/network you would like to add/remove to/from firewalld.
type: str
version_added: "2.0"
interface:
description:
- The interface you would like to add/remove to/from a zone in firewalld.
type: str
version_added: "2.1"
icmp_block:
description:
- The icmp block you would like to add/remove to/from a zone in firewalld.
type: str
version_added: "2.8"
icmp_block_inversion:
description:
- Enable/Disable inversion of icmp blocks for a zone in firewalld.
type: str
version_added: "2.8"
zone:
description:
- >
The firewalld zone to add/remove to/from (NOTE: default zone can be configured per system but "public" is default from upstream.
- Available choices can be extended based on per-system configs, listed here are "out of the box" defaults).
- Possible values include C(block), C(dmz), C(drop), C(external), C(home), C(internal), C(public), C(trusted), C(work) ]
type: str
default: system-default(public)
permanent:
description:
- Should this configuration be in the running firewalld configuration or persist across reboots.
- As of Ansible 2.3, permanent operations can operate on firewalld configs when it is not running (requires firewalld >= 3.0.9).
- Note that if this is C(no), immediate is assumed C(yes).
type: bool
immediate:
description:
- Should this configuration be applied immediately, if set as permanent.
type: bool
default: no
version_added: "1.9"
state:
description:
- Enable or disable a setting.
- 'For ports: Should this port accept(enabled) or reject(disabled) connections.'
- The states C(present) and C(absent) can only be used in zone level operations (i.e. when no other parameters but zone and state are set).
type: str
required: true
choices: [ absent, disabled, enabled, present ]
timeout:
description:
- The amount of time the rule should be in effect for when non-permanent.
type: int
default: 0
masquerade:
description:
- The masquerade setting you would like to enable/disable to/from zones within firewalld.
type: str
version_added: "2.1"
offline:
description:
- Whether to run this module even when firewalld is offline.
type: bool
version_added: "2.3"
notes:
- Not tested on any Debian based system.
- Requires the python2 bindings of firewalld, which may not be installed by default.
- For distributions where the python2 firewalld bindings are unavailable (e.g Fedora 28 and later) you will have to set the
ansible_python_interpreter for these hosts to the python3 interpreter path and install the python3 bindings.
- Zone transactions (creating, deleting) can be performed by using only the zone and state parameters "present" or "absent".
Note that zone transactions must explicitly be permanent. This is a limitation in firewalld.
This also means that you will have to reload firewalld after adding a zone that you wish to perform immediate actions on.
The module will not take care of this for you implicitly because that would undo any previously performed immediate actions which were not
permanent. Therefore, if you require immediate access to a newly created zone it is recommended you reload firewalld immediately after the zone
creation returns with a changed state and before you perform any other immediate, non-permanent actions on that zone.
requirements: [ 'firewalld >= 0.2.11' ]
author: "Adam Miller (@maxamillion)"
'''
EXAMPLES = r'''
- firewalld:
service: https
permanent: yes
state: enabled
- firewalld:
port: 8081/tcp
permanent: yes
state: disabled
- firewalld:
port: 161-162/udp
permanent: yes
state: enabled
- firewalld:
zone: dmz
service: http
permanent: yes
state: enabled
- firewalld:
rich_rule: rule service name="ftp" audit limit value="1/m" accept
permanent: yes
state: enabled
- firewalld:
source: 192.0.2.0/24
zone: internal
state: enabled
- firewalld:
zone: trusted
interface: eth2
permanent: yes
state: enabled
- firewalld:
masquerade: yes
state: enabled
permanent: yes
zone: dmz
- firewalld:
zone: custom
state: present
permanent: yes
- firewalld:
zone: drop
state: present
permanent: yes
icmp_block_inversion: yes
- firewalld:
zone: drop
state: present
permanent: yes
icmp_block: echo-request
- name: Redirect port 443 to 8443 with Rich Rule
firewalld:
rich_rule: rule forward-port port=443 protocol=tcp to-port=8443
zone: public
permanent: yes
immediate: yes
state: enabled
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.firewalld import FirewallTransaction, fw_offline
try:
from firewall.client import Rich_Rule
from firewall.client import FirewallClientZoneSettings
except ImportError:
# The import errors are handled via FirewallTransaction, don't need to
# duplicate that here
pass
class IcmpBlockTransaction(FirewallTransaction):
    """
    IcmpBlockTransaction

    Adds/removes an ICMP block for a firewalld zone in the runtime and/or
    permanent configuration.
    """

    def __init__(self, module, action_args=None, zone=None, desired_state=None, permanent=False, immediate=False):
        super(IcmpBlockTransaction, self).__init__(
            module, action_args=action_args, desired_state=desired_state, zone=zone, permanent=permanent, immediate=immediate
        )

    def get_enabled_immediate(self, icmp_block, timeout):
        """Report whether the ICMP block is active in the runtime config."""
        active_blocks = self.fw.getIcmpBlocks(self.zone)
        return icmp_block in active_blocks

    def get_enabled_permanent(self, icmp_block, timeout):
        """Report whether the ICMP block is present in the permanent config."""
        _zone_obj, settings = self.get_fw_zone_settings()
        return icmp_block in settings.getIcmpBlocks()

    def set_enabled_immediate(self, icmp_block, timeout):
        self.fw.addIcmpBlock(self.zone, icmp_block, timeout)

    def set_enabled_permanent(self, icmp_block, timeout):
        zone_obj, settings = self.get_fw_zone_settings()
        settings.addIcmpBlock(icmp_block)
        self.update_fw_settings(zone_obj, settings)

    def set_disabled_immediate(self, icmp_block, timeout):
        self.fw.removeIcmpBlock(self.zone, icmp_block)

    def set_disabled_permanent(self, icmp_block, timeout):
        zone_obj, settings = self.get_fw_zone_settings()
        settings.removeIcmpBlock(icmp_block)
        self.update_fw_settings(zone_obj, settings)
class IcmpBlockInversionTransaction(FirewallTransaction):
    """
    IcmpBlockInversionTransaction

    Enables/disables inversion of ICMP blocks for a firewalld zone, in the
    runtime and/or permanent configuration.
    """

    def __init__(self, module, action_args=None, zone=None, desired_state=None, permanent=False, immediate=False):
        super(IcmpBlockInversionTransaction, self).__init__(
            module, action_args=action_args, desired_state=desired_state, zone=zone, permanent=permanent, immediate=immediate
        )

    def get_enabled_immediate(self):
        """Report whether ICMP block inversion is active in the runtime config."""
        # 'is True' keeps the original strict identity comparison; the
        # redundant if/else returning True/False was collapsed.
        return self.fw.queryIcmpBlockInversion(self.zone) is True

    def get_enabled_permanent(self):
        """Report whether ICMP block inversion is set in the permanent config."""
        fw_zone, fw_settings = self.get_fw_zone_settings()
        return fw_settings.getIcmpBlockInversion() is True

    def set_enabled_immediate(self):
        self.fw.addIcmpBlockInversion(self.zone)

    def set_enabled_permanent(self):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.setIcmpBlockInversion(True)
        self.update_fw_settings(fw_zone, fw_settings)

    def set_disabled_immediate(self):
        self.fw.removeIcmpBlockInversion(self.zone)

    def set_disabled_permanent(self):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.setIcmpBlockInversion(False)
        self.update_fw_settings(fw_zone, fw_settings)
class ServiceTransaction(FirewallTransaction):
    """
    ServiceTransaction

    Enables/disables a named firewalld service in a zone, for the runtime
    and/or permanent configuration.
    """

    def __init__(self, module, action_args=None, zone=None, desired_state=None, permanent=False, immediate=False):
        super(ServiceTransaction, self).__init__(
            module, action_args=action_args, desired_state=desired_state, zone=zone, permanent=permanent, immediate=immediate
        )

    def get_enabled_immediate(self, service, timeout):
        """Report whether the service is enabled in the runtime config."""
        # Membership test already yields a bool; the redundant
        # if/else returning True/False was collapsed.
        return service in self.fw.getServices(self.zone)

    def get_enabled_permanent(self, service, timeout):
        """Report whether the service is enabled in the permanent config."""
        fw_zone, fw_settings = self.get_fw_zone_settings()
        return service in fw_settings.getServices()

    def set_enabled_immediate(self, service, timeout):
        self.fw.addService(self.zone, service, timeout)

    def set_enabled_permanent(self, service, timeout):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.addService(service)
        self.update_fw_settings(fw_zone, fw_settings)

    def set_disabled_immediate(self, service, timeout):
        self.fw.removeService(self.zone, service)

    def set_disabled_permanent(self, service, timeout):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.removeService(service)
        self.update_fw_settings(fw_zone, fw_settings)
class MasqueradeTransaction(FirewallTransaction):
    """
    MasqueradeTransaction

    Enables/disables IP masquerading for a firewalld zone, in the runtime
    and/or permanent configuration.
    """

    def __init__(self, module, action_args=None, zone=None, desired_state=None, permanent=False, immediate=False):
        super(MasqueradeTransaction, self).__init__(
            module, action_args=action_args, desired_state=desired_state, zone=zone, permanent=permanent, immediate=immediate
        )

        self.enabled_msg = "Added masquerade to zone %s" % self.zone
        self.disabled_msg = "Removed masquerade from zone %s" % self.zone

    def get_enabled_immediate(self):
        """Report whether masquerading is active in the runtime config."""
        # 'is True' keeps the original strict identity comparison; the
        # redundant if/else returning True/False was collapsed.
        return self.fw.queryMasquerade(self.zone) is True

    def get_enabled_permanent(self):
        """Report whether masquerading is set in the permanent config."""
        fw_zone, fw_settings = self.get_fw_zone_settings()
        return fw_settings.getMasquerade() is True

    def set_enabled_immediate(self):
        self.fw.addMasquerade(self.zone)

    def set_enabled_permanent(self):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.setMasquerade(True)
        self.update_fw_settings(fw_zone, fw_settings)

    def set_disabled_immediate(self):
        self.fw.removeMasquerade(self.zone)

    def set_disabled_permanent(self):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.setMasquerade(False)
        self.update_fw_settings(fw_zone, fw_settings)
class PortTransaction(FirewallTransaction):
    """
    PortTransaction

    Opens or closes a port (or port range) with a given protocol in a
    firewalld zone, for the runtime and/or permanent configuration.
    """

    def __init__(self, module, action_args=None, zone=None, desired_state=None, permanent=False, immediate=False):
        super(PortTransaction, self).__init__(
            module, action_args=action_args, desired_state=desired_state, zone=zone, permanent=permanent, immediate=immediate
        )

    def get_enabled_immediate(self, port, protocol, timeout):
        """Report whether the port/protocol pair is open in the runtime config."""
        port_proto = [port, protocol]
        if self.fw_offline:
            # Daemon not running: consult the permanent settings instead.
            fw_zone, fw_settings = self.get_fw_zone_settings()
            ports_list = fw_settings.getPorts()
            # NOTE(review): offline getPorts() may yield tuples; a list
            # would then never match — confirm against the client library.
        else:
            ports_list = self.fw.getPorts(self.zone)
        # Membership test yields a bool directly; the redundant if/else
        # returning True/False was collapsed.
        return port_proto in ports_list

    def get_enabled_permanent(self, port, protocol, timeout):
        """Report whether the port/protocol pair is in the permanent config."""
        fw_zone, fw_settings = self.get_fw_zone_settings()
        return (port, protocol) in fw_settings.getPorts()

    def set_enabled_immediate(self, port, protocol, timeout):
        self.fw.addPort(self.zone, port, protocol, timeout)

    def set_enabled_permanent(self, port, protocol, timeout):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.addPort(port, protocol)
        self.update_fw_settings(fw_zone, fw_settings)

    def set_disabled_immediate(self, port, protocol, timeout):
        self.fw.removePort(self.zone, port, protocol)

    def set_disabled_permanent(self, port, protocol, timeout):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.removePort(port, protocol)
        self.update_fw_settings(fw_zone, fw_settings)
class InterfaceTransaction(FirewallTransaction):
    """
    InterfaceTransaction

    Moves a network interface into (or removes it from) a firewalld zone, in
    the runtime and/or permanent configuration, including direct zone-XML
    edits when firewalld is offline.
    """

    def __init__(self, module, action_args=None, zone=None, desired_state=None, permanent=False, immediate=False):
        super(InterfaceTransaction, self).__init__(
            module, action_args=action_args, desired_state=desired_state, zone=zone, permanent=permanent, immediate=immediate
        )

        self.enabled_msg = "Changed %s to zone %s" % \
            (self.action_args[0], self.zone)

        self.disabled_msg = "Removed %s from zone %s" % \
            (self.action_args[0], self.zone)

    def get_enabled_immediate(self, interface):
        """Report whether the interface is in the zone's runtime config."""
        # Offline: the daemon is not running, so read permanent settings.
        if self.fw_offline:
            fw_zone, fw_settings = self.get_fw_zone_settings()
            interface_list = fw_settings.getInterfaces()
        else:
            interface_list = self.fw.getInterfaces(self.zone)
        if interface in interface_list:
            return True
        else:
            return False

    def get_enabled_permanent(self, interface):
        """Report whether the interface is in the zone's permanent config."""
        fw_zone, fw_settings = self.get_fw_zone_settings()

        if interface in fw_settings.getInterfaces():
            return True
        else:
            return False

    def set_enabled_immediate(self, interface):
        self.fw.changeZoneOfInterface(self.zone, interface)

    def set_enabled_permanent(self, interface):
        """Move the interface into this zone permanently.

        An interface may be in at most one zone, so it is removed from its
        previous zone (if any) before being added to the target zone.
        """
        fw_zone, fw_settings = self.get_fw_zone_settings()
        if self.fw_offline:
            # Offline: scan every zone XML file for the interface so we can
            # move it rather than duplicate it.
            iface_zone_objs = []
            for zone in self.fw.config.get_zones():
                old_zone_obj = self.fw.config.get_zone(zone)
                if interface in old_zone_obj.interfaces:
                    iface_zone_objs.append(old_zone_obj)
            if len(iface_zone_objs) > 1:
                # Even it shouldn't happen, it's actually possible that
                # the same interface is in several zone XML files
                self.module.fail_json(
                    msg='ERROR: interface {} is in {} zone XML file, can only be in one'.format(
                        interface,
                        len(iface_zone_objs)
                    )
                )
            # NOTE(review): iface_zone_objs may be empty when the interface
            # is not yet in any zone XML; [0] would then raise IndexError —
            # confirm callers guarantee prior membership.
            old_zone_obj = iface_zone_objs[0]
            if old_zone_obj.name != self.zone:
                old_zone_settings = FirewallClientZoneSettings(
                    self.fw.config.get_zone_config(old_zone_obj)
                )
                old_zone_settings.removeInterface(interface)    # remove from old
                self.fw.config.set_zone_config(
                    old_zone_obj,
                    old_zone_settings.settings
                )
                fw_settings.addInterface(interface)             # add to new
                self.fw.config.set_zone_config(fw_zone, fw_settings.settings)
        else:
            old_zone_name = self.fw.config().getZoneOfInterface(interface)
            if old_zone_name != self.zone:
                if old_zone_name:
                    old_zone_obj = self.fw.config().getZoneByName(old_zone_name)
                    old_zone_settings = old_zone_obj.getSettings()
                    old_zone_settings.removeInterface(interface)  # remove from old
                    old_zone_obj.update(old_zone_settings)
                fw_settings.addInterface(interface)               # add to new
                fw_zone.update(fw_settings)

    def set_disabled_immediate(self, interface):
        self.fw.removeInterface(self.zone, interface)

    def set_disabled_permanent(self, interface):
        fw_zone, fw_settings = self.get_fw_zone_settings()
        fw_settings.removeInterface(interface)
        self.update_fw_settings(fw_zone, fw_settings)
class RichRuleTransaction(FirewallTransaction):
"""
RichRuleTransaction
"""
def __init__(self, module, action_args=None, zone=None, desired_state=None, permanent=False, immediate=False):
super(RichRuleTransaction, self).__init__(
module, action_args=action_args, desired_state=desired_state, zone=zone, permanent=permanent, immediate=immediate
)
def get_enabled_immediate(self, rule, timeout):
# Convert the rule string to standard format
# before checking whether it is present
rule = str(Rich_Rule(rule_str=rule))
if rule in self.fw.getRichRules(self.zone):
return True
else:
return False
def get_enabled_permanent(self, rule, timeout):
fw_zone, fw_settings = self.get_fw_zone_settings()
# Convert the rule string to standard format
# before checking whether it is present
rule = str(Rich_Rule(rule_str=rule))
if rule in fw_settings.getRichRules():
return True
else:
return False
def set_enabled_immediate(self, rule, timeout):
self.fw.addRichRule(self.zone, rule, timeout)
def set_enabled_permanent(self, rule, timeout):
fw_zone, fw_settings = self.get_fw_zone_settings()
fw_settings.addRichRule(rule)
self.update_fw_settings(fw_zone, fw_settings)
def set_disabled_immediate(self, rule, timeout):
self.fw.removeRichRule(self.zone, rule)
def set_disabled_permanent(self, rule, timeout):
fw_zone, fw_settings = self.get_fw_zone_settings()
fw_settings.removeRichRule(rule)
self.update_fw_settings(fw_zone, fw_settings)
class SourceTransaction(FirewallTransaction):
"""
SourceTransaction
"""
def __init__(self, module, action_args=None, zone=None, desired_state=None, permanent=False, immediate=False):
super(SourceTransaction, self).__init__(
module, action_args=action_args, desired_state=desired_state, zone=zone, permanent=permanent, immediate=immediate
)
self.enabled_msg = "Added %s to zone %s" % \
(self.action_args[0], self.zone)
self.disabled_msg = "Removed %s from zone %s" % \
(self.action_args[0], self.zone)
def get_enabled_immediate(self, source):
if source in self.fw.getSources(self.zone):
return True
else:
return False
def get_enabled_permanent(self, source):
fw_zone, fw_settings = self.get_fw_zone_settings()
if source in fw_settings.getSources():
return True
else:
return False
def set_enabled_immediate(self, source):
self.fw.addSource(self.zone, source)
def set_enabled_permanent(self, source):
fw_zone, fw_settings = self.get_fw_zone_settings()
fw_settings.addSource(source)
self.update_fw_settings(fw_zone, fw_settings)
def set_disabled_immediate(self, source):
self.fw.removeSource(self.zone, source)
def set_disabled_permanent(self, source):
fw_zone, fw_settings = self.get_fw_zone_settings()
fw_settings.removeSource(source)
self.update_fw_settings(fw_zone, fw_settings)
class ZoneTransaction(FirewallTransaction):
"""
ZoneTransaction
"""
def __init__(self, module, action_args=None, zone=None, desired_state=None,
permanent=True, immediate=False, enabled_values=None, disabled_values=None):
super(ZoneTransaction, self).__init__(
module, action_args=action_args, desired_state=desired_state, zone=zone,
permanent=permanent, immediate=immediate,
enabled_values=enabled_values or ["present"],
disabled_values=disabled_values or ["absent"])
self.enabled_msg = "Added zone %s" % \
(self.zone)
self.disabled_msg = "Removed zone %s" % \
(self.zone)
self.tx_not_permanent_error_msg = "Zone operations must be permanent. " \
"Make sure you didn't set the 'permanent' flag to 'false' or the 'immediate' flag to 'true'."
def get_enabled_immediate(self):
self.module.fail_json(msg=self.tx_not_permanent_error_msg)
def get_enabled_permanent(self):
zones = self.fw.config().listZones()
zone_names = [self.fw.config().getZone(z).get_property("name") for z in zones]
if self.zone in zone_names:
return True
else:
return False
def set_enabled_immediate(self):
self.module.fail_json(msg=self.tx_not_permanent_error_msg)
def set_enabled_permanent(self):
self.fw.config().addZone(self.zone, FirewallClientZoneSettings())
def set_disabled_immediate(self):
self.module.fail_json(msg=self.tx_not_permanent_error_msg)
def set_disabled_permanent(self):
zone_obj = self.fw.config().getZoneByName(self.zone)
zone_obj.remove()
def main():
module = AnsibleModule(
argument_spec=dict(
icmp_block=dict(type='str'),
icmp_block_inversion=dict(type='str'),
service=dict(type='str'),
port=dict(type='str'),
rich_rule=dict(type='str'),
zone=dict(type='str'),
immediate=dict(type='bool', default=False),
source=dict(type='str'),
permanent=dict(type='bool'),
state=dict(type='str', required=True, choices=['absent', 'disabled', 'enabled', 'present']),
timeout=dict(type='int', default=0),
interface=dict(type='str'),
masquerade=dict(type='str'),
offline=dict(type='bool'),
),
supports_check_mode=True
)
permanent = module.params['permanent']
desired_state = module.params['state']
immediate = module.params['immediate']
timeout = module.params['timeout']
interface = module.params['interface']
masquerade = module.params['masquerade']
# Sanity checks
FirewallTransaction.sanity_check(module)
# If neither permanent or immediate is provided, assume immediate (as
# written in the module's docs)
if not permanent and not immediate:
immediate = True
# Verify required params are provided
if immediate and fw_offline:
module.fail_json(msg='firewall is not currently running, unable to perform immediate actions without a running firewall daemon')
changed = False
msgs = []
icmp_block = module.params['icmp_block']
icmp_block_inversion = module.params['icmp_block_inversion']
service = module.params['service']
rich_rule = module.params['rich_rule']
source = module.params['source']
zone = module.params['zone']
if module.params['port'] is not None:
if '/' in module.params['port']:
port, protocol = module.params['port'].strip().split('/')
else:
protocol = None
if not protocol:
module.fail_json(msg='improper port format (missing protocol?)')
else:
port = None
modification_count = 0
if icmp_block is not None:
modification_count += 1
if icmp_block_inversion is not None:
modification_count += 1
if service is not None:
modification_count += 1
if port is not None:
modification_count += 1
if rich_rule is not None:
modification_count += 1
if interface is not None:
modification_count += 1
if masquerade is not None:
modification_count += 1
if modification_count > 1:
module.fail_json(
msg='can only operate on port, service, rich_rule, masquerade, icmp_block, icmp_block_inversion, or interface at once'
)
elif modification_count > 0 and desired_state in ['absent', 'present']:
module.fail_json(
msg='absent and present state can only be used in zone level operations'
)
if icmp_block is not None:
transaction = IcmpBlockTransaction(
module,
action_args=(icmp_block, timeout),
zone=zone,
desired_state=desired_state,
permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
if changed is True:
msgs.append("Changed icmp-block %s to %s" % (icmp_block, desired_state))
if icmp_block_inversion is not None:
transaction = IcmpBlockInversionTransaction(
module,
action_args=(),
zone=zone,
desired_state=desired_state,
permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
if changed is True:
msgs.append("Changed icmp-block-inversion %s to %s" % (icmp_block_inversion, desired_state))
if service is not None:
transaction = ServiceTransaction(
module,
action_args=(service, timeout),
zone=zone,
desired_state=desired_state,<|fim▁hole|> permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
if changed is True:
msgs.append("Changed service %s to %s" % (service, desired_state))
if source is not None:
transaction = SourceTransaction(
module,
action_args=(source,),
zone=zone,
desired_state=desired_state,
permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
if port is not None:
transaction = PortTransaction(
module,
action_args=(port, protocol, timeout),
zone=zone,
desired_state=desired_state,
permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
if changed is True:
msgs.append(
"Changed port %s to %s" % (
"%s/%s" % (port, protocol), desired_state
)
)
if rich_rule is not None:
transaction = RichRuleTransaction(
module,
action_args=(rich_rule, timeout),
zone=zone,
desired_state=desired_state,
permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
if changed is True:
msgs.append("Changed rich_rule %s to %s" % (rich_rule, desired_state))
if interface is not None:
transaction = InterfaceTransaction(
module,
action_args=(interface,),
zone=zone,
desired_state=desired_state,
permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
if masquerade is not None:
transaction = MasqueradeTransaction(
module,
action_args=(),
zone=zone,
desired_state=desired_state,
permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
''' If there are no changes within the zone we are operating on the zone itself '''
if modification_count == 0 and desired_state in ['absent', 'present']:
transaction = ZoneTransaction(
module,
action_args=(),
zone=zone,
desired_state=desired_state,
permanent=permanent,
immediate=immediate,
)
changed, transaction_msgs = transaction.run()
msgs = msgs + transaction_msgs
if changed is True:
msgs.append("Changed zone %s to %s" % (zone, desired_state))
if fw_offline:
msgs.append("(offline operation: only on-disk configs were altered)")
module.exit_json(changed=changed, msg=', '.join(msgs))
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>borrowck-move-out-of-overloaded-deref.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>use std::rc::Rc;
pub fn main() {
let _x = *Rc::new("hi".to_string());
//~^ ERROR cannot move out of dereference of `&`-pointer
}<|fim▁end|> | // option. This file may not be copied, modified, or distributed
// except according to those terms.
|
<|file_name|>jest.config.js<|end_file_name|><|fim▁begin|>module.exports = {
moduleFileExtensions: ['js', 'jsx', 'json', 'vue', 'ts', 'tsx'],
transform: {
'^.+\\.vue$': 'vue-jest',
'.+\\.(css|styl|less|sass|scss|svg|png|jpg|ttf|woff|woff2)$': 'jest-transform-stub',
'^.+\\.tsx?$': 'ts-jest'
},
transformIgnorePatterns: ['/node_modules/'],
moduleNameMapper: {
'^@/(.*)$': '<rootDir>/src/$1'
},
snapshotSerializers: ['jest-serializer-vue'],
testMatch: ['**/tests/unit/**/*.spec.(js|jsx|ts|tsx)|**/__tests__/*.(js|jsx|ts|tsx)'],
testURL: 'http://localhost/',
watchPlugins: ['jest-watch-typeahead/filename', 'jest-watch-typeahead/testname'],
globals: {<|fim▁hole|> }
}<|fim▁end|> | 'ts-jest': {
babelConfig: true
} |
<|file_name|>CalculateAreaForStatsVisitor.cpp<|end_file_name|><|fim▁begin|>/*
* This file is part of Hootenanny.
*
* Hootenanny is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* --------------------------------------------------------------------
*
* The following copyright notices are generated automatically. If you
* have a new notice to add, please use the format:
* " * @copyright Copyright ..."
* This will properly maintain the copyright information. DigitalGlobe
* copyrights will be updated automatically.
*
* @copyright Copyright (C) 2015 DigitalGlobe (http://www.digitalglobe.com/)
*/
#include "CalculateAreaForStatsVisitor.h"
// geos
#include <geos/geom/LineString.h>
// hoot
#include <hoot/core/Factory.h>
#include <hoot/core/OsmMap.h>
#include <hoot/core/util/ElementConverter.h>
<|fim▁hole|>{
HOOT_FACTORY_REGISTER(ElementVisitor, CalculateAreaForStatsVisitor)
Meters CalculateAreaForStatsVisitor::getArea(const OsmMapPtr& map, ElementPtr e)
{
CalculateAreaForStatsVisitor v;
v.setOsmMap(map.get());
e->visitRo(*map, v);
return v.getArea();
}
void CalculateAreaForStatsVisitor::visit(ElementType type, long id)
{
const shared_ptr<const Element> e = _map->getElement(type, id);
shared_ptr<Geometry> g = ElementConverter(_map->shared_from_this()).convertToGeometry(e, true);
_total += g->getArea();
//cout << "CAFS: geo id=" << e->getId() << ", area=" << g->getArea() << ", total area=" << _total << endl;
}
}<|fim▁end|> | namespace hoot |
<|file_name|>cifar10_train.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.<|fim▁hole|># ==============================================================================
"""A binary to train CIFAR-10 using a single GPU.
Accuracy:
cifar10_train.py achieves ~86% accuracy after 100K steps (256 epochs of
data) as judged by cifar10_eval.py.
Speed: With batch_size 128.
System | Step Time (sec/batch) | Accuracy
------------------------------------------------------------------
1 Tesla K20m | 0.35-0.60 | ~86% at 60K steps (5 hours)
1 Tesla K40m | 0.25-0.35 | ~86% at 100K steps (4 hours)
Usage:
Please see the tutorial and website for how to download the CIFAR-10
data set, compile the program and train the model.
http://tensorflow.org/tutorials/deep_cnn/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import time
import tensorflow as tf
import cifar10
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('train_dir', '/tmp/cifar10_train',
"""Directory where to write event logs """
"""and checkpoint.""")
tf.app.flags.DEFINE_integer('max_steps', 100000, #reduced significantly -daniel
"""Number of batches to run.""")
tf.app.flags.DEFINE_boolean('log_device_placement', False,
"""Whether to log device placement.""")
def train():
"""Train CIFAR-10 for a number of steps."""
with tf.Graph().as_default():
global_step = tf.contrib.framework.get_or_create_global_step()
# Get images and labels for CIFAR-10.
images, labels = cifar10.distorted_inputs()
# Build a Graph that computes the logits predictions from the
# inference model.
logits = cifar10.inference(images)
# Calculate loss.
loss = cifar10.loss(logits, labels)
# Build a Graph that trains the model with one batch of examples and
# updates the model parameters.
train_op = cifar10.train(loss, global_step)
class _LoggerHook(tf.train.SessionRunHook):
"""Logs loss and runtime."""
def begin(self):
self._step = -1
def before_run(self, run_context):
self._step += 1
self._start_time = time.time()
return tf.train.SessionRunArgs(loss) # Asks for loss value.
def after_run(self, run_context, run_values):
duration = time.time() - self._start_time
loss_value = run_values.results
if self._step % 10 == 0:
num_examples_per_step = FLAGS.batch_size
examples_per_sec = num_examples_per_step / duration
sec_per_batch = float(duration)
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
'sec/batch)')
print (format_str % (datetime.now(), self._step, loss_value,
examples_per_sec, sec_per_batch))
with tf.train.MonitoredTrainingSession(
checkpoint_dir=FLAGS.train_dir,
hooks=[tf.train.StopAtStepHook(last_step=FLAGS.max_steps),
tf.train.NanTensorHook(loss),
_LoggerHook()],
config=tf.ConfigProto(
log_device_placement=FLAGS.log_device_placement)) as mon_sess:
while not mon_sess.should_stop():
mon_sess.run(train_op)
def main(argv=None): # pylint: disable=unused-argument
cifar10.maybe_download_and_extract()
if tf.gfile.Exists(FLAGS.train_dir):
tf.gfile.DeleteRecursively(FLAGS.train_dir)
tf.gfile.MakeDirs(FLAGS.train_dir)
train()
if __name__ == '__main__':
tf.app.run()<|fim▁end|> | |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ** This file is automatically generated by gapic-generator-typescript. **
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **<|fim▁hole|>function doStuffWithPhishingProtectionServiceV1Beta1Client(
client: PhishingProtectionServiceV1Beta1Client
) {
client.close();
}
function main() {
// check that the client instance can be created
const phishingProtectionServiceV1Beta1Client =
new PhishingProtectionServiceV1Beta1Client();
doStuffWithPhishingProtectionServiceV1Beta1Client(
phishingProtectionServiceV1Beta1Client
);
}
main();<|fim▁end|> |
import {PhishingProtectionServiceV1Beta1Client} from '@google-cloud/phishing-protection';
// check that the client class type name can be used |
<|file_name|>sre_constants.py<|end_file_name|><|fim▁begin|>#
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# update when constants are added or removed
MAGIC = 20031017
MAXREPEAT = 2147483648
#from _sre import MAXREPEAT
# SRE standard exception (access as sre.error)
# should this really be here?
class error(Exception):
pass
# operators
FAILURE = "failure"
SUCCESS = "success"
ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"
# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"
# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"
OPCODES = [
# failure=0 success=1 (just because it looks better that way :-)
FAILURE, SUCCESS,
ANY, ANY_ALL,
ASSERT, ASSERT_NOT,
AT,
BRANCH,
CALL,
CATEGORY,
CHARSET, BIGCHARSET,
GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
IN, IN_IGNORE,
INFO,
JUMP,
LITERAL, LITERAL_IGNORE,
MARK,
MAX_UNTIL,
MIN_UNTIL,
NOT_LITERAL, NOT_LITERAL_IGNORE,
NEGATE,
RANGE,
REPEAT,
REPEAT_ONE,
SUBPATTERN,
MIN_REPEAT_ONE
]
ATCODES = [
AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
AT_UNI_NON_BOUNDARY
]
CHCODES = [
CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
CATEGORY_UNI_NOT_LINEBREAK
]
def makedict(list):
d = {}
i = 0
for item in list:
d[item] = i
i = i + 1
return d
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)
# replacement operations for "ignore case" mode
OP_IGNORE = {
GROUPREF: GROUPREF_IGNORE,
IN: IN_IGNORE,
LITERAL: LITERAL_IGNORE,
NOT_LITERAL: NOT_LITERAL_IGNORE
}
AT_MULTILINE = {
AT_BEGINNING: AT_BEGINNING_LINE,
AT_END: AT_END_LINE
}
AT_LOCALE = {
AT_BOUNDARY: AT_LOC_BOUNDARY,
AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}
AT_UNICODE = {
AT_BOUNDARY: AT_UNI_BOUNDARY,
AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}
CH_LOCALE = {
CATEGORY_DIGIT: CATEGORY_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
CATEGORY_WORD: CATEGORY_LOC_WORD,
CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}
<|fim▁hole|> CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
CATEGORY_WORD: CATEGORY_UNI_WORD,
CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}
# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode "locale"
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging
SRE_FLAG_ASCII = 256 # use ascii "locale"
# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
if __name__ == "__main__":
def dump(f, d, prefix):
items = sorted(d.items(), key=lambda a: a[1])
for k, v in items:
f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))
f = open("sre_constants.h", "w")
f.write("""\
/*
* Secret Labs' Regular Expression Engine
*
* regular expression matching engine
*
* NOTE: This file is generated by sre_constants.py. If you need
* to change anything in here, edit sre_constants.py and run it.
*
* Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
*
* See the _sre.c file for information on usage and redistribution.
*/
""")
f.write("#define SRE_MAGIC %d\n" % MAGIC)
dump(f, OPCODES, "SRE_OP")
dump(f, ATCODES, "SRE")
dump(f, CHCODES, "SRE")
f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)
f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)
f.close()
print("done")<|fim▁end|> | CH_UNICODE = {
CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_UNI_SPACE, |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2020 Christian Amsüss
//
// This file is subject to the terms and conditions of the GNU Lesser
// General Public License v2.1. See the file LICENSE in the top level
// directory for more details.
#![no_std]
use riot_wrappers::{riot_main, println};
use riot_wrappers::{gcoap, thread, ztimer, gnrc};
use coap_handler_implementations::{ReportingHandlerBuilder, HandlerBuilder};
riot_main!(main);
fn main() {
extern "C" {
fn do_vfs_init();
}
unsafe { do_vfs_init() };
let handler = coap_message_demos::full_application_tree(None)
.below(&["ps"], riot_coap_handler_demos::ps::ps_tree())
.below(&["vfs"], riot_coap_handler_demos::vfs::vfs("/const"))
.with_wkc()
;
let mut handler = riot_wrappers::coap_handler::GcoapHandler(handler);
let mut listener = gcoap::SingleHandlerListener::new_catch_all(&mut handler);
gcoap::scope(|greg| {
greg.register(&mut listener);
println!("CoAP server ready; waiting for interfaces to settle before reporting addresses...");
let sectimer = ztimer::ZTimer::sec();
sectimer.sleep_ticks(2);
for netif in gnrc::Netif::all() {
println!("Active interface from PID {:?} ({:?})", netif.pid(), netif.pid().get_name().unwrap_or("unnamed"));
match netif.ipv6_addrs() {
Ok(addrs) => {
for a in addrs.addresses() {
println!(" Address {:?}", a);
}
}
_ => {
println!(" Does not support IPv6.");
}
}
}
// Sending main thread to sleep; can't return or the Gcoap handler would need to be<|fim▁hole|>}<|fim▁end|> | // deregistered (which it can't).
loop { thread::sleep(); }
}) |
<|file_name|>highq_power_sweep_0813f12.py<|end_file_name|><|fim▁begin|>import matplotlib
from kid_readout.roach import baseband
matplotlib.use('agg')
import numpy as np
import time
import sys
from kid_readout.utils import data_file,sweeps
from kid_readout.analysis.resonator import fit_best_resonator
ri = baseband.RoachBasebandWide()
ri.initialize()
#ri.set_fft_gain(6)
#f0s = np.load('/home/gjones/workspace/apps/f8_fit_resonances.npy')
#f0s = np.load('/home/gjones/workspace/apps/first_pass_sc3x3_0813f9.npy')
#f0s = np.load('/home/gjones/workspace/apps/sc5x4_0813f10_first_pass.npy')#[:4]
#f0s = np.load('/home/gjones/workspace/readout/apps/sc3x3_0813f9_2014-02-11.npy')
#f0s = np.load('/home/gjones/workspace/readout/apps/sc3x3_0813f5_2014-02-27.npy')
f0s = np.load('/home/gjones/workspace/apps/sc5x4_0813f12.npy')
f0s.sort()<|fim▁hole|>suffix = "power"
nf = len(f0s)
atonce = 4
if nf % atonce > 0:
print "extending list of resonators to make a multiple of ",atonce
f0s = np.concatenate((f0s,np.arange(1,1+atonce-(nf%atonce))+f0s.max()))
offsets = np.linspace(-4882.8125,4638.671875,20)#[5:15]
offsets = offsets
#offsets = np.concatenate(([-40e3,-20e3],offsets,[20e3,40e3]))/1e6
offsets = np.concatenate(([-40e3],offsets,[40e3]))/1e6
#offsets = offsets*4
nsamp = 2**18
step = 1
nstep = 80
f0binned = np.round(f0s*nsamp/512.0)*512.0/nsamp
offset_bins = np.arange(-(nstep+1),(nstep+1))*step
offsets = offset_bins*512.0/nsamp
offsets = np.concatenate(([offsets.min()-20e-3,],offsets,[offsets.max()+20e-3]))
print f0s
print offsets*1e6
print len(f0s)
if False:
from kid_readout.utils.parse_srs import get_all_temperature_data
while True:
temp = get_all_temperature_data()[1][-1]
print "mk stage at", temp
if temp > 0.348:
break
time.sleep(300)
time.sleep(600)
start = time.time()
use_fmin = True
attenlist = np.linspace(33,45,5)-6
#attenlist = [44.0]
#attenlist = attenlist[:4]
for atten in attenlist:
print "setting attenuator to",atten
ri.set_dac_attenuator(atten)
measured_freqs = sweeps.prepare_sweep(ri,f0binned,offsets,nsamp=nsamp)
print "loaded waveforms in", (time.time()-start),"seconds"
sweep_data = sweeps.do_prepared_sweep(ri, nchan_per_step=atonce, reads_per_step=8)
orig_sweep_data = sweep_data
meas_cfs = []
idxs = []
delays = []
for m in range(len(f0s)):
fr,s21,errors = sweep_data.select_by_freq(f0s[m])
thiscf = f0s[m]
res = fit_best_resonator(fr[1:-1],s21[1:-1],errors=errors[1:-1]) #Resonator(fr,s21,errors=errors)
delay = res.delay
delays.append(delay)
s21 = s21*np.exp(2j*np.pi*res.delay*fr)
res = fit_best_resonator(fr,s21,errors=errors)
fmin = fr[np.abs(s21).argmin()]
print "s21 fmin", fmin, "original guess",thiscf,"this fit", res.f_0, "delay",delay,"resid delay",res.delay
if use_fmin:
meas_cfs.append(fmin)
else:
if abs(res.f_0 - thiscf) > 0.1:
if abs(fmin - thiscf) > 0.1:
print "using original guess"
meas_cfs.append(thiscf)
else:
print "using fmin"
meas_cfs.append(fmin)
else:
print "using this fit"
meas_cfs.append(res.f_0)
idx = np.unravel_index(abs(measured_freqs - meas_cfs[-1]).argmin(),measured_freqs.shape)
idxs.append(idx)
delay = np.median(delays)
print "median delay is ",delay
nsamp = 2**22
step = 1
f0binned_meas = np.round(f0s*nsamp/512.0)*512.0/nsamp
offset_bins = np.array([-8,-4,-2,-1,0,1,2,4])#np.arange(-4,4)*step
offset_bins = np.concatenate(([-40,-20],offset_bins,[20,40]))
offsets = offset_bins*512.0/nsamp
meas_cfs = np.array(meas_cfs)
f0binned_meas = np.round(meas_cfs*nsamp/512.0)*512.0/nsamp
f0s = f0binned_meas
measured_freqs = sweeps.prepare_sweep(ri,f0binned_meas,offsets,nsamp=nsamp)
print "loaded updated waveforms in", (time.time()-start),"seconds"
sys.stdout.flush()
time.sleep(1)
df = data_file.DataFile(suffix=suffix)
df.log_hw_state(ri)
sweep_data = sweeps.do_prepared_sweep(ri, nchan_per_step=atonce, reads_per_step=8, sweep_data=orig_sweep_data)
df.add_sweep(sweep_data)
meas_cfs = []
idxs = []
for m in range(len(f0s)):
fr,s21,errors = sweep_data.select_by_freq(f0s[m])
thiscf = f0s[m]
s21 = s21*np.exp(2j*np.pi*delay*fr)
res = fit_best_resonator(fr,s21,errors=errors) #Resonator(fr,s21,errors=errors)
fmin = fr[np.abs(s21).argmin()]
print "s21 fmin", fmin, "original guess",thiscf,"this fit", res.f_0
if use_fmin:
meas_cfs.append(fmin)
else:
if abs(res.f_0 - thiscf) > 0.1:
if abs(fmin - thiscf) > 0.1:
print "using original guess"
meas_cfs.append(thiscf)
else:
print "using fmin"
meas_cfs.append(fmin)
else:
print "using this fit"
meas_cfs.append(res.f_0)
idx = np.unravel_index(abs(measured_freqs - meas_cfs[-1]).argmin(),measured_freqs.shape)
idxs.append(idx)
print meas_cfs
ri.add_tone_freqs(np.array(meas_cfs))
ri.select_bank(ri.tone_bins.shape[0]-1)
ri._sync()
time.sleep(0.5)
#raw_input("turn on LED take data")
df.log_hw_state(ri)
nsets = len(meas_cfs)/atonce
tsg = None
for iset in range(nsets):
selection = range(len(meas_cfs))[iset::nsets]
ri.select_fft_bins(selection)
ri._sync()
time.sleep(0.2)
t0 = time.time()
dmod,addr = ri.get_data_seconds(30,demod=True)
print nsets,iset,tsg
tsg = df.add_timestream_data(dmod, ri, t0, tsg=tsg)
df.sync()
df.nc.close()
print "completed in",((time.time()-start)/60.0),"minutes"<|fim▁end|> | #f0s = f0s*(0.9995)
|
<|file_name|>BothBlockingAndNonBlockingOnClassTest.java<|end_file_name|><|fim▁begin|>package org.jboss.resteasy.reactive.server.vertx.test;
import static org.junit.jupiter.api.Assertions.fail;
import io.smallrye.common.annotation.Blocking;
import io.smallrye.common.annotation.NonBlocking;
import java.util.function.Supplier;
import javax.enterprise.inject.spi.DeploymentException;
import javax.ws.rs.Path;
import org.jboss.resteasy.reactive.server.vertx.test.framework.ResteasyReactiveUnitTest;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
public class BothBlockingAndNonBlockingOnClassTest {
@RegisterExtension
static ResteasyReactiveUnitTest test = new ResteasyReactiveUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(Resource.class);
}
}).setExpectedException(DeploymentException.class);
@Test
public void test() {
fail("Should never have been called");
}
@Path("test")
@Blocking
@NonBlocking
public static class Resource {
@Path("hello")<|fim▁hole|>}<|fim▁end|> | public String hello() {
return "hello";
}
} |
"""URL routing for the cats app: list/detail views for cats and breeds."""
from django.conf.urls import url

from cats.views.cat import (
    CatList,
    CatDetail
)
from cats.views.breed import (
    BreedList,
    BreedDetail
)

urlpatterns = [
    # Cats URL's
    url(r'^cats/$', CatList.as_view(), name='list'),
    url(r'^cats/(?P<pk>\d+)/$', CatDetail.as_view(), name='detail'),

    # Breeds URL's
    url(r'^breeds/$', BreedList.as_view(), name='list_breeds'),
    url(r'^breeds/(?P<pk>\d+)/$', BreedDetail.as_view(), name='detail_breed'),
]
<|file_name|>ResizableBehaviorTestCase.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2009 WiQuery team
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.odlabs.wiquery.ui.resizable;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.panel.Panel;
import org.junit.Before;
import org.junit.Test;
import org.odlabs.wiquery.core.options.LiteralOption;
import org.odlabs.wiquery.tester.WiQueryTestCase;
import org.odlabs.wiquery.ui.DivTestPanel;
import org.odlabs.wiquery.ui.resizable.ResizableContainment.ElementEnum;
/**
* Test on {@link ResizableBehavior}
*
* @author Julien Roche
*/
public class ResizableBehaviorTestCase extends WiQueryTestCase
{
// Properties
private ResizableBehavior resizableBehavior;
@Override
@Before
public void setUp()
{
super.setUp();
resizableBehavior = new ResizableBehavior();
Panel panel = new DivTestPanel("panelId");
WebMarkupContainer component = new WebMarkupContainer("anId");
component.setMarkupId("anId");
component.add(resizableBehavior);
panel.add(component);
tester.startComponentInPage(panel);
}
/**
* Test method for {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#destroy()}
* .
*/
@Test
public void testDestroy()
{
assertNotNull(resizableBehavior.destroy());
assertEquals(resizableBehavior.destroy().render().toString(),
"$('#anId').resizable('destroy');");
}
/**
* Test method for {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#disable()}
* .
*/
@Test
public void testDisable()
{
assertNotNull(resizableBehavior.disable());
assertEquals(resizableBehavior.disable().render().toString(),
"$('#anId').resizable('disable');");
}
/**
* Test method for {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#enable()}.
*/
@Test
public void testEnable()
{
assertNotNull(resizableBehavior.enable());
assertEquals(resizableBehavior.enable().render().toString(),
"$('#anId').resizable('enable');");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getAlsoResizeComplex()} .
*/
@Test
public void testGetAlsoResizeComplex()
{
assertNull(resizableBehavior.getAlsoResizeComplex());
resizableBehavior.setAlsoResize(new ResizableAlsoResize(new LiteralOption("div")));
assertNotNull(resizableBehavior.getAlsoResizeComplex());
assertEquals(resizableBehavior.getAlsoResizeComplex().getJavascriptOption().toString(),
"'div'");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getAnimateEasing()} .
*/
@Test
public void testGetAnimateEasing()
{
assertEquals(resizableBehavior.getAnimateEasing(), "swing");
resizableBehavior.setAnimateEasing("slide");
assertEquals(resizableBehavior.getAnimateEasing(), "slide");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getAnimateDuration()} .
*/
@Test
public void testGetAnimateDuration()
{
assertNotNull(resizableBehavior.getAnimateDuration());
assertEquals(resizableBehavior.getAnimateDuration().getJavascriptOption().toString(),
"'slow'");
resizableBehavior.setAnimateDuration(new ResizableAnimeDuration(1000));
assertEquals(resizableBehavior.getAnimateDuration().getJavascriptOption().toString(),
"1000");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getAspectRatio()} .
*/
@Test
public void testGetAspectRatio()
{
assertNull(resizableBehavior.getAspectRatio());
resizableBehavior.setAspectRatio(new ResizableAspectRatio(true));
assertNotNull(resizableBehavior.getAspectRatio());
assertEquals(resizableBehavior.getAspectRatio().getJavascriptOption().toString(), "true");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getCancel()}.
*/
@Test
public void testGetCancel()
{
assertEquals(resizableBehavior.getCancel(), "input,option");
resizableBehavior.setCancel("input");
assertEquals(resizableBehavior.getCancel(), "input");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getContainment()} .
*/
@Test
public void testGetContainment()
{
assertNull(resizableBehavior.getContainment());
resizableBehavior.setContainment(new ResizableContainment(ElementEnum.PARENT));
assertNotNull(resizableBehavior.getContainment());
assertEquals(resizableBehavior.getContainment().getJavascriptOption().toString(),
"'parent'");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getDelay()}.
*/
@Test
public void testGetDelay()
{
assertEquals(resizableBehavior.getDelay(), 0);
resizableBehavior.setDelay(5);
assertEquals(resizableBehavior.getDelay(), 5);
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getDistance()}.
*/
@Test
public void testGetDistance()
{
assertEquals(resizableBehavior.getDistance(), 1);
resizableBehavior.setDistance(5);
assertEquals(resizableBehavior.getDistance(), 5);
}
/**
* Test method for {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getGrid()}
* .
*/
@Test
public void testGetGrid()
{
assertNull(resizableBehavior.getGrid());
resizableBehavior.setGrid(5, 6);
assertNotNull(resizableBehavior.getGrid());
assertEquals(resizableBehavior.getGrid().getJavascriptOption().toString(), "[5,6]");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getHandles()}.
*/
@Test
public void testGetHandles()
{
assertNotNull(resizableBehavior.getHandles());
assertEquals(resizableBehavior.getHandles().getJavascriptOption().toString(), "'e,s,se'");
resizableBehavior.setHandles(new ResizableHandles(new LiteralOption("e,s")));
assertEquals(resizableBehavior.getHandles().getJavascriptOption().toString(), "'e,s'");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getHelper()}.
*/
@Test
public void testGetHelper()
{
assertNull(resizableBehavior.getHelper());
resizableBehavior.setHelper(".aClass");
assertEquals(resizableBehavior.getHelper(), ".aClass");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getMaxHeight()}.
*/
@Test<|fim▁hole|> resizableBehavior.setMaxHeight(100);
assertEquals(resizableBehavior.getMaxHeight(), 100);
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getMaxWidth()}.
*/
@Test
public void testGetMaxWidth()
{
assertEquals(resizableBehavior.getMaxWidth(), 0);
resizableBehavior.setMaxWidth(100);
assertEquals(resizableBehavior.getMaxWidth(), 100);
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getMinHeight()}.
*/
@Test
public void testGetMinHeight()
{
assertEquals(resizableBehavior.getMinHeight(), 10);
resizableBehavior.setMinHeight(100);
assertEquals(resizableBehavior.getMinHeight(), 100);
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getMinWidth()}.
*/
@Test
public void testGetMinWidth()
{
assertEquals(resizableBehavior.getMinWidth(), 10);
resizableBehavior.setMinWidth(100);
assertEquals(resizableBehavior.getMinWidth(), 100);
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#getOptions()}.
*/
@Test
public void testGetOptions()
{
assertNotNull(resizableBehavior.getOptions());
assertEquals(resizableBehavior.getOptions().getJavaScriptOptions().toString(), "{}");
resizableBehavior.setAnimate(true);
assertEquals(resizableBehavior.getOptions().getJavaScriptOptions().toString(),
"{animate: true}");
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#isAnimate()}.
*/
@Test
public void testIsAnimate()
{
assertFalse(resizableBehavior.isAnimate());
resizableBehavior.setAnimate(true);
assertTrue(resizableBehavior.isAnimate());
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#isAutoHide()}.
*/
@Test
public void testIsAutoHide()
{
assertFalse(resizableBehavior.isAutoHide());
resizableBehavior.setAutoHide(true);
assertTrue(resizableBehavior.isAutoHide());
}
/**
* Test method for
* {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#isDisabled()}.
*/
@Test
public void testIsDisabled()
{
assertFalse(resizableBehavior.isDisabled());
resizableBehavior.setDisabled(true);
assertTrue(resizableBehavior.isDisabled());
}
/**
* Test method for {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#isGhost()}
* .
*/
@Test
public void testIsGhost()
{
assertFalse(resizableBehavior.isGhost());
resizableBehavior.setGhost(true);
assertTrue(resizableBehavior.isGhost());
}
/**
* Test method for {@link org.odlabs.wiquery.ui.resizable.ResizableBehavior#widget()}.
*/
@Test
public void testWidget()
{
assertNotNull(resizableBehavior.widget());
assertEquals(resizableBehavior.widget().render().toString(),
"$('#anId').resizable('widget');");
}
}<|fim▁end|> | public void testGetMaxHeight()
{
assertEquals(resizableBehavior.getMaxHeight(), 0); |
<|file_name|>camera.rs<|end_file_name|><|fim▁begin|>// See LICENSE file for copyright and license details.
use std::f32::consts::{PI};
use num::{Float};
use cgmath::{perspective, rad, Matrix4, Vector, Vector3, Rad};
use common::types::{Size2, ZFloat};
use common::misc::{clamp};
use zgl::{Zgl};
use types::{WorldPos};
pub struct Camera {
x_angle: Rad<ZFloat>,
z_angle: Rad<ZFloat>,
pos: WorldPos,
max_pos: WorldPos,
zoom: ZFloat,
projection_mat: Matrix4<ZFloat>,
}
/// Build a perspective projection matrix for the given window size.
fn get_projection_mat(win_size: &Size2) -> Matrix4<ZFloat> {
    // 45-degree vertical field of view, window aspect ratio.
    let fov = rad(PI / 4.0);
    let aspect = win_size.w as ZFloat / win_size.h as ZFloat;
    // Near/far clip planes.
    let (near, far) = (0.1, 100.0);
    perspective(fov, aspect, near, far)
}
impl Camera {
pub fn new(win_size: &Size2) -> Camera {
Camera {
x_angle: rad(PI / 4.0),
z_angle: rad(0.0),
pos: WorldPos{v: Vector::from_value(0.0)},
max_pos: WorldPos{v: Vector::from_value(0.0)},
zoom: 20.0,
projection_mat: get_projection_mat(win_size),
}
}
    /// Compose the view-projection matrix. The camera transform is applied
    /// to the projection in this order: zoom translation, x rotation,
    /// z rotation, world offset — the order matters, do not reorder.
    pub fn mat(&self, zgl: &Zgl) -> Matrix4<ZFloat> {
        let mut m = self.projection_mat;
        // Pull the scene away from the eye along -Z by the zoom distance.
        m = zgl.tr(m, &Vector3{x: 0.0, y: 0.0, z: -self.zoom});
        // Inverse camera rotations (negated angles).
        m = zgl.rot_x(m, &-self.x_angle);
        m = zgl.rot_z(m, &-self.z_angle);
        m = zgl.tr(m, &self.pos.v);
        m
    }
<|fim▁hole|> self.z_angle = self.z_angle + rad(PI * 2.0);
}
while self.z_angle > rad(PI * 2.0) {
self.z_angle = self.z_angle - rad(PI * 2.0);
}
}
pub fn add_vertical_angle(&mut self, angle: Rad<ZFloat>) {
self.x_angle = self.x_angle + angle;
let min = rad(PI / 18.0);
let max = rad(PI / 4.0);
self.x_angle = clamp(self.x_angle, min, max);
}
    /// Keep the camera position within [max_pos, 0] on both axes.
    /// NOTE(review): this assumes max_pos coordinates are non-positive
    /// (they act as the lower bound) — confirm against callers.
    fn clamp_pos(&mut self) {
        self.pos.v.x = clamp(self.pos.v.x, self.max_pos.v.x, 0.0);
        self.pos.v.y = clamp(self.pos.v.y, self.max_pos.v.y, 0.0);
    }

    /// Move the camera to `pos`, clamped to the allowed area.
    pub fn set_pos(&mut self, pos: WorldPos) {
        self.pos = pos;
        self.clamp_pos();
    }

    /// Set the movement bound used by clamp_pos (not re-clamped here).
    pub fn set_max_pos(&mut self, max_pos: WorldPos) {
        self.max_pos = max_pos;
    }

    /// Multiplicative zoom, clamped to [10, 40].
    pub fn change_zoom(&mut self, ratio: ZFloat) {
        self.zoom *= ratio;
        self.zoom = clamp(self.zoom, 10.0, 40.0);
    }

    pub fn get_z_angle(&self) -> &Rad<ZFloat> {
        &self.z_angle
    }

    pub fn get_x_angle(&self) -> &Rad<ZFloat> {
        &self.x_angle
    }
// TODO: rename to 'move'
pub fn move_camera(&mut self, angle: Rad<ZFloat>, speed: ZFloat) {
let diff = (self.z_angle - angle).s;
let dx = diff.sin();
let dy = diff.cos();
// TODO: handle zoom
// self.pos.v.x -= dy * speed * self.zoom;
// self.pos.v.y -= dx * speed * self.zoom;
self.pos.v.x -= dy * speed;
self.pos.v.y -= dx * speed;
self.clamp_pos();
}
    /// Rebuild the cached projection matrix, e.g. after a window resize.
    pub fn regenerate_projection_mat(&mut self, win_size: &Size2) {
        self.projection_mat = get_projection_mat(win_size);
    }
}
// vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:<|fim▁end|> | pub fn add_horizontal_angle(&mut self, angle: Rad<ZFloat>) {
self.z_angle = self.z_angle + angle;
while self.z_angle < rad(0.0) { |
<|file_name|>youtube.py<|end_file_name|><|fim▁begin|>import logging
from ..models import Activity
from .date import activity_stream_date_to_datetime, datetime_to_string
log = logging.getLogger(__name__)
def activity_from_dict(data):
    """Build an Activity model instance from a raw YouTube result dict."""
    log.debug("Converting YouTube dict to Activity Model")
    return Activity.from_activity_dict(activity_dict_from_dict(data))
def activity_dict_from_dict(blob):
    """Convert a raw YouTube Data API search result into an Activity
    Streams 2.0 activity dict.

    The video becomes the activity "object" (YouTube-specific fields are
    kept under "youtube:"-prefixed keys) and the uploading channel is the
    "actor".  Assumes *blob* has the search-result shape: top-level
    "snippet", "id", "etag", "kind" keys — TODO confirm for other
    endpoints.
    """
    log.debug("Converting YouTube dict to activity dict: %s", blob)
    snippet = blob.get("snippet")
    video_id = blob.get("id").get("videoId")
    video_url = "https://www.youtube.com/watch?v={}".format(video_id)

    stream_object = {}
    stream_object["@context"] = "http://www.w3.org/ns/activitystreams"
    stream_object["@type"] = "Activity"

    # Normalize the published timestamp through the shared date helpers.
    date = activity_stream_date_to_datetime(snippet.get("publishedAt"))
    stream_object["published"] = datetime_to_string(date)

    stream_object["provider"] = {
        "@type": "Service",
        "displayName": "YouTube"
    }
    stream_object["actor"] = {
        "@type": "Person",
        "@id": "https://www.youtube.com/user/{}".format(snippet.get("channelTitle")),
        "displayName": snippet.get("channelTitle"),
    }
    stream_object["object"] = {
        "@id": video_url,
        "@type": "Video",
        "displayName": snippet.get("title"),
        "url": [{
            "href": video_url,
            "@type": "Link"
        }],
        "content": snippet.get("description"),
        "youtube:etag": blob.get("etag"),
        "youtube:kind": blob.get("kind"),
        "youtube:id:kind": blob.get("id").get("kind"),
        "youtube:channelId": snippet.get("channelId"),
        "youtube:liveBroadcastContent": snippet.get("liveBroadcastContent"),
        # One Link entry per thumbnail size YouTube provides.
        "image": [
            {
                "@type": "Link",
                "href": snippet.get("thumbnails").get(resolution).get("url"),
                "mediaType": "image/jpeg",
                "youtube:resolution": resolution
            }
            for resolution in ("default", "medium", "high")
        ]
    }
    return stream_object
"""
"""
"""
{
"@context": "http://www.w3.org/ns/activitystreams",
"@type": "Activity", ------ Abstract wrapper
"published": "2015-02-10T15:04:55Z",
"provider": {
"@type": "Service",
"displayName": "Twitter|FaceBook|Instagram|YouTube"
},
"actor": {
"@type": "Person",
"@id": "https://www.twitter.com/{{user.screen_name}}
"displayName": "Martin Smith",
"url": "http://example.org/martin",
"image": {
"@type": "Link",
"href": "http://example.org/martin/image.jpg",
"mediaType": "image/jpeg"
}
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Note",
"url": "http://example.org/blog/2011/02/entry",
"content": "This is a short note"
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Video",
"displayName": "A Simple Video",
"url": "http://example.org/video.mkv",
"duration": "PT2H"
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Image",
"displayName": "A Simple Image",
"content": "any messages?"
"url": [
{
"@type": "Link",
"href": "http://example.org/image.jpeg",
"mediaType": "image/jpeg"
},
{
"@type": "Link",
"href": "http://example.org/image.png",
"mediaType": "image/png"
}
]
},
}
"""<|fim▁end|> | |
<|file_name|>checkJSPluginExist.js<|end_file_name|><|fim▁begin|>/*
* This code it's help you to check JS plugin function (e.g. jQuery) exist.
* When function not exist, the code will auto reload JS plugin from your setting.
*
* plugin_name: It's your plugin function name (e.g. jQuery). The type is string.
* reload_url: It's your reload plugin function URL. The type is string.
*
* Copyright 2015, opoepev (Matt, Paul.Lu, Yi-Chun Lu)
* Free to use and abuse under the MIT license.
* http://www.opensource.org/licenses/mit-license.php
*/
//Main code
// Returns true when the plugin function is present; otherwise injects a
// <script> tag pointing at reload_url into <head> and returns false.
// When depend_plugin_name is given, the plugin is looked up as a member of
// that object (e.g. jQuery plugins on window.jQuery).
var checkJSPluginExist = function (plugin_name, reload_url, depend_plugin_name) {
    //window[plugin_name] || document.write('<script src="' + reload_url + '">\x3C/script>');
    if (typeof depend_plugin_name !== 'undefined') {
        if (typeof window[depend_plugin_name][plugin_name] !== "function") {
            var tag = document.createElement('script');
            tag.src = reload_url;
            var headerElementTag = document.getElementsByTagName('head')[0];
            headerElementTag.appendChild(tag);
            return false;
        }
    } else {
        if (typeof window[plugin_name] !== "function") {
            var tag = document.createElement('script');
            tag.src = reload_url;
            var headerElementTag = document.getElementsByTagName('head')[0];
            headerElementTag.appendChild(tag);
            return false;
        }
    }
    return true;
};
<|file_name|>mesonlib.py<|end_file_name|><|fim▁begin|># Copyright 2012-2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library of random helper functionality."""
import functools
import sys
import stat
import time
import platform, subprocess, operator, os, shutil, re
import collections
from enum import Enum
from functools import lru_cache
from mesonbuild import mlog
have_fcntl = False
have_msvcrt = False
# {subproject: project_meson_version}
project_meson_versions = {}
try:
import fcntl
have_fcntl = True
except Exception:
pass
try:<|fim▁hole|> import msvcrt
have_msvcrt = True
except Exception:
pass
from glob import glob
if os.path.basename(sys.executable) == 'meson.exe':
# In Windows and using the MSI installed executable.
python_command = [sys.executable, 'runpython']
else:
python_command = [sys.executable]
meson_command = None
def set_meson_command(mainfile):
    """Record how to re-invoke this Meson installation.

    Populates the module-level ``meson_command`` from *mainfile* (the path
    Meson was started from): a wrapper exe, an absolute mesonmain.py, or a
    script path.
    """
    global python_command
    global meson_command
    # NOTE(review): python_command is only read here, so its `global`
    # declaration is redundant (though harmless).
    # On UNIX-like systems `meson` is a Python script
    # On Windows `meson` and `meson.exe` are wrapper exes
    if not mainfile.endswith('.py'):
        meson_command = [mainfile]
    elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'):
        # Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain
        meson_command = python_command + ['-m', 'mesonbuild.mesonmain']
    else:
        # Either run uninstalled, or full path to meson-script.py
        meson_command = python_command + [mainfile]
    # We print this value for unit tests.
    if 'MESON_COMMAND_TESTS' in os.environ:
        mlog.log('meson_command is {!r}'.format(meson_command))
def is_ascii_string(astring):
    """Return True when *astring* (str or bytes) contains only ASCII.

    Values of any other type are not inspected and report True, matching
    the original behaviour.
    """
    try:
        if isinstance(astring, str):
            astring.encode('ascii')
        if isinstance(astring, bytes):
            astring.decode('ascii')
    except UnicodeError:
        # str.encode raises UnicodeEncodeError while bytes.decode raises
        # UnicodeDecodeError; the original caught only UnicodeDecodeError,
        # so a non-ASCII str escaped as an exception instead of returning
        # False.  UnicodeError is the common base of both.
        return False
    return True
def check_direntry_issues(direntry_array):
    """Warn about non-ASCII directory entries under a non-UTF-8 locale.

    Accepts a single entry or a list of entries; entries may be str or
    bytes (is_ascii_string handles both).
    """
    import locale
    # Warn if the locale is not UTF-8. This can cause various unfixable issues
    # such as os.stat not being able to decode filenames with unicode in them.
    # There is no way to reset both the preferred encoding and the filesystem
    # encoding, so we can just warn about it.
    e = locale.getpreferredencoding()
    if e.upper() != 'UTF-8' and not is_windows():
        if not isinstance(direntry_array, list):
            # Normalize a single entry to a one-element list.
            direntry_array = [direntry_array]
        for de in direntry_array:
            if is_ascii_string(de):
                continue
            mlog.warning('''You are using {!r} which is not a Unicode-compatible '
locale but you are trying to access a file system entry called {!r} which is
not pure ASCII. This may cause problems.
'''.format(e, de), file=sys.stderr)
# Put this in objects that should not get dumped to pickle files
# by accident.
import threading
an_unpicklable_object = threading.Lock()
class MesonException(Exception):
    '''Exceptions thrown by Meson'''

    def get_msg_with_context(self):
        """Return the message, prefixed with 'file:line' context when known."""
        prefix = ''
        if hasattr(self, 'lineno') and hasattr(self, 'file'):
            prefix = get_error_location_string(self.file, self.lineno) + ' '
        return prefix + str(self)


class EnvironmentException(MesonException):
    '''Exceptions thrown while processing and creating the build environment'''
class FileMode:
    """Symbolic install permissions ('rwxr-xr-x') plus optional owner/group,
    convertible to st_mode bits suitable for os.chmod().
    """
    # The first triad is owner perms, the second group perms, the third
    # others.  'r'/'w' grant read/write.  The third slot of each triad
    # encodes execute combined with setuid/setgid ('s'/'S') or, for the
    # others triad, the sticky bit ('t'/'T'); lowercase also grants execute,
    # uppercase does not.  '-' denies the permission.
    #
    # The meanings of 'rwx' perms is not obvious for directories; see:
    # https://www.hackinglinuxexposed.com/articles/20030424.html
    #
    # For information on this notation such as setuid/setgid/sticky bits, see:
    # https://en.wikipedia.org/wiki/File_system_permissions#Symbolic_notation
    symbolic_perms_regex = re.compile('[r-][w-][xsS-]' # Owner perms
                                      '[r-][w-][xsS-]' # Group perms
                                      '[r-][w-][xtT-]') # Others perms

    # One lookup table per character position, mapping each allowed symbol
    # to the st_mode bits it contributes.  '-' (or any unlisted character)
    # contributes nothing.
    _PERM_BITS = (
        {'r': stat.S_IRUSR},
        {'w': stat.S_IWUSR},
        {'x': stat.S_IXUSR,
         'S': stat.S_ISUID,
         's': stat.S_IXUSR | stat.S_ISUID},
        {'r': stat.S_IRGRP},
        {'w': stat.S_IWGRP},
        {'x': stat.S_IXGRP,
         'S': stat.S_ISGID,
         's': stat.S_IXGRP | stat.S_ISGID},
        {'r': stat.S_IROTH},
        {'w': stat.S_IWOTH},
        {'x': stat.S_IXOTH,
         'T': stat.S_ISVTX,
         't': stat.S_IXOTH | stat.S_ISVTX},
    )

    def __init__(self, perms=None, owner=None, group=None):
        # perms: symbolic string such as 'rwxr-xr-x', or None to leave the
        # installed file's permissions untouched.
        self.perms_s = perms
        self.perms = self.perms_s_to_bits(perms)
        self.owner = owner
        self.group = group

    def __repr__(self):
        # Bug fix: the format string previously lacked the closing '>'.
        ret = '<FileMode: {!r} owner={} group={}>'
        return ret.format(self.perms_s, self.owner, self.group)

    @classmethod
    def perms_s_to_bits(cls, perms_s):
        '''
        Does the opposite of stat.filemode(): converts strings of the form
        'rwxr-xr-x' to st_mode enums which can be passed to os.chmod().

        Returns -1 when perms_s is None (meaning "do not touch perms");
        raises MesonException for non-string or malformed input.
        '''
        if perms_s is None:
            # No perms specified, we will not touch the permissions
            return -1
        eg = 'rwxr-xr-x'
        if not isinstance(perms_s, str):
            msg = 'Install perms must be a string. For example, {!r}'
            raise MesonException(msg.format(eg))
        if len(perms_s) != 9 or not cls.symbolic_perms_regex.match(perms_s):
            msg = 'File perms {!r} must be exactly 9 chars. For example, {!r}'
            raise MesonException(msg.format(perms_s, eg))
        perms = 0
        for ch, table in zip(perms_s, cls._PERM_BITS):
            perms |= table.get(ch, 0)
        return perms
class File:
    """A source or generated file, identified by (is_built, subdir, fname).

    Attributes are never mutated after construction and the class defines
    __eq__/__hash__, which is what makes the lru_cache-decorated helpers
    below safe to memoize.
    """
    def __init__(self, is_built, subdir, fname):
        # is_built: True when the file lives in the build tree rather than
        # the source tree.
        self.is_built = is_built
        self.subdir = subdir
        self.fname = fname
        assert(isinstance(self.subdir, str))
        assert(isinstance(self.fname, str))

    def __str__(self):
        return self.relative_name()

    def __repr__(self):
        ret = '<File: {0}'
        if not self.is_built:
            ret += ' (not built)'
        ret += '>'
        return ret.format(self.relative_name())

    @staticmethod
    @lru_cache(maxsize=None)
    def from_source_file(source_root, subdir, fname):
        # Validates existence on disk; cached so repeated lookups of the
        # same source file skip the filesystem check.
        if not os.path.isfile(os.path.join(source_root, subdir, fname)):
            raise MesonException('File %s does not exist.' % fname)
        return File(False, subdir, fname)

    @staticmethod
    def from_built_file(subdir, fname):
        # Built files are not checked for existence: they may not exist yet.
        return File(True, subdir, fname)

    @staticmethod
    def from_absolute_file(fname):
        # An absolute path carries no subdir component.
        return File(False, '', fname)

    # NOTE(review): lru_cache on instance methods keeps every File alive for
    # the cache's lifetime; acceptable here only because File objects are
    # small and immutable — confirm cache growth stays bounded.
    @lru_cache(maxsize=None)
    def rel_to_builddir(self, build_to_src):
        # Built files are already expressed relative to the build dir.
        if self.is_built:
            return self.relative_name()
        else:
            return os.path.join(build_to_src, self.subdir, self.fname)

    @lru_cache(maxsize=None)
    def absolute_path(self, srcdir, builddir):
        absdir = srcdir
        if self.is_built:
            absdir = builddir
        return os.path.join(absdir, self.relative_name())

    def endswith(self, ending):
        return self.fname.endswith(ending)

    def split(self, s):
        return self.fname.split(s)

    def __eq__(self, other):
        return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)

    def __hash__(self):
        return hash((self.fname, self.subdir, self.is_built))

    @lru_cache(maxsize=None)
    def relative_name(self):
        # Path relative to the root of whichever tree the file lives in.
        return os.path.join(self.subdir, self.fname)
def get_compiler_for_source(compilers, src):
    """Return the first compiler in *compilers* whose can_compile() accepts
    *src*; raise MesonException when none does."""
    match = next((c for c in compilers if c.can_compile(src)), None)
    if match is None:
        raise MesonException('No specified compiler can handle file {!s}'.format(src))
    return match
def classify_unity_sources(compilers, sources):
    """Group *sources* by the compiler that handles each of them.

    Returns a dict mapping compiler -> list of its sources, in input order.
    """
    compsrclist = {}
    for src in sources:
        comp = get_compiler_for_source(compilers, src)
        compsrclist.setdefault(comp, []).append(src)
    return compsrclist
class OrderedEnum(Enum):
    """
    An Enum which additionally offers homogeneous ordered comparison.

    Comparing members of different enum classes yields NotImplemented, so
    Python falls back to its usual TypeError.
    """
    def __ge__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value >= other.value

    def __gt__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value > other.value

    def __le__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value <= other.value

    def __lt__(self, other):
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value < other.value

# Which of the three machines (build / host / target) an operation refers to.
MachineChoice = OrderedEnum('MachineChoice', ['BUILD', 'HOST', 'TARGET'])

class PerMachine:
    """Holds one value for each of the build, host and target machines."""
    def __init__(self, build, host, target):
        self.build = build
        self.host = host
        self.target = target

    @staticmethod
    def _field_name(machine: MachineChoice):
        # Translate a MachineChoice member into the backing attribute name.
        return {
            MachineChoice.BUILD: 'build',
            MachineChoice.HOST: 'host',
            MachineChoice.TARGET: 'target',
        }[machine]

    def __getitem__(self, machine: MachineChoice):
        return getattr(self, self._field_name(machine))

    def __setitem__(self, machine: MachineChoice, val):
        setattr(self, self._field_name(machine), val)
# Host-platform predicates, all based on platform.system().

def is_osx():
    sysname = platform.system().lower()
    return sysname == 'darwin'

def is_linux():
    sysname = platform.system().lower()
    return sysname == 'linux'

def is_android():
    sysname = platform.system().lower()
    return sysname == 'android'

def is_haiku():
    sysname = platform.system().lower()
    return sysname == 'haiku'

def is_openbsd():
    sysname = platform.system().lower()
    return sysname == 'openbsd'

def is_windows():
    sysname = platform.system().lower()
    # MSYS/MinGW Pythons report e.g. 'mingw64_nt-...' rather than 'windows'.
    return sysname == 'windows' or 'mingw' in sysname

def is_cygwin():
    sysname = platform.system().lower()
    return sysname.startswith('cygwin')

def is_debianlike():
    # Debian and its derivatives ship this marker file.
    return os.path.isfile('/etc/debian_version')

def is_dragonflybsd():
    sysname = platform.system().lower()
    return sysname == 'dragonfly'

def is_freebsd():
    sysname = platform.system().lower()
    return sysname == 'freebsd'
def _get_machine_is_cross(env, is_cross):
    """
    This is not morally correct, but works for now. For cross builds the build
    and host machines differ. `is_cross == true` means the host machine, while
    `is_cross == false` means the build machine. Both are used in practice,
    even though the documentation refers to the host machine implying we should
    hard-code it. For non-cross builds `is_cross == false` is passed but the
    host and build machines are identical so it doesn't matter.

    Users for `for_*` should instead specify up front which machine they want
    and query that like:

        env.machines[MachineChoice.HOST].is_haiku()

    """
    # is_cross selects which MachineInfo to answer for: host when cross,
    # build otherwise (they are identical in a native build).
    for_machine = MachineChoice.HOST if is_cross else MachineChoice.BUILD
    return env.machines[for_machine]
# Deprecated per-OS host-machine predicates.  Each resolves the relevant
# machine via _get_machine_is_cross() and delegates the question; new code
# should query env.machines[for_machine] directly as the docstrings say.

def for_windows(is_cross, env):
    """
    Host machine is windows?

    Deprecated: Please use `env.machines[for_machine].is_windows()`.

    Note: 'host' is the machine on which compiled binaries will run
    """
    return _get_machine_is_cross(env, is_cross).is_windows()

def for_cygwin(is_cross, env):
    """
    Host machine is cygwin?

    Deprecated: Please use `env.machines[for_machine].is_cygwin()`.

    Note: 'host' is the machine on which compiled binaries will run
    """
    return _get_machine_is_cross(env, is_cross).is_cygwin()

def for_linux(is_cross, env):
    """
    Host machine is linux?

    Deprecated: Please use `env.machines[for_machine].is_linux()`.

    Note: 'host' is the machine on which compiled binaries will run
    """
    return _get_machine_is_cross(env, is_cross).is_linux()

def for_darwin(is_cross, env):
    """
    Host machine is Darwin (iOS/OS X)?

    Deprecated: Please use `env.machines[for_machine].is_darwin()`.

    Note: 'host' is the machine on which compiled binaries will run
    """
    return _get_machine_is_cross(env, is_cross).is_darwin()

def for_android(is_cross, env):
    """
    Host machine is Android?

    Deprecated: Please use `env.machines[for_machine].is_android()`.

    Note: 'host' is the machine on which compiled binaries will run
    """
    return _get_machine_is_cross(env, is_cross).is_android()

def for_haiku(is_cross, env):
    """
    Host machine is Haiku?

    Deprecated: Please use `env.machines[for_machine].is_haiku()`.

    Note: 'host' is the machine on which compiled binaries will run
    """
    return _get_machine_is_cross(env, is_cross).is_haiku()

def for_openbsd(is_cross, env):
    """
    Host machine is OpenBSD?

    Deprecated: Please use `env.machines[for_machine].is_openbsd()`.

    Note: 'host' is the machine on which compiled binaries will run
    """
    return _get_machine_is_cross(env, is_cross).is_openbsd()
def exe_exists(arglist):
    """Return True when running *arglist* succeeds (exit status 0).

    A missing executable (FileNotFoundError) yields False; other launch
    errors (e.g. PermissionError) propagate, matching the original
    behaviour.  Uses subprocess.run with DEVNULL instead of Popen +
    PIPE/communicate: the output was never read, so there is no need to
    buffer it.
    """
    try:
        p = subprocess.run(arglist, stdout=subprocess.DEVNULL,
                           stderr=subprocess.DEVNULL)
        return p.returncode == 0
    except FileNotFoundError:
        pass
    return False
def detect_vcs(source_dir):
    """Walk from @source_dir up to the filesystem root looking for a
    version-control checkout.

    Returns the matching VCS descriptor dict (with 'wc_dir' set to the
    working-copy root) or None when no known VCS is found.  A VCS only
    counts when both its metadata directory exists and its command-line
    tool is installed.
    """
    vcs_systems = [
        dict(name = 'git',        cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
        dict(name = 'mercurial',  cmd = 'hg',  repo_dir = '.hg',  get_rev = 'hg id -i',               rev_regex = '(.*)', dep = '.hg/dirstate'),
        dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info',               rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
        dict(name = 'bazaar',     cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno',              rev_regex = '(.*)', dep = '.bzr'),
    ]
    segs = source_dir.replace('\\', '/').split('/')
    # Check the full path first, then each ancestor, ending with '' (cwd).
    for depth in reversed(range(len(segs) + 1)):
        curdir = '/'.join(segs[:depth])
        for vcs in vcs_systems:
            if os.path.isdir(os.path.join(curdir, vcs['repo_dir'])) and shutil.which(vcs['cmd']):
                vcs['wc_dir'] = curdir
                return vcs
    return None
# a helper class which implements the same version ordering as RPM
@functools.total_ordering
class Version:
    def __init__(self, s):
        """Parse version string *s* into comparable component sequences."""
        self._s = s

        # split into numeric, alphabetic and non-alphanumeric sequences
        sequences = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
        # non-alphanumeric separators are discarded
        sequences = [m for m in sequences if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
        # numeric sequences have leading zeroes discarded
        sequences = [re.sub(r'^0+(\d)', r'\1', m.group(1), 1) for m in sequences]

        self._v = sequences

    def __str__(self):
        return '%s (V=%s)' % (self._s, str(self._v))

    def __lt__(self, other):
        # functools.total_ordering derives the remaining comparisons
        # from __lt__ and __eq__.
        return self.__cmp__(other) == -1

    def __eq__(self, other):
        return self.__cmp__(other) == 0

    def __cmp__(self, other):
        """Three-way compare (-1/0/1) using RPM-style component ordering."""
        def cmp(a, b):
            return (a > b) - (a < b)

        # compare each sequence in order
        for i in range(0, min(len(self._v), len(other._v))):
            # sort a non-digit sequence before a digit sequence
            if self._v[i].isdigit() != other._v[i].isdigit():
                return 1 if self._v[i].isdigit() else -1

            # compare as numbers
            if self._v[i].isdigit():
                # because leading zeros have already been removed, if one number
                # has more digits, it is greater
                c = cmp(len(self._v[i]), len(other._v[i]))
                if c != 0:
                    return c
                # fallthrough

            # compare lexicographically
            c = cmp(self._v[i], other._v[i])
            if c != 0:
                return c

        # if equal length, all components have matched, so equal
        # otherwise, the version with a suffix remaining is greater
        return cmp(len(self._v), len(other._v))
def _version_extract_cmpop(vstr2):
if vstr2.startswith('>='):
cmpop = operator.ge
vstr2 = vstr2[2:]
elif vstr2.startswith('<='):
cmpop = operator.le
vstr2 = vstr2[2:]
elif vstr2.startswith('!='):
cmpop = operator.ne
vstr2 = vstr2[2:]
elif vstr2.startswith('=='):
cmpop = operator.eq
vstr2 = vstr2[2:]
elif vstr2.startswith('='):
cmpop = operator.eq
vstr2 = vstr2[1:]
elif vstr2.startswith('>'):
cmpop = operator.gt
vstr2 = vstr2[1:]
elif vstr2.startswith('<'):
cmpop = operator.lt
vstr2 = vstr2[1:]
else:
cmpop = operator.eq
return (cmpop, vstr2)
def version_compare(vstr1, vstr2):
    """Return True when version @vstr1 satisfies condition @vstr2
    (e.g. version_compare('1.2.3', '>=1.2'))."""
    cmpop, stripped = _version_extract_cmpop(vstr2)
    return cmpop(Version(vstr1), Version(stripped))
def version_compare_many(vstr1, conditions):
    """Check @vstr1 against one or many version conditions.

    Returns (all_satisfied, unsatisfied_conditions, satisfied_conditions).
    """
    if not isinstance(conditions, (list, tuple, frozenset)):
        conditions = [conditions]
    found = []
    not_found = []
    for req in conditions:
        bucket = found if version_compare(vstr1, req) else not_found
        bucket.append(req)
    return not_found == [], not_found, found
# determine if the minimum version satisfying the condition |condition| exceeds
# the minimum version for a feature |minimum|
def version_compare_condition_with_min(condition, minimum):
    """Return True when every version satisfying @condition is at least
    @minimum (i.e. the feature with minimum version @minimum is safe to
    use under @condition).

    Upper-bound style conditions ('<', '<=', '!=') can always admit
    versions below the minimum, so they return False outright.
    """
    if condition.startswith('>='):
        cmpop = operator.le
        condition = condition[2:]
    elif condition.startswith('<='):
        return False
    elif condition.startswith('!='):
        return False
    elif condition.startswith('=='):
        cmpop = operator.le
        condition = condition[2:]
    elif condition.startswith('='):
        cmpop = operator.le
        condition = condition[1:]
    elif condition.startswith('>'):
        cmpop = operator.lt
        condition = condition[1:]
    elif condition.startswith('<'):
        return False
    else:
        cmpop = operator.le

    # Declaring a project(meson_version: '>=0.46') and then using features in
    # 0.46.0 is valid, because (knowing the meson versioning scheme) '0.46.0' is
    # the lowest version which satisfies the constraint '>=0.46'.
    #
    # But this will fail here, because the minimum version required by the
    # version constraint ('0.46') is strictly less (in our version comparison)
    # than the minimum version needed for the feature ('0.46.0').
    #
    # Map versions in the constraint of the form '0.46' to '0.46.0', to embed
    # this knowledge of the meson versioning scheme.
    condition = condition.strip()
    # Raw string with an escaped dot: the previous pattern '^\d+.\d+$' let the
    # unescaped '.' match any character (so e.g. '12a34' matched) and the
    # non-raw '\d' escapes are deprecated syntax on modern Python.
    if re.match(r'^\d+\.\d+$', condition):
        condition += '.0'

    return cmpop(Version(minimum), Version(condition))
def default_libdir():
    """Guess the default library install directory for this system.

    Debian-like systems get the multiarch 'lib/<triplet>' path, systems
    with a real /usr/lib64 get 'lib64', everything else gets 'lib'.
    """
    if is_debianlike():
        try:
            proc = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.DEVNULL)
            stdo, _ = proc.communicate()
            if proc.returncode == 0:
                return 'lib/' + stdo.decode().strip()
        except Exception:
            # Best effort: fall back to the generic heuristics below.
            pass
    if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
        return 'lib64'
    return 'lib'
def default_libexecdir():
    """Return the default libexec directory name.

    There is no way to auto-detect this, so it must be set at build time.
    """
    return 'libexec'
def default_prefix():
    """Return the default install prefix for the current platform."""
    if is_windows():
        return 'c:/'
    return '/usr/local'
def get_library_dirs():
    """Return the list of system library search directories for the
    current platform."""
    if is_windows():
        return ['C:/mingw/lib'] # Fixme
    if is_osx():
        return ['/usr/lib'] # Fix me as well.
    # The following is probably Debian/Ubuntu specific.
    # /usr/local/lib is first because it contains stuff
    # installed by the sysadmin and is probably more up-to-date
    # than /usr/lib. If you feel that this search order is
    # problematic, please raise the issue on the mailing list.
    unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
    plat = subprocess.check_output(['uname', '-m']).decode().strip()
    # This is a terrible hack. I admit it and I'm really sorry.
    # I just don't know what the correct solution is.
    if plat == 'i686':
        plat = 'i386'
    if plat.startswith('arm'):
        plat = 'arm'
    unixdirs += glob('/usr/lib/' + plat + '*')
    if os.path.exists('/usr/lib64'):
        unixdirs.append('/usr/lib64')
    # Add the /lib/<plat>* glob exactly once: the original code appended it
    # twice (before and after the /lib64 check), duplicating every matching
    # directory in the search path.
    unixdirs += glob('/lib/' + plat + '*')
    if os.path.exists('/lib64'):
        unixdirs.append('/lib64')
    return unixdirs
def has_path_sep(name, sep='/\\'):
    'Checks if any of the specified @sep path separators are in @name'
    return any(ch in name for ch in sep)
def do_replacement(regex, line, format, confdata):
    """Substitute @VAR@ (meson) or ${VAR} (cmake) templates in *line*.

    Returns (substituted_line, set_of_variable_names_not_in_confdata).
    Missing variables are replaced with the empty string.  *regex* must be
    the pattern built by the caller for the chosen *format*.
    """
    missing_variables = set()
    # Tags differ per format: '@VAR@' for meson, '${VAR}' for cmake.
    start_tag = '@'
    backslash_tag = '\\@'
    if format == 'cmake':
        start_tag = '${'
        backslash_tag = '\\${'

    def variable_replace(match):
        # Pairs of escape characters before '@' or '\@'
        if match.group(0).endswith('\\'):
            num_escapes = match.end(0) - match.start(0)
            return '\\' * (num_escapes // 2)
        # Single escape character and '@'
        elif match.group(0) == backslash_tag:
            return start_tag
        # Template variable to be replaced
        else:
            varname = match.group(1)
            if varname in confdata:
                # confdata entries are (value, description) pairs.
                (var, desc) = confdata.get(varname)
                if isinstance(var, str):
                    pass
                elif isinstance(var, int):
                    var = str(var)
                else:
                    msg = 'Tried to replace variable {!r} value with ' \
                          'something other than a string or int: {!r}'
                    raise MesonException(msg.format(varname, var))
            else:
                # Unknown variable: record it and substitute nothing.
                missing_variables.add(varname)
                var = ''
            return var
    return re.sub(regex, variable_replace, line), missing_variables
def do_mesondefine(line, confdata):
    """Expand one '#mesondefine VAR' line into a C preprocessor line.

    Booleans become '#define'/'#undef', ints and strings become
    '#define VAR value', and an undefined variable becomes a commented
    '#undef'.  Raises MesonException for malformed lines or unsupported
    value types.
    """
    tokens = line.split()
    if len(tokens) != 2:
        raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
    varname = tokens[1]
    try:
        # confdata entries are (value, description) pairs.
        (v, desc) = confdata.get(varname)
    except KeyError:
        return '/* #undef %s */\n' % varname
    # bool must be tested before int (bool is an int subclass).
    if isinstance(v, bool):
        return ('#define %s\n' % varname) if v else ('#undef %s\n' % varname)
    if isinstance(v, int):
        return '#define %s %d\n' % (varname, v)
    if isinstance(v, str):
        return '#define %s %s\n' % (varname, v)
    raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
def do_conf_file(src, dst, confdata, format, encoding='utf-8'):
    """Generate config file *dst* from template *src* using *confdata*.

    Substitutes @VAR@/${VAR} templates and #mesondefine/#cmakedefine
    lines according to *format* ('meson', 'cmake' or 'cmake@').
    Returns (missing_variable_names, confdata_useless) where the latter
    signals that no substitution happened and `copy:` would suffice.
    """
    try:
        with open(src, encoding=encoding) as f:
            data = f.readlines()
    except Exception as e:
        raise MesonException('Could not read input file %s: %s' % (src, str(e)))
    # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
    # Also allow escaping '@' with '\@'
    if format in ['meson', 'cmake@']:
        regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
    elif format == 'cmake':
        regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
    else:
        raise MesonException('Format "{}" not handled'.format(format))
    search_token = '#mesondefine'
    if format != 'meson':
        search_token = '#cmakedefine'

    result = []
    missing_variables = set()
    # Detect when the configuration data is empty and no tokens were found
    # during substitution so we can warn the user to use the `copy:` kwarg.
    confdata_useless = not confdata.keys()
    for line in data:
        if line.startswith(search_token):
            confdata_useless = False
            line = do_mesondefine(line, confdata)
        else:
            line, missing = do_replacement(regex, line, format, confdata)
            missing_variables.update(missing)
            if missing:
                confdata_useless = False
        result.append(line)
    # Write to a temp file and swap it in only if contents changed, to
    # avoid triggering needless rebuilds.
    dst_tmp = dst + '~'
    try:
        with open(dst_tmp, 'w', encoding=encoding) as f:
            f.writelines(result)
    except Exception as e:
        raise MesonException('Could not write output file %s: %s' % (dst, str(e)))
    shutil.copymode(src, dst_tmp)
    replace_if_different(dst, dst_tmp)
    return missing_variables, confdata_useless
CONF_C_PRELUDE = '''/*
* Autogenerated by the Meson build system.
* Do not edit, your changes will be lost.
*/
#pragma once
'''
CONF_NASM_PRELUDE = '''; Autogenerated by the Meson build system.
; Do not edit, your changes will be lost.
'''
def dump_conf_header(ofilename, cdata, output_format):
    """Write configuration data *cdata* to *ofilename* as a config header.

    *output_format* selects the syntax: 'c' ('#define') or 'nasm'
    ('%define').  Entries are written sorted by name, with their
    description as a comment.  The file is only touched when its
    contents actually change.
    """
    if output_format == 'c':
        prelude = CONF_C_PRELUDE
        prefix = '#'
    elif output_format == 'nasm':
        prelude = CONF_NASM_PRELUDE
        prefix = '%'
    else:
        # Previously an unknown format fell through and crashed later with
        # an unrelated NameError on 'prelude'; fail with a clear error.
        raise MesonException('Unknown configuration file format: %s' % output_format)

    ofilename_tmp = ofilename + '~'
    with open(ofilename_tmp, 'w', encoding='utf-8') as ofile:
        ofile.write(prelude)
        for k in sorted(cdata.keys()):
            # cdata entries are (value, description) pairs.
            (v, desc) = cdata.get(k)
            if desc:
                if output_format == 'c':
                    ofile.write('/* %s */\n' % desc)
                elif output_format == 'nasm':
                    for line in desc.split('\n'):
                        ofile.write('; %s\n' % line)
            if isinstance(v, bool):
                if v:
                    ofile.write('%sdefine %s\n\n' % (prefix, k))
                else:
                    ofile.write('%sundef %s\n\n' % (prefix, k))
            elif isinstance(v, (int, str)):
                ofile.write('%sdefine %s %s\n\n' % (prefix, k, v))
            else:
                raise MesonException('Unknown data type in configuration file entry: ' + k)
    replace_if_different(ofilename, ofilename_tmp)
def replace_if_different(dst, dst_tmp):
    """Atomically move @dst_tmp over @dst, but only when contents differ.

    Leaving an unchanged @dst untouched prevents unnecessary rebuilds;
    @dst_tmp is always consumed (either moved or deleted).
    """
    try:
        with open(dst, 'rb') as f1, open(dst_tmp, 'rb') as f2:
            identical = f1.read() == f2.read()
    except FileNotFoundError:
        # No existing destination: the new file always wins.
        identical = False
    if identical:
        os.unlink(dst_tmp)
    else:
        os.replace(dst_tmp, dst)
def listify(item, flatten=True, unholder=False):
    '''
    Returns a list with all args embedded in a list if they are not a list.
    This function preserves order.
    @flatten: Convert lists of lists to a flat list
    @unholder: Replace each item with the object it holds, if required

    Note: unholding only works recursively when flattening
    '''
    def _unwrap(obj):
        if unholder and hasattr(obj, 'held_object'):
            return obj.held_object
        return obj

    if not isinstance(item, list):
        return [_unwrap(item)]
    result = []
    for element in item:
        element = _unwrap(element)
        if flatten and isinstance(element, list):
            result.extend(listify(element, flatten=True, unholder=unholder))
        else:
            result.append(element)
    return result
def extract_as_list(dict_object, *keys, pop=False, **kwargs):
    '''
    Extracts all values from given dict_object and listifies them.
    '''
    # Optionally remove the extracted keys from the dict.
    fetch = dict_object.pop if pop else dict_object.get
    # If there's only one key, we don't return a list with one element
    if len(keys) == 1:
        return listify(fetch(keys[0], []), **kwargs)
    # Return a list of values corresponding to *keys
    return [listify(fetch(key, []), **kwargs) for key in keys]
def typeslistify(item, types):
    '''
    Ensure that type(@item) is one of @types or a
    list of items all of which are of type @types
    '''
    if isinstance(item, types):
        item = [item]
    if not isinstance(item, list):
        raise MesonException('Item must be a list or one of {!r}'.format(types))
    for element in item:
        # None entries are tolerated; anything else must match @types.
        if element is not None and not isinstance(element, types):
            raise MesonException('List item must be one of {!r}'.format(types))
    return item
def stringlistify(item):
    """Ensure @item is a list of strings (a single str is wrapped in a list)."""
    return typeslistify(item, str)
def expand_arguments(args):
    """Expand '@file' response-file arguments in @args.

    Each argument starting with '@' is replaced by the whitespace-split
    contents of the named file.  Returns the expanded list, or None when
    a response file cannot be read.
    """
    expanded = []
    for arg in args:
        if not arg.startswith('@'):
            expanded.append(arg)
            continue
        args_file = arg[1:]
        try:
            with open(args_file) as f:
                expanded.extend(f.read().split())
        except Exception as e:
            print('Error expanding command line arguments, %s not found' % args_file)
            print(e)
            return None
    return expanded
def Popen_safe(args, write=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs):
    """Run @args, optionally feeding @write to stdin, and return
    (process, stdout_text, stderr_text) with universal newlines.

    Falls back to Popen_safe_legacy (manual decode with errors='replace')
    on old Pythons or when the console encoding is not UTF-8.
    """
    import locale
    encoding = locale.getpreferredencoding()
    # On UTF-8 consoles with Python >= 3.6 the text-mode pipe decoding is
    # reliable; otherwise decode the bytes ourselves in the legacy path.
    if sys.version_info < (3, 6) or not sys.stdout.encoding or encoding.upper() != 'UTF-8':
        return Popen_safe_legacy(args, write=write, stdout=stdout, stderr=stderr, **kwargs)
    p = subprocess.Popen(args, universal_newlines=True, close_fds=False,
                         stdout=stdout, stderr=stderr, **kwargs)
    o, e = p.communicate(write)
    return p, o, e
def Popen_safe_legacy(args, write=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs):
    """Byte-mode fallback for Popen_safe: run @args, decode the captured
    output manually with errors='replace', and normalize CRLF to LF.

    Returns (process, stdout_text, stderr_text); either text may be None
    when the corresponding stream was not captured.
    """
    def _decode(raw, stream_encoding):
        if raw is None:
            return None
        if stream_encoding:
            text = raw.decode(encoding=stream_encoding, errors='replace')
        else:
            text = raw.decode(errors='replace')
        return text.replace('\r\n', '\n')

    p = subprocess.Popen(args, universal_newlines=False,
                         stdout=stdout, stderr=stderr, **kwargs)
    payload = write.encode('utf-8') if write is not None else None
    o, e = p.communicate(payload)
    return p, _decode(o, sys.stdout.encoding), _decode(e, sys.stderr.encoding)
def iter_regexin_iter(regexiter, initer):
    '''
    Takes each regular expression in @regexiter and tries to search for it in
    every item in @initer. If there is a match, returns that match.
    Else returns False.
    '''
    for pattern in regexiter:
        for candidate in initer:
            # Skip non-string entries (e.g. File objects in a command list).
            if not isinstance(candidate, str):
                continue
            found = re.search(pattern, candidate)
            if found:
                return found.group()
    return False
def _substitute_values_check_errors(command, values):
    """Validate that every template used in *command* is available in
    *values*; raise MesonException otherwise.

    Checks input templates (@INPUT@, @INPUTnn@, @PLAINNAME@, @BASENAME@)
    and output templates (@OUTPUT@, @OUTPUTnn@, @OUTDIR@) against the
    number of inputs/outputs actually provided.
    """
    # Error checking
    inregex = ('@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@')
    outregex = ('@OUTPUT([0-9]+)?@', '@OUTDIR@')
    if '@INPUT@' not in values:
        # Error out if any input-derived templates are present in the command
        match = iter_regexin_iter(inregex, command)
        if match:
            m = 'Command cannot have {!r}, since no input files were specified'
            raise MesonException(m.format(match))
    else:
        if len(values['@INPUT@']) > 1:
            # Error out if @PLAINNAME@ or @BASENAME@ is present in the command
            match = iter_regexin_iter(inregex[1:], command)
            if match:
                raise MesonException('Command cannot have {!r} when there is '
                                     'more than one input file'.format(match))
        # Error out if an invalid @INPUTnn@ template was specified
        for each in command:
            if not isinstance(each, str):
                continue
            match = re.search(inregex[0], each)
            # A matching @INPUTnn@ must exist as a key in values.
            if match and match.group() not in values:
                m = 'Command cannot have {!r} since there are only {!r} inputs'
                raise MesonException(m.format(match.group(), len(values['@INPUT@'])))
    if '@OUTPUT@' not in values:
        # Error out if any output-derived templates are present in the command
        match = iter_regexin_iter(outregex, command)
        if match:
            m = 'Command cannot have {!r} since there are no outputs'
            raise MesonException(m.format(match))
    else:
        # Error out if an invalid @OUTPUTnn@ template was specified
        for each in command:
            if not isinstance(each, str):
                continue
            match = re.search(outregex[0], each)
            if match and match.group() not in values:
                m = 'Command cannot have {!r} since there are only {!r} outputs'
                raise MesonException(m.format(match.group(), len(values['@OUTPUT@'])))
def substitute_values(command, values):
    '''
    Substitute the template strings in the @values dict into the list of
    strings @command and return a new list. For a full list of the templates,
    see get_filenames_templates_dict()

    If multiple inputs/outputs are given in the @values dictionary, we
    substitute @INPUT@ and @OUTPUT@ only if they are the entire string, not
    just a part of it, and in that case we substitute *all* of them.
    '''
    # Error checking
    _substitute_values_check_errors(command, values)

    # Substitution
    outcmd = []
    # Build one regex matching every simple template (everything except the
    # multi-valued @INPUT@/@OUTPUT@, which need list handling below).
    rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
    value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
    for vv in command:
        # Non-string arguments (e.g. File objects) pass through untouched.
        if not isinstance(vv, str):
            outcmd.append(vv)
        elif '@INPUT@' in vv:
            inputs = values['@INPUT@']
            if vv == '@INPUT@':
                # Whole-argument template: expand to all input paths.
                outcmd += inputs
            elif len(inputs) == 1:
                outcmd.append(vv.replace('@INPUT@', inputs[0]))
            else:
                raise MesonException("Command has '@INPUT@' as part of a "
                                     "string and more than one input file")
        elif '@OUTPUT@' in vv:
            outputs = values['@OUTPUT@']
            if vv == '@OUTPUT@':
                outcmd += outputs
            elif len(outputs) == 1:
                outcmd.append(vv.replace('@OUTPUT@', outputs[0]))
            else:
                raise MesonException("Command has '@OUTPUT@' as part of a "
                                     "string and more than one output file")
        # Append values that are exactly a template string.
        # This is faster than a string replace.
        elif vv in values:
            outcmd.append(values[vv])
        # Substitute everything else with replacement
        elif value_rx:
            outcmd.append(value_rx.sub(lambda m: values[m.group(0)], vv))
        else:
            outcmd.append(vv)
    return outcmd
def get_filenames_templates_dict(inputs, outputs):
    '''
    Create a dictionary with template strings as keys and values as values for
    the following templates:

    @INPUT@  - the full path to one or more input files, from @inputs
    @OUTPUT@ - the full path to one or more output files, from @outputs
    @OUTDIR@ - the full path to the directory containing the output files

    If there is only one input file, the following keys are also created:

    @PLAINNAME@ - the filename of the input file
    @BASENAME@ - the filename of the input file with the extension removed

    If there is more than one input file, the following keys are also created:

    @INPUT0@, @INPUT1@, ... one for each input file

    If there is more than one output file, the following keys are also created:

    @OUTPUT0@, @OUTPUT1@, ... one for each output file
    '''
    values = {}
    if inputs:
        values['@INPUT@'] = inputs
        values.update({'@INPUT{}@'.format(idx): path for idx, path in enumerate(inputs)})
        if len(inputs) == 1:
            plain = os.path.basename(inputs[0])
            values['@PLAINNAME@'] = plain
            values['@BASENAME@'] = os.path.splitext(plain)[0]
    if outputs:
        values['@OUTPUT@'] = outputs
        values.update({'@OUTPUT{}@'.format(idx): path for idx, path in enumerate(outputs)})
        # All outputs share one directory; many external programs fail on
        # empty arguments, so map '' to '.'.
        values['@OUTDIR@'] = os.path.dirname(outputs[0]) or '.'
    return values
def _make_tree_writable(topdir):
# Ensure all files and directories under topdir are writable
# (and readable) by owner.
for d, _, files in os.walk(topdir):
os.chmod(d, os.stat(d).st_mode | stat.S_IWRITE | stat.S_IREAD)
for fname in files:
fpath = os.path.join(d, fname)
if os.path.isfile(fpath):
os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
def windows_proof_rmtree(f):
    """Remove directory tree @f, retrying with increasing delays.

    On Windows a file held open (e.g. by an antivirus scanner) cannot be
    deleted, so a failed rmtree is retried; a tree that vanishes midway
    counts as success.
    """
    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
    # Read-only entries would also make rmtree fail, so fix them up front.
    _make_tree_writable(f)
    for delay in delays:
        try:
            shutil.rmtree(f)
            return
        except FileNotFoundError:
            # Already gone: nothing left to do.
            return
        except (OSError, PermissionError):
            time.sleep(delay)
    # Try one last time and throw if it fails.
    shutil.rmtree(f)
def detect_subprojects(spdir_name, current_dir='', result=None):
    """Recursively collect subprojects under @current_dir/@spdir_name.

    Returns a dict mapping subproject name to a list of paths; a name can
    appear both as a checked-out directory and as a .wrap file.  The
    download cache directory 'packagecache' is excluded.
    """
    if result is None:
        result = {}
    spdir = os.path.join(current_dir, spdir_name)
    if not os.path.exists(spdir):
        return result
    for trial in glob(os.path.join(spdir, '*')):
        basename = os.path.basename(trial)
        # Compare the basename, not the full path: `trial` always includes
        # the parent directory, so the old `trial == 'packagecache'` test
        # never matched and the download cache leaked into the results.
        if basename == 'packagecache':
            continue
        append_this = True
        if os.path.isdir(trial):
            # Recurse in case the subproject nests its own subprojects dir.
            detect_subprojects(spdir_name, trial, result)
        elif trial.endswith('.wrap') and os.path.isfile(trial):
            basename = os.path.splitext(basename)[0]
        else:
            append_this = False
        if append_this:
            result.setdefault(basename, []).append(trial)
    return result
def get_error_location_string(fname, lineno):
    """Format a file/line pair the way compilers do: '<file>:<line>:'."""
    return '%s:%s:' % (fname, lineno)
def substring_is_in_list(substr, strlist):
    """Return True when @substr occurs inside any string in @strlist."""
    return any(substr in s for s in strlist)
class OrderedSet(collections.abc.MutableSet):
    """A set that remembers insertion order; the first insertion of an
    element determines its position."""

    def __init__(self, iterable=None):
        # Backed by an OrderedDict whose values are ignored.
        self._store = collections.OrderedDict()
        if iterable:
            self.update(iterable)

    def __contains__(self, value):
        return value in self._store

    def __iter__(self):
        return iter(self._store.keys())

    def __len__(self):
        return len(self._store)

    def __repr__(self):
        # Don't print 'OrderedSet("")' for an empty set.
        if not self._store:
            return 'OrderedSet()'
        elements = '", "'.join(repr(e) for e in self._store.keys())
        return 'OrderedSet("{}")'.format(elements)

    def __reversed__(self):
        return reversed(self._store)

    def add(self, value):
        self._store[value] = None

    def discard(self, value):
        # pop with a default is a no-op for missing elements, matching set.discard.
        self._store.pop(value, None)

    def update(self, iterable):
        for element in iterable:
            self._store[element] = None

    def difference(self, set_):
        return type(self)(e for e in self if e not in set_)
class BuildDirLock:
    """Context manager holding an exclusive lock on a Meson build directory.

    Uses fcntl on POSIX and msvcrt on Windows (whichever the module-level
    have_fcntl/have_msvcrt flags indicate is available).  Raises
    MesonException when another Meson process already holds the lock.
    """

    def __init__(self, builddir):
        self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')

    def __enter__(self):
        self.lockfile = open(self.lockfilename, 'w')
        try:
            if have_fcntl:
                # Non-blocking exclusive lock: fail immediately if held.
                fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
            elif have_msvcrt:
                msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
        except (BlockingIOError, PermissionError):
            self.lockfile.close()
            raise MesonException('Some other Meson process is already using this build directory. Exiting.')

    def __exit__(self, *args):
        if have_fcntl:
            fcntl.flock(self.lockfile, fcntl.LOCK_UN)
        elif have_msvcrt:
            msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
        self.lockfile.close()
def relpath(path, start):
    """Like os.path.relpath, but falls back to the absolute @path when no
    relative form exists (on Windows, paths on two different drives such
    as c:\\foo and f:\\bar cannot be related)."""
    try:
        return os.path.relpath(path, start)
    except ValueError:
        return path
<|file_name|>SendMessageToCare.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2021 Inera AB (http://www.inera.se)
*
* This file is part of sklintyg (https://github.com/sklintyg).
*
* sklintyg is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* sklintyg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*globals wcTestTools, JSON, logger*/
'use strict';
var testdataHelper = wcTestTools.helpers.testdata;
/**
 * Advance the given Date in place by the given number of days and
 * return the same (mutated) Date object.
 */
function addDays(date, days) {
  var shifted = date.getDate() + days;
  date.setDate(shifted);
  return date;
}
/**
 * Build a SendMessageToCare v2 SOAP payload (question, answer, reminder
 * or completion request) for the given certificate, and record the
 * message on intyg.messages.
 *
 * NOTE: this block was scrambled by inline FIM markers; the string
 * concatenation has been restored to its correct order (svarPa and the
 * skickatAv element belong between paminnelseMeddelandeId and the
 * closing tag).
 */
module.exports.SendMessageToCare = function(user, person, intyg, message, testString, amneCode) {
  var messageID = testdataHelper.generateTestGuid();
  var skickatTidpunkt = new Date();

  if (!intyg.messages) {
    intyg.messages = [];
  }

  var svarPa = '';
  var sistaDatumForSvar = '<urn1:sistaDatumForSvar>' + testdataHelper.dateFormat(addDays(skickatTidpunkt, 5)) + '</urn1:sistaDatumForSvar>';

  if (amneCode) {
    intyg.messages.unshift({
      id: messageID,
      typ: 'Fråga',
      amne: amneCode,
      testString: testString
    });
  } else {
    // Om ämne inte skickas med till funktionen så behandlar vi det som
    // ett svarsmeddelande och kopierar ämne från tidigare
    amneCode = intyg.messages[0].amne;
    svarPa = '<urn1:svarPa>' + '<urn3:meddelande-id>' + intyg.messages[0].id + '</urn3:meddelande-id>' + '</urn1:svarPa>';
    sistaDatumForSvar = '';

    intyg.messages.unshift({
      id: messageID,
      typ: 'Svar',
      amne: amneCode,
      testString: testString
    });
  }
  logger.silly('this.intyg.messages: ' + JSON.stringify(intyg.messages));

  var kompletteringar = '';
  var paminnelseMeddelandeId = '';

  if (intyg.messages[0].id && amneCode === 'PAMINN') {
    paminnelseMeddelandeId = '<urn1:paminnelseMeddelande-id>' + intyg.messages[1].id + '</urn1:paminnelseMeddelande-id>';
  } else if (amneCode === 'KOMPLT') {
    kompletteringar = [];
    for (var k = 1; k <= 26; k++) {
      if (k === 24) {
        continue; // Frage-id 24 finns inte
      }
      kompletteringar.push(
        '<urn1:komplettering>' +
        '<urn1:frage-id>' + k + '</urn1:frage-id>' +
        '<urn1:text>Komplettering #' + k + '</urn1:text>' +
        '</urn1:komplettering>'
      );
    }
    kompletteringar = kompletteringar.join('\n');
  }

  return '<urn1:SendMessageToCare' +
    ' xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"' +
    ' xmlns:urn="urn:riv:itintegration:registry:1"' +
    ' xmlns:urn1="urn:riv:clinicalprocess:healthcond:certificate:SendMessageToCareResponder:2"' +
    ' xmlns:urn2="urn:riv:clinicalprocess:healthcond:certificate:types:3"' +
    ' xmlns:urn3="urn:riv:clinicalprocess:healthcond:certificate:3"' +
    '>' +
    ' <urn1:meddelande-id>' + messageID + '</urn1:meddelande-id>' +
    ' <urn1:skickatTidpunkt>' + skickatTidpunkt.toISOString().slice(0, -5) + '</urn1:skickatTidpunkt>' +
    ' <urn1:intygs-id>' +
    ' <urn2:root>' + user.enhetId + '</urn2:root>' +
    ' <urn2:extension>' + intyg.id + '</urn2:extension>' +
    ' </urn1:intygs-id>' +
    ' <urn1:patientPerson-id>' +
    ' <urn2:root>1.2.752.129.2.1.3.1</urn2:root>' +
    ' <urn2:extension>' + person.id.replace('-', '') + '</urn2:extension>' +
    ' </urn1:patientPerson-id>' +
    ' <urn1:logiskAdressMottagare>' + 'nmtWebcert' + process.env.environmentName + '</urn1:logiskAdressMottagare>' +
    ' <urn1:amne>' +
    ' <urn2:code>' + amneCode + '</urn2:code>' +
    ' <urn2:codeSystem>ffa59d8f-8d7e-46ae-ac9e-31804e8e8499</urn2:codeSystem>' +
    ' </urn1:amne>' +
    ' <urn1:meddelande>' + message + ' ' + testString + '</urn1:meddelande>' +
    paminnelseMeddelandeId +
    svarPa +
    ' <urn1:skickatAv>' +
    ' <urn1:part>' +
    ' <urn2:code>FKASSA</urn2:code>' +
    ' <urn2:codeSystem>769bb12b-bd9f-4203-a5cd-fd14f2eb3b80</urn2:codeSystem>' +
    ' </urn1:part>' +
    ' </urn1:skickatAv>' +
    kompletteringar +
    sistaDatumForSvar +
    '</urn1:SendMessageToCare>';
};
' <urn1:skickatAv>' +
' <urn1:part>' +
' <urn2:code>FKASSA</urn2:code>' + |
import system

# Create the computer system and power it up.
# (FIM training markers that split this script have been removed.)
sys = system.System()
sys.power_on()
<|file_name|>get_non_locator_timeline.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# __author__: Yixuan LI
# __email__: [email protected]
import os
import json
import re
from optparse import OptionParser
import tweepy
import time
class UserTimeline:
def __init__(self,inputDir,outputDir):
self.inputDir = inputDir
self.outputDir = outputDir
os.system("mkdir -p %s"%(outputDir))
# Get the names of the files under the input directory and save them in a list
self.fileList = os.listdir(inputDir)
print self.fileList
self.userHash = {} # [key,value] pair to record the unique users in the tweets
self.uniqueUserCount = 0 # count unique users in the dataset
self.tweetCount = 0 # total tweets processed
self.api = None
def authentication(self):
consumer_key="z86C8djY3bYOPD1WkYV73nVP6"
consumer_secret="BT8oKrcj955MKjv0qS8Kra2Iw91E3uSMTqEVurfTmKjXfG0hNm"
access_token="746349096-Bz1n8T6vNEFBAMG2YqVdJFOtrM321d5HeupxMlxM"
access_token_secret="ZZQZsjvJXnIlyl04Mg2vCxS8g122b3AljpiytiKCKRFPL"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
self.api = tweepy.API(auth)
print "authentication finished"
def get_user_id(self):
written = 0
if os.path.exists(self.outputDir + "/" + "uniqueUserID.txt"):
pass
else:
for tweetFile in self.fileList[1:]:<|fim▁hole|> with open(self.inputDir+"/"+tweetFile,'r') as fin:
for line in fin:
try:
lineContents = json.loads(line) # load a line
self.tweetCount += 1
print self.tweetCount # for debugging
except:
continue
try:
if lineContents["coordinates"] is not None:
continue
else:
# extract user's id
userID = lineContents["user"]["id"]
# extract tweet text and convert the string to lower case (http://stackoverflow.com/questions/6797984/how-to-convert-string-to-lowercase-in-python)
#tweet = lineContents["text"].lower()
if not self.userHash.has_key(userID): # if the user has not been counted
self.uniqueUserCount += 1 # count the number of unique users
self.userHash[userID] = True
fileNum = int(self.uniqueUserCount/7250 + 1)
with open(self.outputDir + "/" + "uniqueUserID_"+str(fileNum)+".txt","a") as fileout:
written += 1
fileout.write(str(userID))
fileout.write("\n")
print written," written"
except:
continue
print "There are ", self.uniqueUserCount, "unique users"
print self.tweetCount, " tweets processed"
def get_user_timeline(self):
with open(self.outputDir + "/" + "uniqueUserID_6.txt",'r') as fin:
for userID in fin:
# store the tweets of each user in a single file named by the {userID}.json
filePath = self.outputDir + "/" + str(userID[:-1])+".json"
print userID
if os.path.exists(filePath):
with open(filePath,'r') as myfile:
count = sum(1 for line in myfile)
if count > 900:
continue
else:
# http://stackoverflow.com/questions/6996603/how-do-i-delete-a-file-or-folder-in-python
os.remove(filePath)
pageCount = 1
trialTime = 0
# get user timeline tweets
while pageCount < 6:
print "Collecting", pageCount, " -th page"
# open the output file in append mode
self.fout = open(filePath,"a")
try:
tweets = self.api.user_timeline(id=userID,count=200,page=pageCount)
pageCount += 1
except:
time.sleep(70)
trialTime += 1
if trialTime == 2:
pageCount = 8
continue
# write to file
# Note that data returned by api.user_timeline is status object
for tweet in tweets:
print tweet.text
# convert tweepy status object to json format
# http://stackoverflow.com/questions/27900451/convert-tweepy-status-object-into-json
self.fout.write(json.dumps(tweet._json))
self.fout.write('\n')
time.sleep(70) # rate limit (15 requests per 15 minutes window)
if __name__ == '__main__':
    #########################################################################################
    # Parse the arguments (a trailing FIM marker has been removed from this block).
    class MyParser(OptionParser):
        def format_epilog(self, formatter):
            return self.epilog

    usage = "usage: python plot_stats.py [options]"
    description = """
    """
    epilog = """
    """
    parser = MyParser(usage, description=description, epilog=epilog)
    parser.add_option("--inputDir", "--input file of twitter data", dest="input_path", default=None,
                      help="input directory of twitter streaming data in JSON format [default: None]")
    parser.add_option("--outputDir", "--output directory of twitter user timeline data", dest="output_path", default=None,
                      help="output directory of twitter user timeline data [default: None]")
    (options, args) = parser.parse_args()
    # input directory
    inputDir = options.input_path
    # output directory
    outputDir = options.output_path
    ########################################################################
    getter = UserTimeline(inputDir, outputDir)
    getter.authentication()
    #getter.get_user_id()
    getter.get_user_timeline()
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''

NAME = 'Edgecast (Verizon Digital Media)'


def is_waf(self):
    """Detect Edgecast by its characteristic Server response headers.

    (FIM markers that displaced this function definition were removed.)
    """
    # Edgecast fronts responses with Server values like "ECD..." / "ECS...".
    schemes = [
        self.matchHeader(('Server', r'^ECD(.+)?')),
        self.matchHeader(('Server', r'^ECS(.*)?'))
    ]
    if any(i for i in schemes):
        return True
    return False
def is_waf(self): |
<|file_name|>enum_trait.rs<|end_file_name|><|fim▁begin|>use protocol::Enum;<|fim▁hole|> Bar,
}
#[test]
fn can_get_discriminator() {
let foo = WithGenerics::Foo(99u16, "hello".to_owned());
let bar: WithGenerics<bool, bool> = WithGenerics::Bar;
assert_eq!("Foo", foo.discriminator());
assert_eq!("Bar", bar.discriminator());
}<|fim▁end|> |
#[derive(Protocol, Clone, Debug, PartialEq)]
pub enum WithGenerics<A, B> {
Foo(A, B), |
<|file_name|>EndgameEmber.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
By default, this module uses the pre-built Ember model from
https://pubdata.endgame.com/ember/ember_dataset.tar.bz2.
Documentation about training a new model can be found on the Ember GitHub page
(https://github.com/endgameinc/ember).
After training a new model, place the resulting txt file in
`multiscanner/etc` and update `config.ini` with the new filename.
"""
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
import os
from pathlib import Path
from multiscanner import CONFIG
__authors__ = "Patrick Copeland"
__license__ = "MPL 2.0"
TYPE = "MachineLearning"
NAME = "EndgameEmber"
REQUIRES = ['libmagic']
DEFAULTCONF = {
'ENABLED': False,
'path-to-model': os.path.join(os.path.split(CONFIG)[0], 'etc', 'ember', 'ember_model_2017.txt'),
}
LGBM_MODEL = None
try:
import ember
has_ember = True
except ImportError as e:
print("ember module not installed...")
has_ember = False
try:
import lightgbm as lgb
except ImportError as e:
print("lightgbm module needed for ember. Not installed...")
has_ember = False
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if not has_ember:
return False
if not Path(conf['path-to-model']).is_file():
print("'{}' does not exist. Check config.ini for model location.".format(conf['path-to-model']))
return False
try:
global LGBM_MODEL
LGBM_MODEL = lgb.Booster(model_file=conf['path-to-model'])
except lgb.LightGBMError as e:
print("Unable to load model, {}. ({})".format(conf['path-to-model'], e))
return False
return True
def scan(filelist, conf=DEFAULTCONF):
results = []
for fname in filelist:
# Ensure libmagic returns results
if REQUIRES[0] is not None:
# only run the analytic if it is an Office document
file_type = _get_libmagicresults(REQUIRES[0][0], fname)
if file_type.startswith('PE32'):
with open(fname, 'rb') as fh:
ember_result = ember.predict_sample(LGBM_MODEL, fh.read())
results.append(<|fim▁hole|> )
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
return (results, metadata)
def _get_libmagicresults(results, fname):
libmagicdict = dict(results)
return libmagicdict.get(fname)<|fim▁end|> | (fname, {'Prediction': ember_result}) |
<|file_name|>eval.rs<|end_file_name|><|fim▁begin|>use crate::error::*;
use crate::il;
/// Evaluate an `il::Expression` where all terminals are `il::Constant`, and
/// return the resulting `il::Constant`.
pub fn eval(expr: &il::Expression) -> Result<il::Constant> {
Ok(match *expr {
il::Expression::Scalar(ref scalar) => {
return Err(ErrorKind::ExecutorScalar(scalar.name().to_string()).into());
}
il::Expression::Constant(ref constant) => constant.clone(),
il::Expression::Add(ref lhs, ref rhs) => eval(lhs)?.add(&eval(rhs)?)?,
il::Expression::Sub(ref lhs, ref rhs) => eval(lhs)?.sub(&eval(rhs)?)?,
il::Expression::Mul(ref lhs, ref rhs) => eval(lhs)?.mul(&eval(rhs)?)?,
il::Expression::Divu(ref lhs, ref rhs) => eval(lhs)?.divu(&eval(rhs)?)?,
il::Expression::Modu(ref lhs, ref rhs) => eval(lhs)?.modu(&eval(rhs)?)?,
il::Expression::Divs(ref lhs, ref rhs) => eval(lhs)?.divs(&eval(rhs)?)?,
il::Expression::Mods(ref lhs, ref rhs) => eval(lhs)?.mods(&eval(rhs)?)?,
il::Expression::And(ref lhs, ref rhs) => eval(lhs)?.and(&eval(rhs)?)?,
il::Expression::Or(ref lhs, ref rhs) => eval(lhs)?.or(&eval(rhs)?)?,
il::Expression::Xor(ref lhs, ref rhs) => eval(lhs)?.xor(&eval(rhs)?)?,
il::Expression::Shl(ref lhs, ref rhs) => eval(lhs)?.shl(&eval(rhs)?)?,
il::Expression::Shr(ref lhs, ref rhs) => eval(lhs)?.shr(&eval(rhs)?)?,
il::Expression::Cmpeq(ref lhs, ref rhs) => eval(lhs)?.cmpeq(&eval(rhs)?)?,
il::Expression::Cmpneq(ref lhs, ref rhs) => eval(lhs)?.cmpneq(&eval(rhs)?)?,
il::Expression::Cmplts(ref lhs, ref rhs) => eval(lhs)?.cmplts(&eval(rhs)?)?,
il::Expression::Cmpltu(ref lhs, ref rhs) => eval(lhs)?.cmpltu(&eval(rhs)?)?,
il::Expression::Zext(bits, ref rhs) => eval(rhs)?.zext(bits)?,<|fim▁hole|> eval(then)?
} else {
eval(else_)?
}
}
})
}
#[test]
fn add() {
let lhs = il::expr_const(0x570000, 32);
let rhs = il::expr_const(0x703c, 32);
let expr = il::Expression::add(lhs, rhs).unwrap();
assert_eq!(eval(&expr).unwrap(), il::const_(0x57703c, 32));
let lhs = il::expr_const(0xffffffff, 32);
let rhs = il::expr_const(0x1, 32);
let expr = il::Expression::add(lhs, rhs).unwrap();
assert_eq!(eval(&expr).unwrap(), il::const_(0, 32));
}
#[test]
fn cmplts() {
let lhs = il::expr_const(0xffffffff, 32);
let rhs = il::expr_const(0, 32);
let expr = il::Expression::cmplts(lhs, rhs).unwrap();
assert_eq!(eval(&expr).unwrap(), il::const_(1, 1));
let lhs = il::expr_const(0, 32);
let rhs = il::expr_const(0xffffffff, 32);
let expr = il::Expression::cmplts(lhs, rhs).unwrap();
assert_eq!(eval(&expr).unwrap(), il::const_(0, 1));
}<|fim▁end|> | il::Expression::Trun(bits, ref rhs) => eval(rhs)?.trun(bits)?,
il::Expression::Sext(bits, ref rhs) => eval(rhs)?.sext(bits)?,
il::Expression::Ite(ref cond, ref then, ref else_) => {
if eval(cond)?.is_one() { |
<|file_name|>lattice.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
equip.analysis.dataflow.lattice
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The base lattice implementation (mostly used as semi-lattice).
:copyright: (c) 2014 by Romain Gaucher (@rgaucher)
:license: Apache 2, see LICENSE for more details.
"""
class Lattice(object):
"""
Interface for a lattice element. Practically, we only use the semi-lattice
with the join (V) operator.
"""
def __init__(self):
pass
def init_state(self):
"""
Returns a new initial state.
"""
pass
def join_all(self, *states):
result_state = None
for state in states:
if result_state is None:
result_state = state
else:
result_state = self.join(result_state, state)
return result_state
def join(self, state1, state2):
"""
Returns the result of the V (supremum) between the two states.
"""<|fim▁hole|> for state in states:
if result_state is None:
result_state = state
else:
result_state = self.meet(result_state, state)
return result_state
def meet(self, state1, state2):
"""
Returns the result of the meet \/ (infimum) between the two states.
"""
pass
def lte(self, state1, state2):
"""
This is the <= operator between two lattice elements (states) as defined by:
state1 <= state2 and state2 <= state1 <=> state1 == state2
"""
pass
def top(self):
"""
The top of the lattice.
"""
pass
def bottom(self):
"""
The bottom of the lattice.
"""
pass<|fim▁end|> | pass
def meet_all(self, *states):
result_state = None |
<|file_name|>translations.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
file = sys.argv[1]<|fim▁hole|> xmlns:py="http://genshi.edgewall.org/"
xmlns:xi="http://www.w3.org/2001/XInclude"
py:strip="">
'''
try:
for lang in f:
lang = lang.strip()
if lang and not lang.startswith('#'):
print ' <option value="' + lang + '" py:attrs="{\'selected\': lang == \'' + lang + '\' and \'selected\' or None}">' + lang + '</option>'
finally:
f.close()
print '''</html>
'''<|fim▁end|> | f = open(file)
print '''
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" |
<|file_name|>about.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Created on Oct 2, 2010
@author: dimitry (zavlab1)
'''
<|fim▁hole|>from gi.repository import Gdk
from foobnix.gui.service.path_service import get_foobnix_resourse_path_by_name
from foobnix.util.const import ICON_FOOBNIX
from foobnix.version import FOOBNIX_VERSION
class AboutWindow(Gtk.AboutDialog):
def __init__(self):
Gtk.AboutDialog.__init__(self)
self.set_program_name("Foobnix")
self.set_version(FOOBNIX_VERSION)
self.set_copyright("(c) Ivan Ivanenko <[email protected]>")
self.set_comments(_("Simple and Powerful player"))
self.set_website("http://www.foobnix.com")
self.set_authors(["Dmitry Kozhura (zavlab1) <[email protected]>", "Pietro Campagnano <fain182@gmailcom>", "Viktor Suprun <[email protected]>"])
self.set_translator_credits("""Bernardo Miguel Savone
Sérgio Marques
XsLiDian
KamilSPL
north
Alex Serada
Ivan Ivanenko
Dmitry-Kogura
Fitoschido
zeugma
Schaffino
Oleg «Eleidan» Kulik
Sergey Zigachev
Martino Barbon
Florian Heissenberger
Aldo Mann""")
self.set_logo(Gdk.pixbuf_new_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))) #@UndefinedVariable
def show(self):
self.run()
self.destroy()<|fim▁end|> | from gi.repository import Gtk |
<|file_name|>tops_sql.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# filename: tops_sql.py
# Copyright 2008-2010 Stefano Costa <[email protected]>
#
# This file is part of Total Open Station.<|fim▁hole|># modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Total Open Station is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Total Open Station. If not, see
# <http://www.gnu.org/licenses/>.
def to_sql(point, tablename):
'''Generate SQL line corresponding to the input point.
At this moment the column names are fixed, but they could change in the
future. The default names are reasonable.'''
params = {
'wkt': to_wkt(point),
'tablename': tablename,
'pid': point[0],
'text': point[4]}
sql_string = "INSERT INTO %(tablename)s" % params
sql_string += "(point_id, point_geom, point_text) VALUES"
sql_string += "(%(pid)s,GeomFromText('%(wkt)s'),'%(text)s');\n" % params
return sql_string
def to_wkt(point):
pid, x, y, z, text = point
wkt_representation = 'POINT(%s %s)' % (x, y)
return wkt_representation
class OutputFormat:
"""
Exports points data in SQL format suitable for use with PostGIS & friends.
http://postgis.refractions.net/documentation/manual-1.3/ch04.html#id2986280
has an example of loading an SQL file into a PostgreSQL database.
``data`` should be an iterable (e.g. list) containing one iterable (e.g.
tuple) for each point. The default order is PID, x, x, z, TEXT.
This is consistent with our current standard.
"""
def __init__(self, data, tablename='topsdata'):
self.data = data
self.tablename = tablename
def process(self):
lines = [to_sql(e, self.tablename) for e in self.data]
lines.insert(0, 'BEGIN;\n')
lines.append('COMMIT;\n')
output = "".join(lines)
return output
if __name__ == "__main__":
TotalOpenSQL(
[(1, 2, 3, 4, 'qwerty'),
("2.3", 42, 45, 12, 'asdfg')],
'prova')<|fim▁end|> | #
# Total Open Station is free software: you can redistribute it and/or |
<|file_name|>pkcs12.py<|end_file_name|><|fim▁begin|>#
# PKCS#12 syntax
#
# ASN.1 source from:
# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-12/pkcs-12.asn
#
# Sample captures could be obtained with "openssl pkcs12" command
#
from pyasn1_modules.rfc2459 import *
from pyasn1_modules import rfc2251
class Attributes(univ.SetOf):
componentType = rfc2251.Attribute()
class Version(univ.Integer): pass<|fim▁hole|> componentType = namedtype.NamedTypes(
namedtype.NamedType('version', Version()),
namedtype.NamedType('subject', Name()),
namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()),
namedtype.NamedType('attributes',
Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
)
class Signature(univ.BitString): pass
class SignatureAlgorithmIdentifier(AlgorithmIdentifier): pass
class CertificationRequest(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('certificationRequestInfo', CertificationRequestInfo()),
namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
namedtype.NamedType('signature', Signature())
)<|fim▁end|> |
class CertificationRequestInfo(univ.Sequence): |
<|file_name|>ar_PS.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="ps_AF" sourcelanguage="en_US">
<context>
<name>ApplicationNotificationModel</name>
<message>
<location filename="../settings/applicationnotificationmodel.cpp" line="62"/>
<source>General</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>JobServer</name>
<message numerus="yes">
<location filename="../kjob/jobserver.cpp" line="77"/>
<source>%n jobs running</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
</context>
<context>
<name>JobViewWidget</name>
<message>
<location filename="../kjob/jobviewwidget.ui" line="14"/>
<source>Frame</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>NotificationAppGroup</name>
<message>
<location filename="../notificationsWidget/notificationappgroup.cpp" line="122"/>
<source>Collapse Notifications</source>
<translation type="unfinished"></translation>
</message>
<message numerus="yes">
<location filename="../notificationsWidget/notificationappgroup.cpp" line="125"/>
<source>+%n notifications collapsed</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
</context>
<context>
<name>NotificationPanel</name>
<message numerus="yes">
<location filename="../notificationsWidget/notificationpanel.cpp" line="54"/>
<source>%n d</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<location filename="../notificationsWidget/notificationpanel.cpp" line="56"/>
<source>%n hr</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<location filename="../notificationsWidget/notificationpanel.cpp" line="58"/>
<source>%n min</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message>
<location filename="../notificationsWidget/notificationpanel.cpp" line="60"/>
<source>just now</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>NotificationPopup</name>
<message>
<location filename="../notificationsWidget/notificationpopup.ui" line="280"/>
<source>Hide</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationpopup.ui" line="296"/>
<location filename="../notificationsWidget/notificationpopup.cpp" line="287"/>
<location filename="../notificationsWidget/notificationpopup.cpp" line="301"/>
<source>Dismiss</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>NotificationsWidget</name>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="40"/>
<location filename="../notificationsWidget/notificationswidget.cpp" line="110"/>
<source>Notifications</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="88"/>
<source>Quiet Mode</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="100"/>
<source>Sound</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="117"/>
<source>Critical Only</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="131"/>
<location filename="../notificationsWidget/notificationswidget.cpp" line="81"/>
<source>No Notifications</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="145"/>
<source>Mute</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="245"/>
<source>Fore&ver</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="254"/>
<source>Turn off in</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="277"/>
<source>HH:mm</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="288"/>
<source>Turn off at</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="389"/>
<location filename="../notificationsWidget/notificationswidget.cpp" line="191"/>
<source>No notifications</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="399"/>
<source>There's nothing to see at the moment. Don't worry, we'll be sure to tell you as soon as something pops up.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../notificationsWidget/notificationswidget.ui" line="447"/>
<source>Clear All</source>
<translation type="unfinished"></translation>
</message>
<message numerus="yes">
<location filename="../notificationsWidget/notificationswidget.cpp" line="194"/>
<source>%n notification(s)</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
</context>
<context>
<name>SettingsPane</name>
<message>
<location filename="../settings/settingspane.ui" line="14"/>
<source>Form</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="95"/>
<location filename="../settings/settingspane.cpp" line="114"/>
<source>Notifications</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="157"/>
<source>General</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="242"/>
<source>Show Contents</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="258"/>
<source>Hide Contents</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="271"/>
<source>Don't Show</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="300"/>
<source>On lock screen</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="313"/>
<source>DISPLAY</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="320"/>
<source>Emphasise sending app</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="366"/>
<source>Notification Sound</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="387"/>
<source>Triple Ping</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="392"/>
<source>Upside Down</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="397"/>
<source>Echo</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="405"/>
<source>Attenuate other sounds on notification</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="418"/>
<source>SOUNDS</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="425"/>
<source>Notification Volume</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="474"/>
<source>Charging</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="494"/>
<source>SYSTEM NOTIFICATIONS</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="501"/>
<source>Media Connection</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="521"/>
<source>Unplugged</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="658"/>
<source>Allow Notifications</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="709"/>
<source>Pop up onscreen</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="723"/>
<source>Play Sounds</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="730"/>
<source>Bypass Quiet Mode</source>
<translation type="unfinished"></translation>
</message><|fim▁hole|> <message>
<location filename="../settings/settingspane.ui" line="770"/>
<source>Even when Quiet Mode is set to No Notifications or Critical Only, notifications from this app will continue to show</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="786"/>
<source>BEHAVIOUR</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="833"/>
<source>ACTIONS</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="843"/>
<source>Mark as uninstalled</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.ui" line="846"/>
<source>If this application has been uninstalled, you can remove this application from the list of notifications.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.cpp" line="230"/>
<source>Mark as uninstalled?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../settings/settingspane.cpp" line="230"/>
<source>This will remove the settings from theShell. If the application sends another notification, it will reappear.
Mark "%1" as uninstalled?</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>syntax-extension-bytes.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
static static_vec: &'static [u8] = bytes!("abc", 0xFF, '!');
pub fn main() {<|fim▁hole|> assert_eq!(vec, &[97_u8, 98_u8, 99_u8]);
let vec = bytes!("null", 0);
assert_eq!(vec, &[110_u8, 117_u8, 108_u8, 108_u8, 0_u8]);
let vec = bytes!(' ', " ", 32, 32u8);
assert_eq!(vec, &[32_u8, 32_u8, 32_u8, 32_u8]);
assert_eq!(static_vec, &[97_u8, 98_u8, 99_u8, 255_u8, 33_u8]);
}<|fim▁end|> | let vec = bytes!("abc"); |
<|file_name|>gui.py<|end_file_name|><|fim▁begin|>import cairo
from gi.repository import Gtk
from gi.repository import Gdk
from pylsner import plugin
class Window(Gtk.Window):
def __init__(self):
super(Window, self).__init__(skip_pager_hint=True,
skip_taskbar_hint=True,
)
self.set_title('Pylsner')
screen = self.get_screen()
self.width = screen.get_width()
self.height = screen.get_height()
self.set_size_request(self.width, self.height)
self.set_position(Gtk.WindowPosition.CENTER)
rgba = screen.get_rgba_visual()
self.set_visual(rgba)
self.override_background_color(Gtk.StateFlags.NORMAL,
Gdk.RGBA(0, 0, 0, 0),
)
self.set_wmclass('pylsner', 'pylsner')
self.set_type_hint(Gdk.WindowTypeHint.DOCK)
self.stick()
self.set_keep_below(True)
drawing_area = Gtk.DrawingArea()
drawing_area.connect('draw', self.redraw)
self.refresh_cnt = 0
self.add(drawing_area)
self.connect('destroy', lambda q: Gtk.main_quit())
self.widgets = []
self.show_all()
def refresh(self, force=False):
self.refresh_cnt += 1
if self.refresh_cnt >= 60000:
self.refresh_cnt = 0
redraw_required = False
for wid in self.widgets:
if (self.refresh_cnt % wid.metric.refresh_rate == 0) or force:
wid.refresh()
redraw_required = True
if redraw_required:
self.queue_draw()
return True
def redraw(self, _, ctx):
ctx.set_antialias(cairo.ANTIALIAS_SUBPIXEL)
for wid in self.widgets:
wid.redraw(ctx)
class Widget:
def __init__(self,
name='default',
metric={'plugin': 'time'},
indicator={'plugin': 'arc'},
fill={'plugin': 'rgba_255'},<|fim▁hole|> IndicatorPlugin = plugin.load_plugin('indicators', indicator['plugin'])
self.indicator = IndicatorPlugin(**indicator)
FillPlugin = plugin.load_plugin('fills', fill['plugin'])
self.fill = FillPlugin(**fill)
def refresh(self):
self.metric.refresh()
self.fill.refresh(self.metric.value)
def redraw(self, ctx):
ctx.set_source(self.fill.pattern)
self.indicator.redraw(ctx, self.metric.value)<|fim▁end|> | ):
self.name = name
MetricPlugin = plugin.load_plugin('metrics', metric['plugin'])
self.metric = MetricPlugin(**metric) |
<|file_name|>backend_bases.py<|end_file_name|><|fim▁begin|>"""
Abstract base classes define the primitives that renderers and
graphics contexts must implement to serve as a matplotlib backend
:class:`RendererBase`
An abstract base class to handle drawing/rendering operations.
:class:`FigureCanvasBase`
The abstraction layer that separates the
:class:`matplotlib.figure.Figure` from the backend specific
details like a user interface drawing area
:class:`GraphicsContextBase`
An abstract base class that provides color, line styles, etc...
:class:`Event`
The base class for all of the matplotlib event
handling. Derived classes suh as :class:`KeyEvent` and
:class:`MouseEvent` store the meta data like keys and buttons
pressed, x and y locations in pixel and
:class:`~matplotlib.axes.Axes` coordinates.
"""
from __future__ import division
import os, warnings, time
import numpy as np
import matplotlib.cbook as cbook
import matplotlib.colors as colors
import matplotlib.transforms as transforms
import matplotlib.widgets as widgets
from matplotlib import rcParams
class RendererBase:
"""An abstract base class to handle drawing/rendering operations.
The following methods *must* be implemented in the backend:
* :meth:`draw_path`
* :meth:`draw_image`
* :meth:`draw_text`
* :meth:`get_text_width_height_descent`
The following methods *should* be implemented in the backend for
optimization reasons:
* :meth:`draw_markers`
* :meth:`draw_path_collection`
* :meth:`draw_quad_mesh`
"""
def __init__(self):
self._texmanager = None
def open_group(self, s):
"""
Open a grouping element with label *s*. Is only currently used by
:mod:`~matplotlib.backends.backend_svg`
"""
pass
def close_group(self, s):
"""
Close a grouping element with label *s*
Is only currently used by :mod:`~matplotlib.backends.backend_svg`
"""
pass
def draw_path(self, gc, path, transform, rgbFace=None):
"""
Draws a :class:`~matplotlib.path.Path` instance using the
given affine transform.
"""
raise NotImplementedError
def draw_markers(self, gc, marker_path, marker_trans, path, trans, rgbFace=None):
"""
Draws a marker at each of the vertices in path. This includes
all vertices, including control points on curves. To avoid
that behavior, those vertices should be removed before calling
this function.
*gc*
the :class:`GraphicsContextBase` instance
*marker_trans*
is an affine transform applied to the marker.<|fim▁hole|> is an affine transform applied to the path.
This provides a fallback implementation of draw_markers that
makes multiple calls to :meth:`draw_path`. Some backends may
want to override this method in order to draw the marker only
once and reuse it multiple times.
"""
tpath = trans.transform_path(path)
for vertices, codes in tpath.iter_segments():
if len(vertices):
x,y = vertices[-2:]
self.draw_path(gc, marker_path,
marker_trans + transforms.Affine2D().translate(x, y),
rgbFace)
def draw_path_collection(self, master_transform, cliprect, clippath,
clippath_trans, paths, all_transforms, offsets,
offsetTrans, facecolors, edgecolors, linewidths,
linestyles, antialiaseds, urls):
"""
Draws a collection of paths, selecting drawing properties from
the lists *facecolors*, *edgecolors*, *linewidths*,
*linestyles* and *antialiaseds*. *offsets* is a list of
offsets to apply to each of the paths. The offsets in
*offsets* are first transformed by *offsetTrans* before
being applied.
This provides a fallback implementation of
:meth:`draw_path_collection` that makes multiple calls to
draw_path. Some backends may want to override this in order
to render each set of path data only once, and then reference
that path multiple times with the different offsets, colors,
styles etc. The generator methods
:meth:`_iter_collection_raw_paths` and
:meth:`_iter_collection` are provided to help with (and
standardize) the implementation across backends. It is highly
recommended to use those generators, so that changes to the
behavior of :meth:`draw_path_collection` can be made globally.
"""
path_ids = []
for path, transform in self._iter_collection_raw_paths(
master_transform, paths, all_transforms):
path_ids.append((path, transform))
for xo, yo, path_id, gc, rgbFace in self._iter_collection(
path_ids, cliprect, clippath, clippath_trans,
offsets, offsetTrans, facecolors, edgecolors,
linewidths, linestyles, antialiaseds, urls):
path, transform = path_id
transform = transforms.Affine2D(transform.get_matrix()).translate(xo, yo)
self.draw_path(gc, path, transform, rgbFace)
def draw_quad_mesh(self, master_transform, cliprect, clippath,
clippath_trans, meshWidth, meshHeight, coordinates,
offsets, offsetTrans, facecolors, antialiased,
showedges):
"""
This provides a fallback implementation of
:meth:`draw_quad_mesh` that generates paths and then calls
:meth:`draw_path_collection`.
"""
from matplotlib.collections import QuadMesh
paths = QuadMesh.convert_mesh_to_paths(
meshWidth, meshHeight, coordinates)
if showedges:
edgecolors = np.array([[0.0, 0.0, 0.0, 1.0]], np.float_)
linewidths = np.array([1.0], np.float_)
else:
edgecolors = facecolors
linewidths = np.array([0.0], np.float_)
return self.draw_path_collection(
master_transform, cliprect, clippath, clippath_trans,
paths, [], offsets, offsetTrans, facecolors, edgecolors,
linewidths, [], [antialiased], [None])
def _iter_collection_raw_paths(self, master_transform, paths, all_transforms):
"""
This is a helper method (along with :meth:`_iter_collection`) to make
it easier to write a space-efficent :meth:`draw_path_collection`
implementation in a backend.
This method yields all of the base path/transform
combinations, given a master transform, a list of paths and
list of transforms.
The arguments should be exactly what is passed in to
:meth:`draw_path_collection`.
The backend should take each yielded path and transform and
create an object that can be referenced (reused) later.
"""
Npaths = len(paths)
Ntransforms = len(all_transforms)
N = max(Npaths, Ntransforms)
if Npaths == 0:
return
transform = transforms.IdentityTransform()
for i in xrange(N):
path = paths[i % Npaths]
if Ntransforms:
transform = all_transforms[i % Ntransforms]
yield path, transform + master_transform
def _iter_collection(self, path_ids, cliprect, clippath, clippath_trans,
                     offsets, offsetTrans, facecolors, edgecolors,
                     linewidths, linestyles, antialiaseds, urls):
    """
    Helper generator (used with :meth:`_iter_collection_raw_paths`) to
    make it easier to write a space-efficent
    :meth:`draw_path_collection` implementation in a backend.

    Yields all of the path, offset and graphics context combinations
    needed to draw the path collection.  The caller should already have
    looped over the results of :meth:`_iter_collection_raw_paths` to
    draw this collection.

    The arguments are the same as those of
    :meth:`draw_path_collection`, except *path_ids*, which is a list of
    arbitrary backend objects referencing the paths created in the
    :meth:`_iter_collection_raw_paths` stage.

    Each yielded result is of the form::

       xo, yo, path_id, gc, rgbFace

    where *xo*, *yo* is an offset; *path_id* is one of the elements of
    *path_ids*; *gc* is a graphics context and *rgbFace* is a color to
    use for filling the path (or None for stroke-only).
    """
    Npaths = len(path_ids)
    Noffsets = len(offsets)
    N = max(Npaths, Noffsets)
    Nfacecolors = len(facecolors)
    Nedgecolors = len(edgecolors)
    Nlinewidths = len(linewidths)
    Nlinestyles = len(linestyles)
    Naa = len(antialiaseds)
    Nurls = len(urls)

    # Nothing to draw: no paths, or neither fill nor stroke requested.
    if (Nfacecolors == 0 and Nedgecolors == 0) or Npaths == 0:
        return
    if Noffsets:
        toffsets = offsetTrans.transform(offsets)

    # A single graphics context is reused for every yielded item; each
    # per-item property is overwritten inside the loop below.
    gc = self.new_gc()
    gc.set_clip_rectangle(cliprect)
    if clippath is not None:
        clippath = transforms.TransformedPath(clippath, clippath_trans)
        gc.set_clip_path(clippath)

    if Nfacecolors == 0:
        rgbFace = None

    if Nedgecolors == 0:
        gc.set_linewidth(0.0)

    xo, yo = 0, 0
    # Properties cycle modulo their own lengths, so shorter lists are
    # repeated across the N drawn items.
    for i in xrange(N):
        path_id = path_ids[i % Npaths]
        if Noffsets:
            xo, yo = toffsets[i % Noffsets]
        if Nfacecolors:
            rgbFace = facecolors[i % Nfacecolors]
        if Nedgecolors:
            gc.set_foreground(edgecolors[i % Nedgecolors])
        if Nlinewidths:
            gc.set_linewidth(linewidths[i % Nlinewidths])
        if Nlinestyles:
            gc.set_dashes(*linestyles[i % Nlinestyles])
        if rgbFace is not None and len(rgbFace) == 4:
            # Fold an RGBA face color into gc alpha plus an RGB triple.
            gc.set_alpha(rgbFace[-1])
            rgbFace = rgbFace[:3]
        gc.set_antialiased(antialiaseds[i % Naa])
        if Nurls:
            gc.set_url(urls[i % Nurls])

        yield xo, yo, path_id, gc, rgbFace
def get_image_magnification(self):
    """
    Return the factor by which images passed to :meth:`draw_image` are
    magnified; lets a backend render images at a different resolution
    than other artists.  The base implementation applies no
    magnification.
    """
    return 1.0
def draw_image(self, x, y, im, bbox, clippath=None, clippath_trans=None):
    """
    Draw the image instance into the current axes.  Must be overridden
    by concrete backends.

    *x*
        pixels from the left hand side of the canvas
    *y*
        distance from the origin (from the top if origin is upper, from
        the bottom if origin is lower)
    *im*
        the :class:`matplotlib._image.Image` instance
    *bbox*
        a :class:`matplotlib.transforms.Bbox` instance for clipping,
        or None
    """
    raise NotImplementedError
def option_image_nocomposite(self):
    """
    Return True when the backend (e.g. SVG) does not want raster images
    rescaled and composited; the base renderer composites, so this
    returns False.
    """
    return False
def draw_tex(self, gc, x, y, s, prop, angle, ismath='TeX!'):
    """
    Draw a TeX string *s* at display coords (*x*, *y*).  Backends with
    usetex support must override; the base class does not implement it.
    """
    raise NotImplementedError
def draw_text(self, gc, x, y, s, prop, angle, ismath=False):
    """
    Draw a text instance; concrete backends must override.

    *gc*
        the :class:`GraphicsContextBase` instance
    *x*, *y*
        text location in display coords
    *s*
        a :class:`matplotlib.text.Text` instance
    *prop*
        a :class:`matplotlib.font_manager.FontProperties` instance
    *angle*
        rotation angle in degrees

    **backend implementers note**

    To verify your bounding box (which is what makes text
    layout/alignment work), change ``if 0: bbox_artist(self, renderer)``
    in text.py to ``if 1`` and the actual bounding box will be drawn
    along with your text.
    """
    raise NotImplementedError
def flipy(self):
    """
    Return True when small y values are at the top for this renderer.
    Consulted only when drawing text (:mod:`matplotlib.text`) and
    images (:mod:`matplotlib.image`).
    """
    return True
def get_canvas_width_height(self):
    """Return the canvas width and height in display coords."""
    return 1, 1
def get_texmanager(self):
    """
    Return the :class:`matplotlib.texmanager.TexManager` instance,
    creating it lazily on first use.
    """
    if self._texmanager is None:
        from matplotlib.texmanager import TexManager
        self._texmanager = TexManager()
    return self._texmanager
def get_text_width_height_descent(self, s, prop, ismath):
    """
    Return the width, height and baseline-to-bottom offset (descent),
    in display coords, of string *s* rendered with
    :class:`~matplotlib.font_manager.FontProperties` *prop*.  Concrete
    backends must override.
    """
    raise NotImplementedError
def new_gc(self):
    """Return a fresh :class:`GraphicsContextBase` instance."""
    return GraphicsContextBase()
def points_to_pixels(self, points):
    """
    Convert *points* (a float or numpy float array) to display units.

    Backends with a dpi notion must override this; imaging systems
    typically assume::

        points to pixels = points * pixels_per_inch/72.0 * dpi/72.0

    Dpi-less backends (postscript, svg) can use this identity version.
    """
    return points
def strip_math(self, s):
    """Strip math markup from *s* via :func:`cbook.strip_math`."""
    return cbook.strip_math(s)
def start_rasterizing(self):
    """Enter rasterizing mode; a no-op for backends without mixed-mode support."""
    pass
def stop_rasterizing(self):
    """Leave rasterizing mode; a no-op for backends without mixed-mode support."""
    pass
class GraphicsContextBase:
    """
    An abstract base class that provides color, line styles, etc...
    """

    # Mapping from named dash styles to a suggested (offset, on/off ink
    # sequence in points) pair; (None, None) means a solid line.
    dashd = {
        'solid'   : (None, None),
        'dashed'  : (0, (6.0, 6.0)),
        'dashdot' : (0, (3.0, 5.0, 1.0, 5.0)),
        'dotted'  : (0, (1.0, 3.0)),
        }

    def __init__(self):
        self._alpha = 1.0
        self._antialiased = 1  # use 0,1 not True, False for extension code
        self._capstyle = 'butt'
        self._cliprect = None
        self._clippath = None
        self._dashes = None, None
        self._joinstyle = 'miter'
        self._linestyle = 'solid'
        self._linewidth = 1
        self._rgb = (0.0, 0.0, 0.0)
        self._hatch = None
        self._url = None
        self._snap = None

    def copy_properties(self, gc):
        'Copy properties from gc to self'
        self._alpha = gc._alpha
        self._antialiased = gc._antialiased
        self._capstyle = gc._capstyle
        self._cliprect = gc._cliprect
        self._clippath = gc._clippath
        self._dashes = gc._dashes
        self._joinstyle = gc._joinstyle
        self._linestyle = gc._linestyle
        self._linewidth = gc._linewidth
        self._rgb = gc._rgb
        self._hatch = gc._hatch
        self._url = gc._url
        self._snap = gc._snap

    def get_alpha(self):
        """
        Return the alpha value used for blending - not supported on
        all backends
        """
        return self._alpha

    def get_antialiased(self):
        "Return true if the object should try to do antialiased rendering"
        return self._antialiased

    def get_capstyle(self):
        """
        Return the capstyle as a string in ('butt', 'round', 'projecting')
        """
        return self._capstyle

    def get_clip_rectangle(self):
        """
        Return the clip rectangle as a :class:`~matplotlib.transforms.Bbox` instance
        """
        return self._cliprect

    def get_clip_path(self):
        """
        Return the clip path in the form (path, transform), where path
        is a :class:`~matplotlib.path.Path` instance, and transform is
        an affine transform to apply to the path before clipping.
        """
        if self._clippath is not None:
            return self._clippath.get_transformed_path_and_affine()
        return None, None

    def get_dashes(self):
        """
        Return the dash information as an offset dashlist tuple.

        The dash list is a even size list that gives the ink on, ink
        off in pixels.  See p107 of to PostScript `BLUEBOOK
        <http://www-cdf.fnal.gov/offline/PostScript/BLUEBOOK.PDF>`_
        for more info.

        Default value is None
        """
        return self._dashes

    def get_joinstyle(self):
        """
        Return the line join style as one of ('miter', 'round', 'bevel')
        """
        return self._joinstyle

    def get_linestyle(self, style):
        """
        Return the linestyle: one of ('solid', 'dashed', 'dashdot',
        'dotted').  (*style* is unused; kept for interface compatibility.)
        """
        return self._linestyle

    def get_linewidth(self):
        """
        Return the line width in points as a scalar
        """
        return self._linewidth

    def get_rgb(self):
        """
        returns a tuple of three floats from 0-1.  color can be a
        matlab format string, a html hex color string, or a rgb tuple
        """
        return self._rgb

    def get_url(self):
        """
        returns a url if one is set, None otherwise
        """
        return self._url

    def get_snap(self):
        """
        returns the snap setting which may be:

          * True: snap vertices to the nearest pixel center
          * False: leave vertices as-is
          * None: (auto) If the path contains only rectilinear line
            segments, round to the nearest pixel center
        """
        return self._snap

    def set_alpha(self, alpha):
        """
        Set the alpha value used for blending - not supported on
        all backends
        """
        self._alpha = alpha

    def set_antialiased(self, b):
        """
        True if object should be drawn with antialiased rendering
        """
        # use 0, 1 to make life easier on extension code trying to read the gc
        if b: self._antialiased = 1
        else: self._antialiased = 0

    def set_capstyle(self, cs):
        """
        Set the capstyle as a string in ('butt', 'round', 'projecting')
        """
        if cs in ('butt', 'round', 'projecting'):
            self._capstyle = cs
        else:
            raise ValueError('Unrecognized cap style.  Found %s' % cs)

    def set_clip_rectangle(self, rectangle):
        """
        Set the clip rectangle with sequence (left, bottom, width, height)
        """
        self._cliprect = rectangle

    def set_clip_path(self, path):
        """
        Set the clip path and transformation.  Path should be a
        :class:`~matplotlib.transforms.TransformedPath` instance.
        """
        assert path is None or isinstance(path, transforms.TransformedPath)
        self._clippath = path

    def set_dashes(self, dash_offset, dash_list):
        """
        Set the dash style for the gc.

        *dash_offset*
            is the offset (usually 0).

        *dash_list*
            specifies the on-off sequence as points.  ``(None, None)``
            specifies a solid line
        """
        self._dashes = dash_offset, dash_list

    def set_foreground(self, fg, isRGB=False):
        """
        Set the foreground color.  fg can be a matlab format string, a
        html hex color string, an rgb unit tuple, or a float between 0
        and 1.  In the latter case, grayscale is used.

        The :class:`GraphicsContextBase` converts colors to rgb
        internally.  If you know the color is rgb already, you can set
        ``isRGB=True`` to avoid the performace hit of the conversion
        """
        if isRGB:
            self._rgb = fg
        else:
            self._rgb = colors.colorConverter.to_rgba(fg)

    def set_graylevel(self, frac):
        """
        Set the foreground color to be a gray level with *frac*
        """
        self._rgb = (frac, frac, frac)

    def set_joinstyle(self, js):
        """
        Set the join style to be one of ('miter', 'round', 'bevel')
        """
        if js in ('miter', 'round', 'bevel'):
            self._joinstyle = js
        else:
            raise ValueError('Unrecognized join style.  Found %s' % js)

    def set_linewidth(self, w):
        """
        Set the linewidth in points
        """
        self._linewidth = w

    def set_linestyle(self, style):
        """
        Set the linestyle to be one of ('solid', 'dashed', 'dashdot',
        'dotted').
        """
        try:
            offset, dashes = self.dashd[style]
        except KeyError:
            # Only an unknown style name is a user error; a bare except
            # here would also have masked unrelated bugs.
            raise ValueError('Unrecognized linestyle: %s' % style)
        self._linestyle = style
        self.set_dashes(offset, dashes)

    def set_url(self, url):
        """
        Sets the url for links in compatible backends
        """
        self._url = url

    def set_snap(self, snap):
        """
        Sets the snap setting which may be:

          * True: snap vertices to the nearest pixel center
          * False: leave vertices as-is
          * None: (auto) If the path contains only rectilinear line
            segments, round to the nearest pixel center
        """
        self._snap = snap

    def set_hatch(self, hatch):
        """
        Sets the hatch style for filling
        """
        self._hatch = hatch

    def get_hatch(self):
        """
        Gets the current hatch style
        """
        return self._hatch
class Event:
    """
    Base class for matplotlib events.  Additional attributes are
    attached per event type (see :meth:`FigureCanvasBase.mpl_connect`).

    Attributes defined here (with defaults):

        *name*
            the event name
        *canvas*
            the FigureCanvas instance generating the event
        *guiEvent*
            the native GUI event that triggered the matplotlib event
    """
    def __init__(self, name, canvas, guiEvent=None):
        self.name = name
        self.canvas = canvas
        self.guiEvent = guiEvent
class IdleEvent(Event):
    """
    Triggered by the GUI backend when it is idle; useful for passive
    animation.
    """
    pass
class DrawEvent(Event):
    """
    Triggered by a draw operation on the canvas.

    In addition to the :class:`Event` attributes:

        *renderer*
            the :class:`RendererBase` instance used for the draw
    """
    def __init__(self, name, canvas, renderer):
        Event.__init__(self, name, canvas)
        self.renderer = renderer
class ResizeEvent(Event):
    """
    Triggered by a canvas resize.

    In addition to the :class:`Event` attributes:

        *width*
            canvas width in pixels
        *height*
            canvas height in pixels
    """
    def __init__(self, name, canvas):
        Event.__init__(self, name, canvas)
        self.width, self.height = canvas.get_width_height()
class LocationEvent(Event):
    """
    An event that carries a screen location.

    In addition to the :class:`Event` attributes, the following
    attributes are defined (defaults shown as class attributes below):

        *x*, *y*
            position in pixels from the left / bottom of the canvas
        *inaxes*
            the :class:`~matplotlib.axes.Axes` instance the pointer is
            over, or None
        *xdata*, *ydata*
            pointer location in data coords (None when not over axes)
    """
    x = None       # x position - pixels from left of canvas
    y = None       # y position - pixels from bottom of canvas
    inaxes = None  # the Axes instance if mouse is over axes
    xdata = None   # x coord of mouse in data coords
    ydata = None   # y coord of mouse in data coords

    # the last event that was triggered before this one; used to detect
    # figure/axes enter and leave transitions
    lastevent = None

    def __init__(self, name, canvas, x, y, guiEvent=None):
        """
        *x*, *y* in figure coords, 0,0 = bottom, left
        """
        Event.__init__(self, name, canvas, guiEvent=guiEvent)
        self.x = x
        self.y = y

        if x is None or y is None:
            # cannot check if event was in axes if no x,y info
            self.inaxes = None
            self._update_enter_leave()
            return

        # Find all axes containing the mouse
        axes_list = [a for a in self.canvas.figure.get_axes()
                     if a.in_axes(self)]

        if len(axes_list) == 0:  # None found
            self.inaxes = None
            self._update_enter_leave()
            return
        elif len(axes_list) > 1:  # Overlap, get the highest zorder
            # Key-based sort replaces the Python-2-only cmp comparator;
            # it orders identically and, unlike tuple comparison, never
            # tries to compare the Axes objects themselves on ties.
            axes_list.sort(key=lambda ax: ax.zorder)
            self.inaxes = axes_list[-1]  # Use the highest zorder
        else:  # Just found one hit
            self.inaxes = axes_list[0]

        try:
            xdata, ydata = self.inaxes.transData.inverted().transform_point((x, y))
        except ValueError:
            self.xdata = None
            self.ydata = None
        else:
            self.xdata = xdata
            self.ydata = ydata

        self._update_enter_leave()

    def _update_enter_leave(self):
        'process the figure/axes enter leave events'
        if LocationEvent.lastevent is not None:
            last = LocationEvent.lastevent
            if last.inaxes != self.inaxes:
                # process axes enter/leave events
                if last.inaxes is not None:
                    last.canvas.callbacks.process('axes_leave_event', last)
                if self.inaxes is not None:
                    self.canvas.callbacks.process('axes_enter_event', self)
        else:
            # process a figure enter event
            if self.inaxes is not None:
                self.canvas.callbacks.process('axes_enter_event', self)

        LocationEvent.lastevent = self
class MouseEvent(LocationEvent):
    """
    A mouse event ('button_press_event', 'button_release_event',
    'scroll_event', 'motion_notify_event').

    In addition to the :class:`Event` and :class:`LocationEvent`
    attributes:

        *button*
            button pressed: None, 1, 2, 3, 'up' or 'down' ('up' and
            'down' are used for scroll events)
        *key*
            key pressed: None, chr(range(255), 'shift', 'win', or 'control'
        *step*
            number of scroll steps (positive for 'up', negative for 'down')

    Example usage::

        def on_press(event):
            print 'you pressed', event.button, event.xdata, event.ydata

        cid = fig.canvas.mpl_connect('button_press_event', on_press)
    """
    x = None       # x position - pixels from left of canvas
    y = None       # y position - pixels from bottom of canvas
    button = None  # button pressed: None, 1, 2, 3
    inaxes = None  # the Axes instance if mouse is over axes
    xdata = None   # x coord of mouse in data coords
    ydata = None   # y coord of mouse in data coords
    step = None    # scroll steps for scroll events

    def __init__(self, name, canvas, x, y, button=None, key=None,
                 step=0, guiEvent=None):
        """
        x, y in figure coords, 0,0 = bottom, left
        button pressed None, 1, 2, 3, 'up', 'down'
        """
        LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
        self.button = button
        self.key = key
        self.step = step
class PickEvent(Event):
    """
    Fired when the user picks a location on the canvas sufficiently
    close to an artist.

    Attrs: all the :class:`Event` attributes plus

        *mouseevent*
            the :class:`MouseEvent` that generated the pick
        *artist*
            the :class:`~matplotlib.artist.Artist` picked

    plus extra class-dependent attributes -- e.g. a
    :class:`~matplotlib.lines.Line2D` pick may define different extra
    attributes than a :class:`~matplotlib.collections.PatchCollection`
    pick event.

    Example usage::

        line, = ax.plot(rand(100), 'o', picker=5)  # 5 points tolerance

        def on_pick(event):
            thisline = event.artist
            xdata, ydata = thisline.get_data()
            ind = event.ind
            print 'on pick line:', zip(xdata[ind], ydata[ind])

        cid = fig.canvas.mpl_connect('pick_event', on_pick)
    """
    def __init__(self, name, canvas, mouseevent, artist, guiEvent=None, **kwargs):
        Event.__init__(self, name, canvas, guiEvent)
        self.mouseevent = mouseevent
        self.artist = artist
        self.__dict__.update(kwargs)
class KeyEvent(LocationEvent):
    """
    A key event (key press, key release).

    Attach additional attributes as defined in
    :meth:`FigureCanvasBase.mpl_connect`.

    In addition to the :class:`Event` and :class:`LocationEvent`
    attributes:

        *key*
            the key pressed: None, chr(range(255), shift, win, or control

    This interface may change slightly when better support for modifier
    keys is included.

    Example usage::

        def on_key(event):
            print 'you pressed', event.key, event.xdata, event.ydata

        cid = fig.canvas.mpl_connect('key_press_event', on_key)
    """
    def __init__(self, name, canvas, key, x=0, y=0, guiEvent=None):
        LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
        self.key = key
class FigureCanvasBase:
    """
    The canvas the figure renders into.

    Public attributes

        *figure*
            A :class:`matplotlib.figure.Figure` instance
    """
    # Names of every event that can be connected to via mpl_connect();
    # used to initialize the CallbackRegistry in __init__.
    events = [
        'resize_event',
        'draw_event',
        'key_press_event',
        'key_release_event',
        'button_press_event',
        'button_release_event',
        'scroll_event',
        'motion_notify_event',
        'pick_event',
        'idle_event',
        'figure_enter_event',
        'figure_leave_event',
        'axes_enter_event',
        'axes_leave_event'
        ]
def __init__(self, figure):
    """
    Attach this canvas to *figure* and wire up the default pick
    handlers for button-press and scroll events.
    """
    figure.set_canvas(self)
    self.figure = figure
    # a dictionary from event name to a dictionary that maps cid->func
    self.callbacks = cbook.CallbackRegistry(self.events)
    self.widgetlock = widgets.LockDraw()
    self._button = None  # the last mouse button pressed
    self._key = None     # the last key pressed
    self._lastx, self._lasty = None, None
    # Route button-press and scroll events through pick() so that
    # artists with pickers can fire pick events.
    self.button_pick_id = self.mpl_connect('button_press_event', self.pick)
    self.scroll_pick_id = self.mpl_connect('scroll_event', self.pick)
    # The dead ``if False:`` debugging block that connected onHilite /
    # onRemove has been removed; those handlers remain available for
    # manual mpl_connect() wiring.
def onRemove(self, ev):
    """
    Mouse event processor which removes the top artist
    under the cursor.  Connect this to the 'mouse_press_event'
    using::

        canvas.mpl_connect('mouse_press_event', canvas.onRemove)
    """
    def sort_artists(artists):
        # Sort by zorder only; the key-based sort is stable, so artists
        # returned by get_children() keep their z order on ties, and we
        # never compare the artist objects themselves (the old
        # (zorder, artist) tuple sort raised TypeError on zorder ties
        # under Python 3).
        return sorted(artists, key=lambda h: h.zorder)

    # Find the top artist under the cursor
    under = sort_artists(self.figure.hitlist(ev))
    h = None
    if under:
        h = under[-1]

    # Try deleting that artist, or its parent if you
    # can't delete the artist
    while h:
        print("Removing", h)
        if h.remove():
            self.draw_idle()
            break
        parent = None
        for p in under:
            if h in p.get_children():
                parent = p
                break
        h = parent
def onHilite(self, ev):
    """
    Mouse event processor which highlights the artists
    under the cursor.  Connect this to the 'motion_notify_event'
    using::

        canvas.mpl_connect('motion_notify_event', canvas.onHilite)
    """
    # _active maps each currently highlighted artist to its captured
    # original color(s), so it can be restored on leave.
    if not hasattr(self, '_active'):
        self._active = dict()

    under = self.figure.hitlist(ev)
    enter = [a for a in under if a not in self._active]
    leave = [a for a in self._active if a not in under]
    print("within:", " ".join([str(x) for x in under]))

    # On leave restore the captured colour
    for a in leave:
        if hasattr(a, 'get_color'):
            a.set_color(self._active[a])
        elif hasattr(a, 'get_edgecolor'):
            a.set_edgecolor(self._active[a][0])
            a.set_facecolor(self._active[a][1])
        del self._active[a]

    # On enter, capture the color and repaint the artist
    # with the highlight colour.  Capturing colour has to
    # be done first in case the parent recolouring affects
    # the child.
    for a in enter:
        if hasattr(a, 'get_color'):
            self._active[a] = a.get_color()
        elif hasattr(a, 'get_edgecolor'):
            self._active[a] = (a.get_edgecolor(), a.get_facecolor())
        else:
            self._active[a] = None

    for a in enter:
        if hasattr(a, 'get_color'):
            a.set_color('red')
        elif hasattr(a, 'get_edgecolor'):
            a.set_edgecolor('red')
            a.set_facecolor('lightblue')
        else:
            self._active[a] = None

    self.draw_idle()
def pick(self, mouseevent):
    """
    Forward *mouseevent* to the figure's pick machinery, unless a
    widget currently holds the draw lock.
    """
    if not self.widgetlock.locked():
        self.figure.pick(mouseevent)
def blit(self, bbox=None):
    """Blit the canvas in *bbox* (default: entire canvas); base no-op."""
    pass
def resize(self, w, h):
    """Set the canvas size in pixels; base no-op for GUI-less backends."""
    pass
def draw_event(self, renderer):
    """Notify all 'draw_event' callbacks with a :class:`DrawEvent`."""
    event = DrawEvent('draw_event', self, renderer)
    self.callbacks.process('draw_event', event)
def resize_event(self):
    """Notify all 'resize_event' callbacks with a :class:`ResizeEvent`."""
    event = ResizeEvent('resize_event', self)
    self.callbacks.process('resize_event', event)
def key_press_event(self, key, guiEvent=None):
    """
    Record *key* as currently held and notify all 'key_press_event'
    callbacks with a :class:`KeyEvent`.
    """
    self._key = key
    event = KeyEvent('key_press_event', self, key,
                     self._lastx, self._lasty, guiEvent=guiEvent)
    self.callbacks.process('key_press_event', event)
def key_release_event(self, key, guiEvent=None):
    """
    Notify all 'key_release_event' callbacks with a :class:`KeyEvent`,
    then clear the held-key state.
    """
    event = KeyEvent('key_release_event', self, key,
                     self._lastx, self._lasty, guiEvent=guiEvent)
    self.callbacks.process('key_release_event', event)
    self._key = None
def pick_event(self, mouseevent, artist, **kwargs):
    """
    Called by picked artists; fires a :class:`PickEvent` to all
    registered 'pick_event' listeners.
    """
    event = PickEvent('pick_event', self, mouseevent, artist, **kwargs)
    self.callbacks.process('pick_event', event)
def scroll_event(self, x, y, step, guiEvent=None):
    """
    Backend derived classes should call this on any scroll wheel event.
    *x*, *y* are the canvas coords (0,0 is lower left); button and key
    are as defined in :class:`MouseEvent`.  Fires a :class:`MouseEvent`
    to all 'scroll_event' callbacks.
    """
    # A non-negative step counts as scrolling 'up'.
    self._button = 'up' if step >= 0 else 'down'
    mouseevent = MouseEvent('scroll_event', self, x, y, self._button,
                            self._key, step=step, guiEvent=guiEvent)
    self.callbacks.process('scroll_event', mouseevent)
def button_press_event(self, x, y, button, guiEvent=None):
    """
    Backend derived classes should call this on any mouse button press.
    *x*, *y* are the canvas coords (0,0 is lower left); button and key
    are as defined in :class:`MouseEvent`.  Fires a :class:`MouseEvent`
    to all 'button_press_event' callbacks.
    """
    self._button = button
    mouseevent = MouseEvent('button_press_event', self, x, y, button,
                            self._key, guiEvent=guiEvent)
    self.callbacks.process('button_press_event', mouseevent)
def button_release_event(self, x, y, button, guiEvent=None):
    """
    Backend derived classes should call this on any mouse button
    release, then the held-button state is cleared.

    *x*
        the canvas coordinates where 0=left
    *y*
        the canvas coordinates where 0=bottom
    *guiEvent*
        the native UI event that generated the mpl event

    Fires a :class:`MouseEvent` to all 'button_release_event' callbacks.
    """
    event = MouseEvent('button_release_event', self, x, y, button,
                       self._key, guiEvent=guiEvent)
    self.callbacks.process('button_release_event', event)
    self._button = None
def motion_notify_event(self, x, y, guiEvent=None):
    """
    Backend derived classes should call this on any motion-notify
    event; the position is remembered for subsequent key events.

    *x*
        the canvas coordinates where 0=left
    *y*
        the canvas coordinates where 0=bottom
    *guiEvent*
        the native UI event that generated the mpl event

    Fires a :class:`MouseEvent` to all 'motion_notify_event' callbacks.
    """
    self._lastx, self._lasty = x, y
    event = MouseEvent('motion_notify_event', self, x, y,
                       self._button, self._key, guiEvent=guiEvent)
    self.callbacks.process('motion_notify_event', event)
def leave_notify_event(self, guiEvent=None):
    """
    Backend derived classes should call this when the pointer leaves
    the canvas; notifies 'figure_leave_event' callbacks with the last
    known location event and resets it.

    *guiEvent*
        the native UI event that generated the mpl event
    """
    self.callbacks.process('figure_leave_event', LocationEvent.lastevent)
    LocationEvent.lastevent = None
def enter_notify_event(self, guiEvent=None):
    """
    Backend derived classes should call this when the pointer enters
    the canvas; fires an :class:`Event` to 'figure_enter_event'
    callbacks.

    *guiEvent*
        the native UI event that generated the mpl event
    """
    event = Event('figure_enter_event', self, guiEvent)
    self.callbacks.process('figure_enter_event', event)
def idle_event(self, guiEvent=None):
    """Call when the GUI is idle; fires an :class:`IdleEvent`."""
    event = IdleEvent('idle_event', self, guiEvent=guiEvent)
    self.callbacks.process('idle_event', event)
def draw(self, *args, **kwargs):
    """Render the :class:`~matplotlib.figure.Figure`; base no-op."""
    pass
def draw_idle(self, *args, **kwargs):
    """
    :meth:`draw` only if idle; the base implementation draws
    immediately, but backends can override to defer.
    """
    self.draw(*args, **kwargs)
def draw_cursor(self, event):
    """
    Draw a cursor in event.axes if inaxes is not None; backends should
    use native GUI drawing for efficiency where possible.  Base no-op.
    """
    pass
def get_width_height(self):
    """
    Return the figure width and height in points or pixels (depending
    on the backend), truncated to integers.
    """
    bbox = self.figure.bbox
    return int(bbox.width), int(bbox.height)
# Mapping of supported output file extension -> human-readable format
# name; print_figure() validates requested formats against this dict.
filetypes = {
    'emf': 'Enhanced Metafile',
    'eps': 'Encapsulated Postscript',
    'pdf': 'Portable Document Format',
    'png': 'Portable Network Graphics',
    'ps' : 'Postscript',
    'raw': 'Raw RGBA bitmap',
    'rgba': 'Raw RGBA bitmap',
    'svg': 'Scalable Vector Graphics',
    'svgz': 'Scalable Vector Graphics'
    }

# All of these print_* functions do a lazy import because
#   a) otherwise we'd have cyclical imports, since all of these
#      classes inherit from FigureCanvasBase
#   b) so we don't import a bunch of stuff the user may never use
def print_emf(self, *args, **kwargs):
    """Render the figure via the EMF backend (lazy import, see above)."""
    from backends.backend_emf import FigureCanvasEMF  # lazy import
    return self.switch_backends(FigureCanvasEMF).print_emf(*args, **kwargs)
def print_eps(self, *args, **kwargs):
    """Render the figure as EPS via the PS backend (lazy import)."""
    from backends.backend_ps import FigureCanvasPS  # lazy import
    return self.switch_backends(FigureCanvasPS).print_eps(*args, **kwargs)
def print_pdf(self, *args, **kwargs):
    """Render the figure as PDF via the PDF backend (lazy import)."""
    from backends.backend_pdf import FigureCanvasPdf  # lazy import
    return self.switch_backends(FigureCanvasPdf).print_pdf(*args, **kwargs)
def print_png(self, *args, **kwargs):
    """Render the figure as PNG via the Agg backend (lazy import)."""
    from backends.backend_agg import FigureCanvasAgg  # lazy import
    return self.switch_backends(FigureCanvasAgg).print_png(*args, **kwargs)
def print_ps(self, *args, **kwargs):
    """Render the figure as PostScript via the PS backend (lazy import)."""
    from backends.backend_ps import FigureCanvasPS  # lazy import
    return self.switch_backends(FigureCanvasPS).print_ps(*args, **kwargs)
def print_raw(self, *args, **kwargs):
    """Render the figure as a raw RGBA bitmap via the Agg backend (lazy import)."""
    from backends.backend_agg import FigureCanvasAgg  # lazy import
    return self.switch_backends(FigureCanvasAgg).print_raw(*args, **kwargs)
# 'bmp' and 'rgb' are historical aliases for the raw RGBA writer.
print_bmp = print_rgb = print_raw
def print_svg(self, *args, **kwargs):
    """Render the figure as SVG via the SVG backend (lazy import)."""
    from backends.backend_svg import FigureCanvasSVG  # lazy import
    return self.switch_backends(FigureCanvasSVG).print_svg(*args, **kwargs)
def print_svgz(self, *args, **kwargs):
    """Render the figure as gzipped SVG via the SVG backend (lazy import)."""
    from backends.backend_svg import FigureCanvasSVG  # lazy import
    return self.switch_backends(FigureCanvasSVG).print_svgz(*args, **kwargs)
def get_supported_filetypes(self):
    """Return the dict mapping file extension -> format description."""
    return self.filetypes
def get_supported_filetypes_grouped(self):
    """
    Return a dict mapping each format description to the sorted list of
    file extensions that produce it.
    """
    groupings = {}
    for ext, name in self.filetypes.items():
        groupings.setdefault(name, []).append(ext)
    for exts in groupings.values():
        exts.sort()
    return groupings
def print_figure(self, filename, dpi=None, facecolor='w', edgecolor='w',
                 orientation='portrait', format=None, **kwargs):
    """
    Render the figure to hardcopy.  Set the figure patch face and edge
    colors.  This is useful because some of the GUIs have a gray figure
    face color background and you'll probably want to override this on
    hardcopy.

    Arguments are:

    *filename*
        can also be a file object on image backends

    *orientation*
        'landscape' | 'portrait' (not supported on all backends;
        currently only applies to PostScript printing)

    *dpi*
        the dots per inch to save the figure in; if None, use savefig.dpi

    *facecolor*
        the facecolor of the figure

    *edgecolor*
        the edgecolor of the figure

    *format*
        when set, forcibly set the file format to save to
    """
    if format is None:
        # Infer the format from the filename extension; fall back to
        # the backend's default and append that extension.
        if cbook.is_string_like(filename):
            format = os.path.splitext(filename)[1][1:]
        if format is None or format == '':
            format = self.get_default_filetype()
            if cbook.is_string_like(filename):
                filename = filename.rstrip('.') + '.' + format

    format = format.lower()
    method_name = 'print_%s' % format
    if (format not in self.filetypes or
            not hasattr(self, method_name)):
        # sorted() works on both Python 2 lists and Python 3 dict
        # views; the old keys()/.sort() pair breaks on dict views.
        formats = sorted(self.filetypes)
        raise ValueError(
            'Format "%s" is not supported.\n'
            'Supported formats: '
            '%s.' % (format, ', '.join(formats)))

    if dpi is None:
        dpi = rcParams['savefig.dpi']

    # Temporarily apply the requested dpi and colors, restoring the
    # originals even if the backend's print_* method raises.
    origDPI = self.figure.dpi
    origfacecolor = self.figure.get_facecolor()
    origedgecolor = self.figure.get_edgecolor()

    self.figure.dpi = dpi
    self.figure.set_facecolor(facecolor)
    self.figure.set_edgecolor(edgecolor)

    try:
        result = getattr(self, method_name)(
            filename,
            dpi=dpi,
            facecolor=facecolor,
            edgecolor=edgecolor,
            orientation=orientation,
            **kwargs)
    finally:
        self.figure.dpi = origDPI
        self.figure.set_facecolor(origfacecolor)
        self.figure.set_edgecolor(origedgecolor)
        self.figure.set_canvas(self)
        #self.figure.canvas.draw() ## seems superfluous
    return result
def get_default_filetype(self):
    """Return the default output format extension; backends must override."""
    raise NotImplementedError
def set_window_title(self, title):
    """
    Set the title text of the window containing the figure.  Has no
    effect when there is no window (e.g. a PS backend).
    """
    if hasattr(self, "manager"):
        self.manager.set_window_title(title)
def switch_backends(self, FigureCanvasClass):
    """
    Instantiate *FigureCanvasClass* on this canvas's figure.

    Used for backend switching, e.g. to build a FigureCanvasPS from a
    FigureCanvasGTK.  No deep copy is made: the figure is shared, so
    changes through either canvas (figure size, line props, ...) are
    visible through both.
    """
    return FigureCanvasClass(self.figure)
def mpl_connect(self, s, func):
    """
    Connect event with string *s* to *func*.  The signature of *func*
    is::

      def func(event)

    where event is a :class:`matplotlib.backend_bases.Event`.  The
    recognized event names are:

    - 'button_press_event'
    - 'button_release_event'
    - 'draw_event'
    - 'key_press_event'
    - 'key_release_event'
    - 'motion_notify_event'
    - 'pick_event'
    - 'resize_event'
    - 'scroll_event'

    For location events (button and key press/release), if the mouse is
    over the axes, ``event.inaxes`` is set to the
    :class:`~matplotlib.axes.Axes` under the pointer and
    ``event.xdata`` / ``event.ydata`` give the mouse location in data
    coords.  See :class:`~matplotlib.backend_bases.KeyEvent` and
    :class:`~matplotlib.backend_bases.MouseEvent` for more info.

    Returns a connection id usable with
    :meth:`~matplotlib.backend_bases.Event.mpl_disconnect`.

    Example usage::

        def on_press(event):
            print 'you pressed', event.button, event.xdata, event.ydata

        cid = canvas.mpl_connect('button_press_event', on_press)
    """
    return self.callbacks.connect(s, func)
    def mpl_disconnect(self, cid):
        """
        Disconnect the callback with connection id *cid*, as returned by
        :meth:`mpl_connect`.

        Example usage::

            cid = canvas.mpl_connect('button_press_event', on_press)
            #...later
            canvas.mpl_disconnect(cid)
        """
        # Forward to the callback registry, which owns the cid mapping.
        return self.callbacks.disconnect(cid)
    def flush_events(self):
        """
        Flush the GUI events for the figure. Implemented only for
        backends with GUIs.

        Raises NotImplementedError in this base class; GUI backends
        must override it.
        """
        raise NotImplementedError
    def start_event_loop(self,timeout):
        """
        Start an event loop.  This is used to start a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.  This should not be
        confused with the main GUI event loop, which is always running
        and has nothing to do with this.

        This is implemented only for backends with GUIs; this base
        implementation raises NotImplementedError.  See
        start_event_loop_default for a generic fallback.
        """
        raise NotImplementedError
    def stop_event_loop(self):
        """
        Stop an event loop.  This is used to stop a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.

        This is implemented only for backends with GUIs; this base
        implementation raises NotImplementedError.
        """
        raise NotImplementedError
def start_event_loop_default(self,timeout=0):
"""
Start an event loop. This is used to start a blocking event
loop so that interactive functions, such as ginput and
waitforbuttonpress, can wait for events. This should not be
confused with the main GUI event loop, which is always running
and has nothing to do with this.
This function provides default event loop functionality based
on time.sleep that is meant to be used until event loop
functions for each of the GUI backends can be written. As
such, it throws a deprecated warning.
Call signature::
start_event_loop_default(self,timeout=0)
This call blocks until a callback function triggers
stop_event_loop() or *timeout* is reached. If *timeout* is
<=0, never timeout.
"""
str = "Using default event loop until function specific"
str += " to this GUI is implemented"
warnings.warn(str,DeprecationWarning)
if timeout <= 0: timeout = np.inf
timestep = 0.01
counter = 0
self._looping = True
while self._looping and counter*timestep < timeout:
self.flush_events()
time.sleep(timestep)
counter += 1
    def stop_event_loop_default(self):
        """
        Stop an event loop.  This is used to stop a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.

        Call signature::

            stop_event_loop_default(self)
        """
        # start_event_loop_default polls this flag once per timestep.
        self._looping = False
class FigureManagerBase:
    """
    Helper class for matlab mode, wraps everything up into a neat bundle

    Public attributes:

    *canvas*
        A :class:`FigureCanvasBase` instance

    *num*
        The figure number
    """
    def __init__(self, canvas, num):
        self.canvas = canvas
        canvas.manager = self # store a pointer to parent
        self.num = num
        # Route key presses to the default toolbar/axes bindings below.
        self.canvas.mpl_connect('key_press_event', self.key_press)
    def destroy(self):
        """Destroy the window and its resources -- overridden by GUI backends."""
        pass
    def full_screen_toggle (self):
        """Toggle full-screen display -- overridden by GUI backends."""
        pass
    def resize(self, w, h):
        'For gui backends: resize window in pixels'
        pass
    def key_press(self, event):
        """Default key bindings: toolbar navigation plus per-axes toggles."""
        # these bindings happen whether you are over an axes or not
        #if event.key == 'q':
        #    self.destroy() # how cruel to have to destroy oneself!
        #    return
        if event.key == 'f':
            self.full_screen_toggle()
        # *h*ome or *r*eset mnemonic
        elif event.key == 'h' or event.key == 'r' or event.key == "home":
            self.canvas.toolbar.home()
        # c and v to enable left handed quick navigation
        elif event.key == 'left' or event.key == 'c' or event.key == 'backspace':
            self.canvas.toolbar.back()
        elif event.key == 'right' or event.key == 'v':
            self.canvas.toolbar.forward()
        # *p*an mnemonic
        elif event.key == 'p':
            self.canvas.toolbar.pan()
        # z*o*om mnemonic
        elif event.key == 'o':
            self.canvas.toolbar.zoom()
        elif event.key == 's':
            self.canvas.toolbar.save_figure(self.canvas.toolbar)
        if event.inaxes is None:
            return
        # the mouse has to be over an axes to trigger these
        if event.key == 'g':
            # toggle the grid of the axes under the cursor
            event.inaxes.grid()
            self.canvas.draw()
        elif event.key == 'l':
            # toggle linear/log scaling of the y axis under the cursor
            ax = event.inaxes
            scale = ax.get_yscale()
            if scale=='log':
                ax.set_yscale('linear')
                ax.figure.canvas.draw()
            elif scale=='linear':
                ax.set_yscale('log')
                ax.figure.canvas.draw()
        elif event.key is not None and (event.key.isdigit() and event.key!='0') or event.key=='a':
            # 'a' enables all axes; a digit key enables only that axes
            # (1-based) among those under the cursor
            if event.key!='a':
                n=int(event.key)-1
            for i, a in enumerate(self.canvas.figure.get_axes()):
                if event.x is not None and event.y is not None and a.in_axes(event):
                    if event.key=='a':
                        a.set_navigate(True)
                    else:
                        a.set_navigate(i==n)
    def show_popup(self, msg):
        """
        Display message in a popup -- GUI only
        """
        pass
    def set_window_title(self, title):
        """
        Set the title text of the window containing the figure. Note that
        this has no effect if there is no window (eg, a PS backend).
        """
        pass
# cursors
class Cursors:
    """Namespace holding the integer ids of the navigation cursors."""
    HAND = 0
    POINTER = 1
    SELECT_REGION = 2
    MOVE = 3
# Module-level singleton used by the toolbar code below.
cursors = Cursors()
class NavigationToolbar2:
    """
    Base class for the navigation cursor, version 2

    backends must implement a canvas that handles connections for
    'button_press_event' and 'button_release_event'.  See
    :meth:`FigureCanvasBase.mpl_connect` for more information

    They must also define

      :meth:`save_figure`
         save the current figure

      :meth:`set_cursor`
         if you want the pointer icon to change

      :meth:`_init_toolbar`
         create your toolbar widget

      :meth:`draw_rubberband` (optional)
         draw the zoom to rect "rubberband" rectangle

      :meth:`press` (optional)
         whenever a mouse button is pressed, you'll be notified with
         the event

      :meth:`release` (optional)
         whenever a mouse button is released, you'll be notified with
         the event

      :meth:`dynamic_update` (optional)
         dynamically update the window while navigating

      :meth:`set_message` (optional)
         display message

      :meth:`set_history_buttons` (optional)
         you can change the history back / forward buttons to
         indicate disabled / enabled state.

    That's it, we'll do the rest!
    """
    def __init__(self, canvas):
        self.canvas = canvas
        canvas.toolbar = self
        # a dict from axes index to a list of view limits
        self._views = cbook.Stack()
        self._positions = cbook.Stack() # stack of subplot positions
        self._xypress = None # the location and axis info at the time of the press
        self._idPress = None
        self._idRelease = None
        self._active = None
        self._lastCursor = None
        self._init_toolbar()
        self._idDrag=self.canvas.mpl_connect('motion_notify_event', self.mouse_move)
        self._button_pressed = None # determined by the button pressed at start
        self.mode = '' # a mode string for the status bar
        self.set_history_buttons()
    def set_message(self, s):
        'display a message on toolbar or in status bar'
        pass
    def back(self, *args):
        'move back up the view lim stack'
        self._views.back()
        self._positions.back()
        self.set_history_buttons()
        self._update_view()
    def dynamic_update(self):
        # optional hook: redraw while navigating (overridden by backends)
        pass
    def draw_rubberband(self, event, x0, y0, x1, y1):
        'draw a rectangle rubberband to indicate zoom limits'
        pass
    def forward(self, *args):
        'move forward in the view lim stack'
        self._views.forward()
        self._positions.forward()
        self.set_history_buttons()
        self._update_view()
    def home(self, *args):
        'restore the original view'
        self._views.home()
        self._positions.home()
        self.set_history_buttons()
        self._update_view()
    def _init_toolbar(self):
        """
        This is where you actually build the GUI widgets (called by
        __init__).  The icons ``home.xpm``, ``back.xpm``, ``forward.xpm``,
        ``hand.xpm``, ``zoom_to_rect.xpm`` and ``filesave.xpm`` are standard
        across backends (there are ppm versions in CVS also).

        You just need to set the callbacks

        home         : self.home
        back         : self.back
        forward      : self.forward
        hand         : self.pan
        zoom_to_rect : self.zoom
        filesave     : self.save_figure

        You only need to define the last one - the others are in the base
        class implementation.
        """
        raise NotImplementedError
    def mouse_move(self, event):
        #print 'mouse_move', event.button
        if not event.inaxes or not self._active:
            if self._lastCursor != cursors.POINTER:
                self.set_cursor(cursors.POINTER)
                self._lastCursor = cursors.POINTER
        else:
            if self._active=='ZOOM':
                if self._lastCursor != cursors.SELECT_REGION:
                    self.set_cursor(cursors.SELECT_REGION)
                    self._lastCursor = cursors.SELECT_REGION
                # while dragging in zoom mode, show the rubberband from the
                # press location (stored by press_zoom) to the cursor
                if self._xypress:
                    x, y = event.x, event.y
                    lastx, lasty, a, ind, lim, trans = self._xypress[0]
                    self.draw_rubberband(event, x, y, lastx, lasty)
            elif (self._active=='PAN' and
                  self._lastCursor != cursors.MOVE):
                self.set_cursor(cursors.MOVE)
                self._lastCursor = cursors.MOVE
        if event.inaxes and event.inaxes.get_navigate():
            try: s = event.inaxes.format_coord(event.xdata, event.ydata)
            except ValueError: pass
            except OverflowError: pass
            else:
                if len(self.mode):
                    self.set_message('%s : %s' % (self.mode, s))
                else:
                    self.set_message(s)
        else: self.set_message(self.mode)
    def pan(self,*args):
        'Activate the pan/zoom tool. pan with left button, zoom with right'
        # set the pointer icon and button press funcs to the
        # appropriate callbacks
        if self._active == 'PAN':
            self._active = None
        else:
            self._active = 'PAN'
        if self._idPress is not None:
            self._idPress = self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''
        if self._idRelease is not None:
            self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''
        if self._active:
            self._idPress = self.canvas.mpl_connect(
                'button_press_event', self.press_pan)
            self._idRelease = self.canvas.mpl_connect(
                'button_release_event', self.release_pan)
            self.mode = 'pan/zoom mode'
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)
        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)
        self.set_message(self.mode)
    def press(self, event):
        'this will be called whenver a mouse button is pressed'
        pass
    def press_pan(self, event):
        'the press mouse button in pan/zoom mode callback'
        if event.button == 1:
            self._button_pressed=1
        elif event.button == 3:
            self._button_pressed=3
        else:
            self._button_pressed=None
            return
        x, y = event.x, event.y
        # push the current view to define home if stack is empty
        if self._views.empty(): self.push_current()
        self._xypress=[]
        for i, a in enumerate(self.canvas.figure.get_axes()):
            if x is not None and y is not None and a.in_axes(event) and a.get_navigate():
                a.start_pan(x, y, event.button)
                self._xypress.append((a, i))
                # temporarily reroute motion events to the pan handler
                self.canvas.mpl_disconnect(self._idDrag)
                self._idDrag=self.canvas.mpl_connect('motion_notify_event', self.drag_pan)
        self.press(event)
    def press_zoom(self, event):
        'the press mouse button in zoom to rect mode callback'
        if event.button == 1:
            self._button_pressed=1
        elif event.button == 3:
            self._button_pressed=3
        else:
            self._button_pressed=None
            return
        x, y = event.x, event.y
        # push the current view to define home if stack is empty
        if self._views.empty(): self.push_current()
        self._xypress=[]
        for i, a in enumerate(self.canvas.figure.get_axes()):
            if x is not None and y is not None and a.in_axes(event) \
                    and a.get_navigate() and a.can_zoom():
                self._xypress.append(( x, y, a, i, a.viewLim.frozen(), a.transData.frozen()))
        self.press(event)
    def push_current(self):
        'push the current view limits and position onto the stack'
        lims = []; pos = []
        for a in self.canvas.figure.get_axes():
            xmin, xmax = a.get_xlim()
            ymin, ymax = a.get_ylim()
            lims.append( (xmin, xmax, ymin, ymax) )
            # Store both the original and modified positions
            pos.append( (
                    a.get_position(True).frozen(),
                    a.get_position().frozen() ) )
        self._views.push(lims)
        self._positions.push(pos)
        self.set_history_buttons()
    def release(self, event):
        'this will be called whenever mouse button is released'
        pass
    def release_pan(self, event):
        'the release mouse button callback in pan/zoom mode'
        # restore the normal motion handler installed in __init__
        self.canvas.mpl_disconnect(self._idDrag)
        self._idDrag=self.canvas.mpl_connect('motion_notify_event', self.mouse_move)
        for a, ind in self._xypress:
            a.end_pan()
        if not self._xypress: return
        self._xypress = []
        self._button_pressed=None
        self.push_current()
        self.release(event)
        self.draw()
    def drag_pan(self, event):
        'the drag callback in pan/zoom mode'
        for a, ind in self._xypress:
            #safer to use the recorded button at the press than current button:
            #multiple button can get pressed during motion...
            a.drag_pan(self._button_pressed, event.key, event.x, event.y)
        self.dynamic_update()
    def release_zoom(self, event):
        'the release mouse button callback in zoom to rect mode'
        if not self._xypress: return
        last_a = []
        for cur_xypress in self._xypress:
            x, y = event.x, event.y
            lastx, lasty, a, ind, lim, trans = cur_xypress
            # ignore singular clicks - 5 pixels is a threshold
            if abs(x-lastx)<5 or abs(y-lasty)<5:
                self._xypress = None
                self.release(event)
                self.draw()
                return
            x0, y0, x1, y1 = lim.extents
            # zoom to rect
            inverse = a.transData.inverted()
            lastx, lasty = inverse.transform_point( (lastx, lasty) )
            x, y = inverse.transform_point( (x, y) )
            Xmin,Xmax=a.get_xlim()
            Ymin,Ymax=a.get_ylim()
            # detect twinx,y axes and avoid double zooming
            twinx, twiny = False, False
            if last_a:
                for la in last_a:
                    if a.get_shared_x_axes().joined(a,la): twinx=True
                    if a.get_shared_y_axes().joined(a,la): twiny=True
            last_a.append(a)
            if twinx:
                x0, x1 = Xmin, Xmax
            else:
                # the branches below also handle inverted (decreasing) axes
                if Xmin < Xmax:
                    if x<lastx: x0, x1 = x, lastx
                    else: x0, x1 = lastx, x
                    if x0 < Xmin: x0=Xmin
                    if x1 > Xmax: x1=Xmax
                else:
                    if x>lastx: x0, x1 = x, lastx
                    else: x0, x1 = lastx, x
                    if x0 > Xmin: x0=Xmin
                    if x1 < Xmax: x1=Xmax
            if twiny:
                y0, y1 = Ymin, Ymax
            else:
                if Ymin < Ymax:
                    if y<lasty: y0, y1 = y, lasty
                    else: y0, y1 = lasty, y
                    if y0 < Ymin: y0=Ymin
                    if y1 > Ymax: y1=Ymax
                else:
                    if y>lasty: y0, y1 = y, lasty
                    else: y0, y1 = lasty, y
                    if y0 > Ymin: y0=Ymin
                    if y1 < Ymax: y1=Ymax
            if self._button_pressed == 1:
                # left button: zoom in to the selected rectangle
                a.set_xlim((x0, x1))
                a.set_ylim((y0, y1))
            elif self._button_pressed == 3:
                # right button: zoom out so the current view maps onto the
                # selected rectangle (log axes handled separately)
                if a.get_xscale()=='log':
                    alpha=np.log(Xmax/Xmin)/np.log(x1/x0)
                    rx1=pow(Xmin/x0,alpha)*Xmin
                    rx2=pow(Xmax/x0,alpha)*Xmin
                else:
                    alpha=(Xmax-Xmin)/(x1-x0)
                    rx1=alpha*(Xmin-x0)+Xmin
                    rx2=alpha*(Xmax-x0)+Xmin
                if a.get_yscale()=='log':
                    alpha=np.log(Ymax/Ymin)/np.log(y1/y0)
                    ry1=pow(Ymin/y0,alpha)*Ymin
                    ry2=pow(Ymax/y0,alpha)*Ymin
                else:
                    alpha=(Ymax-Ymin)/(y1-y0)
                    ry1=alpha*(Ymin-y0)+Ymin
                    ry2=alpha*(Ymax-y0)+Ymin
                a.set_xlim((rx1, rx2))
                a.set_ylim((ry1, ry2))
        self.draw()
        self._xypress = None
        self._button_pressed = None
        self.push_current()
        self.release(event)
    def draw(self):
        'redraw the canvases, update the locators'
        for a in self.canvas.figure.get_axes():
            xaxis = getattr(a, 'xaxis', None)
            yaxis = getattr(a, 'yaxis', None)
            locators = []
            if xaxis is not None:
                locators.append(xaxis.get_major_locator())
                locators.append(xaxis.get_minor_locator())
            if yaxis is not None:
                locators.append(yaxis.get_major_locator())
                locators.append(yaxis.get_minor_locator())
            for loc in locators:
                loc.refresh()
        self.canvas.draw()
    def _update_view(self):
        '''update the viewlim and position from the view and
        position stack for each axes
        '''
        lims = self._views()
        if lims is None: return
        pos = self._positions()
        if pos is None: return
        for i, a in enumerate(self.canvas.figure.get_axes()):
            xmin, xmax, ymin, ymax = lims[i]
            a.set_xlim((xmin, xmax))
            a.set_ylim((ymin, ymax))
            # Restore both the original and modified positions
            a.set_position( pos[i][0], 'original' )
            a.set_position( pos[i][1], 'active' )
        self.draw()
    def save_figure(self, *args):
        'save the current figure'
        raise NotImplementedError
    def set_cursor(self, cursor):
        """
        Set the current cursor to one of the :class:`Cursors`
        enums values
        """
        pass
    def update(self):
        'reset the axes stack'
        self._views.clear()
        self._positions.clear()
        self.set_history_buttons()
    def zoom(self, *args):
        'activate zoom to rect mode'
        if self._active == 'ZOOM':
            self._active = None
        else:
            self._active = 'ZOOM'
        if self._idPress is not None:
            self._idPress=self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''
        if self._idRelease is not None:
            self._idRelease=self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''
        if self._active:
            self._idPress = self.canvas.mpl_connect('button_press_event', self.press_zoom)
            self._idRelease = self.canvas.mpl_connect('button_release_event', self.release_zoom)
            self.mode = 'Zoom to rect mode'
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)
        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)
        self.set_message(self.mode)
    def set_history_buttons(self):
        'enable or disable back/forward button'
        pass
*trans* |
<|file_name|>consts.py<|end_file_name|><|fim▁begin|># coding=utf-8<|fim▁hole|>HOSTNAME = 'localhost'
DATABASE = 'r'
USERNAME = 'web'
PASSWORD = 'web'
DB_URI = 'mysql://{}:{}@{}/{}'.format(
USERNAME, PASSWORD, HOSTNAME, DATABASE)<|fim▁end|> | |
<|file_name|>IdxBtOmp.hpp<|end_file_name|><|fim▁begin|>/**
* \file
* Copyright 2014-2015 Benjamin Worpitz
*
* This file is part of alpaka.
*
* alpaka is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* alpaka is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with alpaka.
* If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <alpaka/idx/Traits.hpp> // idx::GetIdx
#include <alpaka/core/OpenMp.hpp>
#include <alpaka/core/MapIdx.hpp> // core::mapIdx
#include <boost/core/ignore_unused.hpp> // boost::ignore_unused
namespace alpaka
{
namespace idx
{
namespace bt
{
//#############################################################################
//! The OpenMP accelerator index provider.
//#############################################################################
template<
typename TDim,
typename TSize>
class IdxBtOmp
{
public:
using IdxBtBase = IdxBtOmp;
//-----------------------------------------------------------------------------
//! Constructor.
//-----------------------------------------------------------------------------
ALPAKA_FN_ACC_NO_CUDA IdxBtOmp() = default;
//-----------------------------------------------------------------------------
//! Copy constructor.
//-----------------------------------------------------------------------------
ALPAKA_FN_ACC_NO_CUDA IdxBtOmp(IdxBtOmp const &) = delete;
//-----------------------------------------------------------------------------
//! Move constructor.
//-----------------------------------------------------------------------------
ALPAKA_FN_ACC_NO_CUDA IdxBtOmp(IdxBtOmp &&) = delete;
//-----------------------------------------------------------------------------
//! Copy assignment operator.
//-----------------------------------------------------------------------------
ALPAKA_FN_ACC_NO_CUDA auto operator=(IdxBtOmp const &) -> IdxBtOmp & = delete;
//-----------------------------------------------------------------------------
//! Move assignment operator.
//-----------------------------------------------------------------------------
ALPAKA_FN_ACC_NO_CUDA auto operator=(IdxBtOmp &&) -> IdxBtOmp & = delete;
//-----------------------------------------------------------------------------
//! Destructor.
//-----------------------------------------------------------------------------
ALPAKA_FN_ACC_NO_CUDA /*virtual*/ ~IdxBtOmp() = default;
};
}
}
namespace dim
{
namespace traits
{
//#############################################################################
//! The OpenMP accelerator index dimension get trait specialization.
//#############################################################################
template<
typename TDim,
typename TSize>
struct DimType<
idx::bt::IdxBtOmp<TDim, TSize>>
{
using type = TDim;
};
}
}
namespace idx
{<|fim▁hole|> {
//#############################################################################
//! The OpenMP accelerator block thread index get trait specialization.
//#############################################################################
template<
typename TDim,
typename TSize>
struct GetIdx<
idx::bt::IdxBtOmp<TDim, TSize>,
origin::Block,
unit::Threads>
{
//-----------------------------------------------------------------------------
//! \return The index of the current thread in the block.
//-----------------------------------------------------------------------------
template<
typename TWorkDiv>
ALPAKA_FN_ACC_NO_CUDA static auto getIdx(
idx::bt::IdxBtOmp<TDim, TSize> const & idx,
TWorkDiv const & workDiv)
-> Vec<TDim, TSize>
{
boost::ignore_unused(idx);
// We assume that the thread id is positive.
assert(::omp_get_thread_num()>=0);
// \TODO: Would it be faster to precompute the index and cache it inside an array?
return core::mapIdx<TDim::value>(
Vec1<TSize>(static_cast<TSize>(::omp_get_thread_num())),
workdiv::getWorkDiv<Block, Threads>(workDiv));
}
};
}
}
namespace size
{
namespace traits
{
//#############################################################################
//! The OpenMP accelerator block thread index size type trait specialization.
//#############################################################################
template<
typename TDim,
typename TSize>
struct SizeType<
idx::bt::IdxBtOmp<TDim, TSize>>
{
using type = TSize;
};
}
}
}<|fim▁end|> | namespace traits |
<|file_name|>GenericsExample.java<|end_file_name|><|fim▁begin|>package com.ryanharter.auto.value.moshi.example;
import com.google.auto.value.AutoValue;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import java.lang.reflect.Type;
@AutoValue public abstract class GenericsExample<A, B, C> {
public abstract A a();
public abstract B b();
public abstract C c();
@AutoValue.Builder
public interface Builder<A, B, C> {
Builder<A, B, C> a(A a);
Builder<A, B, C> b(B b);
Builder<A, B, C> c(C c);
GenericsExample<A, B, C> build();<|fim▁hole|> }
public static <A, B, C> Builder<A, B, C> builder() {
return new AutoValue_GenericsExample.Builder<A, B, C>();
}
public static <A, B, C> JsonAdapter<GenericsExample<A, B, C>> jsonAdapter(Moshi moshi, Type[] types) {
return new AutoValue_GenericsExample.MoshiJsonAdapter(moshi, types);
}
}<|fim▁end|> | |
<|file_name|>services.js<|end_file_name|><|fim▁begin|>'use strict';
/* Services */
// Demonstrate how to register services
// In this case it is a simple value service.
angular.module('baApp.services', []).<|fim▁hole|><|fim▁end|> | value('version', '0.1'); |
<|file_name|>colorpicker.js<|end_file_name|><|fim▁begin|>/*
* JCE Editor 2.2.4
* @package JCE
* @url http://www.joomlacontenteditor.net
* @copyright Copyright (C) 2006 - 2012 Ryan Demmer. All rights reserved
* @license GNU/GPL Version 2 or later - http://www.gnu.org/licenses/gpl-2.0.html
* @date 16 July 2012
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.<|fim▁hole|> * GNU General Public License for more details.
*
* NOTE : Javascript files have been compressed for speed and can be uncompressed using http://jsbeautifier.org/
*/
tinyMCEPopup.requireLangPack();var ColorPicker={settings:{},init:function(){var self=this,ed=tinyMCEPopup.editor,color=tinyMCEPopup.getWindowArg('input_color')||'#FFFFFF';$('#tmp_color').val(color).colorpicker($.extend(this.settings,{dialog:false,insert:function(){return ColorPicker.insert();},close:function(){return tinyMCEPopup.close();}}));$('button#insert').button({icons:{primary:'ui-icon-check'}});$('#jce').css('display','block');},insert:function(){var color=$("#colorpicker_color").val(),f=tinyMCEPopup.getWindowArg('func');tinyMCEPopup.restoreSelection();if(f)
f(color);tinyMCEPopup.close();}};tinyMCEPopup.onInit.add(ColorPicker.init,ColorPicker);<|fim▁end|> |
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
<|file_name|>Feed.cpp<|end_file_name|><|fim▁begin|>#include "Feed.h"
Feed::Feed(QObject *parent) :<|fim▁hole|>}<|fim▁end|> | QObject(parent)
{ |
<|file_name|>split_string.rs<|end_file_name|><|fim▁begin|>fn str_split<T: Into<String>>(string: T, delim: char) -> Vec<String> {
let mut words = vec![];
let mut curr = String::new();
for c in string.into().chars() {
match c == delim {
true => {
words.push(curr);
curr = String::new();
},
false => curr.push(c),
}
}
words.push(curr);
words
}
fn main() {
println!("{:?}", str_split("hello, world", ','));<|fim▁hole|><|fim▁end|> | } |
<|file_name|>valid.js<|end_file_name|><|fim▁begin|>/**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var valid = [];
var test;
test = {
'code': [
'/**',
'* Merges objects into a target object. Note that the target object is mutated.',
'*',
'* @name merge',
'* @type {Function}',
'* @param {Object} target - target object',
'* @param {...Object} source - source objects (i.e., objects to be merged into the target object)',
'* @throws {Error} must provide a target object and one or more source objects',
'* @throws {TypeError} first argument must be an object',
'* @throws {TypeError} source arguments must be objects',
'* @returns {Object} merged (target) object',
'*',
'* @example',
'* var target = {',
'* \'a\': \'beep\'',
'* };',
'* var source = {',
'* \'a\': \'boop\',',
'* \'b\': \'bap\'',
'* };',
'*',
'* var out = merge( target, source );',
'* // returns {\'a\':\'boop\', \'b\':\'bap\'}',
'*/',
'var merge = mergefcn( defaults );'
].join( '\n' )
};
valid.push( test );
test = {
'code': [
'/**',
'* Merges objects into a target object. Note that the target object is mutated.',
'*',
'* @name merge',
'* @type {Function}',
'* @param {Object} target - target object',
'* @param {...Object} source - source objects (i.e., objects to be merged into the target object)',
'* @throws {Error} must provide a target object and one or more source objects',
'* @throws {TypeError} first argument must be an object',
'* @throws {TypeError} source arguments must be objects',
'* @returns {Object} merged (target) object',
'*',
'* @example',
'* var target = {',<|fim▁hole|> '* \'a\': \'beep\'',
'* };',
'* var source = {',
'* \'a\': \'boop\',',
'* \'b\': \'bap\'',
'* };',
'*',
'* var out = merge( target, source );',
'* // returns {...}',
'*/',
'var merge = mergefcn( defaults );'
].join( '\n' )
};
valid.push( test );
test = {
'code': [
'/**',
'* Merges objects into a target object. Note that the target object is mutated.',
'*',
'* @name merge',
'* @type {Function}',
'* @param {Object} target - target object',
'* @param {...Object} source - source objects (i.e., objects to be merged into the target object)',
'* @throws {Error} must provide a target object and one or more source objects',
'* @throws {TypeError} first argument must be an object',
'* @throws {TypeError} source arguments must be objects',
'* @returns {Object} merged (target) object',
'*',
'* @example',
'* var target = {',
'* \'a\': \'beep\'',
'* };',
'* var source = {',
'* \'a\': \'boop\',',
'* \'b\': \'bap\'',
'* };',
'*',
'* var out = merge( target, source );',
'* /* returns {',
'* \'a\':\'boop\',',
'* \'b\':\'bap\'}',
'* *\\/',
'*/',
'var merge = mergefcn( defaults );'
].join( '\n' )
};
valid.push( test );
test = {
'code': [
'/**',
'* Returns a high-resolution time difference.',
'*',
'* ## Notes',
'*',
'* - Output format: `[seconds, nanoseconds]`.',
'*',
'*',
'* @param {NonNegativeIntegerArray} time - high-resolution time',
'* @throws {TypeError} must provide a nonnegative integer array',
'* @throws {RangeError} input array must have length `2`',
'* @returns {NumberArray} high resolution time difference',
'*',
'* @example',
'* var tic = require( \'@stdlib/time/tic\' );',
'*',
'* var start = tic();',
'* var delta = toc( start );',
'* // returns [<number>,<number>]',
'*/',
'function toc( time ) {',
' var now = tic();',
' var sec;',
' var ns;',
' if ( !isNonNegativeIntegerArray( time ) ) {',
' throw new TypeError( \'invalid argument. Must provide an array of nonnegative integers. Value: `\' + time + \'`.\' );',
' }',
' if ( time.length !== 2 ) {',
' throw new RangeError( \'invalid argument. Input array must have length `2`.\' );',
' }',
' sec = now[ 0 ] - time[ 0 ];',
' ns = now[ 1 ] - time[ 1 ];',
' if ( sec > 0 && ns < 0 ) {',
' sec -= 1;',
' ns += 1e9;',
' }',
' else if ( sec < 0 && ns > 0 ) {',
' sec += 1;',
' ns -= 1e9;',
' }',
' return [ sec, ns ];',
'}'
].join( '\n' )
};
valid.push( test );
test = {
'code': [
'/**',
'* Returns Anscombe\'s quartet.',
'*',
'* ## Notes',
'*',
'* - This function synchronously reads data from disk for each invocation. Such behavior is intentional and so is the avoidance of `require`. We assume that invocations are infrequent, and we want to avoid the `require` cache. This means that we allow data to be garbage collected and a user is responsible for explicitly caching data.',
'*',
'*',
'* @throws {Error} unable to read data',
'* @returns {ArrayArray} Anscombe\'s quartet',
'*',
'* @example',
'* var d = data();',
'* // returns [[[10,8.04],...],[[10,9.14],...],[[10,7.46],...],[[8,6.58],...]]',
'*/',
'function data() {',
' var d = readJSON( fpath, opts );',
' if ( d instanceof Error ) {',
' throw d;',
' }',
' return d;',
'}'
].join( '\n' )
};
valid.push( test );
// EXPORTS //
module.exports = valid;<|fim▁end|> | |
<|file_name|>blog_datastore_factory.py<|end_file_name|><|fim▁begin|>'''
Created on Apr 30, 2012
@author: h87966
'''
from unit5.blog_datastore_memory import BlogMemoryDataStore
from unit5.blog_datastore_appengine import BlogAppengineDataStore<|fim▁hole|>class BlogDataStoreFactory():
'''
classdocs
'''
storage_implementations = {'memory':BlogMemoryDataStore(),
'appengine':BlogAppengineDataStore()}
def __init__(self, storage_impl='appengine'):
'''
Constructor
'''
self.storage = self.storage_implementations[storage_impl]
def set_storage(self, blog_storage):
self.storage = blog_storage
def get_storage(self):
return self.storage<|fim▁end|> | |
<|file_name|>output.py<|end_file_name|><|fim▁begin|>import sublime, sublime_plugin
def clean_layout(layout):
row_set = set()
col_set = set()
for cell in layout["cells"]:
row_set.add(cell[1])
row_set.add(cell[3])
col_set.add(cell[0])
col_set.add(cell[2])
row_set = sorted(row_set)
col_set = sorted(col_set)
rows = layout["rows"]
cols = layout["cols"]
layout["rows"] = [row for i, row in enumerate(rows) if i in row_set]
layout["cols"] = [col for i, col in enumerate(cols) if i in col_set]
row_map = { row : i for i, row in enumerate(row_set) }
col_map = { col : i for i, col in enumerate(col_set) }
layout["cells"] = [[col_map[cell[0]], row_map[cell[1]], col_map[cell[2]], row_map[cell[3]]] for cell in layout["cells"]]
return layout
def collapse_group(group):
    """Remove *group*'s cell from the active window's layout.

    Cells that sit directly above the removed cell and within its
    horizontal span are stretched down to fill the freed space; the
    layout is then compacted via clean_layout().
    """
    # Cell edge indices: a cell is [left, top, right, bottom].
    LEFT = 0
    TOP = 1
    RIGHT = 2
    BOTTOM = 3
    window = sublime.active_window()
    layout = window.get_layout()
    cells = layout["cells"]
    new_cells = []
    group_cell = cells[group]
    # Drop the collapsed group's own cell from the working list.
    cells = cells[:group] + cells[group + 1:]
    for cell in cells:
        if cell[BOTTOM] == group_cell[TOP] and cell[LEFT] >= group_cell[LEFT] and cell[RIGHT] <= group_cell[RIGHT]:
            # Cell sits immediately above the removed cell, within its
            # horizontal span: extend it down to the removed cell's bottom.
            new_cells.append([
                cell[LEFT],
                cell[TOP],
                cell[RIGHT],
                group_cell[BOTTOM]
            ])
        elif cell != group_cell:
            # Guard against a duplicate entry equal to the removed cell.
            new_cells.append(cell)
    layout["cells"] = new_cells
    window.set_layout(clean_layout(layout))
class OutputView:
content = ""
position = 0.0
id = None
    def __init__(self, view):
        """Wrap an existing Sublime view as the shared output panel."""
        self.view = view
def __getattr__(self, name):
if self.view.id() != id:
output = OutputView.find_view()
if output:
self.view = output.view
return getattr(self.view, name)
    def clear(self):
        """Erase both the class-level content cache and the view buffer."""
        OutputView.content = ""
        self.run_command("output_view_clear")
    def append(self, text):
        """Append *text* to the content cache and to the view buffer."""
        OutputView.content += text
        self.run_command("output_view_append", { "text" : text })
<|fim▁hole|> self.append("[cmd: {}]\n".format(command))
self.append("[dir: {}]\n".format(working_dir))
else:
self.append("[Finished in {:.2f}s]\n".format(elapsed_time))
def _collapse(self, group):
window = sublime.active_window()
views = window.views_in_group(group)
if (len(views) == 0 or len(views) == 1 and
views[0].id() == self.view.id()):
collapse_group(group)
def _close(self):
window = sublime.active_window()
group, index = window.get_view_index(self.view)
window.run_command("close_by_index", {"group": group, "index": index})
self._collapse(group)
OutputView.id = None
@staticmethod
def close():
window = sublime.active_window()
for view in window.views():
if view.is_scratch() and view.name() == "Output":
OutputView(view)._close()
@staticmethod
def find_view():
window = sublime.active_window()
for view in window.views():
if view.is_scratch() and view.name() == "Output":
return OutputView(view)
return None
@staticmethod
def create():
view = OutputView.request()
view.clear()
return view
@staticmethod
def request():
window = sublime.active_window()
num_groups = window.num_groups()
if num_groups < 3:
layout = window.get_layout()
num_rows = len(layout["rows"]) - 1
num_cols = len(layout["cols"]) - 1
if len(layout["rows"]) < 3:
begin = layout["rows"][-2]
end = layout["rows"][-1]
layout["rows"] = layout["rows"][:-1] + [begin * 0.33 + end * 0.66, layout["rows"][-1]]
cells = []
new_num_rows = len(layout["rows"]) - 1
for cell in layout["cells"]:
if cell[3] == num_rows and cell[2] != num_cols:
cells.append([cell[0], cell[1], cell[2], new_num_rows])
else:
cells.append(cell)
cells.append([num_cols - 1, new_num_rows - 1, num_cols, new_num_rows])
layout["cells"] = cells
window.set_layout(layout)
num_groups = window.num_groups()
views = window.views_in_group(num_groups - 1)
output = None
for view in views:
if view.name() == "Output" and view.is_scratch():
output = view
if output == None:
active = window.active_view()
output = window.new_file()
output.settings().set("line_numbers", False)
output.settings().set("scroll_past_end", False)
output.settings().set("scroll_speed", 0.0)
output.settings().set("gutter", False)
output.settings().set("spell_check", False)
output.set_scratch(True)
output.set_name("Output")
output.run_command("output_view_append", { "text" : OutputView.content })
def update():
output.set_viewport_position((0, OutputView.position), False)
sublime.set_timeout(update, 0.0)
OutputView.id = output.id()
window.set_view_index(output, num_groups - 1, len(views))
window.focus_view(active)
return OutputView(output)
class OutputViewClearCommand(sublime_plugin.TextCommand):
    # Text command: erase the output view's entire buffer.
    def run(self, edit):
        self.view.erase(edit, sublime.Region(0, self.view.size()))
class OutputViewAppendCommand(sublime_plugin.TextCommand):
    """Text command: append *text* to the view, staying pinned to the
    bottom if the view was already scrolled to its end."""

    def run(self, edit, text):
        target = self.view
        # Decide before inserting whether the view is showing its end.
        pinned = target.visible_region().end() == target.size()
        target.insert(edit, target.size(), text)
        if pinned:
            extent = target.viewport_extent()
            end_pos = target.text_to_layout(target.size())
            target.set_viewport_position((0, end_pos[1] - extent[1]), False)
class OpenOutputCommand(sublime_plugin.WindowCommand):
    # Window command: ensure the output view exists and is shown.
    def run(self):
        OutputView.request()
class CloseOutputCommand(sublime_plugin.ApplicationCommand):
    # Application command: close every output view.
    def run(self):
        OutputView.close()
class OutputEventListener(sublime_plugin.EventListener):
def on_query_context(self, view, key, operator, operand, match_all):
print(key)
if key == "output_visible":
return OutputView.find_view() != None
else:
return None
def on_close(self, view):
if view.is_scratch() and view.name() == "Output":
OutputView.position = view.viewport_position()[1]<|fim▁end|> | def append_finish_message(self, command, working_dir, return_code, elapsed_time):
if return_code != 0:
templ = "[Finished in {:.2f}s with exit code {}]\n"
self.append(templ.format(elapsed_time, return_code)) |
<|file_name|>BasicHttpClient.java<|end_file_name|><|fim▁begin|>/***************************************************************************
* Project file: NPlugins - NCore - BasicHttpClient.java *
* Full Class name: fr.ribesg.com.mojang.api.http.BasicHttpClient *
* *
* Copyright (c) 2012-2014 Ribesg - www.ribesg.fr *
* This file is under GPLv3 -> http://www.gnu.org/licenses/gpl-3.0.txt *
* Please contact me at ribesg[at]yahoo.fr if you improve this file! *
***************************************************************************/
package fr.ribesg.com.mojang.api.http;
<|fim▁hole|>import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.Proxy;
import java.net.URL;
import java.util.List;
public class BasicHttpClient implements HttpClient {
	/** Lazily created shared instance (see {@link #getInstance()}). */
	private static BasicHttpClient instance;

	/** Private: obtain the client via {@link #getInstance()}. */
	private BasicHttpClient() {
	}
	/**
	 * Returns the shared client, creating it on first use.
	 * <p>
	 * NOTE(review): the lazy initialization is not synchronized — this
	 * assumes single-threaded access; confirm before calling from
	 * multiple threads.
	 */
	public static BasicHttpClient getInstance() {
		if (instance == null) {
			instance = new BasicHttpClient();
		}
		return instance;
	}
	/** POST without an explicit proxy; delegates with {@code proxy == null}. */
	@Override
	public String post(final URL url, final HttpBody body, final List<HttpHeader> headers) throws IOException {
		return this.post(url, null, body, headers);
	}
@Override
public String post(final URL url, Proxy proxy, final HttpBody body, final List<HttpHeader> headers) throws IOException {
if (proxy == null) {
proxy = Proxy.NO_PROXY;
}
final HttpURLConnection connection = (HttpURLConnection)url.openConnection(proxy);
connection.setRequestMethod("POST");
for (final HttpHeader header : headers) {
connection.setRequestProperty(header.getName(), header.getValue());
}
connection.setUseCaches(false);
connection.setDoInput(true);
connection.setDoOutput(true);
final DataOutputStream writer = new DataOutputStream(connection.getOutputStream());
writer.write(body.getBytes());
writer.flush();
writer.close();
final BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String line;
final StringBuilder response = new StringBuilder();
while ((line = reader.readLine()) != null) {
response.append(line);
response.append('\r');
}
reader.close();
return response.toString();
}
}<|fim▁end|> | import java.io.BufferedReader; |
<|file_name|>table.go<|end_file_name|><|fim▁begin|>package html
// traits is a bit set of processing flags attached to HTML tags and
// attributes by the maps below.
type traits uint16

// Tag traits (values of tagMap).
const (
	normalTag traits = 1 << iota
	rawTag         // raw tags need special processing for their content
	nonPhrasingTag // non-phrasing elements are unaffected by whitespace, remove spaces around these tags
	objectTag      // content tags with a few exclusions, keep spaces after these open/close tags
	omitPTag       // omit p end tag if it is followed by this start tag
	keepPTag       // keep p end tag if it is followed by this end tag
)

// Attribute traits (values of attrMap).
// NOTE(review): these bits overlap the tag trait bits; the two const
// groups appear to be used only with their own map — confirm.
const (
	booleanAttr traits = 1 << iota
	caselessAttr
	urlAttr
	trimAttr
)
// tagMap assigns minification traits to every known HTML tag; combined
// flags are OR-ed together.
var tagMap = map[Hash]traits{
	A: keepPTag,
	Abbr: normalTag,
	Address: nonPhrasingTag | omitPTag,
	Area: normalTag,
	Article: nonPhrasingTag | omitPTag,
	Aside: nonPhrasingTag | omitPTag,
	Audio: keepPTag,
	B: normalTag,
	Base: normalTag,
	Bb: normalTag,
	Bdi: normalTag,
	Bdo: normalTag,
	Blockquote: nonPhrasingTag | omitPTag,
	Body: nonPhrasingTag,
	Br: nonPhrasingTag,
	Button: objectTag,
	Canvas: objectTag,
	Caption: nonPhrasingTag,
	Cite: normalTag,
	Code: normalTag,
	Col: nonPhrasingTag,
	Colgroup: nonPhrasingTag,
	Data: normalTag,
	Datalist: normalTag,
	Dd: nonPhrasingTag,
	Del: keepPTag,
	Details: omitPTag,
	Dfn: normalTag,
	Dialog: normalTag,
	Div: nonPhrasingTag | omitPTag,
	Dl: nonPhrasingTag | omitPTag,
	Dt: nonPhrasingTag,
	Em: normalTag,
	Embed: nonPhrasingTag,
	Fieldset: nonPhrasingTag | omitPTag,
	Figcaption: nonPhrasingTag | omitPTag,
	Figure: nonPhrasingTag | omitPTag,
	Footer: nonPhrasingTag | omitPTag,
	Form: nonPhrasingTag | omitPTag,
	H1: nonPhrasingTag | omitPTag,
	H2: nonPhrasingTag | omitPTag,
	H3: nonPhrasingTag | omitPTag,
	H4: nonPhrasingTag | omitPTag,
	H5: nonPhrasingTag | omitPTag,
	H6: nonPhrasingTag | omitPTag,
	Head: nonPhrasingTag,
	Header: nonPhrasingTag | omitPTag,
	Hgroup: nonPhrasingTag,
	Hr: nonPhrasingTag | omitPTag,
	Html: nonPhrasingTag,
	I: normalTag,
	Iframe: rawTag | objectTag,
	Img: objectTag,
	Input: objectTag,
	Ins: keepPTag,
	Kbd: normalTag,
	Label: normalTag,
	Legend: normalTag,
	Li: nonPhrasingTag,
	Link: normalTag,
	Main: nonPhrasingTag | omitPTag,
	Map: keepPTag,
	Mark: normalTag,
	Math: rawTag,
	Menu: omitPTag,
	Meta: nonPhrasingTag,
	Meter: objectTag,
	Nav: nonPhrasingTag | omitPTag,
	Noscript: nonPhrasingTag | keepPTag,
	Object: objectTag,
	Ol: nonPhrasingTag | omitPTag,
	Optgroup: normalTag,
	Option: normalTag,
	Output: nonPhrasingTag,
	P: nonPhrasingTag | omitPTag,
	Param: normalTag,
	Picture: normalTag,
	Pre: nonPhrasingTag | omitPTag,
	Progress: objectTag,
	Q: objectTag,
	Rp: normalTag,
	Rt: normalTag,
	Ruby: normalTag,
	S: normalTag,
	Samp: normalTag,
	Script: rawTag,
	Section: nonPhrasingTag | omitPTag,
	Select: objectTag,
	Slot: normalTag,
	Small: normalTag,
	Source: normalTag,
	Span: normalTag,
	Strong: normalTag,
	Style: rawTag | nonPhrasingTag,
	Sub: normalTag,
	Summary: normalTag,
	Sup: normalTag,
	Svg: rawTag | objectTag,
	Table: nonPhrasingTag | omitPTag,
	Tbody: nonPhrasingTag,
	Td: nonPhrasingTag,
	Template: normalTag,
	Textarea: rawTag | objectTag,
	Tfoot: nonPhrasingTag,
	Th: nonPhrasingTag,
	Thead: nonPhrasingTag,
	Time: normalTag,
	Title: nonPhrasingTag,
	Tr: nonPhrasingTag,
	Track: normalTag,
	U: normalTag,
	Ul: nonPhrasingTag | omitPTag,
	Var: normalTag,
	Video: objectTag | keepPTag,
	Wbr: normalTag,
}
// attrMap assigns minification traits to every known HTML attribute.
var attrMap = map[Hash]traits{
	Accept: caselessAttr,
	Accept_Charset: caselessAttr,
	Action: urlAttr,
	Align: caselessAttr,
	Alink: caselessAttr,
	Allowfullscreen: booleanAttr,
	Async: booleanAttr,
	Autofocus: booleanAttr,
	Autoplay: booleanAttr,
	Axis: caselessAttr,
	Background: urlAttr,
	Bgcolor: caselessAttr,
	Charset: caselessAttr,
	Checked: booleanAttr,
	Cite: urlAttr,
	Class: trimAttr,
	Classid: urlAttr,
	Clear: caselessAttr,
	Codebase: urlAttr,
	Codetype: caselessAttr,
	Color: caselessAttr,
	Cols: trimAttr,
	Colspan: trimAttr,
	Compact: booleanAttr,
	Controls: booleanAttr,
	Data: urlAttr,
	Declare: booleanAttr,
	Default: booleanAttr,
	DefaultChecked: booleanAttr,
	DefaultMuted: booleanAttr,
	DefaultSelected: booleanAttr,
	Defer: booleanAttr,
	Dir: caselessAttr,
	Disabled: booleanAttr,
	Enabled: booleanAttr,
	Enctype: caselessAttr,
	Face: caselessAttr,
	Formaction: urlAttr,
	Formnovalidate: booleanAttr,
	Frame: caselessAttr,
	Hidden: booleanAttr,
	Href: urlAttr,
	Hreflang: caselessAttr,
	Http_Equiv: caselessAttr,
	Icon: urlAttr,
	Inert: booleanAttr,
	Ismap: booleanAttr,
	Itemscope: booleanAttr,
	Lang: caselessAttr,
	Language: caselessAttr,
	Link: caselessAttr,
	Longdesc: urlAttr,
	Manifest: urlAttr,
	Maxlength: trimAttr,
	Media: caselessAttr | trimAttr,
	Method: caselessAttr,
	Multiple: booleanAttr,
	Muted: booleanAttr,
	Nohref: booleanAttr,
	Noresize: booleanAttr,
	Noshade: booleanAttr,
	Novalidate: booleanAttr,
	Nowrap: booleanAttr,
	Open: booleanAttr,
	Pauseonexit: booleanAttr,
	Poster: urlAttr,
	Profile: urlAttr,
	Readonly: booleanAttr,
	Rel: caselessAttr,
	Required: booleanAttr,
	Rev: caselessAttr,
	Reversed: booleanAttr,
	Rows: trimAttr,
	Rowspan: trimAttr,
	Rules: caselessAttr,
	Scope: caselessAttr,
	Scoped: booleanAttr,
	Scrolling: caselessAttr,
	Seamless: booleanAttr,
	Selected: booleanAttr,
	Shape: caselessAttr,
	Size: trimAttr,
	Sortable: booleanAttr,
	Span: trimAttr,
	Src: urlAttr,
	Srcset: trimAttr,
	Tabindex: trimAttr,
	Target: caselessAttr,
	Text: caselessAttr,
	Translate: booleanAttr,
	Truespeed: booleanAttr,
	Type: caselessAttr,
	Typemustmatch: booleanAttr,
	Undeterminate: booleanAttr,
	Usemap: urlAttr,
	Valign: caselessAttr,
	Valuetype: caselessAttr,
	Vlink: caselessAttr,
	Visible: booleanAttr,
	Xmlns: urlAttr,
}
// jsMimetypes lists the script "type" attribute values treated as
// JavaScript.
var jsMimetypes = map[string]bool{
	"text/javascript": true,
	"application/javascript": true,
}
// Entities are all named character entities.
var EntitiesMap = map[string][]byte{
"AElig": []byte("Æ"),
"AMP": []byte("&"),
"Aacute": []byte("Á"),
"Abreve": []byte("Ă"),
"Acirc": []byte("Â"),
"Agrave": []byte("À"),
"Alpha": []byte("Α"),
"Amacr": []byte("Ā"),
"Aogon": []byte("Ą"),
"ApplyFunction": []byte("⁡"),
"Aring": []byte("Å"),
"Assign": []byte("≔"),
"Atilde": []byte("Ã"),
"Backslash": []byte("∖"),
"Barwed": []byte("⌆"),
"Because": []byte("∵"),
"Bernoullis": []byte("ℬ"),
"Breve": []byte("˘"),
"Bumpeq": []byte("≎"),
"Cacute": []byte("Ć"),
"CapitalDifferentialD": []byte("ⅅ"),
"Cayleys": []byte("ℭ"),
"Ccaron": []byte("Č"),
"Ccedil": []byte("Ç"),
"Ccirc": []byte("Ĉ"),
"Cconint": []byte("∰"),
"Cedilla": []byte("¸"),
"CenterDot": []byte("·"),
"CircleDot": []byte("⊙"),
"CircleMinus": []byte("⊖"),
"CirclePlus": []byte("⊕"),
"CircleTimes": []byte("⊗"),
"ClockwiseContourIntegral": []byte("∲"),
"CloseCurlyDoubleQuote": []byte("”"),
"CloseCurlyQuote": []byte("’"),
"Congruent": []byte("≡"),
"Conint": []byte("∯"),
"ContourIntegral": []byte("∮"),
"Coproduct": []byte("∐"),
"CounterClockwiseContourIntegral": []byte("∳"),
"CupCap": []byte("≍"),
"DDotrahd": []byte("⤑"),
"Dagger": []byte("‡"),
"Dcaron": []byte("Ď"),
"Delta": []byte("Δ"),
"DiacriticalAcute": []byte("´"),
"DiacriticalDot": []byte("˙"),
"DiacriticalDoubleAcute": []byte("˝"),
"DiacriticalGrave": []byte("`"),
"DiacriticalTilde": []byte("˜"),
"Diamond": []byte("⋄"),
"DifferentialD": []byte("ⅆ"),
"DotDot": []byte("⃜"),
"DotEqual": []byte("≐"),
"DoubleContourIntegral": []byte("∯"),
"DoubleDot": []byte("¨"),
"DoubleDownArrow": []byte("⇓"),
"DoubleLeftArrow": []byte("⇐"),
"DoubleLeftRightArrow": []byte("⇔"),
"DoubleLeftTee": []byte("⫤"),
"DoubleLongLeftArrow": []byte("⟸"),
"DoubleLongLeftRightArrow": []byte("⟺"),
"DoubleLongRightArrow": []byte("⟹"),
"DoubleRightArrow": []byte("⇒"),
"DoubleRightTee": []byte("⊨"),
"DoubleUpArrow": []byte("⇑"),
"DoubleUpDownArrow": []byte("⇕"),
"DoubleVerticalBar": []byte("∥"),
"DownArrow": []byte("↓"),
"DownArrowBar": []byte("⤓"),
"DownArrowUpArrow": []byte("⇵"),
"DownBreve": []byte("̑"),
"DownLeftRightVector": []byte("⥐"),
"DownLeftTeeVector": []byte("⥞"),
"DownLeftVector": []byte("↽"),
"DownLeftVectorBar": []byte("⥖"),
"DownRightTeeVector": []byte("⥟"),
"DownRightVector": []byte("⇁"),
"DownRightVectorBar": []byte("⥗"),
"DownTee": []byte("⊤"),
"DownTeeArrow": []byte("↧"),
"Downarrow": []byte("⇓"),
"Dstrok": []byte("Đ"),
"Eacute": []byte("É"),
"Ecaron": []byte("Ě"),
"Ecirc": []byte("Ê"),
"Egrave": []byte("È"),
"Element": []byte("∈"),
"Emacr": []byte("Ē"),
"EmptySmallSquare": []byte("◻"),
"EmptyVerySmallSquare": []byte("▫"),
"Eogon": []byte("Ę"),
"Epsilon": []byte("Ε"),
"EqualTilde": []byte("≂"),
"Equilibrium": []byte("⇌"),
"Exists": []byte("∃"),
"ExponentialE": []byte("ⅇ"),
"FilledSmallSquare": []byte("◼"),
"FilledVerySmallSquare": []byte("▪"),
"ForAll": []byte("∀"),
"Fouriertrf": []byte("ℱ"),
"GT": []byte(">"),
"Gamma": []byte("Γ"),
"Gammad": []byte("Ϝ"),
"Gbreve": []byte("Ğ"),
"Gcedil": []byte("Ģ"),
"Gcirc": []byte("Ĝ"),
"GreaterEqual": []byte("≥"),
"GreaterEqualLess": []byte("⋛"),
"GreaterFullEqual": []byte("≧"),
"GreaterGreater": []byte("⪢"),
"GreaterLess": []byte("≷"),
"GreaterSlantEqual": []byte("⩾"),
"GreaterTilde": []byte("≳"),
"HARDcy": []byte("Ъ"),
"Hacek": []byte("ˇ"),
"Hat": []byte("^"),
"Hcirc": []byte("Ĥ"),
"HilbertSpace": []byte("ℋ"),
"HorizontalLine": []byte("─"),
"Hstrok": []byte("Ħ"),
"HumpDownHump": []byte("≎"),
"HumpEqual": []byte("≏"),
"IJlig": []byte("IJ"),
"Iacute": []byte("Í"),
"Icirc": []byte("Î"),
"Ifr": []byte("ℑ"),
"Igrave": []byte("Ì"),
"Imacr": []byte("Ī"),
"ImaginaryI": []byte("ⅈ"),
"Implies": []byte("⇒"),
"Integral": []byte("∫"),
"Intersection": []byte("⋂"),
"InvisibleComma": []byte("⁣"),
"InvisibleTimes": []byte("⁢"),
"Iogon": []byte("Į"),
"Itilde": []byte("Ĩ"),
"Jcirc": []byte("Ĵ"),
"Jsercy": []byte("Ј"),
"Kappa": []byte("Κ"),
"Kcedil": []byte("Ķ"),
"LT": []byte("<"),
"Lacute": []byte("Ĺ"),
"Lambda": []byte("Λ"),
"Laplacetrf": []byte("ℒ"),
"Lcaron": []byte("Ľ"),
"Lcedil": []byte("Ļ"),
"LeftAngleBracket": []byte("⟨"),
"LeftArrow": []byte("←"),
"LeftArrowBar": []byte("⇤"),
"LeftArrowRightArrow": []byte("⇆"),
"LeftCeiling": []byte("⌈"),
"LeftDoubleBracket": []byte("⟦"),
"LeftDownTeeVector": []byte("⥡"),
"LeftDownVector": []byte("⇃"),
"LeftDownVectorBar": []byte("⥙"),
"LeftFloor": []byte("⌊"),
"LeftRightArrow": []byte("↔"),
"LeftRightVector": []byte("⥎"),
"LeftTee": []byte("⊣"),
"LeftTeeArrow": []byte("↤"),
"LeftTeeVector": []byte("⥚"),
"LeftTriangle": []byte("⊲"),
"LeftTriangleBar": []byte("⧏"),
"LeftTriangleEqual": []byte("⊴"),
"LeftUpDownVector": []byte("⥑"),
"LeftUpTeeVector": []byte("⥠"),
"LeftUpVector": []byte("↿"),
"LeftUpVectorBar": []byte("⥘"),
"LeftVector": []byte("↼"),
"LeftVectorBar": []byte("⥒"),
"Leftarrow": []byte("⇐"),
"Leftrightarrow": []byte("⇔"),
"LessEqualGreater": []byte("⋚"),
"LessFullEqual": []byte("≦"),
"LessGreater": []byte("≶"),
"LessLess": []byte("⪡"),
"LessSlantEqual": []byte("⩽"),
"LessTilde": []byte("≲"),
"Lleftarrow": []byte("⇚"),
"Lmidot": []byte("Ŀ"),
"LongLeftArrow": []byte("⟵"),
"LongLeftRightArrow": []byte("⟷"),
"LongRightArrow": []byte("⟶"),
"Longleftarrow": []byte("⟸"),
"Longleftrightarrow": []byte("⟺"),
"Longrightarrow": []byte("⟹"),
"LowerLeftArrow": []byte("↙"),
"LowerRightArrow": []byte("↘"),
"Lstrok": []byte("Ł"),
"MediumSpace": []byte(" "),
"Mellintrf": []byte("ℳ"),
"MinusPlus": []byte("∓"),
"Nacute": []byte("Ń"),
"Ncaron": []byte("Ň"),
"Ncedil": []byte("Ņ"),
"NegativeMediumSpace": []byte("​"),
"NegativeThickSpace": []byte("​"),
"NegativeThinSpace": []byte("​"),
"NegativeVeryThinSpace": []byte("​"),
"NestedGreaterGreater": []byte("≫"),
"NestedLessLess": []byte("≪"),
"NewLine": []byte("\n"),
"NoBreak": []byte("⁠"),
"NonBreakingSpace": []byte(" "),
"NotCongruent": []byte("≢"),
"NotCupCap": []byte("≭"),
"NotDoubleVerticalBar": []byte("∦"),
"NotElement": []byte("∉"),
"NotEqual": []byte("≠"),
"NotExists": []byte("∄"),
"NotGreater": []byte("≯"),
"NotGreaterEqual": []byte("≱"),
"NotGreaterLess": []byte("≹"),
"NotGreaterTilde": []byte("≵"),
"NotLeftTriangle": []byte("⋪"),
"NotLeftTriangleEqual": []byte("⋬"),
"NotLess": []byte("≮"),
"NotLessEqual": []byte("≰"),
"NotLessGreater": []byte("≸"),
"NotLessTilde": []byte("≴"),
"NotPrecedes": []byte("⊀"),
"NotPrecedesSlantEqual": []byte("⋠"),
"NotReverseElement": []byte("∌"),
"NotRightTriangle": []byte("⋫"),
"NotRightTriangleEqual": []byte("⋭"),
"NotSquareSubsetEqual": []byte("⋢"),
"NotSquareSupersetEqual": []byte("⋣"),
"NotSubsetEqual": []byte("⊈"),
"NotSucceeds": []byte("⊁"),
"NotSucceedsSlantEqual": []byte("⋡"),
"NotSupersetEqual": []byte("⊉"),
"NotTilde": []byte("≁"),
"NotTildeEqual": []byte("≄"),
"NotTildeFullEqual": []byte("≇"),
"NotTildeTilde": []byte("≉"),
"NotVerticalBar": []byte("∤"),
"Ntilde": []byte("Ñ"),
"OElig": []byte("Œ"),
"Oacute": []byte("Ó"),
"Ocirc": []byte("Ô"),
"Odblac": []byte("Ő"),
"Ograve": []byte("Ò"),
"Omacr": []byte("Ō"),
"Omega": []byte("Ω"),
"Omicron": []byte("Ο"),
"OpenCurlyDoubleQuote": []byte("“"),
"OpenCurlyQuote": []byte("‘"),
"Oslash": []byte("Ø"),
"Otilde": []byte("Õ"),
"OverBar": []byte("‾"),
"OverBrace": []byte("⏞"),
"OverBracket": []byte("⎴"),
"OverParenthesis": []byte("⏜"),
"PartialD": []byte("∂"),
"PlusMinus": []byte("±"),
"Poincareplane": []byte("ℌ"),
"Precedes": []byte("≺"),
"PrecedesEqual": []byte("⪯"),
"PrecedesSlantEqual": []byte("≼"),
"PrecedesTilde": []byte("≾"),
"Product": []byte("∏"),
"Proportion": []byte("∷"),
"Proportional": []byte("∝"),
"QUOT": []byte("\""),
"Racute": []byte("Ŕ"),
"Rcaron": []byte("Ř"),
"Rcedil": []byte("Ŗ"),
"ReverseElement": []byte("∋"),
"ReverseEquilibrium": []byte("⇋"),
"ReverseUpEquilibrium": []byte("⥯"),
"Rfr": []byte("ℜ"),
"RightAngleBracket": []byte("⟩"),
"RightArrow": []byte("→"),
"RightArrowBar": []byte("⇥"),
"RightArrowLeftArrow": []byte("⇄"),
"RightCeiling": []byte("⌉"),
"RightDoubleBracket": []byte("⟧"),
"RightDownTeeVector": []byte("⥝"),
"RightDownVector": []byte("⇂"),
"RightDownVectorBar": []byte("⥕"),
"RightFloor": []byte("⌋"),
"RightTee": []byte("⊢"),
"RightTeeArrow": []byte("↦"),
"RightTeeVector": []byte("⥛"),
"RightTriangle": []byte("⊳"),
"RightTriangleBar": []byte("⧐"),
"RightTriangleEqual": []byte("⊵"),
"RightUpDownVector": []byte("⥏"),
"RightUpTeeVector": []byte("⥜"),
"RightUpVector": []byte("↾"),
"RightUpVectorBar": []byte("⥔"),
"RightVector": []byte("⇀"),
"RightVectorBar": []byte("⥓"),
"Rightarrow": []byte("⇒"),
"RoundImplies": []byte("⥰"),
"Rrightarrow": []byte("⇛"),
"RuleDelayed": []byte("⧴"),
"SHCHcy": []byte("Щ"),
"SOFTcy": []byte("Ь"),
"Sacute": []byte("Ś"),
"Scaron": []byte("Š"),
"Scedil": []byte("Ş"),
"Scirc": []byte("Ŝ"),
"ShortDownArrow": []byte("↓"),
"ShortLeftArrow": []byte("←"),
"ShortRightArrow": []byte("→"),
"ShortUpArrow": []byte("↑"),
"Sigma": []byte("Σ"),
"SmallCircle": []byte("∘"),
"Square": []byte("□"),
"SquareIntersection": []byte("⊓"),
"SquareSubset": []byte("⊏"),
"SquareSubsetEqual": []byte("⊑"),
"SquareSuperset": []byte("⊐"),
"SquareSupersetEqual": []byte("⊒"),
"SquareUnion": []byte("⊔"),
"Subset": []byte("⋐"),
"SubsetEqual": []byte("⊆"),
"Succeeds": []byte("≻"),
"SucceedsEqual": []byte("⪰"),
"SucceedsSlantEqual": []byte("≽"),
"SucceedsTilde": []byte("≿"),
"SuchThat": []byte("∋"),
"Superset": []byte("⊃"),
"SupersetEqual": []byte("⊇"),
"Supset": []byte("⋑"),
"THORN": []byte("Þ"),
"Tab": []byte(" "),
"Tcaron": []byte("Ť"),
"Tcedil": []byte("Ţ"),
"Therefore": []byte("∴"),
"Theta": []byte("Θ"),
"ThinSpace": []byte(" "),
"Tilde": []byte("∼"),
"TildeEqual": []byte("≃"),
"TildeFullEqual": []byte("≅"),
"TildeTilde": []byte("≈"),
"TripleDot": []byte("⃛"),
"Tstrok": []byte("Ŧ"),
"Uacute": []byte("Ú"),
"Uarrocir": []byte("⥉"),
"Ubreve": []byte("Ŭ"),
"Ucirc": []byte("Û"),
"Udblac": []byte("Ű"),
"Ugrave": []byte("Ù"),
"Umacr": []byte("Ū"),
"UnderBar": []byte("_"),
"UnderBrace": []byte("⏟"),
"UnderBracket": []byte("⎵"),
"UnderParenthesis": []byte("⏝"),
"Union": []byte("⋃"),
"UnionPlus": []byte("⊎"),
"Uogon": []byte("Ų"),
"UpArrow": []byte("↑"),
"UpArrowBar": []byte("⤒"),
"UpArrowDownArrow": []byte("⇅"),
"UpDownArrow": []byte("↕"),
"UpEquilibrium": []byte("⥮"),
"UpTee": []byte("⊥"),
"UpTeeArrow": []byte("↥"),
"Uparrow": []byte("⇑"),
"Updownarrow": []byte("⇕"),
"UpperLeftArrow": []byte("↖"),
"UpperRightArrow": []byte("↗"),
"Upsilon": []byte("Υ"),
"Uring": []byte("Ů"),
"Utilde": []byte("Ũ"),
"Verbar": []byte("‖"),
"VerticalBar": []byte("∣"),
"VerticalLine": []byte("|"),
"VerticalSeparator": []byte("❘"),
"VerticalTilde": []byte("≀"),
"VeryThinSpace": []byte(" "),
"Vvdash": []byte("⊪"),
"Wcirc": []byte("Ŵ"),
"Yacute": []byte("Ý"),
"Ycirc": []byte("Ŷ"),
"Zacute": []byte("Ź"),
"Zcaron": []byte("Ž"),
"ZeroWidthSpace": []byte("​"),
"aacute": []byte("á"),
"abreve": []byte("ă"),
"acirc": []byte("â"),
"acute": []byte("´"),
"aelig": []byte("æ"),
"agrave": []byte("à"),
"alefsym": []byte("ℵ"),
"alpha": []byte("α"),
"amacr": []byte("ā"),
"amp": []byte("&"),
"andslope": []byte("⩘"),
"angle": []byte("∠"),
"angmsd": []byte("∡"),
"angmsdaa": []byte("⦨"),
"angmsdab": []byte("⦩"),
"angmsdac": []byte("⦪"),
"angmsdad": []byte("⦫"),
"angmsdae": []byte("⦬"),
"angmsdaf": []byte("⦭"),
"angmsdag": []byte("⦮"),
"angmsdah": []byte("⦯"),
"angrtvb": []byte("⊾"),
"angrtvbd": []byte("⦝"),
"angsph": []byte("∢"),
"angst": []byte("Å"),
"angzarr": []byte("⍼"),
"aogon": []byte("ą"),
"apos": []byte("'"),
"approx": []byte("≈"),
"approxeq": []byte("≊"),
"aring": []byte("å"),
"ast": []byte("*"),
"asymp": []byte("≈"),
"asympeq": []byte("≍"),
"atilde": []byte("ã"),
"awconint": []byte("∳"),
"backcong": []byte("≌"),
"backepsilon": []byte("϶"),
"backprime": []byte("‵"),
"backsim": []byte("∽"),
"backsimeq": []byte("⋍"),
"barvee": []byte("⊽"),
"barwed": []byte("⌅"),
"barwedge": []byte("⌅"),
"bbrktbrk": []byte("⎶"),
"becaus": []byte("∵"),
"because": []byte("∵"),
"bemptyv": []byte("⦰"),
"bernou": []byte("ℬ"),
"between": []byte("≬"),
"bigcap": []byte("⋂"),
"bigcirc": []byte("◯"),
"bigcup": []byte("⋃"),
"bigodot": []byte("⨀"),
"bigoplus": []byte("⨁"),
"bigotimes": []byte("⨂"),
"bigsqcup": []byte("⨆"),
"bigstar": []byte("★"),
"bigtriangledown": []byte("▽"),
"bigtriangleup": []byte("△"),
"biguplus": []byte("⨄"),
"bigvee": []byte("⋁"),
"bigwedge": []byte("⋀"),
"bkarow": []byte("⤍"),
"blacklozenge": []byte("⧫"),
"blacksquare": []byte("▪"),
"blacktriangle": []byte("▴"),
"blacktriangledown": []byte("▾"),
"blacktriangleleft": []byte("◂"),
"blacktriangleright": []byte("▸"),
"bottom": []byte("⊥"),
"bowtie": []byte("⋈"),
"boxminus": []byte("⊟"),
"boxplus": []byte("⊞"),
"boxtimes": []byte("⊠"),
"bprime": []byte("‵"),
"breve": []byte("˘"),
"brvbar": []byte("¦"),
"bsol": []byte("\\"),
"bsolhsub": []byte("⟈"),
"bullet": []byte("•"),
"bumpeq": []byte("≏"),
"cacute": []byte("ć"),
"capbrcup": []byte("⩉"),
"caron": []byte("ˇ"),
"ccaron": []byte("č"),
"ccedil": []byte("ç"),
"ccirc": []byte("ĉ"),
"ccupssm": []byte("⩐"),
"cedil": []byte("¸"),
"cemptyv": []byte("⦲"),
"centerdot": []byte("·"),
"checkmark": []byte("✓"),
"circeq": []byte("≗"),
"circlearrowleft": []byte("↺"),
"circlearrowright": []byte("↻"),
"circledR": []byte("®"),
"circledS": []byte("Ⓢ"),
"circledast": []byte("⊛"),
"circledcirc": []byte("⊚"),
"circleddash": []byte("⊝"),
"cirfnint": []byte("⨐"),
"cirscir": []byte("⧂"),
"clubsuit": []byte("♣"),
"colon": []byte(":"),
"colone": []byte("≔"),
"coloneq": []byte("≔"),
"comma": []byte(","),
"commat": []byte("@"),
"compfn": []byte("∘"),
"complement": []byte("∁"),
"complexes": []byte("ℂ"),
"congdot": []byte("⩭"),
"conint": []byte("∮"),
"coprod": []byte("∐"),
"copysr": []byte("℗"),
"cudarrl": []byte("⤸"),
"cudarrr": []byte("⤵"),
"cularr": []byte("↶"),
"cularrp": []byte("⤽"),
"cupbrcap": []byte("⩈"),
"cupdot": []byte("⊍"),
"curarr": []byte("↷"),
"curarrm": []byte("⤼"),
"curlyeqprec": []byte("⋞"),
"curlyeqsucc": []byte("⋟"),
"curlyvee": []byte("⋎"),
"curlywedge": []byte("⋏"),
"curren": []byte("¤"),
"curvearrowleft": []byte("↶"),
"curvearrowright": []byte("↷"),
"cwconint": []byte("∲"),
"cylcty": []byte("⌭"),
"dagger": []byte("†"),
"daleth": []byte("ℸ"),
"dbkarow": []byte("⤏"),
"dblac": []byte("˝"),
"dcaron": []byte("ď"),
"ddagger": []byte("‡"),
"ddotseq": []byte("⩷"),
"delta": []byte("δ"),
"demptyv": []byte("⦱"),
"diamond": []byte("⋄"),
"diamondsuit": []byte("♦"),
"digamma": []byte("ϝ"),
"divide": []byte("÷"),
"divideontimes": []byte("⋇"),
"divonx": []byte("⋇"),
"dlcorn": []byte("⌞"),
"dlcrop": []byte("⌍"),
"dollar": []byte("$"),
"doteqdot": []byte("≑"),
"dotminus": []byte("∸"),
"dotplus": []byte("∔"),
"dotsquare": []byte("⊡"),
"doublebarwedge": []byte("⌆"),
"downarrow": []byte("↓"),
"downdownarrows": []byte("⇊"),
"downharpoonleft": []byte("⇃"),
"downharpoonright": []byte("⇂"),
"drbkarow": []byte("⤐"),
"drcorn": []byte("⌟"),
"drcrop": []byte("⌌"),
"dstrok": []byte("đ"),
"dwangle": []byte("⦦"),
"dzigrarr": []byte("⟿"),
"eacute": []byte("é"),
"ecaron": []byte("ě"),
"ecirc": []byte("ê"),
"ecolon": []byte("≕"),
"egrave": []byte("è"),
"elinters": []byte("⏧"),
"emacr": []byte("ē"),
"emptyset": []byte("∅"),
"emptyv": []byte("∅"),
"emsp13": []byte(" "),
"emsp14": []byte(" "),
"eogon": []byte("ę"),
"epsilon": []byte("ε"),
"eqcirc": []byte("≖"),
"eqcolon": []byte("≕"),
"eqsim": []byte("≂"),
"eqslantgtr": []byte("⪖"),
"eqslantless": []byte("⪕"),
"equals": []byte("="),
"equest": []byte("≟"),
"equivDD": []byte("⩸"),
"eqvparsl": []byte("⧥"),
"excl": []byte("!"),
"expectation": []byte("ℰ"),
"exponentiale": []byte("ⅇ"),
"fallingdotseq": []byte("≒"),
"female": []byte("♀"),
"forall": []byte("∀"),
"fpartint": []byte("⨍"),
"frac12": []byte("½"),
"frac13": []byte("⅓"),
"frac14": []byte("¼"),
"frac15": []byte("⅕"),
"frac16": []byte("⅙"),
"frac18": []byte("⅛"),
"frac23": []byte("⅔"),
"frac25": []byte("⅖"),
"frac34": []byte("¾"),
"frac35": []byte("⅗"),
"frac38": []byte("⅜"),
"frac45": []byte("⅘"),
"frac56": []byte("⅚"),
"frac58": []byte("⅝"),
"frac78": []byte("⅞"),
"gacute": []byte("ǵ"),
"gamma": []byte("γ"),
"gammad": []byte("ϝ"),
"gbreve": []byte("ğ"),
"gcirc": []byte("ĝ"),
"geq": []byte("≥"),
"geqq": []byte("≧"),
"geqslant": []byte("⩾"),
"gesdoto": []byte("⪂"),
"gesdotol": []byte("⪄"),
"ggg": []byte("⋙"),
"gnapprox": []byte("⪊"),
"gneq": []byte("⪈"),
"gneqq": []byte("≩"),
"grave": []byte("`"),
"gt": []byte(">"),
"gtquest": []byte("⩼"),
"gtrapprox": []byte("⪆"),
"gtrdot": []byte("⋗"),
"gtreqless": []byte("⋛"),
"gtreqqless": []byte("⪌"),
"gtrless": []byte("≷"),
"gtrsim": []byte("≳"),
"hArr": []byte("⇔"),
"hairsp": []byte(" "),
"hamilt": []byte("ℋ"),
"hardcy": []byte("ъ"),
"harrcir": []byte("⥈"),
"hcirc": []byte("ĥ"),
"hearts": []byte("♥"),
"heartsuit": []byte("♥"),
"hellip": []byte("…"),
"hercon": []byte("⊹"),
"hksearow": []byte("⤥"),
"hkswarow": []byte("⤦"),
"homtht": []byte("∻"),
"hookleftarrow": []byte("↩"),
"hookrightarrow": []byte("↪"),
"horbar": []byte("―"),
"hslash": []byte("ℏ"),
"hstrok": []byte("ħ"),
"hybull": []byte("⁃"),
"hyphen": []byte("‐"),
"iacute": []byte("í"),
"icirc": []byte("î"),
"iexcl": []byte("¡"),
"igrave": []byte("ì"),
"iiiint": []byte("⨌"),
"iiint": []byte("∭"),
"ijlig": []byte("ij"),
"imacr": []byte("ī"),
"image": []byte("ℑ"),
"imagline": []byte("ℐ"),
"imagpart": []byte("ℑ"),
"imath": []byte("ı"),
"imped": []byte("Ƶ"),
"incare": []byte("℅"),
"infintie": []byte("⧝"),
"inodot": []byte("ı"),
"intcal": []byte("⊺"),
"integers": []byte("ℤ"),
"intercal": []byte("⊺"),
"intlarhk": []byte("⨗"),
"intprod": []byte("⨼"),
"iogon": []byte("į"),
"iquest": []byte("¿"),
"isin": []byte("∈"),
"isindot": []byte("⋵"),
"isinsv": []byte("⋳"),
"isinv": []byte("∈"),
"itilde": []byte("ĩ"),
"jcirc": []byte("ĵ"),
"jmath": []byte("ȷ"),
"jsercy": []byte("ј"),
"kappa": []byte("κ"),
"kappav": []byte("ϰ"),
"kcedil": []byte("ķ"),
"kgreen": []byte("ĸ"),
"lacute": []byte("ĺ"),
"laemptyv": []byte("⦴"),
"lagran": []byte("ℒ"),
"lambda": []byte("λ"),
"langle": []byte("⟨"),
"laquo": []byte("«"),
"larrbfs": []byte("⤟"),
"larrhk": []byte("↩"),
"larrlp": []byte("↫"),
"larrsim": []byte("⥳"),
"larrtl": []byte("↢"),
"lbrace": []byte("{"),
"lbrack": []byte("["),
"lbrksld": []byte("⦏"),
"lbrkslu": []byte("⦍"),
"lcaron": []byte("ľ"),
"lcedil": []byte("ļ"),
"lcub": []byte("{"),
"ldquor": []byte("„"),
"ldrdhar": []byte("⥧"),
"ldrushar": []byte("⥋"),
"leftarrow": []byte("←"),
"leftarrowtail": []byte("↢"),
"leftharpoondown": []byte("↽"),
"leftharpoonup": []byte("↼"),
"leftleftarrows": []byte("⇇"),
"leftrightarrow": []byte("↔"),
"leftrightarrows": []byte("⇆"),
"leftrightharpoons": []byte("⇋"),
"leftrightsquigarrow": []byte("↭"),
"leftthreetimes": []byte("⋋"),
"leq": []byte("≤"),
"leqq": []byte("≦"),
"leqslant": []byte("⩽"),
"lesdoto": []byte("⪁"),
"lesdotor": []byte("⪃"),
"lessapprox": []byte("⪅"),
"lessdot": []byte("⋖"),
"lesseqgtr": []byte("⋚"),
"lesseqqgtr": []byte("⪋"),
"lessgtr": []byte("≶"),
"lesssim": []byte("≲"),
"lfloor": []byte("⌊"),
"llcorner": []byte("⌞"),
"lmidot": []byte("ŀ"),
"lmoust": []byte("⎰"),
"lmoustache": []byte("⎰"),
"lnapprox": []byte("⪉"),
"lneq": []byte("⪇"),
"lneqq": []byte("≨"),
"longleftarrow": []byte("⟵"),
"longleftrightarrow": []byte("⟷"),
"longmapsto": []byte("⟼"),
"longrightarrow": []byte("⟶"),
"looparrowleft": []byte("↫"),
"looparrowright": []byte("↬"),
"lotimes": []byte("⨴"),
"lowast": []byte("∗"),
"lowbar": []byte("_"),
"lozenge": []byte("◊"),
"lpar": []byte("("),
"lrcorner": []byte("⌟"),
"lsaquo": []byte("‹"),
"lsqb": []byte("["),
"lsquor": []byte("‚"),
"lstrok": []byte("ł"),
"lt": []byte("<"),
"lthree": []byte("⋋"),
"ltimes": []byte("⋉"),
"ltquest": []byte("⩻"),
"lurdshar": []byte("⥊"),
"luruhar": []byte("⥦"),
"maltese": []byte("✠"),<|fim▁hole|> "mapstodown": []byte("↧"),
"mapstoleft": []byte("↤"),
"mapstoup": []byte("↥"),
"marker": []byte("▮"),
"measuredangle": []byte("∡"),
"micro": []byte("µ"),
"midast": []byte("*"),
"middot": []byte("·"),
"minusb": []byte("⊟"),
"minusd": []byte("∸"),
"minusdu": []byte("⨪"),
"mnplus": []byte("∓"),
"models": []byte("⊧"),
"mstpos": []byte("∾"),
"multimap": []byte("⊸"),
"nLeftarrow": []byte("⇍"),
"nLeftrightarrow": []byte("⇎"),
"nRightarrow": []byte("⇏"),
"nVDash": []byte("⊯"),
"nVdash": []byte("⊮"),
"nabla": []byte("∇"),
"nacute": []byte("ń"),
"napos": []byte("ʼn"),
"napprox": []byte("≉"),
"natural": []byte("♮"),
"naturals": []byte("ℕ"),
"ncaron": []byte("ň"),
"ncedil": []byte("ņ"),
"nearrow": []byte("↗"),
"nequiv": []byte("≢"),
"nesear": []byte("⤨"),
"nexist": []byte("∄"),
"nexists": []byte("∄"),
"ngeq": []byte("≱"),
"ngtr": []byte("≯"),
"niv": []byte("∋"),
"nleftarrow": []byte("↚"),
"nleftrightarrow": []byte("↮"),
"nleq": []byte("≰"),
"nless": []byte("≮"),
"nltrie": []byte("⋬"),
"notinva": []byte("∉"),
"notinvb": []byte("⋷"),
"notinvc": []byte("⋶"),
"notniva": []byte("∌"),
"notnivb": []byte("⋾"),
"notnivc": []byte("⋽"),
"nparallel": []byte("∦"),
"npolint": []byte("⨔"),
"nprcue": []byte("⋠"),
"nprec": []byte("⊀"),
"nrightarrow": []byte("↛"),
"nrtrie": []byte("⋭"),
"nsccue": []byte("⋡"),
"nshortmid": []byte("∤"),
"nshortparallel": []byte("∦"),
"nsimeq": []byte("≄"),
"nsmid": []byte("∤"),
"nspar": []byte("∦"),
"nsqsube": []byte("⋢"),
"nsqsupe": []byte("⋣"),
"nsubseteq": []byte("⊈"),
"nsucc": []byte("⊁"),
"nsupseteq": []byte("⊉"),
"ntilde": []byte("ñ"),
"ntriangleleft": []byte("⋪"),
"ntrianglelefteq": []byte("⋬"),
"ntriangleright": []byte("⋫"),
"ntrianglerighteq": []byte("⋭"),
"num": []byte("#"),
"numero": []byte("№"),
"nvDash": []byte("⊭"),
"nvdash": []byte("⊬"),
"nvinfin": []byte("⧞"),
"nwarrow": []byte("↖"),
"oacute": []byte("ó"),
"ocirc": []byte("ô"),
"odblac": []byte("ő"),
"oelig": []byte("œ"),
"ograve": []byte("ò"),
"olcross": []byte("⦻"),
"omacr": []byte("ō"),
"omega": []byte("ω"),
"omicron": []byte("ο"),
"ominus": []byte("⊖"),
"order": []byte("ℴ"),
"orderof": []byte("ℴ"),
"origof": []byte("⊶"),
"orslope": []byte("⩗"),
"oslash": []byte("ø"),
"otilde": []byte("õ"),
"otimes": []byte("⊗"),
"otimesas": []byte("⨶"),
"parallel": []byte("∥"),
"percnt": []byte("%"),
"period": []byte("."),
"permil": []byte("‰"),
"perp": []byte("⊥"),
"pertenk": []byte("‱"),
"phmmat": []byte("ℳ"),
"pitchfork": []byte("⋔"),
"planck": []byte("ℏ"),
"planckh": []byte("ℎ"),
"plankv": []byte("ℏ"),
"plus": []byte("+"),
"plusacir": []byte("⨣"),
"pluscir": []byte("⨢"),
"plusdo": []byte("∔"),
"plusmn": []byte("±"),
"plussim": []byte("⨦"),
"plustwo": []byte("⨧"),
"pointint": []byte("⨕"),
"pound": []byte("£"),
"prec": []byte("≺"),
"precapprox": []byte("⪷"),
"preccurlyeq": []byte("≼"),
"preceq": []byte("⪯"),
"precnapprox": []byte("⪹"),
"precneqq": []byte("⪵"),
"precnsim": []byte("⋨"),
"precsim": []byte("≾"),
"primes": []byte("ℙ"),
"prnsim": []byte("⋨"),
"profalar": []byte("⌮"),
"profline": []byte("⌒"),
"profsurf": []byte("⌓"),
"propto": []byte("∝"),
"prurel": []byte("⊰"),
"puncsp": []byte(" "),
"qprime": []byte("⁗"),
"quaternions": []byte("ℍ"),
"quatint": []byte("⨖"),
"quest": []byte("?"),
"questeq": []byte("≟"),
"quot": []byte("\""),
"racute": []byte("ŕ"),
"radic": []byte("√"),
"raemptyv": []byte("⦳"),
"rangle": []byte("⟩"),
"raquo": []byte("»"),
"rarrbfs": []byte("⤠"),
"rarrhk": []byte("↪"),
"rarrlp": []byte("↬"),
"rarrsim": []byte("⥴"),
"rarrtl": []byte("↣"),
"rationals": []byte("ℚ"),
"rbrace": []byte("}"),
"rbrack": []byte("]"),
"rbrksld": []byte("⦎"),
"rbrkslu": []byte("⦐"),
"rcaron": []byte("ř"),
"rcedil": []byte("ŗ"),
"rcub": []byte("}"),
"rdldhar": []byte("⥩"),
"rdquor": []byte("”"),
"real": []byte("ℜ"),
"realine": []byte("ℛ"),
"realpart": []byte("ℜ"),
"reals": []byte("ℝ"),
"rfloor": []byte("⌋"),
"rightarrow": []byte("→"),
"rightarrowtail": []byte("↣"),
"rightharpoondown": []byte("⇁"),
"rightharpoonup": []byte("⇀"),
"rightleftarrows": []byte("⇄"),
"rightleftharpoons": []byte("⇌"),
"rightrightarrows": []byte("⇉"),
"rightsquigarrow": []byte("↝"),
"rightthreetimes": []byte("⋌"),
"risingdotseq": []byte("≓"),
"rmoust": []byte("⎱"),
"rmoustache": []byte("⎱"),
"rotimes": []byte("⨵"),
"rpar": []byte(")"),
"rppolint": []byte("⨒"),
"rsaquo": []byte("›"),
"rsqb": []byte("]"),
"rsquor": []byte("’"),
"rthree": []byte("⋌"),
"rtimes": []byte("⋊"),
"rtriltri": []byte("⧎"),
"ruluhar": []byte("⥨"),
"sacute": []byte("ś"),
"scaron": []byte("š"),
"scedil": []byte("ş"),
"scirc": []byte("ŝ"),
"scnsim": []byte("⋩"),
"scpolint": []byte("⨓"),
"searrow": []byte("↘"),
"semi": []byte(";"),
"seswar": []byte("⤩"),
"setminus": []byte("∖"),
"sfrown": []byte("⌢"),
"shchcy": []byte("щ"),
"shortmid": []byte("∣"),
"shortparallel": []byte("∥"),
"sigma": []byte("σ"),
"sigmaf": []byte("ς"),
"sigmav": []byte("ς"),
"simeq": []byte("≃"),
"simplus": []byte("⨤"),
"simrarr": []byte("⥲"),
"slarr": []byte("←"),
"smallsetminus": []byte("∖"),
"smeparsl": []byte("⧤"),
"smid": []byte("∣"),
"softcy": []byte("ь"),
"sol": []byte("/"),
"solbar": []byte("⌿"),
"spades": []byte("♠"),
"spadesuit": []byte("♠"),
"spar": []byte("∥"),
"sqsube": []byte("⊑"),
"sqsubset": []byte("⊏"),
"sqsubseteq": []byte("⊑"),
"sqsupe": []byte("⊒"),
"sqsupset": []byte("⊐"),
"sqsupseteq": []byte("⊒"),
"square": []byte("□"),
"squarf": []byte("▪"),
"srarr": []byte("→"),
"ssetmn": []byte("∖"),
"ssmile": []byte("⌣"),
"sstarf": []byte("⋆"),
"straightepsilon": []byte("ϵ"),
"straightphi": []byte("ϕ"),
"strns": []byte("¯"),
"subedot": []byte("⫃"),
"submult": []byte("⫁"),
"subplus": []byte("⪿"),
"subrarr": []byte("⥹"),
"subset": []byte("⊂"),
"subseteq": []byte("⊆"),
"subseteqq": []byte("⫅"),
"subsetneq": []byte("⊊"),
"subsetneqq": []byte("⫋"),
"succ": []byte("≻"),
"succapprox": []byte("⪸"),
"succcurlyeq": []byte("≽"),
"succeq": []byte("⪰"),
"succnapprox": []byte("⪺"),
"succneqq": []byte("⪶"),
"succnsim": []byte("⋩"),
"succsim": []byte("≿"),
"supdsub": []byte("⫘"),
"supedot": []byte("⫄"),
"suphsol": []byte("⟉"),
"suphsub": []byte("⫗"),
"suplarr": []byte("⥻"),
"supmult": []byte("⫂"),
"supplus": []byte("⫀"),
"supset": []byte("⊃"),
"supseteq": []byte("⊇"),
"supseteqq": []byte("⫆"),
"supsetneq": []byte("⊋"),
"supsetneqq": []byte("⫌"),
"swarrow": []byte("↙"),
"szlig": []byte("ß"),
"target": []byte("⌖"),
"tcaron": []byte("ť"),
"tcedil": []byte("ţ"),
"telrec": []byte("⌕"),
"there4": []byte("∴"),
"therefore": []byte("∴"),
"theta": []byte("θ"),
"thetasym": []byte("ϑ"),
"thetav": []byte("ϑ"),
"thickapprox": []byte("≈"),
"thicksim": []byte("∼"),
"thinsp": []byte(" "),
"thkap": []byte("≈"),
"thksim": []byte("∼"),
"thorn": []byte("þ"),
"tilde": []byte("˜"),
"times": []byte("×"),
"timesb": []byte("⊠"),
"timesbar": []byte("⨱"),
"topbot": []byte("⌶"),
"topfork": []byte("⫚"),
"tprime": []byte("‴"),
"triangle": []byte("▵"),
"triangledown": []byte("▿"),
"triangleleft": []byte("◃"),
"trianglelefteq": []byte("⊴"),
"triangleq": []byte("≜"),
"triangleright": []byte("▹"),
"trianglerighteq": []byte("⊵"),
"tridot": []byte("◬"),
"triminus": []byte("⨺"),
"triplus": []byte("⨹"),
"tritime": []byte("⨻"),
"trpezium": []byte("⏢"),
"tstrok": []byte("ŧ"),
"twoheadleftarrow": []byte("↞"),
"twoheadrightarrow": []byte("↠"),
"uacute": []byte("ú"),
"ubreve": []byte("ŭ"),
"ucirc": []byte("û"),
"udblac": []byte("ű"),
"ugrave": []byte("ù"),
"ulcorn": []byte("⌜"),
"ulcorner": []byte("⌜"),
"ulcrop": []byte("⌏"),
"umacr": []byte("ū"),
"uogon": []byte("ų"),
"uparrow": []byte("↑"),
"updownarrow": []byte("↕"),
"upharpoonleft": []byte("↿"),
"upharpoonright": []byte("↾"),
"upsih": []byte("ϒ"),
"upsilon": []byte("υ"),
"upuparrows": []byte("⇈"),
"urcorn": []byte("⌝"),
"urcorner": []byte("⌝"),
"urcrop": []byte("⌎"),
"uring": []byte("ů"),
"utilde": []byte("ũ"),
"uwangle": []byte("⦧"),
"varepsilon": []byte("ϵ"),
"varkappa": []byte("ϰ"),
"varnothing": []byte("∅"),
"varphi": []byte("ϕ"),
"varpi": []byte("ϖ"),
"varpropto": []byte("∝"),
"varrho": []byte("ϱ"),
"varsigma": []byte("ς"),
"vartheta": []byte("ϑ"),
"vartriangleleft": []byte("⊲"),
"vartriangleright": []byte("⊳"),
"vee": []byte("∨"),
"veebar": []byte("⊻"),
"vellip": []byte("⋮"),
"verbar": []byte("|"),
"vert": []byte("|"),
"vprop": []byte("∝"),
"vzigzag": []byte("⦚"),
"wcirc": []byte("ŵ"),
"wedge": []byte("∧"),
"wedgeq": []byte("≙"),
"weierp": []byte("℘"),
"wreath": []byte("≀"),
"xvee": []byte("⋁"),
"xwedge": []byte("⋀"),
"yacute": []byte("ý"),
"ycirc": []byte("ŷ"),
"zacute": []byte("ź"),
"zcaron": []byte("ž"),
"zeetrf": []byte("ℨ"),
"zigrarr": []byte("⇝"),
}
// TextRevEntitiesMap maps a raw byte appearing in text content to the byte
// sequence it must be replaced with when escaping.
// NOTE(review): the '<' entry's value reads as a literal "<" here, which
// would be a no-op replacement; the original generated source presumably
// holds the escaped entity ("&lt;") — confirm against the entity-table
// generator output before relying on this dump.
var TextRevEntitiesMap = map[byte][]byte{
	'<': []byte("<"),
}
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># Generated by Django 2.2 on 2020-06-20 15:23
from django.db import migrations, models
class Migration(migrations.Migration):
<|fim▁hole|> operations = [
migrations.CreateModel(
name="A",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("col", models.CharField(max_length=10, null=True)),
],
),
]<|fim▁end|> | initial = True
dependencies = []
|
<|file_name|>types.js<|end_file_name|><|fim▁begin|>export const ADD_COCKTAIL = 'ADD_COCKTAIL';
export const LOAD_COCKTAILS = 'LOAD_COCKTAILS';
export const ADD_SPIRIT = 'ADD_SPIRIT';
export const REMOVE_SPIRIT = 'REMOVE_SPIRIT';<|fim▁hole|><|fim▁end|> | export const UPDATE_HUE = 'UPDATE_HUE'; |
<|file_name|>serializer.py<|end_file_name|><|fim▁begin|>from website.addons.base.serializer import CitationsAddonSerializer
<|fim▁hole|><|fim▁end|> | class MendeleySerializer(CitationsAddonSerializer):
addon_short_name = 'mendeley' |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>def validate(hand):
if hand < 0 or hand > 2:
return False
<|fim▁hole|> return True
def print_hand(hand, name='ゲスト'):
    """Print which hand *name* played (0=グー, 1=チョキ, 2=パー)."""
    hands = ['グー', 'チョキ', 'パー']
    message = name + 'は' + hands[hand] + 'を出しました'
    print(message)
def judge(player, computer):
    """Return the round result for *player* against *computer*.

    Hands are 0/1/2 (グー/チョキ/パー as used by print_hand); the result is
    '引き分け' on a tie, '勝ち' when the player wins, '負け' otherwise.
    """
    if player == computer:
        return '引き分け'
    # (player, computer) pairs where the player wins.
    winning_pairs = {(0, 1), (1, 2), (2, 0)}
    return '勝ち' if (player, computer) in winning_pairs else '負け'
<|file_name|>X.js<|end_file_name|><|fim▁begin|>package test0657;
<|fim▁hole|><|fim▁end|> | public class X {}
class A {} |
<|file_name|>gssapi_not_enabled.go<|end_file_name|><|fim▁begin|>// Copyright (C) MongoDB, Inc. 2017-present.<|fim▁hole|>// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
//+build !gssapi
package auth
// GSSAPI is the mechanism name for GSSAPI.
const GSSAPI = "GSSAPI"
// newGSSAPIAuthenticator is the stub used when the binary is built without
// the "gssapi" build tag (see the build constraint above): it always fails,
// telling the caller that GSSAPI support was not compiled in.
func newGSSAPIAuthenticator(cred *Cred) (Authenticator, error) {
	return nil, newAuthError("GSSAPI support not enabled during build (-tags gssapi)", nil)
}
// Licensed under the Apache License, Version 2.0 (the "License"); you may |
<|file_name|>test_review.py<|end_file_name|><|fim▁begin|>from . import load_fixture
from lintreview.config import load_config
from lintreview.diff import DiffCollection
from lintreview.review import Review, Problems, Comment
from lintreview.repo import GithubRepository, GithubPullRequest
from mock import Mock, call
from nose.tools import eq_
from github3.issues.comment import IssueComment as GhIssueComment
from github3.pulls import PullFile
from unittest import TestCase
import json
config = load_config()
class TestReview(TestCase):
def setUp(self):
repo = Mock(spec=GithubRepository)
pr = Mock(spec=GithubPullRequest,
head='abc123',
display_name='markstory/lint-review#1',
number=2)
repo.pull_request.return_value = pr
self.repo, self.pr = repo, pr
self.review = Review(repo, pr)
def test_load_comments__none_active(self):
fixture_data = load_fixture('comments_none_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
review = Review(self.repo, self.pr)
review.load_comments()
eq_(0, len(review.comments("View/Helper/AssetCompressHelper.php")))
def test_load_comments__loads_comments(self):
fixture_data = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
review = Review(self.repo, self.pr)
review.load_comments()
filename = "Routing/Filter/AssetCompressor.php"
res = review.comments(filename)
eq_(1, len(res))
expected = Comment(filename, None, 87, "A pithy remark")
eq_(expected, res[0])
filename = "View/Helper/AssetCompressHelper.php"
res = review.comments(filename)
eq_(2, len(res))
expected = Comment(filename, None, 40, "Some witty comment.")
eq_(expected, res[0])
expected = Comment(filename, None, 89, "Not such a good comment")
eq_(expected, res[1])
def test_filter_existing__removes_duplicates(self):
fixture_data = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
problems = Problems()
review = Review(self.repo, self.pr)
filename_1 = "Routing/Filter/AssetCompressor.php"
filename_2 = "View/Helper/AssetCompressHelper.php"
problems.add(filename_1, 87, 'A pithy remark')
problems.add(filename_1, 87, 'Something different')
problems.add(filename_2, 88, 'I <3 it')
problems.add(filename_2, 89, 'Not such a good comment')
review.load_comments()
review.remove_existing(problems)
res = problems.all(filename_1)
eq_(1, len(res))
expected = Comment(filename_1,
87,
87,
'A pithy remark\nSomething different')
eq_(res[0], expected)
res = problems.all(filename_2)
eq_(1, len(res))
expected = Comment(filename_2, 88, 88, 'I <3 it')
eq_(res[0], expected)
def test_publish_problems(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
sha = 'abc123'
review = Review(self.repo, self.pr)
review.publish_problems(problems, sha)
assert self.pr.create_review_comment.called
eq_(2, self.pr.create_review_comment.call_count)
assert_review_comments_created(
self.pr.create_review_comment.call_args_list,
errors,
sha)
def test_publish_status__ok_no_comment_label_or_status(self):
config = {
'OK_COMMENT': None,
'OK_LABEL': None,
'PULLREQUEST_STATUS': False,
}
review = Review(self.repo, self.pr, config)
review.publish_status(0)
assert not self.repo.create_status.called, 'Create status called'
assert not self.pr.create_comment.called, 'Comment not created'
assert not self.pr.add_label.called, 'Label added created'
def test_publish_status__ok_with_comment_label_and_status(self):
config = {
'OK_COMMENT': 'Great job!',
'OK_LABEL': 'No lint errors',
'PULLREQUEST_STATUS': True,
}
review = Review(self.repo, self.pr, config)
review.publish_status(0)
assert self.repo.create_status.called, 'Create status not called'
self.repo.create_status.assert_called_with(
self.pr.head,
'success',
'No lint errors found.')
assert self.pr.create_comment.called, 'Issue comment created'
self.pr.create_comment.assert_called_with('Great job!')
assert self.pr.add_label.called, 'Label added created'
self.pr.add_label.assert_called_with('No lint errors')
def test_publish_status__has_errors(self):
config = {
'OK_COMMENT': 'Great job!',
'OK_LABEL': 'No lint errors',
'APP_NAME': 'custom-name'
}
review = Review(self.repo, self.pr, config)
review.publish_status(1)
assert self.repo.create_status.called, 'Create status not called'
self.repo.create_status.assert_called_with(
self.pr.head,
'failure',
'Lint errors found, see pull request comments.')
assert not self.pr.create_comment.called, 'Comment not created'
assert not self.pr.add_label.called, 'Label added created'
def test_publish_problems_remove_ok_label(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
sha = 'abc123'
config = {'OK_LABEL': 'No lint'}
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish_problems(problems, sha)
assert self.pr.remove_label.called, 'Label should be removed'
assert self.pr.create_review_comment.called, 'Comments should be added'
eq_(2, self.pr.create_review_comment.call_count)
self.pr.remove_label.assert_called_with(config['OK_LABEL'])
assert_review_comments_created(
self.pr.create_review_comment.call_args_list,
errors,
sha)
def test_publish_empty_comment(self):
problems = Problems(changes=[])
review = Review(self.repo, self.pr)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'Should create a comment'
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.pr.create_comment.assert_called_with(msg)
def test_publish_empty_comment_add_ok_label(self):
problems = Problems(changes=[])
config = {'OK_LABEL': 'No lint'}
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'ok comment should be added.'
assert self.pr.remove_label.called, 'label should be removed.'
self.pr.remove_label.assert_called_with(config['OK_LABEL'])
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.pr.create_comment.assert_called_with(msg)
def test_publish_empty_comment_with_comment_status(self):
config = {
'PULLREQUEST_STATUS': True,
}
problems = Problems(changes=[])
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'Should create a comment'
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.repo.create_status.assert_called_with(
self.pr.head,
'error',
msg)
self.pr.create_comment.assert_called_with(msg)
def test_publish_comment_threshold_checks(self):
fixture = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture))
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
problems.set_changes([1])
sha = 'abc123'
review = Review(self.repo, self.pr)
review.publish_summary = Mock()
review.publish(problems, sha, 1)
assert review.publish_summary.called, 'Should have been called.'
def test_publish_summary(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
problems.set_changes([1])
review = Review(self.repo, self.pr)
review.publish_summary(problems)
assert self.pr.create_comment.called
eq_(1, self.pr.create_comment.call_count)
msg = """There are 2 errors:
* Console/Command/Task/AssetBuildTask.php, line 117 - Something bad
* Console/Command/Task/AssetBuildTask.php, line 119 - Something bad
"""
self.pr.create_comment.assert_called_with(msg)
class TestProblems(TestCase):
two_files_json = load_fixture('two_file_pull_request.json')
# Block offset so lines don't match offsets
block_offset = load_fixture('pull_request_line_offset.json')
def setUp(self):
self.problems = Problems()
def test_add(self):
self.problems.add('file.py', 10, 'Not good')
for item in self.problems:
print item
eq_(1, len(self.problems))
self.problems.add('file.py', 11, 'Not good')
eq_(2, len(self.problems))
eq_(2, len(self.problems.all()))
eq_(2, len(self.problems.all('file.py')))
eq_(0, len(self.problems.all('not there')))
def test_add__duplicate_is_ignored(self):
self.problems.add('file.py', 10, 'Not good')
eq_(1, len(self.problems))
self.problems.add('file.py', 10, 'Not good')
eq_(1, len(self.problems))
def test_add__same_line_combines(self):
self.problems.add('file.py', 10, 'Tabs bad')
self.problems.add('file.py', 10, 'Spaces are good')
eq_(1, len(self.problems))
result = self.problems.all()
expected = 'Tabs bad\nSpaces are good'
eq_(expected, result[0].body)
def test_add__same_line_ignores_duplicates(self):
self.problems.add('file.py', 10, 'Tabs bad')
self.problems.add('file.py', 10, 'Tabs bad')
eq_(1, len(self.problems))
result = self.problems.all()
expected = 'Tabs bad'
eq_(expected, result[0].body)
def test_add__with_base_path(self):
problems = Problems('/some/path/')
problems.add('/some/path/file.py', 10, 'Not good')
eq_([], problems.all('/some/path/file.py'))
eq_(1, len(problems.all('file.py')))
eq_(1, len(problems))
def test_add__with_base_path_no_trailing_slash(self):
problems = Problems('/some/path')
problems.add('/some/path/file.py', 10, 'Not good')
eq_([], problems.all('/some/path/file.py'))
eq_(1, len(problems.all('file.py')))
eq_(1, len(problems))
def test_add__with_diff_containing_block_offset(self):
res = map(lambda f: PullFile(f),
json.loads(self.block_offset))
changes = DiffCollection(res)
problems = Problems(changes=changes)
line_num = 32
problems.add('somefile.py', line_num, 'Not good')
eq_(1, len(problems))
result = problems.all('somefile.py')
eq_(changes.line_position('somefile.py', line_num), result[0].position,
'Offset should be transformed to match value in changes')
def test_add_many(self):
errors = [
('some/file.py', 10, 'Thing is wrong'),
('some/file.py', 12, 'Not good'),
]
self.problems.add_many(errors)
result = self.problems.all('some/file.py')
eq_(2, len(result))
expected = [
Comment(errors[0][0], errors[0][1], errors[0][1], errors[0][2]),
Comment(errors[1][0], errors[1][1], errors[1][1], errors[1][2]),
]
eq_(expected, result)
def test_limit_to_changes__remove_problems(self):
res = map(lambda f: PullFile(f),
json.loads(self.two_files_json))
changes = DiffCollection(res)
# Setup some fake problems.
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(None, None, 'This is a general comment'),
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something else bad'),
(filename_1, 130, 'Filtered out, as line is not changed'),
)
self.problems.add_many(errors)
filename_2 = 'Test/test_files/View/Parse/single.ctp'
errors = (
(filename_2, 2, 'Filtered out'),
(filename_2, 3, 'Something bad'),
(filename_2, 7, 'Filtered out'),
)
self.problems.add_many(errors)
self.problems.set_changes(changes)
self.problems.limit_to_changes()
result = self.problems.all(filename_1)<|fim▁hole|> (filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something else bad')]
eq_(result.sort(), expected.sort())
result = self.problems.all(filename_2)
eq_(1, len(result))
expected = [
Comment(filename_2, 3, 3, 'Something bad')
]
eq_(result, expected)
def test_has_changes(self):
problems = Problems(changes=None)
self.assertFalse(problems.has_changes())
problems = Problems(changes=[1])
assert problems.has_changes()
def assert_review_comments_created(call_args, errors, sha):
    """
    Check that the review comments match the error list.
    """
    eq_(len(call_args), len(errors), 'Errors and comment counts are off')
    # Lengths are equal past this point, so zip pairs every call with its error.
    for actual, (path, position, body) in zip(call_args, errors):
        expected = call(
            commit_id=sha,
            path=path,
            position=position,
            body=body)
        eq_(expected, actual)
expected = [
(None, None, 'This is a general comment'), |
<|file_name|>gpmdp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Display currently playing song from Google Play Music Desktop Player.
Configuration parameters:
cache_timeout: how often we refresh this module in seconds (default 5)
format: specify the items and ordering of the data in the status bar.
These area 1:1 match to gpmdp-remote's options (default is '♫ {info}').
Format of status string placeholders:
See `gpmdp-remote help`. Simply surround the items you want displayed (i.e. `album`)
with curly braces (i.e. `{album}`) and place as-desired in the format string.
{info} Print info about now playing song
{title} Print current song title
{artist} Print current song artist
{album} Print current song album
{album_art} Print current song album art URL
{time_current} Print current song time in milliseconds
{time_total} Print total song time in milliseconds
{status} Print whether GPMDP is paused or playing
{current} Print now playing song in "artist - song" format
{help} Print this help message
Requires:
gpmdp: http://www.googleplaymusicdesktopplayer.com/
gpmdp-remote: https://github.com/iandrewt/gpmdp-remote
@author Aaron Fields https://twitter.com/spirotot
@license BSD
"""
from time import time
from subprocess import check_output
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 5
format = u'♫ {info}'
    @staticmethod
    def _run_cmd(cmd):
        # Shell out to the gpmdp-remote CLI (e.g. cmd='status' -> 'Playing')
        # and return its stdout decoded as UTF-8 with surrounding whitespace
        # stripped.
        return check_output(['gpmdp-remote', cmd]).decode('utf-8').strip()
def gpmdp(self, i3s_output_list, i3s_config):
if self._run_cmd('status') == 'Paused':
result = ''<|fim▁hole|> 'time_total', 'time_current', 'album_art']
data = {}
for cmd in cmds:
if '{%s}' % cmd in self.format:
data[cmd] = self._run_cmd(cmd)
result = self.format.format(**data)
response = {
'cached_until': time() + self.cache_timeout,
'full_text': result
}
return response
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)<|fim▁end|> | else:
cmds = ['info', 'title', 'artist', 'album', 'status', 'current', |
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
This script is a trick to setup a fake Django environment, since this reusable
app will be developed and tested outside any specifiv Django project.
Via ``settings.configure`` you will be able to set all necessary settings
for your app and run the tests as if you were calling ``./manage.py test``.
Taken from https://github.com/mbrochh/tdd-with-django-reusable-app
"""
import os
import sys
from django.conf import settings
EXTERNAL_APPS = [
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.sitemaps',
'django.contrib.sites',
]
INTERNAL_APPS = [
'portlet',
'django_nose',<|fim▁hole|>]
INSTALLED_APPS = EXTERNAL_APPS + INTERNAL_APPS
COVERAGE_MODULE_EXCLUDES = [
'tests$', 'settings$', 'urls$', 'locale$',
'migrations', 'fixtures', 'admin$', 'django_extensions',
]
COVERAGE_MODULE_EXCLUDES += EXTERNAL_APPS
if not settings.configured:
settings.configure(
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
},
INSTALLED_APPS=INSTALLED_APPS,
ROOT_URLCONF='portlet.urls',
TEMPLATE_DIRS=(
os.path.join(os.path.dirname(__file__), '../templates'),
),
COVERAGE_MODULE_EXCLUDES=COVERAGE_MODULE_EXCLUDES,
COVERAGE_REPORT_HTML_OUTPUT_DIR=os.path.join(
os.path.dirname(__file__), 'coverage')
)
from django_coverage.coverage_runner import CoverageRunner
from django_nose import NoseTestSuiteRunner
class NoseCoverageTestRunner(CoverageRunner, NoseTestSuiteRunner):
"""Custom test runner that uses nose and coverage"""
pass
def runtests(*test_args):
failures = NoseTestSuiteRunner(verbosity=2, interactive=True).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])<|fim▁end|> | |
<|file_name|>UpdateDomainEndpointOptionsResultStaxUnmarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0<|fim▁hole|> * and limitations under the License.
*/
package com.amazonaws.services.cloudsearchv2.model.transform;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.cloudsearchv2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* UpdateDomainEndpointOptionsResult StAX Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateDomainEndpointOptionsResultStaxUnmarshaller implements Unmarshaller<UpdateDomainEndpointOptionsResult, StaxUnmarshallerContext> {

    /**
     * Consumes StAX events from the given context and populates an
     * {@link UpdateDomainEndpointOptionsResult}, returning when the result's
     * enclosing end element (or the end of the document) is reached.
     */
    public UpdateDomainEndpointOptionsResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        UpdateDomainEndpointOptionsResult updateDomainEndpointOptionsResult = new UpdateDomainEndpointOptionsResult();
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;

        // When parsing from the start of a document, member elements sit two
        // levels deeper (response wrapper + result wrapper).
        if (context.isStartOfDocument())
            targetDepth += 2;

        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return updateDomainEndpointOptionsResult;

            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {

                // Delegate the DomainEndpointOptions member to its own unmarshaller.
                if (context.testExpression("DomainEndpointOptions", targetDepth)) {
                    updateDomainEndpointOptionsResult.setDomainEndpointOptions(DomainEndpointOptionsStatusStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                if (context.getCurrentDepth() < originalDepth) {
                    return updateDomainEndpointOptionsResult;
                }
            }
        }
    }

    // Lazily-created shared instance; the generated unmarshaller is stateless.
    private static UpdateDomainEndpointOptionsResultStaxUnmarshaller instance;

    public static UpdateDomainEndpointOptionsResultStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new UpdateDomainEndpointOptionsResultStaxUnmarshaller();
        return instance;
    }
}
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions |
<|file_name|>casa-external_twilio_sms.py<|end_file_name|><|fim▁begin|># This is a modified version of original twilio_sms Gluu's script to work with Casa
from java.util import Arrays
from javax.faces.application import FacesMessage
from org.gluu.jsf2.message import FacesMessages
from org.gluu.oxauth.security import Identity
from org.gluu.oxauth.service import UserService, AuthenticationService
from org.gluu.oxauth.util import ServerUtil
from org.gluu.model.custom.script.type.auth import PersonAuthenticationType
from org.gluu.service.cdi.util import CdiUtil
from org.gluu.util import StringHelper, ArrayHelper
from com.google.common.base import Joiner
from com.twilio import Twilio
import com.twilio.rest.api.v2010.account.Message as TwMessage
from com.twilio.type import PhoneNumber
import random
import sys
class PersonAuthentication(PersonAuthenticationType):
def __init__(self, currentTimeMillis):
self.currentTimeMillis = currentTimeMillis
def init(self, customScript, configurationAttributes):
print "Twilio SMS. Initialized"
return True
def destroy(self, configurationAttributes):
print "Twilio SMS. Destroyed successfully"
return True
def getApiVersion(self):
return 11
def getAuthenticationMethodClaims(self, configurationAttributes):
return None
def isValidAuthenticationMethod(self, usageType, configurationAttributes):
return True
def getAlternativeAuthenticationMethod(self, usageType, configurationAttributes):
return None
def authenticate(self, configurationAttributes, requestParameters, step):
print "TwilioSMS. Authenticate for Step %s" % str(step)
identity = CdiUtil.bean(Identity)
authenticationService = CdiUtil.bean(AuthenticationService)
user = authenticationService.getAuthenticatedUser()
if step == 1:
if user == None:
credentials = identity.getCredentials()<|fim▁hole|> user_password = credentials.getPassword()
if StringHelper.isNotEmptyString(user_name) and StringHelper.isNotEmptyString(user_password):
authenticationService.authenticate(user_name, user_password)
user = authenticationService.getAuthenticatedUser()
if user == None:
return False
#Attempt to send message now if user has only one mobile number
mobiles = user.getAttributeValues("mobile")
if mobiles == None:
return False
else:
code = random.randint(100000, 999999)
identity.setWorkingParameter("randCode", code)
sid = configurationAttributes.get("twilio_sid").getValue2()
token = configurationAttributes.get("twilio_token").getValue2()
self.from_no = configurationAttributes.get("from_number").getValue2()
Twilio.init(sid, token)
if mobiles.size() == 1:
self.sendMessage(code, mobiles.get(0))
else:
chopped = ""
for numb in mobiles:
l = len(numb)
chopped += "," + numb[max(0, l-4) : l]
#converting to comma-separated list (identity does not remember lists in 3.1.3)
identity.setWorkingParameter("numbers", Joiner.on(",").join(mobiles.toArray()))
identity.setWorkingParameter("choppedNos", chopped[1:])
return True
else:
if user == None:
return False
session_attributes = identity.getSessionId().getSessionAttributes()
code = session_attributes.get("randCode")
numbers = session_attributes.get("numbers")
if step == 2 and numbers != None:
#Means the selection number page was used
idx = ServerUtil.getFirstValue(requestParameters, "OtpSmsloginForm:indexOfNumber")
if idx != None and code != None:
sendToNumber = numbers.split(",")[int(idx)]
self.sendMessage(code, sendToNumber)
return True
else:
return False
success = False
form_passcode = ServerUtil.getFirstValue(requestParameters, "OtpSmsloginForm:passcode")
if form_passcode != None and code == form_passcode:
print "TwilioSMS. authenticate. 6-digit code matches with code sent via SMS"
success = True
else:
facesMessages = CdiUtil.bean(FacesMessages)
facesMessages.setKeepMessages()
facesMessages.clear()
facesMessages.add(FacesMessage.SEVERITY_ERROR, "Wrong code entered")
return success
def prepareForStep(self, configurationAttributes, requestParameters, step):
print "TwilioSMS. Prepare for Step %s" % str(step)
return True
def getExtraParametersForStep(self, configurationAttributes, step):
if step > 1:
return Arrays.asList("randCode", "numbers", "choppedNos")
return None
def getCountAuthenticationSteps(self, configurationAttributes):
print "TwilioSMS. getCountAuthenticationSteps called"
if CdiUtil.bean(Identity).getWorkingParameter("numbers") == None:
return 2
else:
return 3
def getPageForStep(self, configurationAttributes, step):
print "TwilioSMS. getPageForStep called %s" % step
print "numbers are %s" % CdiUtil.bean(Identity).getWorkingParameter("numbers")
defPage = "/casa/otp_sms.xhtml"
if step == 2:
if CdiUtil.bean(Identity).getWorkingParameter("numbers") == None:
return defPage
else:
return "/casa/otp_sms_prompt.xhtml"
elif step == 3:
return defPage
return ""
def logout(self, configurationAttributes, requestParameters):
return True
def hasEnrollments(self, configurationAttributes, user):
return user.getAttribute("mobile") != None
def sendMessage(self, code, numb):
try:
if numb[:1] != "+":
numb = "+" + numb
print "TwilioSMS. Sending SMS message (%s) to %s" % (code, numb)
msg = "%s is your passcode to access your account" % code
message = TwMessage.creator(PhoneNumber(numb), PhoneNumber(self.from_no), msg).create()
print "TwilioSMS. Message Sid: %s" % message.getSid()
except:
print "TwilioSMS. Error sending message", sys.exc_info()[1]<|fim▁end|> | user_name = credentials.getUsername() |
<|file_name|>bloom.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import turtle
import random
def bloom(radius):
turtle.colormode(255)
for rad in range(40, 10, -5):
for looper in range(360//rad):
turtle.up()
turtle.circle(radius+rad, rad)
turtle.begin_fill()
turtle.fillcolor((200+random.randint(0, rad),
200+random.randint(0, rad),
200+random.randint(0, rad)))
turtle.down()
turtle.circle(-rad)
turtle.end_fill()
def main():
"""Simple flower, using global turtle instance"""
turtle.speed(0)
turtle.colormode(1.0)
bloom(5)
turtle.exitonclick()
###
<|fim▁hole|> main()<|fim▁end|> | if __name__ == "__main__": |
<|file_name|>ErrorSimulationService.java<|end_file_name|><|fim▁begin|>package com.capgemini.resilience.employer.service;
public interface ErrorSimulationService {
void generateErrorDependingOnErrorPossibility();<|fim▁hole|><|fim▁end|> | } |
<|file_name|>lp2pass.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import csv
import sys
import json
import hashlib
from subprocess import Popen, PIPE
from urlparse import urlparse
DEFAULT_GROUP = "lastpass-import"
class Record:
def __init__(self, d):
self.d = d
self.password = d['password']
if d['grouping'] in [None, "", "(none)"]:
self.group = DEFAULT_GROUP
else:
self.group = d['grouping']
self.d['kind'] = "lastpass imported item"
self.name = d['name']
self.username = d['username']
self.netloc = urlparse(d['url']).netloc
self.text = "{}\n{}".format(
self.password, json.dumps(self.d, sort_keys=True,
indent=2, separators=(',', ': ')))
self.md5 = hashlib.md5(self.text).hexdigest()
if self.name is None or self.name == "":
if self.netloc is None or self.netloc == "":
self.name = self.md5
else:
self.name = self.netloc
if self.username is None or self.username == "":
self.username = "unknown"
self.id = "{}/{}/{}".format(self.group,
self.name.replace('/', '_'),
self.username.replace('/', '_'))
self.items = [self]
def append(self, entry):
self.items.append(entry)
def writeToPass(self):
if len(self.items) == 1:<|fim▁hole|> self.stdout = process.communicate(str(self))
self.result = process.returncode
else:
for (i, v) in enumerate(self.items):
key = "{}/{}".format(self.id, i)
process = Popen(["pass", "insert", "-m", key],
stdin=PIPE, stdout=PIPE, stderr=None)
self.stdout = process.communicate(str(v))
self.result = process.returncode
def __str__(self):
return self.text
class Records:
def __init__(self):
self.d = dict()
def add(self, r):
if r.id not in self.d:
self.d[r.id] = r
else:
self.d[r.id].append(r)
def get(self, k):
return self.d[k]
fn = sys.argv[1]
with open(fn, 'rb') as cf:
lp = csv.DictReader(cf, delimiter=',')
rs = Records()
for l in lp:
r = Record(l)
rs.add(r)
for k, v in rs.d.items():
v.writeToPass()
if v.result != 0:
print "{} {} {}".format(v.result, len(v.items), k)<|fim▁end|> | process = Popen(["pass", "insert", "-m", self.id], stdin=PIPE,
stdout=PIPE, stderr=None) |
<|file_name|>pulsar_objectstore_test.py<|end_file_name|><|fim▁begin|>from os import makedirs
from os.path import join, dirname, exists
from string import Template
from galaxy.util.bunch import Bunch
from galaxy.objectstore import build_object_store_from_config
from .test_utils import TempDirectoryTestCase
from .test_objectstore import MockDataset
class PulsarObjectStoreTest(TempDirectoryTestCase):
def __write(self, contents, name):
path = join(self.temp_directory, name)
directory = dirname(path)
if not exists(directory):
makedirs(directory)
open(path, "wb").write(contents)
return path
def test_pulsar_objectstore(self):
# Define real object store used by Pulsar server.
object_store_config_file = join(self.temp_directory, "object_store_conf.xml")
with open(object_store_config_file, "w") as configf:
config_template = Template("""<?xml version="1.0"?>
<object_store type="disk">
<files_dir path="${temp_directory}"/>
<extra_dir type="temp" path="${temp_directory}"/>
<extra_dir type="job_work" path="${temp_directory}"/>
</object_store>
""")
config_contents = config_template.safe_substitute(temp_directory=self.temp_directory)
configf.write(config_contents)
app_conf = dict(
object_store_config_file=object_store_config_file,
private_token="12345",
)
from .test_utils import test_pulsar_server
with test_pulsar_server(app_conf=app_conf) as server:
url = server.application_url
# Define a proxy Pulsar object store.
proxy_object_store_config_file = join(self.temp_directory, "proxy_object_store_conf.xml")
with open(proxy_object_store_config_file, "w") as configf:
config_template = Template("""<?xml version="1.0"?>
<object_store type="pulsar" url="$url" private_token="12345" transport="urllib">
<!-- private_token is optional - see Pulsar documentation for more information. -->
<!-- transport is optional, set to curl to use libcurl instead of urllib for communication with Pulsar. -->
</object_store>
""")
contents = config_template.safe_substitute(url=url)
configf.write(contents)
config = Bunch(object_store_config_file=proxy_object_store_config_file)
object_store = build_object_store_from_config(config=config)
# Test no dataset with id 1 exists.
absent_dataset = MockDataset(1)
assert not object_store.exists(absent_dataset)
# Write empty dataset 2 in second backend, ensure it is empty and
# exists.
empty_dataset = MockDataset(2)
self.__write(b"", "000/dataset_2.dat")
assert object_store.exists(empty_dataset)
assert object_store.empty(empty_dataset)
# Write non-empty dataset in backend 1, test it is not emtpy & exists.
hello_world_dataset = MockDataset(3)
self.__write(b"Hello World!", "000/dataset_3.dat")
assert object_store.exists(hello_world_dataset)
assert not object_store.empty(hello_world_dataset)
# Test get_data
data = object_store.get_data(hello_world_dataset)
assert data == "Hello World!"
data = object_store.get_data(hello_world_dataset, start=1, count=6)
assert data == "ello W"
# Test Size
# Test absent and empty datasets yield size of 0.
assert object_store.size(absent_dataset) == 0
assert object_store.size(empty_dataset) == 0
# Elsewise
assert object_store.size(hello_world_dataset) > 0 # Should this always be the number of bytes?
# Test percent used (to some degree)
percent_store_used = object_store.get_store_usage_percent()
assert percent_store_used > 0.0
assert percent_store_used < 100.0
# Test update_from_file test
output_dataset = MockDataset(4)
output_real_path = join(self.temp_directory, "000", "dataset_4.dat")
assert not exists(output_real_path)
output_working_path = self.__write(b"NEW CONTENTS", "job_working_directory1/example_output")
object_store.update_from_file(output_dataset, file_name=output_working_path, create=True)
assert exists(output_real_path)
# Test delete
to_delete_dataset = MockDataset(5)<|fim▁hole|> to_delete_real_path = self.__write(b"content to be deleted!", "000/dataset_5.dat")
assert object_store.exists(to_delete_dataset)
assert object_store.delete(to_delete_dataset)
assert not object_store.exists(to_delete_dataset)
assert not exists(to_delete_real_path)
# Test json content.
complex_contents_dataset = MockDataset(6)
complex_content = b'{"a":6}'
self.__write(complex_content, "000/dataset_6.dat")
assert object_store.exists(complex_contents_dataset)
data = object_store.get_data(complex_contents_dataset) == complex_content<|fim▁end|> | |
<|file_name|>math.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
pyextend.core.math
~~~~~~~~~~~~~~~~~~
pyextend core math tools.
:copyright: (c) 2016 by Vito.
:license: GNU, see LICENSE for more details.
"""
def isprime(n):
"""Check the number is prime value. if prime value returns True, not False."""
n = abs(int(n))
if n < 2:
return False
if n == 2:
return True
if not n & 1:
return False
# 在一般领域, 对正整数n, 如果用2 到 sqrt(n) 之间所有整数去除, 均无法整除, 则n为质数.
for x in range(3, int(n ** 0.5)+1, 2):
if n % x == 0:
return False
<|fim▁hole|><|fim▁end|> | return True |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/// <reference path="../typings/main.d.ts"/>
import {Request, Response} from "express";
var express = require('express');
var util = require('util');
var router = express.Router();
/* GET home page. */<|fim▁hole|> res.render('index', { title: 'Express' });
});
module.exports = router;<|fim▁end|> | router.get('/', function(req: Request, res: Response, next: Function) { |
<|file_name|>debug.rs<|end_file_name|><|fim▁begin|>use core::sync::atomic::{AtomicUsize, Ordering};
use spin::{Once, RwLock, RwLockReadGuard, RwLockWriteGuard};
use crate::arch::debug::Writer;
use crate::event;
use crate::scheme::*;
use crate::sync::WaitQueue;
use crate::syscall::flag::{EventFlags, EVENT_READ, F_GETFL, F_SETFL, O_ACCMODE, O_NONBLOCK};
use crate::syscall::scheme::Scheme;
static SCHEME_ID: AtomicSchemeId = AtomicSchemeId::default();
static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
/// Input queue
static INPUT: Once<WaitQueue<u8>> = Once::new();
/// Initialize input queue, called if needed
fn init_input() -> WaitQueue<u8> {
WaitQueue::new()
}
#[derive(Clone, Copy)]
struct Handle {
flags: usize,
}
static HANDLES: Once<RwLock<BTreeMap<usize, Handle>>> = Once::new();
fn init_handles() -> RwLock<BTreeMap<usize, Handle>> {
RwLock::new(BTreeMap::new())
}
fn handles() -> RwLockReadGuard<'static, BTreeMap<usize, Handle>> {
HANDLES.call_once(init_handles).read()
}
fn handles_mut() -> RwLockWriteGuard<'static, BTreeMap<usize, Handle>> {
HANDLES.call_once(init_handles).write()
}
/// Add to the input queue
pub fn debug_input(data: u8) {
INPUT.call_once(init_input).send(data);
}
// Notify readers of input updates
pub fn debug_notify() {
for (id, _handle) in handles().iter() {
event::trigger(SCHEME_ID.load(Ordering::SeqCst), *id, EVENT_READ);
}
}
pub struct DebugScheme;
impl DebugScheme {
pub fn new(scheme_id: SchemeId) -> Self {
SCHEME_ID.store(scheme_id, Ordering::SeqCst);
Self
}
}
impl Scheme for DebugScheme {
fn open(&self, path: &str, flags: usize, uid: u32, _gid: u32) -> Result<usize> {
if uid != 0 {
return Err(Error::new(EPERM));
}
if ! path.is_empty() {
return Err(Error::new(ENOENT));
}
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
handles_mut().insert(id, Handle {
flags: flags & ! O_ACCMODE
});
Ok(id)
}
/// Read the file `number` into the `buffer`
///
/// Returns the number of bytes read
fn read(&self, id: usize, buf: &mut [u8]) -> Result<usize> {<|fim▁hole|>
INPUT.call_once(init_input)
.receive_into(buf, handle.flags & O_NONBLOCK != O_NONBLOCK, "DebugScheme::read")
.ok_or(Error::new(EINTR))
}
/// Write the `buffer` to the `file`
///
/// Returns the number of bytes written
fn write(&self, id: usize, buf: &[u8]) -> Result<usize> {
let _handle = {
let handles = handles();
*handles.get(&id).ok_or(Error::new(EBADF))?
};
Writer::new().write(buf);
Ok(buf.len())
}
fn fcntl(&self, id: usize, cmd: usize, arg: usize) -> Result<usize> {
let mut handles = handles_mut();
if let Some(handle) = handles.get_mut(&id) {
match cmd {
F_GETFL => Ok(handle.flags),
F_SETFL => {
handle.flags = arg & ! O_ACCMODE;
Ok(0)
},
_ => Err(Error::new(EINVAL))
}
} else {
Err(Error::new(EBADF))
}
}
fn fevent(&self, id: usize, _flags: EventFlags) -> Result<EventFlags> {
let _handle = {
let handles = handles();
*handles.get(&id).ok_or(Error::new(EBADF))?
};
Ok(EventFlags::empty())
}
fn fpath(&self, id: usize, buf: &mut [u8]) -> Result<usize> {
let _handle = {
let handles = handles();
*handles.get(&id).ok_or(Error::new(EBADF))?
};
let mut i = 0;
let scheme_path = b"debug:";
while i < buf.len() && i < scheme_path.len() {
buf[i] = scheme_path[i];
i += 1;
}
Ok(i)
}
fn fsync(&self, id: usize) -> Result<usize> {
let _handle = {
let handles = handles();
*handles.get(&id).ok_or(Error::new(EBADF))?
};
Ok(0)
}
/// Close the file `number`
fn close(&self, id: usize) -> Result<usize> {
let _handle = {
let mut handles = handles_mut();
handles.remove(&id).ok_or(Error::new(EBADF))?
};
Ok(0)
}
}<|fim▁end|> | let handle = {
let handles = handles();
*handles.get(&id).ok_or(Error::new(EBADF))?
}; |
<|file_name|>getSNREClassList.py<|end_file_name|><|fim▁begin|>import urllib.request as urllib
from bs4 import BeautifulSoup as bs
import unicodedata
import pandas as pd
import os
baseURL='http://snre.ifas.ufl.edu/academics/graduate/courses-syllabi-and-curriculum/'
classListFile='majorClassLists/SNREList.csv'
html_titles = ['Principles of Ecology Courses','Particular Perspectives and Systems Ecology Courses',
'Natural Science Courses','Social Sciences Courses',
'Sustainability Studies Courses','Research and Design Methods Courses']
short_names = ['Principles of Ecology', 'Particular Systems', 'Natural Science',
'Social Science', 'Sustainability', 'Research & Design']
catagories = pd.DataFrame({'html_title':html_titles,'subCatagory':short_names})
#Only run if this datafile doesn't exist
if os.path.exists(classListFile):
print('SNRE List exists. Delete it if you want to remake it: ', classListFile)
exit()
pageSoup=bs(urllib.urlopen(baseURL), 'lxml')
# deal with unicode
def convert_u(t):
return unicodedata.normalize('NFKD', t)
################################################
# functions defining different html sections for
# use with beautifulsoup
# Class rows are 'tr' elements with exactly 4 'td' elements
def is_class_listing(tag):
if tag.name=='tr':
return len(tag.find_all('td')) == 4
else:
return False
######################################################<|fim▁hole|>for catagory_section in pageSoup.find_all('table'):
html_title = convert_u(catagory_section.find('h3').text)
subCatagory = catagories['subCatagory'][catagories.html_title==html_title].tolist()[0]
for class_listing in catagory_section.find_all(is_class_listing):
prefix_and_number = convert_u(class_listing.find_all('td')[0].text)
title = convert_u(class_listing.find_all('td')[1].text).strip()
prefix = prefix_and_number.split(' ')[0].strip()
number = prefix_and_number.split(' ')[1].strip()
class_list.append({'coursePrefix':prefix,
'courseNum':number,
'title':title,
'subCategory':subCatagory})
class_list = pd.DataFrame(class_list)
############################
#Some class have multiple sub catagories. Go thru and make one row per class
#with multiple subCatagoeries.
#There are duplicate rows where the only difference is the subcategory. First find
#all unique rows.
class_list_temp=class_list[['coursePrefix','courseNum','title']].drop_duplicates()
#Initialize a subCategory for the unique rows
class_list_temp['subCategory']=''
#Go thru row by row and pull the subCategories out, combining them where there are multiple
for index, row in class_list_temp.iterrows():
#pull out the subcategories in a list
subCats=class_list['subCategory'][class_list['title']==row['title']].drop_duplicates().tolist()
#Clear any nan values that sneak in
subCats=[x for x in subCats if str(x) != 'nan']
#Combine them in a string and put them in the temp dataframe
row['subCategory']=','.join(subCats)
class_list = class_list_temp
class_list.to_csv(classListFile, index=False)<|fim▁end|> | # Primary scraping code
class_list = []
|
<|file_name|>joni.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | showWord(["v. ","vin jòn.<br>"]) |
<|file_name|>mutations.py<|end_file_name|><|fim▁begin|>from django.contrib.auth import authenticate, get_user_model
from graphene import AbstractType, relay, Field, String, ObjectType, Union, List
from users.jwt_schema import TokensSuccess
from users.jwt_util import get_jwt_token
from users.schema.definitions import Viewer
class Error(ObjectType):
"""Form Errors
https://medium.com/@tarkus/validation-and-user-errors-in-graphql-mutations-39ca79cd00bf#.ts99uxfnr
"""
key = String()
message = String(required=True)
class FormErrors(ObjectType):
"""Form Errors
https://medium.com/@tarkus/validation-and-user-errors-in-graphql-mutations-39ca79cd00bf#.ts99uxfnr
"""
errors = List(Error)
class AuthFormUnion(Union):
"""Returns either token error or token success"""
<|fim▁hole|>class LoginMutation(relay.ClientIDMutation):
class Input:
email = String(required=True)
password = String(required=True)
auth_form_payload = Field(AuthFormUnion)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
email = input.get('email')
password = input.get('password')
user_exists = get_user_model().objects.filter(email=email)
errors = []
if not user_exists:
error = Error(
key='email',
message='A user with this email doesn\'t exist.')
errors.append(error)
return LoginMutation(FormErrors(errors))
user_password_correct = user_exists[0].check_password(password)
if not user_password_correct:
error = Error(key='password', message='Password is incorrect')
errors.append(error)
return LoginMutation(FormErrors(errors))
user = authenticate(username=email, password=password)
jwt_token = get_jwt_token(user)
if user and jwt_token:
tokens = TokensSuccess(
jwt_token
)
viewer = Viewer(
user=user,
tokens=tokens
)
return LoginMutation(viewer)
class SignupUserMutation(relay.ClientIDMutation):
class Input:
email = String(required=True)
password = String(required=True)
auth_form_payload = Field(AuthFormUnion)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
email = input.get('email')
password = input.get('password')
user = get_user_model().objects.filter(email=email)
errors = []
if not user:
user = get_user_model().objects.create_user(email=email, password=password)
jwt_token = get_jwt_token(user)
token = TokensSuccess(
token=jwt_token
)
viewer = Viewer(
user=user,
tokens=token
)
return SignupUserMutation(viewer)
if user:
error = Error(
key='email',
message='A user with this email already exists.')
errors.append(error)
return SignupUserMutation(FormErrors(errors))
class UserMutations(AbstractType):
login = LoginMutation.Field()
signup = SignupUserMutation.Field()<|fim▁end|> | class Meta:
types = (Viewer, FormErrors)
|
<|file_name|>prelude.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//! Traits and essential types inteded for blanket imports.
pub use {
AppInfoExt,
Cast,
Continue,
IsA,
ObjectExt,
StaticType,
ToValue,
};<|fim▁end|> | |
<|file_name|>TargetClass.java<|end_file_name|><|fim▁begin|>package com.bjorktech.cayman.idea.designpattern.structure.proxy;
public class TargetClass implements TargetInterface {
@Override
public long add(long a, long b) {
long temp = a + b;
System.out.println(temp);
return temp;
}
<|fim▁hole|> return temp;
}
}<|fim▁end|> | @Override
public long sub(long a, long b) {
long temp = a - b;
System.out.println(temp); |
<|file_name|>generated.pb.go<|end_file_name|><|fim▁begin|>/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by protoc-gen-gogo.
// source: k8s.io/kubernetes/vendor/k8s.io/api/authorization/v1beta1/generated.proto
// DO NOT EDIT!
/*
Package v1beta1 is a generated protocol buffer package.
It is generated from these files:
k8s.io/kubernetes/vendor/k8s.io/api/authorization/v1beta1/generated.proto
It has these top-level messages:
ExtraValue
LocalSubjectAccessReview
NonResourceAttributes
NonResourceRule
ResourceAttributes
ResourceRule
SelfSubjectAccessReview
SelfSubjectAccessReviewSpec
SelfSubjectRulesReview
SelfSubjectRulesReviewSpec
SubjectAccessReview
SubjectAccessReviewSpec
SubjectAccessReviewStatus
SubjectRulesReviewStatus
*/
package v1beta1
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys"
import strings "strings"
import reflect "reflect"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
func (m *ExtraValue) Reset() { *m = ExtraValue{} }
func (*ExtraValue) ProtoMessage() {}
func (*ExtraValue) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{0} }
func (m *LocalSubjectAccessReview) Reset() { *m = LocalSubjectAccessReview{} }
func (*LocalSubjectAccessReview) ProtoMessage() {}
func (*LocalSubjectAccessReview) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{1}
}
func (m *NonResourceAttributes) Reset() { *m = NonResourceAttributes{} }
func (*NonResourceAttributes) ProtoMessage() {}
func (*NonResourceAttributes) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{2} }
func (m *NonResourceRule) Reset() { *m = NonResourceRule{} }
func (*NonResourceRule) ProtoMessage() {}
func (*NonResourceRule) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{3} }
func (m *ResourceAttributes) Reset() { *m = ResourceAttributes{} }
func (*ResourceAttributes) ProtoMessage() {}
func (*ResourceAttributes) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{4} }
func (m *ResourceRule) Reset() { *m = ResourceRule{} }
func (*ResourceRule) ProtoMessage() {}
func (*ResourceRule) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{5} }
func (m *SelfSubjectAccessReview) Reset() { *m = SelfSubjectAccessReview{} }
func (*SelfSubjectAccessReview) ProtoMessage() {}
func (*SelfSubjectAccessReview) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{6} }
func (m *SelfSubjectAccessReviewSpec) Reset() { *m = SelfSubjectAccessReviewSpec{} }
func (*SelfSubjectAccessReviewSpec) ProtoMessage() {}
func (*SelfSubjectAccessReviewSpec) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{7}
}
func (m *SelfSubjectRulesReview) Reset() { *m = SelfSubjectRulesReview{} }
func (*SelfSubjectRulesReview) ProtoMessage() {}
func (*SelfSubjectRulesReview) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{8} }
func (m *SelfSubjectRulesReviewSpec) Reset() { *m = SelfSubjectRulesReviewSpec{} }
func (*SelfSubjectRulesReviewSpec) ProtoMessage() {}
func (*SelfSubjectRulesReviewSpec) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{9}
}
func (m *SubjectAccessReview) Reset() { *m = SubjectAccessReview{} }
func (*SubjectAccessReview) ProtoMessage() {}
func (*SubjectAccessReview) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{10} }
func (m *SubjectAccessReviewSpec) Reset() { *m = SubjectAccessReviewSpec{} }
func (*SubjectAccessReviewSpec) ProtoMessage() {}
func (*SubjectAccessReviewSpec) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{11}
}
func (m *SubjectAccessReviewStatus) Reset() { *m = SubjectAccessReviewStatus{} }
func (*SubjectAccessReviewStatus) ProtoMessage() {}
func (*SubjectAccessReviewStatus) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{12}
}
func (m *SubjectRulesReviewStatus) Reset() { *m = SubjectRulesReviewStatus{} }
func (*SubjectRulesReviewStatus) ProtoMessage() {}
func (*SubjectRulesReviewStatus) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{13}
}
func init() {
proto.RegisterType((*ExtraValue)(nil), "k8s.io.api.authorization.v1beta1.ExtraValue")
proto.RegisterType((*LocalSubjectAccessReview)(nil), "k8s.io.api.authorization.v1beta1.LocalSubjectAccessReview")
proto.RegisterType((*NonResourceAttributes)(nil), "k8s.io.api.authorization.v1beta1.NonResourceAttributes")
proto.RegisterType((*NonResourceRule)(nil), "k8s.io.api.authorization.v1beta1.NonResourceRule")
proto.RegisterType((*ResourceAttributes)(nil), "k8s.io.api.authorization.v1beta1.ResourceAttributes")
proto.RegisterType((*ResourceRule)(nil), "k8s.io.api.authorization.v1beta1.ResourceRule")
proto.RegisterType((*SelfSubjectAccessReview)(nil), "k8s.io.api.authorization.v1beta1.SelfSubjectAccessReview")
proto.RegisterType((*SelfSubjectAccessReviewSpec)(nil), "k8s.io.api.authorization.v1beta1.SelfSubjectAccessReviewSpec")
proto.RegisterType((*SelfSubjectRulesReview)(nil), "k8s.io.api.authorization.v1beta1.SelfSubjectRulesReview")
proto.RegisterType((*SelfSubjectRulesReviewSpec)(nil), "k8s.io.api.authorization.v1beta1.SelfSubjectRulesReviewSpec")
proto.RegisterType((*SubjectAccessReview)(nil), "k8s.io.api.authorization.v1beta1.SubjectAccessReview")
proto.RegisterType((*SubjectAccessReviewSpec)(nil), "k8s.io.api.authorization.v1beta1.SubjectAccessReviewSpec")
proto.RegisterType((*SubjectAccessReviewStatus)(nil), "k8s.io.api.authorization.v1beta1.SubjectAccessReviewStatus")
proto.RegisterType((*SubjectRulesReviewStatus)(nil), "k8s.io.api.authorization.v1beta1.SubjectRulesReviewStatus")
}
func (m ExtraValue) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m ExtraValue) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m) > 0 {
for _, s := range m {
dAtA[i] = 0xa
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
return i, nil
}
func (m *LocalSubjectAccessReview) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *LocalSubjectAccessReview) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.ObjectMeta.Size()))
n1, err := m.ObjectMeta.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n1
dAtA[i] = 0x12
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.Spec.Size()))
n2, err := m.Spec.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n2
dAtA[i] = 0x1a
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.Status.Size()))
n3, err := m.Status.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n3
return i, nil
}
func (m *NonResourceAttributes) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *NonResourceAttributes) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Path)))
i += copy(dAtA[i:], m.Path)
dAtA[i] = 0x12
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Verb)))
i += copy(dAtA[i:], m.Verb)
return i, nil
}
func (m *NonResourceRule) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *NonResourceRule) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Verbs) > 0 {
for _, s := range m.Verbs {
dAtA[i] = 0xa
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if len(m.NonResourceURLs) > 0 {
for _, s := range m.NonResourceURLs {
dAtA[i] = 0x12
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
return i, nil
}
func (m *ResourceAttributes) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *ResourceAttributes) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Namespace)))
i += copy(dAtA[i:], m.Namespace)
dAtA[i] = 0x12
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Verb)))
i += copy(dAtA[i:], m.Verb)
dAtA[i] = 0x1a
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Group)))
i += copy(dAtA[i:], m.Group)
dAtA[i] = 0x22
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Version)))
i += copy(dAtA[i:], m.Version)
dAtA[i] = 0x2a
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Resource)))
i += copy(dAtA[i:], m.Resource)
dAtA[i] = 0x32
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Subresource)))
i += copy(dAtA[i:], m.Subresource)
dAtA[i] = 0x3a
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Name)))
i += copy(dAtA[i:], m.Name)
return i, nil
}
func (m *ResourceRule) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *ResourceRule) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Verbs) > 0 {
for _, s := range m.Verbs {
dAtA[i] = 0xa
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if len(m.APIGroups) > 0 {
for _, s := range m.APIGroups {
dAtA[i] = 0x12
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if len(m.Resources) > 0 {
for _, s := range m.Resources {
dAtA[i] = 0x1a
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if len(m.ResourceNames) > 0 {
for _, s := range m.ResourceNames {
dAtA[i] = 0x22
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
return i, nil
}
// Marshal allocates a buffer of exactly m.Size() bytes, serializes m into
// it via MarshalTo, and returns the filled prefix.
func (m *SelfSubjectAccessReview) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes the three embedded messages (ObjectMeta, Spec, Status)
// as length-delimited fields 1-3 and returns the number of bytes written.
func (m *SelfSubjectAccessReview) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i // generated-code no-op
	var l int
	_ = l // generated-code no-op
	dAtA[i] = 0xa // field 1 (ObjectMeta), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.ObjectMeta.Size()))
	n4, err := m.ObjectMeta.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n4
	dAtA[i] = 0x12 // field 2 (Spec), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.Spec.Size()))
	n5, err := m.Spec.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n5
	dAtA[i] = 0x1a // field 3 (Status), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.Status.Size()))
	n6, err := m.Status.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n6
	return i, nil
}
// Marshal allocates a buffer of exactly m.Size() bytes, serializes m into
// it via MarshalTo, and returns the filled prefix.
func (m *SelfSubjectAccessReviewSpec) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes the two optional (pointer-valued) sub-messages; a nil
// pointer is simply omitted from the output, per proto2/proto3 semantics.
func (m *SelfSubjectAccessReviewSpec) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i // generated-code no-op
	var l int
	_ = l // generated-code no-op
	if m.ResourceAttributes != nil {
		dAtA[i] = 0xa // field 1 (ResourceAttributes), wire type 2
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(m.ResourceAttributes.Size()))
		n7, err := m.ResourceAttributes.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n7
	}
	if m.NonResourceAttributes != nil {
		dAtA[i] = 0x12 // field 2 (NonResourceAttributes), wire type 2
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(m.NonResourceAttributes.Size()))
		n8, err := m.NonResourceAttributes.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n8
	}
	return i, nil
}
// Marshal allocates a buffer of exactly m.Size() bytes, serializes m into
// it via MarshalTo, and returns the filled prefix.
func (m *SelfSubjectRulesReview) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes the three embedded messages (ObjectMeta, Spec, Status)
// as length-delimited fields 1-3 and returns the number of bytes written.
func (m *SelfSubjectRulesReview) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i // generated-code no-op
	var l int
	_ = l // generated-code no-op
	dAtA[i] = 0xa // field 1 (ObjectMeta), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.ObjectMeta.Size()))
	n9, err := m.ObjectMeta.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n9
	dAtA[i] = 0x12 // field 2 (Spec), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.Spec.Size()))
	n10, err := m.Spec.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n10
	dAtA[i] = 0x1a // field 3 (Status), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.Status.Size()))
	n11, err := m.Status.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n11
	return i, nil
}
// Marshal allocates a buffer of exactly m.Size() bytes, serializes m into
// it via MarshalTo, and returns the filled prefix.
func (m *SelfSubjectRulesReviewSpec) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes the single string field (Namespace) and returns the
// number of bytes written.
func (m *SelfSubjectRulesReviewSpec) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i // generated-code no-op
	var l int
	_ = l // generated-code no-op
	dAtA[i] = 0xa // field 1 (Namespace), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(len(m.Namespace)))
	i += copy(dAtA[i:], m.Namespace)
	return i, nil
}
// Marshal allocates a buffer of exactly m.Size() bytes, serializes m into
// it via MarshalTo, and returns the filled prefix.
func (m *SubjectAccessReview) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes the three embedded messages (ObjectMeta, Spec, Status)
// as length-delimited fields 1-3 and returns the number of bytes written.
func (m *SubjectAccessReview) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i // generated-code no-op
	var l int
	_ = l // generated-code no-op
	dAtA[i] = 0xa // field 1 (ObjectMeta), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.ObjectMeta.Size()))
	n12, err := m.ObjectMeta.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n12
	dAtA[i] = 0x12 // field 2 (Spec), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.Spec.Size()))
	n13, err := m.Spec.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n13
	dAtA[i] = 0x1a // field 3 (Status), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.Status.Size()))
	n14, err := m.Status.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n14
	return i, nil
}
// Marshal allocates a buffer of exactly m.Size() bytes, serializes m into
// it via MarshalTo, and returns the filled prefix.
func (m *SubjectAccessReviewSpec) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m and returns the number of bytes written.  Optional
// pointer sub-messages are skipped when nil; the Extra map is emitted as
// repeated field-5 entries with keys sorted for deterministic output, each
// entry itself a nested message with key=field 1 and value=field 2.
func (m *SubjectAccessReviewSpec) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i // generated-code no-op
	var l int
	_ = l // generated-code no-op
	if m.ResourceAttributes != nil {
		dAtA[i] = 0xa // field 1 (ResourceAttributes), wire type 2
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(m.ResourceAttributes.Size()))
		n15, err := m.ResourceAttributes.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n15
	}
	if m.NonResourceAttributes != nil {
		dAtA[i] = 0x12 // field 2 (NonResourceAttributes), wire type 2
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(m.NonResourceAttributes.Size()))
		n16, err := m.NonResourceAttributes.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n16
	}
	dAtA[i] = 0x1a // field 3 (User), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(len(m.User)))
	i += copy(dAtA[i:], m.User)
	if len(m.Groups) > 0 {
		for _, s := range m.Groups {
			dAtA[i] = 0x22 // field 4 (Groups), wire type 2
			i++
			l = len(s)
			// Inline varint encoding of the string length.
			for l >= 1<<7 {
				dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
				l >>= 7
				i++
			}
			dAtA[i] = uint8(l)
			i++
			i += copy(dAtA[i:], s)
		}
	}
	if len(m.Extra) > 0 {
		// Sort keys so repeated serialization of the same map is stable.
		keysForExtra := make([]string, 0, len(m.Extra))
		for k := range m.Extra {
			keysForExtra = append(keysForExtra, string(k))
		}
		github_com_gogo_protobuf_sortkeys.Strings(keysForExtra)
		for _, k := range keysForExtra {
			dAtA[i] = 0x2a // field 5 (Extra map entry), wire type 2
			i++
			v := m.Extra[string(k)]
			msgSize := 0
			if (&v) != nil { // generated guard; always true for an addressed local
				msgSize = (&v).Size()
				msgSize += 1 + sovGenerated(uint64(msgSize))
			}
			mapSize := 1 + len(k) + sovGenerated(uint64(len(k))) + msgSize
			i = encodeVarintGenerated(dAtA, i, uint64(mapSize))
			dAtA[i] = 0xa // map entry field 1 (key), wire type 2
			i++
			i = encodeVarintGenerated(dAtA, i, uint64(len(k)))
			i += copy(dAtA[i:], k)
			dAtA[i] = 0x12 // map entry field 2 (value), wire type 2
			i++
			i = encodeVarintGenerated(dAtA, i, uint64((&v).Size()))
			n17, err := (&v).MarshalTo(dAtA[i:])
			if err != nil {
				return 0, err
			}
			i += n17
		}
	}
	dAtA[i] = 0x32 // field 6 (UID), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(len(m.UID)))
	i += copy(dAtA[i:], m.UID)
	return i, nil
}
// Marshal allocates a buffer of exactly m.Size() bytes, serializes m into
// it via MarshalTo, and returns the filled prefix.
func (m *SubjectAccessReviewStatus) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m and returns the number of bytes written.  Booleans
// (Allowed, Denied) use wire type 0 with a single 0/1 payload byte; Reason
// and EvaluationError are length-delimited strings.
func (m *SubjectAccessReviewStatus) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i // generated-code no-op
	var l int
	_ = l // generated-code no-op
	dAtA[i] = 0x8 // field 1 (Allowed), wire type 0 (varint)
	i++
	if m.Allowed {
		dAtA[i] = 1
	} else {
		dAtA[i] = 0
	}
	i++
	dAtA[i] = 0x12 // field 2 (Reason), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(len(m.Reason)))
	i += copy(dAtA[i:], m.Reason)
	dAtA[i] = 0x1a // field 3 (EvaluationError), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(len(m.EvaluationError)))
	i += copy(dAtA[i:], m.EvaluationError)
	dAtA[i] = 0x20 // field 4 (Denied), wire type 0 (varint)
	i++
	if m.Denied {
		dAtA[i] = 1
	} else {
		dAtA[i] = 0
	}
	i++
	return i, nil
}
// Marshal allocates a buffer of exactly m.Size() bytes, serializes m into
// it via MarshalTo, and returns the filled prefix.
func (m *SubjectRulesReviewStatus) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m and returns the number of bytes written.  The two
// repeated message fields are emitted one element per key/length/payload
// triple; Incomplete is a bool (wire type 0) and EvaluationError a string.
func (m *SubjectRulesReviewStatus) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i // generated-code no-op
	var l int
	_ = l // generated-code no-op
	if len(m.ResourceRules) > 0 {
		for _, msg := range m.ResourceRules {
			dAtA[i] = 0xa // field 1 (ResourceRules), wire type 2
			i++
			i = encodeVarintGenerated(dAtA, i, uint64(msg.Size()))
			n, err := msg.MarshalTo(dAtA[i:])
			if err != nil {
				return 0, err
			}
			i += n
		}
	}
	if len(m.NonResourceRules) > 0 {
		for _, msg := range m.NonResourceRules {
			dAtA[i] = 0x12 // field 2 (NonResourceRules), wire type 2
			i++
			i = encodeVarintGenerated(dAtA, i, uint64(msg.Size()))
			n, err := msg.MarshalTo(dAtA[i:])
			if err != nil {
				return 0, err
			}
			i += n
		}
	}
	dAtA[i] = 0x18 // field 3 (Incomplete), wire type 0 (varint)
	i++
	if m.Incomplete {
		dAtA[i] = 1
	} else {
		dAtA[i] = 0
	}
	i++
	dAtA[i] = 0x22 // field 4 (EvaluationError), wire type 2
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(len(m.EvaluationError)))
	i += copy(dAtA[i:], m.EvaluationError)
	return i, nil
}
// encodeFixed64Generated writes v into dAtA starting at offset as eight
// little-endian bytes and returns the offset just past the last byte.
func encodeFixed64Generated(dAtA []byte, offset int, v uint64) int {
	for shift := uint(0); shift < 64; shift += 8 {
		dAtA[offset] = uint8(v >> shift)
		offset++
	}
	return offset
}
// encodeFixed32Generated writes v into dAtA starting at offset as four
// little-endian bytes and returns the offset just past the last byte.
func encodeFixed32Generated(dAtA []byte, offset int, v uint32) int {
	for shift := uint(0); shift < 32; shift += 8 {
		dAtA[offset] = uint8(v >> shift)
		offset++
	}
	return offset
}
// encodeVarintGenerated writes v into dAtA starting at offset as a protobuf
// base-128 varint (7 payload bits per byte, high bit set on every byte but
// the last) and returns the offset just past the last byte written.
func encodeVarintGenerated(dAtA []byte, offset int, v uint64) int {
	for v >= 0x80 {
		dAtA[offset] = uint8(v) | 0x80 // low 7 bits with continuation flag
		v >>= 7
		offset++
	}
	dAtA[offset] = uint8(v)
	return offset + 1
}
// Size returns the number of bytes the protobuf encoding of m occupies:
// for each element, one key byte plus the varint-encoded length plus the
// string bytes themselves.
func (m ExtraValue) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	if len(m) > 0 {
		for _, s := range m {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	return n
}
// Size returns the encoded size in bytes: three embedded messages, each
// costing one key byte + a length varint + the nested payload.
func (m *LocalSubjectAccessReview) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	l = m.ObjectMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Spec.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Status.Size()
	n += 1 + l + sovGenerated(uint64(l))
	return n
}

// Size returns the encoded size in bytes of the two string fields
// (Path, Verb), each key byte + length varint + payload.
func (m *NonResourceAttributes) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	l = len(m.Path)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.Verb)
	n += 1 + l + sovGenerated(uint64(l))
	return n
}
// Size returns the encoded size in bytes of the two repeated string
// fields; each element costs one key byte + length varint + payload.
func (m *NonResourceRule) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	if len(m.Verbs) > 0 {
		for _, s := range m.Verbs {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if len(m.NonResourceURLs) > 0 {
		for _, s := range m.NonResourceURLs {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	return n
}

// Size returns the encoded size in bytes of the seven string fields of
// ResourceAttributes, each key byte + length varint + payload.
func (m *ResourceAttributes) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	l = len(m.Namespace)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.Verb)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.Group)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.Version)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.Resource)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.Subresource)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.Name)
	n += 1 + l + sovGenerated(uint64(l))
	return n
}
// Size returns the encoded size in bytes of the four repeated string
// fields; each element costs one key byte + length varint + payload.
func (m *ResourceRule) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	if len(m.Verbs) > 0 {
		for _, s := range m.Verbs {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if len(m.APIGroups) > 0 {
		for _, s := range m.APIGroups {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if len(m.Resources) > 0 {
		for _, s := range m.Resources {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if len(m.ResourceNames) > 0 {
		for _, s := range m.ResourceNames {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	return n
}
// Size returns the encoded size in bytes of the three embedded messages.
func (m *SelfSubjectAccessReview) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	l = m.ObjectMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Spec.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Status.Size()
	n += 1 + l + sovGenerated(uint64(l))
	return n
}

// Size returns the encoded size in bytes; nil pointer sub-messages are
// omitted and contribute nothing.
func (m *SelfSubjectAccessReviewSpec) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	if m.ResourceAttributes != nil {
		l = m.ResourceAttributes.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.NonResourceAttributes != nil {
		l = m.NonResourceAttributes.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	return n
}

// Size returns the encoded size in bytes of the three embedded messages.
func (m *SelfSubjectRulesReview) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	l = m.ObjectMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Spec.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Status.Size()
	n += 1 + l + sovGenerated(uint64(l))
	return n
}
// Size returns the encoded size in bytes of the single Namespace string.
func (m *SelfSubjectRulesReviewSpec) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	l = len(m.Namespace)
	n += 1 + l + sovGenerated(uint64(l))
	return n
}

// Size returns the encoded size in bytes of the three embedded messages.
func (m *SubjectAccessReview) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	l = m.ObjectMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Spec.Size()
	n += 1 + l + sovGenerated(uint64(l))
	l = m.Status.Size()
	n += 1 + l + sovGenerated(uint64(l))
	return n
}
// Size returns the encoded size in bytes of m, mirroring the layout that
// MarshalTo produces: optional pointer sub-messages only when non-nil, the
// User string, each Groups element, one nested map-entry message per Extra
// key/value pair, and the UID string.
func (m *SubjectAccessReviewSpec) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	if m.ResourceAttributes != nil {
		l = m.ResourceAttributes.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.NonResourceAttributes != nil {
		l = m.NonResourceAttributes.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	l = len(m.User)
	n += 1 + l + sovGenerated(uint64(l))
	if len(m.Groups) > 0 {
		for _, s := range m.Groups {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if len(m.Extra) > 0 {
		for k, v := range m.Extra {
			_ = k
			_ = v
			l = v.Size()
			// Entry body: key field (1 + len + lenvarint) + value field.
			mapEntrySize := 1 + len(k) + sovGenerated(uint64(len(k))) + 1 + l + sovGenerated(uint64(l))
			n += mapEntrySize + 1 + sovGenerated(uint64(mapEntrySize))
		}
	}
	l = len(m.UID)
	n += 1 + l + sovGenerated(uint64(l))
	return n
}
// Size returns the encoded size in bytes: each bool (Allowed, Denied)
// costs a fixed 2 bytes (key byte + 0/1 payload), the strings cost key +
// length varint + payload.
func (m *SubjectAccessReviewStatus) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	n += 2 // field 1 (Allowed): key byte + bool byte
	l = len(m.Reason)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.EvaluationError)
	n += 1 + l + sovGenerated(uint64(l))
	n += 2 // field 4 (Denied): key byte + bool byte
	return n
}

// Size returns the encoded size in bytes of the two repeated message
// fields, the Incomplete bool, and the EvaluationError string.
func (m *SubjectRulesReviewStatus) Size() (n int) {
	var l int
	_ = l // generated-code no-op
	if len(m.ResourceRules) > 0 {
		for _, e := range m.ResourceRules {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if len(m.NonResourceRules) > 0 {
		for _, e := range m.NonResourceRules {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	n += 2 // field 3 (Incomplete): key byte + bool byte
	l = len(m.EvaluationError)
	n += 1 + l + sovGenerated(uint64(l))
	return n
}
// sovGenerated returns the number of bytes needed to encode x as a
// protobuf base-128 varint (1 byte for values below 128, up to 10 bytes
// for the full uint64 range).
func sovGenerated(x uint64) (n int) {
	n = 1
	for x >= 0x80 {
		x >>= 7
		n++
	}
	return n
}
// sozGenerated returns the varint-encoded size of x after zigzag
// transformation (used for sint32/sint64 fields, where small negative
// values stay small on the wire).
func sozGenerated(x uint64) (n int) {
	zigzag := (x << 1) ^ uint64(int64(x)>>63)
	return sovGenerated(zigzag)
}
// String implements fmt.Stringer, rendering the message and its embedded
// sub-messages in the generated one-line debug form; nil receivers print
// as "nil".
func (this *LocalSubjectAccessReview) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&LocalSubjectAccessReview{`,
		`ObjectMeta:` + strings.Replace(strings.Replace(this.ObjectMeta.String(), "ObjectMeta", "k8s_io_apimachinery_pkg_apis_meta_v1.ObjectMeta", 1), `&`, ``, 1) + `,`,
		`Spec:` + strings.Replace(strings.Replace(this.Spec.String(), "SubjectAccessReviewSpec", "SubjectAccessReviewSpec", 1), `&`, ``, 1) + `,`,
		`Status:` + strings.Replace(strings.Replace(this.Status.String(), "SubjectAccessReviewStatus", "SubjectAccessReviewStatus", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for NonResourceAttributes.
func (this *NonResourceAttributes) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&NonResourceAttributes{`,
		`Path:` + fmt.Sprintf("%v", this.Path) + `,`,
		`Verb:` + fmt.Sprintf("%v", this.Verb) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for NonResourceRule.
func (this *NonResourceRule) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&NonResourceRule{`,
		`Verbs:` + fmt.Sprintf("%v", this.Verbs) + `,`,
		`NonResourceURLs:` + fmt.Sprintf("%v", this.NonResourceURLs) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for ResourceAttributes.
func (this *ResourceAttributes) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ResourceAttributes{`,
		`Namespace:` + fmt.Sprintf("%v", this.Namespace) + `,`,
		`Verb:` + fmt.Sprintf("%v", this.Verb) + `,`,
		`Group:` + fmt.Sprintf("%v", this.Group) + `,`,
		`Version:` + fmt.Sprintf("%v", this.Version) + `,`,
		`Resource:` + fmt.Sprintf("%v", this.Resource) + `,`,
		`Subresource:` + fmt.Sprintf("%v", this.Subresource) + `,`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`}`,
	}, "")
	return s
}
// String implements fmt.Stringer for ResourceRule.
func (this *ResourceRule) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ResourceRule{`,
		`Verbs:` + fmt.Sprintf("%v", this.Verbs) + `,`,
		`APIGroups:` + fmt.Sprintf("%v", this.APIGroups) + `,`,
		`Resources:` + fmt.Sprintf("%v", this.Resources) + `,`,
		`ResourceNames:` + fmt.Sprintf("%v", this.ResourceNames) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for SelfSubjectAccessReview; embedded
// sub-messages are rendered inline via their own String methods.
func (this *SelfSubjectAccessReview) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&SelfSubjectAccessReview{`,
		`ObjectMeta:` + strings.Replace(strings.Replace(this.ObjectMeta.String(), "ObjectMeta", "k8s_io_apimachinery_pkg_apis_meta_v1.ObjectMeta", 1), `&`, ``, 1) + `,`,
		`Spec:` + strings.Replace(strings.Replace(this.Spec.String(), "SelfSubjectAccessReviewSpec", "SelfSubjectAccessReviewSpec", 1), `&`, ``, 1) + `,`,
		`Status:` + strings.Replace(strings.Replace(this.Status.String(), "SubjectAccessReviewStatus", "SubjectAccessReviewStatus", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for SelfSubjectAccessReviewSpec.
func (this *SelfSubjectAccessReviewSpec) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&SelfSubjectAccessReviewSpec{`,
		`ResourceAttributes:` + strings.Replace(fmt.Sprintf("%v", this.ResourceAttributes), "ResourceAttributes", "ResourceAttributes", 1) + `,`,
		`NonResourceAttributes:` + strings.Replace(fmt.Sprintf("%v", this.NonResourceAttributes), "NonResourceAttributes", "NonResourceAttributes", 1) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for SelfSubjectRulesReview.
func (this *SelfSubjectRulesReview) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&SelfSubjectRulesReview{`,
		`ObjectMeta:` + strings.Replace(strings.Replace(this.ObjectMeta.String(), "ObjectMeta", "k8s_io_apimachinery_pkg_apis_meta_v1.ObjectMeta", 1), `&`, ``, 1) + `,`,
		`Spec:` + strings.Replace(strings.Replace(this.Spec.String(), "SelfSubjectRulesReviewSpec", "SelfSubjectRulesReviewSpec", 1), `&`, ``, 1) + `,`,
		`Status:` + strings.Replace(strings.Replace(this.Status.String(), "SubjectRulesReviewStatus", "SubjectRulesReviewStatus", 1), `&`, ``, 1) + `,`,
	}, "")
	return s
}

// String implements fmt.Stringer for SelfSubjectRulesReviewSpec.
func (this *SelfSubjectRulesReviewSpec) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&SelfSubjectRulesReviewSpec{`,
		`Namespace:` + fmt.Sprintf("%v", this.Namespace) + `,`,
		`}`,
	}, "")
	return s
}
// String implements fmt.Stringer for SubjectAccessReview; embedded
// sub-messages are rendered inline via their own String methods.
func (this *SubjectAccessReview) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&SubjectAccessReview{`,
		`ObjectMeta:` + strings.Replace(strings.Replace(this.ObjectMeta.String(), "ObjectMeta", "k8s_io_apimachinery_pkg_apis_meta_v1.ObjectMeta", 1), `&`, ``, 1) + `,`,
		`Spec:` + strings.Replace(strings.Replace(this.Spec.String(), "SubjectAccessReviewSpec", "SubjectAccessReviewSpec", 1), `&`, ``, 1) + `,`,
		`Status:` + strings.Replace(strings.Replace(this.Status.String(), "SubjectAccessReviewStatus", "SubjectAccessReviewStatus", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for SubjectAccessReviewSpec.  Extra map
// keys are sorted first so the rendered form is deterministic.
func (this *SubjectAccessReviewSpec) String() string {
	if this == nil {
		return "nil"
	}
	keysForExtra := make([]string, 0, len(this.Extra))
	for k := range this.Extra {
		keysForExtra = append(keysForExtra, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForExtra)
	mapStringForExtra := "map[string]ExtraValue{"
	for _, k := range keysForExtra {
		mapStringForExtra += fmt.Sprintf("%v: %v,", k, this.Extra[k])
	}
	mapStringForExtra += "}"
	s := strings.Join([]string{`&SubjectAccessReviewSpec{`,
		`ResourceAttributes:` + strings.Replace(fmt.Sprintf("%v", this.ResourceAttributes), "ResourceAttributes", "ResourceAttributes", 1) + `,`,
		`NonResourceAttributes:` + strings.Replace(fmt.Sprintf("%v", this.NonResourceAttributes), "NonResourceAttributes", "NonResourceAttributes", 1) + `,`,
		`User:` + fmt.Sprintf("%v", this.User) + `,`,
		`Groups:` + fmt.Sprintf("%v", this.Groups) + `,`,
		`Extra:` + mapStringForExtra + `,`,
		`UID:` + fmt.Sprintf("%v", this.UID) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for SubjectAccessReviewStatus.
func (this *SubjectAccessReviewStatus) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&SubjectAccessReviewStatus{`,
		`Allowed:` + fmt.Sprintf("%v", this.Allowed) + `,`,
		`Reason:` + fmt.Sprintf("%v", this.Reason) + `,`,
		`EvaluationError:` + fmt.Sprintf("%v", this.EvaluationError) + `,`,
		`Denied:` + fmt.Sprintf("%v", this.Denied) + `,`,
		`}`,
	}, "")
	return s
}

// String implements fmt.Stringer for SubjectRulesReviewStatus.
func (this *SubjectRulesReviewStatus) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&SubjectRulesReviewStatus{`,
		`ResourceRules:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.ResourceRules), "ResourceRule", "ResourceRule", 1), `&`, ``, 1) + `,`,
		`NonResourceRules:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.NonResourceRules), "NonResourceRule", "NonResourceRule", 1), `&`, ``, 1) + `,`,
		`Incomplete:` + fmt.Sprintf("%v", this.Incomplete) + `,`,
		`EvaluationError:` + fmt.Sprintf("%v", this.EvaluationError) + `,`,
		`}`,
	}, "")
	return s
}
// valueToStringGenerated renders a pointer value for debug output:
// "nil" for a nil pointer, otherwise "*" followed by the pointee's
// default %v formatting.
func valueToStringGenerated(v interface{}) string {
	val := reflect.ValueOf(v)
	if val.IsNil() {
		return "nil"
	}
	return fmt.Sprintf("*%v", reflect.Indirect(val).Interface())
}
// Unmarshal decodes the protobuf bytes in dAtA into m, appending one
// string per field-1 occurrence.  The outer loop reads a varint tag
// (field number + wire type) per field; unknown fields are skipped via
// skipGenerated.  Returns io.ErrUnexpectedEOF on truncated input and
// the generated overflow/length errors on malformed varints.
func (m *ExtraValue) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field tag as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ExtraValue: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ExtraValue: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1: // repeated string Items
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Items", wireType)
			}
			var stringLen uint64
			// Decode the string length as a varint.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			*m = append(*m, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf bytes in dAtA into m.  Fields 1-3 are
// the length-delimited embedded messages ObjectMeta, Spec, and Status,
// each delegated to its own Unmarshal; unknown fields are skipped.
func (m *LocalSubjectAccessReview) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field tag as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: LocalSubjectAccessReview: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: LocalSubjectAccessReview: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1: // ObjectMeta (embedded message)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ObjectMeta", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ObjectMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2: // Spec (embedded message)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Spec", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Spec.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3: // Status (embedded message)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Status", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Status.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf bytes in dAtA into m.  Fields 1 and 2
// are the strings Path and Verb; unknown fields are skipped.
func (m *NonResourceAttributes) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field tag as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: NonResourceAttributes: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: NonResourceAttributes: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1: // Path (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Path", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Path = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2: // Verb (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Verb", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Verb = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf bytes in dAtA into m.  Fields 1 and 2
// are the repeated strings Verbs and NonResourceURLs, appended one
// element per occurrence; unknown fields are skipped.
func (m *NonResourceRule) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field tag as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: NonResourceRule: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: NonResourceRule: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1: // Verbs (repeated string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Verbs", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Verbs = append(m.Verbs, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 2: // NonResourceURLs (repeated string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field NonResourceURLs", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.NonResourceURLs = append(m.NonResourceURLs, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf bytes in dAtA into m.  Fields 1-7 are
// the strings Namespace, Verb, Group, Version, Resource, Subresource,
// and Name; each case validates the wire type, varint-decodes the
// length, bounds-checks it, and copies the bytes.  Unknown fields are
// skipped via skipGenerated.
func (m *ResourceAttributes) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field tag as a varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ResourceAttributes: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ResourceAttributes: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1: // Namespace (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Namespace = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2: // Verb (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Verb", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Verb = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 3: // Group (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Group", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Group = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 4: // Version (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Version = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 5: // Resource (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Resource", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Resource = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 6: // Subresource (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Subresource", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Subresource = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 7: // Name (string)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// Pattern: read a key varint, dispatch on the field number, decode the
// payload, and skip any unknown field via skipGenerated.
func (m *ResourceRule) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			// Wire type 4 (end-group) is invalid outside a group.
			return fmt.Errorf("proto: ResourceRule: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ResourceRule: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Verbs: repeated string (wire type 2, length-delimited).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Verbs", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Verbs = append(m.Verbs, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 2:
			// APIGroups: repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field APIGroups", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.APIGroups = append(m.APIGroups, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 3:
			// Resources: repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Resources", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Resources = append(m.Resources, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 4:
			// ResourceNames: repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ResourceNames", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.ResourceNames = append(m.ResourceNames, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// Fields 1-3 (ObjectMeta, Spec, Status) are embedded messages, each
// delegated to its own generated Unmarshal.
func (m *SelfSubjectAccessReview) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SelfSubjectAccessReview: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SelfSubjectAccessReview: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// ObjectMeta: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ObjectMeta", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ObjectMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// Spec: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Spec", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Spec.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// Status: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Status", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Status.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// Fields 1 and 2 are optional message pointers, lazily allocated on
// first occurrence so repeated occurrences merge into the same value.
func (m *SelfSubjectAccessReviewSpec) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SelfSubjectAccessReviewSpec: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SelfSubjectAccessReviewSpec: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// ResourceAttributes: optional message pointer.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ResourceAttributes", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.ResourceAttributes == nil {
				m.ResourceAttributes = &ResourceAttributes{}
			}
			if err := m.ResourceAttributes.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// NonResourceAttributes: optional message pointer.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field NonResourceAttributes", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.NonResourceAttributes == nil {
				m.NonResourceAttributes = &NonResourceAttributes{}
			}
			if err := m.NonResourceAttributes.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// Fields 1-3 (ObjectMeta, Spec, Status) are embedded messages, each
// delegated to its own generated Unmarshal.
func (m *SelfSubjectRulesReview) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SelfSubjectRulesReview: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SelfSubjectRulesReview: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// ObjectMeta: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ObjectMeta", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ObjectMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// Spec: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Spec", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Spec.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// Status: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Status", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Status.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// The only known field is Namespace (field 1, string).
func (m *SelfSubjectRulesReviewSpec) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SelfSubjectRulesReviewSpec: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SelfSubjectRulesReviewSpec: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Namespace: string (length-delimited).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Namespace = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// Fields 1-3 (ObjectMeta, Spec, Status) are embedded messages, each
// delegated to its own generated Unmarshal.
func (m *SubjectAccessReview) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SubjectAccessReview: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SubjectAccessReview: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// ObjectMeta: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ObjectMeta", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ObjectMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// Spec: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Spec", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Spec.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// Status: embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Status", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Status.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// Fields: ResourceAttributes (1, optional msg), NonResourceAttributes
// (2, optional msg), User (3, string), Groups (4, repeated string),
// Extra (5, map<string, ExtraValue>), UID (6, string).
func (m *SubjectAccessReviewSpec) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SubjectAccessReviewSpec: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SubjectAccessReviewSpec: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// ResourceAttributes: optional message pointer, lazily allocated.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ResourceAttributes", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.ResourceAttributes == nil {
				m.ResourceAttributes = &ResourceAttributes{}
			}
			if err := m.ResourceAttributes.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// NonResourceAttributes: optional message pointer, lazily allocated.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field NonResourceAttributes", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.NonResourceAttributes == nil {
				m.NonResourceAttributes = &NonResourceAttributes{}
			}
			if err := m.NonResourceAttributes.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// User: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field User", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.User = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 4:
			// Groups: repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Groups", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Groups = append(m.Groups, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 5:
			// Extra: map<string, ExtraValue>. Each occurrence is one map
			// entry message: field 1 = key (string), field 2 = value (msg).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Extra", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// NOTE(review): the entry's key tag (keykey) is read but never
			// validated against the expected field number — this is the old
			// gogo map-decode shape that was later hardened upstream
			// (CVE-2021-3121 class). Confirm the vendored gogo version.
			var keykey uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				keykey |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			var stringLenmapkey uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLenmapkey |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLenmapkey := int(stringLenmapkey)
			if intStringLenmapkey < 0 {
				return ErrInvalidLengthGenerated
			}
			postStringIndexmapkey := iNdEx + intStringLenmapkey
			if postStringIndexmapkey > l {
				return io.ErrUnexpectedEOF
			}
			mapkey := string(dAtA[iNdEx:postStringIndexmapkey])
			iNdEx = postStringIndexmapkey
			if m.Extra == nil {
				m.Extra = make(map[string]ExtraValue)
			}
			if iNdEx < postIndex {
				// Entry carries a value: decode it as an ExtraValue message.
				var valuekey uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowGenerated
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					valuekey |= (uint64(b) & 0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				var mapmsglen int
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowGenerated
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					mapmsglen |= (int(b) & 0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				if mapmsglen < 0 {
					return ErrInvalidLengthGenerated
				}
				postmsgIndex := iNdEx + mapmsglen
				if mapmsglen < 0 {
					return ErrInvalidLengthGenerated
				}
				if postmsgIndex > l {
					return io.ErrUnexpectedEOF
				}
				mapvalue := &ExtraValue{}
				if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil {
					return err
				}
				iNdEx = postmsgIndex
				m.Extra[mapkey] = *mapvalue
			} else {
				// Entry has key only: store the zero value.
				var mapvalue ExtraValue
				m.Extra[mapkey] = mapvalue
			}
			iNdEx = postIndex
		case 6:
			// UID: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field UID", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.UID = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// Fields: Allowed (1, bool), Reason (2, string), EvaluationError
// (3, string), Denied (4, bool). Bools use wire type 0 (varint).
func (m *SubjectAccessReviewStatus) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SubjectAccessReviewStatus: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SubjectAccessReviewStatus: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Allowed: bool, decoded as a varint (non-zero means true).
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Allowed", wireType)
			}
			var v int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.Allowed = bool(v != 0)
		case 2:
			// Reason: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Reason", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Reason = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 3:
			// EvaluationError: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field EvaluationError", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.EvaluationError = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 4:
			// Denied: bool, decoded as a varint (non-zero means true).
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Denied", wireType)
			}
			var v int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.Denied = bool(v != 0)
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire-format bytes in dAtA into m.
// Machine-generated by protoc-gen-gogo — do not edit by hand.
// Fields: ResourceRules (1, repeated msg), NonResourceRules (2,
// repeated msg), Incomplete (3, bool), EvaluationError (4, string).
func (m *SubjectRulesReviewStatus) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the key varint: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SubjectRulesReviewStatus: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SubjectRulesReviewStatus: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// ResourceRules: repeated message — append a zero element,
			// then decode directly into it.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ResourceRules", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.ResourceRules = append(m.ResourceRules, ResourceRule{})
			if err := m.ResourceRules[len(m.ResourceRules)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// NonResourceRules: repeated message, same append-then-decode shape.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field NonResourceRules", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.NonResourceRules = append(m.NonResourceRules, NonResourceRule{})
			if err := m.NonResourceRules[len(m.NonResourceRules)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// Incomplete: bool, decoded as a varint (non-zero means true).
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Incomplete", wireType)
			}
			var v int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.Incomplete = bool(v != 0)
		case 4:
			// EvaluationError: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field EvaluationError", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.EvaluationError = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole entry.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipGenerated returns the number of bytes occupied by the next
// complete field (tag + payload) at the start of dAtA, so callers can
// skip unknown fields. Machine-generated by protoc-gen-gogo.
func skipGenerated(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		// Decode the key varint to learn the wire type to skip.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint: consume bytes until the continuation bit clears.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
			return iNdEx, nil
		case 1:
			// 64-bit fixed.
			iNdEx += 8
			return iNdEx, nil
		case 2:
			// Length-delimited: varint length, then that many payload bytes.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			// NOTE(review): iNdEx is advanced before the negative-length
			// check; later gogo/protobuf releases validate length (and
			// iNdEx overflow) first — confirm the vendored version.
			iNdEx += length
			if length < 0 {
				return 0, ErrInvalidLengthGenerated
			}
			return iNdEx, nil
		case 3:
			// Start-group: recursively skip nested fields until the
			// matching end-group (wire type 4) tag.
			for {
				var innerWire uint64
				var start int = iNdEx
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return 0, ErrIntOverflowGenerated
					}
					if iNdEx >= l {
						return 0, io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					innerWire |= (uint64(b) & 0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				innerWireType := int(innerWire & 0x7)
				if innerWireType == 4 {
					break
				}
				next, err := skipGenerated(dAtA[start:])
				if err != nil {
					return 0, err
				}
				iNdEx = start + next
			}
			return iNdEx, nil
		case 4:
			// Bare end-group tag.
			return iNdEx, nil
		case 5:
			// 32-bit fixed.
			iNdEx += 4
			return iNdEx, nil
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
	}
	// Every switch arm returns, so the loop body never falls through.
	panic("unreachable")
}
// Sentinel errors shared by the generated unmarshal and skip helpers.
var (
	// ErrInvalidLengthGenerated reports a negative length prefix.
	ErrInvalidLengthGenerated = fmt.Errorf("proto: negative length found during unmarshaling")
	// ErrIntOverflowGenerated reports a varint wider than 64 bits.
	ErrIntOverflowGenerated   = fmt.Errorf("proto: integer overflow")
)
// init registers this file's gzipped descriptor with the proto
// registry under its canonical vendored import path.
func init() {
	proto.RegisterFile("k8s.io/kubernetes/vendor/k8s.io/api/authorization/v1beta1/generated.proto", fileDescriptorGenerated)
}
var fileDescriptorGenerated = []byte{
// 1154 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x56, 0x4d, 0x6f, 0x1b, 0xc5,
0x1b, 0xf7, 0xfa, 0x25, 0xb1, 0xc7, 0xcd, 0x3f, 0xe9, 0x44, 0x69, 0xb6, 0xf9, 0x0b, 0xdb, 0x32,
0x12, 0x0a, 0xa2, 0xdd, 0x25, 0xa1, 0x90, 0x12, 0xe8, 0x21, 0x56, 0x22, 0x14, 0xa9, 0x2d, 0xd5,
0x44, 0xc9, 0x81, 0x4a, 0xc0, 0x78, 0x3d, 0xb1, 0x17, 0xdb, 0xbb, 0xcb, 0xcc, 0xac, 0x43, 0x10,
0x87, 0x1e, 0x39, 0x72, 0xe4, 0xc8, 0x89, 0xef, 0xc0, 0x05, 0x09, 0x4e, 0x39, 0xf6, 0x18, 0x24,
0x64, 0x91, 0xe5, 0x43, 0x70, 0x45, 0x33, 0x3b, 0xf6, 0xae, 0xe3, 0x75, 0x1c, 0xe7, 0x40, 0x2f,
0xbd, 0xed, 0x3c, 0xbf, 0xe7, 0x6d, 0x9e, 0x97, 0xd9, 0x1f, 0xd8, 0x6f, 0x3f, 0x64, 0x86, 0xed,
0x9a, 0x6d, 0xbf, 0x4e, 0xa8, 0x43, 0x38, 0x61, 0x66, 0x8f, 0x38, 0x0d, 0x97, 0x9a, 0x0a, 0xc0,
0x9e, 0x6d, 0x62, 0x9f, 0xb7, 0x5c, 0x6a, 0x7f, 0x8b, 0xb9, 0xed, 0x3a, 0x66, 0x6f, 0xa3, 0x4e,
0x38, 0xde, 0x30, 0x9b, 0xc4, 0x21, 0x14, 0x73, 0xd2, 0x30, 0x3c, 0xea, 0x72, 0x17, 0x56, 0x42,
0x0b, 0x03, 0x7b, 0xb6, 0x31, 0x62, 0x61, 0x28, 0x8b, 0xb5, 0xfb, 0x4d, 0x9b, 0xb7, 0xfc, 0xba,
0x61, 0xb9, 0x5d, 0xb3, 0xe9, 0x36, 0x5d, 0x53, 0x1a, 0xd6, 0xfd, 0x63, 0x79, 0x92, 0x07, 0xf9,
0x15, 0x3a, 0x5c, 0x7b, 0x10, 0xa5, 0xd0, 0xc5, 0x56, 0xcb, 0x76, 0x08, 0x3d, 0x35, 0xbd, 0x76,
0x53, 0x08, 0x98, 0xd9, 0x25, 0x1c, 0x9b, 0xbd, 0xb1, 0x34, 0xd6, 0xcc, 0x49, 0x56, 0xd4, 0x77,
0xb8, 0xdd, 0x25, 0x63, 0x06, 0x1f, 0x4c, 0x33, 0x60, 0x56, 0x8b, 0x74, 0xf1, 0x98, 0xdd, 0x7b,
0x93, 0xec, 0x7c, 0x6e, 0x77, 0x4c, 0xdb, 0xe1, 0x8c, 0xd3, 0xcb, 0x46, 0xd5, 0x2d, 0x00, 0xf6,
0xbe, 0xe1, 0x14, 0x1f, 0xe1, 0x8e, 0x4f, 0x60, 0x19, 0xe4, 0x6c, 0x4e, 0xba, 0x4c, 0xd7, 0x2a,
0x99, 0xf5, 0x42, 0xad, 0x10, 0xf4, 0xcb, 0xb9, 0x7d, 0x21, 0x40, 0xa1, 0x7c, 0x3b, 0xff, 0xe3,
0x4f, 0xe5, 0xd4, 0x8b, 0x3f, 0x2b, 0xa9, 0xea, 0xaf, 0x69, 0xa0, 0x3f, 0x76, 0x2d, 0xdc, 0x39,
0xf0, 0xeb, 0x5f, 0x11, 0x8b, 0xef, 0x58, 0x16, 0x61, 0x0c, 0x91, 0x9e, 0x4d, 0x4e, 0xe0, 0x97,
0x20, 0x2f, 0xca, 0xd1, 0xc0, 0x1c, 0xeb, 0x5a, 0x45, 0x5b, 0x2f, 0x6e, 0xbe, 0x6b, 0x44, 0xdd,
0x18, 0x66, 0x67, 0x78, 0xed, 0xa6, 0x10, 0x30, 0x43, 0x68, 0x1b, 0xbd, 0x0d, 0xe3, 0x53, 0xe9,
0xeb, 0x09, 0xe1, 0xb8, 0x06, 0xcf, 0xfa, 0xe5, 0x54, 0xd0, 0x2f, 0x83, 0x48, 0x86, 0x86, 0x5e,
0xe1, 0x73, 0x90, 0x65, 0x1e, 0xb1, 0xf4, 0xb4, 0xf4, 0xfe, 0xa1, 0x31, 0xad, 0xd7, 0x46, 0x42,
0x9a, 0x07, 0x1e, 0xb1, 0x6a, 0xb7, 0x54, 0x98, 0xac, 0x38, 0x21, 0xe9, 0x14, 0x5a, 0x60, 0x8e,
0x71, 0xcc, 0x7d, 0xa6, 0x67, 0xa4, 0xfb, 0x8f, 0x6e, 0xe6, 0x5e, 0xba, 0xa8, 0xfd, 0x4f, 0x05,
0x98, 0x0b, 0xcf, 0x48, 0xb9, 0xae, 0x3e, 0x07, 0x2b, 0x4f, 0x5d, 0x07, 0x11, 0xe6, 0xfa, 0xd4,
0x22, 0x3b, 0x9c, 0x53, 0xbb, 0xee, 0x73, 0xc2, 0x60, 0x05, 0x64, 0x3d, 0xcc, 0x5b, 0xb2, 0x70,
0x85, 0x28, 0xbf, 0x67, 0x98, 0xb7, 0x90, 0x44, 0x84, 0x46, 0x8f, 0xd0, 0xba, 0xbc, 0x7c, 0x4c,
0xe3, 0x88, 0xd0, 0x3a, 0x92, 0x48, 0xf5, 0x6b, 0xb0, 0x18, 0x73, 0x8e, 0xfc, 0x8e, 0xec, 0xad,
0x80, 0x46, 0x7a, 0x2b, 0x2c, 0x18, 0x0a, 0xe5, 0xf0, 0x11, 0x58, 0x74, 0x22, 0x9b, 0x43, 0xf4,
0x98, 0xe9, 0x69, 0xa9, 0xba, 0x1c, 0xf4, 0xcb, 0x71, 0x77, 0x02, 0x42, 0x97, 0x75, 0xc5, 0x40,
0xc0, 0x84, 0xdb, 0x98, 0xa0, 0xe0, 0xe0, 0x2e, 0x61, 0x1e, 0xb6, 0x88, 0xba, 0xd2, 0x6d, 0x95,
0x70, 0xe1, 0xe9, 0x00, 0x40, 0x91, 0xce, 0xf4, 0xcb, 0xc1, 0x37, 0x41, 0xae, 0x49, 0x5d, 0xdf,
0x93, 0xdd, 0x29, 0xd4, 0x16, 0x94, 0x4a, 0xee, 0x13, 0x21, 0x44, 0x21, 0x06, 0xdf, 0x06, 0xf3,
0x3d, 0x42, 0x99, 0xed, 0x3a, 0x7a, 0x56, 0xaa, 0x2d, 0x2a, 0xb5, 0xf9, 0xa3, 0x50, 0x8c, 0x06,
0x38, 0xbc, 0x07, 0xf2, 0x54, 0x25, 0xae, 0xe7, 0xa4, 0xee, 0x92, 0xd2, 0xcd, 0x0f, 0x2b, 0x38,
0xd4, 0x80, 0xef, 0x83, 0x22, 0xf3, 0xeb, 0x43, 0x83, 0x39, 0x69, 0xb0, 0xac, 0x0c, 0x8a, 0x07,
0x11, 0x84, 0xe2, 0x7a, 0xe2, 0x5a, 0xe2, 0x8e, 0xfa, 0xfc, 0xe8, 0xb5, 0x44, 0x09, 0x90, 0x44,
0xaa, 0xbf, 0x6b, 0xe0, 0xd6, 0x6c, 0x1d, 0x7b, 0x07, 0x14, 0xb0, 0x67, 0xcb, 0x6b, 0x0f, 0x7a,
0xb5, 0x20, 0xea, 0xba, 0xf3, 0x6c, 0x3f, 0x14, 0xa2, 0x08, 0x17, 0xca, 0x83, 0x64, 0xc4, 0x5c,
0x0f, 0x95, 0x07, 0x21, 0x19, 0x8a, 0x70, 0xb8, 0x05, 0x16, 0x06, 0x07, 0xd9, 0x24, 0x3d, 0x2b,
0x0d, 0x6e, 0x07, 0xfd, 0xf2, 0x02, 0x8a, 0x03, 0x68, 0x54, 0xaf, 0xfa, 0x5b, 0x1a, 0xac, 0x1e,
0x90, 0xce, 0xf1, 0xab, 0x79, 0x15, 0xbe, 0x18, 0x79, 0x15, 0x1e, 0x5d, 0x63, 0x6d, 0x93, 0x53,
0x7d, 0xb5, 0x2f, 0xc3, 0xcf, 0x69, 0xf0, 0xff, 0x2b, 0x12, 0x83, 0xdf, 0x01, 0x48, 0xc7, 0x16,
0x4d, 0x55, 0xf4, 0xc1, 0xf4, 0x84, 0xc6, 0x97, 0xb4, 0x76, 0x27, 0xe8, 0x97, 0x13, 0x96, 0x17,
0x25, 0xc4, 0x81, 0xdf, 0x6b, 0x60, 0xc5, 0x49, 0x7a, 0xb8, 0x54, 0xd5, 0xb7, 0xa6, 0x67, 0x90,
0xf8, 0xee, 0xd5, 0xee, 0x06, 0xfd, 0x72, 0xf2, 0x93, 0x88, 0x92, 0x03, 0x8a, 0x27, 0xe7, 0x4e,
0xac, 0x50, 0x62, 0x69, 0xfe, 0xbb, 0x59, 0xfb, 0x7c, 0x64, 0xd6, 0x3e, 0x9e, 0x69, 0xd6, 0x62,
0x99, 0x4e, 0x1c, 0xb5, 0xfa, 0xa5, 0x51, 0xdb, 0xbe, 0xf6, 0xa8, 0xc5, 0xbd, 0x5f, 0x3d, 0x69,
0x4f, 0xc0, 0xda, 0xe4, 0xac, 0x66, 0x7e, 0xba, 0xab, 0xbf, 0xa4, 0xc1, 0xf2, 0x6b, 0x3a, 0x70,
0xb3, 0xa5, 0x3f, 0xcf, 0x82, 0xd5, 0xd7, 0x0b, 0x7f, 0xf5, 0xc2, 0x8b, 0x9f, 0xa8, 0xcf, 0x08,
0x55, 0x3f, 0xfe, 0x61, 0xaf, 0x0e, 0x19, 0xa1, 0x48, 0x22, 0xb0, 0x32, 0xe0, 0x06, 0xe1, 0x0f,
0x0b, 0x88, 0x4a, 0xab, 0x7f, 0xa1, 0x22, 0x06, 0x36, 0xc8, 0x11, 0xc1, 0x78, 0xf5, 0x5c, 0x25,
0xb3, 0x5e, 0xdc, 0xdc, 0xbd, 0xf1, 0xac, 0x18, 0x92, 0x38, 0xef, 0x39, 0x9c, 0x9e, 0x46, 0x1c,
0x44, 0xca, 0x50, 0x18, 0x01, 0xbe, 0x01, 0x32, 0xbe, 0xdd, 0x50, 0x14, 0xa1, 0xa8, 0x54, 0x32,
0x87, 0xfb, 0xbb, 0x48, 0xc8, 0xd7, 0x8e, 0x15, 0xf7, 0x96, 0x2e, 0xe0, 0x12, 0xc8, 0xb4, 0xc9,
0x69, 0xb8, 0x67, 0x48, 0x7c, 0xc2, 0x1a, 0xc8, 0xf5, 0x04, 0x2d, 0x57, 0x75, 0xbe, 0x37, 0x3d,
0xd3, 0x88, 0xca, 0xa3, 0xd0, 0x74, 0x3b, 0xfd, 0x50, 0xab, 0xfe, 0xa1, 0x81, 0xbb, 0x13, 0x07,
0x52, 0x10, 0x25, 0xdc, 0xe9, 0xb8, 0x27, 0xa4, 0x21, 0x63, 0xe7, 0x23, 0xa2, 0xb4, 0x13, 0x8a,
0xd1, 0x00, 0x87, 0x6f, 0x81, 0x39, 0x4a, 0x30, 0x73, 0x1d, 0x45, 0xce, 0x86, 0xb3, 0x8c, 0xa4,
0x14, 0x29, 0x14, 0xee, 0x80, 0x45, 0x22, 0xc2, 0xcb, 0xe4, 0xf6, 0x28, 0x75, 0x07, 0x1d, 0x5b,
0x55, 0x06, 0x8b, 0x7b, 0xa3, 0x30, 0xba, 0xac, 0x2f, 0x42, 0x35, 0x88, 0x63, 0x93, 0x86, 0x64,
0x6f, 0xf9, 0x28, 0xd4, 0xae, 0x94, 0x22, 0x85, 0x56, 0xff, 0x49, 0x03, 0x7d, 0xd2, 0xb3, 0x07,
0xdb, 0x11, 0x8b, 0x91, 0xa0, 0x24, 0x52, 0xc5, 0x4d, 0xe3, 0xfa, 0x2b, 0x23, 0xcc, 0x6a, 0x2b,
0x2a, 0xf6, 0x42, 0x5c, 0x1a, 0x63, 0x3e, 0xf2, 0x08, 0x4f, 0xc0, 0x92, 0x33, 0x4a, 0xb9, 0x43,
0x4e, 0x56, 0xdc, 0xdc, 0x98, 0x69, 0x41, 0x64, 0x48, 0x5d, 0x85, 0x5c, 0xba, 0x04, 0x30, 0x34,
0x16, 0x04, 0x6e, 0x02, 0x60, 0x3b, 0x96, 0xdb, 0xf5, 0x3a, 0x84, 0x13, 0x59, 0xe8, 0x7c, 0xf4,
0x5a, 0xee, 0x0f, 0x11, 0x14, 0xd3, 0x4a, 0xea, 0x50, 0x76, 0xb6, 0x0e, 0xd5, 0xee, 0x9f, 0x5d,
0x94, 0x52, 0x2f, 0x2f, 0x4a, 0xa9, 0xf3, 0x8b, 0x52, 0xea, 0x45, 0x50, 0xd2, 0xce, 0x82, 0x92,
0xf6, 0x32, 0x28, 0x69, 0xe7, 0x41, 0x49, 0xfb, 0x2b, 0x28, 0x69, 0x3f, 0xfc, 0x5d, 0x4a, 0x7d,
0x36, 0xaf, 0x6e, 0xf8, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xcc, 0xb3, 0x5e, 0x05, 0xd9, 0x0f,
0x00, 0x00,
}<|fim▁end|> | |
<|file_name|>SystemdJournalUtilsTest.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.rao2100.starter.utils;
import org.junit.After;<|fim▁hole|>import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author openetdev
*/
public class SystemdJournalUtilsTest {
public SystemdJournalUtilsTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of send method, of class SystemdJournalUtils.
*/
@Test
public void testSend() {
System.out.println("send");
SystemdJournalUtils.send();
}
/**
* Test of send method, of class SystemdJournalUtils.
*/
@Test
public void testRead() {
System.out.println("read");
// SystemdJournalUtils.read();
}
}<|fim▁end|> | import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass; |
<|file_name|>imagestreamlookup.go<|end_file_name|><|fim▁begin|>package app
import (
"fmt"
"strings"
"github.com/golang/glog"
kapi "k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/errors"
"github.com/openshift/origin/pkg/client"
imageapi "github.com/openshift/origin/pkg/image/api"
)
// ImageStreamSearcher searches the openshift server image streams for images matching a particular name
type ImageStreamSearcher struct {
Client client.ImageStreamsNamespacer
ImageStreamImages client.ImageStreamImagesNamespacer
Namespaces []string
}
// Search will attempt to find imagestreams with names that match the passed in value
func (r ImageStreamSearcher) Search(precise bool, terms ...string) (ComponentMatches, []error) {
componentMatches := ComponentMatches{}
var errs []error
for _, term := range terms {
ref, err := imageapi.ParseDockerImageReference(term)
if err != nil || len(ref.Registry) != 0 {
glog.V(2).Infof("image streams must be of the form [<namespace>/]<name>[:<tag>|@<digest>], term %q did not qualify", term)
continue
}
if term == "__imagestream_fail" {
errs = append(errs, fmt.Errorf("unable to find the specified image: %s", term))
continue
}
namespaces := r.Namespaces
if len(ref.Namespace) != 0 {
namespaces = []string{ref.Namespace}
}
followTag := false
searchTag := ref.Tag
if len(searchTag) == 0 {
searchTag = imageapi.DefaultImageTag
followTag = true
}
for _, namespace := range namespaces {
glog.V(4).Infof("checking ImageStreams %s/%s with ref %q", namespace, ref.Name, searchTag)
exact := false
streams, err := r.Client.ImageStreams(namespace).List(kapi.ListOptions{})
if err != nil {
if errors.IsNotFound(err) || errors.IsForbidden(err) {
continue
}
errs = append(errs, err)
continue
}
original := ref
ref.Namespace = namespace
for i := range streams.Items {
stream := &streams.Items[i]
score, scored := imageStreamScorer(*stream, ref.Name)
if !scored {
glog.V(2).Infof("unscored %s: %v", stream.Name, score)
continue
}
// indicate the server knows how to directly import image stream tags
var meta map[string]string
if stream.Generation > 0 {
meta = map[string]string{"direct-tag": "1"}
}
imageref := original
imageref.Name = stream.Name
imageref.Registry = ""
matchName := fmt.Sprintf("%s/%s", stream.Namespace, stream.Name)
// When an image stream contains a tag that references another local tag, and the user has not
// provided a tag themselves (i.e. they asked for mysql and we defaulted to mysql:latest), walk
// the chain of references to the end. This ensures that applications can default to using a "stable"
// branch by giving the control over version to the image stream author.
finalTag := searchTag
if specTag, ok := stream.Spec.Tags[searchTag]; ok && followTag {
if specTag.From != nil && specTag.From.Kind == "ImageStreamTag" && !strings.Contains(specTag.From.Name, ":") {
if imageapi.LatestTaggedImage(stream, specTag.From.Name) != nil {
finalTag = specTag.From.Name
}
}
}
latest := imageapi.LatestTaggedImage(stream, finalTag)
if latest == nil || len(latest.Image) == 0 {
glog.V(2).Infof("no image recorded for %s/%s:%s", stream.Namespace, stream.Name, finalTag)
componentMatches = append(componentMatches, &ComponentMatch{
Value: term,
Argument: fmt.Sprintf("--image-stream=%q", matchName),
Name: matchName,
Description: fmt.Sprintf("Image stream %s in project %s", stream.Name, stream.Namespace),
Score: 0.5 + score,
ImageStream: stream,
ImageTag: finalTag,
Meta: meta,
})
continue
}
imageStreamImage, err := r.ImageStreamImages.ImageStreamImages(namespace).Get(stream.Name, latest.Image)
if err != nil {
if errors.IsNotFound(err) {
// continue searching
glog.V(2).Infof("tag %q is set, but image %q has been removed", finalTag, latest.Image)
continue
}
errs = append(errs, err)
continue
}
match := &ComponentMatch{
Value: term,
Argument: fmt.Sprintf("--image-stream=%q", matchName),
Name: matchName,
Description: fmt.Sprintf("Image stream %q (tag %q) in project %q", stream.Name, finalTag, stream.Namespace),
Score: score,
ImageStream: stream,
Image: &imageStreamImage.Image.DockerImageMetadata,
ImageTag: finalTag,
Meta: meta,
}
glog.V(2).Infof("Adding %s as component match for %q with score %v", match.Description, term, score)
if score == 0.0 {
exact = true
}
componentMatches = append(componentMatches, match)
}
// If we found one or more exact matches in this namespace, do not continue looking at
// other namespaces
if exact && precise {
break
}
}
}
return componentMatches, errs
}
// InputImageFromMatch returns an image reference from a component match.
// The component match will either be an image stream or an image.
func InputImageFromMatch(match *ComponentMatch) (*ImageRef, error) {
g := NewImageRefGenerator()
switch {
case match.ImageStream != nil:
input, err := g.FromStream(match.ImageStream, match.ImageTag)
if err != nil {
return nil, err
}
if match.Meta["direct-tag"] == "1" {
input.TagDirectly = true
}
input.AsImageStream = true
input.Info = match.Image
return input, nil
case match.Image != nil:
input, err := g.FromName(match.Value)
if err != nil {
return nil, err
}
if match.Meta["direct-tag"] == "1" {
input.TagDirectly = true
input.AsResolvedImage = true
}
input.AsImageStream = !match.LocalOnly
input.Info = match.Image
input.Insecure = match.Insecure
return input, nil
default:
input, err := g.FromName(match.Value)
if err != nil {
return nil, err
}
return input, nil<|fim▁hole|>
// ImageStreamByAnnotationSearcher searches for image streams based on 'supports' annotations
// found in tagged images belonging to the stream
type ImageStreamByAnnotationSearcher struct {
Client client.ImageStreamsNamespacer
ImageStreamImages client.ImageStreamImagesNamespacer
Namespaces []string
imageStreams map[string]*imageapi.ImageStreamList
}
const supportsAnnotationKey = "supports"
// NewImageStreamByAnnotationSearcher creates a new ImageStreamByAnnotationSearcher
func NewImageStreamByAnnotationSearcher(streamClient client.ImageStreamsNamespacer, imageClient client.ImageStreamImagesNamespacer, namespaces []string) Searcher {
return &ImageStreamByAnnotationSearcher{
Client: streamClient,
ImageStreamImages: imageClient,
Namespaces: namespaces,
imageStreams: make(map[string]*imageapi.ImageStreamList),
}
}
func (r *ImageStreamByAnnotationSearcher) getImageStreams(namespace string) ([]imageapi.ImageStream, error) {
imageStreamList, ok := r.imageStreams[namespace]
if !ok {
var err error
imageStreamList, err = r.Client.ImageStreams(namespace).List(kapi.ListOptions{})
if err != nil {
return nil, err
}
r.imageStreams[namespace] = imageStreamList
}
return imageStreamList.Items, nil
}
func matchSupportsAnnotation(value, annotation string) (float32, bool) {
valueBase := strings.Split(value, ":")[0]
parts := strings.Split(annotation, ",")
// attempt an exact match first
for _, p := range parts {
if value == p {
return 0.0, true
}
}
// attempt a partial match
for _, p := range parts {
partBase := strings.Split(p, ":")[0]
if valueBase == partBase {
return 0.5, true
}
}
return 0, false
}
func (r *ImageStreamByAnnotationSearcher) annotationMatches(stream *imageapi.ImageStream, value string) []*ComponentMatch {
if stream.Spec.Tags == nil {
glog.Infof("No tags found on image, returning nil")
return nil
}
matches := []*ComponentMatch{}
for tag, tagref := range stream.Spec.Tags {
if tagref.Annotations == nil {
continue
}
supports, ok := tagref.Annotations[supportsAnnotationKey]
if !ok {
continue
}
score, ok := matchSupportsAnnotation(value, supports)
if !ok {
continue
}
latest := imageapi.LatestTaggedImage(stream, tag)
if latest == nil {
continue
}
imageStream, err := r.ImageStreamImages.ImageStreamImages(stream.Namespace).Get(stream.Name, latest.Image)
if err != nil {
glog.V(2).Infof("Could not retrieve image stream image for stream %q, tag %q: %v", stream.Name, tag, err)
continue
}
if imageStream == nil {
continue
}
// indicate the server knows how to directly tag images
var meta map[string]string
if imageStream.Generation > 0 {
meta = map[string]string{"direct-tag": "1"}
}
imageData := imageStream.Image
matchName := fmt.Sprintf("%s/%s", stream.Namespace, stream.Name)
glog.V(5).Infof("ImageStreamAnnotationSearcher match found: %s for %s with score %f", matchName, value, score)
match := &ComponentMatch{
Value: value,
Name: fmt.Sprintf("%s", matchName),
Argument: fmt.Sprintf("--image-stream=%q", matchName),
Description: fmt.Sprintf("Image stream %s in project %s", stream.Name, stream.Namespace),
Score: score,
ImageStream: stream,
Image: &imageData.DockerImageMetadata,
ImageTag: tag,
Meta: meta,
}
matches = append(matches, match)
}
return matches
}
// Search finds image stream images using their 'supports' annotation
func (r *ImageStreamByAnnotationSearcher) Search(precise bool, terms ...string) (ComponentMatches, []error) {
matches := ComponentMatches{}
var errs []error
for _, namespace := range r.Namespaces {
streams, err := r.getImageStreams(namespace)
if err != nil {
errs = append(errs, err)
continue
}
for i := range streams {
for _, term := range terms {
if term == "__imagestreamannotation_fail" {
errs = append(errs, fmt.Errorf("unable to find the specified image: %s", term))
continue
}
glog.V(5).Infof("Checking imagestream %s/%s for supports annotation %q", namespace, streams[i].Name, term)
matches = append(matches, r.annotationMatches(&streams[i], term)...)
}
}
if precise {
for _, m := range matches {
if m.Score == 0.0 {
return matches, errs
}
}
}
}
return matches, errs
}<|fim▁end|> | }
} |
<|file_name|>LiNMT-postprocess-text-chunking-rmNP.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: Qiang Li
# Email: [email protected]
# Time: 10:27, 03/30/2017
import sys
import codecs
import argparse
import random
from io import open
argparse.open = open
reload(sys)
sys.setdefaultencoding('utf8')
if sys.version_info < (3, 0):
sys.stderr = codecs.getwriter('UTF-8')(sys.stderr)
sys.stdout = codecs.getwriter('UTF-8')(sys.stdout)
sys.stdin = codecs.getreader('UTF-8')(sys.stdin)
else:
sys.stderr = codecs.getwriter('UTF-8')(sys.stderr.buffer)
sys.stdout = codecs.getwriter('UTF-8')(sys.stdout.buffer)
sys.stdin = codecs.getreader('UTF-8')(sys.stdin.buffer)
<|fim▁hole|> formatter_class=argparse.RawDescriptionHelpFormatter,
description='Text Chunking')
parser.add_argument(
'--input', '-i', type=argparse.FileType('r'), default=sys.stdin,
metavar='PATH', help='Input text (default: standard input).')
parser.add_argument(
'--outword', '-w', type=argparse.FileType('w'), required=True,
metavar='PATH', help='Output word file')
parser.add_argument(
'--outlabel', '-l', type=argparse.FileType('w'), required=True,
metavar='PATH', help='Output label file')
return parser
def pos_postprocess(ifobj, owfobj, olfobj, ologfobj):
line_word = ''
line_label = ''
total_words = 0
reserved_words = 0
remove_words = 0
for line in ifobj:
line = line.strip()
if line == '':
line_word = line_word.strip()
line_label = line_label.strip()
owfobj.write('{0}\n'.format(line_word))
olfobj.write('{0}\n'.format(line_label))
line_word = ''
line_label = ''
else:
words = line.split('\t')
total_words += 1
if words[0] == '':
words[0] = 'NA'
if words[3] == '':
words[3] = 'O'
if "NP" in words[3]:
words[0] = '#'
words[3] = '#'
remove_words += 1
line_word += ' '+words[0]
line_label += ' '+words[3]
ologfobj.write('total word:{0}\n'.format(total_words))
ologfobj.write('remove word:{0}\n'.format(remove_words))
reserve_words = total_words - remove_words
ologfobj.write('reserve word:{0}\n'.format(reserve_words))
reserve_rate = float(reserve_words) / float(total_words)
print reserve_rate
ologfobj.write('reserve rate:{0}\n'.format(reserve_rate))
if __name__ == '__main__':
parser = create_parser()
args = parser.parse_args()
# read/write files as UTF-8
if args.input.name != '<stdin>':
args.input = codecs.open(args.input.name, encoding='utf-8')
args.outword = codecs.open(args.outword.name, 'w', encoding='utf-8')
args.outlabel = codecs.open(args.outlabel.name, 'w', encoding='utf-8')
args.outlog = codecs.open(args.outword.name+".log", 'w', encoding='utf-8')
pos_postprocess(args.input, args.outword, args.outlabel, args.outlog)<|fim▁end|> |
def create_parser():
parser = argparse.ArgumentParser( |
<|file_name|>plot_55_setting_eeg_reference.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
.. _tut-set-eeg-ref:
Setting the EEG reference
=========================
This tutorial describes how to set or change the EEG reference in MNE-Python.
.. contents:: Page contents
:local:
:depth: 2
As usual we'll start by importing the modules we need, loading some
:ref:`example data <sample-dataset>`, and cropping it to save memory. Since
this tutorial deals specifically with EEG, we'll also restrict the dataset to
just a few EEG channels so the plots are easier to see:
"""
import os
import mne
sample_data_folder = mne.datasets.sample.data_path()
sample_data_raw_file = os.path.join(sample_data_folder, 'MEG', 'sample',
'sample_audvis_raw.fif')
raw = mne.io.read_raw_fif(sample_data_raw_file, verbose=False)
raw.crop(tmax=60).load_data()
raw.pick(['EEG 0{:02}'.format(n) for n in range(41, 60)])
###############################################################################
# Background
# ^^^^^^^^^^
#
# EEG measures a voltage (difference in electric potential) between each
# electrode and a reference electrode. This means that whatever signal is
# present at the reference electrode is effectively subtracted from all the
# measurement electrodes. Therefore, an ideal reference signal is one that
# captures *none* of the brain-specific fluctuations in electric potential,
# while capturing *all* of the environmental noise/interference that is being
# picked up by the measurement electrodes.
#
# In practice, this means that the reference electrode is often placed in a
# location on the subject's body and close to their head (so that any
# environmental interference affects the reference and measurement electrodes
# similarly) but as far away from the neural sources as possible (so that the
# reference signal doesn't pick up brain-based fluctuations). Typical reference
# locations are the subject's earlobe, nose, mastoid process, or collarbone.
# Each of these has advantages and disadvantages regarding how much brain
# signal it picks up (e.g., the mastoids pick up a fair amount compared to the
# others), and regarding the environmental noise it picks up (e.g., earlobe
# electrodes may shift easily, and have signals more similar to electrodes on
# the same side of the head).
#
# Even in cases where no electrode is specifically designated as the reference,
# EEG recording hardware will still treat one of the scalp electrodes as the
# reference, and the recording software may or may not display it to you (it
# might appear as a completely flat channel, or the software might subtract out
# the average of all signals before displaying, making it *look like* there is
# no reference).
#
#
# Setting or changing the reference channel
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If you want to recompute your data with a different reference than was used
# when the raw data were recorded and/or saved, MNE-Python provides the
# :meth:`~mne.io.Raw.set_eeg_reference` method on :class:`~mne.io.Raw` objects
# as well as the :func:`mne.add_reference_channels` function. To use an
# existing channel as the new reference, use the
# :meth:`~mne.io.Raw.set_eeg_reference` method; you can also designate multiple
# existing electrodes as reference channels, as is sometimes done with mastoid
# references:
# code lines below are commented out because the sample data doesn't have
# earlobe or mastoid channels, so this is just for demonstration purposes:
# use a single channel reference (left earlobe)
# raw.set_eeg_reference(ref_channels=['A1'])
# use average of mastoid channels as reference
# raw.set_eeg_reference(ref_channels=['M1', 'M2'])
###############################################################################
# If a scalp electrode was used as reference but was not saved alongside the
# raw data (reference channels often aren't), you may wish to add it back to
# the dataset before re-referencing. For example, if your EEG system recorded
# with channel ``Fp1`` as the reference but did not include ``Fp1`` in the data
# file, using :meth:`~mne.io.Raw.set_eeg_reference` to set (say) ``Cz`` as the
# new reference will then subtract out the signal at ``Cz`` *without restoring
# the signal at* ``Fp1``. In this situation, you can add back ``Fp1`` as a flat
# channel prior to re-referencing using :func:`~mne.add_reference_channels`.
# (Since our example data doesn't use the `10-20 electrode naming system`_, the
# example below adds ``EEG 999`` as the missing reference, then sets the
# reference to ``EEG 050``.) Here's how the data looks in its original state:
raw.plot()
###############################################################################
# By default, :func:`~mne.add_reference_channels` returns a copy, so we can go
# back to our original ``raw`` object later. If you wanted to alter the
# existing :class:`~mne.io.Raw` object in-place you could specify
# ``copy=False``.
# add new reference channel (all zero)
raw_new_ref = mne.add_reference_channels(raw, ref_channels=['EEG 999'])
raw_new_ref.plot()
###############################################################################
# .. KEEP THESE BLOCKS SEPARATE SO FIGURES ARE BIG ENOUGH TO READ
# set reference to `EEG 050`
raw_new_ref.set_eeg_reference(ref_channels=['EEG 050'])
raw_new_ref.plot()
###############################################################################
# Notice that the new reference (``EEG 050``) is now flat, while the original
# reference channel that we added back to the data (``EEG 999``) has a non-zero
# signal. Notice also that ``EEG 053`` (which is marked as "bad" in
# ``raw.info['bads']``) is not affected by the re-referencing.
#
#
# Setting average reference
# ^^^^^^^^^^^^^^^^^^^^^^^^^
#
# To set a "virtual reference" that is the average of all channels, you can use
# :meth:`~mne.io.Raw.set_eeg_reference` with ``ref_channels='average'``. Just
# as above, this will not affect any channels marked as "bad", nor will it
# include bad channels when computing the average. However, it does modify the
# :class:`~mne.io.Raw` object in-place, so we'll make a copy first so we can<|fim▁hole|># use the average of all channels as reference
raw_avg_ref = raw.copy().set_eeg_reference(ref_channels='average')
raw_avg_ref.plot()
###############################################################################
# Creating the average reference as a projector
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If using an average reference, it is possible to create the reference as a
# :term:`projector` rather than subtracting the reference from the data
# immediately by specifying ``projection=True``:
raw.set_eeg_reference('average', projection=True)
print(raw.info['projs'])
###############################################################################
# Creating the average reference as a projector has a few advantages:
#
# 1. It is possible to turn projectors on or off when plotting, so it is easy
# to visualize the effect that the average reference has on the data.
#
# 2. If additional channels are marked as "bad" or if a subset of channels are
# later selected, the projector will be re-computed to take these changes
# into account (thus guaranteeing that the signal is zero-mean).
#
# 3. If there are other unapplied projectors affecting the EEG channels (such
# as SSP projectors for removing heartbeat or blink artifacts), EEG
# re-referencing cannot be performed until those projectors are either
# applied or removed; adding the EEG reference as a projector is not subject
# to that constraint. (The reason this wasn't a problem when we applied the
# non-projector average reference to ``raw_avg_ref`` above is that the
# empty-room projectors included in the sample data :file:`.fif` file were
# only computed for the magnetometers.)
for title, proj in zip(['Original', 'Average'], [False, True]):
fig = raw.plot(proj=proj, n_channels=len(raw))
# make room for title
fig.subplots_adjust(top=0.9)
fig.suptitle('{} reference'.format(title), size='xx-large', weight='bold')
###############################################################################
# EEG reference and source modeling
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If you plan to perform source modeling (either with EEG or combined EEG/MEG
# data), it is **strongly recommended** to use the
# average-reference-as-projection approach. It is important to use an average
# reference because using a specific
# reference sensor (or even an average of a few sensors) spreads the forward
# model error from the reference sensor(s) into all sensors, effectively
# amplifying the importance of the reference sensor(s) when computing source
# estimates. In contrast, using the average of all EEG channels as reference
# spreads the forward modeling error evenly across channels, so no one channel
# is weighted more strongly during source estimation. See also this `FieldTrip
# FAQ on average referencing`_ for more information.
#
# The main reason for specifying the average reference as a projector was
# mentioned in the previous section: an average reference projector adapts if
# channels are dropped, ensuring that the signal will always be zero-mean when
# the source modeling is performed. In contrast, applying an average reference
# by the traditional subtraction method offers no such guarantee.
#
# For these reasons, when performing inverse imaging, *MNE-Python will
# automatically average-reference the EEG channels if they are present and no
# reference strategy has been specified*. If you want to perform inverse
# imaging and do not want to use an average reference (and hence you accept the
# risks presented in the previous paragraphs), you can force MNE-Python to
# relax its average reference requirement by passing an empty list to
# :meth:`~mne.io.Raw.set_eeg_reference` (i.e., by calling
# ``raw.set_eeg_reference(ref_channels=[])``) prior to performing inverse
# imaging.
#
#
# .. LINKS
#
# .. _`FieldTrip FAQ on average referencing`:
# http://www.fieldtriptoolbox.org/faq/why_should_i_use_an_average_reference_for_eeg_source_reconstruction/
# .. _`10-20 electrode naming system`:
# https://en.wikipedia.org/wiki/10%E2%80%9320_system_(EEG)<|fim▁end|> | # still go back to the unmodified :class:`~mne.io.Raw` object later:
# sphinx_gallery_thumbnail_number = 4 |
<|file_name|>LoLN_convergence_examples_ch04.py<|end_file_name|><|fim▁begin|>import numpy as np
import matplotlib.pyplot as plt
import pymc as pm
def main():<|fim▁hole|> N_samples = range(1, sample_size, 100)
for k in range(3):
samples = pm.rpoisson(lambda_, size=sample_size)
partial_average = [samples[:i].mean() for i in N_samples]
label = "average of $n$ samples; seq. %d" % k
plt.plot(N_samples, partial_average, lw=1.5, label=label)
plt.plot(N_samples, expected_value * np.ones_like(partial_average),
ls="--", label="true expected value", c="k")
plt.ylim(4.35, 4.65)
plt.title("Convergence of the average of \n random variables to its" +
"expected value")
plt.ylabel("average of $n$ samples")
plt.xlabel("# of samples, $n$")
plt.legend()
plt.show()
if __name__ == '__main__':
main()<|fim▁end|> | sample_size = 100000
expected_value = lambda_ = 4.5 |
<|file_name|>bytearray_construct_array.py<|end_file_name|><|fim▁begin|># test construction of bytearray from different objects
try:
from uarray import array
except ImportError:
try:
from array import array
except ImportError:<|fim▁hole|> raise SystemExit
# arrays
print(bytearray(array('b', [1, 2])))
print(bytearray(array('h', [0x101, 0x202])))<|fim▁end|> | print("SKIP") |
<|file_name|>main.ts<|end_file_name|><|fim▁begin|>import {enableProdMode} from '@angular/core';
import {platformBrowserDynamic} from '@angular/platform-browser-dynamic';
import {AppModule} from './app/app.module';
import {environment} from './environments/environment';
if (environment.production) {
enableProdMode();
}<|fim▁hole|>platformBrowserDynamic()
.bootstrapModule(AppModule)
.catch(err => console.log(err));<|fim▁end|> | |
<|file_name|>wrappers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
werkzeug.contrib.wrappers
~~~~~~~~~~~~~~~~~~~~~~~~~
Extra wrappers or mixins contributed by the community. These wrappers can
be mixed in into request objects to add extra functionality.
Example::
from werkzeug.wrappers import Request as RequestBase
from werkzeug.contrib.wrappers import JSONRequestMixin
class Request(RequestBase, JSONRequestMixin):
pass
Afterwards this request object provides the extra functionality of the
:class:`JSONRequestMixin`.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import codecs
try:
from simplejson import loads
except ImportError:
from json import loads
from werkzeug.exceptions import BadRequest
from werkzeug.utils import cached_property
from werkzeug.http import dump_options_header, parse_options_header
from werkzeug._compat import wsgi_decoding_dance
def is_known_charset(charset):
    """Return ``True`` if *charset* names a codec Python can look up."""
    try:
        codecs.lookup(charset)
        return True
    except LookupError:
        return False
class JSONRequestMixin(object):
    """Mixin that adds a lazily-parsed ``json`` attribute to a request.

    The request body is decoded with simplejson when available, falling
    back to the stdlib ``json`` module.  A
    :exc:`~werkzeug.exceptions.BadRequest` is raised when the content type
    is not JSON or when the payload cannot be decoded.
    """

    @cached_property
    def json(self):
        """Parse and return the request body as JSON."""
        content_type = self.environ.get('CONTENT_TYPE', '')
        # Crude but intentional: any content type mentioning "json" counts.
        if 'json' not in content_type:
            raise BadRequest('Not a JSON request')
        try:
            parsed = loads(self.data)
        except Exception:
            raise BadRequest('Unable to read JSON request')
        return parsed
class ProtobufRequestMixin(object):
    """Add protobuf parsing method to a request object. This will parse the
    input data through `protobuf`_ if possible.

    :exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type
    is not protobuf or if the data itself cannot be parsed property.

    .. _protobuf: http://code.google.com/p/protobuf/
    """

    #: by default the :class:`ProtobufRequestMixin` will raise a
    #: :exc:`~werkzeug.exceptions.BadRequest` if the object is not
    #: initialized.  You can bypass that check by setting this
    #: attribute to `False`.
    protobuf_check_initialization = True

    def parse_protobuf(self, proto_type):
        """Parse the data into an instance of proto_type.

        :param proto_type: the generated protobuf message class to
            instantiate and populate from the request body.
        :return: a populated ``proto_type`` instance.
        :raises BadRequest: if the content type does not mention
            ``protobuf``, if deserialization fails, or if required fields
            are missing (unless ``protobuf_check_initialization`` is off).
        """
        # Same loose content-type test as the JSON mixin: a substring match.
        if 'protobuf' not in self.environ.get('CONTENT_TYPE', ''):
            raise BadRequest('Not a Protobuf request')

        obj = proto_type()
        try:
            obj.ParseFromString(self.data)
        except Exception:
            raise BadRequest("Unable to parse Protobuf request")

        # Fail if not all required fields are set
        if self.protobuf_check_initialization and not obj.IsInitialized():
            raise BadRequest("Partial Protobuf request")

        return obj
class RoutingArgsRequestMixin(object):
    """This request mixin adds support for the wsgiorg routing args
    `specification`_.

    ``wsgiorg.routing_args`` is stored in the WSGI environ as a
    ``(positional_args, keyword_args)`` pair; the two properties below
    expose each half and keep the other half intact when written.

    .. _specification: http://www.wsgi.org/wsgi/Specifications/routing_args
    """

    def _get_routing_args(self):
        # Default to an empty tuple when the key is absent.
        return self.environ.get('wsgiorg.routing_args', (()))[0]

    def _set_routing_args(self, value):
        # Shallow requests share the environ with someone else; refuse to
        # mutate it unless the caller opted out of shallow mode.
        if self.shallow:
            raise RuntimeError('A shallow request tried to modify the WSGI '
                               'environment.  If you really want to do that, '
                               'set `shallow` to False.')
        self.environ['wsgiorg.routing_args'] = (value, self.routing_vars)

    routing_args = property(_get_routing_args, _set_routing_args, doc='''
        The positional URL arguments as `tuple`.''')
    del _get_routing_args, _set_routing_args

    def _get_routing_vars(self):
        rv = self.environ.get('wsgiorg.routing_args')
        if rv is not None:
            return rv[1]
        # No routing args recorded yet: create (and, for non-shallow
        # requests, persist) an empty mapping.
        rv = {}
        if not self.shallow:
            self.routing_vars = rv
        return rv

    def _set_routing_vars(self, value):
        if self.shallow:
            raise RuntimeError('A shallow request tried to modify the WSGI '
                               'environment.  If you really want to do that, '
                               'set `shallow` to False.')
        self.environ['wsgiorg.routing_args'] = (self.routing_args, value)

    routing_vars = property(_get_routing_vars, _set_routing_vars, doc='''
        The keyword URL arguments as `dict`.''')
    del _get_routing_vars, _set_routing_vars
class ReverseSlashBehaviorRequestMixin(object):
"""This mixin reverses the trailing slash behavior of :attr:`script_root`
and :attr:`path`. This makes it possible to use :func:`~urlparse.urljoin`
directly on the paths.
Because it changes the behavior or :class:`Request` this class has to be
mixed in *before* the actual request class::
class MyRequest(ReverseSlashBehaviorRequestMixin, Request):
pass
This example shows the differences (for an application mounted on
`/application` and the request going to `/application/foo/bar`):
+---------------+-------------------+---------------------+
| | normal behavior | reverse behavior |
+===============+===================+=====================+
| `script_root` | ``/application`` | ``/application/`` |
+---------------+-------------------+---------------------+
| `path` | ``/foo/bar`` | ``foo/bar`` |
+---------------+-------------------+---------------------+
"""
    @cached_property
    def path(self):
        """Requested path as unicode.  This works a bit like the regular path
        info in the WSGI environment but will not include a leading slash.
        """
        # Decode PATH_INFO with the request charset, then strip the leading
        # slash so the value can be joined with urljoin-style bases.
        path = wsgi_decoding_dance(self.environ.get('PATH_INFO') or '',
                                   self.charset, self.encoding_errors)
        return path.lstrip('/')
@cached_property
def script_root(self):
"""The root path of the script includling a trailing slash."""
path = wsgi_decoding_dance(self.environ.get('SCRIPT_NAME') or '',
self.charset, self.encoding_errors)
return path.rstrip('/') + '/'<|fim▁hole|> """"If this mixin is mixed into a request class it will provide
a dynamic `charset` attribute. This means that if the charset is
transmitted in the content type headers it's used from there.
Because it changes the behavior or :class:`Request` this class has
to be mixed in *before* the actual request class::
class MyRequest(DynamicCharsetRequestMixin, Request):
pass
By default the request object assumes that the URL charset is the
same as the data charset. If the charset varies on each request
based on the transmitted data it's not a good idea to let the URLs
change based on that. Most browsers assume either utf-8 or latin1
for the URLs if they have troubles figuring out. It's strongly
recommended to set the URL charset to utf-8::
class MyRequest(DynamicCharsetRequestMixin, Request):
url_charset = 'utf-8'
.. versionadded:: 0.6
"""
#: the default charset that is assumed if the content type header
#: is missing or does not contain a charset parameter. The default
#: is latin1 which is what HTTP specifies as default charset.
#: You may however want to set this to utf-8 to better support
#: browsers that do not transmit a charset for incoming data.
default_charset = 'latin1'
    def unknown_charset(self, charset):
        """Called if a charset was provided but is not supported by
        the Python codecs module.  By default latin1 is assumed then
        to not lose any information, you may override this method to
        change the behavior.

        :param charset: the charset that was not found.
        :return: the replacement charset.
        """
        # latin1 maps every byte to a codepoint, so decoding never fails.
        return 'latin1'
    @cached_property
    def charset(self):
        """The charset from the content type."""
        header = self.environ.get('CONTENT_TYPE')
        if header:
            ct, options = parse_options_header(header)
            charset = options.get('charset')
            if charset:
                if is_known_charset(charset):
                    return charset
                # Unknown to the codecs module: delegate to the hook,
                # which defaults to latin1.
                return self.unknown_charset(charset)
        # No header or no charset parameter: fall back to the class default.
        return self.default_charset
class DynamicCharsetResponseMixin(object):
    """If this mixin is mixed into a response class it will provide
    a dynamic `charset` attribute.  This means that if the charset is
    looked up and stored in the `Content-Type` header and updates
    itself automatically.  This also means a small performance hit but
    can be useful if you're working with different charsets on
    responses.

    Because the charset attribute is no a property at class-level, the
    default value is stored in `default_charset`.

    Because it changes the behavior or :class:`Response` this class has
    to be mixed in *before* the actual response class::

        class MyResponse(DynamicCharsetResponseMixin, Response):
            pass

    .. versionadded:: 0.6
    """

    #: the default charset, returned when the Content-Type header is
    #: missing or carries no charset parameter.
    default_charset = 'utf-8'

    def _get_charset(self):
        # Read the charset directly out of the Content-Type header each
        # time, so external header edits are reflected immediately.
        header = self.headers.get('content-type')
        if header:
            charset = parse_options_header(header)[1].get('charset')
            if charset:
                return charset
        return self.default_charset

    def _set_charset(self, charset):
        header = self.headers.get('content-type')
        ct, options = parse_options_header(header)
        # A charset parameter is meaningless without a media type to
        # attach it to.
        if not ct:
            raise TypeError('Cannot set charset if Content-Type '
                            'header is missing.')
        options['charset'] = charset
        self.headers['Content-Type'] = dump_options_header(ct, options)

    charset = property(_get_charset, _set_charset, doc="""
        The charset for the response.  It's stored inside the
        Content-Type header as a parameter.""")
    del _get_charset, _set_charset
class DynamicCharsetRequestMixin(object):
|
<|file_name|>CharacterPlanetResponseTest.java<|end_file_name|><|fim▁begin|>/*
* EVE Swagger Interface
* An OpenAPI for EVE Online
*<|fim▁hole|> *
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package net.troja.eve.esi.model;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import net.troja.eve.esi.model.PlanetLink;
import net.troja.eve.esi.model.PlanetPin;
import net.troja.eve.esi.model.PlanetRoute;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
/**
* Model tests for CharacterPlanetResponse
*/
public class CharacterPlanetResponseTest {
    // Instance under test; also verifies the generated model's no-arg
    // constructor does not throw at class-load time.
    private final CharacterPlanetResponse model = new CharacterPlanetResponse();

    /**
     * Model tests for CharacterPlanetResponse
     */
    @Test
    public void testCharacterPlanetResponse() {
        // TODO: test CharacterPlanetResponse (generated stub — fill in
        // construction/serialization assertions against the ESI schema)
    }

    /**
     * Test the property 'routes'
     */
    @Test
    public void routesTest() {
        // TODO: test routes (List<PlanetRoute> accessor round-trip)
    }

    /**
     * Test the property 'links'
     */
    @Test
    public void linksTest() {
        // TODO: test links (List<PlanetLink> accessor round-trip)
    }

    /**
     * Test the property 'pins'
     */
    @Test
    public void pinsTest() {
        // TODO: test pins (List<PlanetPin> accessor round-trip)
    }
}
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|># Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import uuid
import fixtures
import mock
import oslo_config.fixture
from oslo_db.sqlalchemy import migration
from oslo_log import log
from six.moves import configparser
from six.moves import range
from testtools import matchers
from keystone.auth import controllers
from keystone.cmd import cli
from keystone.cmd.doctor import caching
from keystone.cmd.doctor import credential
from keystone.cmd.doctor import database as doc_database
from keystone.cmd.doctor import debug
from keystone.cmd.doctor import federation
from keystone.cmd.doctor import ldap
from keystone.cmd.doctor import security_compliance
from keystone.cmd.doctor import tokens
from keystone.cmd.doctor import tokens_fernet
from keystone.common import dependency
from keystone.common.sql import upgrades
import keystone.conf
from keystone import exception
from keystone.i18n import _
from keystone.identity.mapping_backends import mapping as identity_mapping
from keystone.tests import unit
from keystone.tests.unit import default_fixtures
from keystone.tests.unit.ksfixtures import database
from keystone.tests.unit.ksfixtures import ldapdb
CONF = keystone.conf.CONF
class CliTestCase(unit.SQLDriverOverrides, unit.TestCase):
    """Smoke-test basic keystone-manage commands against a SQL backend."""

    def config_files(self):
        # Layer the SQL backend settings on top of the base test config.
        config_files = super(CliTestCase, self).config_files()
        config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
        return config_files

    def test_token_flush(self):
        # token_flush should run cleanly against a freshly created database.
        self.useFixture(database.Database())
        self.load_backends()
        cli.TokenFlush.main()
class CliNoConfigTestCase(unit.BaseTestCase):
    """Verify keystone-manage warns (not fails) when no config file exists."""

    def setUp(self):
        self.config_fixture = self.useFixture(oslo_config.fixture.Config(CONF))
        self.config_fixture.register_cli_opt(cli.command_opt)
        # Pretend no config files can be located on disk.
        self.useFixture(fixtures.MockPatch(
            'oslo_config.cfg.find_config_files', return_value=[]))
        super(CliNoConfigTestCase, self).setUp()

        # NOTE(crinkle): the command call doesn't have to actually work,
        # that's what the other unit tests are for. So just mock it out.
        class FakeConfCommand(object):
            def __init__(self):
                self.cmd_class = mock.Mock()
        self.useFixture(fixtures.MockPatchObject(
            CONF, 'command', FakeConfCommand()))

        self.logging = self.useFixture(fixtures.FakeLogger(level=log.WARN))

    def test_cli(self):
        # A warning about defaults should be logged, and main() still runs.
        expected_msg = 'Config file not found, using default configs.'
        cli.main(argv=['keystone-manage', 'db_sync'])
        self.assertThat(self.logging.output, matchers.Contains(expected_msg))
class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase):
    """Exercise ``keystone-manage bootstrap`` end-to-end against SQL.

    Covers the happy path, idempotency with an unchanged password,
    non-idempotency (token invalidation) when the password changes,
    account recovery, and default-role creation.
    """

    def setUp(self):
        self.useFixture(database.Database())
        super(CliBootStrapTestCase, self).setUp()

    def config_files(self):
        self.config_fixture.register_cli_opt(cli.command_opt)
        config_files = super(CliBootStrapTestCase, self).config_files()
        config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
        return config_files

    def config(self, config_files):
        # A random bootstrap password is passed on the CLI for this class;
        # the environment-variable variant overrides this method.
        CONF(args=['bootstrap', '--bootstrap-password', uuid.uuid4().hex],
             project='keystone',
             default_config_files=config_files)

    def test_bootstrap(self):
        bootstrap = cli.BootStrap()
        self._do_test_bootstrap(bootstrap)

    def _do_test_bootstrap(self, bootstrap):
        """Run bootstrap and assert the expected resources exist."""
        bootstrap.do_bootstrap()
        project = bootstrap.resource_manager.get_project_by_name(
            bootstrap.project_name,
            'default')
        user = bootstrap.identity_manager.get_user_by_name(
            bootstrap.username,
            'default')
        role = bootstrap.role_manager.get_role(bootstrap.role_id)
        role_list = (
            bootstrap.assignment_manager.get_roles_for_user_and_project(
                user['id'],
                project['id']))
        # Exactly the bootstrap role, and nothing else, is assigned.
        self.assertIs(1, len(role_list))
        self.assertEqual(role_list[0], role['id'])
        # NOTE(morganfainberg): Pass an empty context, it isn't used by
        # `authenticate` method.
        bootstrap.identity_manager.authenticate(
            self.make_request(),
            user['id'],
            bootstrap.password)

        # Catalog resources are only created when the corresponding
        # region/service options were supplied to bootstrap.
        if bootstrap.region_id:
            region = bootstrap.catalog_manager.get_region(bootstrap.region_id)
            self.assertEqual(self.region_id, region['id'])

        if bootstrap.service_id:
            svc = bootstrap.catalog_manager.get_service(bootstrap.service_id)
            self.assertEqual(self.service_name, svc['name'])

            self.assertEqual(set(['admin', 'public', 'internal']),
                             set(bootstrap.endpoints))

            urls = {'public': self.public_url,
                    'internal': self.internal_url,
                    'admin': self.admin_url}

            for interface, url in urls.items():
                endpoint_id = bootstrap.endpoints[interface]
                endpoint = bootstrap.catalog_manager.get_endpoint(endpoint_id)

                self.assertEqual(self.region_id, endpoint['region_id'])
                self.assertEqual(url, endpoint['url'])
                self.assertEqual(svc['id'], endpoint['service_id'])
                self.assertEqual(interface, endpoint['interface'])

    def test_bootstrap_is_idempotent_when_password_does_not_change(self):
        # NOTE(morganfainberg): Ensure we can run bootstrap with the same
        # configuration multiple times without erroring.
        bootstrap = cli.BootStrap()
        self._do_test_bootstrap(bootstrap)
        v3_token_controller = controllers.Auth()
        v3_password_data = {
            'identity': {
                "methods": ["password"],
                "password": {
                    "user": {
                        "name": bootstrap.username,
                        "password": bootstrap.password,
                        "domain": {
                            "id": CONF.identity.default_domain_id
                        }
                    }
                }
            }
        }
        auth_response = v3_token_controller.authenticate_for_token(
            self.make_request(), v3_password_data)
        token = auth_response.headers['X-Subject-Token']
        # Second run with identical configuration must not invalidate the
        # token issued after the first run.
        self._do_test_bootstrap(bootstrap)
        # build validation request
        request = self.make_request(is_admin=True)
        request.context_dict['subject_token_id'] = token
        # Make sure the token we authenticate for is still valid.
        v3_token_controller.validate_token(request)

    def test_bootstrap_is_not_idempotent_when_password_does_change(self):
        # NOTE(lbragstad): Ensure bootstrap isn't idempotent when run with
        # different arguments or configuration values.
        bootstrap = cli.BootStrap()
        self._do_test_bootstrap(bootstrap)
        v3_token_controller = controllers.Auth()
        v3_password_data = {
            'identity': {
                "methods": ["password"],
                "password": {
                    "user": {
                        "name": bootstrap.username,
                        "password": bootstrap.password,
                        "domain": {
                            "id": CONF.identity.default_domain_id
                        }
                    }
                }
            }
        }
        auth_response = v3_token_controller.authenticate_for_token(
            self.make_request(), v3_password_data)
        token = auth_response.headers['X-Subject-Token']
        # Re-run bootstrap with a different password from the environment.
        os.environ['OS_BOOTSTRAP_PASSWORD'] = uuid.uuid4().hex
        self._do_test_bootstrap(bootstrap)
        # build validation request
        request = self.make_request(is_admin=True)
        request.context_dict['subject_token_id'] = token
        # Since the user account was recovered with a different password, we
        # shouldn't be able to validate this token. Bootstrap should have
        # persisted a revocation event because the user's password was updated.
        # Since this token was obtained using the original password, it should
        # now be invalid.
        self.assertRaises(
            exception.TokenNotFound,
            v3_token_controller.validate_token,
            request
        )

    def test_bootstrap_recovers_user(self):
        bootstrap = cli.BootStrap()
        self._do_test_bootstrap(bootstrap)
        # Completely lock the user out.
        user_id = bootstrap.identity_manager.get_user_by_name(
            bootstrap.username,
            'default')['id']
        bootstrap.identity_manager.update_user(
            user_id,
            {'enabled': False,
             'password': uuid.uuid4().hex})
        # The second bootstrap run will recover the account.
        self._do_test_bootstrap(bootstrap)
        # Sanity check that the original password works again.
        bootstrap.identity_manager.authenticate(
            self.make_request(),
            user_id,
            bootstrap.password)

    def test_bootstrap_creates_default_role(self):
        bootstrap = cli.BootStrap()
        # Precondition: the member role must not exist before bootstrap.
        try:
            role = bootstrap.role_manager.get_role(CONF.member_role_id)
            self.fail('Member Role is created and should not be.')
        except exception.RoleNotFound:
            pass
        self._do_test_bootstrap(bootstrap)
        role = bootstrap.role_manager.get_role(CONF.member_role_id)
        self.assertEqual(role['name'], CONF.member_role_name)
        self.assertEqual(role['id'], CONF.member_role_id)
class CliBootStrapTestCaseWithEnvironment(CliBootStrapTestCase):
    """Re-run the bootstrap tests driven purely by OS_BOOTSTRAP_* env vars.

    Also verifies bootstrap tolerates each resource (user, project, role,
    region, service, endpoint) already existing.
    """

    def config(self, config_files):
        # No CLI arguments: everything comes from the environment fixtures
        # installed in setUp().
        CONF(args=['bootstrap'], project='keystone',
             default_config_files=config_files)

    def setUp(self):
        super(CliBootStrapTestCaseWithEnvironment, self).setUp()
        # Random values per test so leftovers can't mask failures.
        self.password = uuid.uuid4().hex
        self.username = uuid.uuid4().hex
        self.project_name = uuid.uuid4().hex
        self.role_name = uuid.uuid4().hex
        self.service_name = uuid.uuid4().hex
        self.public_url = uuid.uuid4().hex
        self.internal_url = uuid.uuid4().hex
        self.admin_url = uuid.uuid4().hex
        self.region_id = uuid.uuid4().hex
        self.default_domain = {
            'id': CONF.identity.default_domain_id,
            'name': 'Default',
        }
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_PASSWORD',
                                         newvalue=self.password))
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_USERNAME',
                                         newvalue=self.username))
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_PROJECT_NAME',
                                         newvalue=self.project_name))
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_ROLE_NAME',
                                         newvalue=self.role_name))
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_SERVICE_NAME',
                                         newvalue=self.service_name))
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_PUBLIC_URL',
                                         newvalue=self.public_url))
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_INTERNAL_URL',
                                         newvalue=self.internal_url))
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_ADMIN_URL',
                                         newvalue=self.admin_url))
        self.useFixture(
            fixtures.EnvironmentVariable('OS_BOOTSTRAP_REGION_ID',
                                         newvalue=self.region_id))

    def test_assignment_created_with_user_exists(self):
        # test assignment can be created if user already exists.
        bootstrap = cli.BootStrap()
        bootstrap.resource_manager.create_domain(self.default_domain['id'],
                                                 self.default_domain)
        user_ref = unit.new_user_ref(self.default_domain['id'],
                                     name=self.username,
                                     password=self.password)
        bootstrap.identity_manager.create_user(user_ref)
        self._do_test_bootstrap(bootstrap)

    def test_assignment_created_with_project_exists(self):
        # test assignment can be created if project already exists.
        bootstrap = cli.BootStrap()
        bootstrap.resource_manager.create_domain(self.default_domain['id'],
                                                 self.default_domain)
        project_ref = unit.new_project_ref(self.default_domain['id'],
                                           name=self.project_name)
        bootstrap.resource_manager.create_project(project_ref['id'],
                                                  project_ref)
        self._do_test_bootstrap(bootstrap)

    def test_assignment_created_with_role_exists(self):
        # test assignment can be created if role already exists.
        bootstrap = cli.BootStrap()
        bootstrap.resource_manager.create_domain(self.default_domain['id'],
                                                 self.default_domain)
        role = unit.new_role_ref(name=self.role_name)
        bootstrap.role_manager.create_role(role['id'], role)
        self._do_test_bootstrap(bootstrap)

    def test_assignment_created_with_region_exists(self):
        # test assignment can be created if region already exists.
        bootstrap = cli.BootStrap()
        bootstrap.resource_manager.create_domain(self.default_domain['id'],
                                                 self.default_domain)
        region = unit.new_region_ref(id=self.region_id)
        bootstrap.catalog_manager.create_region(region)
        self._do_test_bootstrap(bootstrap)

    def test_endpoints_created_with_service_exists(self):
        # test assignment can be created if service already exists.
        bootstrap = cli.BootStrap()
        bootstrap.resource_manager.create_domain(self.default_domain['id'],
                                                 self.default_domain)
        service = unit.new_service_ref(name=self.service_name)
        bootstrap.catalog_manager.create_service(service['id'], service)
        self._do_test_bootstrap(bootstrap)

    def test_endpoints_created_with_endpoint_exists(self):
        # test assignment can be created if endpoint already exists.
        bootstrap = cli.BootStrap()
        bootstrap.resource_manager.create_domain(self.default_domain['id'],
                                                 self.default_domain)
        service = unit.new_service_ref(name=self.service_name)
        bootstrap.catalog_manager.create_service(service['id'], service)
        region = unit.new_region_ref(id=self.region_id)
        bootstrap.catalog_manager.create_region(region)
        endpoint = unit.new_endpoint_ref(interface='public',
                                         service_id=service['id'],
                                         url=self.public_url,
                                         region_id=self.region_id)
        bootstrap.catalog_manager.create_endpoint(endpoint['id'], endpoint)
        self._do_test_bootstrap(bootstrap)
class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase):
    """Test ``keystone-manage domain_config_upload --all``.

    Subclasses override :meth:`config` to exercise the single-domain and
    error variants of the command against the same domain fixtures.
    """

    def setUp(self):
        self.useFixture(database.Database())
        super(CliDomainConfigAllTestCase, self).setUp()
        self.load_backends()
        # Point at the test config directory holding one LDAP config file
        # per domain (keystone.<domain>.conf).
        self.config_fixture.config(
            group='identity',
            domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap')
        self.domain_count = 3
        self.setup_initial_domains()
        self.logging = self.useFixture(
            fixtures.FakeLogger(level=logging.INFO))

    def config_files(self):
        self.config_fixture.register_cli_opt(cli.command_opt)
        config_files = super(CliDomainConfigAllTestCase, self).config_files()
        config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
        return config_files

    def cleanup_domains(self):
        for domain in self.domains:
            if domain == 'domain_default':
                # Not allowed to delete the default domain, but should at least
                # delete any domain-specific config for it.
                self.domain_config_api.delete_config(
                    CONF.identity.default_domain_id)
                continue
            this_domain = self.domains[domain]
            # Domains must be disabled before they can be deleted.
            this_domain['enabled'] = False
            self.resource_api.update_domain(this_domain['id'], this_domain)
            self.resource_api.delete_domain(this_domain['id'])
        self.domains = {}

    def config(self, config_files):
        CONF(args=['domain_config_upload', '--all'], project='keystone',
             default_config_files=config_files)

    def setup_initial_domains(self):
        """Create domain1..domainN plus the default domain for the tests."""

        def create_domain(domain):
            return self.resource_api.create_domain(domain['id'], domain)

        self.domains = {}
        self.addCleanup(self.cleanup_domains)
        for x in range(1, self.domain_count):
            domain = 'domain%s' % x
            self.domains[domain] = create_domain(
                {'id': uuid.uuid4().hex, 'name': domain})
        self.default_domain = unit.new_domain_ref(
            description=u'The default domain',
            id=CONF.identity.default_domain_id,
            name=u'Default')
        self.domains['domain_default'] = create_domain(self.default_domain)

    def test_config_upload(self):
        # The values below are the same as in the domain_configs_multi_ldap
        # directory of test config_files.
        default_config = {
            'ldap': {'url': 'fake://memory',
                     'user': 'cn=Admin',
                     'password': 'password',
                     'suffix': 'cn=example,cn=com'},
            'identity': {'driver': 'ldap'}
        }
        domain1_config = {
            'ldap': {'url': 'fake://memory1',
                     'user': 'cn=Admin',
                     'password': 'password',
                     'suffix': 'cn=example,cn=com'},
            'identity': {'driver': 'ldap',
                         'list_limit': '101'}
        }
        domain2_config = {
            'ldap': {'url': 'fake://memory',
                     'user': 'cn=Admin',
                     'password': 'password',
                     'suffix': 'cn=myroot,cn=com',
                     'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org',
                     'user_tree_dn': 'ou=Users,dc=myroot,dc=org'},
            'identity': {'driver': 'ldap'}
        }

        # Clear backend dependencies, since cli loads these manually
        dependency.reset()
        cli.DomainConfigUpload.main()

        # With --all, every domain's file should now be stored in SQL.
        res = self.domain_config_api.get_config_with_sensitive_info(
            CONF.identity.default_domain_id)
        self.assertEqual(default_config, res)
        res = self.domain_config_api.get_config_with_sensitive_info(
            self.domains['domain1']['id'])
        self.assertEqual(domain1_config, res)
        res = self.domain_config_api.get_config_with_sensitive_info(
            self.domains['domain2']['id'])
        self.assertEqual(domain2_config, res)
class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase):
    """Test ``domain_config_upload --domain-name`` for a single domain."""

    def config(self, config_files):
        CONF(args=['domain_config_upload', '--domain-name', 'Default'],
             project='keystone', default_config_files=config_files)

    def test_config_upload(self):
        # The values below are the same as in the domain_configs_multi_ldap
        # directory of test config_files.
        default_config = {
            'ldap': {'url': 'fake://memory',
                     'user': 'cn=Admin',
                     'password': 'password',
                     'suffix': 'cn=example,cn=com'},
            'identity': {'driver': 'ldap'}
        }

        # Clear backend dependencies, since cli loads these manually
        dependency.reset()
        cli.DomainConfigUpload.main()

        # Only the named domain is uploaded; the others stay empty.
        res = self.domain_config_api.get_config_with_sensitive_info(
            CONF.identity.default_domain_id)
        self.assertEqual(default_config, res)
        res = self.domain_config_api.get_config_with_sensitive_info(
            self.domains['domain1']['id'])
        self.assertEqual({}, res)
        res = self.domain_config_api.get_config_with_sensitive_info(
            self.domains['domain2']['id'])
        self.assertEqual({}, res)

    def test_no_overwrite_config(self):
        # Create a config for the default domain
        default_config = {
            'ldap': {'url': uuid.uuid4().hex},
            'identity': {'driver': 'ldap'}
        }
        self.domain_config_api.create_config(
            CONF.identity.default_domain_id, default_config)

        # Now try and upload the settings in the configuration file for the
        # default domain
        dependency.reset()
        with mock.patch('six.moves.builtins.print') as mock_print:
            self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
            file_name = ('keystone.%s.conf' % self.default_domain['name'])
            error_msg = _(
                'Domain: %(domain)s already has a configuration defined - '
                'ignoring file: %(file)s.') % {
                    'domain': self.default_domain['name'],
                    'file': os.path.join(CONF.identity.domain_config_dir,
                                         file_name)}
            mock_print.assert_has_calls([mock.call(error_msg)])

        res = self.domain_config_api.get_config(
            CONF.identity.default_domain_id)
        # The initial config should not have been overwritten
        self.assertEqual(default_config, res)
class CliDomainConfigNoOptionsTestCase(CliDomainConfigAllTestCase):
    """domain_config_upload with no options must print a usage error."""

    def config(self, config_files):
        args = ['domain_config_upload']
        CONF(args=args, project='keystone',
             default_config_files=config_files)

    def test_config_upload(self):
        # cli loads the backends manually, so drop any cached dependencies.
        dependency.reset()
        with mock.patch('six.moves.builtins.print') as mock_print:
            self.assertRaises(unit.UnexpectedExit,
                              cli.DomainConfigUpload.main)
        expected = _('At least one option must be provided, use either '
                     '--all or --domain-name')
        mock_print.assert_has_calls([mock.call(expected)])
class CliDomainConfigTooManyOptionsTestCase(CliDomainConfigAllTestCase):
    """Passing both --all and --domain-name must print a usage error."""

    def config(self, config_files):
        CONF(args=['domain_config_upload', '--all', '--domain-name',
                   'Default'],
             project='keystone', default_config_files=config_files)

    def test_config_upload(self):
        dependency.reset()
        with mock.patch('six.moves.builtins.print') as mock_print:
            self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
            mock_print.assert_has_calls(
                [mock.call(_('The --all option cannot be used with '
                             'the --domain-name option'))])
class CliDomainConfigInvalidDomainTestCase(CliDomainConfigAllTestCase):
    """An unknown --domain-name should be reported and the file ignored."""

    def config(self, config_files):
        # A random name guarantees the domain does not exist.
        self.invalid_domain_name = uuid.uuid4().hex
        CONF(args=['domain_config_upload', '--domain-name',
                   self.invalid_domain_name],
             project='keystone', default_config_files=config_files)

    def test_config_upload(self):
        dependency.reset()
        with mock.patch('six.moves.builtins.print') as mock_print:
            self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
            file_name = 'keystone.%s.conf' % self.invalid_domain_name
            error_msg = (_(
                'Invalid domain name: %(domain)s found in config file name: '
                '%(file)s - ignoring this file.') % {
                    'domain': self.invalid_domain_name,
                    'file': os.path.join(CONF.identity.domain_config_dir,
                                         file_name)})
            mock_print.assert_has_calls([mock.call(error_msg)])
class TestDomainConfigFinder(unit.BaseTestCase):
    """Unit tests for cli._domain_config_finder."""

    def setUp(self):
        super(TestDomainConfigFinder, self).setUp()
        self.logging = self.useFixture(fixtures.LoggerFixture())

    @mock.patch('os.walk')
    def test_finder_ignores_files(self, mock_walk):
        # Only files matching keystone.<domain>.conf should be yielded;
        # everything else is skipped with a log message.
        mock_walk.return_value = [
            ['.', [], ['file.txt', 'keystone.conf', 'keystone.domain0.conf']],
        ]
        domain_configs = list(cli._domain_config_finder('.'))
        expected_domain_configs = [('./keystone.domain0.conf', 'domain0')]
        self.assertThat(domain_configs,
                        matchers.Equals(expected_domain_configs))
        expected_msg_template = ('Ignoring file (%s) while scanning '
                                 'domain config directory')
        self.assertThat(
            self.logging.output,
            matchers.Contains(expected_msg_template % 'file.txt'))
        self.assertThat(
            self.logging.output,
            matchers.Contains(expected_msg_template % 'keystone.conf'))
class CliDBSyncTestCase(unit.BaseTestCase):
    """Verify db_sync dispatches to the right upgrades.* function.

    The upgrade entry points are replaced with mocks, so these tests only
    assert dispatch, not actual schema migration.
    """

    class FakeConfCommand(object):
        # Mimics the parsed db_sync CLI sub-options; each flag mirrors the
        # parent test case's command_* attribute.
        def __init__(self, parent):
            self.extension = False
            self.check = parent.command_check
            self.expand = parent.command_expand
            self.migrate = parent.command_migrate
            self.contract = parent.command_contract
            self.version = None

    def setUp(self):
        super(CliDBSyncTestCase, self).setUp()
        self.config_fixture = self.useFixture(oslo_config.fixture.Config(CONF))
        self.config_fixture.register_cli_opt(cli.command_opt)
        # Stub out all migration entry points; dispatch is asserted below.
        upgrades.offline_sync_database_to_version = mock.Mock()
        upgrades.expand_schema = mock.Mock()
        upgrades.migrate_data = mock.Mock()
        upgrades.contract_schema = mock.Mock()
        self.command_check = False
        self.command_expand = False
        self.command_migrate = False
        self.command_contract = False

    def _assert_correct_call(self, mocked_function):
        # Exactly one of the four mocked entry points must have been called.
        for func in [upgrades.offline_sync_database_to_version,
                     upgrades.expand_schema,
                     upgrades.migrate_data,
                     upgrades.contract_schema]:
            if func == mocked_function:
                self.assertTrue(func.called)
            else:
                self.assertFalse(func.called)

    def test_db_sync(self):
        self.useFixture(fixtures.MockPatchObject(
            CONF, 'command', self.FakeConfCommand(self)))
        cli.DbSync.main()
        self._assert_correct_call(
            upgrades.offline_sync_database_to_version)

    def test_db_sync_expand(self):
        self.command_expand = True
        self.useFixture(fixtures.MockPatchObject(
            CONF, 'command', self.FakeConfCommand(self)))
        cli.DbSync.main()
        self._assert_correct_call(upgrades.expand_schema)

    def test_db_sync_migrate(self):
        self.command_migrate = True
        self.useFixture(fixtures.MockPatchObject(
            CONF, 'command', self.FakeConfCommand(self)))
        cli.DbSync.main()
        self._assert_correct_call(upgrades.migrate_data)

    def test_db_sync_contract(self):
        self.command_contract = True
        self.useFixture(fixtures.MockPatchObject(
            CONF, 'command', self.FakeConfCommand(self)))
        cli.DbSync.main()
        self._assert_correct_call(upgrades.contract_schema)

    @mock.patch('keystone.cmd.cli.upgrades.get_db_version')
    def test_db_sync_check_when_database_is_empty(self, mocked_get_db_version):
        # An uninitialized database should yield status 2 and a hint that
        # the DB is not under version control.
        e = migration.exception.DbMigrationError("Invalid version")
        mocked_get_db_version.side_effect = e
        checker = cli.DbSync()
        log_info = self.useFixture(fixtures.FakeLogger(level=log.INFO))
        status = checker.check_db_sync_status()
        self.assertIn("not currently under version control", log_info.output)
        self.assertEqual(status, 2)
class TestMappingPopulate(unit.SQLDriverOverrides, unit.TestCase):
    """Tests for ``keystone-manage mapping_populate`` with an LDAP identity
    backend whose public-ID mappings live in SQL.
    """
    def setUp(self):
        # NOTE(review): the ordering here looks deliberate — the SQL fixture
        # is created before super().setUp() and recreated after backends
        # load; do not reorder.
        sqldb = self.useFixture(database.Database())
        super(TestMappingPopulate, self).setUp()
        self.ldapdb = self.useFixture(ldapdb.LDAPDatabase())
        self.ldapdb.clear()
        self.load_backends()
        sqldb.recreate()
        self.load_fixtures(default_fixtures)
    def config_files(self):
        self.config_fixture.register_cli_opt(cli.command_opt)
        config_files = super(TestMappingPopulate, self).config_files()
        # Use the hybrid LDAP-identity / SQL configuration.
        config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf'))
        return config_files
    def config_overrides(self):
        super(TestMappingPopulate, self).config_overrides()
        self.config_fixture.config(group='identity', driver='ldap')
        # Force real public-ID mappings instead of passthrough local IDs,
        # otherwise mapping_populate would have nothing to create.
        self.config_fixture.config(group='identity_mapping',
                                   backward_compatible_ids=False)
    def config(self, config_files):
        # Simulate the command line:
        #   keystone-manage mapping_populate --domain-name Default
        CONF(args=['mapping_populate', '--domain-name', 'Default'],
             project='keystone',
             default_config_files=config_files)
    def test_mapping_populate(self):
        # mapping_populate should create id mappings. Test plan:
        # 0. Purge mappings
        # 1. Fetch user list directly via backend. It will not create any
        #    mappings because it bypasses identity manager
        # 2. Verify that users have no public_id yet
        # 3. Execute mapping_populate. It should create id mappings
        # 4. For the same users verify that they have public_id now
        purge_filter = {}
        self.id_mapping_api.purge_mappings(purge_filter)
        hints = None
        users = self.identity_api.driver.list_users(hints)
        for user in users:
            local_entity = {
                'domain_id': CONF.identity.default_domain_id,
                'local_id': user['id'],
                'entity_type': identity_mapping.EntityType.USER}
            self.assertIsNone(self.id_mapping_api.get_public_id(local_entity))
        dependency.reset()  # backends are loaded again in the command handler
        cli.MappingPopulate.main()
        for user in users:
            local_entity = {
                'domain_id': CONF.identity.default_domain_id,
                'local_id': user['id'],
                'entity_type': identity_mapping.EntityType.USER}
            self.assertIsNotNone(
                self.id_mapping_api.get_public_id(local_entity))
    def test_bad_domain_name(self):
        # An unknown --domain-name must make the command fail cleanly.
        CONF(args=['mapping_populate', '--domain-name', uuid.uuid4().hex],
             project='keystone')
        dependency.reset()  # backends are loaded again in the command handler
        # NOTE: assertEqual is used on purpose. assertFalse passes with None.
        self.assertEqual(False, cli.MappingPopulate.main())
class CliDomainConfigUploadNothing(unit.BaseTestCase):
    """Verify ``keystone-manage domain_config_upload --all`` against an
    empty domain-config directory: it should log and succeed, not fail.
    """
    def setUp(self):
        super(CliDomainConfigUploadNothing, self).setUp()
        config_fixture = self.useFixture(oslo_config.fixture.Config(CONF))
        config_fixture.register_cli_opt(cli.command_opt)
        # NOTE(dstanek): since this is not testing any database
        # functionality there is no need to go through the motions and
        # setup a test database.
        def fake_load_backends(self):
            # Replaces the real backend loading with a mock resource manager.
            self.resource_manager = mock.Mock()
        self.useFixture(fixtures.MockPatchObject(
            cli.DomainConfigUploadFiles, 'load_backends', fake_load_backends))
        # Point the domain-config scan at a brand-new, empty directory.
        tempdir = self.useFixture(fixtures.TempDir())
        config_fixture.config(group='identity', domain_config_dir=tempdir.path)
        self.logging = self.useFixture(
            fixtures.FakeLogger(level=logging.DEBUG))
    def test_uploading_all_from_an_empty_directory(self):
        CONF(args=['domain_config_upload', '--all'], project='keystone',
             default_config_files=[])
        cli.DomainConfigUpload.main()
        # Nothing to upload: the command should only log a notice.
        expected_msg = ('No domain configs uploaded from %r' %
                        CONF.identity.domain_config_dir)
        self.assertThat(self.logging.output,
                        matchers.Contains(expected_msg))
class CachingDoctorTests(unit.TestCase):
    """Doctor checks for the ``[cache]`` configuration section."""

    def test_symptom_caching_disabled(self):
        """The symptom fires exactly when ``[cache] enabled`` is False."""
        for enabled, check in ((False, self.assertTrue),
                               (True, self.assertFalse)):
            self.config_fixture.config(group='cache', enabled=enabled)
            check(caching.symptom_caching_disabled())

    def test_caching_symptom_caching_enabled_without_a_backend(self):
        """The symptom fires only when caching is on with the null backend."""
        symptom = caching.symptom_caching_enabled_without_a_backend
        # Caching on but only the no-op backend configured: report it.
        self.config_fixture.config(group='cache', enabled=True,
                                   backend='dogpile.cache.null')
        self.assertTrue(symptom())
        # Caching off with the null backend: nothing to report.
        self.config_fixture.config(group='cache', enabled=False,
                                   backend='dogpile.cache.null')
        self.assertFalse(symptom())
        # Caching off with a real backend: nothing to report.
        self.config_fixture.config(group='cache', enabled=False,
                                   backend='dogpile.cache.memory')
        self.assertFalse(symptom())
        # Caching on with a real backend: healthy configuration.
        self.config_fixture.config(group='cache', enabled=True,
                                   backend='dogpile.cache.memory')
        self.assertFalse(symptom())
class CredentialDoctorTests(unit.TestCase):
    """Doctor checks for the ``[credential]`` fernet key repository."""
    def test_credential_and_fernet_key_repositories_match(self):
        # Symptom Detected: Key repository paths are not unique
        directory = self.useFixture(fixtures.TempDir()).path
        self.config_fixture.config(group='credential',
                                   key_repository=directory)
        self.config_fixture.config(group='fernet_tokens',
                                   key_repository=directory)
        self.assertTrue(credential.symptom_unique_key_repositories())
    def test_credential_and_fernet_key_repositories_are_unique(self):
        # No Symptom Detected: Key repository paths are unique
        self.config_fixture.config(group='credential',
                                   key_repository='/etc/keystone/cred-repo')
        self.config_fixture.config(group='fernet_tokens',
                                   key_repository='/etc/keystone/fernet-repo')
        self.assertFalse(credential.symptom_unique_key_repositories())
    @mock.patch('keystone.cmd.doctor.credential.utils')
    def test_usability_of_cred_fernet_key_repo_raised(self, mock_utils):
        # Symptom Detected: credential fernet key repository is world readable
        # NOTE: FernetUtils() on the patched module always returns the same
        # child mock, so the return_value set here is seen by the symptom.
        self.config_fixture.config(group='credential', provider='fernet')
        mock_utils.FernetUtils().validate_key_repository.return_value = False
        self.assertTrue(
            credential.symptom_usability_of_credential_fernet_key_repository())
    @mock.patch('keystone.cmd.doctor.credential.utils')
    def test_usability_of_cred_fernet_key_repo_not_raised(self, mock_utils):
        # No Symptom Detected: Custom driver is used
        self.config_fixture.config(group='credential', provider='my-driver')
        mock_utils.FernetUtils().validate_key_repository.return_value = True
        self.assertFalse(
            credential.symptom_usability_of_credential_fernet_key_repository())
        # No Symptom Detected: key repository is not world readable
        self.config_fixture.config(group='credential', provider='fernet')
        mock_utils.FernetUtils().validate_key_repository.return_value = True
        self.assertFalse(
            credential.symptom_usability_of_credential_fernet_key_repository())
    @mock.patch('keystone.cmd.doctor.credential.utils')
    def test_keys_in_credential_fernet_key_repository_raised(self, mock_utils):
        # Symptom Detected: Key repo is empty
        self.config_fixture.config(group='credential', provider='fernet')
        mock_utils.FernetUtils().load_keys.return_value = False
        self.assertTrue(
            credential.symptom_keys_in_credential_fernet_key_repository())
    @mock.patch('keystone.cmd.doctor.credential.utils')
    def test_keys_in_credential_fernet_key_repository_not_raised(
            self, mock_utils):
        # No Symptom Detected: Custom driver is used
        self.config_fixture.config(group='credential', provider='my-driver')
        mock_utils.FernetUtils().load_keys.return_value = True
        self.assertFalse(
            credential.symptom_keys_in_credential_fernet_key_repository())
        # No Symptom Detected: Key repo is not empty, fernet is current driver
        self.config_fixture.config(group='credential', provider='fernet')
        mock_utils.FernetUtils().load_keys.return_value = True
        self.assertFalse(
            credential.symptom_keys_in_credential_fernet_key_repository())
class DatabaseDoctorTests(unit.TestCase):
    """Doctor checks for the ``[database]`` connection string."""

    def test_symptom_is_raised_if_database_connection_is_SQLite(self):
        """SQLite URLs are flagged; a MySQL URL is accepted."""
        symptom = doc_database.symptom_database_connection_is_not_SQLite
        # A sqlite:// URL is unsuitable for production deployments.
        self.config_fixture.config(group='database',
                                   connection='sqlite:///mydb')
        self.assertTrue(symptom())
        # A MySQL URL raises no symptom.
        self.config_fixture.config(
            group='database',
            connection='mysql+mysqlconnector://admin:secret@localhost/mydb')
        self.assertFalse(symptom())
class DebugDoctorTests(unit.TestCase):
    """Doctor checks for the global ``debug`` flag."""

    def test_symptom_debug_mode_is_enabled(self):
        """The symptom simply mirrors the value of ``CONF.debug``."""
        for flag, check in ((True, self.assertTrue),
                            (False, self.assertFalse)):
            self.config_fixture.config(debug=flag)
            check(debug.symptom_debug_mode_is_enabled())
class FederationDoctorTests(unit.TestCase):
    """Doctor checks for the ``[saml]`` federation file paths."""

    def test_symptom_comma_in_SAML_public_certificate_path(self):
        """A comma anywhere in the certfile path must be flagged."""
        symptom = federation.symptom_comma_in_SAML_public_certificate_path
        # Comma present in the path: symptom raised.
        self.config_fixture.config(group='saml', certfile='file,cert.pem')
        self.assertTrue(symptom())
        # Clean path: no symptom.
        self.config_fixture.config(group='saml', certfile='signing_cert.pem')
        self.assertFalse(symptom())

    def test_symptom_comma_in_SAML_private_key_file_path(self):
        """A comma anywhere in the keyfile path must be flagged."""
        symptom = federation.symptom_comma_in_SAML_private_key_file_path
        # Comma present in the path: symptom raised.
        self.config_fixture.config(group='saml', keyfile='file,key.pem')
        self.assertTrue(symptom())
        # Clean path: no symptom.
        self.config_fixture.config(group='saml', keyfile='signing_key.pem')
        self.assertFalse(symptom())
class LdapDoctorTests(unit.TestCase):
    """Doctor checks for common ``[ldap]`` and domain-specific-config
    misconfigurations.

    FIX: ``test_file_based_domain_specific_configs_formatted_correctly_
    not_raised`` was garbled by a stray extraction marker; its body has
    been reconstructed from the displaced fragments so the method is
    syntactically valid again.
    """

    def test_user_enabled_emulation_dn_ignored_raised(self):
        # Symptom when user_enabled_emulation_dn is being ignored because the
        # user did not enable the user_enabled_emulation
        self.config_fixture.config(group='ldap', user_enabled_emulation=False)
        self.config_fixture.config(
            group='ldap',
            user_enabled_emulation_dn='cn=enabled_users,dc=example,dc=com')
        self.assertTrue(
            ldap.symptom_LDAP_user_enabled_emulation_dn_ignored())

    def test_user_enabled_emulation_dn_ignored_not_raised(self):
        # No symptom when configuration set properly
        self.config_fixture.config(group='ldap', user_enabled_emulation=True)
        self.config_fixture.config(
            group='ldap',
            user_enabled_emulation_dn='cn=enabled_users,dc=example,dc=com')
        self.assertFalse(
            ldap.symptom_LDAP_user_enabled_emulation_dn_ignored())
        # No symptom when both configurations disabled
        self.config_fixture.config(group='ldap', user_enabled_emulation=False)
        self.config_fixture.config(group='ldap',
                                   user_enabled_emulation_dn=None)
        self.assertFalse(
            ldap.symptom_LDAP_user_enabled_emulation_dn_ignored())

    def test_user_enabled_emulation_use_group_config_ignored_raised(self):
        # Symptom when user enabled emulation isn't enabled but group_config
        # is enabled
        self.config_fixture.config(group='ldap', user_enabled_emulation=False)
        self.config_fixture.config(
            group='ldap',
            user_enabled_emulation_use_group_config=True)
        self.assertTrue(
            ldap.
            symptom_LDAP_user_enabled_emulation_use_group_config_ignored())

    def test_user_enabled_emulation_use_group_config_ignored_not_raised(self):
        # No symptom when configuration deactivated
        self.config_fixture.config(group='ldap', user_enabled_emulation=False)
        self.config_fixture.config(
            group='ldap',
            user_enabled_emulation_use_group_config=False)
        self.assertFalse(
            ldap.
            symptom_LDAP_user_enabled_emulation_use_group_config_ignored())
        # No symptom when configurations set properly
        self.config_fixture.config(group='ldap', user_enabled_emulation=True)
        self.config_fixture.config(
            group='ldap',
            user_enabled_emulation_use_group_config=True)
        self.assertFalse(
            ldap.
            symptom_LDAP_user_enabled_emulation_use_group_config_ignored())

    def test_group_members_are_ids_disabled_raised(self):
        # Symptom when objectclass is set to posixGroup but members_are_ids
        # are not enabled
        self.config_fixture.config(group='ldap',
                                   group_objectclass='posixGroup')
        self.config_fixture.config(group='ldap',
                                   group_members_are_ids=False)
        self.assertTrue(ldap.symptom_LDAP_group_members_are_ids_disabled())

    def test_group_members_are_ids_disabled_not_raised(self):
        # No symptom when the configurations are set properly
        self.config_fixture.config(group='ldap',
                                   group_objectclass='posixGroup')
        self.config_fixture.config(group='ldap',
                                   group_members_are_ids=True)
        self.assertFalse(ldap.symptom_LDAP_group_members_are_ids_disabled())
        # No symptom when configuration deactivated
        self.config_fixture.config(group='ldap',
                                   group_objectclass='groupOfNames')
        self.config_fixture.config(group='ldap',
                                   group_members_are_ids=False)
        self.assertFalse(ldap.symptom_LDAP_group_members_are_ids_disabled())

    @mock.patch('os.listdir')
    @mock.patch('os.path.isdir')
    def test_file_based_domain_specific_configs_raised(self, mocked_isdir,
                                                       mocked_listdir):
        self.config_fixture.config(
            group='identity',
            domain_specific_drivers_enabled=True)
        self.config_fixture.config(
            group='identity',
            domain_configurations_from_database=False)
        # Symptom if there is no existing directory
        mocked_isdir.return_value = False
        self.assertTrue(ldap.symptom_LDAP_file_based_domain_specific_configs())
        # Symptom if there is an invalid filename inside the domain directory
        mocked_isdir.return_value = True
        mocked_listdir.return_value = ['openstack.domains.conf']
        self.assertTrue(ldap.symptom_LDAP_file_based_domain_specific_configs())

    @mock.patch('os.listdir')
    @mock.patch('os.path.isdir')
    def test_file_based_domain_specific_configs_not_raised(self, mocked_isdir,
                                                           mocked_listdir):
        # No symptom if both configurations deactivated
        self.config_fixture.config(
            group='identity',
            domain_specific_drivers_enabled=False)
        self.config_fixture.config(
            group='identity',
            domain_configurations_from_database=False)
        self.assertFalse(
            ldap.symptom_LDAP_file_based_domain_specific_configs())
        # No symptom if directory exists with no invalid filenames
        self.config_fixture.config(
            group='identity',
            domain_specific_drivers_enabled=True)
        self.config_fixture.config(
            group='identity',
            domain_configurations_from_database=False)
        mocked_isdir.return_value = True
        mocked_listdir.return_value = ['keystone.domains.conf']
        self.assertFalse(
            ldap.symptom_LDAP_file_based_domain_specific_configs())

    @mock.patch('os.listdir')
    @mock.patch('os.path.isdir')
    @mock.patch('keystone.cmd.doctor.ldap.configparser.ConfigParser')
    def test_file_based_domain_specific_configs_formatted_correctly_raised(
            self, mocked_parser, mocked_isdir, mocked_listdir):
        symptom = ('symptom_LDAP_file_based_domain_specific_configs'
                   '_formatted_correctly')
        # Symptom Detected: Ldap domain specific configuration files are not
        # formatted correctly
        self.config_fixture.config(
            group='identity',
            domain_specific_drivers_enabled=True)
        self.config_fixture.config(
            group='identity',
            domain_configurations_from_database=False)
        mocked_isdir.return_value = True
        mocked_listdir.return_value = ['keystone.domains.conf']
        mock_instance = mock.MagicMock()
        mock_instance.read.side_effect = configparser.Error('No Section')
        mocked_parser.return_value = mock_instance
        self.assertTrue(getattr(ldap, symptom)())

    @mock.patch('os.listdir')
    @mock.patch('os.path.isdir')
    def test_file_based_domain_specific_configs_formatted_correctly_not_raised(
            self, mocked_isdir, mocked_listdir):
        symptom = ('symptom_LDAP_file_based_domain_specific_configs'
                   '_formatted_correctly')
        # No Symptom Detected: Domain_specific drivers is not enabled
        self.config_fixture.config(
            group='identity',
            domain_specific_drivers_enabled=False)
        self.assertFalse(getattr(ldap, symptom)())
        # No Symptom Detected: Domain configuration from database is enabled
        self.config_fixture.config(
            group='identity',
            domain_specific_drivers_enabled=True)
        self.assertFalse(getattr(ldap, symptom)())
        self.config_fixture.config(
            group='identity',
            domain_configurations_from_database=True)
        self.assertFalse(getattr(ldap, symptom)())
        # No Symptom Detected: The directory in domain_config_dir doesn't
        # exist
        mocked_isdir.return_value = False
        self.assertFalse(getattr(ldap, symptom)())
        # No Symptom Detected: domain specific drivers are enabled, domain
        # configurations from database are disabled, directory exists, and no
        # exceptions found.
        self.config_fixture.config(
            group='identity',
            domain_configurations_from_database=False)
        mocked_isdir.return_value = True
        # An empty directory should not raise this symptom
        self.assertFalse(getattr(ldap, symptom)())
        # Test again with a file inside the directory
        mocked_listdir.return_value = ['keystone.domains.conf']
        self.assertFalse(getattr(ldap, symptom)())
class SecurityComplianceDoctorTests(unit.TestCase):
    """Doctor checks for the ``[security_compliance]`` password options."""
    def test_minimum_password_age_greater_than_password_expires_days(self):
        # Symptom Detected: Minimum password age is greater than the password
        # expires days. Both values are positive integers greater than zero.
        self.config_fixture.config(group='security_compliance',
                                   minimum_password_age=2)
        self.config_fixture.config(group='security_compliance',
                                   password_expires_days=1)
        self.assertTrue(
            security_compliance.
            symptom_minimum_password_age_greater_than_expires_days())
    def test_minimum_password_age_equal_to_password_expires_days(self):
        # Symptom Detected: Minimum password age is equal to the password
        # expires days. Both values are positive integers greater than zero.
        self.config_fixture.config(group='security_compliance',
                                   minimum_password_age=1)
        self.config_fixture.config(group='security_compliance',
                                   password_expires_days=1)
        self.assertTrue(
            security_compliance.
            symptom_minimum_password_age_greater_than_expires_days())
    def test_minimum_password_age_less_than_password_expires_days(self):
        # No Symptom Detected: Minimum password age is less than password
        # expires days. Both values are positive integers greater than zero.
        self.config_fixture.config(group='security_compliance',
                                   minimum_password_age=1)
        self.config_fixture.config(group='security_compliance',
                                   password_expires_days=2)
        self.assertFalse(
            security_compliance.
            symptom_minimum_password_age_greater_than_expires_days())
    def test_minimum_password_age_and_password_expires_days_deactivated(self):
        # No Symptom Detected: when minimum_password_age's default value is 0
        # and password_expires_days' default value is None
        self.assertFalse(
            security_compliance.
            symptom_minimum_password_age_greater_than_expires_days())
    def test_invalid_password_regular_expression(self):
        # Symptom Detected: Regular expression is invalid
        # (the doubled '^' and '??=' make this deliberately uncompilable)
        self.config_fixture.config(
            group='security_compliance',
            password_regex='^^(??=.*\d)$')
        self.assertTrue(
            security_compliance.symptom_invalid_password_regular_expression())
    def test_valid_password_regular_expression(self):
        # No Symptom Detected: Regular expression is valid
        self.config_fixture.config(
            group='security_compliance',
            password_regex='^(?=.*\d)(?=.*[a-zA-Z]).{7,}$')
        self.assertFalse(
            security_compliance.symptom_invalid_password_regular_expression())
    def test_password_regular_expression_deactivated(self):
        # No Symptom Detected: Regular expression deactivated to None
        self.config_fixture.config(
            group='security_compliance',
            password_regex=None)
        self.assertFalse(
            security_compliance.symptom_invalid_password_regular_expression())
    def test_password_regular_expression_description_not_set(self):
        # Symptom Detected: Regular expression is set but description is not
        self.config_fixture.config(
            group='security_compliance',
            password_regex='^(?=.*\d)(?=.*[a-zA-Z]).{7,}$')
        self.config_fixture.config(
            group='security_compliance',
            password_regex_description=None)
        self.assertTrue(
            security_compliance.
            symptom_password_regular_expression_description_not_set())
    def test_password_regular_expression_description_set(self):
        # No Symptom Detected: Regular expression and description are set
        desc = '1 letter, 1 digit, and a minimum length of 7 is required'
        self.config_fixture.config(
            group='security_compliance',
            password_regex='^(?=.*\d)(?=.*[a-zA-Z]).{7,}$')
        self.config_fixture.config(
            group='security_compliance',
            password_regex_description=desc)
        self.assertFalse(
            security_compliance.
            symptom_password_regular_expression_description_not_set())
    def test_password_regular_expression_description_deactivated(self):
        # No Symptom Detected: Regular expression and description are
        # deactivated to None
        self.config_fixture.config(
            group='security_compliance', password_regex=None)
        self.config_fixture.config(
            group='security_compliance', password_regex_description=None)
        self.assertFalse(
            security_compliance.
            symptom_password_regular_expression_description_not_set())
class TokensDoctorTests(unit.TestCase):
    """Doctor checks for token provider vs. ``max_token_size`` sanity."""

    def test_unreasonable_max_token_size_raised(self):
        """Provider-inappropriate token sizes are flagged."""
        symptom = tokens.symptom_unreasonable_max_token_size
        # uuid tokens are exactly 32 characters long.
        self.config_fixture.config(group='token', provider='uuid')
        self.config_fixture.config(max_token_size=33)
        self.assertTrue(symptom())
        # fernet tokens must fit within 255 characters.
        self.config_fixture.config(group='token', provider='fernet')
        self.config_fixture.config(max_token_size=256)
        self.assertTrue(symptom())

    def test_unreasonable_max_token_size_not_raised(self):
        """Provider-appropriate token sizes raise no symptom."""
        symptom = tokens.symptom_unreasonable_max_token_size
        # uuid with the exact 32-character size is fine.
        self.config_fixture.config(group='token', provider='uuid')
        self.config_fixture.config(max_token_size=32)
        self.assertFalse(symptom())
        # fernet at the 255-character ceiling is fine.
        self.config_fixture.config(group='token', provider='fernet')
        self.config_fixture.config(max_token_size=255)
        self.assertFalse(symptom())
class TokenFernetDoctorTests(unit.TestCase):
    """Doctor checks for the Fernet token provider key repository.

    FIX: ``test_keys_in_Fernet_key_repository_not_raised`` previously
    asserted ``symptom_usability_of_Fernet_key_repository`` (a copy-paste
    slip), so the "keys present" symptom was never exercised on the
    negative path; it now asserts the symptom named by the test.
    """

    @mock.patch('keystone.cmd.doctor.tokens_fernet.utils')
    def test_usability_of_Fernet_key_repository_raised(self, mock_utils):
        # Symptom Detected: Fernet key repo is world readable
        self.config_fixture.config(group='token', provider='fernet')
        mock_utils.FernetUtils().validate_key_repository.return_value = False
        self.assertTrue(
            tokens_fernet.symptom_usability_of_Fernet_key_repository())

    @mock.patch('keystone.cmd.doctor.tokens_fernet.utils')
    def test_usability_of_Fernet_key_repository_not_raised(self, mock_utils):
        # No Symptom Detected: UUID is used instead of fernet
        self.config_fixture.config(group='token', provider='uuid')
        mock_utils.FernetUtils().validate_key_repository.return_value = False
        self.assertFalse(
            tokens_fernet.symptom_usability_of_Fernet_key_repository())
        # No Symptom Detected: configs set properly, key repo is not world
        # readable but is user readable
        self.config_fixture.config(group='token', provider='fernet')
        mock_utils.FernetUtils().validate_key_repository.return_value = True
        self.assertFalse(
            tokens_fernet.symptom_usability_of_Fernet_key_repository())

    @mock.patch('keystone.cmd.doctor.tokens_fernet.utils')
    def test_keys_in_Fernet_key_repository_raised(self, mock_utils):
        # Symptom Detected: Fernet key repository is empty
        self.config_fixture.config(group='token', provider='fernet')
        mock_utils.FernetUtils().load_keys.return_value = False
        self.assertTrue(
            tokens_fernet.symptom_keys_in_Fernet_key_repository())

    @mock.patch('keystone.cmd.doctor.tokens_fernet.utils')
    def test_keys_in_Fernet_key_repository_not_raised(self, mock_utils):
        # No Symptom Detected: UUID is used instead of fernet
        self.config_fixture.config(group='token', provider='uuid')
        mock_utils.FernetUtils().load_keys.return_value = True
        self.assertFalse(
            tokens_fernet.symptom_keys_in_Fernet_key_repository())
        # No Symptom Detected: configs set properly, key repo has been
        # populated with keys
        self.config_fixture.config(group='token', provider='fernet')
        mock_utils.FernetUtils().load_keys.return_value = True
        self.assertFalse(
            tokens_fernet.symptom_keys_in_Fernet_key_repository())