| prompt (large_string, lengths 70 – 991k) | completion (large_string, lengths 0 – 1.02k) |
---|---|
<|file_name|>475.cc<|end_file_name|><|fim▁begin|>class Solution {
public:
int findRadius(vector<int>& houses, vector<int>& heaters) {
sort(houses.begin(), houses.end());
sort(heaters.begin(), heaters.end());
auto radius = 0;
auto idx = 0;
for (auto house : houses) {
auto current = abs(house - heaters[idx]);
if (current <= radius) continue;
for (auto n = idx + 1; n < heaters.size(); ++n) {
if (abs(house - heaters[n]) <= current) {
current = abs(house - heaters[n]);
idx = n;
} else {
break;<|fim▁hole|> radius = max(radius, current);
}
return radius;
}
};<|fim▁end|> | }
} |
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import itertools
import os
import re
from importlib import import_module
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import REDIRECT_FIELD_NAME, SESSION_KEY
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, SetPasswordForm,
)
from django.contrib.auth.models import User
from django.contrib.auth.tests.custom_user import CustomUser
from django.contrib.auth.views import login as login_view, redirect_to_login
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.core.urlresolvers import NoReverseMatch, reverse, reverse_lazy
from django.db import connection
from django.http import HttpRequest, QueryDict
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import (
TestCase, ignore_warnings, modify_settings, override_settings,
)
from django.test.utils import patch_logger
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
from django.utils.http import urlquote
from django.utils.six.moves.urllib.parse import ParseResult, urlparse
from django.utils.translation import LANGUAGE_SESSION_KEY
from .models import UUIDUser
from .settings import AUTH_TEMPLATES
@override_settings(
LANGUAGES=[
('en', 'English'),
],
LANGUAGE_CODE='en',
TEMPLATES=AUTH_TEMPLATES,
USE_TZ=False,
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='auth_tests.urls',
)
class AuthViewsTestCase(TestCase):
"""
Helper base class for all the following test cases.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='testclient',
first_name='Test', last_name='Client', email='[email protected]', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u2 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='inactive',
first_name='Inactive', last_name='User', email='[email protected]', is_staff=False, is_active=False,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u3 = User.objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='staff',
first_name='Staff', last_name='Member', email='[email protected]', is_staff=True, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u4 = User.objects.create(
password='', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
username='empty_password', first_name='Empty', last_name='Password', email='[email protected]',
is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u5 = User.objects.create(
password='$', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
username='unmanageable_password', first_name='Unmanageable', last_name='Password',
email='[email protected]', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
cls.u6 = User.objects.create(
password='foo$bar', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
username='unknown_password', first_name='Unknown', last_name='Password',
email='[email protected]', is_staff=False, is_active=True,
date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
)
def login(self, username='testclient', password='password'):
response = self.client.post('/login/', {
'username': username,
'password': password,
})
self.assertIn(SESSION_KEY, self.client.session)
return response
def logout(self):
response = self.client.get('/admin/logout/')
self.assertEqual(response.status_code, 200)
self.assertNotIn(SESSION_KEY, self.client.session)
def assertFormError(self, response, error):
"""Assert that error is found in response.context['form'] errors"""
form_errors = list(itertools.chain(*response.context['form'].errors.values()))
self.assertIn(force_text(error), form_errors)
def assertURLEqual(self, url, expected, parse_qs=False):
"""
Given two URLs, make sure all their components (the ones given by
urlparse) are equal, only comparing components that are present in both
URLs.
If `parse_qs` is True, then the querystrings are parsed with QueryDict.
This is useful if you don't want the order of parameters to matter.
Otherwise, the query strings are compared as-is.
"""
fields = ParseResult._fields
for attr, x, y in zip(fields, urlparse(url), urlparse(expected)):
if parse_qs and attr == 'query':
x, y = QueryDict(x), QueryDict(y)
if x and y and x != y:
self.fail("%r != %r (%s doesn't match)" % (url, expected, attr))
@override_settings(ROOT_URLCONF='django.contrib.auth.urls')
class AuthViewNamedURLTests(AuthViewsTestCase):
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb64': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in expected_named_urls:
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get('/password_reset/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# optional multipart text/html email has been added. Make sure original,
# default functionality is 100% the same
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
"""
extra_email_context should be available in the email template context.
"""
response = self.client.post(
'/password_reset_extra_email_context/',
{'email': '[email protected]'},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
def test_html_mail_template(self):
"""
A multipart email with text/plain and text/html is sent
if the html_email_template parameter is passed to the view
"""
response = self.client.post('/password_reset/html_email_template/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
self.assertNotIn('<html>', message.get_payload(0).get_payload())
self.assertIn('<html>', message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
"Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
response = self.client.post('/password_reset_from_email/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("[email protected]", mail.outbox[0].from_email)
@ignore_warnings(category=RemovedInDjango110Warning)
@override_settings(ALLOWED_HOSTS=['adminsite.com'])
def test_admin_reset(self):
"If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
response = self.client.post('/admin_password_reset/',
{'email': '[email protected]'},
HTTP_HOST='adminsite.com'
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://adminsite.com", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
response = self.client.post(
'/password_reset/',
{'email': '[email protected]'},
HTTP_HOST='www.example:[email protected]'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
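# To illustrate the parsing described above, using the header from this test:
# urlparse('http://www.example:[email protected]/') yields username
# 'www.example', password 'dr.frankenstein' and hostname 'evil.tld', so a
# reset link built from that HTTP_HOST would point at the attacker's domain.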
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
response = self.client.post(
'/admin_password_reset/',
{'email': '[email protected]'},
HTTP_HOST='www.example:[email protected]'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
def _test_confirm_start(self):
# Start by creating the email
self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_user(self):
# Ensure that we get a 200 response for a non-existent user, not a 404
response = self.client.get('/reset/123456/1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_overflow_user(self):
# Ensure that we get a 200 response for a base36 user id that overflows int
response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying
# to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': ' anewpassword',
})
# Check the password has not been changed
u = User.objects.get(email='[email protected]')
self.assertFalse(u.check_password("anewpassword"))
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
# Check the password has been changed
u = User.objects.get(email='[email protected]')
self.assertTrue(u.check_password("anewpassword"))
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'x'})
self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
def test_reset_redirect_default(self):
response = self.client.post('/password_reset/',
{'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/done/')
def test_reset_custom_redirect(self):
response = self.client.post('/password_reset/custom_redirect/',
{'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_reset_custom_redirect_named(self):
response = self.client.post('/password_reset/custom_redirect/named/',
{'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
def test_confirm_redirect_default(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/reset/done/')
def test_confirm_redirect_custom(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/custom/')
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_confirm_redirect_custom_named(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/custom/named/')
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
def test_confirm_display_user_from_form(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# #16919 -- The ``password_reset_confirm`` view should pass the user
# object to the ``SetPasswordForm``, even on GET requests.
# For this test, we render ``{{ form.user }}`` in the template
# ``registration/password_reset_confirm.html`` so that we can test this.
username = User.objects.get(email='[email protected]').username
self.assertContains(response, "Hello, %s." % username)
# However, the view should NOT pass any user object on a form if the
# password reset link was invalid.
response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertContains(response, "Hello, .")
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
user_email = '[email protected]'
@classmethod
def setUpTestData(cls):
cls.u1 = CustomUser.custom_objects.create(
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), email='[email protected]', is_active=True,
is_admin=False, date_of_birth=datetime.date(1976, 11, 8)
)
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': self.user_email})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
# then submit a new password
response = self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': 'anewpassword',
})
self.assertRedirects(response, '/reset/done/')
@override_settings(AUTH_USER_MODEL='auth.UUIDUser')
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
def _test_confirm_start(self):
# instead of fixture
UUIDUser.objects.create_user(
email=self.user_email,
username='foo',
password='foo',
)
return super(UUIDUserPasswordResetTest, self)._test_confirm_start()
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password,
})
self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
})
def logout(self):
self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
})
self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
def test_password_change_succeeds(self):
self.login()
self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.fail_login()
self.login(password='password1')
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_change/done/')
@override_settings(LOGIN_URL='/login/')
def test_password_change_done_fails(self):
response = self.client.get('/password_change/done/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/?next=/password_change/done/')
<|fim▁hole|> def test_password_change_redirect_default(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_change/done/')
def test_password_change_redirect_custom(self):
self.login()
response = self.client.post('/password_change/custom/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_password_change_redirect_custom_named(self):
self.login()
response = self.client.post('/password_change/custom/named/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
@modify_settings(MIDDLEWARE_CLASSES={
'append': 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
})
class SessionAuthenticationTests(AuthViewsTestCase):
def test_user_password_change_updates_session(self):
"""
#21649 - Ensure contrib.auth.views.password_change updates the user's
session auth hash after a password change so the session isn't logged out.
"""
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
# if the hash isn't updated, retrieving the redirection page will fail.
self.assertRedirects(response, '/password_change/done/')
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('login'))
self.assertEqual(response.status_code, 200)
if apps.is_installed('django.contrib.sites'):
Site = apps.get_model('sites.Site')
site = Site.objects.get_current()
self.assertEqual(response.context['site'], site)
self.assertEqual(response.context['site_name'], site.name)
else:
self.assertIsInstance(response.context['site'], RequestSite)
self.assertIsInstance(response.context['form'], AuthenticationForm)
def test_security_check(self, password='password'):
login_url = reverse('login')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'http:///example.com',
'https://example.com',
'ftp://exampel.com',
'///example.com',
'//example.com',
'javascript:alert("XSS")'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertNotIn(bad_url, response.url,
"%s should be blocked" % bad_url)
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
def test_login_form_contains_request(self):
# 15198
self.client.post('/custom_requestauth_login/', {
'username': 'testclient',
'password': 'password',
}, follow=True)
# the custom authentication form used by this login asserts
# that a request is passed to the form successfully.
def test_login_csrf_rotate(self, password='password'):
"""
Makes sure that a login rotates the currently-used CSRF token.
"""
# Do a GET to establish a CSRF token
# TestClient isn't used here as we're testing middleware, essentially.
req = HttpRequest()
CsrfViewMiddleware().process_view(req, login_view, (), {})
# get_token() triggers CSRF token inclusion in the response
get_token(req)
resp = login_view(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
token1 = csrf_cookie.coded_value
# Prepare the POST request
req = HttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
req.method = "POST"
req.POST = {'username': 'testclient', 'password': password, 'csrfmiddlewaretoken': token1}
# Use POST request to log in
SessionMiddleware().process_request(req)
CsrfViewMiddleware().process_view(req, login_view, (), {})
req.META["SERVER_NAME"] = "testserver" # Required to have redirect work in login view
req.META["SERVER_PORT"] = 80
resp = login_view(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
token2 = csrf_cookie.coded_value
# Check the CSRF token switched
self.assertNotEqual(token1, token2)
def test_session_key_flushed_on_login(self):
"""
To avoid reusing another user's session, ensure a new, empty session is
created if the existing session corresponds to a different authenticated
user.
"""
self.login()
original_session_key = self.client.session.session_key
self.login(username='staff')
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_session_key_flushed_on_login_after_password_change(self):
"""
As above, but same user logging in after a password change.
"""
self.login()
original_session_key = self.client.session.session_key
# If no password change, session key should not be flushed.
self.login()
self.assertEqual(original_session_key, self.client.session.session_key)
user = User.objects.get(username='testclient')
user.set_password('foobar')
user.save()
self.login(password='foobar')
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_session_without_hash_session_key(self):
"""
Session without django.contrib.auth.HASH_SESSION_KEY should login
without an exception.
"""
user = User.objects.get(username='testclient')
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[SESSION_KEY] = user.id
session.save()
original_session_key = session.session_key
self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
self.login()
self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginURLSettings(AuthViewsTestCase):
"""Tests for settings.LOGIN_URL."""
def assertLoginURLEquals(self, url, parse_qs=False):
response = self.client.get('/login_required/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, url, parse_qs=parse_qs)
@override_settings(LOGIN_URL='/login/')
def test_standard_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
@override_settings(LOGIN_URL='login')
def test_named_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
@override_settings(LOGIN_URL='http://remote.example.com/login')
def test_remote_login_url(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'http://remote.example.com/login?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL='https:///login/')
def test_https_login_url(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'https:///login/?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL='/login/?pretty=1')
def test_login_url_with_querystring(self):
self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/', parse_qs=True)
@override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
def test_remote_login_url_with_next_querystring(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'http://remote.example.com/login/?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_lazy_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
class LoginRedirectUrlTest(AuthViewsTestCase):
"""Tests for settings.LOGIN_REDIRECT_URL."""
def assertLoginRedirectURLEqual(self, url):
response = self.login()
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, url)
def test_default(self):
self.assertLoginRedirectURLEqual('/accounts/profile/')
@override_settings(LOGIN_REDIRECT_URL='/custom/')
def test_custom(self):
self.assertLoginRedirectURLEqual('/custom/')
@override_settings(LOGIN_REDIRECT_URL='password_reset')
def test_named(self):
self.assertLoginRedirectURLEqual('/password_reset/')
@override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
def test_remote(self):
self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
class RedirectToLoginTests(AuthViewsTestCase):
"""Tests for the redirect_to_login view"""
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_redirect_to_login_with_lazy(self):
login_redirect_response = redirect_to_login(next='/else/where/')
expected = '/login/?next=/else/where/'
self.assertEqual(expected, login_redirect_response.url)
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_redirect_to_login_with_lazy_and_unicode(self):
login_redirect_response = redirect_to_login(next='/else/where/झ/')
expected = '/login/?next=/else/where/%E0%A4%9D/'
self.assertEqual(expected, login_redirect_response.url)
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertContains(response, 'Logged out')
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.get('/logout/')
self.assertIn('site', response.context)
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
response = self.client.get('/logout/next_page/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/')
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/')
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
self.confirm_logged_out()
def test_logout_with_named_redirect(self):
"Logout resolves names or URLs passed as next_page."
self.login()
response = self.client.get('/logout/next_page/named/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
self.confirm_logged_out()
def test_security_check(self, password='password'):
logout_url = reverse('logout')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'http:///example.com',
'https://example.com',
'ftp://exampel.com',
'///example.com',
'//example.com',
'javascript:alert("XSS")'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
self.login()
response = self.client.get(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertNotIn(bad_url, response.url,
"%s should be blocked" % bad_url)
self.confirm_logged_out()
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
self.login()
response = self.client.get(safe_url)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
self.confirm_logged_out()
def test_logout_preserve_language(self):
"""Check that language stored in session is preserved after logout"""
# Create a new session with language
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[LANGUAGE_SESSION_KEY] = 'pl'
session.save()
self.client.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
self.client.get('/logout/')
self.assertEqual(self.client.session[LANGUAGE_SESSION_KEY], 'pl')
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@modify_settings(MIDDLEWARE_CLASSES={
'append': 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
})
@override_settings(
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='auth_tests.urls_admin',
)
class ChangelistTests(AuthViewsTestCase):
def setUp(self):
# Make me a superuser before logging in.
User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
self.login()
self.admin = User.objects.get(pk=self.u1.pk)
def get_user_data(self, user):
return {
'username': user.username,
'password': user.password,
'email': user.email,
'is_active': user.is_active,
'is_staff': user.is_staff,
'is_superuser': user.is_superuser,
'last_login_0': user.last_login.strftime('%Y-%m-%d'),
'last_login_1': user.last_login.strftime('%H:%M:%S'),
'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
'first_name': user.first_name,
'last_name': user.last_name,
}
# #20078 - users shouldn't be allowed to guess password hashes via
# repeated password__startswith queries.
def test_changelist_disallows_password_lookups(self):
# A lookup that tries to filter on password isn't OK
with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as logger_calls:
response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$')
self.assertEqual(response.status_code, 400)
self.assertEqual(len(logger_calls), 1)
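# To illustrate the attack this guards against (queries hypothetical): if the
# lookup were allowed, an attacker could recover a hash one character at a
# time, e.g. trying ?password__startswith=sha1$0, ?password__startswith=sha1$1,
# ... and watching which filter yields a non-empty changelist.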
def test_user_change_email(self):
data = self.get_user_data(self.admin)
data['email'] = 'new_' + data['email']
response = self.client.post(
reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
data
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
row = LogEntry.objects.latest('id')
self.assertEqual(row.change_message, 'Changed email.')
def test_user_not_change(self):
response = self.client.post(
reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
self.get_user_data(self.admin)
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
row = LogEntry.objects.latest('id')
self.assertEqual(row.change_message, 'No fields changed.')
def test_user_change_password(self):
user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,))
password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
response = self.client.get(user_change_url)
# Test the link inside password field help_text.
rel_link = re.search(
r'you can change the password using <a href="([^"]*)">this form</a>',
force_text(response.content)
).groups()[0]
self.assertEqual(
os.path.normpath(user_change_url + rel_link),
os.path.normpath(password_change_url)
)
response = self.client.post(
password_change_url,
{
'password1': 'password1',
'password2': 'password1',
}
)
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest('id')
self.assertEqual(row.change_message, 'Changed password.')
self.logout()
self.login(password='password1')
def test_user_change_different_user_password(self):
u = User.objects.get(email='[email protected]')
response = self.client.post(
reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)),
{
'password1': 'password1',
'password2': 'password1',
}
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,)))
row = LogEntry.objects.latest('id')
self.assertEqual(row.user_id, self.admin.pk)
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.change_message, 'Changed password.')
def test_password_change_bad_url(self):
response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',)))
self.assertEqual(response.status_code, 404)
@override_settings(
AUTH_USER_MODEL='auth.UUIDUser',
ROOT_URLCONF='auth_tests.urls_custom_user_admin',
)
class UUIDUserTests(TestCase):
def test_admin_password_change(self):
u = UUIDUser.objects.create_superuser(username='uuid', email='[email protected]', password='test')
self.assertTrue(self.client.login(username='uuid', password='test'))
user_change_url = reverse('custom_user_admin:auth_uuiduser_change', args=(u.pk,))
response = self.client.get(user_change_url)
self.assertEqual(response.status_code, 200)
password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,))
response = self.client.get(password_change_url)
self.assertEqual(response.status_code, 200)
# A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
with connection.constraint_checks_disabled():
response = self.client.post(password_change_url, {
'password1': 'password1',
'password2': 'password1',
})
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest('id')
self.assertEqual(row.user_id, 1) # hardcoded in CustomUserAdmin.log_change()
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.change_message, 'Changed password.')<|fim▁end|> | |
<|file_name|>CMAny.hpp<|end_file_name|><|fim▁begin|>/*
* The Apache Software License, Version 1.1
*
* Copyright (c) 2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions<|fim▁hole|> * 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact apache\@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation, and was
* originally based on software copyright (c) 2001, International
* Business Machines, Inc., http://www.ibm.com . For more information
* on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
/*
* $Log: CMAny.hpp,v $
* Revision 1.2 2001/05/11 13:27:14 tng
* Copyright update.
*
* Revision 1.1 2001/02/27 14:48:46 tng
* Schema: Add CMAny and ContentLeafNameTypeVector, by Pei Yong Zhang
*
*/
#if !defined(CMANY_HPP)
#define CMANY_HPP
#include <util/XercesDefs.hpp>
#include <validators/common/CMNode.hpp>
class CMStateSet;
class CMAny : public CMNode
{
public :
// -----------------------------------------------------------------------
// Constructors
// -----------------------------------------------------------------------
CMAny
(
const ContentSpecNode::NodeTypes type
, const unsigned int URI
, const unsigned int position
);
~CMAny();
// -----------------------------------------------------------------------
// Getter methods
// -----------------------------------------------------------------------
unsigned int getURI() const;
unsigned int getPosition() const;
// -----------------------------------------------------------------------
// Setter methods
// -----------------------------------------------------------------------
void setPosition(const unsigned int newPosition);
// -----------------------------------------------------------------------
// Implementation of the public CMNode virtual interface
// -----------------------------------------------------------------------
bool isNullable() const;
protected :
// -----------------------------------------------------------------------
// Implementation of the protected CMNode virtual interface
// -----------------------------------------------------------------------
void calcFirstPos(CMStateSet& toSet) const;
void calcLastPos(CMStateSet& toSet) const;
private :
// -----------------------------------------------------------------------
// Private data members
//
// fURI;
// URI of the any content model. This value is set if the type is
// of the following:
// XMLContentSpec.CONTENTSPECNODE_ANY,
// XMLContentSpec.CONTENTSPECNODE_ANY_OTHER.
//
// fPosition
// Part of the algorithm to convert a regex directly to a DFA
// numbers each leaf sequentially. If it's -1, that means it's an
// epsilon node. Zero and greater are non-epsilon positions.
// -----------------------------------------------------------------------
unsigned int fURI;
unsigned int fPosition;
};
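//
// Illustration of the numbering scheme described above (content model chosen
// purely for exposition): in a spec such as (a|b),c the leaves would be
// numbered a=0, b=1, c=2 in order of appearance, and those positions feed the
// first/last/follow-position sets of the regex-to-DFA construction, while
// nodes matching only the empty string keep fPosition == -1.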
#endif<|fim▁end|> | * are met:
* |
<|file_name|>gaya.js<|end_file_name|><|fim▁begin|>/*
* Sidebar toggle function
*/
(function(document) {
var toggle = document.querySelector('.sidebar-toggle');
var sidebar = document.querySelector('#sidebar');
var checkbox = document.querySelector('#sidebar-checkbox');
document.addEventListener('click', function(e) {
var target = e.target;
if(!checkbox.checked ||
sidebar.contains(target) ||
(target === checkbox || target === toggle)) return;
checkbox.checked = false;
}, false);
})(document);
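// In effect: a click anywhere outside the open sidebar -- unless it lands on
// the toggle or its backing checkbox -- unchecks #sidebar-checkbox, which the
// theme's CSS uses to slide the sidebar closed (the checkbox-hack pattern).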
/*global jQuery */
/*jshint browser:true */
/*!
* FitVids 1.1
*
* Copyright 2013, Chris Coyier - http://css-tricks.com + Dave Rupert - http://daverupert.com
* Credit to Thierry Koblentz - http://www.alistapart.com/articles/creating-intrinsic-ratios-for-video/
* Released under the WTFPL license - http://sam.zoy.org/wtfpl/
*
*/
;(function( $ ){
'use strict';
$.fn.fitVids = function( options ) {
var settings = {
customSelector: null,
ignore: null
};
if(!document.getElementById('fit-vids-style')) {
// appendStyles: https://github.com/toddmotto/fluidvids/blob/master/dist/fluidvids.js
var head = document.head || document.getElementsByTagName('head')[0];
var css = '.fluid-width-video-wrapper{width:100%;position:relative;padding:0;}.fluid-width-video-wrapper iframe,.fluid-width-video-wrapper object,.fluid-width-video-wrapper embed {position:absolute;top:0;left:0;width:100%;height:100%;}';
var div = document.createElement("div");
div.innerHTML = '<p>x</p><style id="fit-vids-style">' + css + '</style>';
head.appendChild(div.childNodes[1]);
}
if ( options ) {
$.extend( settings, options );
}
return this.each(function(){
var selectors = [
'iframe[src*="player.vimeo.com"]',
'iframe[src*="youtube.com"]',
'iframe[src*="youtube-nocookie.com"]',
'iframe[src*="kickstarter.com"][src*="video.html"]',
'object',
'embed'
];
if (settings.customSelector) {
selectors.push(settings.customSelector);
}
var ignoreList = '.fitvidsignore';
if(settings.ignore) {
ignoreList = ignoreList + ', ' + settings.ignore;
}
var $allVideos = $(this).find(selectors.join(','));
$allVideos = $allVideos.not('object object'); // SwfObj conflict patch
$allVideos = $allVideos.not(ignoreList); // Disable FitVids on this video.
$allVideos.each(function(){
var $this = $(this);
if($this.parents(ignoreList).length > 0) {
return; // Disable FitVids on this video.
}
if (this.tagName.toLowerCase() === 'embed' && $this.parent('object').length || $this.parent('.fluid-width-video-wrapper').length) { return; }
if ((!$this.css('height') && !$this.css('width')) && (isNaN($this.attr('height')) || isNaN($this.attr('width'))))
{
$this.attr('height', 9);
$this.attr('width', 16);
}
var height = ( this.tagName.toLowerCase() === 'object' || ($this.attr('height') && !isNaN(parseInt($this.attr('height'), 10))) ) ? parseInt($this.attr('height'), 10) : $this.height(),
width = !isNaN(parseInt($this.attr('width'), 10)) ? parseInt($this.attr('width'), 10) : $this.width(),
aspectRatio = height / width;
if(!$this.attr('id')){
var videoID = 'fitvid' + Math.floor(Math.random()*999999);
$this.attr('id', videoID);
}
$this.wrap('<div class="fluid-width-video-wrapper"></div>').parent('.fluid-width-video-wrapper').css('padding-top', (aspectRatio * 100)+'%');
$this.removeAttr('height').removeAttr('width');
});
});
};
// Works with either jQuery or Zepto
})( window.jQuery || window.Zepto );
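/*
 * A worked example of the intrinsic-ratio wrapper above: a 640x360 iframe
 * gives aspectRatio = 360 / 640 = 0.5625, so its wrapper gets
 * padding-top: 56.25% and the absolutely positioned iframe simply fills
 * that fluid 16:9 box at any page width.
 */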
/*
* Show disqus comments
*/
jQuery(document).ready(function() {
jQuery(".post").fitVids();
// Load discus comment
function initDisqusComments(){
if(config.disqus_shortname != '' && config.disqus_shortname != null && config.disqus_shortname != undefined) {
var disqus_shortname = config.disqus_shortname;
(function() {
var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true;
dsq.src = '//' + disqus_shortname + '.disqus.com/embed.js';
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
})();
}else {
alert("Please check Disqus short name configuration on your _config.yml");
}
}
initDisqusComments();
/*$('.load-view').click(function(){
initDisqusComments();
$(this).fadeOut(200);
});*/
});
/*
* Scroll to top button
*/
jQuery(document).ready(function($){
// browser window scroll (in pixels) after which the "back to top" link is shown
var offset = 300,
//browser window scroll (in pixels) after which the "back to top" link opacity is reduced
offset_opacity = 1200,
//duration of the top scrolling animation (in ms)
scroll_top_duration = 700,
//grab the "back to top" link
$back_to_top = $('.wc-top');
//hide or show the "back to top" link
$(window).scroll(function(){
( $(this).scrollTop() > offset ) ? $back_to_top.addClass('wc-is-visible') : $back_to_top.removeClass('wc-is-visible wc-fade-out');
if( $(this).scrollTop() > offset_opacity ) {
$back_to_top.addClass('wc-fade-out');
}
});
//smooth scroll to top
$back_to_top.on('click', function(event){<|fim▁hole|> event.preventDefault();
$('body,html').animate({
scrollTop: 0 ,
}, scroll_top_duration
);
});
});<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import glob
import json
from os.path import basename, dirname, realpath
from BeautifulSoup import BeautifulSoup
from flask import Response, request, render_template, send_from_directory
from annotaria import app
from store import Store
app.config.from_object(__name__)
# Load default config and override config from an environment variable
app.config.update(dict(
SPARQL_ENDPOINT="http://localhost:3030/annotaria",
DEBUG=True
))
app.config.from_envvar('ANNOTARIA_SETTINGS', silent=True)
# We define our own jsonify rather than using flask.jsonify because we wish
# to jsonify arbitrary objects (e.g. index returns a list) rather than kwargs.
def jsonify(obj, *args, **kwargs):
res = json.dumps(obj, indent=None if request.is_xhr else 2)
return Response(res, mimetype='application/json', *args, **kwargs)
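# Usage sketch (hypothetical route, for illustration): unlike flask.jsonify,
# the helper above accepts a bare list as the top-level object:
#
#   @app.route('/ping')
#   def ping():
#       return jsonify(['pong', 'pong'])  # serializes a top-level JSON array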
# html = lxml.html.document_fromstring(html)
# for element, attribute, link, pos in html.iterlinks():
# if attribute == "src":
# new_src = 'articles/images/' + basename(link)
# element.set('src', new_src)
# print lxml.html.tostring(html)
def parse_article(html):
soup = BeautifulSoup(html)
# fix img "src" attribute
for img in soup.findAll('img'):
img['src'] = 'articles/images/' + basename(img['src'])
return {
'title': soup.title.string,
'body': str(soup.body)
}
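# Example of the transformation above, assuming a minimal input document:
#   parse_article('<html><head><title>T</title></head>'
#                 '<body><img src="/tmp/a.png" /></body></html>')
# returns {'title': u'T', 'body': '<body><img src="articles/images/a.png" /></body>'}
# (the exact attribute serialization depends on the BeautifulSoup 3 version).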
# ## ROUTING ###
# root
@app.route('/')
def root():
return render_template('index.html')
# retrieve articles list
@app.route('/articles', methods=['GET'])
def get_articles():
path = dirname(realpath(__file__))
ret = []
for f in sorted(glob.glob(path + "/articles/*.html")):
if basename(f) != "index.html": # skip index
ret.append({
'href': basename(f),
'title': basename(f)
})
return jsonify(ret)
# retrieve a single article
@app.route('/article/<file_name>', methods=['GET'])
def get_article(file_name):
try:
path = dirname(realpath(__file__))
with open(path + '/articles/' + file_name, 'r') as content_file:
ret = parse_article(content_file.read())
except Exception, e:
raise e
return jsonify(ret)
# proxy article images
@app.route('/articles/images/<file_name>', methods=['GET'])
def get_article_image(file_name):
try:
path = dirname(realpath(__file__))
return send_from_directory(path + '/articles/images/', file_name)
except Exception, e:
raise e
# get all annotations for a single article
@app.route('/annotations/<article>', methods=['GET'])
def get_annotations(article):
store = Store(app.config['SPARQL_ENDPOINT'])
return jsonify(store.query_article(article))
# store one or more annotations in the triple store
@app.route('/annotations/', methods=['POST'])
def set_annotations():
store = Store(app.config['SPARQL_ENDPOINT'])
annotations = json.loads(request.form['data'])
return jsonify(store.store_annotations(annotations))
# retrieve all persons (authors) from the triple store
@app.route('/person', methods=['GET'])
def get_person():
store = Store(app.config['SPARQL_ENDPOINT'])<|fim▁hole|>
# store a person (author) in the triple store
@app.route('/person/', methods=['POST'])
def set_person():
store = Store(app.config['SPARQL_ENDPOINT'])
return store.insert_author(json.loads(request.form['data']))
# retrieve all organizations from the triple store
@app.route('/organization', methods=['GET'])
def get_organization():
store = Store(app.config['SPARQL_ENDPOINT'])
return jsonify(store.query_organization())
# store an organization in the triple store
@app.route('/organization/', methods=['POST'])
def set_organization():
store = Store(app.config['SPARQL_ENDPOINT'])
return store.insert_organization(json.loads(request.form['data']))
# retrieve all places from the triple store
@app.route('/place', methods=['GET'])
def get_place():
store = Store(app.config['SPARQL_ENDPOINT'])
return jsonify(store.query_place())
# store a place in the triple store
@app.route('/place/', methods=['POST'])
def set_place():
store = Store(app.config['SPARQL_ENDPOINT'])
return store.insert_place(json.loads(request.form['data']))
# retrieve all disease concepts from the triple store
@app.route('/disease', methods=['GET'])
def get_disease():
store = Store(app.config['SPARQL_ENDPOINT'])
return jsonify(store.query_concept())
# store a disease concept in the triple store
@app.route('/disease/', methods=['POST'])
def set_disease():
store = Store(app.config['SPARQL_ENDPOINT'])
return store.insert_concept(json.loads(request.form['data']))
# retrieve all subject concepts from the triple store
@app.route('/subject', methods=['GET'])
def get_subject():
store = Store(app.config['SPARQL_ENDPOINT'])
return jsonify(store.query_concept())
# store a subject concept in the triple store
@app.route('/subject/', methods=['POST'])
def set_subject():
store = Store(app.config['SPARQL_ENDPOINT'])
return store.insert_concept(json.loads(request.form['data']))
# retrieve all DBpedia concepts from the triple store
@app.route('/dbpedia', methods=['GET'])
def get_dbpedia():
store = Store(app.config['SPARQL_ENDPOINT'])
return jsonify(store.query_concept())
# store a DBpedia concept in the triple store
@app.route('/dbpedia/', methods=['POST'])
def set_dbpedia():
store = Store(app.config['SPARQL_ENDPOINT'])
return store.insert_concept(json.loads(request.form['data']))<|fim▁end|> | return jsonify(store.query_authors()) |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export * from './OutlineFilter'; |
<|file_name|>Ed2SheetLabelProvider.java<|end_file_name|><|fim▁begin|>package es.ucm.fdi.emf.model.ed2.diagram.sheet;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.jface.viewers.BaseLabelProvider;
import org.eclipse.jface.viewers.ILabelProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.swt.graphics.Image;
import es.ucm.fdi.emf.model.ed2.diagram.navigator.Ed2NavigatorGroup;
import es.ucm.fdi.emf.model.ed2.diagram.part.Ed2VisualIDRegistry;
import es.ucm.fdi.emf.model.ed2.diagram.providers.Ed2ElementTypes;
/**
* @generated
*/
public class Ed2SheetLabelProvider extends BaseLabelProvider implements
ILabelProvider {
/**
* @generated
*/
public String getText(Object element) {
element = unwrap(element);
if (element instanceof Ed2NavigatorGroup) {
return ((Ed2NavigatorGroup) element).getGroupName();
}
IElementType etype = getElementType(getView(element));
return etype == null ? "" : etype.getDisplayName();
}
/**
* @generated
*/
public Image getImage(Object element) {
IElementType etype = getElementType(getView(unwrap(element)));
return etype == null ? null : Ed2ElementTypes.getImage(etype);
}
/**
* @generated
*/
private Object unwrap(Object element) {
if (element instanceof IStructuredSelection) {
return ((IStructuredSelection) element).getFirstElement();
}
return element;
}
/**
* @generated
*/
private View getView(Object element) {
if (element instanceof View) {
return (View) element;
}
if (element instanceof IAdaptable) {
return (View) ((IAdaptable) element).getAdapter(View.class);
}
<|fim▁hole|>
/**
* @generated
*/
private IElementType getElementType(View view) {
// For intermediate views climb up the containment hierarchy to find the one associated with an element type.
while (view != null) {
int vid = Ed2VisualIDRegistry.getVisualID(view);
IElementType etype = Ed2ElementTypes.getElementType(vid);
if (etype != null) {
return etype;
}
view = view.eContainer() instanceof View ? (View) view.eContainer()
: null;
}
return null;
}
}<|fim▁end|> | return null;
}
|
<|file_name|>JDBCAddressDAO.java<|end_file_name|><|fim▁begin|>/* JDBCAddressDAO.java
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Copyright Ⓒ 2014-2015 Universiteit Gent
*
* This file is part of the Degage Web Application
*
* Corresponding author (see also AUTHORS.txt)
*
* Kris Coolsaet
* Department of Applied Mathematics, Computer Science and Statistics
* Ghent University
* Krijgslaan 281-S9
* B-9000 GENT Belgium
*
* The Degage Web Application is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The Degage Web Application is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with the Degage Web Application (file LICENSE.txt in the
* distribution). If not, see <http://www.gnu.org/licenses/>.
*/
package be.ugent.degage.db.jdbc;
import be.ugent.degage.db.DataAccessException;
import be.ugent.degage.db.dao.AddressDAO;
import be.ugent.degage.db.models.Address;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* JDBC implementation of {@link AddressDAO}
*/
class JDBCAddressDAO extends AbstractDAO implements AddressDAO {
public JDBCAddressDAO(JDBCDataAccessContext context){
super (context);
}
// TODO: avoid these
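    // Maps the address_* columns of the current row to an Address; returns null when address_id is NULL (e.g. after an outer join).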
static Address populateAddress(ResultSet rs) throws SQLException {
if(rs.getObject("address_id") == null)
return null;
else
return new Address(
rs.getInt("address_id"),
rs.getString("address_country"),
rs.getString("address_zipcode"),
rs.getString("address_city"),<|fim▁hole|> rs.getFloat("address_longitude")
);
}
// TODO: avoid these
static Address populateAddress(ResultSet rs, String tableName) throws SQLException {
if(rs.getObject(tableName + ".address_id") == null)
return null;
else
return new Address(
rs.getInt(tableName + ".address_id"),
rs.getString(tableName + ".address_country"),
rs.getString(tableName + ".address_zipcode"),
rs.getString(tableName + ".address_city"),
rs.getString(tableName + ".address_street"),
rs.getString(tableName + ".address_number"),
rs.getFloat(tableName + ".address_latitude"),
rs.getFloat(tableName + ".address_longitude")
);
}
public static final String ADDRESS_FIELDS =
"address_id, address_city, address_zipcode, address_street, address_number, address_country, address_latitude, address_longitude ";
private LazyStatement getAddressStatement = new LazyStatement(
"SELECT " + ADDRESS_FIELDS + "FROM addresses WHERE address_id = ?");
@Override
public Address getAddress(int id) throws DataAccessException {
try {
PreparedStatement ps = getAddressStatement.value(); // reused so should not be auto-closed
ps.setInt(1, id);
try (ResultSet rs = ps.executeQuery()) {
if(rs.next()) {
return populateAddress(rs);
} else
return null;
}
} catch (SQLException ex) {
throw new DataAccessException("Could not fetch address by id.", ex);
}
}
private LazyStatement createAddressStatement = new LazyStatement(
"INSERT INTO addresses(address_city, address_zipcode, address_street, address_number, address_country, address_latitude, address_longitude) " +
"VALUES (?,?,?,?,?)",
"address_id"
);
@Override
public Address createAddress(String country, String zip, String city, String street, String num, float lat, float lng) throws DataAccessException {
try {
PreparedStatement ps = createAddressStatement.value(); // reused so should not be auto-closed
ps.setString(1, city);
ps.setString(2, zip);
ps.setString(3, street);
ps.setString(4, num);
ps.setString(5, country);
ps.setFloat(6, lat);
ps.setFloat(7, lng);
if(ps.executeUpdate() == 0)
throw new DataAccessException("No rows were affected when creating address.");
try (ResultSet keys = ps.getGeneratedKeys()) {
keys.next(); //if this fails we want an exception anyway
return new Address(keys.getInt(1), country, zip, city, street, num, lat, lng);
}
} catch (SQLException ex) {
throw new DataAccessException("Failed to create address.", ex);
}
}
private LazyStatement deleteAddressStatement = new LazyStatement(
"DELETE FROM addresses WHERE address_id = ?"
);
@Override
public void deleteAddress(int addressId) throws DataAccessException {
try {
PreparedStatement ps = deleteAddressStatement.value(); // reused so should not be auto-closed
ps.setInt(1, addressId);
if(ps.executeUpdate() == 0)
throw new DataAccessException("No rows were affected when deleting address with ID=" + addressId);
} catch(SQLException ex){
throw new DataAccessException("Failed to execute address deletion query.", ex);
}
}
// used to update addresses as part of updates of other tables
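    // Hypothetical example of the join pattern (table/column names invented for illustration):
    //   updateLocation(conn, "JOIN cars ON car_location = address_id", "car_id", carId, location);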
static void updateLocation(Connection conn, String joinSQL, String idName, int id, Address location) {
try (PreparedStatement ps = conn.prepareStatement(
"UPDATE addresses " + joinSQL +
" SET address_city = ?, address_zipcode = ?, address_street = ?, address_number = ?, address_country=?, address_latitude=?, address_longitude=? " +
"WHERE " + idName + " = ?"
)) {
ps.setString(1, location.getCity());
ps.setString(2, location.getZip());
ps.setString(3, location.getStreet());
ps.setString(4, location.getNum());
ps.setString(5, location.getCountry());
ps.setFloat(6, location.getLat());
ps.setFloat(7, location.getLng());
ps.setInt(8, id);
ps.executeUpdate();
} catch (SQLException ex) {
throw new DataAccessException("Failed to update location.", ex);
}
}
private LazyStatement updateAddressStatement = new LazyStatement(
"UPDATE addresses SET address_city = ?, address_zipcode = ?, address_street = ?, " +
"address_number = ?, address_country=?, " + "address_latitude = ?, address_longitude=? " +
"WHERE address_id = ?"
);
@Override
public void updateAddress(Address address) throws DataAccessException {
try {
PreparedStatement ps = updateAddressStatement.value(); // reused so should not be auto-closed
ps.setString(1, address.getCity());
ps.setString(2, address.getZip());
ps.setString(3, address.getStreet());
ps.setString(4, address.getNum());
ps.setString(5, address.getCountry());
ps.setFloat(6, address.getLat());
ps.setFloat(7, address.getLng());
ps.setInt(8, address.getId());
if(ps.executeUpdate() == 0)
throw new DataAccessException("Address update affected 0 rows.");
} catch(SQLException ex) {
throw new DataAccessException("Failed to update address.", ex);
}
}
}<|fim▁end|> | rs.getString("address_street"),
rs.getString("address_number"),
rs.getFloat("address_latitude"), |
<|file_name|>csv.py<|end_file_name|><|fim▁begin|>import csv
__author__ = "Aaron Eppert <[email protected]>"
def process(options, results, output_handle):
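    """Write `results` to `output_handle` as CSV.

    A sketch of the assumed inputs, inferred from the code below:
    options['select'] is a comma-separated list of field names, results is an
    iterable of dicts, and output_handle is a writable file-like object. List
    values are joined with ', ' and missing fields become empty cells.
    """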
headers = options['select'].split(',')
writer = csv.writer(output_handle)
writer.writerow(headers)
for result in results:
row = []<|fim▁hole|> for header in headers:
if header in result:
if isinstance(result[header], list):
result[header] = ', '.join(result[header])
row.append(result[header])
else:
row.append('')
writer.writerow(row)<|fim▁end|> | |
<|file_name|>neos_cs.ts<|end_file_name|><|fim▁begin|><TS language="cs" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Pravým kliknutím upravte adresu nebo popisek</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Vytvořit novou adresu</translation>
</message>
<message>
<source>&New</source>
<translation>&Nová</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopírovat vybranou adresu do mezipaměti</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopírovat</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Smazat aktuálně vybranou adresu ze seznamu</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Smazat</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportovat data z aktuální záložky do souboru</translation>
</message>
<message>
<source>&Export</source>
<translation>&Exportovat</translation>
</message>
<message>
<source>C&lose</source>
<translation>Z&avřít</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Vybrat adresu, kam poslat peníze</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Vybrat adresu pro přijetí peněz</translation>
</message>
<message>
<source>C&hoose</source>
<translation>V&ybrat</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Adresy pro odeslání peněz</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Adresy pro přijetí peněz</translation>
</message>
<message>
<source>These are your NEOS addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Toto jsou Vaše NEOS adresy pro poslání platby. Vždy si překontrolujte množství peněz a cílovou adresu, než platbu odešlete.</translation>
</message>
<message>
<source>These are your NEOS addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Toto jsou Vaše NEOS adresy pro přijetí plateb. Je doporučeno použít novou adresu pro každou novou transakci.</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Kopírovat Adresu</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Kopírovat &Popis</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Upravit</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Exportovat Seznam Adres</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Comma separated file (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Export selhal</translation>
</message><|fim▁hole|> <message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Objevila se chyba při pokusu o uložení seznamu adres do %1. Prosím, zkuste to znovu.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Popis</translation>
</message>
<message>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<source>(no label)</source>
<translation>(bez popisku)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Dialog frázového hesla</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Zadejte frázové heslo</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nové frázové heslo</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Zopakujte frázové heslo</translation>
</message>
<message>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation>Slouží k neumožnění zaslání jednoduché platby, pokud je účet OS kompromitován. Neposkytuje tak reálné zabezpečení.</translation>
</message>
<message>
<source>For anonymization and staking only</source>
<translation>Pouze pro anonymizaci a sázení</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Zadejte nové frázové heslo pro Vaši peněženku.<br/>Prosím, použijte frázové heslo z <b>deseti nebo více náhodných znaků</b>, nebo <b>osmi nebo více slov</b>.</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Šifrovat peněženku</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Pro tuto operaci potřebujete frázové heslo k odemčení Vaší peněženky.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Odemknout peněženku</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Pro tuto operaci potřebujete frázové heslo pro odšifrování Vaší peněženky.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Odšifrovat peněženku</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Změnit frázové heslo</translation>
</message>
<message>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Zadejte staré a nové frázové heslo Vaší peněženky.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Potvrdit zašifrování peněženky</translation>
</message>
<message>
<source>NEOS will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your NEOS from being stolen by malware infecting your computer.</source>
<translation>NEOS se teď zavře pro dokončení šifrovacího procesu. Prosím, vezměte na vědomí, že zašifrování Vaší peněženky plně neochrání Vaše NEOS před krádeží, pokud je Váš počítač infikován malwarem.</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Opravdu chcete zašifrovat Vaši peněženku?</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR NEOS</b>!</source>
<translation>Varování: Pokud zašifrujete svou peněženku a ztratíte frázové heslo, tak <b>ZTRATÍTE VŠECHNY VAŠE NEOS</b>!</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Peněženka je zašifrována</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>DŮLEŽITÉ: Každá předešlá záloha, kterou jste provedli, by měla být nahrazena nově vygenerovanou, šifrovanou zálohou souboru Vaší peněženky. Z bezpečnostních důvodů budou všechny předešlé zálohy nezašifrované peněženky nepoužitelné, jakmile začnete používat nově zašifrovanou peněženku.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Šifrování peněženky selhalo</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Šifrování peněženky selhalo kvůli vnitřní chybě aplikace. Vaše peněženka není zašifrovaná.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Zadaná frázová hesla se neshodují.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Odemčení peněženky selhalo</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Zadané frázové heslo pro dešifrování peněženky není správné.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Odšifrování peněženky selhalo</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Frázové heslo peněženky bylo úspěšně změněno.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Varování: Caps Lock je zapnutý!</translation>
</message>
</context>
<context>
<name>Bip38ToolDialog</name>
<message>
<source>BIP 38 Tool</source>
<translation>BIP 38 Nástroj</translation>
</message>
<message>
<source>&BIP 38 Encrypt</source>
<translation>&BIP 38 Šifrovat</translation>
</message>
<message>
<source>Enter a Neos Address that you would like to encrypt using BIP 38. Enter a passphrase in the middle box. Press encrypt to compute the encrypted private key.</source>
<translation>Zadejte NEOS adresu, kterou si přejete zašifrovat pomocí BIP38. Frázové heslo zadejte do prostředního boxu. Stiskněte šifrovat pro výpočet šifrovaného privátního klíče.</translation>
</message>
<message>
<source>Address:</source>
<translation>Adresa:</translation>
</message>
<message>
<source>The NEOS address to sign the message with</source>
<translation>NEOS adresa pro podepsání zprávy</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Vyberte již dříve použitou adresu</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Vložit adresu z mezipaměti</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Passphrase: </source>
<translation>Frázové heslo:</translation>
</message>
<message>
<source>Encrypted Key:</source>
<translation>Zašifrovaný Klíč:</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopírovat aktuální podpis do systémové mezipaměti</translation>
</message>
<message>
<source>Sign the message to prove you own this NEOS address</source>
<translation>Podepsat zprávu k prokázání, že vlastníte tuto NEOS adresu</translation>
</message>
<message>
<source>Encrypt &Key</source>
<translation>Šifrovat &Klíč</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Resetovat všechny položky podepsání zprávy</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Smazat &Vše</translation>
</message>
<message>
<source>&BIP 38 Decrypt</source>
<translation>&BIP 38 Dešifrování</translation>
</message>
<message>
<source>Enter the BIP 38 encrypted private key. Enter the passphrase in the middle box. Click Decrypt Key to compute the private key. After the key is decrypted, clicking 'Import Address' will add this private key to the wallet.</source>
<translation>Vložte BIP 38 šifrovaný privátní klíč. Frázové heslo vložte do prostředního boxu. Klikněte na Dešifrovat Klíč pro výpočet privátního klíče. Poté co bude klíč dešifrován, kliknutím na 'Importovat Adresu' přidáte privátní klíč do Vaší peněženky.</translation>
</message>
<message>
<source>The NEOS address the message was signed with</source>
<translation>NEOS adresa, kterou byla zpráva podepsána</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified NEOS address</source>
<translation>Verifikujte zprávu pro ujištění, že byla podepsána zmíněnou NEOS adresou</translation>
</message>
<message>
<source>Decrypt &Key</source>
<translation>Dešifrovat &Klíč</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Resetovat všechny položky pro ověření zprávy</translation>
</message>
<message>
<source>Decrypted Key:</source>
<translation>Dešifrovaný Klíč:</translation>
</message>
<message>
<source>Import Address</source>
<translation>Importovat Adresu</translation>
</message>
<message>
<source>Click "Decrypt Key" to compute key</source>
<translation>Klikněte na "Dešifrovat Klíč" pro výpočet klíče</translation>
</message>
<message>
<source>The entered passphrase is invalid. </source>
<translation>Zadané frázové heslo není validní.</translation>
</message>
<message>
<source>Allowed: 0-9,a-z,A-Z,</source>
<translation>Povoleno: 0-9,a-z,A-Z,</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>Zadaná adresa není validní.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Prosím, zkontrolujte adresu a zkuste to znovu.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>Zadaná adresa neodpovídá klíči.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Odemknutí peněženky bylo zrušeno.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>Privátní klíč pro zadanou adresu není dostupný.</translation>
</message>
<message>
<source>Failed to decrypt.</source>
<translation>Dešifrování selhalo.</translation>
</message>
<message>
<source>Please check the key and passphrase and try again.</source>
<translation>Prosím, zkontrolujte klíč a frázové heslo a zkuste to znovu.</translation>
</message>
<message>
<source>Data Not Valid.</source>
<translation>Data nejsou validní.</translation>
</message>
<message>
<source>Please try again.</source>
<translation>Prosím, zkuste to znovu.</translation>
</message>
<message>
<source>Please wait while key is imported</source>
<translation>Prosím, počkejte, než se klíč importuje</translation>
</message>
<message>
<source>Key Already Held By Wallet</source>
<translation>Klíč se už v peněžence nachází</translation>
</message>
<message>
<source>Error Adding Key To Wallet</source>
<translation>Chyba při vkládání klíče do peněženky</translation>
</message>
<message>
<source>Successfully Added Private Key To Wallet</source>
<translation>Klíč byl úspěšně přidán do peněženky</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Wallet</source>
<translation>Peněženka</translation>
</message>
<message>
<source>Node</source>
<translation>Uzel</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Přehled</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Ukaž celkový přehled peněženky</translation>
</message>
<message>
<source>&Send</source>
<translation>&Odeslat</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Přijmout</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transakce</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Procházet historii transakcí</translation>
</message>
<message>
<source>E&xit</source>
<translation>E&xit</translation>
</message>
<message>
<source>Quit application</source>
<translation>Zavřít aplikaci</translation>
</message>
<message>
<source>About &Qt</source>
<translation>O &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Ukaž informace o Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Možnosti...</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Zobrazit / Schovat</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Zobrazit nebo schovat hlavní okno</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Šifrovat Peněženku...</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Šifrovat privátní klíče náležící Vaší peněžence</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Zálohovat peněženku...</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Zálohovat peněženku na jiné místo</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Změnit frázové heslo...</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Změnit frázové heslo pro šifrování peněženky</translation>
</message>
<message>
<source>&Unlock Wallet...</source>
<translation>&Odemknout peněženku...</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Odemknout peněženku</translation>
</message>
<message>
<source>&Lock Wallet</source>
<translation>&Zamknout Peněženku</translation>
</message>
<message>
<source>Sign &message...</source>
<translation>Podepsat &zprávu...</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Verifikovat zprávu...</translation>
</message>
<message>
<source>&Information</source>
<translation>&Informace</translation>
</message>
<message>
<source>Show diagnostic information</source>
<translation>Zobrazit diagnostická data</translation>
</message>
<message>
<source>&Debug console</source>
<translation>&Ladící konzole</translation>
</message>
<message>
<source>Open debugging console</source>
<translation>Otevřít ladící konzoli</translation>
</message>
<message>
<source>&Network Monitor</source>
<translation>&Monitorování sítě</translation>
</message>
<message>
<source>Show network monitor</source>
<translation>Zobrazit monitorování sítě</translation>
</message>
<message>
<source>&Peers list</source>
<translation>&Seznam peerů</translation>
</message>
<message>
<source>Show peers info</source>
<translation>Zobrazit info peerů</translation>
</message>
<message>
<source>Wallet &Repair</source>
<translation>&Oprava Peněženky</translation>
</message>
<message>
<source>Show wallet repair options</source>
<translation>Zobrazit možnosti opravy peněženky</translation>
</message>
<message>
<source>Open configuration file</source>
<translation>Otevřít konfigurační soubor</translation>
</message>
<message>
<source>Show Automatic &Backups</source>
<translation>Zobrazit Automatické &Zálohy</translation>
</message>
<message>
<source>Show automatically created wallet backups</source>
<translation>Zobrazit automaticky vytvořené zálohy peněženky</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Odesílací adresy...</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Zobrazit seznam použitých adres a popisků pro odeslání platby</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Přijímací adresy...</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Zobrazit seznam použitých adres a popisků pro přijetí plateb</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Otevřít &URI...</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>Možnosti příkazové řádky</translation>
</message>
<message>
<source>Synchronizing additional data: %p%</source>
<translation>Synchronizuji přídavná data: %p%</translation>
</message>
<message>
<source>&File</source>
<translation>&Soubor</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Nastavení</translation>
</message>
<message>
<source>&Tools</source>
<translation>&Nástroje</translation>
</message>
<message>
<source>&Help</source>
<translation>&Pomoc</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Nástrojová lišta záložek</translation>
</message>
<message>
<source>NEOS Core</source>
<translation>NEOS Core</translation>
</message>
<message>
<source>Send coins to a NEOS address</source>
<translation>Odeslat platbu na NEOS adresu</translation>
</message>
<message>
<source>Request payments (generates QR codes and neos: URIs)</source>
<translation>Vyžádat platbu (generování QR kódů a neos: URI)</translation>
</message>
<message>
<source>&Masternodes</source>
<translation>&Masternody</translation>
</message>
<message>
<source>Browse masternodes</source>
<translation>Procházet masternody</translation>
</message>
<message>
<source>&About NEOS Core</source>
<translation>&O NEOS Core</translation>
</message>
<message>
<source>Show information about NEOS Core</source>
<translation>Zobraz informace o NEOS Core</translation>
</message>
<message>
<source>Modify configuration options for NEOS</source>
<translation>Upravit možnosti konfigurace pro NEOS</translation>
</message>
<message>
<source>Sign messages with your NEOS addresses to prove you own them</source>
<translation>Podepsat zprávy Vaší NEOS adresou pro prokázání, že jste jejich vlastníkem</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified NEOS addresses</source>
<translation>Ověřit zprávy k zajištění, že byly podepsány vybranými NEOS adresami</translation>
</message>
<message>
<source>&BIP38 tool</source>
<translation>&BIP38 nástroj</translation>
</message>
<message>
<source>Encrypt and decrypt private keys using a passphrase</source>
<translation>Šifrovat a dešifrovat klíče s použitím frázového hesla</translation>
</message>
<message>
<source>&MultiSend</source>
<translation>&MultiSend</translation>
</message>
<message>
<source>MultiSend Settings</source>
<translation>Nastavení MultiSendu</translation>
</message>
<message>
<source>Open Wallet &Configuration File</source>
<translation>Otevřít Pěněženkový &Konfigurační soubor</translation>
</message>
<message>
<source>Open &Masternode Configuration File</source>
<translation>Otevřít &Masternodový Konfigurační Soubor</translation>
</message>
<message>
<source>Open Masternode configuration file</source>
<translation>Otevřít Masternodový konfigurační soubor</translation>
</message>
<message>
<source>Open a NEOS: URI or payment request</source>
<translation>Otevřít NEOS: URI nebo platební žádost</translation>
</message>
<message>
<source>&Blockchain explorer</source>
<translation>&Blockchainový průzkumník</translation>
</message>
<message>
<source>Block explorer window</source>
<translation>Okno blokového průzkumníka</translation>
</message>
<message>
<source>Show the NEOS Core help message to get a list with possible NEOS command-line options</source>
<translation>Zobrazit pomocnou zprávu NEOS Core se seznamem dostupných parametrů příkazové řádky NEOS</translation>
</message>
<message>
<source>NEOS Core client</source>
<translation>NEOS Core klient</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Synchronizace se sítí...</translation>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>Import bloků z disku...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Reindexace bloků na disku...</translation>
</message>
<message>
<source>No block source available...</source>
<translation>Není dostupný žádný zdroj bloků...</translation>
</message>
<message>
<source>Up to date</source>
<translation>Aktualizováno</translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 a %2</translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 za</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Poslední přijatý blok byl vygenerován před %1.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Transakce po tomto okamžiku ještě nebudou viditelné.</translation>
</message>
<message>
<source>Error</source>
<translation>Chyba</translation>
</message>
<message>
<source>Warning</source>
<translation>Varování</translation>
</message>
<message>
<source>Information</source>
<translation>Informace</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Odeslané transakce</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Příchozí transakce</translation>
</message>
<message>
<source>Sent MultiSend transaction</source>
<translation>Odeslat MultiSend transakci</translation>
</message>
<message>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Datum: %1
Hodnota: %2
Typ: %3
Adresa: %4
</translation>
</message>
<message>
<source>Staking is active
MultiSend: %1</source>
<translation>Sázení je aktivní
MultiSend: %1</translation>
</message>
<message>
<source>Active</source>
<translation>Aktivní</translation>
</message>
<message>
<source>Not Active</source>
<translation>Neaktivní</translation>
</message>
<message>
<source>Staking is not active
MultiSend: %1</source>
<translation>Sázení není aktivní
MultiSend: %1</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Peněženka je <b>zašifrovaná</b> a momentálně je <b>odemčená</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b> for anonimization and staking only</source>
<translation>Peněženka je <b>zašifrovaná</b> a momentálně je <b>odemčená</b> pouze pro anonymizaci a sázení</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Peněženka je <b>zašifrovaná</b> a momentálně je <b>zamčená</b></translation>
</message>
</context>
<context>
<name>BlockExplorer</name>
<message>
<source>Blockchain Explorer</source>
<translation>Blockchainový průzkumník</translation>
</message>
<message>
<source>Address / Block / Transaction</source>
<translation>Adresa / Blok / Transakce</translation>
</message>
<message>
<source>Search</source>
<translation>Hledat</translation>
</message>
<message>
<source>TextLabel</source>
<translation>TextPopisku</translation>
</message>
<message>
<source>Not all transactions will be shown. To view all transactions you need to set txindex=1 in the configuration file (neos.conf).</source>
<translation>Ne všechny transakce budou zobrazeny. Pro zobrazení všech transakcí nastavte v konfiguračním souboru (neos.conf) txindex=1.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>Upozornění sítě</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Quantity:</source>
<translation>Množství:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Byty:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Hodnota:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Priorita:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Poplatek:</translation>
</message>
<message>
<source>Coin Selection</source>
<translation>Výběr mince</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Po poplatku:</translation>
</message>
<message>
<source>Change:</source>
<translation>Změna:</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Stromový mód</translation>
</message>
<message>
<source>List mode</source>
<translation>Seznamový mód</translation>
</message>
<message>
<source>(1 locked)</source>
<translation>(1 zamčeno)</translation>
</message>
<message>
<source>Amount</source>
<translation>Hodnota</translation>
</message>
<message>
<source>Received with label</source>
<translation>Obdrženo s popiskem</translation>
</message>
<message>
<source>Received with address</source>
<translation>Obdrženo s adresou</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Potvrzení</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Potvrzeno</translation>
</message>
<message>
<source>Priority</source>
<translation>Priorita</translation>
</message>
<message>
<source>Copy address</source>
<translation>Kopírovat adresu</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopírovat popisek</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopírovat hodnotu</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopírovat ID transakce</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Zamknout neutracené</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Odemknout neutracené</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopírovat množství</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopírovat poplatek</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopírovat s poplatkem</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopírovat byty</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Kopírovat prioritu</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopírovat změnu</translation>
</message>
<message>
<source>Please switch to "List mode" to use this function.</source>
<translation>Prosím, přepněte do "Seznamového módu" pro použití této funkce.</translation>
</message>
<message>
<source>highest</source>
<translation>nejvyšší</translation>
</message>
<message>
<source>higher</source>
<translation>vyšší</translation>
</message>
<message>
<source>high</source>
<translation>vysoký</translation>
</message>
<message>
<source>medium-high</source>
<translation>středně vysoký</translation>
</message>
<message>
<source>n/a</source>
<translation>n/a</translation>
</message>
<message>
<source>medium</source>
<translation>střední</translation>
</message>
<message>
<source>low-medium</source>
<translation>středně malý</translation>
</message>
<message>
<source>low</source>
<translation>nízký</translation>
</message>
<message>
<source>lower</source>
<translation>nižší</translation>
</message>
<message>
<source>lowest</source>
<translation>nejnižší</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 zamknuto)</translation>
</message>
<message>
<source>none</source>
<translation>žádný</translation>
</message>
<message>
<source>yes</source>
<translation>ano</translation>
</message>
<message>
<source>no</source>
<translation>ne</translation>
</message>
<message>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation>Tento text zčervená, pokud bude velikost transakce větší než 1000 bytů.</translation>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>To znamená, že nejnižší nutný poplatek musí být nejméně %1 za kB.</translation>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation>Může se lišit +/- 1 byte na vstup.</translation>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Transakce s vyšší prioritou bude pravděpodobněji zařazena do bloku.</translation>
</message>
<message>
<source>This label turns red, if the priority is smaller than "medium".</source>
<translation>Tento text zčervená, pokud je priorita menší než "střední".</translation>
</message>
<message>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation>Tento text zčervená, pokud je hodnota pro některého z příjemců menší než %1.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(bez popisku)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>změna z %1 (%2)</translation>
</message>
<message>
<source>(change)</source>
<translation>(změna)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Upravit adresu</translation>
</message>
<message>
<source>&Label</source>
<translation>&Popis</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adresa</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Nová adresa pro přijetí platby</translation>
</message>
<message>
<source>New sending address</source>
<translation>Nová adresa k odeslání platby</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Upravit adresu pro přijetí platby</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Upravit adresu k odeslání platby</translation>
</message>
<message>
<source>The entered address "%1" is not a valid NEOS address.</source>
<translation>Zadaná adresa "%1" není validní NEOS adresa.</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>Zadaná adresa "%1" je již ve Vašem seznamu adres.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Nepodařilo se odemknout peněženku.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Generování nového klíče selhalo.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Bude vytvořena nová datová složka.</translation>
</message>
<message>
<source>name</source>
<translation>jméno</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Složka již existuje. Přidejte %1 pokud zde chcete vytvořit novou složku.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Cesta již existuje a není to složka.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Zde nelze vytvořit složku.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>version</source>
<translation>verze</translation>
</message>
<message>
<source>NEOS Core</source>
<translation>NEOS Core</translation>
</message>
<message>
<source>About NEOS Core</source>
<translation>O NEOS Core</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Možnosti příkazové řádky</translation>
</message>
<message>
<source>Usage:</source>
<translation>Použití:</translation>
</message>
<message>
<source>command-line options</source>
<translation>možnosti příkazové řádky</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Nastavit jazyk, například "de_DE" (defaultně: systémová lokalizace)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Spustit minimalizované</translation>
</message>
<message>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>Nastavit SSL kořenový certifikát pro platební žádosti (defaultně: - system-)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Vítejte</translation>
</message>
<message>
<source>Welcome to NEOS Core.</source>
<translation>Vítejte v NEOS Core.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where NEOS Core will store its data.</source>
<translation>Při prvním spuštění programu si můžete vybrat, kam bude NEOS Core ukládat svá data.</translation>
</message>
<message>
<source>NEOS Core will download and store a copy of the NEOS block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>NEOS Core stáhne a uloží kopii NEOS blockchainu. Nejméně %1GB dat bude do této složky uloženo a v průběhu času bude ukládat další data. Peněženka bude v této složce uložena také.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Použít defaultně nastavenou složku pro data</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Použít vlastní složku pro data:</translation>
</message>
<message>
<source>NEOS Core</source>
<translation>NEOS Core</translation>
</message>
<message>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Chyba: Zvolená složka "%1" nemůže být vytvořena.</translation>
</message>
<message>
<source>Error</source>
<translation>Chyba</translation>
</message>
<message>
<source>%1 GB of free space available</source>
<translation>%1 GB dostupného volného místa</translation>
</message>
<message>
<source>(of %1 GB needed)</source>
<translation>(z %1 GB potřeba)</translation>
</message>
</context>
<context>
<name>MasternodeList</name>
<message>
<source>Form</source>
<translation>Formulář</translation>
</message>
<message>
<source>My Masternodes</source>
<translation>Moje Masternody</translation>
</message>
<message>
<source>Alias</source>
<translation>Alias</translation>
</message>
<message>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<source>Protocol</source>
<translation>Protokol</translation>
</message>
<message>
<source>Status</source>
<translation>Stav</translation>
</message>
<message>
<source>Active</source>
<translation>Aktivní</translation>
</message>
<message>
<source>Pubkey</source>
<translation>Veřejný klíč</translation>
</message>
<message>
<source>S&tart alias</source>
<translation>S&pustit alias</translation>
</message>
<message>
<source>Start &all</source>
<translation>Spustit &vše</translation>
</message>
<message>
<source>Start &MISSING</source>
<translation>Spustit &CHYBĚJÍCÍ</translation>
</message>
<message>
<source>&Update status</source>
<translation>&Aktualizovat stav</translation>
</message>
<message>
<source>Status will be updated automatically in (sec):</source>
<translation>Stav bude automaticky aktualizován za (sec):</translation>
</message>
<message>
<source>0</source>
<translation>0</translation>
</message>
<message>
<source>Start alias</source>
<translation>Spustit alias</translation>
</message>
<message>
<source>Confirm masternode start</source>
<translation>Potvrdit spuštění masternodu</translation>
</message>
<message>
<source>Are you sure you want to start masternode %1?</source>
<translation>Opravdu chcete spustit masternode %1?</translation>
</message>
<message>
<source>Confirm all masternodes start</source>
<translation>Potvrdit spuštění všech masternodů</translation>
</message>
<message>
<source>Are you sure you want to start ALL masternodes?</source>
<translation>Opravdu chcete spustit VŠECHNY masternody?</translation>
</message>
<message>
<source>Command is not available right now</source>
<translation>Příkaz teď není dostupný</translation>
</message>
<message>
<source>You can't use this command until masternode list is synced</source>
<translation>Nemůžete použít tento příkaz, dokud nebude seznam masternodů synchronizován</translation>
</message>
<message>
<source>Confirm missing masternodes start</source>
<translation>Potvrdit spuštění chybějícího masternodu</translation>
</message>
<message>
<source>Are you sure you want to start MISSING masternodes?</source>
<translation>Opravdu chcete spustit CHYBĚJÍCÍ masternody?</translation>
</message>
</context>
<context>
<name>MultiSendDialog</name>
<message>
<source>MultiSend</source>
<translation>MultiSend</translation>
</message>
<message>
<source>Enter whole numbers 1 - 100</source>
<translation>Zadejte celá čísla 1-100</translation>
</message>
<message>
<source>Enter % to Give (1-100)</source>
<translation>Zadejte % k odeslání (1-100)</translation>
</message>
<message>
<source>Enter Address to Send to</source>
<translation>Zadejte adresu pro odeslání platby</translation>
</message>
<message>
<source>Add to MultiSend Vector</source>
<translation>Přidat do MultiSend Vektoru</translation>
</message>
<message>
<source>Add</source>
<translation>Přidat</translation>
</message>
<message>
<source>Deactivate MultiSend</source>
<translation>Deaktivovat MultiSend</translation>
</message>
<message>
<source>Deactivate</source>
<translation>Deaktivovat</translation>
</message>
<message>
<source>Choose an address from the address book</source>
<translation>Vybrat adresu z Vašeho seznamu adres</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Percentage of stake to send</source>
<translation>Procento ze sázek k odeslání</translation>
</message>
<message>
<source>Percentage:</source>
<translation>Procento:</translation>
</message>
<message>
<source>Address to send portion of stake to</source>
<translation>Adresa pro zaslání části sázky - stake</translation>
</message>
<message>
<source>Address:</source>
<translation>Adresa:</translation>
</message>
<message>
<source>Delete Address From MultiSend Vector</source>
<translation>Smazat adresu z MultiSend Vektoru</translation>
</message>
<message>
<source>Delete</source>
<translation>Smazat</translation>
</message>
<message>
<source>Activate MultiSend</source>
<translation>Aktivovat MultiSend</translation>
</message>
<message>
<source>Activate</source>
<translation>Aktivovat</translation>
</message>
<message>
<source>View MultiSend Vector</source>
<translation>Zobrazit MultiSend Vektor</translation>
</message>
<message>
<source>View MultiSend</source>
<translation>Zobrazit MultiSend</translation>
</message>
<message>
<source>Send For Stakes</source>
<translation>Poslat pro sázky - stake</translation>
</message>
<message>
<source>Send For Masternode Rewards</source>
<translation>Poslat pro odměny masternodů</translation>
</message>
<message>
<source>The entered address:
</source>
<translation>Zadaná adresa:
</translation>
</message>
<message>
<source> is invalid.
Please check the address and try again.</source>
<translation>není validní.
Prosím zkontrolujte adresu a zkuste to znovu.</translation>
</message>
<message>
<source>The total amount of your MultiSend vector is over 100% of your stake reward
</source>
<translation>Celková hodnota Vašeho MultiSend Vektoru je přes 100 % Vaší odměny ze sázení
</translation>
</message>
<message>
<source>Please Enter 1 - 100 for percent.</source>
<translation>Prosím, zadejte 1-100 procent.</translation>
</message>
<message>
<source>MultiSend Vector
</source>
<translation>MultiSend Vektor
</translation>
</message>
<message>
<source>Removed </source>
<translation>Odstraněno</translation>
</message>
<message>
<source>Could not locate address
</source>
<translation>Nelze najít adresu
</translation>
</message>
</context>
<context>
<name>ObfuscationConfig</name>
<message>
<source>Configure Obfuscation</source>
<translation>Konfigurace obfuskace</translation>
</message>
<message>
<source>Basic Privacy</source>
<translation>Základní ochrana soukromí</translation>
</message>
<message>
<source>High Privacy</source>
<translation>Vysoká ochrana soukromí</translation>
</message>
<message>
<source>Maximum Privacy</source>
<translation>Maximální ochrana soukromí</translation>
</message>
<message>
<source>Please select a privacy level.</source>
<translation>Prosím, vyberte úroveň ochrany soukromí.</translation>
</message>
<message>
<source>Use 2 separate masternodes to mix funds up to 10000 NEOS</source>
<translation>Použít 2 oddělené masternody k promíchání prostředků až do 10000 NEOS</translation>
</message>
<message>
<source>Use 16 separate masternodes</source>
<translation>Použít 16 oddělených masternodů</translation>
</message>
<message>
<source>This option is the quickest and will cost about ~0.025 NEOS to anonymize 10000 NEOS</source>
<translation>Tato možnost je nejrychlejší a bude stát zhruba ~0.025 NEOS pro anonymizaci 10000 NEOS</translation>
</message>
<message>
<source>This is the slowest and most secure option. Using maximum anonymity will cost</source>
<translation>Toto je nejpomalejší a nejvíce bezpečná volba. Použití maximalní anonymity bude stát</translation>
</message>
<message>
<source>0.1 NEOS per 10000 NEOS you anonymize.</source>
<translation>0.1 NEOS za každých 10000 NEOS, které anonymizujete.</translation>
</message>
<message>
<source>Obfuscation Configuration</source>
<translation>Konfigurace obfuskace</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>Otevřít URI</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Vybrat soubor vyžádání platby</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Vybrat soubor vyžádání platby pro otevření</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Možnosti</translation>
</message>
<message>
<source>&Main</source>
<translation>&Hlavní</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Velikost &databázové cache</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Number of script &verification threads</source>
<translation>Počet skriptových &ověřovacích vláken</translation>
</message>
<message>
<source>W&allet</source>
<translation>P&eněženka</translation>
</message>
<message>
<source>Accept connections from outside</source>
<translation>Přijmout připojení zvenčí</translation>
</message>
<message>
<source>Allow incoming connections</source>
<translation>Povolit příchozí spojení</translation>
</message>
<message>
<source>Expert</source>
<translation>Expert</translation>
</message>
<message>
<source>Automatically start NEOS after logging in to the system.</source>
<translation>Automaticky spustit NEOS po přihlášení do systému.</translation>
</message>
<message>
<source>&Start NEOS on system login</source>
<translation>&Spusti NEOS při přihlášení do systému</translation>
</message>
<message>
<source>Amount of NEOS to keep anonymized</source>
<translation>Počet NEOS pro anonymní držení</translation>
</message>
<message>
<source>Show Masternodes Tab</source>
<translation>Zobrazit záložku Masternodů</translation>
</message>
<message>
<source>&Network</source>
<translation>&Síť</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>Proxy &IP:</translation>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>IP adresa proxy (například IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port proxy (například 9050)</translation>
</message>
<message>
<source>&Window</source>
<translation>&Okno</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimalizovat při zavření</translation>
</message>
<message>
<source>&Display</source>
<translation>&Zobrazit</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Resetovat všechny klientské volby na defaultní hodnoty.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&Resetovat Volby</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Zrušit</translation>
</message>
<message>
<source>none</source>
<translation>žádný</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Potvrdit resetování voleb</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formulář</translation>
</message>
<message>
<source>Available:</source>
<translation>Dostupné:</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>Vaše aktuální disponibilní bilance</translation>
</message>
<message>
<source>Pending:</source>
<translation>Zpracovávané:</translation>
</message>
<message>
<source>Balances</source>
<translation>Balance</translation>
</message>
<message>
<source>Total:</source>
<translation>Celkem:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>Vaše aktuální celková balance</translation>
</message>
<message>
<source>Spendable:</source>
<translation>Disponibilní:</translation>
</message>
<message>
<source>Status:</source>
<translation>Stav:</translation>
</message>
<message>
<source>Obfuscation Balance:</source>
<translation>Obfuskační Balance:</translation>
</message>
<message>
<source>0 NEOS / 0 Rounds</source>
<translation>0 NEOS / 0 Kol</translation>
</message>
<message>
<source>Enabled/Disabled</source>
<translation>Zapnuté/Vypnuté</translation>
</message>
<message>
<source>Obfuscation</source>
<translation>Obfuskace</translation>
</message>
<message>
<source>n/a</source>
<translation>n/a</translation>
</message>
<message>
<source>Start/Stop Mixing</source>
<translation>Spustit/Zastavit Míchání</translation>
</message>
<message>
<source>Reset</source>
<translation>Reset</translation>
</message>
<message>
<source>Disabled</source>
<translation>Vypnuto</translation>
</message>
<message>
<source>No inputs detected</source>
<translation>Nedetekovány žádné vstupy</translation>
</message>
<message>
<source>Overall progress</source>
<translation>Celkový postup</translation>
</message>
<message>
<source>Anonymized</source>
<translation>Anonymizováno</translation>
</message>
<message>
<source>Obfuscation was successfully reset.</source>
<translation>Obfuskace byla úspěšně resetována.</translation>
</message>
<message>
<source>Start Obfuscation</source>
<translation>Spustit Obfuskaci</translation>
</message>
<message>
<source>Stop Obfuscation</source>
<translation>Zastavit Obfuskaci</translation>
</message>
<message>
<source>Enabled</source>
<translation>Zapnuto</translation>
</message>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>Invalid payment address %1</source>
<translation>Nevalidní adresa pro platbu %1</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>Version</source>
<translation>Verze</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Čas pingnutí</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Hodnota</translation>
</message>
<message>
<source>%1 d</source>
<translation>%1 d</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 m</translation>
</message>
<message>
<source>%1 s</source>
<translation>%1 s</translation>
</message>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>&Uložit Obrázek...</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>&Information</source>
<translation>&Informace</translation>
</message>
<message>
<source>General</source>
<translation>Obecné</translation>
</message>
<message>
<source>Name</source>
<translation>Jméno</translation>
</message>
<message>
<source>Client name</source>
<translation>Jméno klienta</translation>
</message>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Počet spojení</translation>
</message>
<message>
<source>&Open</source>
<translation>&Otevřít</translation>
</message>
<message>
<source>Network</source>
<translation>Síť</translation>
</message>
<message>
<source>Last block time</source>
<translation>Čas posledního bloku</translation>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>Používaná verze OpenSSL</translation>
</message>
<message>
<source>Build date</source>
<translation>Datum buildu</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Aktuální počet bloků</translation>
</message>
<message>
<source>Client version</source>
<translation>Verze Klienta</translation>
</message>
<message>
<source>Block chain</source>
<translation>Blockchain</translation>
</message>
<message>
<source>Number of Masternodes</source>
<translation>Počet Masternodů</translation>
</message>
<message>
<source>&Console</source>
<translation>&Konzole</translation>
</message>
<message>
<source>Clear console</source>
<translation>Vymazat konzoli</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Vymazat</translation>
</message>
<message>
<source>Totals</source>
<translation>Celkem</translation>
</message>
<message>
<source>Received</source>
<translation>Přijato</translation>
</message>
<message>
<source>Sent</source>
<translation>Odesláno</translation>
</message>
<message>
<source>&Peers</source>
<translation>&Peerů</translation>
</message>
<message>
<source>Direction</source>
<translation>Směr</translation>
</message>
<message>
<source>Protocol</source>
<translation>Protokol</translation>
</message>
<message>
<source>Version</source>
<translation>Verze</translation>
</message>
<message>
<source>Services</source>
<translation>Služby</translation>
</message>
<message>
<source>Last Send</source>
<translation>Poslední odeslané</translation>
</message>
<message>
<source>Last Receive</source>
<translation>Poslední přijaté</translation>
</message>
<message>
<source>Bytes Sent</source>
<translation>Odeslané bajty</translation>
</message>
<message>
<source>Bytes Received</source>
<translation>Přijaté bajty</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Čas pingnutí</translation>
</message>
<message>
<source>&Wallet Repair</source>
<translation>&Oprava Peněženky</translation>
</message>
<message>
<source>Rescan blockchain files</source>
<translation>Reskenovat soubory blockchainu</translation>
</message>
<message>
<source>Upgrade wallet format</source>
<translation>Upgradovat formát peněženky</translation>
</message>
<message>
<source>In:</source>
<translation>Vstup:</translation>
</message>
<message>
<source>Out:</source>
<translation>Výstup:</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Label:</source>
<translation>&Popis:</translation>
</message>
<message>
<source>&Amount:</source>
<translation>&Hodnota:</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Vyžádat platbu</translation>
</message>
<message>
<source>Clear</source>
<translation>Vymazat</translation>
</message>
<message>
<source>Show</source>
<translation>Zobrazit</translation>
</message>
<message>
<source>Remove</source>
<translation>Odstranit</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopírovat popisek</translation>
</message>
<message>
<source>Copy message</source>
<translation>Kopírovat zprávu</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopírovat hodnotu</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>QR kód</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Kopírovat &URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Kopírovat &Adresu</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>&Uložit Obrázek...</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<source>Amount</source>
<translation>Hodnota</translation>
</message>
<message>
<source>Label</source>
<translation>Popis</translation>
</message>
<message>
<source>Message</source>
<translation>Zpráva</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Label</source>
<translation>Popis</translation>
</message>
<message>
<source>Message</source>
<translation>Zpráva</translation>
</message>
<message>
<source>Amount</source>
<translation>Hodnota</translation>
</message>
<message>
<source>(no label)</source>
<translation>(bez popisku)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Inputs...</source>
<translation>Vstupy...</translation>
</message>
<message>
<source>automatically selected</source>
<translation>automaticky vybráno</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Nedostatek prostředků!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Množství:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bajty:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Hodnota:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Priorita:</translation>
</message>
<message>
<source>medium</source>
<translation>střední</translation>
</message>
<message>
<source>Fee:</source>
<translation>Poplatek:</translation>
</message>
<message>
<source>no</source>
<translation>ne</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Po poplatku:</translation>
</message>
<message>
<source>Change:</source>
<translation>Drobné:</translation>
</message>
<message>
<source>0 NEOS</source>
<translation>0 NEOS</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Poplatek Transakce:</translation>
</message>
<message>
<source>Choose...</source>
<translation>Vybrat...</translation>
</message>
<message>
<source>Minimize</source>
<translation>Minimalizovat</translation>
</message>
<message>
<source>Obfuscation</source>
<translation>Obfuskace</translation>
</message>
<message>
<source>per kilobyte</source>
<translation>za kilobajt</translation>
</message>
<message>
<source>total at least</source>
<translation>celkem nejméně</translation>
</message>
<message>
<source>Custom:</source>
<translation>Vlastní:</translation>
</message>
<message>
<source>Recommended</source>
<translation>Doporučeno</translation>
</message>
<message>
<source>S&end</source>
<translation>O&deslat</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Smazat &Vše</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Přidat &Příjemce</translation>
</message>
<message>
<source>SwiftTX</source>
<translation>SwiftTX</translation>
</message>
<message>
<source>Balance:</source>
<translation>Balance:</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopírovat množství</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopírovat hodnotu</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopírovat poplatek</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopírovat s poplatkem</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopírovat bajty</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Kopírovat prioritu</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopírovat změnu</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Opravdu chcete odeslat?</translation>
</message>
<message>
<source>(no label)</source>
<translation>(bez popisku)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>Choose previously used address</source>
<translation>Vyberte již dříve použitou adresu</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Vložit adresu z mezipaměti</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Popis:</translation>
</message>
<message>
<source>A&mount:</source>
<translation>H&odnota:</translation>
</message>
<message>
<source>Message:</source>
<translation>Zpráva:</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Zaplatit na:</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>The NEOS address to sign the message with</source>
<translation>NEOS adresa pro podepsání zprávy</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Vyberte již dříve použitou adresu</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Vložit adresu z mezipaměti</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Signature</source>
<translation>Podpis</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopírovat aktuální podpis do systémové mezipaměti</translation>
</message>
<message>
<source>Sign the message to prove you own this NEOS address</source>
<translation>Podepsat zprávu k prokázání, že vlastníte tuto NEOS adresu</translation>
</message>
<message>
<source>The NEOS address the message was signed with</source>
<translation>NEOS adresa, kterou byla zpráva podepsána</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified NEOS address</source>
<translation>Verifikujte zprávu pro ujištění, že byla podepsána zmíněnou NEOS adresou</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Podepsat &Zprávu</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Resetovat všechny položky podepsání zprávy</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Smazat &Vše</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Resetovat všechny položky pro ověření zprávy</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>Zadaná adresa není validní.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Prosím zkontrolujte adresu a zkuste to znovu.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>Zadaná adresa neodpovídá klíči.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Odemknutí peněženky bylo zrušeno.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>Privátní klíč pro zadanou adresu není dostupný.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>NEOS Core</source>
<translation>NEOS Core</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Status</source>
<translation>Stav</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Message</source>
<translation>Zpráva</translation>
</message>
<message>
<source>Amount</source>
<translation>Hodnota</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Address</source>
<translation>Adresa</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>Copy address</source>
<translation>Kopírovat adresu</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopírovat popisek</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopírovat hodnotu</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopírovat ID transakce</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Comma separated file (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Potvrzeno</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Label</source>
<translation>Popis</translation>
</message>
<message>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Export selhal</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Exportovat</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportovat data z aktuální záložky do souboru</translation>
</message>
</context>
<context>
<name>neos-core</name>
<message>
<source>Error</source>
<translation>Chyba</translation>
</message>
<message>
<source>Information</source>
<translation>Informace</translation>
</message>
<message>
<source>SwiftTX options:</source>
<translation>SwiftTX možnosti:</translation>
</message>
<message>
<source>Synchronization failed</source>
<translation>Synchronizace selhala</translation>
</message>
<message>
<source>Synchronization finished</source>
<translation>Synchronizace dokončena</translation>
</message>
<message>
<source>Synchronization pending...</source>
<translation>Synchronizace probíhá...</translation>
</message>
<message>
<source>Synchronizing budgets...</source>
<translation>Synchronizace rozpočtů...</translation>
</message>
<message>
<source>Synchronizing masternode winners...</source>
<translation>Synchronizace vítězných masternodů...</translation>
</message>
<message>
<source>Synchronizing masternodes...</source>
<translation>Synchronizace masternodů...</translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>Toto je experimentální software.</translation>
</message>
<message>
<source>This is not a Masternode.</source>
<translation>Toto není Masternode.</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Hodnota transakce je příliš malá</translation>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation>Hodnota transakce musí být kladná</translation>
</message>
<message>
<source>Transaction created successfully.</source>
<translation>Transakce byla úspěšně vytvořena.</translation>
</message>
<message>
<source>Transaction fees are too high.</source>
<translation>Poplatek za transakci je příliš vysoký.</translation>
</message>
<message>
<source>Transaction not valid.</source>
<translation>Transakce není validní.</translation>
</message>
<message>
<source>Transaction too large for fee policy</source>
<translation>Transakce je příliš velká s ohledem na pravidla poplatků</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Transakce je příliš velká</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Neznámá síť uvedená v -onlynet: '%s'</translation>
</message>
<message>
<source>Unknown state: id = %u</source>
<translation>Neznámý stav: id = %u</translation>
</message>
<message>
<source>Upgrade wallet to latest format</source>
<translation>Upgradovat peněženku do nejnovějšího formátu</translation>
</message>
<message>
<source>Use the test network</source>
<translation>Použít testovací síť</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Ověřování bloků...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>Ověřování peněženky...</translation>
</message>
<message>
<source>Wallet is locked.</source>
<translation>Peněženka je zamčená.</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Možnosti peněženky:</translation>
</message>
<message>
<source>Wallet window title</source>
<translation>Titulek okna peněženky</translation>
</message>
<message>
<source>Warning</source>
<translation>Varování</translation>
</message>
<message>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Varování: Tato verze je zastaralá, vyžadován upgrade!</translation>
</message>
<message>
<source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source>
<translation>Varování: Nepodporovaný argument -benchmark je ignorován, použijte -debug=bench.</translation>
</message>
<message>
<source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source>
<translation>Varování: Nepodporovaný argument -debugnet je ignorován, použijte -debug=net.</translation>
</message>
<message>
<source>on startup</source>
<translation>při spuštění</translation>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>simple.rs<|end_file_name|><|fim▁begin|>pub fn main() {
let a = true;
let b = false;
if a {
print!("a");
} else if b {
print!("b");
} else {
print!("c")
}
let mut a: int = 0;
let b: bool = true;
let c: bool = false;
let _numbers = [1, 2, 3];
let t = _numbers[a..];
// As a naked if
if b || c {
a = 1;
}
a << 32 > a << 16;
a >> 32 < a >> 16;
a >>= 1;
<|fim▁hole|> 10
} else {
20
};
'foo: loop {
println!("{}", a);
break 'foo;
}
println!("{}", a);
// Tuple expressions. They are not at all ambiguious.
(0,);
(0.0, 4.5);
("a", 4u, true);
// Tuple destructuring let with a statement block!
let (_cap, _cap_name, _oldflags) = {
(1, 2, 3)
};
}<|fim▁end|> | println!("{}", a);
// As an expression
a = if !c { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>from . import ek90, porcelain<|fim▁end|> | #
|
<|file_name|>BulbReleaseControl.hpp<|end_file_name|><|fim▁begin|>//
// RemotePhotoTool - remote camera control software
// Copyright (C) 2008-2014 Michael Fink
//
/// \file BulbReleaseControl.hpp Canon control - Release control for Bulb mode
//
#pragma once
// includes
/// \brief bulb shutter release control
/// this object is created by RemoteReleaseControl::StartBulb; just destroy it to stop bulb mode
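/// Usage sketch (illustrative; per the note above the shared_ptr comes from
/// RemoteReleaseControl::StartBulb, and the instance name is assumed):
/// \code
/// std::shared_ptr<BulbReleaseControl> spBulb = remoteReleaseControl->StartBulb();
/// // ... bulb exposure runs while the object is alive ...
/// double elapsedSeconds = spBulb->ElapsedTime();
/// spBulb.reset(); // destroying the object stops bulb mode
/// \endcode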
class BulbReleaseControl
{
public:
/// dtor
virtual ~BulbReleaseControl() {}
<|fim▁hole|> /// returns elapsed time, in seconds, since bulb start
virtual double ElapsedTime() const = 0;
/// stops bulb method; can be used when the shared_ptr of BulbReleaseControl
/// cannot be destroyed, e.g. since it is held somewhere (e.g. Lua)
virtual void Stop() = 0;
};<|fim▁end|> | |
<|file_name|>SettingsForm.js<|end_file_name|><|fim▁begin|>import { useState } from "react";
import { PropTypes } from "prop-types";
import { SaveOutlined, WarningOutlined } from "@ant-design/icons";
import {
Button,
Col,
Form,
Input,
InputNumber,
Row,
Select,
Switch,
Typography,
Space,
} from "@nextgisweb/gui/antd";
import i18n from "@nextgisweb/pyramid/i18n!";
import {
AddressGeocoderOptions,
DegreeFormatOptions,
UnitsAreaOptions,
UnitsLengthOptions,
} from "./select-options";
const { Title } = Typography;
const INPUT_DEFAULT_WIDTH = { width: "100%" };
export const SettingsForm = ({
onFinish,
initialValues,
srsOptions,
status,
}) => {
const [geocoder, setGeocoder] = useState(
initialValues.address_geocoder || "nominatim"
);
const onValuesChange = (changedValues, allValues) => {
setGeocoder(allValues.address_geocoder);
};
return (
<Form
name="webmap_settings"
className="webmap-settings-form"
initialValues={initialValues}
onFinish={onFinish}
onValuesChange={onValuesChange}
layout="vertical"
>
<Title level={4}>{i18n.gettext("Identify popup")}</Title>
<Row gutter={[16, 16]}>
<Col span={8}>
<Form.Item
name="popup_width"
label={i18n.gettext("Width, px")}
rules={[
{
required: true,
},
]}
>
<InputNumber min="100" style={INPUT_DEFAULT_WIDTH} />
</Form.Item>
</Col>
<Col span={8}>
<Form.Item
name="popup_height"
label={i18n.gettext("Height, px")}
rules={[
{
required: true,
},
]}
>
<InputNumber min="100" style={INPUT_DEFAULT_WIDTH} />
</Form.Item>
</Col>
<Col span={8}>
<Form.Item
name="identify_radius"
label={i18n.gettext("Radius, px")}
rules={[
{
required: true,
},
]}
>
<InputNumber min="1" style={INPUT_DEFAULT_WIDTH} />
</Form.Item>
</Col>
</Row>
<Row gutter={[16, 16]}>
<Col span={24}>
<Form.Item>
<Space direction="horizontal">
<Form.Item
noStyle
name="identify_attributes"
valuePropName="checked"
>
<Switch />
</Form.Item>
{i18n.gettext("Show feature attributes")}
</Space>
</Form.Item>
</Col>
</Row>
<Title level={4}>{i18n.gettext("Measurement")}</Title>
<Row gutter={[16, 16]}>
<Col span={8}>
<Form.Item
name="units_length"
label={i18n.gettext("Length units")}
>
<Select
options={UnitsLengthOptions}
style={INPUT_DEFAULT_WIDTH}
/>
</Form.Item>
</Col>
<Col span={8}>
<Form.Item
name="units_area"
label={i18n.gettext("Area units")}
>
<Select
options={UnitsAreaOptions}
style={INPUT_DEFAULT_WIDTH}
/>
</Form.Item>
</Col>
<Col span={8}>
<Form.Item
name="degree_format"
label={i18n.gettext("Degree format")}
>
<Select
options={DegreeFormatOptions}
style={INPUT_DEFAULT_WIDTH}
/>
</Form.Item>
</Col>
</Row>
<Row gutter={[16, 16]}>
<Col span={24}>
<Form.Item
name="measurement_srid"
label={i18n.gettext("Measurement SRID")}
>
<Select
options={srsOptions}
style={INPUT_DEFAULT_WIDTH}
/>
</Form.Item>
</Col>
</Row>
<Title level={4}>{i18n.gettext("Address search")}</Title>
<Row gutter={[16, 16]}>
<Col span={8}>
<Form.Item>
<Space direction="horizontal">
<Form.Item
noStyle
name="address_search_enabled"
valuePropName="checked"
>
<Switch />
</Form.Item>
{i18n.gettext("Enable")}
</Space>
</Form.Item>
</Col>
<Col span={16}>
<Form.Item>
<Space direction="horizontal">
<Form.Item
noStyle
name="address_search_extent"
valuePropName="checked"
>
<Switch />
</Form.Item>
{i18n.gettext("Limit by web map initial extent")}
</Space>
</Form.Item>
</Col>
</Row>
<Row gutter={[16, 16]}>
<Col span={8}>
<Form.Item
name="address_geocoder"
label={i18n.gettext("Provider")}
>
<Select
options={AddressGeocoderOptions}
style={INPUT_DEFAULT_WIDTH}<|fim▁hole|> {geocoder == "nominatim" ? (
<Form.Item
name="nominatim_countrycodes"
label={i18n.gettext(
"Limit search results to countries"
)}
rules={[
{
pattern: new RegExp(
/^(?:(?:[A-Za-z]+)(?:-[A-Za-z]+)?(?:,|$))+(?<!,)$/
),
message: (
<div>
{i18n.gettext(
"Invalid countries. For example ru or gb,de"
)}
</div>
),
},
]}
>
<Input style={INPUT_DEFAULT_WIDTH} />
</Form.Item>
) : (
<Form.Item
name="yandex_api_geocoder_key"
label={i18n.gettext("Yandex.Maps API Geocoder Key")}
>
<Input style={INPUT_DEFAULT_WIDTH} />
</Form.Item>
)}
</Col>
</Row>
<Row className="row-submit">
<Col>
<Button
htmlType="submit"
type={"primary"}
danger={status === "saved-error"}
icon={
status === "saved-error" ? (
<WarningOutlined />
) : (
<SaveOutlined />
)
}
loading={status === "saving"}
>
{i18n.gettext("Save")}
</Button>
</Col>
</Row>
</Form>
);
};
SettingsForm.propTypes = {
initialValues: PropTypes.object,
onFinish: PropTypes.func,
srsOptions: PropTypes.array,
status: PropTypes.string,
};<|fim▁end|> | />
</Form.Item>
</Col>
<Col span={16}> |
<|file_name|>suicides.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from pymongo import DESCENDING
from libs.lib import tee_db
from libs.statisticator import Job
class SuicidesJob(Job):
def __init__(self):
""" Job to get player suicides
Collection name: kill_results
Structure:
{'player': STR ,
'suicides': INT ,
'gametype': STR,
'last_event_date': DATE ,
}
Primary key : 'player'
"""
Job.__init__(self)
results_db_name = 'results_suicides'<|fim▁hole|>
self.dependencies = ('players', 'gametypes')
def get_dependencies(self):
return self.dependencies
def load_results_from_cache(self):
res = self.results_db.find(spec={
'player': self.player_name,
'gametype': self.gametype,
},
limit=1,
sort=[('date', DESCENDING)],
)
if res.count() > 0:
return res[0]
else:
return None
def get_results(self):
res = self.load_results_from_cache()
if res is None:
return []
else:
return res['suicides']
def save_results_to_cache(self):
# Save new line only when data changes
# Else update only the date
last_data = self.load_results_from_cache()
if last_data is not None and last_data['suicides'] == self.results['suicides']:
last_data['date'] = self.results['date']
self.results = last_data
self.results_db.save(self.results)
def process(self, player_name, gametype):
self.player_name = player_name
self.gametype = gametype
# Change status
self.status = 'processing'
# Get old data
self.results = self.load_results_from_cache()
# Set data if no history
if self.results is None:
self.results = {}
self.results['player'] = self.player_name
self.results['gametype'] = self.gametype
self.results['suicides'] = 0
self.results['last_event_date'] = datetime(1,1,1,0,0,0)
# Get new suicides
if self.gametype:
suicides = tee_db['kill'].find(spec={'$and': [
{'weapon': {'$in': ['-1', '0', '1', '2', '3', '4', '5']}},
{'killer': self.player_name},
{'victim': self.player_name},
{'gametype': self.gametype},
{'round': { "$ne": None}},
{'when': {'$gt': self.results['last_event_date']}},
]},
sort=[('when', DESCENDING)],
)
else:
suicides = tee_db['kill'].find(spec={'$and': [
{'weapon': {'$in': ['-1', '0', '1', '2', '3', '4', '5']}},
{'killer': self.player_name},
{'victim': self.player_name},
{'round': { "$ne": None}},
{'when': {'$gt': self.results['last_event_date']}},
]},
sort=[('when', DESCENDING)],
)
# Set new suicides
self.results['suicides'] += suicides.count()
# Set last event date
if suicides.count() > 0:
self.results['last_event_date'] = suicides[0]['when']
self.results['date'] = datetime.now()
# Save to mongo
self.save_results_to_cache()
# Change status
self.status = 'done'<|fim▁end|> | self.results_db = tee_db[results_db_name] |
<|file_name|>vfsgnome.py<|end_file_name|><|fim▁begin|># -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2008 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
"""GnomeVFS backend for Virtual File System.
"""
import os
from twisted.internet.defer import succeed
from twisted.spread.flavors import Copyable, RemoteCopy
from twisted.spread.jelly import setUnjellyableForClass
from zope.interface import implements
from flumotion.common import log
from flumotion.common.errors import AccessDeniedError
from flumotion.common.interfaces import IDirectory, IFile
# gnomevfs is only imported inside nested scopes so that
# pychecker can ignore them, If pychecker ever gets fixed,
# move it back where it belongs
__pychecker__ = 'keepgoing'
class GnomeVFSFile(Copyable, RemoteCopy):
"""I am object implementing L{IFile} on top of GnomeVFS,
see L{IFile} for more information.
"""
implements(IFile)
def __init__(self, parent, fileInfo):
self.parent = parent
self.filename = fileInfo.name
self.iconNames = ['gnome-fs-regular']
# IFile
def getPath(self):
return os.path.join(self.parent, self.filename)
class GnomeVFSDirectory(Copyable, RemoteCopy):
"""I am object implementing L{IDirectory} on top of GnomeVFS,
see L{IDirectory} for more information.
"""
implements(IDirectory)<|fim▁hole|> import gnomevfs
if not os.path.exists(path):
self.path = '/'
else:
self.path = os.path.abspath(path)
if name is None:
fileInfo = gnomevfs.get_file_info(self.path)
name = fileInfo.name
self.filename = name
self.iconNames = ['gnome-fs-directory']
self._cachedFiles = None
# IFile
def getPath(self):
return self.path
# IDirectory
def getFiles(self):
return succeed(self._cachedFiles)
def cacheFiles(self):
"""
Fetches the files contained on the directory for posterior usage of
them. This should be called on the worker side to work or the files
wouldn't be the expected ones.
"""
import gnomevfs
log.debug('vfsgnome', 'getting files for %s' % (self.path, ))
retval = []
try:
fileInfos = gnomevfs.open_directory(self.path)
except gnomevfs.AccessDeniedError:
raise AccessDeniedError
if self.path != '/':
retval.append(GnomeVFSDirectory(os.path.dirname(self.path),
name='..'))
for fileInfo in fileInfos:
filename = fileInfo.name
if filename.startswith('.'):
continue
if fileInfo.type == gnomevfs.FILE_TYPE_DIRECTORY:
obj = GnomeVFSDirectory(os.path.join(self.path,
fileInfo.name))
else:
obj = GnomeVFSFile(self.path, fileInfo)
retval.append(obj)
log.log('vfsgnome', 'returning %r' % (retval, ))
self._cachedFiles = retval
def registerGnomeVFSJelly():
"""Register the jelly used by the GnomeVFS VFS backend.
"""
setUnjellyableForClass(GnomeVFSFile, GnomeVFSFile)
setUnjellyableForClass(GnomeVFSDirectory, GnomeVFSDirectory)
log.info('jelly', 'GnomeVFS registered')<|fim▁end|> |
def __init__(self, path, name=None): |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! A serialization/deserialization library for [CSA] format.
//!
//! [CSA] format is a plaintext format for recording Shogi games.
//! This library supports parsing CSA-formatted strings as well as composing CSA-formatted strings from structs.
//! Details about the CSA format can be found at http://www.computer-shogi.org/protocol/record_v22.html.
//!
//! # Examples
//! Below is an example of parsing CSA-formatted string into structs.
//!
//! ```
//! use std::time::Duration;
//! use csa::{parse_csa, Action, Color, GameRecord, MoveRecord, PieceType, Square};
//!
//! let csa_str = "\
//! V2.2
//! N+NAKAHARA
//! N-YONENAGA
//! $EVENT:13th World Computer Shogi Championship
//! PI
//! +
//! +2726FU
//! T12
//! ";
//!
//! let game = parse_csa(csa_str).expect("failed to parse the csa content");
//! assert_eq!(game.black_player, Some("NAKAHARA".to_string()));
//! assert_eq!(game.white_player, Some("YONENAGA".to_string()));
//! assert_eq!(game.event, Some("13th World Computer Shogi Championship".to_string()));
//! assert_eq!(game.moves[0], MoveRecord{
//! action: Action::Move(Color::Black, Square::new(2, 7), Square::new(2, 6), PieceType::Pawn),
//! time: Some(Duration::from_secs(12))
//! });
//! ```
//!
//! In contrast, structs can be composed into CSA-formatted string.
//!
//! ```
//! use std::time::Duration;
//! use csa::{ Action, Color, GameRecord, MoveRecord, PieceType, Square};
//!
//! let mut g = GameRecord::default();
//! g.black_player = Some("NAKAHARA".to_string());
//! g.white_player = Some("YONENAGA".to_string());
//! g.event = Some("13th World Computer Shogi Championship".to_string());
//! g.moves.push(MoveRecord {
//! action: Action::Move(
//! Color::Black,
//! Square::new(2, 7),
//! Square::new(2, 6),
//! PieceType::Pawn,
//! ),
//! time: Some(Duration::from_secs(5)),
//! });
//! g.moves.push(MoveRecord {
//! action: Action::Toryo,
//! time: None,
//! });
//!
//! let csa_str = "\
//! V2.2
//! N+NAKAHARA
//! N-YONENAGA
//! $EVENT:13th World Computer Shogi Championship
//! PI<|fim▁hole|>//! +2726FU
//! T5
//! %TORYO
//! ";
//!
//! assert_eq!(csa_str, g.to_string());
//! ```
//!
//! [CSA]: http://www2.computer-shogi.org/protocol/record_v22.html
pub mod parser;
pub mod value;
pub use parser::*;
pub use value::*;<|fim▁end|> | //! + |
<|file_name|>XmlAttributeImportsTestCases.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 1998, 2012 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Denise Smith - 2.4 - February 11, 2013
******************************************************************************/
package org.eclipse.persistence.testing.jaxb.xmlattribute.imports;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.persistence.testing.jaxb.JAXBWithJSONTestCases;
import org.eclipse.persistence.testing.jaxb.xmlattribute.imports2.IdentifierType;
public class XmlAttributeImportsTestCases extends JAXBWithJSONTestCases {
private final static String XML_RESOURCE = "org/eclipse/persistence/testing/jaxb/xmlattribute/imports.xml";
private final static String JSON_RESOURCE = "org/eclipse/persistence/testing/jaxb/xmlattribute/imports.json";
private final static String XSD_RESOURCE = "org/eclipse/persistence/testing/jaxb/xmlattribute/imports.xsd";
private final static String XSD_RESOURCE2 = "org/eclipse/persistence/testing/jaxb/xmlattribute/imports2.xsd";
public XmlAttributeImportsTestCases(String name) throws Exception {
super(name);
setControlDocument(XML_RESOURCE);
setControlJSON(JSON_RESOURCE);<|fim▁hole|>
protected Object getControlObject() {
Person obj = new Person();
obj.name = "theName";
obj.setId(IdentifierType.thirdThing);
return obj;
}
public void testSchemaGen() throws Exception{
List<InputStream> controlSchemas = new ArrayList<InputStream>();
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(XSD_RESOURCE);
InputStream is2 = Thread.currentThread().getContextClassLoader().getResourceAsStream(XSD_RESOURCE2);
controlSchemas.add(is);
controlSchemas.add(is2);
super.testSchemaGen(controlSchemas);
}
}<|fim▁end|> | setClasses(new Class[]{Person.class});
} |
<|file_name|>equal_test.go<|end_file_name|><|fim▁begin|>package simple
<|fim▁hole|> "testing"
)
func TestEqualShouldFail(t *testing.T) {
a := 1
b := 1
shouldNotBe := false
if real := equal(a, b); real == shouldNotBe {
t.Errorf("equal(%d, %d) should not be %v, but is:%v\n", a, b, shouldNotBe, real)
}
}<|fim▁end|> | import ( |
<|file_name|>data2db.py<|end_file_name|><|fim▁begin|>import os
from library.connecter.ansible.yaml import Yaml_Base
class Data_DB(Yaml_Base):
def router(self, content, name, yaml_tpye='main', file_type='tasks', preserve=True, together=False, describe=''):
'''
Check that the yaml data is syntactically valid; if it contains include and/or roles, these are also stored in the backend database
:parameters
content: the yaml content
name: name under which the yaml content is written to the database
preserve: whether to write the data to the database
together: whether to also return the content of every file under this main
describe: description stored alongside the yaml content
zhname: short (Chinese) display name stored alongside the yaml content
:return
a tuple whose first element is the outcome:
True with the file content (a dict) on success,
False with the failure reason on failure
'''
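# Usage sketch (illustrative name and content; assumes an instance wired
# to the backend database):
#   ok, data = data_db.router(yaml_text, 'site_main', yaml_tpye='main',
#                             preserve=True, together=True)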
if yaml_tpye in ('full_roles' , 'main') :
result = self.main(content, name, preserve=preserve, together=together, describe=describe)
elif yaml_tpye == 'include' :
result = self.include(content, name, file_type=file_type, preserve=preserve, describe=describe)
elif yaml_tpye == 'roles' :
result = self.roles(content, name, preserve=preserve, together=together, describe=describe)
else :
self.logger.error('动作:检测yaml数据的语法是否正确并将把这些存储在后端数据库中,执行结果:失败,原因:参数yaml_data' + yaml_tpye + '不是接受值,只能接受full_roles、main、include、roles')
return (False, '参数yaml_data' + yaml_tpye + '不是接受值,只能接受full_roles、main、include、roles')
return result
def main(self, content, name, preserve=True, together=False, describe=''):
'''
Check that the main file is syntactically valid; any include and/or roles it references are assumed to already be stored in the backend database
:parameters
content: the yaml content
name: name under which the yaml content is written to the database
preserve: whether to write the data to the database
together: whether to also return the content of every file under this main
describe: description stored alongside the yaml content
zhname: short (Chinese) display name stored alongside the yaml content
:return
a tuple whose first element is the outcome:
True with the file content (a dict) on success,
False with the failure reason on failure
'''
result = self.yaml_loader(content, data_type='data')
if result[0] :
(content, yaml_data) = result[2:]
else :
self.logger.error('检测yaml数据名为' + name + '类型为full_roles或者main的语法失败,转化成yaml数据时失败,原因:' + result[1])
return (False, '转化成yaml数据时失败,' + result[1])
result = self.check_main(yaml_data)
if result[0] :
(roles_list, includefile_dict) = result[1:]
else :<|fim▁hole|> return (False, '未通过yaml语法检测,' + result[1])
include_content_dict = {}
roles_content_dict = {}
for file in includefile_dict :
result = self.read2db(file, word_field='name')
if not result[0] :
self.logger.error('检测yaml数据名为' + name + '类型为full_roles或者main的语法失败,名为' + file + '的include查询出错,原因:' + result[1])
return (False, '名为' + file + '的include查询出错,' + result[1])
else :
try :
include_content = result[1]['content']
include_content_dict.update({file:include_content})
except :
self.logger.error('检测yaml数据名为' + name + '类型为full_roles或者main的语法失败,名为' + file + '的include查询出错,原因:查询结果不含content字段')
return (False, '名为' + file + '的include查询出错,查询结果不含content字段')
for roles in roles_list :
result = self.read2db(roles, word_field='name')
if result[0] :
try :
content_dict = result[1]['content']
if 'include' in content_dict :
include_content.update(content_dict['include'])
roles_content_dict.update({roles:content_dict['roles']})
except :
self.logger.error('检测yaml数据名为' + name + '类型为full_roles或者main的语法失败,名为' + roles + '的roles查询出错,查询结果不含content字段')
return (False, '名为' + roles + '的roles查询出错,查询结果不含content字段')
else :
return (False, '名为' + roles + '的roles查询出错,' + result[1])
data = {
'main' : content,
'include': include_content_dict,
'roles': roles_content_dict,
}
if preserve :
result = self.write2db(name, data, 'main', describe=describe)
if not result[0] :
self.logger.error('检测yaml数据名为' + name + '类型为full_roles或者main的语法失败,通过yaml语法检测,但无法写入数据库,原因:' + result[1])
return (False, '通过yaml语法检测,但无法写入数据库' + result[1])
self.logger.info('检测yaml数据名为' + name + '类型为full_roles或者main语法成功')
if together :
return (True, data)
else :
return (True, content)
def include(self, content, name, file_type='main', preserve=True, describe=''):
'''
Check that the include file is syntactically valid; any include it references is assumed to already be stored in the backend database
:parameters
content: the yaml content
name: name under which the yaml content is written to the database
preserve: whether to write the data to the database
file_type: the file type
describe: description stored alongside the yaml content
zhname: short (Chinese) display name stored alongside the yaml content
:return
a tuple whose first element is the outcome:
True with the file content (a dict) on success,
False with the failure reason on failure
'''
result = self.yaml_loader(content, data_type='data')
if result[0] :
(content, yaml_data) = result[2:]
else :
self.logger.error('检测yaml数据名为' + name + '类型为include的语法失败,转化成yaml数据时失败,原因:' + result[1])
return (False, '转化成yaml数据时失败,' + result[1])
result = self.check_include(yaml_data, file_type=file_type)
if not result[0] :
self.logger.error('检测yaml数据名为' + name + '类型为include的语法失败,未通过yaml语法检测,原因:' + result[1])
return (False, '未通过yaml语法检测,' + result[1])
if preserve :
result = self.write2db(name, content, 'include', describe=describe)
if not result[0] :
self.logger.error('检测yaml数据名为' + name + '类型为include的语法失败,但无法写入数据库,原因:' + result[1])
return (False, '通过yaml语法检测,但无法写入数据库' + result[1])
self.logger.info('检测yaml数据名为' + name + '类型为include语法成功')
return (True, content)
def roles(self, content, name, preserve=True, together=False, describe=''):
'''
Check that the roles content is syntactically valid; any include and/or roles it references are assumed to already be stored in the backend database
:parameters
content: the yaml content
name: name under which the yaml content is written to the database
preserve: whether to write the data to the database
together: whether to also return the content of every file under this roles
describe: description stored alongside the yaml content
zhname: short (Chinese) display name stored alongside the yaml content
:return
a tuple whose first element is the outcome:
True with the file content (a dict) on success,
False with the failure reason on failure
'''
content_dict = {}
result = self._isrolesname(name)
if not result :
self.logger.error('检测yaml数据名为' + name + '类型为roles的语法失败,未通过语法检测,原因:roles名不符合本系统要求的,注:虽然原生ansible支持这样写')
return (False, '未通过yaml语法检测,roles名不符合本系统要求的,注:虽然原生ansible支持这样写')
if not isinstance(content, dict) :
self.logger.error('检测yaml数据名为' + name + '类型为roles的语法失败,未通过语法检测,原因:参数content必须是字典格式')
self.logger.error('roles名为' + str(name) + '未通过语法检测,原因:参数content必须是字典格式')
return (False, '未通过yaml语法检测,参数content必须是字典格式')
result = self.check_roles(content)
include_content_dict = {}
if result[0] :
includefile_dict = result[1]
for file in includefile_dict:
result = self.read2db(file, word_field='name')
if not result[0] :
self.logger.error('检测yaml数据名为' + name + '类型为roles的语法失败,未通过语法检测,原因:' + 'include名为' + file + '的include查询出错,' + result[1])
return (False, '未通过yaml语法检测,名为' + file + '的include查询出错,' + result[1])
else :
try :
include_content = result[1]['content']
include_content_dict.update({file:include_content})
except :
self.logger.error('检测yaml数据名为' + name + '类型为roles的语法失败,未通过语法检测,原因:' + '名为' + file + '的include查询出错,查询结果不含content关键字段')
return (False, '未通过yaml语法检测,名为' + file + '的include查询出错,查询结果不含content关键字段')
else :
self.logger.error('检测yaml数据名为' + name + '类型为roles的语法失败,未通过yaml语法检测,语法错误,原因:' + result[1])
return (False, '未通过yaml语法检测,语法错误,' + result[1])
if 'templates' in content :
temp_content = content['templates']
if not isinstance(temp_content, dict) :
self.logger.error('检测yaml数据名为' + name + '类型为roles的语法失败,未通过yaml语法检测,templates查询错误,查询结果的数据类型不为字典')
return (False, '未通过yaml语法检测,templates查询错误,查询结果的数据类型不为字典')
content_dict['templates'] = {}
for temp_file , tempfile_content in temp_content.items() :
temp_file = os.path.basename(temp_file)
content_dict['templates'][temp_file] = tempfile_content
data = {
'main' : {},
'include': include_content_dict,
'roles': content_dict,
}
if preserve :
result = self.write2db(name, data, 'roles', describe=describe)
if not result[0] :
self.logger.error('检测yaml数据名为' + name + '类型为roles的语法失败,通过yaml语法检测,无法写入数据库,' + result[1])
return (False, '通过yaml语法检测,无法写入数据库,' + result[1])
self.logger.error('检测yaml数据名为' + name + '类型为roles的语法成功')
if together :
return (True, content_dict, include_content)
else :
return (True, {}, {})<|fim▁end|> | self.logger.error('检测yaml数据名为' + name + '类型为full_roles或者main的语法失败,未通过yaml语法检测,原因:' + result[1]) |
<|file_name|>azure_dns_zone.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Automatic provisioning of Azure DNS zones.
import os
import azure
import json
from requests import Request
try:
from urllib import quote
except:
from urllib.parse import quote
from nixops.util import attr_property
from nixops.azure_common import ResourceDefinition, ResourceState, ResId
from azure.common import AzureHttpError
from azure.mgmt.network import *
class AzureDNSZoneDefinition(ResourceDefinition):<|fim▁hole|>
@classmethod
def get_type(cls):
return "azure-dns-zone"
@classmethod
def get_resource_type(cls):
return "azureDNSZones"
def __init__(self, xml):
ResourceDefinition.__init__(self, xml)
self.dns_zone_name = self.get_option_value(xml, 'name', str)
self.copy_option(xml, 'resourceGroup', 'resource')
self.copy_tags(xml)
def show_type(self):
return self.get_type()
class AzureDNSZoneState(ResourceState):
"""State of an Azure DNS Zone"""
dns_zone_name = attr_property("azure.name", None)
resource_group = attr_property("azure.resourceGroup", None)
tags = attr_property("azure.tags", {}, 'json')
@classmethod
def get_type(cls):
return "azure-dns-zone"
@property
def resource_id(self):
return self.dns_zone_name
@property
def full_name(self):
return "Azure DNS zone '{0}'".format(self.resource_id)
def is_settled(self, resource):
return True
def get_resource_url(self):
return ("https://management.azure.com/subscriptions/{0}"
"/resourceGroups/{1}/providers/Microsoft.Network"
"/dnsZones/{2}?api-version=2015-05-04-preview"
.format(quote(self.subscription_id),
quote(self.resource_group),
quote(self.dns_zone_name)))
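# The resulting URL looks like (illustrative values):
#   https://management.azure.com/subscriptions/<subscription-id>/resourceGroups/
#   <group>/providers/Microsoft.Network/dnsZones/example.com?api-version=2015-05-04-preview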
def mk_request(self, method):
http_request = Request()
http_request.url = self.get_resource_url()
http_request.method = method
http_request.headers['Content-Type'] = 'application/json'
return http_request
def get_resource(self):
response = self.nrpc().send_request(self.mk_request('GET'))
if response.status_code == 200:
return json.loads(response.content.decode())
else:
return None
def destroy_resource(self):
response = self.nrpc().send_request(self.mk_request('DELETE'))
if response.status_code != 200:
raise AzureHttpError(response.content, response.status_code)
defn_properties = [ 'tags' ]
def _create_or_update(self, defn):
info = {
"location": "global",
"tags": defn.tags,
"properties": { }
}
http_request = self.mk_request('PUT')
http_request.data = json.dumps(info)
http_request.headers['Content-Length'] = len(http_request.data)
response = self.nrpc().send_request(http_request)
if response.status_code not in [200, 201]:
raise AzureHttpError(response.content, response.status_code)
self.state = self.UP
self.copy_properties(defn)
def create(self, defn, check, allow_reboot, allow_recreate):
self.no_subscription_id_change(defn)
self.no_property_change(defn, 'resource_group')
self.copy_mgmt_credentials(defn)
self.dns_zone_name = defn.dns_zone_name
self.resource_group = defn.resource_group
if check:
zone = self.get_settled_resource()
if not zone:
self.warn_missing_resource()
elif self.state == self.UP:
self.handle_changed_property('tags', zone['tags'])
else:
self.warn_not_supposed_to_exist()
self.confirm_destroy()
if self.state != self.UP:
if self.get_settled_resource():
raise Exception("tried creating a DNS zone that already exists; "
"please run 'deploy --check' to fix this")
self.log("creating {0}...".format(self.full_name))
self._create_or_update(defn)
if self.properties_changed(defn):
self.log("updating properties of {0}...".format(self.full_name))
self.get_settled_resource_assert_exists()
self._create_or_update(defn)
def create_after(self, resources, defn):
from nixops.resources.azure_resource_group import AzureResourceGroupState
return {r for r in resources
if isinstance(r, AzureResourceGroupState) }<|fim▁end|> | """Definition of an Azure DNS Zone""" |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub use self::os::{FNM_PERIOD};
pub use self::os::{FNM_NOESCAPE};
use {NTStr, int_t, char_t};
#[cfg(target_os = "linux")]
#[path = "linux/mod.rs"]
mod os;
pub fn fnmatch<T: NTStr, U: NTStr>(pattern: &T, string: &U, flags: int_t) -> int_t {
extern {
fn fnmatch(pattern: *const char_t, name: *const char_t, flags: int_t) -> int_t;
}
unsafe { fnmatch(pattern.as_ptr(), string.as_ptr(), flags) }
}
#[cfg(test)]
mod tests {
use {ToNTStr};
#[test]
fn test() {
let pat = "abc*123".to_nt_str();
let stn = "abcTE/ST123".to_nt_str();
let pat2 = "*123".to_nt_str();
let stn2 = ".123".to_nt_str();
assert_eq!(super::fnmatch(&pat, &stn, 0), 0);
assert_eq!(super::fnmatch(&pat, &stn, super::FNM_PATHNAME), super::FNM_NOMATCH);
assert_eq!(super::fnmatch(&pat2, &stn2, super::FNM_PATHNAME), 0);
assert_eq!(super::fnmatch(&pat, &stn, super::FNM_PERIOD), 0);
assert_eq!(super::fnmatch(&pat2, &stn2, super::FNM_PERIOD), super::FNM_NOMATCH);
}
}<|fim▁end|> | pub use self::os::{FNM_NOMATCH};
pub use self::os::{FNM_PATHNAME}; |
<|file_name|>mdn_experiment_one_ahead.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division
import cPickle
import gzip
import os
import sys
import timeit
import numpy
import theano
from theano import tensor
import mdn_one_ahead
# parameters
batch_size = 100
L1_reg=0.00
L2_reg=0.0001
n_epochs=200
learning_rate = 0.001
momentum = 0.9
sigma_in = 320
mixing_in = 320
n_components = 5
EPS = numpy.finfo(theano.config.floatX).eps
# load data
datasets = mdn_one_ahead.load_data()
train_set_x, train_set_y = datasets[0]
valid_set_x, valid_set_y = datasets[1]
test_set_x, test_set_y = datasets[2]
X = train_set_x.get_value(borrow=True)[:20].copy()
Y = train_set_y.get_value(borrow=True)[:20].copy()
n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] // batch_size
n_test_batches = test_set_x.get_value(borrow=True).shape[0] // batch_size
print('... building the model')
# allocate symbolic variables for the data
index = tensor.lscalar() # index to a [mini]batch
x = tensor.matrix('x') # the data is presented as rasterized images
y = tensor.vector('y') # the labels are presented as a 1D vector of targets
rng = numpy.random.RandomState(1234)
classifier = mdn_one_ahead.MLP(
rng=rng,
input=x,
n_in=320,
n_hiddens=[300, 300, 300, 300]
)
cost = (
classifier.negative_log_likelihood(y)
+ L2_reg * classifier.L2_sqr
)
test_model = theano.function(
inputs=[index],
outputs=classifier.errors(y),
givens={
x: test_set_x[index * batch_size:(index + 1) * batch_size],
y: test_set_y[index * batch_size:(index + 1) * batch_size]
}
)
validate_model = theano.function(
inputs=[index],
outputs=classifier.errors(y),
givens={
x: valid_set_x[index * batch_size:(index + 1) * batch_size],
y: valid_set_y[index * batch_size:(index + 1) * batch_size]
}
)
gparams = [tensor.grad(cost, param) for param in classifier.params]
updates = [
(param, param - learning_rate * gparam)
for param, gparam in zip(classifier.params, gparams)
]
model_gradients = theano.function(
inputs = [x, y], outputs=gparams)
train_gradients = theano.function(
inputs=[index],
outputs=gparams,
givens={
x: train_set_x[index * batch_size: (index + 1) * batch_size],
y: train_set_y[index * batch_size: (index + 1) * batch_size]
}
)
train_model = theano.function(
inputs=[index],
outputs=cost,
updates=updates,
givens={
x: train_set_x[index * batch_size: (index + 1) * batch_size],
y: train_set_y[index * batch_size: (index + 1) * batch_size]
}
)
print('... training')<|fim▁hole|># early-stopping parameters
patience = 10000 # look as this many examples regardless
patience_increase = 2 # wait this much longer when a new best is
# found
improvement_threshold = 0.99995 # a relative improvement of this much is
# considered significant
validation_frequency = min(n_train_batches, patience // 2)
# go through this many
# minibatches before checking the network
# on the validation set; in this case we
# check every epoch
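# worked example (values from above): with patience = 10000 and a few
# hundred training batches, validation_frequency == n_train_batches,
# i.e. the network is validated once per epoch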
best_validation_loss = numpy.inf
best_iter = 0
test_score = 0.
start_time = timeit.default_timer()
epoch = 0
done_looping = False
while (epoch < n_epochs) and (not done_looping):
epoch = epoch + 1
for minibatch_index in xrange(n_train_batches):
gs = train_gradients(minibatch_index)
if any(numpy.any(numpy.isnan(g)) for g in gs):
import pdb; pdb.set_trace()
minibatch_avg_cost = train_model(minibatch_index)
# iteration number
iter = (epoch - 1) * n_train_batches + minibatch_index
if (iter + 1) % validation_frequency == 0:
# compute zero-one loss on validation set
validation_losses = [validate_model(i) for i
in xrange(n_valid_batches)]
this_validation_loss = numpy.mean(validation_losses)
print(
'epoch %i, minibatch %i/%i, validation error %f %%' %
(
epoch,
minibatch_index + 1,
n_train_batches,
this_validation_loss * 100.
)
)
# if we got the best validation score until now
if this_validation_loss < best_validation_loss:
#improve patience if loss improvement is good enough
if (
this_validation_loss < best_validation_loss *
improvement_threshold
):
patience = max(patience, iter * patience_increase)
best_validation_loss = this_validation_loss
best_iter = iter
# test it on the test set
test_losses = [test_model(i) for i
in xrange(n_test_batches)]
test_score = numpy.mean(test_losses)
print((' epoch %i, minibatch %i/%i, test error of '
'best model %f %%') %
(epoch, minibatch_index + 1, n_train_batches,
test_score * 100.))
if patience <= iter:
done_looping = True
break
end_time = timeit.default_timer()
print(('Optimization complete. Best validation score of %f %% '
'obtained at iteration %i, with test performance %f %%') %
(best_validation_loss * 100., best_iter + 1, test_score * 100.))
print('The code for file ' +
os.path.split(__file__)[1] +
' ran for %.2fm' % ((end_time - start_time) / 60.), file=sys.stderr)
# l = 7.752, tanh, 3 components, 20 hid, 1 hidlayer,
# l = 5.057, relu, 3 components, (100, 100) hid
# l = 4.865, relu, 5 components, (150, 150, 150) hid<|fim▁end|> | |
<|file_name|>scaling_options.py<|end_file_name|><|fim▁begin|>"""
Phil scope of options for scaling.
"""
from __future__ import annotations
import iotbx.phil
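# Usage sketch (standard iotbx.phil fetch/extract pattern; the override
# values below are illustrative):
#   user_phil = iotbx.phil.parse("anomalous=True\ncut_data.d_min=1.8")
#   params = phil_scope.fetch(source=user_phil).extract()
#   params.anomalous  # -> True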
phil_scope = iotbx.phil.parse(
"""
anomalous = False
.type = bool
.help = "Separate anomalous pairs in scaling and error model optimisation."
.expert_level=0
overwrite_existing_models = False
.type = bool
.help = "If True, create new scaling models for all datasets"
.expert_level = 0
reflection_selection {
method = *quasi_random intensity_ranges use_all random
.type = choice
.help = "Method to use when choosing a reflection subset for scaling model"
"minimisation."
"The quasi_random option randomly selects reflections groups"
"within a dataset, and also selects groups which have good"
"connectedness across datasets for multi-dataset cases. The random"
"option selects reflection groups randomly for both single"
"and multi dataset scaling, so for a single dataset"
"quasi_random == random."
"The intensity_ranges option uses the E2_range, Isigma_range and"
"d_range options to the subset of reflections"
"The use_all option uses all suitable reflections, which may be"
"slow for large datasets."
random {
multi_dataset {
Isigma_cutoff = 1.0
.type = float
.help = "Minimum average I/sigma of reflection groups to use when"
"selecting random reflections for minimisation."
}
min_groups = 2000
.type = int
.help = "The minimum number of symmetry groups to use during"
"minimisation."
.expert_level=1
min_reflections = 50000
.type = int
.help = "The minimum number of reflections to use during minimisation."
.expert_level=1
}
best_unit_cell = None
.type = unit_cell
.help = "Best unit cell value, to use when performing resolution cutting"
"and merging statistics. If None, the median cell will be used."
E2_range = 0.8, 5.0
.type = floats(size=2)
.help = "Minimum and maximum normalised E^2 value to used to select a"
"subset of reflections for minimisation."
.expert_level = 1
Isigma_range = -5.0, 0.0
.type = floats(size=2)
.help = "Minimum and maximum I/sigma values used to select a subset of"
"reflections for minimisation. A value of 0.0 for the maximum"
"indicates that no upper limit should be applied."
.expert_level = 1
d_range = None
.type = floats(size=2)
.help = "Minimum and maximum d-values used to select a subset of"
"reflections for minimisation."
.expert_level = 1
min_partiality = 0.95
.type = float
.help = "Minimum partiality to use when selecting reflections to use"
"to determine the scaling model and error model."
.expert_level = 2
intensity_choice = profile sum *combine
.alias = intensity
.type = choice
.help = "Option to choose from profile fitted or summation intensities, or
an optimised combination of profile/sum."
.expert_level = 1
combine.Imid = None
.type = floats
.help = "A list of values to try for the midpoint, for profile/sum combination
calculation: the value with the lowest Rmeas will be chosen.
0 and 1 are special values that can be supplied to include profile
and sum respectively in the comparison."
.expert_level = 2
combine.joint_analysis = True
.type = bool
.help = "Option of whether to do intensity combination optimisation
separately (i.e. different Imid per dataset) or joint for
multiple datasets"
.expert_level = 2
}
weighting {
weighting_scheme = *invvar
.type = choice
.help = "Weighting scheme used during Ih calculation. Weighting schemes
other than invvar and unity may trigger iterative reweighting
during minimisation, which may be unstable for certain minimisation
engines (LBFGS)."
.expert_level = 2
error_model {
include scope dials.algorithms.scaling.error_model.error_model.phil_scope
}
}
cut_data {
d_min = None
.type = float
.help = "Option to apply a high resolution cutoff for the dataset (i.e.
the chosen reflections have d > d_min)."
.expert_level = 1
d_max = None
.type = float
.help = "Option to apply a low resolution cutoff for the dataset (i.e.
the chosen reflections have d < d_max)."
.expert_level = 1
partiality_cutoff = 0.4
.type = float
.help = "Value below which reflections are removed from the dataset due
to low partiality."
.expert_level = 1
min_isigi = -5
.type = float
.help = "Value below which reflections are removed from the dataset due"
"to low I/sigI in either profile or summation intensity estimates"
.expert_level = 1
}
scaling_options {
check_consistent_indexing = False
.type = bool
.help = "If True, run dials.cosym on all data in the data preparation"
"step, to ensure consistent indexing."
target_cycle = True
.type = bool
.help = "Option to turn of initial round of targeted scaling
if some datasets are already scaled."
.expert_level = 2
only_target = False
.type = bool
.help = "Option to only do targeted scaling if some datasets
are already scaled."
.expert_level = 2
only_save_targeted = True
.type = bool
.help = "If only_target is true, this option to change whether the dataset
that is being scaled will be saved on its own, or combined with the
already scaled dataset."
.expert_level = 2
target_model = None
.type = path
.help = "Path to cif file to use to calculate target intensities for
scaling."
.expert_level = 2
target_mtz = None
.type = path
.help = "Path to merged mtz file to use as a target for scaling."
.expert_level = 2
nproc = 1
.type = int(value_min=1)
.help = "Number of blocks to divide the data into for minimisation.
This also sets the number of processes to use if the option is
available."
.expert_level = 2
use_free_set = False
.type = bool
.help = "Option to use a free set during scaling to check for overbiasing.
    This free set is used to calculate an RMSD, which is shown alongside
the 'working' RMSD during refinement, but is not currently used
to terminate refinement or make any choices on the model."
.expert_level = 2
free_set_percentage = 10.0
.type = float
.help = "Percentage of symmetry equivalent groups to use for the free set,
if use_free_set is True."
.expert_level = 2
free_set_offset = 0
.type = int
.help = "Offset for choosing unique groups for the free set from the whole
set of unique groups."
.expert_level = 2<|fim▁hole|> .type = bool
.help = "Option to turn off GN/LM refinement round used to determine
error estimates on scale factors."
.expert_level = 2
outlier_rejection = *standard simple
.type = choice
.help = "Choice of outlier rejection routine. Standard may take a
significant amount of time to run for large datasets or high
multiplicities, whereas simple should be quick for these datasets."
.expert_level = 1
outlier_zmax = 6.0
.type = float(value_min=3.0)
.help = "Cutoff z-score value for identifying outliers based on their
normalised deviation within the group of equivalent reflections"
.expert_level = 1
emax = 10
.type = float(value_min = 0)
.help = "Reject reflections with normalised intensities E^2 > emax^2"
.expert_level = 2
}
"""
)<|fim▁end|> | full_matrix = True |
<|file_name|>x509TestUtilities.py<|end_file_name|><|fim▁begin|>""" Configuration and utilities for all the X509 unit tests """
import os
import sys
from datetime import datetime
from pytest import fixture
# We use certificates stored in the same folder as this test file
CERTDIR = os.path.join(os.path.dirname(__file__), "certs")
HOSTCERT = os.path.join(CERTDIR, "host/hostcert.pem")
HOSTKEY = os.path.join(CERTDIR, "host/hostkey.pem")
USERCERT = os.path.join(CERTDIR, "user/usercert.pem")
USERKEY = os.path.join(CERTDIR, "user/userkey.pem")
VOMSPROXY = os.path.join(CERTDIR, "voms/proxy.pem")
ENCRYPTEDKEY = os.path.join(CERTDIR, "key/encrypted_key_pass_0000.pem")
ENCRYPTEDKEYPASS = "0000"
CERTS = (HOSTCERT, USERCERT)
CERTKEYS = (HOSTKEY, USERKEY)
CERTCONTENTS = {
"HOSTCERTCONTENT": """-----BEGIN CERTIFICATE-----
MIIGQTCCBCmgAwIBAgICEAIwDQYJKoZIhvcNAQELBQAwVDEYMBYGA1UECgwPRElS
QUMgQ29tcHV0aW5nMTgwNgYDVQQDDC9ESVJBQyBDb21wdXRpbmcgU2lnbmluZyBD
ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xODA4MjIwOTE4MTdaFw0zNzEwMjEw
OTE4MTdaMDkxGDAWBgNVBAoMD0RpcmFjIENvbXB1dGluZzENMAsGA1UECgwEQ0VS
TjEOMAwGA1UEAwwFVk9Cb3gwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
AQDjV5Y6AQI61nZHy6hjr1MziFFeh/z1DdAgkPfiUnHQLxWtvXGcc4sX/tBcD6tv
NKTzJCwyFVAML0WNTD/w480TUmGILlRtg+17qfSWfeCvDygSbGNINX+la0auEqY7
u5oXtwhFAEnqBe+6pzvgfTpzh8eOtBSrqgJUwMtaI81P6LQn5urIQbJ7hg9HKh9d
AX+mR/mwxDTPpzTP6YT5oiqXE5hRaPAO6ibeGGduyphFiAwVzAV2B5UfB4tL8C/S
eyPX7+70W+paHD7ffJaHLKFQjdA9q7EHRGbm068+aPRmNCKtl1ptgbYquVmp0DiO
5qOSq+LU2v8W5/y8W75DajyqGbJuMdo4zMjCvOafOvHHabOfYrOHcI6MNJx2Z6v/
G0C7mMVwcBPcuLkqtia2uPnzwDcwxVL3wK/uJiHHw3T6odmOE/6KxYM+SJf9weBf
RFW/fCfkWYfEA1FJhncfDZPzwiJnQJTrRls367rwnNLH0VkvxDLOHY7Lhl+j1vwd
dnjONYrKVMttf1IfFN5QdMX2rRrkLX2jZXXaJ4IBeVBWWPVmWj8e892dh2FpzZV8
8XE72y17YRx+uX7x/76p3J9H3vEI0Lj/53q3lxH/W3VRGnbac7tT7kvVoqeUaXc4
AQiIF2tlR2dtjHbOAA3Sl7KCxJBvad8yq7YSm2I58sQN1wIDAQABo4IBNjCCATIw
CQYDVR0TBAIwADAzBglghkgBhvhCAQ0EJhYkT3BlblNTTCBHZW5lcmF0ZWQgU2Vy
dmVyIENlcnRpZmljYXRlMB0GA1UdDgQWBBTLQlHIlgopkniwA7yxCpuQ68gYgTCB
hAYDVR0jBH0we4AUBMIXrzhk4Ia/H8kAbpdvG7tOhx+hWKRWMFQxGDAWBgNVBAoM
D0RJUkFDIENvbXB1dGluZzE4MDYGA1UEAwwvRElSQUMgQ29tcHV0aW5nIFNpZ25p
bmcgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHmCCQCsvNC5K0fF2DAOBgNVHQ8BAf8E
BAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMCMBsGA1UdEQQUMBKC
BVZPQm94gglsb2NhbGhvc3QwDQYJKoZIhvcNAQELBQADggIBAB38IzhseSjULM80
ZdiG6GlYaOiBGIRglBJJqDeslhhei9upgn35yz64RqMoM4bFWSae0gFCMGNAdV5D
IXUZiTfZIRKqN35zOEZvbAU/t5Hi70ted3DPOAXM4XaghnFGg26ZTB86Z6Dph33Q
JLqNkqU8oaOfl1ET4TDoimpolQI0M82datPlhDe2EkvPjJaclNXKGZ0kX5gquZKK
pTYe+cj/4E7AG9mAQTB9M6XXpx5i/E+NLkGLjCm05QZdblhLmJ4Mjj2iCGMOL/z2
/bhncJYVyceAAFG/fTb2Yk6uXo/yDakq3SfyrOpSy5/bcy5YVcaGOlah74ppB26l
bO/cJWAOcTm6zroLzQteorJDif96EsSJj5fxGKDnSRcg+K+2sA3c+G/395FHn1qK
RRlcNm/yIWySrkUjtbSkZHChSU5vfjwlIq5acV/XtkXJpY7L4scQ0AeFDKdIhbXx
8ajVwBrU/GzyMmw7+p0PVvzNFZSn006D6zI6DRwUcPp/NRNi1oxrnzv1XVZ/MtiW
FNZgz+mnqpakOUAsCGt9YiElVFanmS7iMkqhobt54UlFXhfd+FQyRI2kSrW8kL8e
Is33dZgJZTT/KSsG8e883ISBb5zDeN47pxjU5pF/uhk2/eBY1EwEevpYdQPokY0R
Hia1xkpBKOPRY0BrSGCdEUT5+ict
-----END CERTIFICATE-----
""",
"USERCERTCONTENT": """-----BEGIN CERTIFICATE-----
MIIFszCCA5ugAwIBAgICEAEwDQYJKoZIhvcNAQELBQAwVDEYMBYGA1UECgwPRElS
QUMgQ29tcHV0aW5nMTgwNgYDVQQDDC9ESVJBQyBDb21wdXRpbmcgU2lnbmluZyBD
ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xODA4MjIwOTE1MTRaFw0zNzEwMjEw
OTE1MTRaMDoxGDAWBgNVBAoMD0RpcmFjIENvbXB1dGluZzENMAsGA1UECgwEQ0VS
TjEPMA0GA1UEAwwGTXJVc2VyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC
AgEAqfZnf9wK+a+qx8kfRlIaehzD2ix+6TKZJ+w9aBlh11b5cPfmIMOmTEXe8rD5
G6WKofOKNBiQ4vX2tEv7psYpetMwQ9R5ks67RN/YGFkzEEO7jzYFtWsS2jbsdHVf
/2wejICPhABYP1sGaQbRWtcp690fZ97cM1c7AuN/fFZ9m3mAoop5Bc6p1hqWSXyZ
ce/0J+/SjtrLeWY8yvMx4ztR+8wQG+hXEAifnT77zwxeH7pPkwj3IFpRozimTmaP
g0wpwUJXUd8LpPnF6pBeZPMybJ4b4TfoddCXSF/wT7q9UfTKptcoLayFCLp+mNJI
KkKUzm/1CBMFkhenzSP7uhjhu3Swr6SXlz1pEW7B9FFyyghLd7FMEuDIAu8ULqLA
ATFR95p5ec3GbObV4OX4G1Up9f6vDle+qhwkQ81uWxebsaVWveUo38Hsl37dqxB9
IxNOC/nTQu58l3KnLodMOweCmDnzHFrC5V96pYrKOaFj2Ijg6TO5maQHo0hfwiAC
FNIvYDb8AxNmDzOVAAZkd/Y0nbYeaO6/eNJzRiwJGKZMnXC3UpzRmIBenDTVMCjE
O1ZjsXe0hwjS0/sRytZHN1jWztnMuYftu3BLUQJQL0cmkWvPGjXKBd9kHhuYjtZu
+SEyLni+6VXJJCyR7/2kmlkq9UimB+RLA+EemW7Ik0oDI48CAwEAAaOBqDCBpTAJ
BgNVHRMEAjAAMB0GA1UdDgQWBBRKwv3rLMXxY6XyF2JDa52CbJoTJDAfBgNVHSME
GDAWgBQEwhevOGTghr8fyQBul28bu06HHzAOBgNVHQ8BAf8EBAMCBeAwEwYDVR0l
BAwwCgYIKwYBBQUHAwIwMwYJYIZIAYb4QgENBCYWJE9wZW5TU0wgR2VuZXJhdGVk
IENsaWVudCBDZXJ0aWZpY2F0ZTANBgkqhkiG9w0BAQsFAAOCAgEAOe2uEU17UWOU
iDsZWLDVYC820sXcC19ijco9zNDVfCkKzPMKKPlEA56dY/Kt0cWAtiklPOiWEtKy
bsM7ayZ2FEiPdBSd9P8qHYFMlbsXcyib5QXpdHebcipu9ORzp+hlFvTA1fFErDn+
nPW+xTCp19tdlrNywxDWXbB4KJZ/VxSVuT4lMZYn6wUOMFN/xj41evGqqQfJm+yT
feW3n2ClDCDbk3br/3KY8eCPLUllZfdJgnN24SWrS4S0tBuOZt+hTt7LISPSPIix
xXNsxLCXq7KsElIlzPPbMsdqDJ/lhDUoHPZZu9chi4t8F5JGkzcn1MOSmn5d74kx
SYD1QTgvX77t0A1E7G55NYiZJTSjoaIQiQwBNEak7Oz9QCh+5qHwR/Np4vo4+d4p
yuWxpzHHBuQrV6dDZ0mONBWx6gxpkFN42mt8EUd26faG7kebbeVoUt1VBTcp9HHH
DKQq9loodgGokarycFeJ8l+ZMM93YoPPVlsijG6Jmn+UrZNzwbi5JcE731qEurGY
U4kjpzpirauwCnOgSm7DwawNoilLFOSSh3/iZgDjMyhspGJ2FwXBlJm7wBWyS+0q
TnsekqTamuTDTAPJRhb2LPVFl0L8+frk1gkpw4KTCzGw4rKW++EUjS1i09sq2Dv6
/fW/ybqxpROqmyLHbqEExj0/hPxPKPw=
-----END CERTIFICATE-----
""",
}
# These are not just copy-pastes of the key file contents.
# The key files are RSA keys (PKCS1),
# whereas what PyGSI and M2Crypto will print is the PKCS8 format.
# To go from RSA to generic key:
# openssl pkcs8 -topk8 -nocrypt -in privkey.pem
# Look for 'BEGIN RSA PRIVATE KEY' in the link below
# https://tls.mbed.org/kb/cryptography/asn1-key-structures-in-der-and-pem
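# A quick, purely illustrative way to tell the two formats apart (not used by
# the tests themselves): a PKCS1 key starts with 'BEGIN RSA PRIVATE KEY',
# while the PKCS8 dumps below start with 'BEGIN PRIVATE KEY':
#
#   with open(HOSTKEY) as keyFile:
#       isPKCS1 = 'BEGIN RSA PRIVATE KEY' in keyFile.readline()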
KEYCONTENTS_PKCS8 = {
HOSTKEY: """-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQDjV5Y6AQI61nZH
y6hjr1MziFFeh/z1DdAgkPfiUnHQLxWtvXGcc4sX/tBcD6tvNKTzJCwyFVAML0WN
TD/w480TUmGILlRtg+17qfSWfeCvDygSbGNINX+la0auEqY7u5oXtwhFAEnqBe+6
pzvgfTpzh8eOtBSrqgJUwMtaI81P6LQn5urIQbJ7hg9HKh9dAX+mR/mwxDTPpzTP
6YT5oiqXE5hRaPAO6ibeGGduyphFiAwVzAV2B5UfB4tL8C/SeyPX7+70W+paHD7f
fJaHLKFQjdA9q7EHRGbm068+aPRmNCKtl1ptgbYquVmp0DiO5qOSq+LU2v8W5/y8
W75DajyqGbJuMdo4zMjCvOafOvHHabOfYrOHcI6MNJx2Z6v/G0C7mMVwcBPcuLkq
tia2uPnzwDcwxVL3wK/uJiHHw3T6odmOE/6KxYM+SJf9weBfRFW/fCfkWYfEA1FJ
hncfDZPzwiJnQJTrRls367rwnNLH0VkvxDLOHY7Lhl+j1vwddnjONYrKVMttf1If
FN5QdMX2rRrkLX2jZXXaJ4IBeVBWWPVmWj8e892dh2FpzZV88XE72y17YRx+uX7x
/76p3J9H3vEI0Lj/53q3lxH/W3VRGnbac7tT7kvVoqeUaXc4AQiIF2tlR2dtjHbO
AA3Sl7KCxJBvad8yq7YSm2I58sQN1wIDAQABAoICAAtXAhpQlJDkw6+fG/4k76yB
XzWs6NQ8ZSZKtOKoJB8zSgyJh5I7PTPsNO5ypaV9ZcDvC/lPkNeawAhlRkc4xbDy
CgVl8jYoP39MofOjwcJZqjEJEQa4DG7u4+6o5XvTRsNqENKISiePNj8EOntfI7xB
iJW4q9NIPqeFml8brBERVXMsFIf6pvF8ZWSyWDAmc/ySWIUVtGCrQXohds2Q5jj0
9EMTTe4gheHMK9Sd7GyDdb7cl2Ukya5rjOozx97i343U3QF5WD44bHZvW37QnhdL
i5iX6NOo+M0IwBQH3jD+5r/r7cnKj5CgADX1Oez+2iflxQHDDrhQyA2JMftg4Dev
xus6PsNUcsafhIsXlLP1Zx6dq1u3sBUw1s1TMaSP8g611tyiwrNqiaCR+WAd705Q
EGWfp4ddRcuB2BvV6NDQb8Z+A9vTqmEW+yqQdtji9VlH0XcPEu8qwjeSw6IrE7UV
dW/6HWKfRLoV+kajZwPkHHfS97/3T4jWPt3dZrEyT3T3Zno9hLbNFUXfDvvAqjOP
PkOgSMjUl/92J7SOu/fiPHjl4klxmSrG0OE79CKUU3C7a8Id81AYFKgr+3XNUvwJ
ZgjvKsHXDkoka8/y1YYeMEmH7dD4y02hd055mYfTIvYWdDIfaQcxvnCPvV7HUhpb
JMzvx7hveyxsHpRMRgk5AoIBAQD/oed5cl5mwvt3QcenBhvwgzz2uOocs42MPzvp
77RCn5cur80pBTgez1GZFnWBZEu1ygwKsu7l9ES+szlAMs37yD0LbNALTVHNQdbH
KZ7TyzY1vFQXyw730BvyKGVLnRm+/wuWnJuSDPGomATOon+ILDK5ps1NiYLpvbBR
ogAdk+llpIk/sRuoTCVlY9BYfd/XSiHyUEtVzq6CtG75Gqq7/MGEKl1xVDyXip92
6+KNr2CN6+/0lwdVUVWKCJpjrD7Yk4BwOzeGKIIsdNIaG+fl5O9UNe/njb0+joM4
177Lf1oaaBjjHwpqi9q8B78ud0/Jl+xFGB1HOBrHV7n52w8zAoIBAQDjq0TtCByO
HBdwn7Q/6JLMCU475dTs0DKBhbPfyK9GD26BTFccMcp8OuRX4S62Gkvq47s9UKAW
3R4x0ZFAkIHo+kxt9H4Sw8PPWDlVSbb6qf+rOhSPlEeW8nf6BJHreqMaWxTnzr+j
cQRY4O9GvKv3Y/fWqOe/iToQKkhjtnmtGyRVdsRVKkUrW+Ly/oxaxxe9eXOkUTOd
4UXxxSMbic6GJ57HRAfDpNYrnhbIk6JXYeuuArJeFBFmJ8vd0Nwd99y/uQ19kaxb
/F0km2zLI+2S+1j6I6p1dA7G2oA+K54er4jgGAF6guq1F/SVPO545x41YPkxGXNF
qwEz5OCyy5bNAoIBADJoP5ewGLNUwXdbrj3eM4YyqsPP5MIyGbhNA8h2buowRAR9
wAvVrqJMqT9xsUwJdfBr3gICFJ+dkiy0dJaXLgz3CCqHk2KXJYk+8VYme94xlQf1
kfN7JAFztP8EPi0x1lDWQ/e3++lJyiE/kLsaSeGVLY90N8mRUxI6SFlgg3tRnlVf
o3y+tMBz+2/JxdydPZVbVeRNNv29mqXFZJiUTJRzG8mu/OwK+0O6nwU5MFxV98kk
fBWT7mtBdYeZeLAs19unAk2fL6yxsjGH+6IQXKL1iMfnNt5HEckTGwcLa+D+xMqu
OjIW/dvSphgrwuQrvLz4yys4vRU9F/K09sQxEQcCggEBAMxFtXg/mO9hAR8KDD5z
PJNZnhpcIum//DD+d9/IPotL+UiF6Hrhqd5BMPQwlSrK+Wbtoehn2Nvq1da5Q+x8
PDN/sOfPQPcxMxVtATQnCchqk31chWo2Du2+7Cslwo9X39Qb+OvsM0JAezgLymTb
kChOR+cQca8HP1OVvJHK/e11tun/wDTx0lIPBdgk0GX60LAusrWyLe/wWkONL+zb
frQcBHih75143rkQBT0+SaDBuSbOQJ/svZe9CUwiw/0XkbdsIFCUTePS0PexhLHX
sKf6YWE+cwkjcsa08e/WTu8VbGg04c68fD60Gb11iDpulEoskimdvjG6N0AKkhma
VdkCggEBAJC5Byfjk5SMFbH4uIP2yScQAJ3lwrNsxOgnnm6C5vANWqMEsXyH+Qcs
lawDdGUmb0E/7eaYoxgsEq8OUPluZNVFgA3O9iZfyF49G36PvKGRBHtHkiE9n13n
c85Ksre6haNHO4BboojNovPMF0bqvseAoWTPaCYjktBcqB1I8Y/EzApN+zuZQWCQ
vhBLq/cZi5jOwECbR2LMebth521/4C/j2E3Ssy+5uTMlDFQh0yYZnaS8OaecQ0Hc
qRk0GL7AI33fPBBPD7b/Ptc8HHeeB0F61vzIE2ZOJEwLDtHqQr5fZs7Qn9aiN7Nc
CrerHYr0zdgIXTt+xus9RGGmZi1mfjI=
-----END PRIVATE KEY-----
""",
USERKEY: """-----BEGIN PRIVATE KEY-----
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCp9md/3Ar5r6rH
yR9GUhp6HMPaLH7pMpkn7D1oGWHXVvlw9+Ygw6ZMRd7ysPkbpYqh84o0GJDi9fa0
S/umxil60zBD1HmSzrtE39gYWTMQQ7uPNgW1axLaNux0dV//bB6MgI+EAFg/WwZp
BtFa1ynr3R9n3twzVzsC4398Vn2beYCiinkFzqnWGpZJfJlx7/Qn79KO2st5ZjzK
8zHjO1H7zBAb6FcQCJ+dPvvPDF4fuk+TCPcgWlGjOKZOZo+DTCnBQldR3wuk+cXq
kF5k8zJsnhvhN+h10JdIX/BPur1R9Mqm1ygtrIUIun6Y0kgqQpTOb/UIEwWSF6fN
I/u6GOG7dLCvpJeXPWkRbsH0UXLKCEt3sUwS4MgC7xQuosABMVH3mnl5zcZs5tXg
5fgbVSn1/q8OV76qHCRDzW5bF5uxpVa95SjfweyXft2rEH0jE04L+dNC7nyXcqcu
h0w7B4KYOfMcWsLlX3qliso5oWPYiODpM7mZpAejSF/CIAIU0i9gNvwDE2YPM5UA
BmR39jSdth5o7r940nNGLAkYpkydcLdSnNGYgF6cNNUwKMQ7VmOxd7SHCNLT+xHK
1kc3WNbO2cy5h+27cEtRAlAvRyaRa88aNcoF32QeG5iO1m75ITIueL7pVckkLJHv
/aSaWSr1SKYH5EsD4R6ZbsiTSgMjjwIDAQABAoICAC4S8/+/QOJq8pryNJ41h6Pu
xFESmtzQsKAX9JWRu+pKU5iCO0pKf3xRvJyBySXrfGdmw+JXfn9oOhaqOm/9bCU1
tvHMWaColi+XltcS5zrTgbbS6D1D53psRTFU2E8/mhBwkXcxOLsEC/rQtFQx29Vq
vibETWFFlmO0FE06jRZmm650Z1ZhrbyyvGbzdg1jBQcGhkffnCUux/AkeTOmUxU1
PnCyTVe1Xr+b4VtBeQqU0RmE5qlIkrTymHLMbr8jGHaha1ZwZpG0fCiYNl6bZuH3
AovNQiEeCMS/7T9P2h6rg3wy+1tWV0IEfGklKBb8saY8x2oG7g2qh/yecpECSb68
Cauh18mXJ5JsT6P8dwDoxTxR1/lImvOU2Nys7T7nEhXrls1Dc0tv6Emi37hNwihn
vAnzXYx0MwIh0N85LrdbRtVM+dis2LLpDScVt9CHS+Vl0+qO9fsgDnUKYYGONYq+
MHjtDdTMB0DhxTNjaWOU0J1RgmlAFV63lx7iWs0twH44Fbylo6DYYkAiNGOUvpKD
7GNz/aooEtrTf/3GnHoB2UBdvsmI8RZ7TSXCsoCkldQRsJJnzjo5fxTyH8ufCeEh
Umw+lmK2OFldkPSrVL8eBPV8QTECbJOyFQC8IpVy/QnJhZlDmgrOJAVtl6xjkaEf
qPV2sLruhNBqxh2zgsMhAoIBAQDfXwQBa+sf1J6oOo872ev68aQ5Zx5bZWV6Vke/
sxjab/GiZ44A33TRTUpIermdR3zJ5D0B9vh6IW6tbuU3wBgJACjs9VjWFfb5T46M
Z5FNtN3zNvxJ9YhbQ2RJc4GRzCNcGAquDecD9xUk91k9kI07UZKUIDywGA2OGKra
USRdS8LqAfpAxANu3JvinlqTQFfOxT3AZY03UWmXJI9xXtgxX1KLB+46Luy5GIWs
/BNFi1Nk12OHql19woMKpx4iw89cA3S26FjViuGX0g9domT+biatPNan96Refp4s
/jTHOFZ4HuhmWGugb1J9yhcHEZp9XreUtbrm8Xm++16f9bdJAoIBAQDCyir3lw94
X0EfNE4dMO0KlQiYXobTxQ7y0aZdL9a58t5C0Pq5nTyvX7NcIyk2KcxhMjJDJC1M
mVmQz2dvb3aJt+VKhVl2q0H/qSRI2Rp5QB5o7BlpszVkMt5CP36HZE7xz1LXZ+74
WMEsePkbn1GrRts/QsAy3iqmoBsy/fq8rqU3tXaajAzORb3KFNKkbdBX7nXnS8v+
xizWccKMTf0QuaLiC/Wcdi9vPB4UQogpa8vpAl8gM5YqaDs94eVpSv23UMhNrvAg
V3tn7FNSQNh+ugnLBwNqwam95fBMteGUh4HapnoEDlOezE7qUwGAaTswk5TnxiON
VIjpQlk2VkwXAoIBAQC1l4orGbABpZoCS/EsCCMXVKFc5V9BkDIqfcBAsXowAzfe
/u7r+L4AdiRAvjzuBzME8t9CHKSurUVMC86fPzSLBK1AzskU6rBoyGur63quQK77
ziTWf50GDMiYCiY5AEty0DzGeZjomVOARPIw4bZflhZjA74yrqs+bQFhEPxOOIxS
L59iTbg4xXKZjoE2GuYHvERSiHyAj1gXPuq6kQ+TO9pgGudqN8HNTIlIM3n7XKRE
Y/KPVUpCNgLQg0I1oxiNxmV5WXT2zbxO77/8MEyIp8Ybqk0cKnBfPfKbw2Hm3/80
EnR+171PpZDboJKN9Zqx93GpnQBARenjAHpR8rG5AoIBAH1JnbNchUXONsvET830
zzJ0Q3AFtMD3SbMi59eeUoWN0im10t6aZRMEAhBsSTCeV+fYan3HAh/3rqU20ffa
AKt6DdANz0pFwxCXEVCN27pLZIPmAD59VwUYtt5zioW5HhHoYQdNwWYZaD6bnNaI
dfYtgA3DeG3/ef1sk7ILrD+6MWiQnjWviPkP4I/fLtE2FMDKDynzFcXMX8CasSCf
dPtR+5NbT+IQHlh0mYA8funtfN1lehvzMk4adqhJ6M39vw0ut3dH4wlaW3Svi7Qn
I1j3fh8JZsg+wlfzUsl0XyCyu/IQDAEZ2e0UyllrhFa82KZY9njRd8KKsfkehNUv
UocCggEAGFGpLq8flL4lU4AnetR5Gs2BFaHBeqyGL1pWY1oPgF8jE/aNafIDs6Nq
wMBIOQmekhEOxBf9Ti9qJDaTkTNyIiPFYS3/sm+thfqJFVMZX8LKnjSntSCp/pGD
YELJ+GOYwOnqcni7psF4+cvxQmRkI1LHpIwiUOMniwcfPVCtoEHdJ5Pn0jFFkcAV
VPWLyXcPH0WpgklFGvCNvvVthRkZTuT4Zy2QXgP6dfIK/2UAUDE6Uk1odkNyAtw9
d2tkfZjxzb8djGdcmTCbVzyRdkkhRsp/grQbg+qXfmiTlAyPE3uB5VFPJYcx5gJL
oYjpqlB4Kj08eIAI5vcWnt/RcE1tLw==
-----END PRIVATE KEY-----
""",
}
# This maps the attributes of the certificates so they can be compared in the tests.
# Attributes shared by both certificates live directly at the root; otherwise,
# they live under the key of the certificate they belong to.
CERT_ATTRS = {
# Just take the date, it is the same for both
"endDate": datetime.strptime("2037-10-21", "%Y-%m-%d").date(),
"startDate": datetime.strptime("2018-08-22", "%Y-%m-%d").date(),
"issuerDN": "/O=DIRAC Computing/CN=DIRAC Computing Signing Certification Authority",
HOSTCERT: {
"subjectDN": "/O=Dirac Computing/O=CERN/CN=VOBox",
"serial": 4098,
"availableExtensions": [
"authorityKeyIdentifier",
"basicConstraints",
"extendedKeyUsage",
"keyUsage",
"nsComment",
"subjectAltName",
"subjectKeyIdentifier",
],
"basicConstraints": "CA:FALSE",
"subjectAltName": "DNS:VOBox, DNS:localhost",
"extendedKeyUsage": "TLS Web Server Authentication, TLS Web Client Authentication",
"content": CERTCONTENTS["HOSTCERTCONTENT"],
"keyFile": HOSTKEY,
},
USERCERT: {
"subjectDN": "/O=Dirac Computing/O=CERN/CN=MrUser",
"serial": 4097,
"availableExtensions": [
"authorityKeyIdentifier",
"basicConstraints",
"extendedKeyUsage",
"keyUsage",
"nsComment",
"subjectKeyIdentifier",
],
"basicConstraints": "CA:FALSE",
"subjectAltName": "DNS:VOBox, DNS:localhost",
"extendedKeyUsage": "TLS Web Client Authentication",
"content": CERTCONTENTS["USERCERTCONTENT"],
"keyFile": USERKEY,
},
}
VOMS_PROXY_ATTR = {
"notBefore": datetime(2018, 10, 23, 9, 11, 44),
"notAfter": datetime(2024, 7, 6, 17, 11, 44),
"fqan": ["/fakevo/Role=user/Capability=NULL"],
"vo": "fakevo",
"subject": "/O=Dirac Computing/O=CERN/CN=MrUser",
"issuer": "/O=Dirac Computing/O=CERN/CN=VOBox",
}
def getCertOption(cert, optionName):
"""Return a given option of a given certificate, taken from CERT_ATTRS
:param cert: effectively, path to the certificate in question
    :param optionName: name of the option
:returns: the option
"""
if optionName in CERT_ATTRS:
return CERT_ATTRS[optionName]
return CERT_ATTRS[cert][optionName]
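# Illustrative lookups (not used by the fixtures themselves): attributes shared
# by both certificates resolve at the root of CERT_ATTRS, the rest under the
# certificate's own key, e.g.
#
#   getCertOption(HOSTCERT, 'startDate')  # shared, found at the root
#   getCertOption(HOSTCERT, 'subjectDN')  # specific to HOSTCERT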
def deimportDIRAC():
"""clean all what has already been imported from DIRAC.
This method is extremely fragile, but hopefully, we can get ride of all these
messy tests soon, when PyGSI has gone.
"""
if len(X509CHAINTYPES) != 1 or len(X509REQUESTTYPES) != 1:
raise NotImplementedError(
"This no longer de-imports DIRAC, if we want to test another SSL wrapper "
"we will have to find another way of doing this or run a separate pytest "
"process again"
)
# for mod in list(sys.modules):
# # You should be careful with what you remove....
# if (mod == 'DIRAC' or mod.startswith('DIRAC.')) and not mod.startswith('DIRAC.Core.Security.test'):<|fim▁hole|>
# This fixture will return a pyGSI or M2Crypto X509Chain class
# https://docs.pytest.org/en/latest/fixture.html#automatic-grouping-of-tests-by-fixture-instances
@fixture(scope="function", params=X509CHAINTYPES)
def get_X509Chain_class(request):
"""Fixture to return either the X509Certificate class.
It also 'de-import' DIRAC before and after
"""
# Clean before
deimportDIRAC()
x509Class = request.param
if x509Class == "M2_X509Chain":
from DIRAC.Core.Security.m2crypto.X509Chain import X509Chain
else:
raise NotImplementedError()
yield X509Chain
# Clean after
deimportDIRAC()
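# Illustrative use of the fixture in a test (hypothetical test; the
# loadChainFromFile method and S_OK-style return dict are assumed from DIRAC):
#
#   def test_loadChain(get_X509Chain_class):
#       chain = get_X509Chain_class()
#       res = chain.loadChainFromFile(HOSTCERT)
#       assert res['OK']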
X509REQUESTTYPES = ("M2_X509Request",)
# This fixture will return a X509Request class
# https://docs.pytest.org/en/latest/fixture.html#automatic-grouping-of-tests-by-fixture-instances
@fixture(scope="function", params=X509REQUESTTYPES)
def get_X509Request(request):
"""Fixture to return either the X509Request instance.
It also 'de-import' DIRAC before and after
"""
# Clean before
deimportDIRAC()
x509Class = request.param
if x509Class == "M2_X509Request":
from DIRAC.Core.Security.m2crypto.X509Request import X509Request
else:
raise NotImplementedError()
def _generateX509Request():
"""Instanciate the object
:returns: an X509Request instance
"""
return X509Request()
yield _generateX509Request
# Clean after
deimportDIRAC()
def get_X509Chain_from_X509Request(x509ReqObj):
"""This returns an X509Chain class from the same "type" as the X509Request
object given as param
:param x509ReqObj: instance of a X509Request object
:returns: X509Chain class
"""
# In principle, we should deimport Dirac everywhere, but I am not even sure it makes any difference
if "m2crypto" in x509ReqObj.__class__.__module__:
from DIRAC.Core.Security.m2crypto.X509Chain import X509Chain
else:
raise NotImplementedError()
return X509Chain<|fim▁end|> | # sys.modules.pop(mod)
X509CHAINTYPES = ("M2_X509Chain",) |
<|file_name|>regions-in-enums.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that lifetimes must be declared for use on enums.
// See also regions-undeclared.rs
enum yes0<'lt> {
X3(&'lt usize)
}
enum yes1<'a> {
X4(&'a usize)
}
enum no0 {<|fim▁hole|> X5(&'foo usize) //~ ERROR use of undeclared lifetime name `'foo`
}
enum no1 {
X6(&'a usize) //~ ERROR use of undeclared lifetime name `'a`
}
fn main() {}<|fim▁end|> | |
<|file_name|>sites.js<|end_file_name|><|fim▁begin|>// Copyright 2015 LastLeaf, LICENSE: github.lastleaf.me/MIT
'use strict';
var fs = require('fs');
var fse = require('fs-extra');
var async = require('async');
var User = fw.module('/db_model').User;
var exports = module.exports = function(conn, res, args){
User.checkPermission(conn, 'admin', function(perm){
if(!perm) return res.err('noPermission');
res.next();
});
};
exports.list = function(conn, res){
// read site list
var sitesDir = conn.app.config.app.siteRoot + '/xbackup/';
fs.readdir(sitesDir, function(err, files){
if(err) return res.err('system');
var sites = files.sort();
// list available backup files
var details = [];
var local = null;
async.eachSeries(sites, function(site, cb){
var siteDir = sitesDir + site;
if(site !== 'local' && site.slice(-5) !== '.site') return cb();
fs.readdir(siteDir, function(err, files){
if(err) return cb('system');
var zips = [];
for(var i=0; i<files.length; i++) {
var match = files[i].match(/^(.*?)\.xbackup\.zip(\.enc|)$/);
if(match) {
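						// e.g. a backup file named '2015-01-02_03-04-05.xbackup.zip' (assumed
						// naming scheme) produces the timeString '2015-01-02 03:04:05'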
var ft = match[1].replace(/^([0-9]+)-([0-9]+)-([0-9]+)_([0-9]+)-([0-9]+)-([0-9]+)/, function(m, m1, m2, m3, m4, m5, m6){
return m1 + '-' + m2 + '-' + m3 + ' ' + m4 + ':' + m5 + ':' + m6;
});
zips.push({
file: files[i],
timeString: ft
});
}
}
if(site === 'local') {
local = zips;
} else details.push({
domain: site.slice(0, -5),
files: zips
});
cb();
});
}, function(err){
if(err) return res.err('system');
res({
local: local,
sites: details
});
});
});
};
exports.modify = function(conn, res, args){
var addSites = String(args.add).match(/\S+/g) || [];
var removeSites = String(args.remove).match(/\S+/g) || [];
async.eachSeries(removeSites, function(site, cb){
var dir = conn.app.config.app.siteRoot + '/xbackup/' + site + '.site';
fse.remove(dir, function(){
cb();
});
}, function(){
async.eachSeries(addSites, function(site, cb){
var dir = conn.app.config.app.siteRoot + '/xbackup/' + site + '.site';<|fim▁hole|> fs.mkdir(dir, function(){
cb();
});
}, function(){
res();
});
});
};<|fim▁end|> | |
<|file_name|>ecdsa_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import unittest
from pycoin.ecdsa import generator_secp256k1, sign, verify, public_pair_for_secret_exponent
class ECDSATestCase(unittest.TestCase):
def test_sign_verify(self):
def do_test(secret_exponent, val_list):
public_point = public_pair_for_secret_exponent(generator_secp256k1, secret_exponent)<|fim▁hole|> for v in val_list:
signature = sign(generator_secp256k1, secret_exponent, v)
r = verify(generator_secp256k1, public_point, v, signature)
# Check that the 's' value is 'low', to prevent possible transaction malleability as per
# https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#low-s-values-in-signatures
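                # The constant below is floor(n / 2), where n is the order of the
                # secp256k1 generator: for any valid (r, s), (r, n - s) also
                # verifies, so requiring s <= n // 2 picks one canonical signature.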
assert signature[1] <= 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0
assert r == True
signature = signature[0],signature[1]+1
r = verify(generator_secp256k1, public_point, v, signature)
assert r == False
val_list = [100,20000,30000000,400000000000,50000000000000000,60000000000000000000000]
do_test(0x1111111111111111111111111111111111111111111111111111111111111111, val_list)
do_test(0xdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, val_list)
do_test(0x47f7616ea6f9b923076625b4488115de1ef1187f760e65f89eb6f4f7ff04b012, val_list)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>PhoneNumber.java<|end_file_name|><|fim▁begin|>package fr.sii.ogham.sms.message;
import fr.sii.ogham.core.util.EqualsBuilder;
import fr.sii.ogham.core.util.HashCodeBuilder;
/**
 * Represents a phone number. It wraps a simple string. The aim is to abstract
 * the concept and to be able to provide other fields later if needed.
*
* @author Aurélien Baudet
*
*/
public class PhoneNumber {
/**
* The phone number as string
*/
private String number;
/**
* Initialize the phone number with the provided number.
*
* @param number
* the phone number
*/
public PhoneNumber(String number) {
super();
this.number = number;
}
public String getNumber() {
return number;<|fim▁hole|> this.number = number;
}
@Override
public String toString() {
return number;
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(number).hashCode();
}
@Override
public boolean equals(Object obj) {
return new EqualsBuilder(this, obj).appendFields("number").isEqual();
}
}<|fim▁end|> | }
public void setNumber(String number) { |
<|file_name|>PathologyRestTranslatorTest.java<|end_file_name|><|fim▁begin|>/**
*
Package: MAG - VistA Imaging
WARNING: Per VHA Directive 2004-038, this routine should not be modified.
Date Created: Jul 10, 2012
Site Name: Washington OI Field Office, Silver Spring, MD
Developer: VHAISWWERFEJ
Description:
;; +--------------------------------------------------------------------+
;; Property of the US Government.
;; No permission to copy or redistribute this software is given.
;; Use of unreleased versions of this software requires the user
;; to execute a written test agreement with the VistA Imaging
;; Development Office of the Department of Veterans Affairs,
;; telephone (301) 734-0100.
;;
;; The Food and Drug Administration classifies this software as
;; a Class II medical device. As such, it may not be changed
;; in any way. Modifications to this software may result in an
;; adulterated medical device under 21CFR820, the use of which<|fim▁hole|> ;; +--------------------------------------------------------------------+
*/
package gov.va.med.imaging.pathology.rest.translator;
import java.util.Date;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* @author VHAISWWERFEJ
*
*/
public class PathologyRestTranslatorTest
{
@Test
public void testDateTranslation()
{
try
{
Date date = PathologyRestTranslator.translateDate("201207101435");
System.out.println("Date: " + date);
}
catch(Exception ex)
{
ex.printStackTrace();
fail(ex.getMessage());
}
}
}<|fim▁end|> | ;; is considered to be a violation of US Federal Statutes. |
<|file_name|>Apartment.js<|end_file_name|><|fim▁begin|>import React from 'react'
import ApartmentTable from './ApartmentListContainer'
import TextFieldForm from './ApartmentForm'
import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider'
import {Tabs, Tab} from 'material-ui/Tabs'
import Paper from 'material-ui/Paper'
import AppBar from 'material-ui/AppBar'
import '../../css/apartment.css'
const styles = {
headline: {
fontSize: 24,
paddingTop: 16,
marginBottom: 12,
fontWeight: 400
}
}
const style = {
height: 500,
width: 1000,
margin: 20,
textAlign: 'center',
display: 'inline-block'
}
const Apartment = () => (
<MuiThemeProvider>
<div className='apartment'>
<Tabs
initialSelectedIndex={0}
contentContainerClassName='5'
>
<Tab label='Apartment Form'>
<div style={{textAlign: 'center', margin: '25px'}}>
<Paper style={style} zDepth={1}>
<AppBar<|fim▁hole|> <TextFieldForm />
</Paper>
</div>
</Tab>
<Tab label='Apartment List'>
<div style={{textAlign: 'center', margin: '25px'}}>
<Paper style={style} zDepth={1}>
<AppBar
title='Apartment List'
showMenuIconButton={false}
/>
<ApartmentTable />
</Paper>
</div>
</Tab>
</Tabs>
</div>
</MuiThemeProvider>
)
export default Apartment<|fim▁end|> | title='Apartment Form'
showMenuIconButton={false}
/> |
<|file_name|>nosy.py<|end_file_name|><|fim▁begin|>"""Watch for changes in a collection of source files. If changes, run
the specified test runner (nosetests, by default).
"""
from argparse import ArgumentParser
import ConfigParser
import glob
import os
import stat
import subprocess
import sys
import time
class Nosy(object):
"""Watch for changes in all source files. If changes, run the
specified test runner (nosetests, by default).
"""
def __init__(self):
"""Return an instance with the default configuration, and a
command line parser.
"""
self.config = ConfigParser.SafeConfigParser()
self.config.add_section('nosy')
self.config.set('nosy', 'test_runner', 'nosetests')
self.config.set('nosy', 'base_path', '.')
self.config.set('nosy', 'glob_patterns', '')
self.config.set('nosy', 'exclude_patterns', '')
self.config.set('nosy', 'extra_paths', '')
self.config.set('nosy', 'options', '')
self.config.set('nosy', 'tests', '')
# paths config retained for backward compatibility; use<|fim▁hole|>
def _build_cmdline_parser(self):
self.parser = ArgumentParser(
description='Automatically run a command (nosetest, by default) '
'whenever source files change.')
self.parser.add_argument(
'-c', '--config', dest='config_file', default='setup.cfg',
help='configuration file path and name; defaults to %(default)s')
def parse_cmdline(self):
"""Parse the command line and set the config_file attribute.
"""
args = self.parser.parse_args()
self.config_file = args.config_file
def _read_config(self):
try:
self.config.readfp(open(self.config_file, 'rt'))
except IOError, msg:
self.parser.error("can't read config file:\n %s" % msg)
self.test_runner = self.config.get('nosy', 'test_runner')
self.base_path = self.config.get('nosy', 'base_path')
self.glob_patterns = self.config.get(
'nosy', 'glob_patterns').split()
self.exclude_patterns = self.config.get(
'nosy', 'exclude_patterns').split()
self.extra_paths = self.config.get('nosy', 'extra_paths').split()
self.cmd_opts = self.config.get('nosy', 'options')
self.cmd_args = self.config.get('nosy', 'tests')
# paths config retained for backward compatibility; use
# extra_paths for any files or paths that aren't easily
# included via base_path, glob_patterns, and
# exclude_patterns
self.paths = self.config.get('nosy', 'paths').split()
def _calc_extra_paths_checksum(self):
"""Return the checksum for the files given by the extra paths
pattern(s).
self.paths is included for backward compatibility.
"""
checksum = 0
for path in self.extra_paths + self.paths:
for file_path in glob.iglob(path):
stats = os.stat(file_path)
checksum += stats[stat.ST_SIZE] + stats[stat.ST_MTIME]
return checksum
def _calc_exclusions(self, root):
"""Return a set of file paths to be excluded from the checksum
calculation.
"""
exclusions = set()
for pattern in self.exclude_patterns:
for file_path in glob.iglob(os.path.join(root, pattern)):
exclusions.add(file_path)
return exclusions
def _calc_dir_checksum(self, exclusions, root):
"""Return the checksum for the monitored files in the
specified directory tree.
"""
checksum = 0
for pattern in self.glob_patterns:
for file_path in glob.iglob(os.path.join(root, pattern)):
if file_path not in exclusions:
stats = os.stat(file_path)
checksum += stats[stat.ST_SIZE] + stats[stat.ST_MTIME]
return checksum
def _checksum(self):
"""Return a checksum which indicates if any files in the paths
list have changed.
"""
checksum = self._calc_extra_paths_checksum()
for root, dirs, files in os.walk(self.base_path):
exclusions = self._calc_exclusions(root)
checksum += self._calc_dir_checksum(exclusions, root)
return checksum
def run(self):
"""Run specified test runner (default nosetests) whenever the
source files (default ./*.py) change.
Re-read the configuration before each run so that options and
arguments may be changed.
"""
checksum = 0
self._read_config()
while True:
if self._checksum() != checksum:
self._read_config()
checksum = self._checksum()
cmd = (self.test_runner.split() if ' ' in self.test_runner
else [self.test_runner])
try:
subprocess.call(
cmd
+ self.cmd_opts.replace('\\\n', '').split()
+ self.cmd_args.replace('\\\n', '').split())
except OSError, msg:
sys.stderr.write('Command error: %s: %s\n' % (msg, cmd))
sys.exit(2)
time.sleep(1)
def main():
nosy = Nosy()
nosy.parse_cmdline()
try:
nosy.run()
except KeyboardInterrupt:
sys.exit(130)
except SystemExit:
sys.exit(0)
if __name__ == '__main__':
main()<|fim▁end|> | # extra_paths for any files or paths that aren't easily
# included via base_path, glob_patterns, and exclude_patterns
self.config.set('nosy', 'paths', '*.py')
self._build_cmdline_parser() |
<|file_name|>test_url.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,<|fim▁hole|># including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from unittest import TestCase
from gofer.messaging.adapter.url import URL
from gofer.messaging.adapter.url import PORT, Scheme
class Test(object):
def __init__(self,
url,
adapter=None,
scheme=None,
host=None,
port=None,
userid=None,
password=None,
path=None):
self.url = url
self.adapter = adapter
self.scheme = scheme
self.host = host
self.port = port
self.userid = userid
self.password = password
self.path = path
def __call__(self, test):
url = URL(self.url)
test.assertEqual(url.adapter, self.adapter)
test.assertEqual(url.scheme, self.scheme)
test.assertEqual(url.host, self.host)
test.assertEqual(url.port, self.port)
test.assertEqual(url.userid, self.userid)
test.assertEqual(url.password, self.password)
test.assertEqual(url.path, self.path)
TESTS = [
Test('qpid+amqp://elmer:fudd@blue:5000/all',
adapter='qpid',
scheme='amqp',
host='blue',
port=5000,
userid='elmer',
password='fudd',
path='all'),
Test('amqp://elmer:fudd@yellow:1234//',
scheme='amqp',
host='yellow',
port=1234,
userid='elmer',
password='fudd',
path='/'),
Test('amqp://green:5678/all/good',
scheme='amqp',
host='green',
port=5678,
path='all/good'),
Test('amqp://red:2323',
scheme='amqp',
host='red',
port=2323),
Test('amqp://black',
scheme='amqp',
host='black',
port=5672),
Test('amqps://purple',
scheme='amqps',
host='purple',
port=5671),
Test('orange:6545',
scheme='amqp',
host='orange',
port=6545),
Test('localhost',
scheme='amqp',
host='localhost',
port=5672),
Test('',
scheme='amqp',
port=5672),
]
class TestURL(TestCase):
def test_parsing(self):
for test in TESTS:
test(self)
def test_canonical(self):
urls = [
'qpid+amqp://elmer:fudd@test-host:5000/all',
'amqp://elmer:fudd@test-host:5000/all',
'amqp://test-host:5000/all',
'amqp://test-host:5000'
]
for _url in urls:
url = URL(_url)
self.assertEqual(url.canonical, _url.split('+')[-1].rsplit('/all')[0])
def test_is_ssl(self):
# false
url = URL('amqp://localhost')
self.assertFalse(url.is_ssl())
# true
url = URL('amqps://localhost')
self.assertTrue(url.is_ssl())
def test_hash(self):
url = URL('test')
self.assertEqual(hash(url), hash(url.canonical))
def test_str(self):
urls = [
'qpid+amqp://elmer:fudd@test-host:5000/all',
'amqp://elmer:fudd@test-host:5000/all',
'amqp://test-host:5000/all',
'amqp://test-host:5000',
'amqp://test-host',
]
for _url in urls:
url = URL(_url)
self.assertEqual(str(url), url.canonical)
class TestScheme(TestCase):
def test_validated(self):
for n in PORT:
self.assertEqual(Scheme.validated(n), n.lower())
self.assertRaises(ValueError, Scheme.validated, 'unsupported')<|fim▁end|> | |
<|file_name|>bluetooth_api.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2013 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
var _callbacks = {};
var _next_reply_id = 0;
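// Asynchronous bridge to the native extension: every message carries a unique
// reply_id which the native side echoes back, letting the listener below route
// the reply to the right callback. Illustrative call (not from this file):
//
//   postMessage({ 'cmd': 'StopDiscovery' }, function(result) {
//     // result is the parsed reply for this specific request
//   });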
var postMessage = function(msg, callback) {
var reply_id = _next_reply_id;
_next_reply_id += 1;
_callbacks[reply_id] = callback;
msg.reply_id = reply_id.toString();
extension.postMessage(JSON.stringify(msg));
};
function checkPostError(msg, errorCallback) {
if (msg.error == tizen.WebAPIException.NO_ERROR)
return false;
if (errorCallback) {
var error = new tizen.WebAPIError(msg.error);
errorCallback(error);
}
return true;
}
extension.setMessageListener(function(json) {
var msg = JSON.parse(json);
if (msg.cmd == 'BondedDevice')
handleBondedDevice(msg);
else if (msg.cmd == 'DeviceFound')
handleDeviceFound(msg);
else if (msg.cmd == 'DiscoveryFinished')
handleDiscoveryFinished();
else if (msg.cmd == 'DeviceRemoved')
handleDeviceRemoved(msg.Address);
else if (msg.cmd == 'DeviceUpdated')
handleDeviceUpdated(msg);
else if (msg.cmd == 'AdapterUpdated')
handleAdapterUpdated(msg);
else if (msg.cmd == 'RFCOMMSocketAccept')
handleRFCOMMSocketAccept(msg);
else if (msg.cmd == 'SocketHasData')
handleSocketHasData(msg);
else if (msg.cmd == 'SocketClosed')
handleSocketClosed(msg);
else { // Then we are dealing with postMessage return.
var reply_id = msg.reply_id;
var callback = _callbacks[reply_id];
if (callback) {
delete msg.reply_id;
delete _callbacks[reply_id];
callback(msg);
} else {
      // Do not log an error when the postMessage was not initiated by JS.
if (reply_id != '')
console.log('Invalid reply_id from Tizen Bluetooth: ' + reply_id);
}
}
});
function Adapter() {
  this.found_devices = []; // Filled while discovering.
this.known_devices = []; // Keeps Managed and Found devices.
this.discovery_callbacks = {};
this.isReady = false;
this.service_handlers = [];
this.sockets = [];
this.change_listener = {};
this.health_apps = {};
this.health_channel_listener = {};
}
function validateAddress(address) {
if (typeof address !== 'string')
return false;
var regExp = /([\dA-F][\dA-F]:){5}[\dA-F][\dA-F]/i;
if (!address.match(regExp))
return false;
return true;
}
Adapter.prototype.checkServiceAvailability = function(errorCallback) {
if (adapter.isReady && defaultAdapter.powered)
return false;
if (errorCallback) {
var error = new tizen.WebAPIError(tizen.WebAPIException.SERVICE_NOT_AVAILABLE_ERR);
errorCallback(error);
}
return true;
};
Adapter.prototype.indexOfDevice = function(devices, address) {
for (var i = 0; i < devices.length; i++) {
if (devices[i].address == address)
return i;
}
return -1;
};
Adapter.prototype.addDevice = function(device, on_discovery) {
var new_device = false;
if (on_discovery) {
var index = this.indexOfDevice(this.found_devices, device.address);
if (index == -1) {
this.found_devices.push(device);
new_device = true;
} else {
this.found_devices[index] = device;
new_device = false;
}
}
var i = this.indexOfDevice(this.known_devices, device.address);
if (i == -1)
this.known_devices.push(device);
else
this.known_devices[i] = device;
return new_device;
};
Adapter.prototype.updateDevice = function(device) {
var index = this.indexOfDevice(this.known_devices, device.address);
if (index == -1)
this.known_devices.push(device);
else
this.known_devices[index]._updateProperties(device);
};
// This holds the adapter the Bluetooth backend is currently using.
// In BlueZ 4, for instance, this would represent the "default adapter".
// BlueZ 5 has no such concept, so this will hold the currently available
// adapter, which can be just the first one found.
var adapter = new Adapter();
var deepCopyDevices = function(devices) {<|fim▁hole|>
return copiedDevices;
};
var handleBondedDevice = function(msg) {
var device = new BluetoothDevice(msg);
adapter.addDevice(device, false);
};
var handleDeviceFound = function(msg) {
var device = new BluetoothDevice(msg);
var is_new = adapter.addDevice(device, msg.found_on_discovery);
// FIXME(jeez): we are not returning a deep copy so we can keep
// the devices up-to-date. We have to find a better way to handle this.
if (is_new && msg.found_on_discovery && adapter.discovery_callbacks.ondevicefound)
adapter.discovery_callbacks.ondevicefound(device);
};
var handleDiscoveryFinished = function() {
// FIXME(jeez): we are not returning a deep copy so we can keep
// the devices up-to-date. We have to find a better way to handle this.
if (typeof adapter.discovery_callbacks.onfinished === 'function')
adapter.discovery_callbacks.onfinished(adapter.found_devices);
adapter.found_devices = [];
adapter.discovery_callbacks = {};
};
var handleDeviceRemoved = function(address) {
var foundDevices = adapter.found_devices;
var knownDevices = adapter.known_devices;
for (var i = 0; i < foundDevices.length; i++) {
if (foundDevices[i].address === address) {
foundDevices.splice(i, 1);
break;
}
}
for (var i = 0; i < knownDevices.length; i++) {
if (knownDevices[i].address === address) {
knownDevices.splice(i, 1);
break;
}
}
if (adapter.discovery_callbacks.ondevicedisappeared)
adapter.discovery_callbacks.ondevicedisappeared(address);
};
var handleDeviceUpdated = function(msg) {
var device = new BluetoothDevice(msg);
adapter.updateDevice(device);
};
var handleAdapterUpdated = function(msg) {
var listener = adapter.change_listener;
if (msg.Name) {
_addConstProperty(defaultAdapter, 'name', msg.Name);
if (listener && listener.onnamechanged) {
adapter.change_listener.onnamechanged(msg.Name);
}
}
if (msg.Address)
_addConstProperty(defaultAdapter, 'address', msg.Address);
if (msg.Powered) {
var powered = (msg.Powered === 'true');
_addConstProperty(defaultAdapter, 'powered', powered);
if (listener && listener.onstatechanged) {
adapter.change_listener.onstatechanged(powered);
}
}
if (msg.Discoverable) {
var visibility = (msg.Discoverable === 'true');
if (defaultAdapter.visible !== visibility && listener && listener.onvisibilitychanged) {
adapter.change_listener.onvisibilitychanged(visibility);
}
_addConstProperty(defaultAdapter, 'visible', visibility);
}
defaultAdapter.isReady = true;
};
var handleRFCOMMSocketAccept = function(msg) {
for (var i in adapter.service_handlers) {
var server = adapter.service_handlers[i];
// FIXME(clecou) BlueZ4 backend compares rfcomm channel number but this parameter
// is not available in Tizen C API so we check socket fd.
// A better approach would be to adapt backends instances to have a single JSON protocol.
if (server.channel === msg.channel || server.server_fd === msg.socket_fd) {
var j = adapter.indexOfDevice(adapter.known_devices, msg.peer);
var peer = adapter.known_devices[j];
var socket = new BluetoothSocket(server.uuid, peer, msg);
adapter.sockets.push(socket);
_addConstProperty(server, 'isConnected', true);
if (server.onconnect && typeof server.onconnect === 'function')
server.onconnect(socket);
return;
}
}
};
var handleSocketHasData = function(msg) {
for (var i in adapter.sockets) {
var socket = adapter.sockets[i];
if (socket.socket_fd === msg.socket_fd) {
socket.data = msg.data;
if (socket.onmessage && typeof socket.onmessage === 'function')
socket.onmessage();
socket.data = [];
return;
}
}
};
var handleSocketClosed = function(msg) {
for (var i in adapter.sockets) {
var socket = adapter.sockets[i];
if (socket.socket_fd === msg.socket_fd) {
if (socket.onclose && typeof socket.onmessage === 'function')
socket.onclose();
return;
}
}
};
function _addConstProperty(obj, propertyKey, propertyValue) {
Object.defineProperty(obj, propertyKey, {
configurable: true,
writable: false,
value: propertyValue
});
}
exports.deviceMajor = {};
var deviceMajor = {
'MISC': { value: 0x00, configurable: false, writable: false },
'COMPUTER': { value: 0x01, configurable: false, writable: false },
'PHONE': { value: 0x02, configurable: false, writable: false },
'NETWORK': { value: 0x03, configurable: false, writable: false },
'AUDIO_VIDEO': { value: 0x04, configurable: false, writable: false },
'PERIPHERAL': { value: 0x05, configurable: false, writable: false },
'IMAGING': { value: 0x06, configurable: false, writable: false },
'WEARABLE': { value: 0x07, configurable: false, writable: false },
'TOY': { value: 0x08, configurable: false, writable: false },
'HEALTH': { value: 0x09, configurable: false, writable: false },
'UNCATEGORIZED': { value: 0x1F, configurable: false, writable: false }
};
Object.defineProperties(exports.deviceMajor, deviceMajor);
_addConstProperty(exports, 'deviceMajor', exports.deviceMajor);
exports.deviceMinor = {};
var deviceMinor = {
'COMPUTER_UNCATEGORIZED': { value: 0x00, configurable: false, writable: false },
'COMPUTER_DESKTOP': { value: 0x01, configurable: false, writable: false },
'COMPUTER_SERVER': { value: 0x02, configurable: false, writable: false },
'COMPUTER_LAPTOP': { value: 0x03, configurable: false, writable: false },
'COMPUTER_HANDHELD_PC_OR_PDA': { value: 0x04, configurable: false, writable: false },
'COMPUTER_PALM_PC_OR_PDA': { value: 0x05, configurable: false, writable: false },
'COMPUTER_WEARABLE': { value: 0x06, configurable: false, writable: false },
'PHONE_UNCATEGORIZED': { value: 0x00, configurable: false, writable: false },
'PHONE_CELLULAR': { value: 0x01, configurable: false, writable: false },
'PHONE_CORDLESS': { value: 0x02, configurable: false, writable: false },
'PHONE_SMARTPHONE': { value: 0x03, configurable: false, writable: false },
'PHONE_MODEM_OR_GATEWAY': { value: 0x04, configurable: false, writable: false },
'PHONE_ISDN': { value: 0x05, configurable: false, writable: false },
'AV_UNRECOGNIZED': { value: 0x00, configurable: false, writable: false },
'AV_WEARABLE_HEADSET': { value: 0x01, configurable: false, writable: false },
'AV_HANDSFREE': { value: 0x02, configurable: false, writable: false },
'AV_MICROPHONE': { value: 0x04, configurable: false, writable: false },
'AV_LOUDSPEAKER': { value: 0x05, configurable: false, writable: false },
'AV_HEADPHONES': { value: 0x06, configurable: false, writable: false },
'AV_PORTABLE_AUDIO': { value: 0x07, configurable: false, writable: false },
'AV_CAR_AUDIO': { value: 0x08, configurable: false, writable: false },
'AV_SETTOP_BOX': { value: 0x09, configurable: false, writable: false },
'AV_HIFI': { value: 0x0a, configurable: false, writable: false },
'AV_VCR': { value: 0x0b, configurable: false, writable: false },
'AV_VIDEO_CAMERA': { value: 0x0c, configurable: false, writable: false },
'AV_CAMCORDER': { value: 0x0d, configurable: false, writable: false },
'AV_MONITOR': { value: 0x0e, configurable: false, writable: false },
'AV_DISPLAY_AND_LOUDSPEAKER': { value: 0x0f, configurable: false, writable: false },
'AV_VIDEO_CONFERENCING': { value: 0x10, configurable: false, writable: false },
'AV_GAMING_TOY': { value: 0x12, configurable: false, writable: false },
'PERIPHERAL_UNCATEGORIZED': { value: 0, configurable: false, writable: false },
'PERIPHERAL_KEYBOARD': { value: 0x10, configurable: false, writable: false },
'PERIPHERAL_POINTING_DEVICE': { value: 0x20, configurable: false, writable: false },
'PERIPHERAL_KEYBOARD_AND_POINTING_DEVICE': { value: 0x30, configurable: false, writable: false },
'PERIPHERAL_JOYSTICK': { value: 0x01, configurable: false, writable: false },
'PERIPHERAL_GAMEPAD': { value: 0x02, configurable: false, writable: false },
'PERIPHERAL_REMOTE_CONTROL': { value: 0x03, configurable: false, writable: false },
'PERIPHERAL_SENSING_DEVICE': { value: 0x04, configurable: false, writable: false },
'PERIPHERAL_DEGITIZER_TABLET': { value: 0x05, configurable: false, writable: false },
'PERIPHERAL_CARD_READER': { value: 0x06, configurable: false, writable: false },
'PERIPHERAL_DIGITAL_PEN': { value: 0x07, configurable: false, writable: false },
'PERIPHERAL_HANDHELD_SCANNER': { value: 0x08, configurable: false, writable: false },
'PERIPHERAL_HANDHELD_INPUT_DEVICE': { value: 0x09, configurable: false, writable: false },
'IMAGING_UNCATEGORIZED': { value: 0x00, configurable: false, writable: false },
'IMAGING_DISPLAY': { value: 0x04, configurable: false, writable: false },
'IMAGING_CAMERA': { value: 0x08, configurable: false, writable: false },
'IMAGING_SCANNER': { value: 0x10, configurable: false, writable: false },
'IMAGING_PRINTER': { value: 0x20, configurable: false, writable: false },
'WEARABLE_WRITST_WATCH': { value: 0x01, configurable: false, writable: false },
'WEARABLE_PAGER': { value: 0x02, configurable: false, writable: false },
'WEARABLE_JACKET': { value: 0x03, configurable: false, writable: false },
'WEARABLE_HELMET': { value: 0x04, configurable: false, writable: false },
'WEARABLE_GLASSES': { value: 0x05, configurable: false, writable: false },
'TOY_ROBOT': { value: 0x01, configurable: false, writable: false },
'TOY_VEHICLE': { value: 0x02, configurable: false, writable: false },
'TOY_DOLL': { value: 0x03, configurable: false, writable: false },
'TOY_CONTROLLER': { value: 0x04, configurable: false, writable: false },
'TOY_GAME': { value: 0x05, configurable: false, writable: false },
'HEALTH_UNDEFINED': { value: 0x00, configurable: false, writable: false },
'HEALTH_BLOOD_PRESSURE_MONITOR': { value: 0x01, configurable: false, writable: false },
'HEALTH_THERMOMETER': { value: 0x02, configurable: false, writable: false },
'HEALTH_WEIGHING_SCALE': { value: 0x03, configurable: false, writable: false },
'HEALTH_GLUCOSE_METER': { value: 0x04, configurable: false, writable: false },
'HEALTH_PULSE_OXIMETER': { value: 0x05, configurable: false, writable: false },
'HEALTH_PULSE_RATE_MONITOR': { value: 0x06, configurable: false, writable: false },
'HEALTH_DATA_DISPLAY': { value: 0x07, configurable: false, writable: false },
'HEALTH_STEP_COUNTER': { value: 0x08, configurable: false, writable: false },
'HEALTH_BODY_COMPOSITION_ANALYZER': { value: 0x09, configurable: false, writable: false },
'HEALTH_PEAK_FLOW_MONITOR': { value: 0x0a, configurable: false, writable: false },
'HEALTH_MEDICATION_MONITOR': { value: 0x0b, configurable: false, writable: false },
'HEALTH_KNEE_PROSTHESIS': { value: 0x0c, configurable: false, writable: false },
'HEALTH_ANKLE_PROSTHESIS': { value: 0x0d, configurable: false, writable: false }
};
Object.defineProperties(exports.deviceMinor, deviceMinor);
_addConstProperty(exports, 'deviceMinor', exports.deviceMinor);
exports.deviceService = {};
var deviceService = {
'LIMITED_DISCOVERABILITY': { value: 0x0001, configurable: false, writable: false },
'POSITIONING': { value: 0x0008, configurable: false, writable: false },
'NETWORKING': { value: 0x0010, configurable: false, writable: false },
'RENDERING': { value: 0x0020, configurable: false, writable: false },
'CAPTURING': { value: 0x0040, configurable: false, writable: false },
'OBJECT_TRANSFER': { value: 0x0080, configurable: false, writable: false },
'AUDIO': { value: 0x0100, configurable: false, writable: false },
'TELEPHONY': { value: 0x0200, configurable: false, writable: false },
'INFORMATION': { value: 0x0400, configurable: false, writable: false }
};
Object.defineProperties(exports.deviceService, deviceService);
_addConstProperty(exports, 'deviceService', exports.deviceService);
var defaultAdapter = new BluetoothAdapter();
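// Note: unlike the postMessage()-based calls above, this getter goes through
// the synchronous message channel, so the adapter's properties are usable as
// soon as the call returns.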
exports.getDefaultAdapter = function() {
if (!defaultAdapter.name) {
var msg = { 'cmd': 'GetDefaultAdapter' };
var result = JSON.parse(extension.internal.sendSyncMessage(JSON.stringify(msg)));
    if (result.error == tizen.WebAPIException.NO_ERROR) {
_addConstProperty(defaultAdapter, 'name', result.name);
_addConstProperty(defaultAdapter, 'address', result.address);
_addConstProperty(defaultAdapter, 'powered', result.powered);
_addConstProperty(defaultAdapter, 'visible', result.visible);
if (result.hasOwnProperty('address') && result.address != '')
adapter.isReady = true;
} else {
adapter.isReady = false;
throw new tizen.WebAPIException(tizen.WebAPIException.UNKNOWN_ERR);
}
}
return defaultAdapter;
};
function BluetoothAdapter() {
_addConstProperty(this, 'name', '');
_addConstProperty(this, 'address', '00:00:00:00:00:00');
_addConstProperty(this, 'powered', false);
_addConstProperty(this, 'visible', false);
}
BluetoothAdapter.prototype.setName = function(name, successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('s?ff', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
if (name === defaultAdapter.name) {
if (successCallback)
successCallback();
return;
}
var msg = {
'cmd': 'SetAdapterProperty',
'property': 'Name',
'value': name
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
handleAdapterUpdated(result);
if (successCallback)
successCallback();
});
};
BluetoothAdapter.prototype.setPowered = function(state, successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('b?ff', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (state === defaultAdapter.powered) {
if (successCallback)
successCallback();
return;
}
var msg = {
'cmd': 'SetAdapterProperty',
'property': 'Powered',
'value': state
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
handleAdapterUpdated(result);
if (successCallback)
successCallback();
});
};
BluetoothAdapter.prototype.setVisible = function(mode, successCallback, errorCallback, timeout) {
if (!xwalk.utils.validateArguments('b?ffn', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
if (timeout === undefined || typeof timeout !== 'number' || timeout < 0)
timeout = 180; // According to tizen.bluetooth documentation.
var msg = {
'cmd': 'SetAdapterProperty',
'property': 'Discoverable',
'value': mode,
'timeout': timeout
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
handleAdapterUpdated(result);
if (successCallback)
successCallback();
});
};
BluetoothAdapter.prototype.discoverDevices = function(discoverySuccessCallback, errorCallback) {
if (!xwalk.utils.validateArguments('o?f', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (!xwalk.utils.validateObject(discoverySuccessCallback, 'ffff',
['onstarted', 'ondevicefound', 'ondevicedisappeared', 'onfinished'])) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var msg = {
'cmd': 'DiscoverDevices'
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
adapter.discovery_callbacks = discoverySuccessCallback;
if (discoverySuccessCallback && discoverySuccessCallback.onstarted)
discoverySuccessCallback.onstarted();
});
};
BluetoothAdapter.prototype.stopDiscovery = function(successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('?ff', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var msg = {
'cmd': 'StopDiscovery'
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
if (successCallback)
successCallback();
handleDiscoveryFinished();
});
};
BluetoothAdapter.prototype.getKnownDevices = function(deviceArraySuccessCallback, errorCallback) {
if (!xwalk.utils.validateArguments('f?f', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
// FIXME(jeez): we are not returning a deep copy so we can keep
// the devices up-to-date. We have to find a better way to handle this.
deviceArraySuccessCallback(adapter.known_devices);
};
BluetoothAdapter.prototype.getDevice = function(address, deviceSuccessCallback, errorCallback) {
if (!xwalk.utils.validateArguments('sf?f', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (!validateAddress(address)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var index = adapter.indexOfDevice(adapter.known_devices, address);
if (index == -1) {
var error = new tizen.WebAPIError(tizen.WebAPIException.NOT_FOUND_ERR);
    if (errorCallback)
      errorCallback(error);
return;
}
deviceSuccessCallback(adapter.known_devices[index]);
};
BluetoothAdapter.prototype.createBonding = function(address, successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('sf?f', arguments)) {
throw new tizen.WebAPIError(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (!validateAddress(address)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var index = adapter.indexOfDevice(adapter.known_devices, address);
if (index == -1) {
var error = new tizen.WebAPIError(tizen.WebAPIException.NOT_FOUND_ERR);
if (errorCallback)
errorCallback(error);
return;
}
var msg = {
'cmd': 'CreateBonding',
'address': address
};
postMessage(msg, function(result) {
var cb_device;
if (checkPostError(result, errorCallback))
return;
if (successCallback) {
var known_devices = adapter.known_devices;
for (var i = 0; i < known_devices.length; i++) {
if (known_devices[i].address === address) {
cb_device = known_devices[i];
break;
}
}
// FIXME(clecou) Update known device state here when using C API Tizen backend
// BlueZ backends update the device state automatically when catching dbus signals.
// A better approach would be to adapt backends instances to have a single JSON protocol.
if (result.capi)
_addConstProperty(adapter.known_devices[i], 'isBonded', true);
successCallback(cb_device);
}
});
};
BluetoothAdapter.prototype.destroyBonding = function(address, successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('s?ff', arguments)) {
    throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
if (!validateAddress(address)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
var index = adapter.indexOfDevice(adapter.known_devices, address);
if (index == -1) {
var error = new tizen.WebAPIError(tizen.WebAPIException.NOT_FOUND_ERR);
if (errorCallback)
errorCallback(error);
return;
}
var msg = {
'cmd': 'DestroyBonding',
'address': address
};
postMessage(msg, function(result) {
var cb_device;
if (checkPostError(result, errorCallback))
return;
if (successCallback) {
var known_devices = adapter.known_devices;
for (var i = 0; i < known_devices.length; i++) {
if (known_devices[i].address === address) {
cb_device = known_devices[i];
break;
}
}
// FIXME(clecou) Update known device state here when using C API Tizen backend
// BlueZ backends update the device state automatically when catching dbus signals
// A better approach would be to adapt backends instances to have a single JSON protocol.
      if (result.capi && i < known_devices.length)
        known_devices.splice(i, 1);
successCallback(cb_device);
}
});
};
BluetoothAdapter.prototype.registerRFCOMMServiceByUUID =
function(uuid, name, serviceSuccessCallback, errorCallback) {
if (!xwalk.utils.validateArguments('ssf?f', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var msg = {
'cmd': 'RFCOMMListen',
'uuid': uuid,
'name': name
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
    var service = new BluetoothServiceHandler(name, uuid.toUpperCase(), result);
adapter.service_handlers.push(service);
if (serviceSuccessCallback) {
serviceSuccessCallback(service);
}
});
};
BluetoothAdapter.prototype.setChangeListener = function(listener) {
if (!xwalk.utils.validateArguments('o', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (!xwalk.utils.validateObject(listener, 'fff',
['onstatechanged', 'onnamechanged', 'onvisibilitychanged'])) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
adapter.change_listener = listener;
};
BluetoothAdapter.prototype.unsetChangeListener = function() {
adapter.change_listener = {};
};
function BluetoothProfileHandler(profileType) {
_addConstProperty(this, 'profileType', profileType);
}
function BluetoothHealthProfileHandler() {
BluetoothProfileHandler.call(this, 'HEALTH');
}
BluetoothHealthProfileHandler.prototype = Object.create(BluetoothProfileHandler.prototype);
BluetoothHealthProfileHandler.prototype.constructor = BluetoothHealthProfileHandler;
BluetoothAdapter.prototype.getBluetoothProfileHandler = function(profile_type) {
if (!xwalk.utils.validateArguments('s', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
return (profile_type === 'HEALTH') ? new BluetoothHealthProfileHandler() :
new BluetoothProfileHandler(profile_type);
};
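// Bit masks for the Bluetooth Class of Device (CoD) field: 6 bits of minor
// class, 5 bits of major class, and the service-class bits taken from the
// top of the 24-bit CoD value.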
var _deviceClassMask = {
'MINOR': 0x3F,
'MAJOR': 0x1F,
'SERVICE': 0x7F9
};
function BluetoothDevice(msg) {
if (!msg) {
_addConstProperty(this, 'name', '');
_addConstProperty(this, 'address', '');
_addConstProperty(this, 'deviceClass', new BluetoothClass());
_addConstProperty(this, 'isBonded', false);
_addConstProperty(this, 'isTrusted', false);
_addConstProperty(this, 'isConnected', false);
_addConstProperty(this, 'uuids', []);
return;
}
_addConstProperty(this, 'name', msg.Alias);
_addConstProperty(this, 'address', msg.Address);
_addConstProperty(this, 'deviceClass', new BluetoothClass());
_addConstProperty(this.deviceClass, 'minor', (msg.ClassMinor >> 2) & _deviceClassMask.MINOR);
_addConstProperty(this.deviceClass, 'major', msg.ClassMajor & _deviceClassMask.MAJOR);
_addConstProperty(this, 'isBonded', (msg.Paired == 'true'));
_addConstProperty(this, 'isTrusted', (msg.Trusted == 'true'));
_addConstProperty(this, 'isConnected', (msg.Connected == 'true'));
if (msg.UUIDs) {
var uuids_array = [];
if (typeof msg.UUIDs === 'string') {
// FIXME(clecou) BlueZ backend sends a string to convert it into an array
// A better approach would be to adapt backends instances to have a single JSON protocol.
uuids_array = msg.UUIDs.substring(msg.UUIDs.indexOf('[') + 1,
msg.UUIDs.indexOf(']')).split(',');
for (var i = 0; i < uuids_array.length; i++) {
uuids_array[i] = uuids_array[i].substring(2, uuids_array[i].length - 1);
}
} else {
// Tizen C API backend directly sends an array
uuids_array = msg.UUIDs;
for (var i = 0; i < msg.UUIDs.length; i++)
_addConstProperty(uuids_array, i.toString(), msg.UUIDs[i].toUpperCase());
}
_addConstProperty(this, 'uuids', uuids_array);
}
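  // Service classes live in the upper bits (13-23) of the CoD value; the
  // loop below expands each set bit into its own deviceClass.services entry.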
var services = (msg.ClassService >> 13) & _deviceClassMask.SERVICE;
var services_array = [];
var index = 0;
var SERVICE_CLASS_BITS_NUMBER = 11;
for (var i = 0; i < SERVICE_CLASS_BITS_NUMBER; i++) {
if ((services & (1 << i)) !== 0) {
_addConstProperty(services_array, index.toString(), (1 << i));
index++;
}
}
_addConstProperty(this.deviceClass, 'services', services_array);
}
BluetoothDevice.prototype.connectToServiceByUUID =
function(uuid, socketSuccessCallback, errorCallback) {
if (!xwalk.utils.validateArguments('sf?f', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var uuid_found = false;
for (var i = 0; i < this.uuids.length; i++) {
if (this.uuids[i] == uuid.toUpperCase()) {
uuid_found = true;
break;
}
}
  if (!uuid_found) {
    var error = new tizen.WebAPIError(tizen.WebAPIException.NOT_FOUND_ERR);
    if (errorCallback)
      errorCallback(error);
    // Bail out here, otherwise the connect request would still be posted.
    return;
  }
var msg = {
'cmd': 'ConnectToService',
'uuid': uuid,
'address' : this.address
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
var i = adapter.indexOfDevice(adapter.known_devices, result.peer);
var socket = new BluetoothSocket(result.uuid, adapter.known_devices[i], result);
adapter.sockets.push(socket);
socketSuccessCallback(socket);
});
};
BluetoothDevice.prototype._clone = function() {
var clone = new BluetoothDevice();
_addConstProperty(clone, 'name', this.name);
_addConstProperty(clone, 'address', this.address);
_addConstProperty(clone, 'deviceClass', this.deviceClass);
_addConstProperty(clone, 'isBonded', this.isBonded);
_addConstProperty(clone, 'isTrusted', this.isTrusted);
_addConstProperty(clone, 'isConnected', this.isConnected);
var uuids_array = [];
for (var i = 0; i < this.uuids.length; i++)
uuids_array[i] = this.uuids[i];
_addConstProperty(clone, 'uuids', uuids_array);
return clone;
};
BluetoothDevice.prototype._updateProperties = function(device) {
if (device.hasOwnProperty('name'))
_addConstProperty(this, 'name', device.name);
if (device.hasOwnProperty('address'))
_addConstProperty(this, 'address', device.address);
if (device.hasOwnProperty('deviceClass'))
_addConstProperty(this, 'deviceClass', device.deviceClass);
if (device.hasOwnProperty('isBonded'))
_addConstProperty(this, 'isBonded', device.isBonded);
if (device.hasOwnProperty('isTrusted'))
_addConstProperty(this, 'isTrusted', device.isTrusted);
if (device.hasOwnProperty('isConnected'))
_addConstProperty(this, 'isConnected', device.isConnected);
if (device.hasOwnProperty('uuids')) {
    for (var i = 0; i < device.uuids.length; i++)
this.uuids[i] = device.uuids[i];
}
};
function BluetoothSocket(uuid, peer, msg) {
_addConstProperty(this, 'uuid', uuid);
_addConstProperty(this, 'peer', peer);
_addConstProperty(this, 'state', 'OPEN');
this.onclose = null;
this.onmessage = null;
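  // Incoming payloads are presumably appended to this buffer by the
  // extension's message handler and drained by readData().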
this.data = [];
this.channel = 0;
this.socket_fd = 0;
if (msg) {
this.channel = msg.channel;
this.socket_fd = msg.socket_fd;
}
}
BluetoothSocket.prototype.writeData = function(data) {
  // Make sure the socket is still open before writing.
  if (this.state == 'CLOSED') {
    throw new tizen.WebAPIException(tizen.WebAPIException.UNKNOWN_ERR);
  }
var msg = {
'cmd': 'SocketWriteData',
'data': data,
'socket_fd': this.socket_fd
};
var result = JSON.parse(extension.internal.sendSyncMessage(JSON.stringify(msg)));
return result.size;
};
BluetoothSocket.prototype.readData = function() {
return this.data;
};
BluetoothSocket.prototype.close = function() {
var msg = {
'cmd': 'CloseSocket',
'socket_fd': this.socket_fd
};
postMessage(msg, function(result) {
    // close() takes no error callback; use msg.socket_fd because `this` inside
    // this callback no longer refers to the socket object.
    if (checkPostError(result)) {
      console.log('Can\'t close socket (' + msg.socket_fd + ').');
      return;
    }
// FIXME(clecou) Update socket object state only when using Tizen C API backend.
// BlueZ4 backend independently updates socket state based on a dbus callback mechanism.
// A better approach would be to adapt backends instances to have a single JSON protocol.
if (result.capi) {
for (var i in adapter.sockets) {
var socket = adapter.sockets[i];
if (socket.socket_fd === msg.socket_fd) {
          if (typeof socket.onclose === 'function') {
_addConstProperty(adapter.sockets[i], 'state', 'CLOSED');
socket.onclose();
}
}
}
}
});
};
function BluetoothClass() {}
BluetoothClass.prototype.hasService = function(service) {
for (var i = 0; i < this.services.length; i++) {
if (this.services[i] === service)
return true;
}
return false;
};
function BluetoothServiceHandler(name, uuid, msg) {
_addConstProperty(this, 'name', name);
_addConstProperty(this, 'uuid', uuid);
_addConstProperty(this, 'isConnected', false);
this.onconnect = null;
if (msg) {
this.server_fd = msg.server_fd;
this.sdp_handle = msg.sdp_handle;
this.channel = msg.channel;
}
}
BluetoothServiceHandler.prototype.unregister = function(successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('?ff', arguments)) {
    throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
var msg = {
'cmd': 'UnregisterServer',
'server_fd': this.server_fd,
'sdp_handle': this.sdp_handle
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
for (var i in adapter.service_handlers) {
var service = adapter.service_handlers[i];
if (service.server_fd == result.socket_fd)
adapter.service_handlers.splice(i, 1);
}
if (successCallback)
successCallback();
});
};
function BluetoothHealthApplication(data_type, app_name, msg) {
_addConstProperty(this, 'dataType', data_type);
_addConstProperty(this, 'name', app_name);
this.onconnect = null;
if (msg)
this.app_id = msg.app_id;
}
BluetoothHealthApplication.prototype.unregister =
function(successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('?ff', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var msg = {
'cmd': 'UnregisterSinkApp',
'app_id': this.app_id
};
var app = this;
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
if (app.app_id)
delete adapter.health_apps[app.app_id];
if (successCallback)
successCallback();
});
};
BluetoothHealthProfileHandler.prototype.registerSinkApplication =
function(dataType, name, successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('nsf?f', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var msg = {
'cmd': 'RegisterSinkApp',
'datatype': dataType
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
if (successCallback) {
var application = new BluetoothHealthApplication(dataType, name, result);
adapter.health_apps[result.app_id] = application;
successCallback(application);
}
});
};
BluetoothHealthProfileHandler.prototype.connectToSource =
function(peer, application, successCallback, errorCallback) {
if (!xwalk.utils.validateArguments('oof?f', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (adapter.checkServiceAvailability(errorCallback))
return;
var msg = {
'cmd': 'ConnectToSource',
'address': peer.address,
'app_id': application.app_id
};
postMessage(msg, function(result) {
if (checkPostError(result, errorCallback))
return;
if (successCallback) {
var i = adapter.indexOfDevice(adapter.known_devices, result.address);
var channel = new BluetoothHealthChannel(adapter.known_devices[i],
adapter.health_apps[result.app_id], result);
successCallback(channel);
}
});
};
function BluetoothHealthChannel(device, application, msg) {
_addConstProperty(this, 'peer', device);
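  // HDP data channels are either reliable or streaming; the backend is
  // assumed to report channel_type == 1 for reliable channels.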
_addConstProperty(this, 'channelType', (msg.channel_type == 1) ? 'RELIABLE' : 'STREAMING');
_addConstProperty(this, 'application', application);
_addConstProperty(this, 'isConnected', (msg.connected == 'true'));
this.channel = msg.channel;
this.data = [];
}
BluetoothHealthChannel.prototype.close = function() {
  if (adapter.checkServiceAvailability())
return;
var msg = {
'cmd': 'DisconnectSource',
'address': this.peer.address,
'channel': this.channel
};
var channel = this;
postMessage(msg, function(result) {
    if (checkPostError(result))
return;
_addConstProperty(channel, 'isConnected', false);
if (adapter.health_channel_listener.onclose)
adapter.health_channel_listener.onclose();
});
};
BluetoothHealthChannel.prototype.sendData = function(data) {
  if (!xwalk.utils.validateArguments('o', arguments)) {
    throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
  }
  if (adapter.checkServiceAvailability())
    return;
var msg = {
'cmd': 'SendHealthData',
'data': data,
'channel' : this.channel
};
  postMessage(msg, function(result) {
    // postMessage() is asynchronous, so result.size cannot be returned from
    // sendData() itself; failures are only logged here.
    if (result.error != tizen.WebAPIException.NO_ERROR)
      console.log('Failed to send data on health channel ' + msg.channel);
  });
};
BluetoothHealthChannel.prototype.setListener = function(listener) {
if (!xwalk.utils.validateArguments('o', arguments)) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
if (!xwalk.utils.validateObject(listener, 'ff', ['onmessage', 'onclose'])) {
throw new tizen.WebAPIException(tizen.WebAPIException.TYPE_MISMATCH_ERR);
}
adapter.health_channel_listener = listener;
};
BluetoothHealthChannel.prototype.unsetListener = function() {
adapter.health_channel_listener = {};
};<|fim▁end|> | var copiedDevices = [];
for (var i = 0; i < devices.length; i++)
copiedDevices[i] = devices[i]._clone(); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
# Copyright (c) 05 2015 | surya
# 18/05/15 [email protected]
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# __init__.py.py
"""
import urlparse
from niimanga.libs.exceptions import HtmlError
from requests import request
class Site:
def __init__(self):
pass
def get_html(self, url, method='GET', **kwargs):
resp = request(method, url, **kwargs)
if resp.status_code != 200:
raise HtmlError({'msg': 'external_request_fail', 'url': url})
return resp.content
def fetch_manga_seed_page(self, url, **kwargs):
return self.get_html(url, **kwargs)
def fetch_chapter_seed_page(self, url, **kwargs):
return self.get_html(url, **kwargs)
def fetch_page_image(self, url, **kwargs):
return self.get_html(url, **kwargs)
<|fim▁hole|> url
site
This should be specifically implemented in each Site subclass. If not,
this method will be used which returns an empty list.
"""
return []
from mangaeden import MangaEden
from batoto import Batoto
available_sites = [
# Kissmanga(),
# Vitaku(),
Batoto(),
# Mangafox(),
# Mangahere(),
# MangaHereMob(),
MangaEden()
]
# Factory function, return instance of suitable "site" class from url
def get_site(url):
netloc = urlparse.urlparse(url).netloc
for site in available_sites:
if netloc in site.netlocs:
return site
return None<|fim▁end|> | def search_by_author(self, author):
"""
Return list of chapter dicts whose keys are:
name |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Tesserama - Simple membership cards manager
// Copyright (C) 2017-2021 Andrea Bolognani <[email protected]>
// SPDX-License-Identifier: GPL-2.0-or-later
<|fim▁hole|>}<|fim▁end|> | fn main() {
tesserama::Application::new().run(); |
<|file_name|>miss_vs_stall.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import analyze_conf
import sys
import datetime, glob, job_stats, os, subprocess, time<|fim▁hole|># Set the matplotlib output mode from config if it exists
if not 'matplotlib.pyplot' in sys.modules:
try:
matplotlib.use(analyze_conf.matplotlib_output_mode)
except NameError:
matplotlib.use('pdf')
import matplotlib.pyplot as plt
import numpy
import scipy, scipy.stats
import argparse
import tspl, tspl_utils, lariat_utils, plot
import math
import multiprocessing, functools, itertools
import cPickle as pickle
def do_work(file,mintime,wayness,lariat_dict):
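  # Returns (mean stall fraction, mean L1-miss fraction of loads, mean CPI,
  # shortened executable name, low-memory-bandwidth flag); all None on failure.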
retval=(None,None,None,None,None)
res=plot.get_data(file,mintime,wayness,lariat_dict)
if (res is None):
return retval
(ts, ld, tmid,
read_rate, write_rate, stall_rate, clock_rate, avx_rate, sse_rate, inst_rate,
meta_rate, l1_rate, l2_rate, l3_rate, load_rate, read_frac, stall_frac) = res
# return (scipy.stats.tmean(stall_frac),
# scipy.stats.tmean((load_rate - (l1_rate + l2_rate +
# l3_rate))/load_rate))
mean_mem_rate=scipy.stats.tmean(read_rate+write_rate)*64.0
ename=ld.exc.split('/')[-1]
ename=tspl_utils.string_shorten(ld.comp_name(ename,ld.equiv_patterns),8)
if ename=='unknown':
return retval
flag=False
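  # Flag jobs whose mean DRAM traffic is below ~75 GB/s split across 16 cores
  # (about 4.7 GB/s per core), i.e. jobs that are presumably not memory-bound.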
if mean_mem_rate < 75.*1000000000./16.:
flag=True
return (scipy.stats.tmean(stall_frac),
scipy.stats.tmean((load_rate - (l1_rate))/load_rate),
scipy.stats.tmean(clock_rate/inst_rate),ename,
flag)
def main():
  parser = argparse.ArgumentParser(description='Look for imbalance between '
                                   'hosts for a pair of keys')
parser.add_argument('filearg', help='File, directory, or quoted'
' glob pattern', nargs='?',default='jobs')
parser.add_argument('-p', help='Set number of processes',
nargs=1, type=int, default=[1])
n=parser.parse_args()
filelist=tspl_utils.getfilelist(n.filearg)
procs = min(len(filelist),n.p[0])
job=pickle.load(open(filelist[0]))
jid=job.id
epoch=job.end_time
ld=lariat_utils.LariatData(jid,end_epoch=epoch,daysback=3,directory=analyze_conf.lariat_path)
if procs < 1:
print 'Must have at least one file'
exit(1)
pool = multiprocessing.Pool(processes=procs)
partial_work=functools.partial(do_work,mintime=3600.,wayness=16,lariat_dict=ld.ld)
results=pool.map(partial_work,filelist)
fig1,ax1=plt.subplots(1,1,figsize=(20,8),dpi=80)
fig2,ax2=plt.subplots(1,1,figsize=(20,8),dpi=80)
maxx=0.
for state in [ True, False ]:
stalls=[]
misses=[]
cpis=[]
enames=[]
for (s,m,cpi,ename,flag) in results:
if (s != None and m > 0. and m < 1.0 and flag==state):
stalls.extend([s])
misses.extend([m])
cpis.extend([cpi])
enames.extend([ename])
markers = itertools.cycle(('o','x','+','^','s','8','p',
'h','*','D','<','>','v','d','.'))
colors = itertools.cycle(('b','g','r','c','m','k','y'))
fmt={}
for e in enames:
if not e in fmt:
fmt[e]=markers.next()+colors.next()
for (s,c,e) in zip(stalls,cpis,enames):
# ax1.plot(numpy.log10(1.-(1.-s)),numpy.log10(c),
maxx=max(maxx,1./(1.-s))
ax1.plot((1./(1.-s)),(c),
marker=fmt[e][0],
markeredgecolor=fmt[e][1],
linestyle='', markerfacecolor='None',
label=e)
ax1.hold=True
ax2.plot((1./(1.-s)),(c),
marker=fmt[e][0],
markeredgecolor=fmt[e][1],
linestyle='', markerfacecolor='None',
label=e)
ax2.hold=True
#ax.plot(numpy.log10(stalls),numpy.log10(cpis),fmt)
#ax.plot(numpy.log10(1.0/(1.0-numpy.array(stalls))),numpy.log10(cpis),fmt)
ax1.set_xscale('log')
ax1.set_xlim(left=0.95,right=1.05*maxx)
ax1.set_yscale('log')
box = ax1.get_position()
ax1.set_position([box.x0, box.y0, box.width * 0.45, box.height])
box = ax2.get_position()
ax2.set_position([box.x0, box.y0, box.width * 0.45, box.height])
handles=[]
labels=[]
for h,l in zip(*ax1.get_legend_handles_labels()):
if l in labels:
continue
else:
handles.extend([h])
labels.extend([l])
ax1.legend(handles,labels,bbox_to_anchor=(1.05, 1),
loc=2, borderaxespad=0., numpoints=1,ncol=4)
ax1.set_xlabel('log(Cycles per Execution Cycle)')
ax1.set_ylabel('log(CPI)')
handles=[]
labels=[]
for h,l in zip(*ax2.get_legend_handles_labels()):
if l in labels:
continue
else:
handles.extend([h])
labels.extend([l])
ax2.legend(handles,labels,bbox_to_anchor=(1.05, 1),
loc=2, borderaxespad=0., numpoints=1,ncol=4)
ax2.set_xlabel('Cycles per Execution Cycle')
ax2.set_ylabel('CPI')
fname='miss_v_stall_log'
fig1.savefig(fname)
fname='miss_v_stall'
fig2.savefig(fname)
plt.close()
if __name__ == '__main__':
main()<|fim▁end|> | import operator
import matplotlib |
<|file_name|>writer.py<|end_file_name|><|fim▁begin|>import numpy as np
from displays.letters import ALPHABET
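# Hypothetical usage with an 8x8 LED matrix driver (the driver is not part of
# this module):
#     writer = Writer()
#     for frame in writer.write('hello'):
#         matrix.show(frame)  # `matrix` is whatever sink renders 8x8 frames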
class Writer:
"""Produce scrolling text for the LED display, frame by frame"""
def __init__(self):
self.font = ALPHABET
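        # one blank 8x1 column used to separate adjacent letters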
self.spacer = np.zeros([8, 1], dtype=int)
self.phrase = None
def make_phrase(self, phrase):
"""Convert a string into a long numpy array with spacing"""
# phrase.lower() called because ALPHABET currently doesn't have capitals
converted = [np.hstack([self.font[letter], self.spacer])<|fim▁hole|> for letter in phrase.lower()]
self.phrase = np.hstack(converted)
def generate_frames(self):
"""Produce single 8*8 frames scrolling across phrase"""
height, width = np.shape(self.phrase)
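        # Slide an 8-column window across the phrase, one column per frame.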
for frame in range(width - 8):
yield self.phrase[:, frame:frame + 8]
def write(self, phrase):
"""Easily get frames for a phrase"""
self.make_phrase(phrase)
for frame in self.generate_frames():
yield frame<|fim▁end|> | |
<|file_name|>release.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
"""release.py
"""
import os<|fim▁hole|># SignAndroid {{{1
class ReleaseMixin():
release_config = {}
def query_release_config(self):
if self.release_config:
return self.release_config
c = self.config
dirs = self.query_abs_dirs()
if c.get("release_config_file"):
self.info("Getting release config from %s..." % c["release_config_file"])
rc = None
try:
rc = parse_config_file(
os.path.join(dirs['abs_work_dir'],
c["release_config_file"]),
config_dict_name="releaseConfig"
)
except IOError:
self.fatal("Release config file %s not found!" % c["release_config_file"])
except RuntimeError:
self.fatal("Invalid release config file %s!" % c["release_config_file"])
self.release_config['version'] = rc['version']
self.release_config['buildnum'] = rc['buildNumber']
self.release_config['ftp_server'] = rc['stagingServer']
self.release_config['ftp_user'] = c.get('ftp_user', rc['hgUsername'])
self.release_config['ftp_ssh_key'] = c.get('ftp_ssh_key', rc['hgSshKey'])
else:
self.info("No release config file; using default config.")
for key in ('version', 'buildnum',
'ftp_server', 'ftp_user', 'ftp_ssh_key'):
self.release_config[key] = c[key]
self.info("Release config:\n%s" % self.release_config)
return self.release_config<|fim▁end|> |
from mozharness.base.config import parse_config_file
|
<|file_name|>extension.rs<|end_file_name|><|fim▁begin|>//! ESMTP features
use std::{
collections::HashSet,
fmt::{self, Display, Formatter},
net::{Ipv4Addr, Ipv6Addr},
result::Result,
};
use crate::transport::smtp::{
authentication::Mechanism,
error::{self, Error},
response::Response,
util::XText,
};
/// Client identifier, the parameter to `EHLO`
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[non_exhaustive]
pub enum ClientId {
/// A fully-qualified domain name
Domain(String),
/// An IPv4 address
Ipv4(Ipv4Addr),
/// An IPv6 address
Ipv6(Ipv6Addr),
}
const LOCALHOST_CLIENT: ClientId = ClientId::Ipv4(Ipv4Addr::new(127, 0, 0, 1));
impl Default for ClientId {
fn default() -> Self {
// https://tools.ietf.org/html/rfc5321#section-4.1.4
//
// The SMTP client MUST, if possible, ensure that the domain parameter
// to the EHLO command is a primary host name as specified for this
// command in Section 2.3.5. If this is not possible (e.g., when the
// client's address is dynamically assigned and the client does not have
// an obvious name), an address literal SHOULD be substituted for the
// domain name.
#[cfg(feature = "hostname")]
{
hostname::get()
.ok()
.and_then(|s| s.into_string().map(Self::Domain).ok())
.unwrap_or(LOCALHOST_CLIENT)
}
#[cfg(not(feature = "hostname"))]
LOCALHOST_CLIENT
}
}
impl Display for ClientId {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
Self::Domain(ref value) => f.write_str(value),
Self::Ipv4(ref value) => write!(f, "[{}]", value),
Self::Ipv6(ref value) => write!(f, "[IPv6:{}]", value),
}
}
}
impl ClientId {
#[doc(hidden)]
#[deprecated(since = "0.10.0", note = "Please use ClientId::Domain(domain) instead")]
/// Creates a new `ClientId` from a fully qualified domain name
pub fn new(domain: String) -> Self {
Self::Domain(domain)
}
}
/// Supported ESMTP keywords
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[non_exhaustive]
pub enum Extension {
/// 8BITMIME keyword
///
/// Defined in [RFC 6152](https://tools.ietf.org/html/rfc6152)
EightBitMime,
/// SMTPUTF8 keyword
///
/// Defined in [RFC 6531](https://tools.ietf.org/html/rfc6531)
SmtpUtfEight,
/// STARTTLS keyword
///
/// Defined in [RFC 2487](https://tools.ietf.org/html/rfc2487)
StartTls,
/// AUTH mechanism
Authentication(Mechanism),
}
impl Display for Extension {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
Extension::EightBitMime => f.write_str("8BITMIME"),
Extension::SmtpUtfEight => f.write_str("SMTPUTF8"),
Extension::StartTls => f.write_str("STARTTLS"),
Extension::Authentication(ref mechanism) => write!(f, "AUTH {}", mechanism),
}
}
}
/// Contains information about an SMTP server
#[derive(Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct ServerInfo {
/// Server name
///
/// The name given in the server banner
name: String,
/// ESMTP features supported by the server
///
/// It contains the features supported by the server and known by the `Extension` module.
features: HashSet<Extension>,
}
impl Display for ServerInfo {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let features = if self.features.is_empty() {
"no supported features".to_string()
} else {
format!("{:?}", self.features)
};
write!(f, "{} with {}", self.name, features)
}
}
impl ServerInfo {
/// Parses a EHLO response to create a `ServerInfo`
pub fn from_response(response: &Response) -> Result<ServerInfo, Error> {
let name = match response.first_word() {
Some(name) => name,
None => return Err(error::response("Could not read server name")),
};
let mut features: HashSet<Extension> = HashSet::new();
for line in response.message() {
if line.is_empty() {
continue;
}
let mut split = line.split_whitespace();
match split.next().unwrap() {
"8BITMIME" => {
features.insert(Extension::EightBitMime);
}
"SMTPUTF8" => {
features.insert(Extension::SmtpUtfEight);
}
"STARTTLS" => {
features.insert(Extension::StartTls);
}
"AUTH" => {
for mechanism in split {
match mechanism {
"PLAIN" => {
features.insert(Extension::Authentication(Mechanism::Plain));
}
"LOGIN" => {
features.insert(Extension::Authentication(Mechanism::Login));
}
"XOAUTH2" => {
features.insert(Extension::Authentication(Mechanism::Xoauth2));
}
_ => (),
}
}
}
_ => (),
};
}
Ok(ServerInfo {
name: name.to_string(),
features,
})
}
/// Checks if the server supports an ESMTP feature
pub fn supports_feature(&self, keyword: Extension) -> bool {
self.features.contains(&keyword)
}
/// Checks if the server supports an ESMTP feature
pub fn supports_auth_mechanism(&self, mechanism: Mechanism) -> bool {
self.features
.contains(&Extension::Authentication(mechanism))
}
/// Gets a compatible mechanism from list
pub fn get_auth_mechanism(&self, mechanisms: &[Mechanism]) -> Option<Mechanism> {
for mechanism in mechanisms {
if self.supports_auth_mechanism(*mechanism) {
return Some(*mechanism);
}
}
None
}
/// The name given in the server banner
pub fn name(&self) -> &str {
self.name.as_ref()
}
}
/// A `MAIL FROM` extension parameter
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum MailParameter {
/// `BODY` parameter
Body(MailBodyParameter),
/// `SIZE` parameter
Size(usize),
/// `SMTPUTF8` parameter
SmtpUtfEight,
/// Custom parameter
Other {
/// Parameter keyword
keyword: String,
/// Parameter value
value: Option<String>,
},
}
impl Display for MailParameter {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
MailParameter::Body(ref value) => write!(f, "BODY={}", value),
MailParameter::Size(size) => write!(f, "SIZE={}", size),
MailParameter::SmtpUtfEight => f.write_str("SMTPUTF8"),
MailParameter::Other {
ref keyword,
value: Some(ref value),<|fim▁hole|> } => f.write_str(keyword),
}
}
}
/// Values for the `BODY` parameter to `MAIL FROM`
#[derive(PartialEq, Eq, Clone, Debug, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum MailBodyParameter {
/// `7BIT`
SevenBit,
/// `8BITMIME`
EightBitMime,
}
impl Display for MailBodyParameter {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
MailBodyParameter::SevenBit => f.write_str("7BIT"),
MailBodyParameter::EightBitMime => f.write_str("8BITMIME"),
}
}
}
/// A `RCPT TO` extension parameter
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum RcptParameter {
/// Custom parameter
Other {
/// Parameter keyword
keyword: String,
/// Parameter value
value: Option<String>,
},
}
impl Display for RcptParameter {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
RcptParameter::Other {
ref keyword,
value: Some(ref value),
} => write!(f, "{}={}", keyword, XText(value)),
RcptParameter::Other {
ref keyword,
value: None,
} => f.write_str(keyword),
}
}
}
#[cfg(test)]
mod test {
use std::collections::HashSet;
use super::*;
use crate::transport::smtp::{
authentication::Mechanism,
response::{Category, Code, Detail, Response, Severity},
};
#[test]
fn test_clientid_fmt() {
assert_eq!(
format!("{}", ClientId::Domain("test".to_string())),
"test".to_string()
);
assert_eq!(format!("{}", LOCALHOST_CLIENT), "[127.0.0.1]".to_string());
}
#[test]
fn test_extension_fmt() {
assert_eq!(
format!("{}", Extension::EightBitMime),
"8BITMIME".to_string()
);
assert_eq!(
format!("{}", Extension::Authentication(Mechanism::Plain)),
"AUTH PLAIN".to_string()
);
}
#[test]
fn test_serverinfo_fmt() {
let mut eightbitmime = HashSet::new();
assert!(eightbitmime.insert(Extension::EightBitMime));
assert_eq!(
format!(
"{}",
ServerInfo {
name: "name".to_string(),
features: eightbitmime,
}
),
"name with {EightBitMime}".to_string()
);
let empty = HashSet::new();
assert_eq!(
format!(
"{}",
ServerInfo {
name: "name".to_string(),
features: empty,
}
),
"name with no supported features".to_string()
);
let mut plain = HashSet::new();
assert!(plain.insert(Extension::Authentication(Mechanism::Plain)));
assert_eq!(
format!(
"{}",
ServerInfo {
name: "name".to_string(),
features: plain,
}
),
"name with {Authentication(Plain)}".to_string()
);
}
#[test]
fn test_serverinfo() {
let response = Response::new(
Code::new(
Severity::PositiveCompletion,
Category::Unspecified4,
Detail::One,
),
vec![
"me".to_string(),
"8BITMIME".to_string(),
"SIZE 42".to_string(),
],
);
let mut features = HashSet::new();
assert!(features.insert(Extension::EightBitMime));
let server_info = ServerInfo {
name: "me".to_string(),
features,
};
assert_eq!(ServerInfo::from_response(&response).unwrap(), server_info);
assert!(server_info.supports_feature(Extension::EightBitMime));
assert!(!server_info.supports_feature(Extension::StartTls));
let response2 = Response::new(
Code::new(
Severity::PositiveCompletion,
Category::Unspecified4,
Detail::One,
),
vec![
"me".to_string(),
"AUTH PLAIN CRAM-MD5 XOAUTH2 OTHER".to_string(),
"8BITMIME".to_string(),
"SIZE 42".to_string(),
],
);
let mut features2 = HashSet::new();
assert!(features2.insert(Extension::EightBitMime));
assert!(features2.insert(Extension::Authentication(Mechanism::Plain),));
assert!(features2.insert(Extension::Authentication(Mechanism::Xoauth2),));
let server_info2 = ServerInfo {
name: "me".to_string(),
features: features2,
};
assert_eq!(ServerInfo::from_response(&response2).unwrap(), server_info2);
assert!(server_info2.supports_feature(Extension::EightBitMime));
assert!(server_info2.supports_auth_mechanism(Mechanism::Plain));
assert!(!server_info2.supports_feature(Extension::StartTls));
}
}<|fim▁end|> | } => write!(f, "{}={}", keyword, XText(value)),
MailParameter::Other {
ref keyword,
value: None, |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#![cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "openbsd"))]
use ContextError;
use CreationError;
use GlAttributes;
use GlProfile;
use GlRequest;
use Api;
use PixelFormat;
use PixelFormatRequirements;
use ReleaseBehavior;
use Robustness;
use libc;
use libc::c_int;
use std::ffi::{CStr, CString};
use std::{mem, ptr, slice};
pub mod ffi {
pub use x11_dl::xlib::*;
pub use self::glx::types::GLXContext;
/// GLX bindings
pub mod glx {
include!(concat!(env!("OUT_DIR"), "/glx_bindings.rs"));
}
    /// Functions that are not necessarily always available
pub mod glx_extra {
include!(concat!(env!("OUT_DIR"), "/glx_extra_bindings.rs"));
}
}
pub struct Context {
glx: ffi::glx::Glx,
display: *mut ffi::Display,
window: ffi::Window,
context: ffi::GLXContext,
pixel_format: PixelFormat,<|fim▁hole|>}
// TODO: remove me
fn with_c_str<F, T>(s: &str, f: F) -> T where F: FnOnce(*const libc::c_char) -> T {
use std::ffi::CString;
let c_str = CString::new(s.as_bytes().to_vec()).unwrap();
f(c_str.as_ptr())
}
impl Context {
pub fn new<'a>(
glx: ffi::glx::Glx,
xlib: &'a ffi::Xlib,
pf_reqs: &PixelFormatRequirements,
opengl: &'a GlAttributes<&'a Context>,
display: *mut ffi::Display,
screen_id: libc::c_int,
transparent: bool,
) -> Result<ContextPrototype<'a>, CreationError>
{
// This is completely ridiculous, but VirtualBox's OpenGL driver needs some call handled by
// *it* (i.e. not Mesa) to occur before anything else can happen. That is because
// VirtualBox's OpenGL driver is going to apply binary patches to Mesa in the DLL
// constructor and until it's loaded it won't have a chance to do that.
//
// The easiest way to do this is to just call `glXQueryVersion()` before doing anything
// else. See: https://www.virtualbox.org/ticket/8293
let (mut major, mut minor) = (0, 0);
unsafe {
glx.QueryVersion(display as *mut _, &mut major, &mut minor);
}
// loading the list of extensions
let extensions = unsafe {
let extensions = glx.QueryExtensionsString(display as *mut _, screen_id);
let extensions = CStr::from_ptr(extensions).to_bytes().to_vec();
String::from_utf8(extensions).unwrap()
};
// finding the pixel format we want
let (fb_config, pixel_format) = unsafe {
try!(choose_fbconfig(&glx, &extensions, xlib, display, screen_id, pf_reqs, transparent)
.map_err(|_| CreationError::NoAvailablePixelFormat))
};
// getting the visual infos
let visual_infos: ffi::glx::types::XVisualInfo = unsafe {
let vi = glx.GetVisualFromFBConfig(display as *mut _, fb_config);
if vi.is_null() {
return Err(CreationError::OsError(format!("glxGetVisualFromFBConfig failed")));
}
let vi_copy = ptr::read(vi as *const _);
(xlib.XFree)(vi as *mut _);
vi_copy
};
Ok(ContextPrototype {
glx: glx,
extensions: extensions,
xlib: xlib,
opengl: opengl,
display: display,
fb_config: fb_config,
visual_infos: unsafe { mem::transmute(visual_infos) },
pixel_format: pixel_format,
})
}
pub unsafe fn make_current(&self) -> Result<(), ContextError> {
// TODO: glutin needs some internal changes for proper error recovery
let res = self.glx.MakeCurrent(self.display as *mut _, self.window, self.context);
if res == 0 {
panic!("glx::MakeCurrent failed");
}
Ok(())
}
#[inline]
pub fn is_current(&self) -> bool {
unsafe { self.glx.GetCurrentContext() == self.context }
}
pub fn get_proc_address(&self, addr: &str) -> *const () {
let addr = CString::new(addr.as_bytes()).unwrap();
let addr = addr.as_ptr();
unsafe {
self.glx.GetProcAddress(addr as *const _) as *const _
}
}
#[inline]
pub fn swap_buffers(&self) -> Result<(), ContextError> {
// TODO: glutin needs some internal changes for proper error recovery
unsafe { self.glx.SwapBuffers(self.display as *mut _, self.window); }
Ok(())
}
#[inline]
pub fn get_api(&self) -> ::Api {
::Api::OpenGl
}
#[inline]
pub fn get_pixel_format(&self) -> PixelFormat {
self.pixel_format.clone()
}
#[inline]
pub unsafe fn raw_handle(&self) -> ffi::GLXContext {
self.context
}
}
unsafe impl Send for Context {}
unsafe impl Sync for Context {}
impl Drop for Context {
fn drop(&mut self) {
unsafe {
if self.is_current() {
self.glx.MakeCurrent(self.display as *mut _, 0, ptr::null_mut());
}
self.glx.DestroyContext(self.display as *mut _, self.context);
}
}
}
pub struct ContextPrototype<'a> {
glx: ffi::glx::Glx,
extensions: String,
xlib: &'a ffi::Xlib,
opengl: &'a GlAttributes<&'a Context>,
display: *mut ffi::Display,
fb_config: ffi::glx::types::GLXFBConfig,
visual_infos: ffi::XVisualInfo,
pixel_format: PixelFormat,
}
impl<'a> ContextPrototype<'a> {
#[inline]
pub fn get_visual_infos(&self) -> &ffi::XVisualInfo {
&self.visual_infos
}
pub fn finish(self, window: ffi::Window) -> Result<Context, CreationError> {
let share = match self.opengl.sharing {
Some(ctxt) => ctxt.context,
None => ptr::null()
};
// loading the extra GLX functions
let extra_functions = ffi::glx_extra::Glx::load_with(|addr| {
with_c_str(addr, |s| {
unsafe { self.glx.GetProcAddress(s as *const u8) as *const _ }
})
});
// creating GL context
let context = match self.opengl.version {
GlRequest::Latest => {
let opengl_versions = [(4, 6), (4, 5), (4, 4), (4, 3), (4, 2), (4, 1), (4, 0),
(3, 3), (3, 2), (3, 1)];
let ctxt;
'outer: loop
{
// Try all OpenGL versions in descending order because some non-compliant
// drivers don't return the latest supported version but the one requested
for opengl_version in opengl_versions.iter()
{
match create_context(&self.glx, &extra_functions, &self.extensions, &self.xlib,
*opengl_version, self.opengl.profile,
self.opengl.debug, self.opengl.robustness, share,
self.display, self.fb_config, &self.visual_infos)
{
Ok(x) => {
ctxt = x;
break 'outer;
},
Err(_) => continue
}
}
ctxt = try!(create_context(&self.glx, &extra_functions, &self.extensions, &self.xlib, (1, 0),
self.opengl.profile, self.opengl.debug,
self.opengl.robustness, share,
self.display, self.fb_config, &self.visual_infos));
break;
}
ctxt
},
GlRequest::Specific(Api::OpenGl, (major, minor)) => {
try!(create_context(&self.glx, &extra_functions, &self.extensions, &self.xlib, (major, minor),
self.opengl.profile, self.opengl.debug,
self.opengl.robustness, share, self.display, self.fb_config,
&self.visual_infos))
},
GlRequest::Specific(_, _) => panic!("Only OpenGL is supported"),
GlRequest::GlThenGles { opengl_version: (major, minor), .. } => {
try!(create_context(&self.glx, &extra_functions, &self.extensions, &self.xlib, (major, minor),
self.opengl.profile, self.opengl.debug,
self.opengl.robustness, share, self.display, self.fb_config,
&self.visual_infos))
},
};
// vsync
if self.opengl.vsync {
unsafe { self.glx.MakeCurrent(self.display as *mut _, window, context) };
if extra_functions.SwapIntervalEXT.is_loaded() {
// this should be the most common extension
unsafe {
extra_functions.SwapIntervalEXT(self.display as *mut _, window, 1);
}
// checking that it worked
// TODO: handle this
/*if self.builder.strict {
let mut swap = unsafe { mem::uninitialized() };
unsafe {
self.glx.QueryDrawable(self.display as *mut _, window,
ffi::glx_extra::SWAP_INTERVAL_EXT as i32,
&mut swap);
}
if swap != 1 {
return Err(CreationError::OsError(format!("Couldn't setup vsync: expected \
interval `1` but got `{}`", swap)));
}
}*/
// GLX_MESA_swap_control is not official
/*} else if extra_functions.SwapIntervalMESA.is_loaded() {
unsafe {
extra_functions.SwapIntervalMESA(1);
}*/
} else if extra_functions.SwapIntervalSGI.is_loaded() {
unsafe {
extra_functions.SwapIntervalSGI(1);
}
}/* else if self.builder.strict {
// TODO: handle this
return Err(CreationError::OsError(format!("Couldn't find any available vsync extension")));
}*/
unsafe { self.glx.MakeCurrent(self.display as *mut _, 0, ptr::null()) };
}
Ok(Context {
glx: self.glx,
display: self.display,
window: window,
context: context,
pixel_format: self.pixel_format,
})
}
}
extern fn x_error_callback(_dpy: *mut ffi::Display, _err: *mut ffi::XErrorEvent) -> i32
{
0
}
fn create_context(glx: &ffi::glx::Glx, extra_functions: &ffi::glx_extra::Glx, extensions: &str, xlib: &ffi::Xlib,
version: (u8, u8), profile: Option<GlProfile>, debug: bool,
robustness: Robustness, share: ffi::GLXContext, display: *mut ffi::Display,
fb_config: ffi::glx::types::GLXFBConfig,
visual_infos: &ffi::XVisualInfo)
-> Result<ffi::GLXContext, CreationError>
{
unsafe {
let old_callback = (xlib.XSetErrorHandler)(Some(x_error_callback));
let context = if extensions.split(' ').find(|&i| i == "GLX_ARB_create_context").is_some() {
let mut attributes = Vec::with_capacity(9);
attributes.push(ffi::glx_extra::CONTEXT_MAJOR_VERSION_ARB as c_int);
attributes.push(version.0 as c_int);
attributes.push(ffi::glx_extra::CONTEXT_MINOR_VERSION_ARB as c_int);
attributes.push(version.1 as c_int);
if let Some(profile) = profile {
let flag = match profile {
GlProfile::Compatibility =>
ffi::glx_extra::CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB,
GlProfile::Core =>
ffi::glx_extra::CONTEXT_CORE_PROFILE_BIT_ARB,
};
attributes.push(ffi::glx_extra::CONTEXT_PROFILE_MASK_ARB as c_int);
attributes.push(flag as c_int);
}
let flags = {
let mut flags = 0;
// robustness
if extensions.split(' ').find(|&i| i == "GLX_ARB_create_context_robustness").is_some() {
match robustness {
Robustness::RobustNoResetNotification | Robustness::TryRobustNoResetNotification => {
attributes.push(ffi::glx_extra::CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB as c_int);
attributes.push(ffi::glx_extra::NO_RESET_NOTIFICATION_ARB as c_int);
flags = flags | ffi::glx_extra::CONTEXT_ROBUST_ACCESS_BIT_ARB as c_int;
},
Robustness::RobustLoseContextOnReset | Robustness::TryRobustLoseContextOnReset => {
attributes.push(ffi::glx_extra::CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB as c_int);
attributes.push(ffi::glx_extra::LOSE_CONTEXT_ON_RESET_ARB as c_int);
flags = flags | ffi::glx_extra::CONTEXT_ROBUST_ACCESS_BIT_ARB as c_int;
},
Robustness::NotRobust => (),
Robustness::NoError => (),
}
} else {
match robustness {
Robustness::RobustNoResetNotification | Robustness::RobustLoseContextOnReset => {
return Err(CreationError::RobustnessNotSupported);
},
_ => ()
}
}
if debug {
flags = flags | ffi::glx_extra::CONTEXT_DEBUG_BIT_ARB as c_int;
}
flags
};
attributes.push(ffi::glx_extra::CONTEXT_FLAGS_ARB as c_int);
attributes.push(flags);
attributes.push(0);
extra_functions.CreateContextAttribsARB(display as *mut _, fb_config, share, 1,
attributes.as_ptr())
} else {
let visual_infos: *const ffi::XVisualInfo = visual_infos;
glx.CreateContext(display as *mut _, visual_infos as *mut _, share, 1)
};
(xlib.XSetErrorHandler)(old_callback);
if context.is_null() {
// TODO: check for errors and return `OpenGlVersionNotSupported`
return Err(CreationError::OsError(format!("GL context creation failed")));
}
Ok(context)
}
}
/// Enumerates all available FBConfigs
unsafe fn choose_fbconfig(glx: &ffi::glx::Glx, extensions: &str, xlib: &ffi::Xlib,
display: *mut ffi::Display, screen_id: libc::c_int,
reqs: &PixelFormatRequirements, transparent: bool)
-> Result<(ffi::glx::types::GLXFBConfig, PixelFormat), ()>
{
let descriptor = {
let mut out: Vec<c_int> = Vec::with_capacity(37);
out.push(ffi::glx::X_RENDERABLE as c_int);
out.push(1);
out.push(ffi::glx::X_VISUAL_TYPE as c_int);
out.push(ffi::glx::TRUE_COLOR as c_int);
out.push(ffi::glx::DRAWABLE_TYPE as c_int);
out.push(ffi::glx::WINDOW_BIT as c_int);
out.push(ffi::glx::RENDER_TYPE as c_int);
if reqs.float_color_buffer {
if extensions.split(' ').find(|&i| i == "GLX_ARB_fbconfig_float").is_some() {
out.push(ffi::glx_extra::RGBA_FLOAT_BIT_ARB as c_int);
} else {
return Err(());
}
} else {
out.push(ffi::glx::RGBA_BIT as c_int);
}
if let Some(color) = reqs.color_bits {
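            // Distribute the requested bits as evenly as possible over R/G/B;
            // any remainder goes to green first, then blue (e.g. 16 -> 5-6-5).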
out.push(ffi::glx::RED_SIZE as c_int);
out.push((color / 3) as c_int);
out.push(ffi::glx::GREEN_SIZE as c_int);
out.push((color / 3 + if color % 3 != 0 { 1 } else { 0 }) as c_int);
out.push(ffi::glx::BLUE_SIZE as c_int);
out.push((color / 3 + if color % 3 == 2 { 1 } else { 0 }) as c_int);
}
if let Some(alpha) = reqs.alpha_bits {
out.push(ffi::glx::ALPHA_SIZE as c_int);
out.push(alpha as c_int);
}
if let Some(depth) = reqs.depth_bits {
out.push(ffi::glx::DEPTH_SIZE as c_int);
out.push(depth as c_int);
}
if let Some(stencil) = reqs.stencil_bits {
out.push(ffi::glx::STENCIL_SIZE as c_int);
out.push(stencil as c_int);
}
let double_buffer = reqs.double_buffer.unwrap_or(true);
out.push(ffi::glx::DOUBLEBUFFER as c_int);
out.push(if double_buffer { 1 } else { 0 });
if let Some(multisampling) = reqs.multisampling {
if extensions.split(' ').find(|&i| i == "GLX_ARB_multisample").is_some() {
out.push(ffi::glx_extra::SAMPLE_BUFFERS_ARB as c_int);
out.push(if multisampling == 0 { 0 } else { 1 });
out.push(ffi::glx_extra::SAMPLES_ARB as c_int);
out.push(multisampling as c_int);
} else {
return Err(());
}
}
out.push(ffi::glx::STEREO as c_int);
out.push(if reqs.stereoscopy { 1 } else { 0 });
if reqs.srgb {
if extensions.split(' ').find(|&i| i == "GLX_ARB_framebuffer_sRGB").is_some() {
out.push(ffi::glx_extra::FRAMEBUFFER_SRGB_CAPABLE_ARB as c_int);
out.push(1);
} else if extensions.split(' ').find(|&i| i == "GLX_EXT_framebuffer_sRGB").is_some() {
out.push(ffi::glx_extra::FRAMEBUFFER_SRGB_CAPABLE_EXT as c_int);
out.push(1);
} else {
return Err(());
}
}
match reqs.release_behavior {
ReleaseBehavior::Flush => (),
ReleaseBehavior::None => {
if extensions.split(' ').find(|&i| i == "GLX_ARB_context_flush_control").is_some() {
out.push(ffi::glx_extra::CONTEXT_RELEASE_BEHAVIOR_ARB as c_int);
out.push(ffi::glx_extra::CONTEXT_RELEASE_BEHAVIOR_NONE_ARB as c_int);
}
},
}
out.push(ffi::glx::CONFIG_CAVEAT as c_int);
out.push(ffi::glx::DONT_CARE as c_int);
out.push(0);
out
};
// calling glXChooseFBConfig
let fb_config = {
let mut num_configs = 1;
let configs = glx.ChooseFBConfig(display as *mut _, screen_id, descriptor.as_ptr(),
&mut num_configs);
if configs.is_null() { return Err(()); }
if num_configs == 0 { return Err(()); }
let config = if transparent {
let configs = slice::from_raw_parts(configs, num_configs as usize);
configs.iter().find(|&config| {
let vi = glx.GetVisualFromFBConfig(display as *mut _, *config);
// Transparency was requested, so only choose configs with 32 bits for RGBA.
let found = !vi.is_null() && (*vi).depth == 32;
(xlib.XFree)(vi as *mut _);
found
})
} else {
Some(&*configs)
};
let res = if let Some(&conf) = config {
Ok(conf)
} else {
Err(())
};
(xlib.XFree)(configs as *mut _);
res?
};
let get_attrib = |attrib: c_int| -> i32 {
let mut value = 0;
glx.GetFBConfigAttrib(display as *mut _, fb_config, attrib, &mut value);
// TODO: check return value
value
};
let pf_desc = PixelFormat {
hardware_accelerated: get_attrib(ffi::glx::CONFIG_CAVEAT as c_int) !=
ffi::glx::SLOW_CONFIG as c_int,
color_bits: get_attrib(ffi::glx::RED_SIZE as c_int) as u8 +
get_attrib(ffi::glx::GREEN_SIZE as c_int) as u8 +
get_attrib(ffi::glx::BLUE_SIZE as c_int) as u8,
alpha_bits: get_attrib(ffi::glx::ALPHA_SIZE as c_int) as u8,
depth_bits: get_attrib(ffi::glx::DEPTH_SIZE as c_int) as u8,
stencil_bits: get_attrib(ffi::glx::STENCIL_SIZE as c_int) as u8,
stereoscopy: get_attrib(ffi::glx::STEREO as c_int) != 0,
double_buffer: get_attrib(ffi::glx::DOUBLEBUFFER as c_int) != 0,
multisampling: if get_attrib(ffi::glx::SAMPLE_BUFFERS as c_int) != 0 {
Some(get_attrib(ffi::glx::SAMPLES as c_int) as u16)
} else {
None
},
srgb: get_attrib(ffi::glx_extra::FRAMEBUFFER_SRGB_CAPABLE_ARB as c_int) != 0 ||
get_attrib(ffi::glx_extra::FRAMEBUFFER_SRGB_CAPABLE_EXT as c_int) != 0,
};
Ok((fb_config, pf_desc))
}<|fim▁end|> | |
<|file_name|>f64.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for 64-bit floats (`f64` type)
#![doc(primitive = "f64")]
// FIXME: MIN_VALUE and MAX_VALUE literals are parsed as -inf and inf #14353
#![allow(overflowing_literals)]
#![stable(feature = "rust1", since = "1.0.0")]
use prelude::*;
use intrinsics;
use mem;
use num::FpCategory as Fp;
use num::{Float, ParseFloatError};
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const RADIX: u32 = 2;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const MANTISSA_DIGITS: u32 = 53;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const DIGITS: u32 = 15;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const EPSILON: f64 = 2.2204460492503131e-16_f64;
/// Smallest finite f64 value
#[stable(feature = "rust1", since = "1.0.0")]
pub const MIN: f64 = -1.7976931348623157e+308_f64;
/// Smallest positive, normalized f64 value
#[stable(feature = "rust1", since = "1.0.0")]
pub const MIN_POSITIVE: f64 = 2.2250738585072014e-308_f64;
/// Largest finite f64 value
#[stable(feature = "rust1", since = "1.0.0")]
pub const MAX: f64 = 1.7976931348623157e+308_f64;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const MIN_EXP: i32 = -1021;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const MAX_EXP: i32 = 1024;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const MIN_10_EXP: i32 = -307;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const MAX_10_EXP: i32 = 308;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const NAN: f64 = 0.0_f64/0.0_f64;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const INFINITY: f64 = 1.0_f64/0.0_f64;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_docs)]
pub const NEG_INFINITY: f64 = -1.0_f64/0.0_f64;
/// Basic mathematical constants.
#[stable(feature = "rust1", since = "1.0.0")]
pub mod consts {
// FIXME: replace with mathematical constants from cmath.
/// Archimedes' constant
#[stable(feature = "rust1", since = "1.0.0")]
pub const PI: f64 = 3.14159265358979323846264338327950288_f64;
/// pi * 2.0
#[unstable(feature = "float_consts",
reason = "unclear naming convention/usefulness")]
#[deprecated(since = "1.2.0", reason = "unclear on usefulness")]
pub const PI_2: f64 = 6.28318530717958647692528676655900576_f64;
/// pi/2.0
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_PI_2: f64 = 1.57079632679489661923132169163975144_f64;
/// pi/3.0
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_PI_3: f64 = 1.04719755119659774615421446109316763_f64;
/// pi/4.0
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_PI_4: f64 = 0.785398163397448309615660845819875721_f64;
/// pi/6.0
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_PI_6: f64 = 0.52359877559829887307710723054658381_f64;
/// pi/8.0
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_PI_8: f64 = 0.39269908169872415480783042290993786_f64;
/// 1.0/pi
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_1_PI: f64 = 0.318309886183790671537767526745028724_f64;
/// 2.0/pi
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_2_PI: f64 = 0.636619772367581343075535053490057448_f64;
/// 2.0/sqrt(pi)
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_2_SQRT_PI: f64 = 1.12837916709551257389615890312154517_f64;
/// sqrt(2.0)
#[stable(feature = "rust1", since = "1.0.0")]
pub const SQRT_2: f64 = 1.41421356237309504880168872420969808_f64;
/// 1.0/sqrt(2.0)
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_1_SQRT_2: f64 = 0.707106781186547524400844362104849039_f64;
/// Euler's number
#[stable(feature = "rust1", since = "1.0.0")]
pub const E: f64 = 2.71828182845904523536028747135266250_f64;
/// log2(e)
#[stable(feature = "rust1", since = "1.0.0")]
pub const LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
/// log10(e)
#[stable(feature = "rust1", since = "1.0.0")]
pub const LOG10_E: f64 = 0.434294481903251827651128918916605082_f64;
/// ln(2.0)
#[stable(feature = "rust1", since = "1.0.0")]
pub const LN_2: f64 = 0.693147180559945309417232121458176568_f64;
/// ln(10.0)
#[stable(feature = "rust1", since = "1.0.0")]
pub const LN_10: f64 = 2.30258509299404568401799145468436421_f64;
}
impl Float for f64 {
#[inline]
fn nan() -> f64 { NAN }
#[inline]
fn infinity() -> f64 { INFINITY }
#[inline]
fn neg_infinity() -> f64 { NEG_INFINITY }
#[inline]
fn zero() -> f64 { 0.0 }
#[inline]
fn neg_zero() -> f64 { -0.0 }
#[inline]
fn one() -> f64 { 1.0 }
from_str_radix_float_impl! { f64 }
/// Returns `true` if the number is NaN.
#[inline]
fn is_nan(self) -> bool { self != self }
/// Returns `true` if the number is infinite.
#[inline]
fn is_infinite(self) -> bool {
self == Float::infinity() || self == Float::neg_infinity()
}
    /// Returns `true` if the number is neither infinite nor NaN.
#[inline]
fn is_finite(self) -> bool {
!(self.is_nan() || self.is_infinite())
}
    /// Returns `true` if the number is neither zero, infinite, subnormal, nor NaN.
#[inline]
fn is_normal(self) -> bool {
self.classify() == Fp::Normal
}
/// Returns the floating point category of the number. If only one property
/// is going to be tested, it is generally faster to use the specific
/// predicate instead.
fn classify(self) -> Fp {
const EXP_MASK: u64 = 0x7ff0000000000000;
const MAN_MASK: u64 = 0x000fffffffffffff;
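        // All-ones exponent: infinity if the mantissa is zero, NaN otherwise;
        // zero exponent: zero if the mantissa is zero, subnormal otherwise.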
let bits: u64 = unsafe { mem::transmute(self) };
match (bits & MAN_MASK, bits & EXP_MASK) {
(0, 0) => Fp::Zero,
(_, 0) => Fp::Subnormal,
(0, EXP_MASK) => Fp::Infinite,
(_, EXP_MASK) => Fp::Nan,
_ => Fp::Normal,
}
}
/// Returns the mantissa, exponent and sign as integers.
fn integer_decode(self) -> (u64, i16, i8) {
let bits: u64 = unsafe { mem::transmute(self) };
let sign: i8 = if bits >> 63 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 52) & 0x7ff) as i16;
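        // A zero exponent field marks a subnormal: there is no implicit
        // leading one, and the extra left shift lets the uniform bias
        // adjustment below produce the correct value.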
let mantissa = if exponent == 0 {
(bits & 0xfffffffffffff) << 1
} else {
(bits & 0xfffffffffffff) | 0x10000000000000
};
// Exponent bias + mantissa shift
exponent -= 1023 + 52;
(mantissa, exponent, sign)
}
/// Rounds towards minus infinity.
#[inline]
fn floor(self) -> f64 {
unsafe { intrinsics::floorf64(self) }
}
/// Rounds towards plus infinity.
#[inline]
fn ceil(self) -> f64 {
unsafe { intrinsics::ceilf64(self) }
}
/// Rounds to nearest integer. Rounds half-way cases away from zero.
#[inline]
fn round(self) -> f64 {
unsafe { intrinsics::roundf64(self) }
}
/// Returns the integer part of the number (rounds towards zero).
#[inline]
fn trunc(self) -> f64 {
unsafe { intrinsics::truncf64(self) }
}
/// The fractional part of the number, satisfying:
///
/// ```
/// let x = 1.65f64;
/// assert!(x == x.trunc() + x.fract())
/// ```
#[inline]
fn fract(self) -> f64 { self - self.trunc() }
/// Computes the absolute value of `self`. Returns `Float::nan()` if the
/// number is `Float::nan()`.
#[inline]
fn abs(self) -> f64 {
unsafe { intrinsics::fabsf64(self) }
}
/// Returns a number that represents the sign of `self`.
///
/// - `1.0` if the number is positive, `+0.0` or `Float::infinity()`
/// - `-1.0` if the number is negative, `-0.0` or `Float::neg_infinity()`
/// - `Float::nan()` if the number is `Float::nan()`
#[inline]
fn signum(self) -> f64 {
if self.is_nan() {
Float::nan()
} else {
unsafe { intrinsics::copysignf64(1.0, self) }
}
}
/// Returns `true` if `self` is positive, including `+0.0` and
/// `Float::infinity()`.
#[inline]
fn is_positive(self) -> bool {
self > 0.0 || (1.0 / self) == Float::infinity()
}
<|fim▁hole|> #[inline]
fn is_negative(self) -> bool {
self < 0.0 || (1.0 / self) == Float::neg_infinity()
}
/// Fused multiply-add. Computes `(self * a) + b` with only one rounding
/// error. This produces a more accurate result with better performance than
/// a separate multiplication operation followed by an add.
#[inline]
fn mul_add(self, a: f64, b: f64) -> f64 {
unsafe { intrinsics::fmaf64(self, a, b) }
}
/// Returns the reciprocal (multiplicative inverse) of the number.
#[inline]
fn recip(self) -> f64 { 1.0 / self }
#[inline]
fn powf(self, n: f64) -> f64 {
unsafe { intrinsics::powf64(self, n) }
}
#[inline]
fn powi(self, n: i32) -> f64 {
unsafe { intrinsics::powif64(self, n) }
}
#[inline]
fn sqrt(self) -> f64 {
if self < 0.0 {
NAN
} else {
unsafe { intrinsics::sqrtf64(self) }
}
}
#[inline]
fn rsqrt(self) -> f64 { self.sqrt().recip() }
/// Returns the exponential of the number.
#[inline]
fn exp(self) -> f64 {
unsafe { intrinsics::expf64(self) }
}
/// Returns 2 raised to the power of the number.
#[inline]
fn exp2(self) -> f64 {
unsafe { intrinsics::exp2f64(self) }
}
/// Returns the natural logarithm of the number.
#[inline]
fn ln(self) -> f64 {
unsafe { intrinsics::logf64(self) }
}
/// Returns the logarithm of the number with respect to an arbitrary base.
#[inline]
fn log(self, base: f64) -> f64 { self.ln() / base.ln() }
/// Returns the base 2 logarithm of the number.
#[inline]
fn log2(self) -> f64 {
unsafe { intrinsics::log2f64(self) }
}
/// Returns the base 10 logarithm of the number.
#[inline]
fn log10(self) -> f64 {
unsafe { intrinsics::log10f64(self) }
}
/// Converts to degrees, assuming the number is in radians.
#[inline]
fn to_degrees(self) -> f64 { self * (180.0f64 / consts::PI) }
/// Converts to radians, assuming the number is in degrees.
#[inline]
fn to_radians(self) -> f64 {
let value: f64 = consts::PI;
self * (value / 180.0)
}
}<|fim▁end|> | /// Returns `true` if `self` is negative, including `-0.0` and
/// `Float::neg_infinity()`. |
<|file_name|>overloaded-deref.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(collections)]
use std::cell::RefCell;
use std::rc::Rc;
use std::string::String;
#[derive(PartialEq, Debug)]
struct Point {<|fim▁hole|>
pub fn main() {
assert_eq!(*Rc::new(5), 5);
assert_eq!(***Rc::new(Box::new(Box::new(5))), 5);
assert_eq!(*Rc::new(Point {x: 2, y: 4}), Point {x: 2, y: 4});
let i = Rc::new(RefCell::new(2));
let i_value = *(*i).borrow();
*(*i).borrow_mut() = 5;
assert_eq!((i_value, *(*i).borrow()), (2, 5));
let s = Rc::new("foo".to_string());
assert_eq!(*s, "foo".to_string());
assert_eq!((*s), "foo");
let mut_s = Rc::new(RefCell::new(String::from("foo")));
(*(*mut_s).borrow_mut()).push_str("bar");
// assert_eq! would panic here because it stores the LHS and RHS in two locals.
assert!((*(*mut_s).borrow()) == "foobar");
assert!((*(*mut_s).borrow_mut()) == "foobar");
let p = Rc::new(RefCell::new(Point {x: 1, y: 2}));
(*(*p).borrow_mut()).x = 3;
(*(*p).borrow_mut()).y += 3;
assert_eq!(*(*p).borrow(), Point {x: 3, y: 5});
let v = Rc::new(RefCell::new(vec!(1, 2, 3)));
(*(*v).borrow_mut())[0] = 3;
(*(*v).borrow_mut())[1] += 3;
assert_eq!(((*(*v).borrow())[0],
(*(*v).borrow())[1],
(*(*v).borrow())[2]), (3, 5, 3));
}<|fim▁end|> | x: isize,
y: isize
} |
<|file_name|>with-identifier.ts<|end_file_name|><|fim▁begin|>export interface IWithIdentifier {<|fim▁hole|> id?: string;
}<|fim▁end|> | |
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
file: __main__.py
Description: the entry point to SEM.
author: Yoann Dupont
MIT License
Copyright (c) 2018 Yoann Dupont
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import print_function
import logging
import os.path
import unittest
import sys
import sem
import sem.modules
from sem.logger import logging_format
from sem.misc import find_suggestions
sem_logger = logging.getLogger("sem")
def valid_module(m):
return m.endswith(".py") and not (m.startswith(u"_") or m in ["sem_module.py", "pipeline.py"])
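# Illustrative outcomes (a sketch, not part of the original source):
#   valid_module("tagger.py")   -> True  (regular module file)
#   valid_module("_utils.py")   -> False (private helper, leading underscore)
#   valid_module("pipeline.py") -> False (explicitly excluded)
#   valid_module("README.txt")  -> False (not a .py file)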
def main(args=None):
def banter():
def username():
import os
return os.environ.get("USERNAME", os.environ.get("USER", os.path.split(os.path.expanduser(u"~"))[-1]))
import random
l = [
u"Do thou mockest me?",
u"Try again?",
u"I'm sorry {0}, I'm afraid I can't do that.".format(username()),
u'The greatest trick this module ever pulled was convincing the users it did not exist.',
u"It's just a typo."
]
random.shuffle(l)
return l[0]
modules = {}
for element in os.listdir(os.path.join(sem.SEM_HOME, "modules")):
m = element[:-3]
if valid_module(element):
modules[m] = sem.modules.get_package(m)
name = os.path.basename(sys.argv[0])
operation = (sys.argv[1] if len(sys.argv) > 1 else "-h")
if operation in modules:
module = modules[operation]
module.main(sem.argument_parser.parse_args())
elif operation in ["-h", "--help"]:<|fim▁hole|> print("\t{0}".format(module))
print()
print("for SEM's current version: -v or --version\n")
print("for informations about the last revision: -i or --informations")
print("for playing all tests: --test")
elif operation in ["-v", "--version"]:
print(sem.full_name())
elif operation in ["-i", "--informations"]:
informations = sem.informations()
try:
print(informations)
except UnicodeEncodeError:
print(informations.encode(sys.getfilesystemencoding(), errors="replace"))
elif operation == "--test":
testsuite = unittest.TestLoader().discover(os.path.join(sem.SEM_HOME, "tests"))
unittest.TextTestRunner(verbosity=2).run(testsuite)
else:
print("Module not found: " + operation)
suggestions = find_suggestions(operation, modules)
if len(suggestions) > 0:
print("Did you mean one of the following?")
for suggestion in suggestions:
print("\t{0}".format(suggestion))
else:
print("No suggestions found...", banter())
if __name__ == "__main__":
main()<|fim▁end|> | print("Usage: {0} <module> [module arguments]\n".format(name))
print("Module list:")
for module in modules: |
<|file_name|>TransientNernstPlanck.py<|end_file_name|><|fim▁begin|>from openpnm.algorithms import TransientReactiveTransport
from openpnm.utils import logging
logger = logging.getLogger(__name__)
class TransientNernstPlanck(TransientReactiveTransport):
r"""
A subclass of GenericTransport to perform steady and transient simulations
of pure diffusion, advection-diffusion and advection-diffusion with
migration.
"""
def __init__(self, settings={}, phase=None, ion='', **kwargs):
def_set = {'phase': None,
'quantity': 'pore.concentration.'+ion,
'conductance': 'throat.ad_dif_mig_conductance.'+ion,
'ion': ion,
'gui': {'setup': {'phase': None,
'quantity': '',
'conductance': '',
'ion': '',
't_initial': None,
't_final': None,
't_step': None,
't_output': None,
't_tolerance': None,
't_scheme': ''},
'set_IC': {'values': None},
'set_rate_BC': {'pores': None,
'values': None},
'set_value_BC': {'pores': None,
'values': None},
'set_source': {'pores': None,
'propname': ''}
}
}
super().__init__(**kwargs)
self.settings.update(def_set)
self.settings.update(settings)
if phase is not None:
self.setup(phase=phase)
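# A minimal usage sketch (hypothetical names; assumes an OpenPNM network
# `net` and phase `sw` already exist -- none of this is part of this file):
#
#   alg = TransientNernstPlanck(network=net, phase=sw, ion='Na')
#   alg.setup(t_initial=0, t_final=100, t_step=1, t_scheme='implicit')
#   alg.set_value_BC(pores=net.pores('left'), values=1.0)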
def setup(self, phase=None, quantity='', conductance='', ion='',
t_initial=None, t_final=None, t_step=None, t_output=None,
t_tolerance=None, t_precision=None, t_scheme='', **kwargs):<|fim▁hole|> if conductance:
self.settings['conductance'] = conductance
if ion:
self.settings['ion'] = ion
if t_initial is not None:
self.settings['t_initial'] = t_initial
if t_final is not None:
self.settings['t_final'] = t_final
if t_step is not None:
self.settings['t_step'] = t_step
if t_output is not None:
self.settings['t_output'] = t_output
if t_tolerance is not None:
self.settings['t_tolerance'] = t_tolerance
if t_precision is not None:
self.settings['t_precision'] = t_precision
if t_scheme:
self.settings['t_scheme'] = t_scheme
self.settings.update(kwargs)<|fim▁end|> | if phase:
self.settings['phase'] = phase.name
if quantity:
self.settings['quantity'] = quantity |
<|file_name|>category.py<|end_file_name|><|fim▁begin|>import operator
import numpy as np
from pandas._libs import index as libindex
from pandas import compat
from pandas.compat.numpy import function as nv
from pandas.core.dtypes.generic import ABCCategorical, ABCSeries
from pandas.core.dtypes.dtypes import CategoricalDtype
from pandas.core.dtypes.common import (
is_categorical_dtype,
_ensure_platform_int,
is_list_like,
is_interval_dtype,
is_scalar)
from pandas.core.dtypes.missing import array_equivalent, isna
from pandas.core.algorithms import take_1d
from pandas.util._decorators import Appender, cache_readonly
from pandas.core.config import get_option
from pandas.core.indexes.base import Index, _index_shared_docs
from pandas.core import accessor
import pandas.core.common as com
import pandas.core.missing as missing
import pandas.core.indexes.base as ibase
_index_doc_kwargs = dict(ibase._index_doc_kwargs)
_index_doc_kwargs.update(dict(target_klass='CategoricalIndex'))
class CategoricalIndex(Index, accessor.PandasDelegate):
"""
Immutable Index implementing an ordered, sliceable set. CategoricalIndex
represents a sparsely populated Index with an underlying Categorical.
Parameters
----------
data : array-like or Categorical, (1-dimensional)
categories : optional, array-like
categories for the CategoricalIndex
ordered : boolean,
designating if the categories are ordered
copy : bool
Make a copy of input ndarray
name : object
Name to be stored in the index
Attributes
----------
codes
categories
ordered
Methods
-------
rename_categories
reorder_categories
add_categories
remove_categories
remove_unused_categories
set_categories
as_ordered
as_unordered
map
See Also
--------
Categorical, Index
"""
_typ = 'categoricalindex'
_engine_type = libindex.Int64Engine
_attributes = ['name']
def __new__(cls, data=None, categories=None, ordered=None, dtype=None,
copy=False, name=None, fastpath=False):
if fastpath:
return cls._simple_new(data, name=name, dtype=dtype)
if name is None and hasattr(data, 'name'):
name = data.name
if isinstance(data, ABCCategorical):
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
elif isinstance(data, CategoricalIndex):
data = data._data
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
else:
# don't allow scalars
# if data is None, then categories must be provided
if is_scalar(data):
if data is not None or categories is None:
cls._scalar_data_error(data)
data = []
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
if copy:
data = data.copy()
return cls._simple_new(data, name=name)
def _create_from_codes(self, codes, categories=None, ordered=None,
name=None):
"""
*this is an internal non-public method*
create the correct categorical from codes
Parameters
----------
codes : new codes
categories : optional categories, defaults to existing
ordered : optional ordered attribute, defaults to existing
name : optional name attribute, defaults to existing
Returns
-------
CategoricalIndex
"""
from pandas.core.arrays import Categorical
if categories is None:
categories = self.categories
if ordered is None:
ordered = self.ordered
if name is None:
name = self.name
cat = Categorical.from_codes(codes, categories=categories,
ordered=self.ordered)
return CategoricalIndex(cat, name=name)
@staticmethod
def _create_categorical(self, data, categories=None, ordered=None,
dtype=None):
"""
*this is an internal non-public method*
create the correct categorical from data and the properties
Parameters
----------
data : data for new Categorical
categories : optional categories, defaults to existing
ordered : optional ordered attribute, defaults to existing
dtype : CategoricalDtype, defaults to existing
Returns
-------
Categorical
"""
if (isinstance(data, (ABCSeries, type(self))) and
is_categorical_dtype(data)):
data = data.values
if not isinstance(data, ABCCategorical):
if ordered is None and dtype is None:
ordered = False
from pandas.core.arrays import Categorical
data = Categorical(data, categories=categories, ordered=ordered,
dtype=dtype)
else:
if categories is not None:
data = data.set_categories(categories, ordered=ordered)
elif ordered is not None and ordered != data.ordered:
data = data.set_ordered(ordered)
if isinstance(dtype, CategoricalDtype):
# we want to silently ignore dtype='category'
data = data._set_dtype(dtype)
return data
@classmethod
def _simple_new(cls, values, name=None, categories=None, ordered=None,
dtype=None, **kwargs):
result = object.__new__(cls)
values = cls._create_categorical(cls, values, categories, ordered,
dtype=dtype)
result._data = values
result.name = name
for k, v in compat.iteritems(kwargs):
setattr(result, k, v)
result._reset_identity()
return result
@Appender(_index_shared_docs['_shallow_copy'])
def _shallow_copy(self, values=None, categories=None, ordered=None,
dtype=None, **kwargs):
# categories and ordered can't be part of attributes,
# as these are properties
# we want to reuse self.dtype if possible, i.e. neither are
# overridden.
if dtype is not None and (categories is not None or
ordered is not None):
raise TypeError("Cannot specify both `dtype` and `categories` "
"or `ordered`")
if categories is None and ordered is None:
dtype = self.dtype if dtype is None else dtype
return super(CategoricalIndex, self)._shallow_copy(
values=values, dtype=dtype, **kwargs)
if categories is None:
categories = self.categories
if ordered is None:
ordered = self.ordered
return super(CategoricalIndex, self)._shallow_copy(
values=values, categories=categories,
ordered=ordered, **kwargs)
def _is_dtype_compat(self, other):
"""
*this is an internal non-public method*
provide a comparison between the dtype of self and other (coercing if
needed)
Raises
------
TypeError if the dtypes are not compatible
"""
if is_categorical_dtype(other):
if isinstance(other, CategoricalIndex):
other = other._values
if not other.is_dtype_equal(self):
raise TypeError("categories must match existing categories "
"when appending")
else:
values = other
if not is_list_like(values):
values = [values]
other = CategoricalIndex(self._create_categorical(
self, other, categories=self.categories, ordered=self.ordered))
if not other.isin(values).all():
raise TypeError("cannot append a non-category item to a "
"CategoricalIndex")
return other
def equals(self, other):
"""
Determines if two CategoricalIndex objects contain the same elements.
"""
if self.is_(other):
return True
if not isinstance(other, Index):
return False
try:
other = self._is_dtype_compat(other)
return array_equivalent(self._data, other)
except (TypeError, ValueError):
pass
return False
@property
def _formatter_func(self):
return self.categories._formatter_func
def _format_attrs(self):
"""
Return a list of tuples of the (attr,formatted_value)
"""
max_categories = (10 if get_option("display.max_categories") == 0 else
get_option("display.max_categories"))
attrs = [
('categories',
ibase.default_pprint(self.categories,
max_seq_items=max_categories)),
('ordered', self.ordered)]
if self.name is not None:
attrs.append(('name', ibase.default_pprint(self.name)))
attrs.append(('dtype', "'%s'" % self.dtype.name))
max_seq_items = get_option('display.max_seq_items') or len(self)
if len(self) > max_seq_items:
attrs.append(('length', len(self)))
return attrs
@property
def inferred_type(self):
return 'categorical'
@property
def values(self):
""" return the underlying data, which is a Categorical """
return self._data
@property
def itemsize(self):
# Size of the items in categories, not codes.
return self.values.itemsize
def get_values(self):
""" return the underlying data as an ndarray """
return self._data.get_values()
def tolist(self):
return self._data.tolist()
@property
def codes(self):
return self._data.codes
@property
def categories(self):
return self._data.categories
@property
def ordered(self):
return self._data.ordered
def _reverse_indexer(self):
return self._data._reverse_indexer()
@Appender(_index_shared_docs['__contains__'] % _index_doc_kwargs)
def __contains__(self, key):
hash(key)
if isna(key): # if key is a NaN, check if any NaN is in self.
return self.hasnans
# is key in self.categories? Then get its location.
# If not (i.e. KeyError), it logically can't be in self either
try:
loc = self.categories.get_loc(key)
except KeyError:
return False
# loc is the location of key in self.categories, but also the value
# for key in self.codes and in self._engine. key may be in categories,
# but still not in self, check this. Example:
# 'b' in CategoricalIndex(['a'], categories=['a', 'b']) # False
if is_scalar(loc):
return loc in self._engine
else:
# if self.categories is IntervalIndex, loc is an array
# check if any scalar of the array is in self._engine
return any(loc_ in self._engine for loc_ in loc)
@Appender(_index_shared_docs['contains'] % _index_doc_kwargs)
def contains(self, key):
hash(key)
return key in self
def __array__(self, dtype=None):
""" the array interface, return my values """
return np.array(self._data, dtype=dtype)
@Appender(_index_shared_docs['astype'])
def astype(self, dtype, copy=True):
if is_interval_dtype(dtype):
from pandas import IntervalIndex
return IntervalIndex(np.array(self))
elif is_categorical_dtype(dtype):
# GH 18630
dtype = self.dtype.update_dtype(dtype)
if dtype == self.dtype:
return self.copy() if copy else self
return super(CategoricalIndex, self).astype(dtype=dtype, copy=copy)
@cache_readonly
def _isnan(self):
""" return if each value is nan"""
return self._data.codes == -1
@Appender(ibase._index_shared_docs['fillna'])
def fillna(self, value, downcast=None):
self._assert_can_do_op(value)
return CategoricalIndex(self._data.fillna(value), name=self.name)
def argsort(self, *args, **kwargs):
return self.values.argsort(*args, **kwargs)
@cache_readonly
def _engine(self):
# we are going to look things up with the codes themselves
return self._engine_type(lambda: self.codes.astype('i8'), len(self))
# introspection
@cache_readonly<|fim▁hole|> def is_monotonic_increasing(self):
return self._engine.is_monotonic_increasing
@property
def is_monotonic_decreasing(self):
return self._engine.is_monotonic_decreasing
@Appender(_index_shared_docs['index_unique'] % _index_doc_kwargs)
def unique(self, level=None):
if level is not None:
self._validate_index_level(level)
result = self.values.unique()
# CategoricalIndex._shallow_copy keeps original categories
# and ordered if not otherwise specified
return self._shallow_copy(result, categories=result.categories,
ordered=result.ordered)
@Appender(Index.duplicated.__doc__)
def duplicated(self, keep='first'):
from pandas._libs.hashtable import duplicated_int64
codes = self.codes.astype('i8')
return duplicated_int64(codes, keep)
def _to_safe_for_reshape(self):
""" convert to object if we are a categorical """
return self.astype('object')
def get_loc(self, key, method=None):
"""
Get integer location, slice or boolean mask for requested label.
Parameters
----------
key : label
method : {None}
* default: exact matches only.
Returns
-------
loc : int if unique index, slice if monotonic index, else mask
Examples
--------
>>> unique_index = pd.CategoricalIndex(list('abc'))
>>> unique_index.get_loc('b')
1
>>> monotonic_index = pd.CategoricalIndex(list('abbc'))
>>> monotonic_index.get_loc('b')
slice(1, 3, None)
>>> non_monotonic_index = pd.CategoricalIndex(list('abcb'))
>>> non_monotonic_index.get_loc('b')
array([False, True, False, True], dtype=bool)
"""
codes = self.categories.get_loc(key)
if (codes == -1):
raise KeyError(key)
return self._engine.get_loc(codes)
def get_value(self, series, key):
"""
Fast lookup of value from 1-dimensional ndarray. Only use this if you
know what you're doing
"""
try:
k = com._values_from_object(key)
k = self._convert_scalar_indexer(k, kind='getitem')
indexer = self.get_loc(k)
return series.iloc[indexer]
except (KeyError, TypeError):
pass
# we might be a positional indexer
return super(CategoricalIndex, self).get_value(series, key)
def _can_reindex(self, indexer):
""" always allow reindexing """
pass
@Appender(_index_shared_docs['where'])
def where(self, cond, other=None):
if other is None:
other = self._na_value
values = np.where(cond, self.values, other)
from pandas.core.arrays import Categorical
cat = Categorical(values,
categories=self.categories,
ordered=self.ordered)
return self._shallow_copy(cat, **self._get_attributes_dict())
def reindex(self, target, method=None, level=None, limit=None,
tolerance=None):
"""
Create index with target's values (move/add/delete values as necessary)
Returns
-------
new_index : pd.Index
Resulting index
indexer : np.ndarray or None
Indices of output values in original index
"""
if method is not None:
raise NotImplementedError("argument method is not implemented for "
"CategoricalIndex.reindex")
if level is not None:
raise NotImplementedError("argument level is not implemented for "
"CategoricalIndex.reindex")
if limit is not None:
raise NotImplementedError("argument limit is not implemented for "
"CategoricalIndex.reindex")
target = ibase._ensure_index(target)
if not is_categorical_dtype(target) and not target.is_unique:
raise ValueError("cannot reindex with a non-unique indexer")
indexer, missing = self.get_indexer_non_unique(np.array(target))
if len(self.codes):
new_target = self.take(indexer)
else:
new_target = target
# filling in missing if needed
if len(missing):
cats = self.categories.get_indexer(target)
if (cats == -1).any():
# coerce to a regular index here!
result = Index(np.array(self), name=self.name)
new_target, indexer, _ = result._reindex_non_unique(
np.array(target))
else:
codes = new_target.codes.copy()
codes[indexer == -1] = cats[missing]
new_target = self._create_from_codes(codes)
# we always want to return an Index type here
# to be consistent with .reindex for other index types (e.g. they don't
# coerce based on the actual values, only on the dtype)
# unless we had an initial Categorical to begin with
# in which case we are going to conform to the passed Categorical
new_target = np.asarray(new_target)
if is_categorical_dtype(target):
new_target = target._shallow_copy(new_target, name=self.name)
else:
new_target = Index(new_target, name=self.name)
return new_target, indexer
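# Illustrative behavior (a hedged sketch, not from the pandas source):
#
#   ci = pd.CategoricalIndex(['a', 'b'], categories=['a', 'b', 'c'])
#   ci.reindex(['a', 'c'])  # 'c' is a known category -> stays categorical
#   ci.reindex(['a', 'z'])  # 'z' is not a category -> coerced to plain Index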
def _reindex_non_unique(self, target):
""" reindex from a non-unique; which CategoricalIndex's are almost
always
"""
new_target, indexer = self.reindex(target)
new_indexer = None
check = indexer == -1
if check.any():
new_indexer = np.arange(len(self.take(indexer)))
new_indexer[check] = -1
cats = self.categories.get_indexer(target)
if not (cats == -1).any():
# .reindex returns normal Index. Revert to CategoricalIndex if
# all targets are included in my categories
new_target = self._shallow_copy(new_target)
return new_target, indexer, new_indexer
@Appender(_index_shared_docs['get_indexer'] % _index_doc_kwargs)
def get_indexer(self, target, method=None, limit=None, tolerance=None):
from pandas.core.arrays.categorical import _recode_for_categories
method = missing.clean_reindex_fill_method(method)
target = ibase._ensure_index(target)
if self.is_unique and self.equals(target):
return np.arange(len(self), dtype='intp')
if method == 'pad' or method == 'backfill':
raise NotImplementedError("method='pad' and method='backfill' not "
"implemented yet for CategoricalIndex")
elif method == 'nearest':
raise NotImplementedError("method='nearest' not implemented yet "
'for CategoricalIndex')
if (isinstance(target, CategoricalIndex) and
self.values.is_dtype_equal(target)):
if self.values.equals(target.values):
# we have the same codes
codes = target.codes
else:
codes = _recode_for_categories(target.codes,
target.categories,
self.values.categories)
else:
if isinstance(target, CategoricalIndex):
code_indexer = self.categories.get_indexer(target.categories)
codes = take_1d(code_indexer, target.codes, fill_value=-1)
else:
codes = self.categories.get_indexer(target)
indexer, _ = self._engine.get_indexer_non_unique(codes)
return _ensure_platform_int(indexer)
@Appender(_index_shared_docs['get_indexer_non_unique'] % _index_doc_kwargs)
def get_indexer_non_unique(self, target):
target = ibase._ensure_index(target)
if isinstance(target, CategoricalIndex):
# Indexing on codes is more efficient if categories are the same:
if target.categories is self.categories:
target = target.codes
indexer, missing = self._engine.get_indexer_non_unique(target)
return _ensure_platform_int(indexer), missing
target = target.values
codes = self.categories.get_indexer(target)
indexer, missing = self._engine.get_indexer_non_unique(codes)
return _ensure_platform_int(indexer), missing
@Appender(_index_shared_docs['_convert_scalar_indexer'])
def _convert_scalar_indexer(self, key, kind=None):
if self.categories._defer_to_indexing:
return self.categories._convert_scalar_indexer(key, kind=kind)
return super(CategoricalIndex, self)._convert_scalar_indexer(
key, kind=kind)
@Appender(_index_shared_docs['_convert_list_indexer'])
def _convert_list_indexer(self, keyarr, kind=None):
# Return our indexer or raise if all of the values are not included in
# the categories
if self.categories._defer_to_indexing:
indexer = self.categories._convert_list_indexer(keyarr, kind=kind)
return Index(self.codes).get_indexer_for(indexer)
indexer = self.categories.get_indexer(np.asarray(keyarr))
if (indexer == -1).any():
raise KeyError(
"a list-indexer must only "
"include values that are "
"in the categories")
return self.get_indexer(keyarr)
@Appender(_index_shared_docs['_convert_arr_indexer'])
def _convert_arr_indexer(self, keyarr):
keyarr = com._asarray_tuplesafe(keyarr)
if self.categories._defer_to_indexing:
return keyarr
return self._shallow_copy(keyarr)
@Appender(_index_shared_docs['_convert_index_indexer'])
def _convert_index_indexer(self, keyarr):
return self._shallow_copy(keyarr)
@Appender(_index_shared_docs['take'] % _index_doc_kwargs)
def take(self, indices, axis=0, allow_fill=True,
fill_value=None, **kwargs):
nv.validate_take(tuple(), kwargs)
indices = _ensure_platform_int(indices)
taken = self._assert_take_fillable(self.codes, indices,
allow_fill=allow_fill,
fill_value=fill_value,
na_value=-1)
return self._create_from_codes(taken)
def is_dtype_equal(self, other):
return self._data.is_dtype_equal(other)
take_nd = take
def map(self, mapper):
"""
Map values using input correspondence (a dict, Series, or function).
Maps the values (their categories, not the codes) of the index to new
categories. If the mapping correspondence is one-to-one the result is a
:class:`~pandas.CategoricalIndex` which has the same order property as
the original, otherwise an :class:`~pandas.Index` is returned.
If a `dict` or :class:`~pandas.Series` is used any unmapped category is
mapped to `NaN`. Note that if this happens an :class:`~pandas.Index`
will be returned.
Parameters
----------
mapper : function, dict, or Series
Mapping correspondence.
Returns
-------
pandas.CategoricalIndex or pandas.Index
Mapped index.
See Also
--------
Index.map : Apply a mapping correspondence on an
:class:`~pandas.Index`.
Series.map : Apply a mapping correspondence on a
:class:`~pandas.Series`.
Series.apply : Apply more complex functions on a
:class:`~pandas.Series`.
Examples
--------
>>> idx = pd.CategoricalIndex(['a', 'b', 'c'])
>>> idx
CategoricalIndex(['a', 'b', 'c'], categories=['a', 'b', 'c'],
ordered=False, dtype='category')
>>> idx.map(lambda x: x.upper())
CategoricalIndex(['A', 'B', 'C'], categories=['A', 'B', 'C'],
ordered=False, dtype='category')
>>> idx.map({'a': 'first', 'b': 'second', 'c': 'third'})
CategoricalIndex(['first', 'second', 'third'], categories=['first',
'second', 'third'], ordered=False, dtype='category')
If the mapping is one-to-one the ordering of the categories is
preserved:
>>> idx = pd.CategoricalIndex(['a', 'b', 'c'], ordered=True)
>>> idx
CategoricalIndex(['a', 'b', 'c'], categories=['a', 'b', 'c'],
ordered=True, dtype='category')
>>> idx.map({'a': 3, 'b': 2, 'c': 1})
CategoricalIndex([3, 2, 1], categories=[3, 2, 1], ordered=True,
dtype='category')
If the mapping is not one-to-one an :class:`~pandas.Index` is returned:
>>> idx.map({'a': 'first', 'b': 'second', 'c': 'first'})
Index(['first', 'second', 'first'], dtype='object')
If a `dict` is used, all unmapped categories are mapped to `NaN` and
the result is an :class:`~pandas.Index`:
>>> idx.map({'a': 'first', 'b': 'second'})
Index(['first', 'second', nan], dtype='object')
"""
return self._shallow_copy_with_infer(self.values.map(mapper))
def delete(self, loc):
"""
Make new Index with passed location(-s) deleted
Returns
-------
new_index : Index
"""
return self._create_from_codes(np.delete(self.codes, loc))
def insert(self, loc, item):
"""
Make new Index inserting new item at location. Follows
Python list.append semantics for negative values
Parameters
----------
loc : int
item : object
Returns
-------
new_index : Index
Raises
------
TypeError if the item is not in the categories
"""
code = self.categories.get_indexer([item])
if (code == -1) and not (is_scalar(item) and isna(item)):
raise TypeError("cannot insert an item into a CategoricalIndex "
"that is not already an existing category")
codes = self.codes
codes = np.concatenate((codes[:loc], code, codes[loc:]))
return self._create_from_codes(codes)
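# For example (a hedged sketch, not from the pandas source):
#
#   ci = pd.CategoricalIndex(['a', 'b'], categories=['a', 'b', 'c'])
#   ci.insert(1, 'c')  # ok: 'c' is an existing category
#   ci.insert(1, 'z')  # raises TypeError: not an existing category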
def _concat(self, to_concat, name):
# if calling index is category, don't check dtype of others
return CategoricalIndex._concat_same_dtype(self, to_concat, name)
def _concat_same_dtype(self, to_concat, name):
"""
Concatenate to_concat, whose entries have the same class as self.
Raises TypeError if any entry is not compatible with self's categories
"""
to_concat = [self._is_dtype_compat(c) for c in to_concat]
codes = np.concatenate([c.codes for c in to_concat])
result = self._create_from_codes(codes, name=name)
# if name is None, _create_from_codes sets self.name
result.name = name
return result
def _codes_for_groupby(self, sort, observed):
""" Return a Categorical adjusted for groupby """
return self.values._codes_for_groupby(sort, observed)
@classmethod
def _add_comparison_methods(cls):
""" add in comparison methods """
def _make_compare(op):
opname = '__{op}__'.format(op=op.__name__)
def _evaluate_compare(self, other):
# if we have a Categorical type, then must have the same
# categories
if isinstance(other, CategoricalIndex):
other = other._values
elif isinstance(other, Index):
other = self._create_categorical(
self, other._values, categories=self.categories,
ordered=self.ordered)
if isinstance(other, (ABCCategorical, np.ndarray,
ABCSeries)):
if len(self.values) != len(other):
raise ValueError("Lengths must match to compare")
if isinstance(other, ABCCategorical):
if not self.values.is_dtype_equal(other):
raise TypeError("categorical index comparisons must "
"have the same categories and ordered "
"attributes")
result = op(self.values, other)
if isinstance(result, ABCSeries):
# Dispatch to pd.Categorical returned NotImplemented
# and we got a Series back; down-cast to ndarray
result = result.values
return result
return compat.set_function_name(_evaluate_compare, opname, cls)
cls.__eq__ = _make_compare(operator.eq)
cls.__ne__ = _make_compare(operator.ne)
cls.__lt__ = _make_compare(operator.lt)
cls.__gt__ = _make_compare(operator.gt)
cls.__le__ = _make_compare(operator.le)
cls.__ge__ = _make_compare(operator.ge)
def _delegate_method(self, name, *args, **kwargs):
""" method delegation to the ._values """
method = getattr(self._values, name)
if 'inplace' in kwargs:
raise ValueError("cannot use inplace with CategoricalIndex")
res = method(*args, **kwargs)
if is_scalar(res):
return res
return CategoricalIndex(res, name=self.name)
@classmethod
def _add_accessors(cls):
""" add in Categorical accessor methods """
from pandas.core.arrays import Categorical
CategoricalIndex._add_delegate_accessors(
delegate=Categorical, accessors=["rename_categories",
"reorder_categories",
"add_categories",
"remove_categories",
"remove_unused_categories",
"set_categories",
"as_ordered", "as_unordered",
"min", "max"],
typ='method', overwrite=True)
CategoricalIndex._add_numeric_methods_add_sub_disabled()
CategoricalIndex._add_numeric_methods_disabled()
CategoricalIndex._add_logical_methods_disabled()
CategoricalIndex._add_comparison_methods()
CategoricalIndex._add_accessors()<|fim▁end|> | def is_unique(self):
return self._engine.is_unique
@property |
<|file_name|>panelScatter.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
from PyQt4.uic import loadUiType
from pyqtgraph.Qt import QtCore, QtGui
#from matplotlib.figure import Figure
from matplotlib import pyplot as plt
import numpy as np
from matplotlib.backends.backend_qt4agg import (
FigureCanvasQTAgg as FigureCanvas,
NavigationToolbar2QT as NavigationToolbar)
import selEpisodio
import matplotlib.dates as md
from sklearn import preprocessing
import colores
import lectorFichero as lf
DEBUG = 0
class PanelScatter():
def __init__(self, selep, layout, cbSueno, cbSedentario, cbLigera, cbModerada, cbIzq, cbDer, btnPrev, btnNext, label):
self.layoutMatplot1 = layout
self.cbSueno = cbSueno
self.cbSedentario = cbSedentario
self.cbLigera = cbLigera
self.cbModerada = cbModerada
self.cbx_izq = cbIzq
self.cbx_der = cbDer
self.btnPrev = btnPrev
self.btnNext = btnNext
self.label = label
self.selep = selep
self.configureComboBox()
self.updateView()
self.cbSueno.clicked.connect(self.filtrarSueno)
self.cbSedentario.clicked.connect(self.filtrarSedentario)
self.cbLigera.clicked.connect(self.filtrarLigera)
self.cbModerada.clicked.connect(self.filtrarModerada)
self.btnPrev.clicked.connect(self.retroceder)
self.btnNext.clicked.connect(self.avanzar)
self.cbx_izq.activated[str].connect(self.cbx_izqListener)
self.cbx_der.activated[str].connect(self.cbx_derListener)
self.filSueno = True
self.filSedentario = True
self.filLigero =True
self.filModerado = True
def configureComboBox(self):
print "Configurando combobox"
self.cbx_izq.clear()
self.cbx_der.clear()
for i in self.selep.epFiltro:
self.cbx_izq.addItem(i.nombre)
self.cbx_der.addItem(i.nombre)
if(len(self.selep.epFiltro) > 1):
self.cbx_der.setCurrentIndex(1)
else:
self.cbx_der.setCurrentIndex(0)
self.cbx_izq.setCurrentIndex(0)
def openFile(self):
self.selep = self.loadData()
self.configureComboBox()
self.limpiarLayout()
self.updateView()
def loadData(self):
if(DEBUG): fname = '../data.csv'
else: fname = QtGui.QFileDialog.getOpenFileName(self, 'Open file')
print "Abriendo fichero ", fname
csv = lf.LectorFichero(fname).getDatos()
selep = selEpisodio.selEpisodio(csv)
return selep
# ep 0 -> left plot
# ep 1 -> right plot
def getTime(self, a, b, ep):
if(ep == 0):
cbxId = self.cbx_izq.currentIndex()
else:
cbxId = self.cbx_der.currentIndex()
print "get time", cbxId
for i in self.selep.epFiltro[cbxId].temp:
if(a == i):
ind = 0
for k in self.selep.epFiltro[cbxId].flujo:
if(b == k):
print "encontrado"
return self.selep.epFiltro[cbxId].tiempo[ind]
else:
ind += 1
def onpick(self, event, ep):
thisline = event.artist
xdata, ydata = thisline.get_data()
ind = event.ind
print xdata[ind[0]], ydata[ind[0]]<|fim▁hole|> #Serie temporal
fig0 = plt.figure(tight_layout=True)
# Normalize
preprocessing.scale(ep.temp, copy=True)
preprocessing.scale(ep.flujo, copy=True)
# Temperature curve
ax1 = fig0.add_subplot(111)
ax1.plot(ep.tiempo, ep.temp, '-', color=colores.temperatura)
ax1.set_ylim([self.selep.csv.cotas.temp_min,self.selep.csv.cotas.temp_max])
#ax1.set_xlabel('Tiempo (m)')
ax1.set_ylabel('Temperatura (ºC)', color=colores.temperatura)
for tl in ax1.get_yticklabels():
tl.set_color(colores.temperatura)
fig0.autofmt_xdate()
xfmt = md.DateFormatter('%H:%M')
ax1.xaxis.set_major_formatter(xfmt)
start, end = ax1.get_xlim()
#ax1.xaxis.set_ticks(np.arange(start, end, 30))
ax1.grid(True)
# Heat flux curve
ax2 = ax1.twinx()
ax2.plot(ep.tiempo, ep.flujo, '-', color=colores.flujo)
ax2.set_ylim([self.selep.csv.cotas.flujo_min, self.selep.csv.cotas.flujo_max])
ax2.set_ylabel('Flujo térmico', color=colores.flujo)
for tl in ax2.get_yticklabels():
tl.set_color(colores.flujo)
#Scatterplot
# Vertical lines with the sleep classification
if(ep.tipo == selEpisodio.tipoSueno):
profundo = self.selep.getProfundo(ep.ini, ep.fin)
despierto = self.selep.getDespierto(ep.ini, ep.fin)
for i in profundo:
ax1.axvspan(i[0], i[1], facecolor=colores.suenoProfundo, alpha=0.3, edgecolor=colores.suenoProfundo)
for i in despierto:
ax1.axvspan(i[0], i[1], facecolor=colores.despierto, alpha=0.5, edgecolor=colores.despierto)
fig1 = plt.figure(tight_layout=True)
ax1f1 = fig1.add_subplot(111)
k = 0
for i in range(ep.ini, ep.fin):
t = self.selep.getColorSueno(i)
ax1f1.plot(ep.temp[k], ep.flujo[k], 'o', picker=5, color=t)
k+=1
ax1f1.set_xlabel('Temperatura (ºC)', color=colores.temperatura)
ax1f1.set_ylabel('Flujo térmico', color=colores.flujo)
else:
fig1 = plt.figure(tight_layout=True)
ax1f1 = fig1.add_subplot(111)
line, = ax1f1.plot(ep.temp, ep.flujo, 'o', picker=5, color = "b")
ax1f1.set_xlabel('Temperatura (ºC)', color=colores.temperatura)
ax1f1.set_ylabel('Flujo térmico', color=colores.flujo)
#ax1f1.set_xlim([self.selep.csv.cotas.temp_min, self.selep.csv.cotas.temp_max])
#ax1f1.set_ylim([self.selep.csv.cotas.flujo_min, self.selep.csv.cotas.flujo_max])
return fig0, fig1
def crearWidget(self, ep, derecho):
"""
ep: episode to display
derecho: 0/1 for left or right episode
"""
fig10, fig11 = self.creaFiguras(ep)
canvas1 = FigureCanvas(fig10)
canvas2 = FigureCanvas(fig11)
vbox = QtGui.QGridLayout()
vbox.addWidget(QtGui.QLabel("<b>Episodio:</b> " + ep.nombre))
vbox.addWidget(QtGui.QLabel("<b>Inicio:</b> " + str(ep.tiempo[0])))
vbox.addWidget(QtGui.QLabel("<b>Final:</b> " + str(ep.tiempo[-1])))
vbox.addWidget(QtGui.QLabel("<b>Duración:</b> %s min" % (ep.tiempo[-1] - ep.tiempo[0])))
vbox.addWidget(QtGui.QLabel("<b>Coeficiente de correlación:</b> " + str(ep.correlacion)[:5]))
vbox.addWidget(QtGui.QLabel("<b>Calorías consumidas:</b> " + str(ep.numCalorias)[:6] + " (" + str(ep.numCalorias/self.selep.totalCal*100)[:4] + "%)"))
vbox.addWidget(canvas1)
vbox.addWidget(canvas2)
canvas2.mpl_connect('pick_event', lambda event: self.onpick(event, derecho))
return vbox
# Insert elements into the layout for the new episodes
def updateView(self):
if(len(self.selep.epFiltro) > 0):
self.vbox = self.crearWidget(self.selep.epFiltro[self.cbx_izq.currentIndex()], 0)
self.layoutMatplot1.addLayout(self.vbox)
if(len(self.selep.epFiltro) > 1):
self.vbox2 = self.crearWidget(self.selep.epFiltro[self.cbx_der.currentIndex()], 1)
self.layoutMatplot1.addLayout(self.vbox2)
# Remove the contents of the current layout
def limpiarLayout(self):
plt.close('all') # Close all drawn figures to free memory
for cnt in reversed(range(self.vbox.count())):
widget = self.vbox.takeAt(cnt).widget()
if widget is not None:
widget.deleteLater()
for cnt in reversed(range(self.vbox2.count())):
widget = self.vbox2.takeAt(cnt).widget()
if widget is not None:
widget.deleteLater()
def filtrarSueno(self):
print "Filtrar sueño", self.cbSueno.isChecked()
self.filSueno = self.cbSueno.isChecked() #Cambiar el filtro
self.selep.update(self.filSueno, self.filSueno, self.filSedentario, self.filLigero, self.filModerado) #Actualizar el array de episodios filtrados
self.configureComboBox()
self.limpiarLayout()
self.updateView()
def filtrarSedentario(self):
print "Filtrar sedentario"
self.filSedentario = self.cbSedentario.isChecked()
self.selep.update(self.filSueno, self.filSueno, self.filSedentario, self.filLigero, self.filModerado)
self.configureComboBox()
self.limpiarLayout()
self.updateView()
def filtrarLigera(self):
print "Filtrar ligera"
self.filLigero = self.cbLigera.isChecked()
self.selep.update(self.filSueno, self.filSueno, self.filSedentario, self.filLigero, self.filModerado)
self.configureComboBox()
self.limpiarLayout()
self.updateView()
def filtrarModerada(self):
print "Filtrar moderada"
self.filModerado = self.cbModerada.isChecked()
self.selep.update(self.filSueno, self.filSueno, self.filSedentario, self.filLigero, self.filModerado)
self.configureComboBox()
self.limpiarLayout()
self.updateView()
def retroceder(self):
idxI = self.cbx_izq.currentIndex()
idxD = self.cbx_der.currentIndex()
if (idxI > 0):
self.cbx_izq.setCurrentIndex(idxI-1)
if(idxD > 0):
self.cbx_der.setCurrentIndex(idxD-1)
print "episodios", self.cbx_izq.currentIndex(), "y", self.cbx_der.currentIndex()
self.limpiarLayout()
self.updateView()
def avanzar(self):
idxI = self.cbx_izq.currentIndex()
idxD = self.cbx_der.currentIndex()
if (idxI < len(self.selep.epFiltro) - 1):
self.cbx_izq.setCurrentIndex(idxI+1)
if(idxD < len(self.selep.epFiltro) - 1):
self.cbx_der.setCurrentIndex(idxD+1)
print "episodios", self.cbx_izq.currentIndex(), "y", self.cbx_der.currentIndex()
self.limpiarLayout()
self.updateView()
def cbx_izqListener(self):
print "episodios", self.cbx_izq.currentIndex(), "y", self.cbx_der.currentIndex()
self.limpiarLayout()
self.updateView()
def cbx_derListener(self):
print "episodios", self.cbx_izq.currentIndex(), "y", self.cbx_der.currentIndex()
self.limpiarLayout()
self.updateView()<|fim▁end|> | self.label.setText('Instante ' + str(self.getTime(xdata[ind[0]], ydata[ind[0]], ep)))
def creaFiguras(self, ep):
""" ep: tiempo, temp, flujo""" |
<|file_name|>adapter.js<|end_file_name|><|fim▁begin|>// Don't need this for our purposes
module = function(){};
if(typeof equal != 'undefined') {
equals = equal;
}
ok = function(actual, message) {
equal(actual, true, message);
}
raises = function(fn, expected, message) {
raisesError(fn, message);
};
asyncTest = function(name, delay, fn) {
test(name, fn);
}
start = function() {
// Just pass through...
}
notStrictEqual = function(a, b, message) {
equal(a === b, false, message);
}
var ensureArray = function(obj) {
if(obj === null) {
return [];
} else if(Object.isArray(obj) && (!obj.indexOf || !obj.lastIndexOf)) {
return obj.concat();
} else if(!Object.isArray(obj) && typeof obj == 'object') {
return Array.prototype.slice.call(obj);
} else {
return obj;
}
}
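// Explanatory note (not part of the original adapter): ensureArray
// normalizes whatever Underscore's tests hand us -- null becomes [], an
// array-like object such as `arguments` is converted with
// Array.prototype.slice, and a real array is defensively copied with
// concat() so Sugar's helpers never mutate the caller's array in place.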
var CompatibleMethods = [
{
module: Array.prototype,
methods: [
{
name: 'first',<|fim▁hole|> if(guard) {
return arr[0];
}
return ensureArray(arr).first(n);
}
},
{
name: 'last',
method: function(arr, n, third){
// This is the same check that Underscore makes to hack
// _.last to work with _.map
if(third) n = 1;
return ensureArray(arr).last(n);
}
},
{
name: 'rest',
method: function(arr, n, guard){
if(n === undefined) n = 1;
if(guard) {
return arr.slice(1);
}
return ensureArray(arr).from(n);
}
},
{
name: 'compact',
method: function(arr){
return ensureArray(arr).compact(true);
}
},
/* Object.extend is no longer compatible as it has conflict resolution now.
{
name: 'extend',
method: function(){
return Object.SugarMethods['merge'].method.apply(this, arguments);
}
},
*/
/* Array#flatten is no longer compatible as it has levels of flattening (not just deep/shallow)
{
name: 'flatten',
method: function(arr){
return ensureArray(arr).flatten();
}
},
*/
{
name: 'uniq',
method: function(arr){
return ensureArray(arr).unique();
}
},
{
name: 'intersection',
method: function(arr){
arr = ensureArray(arr);
var args = Array.prototype.slice.call(arguments, 1);
return Array.prototype.intersect.apply(arr, args);
}
},
{
name: 'union',
method: function(arr, a){
arr = ensureArray(arr);
var args = Array.prototype.slice.call(arguments, 1);
return Array.prototype.union.apply(arr, args);
}
},
/*
{
name: 'difference',
method: function(arr, a){
arr = ensureArray(arr);
var args = Array.prototype.slice.call(arguments, 1);
return Array.prototype.subtract.apply(arr, args);
}
},
*/
{
name: 'indexOf',
method: function(arr, a){
return ensureArray(arr).indexOf(a);
}
},
{
name: 'lastIndexOf',
method: function(arr, a){
return ensureArray(arr).lastIndexOf(a);
}
},
{
name: 'range',
method: function(start, stop, step){
if(arguments.length == 1){
stop = arguments[0];
start = 0;
}
var shift = step < 0 ? 1 : -1;
return start.upto(stop + shift, null, step);
}
},
// Collections
// _.each -> Array#forEach OR Object.each
// _.map -> Array#map
// _.reduce -> Array#reduce
// _.reduceRight -> Array#reduceRight
// _.invoke is doing some strange tapdancing for passing methods directly...
// _.sortedIndex ... no direct equivalent
// _.toArray ... no direct equivalent for arguments... Array.create?
// _.size ... no direct equivalent for objects... obj.keys().length?
{
name: 'detect',
method: function(arr, fn, context){
return Array.SugarMethods['find'].method.call(arr, fn.bind(context));
}
},
{
name: 'select',
method: function(arr, fn, context){
return Array.SugarMethods['findAll'].method.call(arr, fn.bind(context));
}
},
{
name: 'reject',
method: function(arr, fn, context){
return Array.SugarMethods['exclude'].method.call(arr, fn.bind(context));
}
},
{
name: 'all',
method: function(arr, fn, context){
return Array.SugarMethods['all'].method.call(arr, fn.bind(context));
}
},
{
name: 'any',
method: function(arr, fn, context){
if(!fn) fn = function(a){ return a; };
return Array.SugarMethods['some'].method.call(arr, fn.bind(context));
}
},
/*
{
name: 'include',
method: function(arr, val){
return Array.SugarMethods['has'].method.call(arr, val);
}
},
*/
{
name: 'pluck',
method: function(arr, prop){
return Array.SugarMethods['map'].method.call(arr, prop);
}
},
{
name: 'max',
method: function(arr, fn, context){
if(!fn) fn = function(a){ return a; };
return Array.SugarMethods['max'].method.call(arr, fn.bind(context))[0];
}
},
{
name: 'min',
method: function(arr, fn, context){
if(!fn) fn = function(a){ return a; };
return Array.SugarMethods['min'].method.call(arr, fn.bind(context))[0];
}
},
{
name: 'sortBy',
method: function(arr, fn, context){
return Array.SugarMethods['sortBy'].method.call(arr, fn.bind(context));
}
},
{
name: 'groupBy',
method: function(arr, fn){
return Array.SugarMethods['groupBy'].method.call(arr, fn);
}
},
// Objects
// _.functions ... no direct equivalent
// _.defaults ... no direct equivalent
// _.tap ... no direct equivalent
// _.isElement ... no direct equivalent
// _.isArguments ... no direct equivalent
// _.isNaN ... no direct equivalent
// _.isNull ... no direct equivalent
// _.isUndefined ... no direct equivalent
{
name: 'keys',
method: function(){
return Object.SugarMethods['keys'].method.apply(this, arguments);
}
},
{
name: 'values',
method: function(){
return Object.SugarMethods['values'].method.apply(this, arguments);
}
},
{
name: 'clone',
method: function(){
return Object.SugarMethods['clone'].method.apply(this, arguments);
}
},
{
name: 'isEqual',
method: function(a, b){
if (a && a._chain) a = a._wrapped;
if (b && b._chain) b = b._wrapped;
if (a && a.isEqual) return a.isEqual(b);
if (b && b.isEqual) return b.isEqual(a);
return Object.SugarMethods['equal'].method.apply(this, arguments);
}
},
{
name: 'isEmpty',
method: function(){
return Object.SugarMethods['isEmpty'].method.apply(this, arguments);
}
},
{
name: 'isArray',
method: function(arr){
return Array.isArray(arr);
}
},
{
name: 'isFunction',
method: function(){
return Object.SugarMethods['isFunction'].method.apply(this, arguments);
}
},
{
name: 'isString',
method: function(){
return Object.SugarMethods['isString'].method.apply(this, arguments);
}
},
{
name: 'isNumber',
method: function(){
if(isNaN(arguments[0])) {
// Sugar differs here as it's trying to stay aligned with Javascript and is
// checking types only.
return false;
}
return Object.SugarMethods['isNumber'].method.apply(this, arguments);
}
},
{
name: 'isBoolean',
method: function(){
return Object.SugarMethods['isBoolean'].method.apply(this, arguments);
}
},
{
name: 'isDate',
method: function(){
return Object.SugarMethods['isDate'].method.apply(this, arguments);
}
},
{
name: 'isRegExp',
method: function(){
return Object.SugarMethods['isRegExp'].method.apply(this, arguments);
}
},
// Functions
// _.bindAll ... no direct equivalent (similar to bindAsEventListener??)
// _.memoize ... no direct equivalent
// _.debounce ... no direct equivalent
// _.once ... no direct equivalent.. is this not similar to memoize?
// _.wrap ... no direct equivalent..
// _.compose ... no direct equivalent.. math stuff
{
name: 'bind',
method: function(fn){
var args = Array.prototype.slice.call(arguments, 1);
return Function.prototype.bind.apply(fn, args);
}
},
{
name: 'after',
method: function(num, fn){
return Function.prototype.after.apply(fn, [num]);
}
},
{
name: 'delay',
method: function(fn){
var args = Array.prototype.slice.call(arguments, 1);
return Function.prototype.delay.apply(fn, args);
}
},
{
name: 'defer',
method: function(fn){
var args = Array.prototype.slice.call(arguments, 1);
return Function.prototype.delay.apply(fn, [1].concat(args));
}
},
{
name: 'throttle',
method: function(fn, wait){
return Function.prototype.lazy.apply(fn, [wait]);
}
},
// Utility
// _.noConflict ... no direct equivalent
// _.identity ... no direct equivalent
// _.mixin ... no direct equivalent
// _.uniqueId ... no direct equivalent
// _.template ... no direct equivalent
// _.chain ... no direct equivalent
// _.value ... no direct equivalent
{
name: 'times',
method: function(n, fn){
return n.times(fn);
}
}
]
}
];
var mapMethods = function() {
var proto;
CompatibleMethods.forEach(function(cm) {
cm.methods.forEach(function(m) {
_[m.name] = m.method;
});
});
}
mapMethods();<|fim▁end|> | method: function(arr, n, guard){ |
<|file_name|>files_6b.js<|end_file_name|><|fim▁begin|>var searchData=
[
<|fim▁hole|> ['keep_5fhistory_2ecpp',['keep_history.cpp',['../keep__history_8cpp.html',1,'']]],
['keep_5fhistory_5fpass_2ecpp',['keep_history_pass.cpp',['../keep__history__pass_8cpp.html',1,'']]],
['keep_5fhistory_5fpass_2eh',['keep_history_pass.h',['../keep__history__pass_8h.html',1,'']]]
];<|fim▁end|> | |
<|file_name|>extract_xml.py<|end_file_name|><|fim▁begin|>import os
import types
from sllib.LLSD import LLSD
try:
os.makedirs('./httpcap')
except:
pass
data = open('httpcap.txt','r').read()
c = 0
btag = '<llsd>'
etag = '</llsd>'
##mbtag = '<key>message</key><string>'
##metag = '</string>'
b = data.find(btag)
mnames = {}
while b >= 0:
e = data.find(etag, b) + len(etag)
xml = data[b:e]
## bm = xml.rfind(mbtag)
## em = xml.find(metag, bm)
<|fim▁hole|>## bm = bm + len(mbtag)
## m = xml[bm:em]
## mnames[m] = None
## else:
## m = 'Unknown'
ll = LLSD.fromstring(xml)
m = 'DATA'
if type(ll) == types.DictType and ll.has_key('events'):
## print ll
for msg in ll['events']:
m = msg['message']
## print m
mnames[m] = None
name = './httpcap/%s_%d.xml' % (m,c)
try:
open(name, 'w+').write(xml)
except:
print xml
raise
c += 1
b = data.find(btag, e)
print mnames.keys()<|fim▁end|> | ## if bm >= 0 and em >= 0 and em >= bm:
|
<|file_name|>postgislayers.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
gsconfig is a python library for manipulating a GeoServer instance via the GeoServer RESTConfig API.
The project is distributed under a MIT License .
'''
__author__ = "David Winslow"
__copyright__ = "Copyright 2012-2015 Boundless, Copyright 2010-2012 OpenPlans"
__license__ = "MIT"
<|fim▁hole|>from geoserver.catalog import Catalog
cat = Catalog("http://localhost:8080/geoserver/rest", "admin", "geoserver")
pg_stores = [s for s in cat.get_stores()
if s.connection_parameters and \
s.connection_parameters.get("dbtype") == "postgis"]
res = []
for s in pg_stores:
res.extend(r.name for r in cat.get_resources(store=s))
print res<|fim▁end|> | |
<|file_name|>rc.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An owned, task-local, reference counted type
//!
//! # Safety note
//!
//! XXX There is currently no type-system mechanism for enforcing that
//! reference counted types are both allocated on the exchange heap
//! and also non-sendable
//!
//! This doesn't prevent borrowing multiple aliasable mutable pointers
use ops::Drop;
use clone::Clone;
use libc::c_void;
use cast;
pub struct RC<T> {
priv p: *c_void // ~(uint, T)
}
impl<T> RC<T> {
pub fn new(val: T) -> RC<T> {
unsafe {
let v = ~(1, val);
let p: *c_void = cast::transmute(v);
RC { p: p }
}
}
fn get_mut_state(&mut self) -> *mut (uint, T) {
unsafe {
let p: &mut ~(uint, T) = cast::transmute(&mut self.p);
let p: *mut (uint, T) = &mut **p;
return p;
}
}
fn get_state(&self) -> *(uint, T) {
unsafe {
let p: &~(uint, T) = cast::transmute(&self.p);
let p: *(uint, T) = &**p;
return p;
}
}
pub fn unsafe_borrow_mut(&mut self) -> *mut T {
unsafe {
match *self.get_mut_state() {
(_, ref mut p) => {
let p: *mut T = p;
return p;
}
}
}
}
pub fn refcount(&self) -> uint {
unsafe {
match *self.get_state() {
(count, _) => count
}
}<|fim▁hole|> }
}
#[unsafe_destructor]
impl<T> Drop for RC<T> {
fn drop(&mut self) {
assert!(self.refcount() > 0);
unsafe {
match *self.get_mut_state() {
(ref mut count, _) => {
*count = *count - 1
}
}
if self.refcount() == 0 {
let _: ~(uint, T) = cast::transmute(self.p);
}
}
}
}
impl<T> Clone for RC<T> {
fn clone(&self) -> RC<T> {
unsafe {
// XXX: Mutable clone
let this: &mut RC<T> = cast::transmute_mut(self);
match *this.get_mut_state() {
(ref mut count, _) => {
*count = *count + 1;
}
}
}
RC { p: self.p }
}
}
#[cfg(test)]
mod test {
use super::RC;
#[test]
fn smoke_test() {
unsafe {
let mut v1 = RC::new(100);
assert!(*v1.unsafe_borrow_mut() == 100);
assert!(v1.refcount() == 1);
let mut v2 = v1.clone();
assert!(*v2.unsafe_borrow_mut() == 100);
assert!(v2.refcount() == 2);
*v2.unsafe_borrow_mut() = 200;
assert!(*v2.unsafe_borrow_mut() == 200);
assert!(*v1.unsafe_borrow_mut() == 200);
let v3 = v2.clone();
assert!(v3.refcount() == 3);
{
let _v1 = v1;
let _v2 = v2;
}
assert!(v3.refcount() == 1);
}
}
}<|fim▁end|> | |
<|file_name|>apiissue.go<|end_file_name|><|fim▁begin|>package apimanagement
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// APIIssueClient is the apiManagement Client
type APIIssueClient struct {
BaseClient
}
// NewAPIIssueClient creates an instance of the APIIssueClient client.
func NewAPIIssueClient(subscriptionID string) APIIssueClient {
return NewAPIIssueClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewAPIIssueClientWithBaseURI creates an instance of the APIIssueClient client using a custom endpoint. Use this
// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewAPIIssueClientWithBaseURI(baseURI string, subscriptionID string) APIIssueClient {
return APIIssueClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates a new Issue for an API or updates an existing one.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API identifier. Must be unique in the current API Management service instance.
// issueID - issue identifier. Must be unique in the current API Management service instance.
// parameters - create parameters.
// ifMatch - eTag of the Entity. Not required when creating an entity, but required when updating an entity.
func (client APIIssueClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string, parameters IssueContract, ifMatch string) (result IssueContract, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/APIIssueClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{<|fim▁hole|> {Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil}}},
{TargetValue: issueID,
Constraints: []validation.Constraint{{Target: "issueID", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "issueID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "issueID", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}},
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.IssueContractProperties", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.IssueContractProperties.Title", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.IssueContractProperties.Description", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.IssueContractProperties.UserID", Name: validation.Null, Rule: true, Chain: nil},
}}}}}); err != nil {
return result, validation.NewError("apimanagement.APIIssueClient", "CreateOrUpdate", err.Error())
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, serviceName, apiid, issueID, parameters, ifMatch)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
resp, err := client.CreateOrUpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "CreateOrUpdate", resp, "Failure sending request")
return
}
result, err = client.CreateOrUpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "CreateOrUpdate", resp, "Failure responding to request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client APIIssueClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string, parameters IssueContract, ifMatch string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"issueId": autorest.Encode("path", issueID),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2021-08-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues/{issueId}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
if len(ifMatch) > 0 {
preparer = autorest.DecoratePreparer(preparer,
autorest.WithHeader("If-Match", autorest.String(ifMatch)))
}
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client APIIssueClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client APIIssueClient) CreateOrUpdateResponder(resp *http.Response) (result IssueContract, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified Issue from an API.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API identifier. Must be unique in the current API Management service instance.
// issueID - issue identifier. Must be unique in the current API Management service instance.
// ifMatch - eTag of the Entity. ETag should match the current entity state from the header response of the GET
// request or it should be * for unconditional update.
func (client APIIssueClient) Delete(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string, ifMatch string) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/APIIssueClient.Delete")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil}}},
{TargetValue: issueID,
Constraints: []validation.Constraint{{Target: "issueID", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "issueID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "issueID", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.APIIssueClient", "Delete", err.Error())
}
req, err := client.DeletePreparer(ctx, resourceGroupName, serviceName, apiid, issueID, ifMatch)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Delete", nil, "Failure preparing request")
return
}
resp, err := client.DeleteSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Delete", resp, "Failure sending request")
return
}
result, err = client.DeleteResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Delete", resp, "Failure responding to request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client APIIssueClient) DeletePreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string, ifMatch string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"issueId": autorest.Encode("path", issueID),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2021-08-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues/{issueId}", pathParameters),
autorest.WithQueryParameters(queryParameters),
autorest.WithHeader("If-Match", autorest.String(ifMatch)))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client APIIssueClient) DeleteSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client APIIssueClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets the details of the Issue for an API specified by its identifier.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API identifier. Must be unique in the current API Management service instance.
// issueID - issue identifier. Must be unique in the current API Management service instance.
// expandCommentsAttachments - expand the comment attachments.
func (client APIIssueClient) Get(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string, expandCommentsAttachments *bool) (result IssueContract, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/APIIssueClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil}}},
{TargetValue: issueID,
Constraints: []validation.Constraint{{Target: "issueID", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "issueID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "issueID", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.APIIssueClient", "Get", err.Error())
}
req, err := client.GetPreparer(ctx, resourceGroupName, serviceName, apiid, issueID, expandCommentsAttachments)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Get", resp, "Failure responding to request")
return
}
return
}
// GetPreparer prepares the Get request.
func (client APIIssueClient) GetPreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string, expandCommentsAttachments *bool) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"issueId": autorest.Encode("path", issueID),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2021-08-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if expandCommentsAttachments != nil {
queryParameters["expandCommentsAttachments"] = autorest.Encode("query", *expandCommentsAttachments)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues/{issueId}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client APIIssueClient) GetSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client APIIssueClient) GetResponder(resp *http.Response) (result IssueContract, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// GetEntityTag gets the entity state (Etag) version of the Issue for an API specified by its identifier.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API identifier. Must be unique in the current API Management service instance.
// issueID - issue identifier. Must be unique in the current API Management service instance.
func (client APIIssueClient) GetEntityTag(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/APIIssueClient.GetEntityTag")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil}}},
{TargetValue: issueID,
Constraints: []validation.Constraint{{Target: "issueID", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "issueID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "issueID", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.APIIssueClient", "GetEntityTag", err.Error())
}
req, err := client.GetEntityTagPreparer(ctx, resourceGroupName, serviceName, apiid, issueID)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "GetEntityTag", nil, "Failure preparing request")
return
}
resp, err := client.GetEntityTagSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "GetEntityTag", resp, "Failure sending request")
return
}
result, err = client.GetEntityTagResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "GetEntityTag", resp, "Failure responding to request")
return
}
return
}
// GetEntityTagPreparer prepares the GetEntityTag request.
func (client APIIssueClient) GetEntityTagPreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"issueId": autorest.Encode("path", issueID),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2021-08-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsHead(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues/{issueId}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetEntityTagSender sends the GetEntityTag request. The method will close the
// http.Response Body if it receives an error.
func (client APIIssueClient) GetEntityTagSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// GetEntityTagResponder handles the response to the GetEntityTag request. The method always
// closes the http.Response Body.
func (client APIIssueClient) GetEntityTagResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByClosing())
result.Response = resp
return
}
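// Usage sketch (not part of the generated client): a typical optimistic
// concurrency flow reads the ETag first and passes it as If-Match on a later
// Update or Delete (header name assumed):
//
//   res, _ := client.GetEntityTag(ctx, rg, svc, apiID, issueID)
//   etag := res.Header.Get("ETag")
//   _, _ = client.Update(ctx, rg, svc, apiID, issueID, params, etag)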
// ListByService lists all issues associated with the specified API.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API identifier. Must be unique in the current API Management service instance.
// filter - | Field  | Usage  | Supported operators    | Supported functions                         |
//          |--------|--------|------------------------|---------------------------------------------|
//          | name   | filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
//          | userId | filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
//          | state  | filter | eq                     |                                             |
// expandCommentsAttachments - expand the comment attachments.
// top - number of records to return.
// skip - number of records to skip.
func (client APIIssueClient) ListByService(ctx context.Context, resourceGroupName string, serviceName string, apiid string, filter string, expandCommentsAttachments *bool, top *int32, skip *int32) (result IssueCollectionPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/APIIssueClient.ListByService")
defer func() {
sc := -1
if result.ic.Response.Response != nil {
sc = result.ic.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil}}},
{TargetValue: top,
Constraints: []validation.Constraint{{Target: "top", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "top", Name: validation.InclusiveMinimum, Rule: int64(1), Chain: nil}}}}},
{TargetValue: skip,
Constraints: []validation.Constraint{{Target: "skip", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "skip", Name: validation.InclusiveMinimum, Rule: int64(0), Chain: nil}}}}}}); err != nil {
return result, validation.NewError("apimanagement.APIIssueClient", "ListByService", err.Error())
}
result.fn = client.listByServiceNextResults
req, err := client.ListByServicePreparer(ctx, resourceGroupName, serviceName, apiid, filter, expandCommentsAttachments, top, skip)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "ListByService", nil, "Failure preparing request")
return
}
resp, err := client.ListByServiceSender(req)
if err != nil {
result.ic.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "ListByService", resp, "Failure sending request")
return
}
result.ic, err = client.ListByServiceResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "ListByService", resp, "Failure responding to request")
return
}
if result.ic.hasNextLink() && result.ic.IsEmpty() {
err = result.NextWithContext(ctx)
return
}
return
}
// ListByServicePreparer prepares the ListByService request.
func (client APIIssueClient) ListByServicePreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, filter string, expandCommentsAttachments *bool, top *int32, skip *int32) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2021-08-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(filter) > 0 {
queryParameters["$filter"] = autorest.Encode("query", filter)
}
if expandCommentsAttachments != nil {
queryParameters["expandCommentsAttachments"] = autorest.Encode("query", *expandCommentsAttachments)
}
if top != nil {
queryParameters["$top"] = autorest.Encode("query", *top)
}
if skip != nil {
queryParameters["$skip"] = autorest.Encode("query", *skip)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListByServiceSender sends the ListByService request. The method will close the
// http.Response Body if it receives an error.
func (client APIIssueClient) ListByServiceSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListByServiceResponder handles the response to the ListByService request. The method always
// closes the http.Response Body.
func (client APIIssueClient) ListByServiceResponder(resp *http.Response) (result IssueCollection, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listByServiceNextResults retrieves the next set of results, if any.
func (client APIIssueClient) listByServiceNextResults(ctx context.Context, lastResults IssueCollection) (result IssueCollection, err error) {
req, err := lastResults.issueCollectionPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "listByServiceNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListByServiceSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "listByServiceNextResults", resp, "Failure sending next results request")
}
result, err = client.ListByServiceResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "listByServiceNextResults", resp, "Failure responding to next results request")
}
return
}
// ListByServiceComplete enumerates all values, automatically crossing page boundaries as required.
func (client APIIssueClient) ListByServiceComplete(ctx context.Context, resourceGroupName string, serviceName string, apiid string, filter string, expandCommentsAttachments *bool, top *int32, skip *int32) (result IssueCollectionIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/APIIssueClient.ListByService")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.ListByService(ctx, resourceGroupName, serviceName, apiid, filter, expandCommentsAttachments, top, skip)
return
}
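// Usage sketch (not part of the generated client): ListByServiceComplete hides
// paging behind an iterator; a caller could drain it like this (error handling
// elided):
//
//   it, _ := client.ListByServiceComplete(ctx, rg, svc, apiID, "", nil, nil, nil)
//   for it.NotDone() {
//       _ = it.Value() // IssueContract
//       _ = it.NextWithContext(ctx)
//   }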
// Update updates an existing issue for an API.
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
// apiid - API identifier. Must be unique in the current API Management service instance.
// issueID - issue identifier. Must be unique in the current API Management service instance.
// parameters - update parameters.
// ifMatch - eTag of the Entity. ETag should match the current entity state from the header response of the GET
// request or it should be * for unconditional update.
func (client APIIssueClient) Update(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string, parameters IssueUpdateContract, ifMatch string) (result IssueContract, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/APIIssueClient.Update")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}},
{TargetValue: apiid,
Constraints: []validation.Constraint{{Target: "apiid", Name: validation.MaxLength, Rule: 80, Chain: nil},
{Target: "apiid", Name: validation.MinLength, Rule: 1, Chain: nil}}},
{TargetValue: issueID,
Constraints: []validation.Constraint{{Target: "issueID", Name: validation.MaxLength, Rule: 256, Chain: nil},
{Target: "issueID", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "issueID", Name: validation.Pattern, Rule: `^[^*#&+:<>?]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.APIIssueClient", "Update", err.Error())
}
req, err := client.UpdatePreparer(ctx, resourceGroupName, serviceName, apiid, issueID, parameters, ifMatch)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Update", nil, "Failure preparing request")
return
}
resp, err := client.UpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Update", resp, "Failure sending request")
return
}
result, err = client.UpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.APIIssueClient", "Update", resp, "Failure responding to request")
return
}
return
}
// UpdatePreparer prepares the Update request.
func (client APIIssueClient) UpdatePreparer(ctx context.Context, resourceGroupName string, serviceName string, apiid string, issueID string, parameters IssueUpdateContract, ifMatch string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"apiId": autorest.Encode("path", apiid),
"issueId": autorest.Encode("path", issueID),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2021-08-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPatch(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/issues/{issueId}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters),
autorest.WithHeader("If-Match", autorest.String(ifMatch)))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// UpdateSender sends the Update request. The method will close the
// http.Response Body if it receives an error.
func (client APIIssueClient) UpdateSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// UpdateResponder handles the response to the Update request. The method always
// closes the http.Response Body.
func (client APIIssueClient) UpdateResponder(resp *http.Response) (result IssueContract, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}<|fim▁end|> | {TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil}, |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># Copyright 2017 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import nova.conf
hyperv_opts = [
cfg.IntOpt('evacuate_task_state_timeout',
default=600,
help='Number of seconds to wait for an instance to be '
'evacuated during host maintenance.'),
cfg.IntOpt('cluster_event_check_interval',
deprecated_for_removal=True,
deprecated_since="5.0.1",
default=2),
cfg.BoolOpt('instance_automatic_shutdown',
default=False,
help='Automatically shutdown instances when the host is '
'shutdown. By default, instances will be saved, which '
'adds a disk overhead. Changing this option will not '
'affect existing instances.'),
cfg.IntOpt('instance_live_migration_timeout',
default=300,
min=0,
help='Number of seconds to wait for an instance to be '
'live migrated (Only applies to clustered instances '
'for the moment).'),
cfg.IntOpt('max_failover_count',
default=1,
min=1,
help="The maximum number of failovers that can occur in the "
"failover_period timeframe per VM. Once a VM's number "
"failover reaches this number, the VM will simply end up "
"in a Failed state."),
cfg.IntOpt('failover_period',
default=6,
min=1,
help="The number of hours in which the max_failover_count "
"number of failovers can occur."),
cfg.BoolOpt('recreate_ports_on_failover',
default=True,
help="When enabled, the ports will be recreated for failed "
"over instances. This ensures that we're not left with "
"a stale port."),
cfg.BoolOpt('auto_failback',
default=True,
help="Allow the VM the failback to its original host once it "
"is available."),
cfg.BoolOpt('force_destroy_instances',
default=False,
help="If this option is enabled, instance destroy requests "
"are executed immediately, regardless of instance "
"pending tasks. In some situations, the destroy "
"operation will fail (e.g. due to file locks), "
"requiring subsequent retries."),
cfg.BoolOpt('move_disks_on_cold_migration',
default=True,
help="Move the instance files to the instance dir configured "
"on the destination host. You may consider disabling "
"this when using multiple CSVs or shares and you wish "
"the source location to be preserved."),
]
coordination_opts = [
cfg.StrOpt('backend_url',
default='file:///C:/OpenStack/Lock',
help='The backend URL to use for distributed coordination.'),
]
CONF = nova.conf.CONF
CONF.register_opts(coordination_opts, 'coordination')
CONF.register_opts(hyperv_opts, 'hyperv')
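# Usage sketch: once registered, the options are read through the usual
# oslo.config group attributes, e.g. (hypothetical caller):
#   CONF.hyperv.instance_live_migration_timeout  # 300 by default
#   CONF.coordination.backend_url                # 'file:///C:/OpenStack/Lock'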
def list_opts():
return [('coordination', coordination_opts),
('hyperv', hyperv_opts)]<|fim▁end|> | #
# Unless required by applicable law or agreed to in writing, software |
<|file_name|>adaboostSAMME.py<|end_file_name|><|fim▁begin|>'''
The adaboostSAMME.py script can be used to train and test AdaBoost with an
SGDClassifier on MNIST data. See the command-line options below for more information.
'''
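# Example invocations (sketch; file names are hypothetical):
#   python adaboostSAMME.py train train.pkl valid.pkl model.pkl graph.png 5 100 --log_loss --l2
#   python adaboostSAMME.py test test.pkl model.pkl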
import argparse
import cPickle as pickle
import numpy as np
import pylab as pl
import time
from sklearn.ensemble import AdaBoostClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import zero_one_loss
def write(model, model_file):
'''
    Writes the trained model to the given file.
'''
pickle.dump(model,open(model_file, 'wb'))
def load_data(data_file):
'''
Loads X and Y data from the pickled data_file
'''
data = pickle.load(open(data_file))
return (data['X'], data['Y'])
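# The pickle is expected to hold a dict with 'X' (features) and 'Y' (labels);
# such a file could be produced with, e.g.:
#   pickle.dump({'X': X, 'Y': Y}, open('train.pkl', 'wb'))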
def get_adboost_classifier(algo, num_estimators, wl_loss, wl_penalty, passes):
'''
    Constructs an AdaBoost classifier object for the given algorithm and number
    of estimators. The loss and penalty apply to the (commented-out) SGD weak
    learner; as written, a depth-30 decision tree is used as the weak learner.
'''
'''
weak_learner = SGDClassifier(loss=wl_loss, penalty=wl_penalty,
n_jobs=-1, n_iter = passes, shuffle = True)
'''
weak_learner = DecisionTreeClassifier(max_depth=30)
    ab_classifier = AdaBoostClassifier(weak_learner, n_estimators=num_estimators,
                                       algorithm=algo)
return ab_classifier
def train(ab_classifier, train_file, validation_file, model_file, graph_file):
'''
    Takes a configured AdaBoost classifier object, trains it with the training
    data from train_file, and writes the learned model to model_file.
'''
s = time.time()
train_x, train_y = load_data(train_file)
ab_classifier = ab_classifier.fit(train_x, train_y)
write(ab_classifier, model_file)
valid_x, valid_y = load_data(validation_file)
    # find out the stage-wise training and validation error
n_estimators = len(ab_classifier.estimators_)
train_err = np.zeros((n_estimators,))
valid_err = np.zeros((n_estimators,))
for i, y_pred in enumerate(ab_classifier.staged_predict(train_x)):
train_err[i] = zero_one_loss(y_pred, train_y)
for i, y_pred in enumerate(ab_classifier.staged_predict(valid_x)):
valid_err[i] = zero_one_loss(y_pred, valid_y)
save_fig(train_err, valid_err, n_estimators, graph_file)
print 'Training time:', time.time() - s, 'seconds'
def save_fig(train_err, valid_err, n_estimators, file_name):
fig = pl.figure()
ax = fig.add_subplot(111)
ax.plot(np.arange(n_estimators) + 1, train_err, label='Train Error', color='red')
ax.plot(np.arange(n_estimators) + 1, valid_err, label='Validation Error',
color='green')
ax.set_ylim((0.0, 1.0))
ax.set_xlabel('Number of Learners')
ax.set_ylabel('Error')
ax.set_title('Adaboost SAMME on MNIST dataset')
ax.xaxis.grid(True)<|fim▁hole|>
def test(model_file, test_file):
'''
    Tests the model on the test data in test_file using the model in model_file.
Prints accuracy to report the performance of the classifier.
'''
test_x, test_y = load_data(test_file)
ab_classifier = pickle.load(open(model_file))
pred_y = ab_classifier.predict(test_x)
correct = np.count_nonzero(test_y == pred_y)
print 'Accuracy: ', correct / (1.0 * len(test_y))
def parse_train_args(args):
'''
    Parses the args required for training and calls the appropriate function.
'''
ab_classifier = get_adboost_classifier('SAMME.R', args.num_learners,
args.loss, args.pen, args.epochs)
train(ab_classifier, args.train_file, args.validation_file, args.model_file,
args.graph_file)
def parse_test_args(args):
'''
    Parses the args required for testing and calls the appropriate function.
'''
test(args.model_file, args.test_file)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help = 'sub-command help')
train_parser = subparsers.add_parser('train', help= 'train adaboost')
train_parser.add_argument('train_file', help='path to training data')
train_parser.add_argument('validation_file', help='path to validation data')
train_parser.add_argument('model_file', help='filepath for model')
train_parser.add_argument('graph_file', help='filepath for training graph')
train_parser.add_argument('epochs', help='number of epochs for weak \
learners', type = int)
train_parser.add_argument('num_learners', nargs='?', help='number of weak \
learners', default = 10, type=int)
loss_gp = train_parser.add_mutually_exclusive_group()
loss_gp.set_defaults(loss = 'log')
loss_gp.add_argument('--log_loss', action = 'store_const', dest = 'loss',
const = 'log', help = 'use log loss function for training weak\
learners')
loss_gp.add_argument('--hinge_loss', action = 'store_const', dest = 'loss',
const = 'hinge', help = 'use hinge loss function for training weak\
learners')
penalty_gp = train_parser.add_mutually_exclusive_group()
penalty_gp.set_defaults(pen = 'l2')
penalty_gp.add_argument('--l1', action = 'store_const', dest = 'pen', const
= 'l1', help = 'use l1 penalty for training weak learners')
penalty_gp.add_argument('--l2', action = 'store_const', dest = 'pen', const
= 'l2', help = 'use l2 penalty for training weak learners')
train_parser.set_defaults(func = parse_train_args)
    test_parser = subparsers.add_parser('test', help = 'test adaboost')
test_parser.add_argument('test_file', help='path to test data')
test_parser.add_argument('model_file', help='filepath for model')
test_parser.set_defaults(func = parse_test_args)
args = parser.parse_args()
args.func(args)<|fim▁end|> | ax.yaxis.grid(True)
leg = ax.legend(loc='upper right', fancybox=True)
leg.get_frame().set_alpha(0.7)
pl.savefig(file_name) |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Networking I/O
use old_io::{IoError, IoResult, InvalidInput};
use ops::FnMut;<|fim▁hole|>
pub use self::addrinfo::get_host_addresses;
pub mod addrinfo;
pub mod tcp;
pub mod udp;
pub mod ip;
pub mod pipe;
fn with_addresses<A, T, F>(addr: A, mut action: F) -> IoResult<T> where
A: ToSocketAddr,
F: FnMut(SocketAddr) -> IoResult<T>,
{
const DEFAULT_ERROR: IoError = IoError {
kind: InvalidInput,
desc: "no addresses found for hostname",
detail: None
};
let addresses = try!(addr.to_socket_addr_all());
let mut err = DEFAULT_ERROR;
for addr in addresses {
match action(addr) {
Ok(r) => return Ok(r),
Err(e) => err = e
}
}
Err(err)
}<|fim▁end|> | use option::Option::None;
use result::Result::{Ok, Err};
use self::ip::{SocketAddr, ToSocketAddr}; |
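// Usage sketch (hypothetical caller; `connect_attempt` is an assumed helper):
// resolve a hostname and try the action on each address, keeping the first
// `Ok` or the last error:
//
//     with_addresses("example.com:80", |addr| {
//         connect_attempt(addr) // addr: SocketAddr
//     })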
<|file_name|>debug.rs<|end_file_name|><|fim▁begin|>use alloc::boxed::Box;
use collections::string::String;
use scheduler::context::{context_switch, context_i, contexts_ptr};
use scheduler;
use schemes::{KScheme, Resource, Url};
use syscall::handle;
/// A debug resource
pub struct DebugResource {
pub scheme: *mut DebugScheme,
pub command: String,
pub line_toggle: bool,
}
impl Resource for DebugResource {
fn dup(&self) -> Option<Box<Resource>> {
Some(box DebugResource {
scheme: self.scheme,
command: self.command.clone(),
line_toggle: self.line_toggle,
})
}
fn url(&self) -> Url {
return Url::from_str("debug:");
}
fn read(&mut self, buf: &mut [u8]) -> Option<usize> {
if self.line_toggle {
self.line_toggle = false;
return Some(0);
}
if self.command.is_empty() {
loop {
unsafe {
let reenable = scheduler::start_no_ints();
// Hack!
if (*self.scheme).context >= (*contexts_ptr).len() ||
(*self.scheme).context < context_i {
(*self.scheme).context = context_i;
}
if (*self.scheme).context == context_i && (*::console).command.is_some() {
if let Some(ref command) = (*::console).command {
self.command = command.clone();
}
(*::console).command = None;
break;
}
scheduler::end_no_ints(reenable);
context_switch(false);
}
}
}
// TODO: Unicode
let mut i = 0;
        while i < buf.len() && !self.command.is_empty() {
buf[i] = unsafe { self.command.as_mut_vec().remove(0) };
i += 1;
}
if i > 0 && self.command.is_empty() {
self.line_toggle = true;
}<|fim▁hole|> Some(i)
}
fn write(&mut self, buf: &[u8]) -> Option<usize> {
for byte in buf {
unsafe {
handle::do_sys_debug(*byte);
}
}
return Some(buf.len());
}
fn sync(&mut self) -> bool {
true
}
}
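// Usage sketch (assumed caller): reading from an opened `debug:` resource
// blocks until a console command line is available; writes go to the kernel
// debug output:
//
//     if let Some(mut res) = scheme.open(&Url::from_str("debug:"), 0) {
//         let mut buf = [0; 256];
//         let _count = res.read(&mut buf); // Some(n) once a line was typed
//         res.write(b"hello\n");
//     }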
pub struct DebugScheme {
pub context: usize,
}
impl DebugScheme {
pub fn new() -> Box<Self> {
box DebugScheme { context: 0 }
}
}
impl KScheme for DebugScheme {
fn scheme(&self) -> &str {
"debug"
}
fn open(&mut self, _: &Url, _: usize) -> Option<Box<Resource>> {
Some(box DebugResource {
scheme: self,
command: String::new(),
line_toggle: false,
})
}
}<|fim▁end|> | |
<|file_name|>example.py<|end_file_name|><|fim▁begin|>class Year(object):
def __init__(self, year):<|fim▁hole|> self.year = year
def is_leap_year(self):
return (self._by_4() and not self._by_100()) \
or self._by_400()
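    # Examples (sketch):
    #   Year(2000).is_leap_year()  # True: divisible by 400
    #   Year(1900).is_leap_year()  # False: divisible by 100 but not by 400
    #   Year(2004).is_leap_year()  # True: divisible by 4 but not by 100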
def _by_4(self):
return self.year % 4 == 0
def _by_100(self):
return self.year % 100 == 0
def _by_400(self):
return self.year % 400 == 0<|fim▁end|> | |
<|file_name|>visit_ast.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Rust AST Visitor. Extracts useful information and massages it into a form
//! usable for clean
use std::collections::HashSet;
use std::mem;
use syntax::abi;
use syntax::ast;
use syntax::ast_util;
use syntax::ast_map;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::codemap::Span;
use rustc::middle::stability;
use core;
use doctree::*;
<|fim▁hole|>// output parameters, maybe only mutated once; perhaps
// better simply to have the visit method return a tuple
// containing them?
// also, is there some reason that this doesn't use the 'visit'
// framework from syntax?
pub struct RustdocVisitor<'a, 'tcx: 'a> {
pub module: Module,
pub attrs: Vec<ast::Attribute>,
pub cx: &'a core::DocContext<'tcx>,
pub analysis: Option<&'a core::CrateAnalysis>,
view_item_stack: HashSet<ast::NodeId>,
inlining_from_glob: bool,
}
impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
pub fn new(cx: &'a core::DocContext<'tcx>,
analysis: Option<&'a core::CrateAnalysis>) -> RustdocVisitor<'a, 'tcx> {
// If the root is reexported, terminate all recursion.
let mut stack = HashSet::new();
stack.insert(ast::CRATE_NODE_ID);
RustdocVisitor {
module: Module::new(None),
attrs: Vec::new(),
cx: cx,
analysis: analysis,
view_item_stack: stack,
inlining_from_glob: false,
}
}
fn stability(&self, id: ast::NodeId) -> Option<attr::Stability> {
self.cx.tcx_opt().and_then(|tcx| stability::lookup(tcx, ast_util::local_def(id)))
}
pub fn visit(&mut self, krate: &ast::Crate) {
self.attrs = krate.attrs.clone();
self.module = self.visit_mod_contents(krate.span,
krate.attrs.clone(),
ast::Public,
ast::CRATE_NODE_ID,
&krate.module,
None);
// attach the crate's exported macros to the top-level module:
self.module.macros = krate.exported_macros.iter()
.map(|def| self.visit_macro(def)).collect();
self.module.is_crate = true;
}
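    // Usage sketch (hypothetical driver code):
    //
    //     let mut v = RustdocVisitor::new(cx, analysis);
    //     v.visit(krate);          // krate: &ast::Crate
    //     let module = v.module;   // the extracted doctree::Module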
pub fn visit_struct_def(&mut self, item: &ast::Item,
name: ast::Ident, sd: &ast::StructDef,
generics: &ast::Generics) -> Struct {
debug!("Visiting struct");
let struct_type = struct_type_from_def(&*sd);
Struct {
id: item.id,
struct_type: struct_type,
name: name,
vis: item.vis,
stab: self.stability(item.id),
attrs: item.attrs.clone(),
generics: generics.clone(),
fields: sd.fields.clone(),
whence: item.span
}
}
pub fn visit_enum_def(&mut self, it: &ast::Item,
name: ast::Ident, def: &ast::EnumDef,
params: &ast::Generics) -> Enum {
debug!("Visiting enum");
Enum {
name: name,
variants: def.variants.iter().map(|v| Variant {
name: v.node.name,
attrs: v.node.attrs.clone(),
vis: v.node.vis,
stab: self.stability(v.node.id),
id: v.node.id,
kind: v.node.kind.clone(),
whence: v.span,
}).collect(),
vis: it.vis,
stab: self.stability(it.id),
generics: params.clone(),
attrs: it.attrs.clone(),
id: it.id,
whence: it.span,
}
}
pub fn visit_fn(&mut self, item: &ast::Item,
name: ast::Ident, fd: &ast::FnDecl,
unsafety: &ast::Unsafety, abi: &abi::Abi,
gen: &ast::Generics) -> Function {
debug!("Visiting fn");
Function {
id: item.id,
vis: item.vis,
stab: self.stability(item.id),
attrs: item.attrs.clone(),
decl: fd.clone(),
name: name,
whence: item.span,
generics: gen.clone(),
unsafety: *unsafety,
abi: *abi,
}
}
pub fn visit_mod_contents(&mut self, span: Span, attrs: Vec<ast::Attribute> ,
vis: ast::Visibility, id: ast::NodeId,
m: &ast::Mod,
name: Option<ast::Ident>) -> Module {
let mut om = Module::new(name);
om.where_outer = span;
om.where_inner = m.inner;
om.attrs = attrs;
om.vis = vis;
om.stab = self.stability(id);
om.id = id;
for i in &m.items {
self.visit_item(&**i, None, &mut om);
}
om
}
fn visit_view_path(&mut self, path: ast::ViewPath_,
om: &mut Module,
id: ast::NodeId,
please_inline: bool) -> Option<ast::ViewPath_> {
match path {
ast::ViewPathSimple(dst, base) => {
if self.resolve_id(id, Some(dst), false, om, please_inline) {
None
} else {
Some(ast::ViewPathSimple(dst, base))
}
}
ast::ViewPathList(p, paths) => {
let mine = paths.into_iter().filter(|path| {
!self.resolve_id(path.node.id(), None, false, om,
please_inline)
}).collect::<Vec<ast::PathListItem>>();
if mine.len() == 0 {
None
} else {
Some(ast::ViewPathList(p, mine))
}
}
// these are feature gated anyway
ast::ViewPathGlob(base) => {
if self.resolve_id(id, None, true, om, please_inline) {
None
} else {
Some(ast::ViewPathGlob(base))
}
}
}
}
fn resolve_id(&mut self, id: ast::NodeId, renamed: Option<ast::Ident>,
glob: bool, om: &mut Module, please_inline: bool) -> bool {
let tcx = match self.cx.tcx_opt() {
Some(tcx) => tcx,
None => return false
};
let def = tcx.def_map.borrow()[&id].def_id();
if !ast_util::is_local(def) { return false }
let analysis = match self.analysis {
            Some(analysis) => analysis,
            None => return false,
};
if !please_inline && analysis.public_items.contains(&def.node) {
return false
}
if !self.view_item_stack.insert(def.node) { return false }
let ret = match tcx.map.get(def.node) {
ast_map::NodeItem(it) => {
if glob {
let prev = mem::replace(&mut self.inlining_from_glob, true);
match it.node {
ast::ItemMod(ref m) => {
for i in &m.items {
self.visit_item(&**i, None, om);
}
}
ast::ItemEnum(..) => {}
_ => { panic!("glob not mapped to a module or enum"); }
}
self.inlining_from_glob = prev;
} else {
self.visit_item(it, renamed, om);
}
true
}
_ => false,
};
self.view_item_stack.remove(&id);
return ret;
}
pub fn visit_item(&mut self, item: &ast::Item,
renamed: Option<ast::Ident>, om: &mut Module) {
debug!("Visiting item {:?}", item);
let name = renamed.unwrap_or(item.ident);
match item.node {
ast::ItemExternCrate(ref p) => {
let path = match *p {
None => None,
Some(x) => Some(x.to_string()),
};
om.extern_crates.push(ExternCrate {
name: name,
path: path,
vis: item.vis,
attrs: item.attrs.clone(),
whence: item.span,
})
}
ast::ItemUse(ref vpath) => {
let node = vpath.node.clone();
let node = if item.vis == ast::Public {
let please_inline = item.attrs.iter().any(|item| {
match item.meta_item_list() {
Some(list) => {
list.iter().any(|i| &i.name()[..] == "inline")
}
None => false,
}
});
match self.visit_view_path(node, om, item.id, please_inline) {
None => return,
Some(p) => p
}
} else {
node
};
om.imports.push(Import {
id: item.id,
vis: item.vis,
attrs: item.attrs.clone(),
node: node,
whence: item.span,
});
}
ast::ItemMod(ref m) => {
om.mods.push(self.visit_mod_contents(item.span,
item.attrs.clone(),
item.vis,
item.id,
m,
Some(name)));
},
ast::ItemEnum(ref ed, ref gen) =>
om.enums.push(self.visit_enum_def(item, name, ed, gen)),
ast::ItemStruct(ref sd, ref gen) =>
om.structs.push(self.visit_struct_def(item, name, &**sd, gen)),
ast::ItemFn(ref fd, ref pur, ref abi, ref gen, _) =>
om.fns.push(self.visit_fn(item, name, &**fd, pur, abi, gen)),
ast::ItemTy(ref ty, ref gen) => {
let t = Typedef {
ty: ty.clone(),
gen: gen.clone(),
name: name,
id: item.id,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis,
stab: self.stability(item.id),
};
om.typedefs.push(t);
},
ast::ItemStatic(ref ty, ref mut_, ref exp) => {
let s = Static {
type_: ty.clone(),
mutability: mut_.clone(),
expr: exp.clone(),
id: item.id,
name: name,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis,
stab: self.stability(item.id),
};
om.statics.push(s);
},
ast::ItemConst(ref ty, ref exp) => {
let s = Constant {
type_: ty.clone(),
expr: exp.clone(),
id: item.id,
name: name,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis,
stab: self.stability(item.id),
};
om.constants.push(s);
},
ast::ItemTrait(unsafety, ref gen, ref b, ref items) => {
let t = Trait {
unsafety: unsafety,
name: name,
items: items.clone(),
generics: gen.clone(),
bounds: b.iter().cloned().collect(),
id: item.id,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis,
stab: self.stability(item.id),
};
om.traits.push(t);
},
ast::ItemImpl(unsafety, polarity, ref gen, ref tr, ref ty, ref items) => {
let i = Impl {
unsafety: unsafety,
polarity: polarity,
generics: gen.clone(),
trait_: tr.clone(),
for_: ty.clone(),
items: items.clone(),
attrs: item.attrs.clone(),
id: item.id,
whence: item.span,
vis: item.vis,
stab: self.stability(item.id),
};
// Don't duplicate impls when inlining glob imports, we'll pick
// them up regardless of where they're located.
if !self.inlining_from_glob {
om.impls.push(i);
}
},
ast::ItemDefaultImpl(unsafety, ref trait_ref) => {
let i = DefaultImpl {
unsafety: unsafety,
trait_: trait_ref.clone(),
id: item.id,
attrs: item.attrs.clone(),
whence: item.span,
};
// see comment above about ItemImpl
if !self.inlining_from_glob {
om.def_traits.push(i);
}
}
ast::ItemForeignMod(ref fm) => {
om.foreigns.push(fm.clone());
}
ast::ItemMac(_) => {
panic!("rustdoc: macros should be gone, after expansion");
}
}
}
// convert each exported_macro into a doc item
fn visit_macro(&self, def: &ast::MacroDef) -> Macro {
Macro {
id: def.id,
attrs: def.attrs.clone(),
name: def.ident,
whence: def.span,
stab: self.stability(def.id),
}
}
}<|fim▁end|> | // looks to me like the first two of these are actually |
<|file_name|>UnknownTypeException.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2005-2006 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or<|fim▁hole|> */
package javax.lang.model.type;
/**
* Indicates that an unknown kind of type was encountered. This can
* occur if the language evolves and new kinds of types are added to
* the {@code TypeMirror} hierarchy. May be thrown by a {@linkplain
* TypeVisitor type visitor} to indicate that the visitor was created
* for a prior version of the language.
*
* @author Joseph D. Darcy
* @author Scott Seligman
* @author Peter von der Ahé
* @see TypeVisitor#visitUnknown
* @since 1.6
*/
public class UnknownTypeException extends RuntimeException {
private static final long serialVersionUID = 269L;
private transient TypeMirror type;
private transient Object parameter;
/**
     * Creates a new {@code UnknownTypeException}. The {@code p}
* parameter may be used to pass in an additional argument with
* information about the context in which the unknown type was
* encountered; for example, the visit methods of {@link
* TypeVisitor} may pass in their additional parameter.
*
* @param t the unknown type, may be {@code null}
* @param p an additional parameter, may be {@code null}
*/
public UnknownTypeException(TypeMirror t, Object p) {
super("Unknown type: " + t);
        this.type = t;
this.parameter = p;
}
/**
* Returns the unknown type.
* The value may be unavailable if this exception has been
* serialized and then read back in.
*
* @return the unknown type, or {@code null} if unavailable
*/
public TypeMirror getUnknownType() {
return type;
}
/**
* Returns the additional argument.
*
* @return the additional argument
*/
public Object getArgument() {
return parameter;
}
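    // Usage sketch (hypothetical caller): a TypeVisitor's visitUnknown method
    // conventionally throws this exception; callers can catch it to cope with
    // type kinds added by newer language versions:
    //
    //   try {
    //       typeMirror.accept(visitor, null);
    //   } catch (UnknownTypeException e) {
    //       TypeMirror unknown = e.getUnknownType();
    //       Object context = e.getArgument();
    //   }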
}<|fim▁end|> | * have any questions. |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
from setuptools import setup
import sys
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
install_requires = []
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
install_requires.append('argparse')
# Python 2.6 does not include OrderedDict.
try:
from collections import OrderedDict
except ImportError:
install_requires.append('ordereddict')
<|fim▁hole|>except IOError:
long_description = 'See https://pypi.python.org/pypi/wiggelen'
# This is quite the hack, but we don't want to import our package from here
# since that's a recipe for disaster (it might have some uninstalled
# dependencies, or we might import another already installed version).
distmeta = {}
for line in open(os.path.join('wiggelen', '__init__.py')):
try:
field, value = (x.strip() for x in line.split('='))
except ValueError:
continue
if field == '__version_info__':
value = value.strip('[]()')
value = '.'.join(x.strip(' \'"') for x in value.split(','))
else:
value = value.strip('\'"')
distmeta[field] = value
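# e.g. a line `__version_info__ = (0, 1, 2)` in wiggelen/__init__.py ends up
# here as distmeta['__version_info__'] == '0.1.2'.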
setup(
name='wiggelen',
version=distmeta['__version_info__'],
description='Working with wiggle tracks in Python',
long_description=long_description,
author=distmeta['__author__'],
author_email=distmeta['__contact__'],
url=distmeta['__homepage__'],
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
install_requires=install_requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)<|fim▁end|> | try:
with open('README.rst') as readme:
long_description = readme.read() |
<|file_name|>encryption-generator.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# encryption-generator.py
#
# Copyright 2016 Netuser <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
# encryption-generator.py Version 2.0
# site http://zorgonteam.wordpress.com
import os
import sys
import time
import base64
import urllib
import hashlib
import subprocess
from datetime import date
from datetime import datetime
from Crypto.Cipher import DES
from Crypto import Random
date=date.today()
now=datetime.now()
if os.name in ['nt','win32']:
os.system('cls')
else:
os.system('clear')
print "[*] Author Netuser [*]"
print "[*] encryption generator [*]"
print "[*] date :",date," [*]"
print
print "[*] Encrypt With Strong Crypto is Coming soon"
back = 'back'
#while back == 'back':<|fim▁hole|> if menu_item == menu:
print "[*] Updating Databases Information .... "
url=urllib.urlretrieve("https://raw.githubusercontent.com/P1d0f/encryptGen/master/encryption-generator.py","encryption-generator.py")
print "[*] Update Succesfully"
sys.exit()
menu_item="help"
if menu == menu_item:
print """
you just type encrypt or decrypt
example :
encrypt = encrypt or decrypt $ encrypt (enter)
decrypt = encrypt or decrypt $ decrypt (enter)
"""
menu_item="encrypt"
if menu == menu_item:
print
print "----> md5"
print "----> sha1"
print "----> sha224"
print "----> sha256"
print "----> sha384"
print "----> sha512"
print "----> base16"
print "----> base32"
print "----> base64"
print "----> cryptoDES"
print
raw=raw_input('[*] type and choice one $ ')
menu_item="exit"
if raw == menu_item:
print "[*] thanks for shopping"
sys.exit()
menu_item="cryptoDES"
if menu_item == raw:
telo=raw_input('[*] your text $ ')
iv=Random.get_random_bytes(8)
des1=DES.new('01234567', DES.MODE_CFB, iv)
des2=DES.new('01234567', DES.MODE_CFB, iv)
text=telo
cipher_text=des2.encrypt(text)
nama_file=open('text.encrypt','w')
nama_file.writelines(cipher_text)
nama_file.close()
time.sleep(2)
for i in(5,4,3,2,1):
print "[*] encrypted at",now
print "\n[*] saved into text.encrypt"
menu_item="base16"
if menu_item == raw:
telo=raw_input('[*] text $ ')
base16=base64.b16encode('%s' % (telo))
for i in(5,4,3,2,1):
print "[*] encoded at",now
print "\n[*] result :",base16
menu_item="sha224"
if menu_item == raw:
telo=raw_input('[*] text $ ')
sha224=hashlib.sha224('%s' % (telo)).hexdigest()
for i in(5,4,3,2,1):
print "[*] encrypted at",now
print "\n[*] result :",sha224
menu_item="sha384"
if menu_item == raw:
telo=raw_input('[*] text $ ')
sha384=hashlib.sha384('%s' % (telo)).hexdigest()
for i in(5,4,3,2,1):
print "[*] encrypted at",now
print "\n[*] result :",sha384
menu_item="sha512"
if menu_item == raw:
telo=raw_input('[*] text $ ')
sha512=hashlib.sha512('%s' % (telo)).hexdigest()
for i in(5,4,3,2,1):
print "[*] encrypted at",now
print "\n[*] result :",sha512
menu_item="base64"
if menu_item == raw:
telo=raw_input('[*] text $ ')
base64=base64.b64encode('%s' % (telo))
for i in(5,4,3,2,1):
print "[*] encoded at",now
print "\n[*] result :",base64
menu_item="md5"
if menu_item == raw:
telo=raw_input('[*] text $ ')
md5=hashlib.md5('%s' % (telo)).hexdigest()
for i in(1,2,3,4,5):
print "[*] encrypted at",now
print "\n[*] result :",md5
menu_item="sha256"
if menu_item == raw:
telo=raw_input('[*] text $ ')
sha256=hashlib.sha256('%s' % (telo)).hexdigest()
print
for i in(1,2,3,4,5):
print "[*] encrypted at",now
print "\n[*] result :",sha256
menu_item="sha1"
if menu_item == raw:
telo=raw_input('[*] text $ ')
sha1=hashlib.sha1('%s' % (telo)).hexdigest()
print
for i in(1,2,3,4,5):
print "[*] encrypted at",now
print "\n[*] result :",sha1
menu_item="base32"
if menu_item == raw:
ff=raw_input('[*] text or file $ ')
menu_fuck="text"
if menu_fuck == ff:
telo=raw_input('text $ ')
base32=base64.b32encode('%s' % (telo))
print
for i in(1,2,3,4,5):
print "[*] encoded at",now
print "\n[*] result :",base32
menu_ss="file"
if menu_ss == ff:
try:
print "[*] WARNING : if you encrypt this file your file original will be remove !"
fileno=raw_input('\n[*] file to encrypt $ ')
baca=open('%s' % (fileno), 'r')
ss=baca.read()
decrypt=base64.b32encode(ss)
simpan=open('text.enc','w')
simpan.writelines(decrypt)
simpan.close()
time.sleep(2)
for i in(5,4,3,2,1):
print "[*] encoded at",now
print "\n[*] saved to text.enc"
os.remove(fileno)
except IOError:
print "\n[*] no file found",fileno
sys.exit()
menu_telo="decrypt"
if menu_telo == menu:
print
print "----> base16"
print "----> base32"
print "----> base64"
print "----> cryptoDES"
print
oke=raw_input('[*] type and choice one $ ')
menu_telo="cryptoDES"
if menu_telo == oke:
try:
telo=raw_input('[*] file.encrypt : ')
iv=Random.get_random_bytes(8)
des1=DES.new('01234567', DES.MODE_CFB, iv)
des2=DES.new('01234567', DES.MODE_CFB, iv)
nama_file=open('%s' % (telo),'r')
ss=nama_file.read()
decs=des2.decrypt(ss)
save1=open('text.decrypt','w')
save1.writelines(decs)
save1.close()
time.sleep(2)
for i in(5,4,3,2,1):
print "[*] decrypted at",now
print "\n[*] saved file text.decrypt"
except IOError:
print "\n[*] Not found file encrypt",telo
menu_telo="base16"
if oke == menu_telo:
raw1=raw_input('[*] text base16 $ ')
dec16=base64.b16decode('%s' % (raw1))
for i in(5,4,3,2,1):
print "[*] decoded at",now
print "\n[*] result :",dec16
menu_telo="base32"
if oke == menu_telo:
ss=raw_input('[*] text or file $ ')
menu_gg="text"
if menu_gg == ss:
raw2=raw_input('[*] text base32 $ ')
print
dec32=base64.b32decode('%s' % (raw2))
for i in(5,4,3,2,1):
print "[*] decoded at",now
print "\n[*] result :",dec32
menu_hh="file"
if menu_hh == ss:
try:
fileno=raw_input('[*] file text.enc $ ')
print
fuck=open('%s' % (fileno), 'r')
anjir=fuck.read()
dec43=base64.b32decode(anjir)
telo=open('text.dec','w')
telo.writelines(dec43)
telo.close()
time.sleep(2)
for i in(5,4,3,2,1):
print "[*] decoded at",now
print "\n[*] save file text.dec"
os.remove(fileno)
except:
print "[*] Not found file enc "
menu_telo="base64" #this is Bug Sorry
if oke == menu_telo:#
raw3=raw_input('[*] text base64 $ ')#
dec64=base64.b64decode('%s' % (raw3))#
for i in (5,4,3,2,1):#
print "[*] decoded at",now#
print "\n[*] result :",dec64#
menu_telo="exit"
if menu_telo == oke:
print "[*] thanks for shopping"
sys.exit()
menu_item="exit"
if menu == menu_item:
print "[*] thanks for shopping"
sys.exit()
except KeyboardInterrupt:
print "\n[*] ctrl+c active "
sys.exit()
##### Finished #################################### Finished ##################
###############################################################################
#the bug is that crypto encryption cannot be decrypted yet, but i will try to repair and make#
#progam is the best ever #you can wait this progam to be version 2.0 #<|fim▁end|> | while True:
try:
menu=raw_input('\n[*] encrypt or decrypt $ ')
menu_item="update" |
<|file_name|>municipaltax.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import StringIO
import csv
from xml.etree.ElementTree import Element, SubElement, Comment, tostring
from xml.dom import minidom
import configdb
def prettify(elem):
"""Return a pretty-printed XML string for the Element.
"""
rough_string = tostring(elem, 'utf-8')
reparsed = minidom.parseString(rough_string)
return reparsed.toprettyxml(indent=" ")
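# Sketch of the expected behaviour (assuming a bare element):
#   prettify(Element('EMPRESA'))
# returns an indented document along the lines of '<?xml version="1.0" ?>\n<EMPRESA/>\n'.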
## SYNTAX
# script.py cities.csv 2015-01-01 2015-04-01 csv|xml
# cities.csv obtained from "Gestió agrupada impost 1.5%"
class MunicipalTaxesInvoicingReport:
def __init__(self, cursor, start_date, end_date, tax, aggregated):
self.cursor = cursor
self.start_date = start_date
self.end_date = end_date
self.tax = tax
self.aggregated = aggregated
def by_city(self, ids, file_type):
sql = '''
SELECT
municipi.name AS name,
municipi.ine AS ine,
EXTRACT(YEAR FROM invoice.date_invoice) AS invoice_year,
EXTRACT(QUARTER FROM invoice.date_invoice) AS invoice_quarter,
COALESCE(SUM(invoice_line.price_subtotal::float*(
CASE
WHEN factura_line.tipus IN ('subtotal_xml') AND invoice.type='in_invoice' THEN 1
WHEN factura_line.tipus IN ('subtotal_xml') AND invoice.type='in_refund' THEN -1
ELSE 0
END
)),0.0) AS provider_amount,
COALESCE(SUM(invoice_line.price_subtotal::float*(
CASE
WHEN factura_line.tipus IN ('energia','reactiva','potencia') AND invoice.type='out_invoice' THEN 1
WHEN factura_line.tipus IN ('energia','reactiva','potencia') AND invoice.type='out_refund' THEN -1
ELSE 0
END
)),0.0) AS client_amount
FROM giscedata_facturacio_factura_linia AS factura_line
LEFT JOIN account_invoice_line AS invoice_line ON invoice_line.id = factura_line.invoice_line_id
LEFT JOIN giscedata_facturacio_factura AS factura ON factura.id = factura_line.factura_id
LEFT JOIN account_invoice AS invoice ON invoice.id = factura.invoice_id
LEFT JOIN giscedata_polissa AS polissa ON polissa.id = factura.polissa_id
LEFT JOIN giscedata_cups_ps AS cups ON cups.id = polissa.cups
LEFT JOIN res_municipi as municipi on municipi.id = cups.id_municipi
WHERE municipi.ID IN ({0})
AND ((invoice.date_invoice >= '{1}') AND (invoice.date_invoice < '{2}'))
AND (((invoice.type LIKE 'out_%%')
AND ((invoice.state = 'open') OR (invoice.state = 'paid')))
OR (invoice.type LIKE 'in_%%'))
GROUP BY 1,2,3,4
ORDER BY 1,2,3,4
'''.format(','.join(map(str, ids)), self.start_date, self.end_date)
self.cursor.execute(sql, {'start_date': self.start_date,
'end_date': self.end_date,
'ids': ids})
return self.build_report(self.cursor.fetchall(), file_type)
def build_report(self, records, file_type):
invoicing_by_name = {}
invoicing_by_date = {}
ines = {}
for record in records:
name = record[0]
ine = record[1]
year = record[2]
quarter = record[3]
invoicing_by_name.setdefault(name, {'total_provider_amount': 0, 'total_client_amount': 0, 'quarters': []})
invoicing_by_name[name]['total_provider_amount'] += record[4]
invoicing_by_name[name]['total_client_amount'] += record[5]
invoicing_by_name[name]['quarters'].append({
'year': record[2],
'quarter': record[3],
'provider_amount': record[4],
'client_amount': record[5]
})
invoicing_by_date.setdefault(year, {})
invoicing_by_date[year].setdefault(quarter, {'total_provider_amount': 0, 'total_client_amount': 0})
invoicing_by_date[year][quarter]['total_provider_amount'] += record[4]
invoicing_by_date[year][quarter]['total_client_amount'] += record[5]
ines.setdefault(name, ine)
if file_type=='csv':
## CSV
csv_doc=StringIO.StringIO()
writer_report = csv.writer(csv_doc)
for name,v in sorted(invoicing_by_name.items()):
writer_report.writerow([name])
writer_report.writerow(['Año', 'Trimestre', 'Pagos a distribuidora', 'Facturas a clientes'])
for quarter in v['quarters']:
writer_report.writerow([
quarter['year'],
quarter['quarter'],
round(quarter['provider_amount'], 2),
round(quarter['client_amount'], 2)
])
writer_report.writerow([])
writer_report.writerow(['', '', '', '', 'Ingresos brutos', 'Tasa', 'Total'])
diff = v['total_client_amount'] - v['total_provider_amount']
writer_report.writerow(['Total',
'',
round(v['total_provider_amount'], 2),
round(v['total_client_amount'], 2),
round(diff, 2),
self.tax,
round(diff*(self.tax/100.0), 2)
])
writer_report.writerow([])
writer_report.writerow([])
writer_report.writerow(['Año', 'Trimestre', 'Pagos a distribuidora', 'Facturas a clientes', 'Ingresos',
'Tasa', 'Total'])
for year, v in sorted(invoicing_by_date.items()):
for quarter, v in sorted(invoicing_by_date[year].items()):
diff = v['total_client_amount'] - v['total_provider_amount']
writer_report.writerow([
year,
quarter,
round(v['total_provider_amount'], 2),
round(v['total_client_amount'], 2),
round(diff, 2),
self.tax,
round(diff*(self.tax/100.0), 2)
])
doc = csv_doc.getvalue()
if file_type == 'xml':
## XML
_empresa = Element("EMPRESA")
_datos = SubElement(_empresa, 'DATOS')
_nombre = SubElement(_datos, 'NOMBRE')
_nombre.text = "Som Energia SCCL"
_nif = SubElement(_datos, 'NIF')
_nif.text = "F55091367"
_municipios = SubElement(_empresa, 'MUNICIPIOS')
for name,v in sorted(invoicing_by_name.items()):
for quarter in v['quarters']:
_municipio = SubElement(_municipios, 'MUNICIPIO')
_ine = SubElement(_municipio, 'INEMUNICIPIO')
_ine.text = ines[name]
_ejercicio = SubElement(_municipio, 'EJERCICIO')
_ejercicio.text = str(int(quarter['year']))
_periodo = SubElement(_municipio, 'PERIODO')
_periodo.text = str(int(quarter['quarter']))
_fechaalta = SubElement(_municipio, 'FECHAALTA')
_fechabaja = SubElement(_municipio, 'FECHABAJA')
_tiposumin = SubElement(_municipio, 'TIPOSUMIN')
_tiposumin.text = '2'
_descsum = SubElement(_municipio, 'DESCSUM')
_descsum.text = 'Electricidad'
_descperi = SubElement(_municipio, 'DESCPERI')
_facturacion = SubElement(_municipio, 'FACTURACION')<|fim▁hole|> _derechosacceso.text = '%0.2f' % quarter['provider_amount']
_compensacion = SubElement(_municipio, 'COMPENSACION')
_compensacion.text = '0.00'
_baseimponible = SubElement(_municipio, 'BASEIMPONIBLE')
diff = (quarter['client_amount'] - quarter['provider_amount'])
_baseimponible.text = '%0.2f' % diff
_cuotabasica = SubElement(_municipio, 'CUOTABASICA')
_cuotabasica.text = '%0.2f' % (self.tax/100)
_totalingresar = SubElement(_municipio, 'TOTALINGRESAR')
_totalingresar.text = '%0.2f' % (diff*(self.tax/100.0))
doc = prettify(_empresa)
return doc
import psycopg2
import psycopg2.extras
import csv
import sys
municipis_file = sys.argv[1]
start_date = sys.argv[2]
end_date = sys.argv[3]
type_file = sys.argv[4]
municipis_id = []
with open(municipis_file, 'r') as csvfile:
reader = csv.reader(csvfile, delimiter=';')
for row in reader:
municipis_id.append(int(row[0]))
try:
dbconn=psycopg2.connect(**configdb.psycopg)
except Exception, ex:
print "Unable to connect to database " + configdb['DB_NAME']
raise ex
m = MunicipalTaxesInvoicingReport(dbconn.cursor(), start_date,end_date,1.5,False)
print m.by_city(municipis_id, type_file)<|fim▁end|> | _facturacion.text = '%0.2f' % quarter['client_amount']
_derechosacceso = SubElement(_municipio, 'DERECHOSACCESO') |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::io::IoError;
use std::error::{Error, FromError};
/// Possible parser errors
#[derive(Show, PartialEq)]
pub enum ParserErrorKind {
/// Parser met EOF before parsing a proper datum
UnexpectedEOF,
/// Unexpected token: the first string describes expected token, and the second describes
/// actual token
UnexpectedToken(String, String),
/// Lexer met character not allowed in source code
InvalidCharacter(char),
/// Parser met un-parseable token
InvalidToken(String),
/// Parser met IoError while reading the underlying stream
UnderlyingError(IoError)
}<|fim▁hole|>/// Parser error
#[derive(Show, PartialEq)]
pub struct ParserError {
pub line: usize,
pub column: usize,
pub kind: ParserErrorKind,
}
impl Error for ParserError {
fn description(&self) -> &str {
""
}
fn detail(&self) -> Option<String> {
None
}
fn cause(&self) -> Option<&Error> {
match self.kind {
ParserErrorKind::UnderlyingError(ref e) => Some(e as &Error),
_ => None
}
}
}
impl FromError<IoError> for ParserError {
fn from_error(err: IoError) -> ParserError {
ParserError {
line: 0,
column: 0,
kind: ParserErrorKind::UnderlyingError(err)
}
}
}
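// For example, a failed read on the underlying stream converts (via
// FromError::from_error) into a ParserError at line 0, column 0 whose kind is
// ParserErrorKind::UnderlyingError wrapping the original IoError.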
/// Possible compiler errors
#[derive(Show, PartialEq, Copy)]
pub enum CompileErrorKind {
/// The syntax is not implemented yet
NotImplemented,
/// Trying to evaluate `()`
NullEval,
/// Trying to evaluate non-proper list, such as `(a b c . d)`
DottedEval,
/// Expression body is non-proper list, such as `(a b c . d)`
DottedBody,
/// Invalid lambda syntax
BadLambdaSyntax,
/// Trying to apply non-function constant
NotCallable,
/// Trying to refer a syntax variable
SyntaxReference,
/// Trying to refer an unbound variable
UnboundVariable
}
/// Compiler error
#[derive(Show, PartialEq, Copy)]
pub struct CompileError {
pub kind: CompileErrorKind
}
/// Errors raised in runtime
#[derive(Show, PartialEq, Copy, Clone)]
pub enum RuntimeErrorKind {
/// Number of arguments did not match
NumArgs,
/// Argument type did not match
InvalidType
}
/// Errors raised in runtime
#[derive(Show, PartialEq, Clone)]
pub struct RuntimeError {
pub kind: RuntimeErrorKind,
pub desc: String
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>'''
ps = psi4.Solver
with psi4.quite_run():
ps.prepare_chkpt(mo_coeff, fock_on_mo, nelec, e_scf, nuclear_repulsion)
ecc = ps.energy('CCSD', c.shape[1], hcore_on_mo, eri_on_mo)<|fim▁hole|> rdm1, rdm2 = ps.density(mo_coeff.shape[1])
eccsdt = ps.energy('CCSD(T)', c.shape[1], hcore_on_mo, eri_on_mo)
rdm1, rdm2 = ps.density(mo_coeff.shape[1])
'''
from wrapper import *
__all__ = filter(lambda s: not s.startswith('_'), dir())<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from setuptools import setup
from gun import __version__<|fim▁hole|>setup(
name = 'gun',
version = __version__,
description = 'Gentoo Updates Notifier',
author = 'Andriy Yurchuk',
author_email = '[email protected]',
url = 'https://github.com/Ch00k/gun',
license = 'LICENSE.txt',
long_description = open('README.rst').read(),
entry_points = {
'console_scripts': [
'gun = gun.sync:main'
]
},
packages = ['gun'],
data_files = [('/etc/gun/', ['data/gun.conf'])],
install_requires = ['xmpppy >= 0.5.0-rc1']
)<|fim▁end|> | |
<|file_name|>fields.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import re
from collections import OrderedDict
from odoo import api, fields, models, _
from PIL import Image
from cStringIO import StringIO
import babel
from odoo.tools import html_escape as escape, posix_to_ldml, safe_eval, float_utils
from .qweb import unicodifier
import logging
_logger = logging.getLogger(__name__)
def nl2br(string):
""" Converts newlines to HTML linebreaks in ``string``. returns
the unicode result
:param str string:
:rtype: unicode
"""
return unicodifier(string).replace(u'\n', u'<br>\n')
def html_escape(string, options):
""" Automatically escapes content unless options['html-escape']
is set to False
:param str string:
:param dict options:
"""
return escape(string) if not options or options.get('html-escape', True) else string
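# For example, html_escape('<b>', {}) returns the escaped markup '&lt;b&gt;',
# while html_escape('<b>', {'html-escape': False}) returns '<b>' unchanged.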
#--------------------------------------------------------------------
# QWeb Fields converters
#--------------------------------------------------------------------
class FieldConverter(models.AbstractModel):
""" Used to convert a t-field specification into an output HTML field.
:meth:`~.to_html` is the entry point of this conversion from QWeb, it:
* converts the record value to html using :meth:`~.record_to_html`
* generates the metadata attributes (``data-oe-``) to set on the root
result node
* generates the root result node itself through :meth:`~.render_element`
"""
_name = 'ir.qweb.field'
@api.model
def attributes(self, record, field_name, options, values=None):
""" attributes(record, field_name, field, options, values)
Generates the metadata attributes (prefixed by ``data-oe-``) for the
root node of the field conversion.
The default attributes are:
* ``model``, the name of the record's model
* ``id`` the id of the record to which the field belongs
* ``type`` the logical field type (widget, may not match the field's
``type``, may not be any Field subclass name)
* ``translate``, a boolean flag (``0`` or ``1``) denoting whether the
field is translatable
* ``readonly``, has this attribute if the field is readonly
* ``expression``, the original expression
:returns: OrderedDict (attribute name, attribute value).
"""
data = OrderedDict()
field = record._fields[field_name]
if not options['inherit_branding'] and not options['translate']:
return data
data['data-oe-model'] = record._name
data['data-oe-id'] = record.id
data['data-oe-field'] = field.name
data['data-oe-type'] = options.get('type')
data['data-oe-expression'] = options.get('expression')
if field.readonly:
data['data-oe-readonly'] = 1
return data
@api.model
def value_to_html(self, value, options):
""" value_to_html(value, field, options=None)
Converts a single value to its HTML version/output
:rtype: unicode
"""
return html_escape(unicodifier(value) or u'', options)
@api.model
def record_to_html(self, record, field_name, options):
""" record_to_html(record, field_name, options)
Converts the specified field of the browse_record ``record`` to HTML
:rtype: unicode
"""
if not record:
return False
value = record[field_name]
return False if value is False else record.env[self._name].value_to_html(value, options=options)
@api.model
def user_lang(self):
""" user_lang()
Fetches the res.lang record corresponding to the language code stored
in the user's context. Fallbacks to en_US if no lang is present in the
context *or the language code is not valid*.
:returns: res.lang browse_record
"""
lang_code = self._context.get('lang') or 'en_US'
return self.env['res.lang']._lang_get(lang_code)
class IntegerConverter(models.AbstractModel):
_name = 'ir.qweb.field.integer'
_inherit = 'ir.qweb.field'
<|fim▁hole|> @api.model
def value_to_html(self, value, options):
return unicodifier(self.user_lang().format('%d', value, grouping=True))
class FloatConverter(models.AbstractModel):
_name = 'ir.qweb.field.float'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if 'decimal_precision' in options:
precision = self.env['decimal.precision'].search([('name', '=', options['decimal_precision'])]).digits
else:
precision = options['precision']
if precision is None:
fmt = '%f'
else:
value = float_utils.float_round(value, precision_digits=precision)
fmt = '%.{precision}f'.format(precision=precision)
formatted = self.user_lang().format(fmt, value, grouping=True)
# %f does not strip trailing zeroes. %g does but its precision causes
# it to switch to scientific notation starting at a million *and* to
# strip decimals. So use %f and if no precision was specified manually
# strip trailing 0.
if precision is None:
formatted = re.sub(r'(?:(0|\d+?)0+)$', r'\1', formatted)
return unicodifier(formatted)
@api.model
def record_to_html(self, record, field_name, options):
if 'precision' not in options and 'decimal_precision' not in options:
_, precision = record._fields[field_name].digits or (None, None)
options = dict(options, precision=precision)
return super(FloatConverter, self).record_to_html(record, field_name, options)
class DateConverter(models.AbstractModel):
_name = 'ir.qweb.field.date'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if not value or len(value) < 10:
return ''
lang = self.user_lang()
locale = babel.Locale.parse(lang.code)
if isinstance(value, basestring):
value = fields.Datetime.from_string(value[:10])
if options and 'format' in options:
pattern = options['format']
else:
strftime_pattern = lang.date_format
pattern = posix_to_ldml(strftime_pattern, locale=locale)
return babel.dates.format_date(value, format=pattern, locale=locale)
class DateTimeConverter(models.AbstractModel):
_name = 'ir.qweb.field.datetime'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if not value:
return ''
lang = self.user_lang()
locale = babel.Locale.parse(lang.code)
if isinstance(value, basestring):
value = fields.Datetime.from_string(value)
value = fields.Datetime.context_timestamp(self, value)
if options and 'format' in options:
pattern = options['format']
else:
strftime_pattern = (u"%s %s" % (lang.date_format, lang.time_format))
pattern = posix_to_ldml(strftime_pattern, locale=locale)
if options and options.get('hide_seconds'):
pattern = pattern.replace(":ss", "").replace(":s", "")
return unicodifier(babel.dates.format_datetime(value, format=pattern, locale=locale))
class TextConverter(models.AbstractModel):
_name = 'ir.qweb.field.text'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
"""
Escapes the value and converts newlines to br. This is bullshit.
"""
return nl2br(html_escape(value, options)) if value else ''
class SelectionConverter(models.AbstractModel):
_name = 'ir.qweb.field.selection'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if not value:
return ''
return html_escape(unicodifier(options['selection'][value]) or u'', options)
@api.model
def record_to_html(self, record, field_name, options):
if 'selection' not in options:
options = dict(options, selection=dict(record._fields[field_name].get_description(self.env)['selection']))
return super(SelectionConverter, self).record_to_html(record, field_name, options)
class ManyToOneConverter(models.AbstractModel):
_name = 'ir.qweb.field.many2one'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
if not value:
return False
value = value.sudo().display_name
if not value:
return False
return nl2br(html_escape(value, options)) if value else ''
class HTMLConverter(models.AbstractModel):
_name = 'ir.qweb.field.html'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
return unicodifier(value) or u''
class ImageConverter(models.AbstractModel):
""" ``image`` widget rendering, inserts a data:uri-using image tag in the
document. May be overridden by e.g. the website module to generate links
instead.
.. todo:: what happens if different output need different converters? e.g.
reports may need embedded images or FS links whereas website
needs website-aware
"""
_name = 'ir.qweb.field.image'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
try:
image = Image.open(StringIO(value.decode('base64')))
image.verify()
except IOError:
raise ValueError("Non-image binary fields can not be converted to HTML")
except: # image.verify() throws "suitable exceptions", I have no idea what they are
raise ValueError("Invalid image content")
return unicodifier('<img src="data:%s;base64,%s">' % (Image.MIME[image.format], value))
class MonetaryConverter(models.AbstractModel):
""" ``monetary`` converter, has a mandatory option
``display_currency`` only if field is not of type Monetary.
Otherwise, if we are in presence of a monetary field, the field definition must
have a currency_field attribute set.
The currency is used for formatting *and rounding* of the float value. It
is assumed that the linked res_currency has a non-empty rounding value and
res.currency's ``round`` method is used to perform rounding.
.. note:: the monetary converter internally adds the qweb context to its
options mapping, so that the context is available to callees.
It's set under the ``_values`` key.
"""
_name = 'ir.qweb.field.monetary'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
display_currency = options['display_currency']
# lang.format mandates a sprintf-style format. These formats are non-
# minimal (they have a default fixed precision instead), and
# lang.format will not set one by default. currency.round will not
# provide one either. So we need to generate a precision value
# (integer > 0) from the currency's rounding (a float generally < 1.0).
fmt = "%.{0}f".format(display_currency.decimal_places)
if options.get('from_currency'):
value = options['from_currency'].compute(value, display_currency)
lang = self.user_lang()
formatted_amount = lang.format(fmt, display_currency.round(value),
grouping=True, monetary=True).replace(r' ', u'\N{NO-BREAK SPACE}')
pre = post = u''
if display_currency.position == 'before':
pre = u'{symbol}\N{NO-BREAK SPACE}'.format(symbol=display_currency.symbol or '')
else:
post = u'\N{NO-BREAK SPACE}{symbol}'.format(symbol=display_currency.symbol or '')
return u'{pre}<span class="oe_currency_value">{0}</span>{post}'.format(formatted_amount, pre=pre, post=post)
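# A sketch of the rendered output (assuming a currency with symbol '$',
# position 'before', 2 decimal places and an en_US lang): a value of 1234.5
# renders as u'$\N{NO-BREAK SPACE}<span class="oe_currency_value">1,234.50</span>'.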
@api.model
def record_to_html(self, record, field_name, options):
options = dict(options)
#currency should be specified by monetary field
field = record._fields[field_name]
if not options.get('display_currency') and field.type == 'monetary' and field.currency_field:
options['display_currency'] = record[field.currency_field]
return self.value_to_html(record[field_name], options)
TIMEDELTA_UNITS = (
('year', 3600 * 24 * 365),
('month', 3600 * 24 * 30),
('week', 3600 * 24 * 7),
('day', 3600 * 24),
('hour', 3600),
('minute', 60),
('second', 1)
)
class DurationConverter(models.AbstractModel):
""" ``duration`` converter, to display integral or fractional values as
human-readable time spans (e.g. 1.5 as "1 hour 30 minutes").
Can be used on any numerical field.
Has a mandatory option ``unit`` which can be one of ``second``, ``minute``,
``hour``, ``day``, ``week`` or ``year``, used to interpret the numerical
field value before converting it.
Sub-second values will be ignored.
"""
_name = 'ir.qweb.field.duration'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
units = dict(TIMEDELTA_UNITS)
if value < 0:
raise ValueError(_("Durations can't be negative"))
if not options or options.get('unit') not in units:
raise ValueError(_("A unit must be provided to duration widgets"))
locale = babel.Locale.parse(self.user_lang().code)
factor = units[options['unit']]
sections = []
r = value * factor
if options.get('round') in units:
round_to = units[options['round']]
r = round(r / round_to) * round_to
for unit, secs_per_unit in TIMEDELTA_UNITS:
v, r = divmod(r, secs_per_unit)
if not v:
continue
section = babel.dates.format_timedelta(
v*secs_per_unit, threshold=1, locale=locale)
if section:
sections.append(section)
return u' '.join(sections)
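# For instance, with options {'unit': 'hour'} a value of 1.5 is rendered as
# "1 hour 30 minutes" in an en_US locale; sub-second remainders are dropped.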
class RelativeDatetimeConverter(models.AbstractModel):
_name = 'ir.qweb.field.relative'
_inherit = 'ir.qweb.field'
@api.model
def value_to_html(self, value, options):
locale = babel.Locale.parse(self.user_lang().code)
if isinstance(value, basestring):
value = fields.Datetime.from_string(value)
# value should be a naive datetime in UTC. So is fields.Datetime.now()
reference = fields.Datetime.from_string(options['now'])
return unicodifier(babel.dates.format_timedelta(value - reference, add_direction=True, locale=locale))
@api.model
def record_to_html(self, record, field_name, options):
if 'now' not in options:
options = dict(options, now=record._fields[field_name].now())
return super(RelativeDatetimeConverter, self).record_to_html(record, field_name, options)
class Contact(models.AbstractModel):
_name = 'ir.qweb.field.contact'
_inherit = 'ir.qweb.field.many2one'
@api.model
def value_to_html(self, value, options):
if not value.exists():
return False
opf = options and options.get('fields') or ["name", "address", "phone", "mobile", "fax", "email"]
value = value.sudo().with_context(show_address=True)
name_get = value.name_get()[0][1]
val = {
'name': name_get.split("\n")[0],
'address': escape("\n".join(name_get.split("\n")[1:])).strip(),
'phone': value.phone,
'mobile': value.mobile,
'fax': value.fax,
'city': value.city,
'country_id': value.country_id.display_name,
'website': value.website,
'email': value.email,
'fields': opf,
'object': value,
'options': options
}
return self.env['ir.qweb'].render('base.contact', val)
class QwebView(models.AbstractModel):
_name = 'ir.qweb.field.qweb'
_inherit = 'ir.qweb.field.many2one'
@api.model
def record_to_html(self, record, field_name, options):
if not getattr(record, field_name):
return None
view = getattr(record, field_name)
if view._name != "ir.ui.view":
_logger.warning("%s.%s must be a 'ir.ui.view' model." % (record, field_name))
return None
view = view.with_context(object=record)
return unicodifier(view.render(view._context, engine='ir.qweb'))<|fim▁end|> | |
<|file_name|>timetrav.cpp<|end_file_name|><|fim▁begin|>/*
* ____ DAPHNE COPYRIGHT NOTICE ____
*
* Copyright (C) 2005 Mark Broadhead
*
* This file is part of DAPHNE, a laserdisc arcade game emulator
*
* DAPHNE is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* DAPHNE is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "config.h"
#include <string.h> // for memset
#include <stdio.h> // for sprintf (we shouldn't use sprintf anymore)
#include "timetrav.h"
#include "../ldp-out/ldp.h"
#include "../io/conout.h"
#include "../sound/sound.h"
#include "../video/palette.h"
#include "../video/video.h"
// Time Traveler
timetrav::timetrav()
{
m_shortgamename = "timetrav";
memset(m_cpumem, 0, cpu::MEM_SIZE);
struct cpu::def cpu;
memset(&cpu, 0, sizeof(struct cpu::def));
cpu.type = cpu::type::I88;
cpu.hz = TIMETRAV_CPU_HZ;
cpu.irq_period[0] = 0;
cpu.irq_period[1] = 0;
cpu.nmi_period = (1000.0 / 59.94);
cpu.initial_pc = 0xFFFF0;
cpu.must_copy_context = false;
cpu.mem = m_cpumem;
cpu::add(&cpu); // add this cpu to the list (it will be our only one)
m_disc_fps = 29.97;
// m_game_type = GAME_TIMETRAV;
m_game_uses_video_overlay = true;
m_video_overlay_width = 320; // default values we start with for video
// overlay
m_video_overlay_height = 240;
m_palette_color_count = 256;
m_video_overlay_count = 1;
m_overlay_size_is_dynamic = true; // this game does dynamically change its
// overlay size
static struct rom_def g_timetrav_roms[] = {{"TT061891.BIN", NULL,
&m_cpumem[0xc0000], 0x40000, 0x00000000},
{NULL}};
m_rom_list = g_timetrav_roms;
}
void timetrav::do_nmi() {}
Uint8 timetrav::cpu_mem_read(Uint32 addr)
{
char s[80];
Uint8 result = m_cpumem[addr];
// Scratch ram
if (addr < 0x10000) {
}
// ROM
else if (addr >= 0xc0000) {
} else {
sprintf(s, "Unmapped read from %x", addr);
printline(s);
}
return (result);
}
void timetrav::cpu_mem_write(Uint32 addr, Uint8 value)
{
char s[80];
m_cpumem[addr] = value;
// Scratch ram
if (addr < 0x10000) {
}
// ROM
else if (addr >= 0xc0000) {
sprintf(s, "Write to rom at %x with %x!", addr, value);
printline(s);
} else {
sprintf(s, "Unmapped write to %x with %x", addr, value);
printline(s);
}
}
void timetrav::port_write(Uint16 port, Uint8 value)
{
char s[80];
//static char display_string[9] = {0};
switch (port) {
case 0x1180:
case 0x1181:
case 0x1182:
case 0x1183:
case 0x1184:
case 0x1185:
case 0x1186:
case 0x1187:
m_video_overlay_needs_update = true;
//display_string[port & 0x07] = value;
//draw_string(display_string, 0, 0, get_active_video_overlay());
blit();
break;
default:
sprintf(s, "Unmapped write to port %x, value %x", port, value);
printline(s);
break;
}
}
Uint8 timetrav::port_read(Uint16 port)
{
char s[80];
unsigned char result = 0;
sprintf(s, "Unmapped read from port %x", port);
printline(s);
return (result);
}
// used to set dip switch values
bool timetrav::set_bank(unsigned char which_bank, unsigned char value)
{
bool result = true;
if (which_bank == 0) {
} else {
printline("ERROR: Bank specified is out of range!");
result = false;
}
return result;
}
<|fim▁hole|>
void timetrav::palette_calculate()
{
SDL_Color temp_color;
// fill color palette with schlop because we only use colors 0 and 0xFF for
// now
for (int i = 0; i < 256; i++) {
temp_color.r = (unsigned char)i;
temp_color.g = (unsigned char)i;
temp_color.b = (unsigned char)i;
palette::set_color(i, temp_color);
}
}<|fim▁end|> | void timetrav::input_disable(Uint8 move) {}
void timetrav::input_enable(Uint8 move) {} |
<|file_name|>csharp.js<|end_file_name|><|fim▁begin|>/*
* _ _ _
* | | | | | |
* | | __ _| |__ ___ ___ __ _| |_ Labcoat (R)
* | |/ _` | '_ \ / __/ _ \ / _` | __| Powerful development environment for Quirrel.
* | | (_| | |_) | (_| (_) | (_| | |_ Copyright (C) 2010 - 2013 SlamData, Inc.
* |_|\__,_|_.__/ \___\___/ \__,_|\__| All Rights Reserved.
*
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version <|fim▁hole|> * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
define(function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var Tokenizer = require("../tokenizer").Tokenizer;
var CSharpHighlightRules = require("./csharp_highlight_rules").CSharpHighlightRules;
var MatchingBraceOutdent = require("./matching_brace_outdent").MatchingBraceOutdent;
var CstyleBehaviour = require("./behaviour/cstyle").CstyleBehaviour;
var CStyleFoldMode = require("./folding/cstyle").FoldMode;
var Mode = function() {
this.$tokenizer = new Tokenizer(new CSharpHighlightRules().getRules());
this.$outdent = new MatchingBraceOutdent();
this.$behaviour = new CstyleBehaviour();
this.foldingRules = new CStyleFoldMode();
};
oop.inherits(Mode, TextMode);
(function() {
this.lineCommentStart = "//";
this.blockComment = {start: "/*", end: "*/"};
this.getNextLineIndent = function(state, line, tab) {
var indent = this.$getIndent(line);
var tokenizedLine = this.$tokenizer.getLineTokens(line, state);
var tokens = tokenizedLine.tokens;
if (tokens.length && tokens[tokens.length-1].type == "comment") {
return indent;
}
if (state == "start") {
var match = line.match(/^.*[\{\(\[]\s*$/);
if (match) {
indent += tab;
}
}
return indent;
};
this.checkOutdent = function(state, line, input) {
return this.$outdent.checkOutdent(line, input);
};
this.autoOutdent = function(state, doc, row) {
this.$outdent.autoOutdent(doc, row);
};
this.createWorker = function(session) {
return null;
};
}).call(Mode.prototype);
exports.Mode = Mode;
});<|fim▁end|> | * 3 of the License, or (at your option) any later version.
* |
<|file_name|>discovery-wrapper.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python2
"""
discovery-wrapper A small tool which wraps around discovery and tries to
guide the discovery process with a more modern approach with a
Queue and workers.
Based on the original version of poller-wrapper.py by Job Snijders
Author: Neil Lathwood <[email protected]>
Date: Sep 2016
Usage: This program accepts one command line argument: the number of threads
that should run simultaneously. If no argument is given it will assume
a default of 1 thread.
Ubuntu Linux: apt-get install python-mysqldb
FreeBSD: cd /usr/ports/*/py-MySQLdb && make install clean
License: This program is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/.
LICENSE.txt contains a copy of the full GPLv3 licensing conditions.
"""
try:
import json
import os
import Queue
import subprocess
import sys
import threading
import time
except:
print "ERROR: missing one or more of the following python modules:"
print "threading, Queue, sys, subprocess, time, os, json"
sys.exit(2)
try:
import MySQLdb
except:
print "ERROR: missing the mysql python module:"
print "On ubuntu: apt-get install python-mysqldb"
print "On FreeBSD: cd /usr/ports/*/py-MySQLdb && make install clean"
sys.exit(2)
"""
Fetch configuration details from the config_to_json.php script
"""
install_dir = os.path.dirname(os.path.realpath(__file__))
config_file = install_dir + '/config.php'
def get_config_data():
config_cmd = ['/usr/bin/env', 'php', '%s/config_to_json.php' % install_dir]
try:
proc = subprocess.Popen(config_cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
except:
print "ERROR: Could not execute: %s" % config_cmd
sys.exit(2)
return proc.communicate()[0]
try:
with open(config_file) as f:
pass
except IOError as e:
print "ERROR: Oh dear... %s does not seem readable" % config_file
sys.exit(2)
try:
config = json.loads(get_config_data())
except:
print "ERROR: Could not load or parse configuration, are PATHs correct?"
sys.exit(2)
discovery_path = config['install_dir'] + '/discovery.php'
db_username = config['db_user']
db_password = config['db_pass']
db_port = int(config['db_port'])
if config['db_socket']:
db_server = config['db_host']
db_socket = config['db_socket']
else:
db_server = config['db_host']
db_socket = None
db_dbname = config['db_name']
def db_open():
try:
if db_socket:
db = MySQLdb.connect(host=db_server, unix_socket=db_socket, user=db_username, passwd=db_password, db=db_dbname)
else:
db = MySQLdb.connect(host=db_server, port=db_port, user=db_username, passwd=db_password, db=db_dbname)
return db
except:
print "ERROR: Could not connect to MySQL database!"
sys.exit(2)
# (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC1
if 'distributed_poller_group' in config:
discovery_group = str(config['distributed_poller_group'])
else:
discovery_group = False
def memc_alive():
try:
global memc
key = str(uuid.uuid4())
memc.set('discovery.ping.' + key, key, 60)
if memc.get('discovery.ping.' + key) == key:
memc.delete('discovery.ping.' + key)
return True
else:
return False
except:
return False
def memc_touch(key, time):
try:
global memc
val = memc.get(key)
memc.set(key, val, time)
except:
pass
if ('distributed_poller' in config and
'distributed_poller_memcached_host' in config and
'distributed_poller_memcached_port' in config and
config['distributed_poller']):
try:
import memcache
import uuid
memc = memcache.Client([config['distributed_poller_memcached_host'] + ':' +
str(config['distributed_poller_memcached_port'])])
if str(memc.get("discovery.master")) == config['distributed_poller_name']:
print "This system is already joined as the discovery master."
sys.exit(2)
if memc_alive():
if memc.get("discovery.master") is None:
print "Registered as Master"
memc.set("discovery.master", config['distributed_poller_name'], 30)
memc.set("discovery.nodes", 0, 3600)
IsNode = False
else:
print "Registered as Node joining Master %s" % memc.get("discovery.master")
IsNode = True
memc.incr("discovery.nodes")
distdisco = True
else:
print "Could not connect to memcached, disabling distributed discovery."
distdisco = False
IsNode = False
except SystemExit:
raise
except ImportError:
print "ERROR: missing memcache python module:"
print "On deb systems: apt-get install python-memcache"
print "On other systems: easy_install python-memcached"
print "Disabling distributed discovery."
distdisco = False
else:
distdisco = False
# EOC1
s_time = time.time()
real_duration = 0
per_device_duration = {}
discovered_devices = 0
"""
Take the number of threads we want to run in parallel from the commandline;
if none is given, or the argument is garbage, fall back to the default of 1
"""
try:
amount_of_workers = int(sys.argv[1])
if amount_of_workers == 0:
print "ERROR: 0 threads is not a valid value"
sys.exit(2)
except:
amount_of_workers = 1
devices_list = []
"""
This query specifically orders the results by the last_polled_timetaken variable,
because this way we put the devices likely to be slow at the top of the queue,
thus increasing our chances of completing _all_ the work in exactly the time it takes to
discover the slowest device! Cool stuff, eh?
"""
# (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC2
if discovery_group is not False:
query = "select device_id from devices where poller_group IN(" + discovery_group + ") and disabled = 0 order by last_polled_timetaken desc"
else:
query = "select device_id from devices where disabled = 0 order by last_polled_timetaken desc"
# EOC2
db = db_open()
cursor = db.cursor()
cursor.execute(query)
devices = cursor.fetchall()
for row in devices:
devices_list.append(int(row[0]))
# (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC3
if distdisco and not IsNode:
query = "select max(device_id),min(device_id) from devices"
cursor.execute(query)
devices = cursor.fetchall()
maxlocks = devices[0][0]
minlocks = devices[0][1]
# EOC3
db.close()
"""
A separate queue and a single worker for printing information to the screen prevents
the good old joke:
Some people, when confronted with a problem, think,
"I know, I'll use threads," and then they two they hav erpoblesms.
"""
def printworker():
nodeso = 0
while True:
# (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC4
global IsNode
global distdisco
if distdisco:
if not IsNode:
memc_touch('discovery.master', 30)
nodes = memc.get('discovery.nodes')
if nodes is None and not memc_alive():
print "WARNING: Lost Memcached. Taking over all devices. Nodes will quit shortly."
distdisco = False
nodes = nodeso
if nodes is not nodeso:
print "INFO: %s Node(s) Total" % (nodes)
nodeso = nodes
else:
memc_touch('discovery.nodes', 30)
try:
worker_id, device_id, elapsed_time = print_queue.get(False)
except:
pass<|fim▁hole|> continue
else:
worker_id, device_id, elapsed_time = print_queue.get()
# EOC4
global real_duration
global per_device_duration
global discovered_devices
real_duration += elapsed_time
per_device_duration[device_id] = elapsed_time
discovered_devices += 1
if elapsed_time < 300:
print "INFO: worker %s finished device %s in %s seconds" % (worker_id, device_id, elapsed_time)
else:
print "WARNING: worker %s finished device %s in %s seconds" % (worker_id, device_id, elapsed_time)
print_queue.task_done()
"""
This function will fork off single instances of the discovery.php process, record
how long it takes, and push the resulting reports to the printer queue
"""
def poll_worker():
while True:
device_id = poll_queue.get()
# (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC5
if not distdisco or memc.get('discovery.device.' + str(device_id)) is None:
if distdisco:
result = memc.add('discovery.device.' + str(device_id), config['distributed_poller_name'], 300)
if not result:
print "This device (%s) appears to be being discovered by another discovery node" % (device_id)
poll_queue.task_done()
continue
if not memc_alive() and IsNode:
print "Lost Memcached, Not discovering Device %s as Node. Master will discover it." % device_id
poll_queue.task_done()
continue
# EOC5
try:
start_time = time.time()
command = "/usr/bin/env php %s -h %s >> /dev/null 2>&1" % (discovery_path, device_id)
subprocess.check_call(command, shell=True)
elapsed_time = int(time.time() - start_time)
print_queue.put([threading.current_thread().name, device_id, elapsed_time])
except (KeyboardInterrupt, SystemExit):
raise
except:
pass
poll_queue.task_done()
poll_queue = Queue.Queue()
print_queue = Queue.Queue()
print "INFO: starting the discovery at %s with %s threads, slowest devices first" % (time.strftime("%Y-%m-%d %H:%M:%S"),
amount_of_workers)
for device_id in devices_list:
poll_queue.put(device_id)
for i in range(amount_of_workers):
t = threading.Thread(target=poll_worker)
t.setDaemon(True)
t.start()
p = threading.Thread(target=printworker)
p.setDaemon(True)
p.start()
try:
poll_queue.join()
print_queue.join()
except (KeyboardInterrupt, SystemExit):
raise
total_time = int(time.time() - s_time)
print "INFO: discovery-wrapper polled %s devices in %s seconds with %s workers" % (discovered_devices, total_time, amount_of_workers)
# (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC6
if distdisco or memc_alive():
master = memc.get("discovery.master")
if master == config['distributed_poller_name'] and not IsNode:
print "Wait for all discovery-nodes to finish"
nodes = memc.get("discovery.nodes")
while nodes > 0 and nodes is not None:
try:
time.sleep(1)
nodes = memc.get("discovery.nodes")
except:
pass
print "Clearing Locks"
x = minlocks
while x <= maxlocks:
memc.delete('discovery.device.' + str(x))
x = x + 1
print "%s Locks Cleared" % x
print "Clearing Nodes"
memc.delete("discovery.master")
memc.delete("discovery.nodes")
else:
memc.decr("discovery.nodes")
print "Finished %s." % time.time()
# EOC6
show_stopper = False
if total_time > 21600:
print "WARNING: the process took more than 6 hours to finish, you need faster hardware or more threads"
print "INFO: in sequential style discovery the elapsed time would have been: %s seconds" % real_duration
for device in per_device_duration:
if per_device_duration[device] > 3600:
print "WARNING: device %s is taking too long: %s seconds" % (device, per_device_duration[device])
show_stopper = True
if show_stopper:
print "ERROR: Some devices are taking more than 3600 seconds, the script cannot recommend you what to do."
else:
recommend = int(total_time / 300.0 * amount_of_workers + 1)
print "WARNING: Consider setting a minimum of %d threads. (This does not constitute professional advice!)" % recommend
sys.exit(2)<|fim▁end|> | try:
time.sleep(1)
except:
pass |
<|file_name|>test_tags.py<|end_file_name|><|fim▁begin|>try:
import unittest2 as unittest # Python2.6
except ImportError:
import unittest
from tests.functional import test_base
@unittest.skipIf(test_base.get_test_server_api() == 1,
"The tag API didn't work at v1 - see frontend issue #927")
class TestTags(test_base.TestBase):
testcase_name = "tag API"
def test_create_delete(self, tag_id="create_tag"):
"""
Create a tag then delete it.<|fim▁hole|> self.assertTrue(self.client.tag.create(tag_id))
# Check that the tag doesn't exist (It has no photos, so it's invisible)
self.assertNotIn(tag_id, [t.id for t in self.client.tags.list()])
# Create a tag on one of the photos
self.photos[0].update(tagsAdd=tag_id)
# Check that the tag now exists
self.assertIn(tag_id, [t.id for t in self.client.tags.list()])
# Delete the tag
self.assertTrue(self.client.tag.delete(tag_id))
# Check that the tag is now gone
self.assertNotIn(tag_id, [t.id for t in self.client.tags.list()])
# Also remove the tag from the photo
self.photos[0].update(tagsRemove=tag_id)
# Create the tag again
self.photos[0].update(tagsAdd=tag_id)
self.assertIn(tag_id, [t.id for t in self.client.tags.list()])
# Delete using the tag object directly
tag = [t for t in self.client.tags.list() if t.id == tag_id][0]
self.assertTrue(tag.delete())
# Check that the tag is now gone
self.assertNotIn(tag_id, [t.id for t in self.client.tags.list()])
# Also remove the tag from the photo
self.photos[0].update(tagsRemove=tag_id)
# TODO: Un-skip and update this test once there are tag fields
# that can be updated (the owner field cannot be updated).
@unittest.skip("Can't test the tag.update endpoint, "
"since there are no fields that can be updated")
def test_update(self):
""" Test that a tag can be updated """
# Update the tag using the Trovebox class, passing in the tag object
owner = "[email protected]"
ret_val = self.client.tag.update(self.tags[0], owner=owner)
# Check that the tag is updated
self.tags = self.client.tags.list()
self.assertEqual(self.tags[0].owner, owner)
self.assertEqual(ret_val.owner, owner)
# Update the tag using the Trovebox class, passing in the tag id
owner = "[email protected]"
ret_val = self.client.tag.update(self.TEST_TAG, owner=owner)
# Check that the tag is updated
self.tags = self.client.tags.list()
self.assertEqual(self.tags[0].owner, owner)
self.assertEqual(ret_val.owner, owner)
# Update the tag using the Tag object directly
owner = "[email protected]"
ret_val = self.tags[0].update(owner=owner)
# Check that the tag is updated
self.tags = self.client.tags.list()
self.assertEqual(self.tags[0].owner, owner)
self.assertEqual(ret_val.owner, owner)
def test_tag_with_spaces(self):
""" Run test_create_delete using a tag containing spaces """
self.test_create_delete("tag with spaces")
def test_tag_with_slashes(self):
""" Run test_create_delete using a tag containing slashes """
self.test_create_delete("tag/with/slashes")
# TODO: Un-skip this test once issue #919 is resolved -
# tags with double-slashes cannot be deleted
@unittest.skip("Tags with double-slashed cannot be deleted")
def test_tag_with_double_slashes(self):
""" Run test_create_delete using a tag containing double-slashes """
self.test_create_delete("tag//with//double//slashes")<|fim▁end|> | This test is a little contrived, since the tag create/delete
endpoints are only intended for internal use.
"""
# Create a tag |
<|file_name|>position.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%! from data import to_rust_ident %>
<%namespace name="helpers" file="/helpers.mako.rs" />
<% from data import ALL_SIZES, PHYSICAL_SIDES, LOGICAL_SIDES %>
<% data.new_style_struct("Position", inherited=False) %>
// "top" / "left" / "bottom" / "right"
% for side in PHYSICAL_SIDES:
${helpers.predefined_type(side, "LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
spec="https://www.w3.org/TR/CSS2/visuren.html#propdef-%s" % side,
animatable=True)}
% endfor
// offset-* logical properties, map to "top" / "left" / "bottom" / "right"
% for side in LOGICAL_SIDES:
${helpers.predefined_type("offset-%s" % side, "LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
spec="https://drafts.csswg.org/css-logical-props/#propdef-offset-%s" % side,
animatable=True, logical=True)}
% endfor
<%helpers:longhand name="z-index" spec="https://www.w3.org/TR/CSS2/visuren.html#z-index" animatable="True">
use values::HasViewportPercentage;
use values::computed::ComputedValueAsSpecified;
impl ComputedValueAsSpecified for SpecifiedValue {}
no_viewport_percentage!(SpecifiedValue);
pub type SpecifiedValue = computed_value::T;
pub mod computed_value {
use std::fmt;
use style_traits::ToCss;
#[derive(PartialEq, Clone, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum T {
Auto,
Number(i32),
}
impl ToCss for T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
T::Auto => dest.write_str("auto"),
T::Number(number) => write!(dest, "{}", number),
}
}
}
impl T {
pub fn number_or_zero(self) -> i32 {
match self {
T::Auto => 0,
T::Number(value) => value,
}
}
}
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
computed_value::T::Auto
}
fn parse(_context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
if input.try(|input| input.expect_ident_matching("auto")).is_ok() {
Ok(computed_value::T::Auto)
} else {
specified::parse_integer(input).map(computed_value::T::Number)
}
}
</%helpers:longhand>
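// For example, `z-index: auto` parses to computed_value::T::Auto (for which
// number_or_zero() yields 0), while `z-index: 3` parses to T::Number(3).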
// CSS Flexible Box Layout Module Level 1
// http://www.w3.org/TR/css3-flexbox/
// Flex container properties
${helpers.single_keyword("flex-direction", "row row-reverse column column-reverse",
spec="https://drafts.csswg.org/css-flexbox/#flex-direction-property",
extra_prefixes="webkit", animatable=False)}
${helpers.single_keyword("flex-wrap", "nowrap wrap wrap-reverse",
spec="https://drafts.csswg.org/css-flexbox/#flex-wrap-property",
extra_prefixes="webkit", animatable=False)}
% if product == "servo":
// FIXME: Update Servo to support the same Syntax as Gecko.
${helpers.single_keyword("justify-content", "stretch flex-start flex-end center space-between space-around",
extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-align/#propdef-justify-content",
animatable=False)}
% else:
${helpers.predefined_type(name="justify-content",
type="AlignJustifyContent",
initial_value="specified::AlignJustifyContent::normal()",
spec="https://drafts.csswg.org/css-align/#propdef-justify-content",
extra_prefixes="webkit",
animatable=False)}
% endif
% if product == "servo":
// FIXME: Update Servo to support the same Syntax as Gecko.
${helpers.single_keyword("align-content", "stretch flex-start flex-end center space-between space-around",
extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-align/#propdef-align-content",
animatable=False)}
${helpers.single_keyword("align-items",
"stretch flex-start flex-end center baseline",
need_clone=True,
extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-flexbox/#align-items-property",
animatable=False)}
% else:
${helpers.predefined_type(name="align-content",
type="AlignJustifyContent",
initial_value="specified::AlignJustifyContent::normal()",
spec="https://drafts.csswg.org/css-align/#propdef-align-content",
extra_prefixes="webkit",
animatable=False)}
${helpers.predefined_type(name="align-items",
type="AlignItems",
initial_value="specified::AlignItems::normal()",
spec="https://drafts.csswg.org/css-align/#propdef-align-items",
extra_prefixes="webkit",
animatable=False)}
${helpers.predefined_type(name="justify-items",
type="JustifyItems",<|fim▁hole|> initial_value="specified::JustifyItems::auto()",
spec="https://drafts.csswg.org/css-align/#propdef-justify-items",
animatable=False)}
% endif
// Flex item properties
${helpers.predefined_type("flex-grow", "Number",
"0.0", "parse_non_negative",
spec="https://drafts.csswg.org/css-flexbox/#flex-grow-property",
extra_prefixes="webkit",
needs_context=False,
animatable=True)}
${helpers.predefined_type("flex-shrink", "Number",
"1.0", "parse_non_negative",
spec="https://drafts.csswg.org/css-flexbox/#flex-shrink-property",
extra_prefixes="webkit",
needs_context=False,
animatable=True)}
// https://drafts.csswg.org/css-align/#align-self-property
% if product == "servo":
// FIXME: Update Servo to support the same syntax as Gecko.
${helpers.single_keyword("align-self", "auto stretch flex-start flex-end center baseline",
need_clone=True,
extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-flexbox/#propdef-align-self",
animatable=False)}
% else:
${helpers.predefined_type(name="align-self",
type="AlignJustifySelf",
initial_value="specified::AlignJustifySelf::auto()",
spec="https://drafts.csswg.org/css-align/#align-self-property",
extra_prefixes="webkit",
animatable=False)}
${helpers.predefined_type(name="justify-self",
type="AlignJustifySelf",
initial_value="specified::AlignJustifySelf::auto()",
spec="https://drafts.csswg.org/css-align/#justify-self-property",
animatable=False)}
% endif
// https://drafts.csswg.org/css-flexbox/#propdef-order
<%helpers:longhand name="order" animatable="True" extra_prefixes="webkit"
spec="https://drafts.csswg.org/css-flexbox/#order-property">
use values::computed::ComputedValueAsSpecified;
impl ComputedValueAsSpecified for SpecifiedValue {}
pub type SpecifiedValue = computed_value::T;
pub mod computed_value {
pub type T = i32;
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
0
}
fn parse(_context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
specified::parse_integer(input)
}
</%helpers:longhand>
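// Illustrative note (not in the original file): the `parse` function above
// accepts any CSS <integer> (e.g. `order: -1` or `order: 3`); any other
// token is rejected by `specified::parse_integer`.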
// FIXME: This property should be animatable.
${helpers.predefined_type("flex-basis",
"LengthOrPercentageOrAutoOrContent",
"computed::LengthOrPercentageOrAutoOrContent::Auto",
spec="https://drafts.csswg.org/css-flexbox/#flex-basis-property",
extra_prefixes="webkit",
animatable=False)}
% for (size, logical) in ALL_SIZES:
<%
spec = "https://drafts.csswg.org/css-box/#propdef-%s"
if logical:
spec = "https://drafts.csswg.org/css-logical-props/#propdef-%s"
%>
// width, height, block-size, inline-size
${helpers.predefined_type("%s" % size,
"LengthOrPercentageOrAuto",
"computed::LengthOrPercentageOrAuto::Auto",
"parse_non_negative",
needs_context=False,
spec=spec % size,
                          animatable=True, logical=logical)}
// min-width, min-height, min-block-size, min-inline-size
${helpers.predefined_type("min-%s" % size,
"LengthOrPercentage",
"computed::LengthOrPercentage::Length(Au(0))",
"parse_non_negative",
needs_context=False,
spec=spec % ("min-%s" % size),
                          animatable=True, logical=logical)}
// max-width, max-height, max-block-size, max-inline-size
${helpers.predefined_type("max-%s" % size,
"LengthOrPercentageOrNone",
"computed::LengthOrPercentageOrNone::None",
"parse_non_negative",
needs_context=False,
spec=spec % ("max-%s" % size),
                          animatable=True, logical=logical)}
% endfor
${helpers.single_keyword("box-sizing",
"content-box border-box",
extra_prefixes="moz webkit",
spec="https://drafts.csswg.org/css-ui/#propdef-box-sizing",
animatable=False)}
${helpers.single_keyword("object-fit", "fill contain cover none scale-down",
products="gecko", animatable=False,
spec="https://drafts.csswg.org/css-images/#propdef-object-fit")}
${helpers.predefined_type("object-position",
"Position",
"computed::Position::zero()",
products="gecko",
boxed="True",
spec="https://drafts.csswg.org/css-images-3/#the-object-position",
animatable=True)}
% for kind in ["row", "column"]:
${helpers.predefined_type("grid-%s-gap" % kind,
"LengthOrPercentage",
"computed::LengthOrPercentage::Length(Au(0))",
spec="https://drafts.csswg.org/css-grid/#propdef-grid-%s-gap" % kind,
animatable=True,
products="gecko")}
% for range in ["start", "end"]:
${helpers.predefined_type("grid-%s-%s" % (kind, range),
"GridLine",
"Default::default()",
animatable=False,
spec="https://drafts.csswg.org/css-grid/#propdef-grid-%s-%s" % (kind, range),
products="gecko",
boxed=True)}
% endfor
// NOTE: According to the spec, this should handle multiple values of `<track-size>`,
// but gecko supports only a single value
${helpers.predefined_type("grid-auto-%ss" % kind,
"TrackSize",
"Default::default()",
animatable=False,
spec="https://drafts.csswg.org/css-grid/#propdef-grid-auto-%ss" % kind,
products="gecko",
boxed=True)}
% endfor<|fim▁end|> | |
<|file_name|>register.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('aquilaApp')
.config(function ($stateProvider) {
$stateProvider
.state('register', {
parent: 'account',
url: '/register',
data: {
roles: [],
pageTitle: 'register.title'
},
views: {
'content@': {
templateUrl: 'scripts/app/account/register/register.html',
controller: 'RegisterController'
}
},
resolve: {<|fim▁hole|> return $translate.refresh();
}]
}
});
});<|fim▁end|> | translatePartialLoader: ['$translate', '$translatePartialLoader', function ($translate, $translatePartialLoader) {
$translatePartialLoader.addPart('register'); |
<|file_name|>logical_geometry.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Geometry in flow-relative space.
use euclid::{Point2D, Rect, Size2D};
use euclid::num::Zero;
use euclid::side_offsets::SideOffsets2D;
use std::cmp::{max, min};
use std::fmt::{self, Debug, Error, Formatter};
use std::ops::{Add, Sub};
use unicode_bidi as bidi;
pub enum BlockFlowDirection {
TopToBottom,
RightToLeft,
LeftToRight
}
pub enum InlineBaseDirection {
LeftToRight,
RightToLeft
}
// TODO: improve the readability of the WritingMode serialization, refer to the Debug::fmt()
bitflags!(
#[cfg_attr(feature = "servo", derive(HeapSizeOf, Serialize))]
pub flags WritingMode: u8 {
const FLAG_RTL = 1 << 0,
const FLAG_VERTICAL = 1 << 1,
const FLAG_VERTICAL_LR = 1 << 2,
/// For vertical writing modes only. When set, line-over/line-under
/// sides are inverted from block-start/block-end. This flag is
/// set when sideways-lr is used.
const FLAG_LINE_INVERTED = 1 << 3,
const FLAG_SIDEWAYS = 1 << 4,
const FLAG_UPRIGHT = 1 << 5,
}
);
impl WritingMode {
#[inline]
pub fn is_vertical(&self) -> bool {
self.intersects(FLAG_VERTICAL)
}
/// Assuming .is_vertical(), does the block direction go left to right?
#[inline]
pub fn is_vertical_lr(&self) -> bool {
self.intersects(FLAG_VERTICAL_LR)
}
/// Assuming .is_vertical(), does the inline direction go top to bottom?
#[inline]
pub fn is_inline_tb(&self) -> bool {
// https://drafts.csswg.org/css-writing-modes-3/#logical-to-physical
self.intersects(FLAG_RTL) == self.intersects(FLAG_LINE_INVERTED)
}
#[inline]
pub fn is_bidi_ltr(&self) -> bool {
!self.intersects(FLAG_RTL)
}
#[inline]
pub fn is_sideways(&self) -> bool {
self.intersects(FLAG_SIDEWAYS)
}
#[inline]
pub fn is_upright(&self) -> bool {
self.intersects(FLAG_UPRIGHT)
}
#[inline]
pub fn inline_start_physical_side(&self) -> PhysicalSide {
match (self.is_vertical(), self.is_inline_tb(), self.is_bidi_ltr()) {
(false, _, true) => PhysicalSide::Left,
(false, _, false) => PhysicalSide::Right,
(true, true, _) => PhysicalSide::Top,
(true, false, _) => PhysicalSide::Bottom,
}
}
#[inline]
pub fn inline_end_physical_side(&self) -> PhysicalSide {
match (self.is_vertical(), self.is_inline_tb(), self.is_bidi_ltr()) {
(false, _, true) => PhysicalSide::Right,
(false, _, false) => PhysicalSide::Left,
(true, true, _) => PhysicalSide::Bottom,
(true, false, _) => PhysicalSide::Top,
}
}
#[inline]
pub fn block_start_physical_side(&self) -> PhysicalSide {
match (self.is_vertical(), self.is_vertical_lr()) {
(false, _) => PhysicalSide::Top,
(true, true) => PhysicalSide::Left,
(true, false) => PhysicalSide::Right,
}
}
#[inline]
pub fn block_end_physical_side(&self) -> PhysicalSide {
match (self.is_vertical(), self.is_vertical_lr()) {
(false, _) => PhysicalSide::Bottom,
(true, true) => PhysicalSide::Right,
(true, false) => PhysicalSide::Left,
}
}
#[inline]
pub fn block_flow_direction(&self) -> BlockFlowDirection {
match (self.is_vertical(), self.is_vertical_lr()) {
(false, _) => BlockFlowDirection::TopToBottom,
(true, true) => BlockFlowDirection::LeftToRight,
(true, false) => BlockFlowDirection::RightToLeft,
}
}
#[inline]
pub fn inline_base_direction(&self) -> InlineBaseDirection {
if self.intersects(FLAG_RTL) {
InlineBaseDirection::RightToLeft
} else {
InlineBaseDirection::LeftToRight
}
}
#[inline]
/// The default bidirectional embedding level for this writing mode.
///
/// Returns bidi level 0 if the mode is LTR, or 1 otherwise.
pub fn to_bidi_level(&self) -> bidi::Level {
if self.is_bidi_ltr() {
bidi::Level::ltr()
} else {
bidi::Level::rtl()
}
}
}
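
// Illustrative sketch (not in the original source): how the side mappings
// above resolve for the default horizontal-tb, left-to-right writing mode,
// modeled here as `WritingMode::empty()` (no flags set).
#[cfg(test)]
mod writing_mode_sketch {
    use super::*;

    #[test]
    fn horizontal_ltr_side_mapping() {
        let mode = WritingMode::empty();
        assert_eq!(mode.inline_start_physical_side(), PhysicalSide::Left);
        assert_eq!(mode.inline_end_physical_side(), PhysicalSide::Right);
        assert_eq!(mode.block_start_physical_side(), PhysicalSide::Top);
        assert_eq!(mode.block_end_physical_side(), PhysicalSide::Bottom);
    }
}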
impl fmt::Display for WritingMode {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
if self.is_vertical() {
try!(write!(formatter, "V"));
if self.is_vertical_lr() {
try!(write!(formatter, " LR"));
} else {
try!(write!(formatter, " RL"));
}
if self.intersects(FLAG_SIDEWAYS) {
try!(write!(formatter, " Sideways"));
}
if self.intersects(FLAG_LINE_INVERTED) {
try!(write!(formatter, " Inverted"));
}
} else {
try!(write!(formatter, "H"));
}
if self.is_bidi_ltr() {
write!(formatter, " LTR")
} else {
write!(formatter, " RTL")
}
}
}
/// Wherever logical geometry is used, the writing mode is known based on context:
/// every method takes a `mode` parameter.
/// However, this context is easy to get wrong.
/// In debug builds only, logical geometry objects store their writing mode
/// (in addition to taking it as a parameter to methods) and check it.
/// In non-debug builds, make this storage zero-size and the checks no-ops.
#[cfg(not(debug_assertions))]
#[derive(PartialEq, Eq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(Serialize))]
struct DebugWritingMode;
#[cfg(debug_assertions)]
#[derive(PartialEq, Eq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(Serialize))]
struct DebugWritingMode {
mode: WritingMode
}
#[cfg(not(debug_assertions))]
impl DebugWritingMode {
#[inline]
fn check(&self, _other: WritingMode) {}
<|fim▁hole|> #[inline]
fn check_debug(&self, _other: DebugWritingMode) {}
#[inline]
fn new(_mode: WritingMode) -> DebugWritingMode {
DebugWritingMode
}
}
#[cfg(debug_assertions)]
impl DebugWritingMode {
#[inline]
fn check(&self, other: WritingMode) {
assert!(self.mode == other)
}
#[inline]
fn check_debug(&self, other: DebugWritingMode) {
assert!(self.mode == other.mode)
}
#[inline]
fn new(mode: WritingMode) -> DebugWritingMode {
DebugWritingMode { mode: mode }
}
}
impl Debug for DebugWritingMode {
#[cfg(not(debug_assertions))]
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(formatter, "?")
}
#[cfg(debug_assertions)]
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(formatter, "{}", self.mode)
}
}
// Used to specify the logical direction.
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub enum Direction {
Inline,
Block
}
/// A 2D size in flow-relative dimensions
#[derive(PartialEq, Eq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub struct LogicalSize<T> {
pub inline: T, // inline-size, a.k.a. logical width, a.k.a. measure
pub block: T, // block-size, a.k.a. logical height, a.k.a. extent
debug_writing_mode: DebugWritingMode,
}
impl<T: Debug> Debug for LogicalSize<T> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(formatter, "LogicalSize({:?}, i{:?}×b{:?})",
self.debug_writing_mode, self.inline, self.block)
}
}
// Can not implement the Zero trait: its zero() method does not have the `mode` parameter.
impl<T: Zero> LogicalSize<T> {
#[inline]
pub fn zero(mode: WritingMode) -> LogicalSize<T> {
LogicalSize {
inline: Zero::zero(),
block: Zero::zero(),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy> LogicalSize<T> {
#[inline]
pub fn new(mode: WritingMode, inline: T, block: T) -> LogicalSize<T> {
LogicalSize {
inline: inline,
block: block,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
#[inline]
pub fn from_physical(mode: WritingMode, size: Size2D<T>) -> LogicalSize<T> {
if mode.is_vertical() {
LogicalSize::new(mode, size.height, size.width)
} else {
LogicalSize::new(mode, size.width, size.height)
}
}
#[inline]
pub fn width(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.block
} else {
self.inline
}
}
#[inline]
pub fn set_width(&mut self, mode: WritingMode, width: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.block = width
} else {
self.inline = width
}
}
#[inline]
pub fn height(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.inline
} else {
self.block
}
}
#[inline]
pub fn set_height(&mut self, mode: WritingMode, height: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.inline = height
} else {
self.block = height
}
}
#[inline]
pub fn to_physical(&self, mode: WritingMode) -> Size2D<T> {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
Size2D::new(self.block, self.inline)
} else {
Size2D::new(self.inline, self.block)
}
}
#[inline]
pub fn convert(&self, mode_from: WritingMode, mode_to: WritingMode) -> LogicalSize<T> {
if mode_from == mode_to {
self.debug_writing_mode.check(mode_from);
*self
} else {
LogicalSize::from_physical(mode_to, self.to_physical(mode_from))
}
}
}
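
// Illustrative note (not in the original source): `convert` goes through
// physical coordinates, so converting a size to another writing mode and
// back yields the original size (it only swaps width/height as needed).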
impl<T: Add<T, Output=T>> Add for LogicalSize<T> {
type Output = LogicalSize<T>;
#[inline]
fn add(self, other: LogicalSize<T>) -> LogicalSize<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalSize {
debug_writing_mode: self.debug_writing_mode,
inline: self.inline + other.inline,
block: self.block + other.block,
}
}
}
impl<T: Sub<T, Output=T>> Sub for LogicalSize<T> {
type Output = LogicalSize<T>;
#[inline]
fn sub(self, other: LogicalSize<T>) -> LogicalSize<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalSize {
debug_writing_mode: self.debug_writing_mode,
inline: self.inline - other.inline,
block: self.block - other.block,
}
}
}
/// A 2D point in flow-relative dimensions
#[derive(PartialEq, Eq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub struct LogicalPoint<T> {
/// inline-axis coordinate
pub i: T,
/// block-axis coordinate
pub b: T,
debug_writing_mode: DebugWritingMode,
}
impl<T: Debug> Debug for LogicalPoint<T> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
write!(formatter, "LogicalPoint({:?} (i{:?}, b{:?}))",
self.debug_writing_mode, self.i, self.b)
}
}
// Can not implement the Zero trait: its zero() method does not have the `mode` parameter.
impl<T: Zero> LogicalPoint<T> {
#[inline]
pub fn zero(mode: WritingMode) -> LogicalPoint<T> {
LogicalPoint {
i: Zero::zero(),
b: Zero::zero(),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy> LogicalPoint<T> {
#[inline]
pub fn new(mode: WritingMode, i: T, b: T) -> LogicalPoint<T> {
LogicalPoint {
i: i,
b: b,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy + Sub<T, Output=T>> LogicalPoint<T> {
#[inline]
pub fn from_physical(mode: WritingMode, point: Point2D<T>, container_size: Size2D<T>)
-> LogicalPoint<T> {
if mode.is_vertical() {
LogicalPoint {
i: if mode.is_inline_tb() { point.y } else { container_size.height - point.y },
b: if mode.is_vertical_lr() { point.x } else { container_size.width - point.x },
debug_writing_mode: DebugWritingMode::new(mode),
}
} else {
LogicalPoint {
i: if mode.is_bidi_ltr() { point.x } else { container_size.width - point.x },
b: point.y,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
#[inline]
pub fn x(&self, mode: WritingMode, container_size: Size2D<T>) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.b } else { container_size.width - self.b }
} else {
if mode.is_bidi_ltr() { self.i } else { container_size.width - self.i }
}
}
#[inline]
pub fn set_x(&mut self, mode: WritingMode, x: T, container_size: Size2D<T>) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.b = if mode.is_vertical_lr() { x } else { container_size.width - x }
} else {
self.i = if mode.is_bidi_ltr() { x } else { container_size.width - x }
}
}
#[inline]
pub fn y(&self, mode: WritingMode, container_size: Size2D<T>) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.i } else { container_size.height - self.i }
} else {
self.b
}
}
#[inline]
pub fn set_y(&mut self, mode: WritingMode, y: T, container_size: Size2D<T>) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.i = if mode.is_inline_tb() { y } else { container_size.height - y }
} else {
self.b = y
}
}
#[inline]
pub fn to_physical(&self, mode: WritingMode, container_size: Size2D<T>) -> Point2D<T> {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
Point2D::new(
if mode.is_vertical_lr() { self.b } else { container_size.width - self.b },
if mode.is_inline_tb() { self.i } else { container_size.height - self.i })
} else {
Point2D::new(
if mode.is_bidi_ltr() { self.i } else { container_size.width - self.i },
self.b)
}
}
#[inline]
pub fn convert(&self, mode_from: WritingMode, mode_to: WritingMode, container_size: Size2D<T>)
-> LogicalPoint<T> {
if mode_from == mode_to {
self.debug_writing_mode.check(mode_from);
*self
} else {
LogicalPoint::from_physical(
mode_to, self.to_physical(mode_from, container_size), container_size)
}
}
}
impl<T: Copy + Add<T, Output=T>> LogicalPoint<T> {
    /// This doesn’t really make sense,
/// but happens when dealing with multiple origins.
#[inline]
pub fn add_point(&self, other: &LogicalPoint<T>) -> LogicalPoint<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalPoint {
debug_writing_mode: self.debug_writing_mode,
i: self.i + other.i,
b: self.b + other.b,
}
}
}
impl<T: Copy + Add<T, Output=T>> Add<LogicalSize<T>> for LogicalPoint<T> {
type Output = LogicalPoint<T>;
#[inline]
fn add(self, other: LogicalSize<T>) -> LogicalPoint<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalPoint {
debug_writing_mode: self.debug_writing_mode,
i: self.i + other.inline,
b: self.b + other.block,
}
}
}
impl<T: Copy + Sub<T, Output=T>> Sub<LogicalSize<T>> for LogicalPoint<T> {
type Output = LogicalPoint<T>;
#[inline]
fn sub(self, other: LogicalSize<T>) -> LogicalPoint<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalPoint {
debug_writing_mode: self.debug_writing_mode,
i: self.i - other.inline,
b: self.b - other.block,
}
}
}
/// A "margin" in flow-relative dimensions
/// Represents the four sides of the margins, borders, or padding of a CSS box,
/// or a combination of those.
/// A positive "margin" can be added to a rectangle to obtain a bigger rectangle.
#[derive(PartialEq, Eq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub struct LogicalMargin<T> {
pub block_start: T,
pub inline_end: T,
pub block_end: T,
pub inline_start: T,
debug_writing_mode: DebugWritingMode,
}
impl<T: Debug> Debug for LogicalMargin<T> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
let writing_mode_string = if cfg!(debug_assertions) {
format!("{:?}, ", self.debug_writing_mode)
} else {
"".to_owned()
};
write!(formatter, "LogicalMargin({}i:{:?}..{:?} b:{:?}..{:?})",
writing_mode_string,
self.inline_start,
self.inline_end,
self.block_start,
self.block_end)
}
}
impl<T: Zero> LogicalMargin<T> {
#[inline]
pub fn zero(mode: WritingMode) -> LogicalMargin<T> {
LogicalMargin {
block_start: Zero::zero(),
inline_end: Zero::zero(),
block_end: Zero::zero(),
inline_start: Zero::zero(),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy> LogicalMargin<T> {
#[inline]
pub fn new(mode: WritingMode, block_start: T, inline_end: T, block_end: T, inline_start: T)
-> LogicalMargin<T> {
LogicalMargin {
block_start: block_start,
inline_end: inline_end,
block_end: block_end,
inline_start: inline_start,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
#[inline]
pub fn new_all_same(mode: WritingMode, value: T) -> LogicalMargin<T> {
LogicalMargin::new(mode, value, value, value, value)
}
#[inline]
pub fn from_physical(mode: WritingMode, offsets: SideOffsets2D<T>) -> LogicalMargin<T> {
let block_start;
let inline_end;
let block_end;
let inline_start;
if mode.is_vertical() {
if mode.is_vertical_lr() {
block_start = offsets.left;
block_end = offsets.right;
} else {
block_start = offsets.right;
block_end = offsets.left;
}
if mode.is_inline_tb() {
inline_start = offsets.top;
inline_end = offsets.bottom;
} else {
inline_start = offsets.bottom;
inline_end = offsets.top;
}
} else {
block_start = offsets.top;
block_end = offsets.bottom;
if mode.is_bidi_ltr() {
inline_start = offsets.left;
inline_end = offsets.right;
} else {
inline_start = offsets.right;
inline_end = offsets.left;
}
}
LogicalMargin::new(mode, block_start, inline_end, block_end, inline_start)
}
#[inline]
pub fn top(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.inline_start } else { self.inline_end }
} else {
self.block_start
}
}
#[inline]
pub fn set_top(&mut self, mode: WritingMode, top: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.inline_start = top } else { self.inline_end = top }
} else {
self.block_start = top
}
}
#[inline]
pub fn right(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.block_end } else { self.block_start }
} else {
if mode.is_bidi_ltr() { self.inline_end } else { self.inline_start }
}
}
#[inline]
pub fn set_right(&mut self, mode: WritingMode, right: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.block_end = right } else { self.block_start = right }
} else {
if mode.is_bidi_ltr() { self.inline_end = right } else { self.inline_start = right }
}
}
#[inline]
pub fn bottom(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.inline_end } else { self.inline_start }
} else {
self.block_end
}
}
#[inline]
pub fn set_bottom(&mut self, mode: WritingMode, bottom: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_inline_tb() { self.inline_end = bottom } else { self.inline_start = bottom }
} else {
self.block_end = bottom
}
}
#[inline]
pub fn left(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.block_start } else { self.block_end }
} else {
if mode.is_bidi_ltr() { self.inline_start } else { self.inline_end }
}
}
#[inline]
pub fn set_left(&mut self, mode: WritingMode, left: T) {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
if mode.is_vertical_lr() { self.block_start = left } else { self.block_end = left }
} else {
if mode.is_bidi_ltr() { self.inline_start = left } else { self.inline_end = left }
}
}
#[inline]
pub fn to_physical(&self, mode: WritingMode) -> SideOffsets2D<T> {
self.debug_writing_mode.check(mode);
let top;
let right;
let bottom;
let left;
if mode.is_vertical() {
if mode.is_vertical_lr() {
left = self.block_start;
right = self.block_end;
} else {
right = self.block_start;
left = self.block_end;
}
if mode.is_inline_tb() {
top = self.inline_start;
bottom = self.inline_end;
} else {
bottom = self.inline_start;
top = self.inline_end;
}
} else {
top = self.block_start;
bottom = self.block_end;
if mode.is_bidi_ltr() {
left = self.inline_start;
right = self.inline_end;
} else {
right = self.inline_start;
left = self.inline_end;
}
}
SideOffsets2D::new(top, right, bottom, left)
}
#[inline]
pub fn convert(&self, mode_from: WritingMode, mode_to: WritingMode) -> LogicalMargin<T> {
if mode_from == mode_to {
self.debug_writing_mode.check(mode_from);
*self
} else {
LogicalMargin::from_physical(mode_to, self.to_physical(mode_from))
}
}
}
impl<T: PartialEq + Zero> LogicalMargin<T> {
#[inline]
pub fn is_zero(&self) -> bool {
self.block_start == Zero::zero() && self.inline_end == Zero::zero() &&
self.block_end == Zero::zero() && self.inline_start == Zero::zero()
}
}
impl<T: Copy + Add<T, Output=T>> LogicalMargin<T> {
#[inline]
pub fn inline_start_end(&self) -> T {
self.inline_start + self.inline_end
}
#[inline]
pub fn block_start_end(&self) -> T {
self.block_start + self.block_end
}
#[inline]
pub fn start_end(&self, direction: Direction) -> T {
match direction {
Direction::Inline =>
self.inline_start + self.inline_end,
Direction::Block =>
self.block_start + self.block_end
}
}
#[inline]
pub fn top_bottom(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.inline_start_end()
} else {
self.block_start_end()
}
}
#[inline]
pub fn left_right(&self, mode: WritingMode) -> T {
self.debug_writing_mode.check(mode);
if mode.is_vertical() {
self.block_start_end()
} else {
self.inline_start_end()
}
}
}
impl<T: Add<T, Output=T>> Add for LogicalMargin<T> {
type Output = LogicalMargin<T>;
#[inline]
fn add(self, other: LogicalMargin<T>) -> LogicalMargin<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalMargin {
debug_writing_mode: self.debug_writing_mode,
block_start: self.block_start + other.block_start,
inline_end: self.inline_end + other.inline_end,
block_end: self.block_end + other.block_end,
inline_start: self.inline_start + other.inline_start,
}
}
}
impl<T: Sub<T, Output=T>> Sub for LogicalMargin<T> {
type Output = LogicalMargin<T>;
#[inline]
fn sub(self, other: LogicalMargin<T>) -> LogicalMargin<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalMargin {
debug_writing_mode: self.debug_writing_mode,
block_start: self.block_start - other.block_start,
inline_end: self.inline_end - other.inline_end,
block_end: self.block_end - other.block_end,
inline_start: self.inline_start - other.inline_start,
}
}
}
/// A rectangle in flow-relative dimensions
#[derive(PartialEq, Eq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(Serialize))]
pub struct LogicalRect<T> {
pub start: LogicalPoint<T>,
pub size: LogicalSize<T>,
debug_writing_mode: DebugWritingMode,
}
impl<T: Debug> Debug for LogicalRect<T> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> {
let writing_mode_string = if cfg!(debug_assertions) {
format!("{:?}, ", self.debug_writing_mode)
} else {
"".to_owned()
};
write!(formatter, "LogicalRect({}i{:?}×b{:?}, @ (i{:?},b{:?}))",
writing_mode_string,
self.size.inline,
self.size.block,
self.start.i,
self.start.b)
}
}
impl<T: Zero> LogicalRect<T> {
#[inline]
pub fn zero(mode: WritingMode) -> LogicalRect<T> {
LogicalRect {
start: LogicalPoint::zero(mode),
size: LogicalSize::zero(mode),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy> LogicalRect<T> {
#[inline]
pub fn new(mode: WritingMode, inline_start: T, block_start: T, inline: T, block: T)
-> LogicalRect<T> {
LogicalRect {
start: LogicalPoint::new(mode, inline_start, block_start),
size: LogicalSize::new(mode, inline, block),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
#[inline]
pub fn from_point_size(mode: WritingMode, start: LogicalPoint<T>, size: LogicalSize<T>)
-> LogicalRect<T> {
start.debug_writing_mode.check(mode);
size.debug_writing_mode.check(mode);
LogicalRect {
start: start,
size: size,
debug_writing_mode: DebugWritingMode::new(mode),
}
}
}
impl<T: Copy + Add<T, Output=T> + Sub<T, Output=T>> LogicalRect<T> {
#[inline]
pub fn from_physical(mode: WritingMode, rect: Rect<T>, container_size: Size2D<T>)
-> LogicalRect<T> {
let inline_start;
let block_start;
let inline;
let block;
if mode.is_vertical() {
inline = rect.size.height;
block = rect.size.width;
if mode.is_vertical_lr() {
block_start = rect.origin.x;
} else {
block_start = container_size.width - (rect.origin.x + rect.size.width);
}
if mode.is_inline_tb() {
inline_start = rect.origin.y;
} else {
inline_start = container_size.height - (rect.origin.y + rect.size.height);
}
} else {
inline = rect.size.width;
block = rect.size.height;
block_start = rect.origin.y;
if mode.is_bidi_ltr() {
inline_start = rect.origin.x;
} else {
inline_start = container_size.width - (rect.origin.x + rect.size.width);
}
}
LogicalRect {
start: LogicalPoint::new(mode, inline_start, block_start),
size: LogicalSize::new(mode, inline, block),
debug_writing_mode: DebugWritingMode::new(mode),
}
}
#[inline]
pub fn inline_end(&self) -> T {
self.start.i + self.size.inline
}
#[inline]
pub fn block_end(&self) -> T {
self.start.b + self.size.block
}
#[inline]
pub fn to_physical(&self, mode: WritingMode, container_size: Size2D<T>) -> Rect<T> {
self.debug_writing_mode.check(mode);
let x;
let y;
let width;
let height;
if mode.is_vertical() {
width = self.size.block;
height = self.size.inline;
if mode.is_vertical_lr() {
x = self.start.b;
} else {
x = container_size.width - self.block_end();
}
if mode.is_inline_tb() {
y = self.start.i;
} else {
y = container_size.height - self.inline_end();
}
} else {
width = self.size.inline;
height = self.size.block;
y = self.start.b;
if mode.is_bidi_ltr() {
x = self.start.i;
} else {
x = container_size.width - self.inline_end();
}
}
Rect {
origin: Point2D::new(x, y),
size: Size2D::new(width, height),
}
}
#[inline]
pub fn convert(&self, mode_from: WritingMode, mode_to: WritingMode, container_size: Size2D<T>)
-> LogicalRect<T> {
if mode_from == mode_to {
self.debug_writing_mode.check(mode_from);
*self
} else {
LogicalRect::from_physical(
mode_to, self.to_physical(mode_from, container_size), container_size)
}
}
pub fn translate_by_size(&self, offset: LogicalSize<T>) -> LogicalRect<T> {
LogicalRect {
start: self.start + offset,
..*self
}
}
pub fn translate(&self, offset: &LogicalPoint<T>) -> LogicalRect<T> {
LogicalRect {
start: self.start + LogicalSize {
inline: offset.i,
block: offset.b,
debug_writing_mode: offset.debug_writing_mode,
},
size: self.size,
debug_writing_mode: self.debug_writing_mode,
}
}
}
impl<T: Copy + Ord + Add<T, Output=T> + Sub<T, Output=T>> LogicalRect<T> {
#[inline]
pub fn union(&self, other: &LogicalRect<T>) -> LogicalRect<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
let inline_start = min(self.start.i, other.start.i);
let block_start = min(self.start.b, other.start.b);
LogicalRect {
start: LogicalPoint {
i: inline_start,
b: block_start,
debug_writing_mode: self.debug_writing_mode,
},
size: LogicalSize {
inline: max(self.inline_end(), other.inline_end()) - inline_start,
block: max(self.block_end(), other.block_end()) - block_start,
debug_writing_mode: self.debug_writing_mode,
},
debug_writing_mode: self.debug_writing_mode,
}
}
}
impl<T: Copy + Add<T, Output=T> + Sub<T, Output=T>> Add<LogicalMargin<T>> for LogicalRect<T> {
type Output = LogicalRect<T>;
#[inline]
fn add(self, other: LogicalMargin<T>) -> LogicalRect<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalRect {
start: LogicalPoint {
// Growing a rectangle on the start side means pushing its
// start point on the negative direction.
i: self.start.i - other.inline_start,
b: self.start.b - other.block_start,
debug_writing_mode: self.debug_writing_mode,
},
size: LogicalSize {
inline: self.size.inline + other.inline_start_end(),
block: self.size.block + other.block_start_end(),
debug_writing_mode: self.debug_writing_mode,
},
debug_writing_mode: self.debug_writing_mode,
}
}
}
impl<T: Copy + Add<T, Output=T> + Sub<T, Output=T>> Sub<LogicalMargin<T>> for LogicalRect<T> {
type Output = LogicalRect<T>;
#[inline]
fn sub(self, other: LogicalMargin<T>) -> LogicalRect<T> {
self.debug_writing_mode.check_debug(other.debug_writing_mode);
LogicalRect {
start: LogicalPoint {
// Shrinking a rectangle on the start side means pushing its
// start point on the positive direction.
i: self.start.i + other.inline_start,
b: self.start.b + other.block_start,
debug_writing_mode: self.debug_writing_mode,
},
size: LogicalSize {
inline: self.size.inline - other.inline_start_end(),
block: self.size.block - other.block_start_end(),
debug_writing_mode: self.debug_writing_mode,
},
debug_writing_mode: self.debug_writing_mode,
}
}
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum PhysicalSide {
Top,
Right,
Bottom,
Left,
}<|fim▁end|> | |
<|file_name|>jquery.wikiEditor.publish.js<|end_file_name|><|fim▁begin|>/* Publish module for wikiEditor */
( function ( $ ) {
$.wikiEditor.modules.publish = {
/**
         * Compatibility map
*/
browsers: {
// Left-to-right languages
ltr: {
msie: [['>=', 7]],
firefox: [['>=', 3]],
opera: [['>=', 9.6]],
safari: [['>=', 4]]
},
// Right-to-left languages
rtl: {
msie: [['>=', 8]],
firefox: [['>=', 3]],
opera: [['>=', 9.6]],
safari: [['>=', 4]]
}
},
/**
* Internally used functions
*/
fn: {
/**
* Creates a publish module within a wikiEditor
* @param context Context object of editor to create module in
* @param config Configuration object to create module from
*/
create: function ( context, config ) {
// Build the dialog behind the Publish button
var dialogID = 'wikiEditor-' + context.instance + '-dialog';
$.wikiEditor.modules.dialogs.fn.create(
context,
{
previewsave: {
id: dialogID,
titleMsg: 'wikieditor-publish-dialog-title',
html: '\
<div class="wikiEditor-publish-dialog-copywarn"></div>\
<div class="wikiEditor-publish-dialog-editoptions">\
<form id="wikieditor-' + context.instance + '-publish-dialog-form">\
<div class="wikiEditor-publish-dialog-summary">\
<label for="wikiEditor-' + context.instance + '-dialog-summary"\
rel="wikieditor-publish-dialog-summary"></label>\
<br />\
<input type="text" id="wikiEditor-' + context.instance + '-dialog-summary"\
style="width: 100%;" />\
</div>\
<div class="wikiEditor-publish-dialog-options">\
<input type="checkbox"\
id="wikiEditor-' + context.instance + '-dialog-minor" />\
<label for="wikiEditor-' + context.instance + '-dialog-minor"\
rel="wikieditor-publish-dialog-minor"></label>\
<input type="checkbox"\
id="wikiEditor-' + context.instance + '-dialog-watch" />\
<label for="wikiEditor-' + context.instance + '-dialog-watch"\
rel="wikieditor-publish-dialog-watch"></label>\
</div>\
</form>\
</div>',
init: function () {
var i;
$(this).find( '[rel]' ).each( function () {
<|fim▁hole|> $(this).text( mediaWiki.msg( $(this).attr( 'rel' ) ) );
});
/* REALLY DIRTY HACK! */
// Reformat the copyright warning stuff
var copyWarnHTML = $( '#editpage-copywarn p' ).html();
// TODO: internationalize by splitting on other characters that end statements
var copyWarnStatements = copyWarnHTML.split( '. ' );
var newCopyWarnHTML = '<ul>';
for ( i = 0; i < copyWarnStatements.length; i++ ) {
if ( copyWarnStatements[i] !== '' ) {
var copyWarnStatement = $.trim( copyWarnStatements[i] ).replace( /\.*$/, '' );
newCopyWarnHTML += '<li>' + copyWarnStatement + '.</li>';
}
}
newCopyWarnHTML += '</ul>';
// No list if there's only one element
$(this).find( '.wikiEditor-publish-dialog-copywarn' ).html(
copyWarnStatements.length > 1 ? newCopyWarnHTML : copyWarnHTML
);
/* END OF REALLY DIRTY HACK */
if ( $( '#wpMinoredit' ).length === 0 )
$( '#wikiEditor-' + context.instance + '-dialog-minor' ).hide();
else if ( $( '#wpMinoredit' ).is( ':checked' ) )
$( '#wikiEditor-' + context.instance + '-dialog-minor' )
.prop( 'checked', true );
if ( $( '#wpWatchthis' ).length === 0 )
$( '#wikiEditor-' + context.instance + '-dialog-watch' ).hide();
else if ( $( '#wpWatchthis' ).is( ':checked' ) )
$( '#wikiEditor-' + context.instance + '-dialog-watch' )
.prop( 'checked', true );
$(this).find( 'form' ).submit( function ( e ) {
$(this).closest( '.ui-dialog' ).find( 'button:first' ).click();
e.preventDefault();
});
},
dialog: {
buttons: {
'wikieditor-publish-dialog-publish': function () {
var minorChecked = $( '#wikiEditor-' + context.instance +
'-dialog-minor' ).is( ':checked' ) ?
'checked' : '';
var watchChecked = $( '#wikiEditor-' + context.instance +
'-dialog-watch' ).is( ':checked' ) ?
'checked' : '';
$( '#wpMinoredit' ).prop( 'checked', minorChecked );
$( '#wpWatchthis' ).prop( 'checked', watchChecked );
$( '#wpSummary' ).val( $( '#wikiEditor-' + context.instance +
'-dialog-summary' ).val() );
$( '#editform' ).submit();
},
'wikieditor-publish-dialog-goback': function () {
$(this).dialog( 'close' );
}
},
open: function () {
$( '#wikiEditor-' + context.instance + '-dialog-summary' ).focus();
},
width: 500
},
resizeme: false
}
}
);
context.fn.addButton( {
'captionMsg': 'wikieditor-publish-button-publish',
'action': function () {
$( '#' + dialogID ).dialog( 'open' );
return false;
}
} );
context.fn.addButton( {
'captionMsg': 'wikieditor-publish-button-cancel',
'action': function () { }
} );
}
}
};
}( jQuery ) );<|fim▁end|> | |
<|file_name|>events.js<|end_file_name|><|fim▁begin|>var draw = SVG('mainPage');
var energyBar = draw.rect(0,5).move(0,598)
.fill({ color: '#cc0', opacity: '1' })
.stroke({ color: '#fff', width: '1', opacity: '0.6'});
var port = 25550;
var images = "http://"+document.location.hostname+":"+port+"/game/images/";
var localPlayers = new Array();
var localBullets = new Array();
var localBonus = new Array();
var bonusBars = new Array();
function PlayerEntity(mark, text) {
this.mark = mark;
this.text = text;
}
// Player management
socket.on('refreshPlayers', function (players) {
for(var i in players) {
        // Create any new players
if(typeof(localPlayers[i]) === "undefined") {
var ownColor = '#fff';
if(players[i].id == socket.socket.sessionid) {
            // Give the current player a colored marker
ownColor = '#c00';
}
            // Create the SVG elements
var circle = draw.circle(6).move(players[i].x,players[i].y)
.fill({ color: ownColor, opacity: '1' })
.stroke({ color: '#fff', width: '1' });
var text = draw.text(players[i].pseudo).font({ size: 12 })
.fill({ color: '#fff', opacity: '0.6' })
.stroke({ color: '#fff', width: '1', opacity: '0.4'});
            // Move the text above the marker
text.move(players[i].x - text.bbox().width /2, players[i].y - text.bbox().height - 10);
            // Add the entity to the array
localPlayers[i] = new PlayerEntity(circle, text);
}
else {
            // Move the player
localPlayers[i].mark.move(players[i].x, players[i].y);
localPlayers[i].text.move(players[i].x - localPlayers[i].text.bbox().width /2, players[i].y - localPlayers[i].text.bbox().height - 10);
            // Update the local player
if(players[i].id == socket.socket.sessionid) {
                // Show the right button depending on the mode
if(players[i].spec == false) {
document.getElementById("b1").style.display = "none";
document.getElementById("b2").style.display = "block";
}
else {
document.getElementById("b2").style.display = "none";
document.getElementById("b1").style.display = "block";
}
                // Update the energy bar
if (players[i].energy > 1)
{ energyBar.width(((players[i].energy-1)/100)*800); }
else
{ energyBar.width(0); }
                // Update the bonus bars
for(var j in bonusBars) {
switch(bonusBars[j].name) {
case "speed":
bonusBars[j].bar.width(players[i].bSpeed);
break;
case "arrow":
bonusBars[j].bar.width(players[i].bArrow);
break;
}
}
}
}
}
});
// Switch to spectator mode
function _setSpec() {
socket.emit('setSpec', 1);
}
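// Illustrative note (not in the original file): _setSpec is assumed to be
// wired to the spectator-mode button in the page markup,
// e.g. <div id="b2" onclick="_setSpec()">.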
// Add a bonus bar
socket.on('newPlayerBonus', function (bonus) {
    // Make sure the bar does not already exist
for(var i in bonusBars) {
if(bonusBars[i].name == bonus.name) {
return;
}
}
var rect = draw.rect(0,12).move(0,15*(bonusBars.length+1))
.fill({ color: bonus.color, opacity: '0.4' });
bonusBars.push({name: bonus.name, bar: rect});
});
// Remove a player
socket.on('removePlayer', function (id) {
localPlayers[id].mark.remove(); localPlayers[id].text.remove();
localPlayers.splice(id,1);
});
// Display bonuses
socket.on('displayBonus', function (bonus) {
for(var i in bonus) {
        // Create any new bonuses
if(typeof(localBonus[i]) === "undefined") {
localBonus[i] = draw.image(images+""+bonus[i].image+".png")
.move(bonus[i].x,bonus[i].y);
}
}
});
// Remove a bonus
socket.on('removeBonus', function (bonusID) {
if (bonusID == -1) {
for(var i in localBonus) {
localBonus[i].remove();
}
localBonus = [];
}
else {
localBonus[bonusID].remove();
localBonus.splice(bonusID,1);
}
});
<|fim▁hole|> return a.points > b.points;
}).reverse();
    // Format the player list
var list = "<b>Joueurs en ligne : </b><br />";
var listSpec = "<b>Spectateurs : </b><br />";
for(var i in players) {
if(players[i].spec == 0) {
if(players[i].alive == 0) {
list = list + "<span style='color:#" + players[i].color + "; float:left;'><s>" + players[i].pseudo + "</s></span><span style='float:right;'>- " + players[i].points + " points</span><br />";
} else {
list = list + "<span style='color:#" + players[i].color + "; float:left;'>" + players[i].pseudo + "</span><span style='float:right;'>- " + players[i].points + " points</span><br />";
}
}
else {
listSpec = listSpec + "<span style='color:#" + players[i].color + "; float:left;'>" + players[i].pseudo + "</span><br />";
}
}
    // Update the on-screen player lists
document.getElementById("scores").innerHTML = list;
document.getElementById("specs").innerHTML = listSpec;
});
// Add the new bullets from the buffer
var max = 0;
socket.on('refreshBullets', function (bulletTable) {
for(var i in bulletTable) {
        // Create the bullet traces
var length = max + i;
if(typeof(localBullets[length]) === "undefined") {
            localBullets[length] = draw.circle(5/*height line*/)
.move(bulletTable[i].x,bulletTable[i].y)
.fill({ color:'#'+bulletTable[i].color })
.stroke({ color: '#fff', width: '1', opacity: '0.5' });
max++;
}
}
});
// Reset the playing field
socket.on('resetGround', function (e) {
for(var i in localBullets) {
localBullets[i].remove();
}
localBullets = [];
});
// Server shutdown
socket.on('stopServer', function (e) {
window.location.replace("http://"+document.location.hostname+"/?alert=1");
});
// Player kicked
socket.on('kickPlayer', function (e) {
window.location.replace("http://"+document.location.hostname+"/?kick=1");
});
// Handle a new chat message
socket.on('newMessage', function (e) {
var tmp = document.getElementById("comments").innerHTML;
document.getElementById("comments").innerHTML = "<b>"+e.pseudo+" : </b>"+e.message+"<br />"+tmp;
});
// Display an alert
socket.on('displayAlert', function(text, color, duration) {
if(color == '') {
color = "#fff";
}
if(duration == '') {
duration = 1000;
}
var appear, disappear, deleteAlert,
alert = draw.text(text).font({ size: 36 });
appear = function() {
alert.move(400-(alert.bbox().width / 2), 100)
.fill({ color: color, opacity: '0' })
.animate(100).fill({ opacity: '1' })
.after(disappear);
};
disappear = function() {
setTimeout(function() {
alert.animate(500).fill({ opacity: '0' }).after(deleteAlert);
}, duration);
};
deleteAlert = function() {
alert.remove();
}
appear();
});
// Display a victory message
socket.on('displayVictory', function(pseudo) {
var appear, disappear, deleteAlert,
alert = draw.text("Victoire de "+pseudo+" !").font({ size: 20 });
appear = function() {
alert.move(400-(alert.bbox().width / 2), 50)
.fill({ color: '#fff', opacity: '0' })
.animate(100).fill({ opacity: '1' })
.after(disappear);
};
disappear = function() {
setTimeout(function() {
alert.animate(500).fill({ opacity: '0' }).after(deleteAlert);
}, 1000);
};
deleteAlert = function() {
alert.remove();
}
appear();
});<|fim▁end|> | // Refresh the scoreboard
socket.on('refreshScores', function (players) {
    // Sort the array by score
players = players.sort(function(a,b) { |
<|file_name|>enkf_node.py<|end_file_name|><|fim▁begin|># Copyright (C) 2012 Statoil ASA, Norway.
#
# The file 'enkf_node.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from ert.cwrap import BaseCClass, CWrapper
from ert.enkf import ENKF_LIB, EnkfFs, NodeId
from ert.enkf.data import EnkfConfigNode, GenKw, GenData, CustomKW
from ert.enkf.enums import ErtImplType
class EnkfNode(BaseCClass):
def __init__(self, config_node, private=False):
assert isinstance(config_node, EnkfConfigNode)
if private:
c_pointer = EnkfNode.cNamespace().alloc_private(config_node)
else:
c_pointer = EnkfNode.cNamespace().alloc(config_node)
super(EnkfNode, self).__init__(c_pointer, config_node, True)
def valuePointer(self):
return EnkfNode.cNamespace().value_ptr(self)
def asGenData(self):
""" @rtype: GenData """
impl_type = EnkfNode.cNamespace().get_impl_type(self)
assert impl_type == ErtImplType.GEN_DATA
return GenData.createCReference(self.valuePointer(), self)
def asGenKw(self):
""" @rtype: GenKw """
impl_type = EnkfNode.cNamespace().get_impl_type(self)
assert impl_type == ErtImplType.GEN_KW
return GenKw.createCReference(self.valuePointer(), self)
def asCustomKW(self):
""" @rtype: CustomKW """
impl_type = EnkfNode.cNamespace().get_impl_type(self)<|fim▁hole|>
return CustomKW.createCReference(self.valuePointer(), self)
def tryLoad(self, fs, node_id):
"""
@type fs: EnkfFS
@type node_id: NodeId
@rtype: bool
"""
assert isinstance(fs, EnkfFs)
assert isinstance(node_id, NodeId)
return EnkfNode.cNamespace().try_load(self, fs, node_id)
def name(self):
return EnkfNode.cNamespace().get_name(self)
def load(self, fs, node_id):
if not self.tryLoad(fs, node_id):
raise Exception("Could not load node: %s iens: %d report: %d" % (self.name(), node_id.iens, node_id.report_step))
def save(self, fs, node_id):
assert isinstance(fs, EnkfFs)
assert isinstance(node_id, NodeId)
EnkfNode.cNamespace().store(self, fs, True, node_id)
def free(self):
EnkfNode.cNamespace().free(self)
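
# Illustrative sketch (not in the original file): a typical load/save round
# trip. `fs` is assumed to be an open EnkfFs and `config_node` an
# EnkfConfigNode; the NodeId constructor arguments are assumed here.
#
#     node = EnkfNode(config_node)
#     node_id = NodeId(0, 0)  # assumed (report_step, iens) order
#     if node.tryLoad(fs, node_id):
#         node.save(fs, node_id)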
cwrapper = CWrapper(ENKF_LIB)
cwrapper.registerObjectType("enkf_node", EnkfNode)
EnkfNode.cNamespace().free = cwrapper.prototype("void enkf_node_free(enkf_node)")
EnkfNode.cNamespace().alloc = cwrapper.prototype("c_void_p enkf_node_alloc(enkf_node)")
EnkfNode.cNamespace().alloc_private = cwrapper.prototype("c_void_p enkf_node_alloc_private_container(enkf_node)")
EnkfNode.cNamespace().get_name = cwrapper.prototype("char* enkf_node_get_key(enkf_node)")
EnkfNode.cNamespace().value_ptr = cwrapper.prototype("c_void_p enkf_node_value_ptr(enkf_node)")
EnkfNode.cNamespace().try_load = cwrapper.prototype("bool enkf_node_try_load(enkf_node, enkf_fs, node_id)")
EnkfNode.cNamespace().get_impl_type = cwrapper.prototype("ert_impl_type_enum enkf_node_get_impl_type(enkf_node)")
EnkfNode.cNamespace().store = cwrapper.prototype("void enkf_node_store(enkf_node, enkf_fs, bool, node_id)")<|fim▁end|> | assert impl_type == ErtImplType.CUSTOM_KW |
<|file_name|>readme_maker.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Tue Apr 9 21:30:31 2019
<|fim▁hole|>import os
from os.path import join, split
lines = []
# Walk the parent directory and merge every readme.md into a single README
for root, folders, filenames in os.walk('..'):
for filename in filenames:
if filename == 'readme.md':
lines += ['']
with open(join(root, filename), 'r', encoding='utf-8') as file:
spoiler = False
lines.append(f"\n# {split(root)[-1]}")
for line in file.readlines():
if line.startswith('#') and not spoiler:
lines.append(f"\n<details>\n<summary> {split(root)[-1]} </summary>\n\n")
spoiler = True
                # Rewrite relative image paths so they resolve from the repo root
                if line.startswith('!'):
path = line.split('(')[-1][:-1]
line = line.replace(path, root[3:] + '/' + path)
lines.append(line)
lines.append("\n</details>\n")
with open(join('..', 'README.md'), 'w', encoding='utf-8') as file:
for line in lines:
file.write(line)<|fim▁end|> | @author: Rignak
"""
|
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function (grunt) {
grunt.initConfig({
less: {
test: {
src: 'test/test.less',<|fim▁hole|> }
}
})
grunt.loadNpmTasks('grunt-contrib-less')
grunt.registerTask('default', ['less'])
}<|fim▁end|> | dest: 'test/test.css' |
<|file_name|>stock_picking.py<|end_file_name|><|fim▁begin|># Copyright 2014-2017 Pedro M. Baeza <[email protected]>
# Copyright 2018-2019 Sergio Teruel <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
from odoo.fields import first
class StockPicking(models.Model):
_inherit = 'stock.picking'
returned_ids = fields.Many2many(
comodel_name="stock.picking", compute="_compute_returned_ids",
string="Returned pickings")
source_picking_id = fields.Many2one(
comodel_name="stock.picking",
compute="_compute_source_picking_id",
string="Source picking")
@api.multi
def _compute_returned_ids(self):
for picking in self:<|fim▁hole|> def _compute_source_picking_id(self):
"""Get source piking from this picking. Only one origin is possible.
"""
for picking in self:
picking.source_picking_id = first(picking.mapped(
'move_lines.origin_returned_move_id.picking_id'))
def action_show_source_picking(self):
""" Open source picking form action """
return self.source_picking_id.get_formview_action()<|fim▁end|> | picking.returned_ids = picking.mapped(
'move_lines.returned_move_ids.picking_id')
@api.depends('move_lines.origin_returned_move_id') |
<|file_name|>conflicting-repr-hints.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
#[repr(C)]
enum A {
A,
}
#[repr(u64)]
enum B {
B,
}
#[repr(C, u64)] //~ ERROR conflicting representation hints
//~^ WARN this was previously accepted
enum C {
C,
}
#[repr(u32, u64)] //~ ERROR conflicting representation hints
//~^ WARN this was previously accepted
enum D {
D,
}
#[repr(C, packed)]
struct E(i32);
<|fim▁hole|>#[repr(packed, align(8))]
struct F(i32); //~ ERROR type has conflicting packed and align representation hints
#[repr(packed)]
#[repr(align(8))]
struct G(i32); //~ ERROR type has conflicting packed and align representation hints
#[repr(align(8))]
#[repr(packed)]
struct H(i32); //~ ERROR type has conflicting packed and align representation hints
#[repr(packed, packed(2))]
struct I(i32); //~ ERROR type has conflicting packed representation hints
#[repr(packed(2))]
#[repr(packed)]
struct J(i32); //~ ERROR type has conflicting packed representation hints
#[repr(packed, packed(1))]
struct K(i32);
#[repr(packed, align(8))]
union X {
//~^ ERROR type has conflicting packed and align representation hints
i: i32,
}
#[repr(packed)]
#[repr(align(8))]
union Y {
//~^ ERROR type has conflicting packed and align representation hints
i: i32,
}
#[repr(align(8))]
#[repr(packed)]
union Z {
//~^ ERROR type has conflicting packed and align representation hints
i: i32,
}
#[repr(packed, align(0x100))]
pub struct S(u16); //~ ERROR type has conflicting packed and align representation hints
#[repr(packed, align(0x100))]
pub union U { //~ ERROR type has conflicting packed and align representation hints
u: u16
}
static B: U = U { u: 0 };
static A: S = S(0);
fn main() {}<|fim▁end|> | |
<|file_name|>hello.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"fmt"<|fim▁hole|>
func main() {
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
fmt.Fprint(w, "hello, world")
})
port := os.Getenv("PORT")
if port == "" {
port = "8080"
}
log.Printf("Handling HTTP requests on %s.", port)
log.Fatal(http.ListenAndServe(fmt.Sprintf(":%s", port), nil))
}<|fim▁end|> | "log"
"net/http"
"os"
) |
<|file_name|>tight_bbox.py<|end_file_name|><|fim▁begin|>"""
This module is to support *bbox_inches* option in savefig command.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
from matplotlib.transforms import Bbox, TransformedBbox, Affine2D
def adjust_bbox(fig, bbox_inches, fixed_dpi=None):
"""
Temporarily adjust the figure so that only the specified area
(bbox_inches) is saved.
It modifies fig.bbox, fig.bbox_inches,
fig.transFigure._boxout, and fig.patch. While the figure size
changes, the scale of the original figure is conserved. A<|fim▁hole|> origBboxInches = fig.bbox_inches
_boxout = fig.transFigure._boxout
asp_list = []
locator_list = []
for ax in fig.axes:
pos = ax.get_position(original=False).frozen()
locator_list.append(ax.get_axes_locator())
asp_list.append(ax.get_aspect())
def _l(a, r, pos=pos):
return pos
ax.set_axes_locator(_l)
ax.set_aspect("auto")
def restore_bbox():
for ax, asp, loc in zip(fig.axes, asp_list, locator_list):
ax.set_aspect(asp)
ax.set_axes_locator(loc)
fig.bbox = origBbox
fig.bbox_inches = origBboxInches
fig.transFigure._boxout = _boxout
fig.transFigure.invalidate()
fig.patch.set_bounds(0, 0, 1, 1)
if fixed_dpi is not None:
tr = Affine2D().scale(fixed_dpi)
dpi_scale = fixed_dpi / fig.dpi
else:
tr = Affine2D().scale(fig.dpi)
dpi_scale = 1.
_bbox = TransformedBbox(bbox_inches, tr)
fig.bbox_inches = Bbox.from_bounds(0, 0,
bbox_inches.width, bbox_inches.height)
x0, y0 = _bbox.x0, _bbox.y0
w1, h1 = fig.bbox.width * dpi_scale, fig.bbox.height * dpi_scale
fig.transFigure._boxout = Bbox.from_bounds(-x0, -y0, w1, h1)
fig.transFigure.invalidate()
fig.bbox = TransformedBbox(fig.bbox_inches, tr)
fig.patch.set_bounds(x0 / w1, y0 / h1,
fig.bbox.width / w1, fig.bbox.height / h1)
return restore_bbox
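
# Illustrative usage sketch (not in the original module): callers are
# expected to adjust, save, then restore, e.g.
#
#     restore_bbox = adjust_bbox(fig, bbox_inches)
#     try:
#         canvas.print_figure(filename)  # hypothetical save call
#     finally:
#         restore_bbox()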
def process_figure_for_rasterizing(fig, bbox_inches_restore, fixed_dpi=None):
"""
    This needs to be called when the figure dpi changes during drawing
    (e.g., rasterizing). It recovers the bbox and re-adjusts it with
    the new dpi.
"""
bbox_inches, restore_bbox = bbox_inches_restore
restore_bbox()
    r = adjust_bbox(fig, bbox_inches, fixed_dpi)
    return bbox_inches, r<|fim▁end|> | function which restores the original values is returned.
"""
origBbox = fig.bbox |
<|file_name|>cmdln_main2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
$ python cmdln_main2.py
This is my shell.
$ python cmdln_main2.py foo
hello from foo
"""<|fim▁hole|>
import sys
import cmdln
class Shell(cmdln.RawCmdln):
"This is my shell."
name = "shell"
def do_foo(self, argv):
print("hello from foo")
if __name__ == "__main__":
shell = Shell()
retval = shell.cmd(sys.argv[1:]) # just run one command
sys.exit(retval)<|fim▁end|> | |
<|file_name|>qquote.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-pretty
// ignore-test
#![feature(quote)]<|fim▁hole|>
use syntax::diagnostic;
use syntax::ast;
use syntax::codemap;
use syntax::codemap::span;
use syntax::parse;
use syntax::print::*;
trait fake_ext_ctxt {
fn cfg() -> ast::CrateConfig;
fn parse_sess() -> parse::parse_sess;
fn call_site() -> span;
fn ident_of(st: &str) -> ast::ident;
}
type fake_session = parse::parse_sess;
impl fake_ext_ctxt for fake_session {
fn cfg() -> ast::CrateConfig { Vec::new() }
fn parse_sess() -> parse::parse_sess { self }
fn call_site() -> span {
codemap::span {
lo: codemap::BytePos(0),
hi: codemap::BytePos(0),
expn_info: None
}
}
fn ident_of(st: &str) -> ast::ident {
self.interner.intern(st)
}
}
fn mk_ctxt() -> fake_ext_ctxt {
parse::new_parse_sess(None) as fake_ext_ctxt
}
fn main() {
let cx = mk_ctxt();
let abc = quote_expr!(cx, 23);
check_pp(ext_cx, abc, pprust::print_expr, "23".to_owned());
let ty = quote_ty!(cx, int);
check_pp(ext_cx, ty, pprust::print_type, "int".to_owned());
let item = quote_item!(cx, static x : int = 10;).get();
check_pp(ext_cx, item, pprust::print_item, "static x: int = 10;".to_owned());
let stmt = quote_stmt!(cx, let x = 20;);
check_pp(ext_cx, *stmt, pprust::print_stmt, "let x = 20;".to_owned());
let pat = quote_pat!(cx, Some(_));
check_pp(ext_cx, pat, pprust::print_pat, "Some(_)".to_owned());
}
fn check_pp<T>(cx: fake_ext_ctxt,
expr: T, f: |pprust::ps, T|, expect: StrBuf) {
let s = io::with_str_writer(|wr| {
let pp = pprust::rust_printer(wr, cx.parse_sess().interner);
f(pp, expr);
pp::eof(pp.s);
});
stdout().write_line(s);
if expect != "".to_owned() {
println!("expect: '%s', got: '%s'", expect, s);
assert_eq!(s, expect);
}
}<|fim▁end|> |
extern crate syntax;
use std::io::*; |
<|file_name|>fisherfaces_example.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) Philipp Wagner. All rights reserved.
# Licensed under the BSD license. See LICENSE file in the project root for full license information.
import sys
# append facerec to module search path
sys.path.append("../..")
# import facerec stuff
from facerec.dataset import DataSet
from facerec.feature import Fisherfaces
from facerec.distance import EuclideanDistance, CosineDistance
from facerec.classifier import NearestNeighbor
from facerec.classifier import SVM
from facerec.model import PredictableModel
from facerec.validation import KFoldCrossValidation<|fim▁hole|>from facerec.util import minmax_normalize
# import numpy
import numpy as np
# import matplotlib colormaps
import matplotlib.cm as cm
# import for logging
import logging,sys
# set up a handler for logging
handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
# add handler to facerec modules
logger = logging.getLogger("facerec")
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
# load a dataset (e.g. AT&T Facedatabase)
dataSet = DataSet("/home/philipp/facerec/data/yalefaces_recognition")
# define Fisherfaces as feature extraction method
feature = Fisherfaces()
# define a 1-NN classifier with Euclidean Distance
classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
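# alternatives imported above could be swapped in here; illustrative only,
# not part of the original example:
# classifier = NearestNeighbor(dist_metric=CosineDistance(), k=1)
# classifier = SVM()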
# define the model as the combination
model = PredictableModel(feature=feature, classifier=classifier)
# show fisherfaces
model.compute(dataSet.data, dataSet.labels)
# turn the first (at most) 16 eigenvectors into grayscale
# images (note: eigenvectors are stored by column!)
E = []
for i in xrange(min(model.feature.eigenvectors.shape[1], 16)):
e = model.feature.eigenvectors[:,i].reshape(dataSet.data[0].shape)
E.append(minmax_normalize(e,0,255, dtype=np.uint8))
# plot them and store the plot to "python_fisherfaces_fisherfaces.pdf"
subplot(title="Fisherfaces", images=E, rows=4, cols=4, sptitle="Fisherface", colormap=cm.jet, filename="fisherfaces.pdf")
# perform a 10-fold cross validation
cv = KFoldCrossValidation(model, k=10)
cv.validate(dataSet.data, dataSet.labels)
cv.print_results()<|fim▁end|> | from facerec.visual import subplot |
<|file_name|>issue-24086.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(unused_mut)]
#![allow(unused_variables)]
pub struct Registry<'a> {
listener: &'a mut (),
}
pub struct Listener<'a> {
pub announce: Option<Box<FnMut(&mut Registry) + 'a>>,
pub remove: Option<Box<FnMut(&mut Registry) + 'a>>,
}
impl<'a> Drop for Registry<'a> {<|fim▁hole|>
fn main() {
let mut registry_listener = Listener {
announce: None,
remove: None,
};
}<|fim▁end|> | fn drop(&mut self) {}
} |
<|file_name|>featuresDuck.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (c) "Neo4j"
* Neo4j Sweden AB [http://neo4j.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import Rx from 'rxjs/Rx'
import { SYSTEM_DB } from '../dbMeta/constants'
import { canSendTxMetadata } from '../features/versionedFeatures'
import bolt from 'services/bolt/bolt'
import { APP_START, CLOUD, DESKTOP } from 'shared/modules/app/appDuck'
import {
CONNECTION_SUCCESS,
DISCONNECTION_SUCCESS
} from 'shared/modules/connections/connectionsDuck'
import { getBackgroundTxMetadata } from 'shared/services/bolt/txMetadata'
export const NAME = 'features'
const CLEAR = 'features/CLEAR'
export const UPDATE_ALL_FEATURES = 'features/UPDATE_ALL_FEATURES'
export const UPDATE_USER_CAPABILITIES = 'features/UPDATE_USER_CAPABILITIES'
export const FEATURE_DETECTION_DONE = 'features/FEATURE_DETECTION_DONE'
export const DETECTED_CLIENT_CONFIG = 'features/DETECTED_CLIENT_CONFIG'
export const getAvailableProcedures = (state: any) =>
state[NAME].availableProcedures
export const isMultiDatabase = (state: any) =>
getAvailableProcedures(state).includes('dbms.databases.overview')
export const canAssignRolesToUser = (state: any) =>
getAvailableProcedures(state).includes('dbms.security.addRoleToUser')
export const hasClientConfig = (state: any) => state[NAME].clientConfig
export const utilizeBrowserSync = (state: any) => !!state[NAME].browserSync
export const getUserCapabilities = (state: any) => state[NAME].userCapabilities
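// Illustrative selector usage (assumes a standard Redux store object):
//   isMultiDatabase(store.getState()) // => true when dbms.databases.overview is available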
export const USER_CAPABILITIES = {
serverConfigReadable: 'serverConfigReadable',
proceduresReadable: 'proceduresReadable'
}
export const initialState = {
availableProcedures: [] as any[],
browserSync: true,
clientConfig: null,
userCapabilities: {<|fim▁hole|>
export default function (state = initialState, action: any) {
switch (action.type) {
case APP_START:
return {
...initialState,
...state,
browserSync: shouldUtilizeBrowserSync(action)
}
case UPDATE_ALL_FEATURES:
return { ...state, availableProcedures: [...action.availableProcedures] }
case DETECTED_CLIENT_CONFIG:
return { ...state, clientConfig: action.isAvailable }
case UPDATE_USER_CAPABILITIES:
return {
...state,
userCapabilities: {
...state.userCapabilities,
[action.capabilityName]: action.capabilityValue
}
}
case CLEAR:
return initialState
default:
return state
}
}
// Helper functions
const shouldUtilizeBrowserSync = (action: any) => {
return ![DESKTOP, CLOUD].includes(action.env)
}
// Action creators
export const updateFeatures = (availableProcedures: any) => {
return {
type: UPDATE_ALL_FEATURES,
availableProcedures
}
}
export const updateUserCapability = (
capabilityName: any,
capabilityValue: any
) => {
return {
type: UPDATE_USER_CAPABILITIES,
capabilityName,
capabilityValue
}
}
export const setClientConfig = (isAvailable: any) => {
return {
type: DETECTED_CLIENT_CONFIG,
isAvailable
}
}
export const featuresDiscoveryEpic = (action$: any, store: any) => {
return action$
.ofType(CONNECTION_SUCCESS)
.mergeMap(() => {
return new Promise(async (resolve, reject) => {
try {
const supportsMultiDb = await bolt.hasMultiDbSupport()
const res = await bolt.routedReadTransaction(
'CALL dbms.procedures YIELD name',
{},
{
useDb: supportsMultiDb ? SYSTEM_DB : '',
...getBackgroundTxMetadata({
hasServerSupport: canSendTxMetadata(store.getState())
})
}
)
resolve(res)
} catch (e) {
reject(e)
}
})
.then((res: any) => {
store.dispatch(
updateFeatures(res.records.map((record: any) => record.get('name')))
)
store.dispatch(
updateUserCapability(USER_CAPABILITIES.proceduresReadable, true)
)
return Rx.Observable.of(null)
})
.catch(() => {
store.dispatch(
updateUserCapability(USER_CAPABILITIES.proceduresReadable, false)
)
return Rx.Observable.of(null)
})
})
.mapTo({ type: FEATURE_DETECTION_DONE })
}
export const clearOnDisconnectEpic = (some$: any) =>
some$.ofType(DISCONNECTION_SUCCESS).mapTo({ type: CLEAR })<|fim▁end|> | [USER_CAPABILITIES.serverConfigReadable]: false,
[USER_CAPABILITIES.proceduresReadable]: false
}
} |
<|file_name|>_file_cache.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import errno
import os
import re
import tempfile
from hashlib import md5
class _FileCacheError(Exception):
"""Base exception class for FileCache related errors"""
class _FileCache(object):
DEPTH = 3
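    # DEPTH shards cache entries into nested directories built from the first
    # DEPTH hex digits of the md5 key, e.g. 'a/b/c/<full_hash>'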
def __init__(self, root_directory=None):
self._InitializeRootDirectory(root_directory)
def Get(self, key):
path = self._GetPath(key)
if os.path.exists(path):
with open(path) as f:
return f.read()
else:
return None
def Set(self, key, data):
path = self._GetPath(key)
directory = os.path.dirname(path)
if not os.path.exists(directory):
os.makedirs(directory)
if not os.path.isdir(directory):
raise _FileCacheError('%s exists but is not a directory' % directory)
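        # write to a temporary file and rename it into place below, so a
        # concurrent reader never observes a partially written cache entry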
temp_fd, temp_path = tempfile.mkstemp()
temp_fp = os.fdopen(temp_fd, 'w')
temp_fp.write(data)
temp_fp.close()
if not path.startswith(self._root_directory):
raise _FileCacheError('%s does not appear to live under %s' %
(path, self._root_directory))
if os.path.exists(path):
os.remove(path)
os.rename(temp_path, path)
def Remove(self, key):
path = self._GetPath(key)
if not path.startswith(self._root_directory):
raise _FileCacheError('%s does not appear to live under %s' %
(path, self._root_directory ))
if os.path.exists(path):
os.remove(path)
def GetCachedTime(self, key):
path = self._GetPath(key)
if os.path.exists(path):
return os.path.getmtime(path)
else:
return None
def _GetUsername(self):
"""Attempt to find the username in a cross-platform fashion."""
try:
return os.getenv('USER') or \
os.getenv('LOGNAME') or \
os.getenv('USERNAME') or \
os.getlogin() or \
'nobody'
except (AttributeError, IOError, OSError):
return 'nobody'
def _GetTmpCachePath(self):
username = self._GetUsername()
cache_directory = 'python.cache_' + username
return os.path.join(tempfile.gettempdir(), cache_directory)
def _InitializeRootDirectory(self, root_directory):
if not root_directory:
root_directory = self._GetTmpCachePath()
root_directory = os.path.abspath(root_directory)
try:
os.mkdir(root_directory)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(root_directory):
# directory already exists
pass
else:
# exists but is a file, or no permissions, or...
raise
self._root_directory = root_directory
def _GetPath(self, key):
try:
hashed_key = md5(key.encode('utf-8')).hexdigest()
except TypeError:
hashed_key = md5.new(key).hexdigest()
return os.path.join(self._root_directory,
self._GetPrefix(hashed_key),
hashed_key)
def _GetPrefix(self, hashed_key):
return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
class ParseTweet(object):
# compile once on import
regexp = {"RT": "^RT", "MT": r"^MT", "ALNUM": r"(@[a-zA-Z0-9_]+)",
"HASHTAG": r"(#[\w\d]+)", "URL": r"([http://]?[a-zA-Z\d\/]+[\.]+[a-zA-Z\d\/\.]+)"}
regexp = dict((key, re.compile(value)) for key, value in list(regexp.items()))
def __init__(self, timeline_owner, tweet):
""" timeline_owner : twitter handle of user account. tweet - 140 chars from feed; object does all computation on construction
properties:
RT, MT - boolean
URLs - list of URL
Hashtags - list of tags
"""
self.Owner = timeline_owner
self.tweet = tweet
self.UserHandles = ParseTweet.getUserHandles(tweet)
self.Hashtags = ParseTweet.getHashtags(tweet)
self.URLs = ParseTweet.getURLs(tweet)
self.RT = ParseTweet.getAttributeRT(tweet)
self.MT = ParseTweet.getAttributeMT(tweet)
# additional intelligence
        if self.RT and len(self.UserHandles) > 0:  # a retweet is attributed to the first mentioned handle
self.Owner = self.UserHandles[0]<|fim▁hole|> def __str__(self):
""" for display method """
return "owner %s, urls: %d, hashtags %d, user_handles %d, len_tweet %d, RT = %s, MT = %s" % (
self.Owner, len(self.URLs), len(self.Hashtags), len(self.UserHandles),
len(self.tweet), self.RT, self.MT)
@staticmethod
def getAttributeRT(tweet):
""" see if tweet is a RT """
return re.search(ParseTweet.regexp["RT"], tweet.strip()) is not None
@staticmethod
def getAttributeMT(tweet):
""" see if tweet is a MT """
return re.search(ParseTweet.regexp["MT"], tweet.strip()) is not None
@staticmethod
def getUserHandles(tweet):
""" given a tweet we try and extract all user handles in order of occurrence"""
return re.findall(ParseTweet.regexp["ALNUM"], tweet)
@staticmethod
def getHashtags(tweet):
""" return all hashtags"""
return re.findall(ParseTweet.regexp["HASHTAG"], tweet)
@staticmethod
def getURLs(tweet):
""" URL : [http://]?[\w\.?/]+"""
return re.findall(ParseTweet.regexp["URL"], tweet)<|fim▁end|> | return
|
<|file_name|>component_registrator.js<|end_file_name|><|fim▁begin|>/**
* DevExtreme (core/component_registrator.js)
* Version: 16.2.5
* Build date: Mon Feb 27 2017
*
* Copyright (c) 2012 - 2017 Developer Express Inc. ALL RIGHTS RESERVED
* EULA: https://www.devexpress.com/Support/EULAs/DevExtreme.xml
*/
"use strict";
var $ = require("jquery"),
errors = require("./errors"),<|fim▁hole|> publicComponentUtils = require("./utils/public_component");
var callbacks = new MemorizedCallbacks;
var registerComponent = function(name, namespace, componentClass) {
if (!componentClass) {
componentClass = namespace
} else {
namespace[name] = componentClass
}
publicComponentUtils.name(componentClass, name);
callbacks.fire(name, componentClass)
};
registerComponent.callbacks = callbacks;
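// Illustrative usage (not part of the original source): once registered,
// e.g. registerComponent("dxButton", ui, Button), a widget is created with
// $("#el").dxButton({...}) or a member is invoked with $("#el").dxButton("option", "text")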
var registerJQueryComponent = function(name, componentClass) {
$.fn[name] = function(options) {
var result, isMemberInvoke = "string" === typeof options;
if (isMemberInvoke) {
var memberName = options,
memberArgs = $.makeArray(arguments).slice(1);
this.each(function() {
var instance = componentClass.getInstance(this);
if (!instance) {
throw errors.Error("E0009", name)
}
var member = instance[memberName],
memberValue = member.apply(instance, memberArgs);
if (void 0 === result) {
result = memberValue
}
})
} else {
this.each(function() {
var instance = componentClass.getInstance(this);
if (instance) {
instance.option(options)
} else {
new componentClass(this, options)
}
});
result = this
}
return result
}
};
callbacks.add(registerJQueryComponent);
module.exports = registerComponent;<|fim▁end|> | MemorizedCallbacks = require("./memorized_callbacks"), |
<|file_name|>main_window.hpp<|end_file_name|><|fim▁begin|>/*
* Tweeteria - A minimalistic tweet reader.
* Copyright (C) 2017 Andreas Weis ([email protected])
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef TWEETERIA_CLIENT_INCLUDE_GUARD_UI_MAIN_WINDOW_HPP
#define TWEETERIA_CLIENT_INCLUDE_GUARD_UI_MAIN_WINDOW_HPP
#include <qt_begin_disable_warnings.hpp>
#include <QMainWindow>
#include <QVector>
#include <QBoxLayout>
#include <QListWidget>
#include <qt_end_disable_warnings.hpp>
#include <tweeteria/id_types.hpp>
#include <boost/optional.hpp>
#include <memory>
class CentralWidget;
class DataModel;
namespace tweeteria {
class Tweeteria;
struct User;
struct Tweet;
template <typename T>
class MultiPageResult;
}
class ClientDatabase;
class MainWindow : public QMainWindow
{
Q_OBJECT
private:
std::shared_ptr<tweeteria::Tweeteria> m_tweeteria;
DataModel* m_dataModel;
CentralWidget* m_centralWidget;
std::unique_ptr<ClientDatabase> m_database;
public:
MainWindow(std::shared_ptr<tweeteria::Tweeteria> tweeteria, tweeteria::User const& user);
~MainWindow();
void populateUsers();
CentralWidget* getCentralWidget();
signals:
void userInfoUpdate(tweeteria::UserId, bool is_friend);
void userTimelineUpdate(tweeteria::UserId);<|fim▁hole|> void onUserSelectionChange(tweeteria::UserId selected_user_id);
void onAdditionalTimelineTweetsRequest(tweeteria::UserId user, tweeteria::TweetId max_id);
void onUserTimelineUpdate(tweeteria::UserId user_id);
private:
void getUserIds_impl(std::shared_ptr<tweeteria::MultiPageResult<std::vector<tweeteria::UserId>>> mpres,
std::vector<tweeteria::UserId>&& acc);
void getUserDetails_impl(std::vector<tweeteria::User> const& new_users, bool is_friend);
void getUserTimeline_impl(tweeteria::UserId user, std::vector<tweeteria::Tweet> const& tweets);
void updateLastRead(tweeteria::UserId user_id, boost::optional<tweeteria::TweetId> const& last_read_id);
};
#endif<|fim▁end|> | void unreadForUserChanged(tweeteria::UserId, int);
public slots:
void markTweetAsRead(tweeteria::TweetId tweet_id, tweeteria::UserId user_id); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Sebastian Raschka 2016
#
# `siteinterlock` is a Python package for selecting near-native protein-ligand
# docking poses based upon the hypothesis that interfacial rigidification
# of both the protein and ligand proves to be an important characteristic of
# the native binding mode and is sensitive to the spatial coupling of
# interactions and bond-rotational degrees of freedom in the interface.
#
# Copyright (C) 2016 Michigan State University
# License: GPLv3
#
# SiteInterlock was developed in the
# Protein Structural Analysis & Design Laboratory
# (http://www.kuhnlab.bmb.msu.edu)
# Contact email: [email protected]
#
# Package author: Sebastian Raschka <http://sebastianraschka.com>
#
<|fim▁hole|><|fim▁end|> | from .hether import hether
__all__ = ["hether"] |
<|file_name|>data.js<|end_file_name|><|fim▁begin|>/*jshint node:true, indent:2, curly:false, eqeqeq:true, immed:true, latedef:true, newcap:true, noarg:true,
regexp:true, undef:true, strict:true, trailing:true, white:true */
/*global X:true */<|fim▁hole|>(function () {
"use strict";
var _ = X._;
/**
Defines the data route.
@extends X.Route
@class
*/
X.dataRoute = X.Route.create({
handle: function (xtr) {
var path, handler, session;
path = xtr.get("requestType");
handler = this.find(path);
if (!handler) {
xtr.error("Could not handle %@".f(path));
} else {
if (handler.needsSession) session = X.Session.create(xtr.get("data"));
handler.handle(xtr, session);
}
},
find: function (path) {
var ret = X.functorMap[path];
//console.log("find(): ", Object.keys(X.functorMap));
return ret;
},
handles: "data /data".w()
});
}());<|fim▁end|> | |
<|file_name|>beanstalkc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""beanstalkc - A beanstalkd Client Library for Python"""
import logging
import socket
import sys
__license__ = '''
Copyright (C) 2008-2016 Andreas Bolka
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
__version__ = '0.4.0'
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 11300
DEFAULT_PRIORITY = 2 ** 31
DEFAULT_TTR = 120
DEFAULT_TUBE_NAME = 'default'
class BeanstalkcException(Exception): pass
class UnexpectedResponse(BeanstalkcException): pass
class CommandFailed(BeanstalkcException): pass
class DeadlineSoon(BeanstalkcException): pass
class SocketError(BeanstalkcException):
@staticmethod
def wrap(wrapped_function, *args, **kwargs):
try:
return wrapped_function(*args, **kwargs)
except socket.error:
err = sys.exc_info()[1]
raise SocketError(err)
class Connection(object):
def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT, parse_yaml=True,
connect_timeout=socket.getdefaulttimeout()):
if parse_yaml is True:
try:
parse_yaml = __import__('yaml').load
except ImportError:
logging.error('Failed to load PyYAML, will not parse YAML')
parse_yaml = False
self._connect_timeout = connect_timeout
self._parse_yaml = parse_yaml or (lambda x: x)
self.host = host
self.port = port
self.connect()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def connect(self):
"""Connect to beanstalkd server."""
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.settimeout(self._connect_timeout)
SocketError.wrap(self._socket.connect, (self.host, self.port))
self._socket.settimeout(None)
self._socket_file = self._socket.makefile('rb')
def close(self):
"""Close connection to server."""
try:
self._socket.sendall('quit\r\n')
except socket.error:
pass
try:
self._socket.close()
except socket.error:
pass
def reconnect(self):
"""Re-connect to server."""
self.close()
self.connect()
def _interact(self, command, expected_ok, expected_err=[]):
SocketError.wrap(self._socket.sendall, command)
status, results = self._read_response()
if status in expected_ok:
return results
elif status in expected_err:
raise CommandFailed(command.split()[0], status, results)
else:
raise UnexpectedResponse(command.split()[0], status, results)
def _read_response(self):
line = SocketError.wrap(self._socket_file.readline)
if not line:
raise SocketError()
response = line.split()
return response[0], response[1:]
def _read_body(self, size):
body = SocketError.wrap(self._socket_file.read, size)
SocketError.wrap(self._socket_file.read, 2) # trailing crlf
if size > 0 and not body:
raise SocketError()
return body
def _interact_value(self, command, expected_ok, expected_err=[]):
return self._interact(command, expected_ok, expected_err)[0]
def _interact_job(self, command, expected_ok, expected_err, reserved=True):
jid, size = self._interact(command, expected_ok, expected_err)
body = self._read_body(int(size))
return Job(self, int(jid), body, reserved)
def _interact_yaml(self, command, expected_ok, expected_err=[]):
size, = self._interact(command, expected_ok, expected_err)
body = self._read_body(int(size))
return self._parse_yaml(body)
def _interact_peek(self, command):
try:
return self._interact_job(command, ['FOUND'], ['NOT_FOUND'], False)
except CommandFailed:
return None
# -- public interface --
def put(self, body, priority=DEFAULT_PRIORITY, delay=0, ttr=DEFAULT_TTR):
"""Put a job into the current tube. Returns job id."""
assert isinstance(body, str), 'Job body must be a str instance'
jid = self._interact_value('put %d %d %d %d\r\n%s\r\n' % (
priority, delay, ttr, len(body), body),
['INSERTED'],
['JOB_TOO_BIG', 'BURIED', 'DRAINING'])
return int(jid)
def reserve(self, timeout=None):
"""Reserve a job from one of the watched tubes, with optional timeout
in seconds. Returns a Job object, or None if the request times out."""
if timeout is not None:
command = 'reserve-with-timeout %d\r\n' % timeout
else:
command = 'reserve\r\n'
try:
return self._interact_job(command,
['RESERVED'],
['DEADLINE_SOON', 'TIMED_OUT'])
except CommandFailed:
exc = sys.exc_info()[1]
_, status, results = exc.args
if status == 'TIMED_OUT':
return None
elif status == 'DEADLINE_SOON':
raise DeadlineSoon(results)
def kick(self, bound=1):
"""Kick at most bound jobs into the ready queue."""
return int(self._interact_value('kick %d\r\n' % bound, ['KICKED']))
def kick_job(self, jid):
"""Kick a specific job into the ready queue."""
self._interact('kick-job %d\r\n' % jid, ['KICKED'], ['NOT_FOUND'])
def peek(self, jid):
"""Peek at a job. Returns a Job, or None."""
return self._interact_peek('peek %d\r\n' % jid)
def peek_ready(self):
"""Peek at next ready job. Returns a Job, or None."""
return self._interact_peek('peek-ready\r\n')
def peek_delayed(self):
"""Peek at next delayed job. Returns a Job, or None."""
return self._interact_peek('peek-delayed\r\n')
def peek_buried(self):
"""Peek at next buried job. Returns a Job, or None."""
return self._interact_peek('peek-buried\r\n')
def tubes(self):
"""Return a list of all existing tubes."""
return self._interact_yaml('list-tubes\r\n', ['OK'])
def using(self):
"""Return the tube currently being used."""
return self._interact_value('list-tube-used\r\n', ['USING'])
def use(self, name):
"""Use a given tube."""
return self._interact_value('use %s\r\n' % name, ['USING'])
def watching(self):
"""Return a list of all tubes being watched."""
return self._interact_yaml('list-tubes-watched\r\n', ['OK'])
def watch(self, name):
"""Watch a given tube."""
return int(self._interact_value('watch %s\r\n' % name, ['WATCHING']))
def ignore(self, name):
"""Stop watching a given tube."""
try:
return int(self._interact_value('ignore %s\r\n' % name,
['WATCHING'],
['NOT_IGNORED']))
except CommandFailed:
# Tried to ignore the only tube in the watchlist, which failed.
return 0
def stats(self):
"""Return a dict of beanstalkd statistics."""
return self._interact_yaml('stats\r\n', ['OK'])
def stats_tube(self, name):
"""Return a dict of stats about a given tube."""
return self._interact_yaml('stats-tube %s\r\n' % name,
['OK'],
['NOT_FOUND'])
def pause_tube(self, name, delay):
"""Pause a tube for a given delay time, in seconds."""
self._interact('pause-tube %s %d\r\n' % (name, delay),
['PAUSED'],
['NOT_FOUND'])
# -- job interactors --
def delete(self, jid):
"""Delete a job, by job id."""
self._interact('delete %d\r\n' % jid, ['DELETED'], ['NOT_FOUND'])
def release(self, jid, priority=DEFAULT_PRIORITY, delay=0):
"""Release a reserved job back into the ready queue."""
self._interact('release %d %d %d\r\n' % (jid, priority, delay),
['RELEASED', 'BURIED'],
['NOT_FOUND'])
def bury(self, jid, priority=DEFAULT_PRIORITY):
"""Bury a job, by job id."""<|fim▁hole|> def touch(self, jid):
"""Touch a job, by job id, requesting more time to work on a reserved
job before it expires."""
self._interact('touch %d\r\n' % jid, ['TOUCHED'], ['NOT_FOUND'])
def stats_job(self, jid):
"""Return a dict of stats about a job, by job id."""
return self._interact_yaml('stats-job %d\r\n' % jid,
['OK'],
['NOT_FOUND'])
class Job(object):
def __init__(self, conn, jid, body, reserved=True):
self.conn = conn
self.jid = jid
self.body = body
self.reserved = reserved
def _priority(self):
stats = self.stats()
if isinstance(stats, dict):
return stats['pri']
return DEFAULT_PRIORITY
# -- public interface --
def delete(self):
"""Delete this job."""
self.conn.delete(self.jid)
self.reserved = False
def release(self, priority=None, delay=0):
"""Release this job back into the ready queue."""
if self.reserved:
self.conn.release(self.jid, priority or self._priority(), delay)
self.reserved = False
def bury(self, priority=None):
"""Bury this job."""
if self.reserved:
self.conn.bury(self.jid, priority or self._priority())
self.reserved = False
def kick(self):
"""Kick this job alive."""
self.conn.kick_job(self.jid)
def touch(self):
"""Touch this reserved job, requesting more time to work on it before
it expires."""
if self.reserved:
self.conn.touch(self.jid)
def stats(self):
"""Return a dict of stats about this job."""
return self.conn.stats_job(self.jid)
if __name__ == '__main__':
import nose
nose.main(argv=['nosetests', '-c', '.nose.cfg'])<|fim▁end|> | self._interact('bury %d %d\r\n' % (jid, priority),
['BURIED'],
['NOT_FOUND'])
|
<|file_name|>dstr-async-gen-meth-dflt-obj-ptrn-id-init-skipped.js<|end_file_name|><|fim▁begin|>// This file was procedurally generated from the following sources:
// - src/dstr-binding/obj-ptrn-id-init-skipped.case
// - src/dstr-binding/default/cls-decl-async-gen-meth-dflt.template
/*---
description: Destructuring initializer is not evaluated when value is not `undefined` (class declaration async generator method (default parameters))
esid: sec-class-definitions-runtime-semantics-evaluation
features: [async-iteration]
flags: [generated, async]
info: |
ClassDeclaration : class BindingIdentifier ClassTail
1. Let className be StringValue of BindingIdentifier.
2. Let value be the result of ClassDefinitionEvaluation of ClassTail with
argument className.
[...]
14.5.14 Runtime Semantics: ClassDefinitionEvaluation
21. For each ClassElement m in order from methods
a. If IsStatic of m is false, then
i. Let status be the result of performing
PropertyDefinitionEvaluation for m with arguments proto and
false.
[...]
Runtime Semantics: PropertyDefinitionEvaluation
AsyncGeneratorMethod :
async [no LineTerminator here] * PropertyName ( UniqueFormalParameters )
{ AsyncGeneratorBody }
1. Let propKey be the result of evaluating PropertyName.
2. ReturnIfAbrupt(propKey).
3. If the function code for this AsyncGeneratorMethod is strict mode code, let strict be true.
Otherwise let strict be false.
4. Let scope be the running execution context's LexicalEnvironment.
5. Let closure be ! AsyncGeneratorFunctionCreate(Method, UniqueFormalParameters,
AsyncGeneratorBody, scope, strict).
[...]
13.3.3.7 Runtime Semantics: KeyedBindingInitialization
SingleNameBinding : BindingIdentifier Initializeropt
[...]
6. If Initializer is present and v is undefined, then
[...]
[...]
---*/
var initCount = 0;
function counter() {
initCount += 1;
}
var callCount = 0;
class C {
async *method({ w = counter(), x = counter(), y = counter(), z = counter() } = { w: null, x: 0, y: false, z: '' }) {
assert.sameValue(w, null);
assert.sameValue(x, 0);
assert.sameValue(y, false);
assert.sameValue(z, '');
assert.sameValue(initCount, 0);
callCount = callCount + 1;<|fim▁hole|>new C().method().next().then(() => {
assert.sameValue(callCount, 1, 'invoked exactly once');
}).then($DONE, $DONE);<|fim▁end|> | }
};
|
<|file_name|>component.py<|end_file_name|><|fim▁begin|># Copyright © 2020, Joseph Berry, Rico Tabor ([email protected])
# OpenDrop is released under the GNU GPL License. You are free to
# modify and distribute the code, but always under the same license
#
# If you use this software in your research, please cite the following
# journal articles:
#
# J. D. Berry, M. J. Neeson, R. R. Dagastine, D. Y. C. Chan and
# R. F. Tabor, Measurement of surface and interfacial tension using
# pendant drop tensiometry. Journal of Colloid and Interface Science 454
# (2015) 226–237. https://doi.org/10.1016/j.jcis.2015.05.012
#
# E. Huang, T. Denning, A. Skoufis, J. Qi, R. R. Dagastine, R. F. Tabor
# and J. D. Berry, OpenDrop: Open-source software for pendant drop
# tensiometry & contact angle measurements, submitted to the Journal of
# Open Source Software
#
# These citations help us not only to understand who is using and
# developing OpenDrop, and for what purpose, but also to justify
# continued development of this code and other open source resources.
#
# OpenDrop is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this software. If not, see <https://www.gnu.org/licenses/>.
from typing import Any, Tuple
from gi.repository import Gtk, Gdk
from opendrop.app import keyboard
from opendrop.app.common.image_processing.image_processor import ImageProcessorPluginViewContext
from opendrop.mvp import ComponentSymbol, View, Presenter
from opendrop.utility.bindable.gextension import GObjectPropertyBindable
from opendrop.geometry import Vector2, Line2
from opendrop.widgets.canvas import LineArtist, CircleArtist
from .model import DefineLinePluginModel
define_line_plugin_cs = ComponentSymbol() # type: ComponentSymbol[None]
@define_line_plugin_cs.view(options=['view_context', 'tool_id', 'color', 'z_index'])
class DefineLinePluginView(View['DefineLinePluginPresenter', None]):
def _do_init(
self,
view_context: ImageProcessorPluginViewContext,
tool_id: Any,
color: Tuple[float, float, float],
z_index: int,
) -> None:
self._view_context = view_context
self._tool_ref = view_context.get_tool_item(tool_id)
view_context.canvas.connect(
'cursor-up',
lambda canvas, pos: self.presenter.cursor_up(pos),
)
view_context.canvas.connect(
'cursor-down',
lambda canvas, pos: self.presenter.cursor_down(pos),
)
view_context.canvas.connect(
'cursor-motion',
lambda canvas, pos: self.presenter.cursor_move(pos),
)
view_context.canvas.connect(
'key-press-event',
self._hdl_canvas_key_press_event,
)
self.bn_tool_button_is_active = self._tool_ref.bn_is_active
self._canvas = view_context.canvas
self._defined_artist = LineArtist(
stroke_color=color,
stroke_width=1,
scale_strokes=True,
)
self._canvas.add_artist(self._defined_artist, z_index=z_index)
self._dragging_artist = LineArtist(<|fim▁hole|> stroke_width=1,
scale_strokes=True,
)
self._canvas.add_artist(self._dragging_artist, z_index=z_index)
self._control_point_artist = CircleArtist(
fill_color=color,
scale_radius=True,
)
self._canvas.add_artist(self._control_point_artist, z_index=z_index)
self.bn_defined = GObjectPropertyBindable(
g_obj=self._defined_artist,
prop_name='line',
)
self.bn_dragging = GObjectPropertyBindable(
g_obj=self._dragging_artist,
prop_name='line',
)
self.presenter.view_ready()
def show_control_point(self, xc: float, yc: float) -> None:
self._control_point_artist.props.xc = xc
self._control_point_artist.props.yc = yc
self._control_point_artist.props.radius = 2.0
def hide_control_point(self) -> None:
self._control_point_artist.props.radius = 0.0
def _hdl_canvas_key_press_event(self, widget: Gtk.Widget, event: Gdk.EventKey) -> bool:
self.presenter.key_press(
keyboard.KeyEvent(
key=keyboard.Key.from_value(event.keyval),
modifier=int(event.state)
)
)
# Stop event propagation.
return True
def _do_destroy(self) -> None:
self._canvas.remove_artist(self._defined_artist)
self._canvas.remove_artist(self._dragging_artist)
@define_line_plugin_cs.presenter(options=['model'])
class DefineLinePluginPresenter(Presenter['DefineLinePluginView']):
def _do_init(self, model: DefineLinePluginModel) -> None:
self._model = model
self.__data_bindings = []
self.__event_connections = []
def view_ready(self) -> None:
self.__data_bindings.extend([
self._model.bn_line.bind(
self.view.bn_defined
),
])
self.__event_connections.extend([
self.view.bn_tool_button_is_active.on_changed.connect(
self._hdl_tool_button_is_active_changed
),
])
self._hdl_tool_button_is_active_changed()
def _hdl_tool_button_is_active_changed(self) -> None:
if self._model.is_defining and not self.view.bn_tool_button_is_active.get():
self._model.discard_define()
def cursor_down(self, pos: Vector2[float]) -> None:
if not self.view.bn_tool_button_is_active.get():
return
if self._model.is_defining:
self._model.discard_define()
self._model.begin_define(pos)
self._update_dragging_indicator(pos)
def cursor_up(self, pos: Vector2[float]) -> None:
if not self.view.bn_tool_button_is_active.get():
return
if not self._model.is_defining:
return
self._model.commit_define(pos)
self._update_dragging_indicator(pos)
def cursor_move(self, pos: Vector2[float]) -> None:
self._update_dragging_indicator(pos)
def key_press(self, event: keyboard.KeyEvent) -> None:
if not self.view.bn_tool_button_is_active.get():
return
if self._model.is_defining:
# User is currently using mouse to define
return
if event.key is keyboard.Key.Up:
self._model.nudge_up()
elif event.key is keyboard.Key.Down:
self._model.nudge_down()
elif event.key is keyboard.Key.Left:
self._model.nudgerot_anticlockwise()
elif event.key is keyboard.Key.Right:
self._model.nudgerot_clockwise()
def _update_dragging_indicator(self, current_cursor_pos: Vector2[float]) -> None:
if not self._model.is_defining:
self.view.bn_dragging.set(None)
self.view.hide_control_point()
return
pt0 = self._model.begin_define_pos
pt1 = current_cursor_pos
if pt0 == pt1:
self.view.bn_dragging.set(None)
self.view.hide_control_point()
return
self.view.bn_dragging.set(Line2(
pt0=pt0,
pt1=pt1,
))
self.view.show_control_point(*self._model.begin_define_pos)
def _do_destroy(self) -> None:
for db in self.__data_bindings:
db.unbind()
for ec in self.__event_connections:
ec.disconnect()<|fim▁end|> | stroke_color=color, |
<|file_name|>class_mix_e_r_p_1_1_net_1_1_core_1_1_modules_1_1_sales_1_1_setup_1_1_bonus_slabs.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var class_mix_e_r_p_1_1_net_1_1_core_1_1_modules_1_1_sales_1_1_setup_1_1_bonus_slabs =
[
[ "OnControlLoad", "class_mix_e_r_p_1_1_net_1_1_core_1_1_modules_1_1_sales_1_1_setup_1_1_bonus_slabs.html#ab16b782e4c9f10ebbbc47fd52dbc6ae1", null ],
[ "ScrudPlaceholder", "class_mix_e_r_p_1_1_net_1_1_core_1_1_modules_1_1_sales_1_1_setup_1_1_bonus_slabs.html#a3402588f607d9920f295a38336e9ca8d", null ]
];<|fim▁end|> | |
<|file_name|>mlfutil.py<|end_file_name|><|fim▁begin|># -*- coding=utf-8 -*-
import cPickle
import numpy as np
import os
import sys
"""Common tools for this project.
Utils are defined in this module for sharing.
"""
PROJ_DIR = os.path.split(os.path.realpath(__file__))[0]
def draw_progress(iteration, total, pref='Progress:', suff='',
decimals=1, barlen=50):
"""Call in a loop to create terminal progress bar
"""
formatStr = "{0:." + str(decimals) + "f}"
pcts = formatStr.format(100 * (iteration / float(total)))
filledlen = int(round(barlen * iteration / float(total)))
bar = '█' * filledlen + '-' * (barlen - filledlen)
out_str = '\r%s |%s| %s%s %s' % (pref, bar, pcts, '%', suff)
out_str = '\x1b[0;34;40m' + out_str + '\x1b[0m'
sys.stdout.write(out_str),
if iteration == total:
sys.stdout.write('\n')
sys.stdout.flush()
class CatEncoder(object):
"""Transform category to global uniq index
"""
def __init__(self):
self.cats = {}
def build_dict(self, ifnames, columns):
"""need override
ifnames are ',' separated
fields are ',' separated, from 0. means from ... to
"""
self.cats = {}
cat_idx = 0
ifnames = ifnames.split(',')
cols = columns.split(',')
col_st = int(cols[0])
col_ed = int(cols[1]) if len(cols) > 1 else -1
for ifname in ifnames:
with open(ifname) as f:
data = map(lambda l: l.strip('\n').split('\t'), f.readlines())
for fields in data:
for idx in xrange(col_st, len(fields) if col_ed < 0 else
col_ed+1):
if fields[idx] not in self.cats and fields[idx] != '':
self.cats[fields[idx]] = cat_idx
cat_idx += 1
def save_dict(self, ofname):
with open(ofname, 'w') as fo:
for key in self.cats:
print >> fo, \
'\t'.join([key.encode('utf8'), str(self.cats[key])])
def load_dict(self, dfname):
self.cats = {}
with open(dfname) as f:
data = [l.strip('\n').decode('utf8').split('\t')
for l in f.readlines()]
for fields in data:
self.cats[fields[0]] = int(fields[1])
def n_cat(self):<|fim▁hole|> def cat2idx(self, cat):
if cat in self.cats:
return self.cats[cat]
else:
return -1
def cat2onehot(self, cat, missing=False):
idx = self.cat2idx(cat)
if missing:
res = [0] * (self.n_cat() + 1)
idx = idx if idx >= 0 else (len(res) - 1)
res[idx] = 1
return res
else:
            res = [0] * self.n_cat()
            if idx >= 0:
                res[idx] = 1
return res
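# illustrative usage with a hypothetical tab-separated file:
#   enc = CatEncoder()
#   enc.build_dict('data.tsv', '2,4')
#   enc.cat2idx('some_value')  # -> global index, or -1 if unseen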
class PortEncoder(CatEncoder):
def init(self, ifnames='data_all/data_all.tsv', cols='11,11'):
self.build_dict(ifnames, cols)
def encode(self, port):
return self.cat2onehot(port, missing=True)
class CharEncoder(CatEncoder):
def build_dict(self, ifname):
"""PAD: 0
UNK: -1
"""
self.cats = {} # clean inner dict
cat_idx = 1
with open(ifname) as f:
data = [x.strip('\n').split('\t')[1] for x in f.readlines()]
for sent in data:
for char in sent.decode('utf8'):
if char not in self.cats:
self.cats[char] = cat_idx
cat_idx += 1
self.cats['UNK'] = cat_idx
def fill_missing_value(rec_fields):
for idx, col in enumerate(rec_fields):
if col == '':
rec_fields[idx] = '-999.0'
return rec_fields
if __name__ == '__main__':
print "PROJ_DIR:\t" + PROJ_DIR
from time import sleep
for i in range(50):
sleep(0.05)
draw_progress(i, 49, pref='Progress:')<|fim▁end|> | return len(self.cats)
|
<|file_name|>fragment.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Fragment` type, which represents the leaves of the layout tree.
#![deny(unsafe_code)]
use canvas_traits::CanvasMsg;
use context::LayoutContext;
use euclid::{Point2D, Rect, Size2D};
use floats::ClearType;
use flow;
use flow::Flow;
use flow_ref::{self, FlowRef};
use gfx;
use gfx::display_list::{BLUR_INFLATION_FACTOR, OpaqueNode};
use gfx::text::glyph::CharIndex;
use gfx::text::text_run::{TextRun, TextRunSlice};
use incremental::{self, RestyleDamage};
use inline::{FIRST_FRAGMENT_OF_ELEMENT, InlineFragmentContext, InlineFragmentNodeInfo};
use inline::{InlineMetrics, LAST_FRAGMENT_OF_ELEMENT};
use ipc_channel::ipc::IpcSender;
use layout_debug;
use model::{self, IntrinsicISizes, IntrinsicISizesContribution, MaybeAuto, specified};
use msg::constellation_msg::{ConstellationChan, Msg, PipelineId, SubpageId};
use net_traits::image::base::Image;
use net_traits::image_cache_task::UsePlaceholder;
use rustc_serialize::{Encodable, Encoder};
use std::borrow::ToOwned;
use std::cmp::{max, min};
use std::collections::LinkedList;
use std::fmt;
use std::sync::{Arc, Mutex};
use string_cache::Atom;
use style::computed_values::content::ContentItem;
use style::computed_values::{border_collapse, clear, mix_blend_mode, overflow_wrap, overflow_x};
use style::computed_values::{position, text_align, text_decoration, transform_style, white_space};
use style::computed_values::{word_break, z_index};
use style::properties::ComputedValues;
use style::values::computed::{LengthOrPercentage, LengthOrPercentageOrAuto};
use style::values::computed::{LengthOrPercentageOrNone};
use text;
use text::TextRunScanner;
use url::Url;
use util;
use util::geometry::{Au, ZERO_POINT};
use util::logical_geometry::{LogicalRect, LogicalSize, LogicalMargin, WritingMode};
use util::range::*;
use util::str::{is_whitespace, slice_chars};
use wrapper::{PseudoElementType, ThreadSafeLayoutNode};
/// Fragments (`struct Fragment`) are the leaves of the layout tree. They cannot position
/// themselves. In general, fragments do not have a simple correspondence with CSS fragments in the
/// specification:
///
/// * Several fragments may correspond to the same CSS box or DOM node. For example, a CSS text box
/// broken across two lines is represented by two fragments.
///
/// * Some CSS fragments are not created at all, such as some anonymous block fragments induced by
/// inline fragments with block-level sibling fragments. In that case, Servo uses an `InlineFlow`
/// with `BlockFlow` siblings; the `InlineFlow` is block-level, but not a block container. It is
/// positioned as if it were a block fragment, but its children are positioned according to
/// inline flow.
///
/// A `SpecificFragmentInfo::Generic` is an empty fragment that contributes only borders, margins,
/// padding, and backgrounds. It is analogous to a CSS nonreplaced content box.
///
/// A fragment's type influences how its styles are interpreted during layout. For example,
/// replaced content such as images are resized differently from tables, text, or other content.
/// Different types of fragments may also contain custom data; for example, text fragments contain
/// text.
///
/// Do not add fields to this structure unless they're really really mega necessary! Fragments get
/// moved around a lot and thus their size impacts performance of layout quite a bit.
///
/// FIXME(#2260, pcwalton): This can be slimmed down some by (at least) moving `inline_context`
/// to be on `InlineFlow` only.
#[derive(Clone)]
pub struct Fragment {
/// An opaque reference to the DOM node that this `Fragment` originates from.
pub node: OpaqueNode,
/// The CSS style of this fragment.
pub style: Arc<ComputedValues>,
/// The position of this fragment relative to its owning flow. The size includes padding and
/// border, but not margin.
///
/// NB: This does not account for relative positioning.
/// NB: Collapsed borders are not included in this.
pub border_box: LogicalRect<Au>,
/// The sum of border and padding; i.e. the distance from the edge of the border box to the
/// content edge of the fragment.
pub border_padding: LogicalMargin<Au>,
/// The margin of the content box.
pub margin: LogicalMargin<Au>,
/// Info specific to the kind of fragment. Keep this enum small.
pub specific: SpecificFragmentInfo,
/// Holds the style context information for fragments that are part of an inline formatting
/// context.
pub inline_context: Option<InlineFragmentContext>,
/// How damaged this fragment is since last reflow.
pub restyle_damage: RestyleDamage,
/// The pseudo-element that this fragment represents.
pub pseudo: PseudoElementType<()>,
/// Various flags for this fragment.
pub flags: FragmentFlags,
/// A debug ID that is consistent for the life of this fragment (via transform etc).
pub debug_id: u16,
}
impl Encodable for Fragment {
fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> {
e.emit_struct("fragment", 0, |e| {
try!(e.emit_struct_field("id", 0, |e| self.debug_id().encode(e)));
try!(e.emit_struct_field("border_box", 1, |e| self.border_box.encode(e)));
e.emit_struct_field("margin", 2, |e| self.margin.encode(e))
})
}
}
/// Info specific to the kind of fragment.
///
/// Keep this enum small. As in, no more than one word. Or pcwalton will yell at you.
#[derive(Clone)]
pub enum SpecificFragmentInfo {
Generic,
/// A piece of generated content that cannot be resolved into `ScannedText` until the generated
/// content resolution phase (e.g. an ordered list item marker).
GeneratedContent(Box<GeneratedContentInfo>),
Iframe(Box<IframeFragmentInfo>),
Image(Box<ImageFragmentInfo>),
Canvas(Box<CanvasFragmentInfo>),
/// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was
/// declared with `display: inline;`.
InlineAbsoluteHypothetical(InlineAbsoluteHypotheticalFragmentInfo),
InlineBlock(InlineBlockFragmentInfo),
/// An inline fragment that establishes an absolute containing block for its descendants (i.e.
/// a positioned inline fragment).
InlineAbsolute(InlineAbsoluteFragmentInfo),
ScannedText(Box<ScannedTextFragmentInfo>),
Table,
TableCell,
TableColumn(TableColumnFragmentInfo),
TableRow,
TableWrapper,
UnscannedText(UnscannedTextFragmentInfo),
}
impl SpecificFragmentInfo {
fn restyle_damage(&self) -> RestyleDamage {
let flow =
match *self {
SpecificFragmentInfo::Canvas(_) |
SpecificFragmentInfo::GeneratedContent(_) |
SpecificFragmentInfo::Iframe(_) |
SpecificFragmentInfo::Image(_) |
SpecificFragmentInfo::ScannedText(_) |
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableColumn(_) |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableWrapper |
SpecificFragmentInfo::UnscannedText(_) |
SpecificFragmentInfo::Generic => return RestyleDamage::empty(),
SpecificFragmentInfo::InlineAbsoluteHypothetical(ref info) => &info.flow_ref,
SpecificFragmentInfo::InlineAbsolute(ref info) => &info.flow_ref,
SpecificFragmentInfo::InlineBlock(ref info) => &info.flow_ref,
};
flow::base(&**flow).restyle_damage
}
pub fn get_type(&self) -> &'static str {
match *self {
SpecificFragmentInfo::Canvas(_) => "SpecificFragmentInfo::Canvas",
SpecificFragmentInfo::Generic => "SpecificFragmentInfo::Generic",
SpecificFragmentInfo::GeneratedContent(_) => "SpecificFragmentInfo::GeneratedContent",
SpecificFragmentInfo::Iframe(_) => "SpecificFragmentInfo::Iframe",
SpecificFragmentInfo::Image(_) => "SpecificFragmentInfo::Image",
SpecificFragmentInfo::InlineAbsolute(_) => "SpecificFragmentInfo::InlineAbsolute",
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => {
"SpecificFragmentInfo::InlineAbsoluteHypothetical"
}
SpecificFragmentInfo::InlineBlock(_) => "SpecificFragmentInfo::InlineBlock",
SpecificFragmentInfo::ScannedText(_) => "SpecificFragmentInfo::ScannedText",
SpecificFragmentInfo::Table => "SpecificFragmentInfo::Table",
SpecificFragmentInfo::TableCell => "SpecificFragmentInfo::TableCell",
SpecificFragmentInfo::TableColumn(_) => "SpecificFragmentInfo::TableColumn",
SpecificFragmentInfo::TableRow => "SpecificFragmentInfo::TableRow",
SpecificFragmentInfo::TableWrapper => "SpecificFragmentInfo::TableWrapper",
SpecificFragmentInfo::UnscannedText(_) => "SpecificFragmentInfo::UnscannedText",
}
}
}
impl fmt::Debug for SpecificFragmentInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
SpecificFragmentInfo::ScannedText(ref info) => {
write!(f, " \"{}\"", slice_chars(&*info.run.text, info.range.begin().get() as usize,
info.range.end().get() as usize))
}
SpecificFragmentInfo::UnscannedText(ref info) => {
write!(f, " \"{}\"", info.text)
}
_ => Ok(())
}
}
}
/// Clamp a value obtained from style_length, based on min / max lengths.
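/// Note: because the result is `max(min_size, min(size, max_size))`, a
/// computed min that exceeds the max wins (e.g. `min: 100px` with
/// `max: 50px` clamps to 100px), matching CSS 2.1 § 10.4.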
fn clamp_size(size: Au,
min_size: LengthOrPercentage,
max_size: LengthOrPercentageOrNone,
container_size: Au)
-> Au {
let min_size = model::specified(min_size, container_size);
let max_size = model::specified_or_none(max_size, container_size);
max(min_size, match max_size {
None => size,
Some(max_size) => min(size, max_size),
})
}
/// Information for generated content.
#[derive(Clone)]
pub enum GeneratedContentInfo {
ListItem,
ContentItem(ContentItem),
}
/// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was declared
/// with `display: inline;`.
///
/// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout
/// can clone it.
#[derive(Clone)]
pub struct InlineAbsoluteHypotheticalFragmentInfo {
pub flow_ref: FlowRef,
}
impl InlineAbsoluteHypotheticalFragmentInfo {
pub fn new(flow_ref: FlowRef) -> InlineAbsoluteHypotheticalFragmentInfo {
InlineAbsoluteHypotheticalFragmentInfo {
flow_ref: flow_ref,
}
}
}
/// A fragment that represents an inline-block element.
///
/// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout
/// can clone it.
#[derive(Clone)]
pub struct InlineBlockFragmentInfo {
pub flow_ref: FlowRef,
}
impl InlineBlockFragmentInfo {
pub fn new(flow_ref: FlowRef) -> InlineBlockFragmentInfo {
InlineBlockFragmentInfo {
flow_ref: flow_ref,
}
}
}
/// An inline fragment that establishes an absolute containing block for its descendants (i.e.
/// a positioned inline fragment).
///
/// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout
/// can clone it.
#[derive(Clone)]
pub struct InlineAbsoluteFragmentInfo {
pub flow_ref: FlowRef,
}
impl InlineAbsoluteFragmentInfo {
pub fn new(flow_ref: FlowRef) -> InlineAbsoluteFragmentInfo {
InlineAbsoluteFragmentInfo {
flow_ref: flow_ref,
}
}
}
#[derive(Clone)]
pub struct CanvasFragmentInfo {
pub replaced_image_fragment_info: ReplacedImageFragmentInfo,
pub renderer_id: Option<usize>,
pub ipc_renderer: Option<Arc<Mutex<IpcSender<CanvasMsg>>>>,
}
impl CanvasFragmentInfo {
pub fn new(node: &ThreadSafeLayoutNode) -> CanvasFragmentInfo {
CanvasFragmentInfo {
replaced_image_fragment_info: ReplacedImageFragmentInfo::new(node,
Some(Au::from_px(node.canvas_width() as i32)),
Some(Au::from_px(node.canvas_height() as i32))),
renderer_id: node.canvas_renderer_id(),
ipc_renderer: node.canvas_ipc_renderer()
.map(|renderer| Arc::new(Mutex::new(renderer))),
}
}
/// Returns the original inline-size of the canvas.
pub fn canvas_inline_size(&self) -> Au {
self.replaced_image_fragment_info.dom_inline_size.unwrap_or(Au(0))
}
/// Returns the original block-size of the canvas.
pub fn canvas_block_size(&self) -> Au {
self.replaced_image_fragment_info.dom_block_size.unwrap_or(Au(0))
}
}
/// A fragment that represents a replaced content image and its accompanying borders, shadows, etc.
#[derive(Clone)]
pub struct ImageFragmentInfo {
/// The image held within this fragment.
pub replaced_image_fragment_info: ReplacedImageFragmentInfo,
pub image: Option<Arc<Image>>,
}
impl ImageFragmentInfo {
/// Creates a new image fragment from the given URL and local image cache.
///
/// FIXME(pcwalton): The fact that image fragments store the cache in the fragment makes little
/// sense to me.
pub fn new(node: &ThreadSafeLayoutNode,
url: Option<Url>,
layout_context: &LayoutContext)
-> ImageFragmentInfo {
fn convert_length(node: &ThreadSafeLayoutNode, name: &Atom) -> Option<Au> {
let element = node.as_element();
element.get_attr(&ns!(""), name)
.and_then(|string| string.parse().ok())
.map(Au::from_px)
}
let image = url.and_then(|url| {
layout_context.get_or_request_image(url, UsePlaceholder::Yes)
});
ImageFragmentInfo {
replaced_image_fragment_info: ReplacedImageFragmentInfo::new(node,
convert_length(node, &atom!("width")),
convert_length(node, &atom!("height"))),
image: image,
}
}
/// Returns the original inline-size of the image.
pub fn image_inline_size(&mut self) -> Au {
match self.image {
Some(ref image) => {
Au::from_px(if self.replaced_image_fragment_info.writing_mode_is_vertical {
image.height
} else {
image.width
} as i32)
}
None => Au(0)
}
}
/// Returns the original block-size of the image.
pub fn image_block_size(&mut self) -> Au {
match self.image {
Some(ref image) => {
Au::from_px(if self.replaced_image_fragment_info.writing_mode_is_vertical {
image.width
} else {
image.height
} as i32)
}
None => Au(0)
}
}
/// Tile an image
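    /// (e.g. with `*position` at 0px, `virtual_position` at 25px and
    /// `image_size` of 10, three tiles are needed: the position is pulled
    /// back to -5px and the size grows by 5px so a tile edge lands on the
    /// virtual position)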
pub fn tile_image(position: &mut Au, size: &mut Au, virtual_position: Au, image_size: u32) {
// Avoid division by zero below!
let image_size = image_size as i32;
if image_size == 0 {
return
}
let delta_pixels = (virtual_position - *position).to_px();
let tile_count = (delta_pixels + image_size - 1) / image_size;
let offset = Au::from_px(image_size * tile_count);
let new_position = virtual_position - offset;
*size = *position - new_position + *size;
*position = new_position;
}
}
#[derive(Clone)]
pub struct ReplacedImageFragmentInfo {
pub computed_inline_size: Option<Au>,
pub computed_block_size: Option<Au>,
pub dom_inline_size: Option<Au>,
pub dom_block_size: Option<Au>,
pub writing_mode_is_vertical: bool,
}
impl ReplacedImageFragmentInfo {
pub fn new(node: &ThreadSafeLayoutNode,
dom_width: Option<Au>,
dom_height: Option<Au>) -> ReplacedImageFragmentInfo {
let is_vertical = node.style().writing_mode.is_vertical();
ReplacedImageFragmentInfo {
computed_inline_size: None,
computed_block_size: None,
dom_inline_size: if is_vertical {
dom_height
} else {
dom_width
},
dom_block_size: if is_vertical {
dom_width
} else {
dom_height
},
writing_mode_is_vertical: is_vertical,
}
}
/// Returns the calculated inline-size of the image, accounting for the inline-size attribute.
pub fn computed_inline_size(&self) -> Au {
self.computed_inline_size.expect("image inline_size is not computed yet!")
}
/// Returns the calculated block-size of the image, accounting for the block-size attribute.
pub fn computed_block_size(&self) -> Au {
self.computed_block_size.expect("image block_size is not computed yet!")
}
// Return used value for inline-size or block-size.
//
// `dom_length`: inline-size or block-size as specified in the `img` tag.
// `style_length`: inline-size as given in the CSS
pub fn style_length(style_length: LengthOrPercentageOrAuto,
dom_length: Option<Au>,
container_size: Option<Au>) -> MaybeAuto {
match (style_length, dom_length, container_size) {
(LengthOrPercentageOrAuto::Length(length), _, _) => MaybeAuto::Specified(length),
(LengthOrPercentageOrAuto::Percentage(pc), _, Some(container_size)) => {
MaybeAuto::Specified(container_size.scale_by(pc))
}
(LengthOrPercentageOrAuto::Percentage(_), _, None) => MaybeAuto::Auto,
(LengthOrPercentageOrAuto::Calc(calc), _, Some(container_size)) => {
MaybeAuto::Specified(calc.length() + container_size.scale_by(calc.percentage()))
}
(LengthOrPercentageOrAuto::Calc(_), _, None) => MaybeAuto::Auto,
(LengthOrPercentageOrAuto::Auto, Some(dom_length), _) => MaybeAuto::Specified(dom_length),
(LengthOrPercentageOrAuto::Auto, None, _) => MaybeAuto::Auto,<|fim▁hole|>
pub fn calculate_replaced_inline_size(&mut self,
style: &ComputedValues,
noncontent_inline_size: Au,
container_inline_size: Au,
fragment_inline_size: Au,
fragment_block_size: Au)
-> Au {
let style_inline_size = style.content_inline_size();
let style_block_size = style.content_block_size();
let style_min_inline_size = style.min_inline_size();
let style_max_inline_size = style.max_inline_size();
let style_min_block_size = style.min_block_size();
let style_max_block_size = style.max_block_size();
// TODO(ksh8281): compute border,margin
let inline_size = ReplacedImageFragmentInfo::style_length(
style_inline_size,
self.dom_inline_size,
Some(container_inline_size));
let inline_size = match inline_size {
MaybeAuto::Auto => {
let intrinsic_width = fragment_inline_size;
let intrinsic_height = fragment_block_size;
if intrinsic_height == Au(0) {
intrinsic_width
} else {
let ratio = intrinsic_width.to_f32_px() /
intrinsic_height.to_f32_px();
let specified_height = ReplacedImageFragmentInfo::style_length(
style_block_size,
self.dom_block_size,
None);
let specified_height = match specified_height {
MaybeAuto::Auto => intrinsic_height,
MaybeAuto::Specified(h) => h,
};
let specified_height = clamp_size(specified_height,
style_min_block_size,
style_max_block_size,
Au(0));
Au::from_f32_px(specified_height.to_f32_px() * ratio)
}
},
MaybeAuto::Specified(w) => w,
};
let inline_size = clamp_size(inline_size,
style_min_inline_size,
style_max_inline_size,
container_inline_size);
self.computed_inline_size = Some(inline_size);
inline_size + noncontent_inline_size
}
pub fn calculate_replaced_block_size(&mut self,
style: &ComputedValues,
noncontent_block_size: Au,
containing_block_block_size: Option<Au>,
fragment_inline_size: Au,
fragment_block_size: Au)
-> Au {
// TODO(ksh8281): compute border,margin,padding
let style_block_size = style.content_block_size();
let style_min_block_size = style.min_block_size();
let style_max_block_size = style.max_block_size();
let inline_size = self.computed_inline_size();
let block_size = ReplacedImageFragmentInfo::style_length(
style_block_size,
self.dom_block_size,
containing_block_block_size);
let block_size = match block_size {
MaybeAuto::Auto => {
let intrinsic_width = fragment_inline_size;
let intrinsic_height = fragment_block_size;
let scale = intrinsic_width.to_f32_px() / inline_size.to_f32_px();
Au::from_f32_px(intrinsic_height.to_f32_px() / scale)
},
MaybeAuto::Specified(h) => {
h
}
};
let block_size = clamp_size(block_size,
style_min_block_size,
style_max_block_size,
Au(0));
self.computed_block_size = Some(block_size);
block_size + noncontent_block_size
}
}
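// Typical call order (illustrative; the identifiers below are placeholders):
// `calculate_replaced_inline_size` must run before
// `calculate_replaced_block_size`, because the latter reads the cached
// `computed_inline_size()`:
//
//     let inline = info.calculate_replaced_inline_size(
//         style, noncontent_i, container_i, intrinsic_i, intrinsic_b);
//     let block = info.calculate_replaced_block_size(
//         style, noncontent_b, containing_b, intrinsic_i, intrinsic_b);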
/// A fragment that represents an inline frame (iframe). This stores the pipeline ID so that the
/// size of this iframe can be communicated via the constellation to the iframe's own layout task.
#[derive(Clone)]
pub struct IframeFragmentInfo {
/// The pipeline ID of this iframe.
pub pipeline_id: PipelineId,
/// The subpage ID of this iframe.
pub subpage_id: SubpageId,
}
impl IframeFragmentInfo {
/// Creates the information specific to an iframe fragment.
pub fn new(node: &ThreadSafeLayoutNode) -> IframeFragmentInfo {
let (pipeline_id, subpage_id) = node.iframe_pipeline_and_subpage_ids();
IframeFragmentInfo {
pipeline_id: pipeline_id,
subpage_id: subpage_id,
}
}
#[inline]
pub fn calculate_replaced_inline_size(&self, style: &ComputedValues, containing_size: Au)
-> Au {
// Calculate the replaced inline size (or default) as per CSS 2.1 § 10.3.2
IframeFragmentInfo::calculate_replaced_size(style.content_inline_size(),
style.min_inline_size(),
style.max_inline_size(),
Some(containing_size),
Au::from_px(300))
}
#[inline]
pub fn calculate_replaced_block_size(&self, style: &ComputedValues, containing_size: Option<Au>)
-> Au {
        // Calculate the replaced block size (or default) as per CSS 2.1 § 10.6.2
IframeFragmentInfo::calculate_replaced_size(style.content_block_size(),
style.min_block_size(),
style.max_block_size(),
containing_size,
Au::from_px(150))
}
fn calculate_replaced_size(content_size: LengthOrPercentageOrAuto,
style_min_size: LengthOrPercentage,
style_max_size: LengthOrPercentageOrNone,
containing_size: Option<Au>,
default_size: Au) -> Au {
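        // Resolution order: a definite length wins; percentages and calc()
        // resolve against the containing size when one is available;
        // otherwise we fall back to `default_size` (the 300px/150px iframe
        // defaults passed by the callers above). The result is then clamped
        // by the min/max constraints.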
let computed_size = match (content_size, containing_size) {
(LengthOrPercentageOrAuto::Length(length), _) => length,
(LengthOrPercentageOrAuto::Percentage(pc), Some(container_size)) => container_size.scale_by(pc),
(LengthOrPercentageOrAuto::Calc(calc), Some(container_size)) => {
container_size.scale_by(calc.percentage()) + calc.length()
},
(LengthOrPercentageOrAuto::Calc(calc), None) => calc.length(),
(LengthOrPercentageOrAuto::Percentage(_), None) => default_size,
(LengthOrPercentageOrAuto::Auto, _) => default_size,
};
let containing_size = containing_size.unwrap_or(Au(0));
clamp_size(computed_size,
style_min_size,
style_max_size,
containing_size)
}
}
/// A scanned text fragment represents a single run of text with a distinct style. A `TextFragment`
/// may be split into two or more fragments across line breaks. Several `TextFragment`s may
/// correspond to a single DOM text node. Split text fragments are implemented by referring to
/// subsets of a single `TextRun` object.
#[derive(Clone)]
pub struct ScannedTextFragmentInfo {
/// The text run that this represents.
pub run: Arc<TextRun>,
/// The intrinsic size of the text fragment.
pub content_size: LogicalSize<Au>,
/// The position of the insertion point in characters, if any.
///
/// TODO(pcwalton): Make this a range.
pub insertion_point: Option<CharIndex>,
/// The range within the above text run that this represents.
pub range: Range<CharIndex>,
/// The endpoint of the above range, including whitespace that was stripped out. This exists
/// so that we can restore the range to its original value (before line breaking occurred) when
/// performing incremental reflow.
pub range_end_including_stripped_whitespace: CharIndex,
/// Whether a line break is required after this fragment if wrapping on newlines (e.g. if
/// `white-space: pre` is in effect).
pub requires_line_break_afterward_if_wrapping_on_newlines: bool,
}
impl ScannedTextFragmentInfo {
/// Creates the information specific to a scanned text fragment from a range and a text run.
pub fn new(run: Arc<TextRun>,
range: Range<CharIndex>,
content_size: LogicalSize<Au>,
insertion_point: &Option<CharIndex>,
requires_line_break_afterward_if_wrapping_on_newlines: bool)
-> ScannedTextFragmentInfo {
ScannedTextFragmentInfo {
run: run,
range: range,
insertion_point: *insertion_point,
content_size: content_size,
range_end_including_stripped_whitespace: range.end(),
requires_line_break_afterward_if_wrapping_on_newlines:
requires_line_break_afterward_if_wrapping_on_newlines,
}
}
}
/// Describes how to split a fragment. This is used during line breaking as part of the return
/// value of `find_split_info_for_inline_size()`.
#[derive(Debug, Clone)]
pub struct SplitInfo {
// TODO(bjz): this should only need to be a single character index, but both values are
// currently needed for splitting in the `inline::try_append_*` functions.
pub range: Range<CharIndex>,
pub inline_size: Au,
}
impl SplitInfo {
fn new(range: Range<CharIndex>, info: &ScannedTextFragmentInfo) -> SplitInfo {
let inline_size = info.run.advance_for_range(&range);
SplitInfo {
range: range,
inline_size: inline_size,
}
}
}
/// Describes how to split a fragment into two. This contains up to two `SplitInfo`s.
pub struct SplitResult {
/// The part of the fragment that goes on the first line.
pub inline_start: Option<SplitInfo>,
/// The part of the fragment that goes on the second line.
pub inline_end: Option<SplitInfo>,
/// The text run which is being split.
pub text_run: Arc<TextRun>,
}
/// Describes how a fragment should be truncated.
pub struct TruncationResult {
/// The part of the fragment remaining after truncation.
pub split: SplitInfo,
/// The text run which is being truncated.
pub text_run: Arc<TextRun>,
}
/// Data for an unscanned text fragment. Unscanned text fragments are the results of flow
/// construction that have not yet had their inline-size determined.
#[derive(Clone)]
pub struct UnscannedTextFragmentInfo {
/// The text inside the fragment.
pub text: Box<str>,
/// The position of the insertion point, if any.
///
/// TODO(pcwalton): Make this a range.
pub insertion_point: Option<CharIndex>,
}
impl UnscannedTextFragmentInfo {
/// Creates a new instance of `UnscannedTextFragmentInfo` from the given text.
#[inline]
pub fn new(text: String, insertion_point: Option<CharIndex>) -> UnscannedTextFragmentInfo {
UnscannedTextFragmentInfo {
text: text.into_boxed_str(),
insertion_point: insertion_point,
}
}
}
/// A fragment that represents a table column.
#[derive(Copy, Clone)]
pub struct TableColumnFragmentInfo {
    /// The number of columns a `<col>` element should span.
pub span: u32,
}
impl TableColumnFragmentInfo {
    /// Creates the information specific to a table column fragment.
pub fn new(node: &ThreadSafeLayoutNode) -> TableColumnFragmentInfo {
let element = node.as_element();
let span = element.get_attr(&ns!(""), &atom!("span"))
.and_then(|string| string.parse().ok())
.unwrap_or(0);
TableColumnFragmentInfo {
span: span,
}
}
}
impl Fragment {
/// Constructs a new `Fragment` instance.
pub fn new(node: &ThreadSafeLayoutNode, specific: SpecificFragmentInfo) -> Fragment {
let style = node.style().clone();
let writing_mode = style.writing_mode;
Fragment {
node: node.opaque(),
style: style,
restyle_damage: node.restyle_damage(),
border_box: LogicalRect::zero(writing_mode),
border_padding: LogicalMargin::zero(writing_mode),
margin: LogicalMargin::zero(writing_mode),
specific: specific,
inline_context: None,
pseudo: node.get_pseudo_element_type().strip(),
flags: FragmentFlags::empty(),
debug_id: layout_debug::generate_unique_debug_id(),
}
}
/// Constructs a new `Fragment` instance from an opaque node.
pub fn from_opaque_node_and_style(node: OpaqueNode,
pseudo: PseudoElementType<()>,
style: Arc<ComputedValues>,
restyle_damage: RestyleDamage,
specific: SpecificFragmentInfo)
-> Fragment {
let writing_mode = style.writing_mode;
Fragment {
node: node,
style: style,
restyle_damage: restyle_damage,
border_box: LogicalRect::zero(writing_mode),
border_padding: LogicalMargin::zero(writing_mode),
margin: LogicalMargin::zero(writing_mode),
specific: specific,
inline_context: None,
pseudo: pseudo,
flags: FragmentFlags::empty(),
debug_id: layout_debug::generate_unique_debug_id(),
}
}
pub fn reset_inline_sizes(&mut self) {
self.border_padding = LogicalMargin::zero(self.style.writing_mode);
self.margin = LogicalMargin::zero(self.style.writing_mode);
}
/// Returns a debug ID of this fragment. This ID should not be considered stable across
/// multiple layouts or fragment manipulations.
pub fn debug_id(&self) -> u16 {
self.debug_id
}
/// Transforms this fragment into another fragment of the given type, with the given size,
/// preserving all the other data.
pub fn transform(&self, size: LogicalSize<Au>, info: SpecificFragmentInfo)
-> Fragment {
let new_border_box = LogicalRect::from_point_size(self.style.writing_mode,
self.border_box.start,
size);
Fragment {
node: self.node,
style: self.style.clone(),
restyle_damage: incremental::rebuild_and_reflow(),
border_box: new_border_box,
border_padding: self.border_padding,
margin: self.margin,
specific: info,
inline_context: self.inline_context.clone(),
pseudo: self.pseudo.clone(),
flags: FragmentFlags::empty(),
debug_id: self.debug_id,
}
}
/// Transforms this fragment using the given `SplitInfo`, preserving all the other data.
pub fn transform_with_split_info(&self, split: &SplitInfo, text_run: Arc<TextRun>)
-> Fragment {
let size = LogicalSize::new(self.style.writing_mode,
split.inline_size,
self.border_box.size.block);
let requires_line_break_afterward_if_wrapping_on_newlines =
self.requires_line_break_afterward_if_wrapping_on_newlines();
// FIXME(pcwalton): This should modify the insertion point as necessary.
let info = box ScannedTextFragmentInfo::new(
text_run,
split.range,
size,
&None,
requires_line_break_afterward_if_wrapping_on_newlines);
self.transform(size, SpecificFragmentInfo::ScannedText(info))
}
/// Transforms this fragment into an ellipsis fragment, preserving all the other data.
pub fn transform_into_ellipsis(&self, layout_context: &LayoutContext) -> Fragment {
let mut unscanned_ellipsis_fragments = LinkedList::new();
unscanned_ellipsis_fragments.push_back(self.transform(
self.border_box.size,
SpecificFragmentInfo::UnscannedText(UnscannedTextFragmentInfo::new("…".to_owned(),
None))));
let ellipsis_fragments = TextRunScanner::new().scan_for_runs(&mut layout_context.font_context(),
unscanned_ellipsis_fragments);
debug_assert!(ellipsis_fragments.len() == 1);
ellipsis_fragments.fragments.into_iter().next().unwrap()
}
pub fn restyle_damage(&self) -> RestyleDamage {
self.restyle_damage | self.specific.restyle_damage()
}
pub fn contains_node(&self, node_address: OpaqueNode) -> bool {
node_address == self.node ||
self.inline_context.as_ref().map_or(false, |ctx| {
ctx.contains_node(node_address)
})
}
/// Adds a style to the inline context for this fragment. If the inline context doesn't exist
/// yet, it will be created.
pub fn add_inline_context_style(&mut self, node_info: InlineFragmentNodeInfo) {
if self.inline_context.is_none() {
self.inline_context = Some(InlineFragmentContext::new());
}
self.inline_context.as_mut().unwrap().nodes.push(node_info);
}
/// Determines which quantities (border/padding/margin/specified) should be included in the
/// intrinsic inline size of this fragment.
fn quantities_included_in_intrinsic_inline_size(&self)
-> QuantitiesIncludedInIntrinsicInlineSizes {
match self.specific {
SpecificFragmentInfo::Canvas(_) |
SpecificFragmentInfo::Generic |
SpecificFragmentInfo::GeneratedContent(_) |
SpecificFragmentInfo::Iframe(_) |
SpecificFragmentInfo::Image(_) |
SpecificFragmentInfo::InlineAbsolute(_) => {
QuantitiesIncludedInIntrinsicInlineSizes::all()
}
SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell => {
let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_PADDING |
INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED;
if self.style.get_inheritedtable().border_collapse ==
border_collapse::T::separate {
base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER
} else {
base_quantities
}
}
SpecificFragmentInfo::TableWrapper => {
let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS |
INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED;
if self.style.get_inheritedtable().border_collapse ==
border_collapse::T::separate {
base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER
} else {
base_quantities
}
}
SpecificFragmentInfo::TableRow => {
let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED;
if self.style.get_inheritedtable().border_collapse ==
border_collapse::T::separate {
base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER
} else {
base_quantities
}
}
SpecificFragmentInfo::ScannedText(_) |
SpecificFragmentInfo::TableColumn(_) |
SpecificFragmentInfo::UnscannedText(_) |
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) |
SpecificFragmentInfo::InlineBlock(_) => {
QuantitiesIncludedInIntrinsicInlineSizes::empty()
}
}
}
/// Returns the portion of the intrinsic inline-size that consists of borders, padding, and/or
/// margins.
///
/// FIXME(#2261, pcwalton): This won't work well for inlines: is this OK?
pub fn surrounding_intrinsic_inline_size(&self) -> Au {
let flags = self.quantities_included_in_intrinsic_inline_size();
let style = self.style();
// FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING.
// This will likely need to be done by pushing down definite sizes during selector
// cascading.
let margin = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS) {
let margin = style.logical_margin();
(MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() +
MaybeAuto::from_style(margin.inline_end, Au(0)).specified_or_zero())
} else {
Au(0)
};
// FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING.
// This will likely need to be done by pushing down definite sizes during selector
// cascading.
let padding = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_PADDING) {
let padding = style.logical_padding();
(model::specified(padding.inline_start, Au(0)) +
model::specified(padding.inline_end, Au(0)))
} else {
Au(0)
};
let border = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_BORDER) {
self.border_width().inline_start_end()
} else {
Au(0)
};
margin + padding + border
}
/// Uses the style only to estimate the intrinsic inline-sizes. These may be modified for text
/// or replaced elements.
fn style_specified_intrinsic_inline_size(&self) -> IntrinsicISizesContribution {
let flags = self.quantities_included_in_intrinsic_inline_size();
let style = self.style();
let specified = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED) {
max(model::specified(style.min_inline_size(), Au(0)),
MaybeAuto::from_style(style.content_inline_size(), Au(0)).specified_or_zero())
} else {
Au(0)
};
// FIXME(#2261, pcwalton): This won't work well for inlines: is this OK?
let surrounding_inline_size = self.surrounding_intrinsic_inline_size();
IntrinsicISizesContribution {
content_intrinsic_sizes: IntrinsicISizes {
minimum_inline_size: specified,
preferred_inline_size: specified,
},
surrounding_size: surrounding_inline_size,
}
}
pub fn calculate_line_height(&self, layout_context: &LayoutContext) -> Au {
let font_style = self.style.get_font_arc();
let font_metrics = text::font_metrics_for_style(&mut layout_context.font_context(), font_style);
text::line_height_from_style(&*self.style, &font_metrics)
}
/// Returns the sum of the inline-sizes of all the borders of this fragment. Note that this
/// can be expensive to compute, so if possible use the `border_padding` field instead.
#[inline]
fn border_width(&self) -> LogicalMargin<Au> {
let style_border_width = match self.specific {
SpecificFragmentInfo::ScannedText(_) |
SpecificFragmentInfo::InlineBlock(_) => LogicalMargin::zero(self.style.writing_mode),
_ => self.style().logical_border_width(),
};
match self.inline_context {
None => style_border_width,
Some(ref inline_fragment_context) => {
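                // Fold in the borders of each ancestor inline element, but
                // only count an element's inline-start border on its first
                // fragment and its inline-end border on its last fragment, so
                // that an inline split across lines doesn't double-count
                // borders.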
inline_fragment_context.nodes.iter().fold(style_border_width, |accumulator, node| {
let mut this_border_width = node.style.logical_border_width();
if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) {
this_border_width.inline_start = Au(0)
}
if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) {
this_border_width.inline_end = Au(0)
}
accumulator + this_border_width
})
}
}
}
/// Computes the margins in the inline direction from the containing block inline-size and the
/// style. After this call, the inline direction of the `margin` field will be correct.
///
/// Do not use this method if the inline direction margins are to be computed some other way
/// (for example, via constraint solving for blocks).
pub fn compute_inline_direction_margins(&mut self, containing_block_inline_size: Au) {
match self.specific {
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableColumn(_) => {
self.margin.inline_start = Au(0);
self.margin.inline_end = Au(0);
return
}
SpecificFragmentInfo::InlineBlock(_) => {
// Inline-blocks do not take self margins into account but do account for margins
// from outer inline contexts.
self.margin.inline_start = Au(0);
self.margin.inline_end = Au(0);
}
_ => {
let margin = self.style().logical_margin();
self.margin.inline_start =
MaybeAuto::from_style(margin.inline_start,
containing_block_inline_size).specified_or_zero();
self.margin.inline_end =
MaybeAuto::from_style(margin.inline_end,
containing_block_inline_size).specified_or_zero();
}
}
if let Some(ref inline_context) = self.inline_context {
for node in &inline_context.nodes {
let margin = node.style.logical_margin();
let this_inline_start_margin = if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) {
Au(0)
} else {
MaybeAuto::from_style(margin.inline_start,
containing_block_inline_size).specified_or_zero()
};
let this_inline_end_margin = if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) {
Au(0)
} else {
MaybeAuto::from_style(margin.inline_end,
containing_block_inline_size).specified_or_zero()
};
self.margin.inline_start = self.margin.inline_start + this_inline_start_margin;
self.margin.inline_end = self.margin.inline_end + this_inline_end_margin;
}
}
}
/// Computes the margins in the block direction from the containing block inline-size and the
/// style. After this call, the block direction of the `margin` field will be correct.
///
/// Do not use this method if the block direction margins are to be computed some other way
/// (for example, via constraint solving for absolutely-positioned flows).
pub fn compute_block_direction_margins(&mut self, containing_block_inline_size: Au) {
match self.specific {
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableColumn(_) => {
self.margin.block_start = Au(0);
self.margin.block_end = Au(0)
}
_ => {
// NB: Percentages are relative to containing block inline-size (not block-size)
// per CSS 2.1.
let margin = self.style().logical_margin();
self.margin.block_start =
MaybeAuto::from_style(margin.block_start, containing_block_inline_size)
.specified_or_zero();
self.margin.block_end =
MaybeAuto::from_style(margin.block_end, containing_block_inline_size)
.specified_or_zero();
}
}
}
/// Computes the border and padding in both inline and block directions from the containing
/// block inline-size and the style. After this call, the `border_padding` field will be
/// correct.
///
/// TODO(pcwalton): Remove `border_collapse`; we can figure it out from our style and specific
/// fragment info.
pub fn compute_border_and_padding(&mut self,
containing_block_inline_size: Au,
border_collapse: border_collapse::T) {
// Compute border.
let border = match border_collapse {
border_collapse::T::separate => self.border_width(),
border_collapse::T::collapse => LogicalMargin::zero(self.style.writing_mode),
};
// Compute padding from the fragment's style.
//
// This is zero in the case of `inline-block` because that padding is applied to the
// wrapped block, not the fragment.
let padding_from_style = match self.specific {
SpecificFragmentInfo::TableColumn(_) |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableWrapper |
SpecificFragmentInfo::InlineBlock(_) => LogicalMargin::zero(self.style.writing_mode),
_ => model::padding_from_style(self.style(), containing_block_inline_size),
};
// Compute padding from the inline fragment context.
let padding_from_inline_fragment_context = match (&self.specific, &self.inline_context) {
(_, &None) |
(&SpecificFragmentInfo::TableColumn(_), _) |
(&SpecificFragmentInfo::TableRow, _) |
(&SpecificFragmentInfo::TableWrapper, _) => {
LogicalMargin::zero(self.style.writing_mode)
}
(_, &Some(ref inline_fragment_context)) => {
let zero_padding = LogicalMargin::zero(self.style.writing_mode);
inline_fragment_context.nodes.iter().fold(zero_padding, |accumulator, node| {
let mut padding = model::padding_from_style(&*node.style, Au(0));
if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) {
padding.inline_start = Au(0)
}
if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) {
padding.inline_end = Au(0)
}
accumulator + padding
})
}
};
self.border_padding = border + padding_from_style + padding_from_inline_fragment_context
}
    /// Returns the offset from the original position due to `position: relative`.
pub fn relative_position(&self, containing_block_size: &LogicalSize<Au>) -> LogicalSize<Au> {
fn from_style(style: &ComputedValues, container_size: &LogicalSize<Au>)
-> LogicalSize<Au> {
let offsets = style.logical_position();
let offset_i = if offsets.inline_start != LengthOrPercentageOrAuto::Auto {
MaybeAuto::from_style(offsets.inline_start,
container_size.inline).specified_or_zero()
} else {
-MaybeAuto::from_style(offsets.inline_end,
container_size.inline).specified_or_zero()
};
            // Per CSS 2.1 § 9.4.3, block-direction (top/bottom) percentages
            // resolve against the containing block's block-size.
            let offset_b = if offsets.block_start != LengthOrPercentageOrAuto::Auto {
                MaybeAuto::from_style(offsets.block_start,
                                      container_size.block).specified_or_zero()
            } else {
                -MaybeAuto::from_style(offsets.block_end,
                                       container_size.block).specified_or_zero()
            };
LogicalSize::new(style.writing_mode, offset_i, offset_b)
}
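        // Sign convention (CSS 2.1 § 9.4.3): a non-auto `inline-start` shifts
        // the fragment in the positive inline direction; when only
        // `inline-end` is specified, the shift is negated. The block axis
        // behaves analogously.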
// Go over the ancestor fragments and add all relative offsets (if any).
let mut rel_pos = if self.style().get_box().position == position::T::relative {
from_style(self.style(), containing_block_size)
} else {
LogicalSize::zero(self.style.writing_mode)
};
if let Some(ref inline_fragment_context) = self.inline_context {
for node in &inline_fragment_context.nodes {
if node.style.get_box().position == position::T::relative {
rel_pos = rel_pos + from_style(&*node.style, containing_block_size);
}
}
}
rel_pos
}
/// Always inline for SCCP.
///
/// FIXME(pcwalton): Just replace with the clear type from the style module for speed?
#[inline(always)]
pub fn clear(&self) -> Option<ClearType> {
let style = self.style();
match style.get_box().clear {
clear::T::none => None,
clear::T::left => Some(ClearType::Left),
clear::T::right => Some(ClearType::Right),
clear::T::both => Some(ClearType::Both),
}
}
#[inline(always)]
pub fn style(&self) -> &ComputedValues {
&*self.style
}
/// Returns the text alignment of the computed style of the nearest ancestor-or-self `Element`
/// node.
pub fn text_align(&self) -> text_align::T {
self.style().get_inheritedtext().text_align
}
pub fn white_space(&self) -> white_space::T {
self.style().get_inheritedtext().white_space
}
/// Returns the text decoration of this fragment, according to the style of the nearest ancestor
/// element.
///
/// NB: This may not be the actual text decoration, because of the override rules specified in
/// CSS 2.1 § 16.3.1. Unfortunately, computing this properly doesn't really fit into Servo's
    /// model. Therefore, this is a best-effort lower-bound approximation; the end result may
    /// have additional decoration flags turned on afterward.
pub fn text_decoration(&self) -> text_decoration::T {
self.style().get_text().text_decoration
}
/// Returns the inline-start offset from margin edge to content edge.
///
/// FIXME(#2262, pcwalton): I think this method is pretty bogus, because it won't work for
/// inlines.
pub fn inline_start_offset(&self) -> Au {
match self.specific {
SpecificFragmentInfo::TableWrapper => self.margin.inline_start,
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableRow => self.border_padding.inline_start,
SpecificFragmentInfo::TableColumn(_) => Au(0),
_ => self.margin.inline_start + self.border_padding.inline_start,
}
}
/// Returns true if this element can be split. This is true for text fragments, unless
/// `white-space: pre` is set.
pub fn can_split(&self) -> bool {
self.is_scanned_text_fragment() &&
self.style.get_inheritedtext().white_space != white_space::T::pre
}
/// Returns true if and only if this fragment is a generated content fragment.
pub fn is_generated_content(&self) -> bool {
match self.specific {
SpecificFragmentInfo::GeneratedContent(..) => true,
_ => false,
}
}
/// Returns true if and only if this is a scanned text fragment.
pub fn is_scanned_text_fragment(&self) -> bool {
match self.specific {
SpecificFragmentInfo::ScannedText(..) => true,
_ => false,
}
}
/// Computes the intrinsic inline-sizes of this fragment.
pub fn compute_intrinsic_inline_sizes(&mut self) -> IntrinsicISizesContribution {
let mut result = self.style_specified_intrinsic_inline_size();
match self.specific {
SpecificFragmentInfo::Generic |
SpecificFragmentInfo::GeneratedContent(_) |
SpecificFragmentInfo::Iframe(_) |
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableColumn(_) |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableWrapper |
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => {}
SpecificFragmentInfo::InlineBlock(ref info) => {
let block_flow = info.flow_ref.as_block();
result.union_block(&block_flow.base.intrinsic_inline_sizes)
}
SpecificFragmentInfo::InlineAbsolute(ref info) => {
let block_flow = info.flow_ref.as_block();
result.union_block(&block_flow.base.intrinsic_inline_sizes)
}
SpecificFragmentInfo::Image(ref mut image_fragment_info) => {
// FIXME(pcwalton): Shouldn't `width` and `height` be preshints?
let image_inline_size = match (image_fragment_info.replaced_image_fragment_info
.dom_inline_size,
self.style.content_inline_size()) {
(None, LengthOrPercentageOrAuto::Auto) |
(None, LengthOrPercentageOrAuto::Percentage(_)) => {
image_fragment_info.image_inline_size()
}
(Some(dom_inline_size), _) => dom_inline_size,
(None, LengthOrPercentageOrAuto::Length(length)) => length,
(None, LengthOrPercentageOrAuto::Calc(calc)) => calc.length(),
};
result.union_block(&IntrinsicISizes {
minimum_inline_size: image_inline_size,
preferred_inline_size: image_inline_size,
});
}
SpecificFragmentInfo::Canvas(ref mut canvas_fragment_info) => {
let canvas_inline_size = canvas_fragment_info.canvas_inline_size();
result.union_block(&IntrinsicISizes {
minimum_inline_size: canvas_inline_size,
preferred_inline_size: canvas_inline_size,
})
}
SpecificFragmentInfo::ScannedText(ref text_fragment_info) => {
let range = &text_fragment_info.range;
// See http://dev.w3.org/csswg/css-sizing/#max-content-inline-size.
// TODO: Account for soft wrap opportunities.
let max_line_inline_size = text_fragment_info.run
.metrics_for_range(range)
.advance_width;
let min_line_inline_size = match self.style.get_inheritedtext().white_space {
white_space::T::pre | white_space::T::nowrap => max_line_inline_size,
white_space::T::normal => text_fragment_info.run.min_width_for_range(range),
};
result.union_block(&IntrinsicISizes {
minimum_inline_size: min_line_inline_size,
preferred_inline_size: max_line_inline_size,
})
}
SpecificFragmentInfo::UnscannedText(..) => {
panic!("Unscanned text fragments should have been scanned by now!")
}
};
// Take borders and padding for parent inline fragments into account, if necessary.
if self.is_primary_fragment() {
if let Some(ref context) = self.inline_context {
for node in &context.nodes {
let mut border_width = node.style.logical_border_width();
let mut padding = model::padding_from_style(&*node.style, Au(0));
let mut margin = model::specified_margin_from_style(&*node.style);
if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) {
border_width.inline_start = Au(0);
padding.inline_start = Au(0);
margin.inline_start = Au(0);
}
if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) {
border_width.inline_end = Au(0);
padding.inline_end = Au(0);
margin.inline_end = Au(0);
}
result.surrounding_size =
result.surrounding_size +
border_width.inline_start_end() +
padding.inline_start_end() +
margin.inline_start_end();
}
}
}
result
}
/// TODO: What exactly does this function return? Why is it Au(0) for
/// `SpecificFragmentInfo::Generic`?
pub fn content_inline_size(&self) -> Au {
match self.specific {
SpecificFragmentInfo::Generic |
SpecificFragmentInfo::GeneratedContent(_) |
SpecificFragmentInfo::Iframe(_) |
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableWrapper |
SpecificFragmentInfo::InlineBlock(_) |
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) |
SpecificFragmentInfo::InlineAbsolute(_) => Au(0),
SpecificFragmentInfo::Canvas(ref canvas_fragment_info) => {
canvas_fragment_info.replaced_image_fragment_info.computed_inline_size()
}
SpecificFragmentInfo::Image(ref image_fragment_info) => {
image_fragment_info.replaced_image_fragment_info.computed_inline_size()
}
SpecificFragmentInfo::ScannedText(ref text_fragment_info) => {
let (range, run) = (&text_fragment_info.range, &text_fragment_info.run);
let text_bounds = run.metrics_for_range(range).bounding_box;
text_bounds.size.width
}
SpecificFragmentInfo::TableColumn(_) => {
panic!("Table column fragments do not have inline_size")
}
SpecificFragmentInfo::UnscannedText(_) => {
panic!("Unscanned text fragments should have been scanned by now!")
}
}
}
/// Returns the dimensions of the content box.
///
/// This is marked `#[inline]` because it is frequently called when only one or two of the
/// values are needed and that will save computation.
#[inline]
pub fn content_box(&self) -> LogicalRect<Au> {
self.border_box - self.border_padding
}
/// Attempts to find the split positions of a text fragment so that its inline-size is no more
/// than `max_inline_size`.
///
/// A return value of `None` indicates that the fragment could not be split. Otherwise the
/// information pertaining to the split is returned. The inline-start and inline-end split
/// information are both optional due to the possibility of them being whitespace.
pub fn calculate_split_position(&self, max_inline_size: Au, starts_line: bool)
-> Option<SplitResult> {
let text_fragment_info =
if let SpecificFragmentInfo::ScannedText(ref text_fragment_info) = self.specific {
text_fragment_info
} else {
return None
};
let mut flags = SplitOptions::empty();
if starts_line {
flags.insert(STARTS_LINE);
if self.style().get_inheritedtext().overflow_wrap == overflow_wrap::T::break_word {
flags.insert(RETRY_AT_CHARACTER_BOUNDARIES)
}
}
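        // `word-break: normal` prefers natural word boundaries and may retry
        // at character boundaries when `overflow-wrap: break-word` applies;
        // `word-break: break-all` splits at character boundaries directly.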
match self.style().get_inheritedtext().word_break {
word_break::T::normal => {
// Break at normal word boundaries.
let natural_word_breaking_strategy =
text_fragment_info.run.natural_word_slices_in_range(&text_fragment_info.range);
self.calculate_split_position_using_breaking_strategy(
natural_word_breaking_strategy,
max_inline_size,
flags)
}
word_break::T::break_all => {
// Break at character boundaries.
let character_breaking_strategy =
text_fragment_info.run.character_slices_in_range(&text_fragment_info.range);
flags.remove(RETRY_AT_CHARACTER_BOUNDARIES);
return self.calculate_split_position_using_breaking_strategy(
character_breaking_strategy,
max_inline_size,
flags)
}
}
}
/// Truncates this fragment to the given `max_inline_size`, using a character-based breaking
/// strategy. If no characters could fit, returns `None`.
pub fn truncate_to_inline_size(&self, max_inline_size: Au) -> Option<TruncationResult> {
let text_fragment_info =
if let SpecificFragmentInfo::ScannedText(ref text_fragment_info) = self.specific {
text_fragment_info
} else {
return None
};
let character_breaking_strategy =
text_fragment_info.run.character_slices_in_range(&text_fragment_info.range);
match self.calculate_split_position_using_breaking_strategy(character_breaking_strategy,
max_inline_size,
SplitOptions::empty()) {
None => None,
Some(split_info) => {
match split_info.inline_start {
None => None,
Some(split) => {
Some(TruncationResult {
split: split,
text_run: split_info.text_run.clone(),
})
}
}
}
}
}
/// A helper method that uses the breaking strategy described by `slice_iterator` (at present,
/// either natural word breaking or character breaking) to split this fragment.
fn calculate_split_position_using_breaking_strategy<'a, I>(
&self,
slice_iterator: I,
max_inline_size: Au,
flags: SplitOptions)
-> Option<SplitResult>
where I: Iterator<Item=TextRunSlice<'a>> {
let text_fragment_info =
if let SpecificFragmentInfo::ScannedText(ref text_fragment_info) = self.specific {
text_fragment_info
} else {
return None
};
let mut pieces_processed_count: u32 = 0;
let mut remaining_inline_size = max_inline_size;
let mut inline_start_range = Range::new(text_fragment_info.range.begin(), CharIndex(0));
let mut inline_end_range = None;
let mut overflowing = false;
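        // Greedy scan: accumulate slices into `inline_start_range` while they
        // fit within `remaining_inline_size`; the first non-whitespace slice
        // that does not fit determines the split point.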
debug!("calculate_split_position_using_breaking_strategy: splitting text fragment \
(strlen={}, range={:?}, max_inline_size={:?})",
text_fragment_info.run.text.len(),
text_fragment_info.range,
max_inline_size);
for slice in slice_iterator {
debug!("calculate_split_position_using_breaking_strategy: considering slice \
(offset={:?}, slice range={:?}, remaining_inline_size={:?})",
slice.offset,
slice.range,
remaining_inline_size);
// Use the `remaining_inline_size` to find a split point if possible. If not, go around
// the loop again with the next slice.
let metrics = text_fragment_info.run.metrics_for_slice(slice.glyphs, &slice.range);
let advance = metrics.advance_width;
// Have we found the split point?
if advance <= remaining_inline_size || slice.glyphs.is_whitespace() {
// Keep going; we haven't found the split point yet.
if flags.contains(STARTS_LINE) &&
pieces_processed_count == 0 &&
slice.glyphs.is_whitespace() {
debug!("calculate_split_position_using_breaking_strategy: skipping \
leading trimmable whitespace");
inline_start_range.shift_by(slice.range.length());
} else {
debug!("calculate_split_position_using_breaking_strategy: enlarging span");
remaining_inline_size = remaining_inline_size - advance;
inline_start_range.extend_by(slice.range.length());
}
pieces_processed_count += 1;
continue
}
// The advance is more than the remaining inline-size, so split here. First, check to
// see if we're going to overflow the line. If so, perform a best-effort split.
let mut remaining_range = slice.text_run_range();
let split_is_empty = inline_start_range.is_empty() &&
!self.requires_line_break_afterward_if_wrapping_on_newlines();
if split_is_empty {
// We're going to overflow the line.
overflowing = true;
inline_start_range = slice.text_run_range();
remaining_range = Range::new(slice.text_run_range().end(), CharIndex(0));
remaining_range.extend_to(text_fragment_info.range.end());
}
// Check to see if we need to create an inline-end chunk.
let slice_begin = remaining_range.begin();
if slice_begin < text_fragment_info.range.end() {
                // There are still some things left over at the end of the line, so create the
// inline-end chunk.
let mut inline_end = remaining_range;
inline_end.extend_to(text_fragment_info.range.end());
inline_end_range = Some(inline_end);
debug!("calculate_split_position: splitting remainder with inline-end range={:?}",
inline_end);
}
// If we failed to find a suitable split point, we're on the verge of overflowing the
// line.
if split_is_empty || overflowing {
// If we've been instructed to retry at character boundaries (probably via
// `overflow-wrap: break-word`), do so.
if flags.contains(RETRY_AT_CHARACTER_BOUNDARIES) {
let character_breaking_strategy =
text_fragment_info.run
.character_slices_in_range(&text_fragment_info.range);
let mut flags = flags;
flags.remove(RETRY_AT_CHARACTER_BOUNDARIES);
return self.calculate_split_position_using_breaking_strategy(
character_breaking_strategy,
max_inline_size,
flags)
}
// We aren't at the start of the line, so don't overflow. Let inline layout wrap to
// the next line instead.
if !flags.contains(STARTS_LINE) {
return None
}
}
break
}
let split_is_empty = inline_start_range.is_empty() &&
!self.requires_line_break_afterward_if_wrapping_on_newlines();
let inline_start = if !split_is_empty {
Some(SplitInfo::new(inline_start_range, &**text_fragment_info))
} else {
None
};
let inline_end = inline_end_range.map(|inline_end_range| {
SplitInfo::new(inline_end_range, &**text_fragment_info)
});
Some(SplitResult {
inline_start: inline_start,
inline_end: inline_end,
text_run: text_fragment_info.run.clone(),
})
}
/// The opposite of `calculate_split_position_using_breaking_strategy`: merges this fragment
/// with the next one.
pub fn merge_with(&mut self, next_fragment: Fragment) {
match (&mut self.specific, &next_fragment.specific) {
(&mut SpecificFragmentInfo::ScannedText(ref mut this_info),
&SpecificFragmentInfo::ScannedText(ref other_info)) => {
debug_assert!(util::arc_ptr_eq(&this_info.run, &other_info.run));
this_info.range.extend_to(other_info.range_end_including_stripped_whitespace);
this_info.content_size.inline =
this_info.run.metrics_for_range(&this_info.range).advance_width;
this_info.requires_line_break_afterward_if_wrapping_on_newlines =
this_info.requires_line_break_afterward_if_wrapping_on_newlines ||
other_info.requires_line_break_afterward_if_wrapping_on_newlines;
self.border_padding.inline_end = next_fragment.border_padding.inline_end;
self.border_box.size.inline = this_info.content_size.inline +
self.border_padding.inline_start_end();
}
_ => panic!("Can only merge two scanned-text fragments!"),
}
self.meld_with_next_inline_fragment(&next_fragment);
}
/// Returns true if this fragment is an unscanned text fragment that consists entirely of
/// whitespace that should be stripped.
pub fn is_ignorable_whitespace(&self) -> bool {
match self.white_space() {
white_space::T::pre => return false,
white_space::T::normal | white_space::T::nowrap => {}
}
match self.specific {
SpecificFragmentInfo::UnscannedText(ref text_fragment_info) => {
is_whitespace(&text_fragment_info.text)
}
_ => false,
}
}
/// Assigns replaced inline-size, padding, and margins for this fragment only if it is replaced
/// content per CSS 2.1 § 10.3.2.
pub fn assign_replaced_inline_size_if_necessary<'a>(&'a mut self, container_inline_size: Au) {
match self.specific {
SpecificFragmentInfo::Generic |
SpecificFragmentInfo::GeneratedContent(_) |
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableWrapper => return,
SpecificFragmentInfo::TableColumn(_) => {
panic!("Table column fragments do not have inline size")
}
SpecificFragmentInfo::UnscannedText(_) => {
panic!("Unscanned text fragments should have been scanned by now!")
}
SpecificFragmentInfo::Canvas(_) |
SpecificFragmentInfo::Image(_) |
SpecificFragmentInfo::Iframe(_) |
SpecificFragmentInfo::InlineBlock(_) |
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) |
SpecificFragmentInfo::InlineAbsolute(_) |
SpecificFragmentInfo::ScannedText(_) => {}
};
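        // The match above filtered out non-replaced fragments; the match
        // below computes the used inline-size for each replaced kind.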
let style = &*self.style;
let noncontent_inline_size = self.border_padding.inline_start_end();
match self.specific {
SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block();
block_flow.base.position.size.inline =
block_flow.base.intrinsic_inline_sizes.preferred_inline_size;
// This is a hypothetical box, so it takes up no space.
self.border_box.size.inline = Au(0);
}
SpecificFragmentInfo::InlineBlock(ref mut info) => {
let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block();
self.border_box.size.inline =
max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size,
block_flow.base.intrinsic_inline_sizes.preferred_inline_size);
block_flow.base.block_container_inline_size = self.border_box.size.inline;
block_flow.base.block_container_writing_mode = self.style.writing_mode;
}
SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block();
self.border_box.size.inline =
max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size,
block_flow.base.intrinsic_inline_sizes.preferred_inline_size);
block_flow.base.block_container_inline_size = self.border_box.size.inline;
block_flow.base.block_container_writing_mode = self.style.writing_mode;
}
SpecificFragmentInfo::ScannedText(ref info) => {
// Scanned text fragments will have already had their content inline-sizes assigned
// by this point.
self.border_box.size.inline = info.content_size.inline + noncontent_inline_size
}
SpecificFragmentInfo::Image(ref mut image_fragment_info) => {
let fragment_inline_size = image_fragment_info.image_inline_size();
let fragment_block_size = image_fragment_info.image_block_size();
self.border_box.size.inline =
image_fragment_info.replaced_image_fragment_info
.calculate_replaced_inline_size(style,
noncontent_inline_size,
container_inline_size,
fragment_inline_size,
fragment_block_size);
}
SpecificFragmentInfo::Canvas(ref mut canvas_fragment_info) => {
let fragment_inline_size = canvas_fragment_info.canvas_inline_size();
let fragment_block_size = canvas_fragment_info.canvas_block_size();
self.border_box.size.inline =
canvas_fragment_info.replaced_image_fragment_info
.calculate_replaced_inline_size(style,
noncontent_inline_size,
container_inline_size,
fragment_inline_size,
fragment_block_size);
}
SpecificFragmentInfo::Iframe(ref iframe_fragment_info) => {
self.border_box.size.inline =
iframe_fragment_info.calculate_replaced_inline_size(style,
container_inline_size) +
noncontent_inline_size;
}
_ => panic!("this case should have been handled above"),
}
}
/// Assign block-size for this fragment if it is replaced content. The inline-size must have
/// been assigned first.
///
/// Ideally, this should follow CSS 2.1 § 10.6.2.
pub fn assign_replaced_block_size_if_necessary(&mut self, containing_block_block_size: Option<Au>) {
match self.specific {
SpecificFragmentInfo::Generic |
SpecificFragmentInfo::GeneratedContent(_) |
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableWrapper => return,
SpecificFragmentInfo::TableColumn(_) => {
panic!("Table column fragments do not have block size")
}
SpecificFragmentInfo::UnscannedText(_) => {
panic!("Unscanned text fragments should have been scanned by now!")
}
SpecificFragmentInfo::Canvas(_) |
SpecificFragmentInfo::Iframe(_) |
SpecificFragmentInfo::Image(_) |
SpecificFragmentInfo::InlineBlock(_) |
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) |
SpecificFragmentInfo::InlineAbsolute(_) |
SpecificFragmentInfo::ScannedText(_) => {}
}
let style = &*self.style;
let noncontent_block_size = self.border_padding.block_start_end();
match self.specific {
SpecificFragmentInfo::Image(ref mut image_fragment_info) => {
let fragment_inline_size = image_fragment_info.image_inline_size();
let fragment_block_size = image_fragment_info.image_block_size();
self.border_box.size.block =
image_fragment_info.replaced_image_fragment_info
.calculate_replaced_block_size(style,
noncontent_block_size,
containing_block_block_size,
fragment_inline_size,
fragment_block_size);
}
SpecificFragmentInfo::Canvas(ref mut canvas_fragment_info) => {
let fragment_inline_size = canvas_fragment_info.canvas_inline_size();
let fragment_block_size = canvas_fragment_info.canvas_block_size();
self.border_box.size.block =
canvas_fragment_info.replaced_image_fragment_info
.calculate_replaced_block_size(style,
noncontent_block_size,
containing_block_block_size,
fragment_inline_size,
fragment_block_size);
}
SpecificFragmentInfo::ScannedText(ref info) => {
// Scanned text fragments' content block-sizes are calculated by the text run
// scanner during flow construction.
self.border_box.size.block = info.content_size.block + noncontent_block_size
}
SpecificFragmentInfo::InlineBlock(ref mut info) => {
// Not the primary fragment, so we do not take the noncontent size into account.
let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block();
self.border_box.size.block = block_flow.base.position.size.block +
block_flow.fragment.margin.block_start_end()
}
SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
// Not the primary fragment, so we do not take the noncontent size into account.
let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block();
self.border_box.size.block = block_flow.base.position.size.block;
}
SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
// Not the primary fragment, so we do not take the noncontent size into account.
let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block();
self.border_box.size.block = block_flow.base.position.size.block +
block_flow.fragment.margin.block_start_end()
}
SpecificFragmentInfo::Iframe(ref info) => {
self.border_box.size.block =
info.calculate_replaced_block_size(style, containing_block_block_size) +
noncontent_block_size;
}
_ => panic!("should have been handled above"),
}
}
/// Calculates block-size above baseline, depth below baseline, and ascent for this fragment
/// when used in an inline formatting context. See CSS 2.1 § 10.8.1.
pub fn inline_metrics(&self, layout_context: &LayoutContext) -> InlineMetrics {
match self.specific {
SpecificFragmentInfo::Image(ref image_fragment_info) => {
let computed_block_size = image_fragment_info.replaced_image_fragment_info
.computed_block_size();
InlineMetrics {
block_size_above_baseline: computed_block_size +
self.border_padding.block_start,
depth_below_baseline: self.border_padding.block_end,
ascent: computed_block_size + self.border_padding.block_start,
}
}
SpecificFragmentInfo::ScannedText(ref text_fragment) => {
// See CSS 2.1 § 10.8.1.
let line_height = self.calculate_line_height(layout_context);
let font_derived_metrics =
InlineMetrics::from_font_metrics(&text_fragment.run.font_metrics, line_height);
InlineMetrics {
block_size_above_baseline: font_derived_metrics.block_size_above_baseline +
self.border_padding.block_start,
depth_below_baseline: font_derived_metrics.depth_below_baseline +
self.border_padding.block_end,
ascent: font_derived_metrics.ascent + self.border_padding.block_start,
}
}
SpecificFragmentInfo::InlineBlock(ref info) => {
// See CSS 2.1 § 10.8.1.
let block_flow = info.flow_ref.as_block();
let font_style = self.style.get_font_arc();
let font_metrics = text::font_metrics_for_style(&mut layout_context.font_context(),
font_style);
InlineMetrics::from_block_height(&font_metrics,
block_flow.base.position.size.block,
block_flow.fragment.margin.block_start,
block_flow.fragment.margin.block_end)
}
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) |
SpecificFragmentInfo::InlineAbsolute(_) => {
// Hypothetical boxes take up no space.
InlineMetrics {
block_size_above_baseline: Au(0),
depth_below_baseline: Au(0),
ascent: Au(0),
}
}
_ => {
InlineMetrics {
block_size_above_baseline: self.border_box.size.block,
depth_below_baseline: Au(0),
ascent: self.border_box.size.block,
}
}
}
}
/// Returns true if this fragment is a hypothetical box. See CSS 2.1 § 10.3.7.
pub fn is_hypothetical(&self) -> bool {
match self.specific {
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => true,
_ => false,
}
}
/// Returns true if this fragment can merge with another immediately-following fragment or
/// false otherwise.
pub fn can_merge_with_fragment(&self, other: &Fragment) -> bool {
match (&self.specific, &other.specific) {
(&SpecificFragmentInfo::UnscannedText(ref first_unscanned_text),
&SpecificFragmentInfo::UnscannedText(_)) => {
// FIXME: Should probably use a whitelist of styles that can safely differ (#3165)
if self.style().get_font() != other.style().get_font() ||
self.text_decoration() != other.text_decoration() ||
self.white_space() != other.white_space() {
return false
}
let length = first_unscanned_text.text.len();
if length != 0 && first_unscanned_text.text.char_at_reverse(length) == '\n' {
return false
}
// If this node has any styles that have border/padding/margins on the following
// side, then we can't merge with the next fragment.
if let Some(ref inline_context) = self.inline_context {
for inline_context_node in inline_context.nodes.iter() {
if !inline_context_node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) {
continue
}
if inline_context_node.style.logical_margin().inline_end !=
LengthOrPercentageOrAuto::Length(Au(0)) {
return false
}
if inline_context_node.style.logical_padding().inline_end !=
LengthOrPercentage::Length(Au(0)) {
return false
}
if inline_context_node.style.logical_border_width().inline_end != Au(0) {
return false
}
}
}
// If the next fragment has any styles that have border/padding/margins on the
// preceding side, then it can't merge with us.
if let Some(ref inline_context) = other.inline_context {
for inline_context_node in inline_context.nodes.iter() {
if !inline_context_node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) {
continue
}
if inline_context_node.style.logical_margin().inline_start !=
LengthOrPercentageOrAuto::Length(Au(0)) {
return false
}
if inline_context_node.style.logical_padding().inline_start !=
LengthOrPercentage::Length(Au(0)) {
return false
}
if inline_context_node.style.logical_border_width().inline_start != Au(0) {
return false
}
}
}
true
}
_ => false,
}
}
/// Returns true if and only if this is the *primary fragment* for the fragment's style object
/// (conceptually, though style sharing makes this not really true, of course). The primary
/// fragment is the one that draws backgrounds, borders, etc., and takes borders, padding and
/// margins into account. Every style object has at most one primary fragment.
///
/// At present, all fragments are primary fragments except for inline-block and table wrapper
/// fragments. Inline-block fragments are not primary fragments because the corresponding block
/// flow is the primary fragment, while table wrapper fragments are not primary fragments
/// because the corresponding table flow is the primary fragment.
pub fn is_primary_fragment(&self) -> bool {
match self.specific {
SpecificFragmentInfo::InlineBlock(_) |
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) |
SpecificFragmentInfo::InlineAbsolute(_) |
SpecificFragmentInfo::TableWrapper => false,
SpecificFragmentInfo::Canvas(_) |
SpecificFragmentInfo::Generic |
SpecificFragmentInfo::GeneratedContent(_) |
SpecificFragmentInfo::Iframe(_) |
SpecificFragmentInfo::Image(_) |
SpecificFragmentInfo::ScannedText(_) |
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableColumn(_) |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::UnscannedText(_) => true,
}
}
/// Determines the inline sizes of inline-block fragments. These cannot be fully computed until
    /// inline size assignment has run for the child flow; thus they are computed "late", during
/// block size assignment.
pub fn update_late_computed_replaced_inline_size_if_necessary(&mut self) {
if let SpecificFragmentInfo::InlineBlock(ref mut inline_block_info) = self.specific {
let block_flow = flow_ref::deref_mut(&mut inline_block_info.flow_ref).as_block();
let margin = block_flow.fragment.style.logical_margin();
self.border_box.size.inline = block_flow.fragment.border_box.size.inline +
MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() +
MaybeAuto::from_style(margin.inline_end, Au(0)).specified_or_zero()
}
}
pub fn update_late_computed_inline_position_if_necessary(&mut self) {
match self.specific {
SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
let position = self.border_box.start.i;
flow_ref::deref_mut(&mut info.flow_ref)
.update_late_computed_inline_position_if_necessary(position)
}
_ => {}
}
}
pub fn update_late_computed_block_position_if_necessary(&mut self) {
match self.specific {
SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
let position = self.border_box.start.b;
flow_ref::deref_mut(&mut info.flow_ref)
.update_late_computed_block_position_if_necessary(position)
}
_ => {}
}
}
pub fn repair_style(&mut self, new_style: &Arc<ComputedValues>) {
self.style = (*new_style).clone()
}
/// Given the stacking-context-relative position of the containing flow, returns the border box
/// of this fragment relative to the parent stacking context. This takes `position: relative`
/// into account.
///
/// If `coordinate_system` is `Parent`, this returns the border box in the parent stacking
/// context's coordinate system. Otherwise, if `coordinate_system` is `Own` and this fragment
/// establishes a stacking context itself, this returns a border box anchored at (0, 0). (If
/// this fragment does not establish a stacking context, then it always belongs to its parent
/// stacking context and thus `coordinate_system` is ignored.)
///
/// This is the method you should use for display list construction as well as
/// `getBoundingClientRect()` and so forth.
pub fn stacking_relative_border_box(&self,
stacking_relative_flow_origin: &Point2D<Au>,
relative_containing_block_size: &LogicalSize<Au>,
relative_containing_block_mode: WritingMode,
coordinate_system: CoordinateSystem)
-> Rect<Au> {
let container_size =
relative_containing_block_size.to_physical(relative_containing_block_mode);
let border_box = self.border_box.to_physical(self.style.writing_mode, container_size);
if coordinate_system == CoordinateSystem::Own && self.establishes_stacking_context() {
return Rect::new(ZERO_POINT, border_box.size)
}
// FIXME(pcwalton): This can double-count relative position sometimes for inlines (e.g.
// `<div style="position:relative">x</div>`, because the `position:relative` trickles down
// to the inline flow. Possibly we should extend the notion of "primary fragment" to fix
// this.
let relative_position = self.relative_position(relative_containing_block_size);
border_box.translate_by_size(&relative_position.to_physical(self.style.writing_mode))
.translate(stacking_relative_flow_origin)
}
/// Given the stacking-context-relative border box, returns the stacking-context-relative
/// content box.
pub fn stacking_relative_content_box(&self, stacking_relative_border_box: &Rect<Au>)
-> Rect<Au> {
let border_padding = self.border_padding.to_physical(self.style.writing_mode);
Rect::new(Point2D::new(stacking_relative_border_box.origin.x + border_padding.left,
stacking_relative_border_box.origin.y + border_padding.top),
Size2D::new(stacking_relative_border_box.size.width - border_padding.horizontal(),
stacking_relative_border_box.size.height - border_padding.vertical()))
}
/// Returns true if this fragment establishes a new stacking context and false otherwise.
pub fn establishes_stacking_context(&self) -> bool {
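        // Summarized criteria (see the checks below): explicit layerization,
        // non-default opacity, filters, blend modes, transforms, and
        // positioned elements with a non-auto `z-index` or non-visible
        // overflow all force a stacking context.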
if self.flags.contains(HAS_LAYER) {
return true
}
if self.style().get_effects().opacity != 1.0 {
return true
}
if !self.style().get_effects().filter.is_empty() {
return true
}
if self.style().get_effects().mix_blend_mode != mix_blend_mode::T::normal {
return true
}
if self.style().get_effects().transform.0.is_some() {
return true
}
match self.style().get_used_transform_style() {
transform_style::T::flat | transform_style::T::preserve_3d => {
return true
}
transform_style::T::auto => {}
}
        // Canvas always layerizes, as a special case.
// FIXME(pcwalton): Don't unconditionally form stacking contexts for each canvas.
if let SpecificFragmentInfo::Canvas(_) = self.specific {
return true
}
// FIXME(pcwalton): Don't unconditionally form stacking contexts for `overflow_x: scroll`
// and `overflow_y: scroll`. This needs multiple layers per stacking context.
match (self.style().get_box().position,
self.style().get_box().z_index,
self.style().get_box().overflow_x,
self.style().get_box().overflow_y.0) {
(position::T::absolute,
z_index::T::Auto,
overflow_x::T::visible,
overflow_x::T::visible) |
(position::T::fixed,
z_index::T::Auto,
overflow_x::T::visible,
overflow_x::T::visible) |
(position::T::relative,
z_index::T::Auto,
overflow_x::T::visible,
overflow_x::T::visible) => false,
(position::T::absolute, _, _, _) |
(position::T::fixed, _, _, _) |
(position::T::relative, _, _, _) => true,
(position::T::static_, _, _, _) => {
false
}
}
}
/// Computes the overflow rect of this fragment relative to the start of the flow.
pub fn compute_overflow(&self, relative_containing_block_size: &LogicalSize<Au>) -> Rect<Au> {
// FIXME(pcwalton, #2795): Get the real container size.
let container_size = Size2D::zero();
let mut border_box = self.border_box.to_physical(self.style.writing_mode, container_size);
// Relative position can cause us to draw outside our border box.
//
// FIXME(pcwalton): I'm not a fan of the way this makes us crawl though so many styles all
// the time. Can't we handle relative positioning by just adjusting `border_box`?
let relative_position = self.relative_position(relative_containing_block_size);
border_box =
border_box.translate_by_size(&relative_position.to_physical(self.style.writing_mode));
let mut overflow = border_box;
// Box shadows cause us to draw outside our border box.
for box_shadow in &self.style().get_effects().box_shadow.0 {
let offset = Point2D::new(box_shadow.offset_x, box_shadow.offset_y);
let inflation = box_shadow.spread_radius + box_shadow.blur_radius *
BLUR_INFLATION_FACTOR;
overflow = overflow.union(&border_box.translate(&offset).inflate(inflation, inflation))
}
// Outlines cause us to draw outside our border box.
let outline_width = self.style.get_outline().outline_width;
if outline_width != Au(0) {
overflow = overflow.union(&border_box.inflate(outline_width, outline_width))
}
// Include the overflow of the block flow, if any.
match self.specific {
SpecificFragmentInfo::InlineBlock(ref info) => {
let block_flow = info.flow_ref.as_block();
overflow = overflow.union(&flow::base(block_flow).overflow);
}
SpecificFragmentInfo::InlineAbsolute(ref info) => {
let block_flow = info.flow_ref.as_block();
overflow = overflow.union(&flow::base(block_flow).overflow);
}
_ => (),
}
// FIXME(pcwalton): Sometimes excessively fancy glyphs can make us draw outside our border
// box too.
overflow
}
/// Removes any compositor layers associated with this fragment; it is being removed from
/// the tree or had its `display` property set to `none`.
/// TODO(gw): This just hides the compositor layer for now. In the future
/// it probably makes sense to provide a hint to the compositor whether
/// the layers should be destroyed to free memory.
pub fn remove_compositor_layers(&self, constellation_chan: ConstellationChan) {
match self.specific {
SpecificFragmentInfo::Iframe(ref iframe_info) => {
let ConstellationChan(ref chan) = constellation_chan;
chan.send(Msg::FrameRect(iframe_info.pipeline_id,
iframe_info.subpage_id,
Rect::zero())).unwrap();
}
_ => {}
}
}
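/// Returns true if this is a scanned text fragment that requires a line break afterward
/// when wrapping on newlines (typically because its text run ends with a newline).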
pub fn requires_line_break_afterward_if_wrapping_on_newlines(&self) -> bool {
match self.specific {
SpecificFragmentInfo::ScannedText(ref scanned_text) => {
scanned_text.requires_line_break_afterward_if_wrapping_on_newlines
}
_ => false,
}
}
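/// Strips leading whitespace from this fragment unless `white-space: pre` is in effect,
/// returning a `WhitespaceStrippingResult` that indicates whether the fragment should be
/// retained.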
pub fn strip_leading_whitespace_if_necessary(&mut self) -> WhitespaceStrippingResult {
if self.style.get_inheritedtext().white_space == white_space::T::pre {
return WhitespaceStrippingResult::RetainFragment
}
match self.specific {
SpecificFragmentInfo::ScannedText(ref mut scanned_text_fragment_info) => {
let mut leading_whitespace_character_count = 0;
{
let text = slice_chars(
&*scanned_text_fragment_info.run.text,
scanned_text_fragment_info.range.begin().to_usize(),
scanned_text_fragment_info.range.end().to_usize());
for character in text.chars() {
if util::str::char_is_whitespace(character) {
leading_whitespace_character_count += 1
} else {
break
}
}
}
let whitespace_range = Range::new(scanned_text_fragment_info.range.begin(),
CharIndex(leading_whitespace_character_count));
let text_bounds =
scanned_text_fragment_info.run.metrics_for_range(&whitespace_range).bounding_box;
self.border_box.size.inline = self.border_box.size.inline - text_bounds.size.width;
scanned_text_fragment_info.content_size.inline =
scanned_text_fragment_info.content_size.inline - text_bounds.size.width;
scanned_text_fragment_info.range.adjust_by(
CharIndex(leading_whitespace_character_count),
-CharIndex(leading_whitespace_character_count));
return WhitespaceStrippingResult::RetainFragment
}
SpecificFragmentInfo::UnscannedText(ref mut unscanned_text_fragment_info) => {
let mut new_text_string = String::new();
let mut modified = false;
for (i, character) in unscanned_text_fragment_info.text.char_indices() {
if gfx::text::util::is_bidi_control(character) {
new_text_string.push(character);
continue
}
if util::str::char_is_whitespace(character) {
modified = true;
continue
}
new_text_string.push_str(&unscanned_text_fragment_info.text[i..]);
break
}
if modified {
unscanned_text_fragment_info.text = new_text_string.into_boxed_str();
}
WhitespaceStrippingResult::from_unscanned_text_fragment_info(
&unscanned_text_fragment_info)
}
_ => WhitespaceStrippingResult::RetainFragment,
}
}
/// Strips trailing whitespace from this fragment unless `white-space: pre` is in effect,
/// returning a `WhitespaceStrippingResult` that indicates whether the fragment should be
/// retained.
pub fn strip_trailing_whitespace_if_necessary(&mut self) -> WhitespaceStrippingResult {
if self.style.get_inheritedtext().white_space == white_space::T::pre {
return WhitespaceStrippingResult::RetainFragment
}
match self.specific {
SpecificFragmentInfo::ScannedText(ref mut scanned_text_fragment_info) => {
// FIXME(pcwalton): Is there a more clever (i.e. faster) way to do this?
debug!("stripping trailing whitespace: range={:?}, len={}",
scanned_text_fragment_info.range,
scanned_text_fragment_info.run.text.chars().count());
let mut trailing_whitespace_character_count = 0;
let text_bounds;
{
let text = slice_chars(&*scanned_text_fragment_info.run.text,
scanned_text_fragment_info.range.begin().to_usize(),
scanned_text_fragment_info.range.end().to_usize());
for ch in text.chars().rev() {
if util::str::char_is_whitespace(ch) {
trailing_whitespace_character_count += 1
} else {
break
}
}
let whitespace_range =
Range::new(scanned_text_fragment_info.range.end() -
CharIndex(trailing_whitespace_character_count),
CharIndex(trailing_whitespace_character_count));
text_bounds = scanned_text_fragment_info.run
.metrics_for_range(&whitespace_range)
.bounding_box;
self.border_box.size.inline = self.border_box.size.inline -
text_bounds.size.width;
}
scanned_text_fragment_info.content_size.inline =
scanned_text_fragment_info.content_size.inline - text_bounds.size.width;
if trailing_whitespace_character_count != 0 {
scanned_text_fragment_info.range.extend_by(
CharIndex(-trailing_whitespace_character_count));
}
WhitespaceStrippingResult::RetainFragment
}
SpecificFragmentInfo::UnscannedText(ref mut unscanned_text_fragment_info) => {
let mut trailing_bidi_control_characters_to_retain = Vec::new();
// Note that `modified` starts out true (unlike the leading-whitespace case above), so
// the rebuild below runs even when no trailing whitespace is found.
let (mut modified, mut last_character_index) = (true, 0);
for (i, character) in unscanned_text_fragment_info.text.char_indices().rev() {
if gfx::text::util::is_bidi_control(character) {
trailing_bidi_control_characters_to_retain.push(character);
continue
}
if util::str::char_is_whitespace(character) {
modified = true;
continue
}
last_character_index = i + character.len_utf8();
break
}
if modified {
let mut text = unscanned_text_fragment_info.text.to_string();
text.truncate(last_character_index);
for character in trailing_bidi_control_characters_to_retain.iter().rev() {
text.push(*character);
}
unscanned_text_fragment_info.text = text.into_boxed_str();
}
WhitespaceStrippingResult::from_unscanned_text_fragment_info(
&unscanned_text_fragment_info)
}
_ => WhitespaceStrippingResult::RetainFragment,
}
}
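/// Returns an iterator that yields this fragment's primary style followed by the style of
/// each node in its inline fragment context, if any.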
pub fn inline_styles<'a>(&'a self) -> InlineStyleIterator<'a> {
InlineStyleIterator::new(self)
}
/// Returns the inline-size of this fragment's margin box.
pub fn margin_box_inline_size(&self) -> Au {
self.border_box.size.inline + self.margin.inline_start_end()
}
/// Returns true if this node *or any of the nodes within its inline fragment context* have
/// non-`static` `position`.
pub fn is_positioned(&self) -> bool {
if self.style.get_box().position != position::T::static_ {
return true
}
if let Some(ref inline_context) = self.inline_context {
for node in inline_context.nodes.iter() {
if node.style.get_box().position != position::T::static_ {
return true
}
}
}
false
}
/// Returns true if this node is absolutely positioned.
pub fn is_absolutely_positioned(&self) -> bool {
self.style.get_box().position == position::T::absolute
}
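/// Returns true if this is a fragment for an absolutely-positioned element that occurs
/// within an inline flow.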
pub fn is_inline_absolute(&self) -> bool {
match self.specific {
SpecificFragmentInfo::InlineAbsolute(..) => true,
_ => false,
}
}
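/// Merges inline context information from `next_fragment` into this fragment: any node in
/// the next fragment's inline context that is flagged as the last fragment of its element,
/// and that refers to the same node at the same position here, has that flag copied over.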
pub fn meld_with_next_inline_fragment(&mut self, next_fragment: &Fragment) {
if let Some(ref mut inline_context_of_this_fragment) = self.inline_context {
if let Some(ref inline_context_of_next_fragment) = next_fragment.inline_context {
for (i, inline_context_node_from_next_fragment) in
inline_context_of_next_fragment.nodes.iter().enumerate() {
if i >= inline_context_of_this_fragment.nodes.len() {
continue
}
if !inline_context_node_from_next_fragment.flags.contains(
LAST_FRAGMENT_OF_ELEMENT) {
continue
}
if inline_context_node_from_next_fragment.address !=
inline_context_of_this_fragment.nodes[i].address {
continue
}
inline_context_of_this_fragment.nodes[i].flags.insert(
LAST_FRAGMENT_OF_ELEMENT);
}
}
}
}
}
impl fmt::Debug for Fragment {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "({} {} ", self.debug_id(), self.specific.get_type()));
try!(write!(f, "bb {:?} bp {:?} m {:?}{:?}",
self.border_box,
self.border_padding,
self.margin,
self.specific));
write!(f, ")")
}
}
bitflags! {
flags QuantitiesIncludedInIntrinsicInlineSizes: u8 {
const INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS = 0x01,
const INTRINSIC_INLINE_SIZE_INCLUDES_PADDING = 0x02,
const INTRINSIC_INLINE_SIZE_INCLUDES_BORDER = 0x04,
const INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED = 0x08,
}
}
bitflags! {
// Various flags we can use when splitting fragments. See
// `calculate_split_position_using_breaking_strategy()`.
flags SplitOptions: u8 {
#[doc = "True if this is the first fragment on the line."]
const STARTS_LINE = 0x01,
#[doc = "True if we should attempt to split at character boundaries if this split fails. \
This is used to implement `overflow-wrap: break-word`."]
const RETRY_AT_CHARACTER_BOUNDARIES = 0x02,
}
}
/// A top-down fragment border box iteration handler.
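///
/// A minimal sketch of an implementor (purely illustrative; `BorderBoxCollector` is not
/// part of this module):
///
/// ```ignore
/// struct BorderBoxCollector {
///     border_boxes: Vec<Rect<Au>>,
/// }
///
/// impl FragmentBorderBoxIterator for BorderBoxCollector {
///     fn process(&mut self, _: &Fragment, _: i32, border_box: &Rect<Au>) {
///         self.border_boxes.push(*border_box)
///     }
///     fn should_process(&mut self, _: &Fragment) -> bool {
///         true
///     }
/// }
/// ```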
pub trait FragmentBorderBoxIterator {
/// The operation to perform.
fn process(&mut self, fragment: &Fragment, level: i32, overflow: &Rect<Au>);
/// Returns true if `process` should be invoked for this fragment. If this returns false,
/// the operation is skipped for this fragment, but its siblings are still processed.
fn should_process(&mut self, fragment: &Fragment) -> bool;
}
/// The coordinate system used in `stacking_relative_border_box()`. See the documentation of that
/// method for details.
#[derive(Clone, PartialEq, Debug)]
pub enum CoordinateSystem {
/// The border box returned is relative to the fragment's parent stacking context.
Parent,
/// The border box returned is relative to the fragment's own stacking context, if applicable.
Own,
}
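/// An iterator over a fragment's styles: the fragment's own style first, then the style of
/// each node in its inline fragment context. See `Fragment::inline_styles`.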
pub struct InlineStyleIterator<'a> {
fragment: &'a Fragment,
inline_style_index: usize,
primary_style_yielded: bool,
}
impl<'a> Iterator for InlineStyleIterator<'a> {
type Item = &'a ComputedValues;
fn next(&mut self) -> Option<&'a ComputedValues> {
if !self.primary_style_yielded {
self.primary_style_yielded = true;
return Some(&*self.fragment.style)
}
let inline_context = match self.fragment.inline_context {
None => return None,
Some(ref inline_context) => inline_context,
};
let inline_style_index = self.inline_style_index;
if inline_style_index == inline_context.nodes.len() {
return None
}
self.inline_style_index += 1;
Some(&*inline_context.nodes[inline_style_index].style)
}
}
impl<'a> InlineStyleIterator<'a> {
fn new<'b>(fragment: &'b Fragment) -> InlineStyleIterator<'b> {
InlineStyleIterator {
fragment: fragment,
inline_style_index: 0,
primary_style_yielded: false,
}
}
}
/// The outcome of stripping whitespace from a fragment.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum WhitespaceStrippingResult {
/// The fragment still contains content and should be retained.
RetainFragment,
/// After stripping, only bidi control characters remain.
FragmentContainedOnlyBidiControlCharacters,
/// After stripping, nothing remains; the fragment contained only whitespace.
FragmentContainedOnlyWhitespace,
}
impl WhitespaceStrippingResult {
fn from_unscanned_text_fragment_info(info: &UnscannedTextFragmentInfo)
-> WhitespaceStrippingResult {
if info.text.is_empty() {
WhitespaceStrippingResult::FragmentContainedOnlyWhitespace
} else if info.text.chars().all(gfx::text::util::is_bidi_control) {
WhitespaceStrippingResult::FragmentContainedOnlyBidiControlCharacters
} else {
WhitespaceStrippingResult::RetainFragment
}
}
}
bitflags! {
flags FragmentFlags: u8 {
/// Whether this fragment has a layer.
const HAS_LAYER = 0x01,
}
}<|fim▁end|> | }