prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import re
from django import forms
from django.contrib.auth.models import User
from django.forms import formset_factory
from django.forms.widgets import TextInput
from django.utils import timezone
from dal import autocomplete
from tagging.fields import TagField
import accounts.utils
from bulb.models import Book, NeededBook, Request, Group, Session, Report, Membership, ReaderProfile, Recruitment, NewspaperSignup, DewanyaSuggestion, BookCommitment, RecommendedBook, BookRecommendation
from bulb import models, utils
# (value, label) choices for limiting a group to a city.
# '-' means "no restriction" (all three cities).
# NOTE: the value for Al-Ahsa previously contained a typo (خ instead of ح),
# so it could never match the city name returned by
# accounts.utils.get_user_city (compared elsewhere as u'الأحساء').
city_choices = (
    ('-', u'الرياض وجدة والأحساء'),
    (u'الرياض', u'الرياض فقط'),
    (u'جدة', u'جدة فقط'),
    (u'الأحساء', u'الأحساء فقط'),
)
# (value, label) choices for limiting a group/session by gender;
# '-' means open to both male and female students.
gender_choices = (
    ('-', u'الطلاب والطالبات'),
    ('F', u'الطالبات'),
    ('M', u'الطلاب'),
)
class CommonControl:
    """Mixin for forms with a ``gender`` choice field.

    Narrows the choices to "both" plus the editing user's own gender, and
    defaults new objects to that gender.  Intended for users who are
    neither superusers nor Bulb coordinators.
    """

    def control_gender(self):
        """Restrict ``self.fields['gender']`` based on ``self.user_gender``."""
        labels = {'F': u'الطالبات', 'M': u'الطلاب'}
        own_label = labels.get(self.user_gender)
        if own_label is None:
            # Unknown gender: leave the field untouched.
            return
        field = self.fields['gender']
        # Only pre-select a default for objects that are not saved yet.
        if not self.instance.id:
            field.initial = self.user_gender
        field.choices = (
            ('-', u'الطلاب والطالبات'),
            (self.user_gender, own_label),
        )
class NeededBookForm(forms.ModelForm):
    """Form for adding/editing a book that users are looking for."""
    class Meta:
        model = models.NeededBook
        fields = ['title', 'authors', 'description', 'cover', 'tags',
                  'category']
class GenericBookForm(forms.ModelForm):
    """Base class for the book contribution forms.

    Hides the ``is_publicly_owned`` field from users who are not
    superusers, Bulb coordinators/deputies, or Bulb members.
    """
    def __init__(self, *args, **kwargs):
        # Remove is_publicly_owned field from ordinary users.
        user = kwargs.pop('user')
        super(GenericBookForm, self).__init__(*args, **kwargs)
        if not user.is_superuser and \
           not utils.is_bulb_coordinator_or_deputy(user) and \
           not utils.is_bulb_member(user):
            del self.fields['is_publicly_owned']
class BookEditForm(GenericBookForm):
    """Form used to edit books. It allows changing contribution type from
    giving to lending."""
    # Explicit TagField so tags stay editable as free text.
    tags = TagField()
    class Meta:
        model = models.Book
        fields = ['title', 'authors', 'edition', 'pages', 'condition',
                  'description', 'cover', 'tags', 'category',
                  'contribution', 'available_until', 'is_publicly_owned']
class BookGiveForm(GenericBookForm):
    """Form for contributing a book as a permanent give-away
    (no ``available_until`` date, unlike lending)."""
    class Meta:
        model = models.Book
        fields = ['title', 'authors', 'edition', 'pages',
                  'condition', 'description', 'cover', 'tags',
                  'category', 'is_publicly_owned']
class BookLendForm(GenericBookForm):
    """Form for lending a book until ``available_until``."""
    class Meta:
        model = models.Book
        fields = ['title', 'authors', 'edition', 'pages', 'condition',
                  'description', 'cover', 'category', 'tags',
                  'available_until', 'is_publicly_owned']
class RequestForm(forms.ModelForm):
    """Form for requesting a book.

    Requires a borrowing end date when the requested book is lent
    (contribution type 'L') rather than given away.
    """
    def __init__(self, *args, **kwargs):
        instance = kwargs.get('instance', None)
        super(RequestForm, self).__init__(*args, **kwargs)
        # Guard against the form being constructed without an instance
        # (previously this raised AttributeError on `instance.book`).
        if instance is not None and instance.book.contribution == 'L':
            self.fields['borrowing_end_date'].required = True

    def clean_delivery(self):
        """Force indirect delivery ('I') when requester and owner do not
        share the same gender; otherwise honor the requested mode."""
        data = self.cleaned_data['delivery']
        if not data:
            return data
        requester_gender = accounts.utils.get_user_gender(self.instance.requester)
        owner_gender = accounts.utils.get_user_gender(self.instance.book.submitter)
        if data == 'I' or requester_gender != owner_gender:
            delivery = 'I'
        else:
            delivery = 'D'
        return delivery

    class Meta:
        model = models.Request
        fields = ['delivery', 'borrowing_end_date']
        widgets = {'delivery': forms.HiddenInput()}
class GroupForm(forms.ModelForm, CommonControl):
    """Form for creating/editing a reading group.

    Ordinary users may only limit the group to their own city/gender;
    superusers and Bulb coordinators keep the full choice lists.
    """
    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')
        super(GroupForm, self).__init__(*args, **kwargs)
        # After creating the group, members can be controlled from a
        # dedicated page.
        if self.instance.pk:
            del self.fields['members']
        if self.instance.id:
            self.user_city = accounts.utils.get_user_city(self.instance.coordinator)
            self.user_gender = accounts.utils.get_user_gender(self.instance.coordinator)
            if self.instance.is_limited_by_city:
                self.fields['city'].initial = self.user_city
            if self.instance.is_limited_by_gender:
                self.fields['gender'].initial = self.user_gender
        else:
            self.user_city = accounts.utils.get_user_city(self.user)
            self.user_gender = accounts.utils.get_user_gender(self.user)
            self.fields['city'].initial = '-'
        if not self.user.is_superuser and \
           not utils.is_bulb_coordinator_or_deputy(self.user):
            self.control_gender()
            # Use the same values as the module-level ``city_choices`` so
            # that ``save()`` can compare the selection against the user's
            # city.  (Previously these used letter codes 'R'/'A'/'J',
            # which never matched get_user_city's output, so
            # is_limited_by_city could never become True.)
            if self.user_city == u'الرياض':
                self.fields['city'].choices = (
                    ('-', u'الرياض وجدة والأحساء'),
                    (u'الرياض', u'الرياض فقط'),
                )
            elif self.user_city == u'الأحساء':
                self.fields['city'].choices = (
                    ('-', u'الرياض وجدة والأحساء'),
                    (u'الأحساء', u'الأحساء فقط'),
                )
            elif self.user_city == u'جدة':
                self.fields['city'].choices = (
                    ('-', u'الرياض وجدة والأحساء'),
                    (u'جدة', u'جدة فقط'),
                )

    gender = forms.ChoiceField(choices=gender_choices, label=u"المجموعة تقبل عضوية")
    city = forms.ChoiceField(choices=city_choices, label=u"تشمل المجموعة")
    members = forms.ModelMultipleChoiceField(
        widget=autocomplete.ModelSelect2Multiple(url='bulb:bulb-user-autocomplete',
                                                 attrs={
                                                     'data-html': 'true',
                                                     'data-placeholder': 'أَضف عنصرا',
                                                 }),
        label=u"الأعضاء",
        queryset=User.objects.all(),
        required=False)

    def save(self):
        """Save the group, deriving the limitation flags from whether the
        chosen gender/city equals the coordinator's own."""
        group = super(GroupForm, self).save(commit=False)
        group.is_limited_by_gender = (self.user_gender == self.cleaned_data['gender'])
        group.is_limited_by_city = (self.user_city == self.cleaned_data['city'])
        group.save()
        return group

    class Meta:
        model = models.Group
        fields = ['name', 'image', 'description', 'category',
                  'is_private']
class FreeSessionForm(forms.ModelForm, CommonControl):
    """Form for creating an open reading session, optionally limited to
    one gender."""
    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')
        super(FreeSessionForm, self).__init__(*args, **kwargs)
        self.user_city = accounts.utils.get_user_city(self.user)
        self.user_gender = accounts.utils.get_user_gender(self.user)
        # Limit the choice only if the user is neither a superuser nor a
        # Bulb coordinator.
        if not self.user.is_superuser and \
           not utils.is_bulb_coordinator_or_deputy(self.user):
            self.control_gender()

    def save(self):
        """Save the session, deriving the gender-limitation flag.

        Mirrors GroupForm.save(): the flag is now also explicitly cleared
        when the session is open to both genders (previously it was left
        unchanged, keeping a stale True on edits).
        """
        session = super(FreeSessionForm, self).save(commit=False)
        session.is_limited_by_gender = (self.user_gender == self.cleaned_data['gender'])
        session.save()
        return session

    gender = forms.ChoiceField(choices=gender_choices, label=u"الجلسة تقبل حضور")

    class Meta:
        model = models.Session
        fields = ['title', 'agenda', 'location', 'date', 'start_time',
                  'end_time']
class SessionForm(forms.ModelForm):
    """Plain session form (no gender-limitation controls)."""
    class Meta:
        model = models.Session
        fields = ['title', 'agenda', 'location', 'date', 'start_time',
                  'end_time']
class ReportForm(forms.ModelForm):
    """Form for filing a session report (attendee list only)."""
    attendees = forms.ModelMultipleChoiceField(
        widget=autocomplete.ModelSelect2Multiple(url='bulb:bulb-user-autocomplete',
                                                 attrs={
                                                     'data-placeholder': 'أَضف اسما',
                                                     'data-html': 'true',
                                                 }),
        label=u"الحضور",
        queryset=User.objects.all(),
        required=False)
    class Meta:
        model = models.Report
        fields = ['attendees']  # 'description' intentionally disabled
class ReaderProfileForm(forms.ModelForm):
    """Form for a user's reader profile.

    Normalizes Twitter handles (strips URL prefixes and a leading '@')
    and Goodreads profile URLs (forces https).
    """
    def clean_twitter(self):
        """Reduce a twitter.com URL or '@handle' to the bare handle and
        validate it."""
        data = self.cleaned_data['twitter']
        if not data:
            return data
        # Raw strings: the previous non-raw patterns relied on Python
        # passing unknown escapes through, which is deprecated.
        data = re.sub(r'^(?:https?://(?:m\.)?twitter\.com/)?@?', '', data)
        if not re.match(r'^[A-Za-z\d_]+$', data):
            raise forms.ValidationError(u"أدخل اسم مستخدم صحيح.")
        return data

    def clean_goodreads(self):
        """Validate a Goodreads profile URL and normalize it to https."""
        data = self.cleaned_data['goodreads']
        if not data:
            return data
        # The dot after "www" is now escaped; the old pattern accepted
        # e.g. "wwwXgoodreads.com".
        if not re.match(r'^(?:https?://)?(?:www\.)?goodreads\.com/user/show/', data):
            raise forms.ValidationError(u"أدخل رابط صفحتك على Goodreads.")
        # Force https, adding the scheme when it is missing entirely.
        data = re.sub('^http://', 'https://', data)
        if not re.match('^https?://', data):
            data = u"https://" + data
        return data

    class Meta:
        model = models.ReaderProfile
        fields = ['areas_of_interests', 'favorite_books',
                  'favorite_writers', 'average_reading',
                  'goodreads', 'twitter']
class RecruitmentForm(forms.ModelForm):
    """Membership recruitment form; user and year are filled in the view."""
    class Meta:
        model = models.Recruitment
        exclude = ['user', 'year']
class NewspaperSignupForm(forms.ModelForm):
    """Newsletter signup form; email is mandatory."""
    email = forms.EmailField(required=True)
    class Meta:
        model = models.NewspaperSignup
        fields = ['email']
class DewanyaSuggestionForm(forms.ModelForm):
class Meta:
model = models.DewanyaSuggestion
fields = ['name', 'subject']<|fim▁hole|>class BookCommitmentForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
readathon = kwargs.pop('readathon')
super(BookCommitmentForm, self).__init__(*args, **kwargs)
if readathon.start_date < timezone.now().date():
del self.fields['wants_to_attend']
class Meta:
model = models.BookCommitment
fields = ['title', 'cover', 'pages', 'reason',
'wants_to_attend', 'wants_to_contribute']
class UpdateBookCommitmentForm(forms.ModelForm):
class Meta:
model = models.BookCommitment
fields = ['pages', 'completed_pages']
class CulturalProgramForm(forms.Form):
user = forms.ModelChoiceField(
widget=autocomplete.ModelSelect2(url='bulb:bulb-user-autocomplete',
attrs={
'data-html': 'true',
'data-placeholder': 'أَضف شخصا',
}),
label=u"المستعير/ة",
queryset=User.objects.filter(is_active=True))
book = forms.ModelChoiceField(
widget=autocomplete.ModelSelect2(url='bulb:bulb-book-autocomplete',
attrs={
'data-placeholder': 'أَضف كتابا',
}),
label=u"الكتاب",
queryset=models.Book.objects.available())
class EditBookRecommendationForm(forms.ModelForm):
class Meta:
model = models.BookRecommendation
fields = ['comment']
class AddBookRecommendationForm(forms.Form):
recommended_book = forms.ModelChoiceField(required=False,
widget=autocomplete.ModelSelect2(url='bulb:bulb-recommended-book-autocomplete',
attrs={
'data-html': 'true',
'data-placeholder': 'أَضف كتابا',
}),
label=u"الكتاب",
queryset=models.RecommendedBook.objects.all())
category = forms.ModelChoiceField(label=u"التصنيف",
required=False,
queryset=models.Category.objects.filter(is_meta=False))
title = forms.CharField(required=False, max_length=200, label=u"العنوان")
authors = forms.CharField(required=False, max_length=200, label=u"تأليف")
cover = forms.ImageField(required=False, label=u"الغلاف")
comment = forms.CharField(widget=forms.Textarea(attrs={'class': 'form-control input-lg'}), label=u"تعليق")
def clean(self):
cleaned_data = super(AddBookRecommendationForm, self).clean()
self.recommended_book = self.cleaned_data.get('recommended_book')
self.recommended_book_fields = {'title': self.cleaned_data['title'],
'authors': self.cleaned_data['authors'],
'category': self.cleaned_data['category'],
'cover': self.cleaned_data['cover']}
if not self.recommended_book and\
not all(self.recommended_book_fields.values()):
raise forms.ValidationError(u"لم تدخل بيانات كافية عن الكتاب")
def save(self, user):
if self.recommended_book:
book_recommendation = models.BookRecommendation.objects\
.create(recommended_book=self.recommended_book,
user=user,
comment=self.cleaned_data['comment'])
else:
recommended_book = models.RecommendedBook.objects.create(**self.recommended_book_fields)
book_recommendation = models.BookRecommendation.objects\
.create(recommended_book=recommended_book,
user=user,
comment=self.cleaned_data['comment'])
return book_recommendation<|fim▁end|> | widgets = {'name': forms.widgets.TextInput(attrs={'class': 'user-autocomplete'})}
DewanyaSuggestionFormSet = forms.formset_factory(DewanyaSuggestionForm, extra=3)
|
<|file_name|>html5shiv-printshiv.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|> | oid sha256:df5a7287a63d8b28fe1df2552b7f2deaa719327f8aa49fef192f5fb72bbbbaad
size 4023 |
<|file_name|>proxy.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from pywb.framework.wbrequestresponse import WbResponse, WbRequest
from pywb.framework.archivalrouter import ArchivalRouter
from six.moves.urllib.parse import urlsplit
import base64
import socket
import ssl
from io import BytesIO
from pywb.rewrite.url_rewriter import SchemeOnlyUrlRewriter, UrlRewriter
from pywb.rewrite.rewrite_content import RewriteContent
from pywb.utils.wbexception import BadRequestException
from pywb.utils.bufferedreaders import BufferedReader
from pywb.utils.loaders import to_native_str
from pywb.framework.proxy_resolvers import ProxyAuthResolver, CookieResolver, IPCacheResolver
from tempfile import SpooledTemporaryFile
#=================================================================
class ProxyArchivalRouter(ArchivalRouter):
    """
    A router which combines both archival and proxy modes support
    First, request is treated as a proxy request using ProxyRouter
    Second, if not handled by the router, it is treated as a regular
    archival mode request.
    """
    def __init__(self, routes, **kwargs):
        super(ProxyArchivalRouter, self).__init__(routes, **kwargs)
        self.proxy = ProxyRouter(routes, **kwargs)

    def __call__(self, env):
        # Proxy handling first, archival fallback second; implicitly
        # returns None when neither produces a response.
        response = self.proxy(env)
        if response:
            return response
        response = super(ProxyArchivalRouter, self).__call__(env)
        if response:
            return response
#=================================================================
class ProxyRouter(object):
"""
A router which supports http proxy mode requests
Handles requests of the form: GET http://example.com
The router returns latest capture by default.
However, if Memento protocol support is enabled,
the memento Accept-Datetime header can be used
to select specific capture.
See: http://www.mementoweb.org/guide/rfc/#Pattern1.3
for more details.
"""
BLOCK_SIZE = 4096
DEF_MAGIC_NAME = 'pywb.proxy'
BUFF_RESPONSE_MEM_SIZE = 1024*1024
CERT_DL_PEM = '/pywb-ca.pem'
CERT_DL_P12 = '/pywb-ca.p12'
CA_ROOT_FILE = './ca/pywb-ca.pem'
CA_ROOT_NAME = 'pywb https proxy replay CA'
CA_CERTS_DIR = './ca/certs/'
EXTRA_HEADERS = {'cache-control': 'no-cache',
'connection': 'close',
'p3p': 'CP="NOI ADM DEV COM NAV OUR STP"'}
    def __init__(self, routes, **kwargs):
        """Read ``proxy_options`` from the config and set up the resolver,
        banner/rewrite flags and, when enabled, the HTTPS certificate
        authority."""
        self.error_view = kwargs.get('error_view')
        proxy_options = kwargs.get('config', {})
        if proxy_options:
            proxy_options = proxy_options.get('proxy_options', {})
        self.magic_name = proxy_options.get('magic_name')
        if not self.magic_name:
            self.magic_name = self.DEF_MAGIC_NAME
            proxy_options['magic_name'] = self.magic_name
        self.extra_headers = proxy_options.get('extra_headers')
        if not self.extra_headers:
            self.extra_headers = self.EXTRA_HEADERS
            proxy_options['extra_headers'] = self.extra_headers
        # Select the session resolver: 'auth' -> proxy-auth, 'ip' -> IP
        # cache, anything else (default) -> cookie based.
        res_type = proxy_options.get('cookie_resolver', True)
        if res_type == 'auth' or not res_type:
            self.resolver = ProxyAuthResolver(routes, proxy_options)
        elif res_type == 'ip':
            self.resolver = IPCacheResolver(routes, proxy_options)
        #elif res_type == True or res_type == 'cookie':
        #    self.resolver = CookieResolver(routes, proxy_options)
        else:
            self.resolver = CookieResolver(routes, proxy_options)
        self.use_banner = proxy_options.get('use_banner', True)
        self.use_wombat = proxy_options.get('use_client_rewrite', True)
        self.proxy_cert_dl_view = proxy_options.get('proxy_cert_download_view')
        if not proxy_options.get('enable_https_proxy'):
            self.ca = None
            return
        try:
            from certauth.certauth import CertificateAuthority
        except ImportError:  #pragma: no cover
            print('HTTPS proxy is not available as the "certauth" module ' +
                  'is not installed')
            print('Please install via "pip install certauth" ' +
                  'to enable HTTPS support')
            self.ca = None
            return
        # HTTPS Only Options
        ca_file = proxy_options.get('root_ca_file', self.CA_ROOT_FILE)
        # attempt to create the root_ca_file if doesn't exist
        # (generally recommended to create this seperately)
        ca_name = proxy_options.get('root_ca_name', self.CA_ROOT_NAME)
        certs_dir = proxy_options.get('certs_dir', self.CA_CERTS_DIR)
        self.ca = CertificateAuthority(ca_file=ca_file,
                                       certs_dir=certs_dir,
                                       ca_name=ca_name)
        self.use_wildcard = proxy_options.get('use_wildcard_certs', True)
    def __call__(self, env):
        """WSGI-style entry point.

        Resolves a proxy request (plain HTTP or CONNECT), builds a
        WbRequest for the matched route, then post-processes the response
        (extra replay headers and content-length/chunked handling).
        Returns None when the request is not a proxy request.
        """
        is_https = (env['REQUEST_METHOD'] == 'CONNECT')
        ArchivalRouter.ensure_rel_uri_set(env)
        # for non-https requests, check non-proxy urls
        if not is_https:
            url = env['REL_REQUEST_URI']
            if not url.startswith(('http://', 'https://')):
                return None
            env['pywb.proxy_scheme'] = 'http'
        route = None
        coll = None
        matcher = None
        response = None
        ts = None
        # check resolver, for pre connect resolve
        if self.resolver.pre_connect:
            route, coll, matcher, ts, response = self.resolver.resolve(env)
            if response:
                return response
        # do connect, then get updated url
        if is_https:
            response = self.handle_connect(env)
            if response:
                return response
            url = env['REL_REQUEST_URI']
        else:
            # split the absolute proxy URI into host/port/path/query parts
            parts = urlsplit(env['REL_REQUEST_URI'])
            hostport = parts.netloc.split(':', 1)
            env['pywb.proxy_host'] = hostport[0]
            env['pywb.proxy_port'] = hostport[1] if len(hostport) == 2 else ''
            env['pywb.proxy_req_uri'] = parts.path
            if parts.query:
                env['pywb.proxy_req_uri'] += '?' + parts.query
                env['pywb.proxy_query'] = parts.query
        if self.resolver.supports_switching:
            env['pywb_proxy_magic'] = self.magic_name
        # route (static) and other resources to archival replay
        if env['pywb.proxy_host'] == self.magic_name:
            env['REL_REQUEST_URI'] = env['pywb.proxy_req_uri']
            # special case for proxy install
            response = self.handle_cert_install(env)
            if response:
                return response
            return None
        # check resolver, post connect
        if not self.resolver.pre_connect:
            route, coll, matcher, ts, response = self.resolver.resolve(env)
            if response:
                return response
        rel_prefix = ''
        custom_prefix = env.get('HTTP_PYWB_REWRITE_PREFIX', '')
        if custom_prefix:
            host_prefix = custom_prefix
            urlrewriter_class = UrlRewriter
            abs_prefix = True
            # always rewrite to absolute here
            rewrite_opts = dict(no_match_rel=True)
        else:
            host_prefix = env['pywb.proxy_scheme'] + '://' + self.magic_name
            urlrewriter_class = SchemeOnlyUrlRewriter
            abs_prefix = False
            rewrite_opts = {}
        # special case for proxy calendar
        if (env['pywb.proxy_host'] == 'query.' + self.magic_name):
            url = env['pywb.proxy_req_uri'][1:]
            rel_prefix = '/'
        if ts is not None:
            url = ts + '/' + url
        wbrequest = route.request_class(env,
                                        request_uri=url,
                                        wb_url_str=url,
                                        coll=coll,
                                        host_prefix=host_prefix,
                                        rel_prefix=rel_prefix,
                                        wburl_class=route.handler.get_wburl_type(),
                                        urlrewriter_class=urlrewriter_class,
                                        use_abs_prefix=abs_prefix,
                                        rewrite_opts=rewrite_opts,
                                        is_proxy=True)
        if matcher:
            route.apply_filters(wbrequest, matcher)
        # full rewrite and banner
        if self.use_wombat and self.use_banner:
            wbrequest.wb_url.mod = ''
        elif self.use_banner:
            # banner only, no rewrite
            wbrequest.wb_url.mod = 'bn_'
        else:
            # unaltered, no rewrite or banner
            wbrequest.wb_url.mod = 'uo_'
        response = route.handler(wbrequest)
        if not response:
            return None
        # add extra headers for replay responses
        if wbrequest.wb_url and wbrequest.wb_url.is_replay():
            response.status_headers.replace_headers(self.extra_headers)
        # check for content-length
        res = response.status_headers.get_header('content-length')
        try:
            if int(res) > 0:
                return response
        except:
            pass
        # need to either chunk or buffer to get content-length
        if env.get('SERVER_PROTOCOL') == 'HTTP/1.1':
            response.status_headers.remove_header('content-length')
            response.status_headers.headers.append(('Transfer-Encoding', 'chunked'))
            response.body = self._chunk_encode(response.body)
        else:
            response.body = self._buffer_response(response.status_headers,
                                                  response.body)
        return response
<|fim▁hole|> if not len(chunk):
continue
chunk_len = b'%X\r\n' % len(chunk)
yield chunk_len
yield chunk
yield b'\r\n'
yield b'0\r\n\r\n'
@staticmethod
def _buffer_response(status_headers, iterator):
out = SpooledTemporaryFile(ProxyRouter.BUFF_RESPONSE_MEM_SIZE)
size = 0
for buff in iterator:
size += len(buff)
out.write(buff)
content_length_str = str(size)
# remove existing content length
status_headers.replace_header('Content-Length',
content_length_str)
out.seek(0)
return RewriteContent.stream_to_gen(out)
    def get_request_socket(self, env):
        """Best-effort extraction of the raw client socket from the WSGI
        environment (uwsgi, gunicorn, or ``wsgi.input`` internals).

        Returns None when HTTPS proxying is disabled or no socket can be
        found.
        """
        if not self.ca:
            return None
        sock = None
        if env.get('uwsgi.version'):  # pragma: no cover
            try:
                import uwsgi
                fd = uwsgi.connection_fd()
                conn = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
                try:
                    # py2-only API; falls back to the raw connection on py3
                    sock = socket.socket(_sock=conn)
                except:
                    sock = conn
            except Exception as e:
                pass
        elif env.get('gunicorn.socket'):  # pragma: no cover
            sock = env['gunicorn.socket']
        if not sock:
            # attempt to find socket from wsgi.input
            input_ = env.get('wsgi.input')
            if input_:
                if hasattr(input_, '_sock'):  # pragma: no cover
                    raw = input_._sock
                    sock = socket.socket(_sock=raw)  # pragma: no cover
                elif hasattr(input_, 'raw'):
                    sock = input_.raw._sock
        return sock
    def handle_connect(self, env):
        """Handle an HTTPS CONNECT request.

        Acknowledges the tunnel, wraps the client socket in TLS using a
        per-host certificate from the CA, then re-parses the inner request
        line and headers into the WSGI env.  Returns a WbResponse only on
        failure (proxy unsupported); on success the env is mutated and
        None is returned implicitly.
        """
        sock = self.get_request_socket(env)
        if not sock:
            return WbResponse.text_response('HTTPS Proxy Not Supported',
                                            '405 HTTPS Proxy Not Supported')
        # acknowledge the CONNECT before starting the TLS handshake
        sock.send(b'HTTP/1.0 200 Connection Established\r\n')
        sock.send(b'Proxy-Connection: close\r\n')
        sock.send(b'Server: pywb proxy\r\n')
        sock.send(b'\r\n')
        hostname, port = env['REL_REQUEST_URI'].split(':')
        if not self.use_wildcard:
            certfile = self.ca.cert_for_host(hostname)
        else:
            certfile = self.ca.get_wildcard_cert(hostname)
        try:
            ssl_sock = ssl.wrap_socket(sock,
                                       server_side=True,
                                       certfile=certfile,
                                       #ciphers="ALL",
                                       suppress_ragged_eofs=False,
                                       ssl_version=ssl.PROTOCOL_SSLv23
                                       )
            env['pywb.proxy_ssl_sock'] = ssl_sock
            buffreader = BufferedReader(ssl_sock, block_size=self.BLOCK_SIZE)
            statusline = to_native_str(buffreader.readline().rstrip())
        except Exception as se:
            raise BadRequestException(se.message)
        statusparts = statusline.split(' ')
        if len(statusparts) < 3:
            raise BadRequestException('Invalid Proxy Request: ' + statusline)
        env['REQUEST_METHOD'] = statusparts[0]
        env['REL_REQUEST_URI'] = ('https://' +
                                  env['REL_REQUEST_URI'].replace(':443', '') +
                                  statusparts[1])
        env['SERVER_PROTOCOL'] = statusparts[2].strip()
        env['pywb.proxy_scheme'] = 'https'
        env['pywb.proxy_host'] = hostname
        env['pywb.proxy_port'] = port
        env['pywb.proxy_req_uri'] = statusparts[1]
        queryparts = env['REL_REQUEST_URI'].split('?', 1)
        env['PATH_INFO'] = queryparts[0]
        env['QUERY_STRING'] = queryparts[1] if len(queryparts) > 1 else ''
        env['pywb.proxy_query'] = env['QUERY_STRING']
        # copy the remaining request headers into the env as HTTP_* keys
        while True:
            line = to_native_str(buffreader.readline())
            if line:
                line = line.rstrip()
            if not line:
                break
            parts = line.split(':', 1)
            if len(parts) < 2:
                continue
            name = parts[0].strip()
            value = parts[1].strip()
            name = name.replace('-', '_').upper()
            if name not in ('CONTENT_LENGTH', 'CONTENT_TYPE'):
                name = 'HTTP_' + name
            env[name] = value
        env['wsgi.input'] = buffreader
        #remain = buffreader.rem_length()
        #if remain > 0:
            #remainder = buffreader.read()
            #env['wsgi.input'] = BufferedReader(BytesIO(remainder))
            #remainder = buffreader.read(self.BLOCK_SIZE)
            #env['wsgi.input'] = BufferedReader(ssl_sock,
            #                                   block_size=self.BLOCK_SIZE,
            #                                   starting_data=remainder)
def handle_cert_install(self, env):
if env['pywb.proxy_req_uri'] in ('/', '/index.html', '/index.html'):
available = (self.ca is not None)
if self.proxy_cert_dl_view:
return (self.proxy_cert_dl_view.
render_response(available=available,
pem_path=self.CERT_DL_PEM,
p12_path=self.CERT_DL_P12))
elif env['pywb.proxy_req_uri'] == self.CERT_DL_PEM:
if not self.ca:
return None
buff = b''
with open(self.ca.ca_file, 'rb') as fh:
buff = fh.read()
content_type = 'application/x-x509-ca-cert'
headers = [('Content-Length', str(len(buff)))]
return WbResponse.bin_stream([buff],
content_type=content_type,
headers=headers)
elif env['pywb.proxy_req_uri'] == self.CERT_DL_P12:
if not self.ca:
return None
buff = self.ca.get_root_PKCS12()
content_type = 'application/x-pkcs12'
headers = [('Content-Length', str(len(buff)))]
return WbResponse.bin_stream([buff],
content_type=content_type,
headers=headers)<|fim▁end|> | @staticmethod
def _chunk_encode(orig_iter):
for chunk in orig_iter: |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""This application provides a framework for disqualifying users for various<|fim▁hole|><|fim▁end|> | reasons, as well as simple modeling of any custom disqualification.
""" |
<|file_name|>boid.js<|end_file_name|><|fim▁begin|>window.Boid = (function(){
function Boid(x, y, settings){
this.location = new Vector(x, y);
this.acceleration = new Vector(0, 0);
this.velocity = new Vector(Helper.getRandomInt(-1,1), Helper.getRandomInt(-1,1));
this.settings = settings || {};
this.show_connections = settings.show_connections || true;
this.r = settings.r || 3.0;
this.maxspeed = settings.maxspeed || 2;
this.maxforce = settings.maxforce || 0.5;
this.perchSite = settings.perchSite || [h - 100, h - 50];
this.laziness_level = settings.laziness_level || 0.7;
this.min_perch = settings.min_perch || 1;
this.max_perch = settings.max_perch || 100;
this.perching = settings.perching || false;
this.perchTimer = settings.perchTimer || 100;
this.separation_multiple = settings.separation || 0.2;
this.cohesion_multiple = settings.cohesion || 2.0;
this.alignment_multiple = settings.alignment || 1.0;
this.separation_neighbor_dist = (settings.separation_neighbor_dis || 10) * 10;
this.cohesion_neighbor_dist = settings.cohesion_neighbor_dis || 200;
this.alignment_neighbor_dist = settings.alignment_neighbor_dis || 200;
}
Boid.prototype = {
constructor: Boid,
    // Advance one simulation step: perched boids only count down their
    // timer; flying boids integrate acceleration -> velocity -> location.
    update: function(){
        if (this.perching) {
            this.perchTimer--;
            if (this.perchTimer < 0){
                this.perching = false;
            }
        } else {
            // Euler integration; acceleration is re-accumulated each frame.
            this.velocity.add(this.acceleration);
            this.velocity.limit(this.maxspeed);
            this.location.add(this.velocity);
            this.acceleration.multiply(0);
        }
    },
    // Accumulate a steering force into this frame's acceleration.
    applyForce: function(force){
        this.acceleration.add(force);
    },
tired: function(){
var x = Math.random();
if (x < this.laziness_level){
return false;
} else {
return true;
}
},
    // Reynolds "seek": steering force toward `target`, clipped to maxforce.
    seek: function(target) {
        var desired = Vector.subtract(target, this.location);
        desired.normalize();
        desired.multiply(this.maxspeed);
        var steer = Vector.subtract(desired, this.velocity);
        steer.limit(this.maxforce);
        return steer;
    },
//Leaving this function here for experiments
//You can replace "seek" inside cohesion()
//For a more fish-like behaviour
arrive: function(target) {
var desired = Vector.subtract(target, this.location);
var dMag = desired.magnitude();
desired.normalize();
// closer than 100 pixels?
if (dMag < 100) {
var m = Helper.map(dMag,0,100,0,this.maxspeed);<|fim▁hole|> }
var steer = Vector.subtract(desired, this.velocity);
steer.limit(this.maxforce);
return steer;
},
    // Steer toward the average heading of non-perching flockmates.
    align: function(boids){
        var sum = new Vector();
        var count = 0;
        for (var i = 0; i < boids.length; i++){
            if (boids[i].perching == false) {
                var distance = Vector.distance(this.location, boids[i].location);
                // NOTE(review): the neighborhood-radius filter is disabled,
                // so every non-perching boid is included regardless of
                // distance — confirm whether that is intentional.
                //if ((distance > 0) && (distance < this.align_neighbor_dist)) {
                sum.add(boids[i].velocity);
                count++;
                //}
            }
        }
        if (count > 0) {
            sum.divide(count);
            sum.normalize();
            sum.multiply(this.maxspeed);
            var steer = Vector.subtract(sum,this.velocity);
            steer.limit(this.maxforce);
            return steer;
        } else {
            return new Vector(0,0);
        }
    },
    // Steer toward the average position of non-perching flockmates.
    cohesion: function(boids){
        var sum = new Vector();
        var count = 0;
        for (var i = 0; i < boids.length; i++){
            if (boids[i].perching == false) {
                var distance = Vector.distance(this.location, boids[i].location);
                // NOTE(review): the radius filter is disabled here as well.
                //if ((distance > 0) && (distance < this.cohesion_neighbor_dist)) {
                sum.add(boids[i].location);
                count++;
                //}
            }
        }
        if (count > 0) {
            sum.divide(count);
            return this.seek(sum);
        } else {
            return new Vector(0,0);
        }
    },
    // Steer away from neighbors closer than separation_neighbor_dist,
    // weighting nearer neighbors more strongly.
    separate: function(boids) {
        var sum = new Vector();
        var count = 0;
        for (var i=0; i< boids.length; i++){
            var distance = Vector.distance(this.location, boids[i].location);
            if ((distance > 0) && (distance < this.separation_neighbor_dist)) {
                var diff = Vector.subtract(this.location, boids[i].location);
                diff.normalize();
                diff.divide(distance); // closer neighbors push harder
                sum.add(diff);
                count++;
            }
        }
        if(count > 0){
            sum.divide(count);
            sum.normalize();
            sum.multiply(this.maxspeed);
            var steer = Vector.subtract(sum, this.velocity);
            steer.limit(this.maxforce);
        }
        // NOTE(review): when count > 0 the limited `steer` is computed but
        // the (unlimited) `sum` is returned instead — confirm intended.
        return sum;
    },
    // Keep boids near the canvas: a small overflow is allowed, then they
    // are nudged back; a tired boid crossing a perch site starts perching.
    borders: function() {
        //We are allowing boids to fly a bit outside
        //the view and then return.
        var offset = 20;
        var isTired = this.tired();
        if (this.onPerchSite() && isTired ){
            this.perching = true;
        } else {
            if (this.location.x < -offset) this.location.x += 5;
            if (this.location.x > w + offset) this.location.x -= 5;
            if (this.location.y > h + offset) this.location.y -= 5;
            if (this.location.y < -offset) this.location.y += 5;
        }
    },
    // True when the boid's y position is within 2px of any perch site.
    onPerchSite: function(){
        for (var i = 0; i < this.perchSite.length; i++){
            if( this.location.y > this.perchSite[i] -2 && this.location.y < this.perchSite[i] + 2 )
                return true;
        }
        return false;
    },
    // Alternative border handling: wrap around the canvas edges.
    // NOTE(review): treats this.perchSite as a scalar y-coordinate, unlike
    // borders()/onPerchSite() which expect an array — only one of the two
    // border strategies can be active with a given perchSite shape.
    borders2: function() {
        var offset = 20;
        var isTired = this.tired();
        if (this.location.y > this.perchSite - 2 && this.location.y < this.perchSite + 2 && isTired ){
            this.perching = true;
        } else {
            if (this.location.x < -this.r) this.location.x = w+this.r;
            if (this.location.y < -this.r) this.location.y = h+this.r;
            if (this.location.x > w+this.r) this.location.x = -this.r;
            if (this.location.y > h+this.r) this.location.y = -this.r;
        }
    },
    // Draw the boid on the shared canvas context, rotated to face its
    // velocity heading.
    render: function() {
        var theta = this.velocity.heading() + Math.PI/2;
        context.stroke();
        context.save();
        context.translate(this.location.x, this.location.y);
        context.rotate(theta);
        if(this.settings.boid_shape){
            this.settings.boid_shape(); // custom shape callback
        } else {
            this.default_boid_shape();
        }
        context.restore();
    },
    // Default rendering: a small filled gray circle.
    default_boid_shape: function(){
        var radius = 5;
        context.fillStyle = "#636570";
        context.beginPath();
        context.arc(0, 0, radius, 0, 2 * Math.PI, false);
        context.closePath();
        context.fill();
    },
    // Combine the three weighted steering behaviors into the acceleration.
    flock: function(boids){
        var separate = this.separate(boids);
        var align = this.align(boids);
        var cohesion = this.cohesion(boids);
        separate.multiply(this.separation_multiple);
        align.multiply(this.alignment_multiple);
        cohesion.multiply(this.cohesion_multiple);
        this.applyForce(separate);
        this.applyForce(align);
        this.applyForce(cohesion);
    },
    // Per-frame driver: either count down a perch (re-arming the timer
    // with a random duration on takeoff) or flock/move/handle borders.
    run: function(boids){
        if (this.perching){
            this.perchTimer--;
            if(this.perchTimer < 0 )
            {
                this.perching = false;
                this.perchTimer = Helper.getRandomInt(this.min_perch,this.max_perch);
            }
        } else {
            this.flock(boids);
            this.update();
            this.borders();
        }
    }
};
return Boid;
})();<|fim▁end|> | desired.multiply(m);
} else {
desired.multiply(this.maxspeed); |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use rand::{thread_rng, seq::SliceRandom};<|fim▁hole|>
fn main() {
    // Logging level comes from the RUST_LOG environment variable.
    env_logger::init();
    // 7x6 is the standard Connect Four board size.
    let game = ConnectFour::<BitState>::new(7, 6).unwrap();
    let human_player = HumanPlayer::new();
    let ai_player = TreeSearchPlayer::new(&game);
    let mut players: Vec<Box<PlayerTrait<Game=_>>> = vec![Box::new(human_player), Box::new(ai_player)];
    // Randomize who moves first.
    players.shuffle(&mut thread_rng());
    // ANSI escape: clear screen and move the cursor home.
    println!("\x1B[2J\x1B[H");
    println!("{}", game.state());
    for (state, player, move_, winner) in game.iter(players) {
        print!("\x1B[2J\x1B[H");
        println!("Player {} has moved {}", player, move_);
        println!("{}", state);
        match winner {
            Winner::Winner(winner) => println!("Player {} has won.", winner),
            Winner::Draw => println!("Draw."),
            Winner::NotFinishedYet => {}
        };
    }
}
use mcc4::*; |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/**
* xDo app client
*
* Auther: [email protected]
*/
// Application module; ngResource supplies REST helpers.
var app = angular.module('app', ['ngResource']);

app.controller('AppCtrl', ['$scope', function($scope) {
    // Parent controller for all the Ctrls
    // Shared model object inherited by child controller scopes.
    $scope.appModel = {}
}]);
// Can define config block here or use ngRoute |
<|file_name|>index.py<|end_file_name|><|fim▁begin|># Patchwork - automated patch tracking system
# Copyright (C) 2016 Linaro Corporation
#
# SPDX-License-Identifier: GPL-2.0-or-later
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework.views import APIView
class IndexView(APIView):
    """Top-level API index: lists the available API resource endpoints.

    (Reconstructed: the original dict literal was garbled by an inline
    fill-in-the-middle marker; all entries were present in the visible
    content.)
    """

    def get(self, request, *args, **kwargs):
        """List API resources."""
        return Response({
            'projects': reverse('api-project-list', request=request),
            'users': reverse('api-user-list', request=request),
            'people': reverse('api-person-list', request=request),
            'patches': reverse('api-patch-list', request=request),
            'covers': reverse('api-cover-list', request=request),
            'series': reverse('api-series-list', request=request),
            'events': reverse('api-event-list', request=request),
            'bundles': reverse('api-bundle-list', request=request),
        })
<|file_name|>sandman_pasta.py<|end_file_name|><|fim▁begin|>"""
sandman_pasta reimplements the behaviour of decaf-masta, but instead evaluates all calls to deployable heat templates
"""
import json
from decaf_storage.json_base import StorageJSONEncoder
from decaf_storage import Endpoint
from decaf_utils_components.base_daemon import daemonize
import yaml
import time
import urllib
from decaf_utils_components import BasePlugin, In, Out
import base64
import sys
import math
import traceback
__author__ = "Banana PG-SANDMAN"
__date__ = "$01-jun-2016$"
TMPDIR = "/tmp/decaf/"
class Pasta(BasePlugin):
    """Drop-in stand-in for decaf-masta.

    Exposes the same RPC interface as decaf-masta but does not touch any
    infrastructure: every call is evaluated locally and answered with a
    canned success value, which is useful for testing the rest of the
    DECaF stack without an OpenStack deployment.
    """

    __version__ = "0.1-dev01"

    # Class-level defaults; replaced with real values in __init__.
    datacenters = dict()
    config = None
    logger = None

    def __init__(self, logger=None, config=None):
        super(Pasta, self).__init__(logger=logger, config=config)
        # NOTE: the *config* argument is ignored; the daemon always reads
        # its own configuration file from a fixed path.
        with open('/etc/decaf/pastad.cfg') as cfg_file:  # renamed: don't shadow builtin `file`
            self.config = yaml.safe_load(cfg_file)
        if self.config is None:
            self.logger.error("No configuration file found or not in yaml format.")
            sys.exit(1)
        try:
            self.datacenters = self.config["datacenters"]
        except KeyError:
            self.logger.error("Please check the configuration. There is no datacenter defined.")
            sys.exit(1)
        self.logger.debug('Configuration seems sane.')

    def _before_connect(self, url=None, rpc=None, routing_key=None):
        """Hook invoked before the RPC connection is set up; nothing to do."""
        pass

    # same behaviour as masta
    def _after_connect(self):
        """Wire up the Storage endpoint once the RPC channel is available."""
        self.rpc.set_json_encoder(StorageJSONEncoder)
        self.storage = Endpoint(self.rpc, self.logger)
        # TODO: check that all configured datacenters are also registered in
        # Storage and register the missing ones; currently only queried.
        self.storage.get('datacenter', options=[], filters={})

    def connect(self, url=None, rpc=None, routing_key=None):
        # Fake being masta (routing key "decaf_masta"), so other components
        # keep working unchanged.
        super(Pasta, self).connect(self.config["rpc"]["url"], None, "decaf_masta")

    @In("datacenter_id", int)
    @Out("success_code", int)
    def initialize_datacenter(self, datacenter_config):
        """
        Reimplemented method of decaf_masta

        :param datacenter_config: A DatacenterConfig object describing the datacenter to be added.
        :return: Always 0 (success); nothing is actually initialized.
        """
        self.logger.info("Call to initialize_datacenter")
        return 0

    @In("keystone_credentials", dict)
    @Out("keystone_id", int)
    def create_keystone_credentials(self, keystone_credentials):
        """
        Pretends to store keystone credentials.

        :param keystone_credentials: Credentials dictionary (ignored).
        :return: Always 0 as the fake entry id.
        """
        self.logger.info("Call to create_keystone_credentials")
        return 0

    @In("keystone_id", int)
    @Out("keystone_credentials", dict)
    def get_keystone_credentials(self, keystone_id):
        """
        Gets a keystone entry from the database.

        :param keystone_id: The id of the database entry.
        :return: Always 400, since this stub stores no keystone entries.
        """
        return 400

    @Out("keystone_list", list)
    def get_keystones(self):
        """
        Get keystone entries contained in the database.

        :return: None; this stub stores no keystone entries.
        """
        return None

    # ----------------------------------------------------------
    # DATACENTERS
    # Every datacenter has a respective set of keystone credentials and a region.
    # Keystone does not have to be installed on the actual datacenter, but could.
    # ----------------------------------------------------------

    @In("datacenter", dict)
    @Out("datacenter_id", int)
    def create_datacenter(self, datacenter):
        """
        Adds a datacenter entry to the database.

        :param datacenter: A Datacenter dictionary containing information of the datacenter.
        :return: The id of the new entry in the database.
        """
        # BUG FIX: `datacenter` is a dict (see @In), so it must be
        # subscripted; attribute access raised AttributeError before.
        return int(datacenter["datacenter_id"])

    @Out("datacenter_list", list)
    def get_datacenters(self):
        """
        Get datacenter entries contained in the database.

        :return: A list of datacenter entries currently known from the
                 configuration file.
        """
        # BUG FIX: self.datacenters comes from the YAML config and is a dict,
        # so the old code iterated its *keys* (strings) and crashed on
        # .to_dict(). Iterate the entries and only convert objects that
        # actually provide to_dict().
        entries = (self.datacenters.values()
                   if isinstance(self.datacenters, dict)
                   else self.datacenters)
        return [entry.to_dict() if hasattr(entry, "to_dict") else entry
                for entry in entries]

    @In("datacenter_id", int)
    @Out("datacenter_stats", dict)
    def get_datacenter_stats(self, datacenter_id):
        """
        Returns information about the datacenter.

        :param datacenter_id: The id of the datacenter.
        :return: A stub statistics dictionary for the datacenter.
        """
        # BUG FIX: the previous implementation returned the undefined name
        # `datacenter_stats` (NameError). Return a minimal canned value,
        # consistent with the other faked responses in this class.
        return {"datacenter_id": datacenter_id}

    @In("datacenter_id", int)
    @Out("ip_namespace", str)
    def get_datacenter_ip_namespace(self, datacenter_id):
        """
        Returns the name of the IP namespace of the router on the given datacenter.

        :param datacenter_id: The masta id of the datacenter.
        :return: IP namespace name (fixed fake value).
        """
        ip_namespace = "qrouter-1"
        return ip_namespace

    # ----------------------------------------------------------
    # DEPLOY SCENARIO
    # A scenario is deployed in two steps: First, the edges are created.
    # Secondly, the nodes are created.
    # If the process fails at one step, MaSta will rollback the deployment.
    # ----------------------------------------------------------

    @In("instance_graph", dict)
    @Out("instance_graph", dict)
    def deploy_scenario(self, instance_graph):
        """
        Deploy scenario on the infrastructure.

        :param instance_graph: An object of type InstanceGraph to be deployed.
        :return: The (unmodified) instance graph; nothing is deployed.
        """
        return instance_graph

    # ----------------------------------------------------------
    # DESTROY SCENARIO
    # Deletes all the nodes and edges and removes
    # the scenario from the database.
    # ----------------------------------------------------------

    @In("scenario_instance_id", str)
    @Out("success_code", int)
    def destroy_scenario(self, scenario_instance_id):
        """
        Destroy scenario by deleting all its nodes and removing from database.

        :param scenario_instance_id: The id of the scenario instance.
        :return: Always 200 (success) in this stub.
        """
        return 200

    @Out("success_code", int)
    def destroy_all_scenarios(self):
        """
        Destroys all scenarios in the MaSta database.

        :return: Always 200 (success) in this stub.
        """
        return 200

    # ----------------------------------------------------------
    # ALTER SCENARIO
    # Methods to change a running scenario.
    # ----------------------------------------------------------

    @In("instance_graph", dict)
    @Out("instance_graph", dict)
    def extend_scenario(self, instance_graph):
        """
        Method to extend an existing scenario.

        :param instance_graph: An InstanceGraph with all the nodes and edges to add.
        :return: Always 200 (success) in this stub.
        """
        return 200

    @In("shrink_graph", dict)
    @Out("success_code", int)
    def shrink_scenario(self, shrink_graph):
        """
        Method to shrink an existing scenario.

        :param shrink_graph: An object of type InstanceGraph that lists all the nodes and edges to delete.
        :return: Always 200 (success) in this stub.
        """
        return 200

    # ----------------------------------------------------------
    # INTERNAL SCENARIO METHODS
    # Internal methods for creation and deletion
    # of nodes and edges.
    # ----------------------------------------------------------

    def create_nodes(self, instance_graph, session):
        """
        Internal method to create nodes in database and deploy the nodes on the infrastructure.

        :param instance_graph: The graph of the scenario.
        :param session: The session object.
        :return: None; no-op in this stub.
        """
        pass

    def create_edges(self, instance_graph, session):
        """
        Internal method to create edges in the database and set up the networks in OpenStack.

        :param instance_graph: The graph of the scenario.
        :param session: The session object.
        :return: None; no-op in this stub.
        """
        pass

    def rollback(self, instance_graph, session, del_scenario=False):
        """
        Internal method to rollback the creation or altering of a scenario.

        :param instance_graph: The graph of the scenario.
        :param session: The session object.
        :param del_scenario: If True, the whole scenario would be removed.
        :return: None; no-op in this stub.
        """
        pass

    def delete_nodes(self, vm_instance_id_list, session):
        """
        Internal method to delete nodes from a scenario.

        :param vm_instance_id_list: The ids of the VM instances to delete.
        :param session: The session object.
        :return: Always 200 (success) in this stub.
        """
        return 200

    def delete_edges(self, edge_list, session):
        """
        Internal method to delete edges from a scenario.

        :param edge_list: A list containing objects of internal edges, management ports and public ports from the db.
        :param session: The session object.
        :return: None; no-op in this stub.
        """
        pass

    # ----------------------------------------------------------
    # ACTIONS
    # Perform actions on the VMS.
    # ----------------------------------------------------------

    @In("vm_action", dict)
    @Out("success_code", int)
    def action_vm_instance(self, vm_action):
        """
        Perform an action on a single vm instance.

        :param vm_action: A dictionary of type VMAction containing the vm instance id and the action to perform.
        :return: Always 200 (success) in this stub.
        """
        return 200

    @In("scenario_action", dict)
    @Out("success_code", int)
    def action_scenario(self, scenario_action):
        """
        Perform an action on a scenario.

        :param scenario_action: A dictionary of type ScenarioAction containing the scenario instance id and the action to perform.
        :return: Always 200 (success) in this stub.
        """
        return 200

    # ----------------------------------------------------------
    # FLAVORS
    # ----------------------------------------------------------

    @In("flavor_data", dict)
    @Out("success_code", int)
    def create_flavor(self, flavor_data):
        """
        Adds a flavor entry to the database and uploads the flavor to OpenStack.

        :param flavor_data: A FlavorData object containing data about the flavor.
        :return: Always 201 (created) in this stub.
        """
        return 201

    @In("flavor_id", str)
    @Out("success_code", int)
    def delete_flavor(self, flavor_id):
        """
        Deletes a flavor from the database and OpenStack.

        :param flavor_id: The id of the flavor.
        :return: Always 200 (success) in this stub.
        """
        return 200

    # ----------------------------------------------------------
    # IMAGES
    # ----------------------------------------------------------

    @In("image_data", dict)
    @Out("success_code", int)
    def create_image(self, image_data):
        """
        Stores an image in OpenStack.

        :param image_data: A ImageData object containing data about the image.
        :return: Always 201 (created) in this stub.
        """
        return 201

    @In("image_id", str)
    @Out("success_code", int)
    def delete_image(self, image_id):
        """
        Deletes an image from the database and OpenStack.

        :param image_id: The id of the image.
        :return: Always 200 (success) in this stub.
        """
        return 200

    # ----------------------------------------------------------
    # NETWORKS
    # ----------------------------------------------------------

    @In("vm_instance_id", str)
    @Out("instance_ip", str)
    def get_vm_mgmt_ip(self, vm_instance_id, session=None):
        """
        Retrieves the management IP address of an instance.

        :param vm_instance_id: The id of the VM instance.
        :return: A fixed fake ip ("10.0.0.1").
        """
        return "10.0.0.1"

    # ----------------------------------------------------------
    # MONITORING DATA
    # ----------------------------------------------------------

    @In("monitoring_request", dict)
    @Out("monitoring_response", dict)
    def get_monitoring_data(self, monitoring_request):
        """
        Retrieves monitoring data for a specific VM.

        :param monitoring_request: A MonitoringRequest object.
        :return: A MonitoringResponse object with canned values.
        """
        monitoring_request = monitoring_request["monitoring_request"]
        monitoring_response = {
            "monitoring_response": {
                "type": monitoring_request["type"],
                "vm_instance_id": monitoring_request["vm_instance_id"],
                "value": {
                    "current": 10,
                    "total": 100
                }
            }
        }
        return monitoring_response

    @In("monitoring_alarm_request", dict)
    @Out("subscription_name", str)
    def create_monitoring_alarm(self, monitoring_alarm_request):
        """
        Sets up an alarm and returns a subscription id to subscribe to the message broker.

        :param monitoring_alarm_request: A MonitoringAlarmRequest object containing data about the alarm to be set up.
        :return: The fixed fake subscription name "test".
        """
        return "test"

    @In("subscription_name", str)
    @Out("success_code", int)
    def delete_monitoring_alarm(self, subscription_name):
        """
        Delete monitoring alarm by subscription_name.

        :param subscription_name: The name of the Subscription.
        :return: Always 200 (success) in this stub.
        """
        return 200

    @In("monitoring_alarm_id", int)
    @Out("success_code", int)
    def delete_monitoring_alarm_by_id(self, monitoring_alarm_id):
        """
        Delete monitoring alarm by alarm id.

        :param monitoring_alarm_id: The id of the alarm, under which it is registered in the MaSta database.
        :return: Always 200 (success) in this stub.
        """
        return 200

    @Out("success_code", int)
    def delete_all_monitoring_alarms(self):
        """
        Deletes all monitoring alarms in the DB.

        :return: Always 200 (success) in this stub.
        """
        return 200

    def invoke_monitoring_alarm(self, data):
        """
        Internal method. Called by the MaSta-Server when an alarm message arrives.

        :param data: data
        :return: None; no-op in this stub.
        """
        pass
def daemon():
    """Entry point: run the Pasta plugin as a daemonized background process."""
    daemonize(Pasta)
if __name__ == '__main__':
    daemon()
<|file_name|>register_ext.rs<|end_file_name|><|fim▁begin|>// SPDX-License-Identifier: MIT
// Copyright [email protected]
// Copyright iced contributors
use super::register::{iced_to_register, register_to_iced, Register};
use wasm_bindgen::prelude::*;
/// [`Register`] enum extension methods
///
/// [`Register`]: enum.Register.html
// Zero-sized marker type: all functionality lives in the `impl` blocks
// below and is exported to JS as static methods on `RegisterExt`.
#[wasm_bindgen]
pub struct RegisterExt;
#[wasm_bindgen]
impl RegisterExt {
/// Gets the base register, eg. `AL`, `AX`, `EAX`, `RAX`, `MM0`, `XMM0`, `YMM0`, `ZMM0`, `ES`
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.equal(RegisterExt.base(Register.GS), Register.ES);
/// assert.equal(RegisterExt.base(Register.SIL), Register.AL);
/// assert.equal(RegisterExt.base(Register.SP), Register.AX);
/// assert.equal(RegisterExt.base(Register.R13D), Register.EAX);
/// assert.equal(RegisterExt.base(Register.RBP), Register.RAX);
/// assert.equal(RegisterExt.base(Register.MM6), Register.MM0);
/// assert.equal(RegisterExt.base(Register.XMM28), Register.XMM0);
/// assert.equal(RegisterExt.base(Register.YMM12), Register.YMM0);
/// assert.equal(RegisterExt.base(Register.ZMM31), Register.ZMM0);
/// assert.equal(RegisterExt.base(Register.K3), Register.K0);
/// assert.equal(RegisterExt.base(Register.BND1), Register.BND0);
/// assert.equal(RegisterExt.base(Register.ST7), Register.ST0);
/// assert.equal(RegisterExt.base(Register.CR8), Register.CR0);
/// assert.equal(RegisterExt.base(Register.DR6), Register.DR0);
/// assert.equal(RegisterExt.base(Register.TR3), Register.TR0);
/// assert.equal(RegisterExt.base(Register.RIP), Register.EIP);
/// ```
pub fn base(value: Register) -> Register {
iced_to_register(register_to_iced(value).base())
}
/// The register number (index) relative to [`RegisterExt.base()`], eg. 0-15, or 0-31, or if 8-bit GPR, 0-19
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
/// [`RegisterExt.base()`]: #method.base
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.equal(RegisterExt.number(Register.GS), 5);
/// assert.equal(RegisterExt.number(Register.SIL), 10);
/// assert.equal(RegisterExt.number(Register.SP), 4);
/// assert.equal(RegisterExt.number(Register.R13D), 13);
/// assert.equal(RegisterExt.number(Register.RBP), 5);
/// assert.equal(RegisterExt.number(Register.MM6), 6);
/// assert.equal(RegisterExt.number(Register.XMM28), 28);
/// assert.equal(RegisterExt.number(Register.YMM12), 12);
/// assert.equal(RegisterExt.number(Register.ZMM31), 31);
/// assert.equal(RegisterExt.number(Register.K3), 3);
/// assert.equal(RegisterExt.number(Register.BND1), 1);
/// assert.equal(RegisterExt.number(Register.ST7), 7);
/// assert.equal(RegisterExt.number(Register.CR8), 8);
/// assert.equal(RegisterExt.number(Register.DR6), 6);
/// assert.equal(RegisterExt.number(Register.TR3), 3);
/// assert.equal(RegisterExt.number(Register.RIP), 1);
/// ```
pub fn number(value: Register) -> u32 {
register_to_iced(value).number() as u32
}
/// Gets the full register that this one is a part of, eg. `CL`/`CH`/`CX`/`ECX`/`RCX` -> `RCX`, `XMM11`/`YMM11`/`ZMM11` -> `ZMM11`
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.equal(RegisterExt.fullRegister(Register.GS), Register.GS);
/// assert.equal(RegisterExt.fullRegister(Register.SIL), Register.RSI);
/// assert.equal(RegisterExt.fullRegister(Register.SP), Register.RSP);
/// assert.equal(RegisterExt.fullRegister(Register.R13D), Register.R13);
/// assert.equal(RegisterExt.fullRegister(Register.RBP), Register.RBP);
/// assert.equal(RegisterExt.fullRegister(Register.MM6), Register.MM6);
/// assert.equal(RegisterExt.fullRegister(Register.XMM10), Register.ZMM10);
/// assert.equal(RegisterExt.fullRegister(Register.YMM10), Register.ZMM10);
/// assert.equal(RegisterExt.fullRegister(Register.ZMM10), Register.ZMM10);
/// assert.equal(RegisterExt.fullRegister(Register.K3), Register.K3);
/// assert.equal(RegisterExt.fullRegister(Register.BND1), Register.BND1);
/// assert.equal(RegisterExt.fullRegister(Register.ST7), Register.ST7);
/// assert.equal(RegisterExt.fullRegister(Register.CR8), Register.CR8);
/// assert.equal(RegisterExt.fullRegister(Register.DR6), Register.DR6);
/// assert.equal(RegisterExt.fullRegister(Register.TR3), Register.TR3);
/// assert.equal(RegisterExt.fullRegister(Register.RIP), Register.RIP);
/// ```
#[wasm_bindgen(js_name = "fullRegister")]
pub fn full_register(value: Register) -> Register {
iced_to_register(register_to_iced(value).full_register())
}
/// Gets the full register that this one is a part of, except if it's a GPR in which case the 32-bit register is returned,
/// eg. `CL`/`CH`/`CX`/`ECX`/`RCX` -> `ECX`, `XMM11`/`YMM11`/`ZMM11` -> `ZMM11`
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.equal(RegisterExt.fullRegister32(Register.GS), Register.GS);
/// assert.equal(RegisterExt.fullRegister32(Register.SIL), Register.ESI);
/// assert.equal(RegisterExt.fullRegister32(Register.SP), Register.ESP);
/// assert.equal(RegisterExt.fullRegister32(Register.R13D), Register.R13D);
/// assert.equal(RegisterExt.fullRegister32(Register.RBP), Register.EBP);
/// assert.equal(RegisterExt.fullRegister32(Register.MM6), Register.MM6);
/// assert.equal(RegisterExt.fullRegister32(Register.XMM10), Register.ZMM10);
/// assert.equal(RegisterExt.fullRegister32(Register.YMM10), Register.ZMM10);
/// assert.equal(RegisterExt.fullRegister32(Register.ZMM10), Register.ZMM10);
/// assert.equal(RegisterExt.fullRegister32(Register.K3), Register.K3);
/// assert.equal(RegisterExt.fullRegister32(Register.BND1), Register.BND1);
/// assert.equal(RegisterExt.fullRegister32(Register.ST7), Register.ST7);
/// assert.equal(RegisterExt.fullRegister32(Register.CR8), Register.CR8);
/// assert.equal(RegisterExt.fullRegister32(Register.DR6), Register.DR6);
/// assert.equal(RegisterExt.fullRegister32(Register.TR3), Register.TR3);
/// assert.equal(RegisterExt.fullRegister32(Register.RIP), Register.RIP);
/// ```
#[wasm_bindgen(js_name = "fullRegister32")]
pub fn full_register32(value: Register) -> Register {
iced_to_register(register_to_iced(value).full_register32())
}
/// Gets the size of the register in bytes
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.equal(RegisterExt.size(Register.GS), 2);
/// assert.equal(RegisterExt.size(Register.SIL), 1);
/// assert.equal(RegisterExt.size(Register.SP), 2);
/// assert.equal(RegisterExt.size(Register.R13D), 4);
/// assert.equal(RegisterExt.size(Register.RBP), 8);
/// assert.equal(RegisterExt.size(Register.MM6), 8);
/// assert.equal(RegisterExt.size(Register.XMM10), 16);
/// assert.equal(RegisterExt.size(Register.YMM10), 32);
/// assert.equal(RegisterExt.size(Register.ZMM10), 64);
/// assert.equal(RegisterExt.size(Register.K3), 8);
/// assert.equal(RegisterExt.size(Register.BND1), 16);
/// assert.equal(RegisterExt.size(Register.ST7), 10);
/// assert.equal(RegisterExt.size(Register.CR8), 8);
/// assert.equal(RegisterExt.size(Register.DR6), 8);
/// assert.equal(RegisterExt.size(Register.TR3), 4);
/// assert.equal(RegisterExt.size(Register.RIP), 8);
/// ```
pub fn size(value: Register) -> u32 {
register_to_iced(value).size() as u32
}
}
#[wasm_bindgen]
impl RegisterExt {
/// Checks if it's a segment register (`ES`, `CS`, `SS`, `DS`, `FS`, `GS`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(RegisterExt.isSegmentRegister(Register.GS));
/// assert.ok(!RegisterExt.isSegmentRegister(Register.RCX));
/// ```
#[wasm_bindgen(js_name = "isSegmentRegister")]
pub fn is_segment_register(value: Register) -> bool {
register_to_iced(value).is_segment_register()
}
/// Checks if it's a general purpose register (`AL`-`R15L`, `AX`-`R15W`, `EAX`-`R15D`, `RAX`-`R15`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isGPR(Register.GS));
/// assert.ok(RegisterExt.isGPR(Register.CH));
/// assert.ok(RegisterExt.isGPR(Register.DX));
/// assert.ok(RegisterExt.isGPR(Register.R13D));
/// assert.ok(RegisterExt.isGPR(Register.RSP));
/// assert.ok(!RegisterExt.isGPR(Register.XMM0));
/// ```
#[wasm_bindgen(js_name = "isGPR")]
pub fn is_gpr(value: Register) -> bool {
register_to_iced(value).is_gpr()
}
/// Checks if it's an 8-bit general purpose register (`AL`-`R15L`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isGPR8(Register.GS));
/// assert.ok(RegisterExt.isGPR8(Register.CH));
/// assert.ok(!RegisterExt.isGPR8(Register.DX));
/// assert.ok(!RegisterExt.isGPR8(Register.R13D));
/// assert.ok(!RegisterExt.isGPR8(Register.RSP));
/// assert.ok(!RegisterExt.isGPR8(Register.XMM0));
/// ```
#[wasm_bindgen(js_name = "isGPR8")]
pub fn is_gpr8(value: Register) -> bool {
register_to_iced(value).is_gpr8()
}
/// Checks if it's a 16-bit general purpose register (`AX`-`R15W`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html<|fim▁hole|> /// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isGPR16(Register.GS));
/// assert.ok(!RegisterExt.isGPR16(Register.CH));
/// assert.ok(RegisterExt.isGPR16(Register.DX));
/// assert.ok(!RegisterExt.isGPR16(Register.R13D));
/// assert.ok(!RegisterExt.isGPR16(Register.RSP));
/// assert.ok(!RegisterExt.isGPR16(Register.XMM0));
/// ```
#[wasm_bindgen(js_name = "isGPR16")]
pub fn is_gpr16(value: Register) -> bool {
register_to_iced(value).is_gpr16()
}
/// Checks if it's a 32-bit general purpose register (`EAX`-`R15D`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isGPR32(Register.GS));
/// assert.ok(!RegisterExt.isGPR32(Register.CH));
/// assert.ok(!RegisterExt.isGPR32(Register.DX));
/// assert.ok(RegisterExt.isGPR32(Register.R13D));
/// assert.ok(!RegisterExt.isGPR32(Register.RSP));
/// assert.ok(!RegisterExt.isGPR32(Register.XMM0));
/// ```
#[wasm_bindgen(js_name = "isGPR32")]
pub fn is_gpr32(value: Register) -> bool {
register_to_iced(value).is_gpr32()
}
/// Checks if it's a 64-bit general purpose register (`RAX`-`R15`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isGPR64(Register.GS));
/// assert.ok(!RegisterExt.isGPR64(Register.CH));
/// assert.ok(!RegisterExt.isGPR64(Register.DX));
/// assert.ok(!RegisterExt.isGPR64(Register.R13D));
/// assert.ok(RegisterExt.isGPR64(Register.RSP));
/// assert.ok(!RegisterExt.isGPR64(Register.XMM0));
/// ```
#[wasm_bindgen(js_name = "isGPR64")]
pub fn is_gpr64(value: Register) -> bool {
register_to_iced(value).is_gpr64()
}
/// Checks if it's a 128-bit vector register (`XMM0`-`XMM31`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isXMM(Register.R13D));
/// assert.ok(!RegisterExt.isXMM(Register.RSP));
/// assert.ok(RegisterExt.isXMM(Register.XMM0));
/// assert.ok(!RegisterExt.isXMM(Register.YMM0));
/// assert.ok(!RegisterExt.isXMM(Register.ZMM0));
/// ```
#[wasm_bindgen(js_name = "isXMM")]
pub fn is_xmm(value: Register) -> bool {
register_to_iced(value).is_xmm()
}
/// Checks if it's a 256-bit vector register (`YMM0`-`YMM31`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isYMM(Register.R13D));
/// assert.ok(!RegisterExt.isYMM(Register.RSP));
/// assert.ok(!RegisterExt.isYMM(Register.XMM0));
/// assert.ok(RegisterExt.isYMM(Register.YMM0));
/// assert.ok(!RegisterExt.isYMM(Register.ZMM0));
/// ```
#[wasm_bindgen(js_name = "isYMM")]
pub fn is_ymm(value: Register) -> bool {
register_to_iced(value).is_ymm()
}
/// Checks if it's a 512-bit vector register (`ZMM0`-`ZMM31`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isZMM(Register.R13D));
/// assert.ok(!RegisterExt.isZMM(Register.RSP));
/// assert.ok(!RegisterExt.isZMM(Register.XMM0));
/// assert.ok(!RegisterExt.isZMM(Register.YMM0));
/// assert.ok(RegisterExt.isZMM(Register.ZMM0));
/// ```
#[wasm_bindgen(js_name = "isZMM")]
pub fn is_zmm(value: Register) -> bool {
register_to_iced(value).is_zmm()
}
/// Checks if it's an `XMM`, `YMM` or `ZMM` register
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isVectorRegister(Register.R13D));
/// assert.ok(!RegisterExt.isVectorRegister(Register.RSP));
/// assert.ok(RegisterExt.isVectorRegister(Register.XMM0));
/// assert.ok(RegisterExt.isVectorRegister(Register.YMM0));
/// assert.ok(RegisterExt.isVectorRegister(Register.ZMM0));
/// ```
#[wasm_bindgen(js_name = "isVectorRegister")]
pub fn is_vector_register(value: Register) -> bool {
register_to_iced(value).is_vector_register()
}
/// Checks if it's `EIP`/`RIP`
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(RegisterExt.isIP(Register.EIP));
/// assert.ok(RegisterExt.isIP(Register.RIP));
/// ```
#[wasm_bindgen(js_name = "isIP")]
pub fn is_ip(value: Register) -> bool {
register_to_iced(value).is_ip()
}
/// Checks if it's an opmask register (`K0`-`K7`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isK(Register.R13D));
/// assert.ok(RegisterExt.isK(Register.K3));
/// ```
#[wasm_bindgen(js_name = "isK")]
pub fn is_k(value: Register) -> bool {
register_to_iced(value).is_k()
}
/// Checks if it's a control register (`CR0`-`CR15`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isCR(Register.R13D));
/// assert.ok(RegisterExt.isCR(Register.CR3));
/// ```
#[wasm_bindgen(js_name = "isCR")]
pub fn is_cr(value: Register) -> bool {
register_to_iced(value).is_cr()
}
/// Checks if it's a debug register (`DR0`-`DR15`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isDR(Register.R13D));
/// assert.ok(RegisterExt.isDR(Register.DR3));
/// ```
#[wasm_bindgen(js_name = "isDR")]
pub fn is_dr(value: Register) -> bool {
register_to_iced(value).is_dr()
}
/// Checks if it's a test register (`TR0`-`TR7`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isTR(Register.R13D));
/// assert.ok(RegisterExt.isTR(Register.TR3));
/// ```
#[wasm_bindgen(js_name = "isTR")]
pub fn is_tr(value: Register) -> bool {
register_to_iced(value).is_tr()
}
/// Checks if it's an FPU stack register (`ST0`-`ST7`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isST(Register.R13D));
/// assert.ok(RegisterExt.isST(Register.ST3));
/// ```
#[wasm_bindgen(js_name = "isST")]
pub fn is_st(value: Register) -> bool {
register_to_iced(value).is_st()
}
/// Checks if it's a bound register (`BND0`-`BND3`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isBND(Register.R13D));
/// assert.ok(RegisterExt.isBND(Register.BND3));
/// ```
#[wasm_bindgen(js_name = "isBND")]
pub fn is_bnd(value: Register) -> bool {
register_to_iced(value).is_bnd()
}
/// Checks if it's an MMX register (`MM0`-`MM7`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isMM(Register.R13D));
/// assert.ok(RegisterExt.isMM(Register.MM3));
/// ```
#[wasm_bindgen(js_name = "isMM")]
pub fn is_mm(value: Register) -> bool {
register_to_iced(value).is_mm()
}
/// Checks if it's a tile register (`TMM0`-`TMM7`)
///
/// # Arguments
///
/// - `value`: A [`Register`] enum value
///
/// [`Register`]: enum.Register.html
///
/// # Examples
///
/// ```js
/// const assert = require("assert").strict;
/// const { Register, RegisterExt } = require("iced-x86");
///
/// assert.ok(!RegisterExt.isTMM(Register.R13D));
/// assert.ok(RegisterExt.isTMM(Register.TMM3));
/// ```
#[wasm_bindgen(js_name = "isTMM")]
pub fn is_tmm(value: Register) -> bool {
register_to_iced(value).is_tmm()
}
}<|fim▁end|> | ///
/// # Examples
/// |
<|file_name|>index.test.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>// import React from 'react'
// import { shallow } from 'enzyme'
// import SectionLabel from '../index'
// Placeholder suite: keeps the test runner green until real
// <SectionLabel /> unit tests are written (see commented imports above).
describe('TODO <SectionLabel />', () => {
  it('Expect to have unit tests specified', () => {
    expect(true).toEqual(true)
  })
})
<|file_name|>expr-block-ref.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for issue #388<|fim▁hole|><|fim▁end|> | pub fn main() { let _x = { { @10 } }; } |
<|file_name|>description.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'/Users/ronmarianetti/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'days': 0,
'fields': [],
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalNextStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': { u'A': { 'fieldname': u'daynight',
'n': 300,
'name': u'daynight',
'type': 'SDRCategoryEncoder',
'w': 21},
u'B': { 'fieldname': u'daynight',
'n': 300,
'name': u'daynight',
'type': 'SDRCategoryEncoder',
'w': 21},
u'C': { 'fieldname': u'precip',
'n': 300,
'name': u'precip',
'type': 'SDRCategoryEncoder',
'w': 21},
u'D': { 'clipInput': True,
'fieldname': u'visitor_winloss',
'maxval': 0.78600000000000003,
'minval': 0.0,
'n': 150,
'name': u'visitor_winloss',
'type': 'AdaptiveScalarEncoder',
'w': 21},
u'E': { 'clipInput': True,
'fieldname': u'home_winloss',
'maxval': 0.69999999999999996,
'minval': 0.0,
'n': 150,
'name': u'home_winloss',
'type': 'AdaptiveScalarEncoder',
'w': 21},
u'F': { 'dayOfWeek': (7, 1),
'fieldname': u'timestamp',
'name': u'timestamp_dayOfWeek',
'type': 'DateEncoder'},
u'G': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (7, 1),
'type': 'DateEncoder'},
u'pred': { 'clipInput': True,
'fieldname': u'attendance',
'maxval': 36067,
'minval': 0,
'n': 150,
'name': u'attendance',
'type': 'AdaptiveScalarEncoder',
'w': 21}},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
# Valid keys is the desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
# What percent of the columns's receptive field is available
# for potential synapses. At initialization time, we will
# choose potentialPct * (2*potentialRadius+1)^2
'potentialPct': 1.0,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nupic/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 15,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
'regionName' : 'CLAClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is<|fim▁hole|> 'steps': '1',
},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)

# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
    # Number of aggregated records that fit in the look-ahead window.
    predictionSteps = int(round(aggregationDivide(
        config['predictAheadTime'], config['aggregationInfo'])))
    # A look-ahead shorter than one aggregation period is invalid.
    assert (predictionSteps >= 1)
    config['modelParams']['clParams']['steps'] = str(predictionSteps)

# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
# Control dictionary: tells the OPF runner where the data comes from and
# which metrics to compute over the model's predictions.
control = {
    # The environment that the current model is being run in
    "environment": 'nupic',

    # Input stream specification per py/nupicengine/cluster/database/StreamDef.json.
    #
    'dataset': {u'info': u'baseball benchmark test',
                u'streams': [{u'columns': [u'daynight',
                                           u'precip',
                                           u'home_winloss',
                                           u'visitor_winloss',
                                           u'attendance',
                                           u'timestamp'],
                              u'info': u'OAK01.csv',
                              u'source': u'file://extra/baseball_stadium/OAK01reformatted.csv'}],
                u'version': 1},

    # Iteration count: maximum number of iterations. Each iteration corresponds
    # to one record from the (possibly aggregated) dataset. The task is
    # terminated when either number of iterations reaches iterationCount or
    # all records in the (possibly aggregated) database have been processed,
    # whichever occurs first.
    #
    # iterationCount of -1 = iterate over the entire dataset
    #'iterationCount' : ITERATION_COUNT,

    # Metrics: A list of MetricSpecs that instantiate the metrics that are
    # computed for this experiment
    'metrics': [
        MetricSpec(field=u'attendance', inferenceElement=InferenceElement.prediction,
                   metric='aae', params={'window': 1000}),
        MetricSpec(field=u'attendance', inferenceElement=InferenceElement.prediction,
                   metric='trivial_aae', params={'window': 1000}),
        MetricSpec(field=u'attendance', inferenceElement=InferenceElement.prediction,
                   metric='nupicScore_scalar', params={'frequencyWindow': 1000, 'movingAverageWindow': 1000}),
        MetricSpec(field=u'attendance', inferenceElement=InferenceElement.prediction,
                   metric='nupicScore_scalar', params={'frequencyWindow': 1000})
    ],

    # Logged Metrics: A sequence of regular expressions that specify which of
    # the metrics from the Inference Specifications section MUST be logged for
    # every prediction. The regex's correspond to the automatically generated
    # metric labels. This is similar to the way the optimization metric is
    # specified in permutations.py.
    'loggedMetrics': ['.*nupicScore.*'],
}

# Expose the experiment (model config + control) to the OPF framework.
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
                                                control=control)
<|file_name|>transactionview.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2011-2013 The Biton developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "transactionview.h"
#include "transactionfilterproxy.h"
#include "transactionrecord.h"
#include "walletmodel.h"
#include "addresstablemodel.h"
#include "transactiontablemodel.h"
#include "bitonunits.h"
#include "csvmodelwriter.h"
#include "transactiondescdialog.h"
#include "editaddressdialog.h"
#include "optionsmodel.h"
#include "guiutil.h"
#include <QScrollBar>
#include <QComboBox>
#include <QDoubleValidator>
#include <QHBoxLayout>
#include <QVBoxLayout>
#include <QLineEdit>
#include <QTableView>
#include <QHeaderView>
#include <QMessageBox>
#include <QPoint>
#include <QMenu>
#include <QLabel>
#include <QDateTimeEdit>
// Builds the transaction-history page: a filter row (date / type /
// address-or-label / minimum amount) above a QTableView, plus the
// right-click context menu actions and all signal/slot wiring.
TransactionView::TransactionView(QWidget *parent) :
    QWidget(parent), model(0), transactionProxyModel(0),
    transactionView(0)
{
    // Build filter row
    setContentsMargins(0,0,0,0);
    QHBoxLayout *hlayout = new QHBoxLayout();
    hlayout->setContentsMargins(0,0,0,0);
#ifdef Q_OS_MAC
    // Mac widget metrics differ slightly; compensate with different spacing.
    hlayout->setSpacing(5);
    hlayout->addSpacing(26);
#else
    hlayout->setSpacing(0);
    hlayout->addSpacing(23);
#endif
    dateWidget = new QComboBox(this);
#ifdef Q_OS_MAC
    dateWidget->setFixedWidth(121);
#else
    dateWidget->setFixedWidth(120);
#endif
    // Item data holds the DateEnum handled by chooseDate().
    dateWidget->addItem(tr("All"), All);
    dateWidget->addItem(tr("Today"), Today);
    dateWidget->addItem(tr("This week"), ThisWeek);
    dateWidget->addItem(tr("This month"), ThisMonth);
    dateWidget->addItem(tr("Last month"), LastMonth);
    dateWidget->addItem(tr("This year"), ThisYear);
    dateWidget->addItem(tr("Range..."), Range);
    hlayout->addWidget(dateWidget);
    typeWidget = new QComboBox(this);
#ifdef Q_OS_MAC
    typeWidget->setFixedWidth(121);
#else
    typeWidget->setFixedWidth(120);
#endif
    // Item data holds a bitmask of TransactionRecord types for chooseType().
    typeWidget->addItem(tr("All"), TransactionFilterProxy::ALL_TYPES);
    typeWidget->addItem(tr("Received with"), TransactionFilterProxy::TYPE(TransactionRecord::RecvWithAddress) |
                        TransactionFilterProxy::TYPE(TransactionRecord::RecvFromOther));
    typeWidget->addItem(tr("Sent to"), TransactionFilterProxy::TYPE(TransactionRecord::SendToAddress) |
                        TransactionFilterProxy::TYPE(TransactionRecord::SendToOther));
    typeWidget->addItem(tr("To yourself"), TransactionFilterProxy::TYPE(TransactionRecord::SendToSelf));
    typeWidget->addItem(tr("Mined"), TransactionFilterProxy::TYPE(TransactionRecord::Generated));
    typeWidget->addItem(tr("Other"), TransactionFilterProxy::TYPE(TransactionRecord::Other));
    hlayout->addWidget(typeWidget);
    addressWidget = new QLineEdit(this);
#if QT_VERSION >= 0x040700
    /* Do not move this to the XML file, Qt before 4.7 will choke on it */
    addressWidget->setPlaceholderText(tr("Enter address or label to search"));
#endif
    hlayout->addWidget(addressWidget);
    amountWidget = new QLineEdit(this);
#if QT_VERSION >= 0x040700
    /* Do not move this to the XML file, Qt before 4.7 will choke on it */
    amountWidget->setPlaceholderText(tr("Min amount"));
#endif
#ifdef Q_OS_MAC
    amountWidget->setFixedWidth(97);
#else
    amountWidget->setFixedWidth(100);
#endif
    amountWidget->setValidator(new QDoubleValidator(0, 1e20, 8, this));
    hlayout->addWidget(amountWidget);
    QVBoxLayout *vlayout = new QVBoxLayout(this);
    vlayout->setContentsMargins(0,0,0,0);
    vlayout->setSpacing(0);
    QTableView *view = new QTableView(this);
    vlayout->addLayout(hlayout);
    vlayout->addWidget(createDateRangeWidget());
    vlayout->addWidget(view);
    vlayout->setSpacing(0);
    int width = view->verticalScrollBar()->sizeHint().width();
    // Cover scroll bar width with spacing
#ifdef Q_OS_MAC
    hlayout->addSpacing(width+2);
#else
    hlayout->addSpacing(width);
#endif
    // Always show scroll bar
    view->setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOn);
    view->setTabKeyNavigation(false);
    view->setContextMenuPolicy(Qt::CustomContextMenu);
    transactionView = view;
    // Actions
    QAction *copyAddressAction = new QAction(tr("Copy address"), this);
    QAction *copyLabelAction = new QAction(tr("Copy label"), this);
    QAction *copyAmountAction = new QAction(tr("Copy amount"), this);
    QAction *copyTxIDAction = new QAction(tr("Copy transaction ID"), this);
    QAction *editLabelAction = new QAction(tr("Edit label"), this);
    QAction *showDetailsAction = new QAction(tr("Show transaction details"), this);
    contextMenu = new QMenu();
    contextMenu->addAction(copyAddressAction);
    contextMenu->addAction(copyLabelAction);
    contextMenu->addAction(copyAmountAction);
    contextMenu->addAction(copyTxIDAction);
    contextMenu->addAction(editLabelAction);
    contextMenu->addAction(showDetailsAction);
    // Connect actions
    connect(dateWidget, SIGNAL(activated(int)), this, SLOT(chooseDate(int)));
    connect(typeWidget, SIGNAL(activated(int)), this, SLOT(chooseType(int)));
    connect(addressWidget, SIGNAL(textChanged(QString)), this, SLOT(changedPrefix(QString)));
    connect(amountWidget, SIGNAL(textChanged(QString)), this, SLOT(changedAmount(QString)));
    connect(view, SIGNAL(doubleClicked(QModelIndex)), this, SIGNAL(doubleClicked(QModelIndex)));
    connect(view, SIGNAL(customContextMenuRequested(QPoint)), this, SLOT(contextualMenu(QPoint)));
    connect(copyAddressAction, SIGNAL(triggered()), this, SLOT(copyAddress()));
    connect(copyLabelAction, SIGNAL(triggered()), this, SLOT(copyLabel()));
    connect(copyAmountAction, SIGNAL(triggered()), this, SLOT(copyAmount()));
    connect(copyTxIDAction, SIGNAL(triggered()), this, SLOT(copyTxID()));
    connect(editLabelAction, SIGNAL(triggered()), this, SLOT(editLabel()));
    connect(showDetailsAction, SIGNAL(triggered()), this, SLOT(showDetails()));
}
// Attaches the wallet model: wraps its transaction table in a
// sorting/filtering proxy and configures the view's columns.
// Passing a null model simply stores it and leaves the view untouched.
void TransactionView::setModel(WalletModel *model)
{
    this->model = model;
    if(model)
    {
        transactionProxyModel = new TransactionFilterProxy(this);
        transactionProxyModel->setSourceModel(model->getTransactionTableModel());
        transactionProxyModel->setDynamicSortFilter(true);
        transactionProxyModel->setSortCaseSensitivity(Qt::CaseInsensitive);
        transactionProxyModel->setFilterCaseSensitivity(Qt::CaseInsensitive);
        // Sort on the underlying edit-role data, not the formatted strings.
        transactionProxyModel->setSortRole(Qt::EditRole);
        transactionView->setModel(transactionProxyModel);
        transactionView->setAlternatingRowColors(true);
        transactionView->setSelectionBehavior(QAbstractItemView::SelectRows);
        transactionView->setSelectionMode(QAbstractItemView::ExtendedSelection);
        transactionView->setSortingEnabled(true);
        // Newest/most-relevant status first by default.
        transactionView->sortByColumn(TransactionTableModel::Status, Qt::DescendingOrder);
        transactionView->verticalHeader()->hide();
        transactionView->horizontalHeader()->resizeSection(TransactionTableModel::Status, 23);
        transactionView->horizontalHeader()->resizeSection(TransactionTableModel::Date, 120);
        transactionView->horizontalHeader()->resizeSection(TransactionTableModel::Type, 120);
#if QT_VERSION < 0x050000
        // setResizeMode() was renamed setSectionResizeMode() in Qt 5.
        transactionView->horizontalHeader()->setResizeMode(TransactionTableModel::ToAddress, QHeaderView::Stretch);
#else
        transactionView->horizontalHeader()->setSectionResizeMode(TransactionTableModel::ToAddress, QHeaderView::Stretch);
#endif
        transactionView->horizontalHeader()->resizeSection(TransactionTableModel::Amount, 100);
    }
}
void TransactionView::chooseDate(int idx)
{
if(!transactionProxyModel)
return;
QDate current = QDate::currentDate();
dateRangeWidget->setVisible(false);
switch(dateWidget->itemData(idx).toInt())
{
case All:
transactionProxyModel->setDateRange(
TransactionFilterProxy::MIN_DATE,
TransactionFilterProxy::MAX_DATE);
break;
case Today:
transactionProxyModel->setDateRange(
QDateTime(current),
TransactionFilterProxy::MAX_DATE);
break;
case ThisWeek: {
// Find last Monday
QDate startOfWeek = current.addDays(-(current.dayOfWeek()-1));
transactionProxyModel->setDateRange(
QDateTime(startOfWeek),
TransactionFilterProxy::MAX_DATE);
} break;
case ThisMonth:
transactionProxyModel->setDateRange(
QDateTime(QDate(current.year(), current.month(), 1)),
TransactionFilterProxy::MAX_DATE);
break;
case LastMonth:
transactionProxyModel->setDateRange(
QDateTime(QDate(current.year(), current.month()-1, 1)),
QDateTime(QDate(current.year(), current.month(), 1)));
break;
case ThisYear:
transactionProxyModel->setDateRange(
QDateTime(QDate(current.year(), 1, 1)),
TransactionFilterProxy::MAX_DATE);
break;
case Range:
dateRangeWidget->setVisible(true);
dateRangeChanged();
break;
}
}
// Apply the transaction-type bitmask selected in the type combo box.
void TransactionView::chooseType(int idx)
{
    if(!transactionProxyModel)
        return;
    const int typeMask = typeWidget->itemData(idx).toInt();
    transactionProxyModel->setTypeFilter(typeMask);
}
// Forward the address/label search text to the proxy model's prefix filter.
void TransactionView::changedPrefix(const QString &prefix)
{
    if(transactionProxyModel)
        transactionProxyModel->setAddressPrefix(prefix);
}
void TransactionView::changedAmount(const QString &amount)
{
if(!transactionProxyModel)
return;
qint64 amount_parsed = 0;
if(BitonUnits::parse(model->getOptionsModel()->getDisplayUnit(), amount, &amount_parsed))
{
transactionProxyModel->setMinAmount(amount_parsed);
}
else
{
transactionProxyModel->setMinAmount(0);
}
}
// Prompt for a filename and export the currently filtered/sorted
// transaction list as CSV; shows a critical dialog on write failure.
void TransactionView::exportClicked()
{
    // CSV is currently the only supported format
    QString filename = GUIUtil::getSaveFileName(
            this,
            tr("Export Transaction Data"), QString(),
            tr("Comma separated file (*.csv)"));
    if (filename.isNull()) return;
    CSVModelWriter writer(filename);
    // name, column, role
    // Exporting through the proxy preserves the user's filter and sort order.
    writer.setModel(transactionProxyModel);
    writer.addColumn(tr("Confirmed"), 0, TransactionTableModel::ConfirmedRole);
    writer.addColumn(tr("Date"), 0, TransactionTableModel::DateRole);
    writer.addColumn(tr("Type"), TransactionTableModel::Type, Qt::EditRole);
    writer.addColumn(tr("Label"), 0, TransactionTableModel::LabelRole);
    writer.addColumn(tr("Address"), 0, TransactionTableModel::AddressRole);
    writer.addColumn(tr("Amount"), 0, TransactionTableModel::FormattedAmountRole);
    writer.addColumn(tr("ID"), 0, TransactionTableModel::TxIDRole);
    if(!writer.write())
    {
        QMessageBox::critical(this, tr("Error exporting"), tr("Could not write to file %1.").arg(filename),
                              QMessageBox::Abort, QMessageBox::Abort);
    }
}
void TransactionView::contextualMenu(const QPoint &point)
{
QModelIndex index = transactionView->indexAt(point);
if(index.isValid())
{
contextMenu->exec(QCursor::pos());
}
}
// Copy the selected row's address to the clipboard.
void TransactionView::copyAddress()
{
    GUIUtil::copyEntryData(transactionView, 0, TransactionTableModel::AddressRole);
}
// Copy the selected row's label to the clipboard.
void TransactionView::copyLabel()
{
    GUIUtil::copyEntryData(transactionView, 0, TransactionTableModel::LabelRole);
}
// Copy the selected row's formatted amount to the clipboard.
void TransactionView::copyAmount()
{
    GUIUtil::copyEntryData(transactionView, 0, TransactionTableModel::FormattedAmountRole);
}
// Copy the selected row's transaction ID to the clipboard.
void TransactionView::copyTxID()
{
    GUIUtil::copyEntryData(transactionView, 0, TransactionTableModel::TxIDRole);
}
void TransactionView::editLabel()
{
if(!transactionView->selectionModel() ||!model)
return;
QModelIndexList selection = transactionView->selectionModel()->selectedRows();
if(!selection.isEmpty())
{
AddressTableModel *addressBook = model->getAddressTableModel();
if(!addressBook)
return;
QString address = selection.at(0).data(TransactionTableModel::AddressRole).toString();
if(address.isEmpty())
{
// If this transaction has no associated address, exit
return;
}
// Is address in address book? Address book can miss address when a transaction is
// sent from outside the UI.
int idx = addressBook->lookupAddress(address);
if(idx != -1)
{
// Edit sending / receiving address
QModelIndex modelIdx = addressBook->index(idx, 0, QModelIndex());
// Determine type of address, launch appropriate editor dialog type
QString type = modelIdx.data(AddressTableModel::TypeRole).toString();
EditAddressDialog dlg(type==AddressTableModel::Receive
? EditAddressDialog::EditReceivingAddress<|fim▁hole|> dlg.loadRow(idx);
dlg.exec();
}
else
{
// Add sending address
EditAddressDialog dlg(EditAddressDialog::NewSendingAddress,
this);
dlg.setModel(addressBook);
dlg.setAddress(address);
dlg.exec();
}
}
}
void TransactionView::showDetails()
{
if(!transactionView->selectionModel())
return;
QModelIndexList selection = transactionView->selectionModel()->selectedRows();
if(!selection.isEmpty())
{
TransactionDescDialog dlg(selection.at(0));
dlg.exec();
}
}
// Build the custom date-range picker row (hidden until the user selects
// "Range..." in the date combo); returns the widget so the caller can
// insert it into the page layout.
QWidget *TransactionView::createDateRangeWidget()
{
    dateRangeWidget = new QFrame();
    dateRangeWidget->setFrameStyle(QFrame::Panel | QFrame::Raised);
    dateRangeWidget->setContentsMargins(1,1,1,1);
    QHBoxLayout *layout = new QHBoxLayout(dateRangeWidget);
    layout->setContentsMargins(0,0,0,0);
    layout->addSpacing(23);
    layout->addWidget(new QLabel(tr("Range:")));
    dateFrom = new QDateTimeEdit(this);
    dateFrom->setDisplayFormat("dd/MM/yy");
    dateFrom->setCalendarPopup(true);
    dateFrom->setMinimumWidth(100);
    // Default range: the last 7 days up to today.
    dateFrom->setDate(QDate::currentDate().addDays(-7));
    layout->addWidget(dateFrom);
    layout->addWidget(new QLabel(tr("to")));
    dateTo = new QDateTimeEdit(this);
    dateTo->setDisplayFormat("dd/MM/yy");
    dateTo->setCalendarPopup(true);
    dateTo->setMinimumWidth(100);
    dateTo->setDate(QDate::currentDate());
    layout->addWidget(dateTo);
    layout->addStretch();
    // Hide by default
    dateRangeWidget->setVisible(false);
    // Notify on change
    connect(dateFrom, SIGNAL(dateChanged(QDate)), this, SLOT(dateRangeChanged()));
    connect(dateTo, SIGNAL(dateChanged(QDate)), this, SLOT(dateRangeChanged()));
    return dateRangeWidget;
}
// Push the custom from/to dates into the proxy filter. One day is added
// to the end so the chosen "to" day itself is included in the range.
void TransactionView::dateRangeChanged()
{
    if(!transactionProxyModel)
        return;
    transactionProxyModel->setDateRange(
            QDateTime(dateFrom->date()),
            QDateTime(dateTo->date()).addDays(1));
}
void TransactionView::focusTransaction(const QModelIndex &idx)
{
if(!transactionProxyModel)
return;
QModelIndex targetIdx = transactionProxyModel->mapFromSource(idx);
transactionView->scrollTo(targetIdx);
transactionView->setCurrentIndex(targetIdx);
transactionView->setFocus();
}<|fim▁end|> | : EditAddressDialog::EditSendingAddress,
this);
dlg.setModel(addressBook); |
<|file_name|>entry.model.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> description: string;
comments: any[];
}<|fim▁end|> | export class Entry {
title: string;
photo: string; |
<|file_name|>island.py<|end_file_name|><|fim▁begin|>from __future__ import division
import random
import matrix
from tile import Tile
class Island(object):
    def __init__(self, width=300, height=300):
        """Create an empty island grid of the given dimensions."""
        # Geometry/terrain artifacts produced by later generation passes;
        # all start unset.
        self.radius = None
        self.shore_noise = None
        self.rect_shore = None
        self.shore_lines = None
        self.peak = None
        self.spokes = None
        # height x width grid: None = outside the island, Tile = land.
        self.tiles = [[None] * width for _ in range(height)]
    def cells_to_tiles(self, *cells):
        """
        Apply a Cell(x, y, z) into an Island tile height.

        Each ``(x, y, z)`` triple becomes a ``Tile`` stored at grid
        position ``[x][y]`` with height ``z``.
        """
        for x, y, z in cells:
            self.tiles[x][y] = Tile(x, y, z)
<|fim▁hole|> """
if self.peak:
center_x, center_y = self.peak.x, self.peak.y
elif start:
center_x, center_y = start.x, start.y
else:
raise ValueError('Must define peak or start cell for flood fill.')
print('Flood filling')
seen = set()
start = (center_x, center_y)
stack = [start]
while True:
adjacent = False # Has no adjacent unvisited pixels
for dx, dy in [(1, 0), (-1, 0), (0, 1), (0, -1)]: # Check 4 neighbours
x, y = start[0] + dx, start[1] + dy
if (x, y) in seen:
continue
else:
if self.tiles[x][y] is None:
adjacent = True
stack.append((x, y))
self.tiles[x][y] = Tile(x, y, -1) # Set height -1
seen.add((x, y))
if not adjacent:
stack.pop()
if not stack:
break
else:
start = stack[-1]
def normalize(self):
max_height = 1
for row in self.tiles:
for tile in row:
if tile is not None:
if tile.height > max_height:
max_height = tile.height
for row in self.tiles:
for tile in row:
if tile is not None:
if tile.height > 0: # Ignore negative tiles
tile.height = float(tile.height) / max_height
elif tile.height < 0:
tile.height = -1
    def height_fill(self):
        """Iteratively replace sentinel (-1) tile heights with averages of
        nearby valued tiles.

        Repeats until no empty tiles remain, or gives up after the empty
        count fails to shrink on more than 10 passes (tiles with no valued
        neighbours would otherwise loop forever).
        """
        attempt = 0
        last_empty_count = 0
        while self.has_empty:
            empties = self.empties()
            empty_count = len(empties)
            print('Island has {} empty tiles'.format(empty_count))
            # No progress since the previous pass -> count a failed attempt.
            if empty_count == last_empty_count:
                attempt += 1
            last_empty_count = empty_count
            if attempt > 10: break;
            # Randomize fill order so averages don't sweep directionally.
            random.shuffle(empties)
            while empties:
                i, j = empties.pop()
                tile = self.tiles[i][j]
                if tile and tile.height == -1:
                    averages = []
                    # Sample square rings of neighbours at increasing distance.
                    for span in range(1, 5):
                        ring_total = 0
                        neighbour_count = 0
                        ring_avg = 0
                        for x, y in matrix.find_neighbours_2D(self.tiles, (i, j), span):
                            try:
                                value = self.tiles[x][y].height
                                # print('value: {}'.format(value))
                            except (IndexError, AttributeError):
                                # Off-grid index or still-None tile: skip.
                                continue
                            if value in [-1,]:
                                continue
                            ring_total += value
                            neighbour_count += 1
                        if ring_total:
                            ring_avg = ring_total/neighbour_count
                            # averages.append(ring_avg * 9 / span ** 0.9) # Further away == less impact
                            averages.append(ring_avg) # Further away == less impact
                    if averages:
                        # print(averages)
                        overall = sum(averages)/len(averages)
                        # print('overall: {}'.format(overall))
                        tile.height = overall
@property
def has_empty(self):
return any(True if tile.height == -1 else False
for row in self.tiles for tile in row if tile is not None)
def empties(self):
empty_cells = []
for i in range(len(self.tiles)):
for j in range(len(self.tiles[0])):
if self.tiles[i][j] is not None and self.tiles[i][j].height == -1:
empty_cells.append((i, j))
return empty_cells<|fim▁end|> | def flood_fill(self, start=None):
"""
Sets all None tiles to Tile(x, y, -1) within the island shore. |
<|file_name|>test_db_api.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# encoding=UTF8
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the DB API."""
import copy
import datetime
import iso8601
import types
import uuid as stdlib_uuid
import mox
import netaddr
from oslo.config import cfg
from sqlalchemy.dialects import sqlite
from sqlalchemy import exc
from sqlalchemy.exc import IntegrityError
from sqlalchemy import MetaData
from sqlalchemy.orm import exc as sqlalchemy_orm_exc
from sqlalchemy.orm import query
from sqlalchemy.sql.expression import select
from nova import block_device
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import api as sqlalchemy_api
from nova.db.sqlalchemy import models
from nova.db.sqlalchemy import utils as db_utils
from nova import exception
from nova.openstack.common.db import exception as db_exc
from nova.openstack.common.db.sqlalchemy import session as db_session
from nova.openstack.common import timeutils
from nova.openstack.common import uuidutils
from nova import quota
from nova import test
from nova.tests import matchers
from nova import utils
CONF = cfg.CONF
# These options are declared by the resource tracker module; import them
# so the tests below can read the reserved host memory/disk values.
CONF.import_opt('reserved_host_memory_mb', 'nova.compute.resource_tracker')
CONF.import_opt('reserved_host_disk_mb', 'nova.compute.resource_tracker')
# Short-hand aliases for the oslo db session helpers.
get_engine = db_session.get_engine
get_session = db_session.get_session
def _quota_reserve(context, project_id, user_id):
    """Create sample Quota, QuotaUsage and Reservation objects.
    There is no method db.quota_usage_create(), so we have to use
    db.quota_reserve() for creating QuotaUsage objects.
    Returns reservations uuids.
    """
    def get_sync(resource, usage):
        # Factory for a fake quota sync callable that always reports
        # ``usage`` for ``resource``; registered on sqlalchemy_api below so
        # db.quota_reserve() can find it by name.
        def sync(elevated, project_id, user_id, session):
            return {resource: usage}
        return sync
    quotas = {}
    user_quotas = {}
    resources = {}
    deltas = {}
    # Create three resources ('resource0', 'resource1' and 'fixed_ips')
    # with quota limit / delta equal to the loop index.
    for i in range(3):
        resource = 'resource%d' % i
        if i == 2:
            # test for project level resources
            resource = 'fixed_ips'
            quotas[resource] = db.quota_create(context,
                                               project_id, resource, i)
            user_quotas[resource] = quotas[resource]
        else:
            quotas[resource] = db.quota_create(context,
                                               project_id, resource, i)
            user_quotas[resource] = db.quota_create(context, project_id,
                                                    resource, i,
                                                    user_id=user_id)
        sync_name = '_sync_%s' % resource
        resources[resource] = quota.ReservableResource(
            resource, sync_name, 'quota_res_%d' % i)
        deltas[resource] = i
        # NOTE(review): mutates module-level state -- the fake sync
        # function stays attached to sqlalchemy_api after this call.
        setattr(sqlalchemy_api, sync_name, get_sync(resource, i))
        sqlalchemy_api.QUOTA_SYNC_FUNCTIONS[sync_name] = getattr(
            sqlalchemy_api, sync_name)
    return db.quota_reserve(context, resources, quotas, user_quotas, deltas,
                            timeutils.utcnow(), CONF.until_refresh,
                            datetime.timedelta(days=1), project_id, user_id)
class DbTestCase(test.TestCase):
    """Base fixture: a request context plus helpers to create instances
    and (system) metadata for them.
    """

    def setUp(self):
        super(DbTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)

    def create_instance_with_args(self, **kwargs):
        """Create an instance, letting ``kwargs`` override the defaults.

        A ``context`` kwarg, when given, supplies both the request context
        and the default project_id.
        """
        if 'context' in kwargs:
            ctxt = kwargs.pop('context')
            project = ctxt.project_id
        else:
            ctxt = self.context
            project = self.project_id
        args = {'reservation_id': 'a', 'image_ref': 1, 'host': 'host1',
                'node': 'node1', 'project_id': project,
                'vm_state': 'fake'}
        args.update(kwargs)
        return db.instance_create(ctxt, args)

    def fake_metadata(self, content):
        """Build ten deterministic key/value pairs tagged with ``content``."""
        return dict(("foo%i" % i, "this is %s item %i" % (content, i))
                    for i in range(10))

    def create_metadata_for_instance(self, instance_uuid):
        """Attach fake metadata and system metadata to an instance and
        return both dicts.
        """
        meta = self.fake_metadata('metadata')
        sys_meta = self.fake_metadata('system_metadata')
        db.instance_metadata_update(self.context, instance_uuid, meta, False)
        db.instance_system_metadata_update(self.context, instance_uuid,
                                           sys_meta, False)
        return meta, sys_meta
class DecoratorTestCase(test.TestCase):
    """Checks that the db-api context decorators preserve the wrapped
    function's identity (name, docstring and module), i.e. behave like
    functools.wraps.
    """

    def _test_decorator_wraps_helper(self, decorator):
        def test_func():
            """Test docstring."""

        wrapped = decorator(test_func)
        for attr in ('func_name', '__doc__', '__module__'):
            self.assertEqual(getattr(test_func, attr), getattr(wrapped, attr))

    def test_require_context_decorator_wraps_functions_properly(self):
        self._test_decorator_wraps_helper(sqlalchemy_api.require_context)

    def test_require_admin_context_decorator_wraps_functions_properly(self):
        self._test_decorator_wraps_helper(sqlalchemy_api.require_admin_context)
def _get_fake_aggr_values():
return {'name': 'fake_aggregate'}
def _get_fake_aggr_metadata():
return {'fake_key1': 'fake_value1',
'fake_key2': 'fake_value2',
'availability_zone': 'fake_avail_zone'}
def _get_fake_aggr_hosts():
return ['foo.openstack.org']
# Sentinel so callers can still pass an explicit None (meaning "no
# values/metadata") while the absent-argument case gets fresh defaults.
_CREATE_AGGREGATE_UNSET = object()


def _create_aggregate(context=context.get_admin_context(),
                      values=_CREATE_AGGREGATE_UNSET,
                      metadata=_CREATE_AGGREGATE_UNSET):
    """Create a test aggregate via db.aggregate_create.

    :param context: request context. NOTE(review): the admin-context
        default is still evaluated once at import time; the parameter
        cannot be renamed or lazily defaulted without shadowing the
        ``context`` module or breaking keyword callers.
    :param values: aggregate values; defaults to a fresh
        _get_fake_aggr_values(). ``None`` is forwarded as-is.
    :param metadata: aggregate metadata; defaults to a fresh
        _get_fake_aggr_metadata(). ``None`` means "no metadata".
    """
    # Bug fix: the defaults used to be single dicts created at import
    # time, so any mutation by the db layer or a test leaked into every
    # later call. Re-create them on each call instead.
    if values is _CREATE_AGGREGATE_UNSET:
        values = _get_fake_aggr_values()
    if metadata is _CREATE_AGGREGATE_UNSET:
        metadata = _get_fake_aggr_metadata()
    return db.aggregate_create(context, values, metadata)
# Sentinel for absent arguments (explicit None must pass through intact).
_CREATE_AGGR_WITH_HOSTS_UNSET = object()


def _create_aggregate_with_hosts(context=context.get_admin_context(),
                                 values=_CREATE_AGGR_WITH_HOSTS_UNSET,
                                 metadata=_CREATE_AGGR_WITH_HOSTS_UNSET,
                                 hosts=_CREATE_AGGR_WITH_HOSTS_UNSET):
    """Create a test aggregate and add ``hosts`` to it.

    Defaults for ``values``/``metadata``/``hosts`` are produced freshly on
    every call. Bug fix: they used to be single objects built once at
    import time, so a mutation leaked between tests. NOTE(review): the
    ``context`` default remains import-time evaluated -- renaming the
    parameter would break keyword callers.
    """
    if values is _CREATE_AGGR_WITH_HOSTS_UNSET:
        values = _get_fake_aggr_values()
    if metadata is _CREATE_AGGR_WITH_HOSTS_UNSET:
        metadata = _get_fake_aggr_metadata()
    if hosts is _CREATE_AGGR_WITH_HOSTS_UNSET:
        hosts = _get_fake_aggr_hosts()
    result = _create_aggregate(context=context,
                               values=values, metadata=metadata)
    for host in hosts:
        db.aggregate_host_add(context, result['id'], host)
    return result
class NotDbApiTestCase(DbTestCase):
    """Filter/pagination behaviour against a non-SQL ('notdb://') backend."""

    def setUp(self):
        super(NotDbApiTestCase, self).setUp()
        self.flags(connection='notdb://', group='database')

    def _filter_by_name(self, pattern, **extra):
        # Convenience wrapper: fetch instances whose display_name matches
        # ``pattern``.
        return db.instance_get_all_by_filters(
            self.context, {'display_name': pattern}, **extra)

    def test_instance_get_all_by_filters_regex_unsupported_db(self):
        # Ensure that the 'LIKE' operator is used for unsupported dbs.
        for name in ('test1', 'test.*', 'diff'):
            self.create_instance_with_args(display_name=name)
        self.assertEqual(1, len(self._filter_by_name('test.*')))
        self.assertEqual(2, len(self._filter_by_name('%test%')))

    def test_instance_get_all_by_filters_paginate(self):
        created = [self.create_instance_with_args(display_name='test%d' % i)
                   for i in (1, 2, 3)]
        self.assertEqual(3, len(self._filter_by_name('%test%', marker=None)))
        # Walking the marker forward through the created instances drops
        # one matching row per step.
        for remaining, inst in zip((2, 1, 0), created):
            result = self._filter_by_name('%test%', sort_dir="asc",
                                          marker=inst['uuid'])
            self.assertEqual(remaining, len(result))
        self.assertRaises(exception.MarkerNotFound,
                          db.instance_get_all_by_filters,
                          self.context, {'display_name': '%test%'},
                          marker=str(stdlib_uuid.uuid4()))
class AggregateDBApiTestCase(test.TestCase):
    """Tests for the db.aggregate_* API: create/get/update/delete plus
    host membership and metadata handling.
    """
    def setUp(self):
        super(AggregateDBApiTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)
    def test_aggregate_create_no_metadata(self):
        result = _create_aggregate(metadata=None)
        self.assertEquals(result['name'], 'fake_aggregate')
    def test_aggregate_create_avoid_name_conflict(self):
        # Deleting an aggregate frees its name for re-use.
        r1 = _create_aggregate(metadata=None)
        db.aggregate_delete(context.get_admin_context(), r1['id'])
        values = {'name': r1['name']}
        metadata = {'availability_zone': 'new_zone'}
        r2 = _create_aggregate(values=values, metadata=metadata)
        self.assertEqual(r2['name'], values['name'])
        self.assertEqual(r2['availability_zone'],
                         metadata['availability_zone'])
    def test_aggregate_create_raise_exist_exc(self):
        _create_aggregate(metadata=None)
        self.assertRaises(exception.AggregateNameExists,
                          _create_aggregate, metadata=None)
    def test_aggregate_get_raise_not_found(self):
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_get,
                          ctxt, aggregate_id)
    def test_aggregate_metadata_get_raise_not_found(self):
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_metadata_get,
                          ctxt, aggregate_id)
    def test_aggregate_create_with_metadata(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(expected_metadata,
                        matchers.DictMatches(_get_fake_aggr_metadata()))
    def test_aggregate_create_delete_create_with_metadata(self):
        #test for bug 1052479
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(expected_metadata,
                        matchers.DictMatches(_get_fake_aggr_metadata()))
        db.aggregate_delete(ctxt, result['id'])
        result = _create_aggregate(metadata={'availability_zone':
                                             'fake_avail_zone'})
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertEqual(expected_metadata, {'availability_zone':
                                             'fake_avail_zone'})
    def test_aggregate_create_low_privi_context(self):
        # Only admin contexts may create aggregates.
        self.assertRaises(exception.AdminRequired,
                          db.aggregate_create,
                          self.context, _get_fake_aggr_values())
    def test_aggregate_get(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt)
        expected = db.aggregate_get(ctxt, result['id'])
        self.assertEqual(_get_fake_aggr_hosts(), expected['hosts'])
        self.assertEqual(_get_fake_aggr_metadata(), expected['metadetails'])
    def test_aggregate_get_by_host(self):
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate2'}
        values3 = {'name': 'fake_aggregate3'}
        values4 = {'name': 'fake_aggregate4'}
        values5 = {'name': 'fake_aggregate5'}
        a1 = _create_aggregate_with_hosts(context=ctxt)
        a2 = _create_aggregate_with_hosts(context=ctxt, values=values2)
        # a3 has no hosts and should not be in the results.
        a3 = _create_aggregate(context=ctxt, values=values3)
        # a4 has no matching hosts.
        a4 = _create_aggregate_with_hosts(context=ctxt, values=values4,
                                          hosts=['foo4.openstack.org'])
        # a5 has no matching hosts after deleting the only matching host.
        a5 = _create_aggregate_with_hosts(context=ctxt, values=values5,
                hosts=['foo5.openstack.org', 'foo.openstack.org'])
        db.aggregate_host_delete(ctxt, a5['id'],
                                 'foo.openstack.org')
        r1 = db.aggregate_get_by_host(ctxt, 'foo.openstack.org')
        self.assertEqual([a1['id'], a2['id']], [x['id'] for x in r1])
    def test_aggregate_get_by_host_with_key(self):
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate2'}
        values3 = {'name': 'fake_aggregate3'}
        values4 = {'name': 'fake_aggregate4'}
        a1 = _create_aggregate_with_hosts(context=ctxt,
                                          metadata={'goodkey': 'good'})
        _create_aggregate_with_hosts(context=ctxt, values=values2)
        _create_aggregate(context=ctxt, values=values3)
        _create_aggregate_with_hosts(context=ctxt, values=values4,
                hosts=['foo4.openstack.org'], metadata={'goodkey': 'bad'})
        # filter result by key
        r1 = db.aggregate_get_by_host(ctxt, 'foo.openstack.org', key='goodkey')
        self.assertEqual([a1['id']], [x['id'] for x in r1])
    def test_aggregate_metadata_get_by_host(self):
        ctxt = context.get_admin_context()
        values = {'name': 'fake_aggregate2'}
        values2 = {'name': 'fake_aggregate3'}
        _create_aggregate_with_hosts(context=ctxt)
        _create_aggregate_with_hosts(context=ctxt, values=values)
        _create_aggregate_with_hosts(context=ctxt, values=values2,
                hosts=['bar.openstack.org'], metadata={'badkey': 'bad'})
        r1 = db.aggregate_metadata_get_by_host(ctxt, 'foo.openstack.org')
        self.assertEqual(r1['fake_key1'], set(['fake_value1']))
        self.assertFalse('badkey' in r1)
    def test_aggregate_metadata_get_by_metadata_key(self):
        ctxt = context.get_admin_context()
        values = {'aggregate_id': 'fake_id',
                  'name': 'fake_aggregate'}
        aggr = _create_aggregate_with_hosts(context=ctxt, values=values,
                                            hosts=['bar.openstack.org'],
                                            metadata={'availability_zone':
                                                      'az1'})
        r1 = db.aggregate_metadata_get_by_metadata_key(ctxt, aggr['id'],
                                                       'availability_zone')
        self.assertEqual(r1['availability_zone'], set(['az1']))
        self.assertTrue('availability_zone' in r1)
        self.assertFalse('name' in r1)
    def test_aggregate_metadata_get_by_host_with_key(self):
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate12'}
        values3 = {'name': 'fake_aggregate23'}
        a2_hosts = ['foo1.openstack.org', 'foo2.openstack.org']
        a2_metadata = {'good': 'value12', 'bad': 'badvalue12'}
        a3_hosts = ['foo2.openstack.org', 'foo3.openstack.org']
        a3_metadata = {'good': 'value23', 'bad': 'badvalue23'}
        a1 = _create_aggregate_with_hosts(context=ctxt)
        a2 = _create_aggregate_with_hosts(context=ctxt, values=values2,
                hosts=a2_hosts, metadata=a2_metadata)
        a3 = _create_aggregate_with_hosts(context=ctxt, values=values3,
                hosts=a3_hosts, metadata=a3_metadata)
        r1 = db.aggregate_metadata_get_by_host(ctxt, 'foo2.openstack.org',
                                               key='good')
        self.assertEqual(r1['good'], set(['value12', 'value23']))
        self.assertFalse('fake_key1' in r1)
        self.assertFalse('bad' in r1)
        # Delete metadata
        db.aggregate_metadata_delete(ctxt, a3['id'], 'good')
        r2 = db.aggregate_metadata_get_by_host(ctxt, 'foo.openstack.org',
                                               key='good')
        self.assertFalse('good' in r2)
    def test_aggregate_host_get_by_metadata_key(self):
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate12'}
        values3 = {'name': 'fake_aggregate23'}
        a2_hosts = ['foo1.openstack.org', 'foo2.openstack.org']
        a2_metadata = {'good': 'value12', 'bad': 'badvalue12'}
        a3_hosts = ['foo2.openstack.org', 'foo3.openstack.org']
        a3_metadata = {'good': 'value23', 'bad': 'badvalue23'}
        a1 = _create_aggregate_with_hosts(context=ctxt)
        a2 = _create_aggregate_with_hosts(context=ctxt, values=values2,
                hosts=a2_hosts, metadata=a2_metadata)
        a3 = _create_aggregate_with_hosts(context=ctxt, values=values3,
                hosts=a3_hosts, metadata=a3_metadata)
        r1 = db.aggregate_host_get_by_metadata_key(ctxt, key='good')
        self.assertEqual({
            'foo1.openstack.org': set(['value12']),
            'foo2.openstack.org': set(['value12', 'value23']),
            'foo3.openstack.org': set(['value23']),
        }, r1)
        self.assertFalse('fake_key1' in r1)
    def test_aggregate_get_by_host_not_found(self):
        ctxt = context.get_admin_context()
        _create_aggregate_with_hosts(context=ctxt)
        self.assertEqual([], db.aggregate_get_by_host(ctxt, 'unknown_host'))
    def test_aggregate_delete_raise_not_found(self):
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_delete,
                          ctxt, aggregate_id)
    def test_aggregate_delete(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        db.aggregate_delete(ctxt, result['id'])
        expected = db.aggregate_get_all(ctxt)
        self.assertEqual(0, len(expected))
        # Soft delete: the row survives with 'deleted' set to its own id.
        aggregate = db.aggregate_get(ctxt.elevated(read_deleted='yes'),
                                     result['id'])
        self.assertEqual(aggregate['deleted'], result['id'])
    def test_aggregate_update(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata={'availability_zone':
                                                           'fake_avail_zone'})
        self.assertEqual(result['availability_zone'], 'fake_avail_zone')
        new_values = _get_fake_aggr_values()
        new_values['availability_zone'] = 'different_avail_zone'
        updated = db.aggregate_update(ctxt, result['id'], new_values)
        self.assertNotEqual(result['availability_zone'],
                            updated['availability_zone'])
    def test_aggregate_update_with_metadata(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        values = _get_fake_aggr_values()
        values['metadata'] = _get_fake_aggr_metadata()
        values['availability_zone'] = 'different_avail_zone'
        db.aggregate_update(ctxt, result['id'], values)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        updated = db.aggregate_get(ctxt, result['id'])
        self.assertThat(values['metadata'],
                        matchers.DictMatches(expected))
        self.assertNotEqual(result['availability_zone'],
                            updated['availability_zone'])
    def test_aggregate_update_with_existing_metadata(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        values = _get_fake_aggr_values()
        values['metadata'] = _get_fake_aggr_metadata()
        values['metadata']['fake_key1'] = 'foo'
        db.aggregate_update(ctxt, result['id'], values)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(values['metadata'], matchers.DictMatches(expected))
    def test_aggregate_update_zone_with_existing_metadata(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        new_zone = {'availability_zone': 'fake_avail_zone_2'}
        metadata = _get_fake_aggr_metadata()
        metadata.update(new_zone)
        db.aggregate_update(ctxt, result['id'], new_zone)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_update_raise_not_found(self):
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        new_values = _get_fake_aggr_values()
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_update, ctxt, aggregate_id, new_values)
    def test_aggregate_get_all(self):
        ctxt = context.get_admin_context()
        counter = 3
        for c in range(counter):
            _create_aggregate(context=ctxt,
                              values={'name': 'fake_aggregate_%d' % c},
                              metadata=None)
        results = db.aggregate_get_all(ctxt)
        self.assertEqual(len(results), counter)
    def test_aggregate_get_all_non_deleted(self):
        ctxt = context.get_admin_context()
        add_counter = 5
        remove_counter = 2
        aggregates = []
        for c in range(1, add_counter):
            values = {'name': 'fake_aggregate_%d' % c}
            aggregates.append(_create_aggregate(context=ctxt,
                                                values=values, metadata=None))
        for c in range(1, remove_counter):
            db.aggregate_delete(ctxt, aggregates[c - 1]['id'])
        results = db.aggregate_get_all(ctxt)
        self.assertEqual(len(results), add_counter - remove_counter)
    def test_aggregate_metadata_add(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        metadata = _get_fake_aggr_metadata()
        db.aggregate_metadata_add(ctxt, result['id'], metadata)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_metadata_add_retry(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        def counted():
            # Stub query that always raises DBDuplicateEntry and counts
            # how many times the retry loop invoked it.
            def get_query(context, id, session, read_deleted):
                get_query.counter += 1
                raise db_exc.DBDuplicateEntry
            get_query.counter = 0
            return get_query
        get_query = counted()
        self.stubs.Set(sqlalchemy_api,
                       '_aggregate_metadata_get_query', get_query)
        self.assertRaises(db_exc.DBDuplicateEntry, sqlalchemy_api.
                          aggregate_metadata_add, ctxt, result['id'], {},
                          max_retries=5)
        self.assertEqual(get_query.counter, 5)
    def test_aggregate_metadata_update(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        metadata = _get_fake_aggr_metadata()
        key = metadata.keys()[0]
        db.aggregate_metadata_delete(ctxt, result['id'], key)
        new_metadata = {key: 'foo'}
        db.aggregate_metadata_add(ctxt, result['id'], new_metadata)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        metadata[key] = 'foo'
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_metadata_delete(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        metadata = _get_fake_aggr_metadata()
        db.aggregate_metadata_add(ctxt, result['id'], metadata)
        db.aggregate_metadata_delete(ctxt, result['id'], metadata.keys()[0])
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        del metadata[metadata.keys()[0]]
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_remove_availability_zone(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata={'availability_zone':
                                                           'fake_avail_zone'})
        db.aggregate_metadata_delete(ctxt, result['id'], 'availability_zone')
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        aggregate = db.aggregate_get(ctxt, result['id'])
        self.assertEquals(aggregate['availability_zone'], None)
        self.assertThat({}, matchers.DictMatches(expected))
    def test_aggregate_metadata_delete_raise_not_found(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        self.assertRaises(exception.AggregateMetadataNotFound,
                          db.aggregate_metadata_delete,
                          ctxt, result['id'], 'foo_key')
    def test_aggregate_host_add(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        expected = db.aggregate_host_get_all(ctxt, result['id'])
        self.assertEqual(_get_fake_aggr_hosts(), expected)
    def test_aggregate_host_re_add(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        host = _get_fake_aggr_hosts()[0]
        db.aggregate_host_delete(ctxt, result['id'], host)
        db.aggregate_host_add(ctxt, result['id'], host)
        expected = db.aggregate_host_get_all(ctxt, result['id'])
        self.assertEqual(len(expected), 1)
    def test_aggregate_host_add_duplicate_works(self):
        # The same host may belong to two different aggregates.
        ctxt = context.get_admin_context()
        r1 = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        r2 = _create_aggregate_with_hosts(ctxt,
                          values={'name': 'fake_aggregate2'},
                          metadata={'availability_zone': 'fake_avail_zone2'})
        h1 = db.aggregate_host_get_all(ctxt, r1['id'])
        h2 = db.aggregate_host_get_all(ctxt, r2['id'])
        self.assertEqual(h1, h2)
    def test_aggregate_host_add_duplicate_raise_exist_exc(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        self.assertRaises(exception.AggregateHostExists,
                          db.aggregate_host_add,
                          ctxt, result['id'], _get_fake_aggr_hosts()[0])
    def test_aggregate_host_add_raise_not_found(self):
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        host = _get_fake_aggr_hosts()[0]
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_host_add,
                          ctxt, aggregate_id, host)
    def test_aggregate_host_delete(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        db.aggregate_host_delete(ctxt, result['id'],
                                 _get_fake_aggr_hosts()[0])
        expected = db.aggregate_host_get_all(ctxt, result['id'])
        self.assertEqual(0, len(expected))
    def test_aggregate_host_delete_raise_not_found(self):
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        self.assertRaises(exception.AggregateHostNotFound,
                          db.aggregate_host_delete,
                          ctxt, result['id'], _get_fake_aggr_hosts()[0])
class SqlAlchemyDbApiTestCase(DbTestCase):
    """Tests that hit the sqlalchemy db-api implementation directly."""

    def test_instance_get_all_by_host(self):
        """Only the two instances created on host1 are returned."""
        ctxt = context.get_admin_context()
        self.create_instance_with_args()
        self.create_instance_with_args()
        self.create_instance_with_args(host='host2')
        # Bug fix: this test previously called the private
        # _instance_get_all_uuids_by_host() helper, making it an exact
        # duplicate of the test below instead of covering the public
        # instance_get_all_by_host API it is named after.
        result = db.instance_get_all_by_host(ctxt, 'host1')
        self.assertEqual(2, len(result))

    def test_instance_get_all_uuids_by_host(self):
        """The private helper returns plain unicode uuid strings."""
        ctxt = context.get_admin_context()
        self.create_instance_with_args()
        self.create_instance_with_args()
        self.create_instance_with_args(host='host2')
        result = sqlalchemy_api._instance_get_all_uuids_by_host(ctxt, 'host1')
        self.assertEqual(2, len(result))
        self.assertEqual(types.UnicodeType, type(result[0]))
class MigrationTestCase(test.TestCase):
    """Tests for db.api.migration_* methods.
    setUp seeds seven migrations across host1..host4: two default
    in-progress ones (host1->host2), a 'reverted' and a 'confirmed' one,
    plus host2->host1, host2->host3 and host3->host4. The per-test
    expected counts below depend on exactly this fixture.
    """
    def setUp(self):
        super(MigrationTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self._create()
        self._create()
        self._create(status='reverted')
        self._create(status='confirmed')
        self._create(source_compute='host2', source_node='b',
                     dest_compute='host1', dest_node='a')
        self._create(source_compute='host2', dest_compute='host3')
        self._create(source_compute='host3', dest_compute='host4')
    def _create(self, status='migrating', source_compute='host1',
                source_node='a', dest_compute='host2', dest_node='b',
                system_metadata=None):
        # Create a backing instance on the source host, then a migration
        # row referencing it.
        values = {'host': source_compute}
        instance = db.instance_create(self.ctxt, values)
        if system_metadata:
            db.instance_system_metadata_update(self.ctxt, instance['uuid'],
                                               system_metadata, False)
        values = {'status': status, 'source_compute': source_compute,
                  'source_node': source_node, 'dest_compute': dest_compute,
                  'dest_node': dest_node, 'instance_uuid': instance['uuid']}
        db.migration_create(self.ctxt, values)
    def _assert_in_progress(self, migrations):
        # 'confirmed' and 'reverted' are terminal states, not in progress.
        for migration in migrations:
            self.assertNotEqual('confirmed', migration['status'])
            self.assertNotEqual('reverted', migration['status'])
    def test_migration_get_in_progress_joins(self):
        # The instance (and its system_metadata) is joined onto the
        # migration result.
        self._create(source_compute='foo', system_metadata={'foo': 'bar'})
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'foo', 'a')
        system_metadata = migrations[0]['instance']['system_metadata'][0]
        self.assertEqual(system_metadata['key'], 'foo')
        self.assertEqual(system_metadata['value'], 'bar')
    def test_in_progress_host1_nodea(self):
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host1', 'a')
        # 2 as source + 1 as dest
        self.assertEqual(3, len(migrations))
        self._assert_in_progress(migrations)
    def test_in_progress_host1_nodeb(self):
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host1', 'b')
        # some migrations are to/from host1, but none with a node 'b'
        self.assertEqual(0, len(migrations))
    def test_in_progress_host2_nodeb(self):
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host2', 'b')
        # 2 as dest, 1 as source
        self.assertEqual(3, len(migrations))
        self._assert_in_progress(migrations)
    def test_instance_join(self):
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host2', 'b')
        for migration in migrations:
            instance = migration['instance']
            self.assertEqual(migration['instance_uuid'], instance['uuid'])
    def test_get_migrations_by_filters(self):
        # 'host' matches either the source or the destination compute.
        filters = {"status": "migrating", "host": "host3"}
        migrations = db.migration_get_all_by_filters(self.ctxt, filters)
        self.assertEqual(2, len(migrations))
        for migration in migrations:
            self.assertEqual(filters["status"], migration['status'])
            hosts = [migration['source_compute'], migration['dest_compute']]
            self.assertIn(filters["host"], hosts)
    def test_only_admin_can_get_all_migrations_by_filters(self):
        user_ctxt = context.RequestContext(user_id=None, project_id=None,
                                           is_admin=False, read_deleted="no",
                                           overwrite=False)
        self.assertRaises(exception.AdminRequired,
                          db.migration_get_all_by_filters, user_ctxt, {})
    def test_migration_get_unconfirmed_by_dest_compute(self):
        # Ensure no migrations are returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host')
        self.assertEqual(0, len(results))
        # Ensure no migrations are returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host2')
        self.assertEqual(0, len(results))
        updated_at = datetime.datetime(2000, 1, 1, 12, 0, 0)
        values = {"status": "finished", "updated_at": updated_at,
                  "dest_compute": "fake_host2"}
        migration = db.migration_create(self.ctxt, values)
        # Ensure different host is not returned
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host')
        self.assertEqual(0, len(results))
        # Ensure one migration older than 10 seconds is returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host2')
        self.assertEqual(1, len(results))
        db.migration_update(self.ctxt, migration['id'],
                            {"status": "CONFIRMED"})
        # Ensure the new migration is not returned.
        updated_at = timeutils.utcnow()
        values = {"status": "finished", "updated_at": updated_at,
                  "dest_compute": "fake_host2"}
        migration = db.migration_create(self.ctxt, values)
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                "fake_host2")
        self.assertEqual(0, len(results))
        db.migration_update(self.ctxt, migration['id'],
                            {"status": "CONFIRMED"})
    def test_migration_update_not_found(self):
        self.assertRaises(exception.MigrationNotFound,
                          db.migration_update, self.ctxt, 42, {})
class ModelsObjectComparatorMixin(object):
    """Assertion helpers for comparing db model objects while skipping
    volatile keys (ids, timestamps, ...).
    """

    def _dict_from_object(self, obj, ignored_keys):
        # Project the model object onto a plain dict, dropping any keys
        # listed in ignored_keys.
        if ignored_keys is None:
            ignored_keys = []
        return dict((k, v) for k, v in obj.iteritems()
                    if k not in ignored_keys)

    def _assertEqualObjects(self, obj1, obj2, ignored_keys=None):
        d1 = self._dict_from_object(obj1, ignored_keys)
        d2 = self._dict_from_object(obj2, ignored_keys)
        self.assertEqual(len(d1),
                         len(d2),
                         "Keys mismatch: %s" %
                         str(set(d1.keys()) ^ set(d2.keys())))
        for key, value in d1.iteritems():
            self.assertEqual(value, d2[key])

    def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
        def _sort_key(d):
            # Deterministic ordering: values listed by sorted key name.
            return [d[k] for k in sorted(d)]

        def _normalize(objs):
            dicts = [self._dict_from_object(o, ignored_keys) for o in objs]
            return sorted(dicts, key=_sort_key)

        self.assertEqual(_normalize(objs1), _normalize(objs2))

    def _assertEqualListsOfPrimitivesAsSets(self, primitives1, primitives2):
        # Same length and mutual containment -- i.e. equal as multiset-ish
        # collections without requiring hashability or ordering.
        self.assertEqual(len(primitives1), len(primitives2))
        for primitive in primitives1:
            self.assertIn(primitive, primitives2)
        for primitive in primitives2:
            self.assertIn(primitive, primitives1)
class InstanceSystemMetadataTestCase(test.TestCase):
    """Tests for db.api.instance_system_metadata_* methods."""

    def setUp(self):
        super(InstanceSystemMetadataTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.instance = db.instance_create(
            self.ctxt, {'host': 'h1', 'project_id': 'p1',
                        'system_metadata': {'key': 'value'}})

    def _get_sys_meta(self):
        # Read back the fixture instance's current system metadata.
        return db.instance_system_metadata_get(self.ctxt,
                                               self.instance['uuid'])

    def test_instance_system_metadata_get(self):
        self.assertEqual(self._get_sys_meta(), {'key': 'value'})

    def test_instance_system_metadata_update_new_pair(self):
        db.instance_system_metadata_update(
            self.ctxt, self.instance['uuid'],
            {'new_key': 'new_value'}, False)
        self.assertEqual(self._get_sys_meta(),
                         {'key': 'value', 'new_key': 'new_value'})

    def test_instance_system_metadata_update_existent_pair(self):
        db.instance_system_metadata_update(
            self.ctxt, self.instance['uuid'],
            {'key': 'new_value'}, True)
        self.assertEqual(self._get_sys_meta(), {'key': 'new_value'})

    def test_instance_system_metadata_update_delete_true(self):
        # delete=True replaces the metadata wholesale, dropping 'key'.
        db.instance_system_metadata_update(
            self.ctxt, self.instance['uuid'],
            {'new_key': 'new_value'}, True)
        self.assertEqual(self._get_sys_meta(), {'new_key': 'new_value'})

    @test.testtools.skip("bug 1189462")
    def test_instance_system_metadata_update_nonexistent(self):
        self.assertRaises(exception.InstanceNotFound,
                          db.instance_system_metadata_update,
                          self.ctxt, 'nonexistent-uuid',
                          {'key': 'value'}, True)
class ReservationTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.reservation_* methods."""
def setUp(self):
super(ReservationTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.values = {'uuid': 'sample-uuid',
'project_id': 'project1',
'user_id': 'user1',
'resource': 'resource',
'delta': 42,
'expire': timeutils.utcnow() + datetime.timedelta(days=1),
'usage': {'id': 1}}
def test_reservation_create(self):
reservation = db.reservation_create(self.ctxt, **self.values)
self._assertEqualObjects(self.values, reservation, ignored_keys=(
'deleted', 'updated_at',
'deleted_at', 'id',
'created_at', 'usage',
'usage_id'))
self.assertEqual(reservation['usage_id'], self.values['usage']['id'])
def test_reservation_get(self):
reservation = db.reservation_create(self.ctxt, **self.values)
reservation_db = db.reservation_get(self.ctxt, self.values['uuid'])
self._assertEqualObjects(reservation, reservation_db)
def test_reservation_get_nonexistent(self):
self.assertRaises(exception.ReservationNotFound, db.reservation_get,
self.ctxt, 'non-exitent-resevation-uuid')
def test_reservation_commit(self):
reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 1, 'in_use': 1},
'fixed_ips': {'reserved': 2, 'in_use': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))
db.reservation_get(self.ctxt, reservations[0])
db.reservation_commit(self.ctxt, reservations, 'project1', 'user1')
self.assertRaises(exception.ReservationNotFound,
db.reservation_get, self.ctxt, reservations[0])
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 0, 'in_use': 2},
'fixed_ips': {'reserved': 0, 'in_use': 4}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))
def test_reservation_rollback(self):
reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 1, 'in_use': 1},
'fixed_ips': {'reserved': 2, 'in_use': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))
db.reservation_get(self.ctxt, reservations[0])
db.reservation_rollback(self.ctxt, reservations, 'project1', 'user1')
self.assertRaises(exception.ReservationNotFound,
db.reservation_get, self.ctxt, reservations[0])
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 0, 'in_use': 1},
'fixed_ips': {'reserved': 0, 'in_use': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))
def test_reservation_expire(self):
    """reservation_expire() rolls back reservations past their expiry."""
    # Make the reservation look expirable relative to 'now'.
    self.values['expire'] = timeutils.utcnow() + datetime.timedelta(days=1)
    _quota_reserve(self.ctxt, 'project1', 'user1')
    db.reservation_expire(self.ctxt)
    # Expiry behaves like a rollback: reserved is cleared, in_use kept.
    expected_usage = {'project_id': 'project1', 'user_id': 'user1',
                      'resource0': {'reserved': 0, 'in_use': 0},
                      'resource1': {'reserved': 0, 'in_use': 1},
                      'fixed_ips': {'reserved': 0, 'in_use': 2}}
    actual_usage = db.quota_usage_get_all_by_project_and_user(
        self.ctxt, 'project1', 'user1')
    self.assertEqual(expected_usage, actual_usage)
class SecurityGroupRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.security_group_rule_* methods."""

    def setUp(self):
        super(SecurityGroupRuleTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_base_values(self):
        # Default values for creating a security group.
        return {
            'name': 'fake_sec_group',
            'description': 'fake_sec_group_descr',
            'user_id': 'fake',
            'project_id': 'fake',
            'instances': []
        }

    def _get_base_rule_values(self):
        # Default values for creating a security group rule.
        return {
            'protocol': "tcp",
            'from_port': 80,
            'to_port': 8080,
            'cidr': None,
            'deleted': 0,
            'deleted_at': None,
            'grantee_group': None,
            'updated_at': None
        }

    def _create_security_group(self, values):
        # Create a security group from the base values merged with overrides.
        v = self._get_base_values()
        v.update(values)
        return db.security_group_create(self.ctxt, v)

    def _create_security_group_rule(self, values):
        # Create a security group rule from base values merged with overrides.
        v = self._get_base_rule_values()
        v.update(values)
        return db.security_group_rule_create(self.ctxt, v)

    def test_security_group_rule_create(self):
        security_group_rule = self._create_security_group_rule({})
        self.assertIsNotNone(security_group_rule['id'])
        for key, value in self._get_base_rule_values().items():
            self.assertEqual(value, security_group_rule[key])

    def test_security_group_rule_get_by_security_group(self):
        security_group = self._create_security_group({})
        security_group_rule = self._create_security_group_rule(
            {'parent_group': security_group})
        security_group_rule1 = self._create_security_group_rule(
            {'parent_group': security_group})
        found_rules = db.security_group_rule_get_by_security_group(
            self.ctxt, security_group['id'])
        self.assertEqual(len(found_rules), 2)
        rules_ids = [security_group_rule['id'], security_group_rule1['id']]
        for rule in found_rules:
            self.assertIn(rule['id'], rules_ids)

    def test_security_group_rule_get_by_security_group_grantee(self):
        security_group = self._create_security_group({})
        security_group_rule = self._create_security_group_rule(
            {'grantee_group': security_group})
        rules = db.security_group_rule_get_by_security_group_grantee(
            self.ctxt, security_group['id'])
        self.assertEqual(len(rules), 1)
        self.assertEqual(rules[0]['id'], security_group_rule['id'])

    def test_security_group_rule_destroy(self):
        # Extra groups in the DB to make sure destruction is rule-scoped;
        # the group objects themselves are not referenced by the rules.
        self._create_security_group({'name': 'fake1'})
        self._create_security_group({'name': 'fake2'})
        security_group_rule1 = self._create_security_group_rule({})
        security_group_rule2 = self._create_security_group_rule({})
        db.security_group_rule_destroy(self.ctxt, security_group_rule1['id'])
        # The destroyed rule is gone ...
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_get,
                          self.ctxt, security_group_rule1['id'])
        # ... while the sibling rule survives untouched.
        self._assertEqualObjects(db.security_group_rule_get(
                self.ctxt, security_group_rule2['id']),
                security_group_rule2, ['grantee_group'])

    def test_security_group_rule_destroy_not_found_exception(self):
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_destroy, self.ctxt, 100500)

    def test_security_group_rule_get(self):
        security_group_rule1 = (
                self._create_security_group_rule({}))
        security_group_rule2 = self._create_security_group_rule({})
        real_security_group_rule = db.security_group_rule_get(
            self.ctxt, security_group_rule1['id'])
        self._assertEqualObjects(security_group_rule1,
                                 real_security_group_rule, ['grantee_group'])

    def test_security_group_rule_get_not_found_exception(self):
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_get, self.ctxt, 100500)

    def test_security_group_rule_count_by_group(self):
        sg1 = self._create_security_group({'name': 'fake1'})
        sg2 = self._create_security_group({'name': 'fake2'})
        rules_by_group = {sg1: [], sg2: []}
        # Ten rules per group, then destroy one of sg1's rules, so the
        # per-group counts diverge (9 vs 10).
        for group in rules_by_group:
            rules = rules_by_group[group]
            for i in range(10):
                rules.append(
                    self._create_security_group_rule({'parent_group_id':
                                                        group['id']}))
        db.security_group_rule_destroy(self.ctxt,
                                       rules_by_group[sg1][0]['id'])
        counted_groups = [db.security_group_rule_count_by_group(self.ctxt,
                                                                group['id'])
                          for group in [sg1, sg2]]
        expected = [9, 10]
        self.assertEqual(counted_groups, expected)
class SecurityGroupTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.security_group_* methods."""

    def setUp(self):
        super(SecurityGroupTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_base_values(self):
        # Default values for creating a security group.
        return {
            'name': 'fake_sec_group',
            'description': 'fake_sec_group_descr',
            'user_id': 'fake',
            'project_id': 'fake',
            'instances': []
        }

    def _create_security_group(self, values):
        # Create a security group from the base values merged with overrides.
        v = self._get_base_values()
        v.update(values)
        return db.security_group_create(self.ctxt, v)

    def test_security_group_create(self):
        security_group = self._create_security_group({})
        self.assertIsNotNone(security_group['id'])
        # items() instead of the Python-2-only iteritems().
        for key, value in self._get_base_values().items():
            self.assertEqual(value, security_group[key])

    def test_security_group_destroy(self):
        security_group1 = self._create_security_group({})
        security_group2 = \
            self._create_security_group({'name': 'fake_sec_group2'})

        db.security_group_destroy(self.ctxt, security_group1['id'])
        # The destroyed group is gone; the sibling group is untouched.
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_get,
                          self.ctxt, security_group1['id'])
        self._assertEqualObjects(db.security_group_get(
                self.ctxt, security_group2['id'],
                columns_to_join=['instances']), security_group2)

    def test_security_group_get(self):
        security_group1 = self._create_security_group({})
        self._create_security_group({'name': 'fake_sec_group2'})
        real_security_group = db.security_group_get(self.ctxt,
                                              security_group1['id'],
                                              columns_to_join=['instances'])
        self._assertEqualObjects(security_group1,
                                 real_security_group)

    def test_security_group_get_no_instances(self):
        # Joining 'instances' makes them eagerly loaded; omitting the join
        # leaves the relationship unloaded, which raises after expunge.
        instance = db.instance_create(self.ctxt, {})
        sid = self._create_security_group({'instances': [instance]})['id']

        session = get_session()
        self.mox.StubOutWithMock(sqlalchemy_api, 'get_session')
        sqlalchemy_api.get_session().AndReturn(session)
        sqlalchemy_api.get_session().AndReturn(session)
        self.mox.ReplayAll()

        security_group = db.security_group_get(self.ctxt, sid,
                                               columns_to_join=['instances'])
        session.expunge(security_group)
        self.assertEqual(1, len(security_group['instances']))

        security_group = db.security_group_get(self.ctxt, sid)
        session.expunge(security_group)
        self.assertRaises(sqlalchemy_orm_exc.DetachedInstanceError,
                          getattr, security_group, 'instances')

    def test_security_group_get_not_found_exception(self):
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_get, self.ctxt, 100500)

    def test_security_group_get_by_name(self):
        security_group1 = self._create_security_group({'name': 'fake1'})
        security_group2 = self._create_security_group({'name': 'fake2'})

        real_security_group1 = db.security_group_get_by_name(
                                self.ctxt,
                                security_group1['project_id'],
                                security_group1['name'])
        real_security_group2 = db.security_group_get_by_name(
                                self.ctxt,
                                security_group2['project_id'],
                                security_group2['name'])
        self._assertEqualObjects(security_group1, real_security_group1)
        self._assertEqualObjects(security_group2, real_security_group2)

    def test_security_group_get_by_project(self):
        security_group1 = self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj1'})
        security_group2 = self._create_security_group(
                {'name': 'fake2', 'project_id': 'fake_proj2'})

        real1 = db.security_group_get_by_project(
                               self.ctxt,
                               security_group1['project_id'])
        real2 = db.security_group_get_by_project(
                               self.ctxt,
                               security_group2['project_id'])

        expected1, expected2 = [security_group1], [security_group2]
        self._assertEqualListsOfObjects(expected1, real1,
                                        ignored_keys=['instances'])
        self._assertEqualListsOfObjects(expected2, real2,
                                        ignored_keys=['instances'])

    def test_security_group_get_by_instance(self):
        instance = db.instance_create(self.ctxt, dict(host='foo'))
        values = [
            {'name': 'fake1', 'instances': [instance]},
            {'name': 'fake2', 'instances': [instance]},
            {'name': 'fake3', 'instances': []},
        ]
        security_groups = [self._create_security_group(vals)
                           for vals in values]

        real = db.security_group_get_by_instance(self.ctxt,
                                                 instance['uuid'])
        # Only the first two groups are associated with the instance.
        expected = security_groups[:2]
        self._assertEqualListsOfObjects(expected, real,
                                        ignored_keys=['instances'])

    def test_security_group_get_all(self):
        values = [
            {'name': 'fake1', 'project_id': 'fake_proj1'},
            {'name': 'fake2', 'project_id': 'fake_proj2'},
        ]
        security_groups = [self._create_security_group(vals)
                           for vals in values]

        real = db.security_group_get_all(self.ctxt)

        self._assertEqualListsOfObjects(security_groups, real,
                                        ignored_keys=['instances'])

    def test_security_group_in_use(self):
        instance = db.instance_create(self.ctxt, dict(host='foo'))
        values = [
            {'instances': [instance],
             'name': 'fake_in_use'},
            {'instances': []},
        ]

        security_groups = [self._create_security_group(vals)
                           for vals in values]

        real = []
        for security_group in security_groups:
            in_use = db.security_group_in_use(self.ctxt,
                                              security_group['id'])
            real.append(in_use)
        expected = [True, False]

        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(expected, real)

    def test_security_group_ensure_default(self):
        self.assertEqual(0, len(db.security_group_get_by_project(
                                    self.ctxt,
                                    self.ctxt.project_id)))

        db.security_group_ensure_default(self.ctxt)

        security_groups = db.security_group_get_by_project(
                            self.ctxt,
                            self.ctxt.project_id)

        self.assertEqual(1, len(security_groups))
        self.assertEqual("default", security_groups[0]["name"])

    def test_security_group_update(self):
        security_group = self._create_security_group({})
        new_values = {
                    'name': 'sec_group1',
                    'description': 'sec_group_descr1',
                    'user_id': 'fake_user1',
                    'project_id': 'fake_proj1',
        }

        updated_group = db.security_group_update(self.ctxt,
                                    security_group['id'],
                                    new_values,
                                    columns_to_join=['rules.grantee_group'])
        for key, value in new_values.items():
            self.assertEqual(updated_group[key], value)
        self.assertEqual(updated_group['rules'], [])

    def test_security_group_update_to_duplicate(self):
        # First group holds the (name, project) pair the update collides with.
        self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj1'})
        security_group2 = self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj2'})

        self.assertRaises(exception.SecurityGroupExists,
                          db.security_group_update,
                          self.ctxt, security_group2['id'],
                          {'project_id': 'fake_proj1'})
class InstanceTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.instance_* methods."""
sample_data = {
'project_id': 'project1',
'hostname': 'example.com',
'host': 'h1',
'node': 'n1',
'metadata': {'mkey1': 'mval1', 'mkey2': 'mval2'},
'system_metadata': {'smkey1': 'smval1', 'smkey2': 'smval2'},
'info_cache': {'ckey': 'cvalue'},
}
def setUp(self):
super(InstanceTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _assertEqualInstances(self, instance1, instance2):
self._assertEqualObjects(instance1, instance2,
ignored_keys=['metadata', 'system_metadata', 'info_cache'])
def _assertEqualListsOfInstances(self, list1, list2):
self._assertEqualListsOfObjects(list1, list2,
ignored_keys=['metadata', 'system_metadata', 'info_cache'])
def create_instance_with_args(self, **kwargs):
if 'context' in kwargs:
context = kwargs.pop('context')
else:
context = self.ctxt
args = self.sample_data.copy()
args.update(kwargs)
return db.instance_create(context, args)
def test_instance_create(self):
instance = self.create_instance_with_args()
self.assertTrue(uuidutils.is_uuid_like(instance['uuid']))
def test_instance_create_with_object_values(self):
values = {
'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
'access_ip_v6': netaddr.IPAddress('::1'),
}
dt_keys = ('created_at', 'deleted_at', 'updated_at',
'launched_at', 'terminated_at', 'scheduled_at')
dt = timeutils.utcnow()
dt_utc = dt.replace(tzinfo=iso8601.iso8601.Utc())
for key in dt_keys:
values[key] = dt_utc
inst = db.instance_create(self.ctxt, values)
self.assertEqual(inst['access_ip_v4'], '1.2.3.4')
self.assertEqual(inst['access_ip_v6'], '::1')
for key in dt_keys:
self.assertEqual(inst[key], dt)
def test_instance_update_with_object_values(self):
values = {
'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
'access_ip_v6': netaddr.IPAddress('::1'),
}
dt_keys = ('created_at', 'deleted_at', 'updated_at',
'launched_at', 'terminated_at', 'scheduled_at')
dt = timeutils.utcnow()
dt_utc = dt.replace(tzinfo=iso8601.iso8601.Utc())
for key in dt_keys:
values[key] = dt_utc
inst = db.instance_create(self.ctxt, {})
inst = db.instance_update(self.ctxt, inst['uuid'], values)
self.assertEqual(inst['access_ip_v4'], '1.2.3.4')
self.assertEqual(inst['access_ip_v6'], '::1')
for key in dt_keys:
self.assertEqual(inst[key], dt)
def test_instance_update_no_metadata_clobber(self):
meta = {'foo': 'bar'}
sys_meta = {'sfoo': 'sbar'}
values = {
'metadata': meta,
'system_metadata': sys_meta,
}
inst = db.instance_create(self.ctxt, {})
inst = db.instance_update(self.ctxt, inst['uuid'], values)
self.assertEqual({'foo': 'bar'}, meta)
self.assertEqual({'sfoo': 'sbar'}, sys_meta)
def test_instance_get_all_with_meta(self):
inst = self.create_instance_with_args()
for inst in db.instance_get_all(self.ctxt):
meta = utils.metadata_to_dict(inst['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(inst['system_metadata'])
self.assertEqual(sys_meta, self.sample_data['system_metadata'])
def test_instance_update(self):
instance = self.create_instance_with_args()
metadata = {'host': 'bar', 'key2': 'wuff'}
system_metadata = {'original_image_ref': 'baz'}
# Update the metadata
db.instance_update(self.ctxt, instance['uuid'], {'metadata': metadata,
'system_metadata': system_metadata})
# Retrieve the user-provided metadata to ensure it was successfully
# updated
self.assertEqual(metadata,
db.instance_metadata_get(self.ctxt, instance['uuid']))
self.assertEqual(system_metadata,
db.instance_system_metadata_get(self.ctxt, instance['uuid']))
def test_instance_update_bad_str_dates(self):
instance = self.create_instance_with_args()
values = {'created_at': '123'}
self.assertRaises(ValueError,
db.instance_update,
self.ctxt, instance['uuid'], values)
def test_instance_update_good_str_dates(self):
instance = self.create_instance_with_args()
values = {'created_at': '2011-01-31T00:00:00.0'}
actual = db.instance_update(self.ctxt, instance['uuid'], values)
expected = datetime.datetime(2011, 1, 31)
self.assertEquals(expected, actual["created_at"])
def test_create_instance_unique_hostname(self):
context1 = context.RequestContext('user1', 'p1')
context2 = context.RequestContext('user2', 'p2')
self.create_instance_with_args(hostname='h1', project_id='p1')
# With scope 'global' any duplicate should fail, be it this project:
self.flags(osapi_compute_unique_server_name_scope='global')
self.assertRaises(exception.InstanceExists,
self.create_instance_with_args,
context=context1,
hostname='h1', project_id='p3')
# or another:
self.assertRaises(exception.InstanceExists,
self.create_instance_with_args,
context=context2,
hostname='h1', project_id='p2')
# With scope 'project' a duplicate in the project should fail:
self.flags(osapi_compute_unique_server_name_scope='project')
self.assertRaises(exception.InstanceExists,
self.create_instance_with_args,
context=context1,
hostname='h1', project_id='p1')
# With scope 'project' a duplicate in a different project should work:
self.flags(osapi_compute_unique_server_name_scope='project')
self.create_instance_with_args(context=context2, hostname='h2')
self.flags(osapi_compute_unique_server_name_scope=None)
def test_instance_get_all_by_filters_with_meta(self):
inst = self.create_instance_with_args()
for inst in db.instance_get_all_by_filters(self.ctxt, {}):
meta = utils.metadata_to_dict(inst['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(inst['system_metadata'])
self.assertEqual(sys_meta, self.sample_data['system_metadata'])
def test_instance_get_all_by_filters_without_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_all_by_filters(self.ctxt, {},
columns_to_join=[])
for inst in result:
meta = utils.metadata_to_dict(inst['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(inst['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_all_by_filters(self):
instances = [self.create_instance_with_args() for i in range(3)]
filtered_instances = db.instance_get_all_by_filters(self.ctxt, {})
self._assertEqualListsOfInstances(instances, filtered_instances)
def test_instance_metadata_get_multi(self):
uuids = [self.create_instance_with_args()['uuid'] for i in range(3)]
meta = sqlalchemy_api._instance_metadata_get_multi(self.ctxt, uuids)
for row in meta:
self.assertTrue(row['instance_uuid'] in uuids)
def test_instance_metadata_get_multi_no_uuids(self):
self.mox.StubOutWithMock(query.Query, 'filter')
self.mox.ReplayAll()
sqlalchemy_api._instance_metadata_get_multi(self.ctxt, [])
def test_instance_system_system_metadata_get_multi(self):
uuids = [self.create_instance_with_args()['uuid'] for i in range(3)]
sys_meta = sqlalchemy_api._instance_system_metadata_get_multi(
self.ctxt, uuids)
for row in sys_meta:
self.assertTrue(row['instance_uuid'] in uuids)
def test_instance_system_metadata_get_multi_no_uuids(self):
self.mox.StubOutWithMock(query.Query, 'filter')
self.mox.ReplayAll()
sqlalchemy_api._instance_system_metadata_get_multi(self.ctxt, [])
def test_instance_get_all_by_filters_regex(self):
i1 = self.create_instance_with_args(display_name='test1')
i2 = self.create_instance_with_args(display_name='teeeest2')
self.create_instance_with_args(display_name='diff')
result = db.instance_get_all_by_filters(self.ctxt,
{'display_name': 't.*st.'})
self._assertEqualListsOfInstances(result, [i1, i2])
def test_instance_get_all_by_filters_exact_match(self):
instance = self.create_instance_with_args(host='host1')
self.create_instance_with_args(host='host12')
result = db.instance_get_all_by_filters(self.ctxt,
{'host': 'host1'})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_metadata(self):
instance = self.create_instance_with_args(metadata={'foo': 'bar'})
self.create_instance_with_args()
result = db.instance_get_all_by_filters(self.ctxt,
{'metadata': {'foo': 'bar'}})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_system_metadata(self):
instance = self.create_instance_with_args(
system_metadata={'foo': 'bar'})
self.create_instance_with_args()
result = db.instance_get_all_by_filters(self.ctxt,
{'system_metadata': {'foo': 'bar'}})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_unicode_value(self):
instance = self.create_instance_with_args(display_name=u'test♥')
result = db.instance_get_all_by_filters(self.ctxt,
{'display_name': u'test'})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_tags(self):
instance = self.create_instance_with_args(
metadata={'foo': 'bar'})
self.create_instance_with_args()
#For format 'tag-'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-key', 'value': 'foo'},
{'name': 'tag-value', 'value': 'bar'},
]})
self._assertEqualListsOfInstances([instance], result)
#For format 'tag:'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'bar'},
]})
self._assertEqualListsOfInstances([instance], result)
#For non-existent tag
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'barred'},
]})
self.assertEqual([], result)
#Confirm with deleted tags
db.instance_metadata_delete(self.ctxt, instance['uuid'], 'foo')
#For format 'tag-'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-key', 'value': 'foo'},
]})
self.assertEqual([], result)
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-value', 'value': 'bar'}
]})
self.assertEqual([], result)
#For format 'tag:'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'bar'},
]})
self.assertEqual([], result)
def test_instance_get_by_uuid(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'])
self._assertEqualInstances(inst, result)
def test_instance_get_by_uuid_join_empty(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=[])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_by_uuid_join_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=['metadata'])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_by_uuid_join_sys_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=['system_metadata'])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, self.sample_data['system_metadata'])
def test_instance_get_all_by_filters_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(reservation_id='b')
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt, {})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['metadata', 'system_metadata',
'deleted', 'deleted_at', 'info_cache',
'pci_devices'])
def test_instance_get_all_by_filters_deleted_and_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': True})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['metadata', 'system_metadata',
'deleted', 'deleted_at', 'info_cache',
'pci_devices'])
def test_instance_get_all_by_filters_deleted_no_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': True,
'soft_deleted': False})
self._assertEqualListsOfObjects([inst1], result,
ignored_keys=['deleted', 'deleted_at', 'metadata',
'system_metadata', 'info_cache', 'pci_devices'])
def test_instance_get_all_by_filters_alive_and_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': False,
'soft_deleted': True})
self._assertEqualListsOfInstances([inst2, inst3], result)
def test_instance_get_all_by_filters_cleaned(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(reservation_id='b')
db.instance_update(self.ctxt, inst1['uuid'], {'cleaned': 1})
result = db.instance_get_all_by_filters(self.ctxt, {})
self.assertEqual(2, len(result))
self.assertIn(inst1['uuid'], [result[0]['uuid'], result[1]['uuid']])
self.assertIn(inst2['uuid'], [result[0]['uuid'], result[1]['uuid']])
if inst1['uuid'] == result[0]['uuid']:
self.assertTrue(result[0]['cleaned'])
self.assertFalse(result[1]['cleaned'])
else:
self.assertTrue(result[1]['cleaned'])
self.assertFalse(result[0]['cleaned'])
def test_instance_get_all_by_host_and_node_no_join(self):
instance = self.create_instance_with_args()
result = db.instance_get_all_by_host_and_node(self.ctxt, 'h1', 'n1')
self.assertEqual(result[0]['uuid'], instance['uuid'])
self.assertEqual(result[0]['system_metadata'], [])
def test_instance_get_all_hung_in_rebooting(self):
# Ensure no instances are returned.
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self.assertEqual([], results)
# Ensure one rebooting instance with updated_at older than 10 seconds
# is returned.
instance = self.create_instance_with_args(task_state="rebooting",
updated_at=datetime.datetime(2000, 1, 1, 12, 0, 0))
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self._assertEqualListsOfObjects([instance], results,
ignored_keys=['task_state', 'info_cache', 'security_groups',
'metadata', 'system_metadata', 'pci_devices'])
db.instance_update(self.ctxt, instance['uuid'], {"task_state": None})
# Ensure the newly rebooted instance is not returned.
instance = self.create_instance_with_args(task_state="rebooting",
updated_at=timeutils.utcnow())
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self.assertEqual([], results)
def test_instance_update_with_expected_vm_state(self):
instance = self.create_instance_with_args(vm_state='foo')
db.instance_update(self.ctxt, instance['uuid'], {'host': 'h1',
'expected_vm_state': ('foo', 'bar')})
def test_instance_update_with_unexpected_vm_state(self):
instance = self.create_instance_with_args(vm_state='foo')
self.assertRaises(exception.UnexpectedVMStateError,
db.instance_update, self.ctxt, instance['uuid'],
{'host': 'h1', 'expected_vm_state': ('spam', 'bar')})
def test_instance_update_with_instance_uuid(self):
# test instance_update() works when an instance UUID is passed.
ctxt = context.get_admin_context()
# Create an instance with some metadata
values = {'metadata': {'host': 'foo', 'key1': 'meow'},
'system_metadata': {'original_image_ref': 'blah'}}
instance = db.instance_create(ctxt, values)
# Update the metadata
values = {'metadata': {'host': 'bar', 'key2': 'wuff'},
'system_metadata': {'original_image_ref': 'baz'}}
db.instance_update(ctxt, instance['uuid'], values)
# Retrieve the user-provided metadata to ensure it was successfully
# updated
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
self.assertEqual('bar', instance_meta['host'])
self.assertEqual('wuff', instance_meta['key2'])
self.assertNotIn('key1', instance_meta)
# Retrieve the system metadata to ensure it was successfully updated
system_meta = db.instance_system_metadata_get(ctxt, instance['uuid'])
self.assertEqual('baz', system_meta['original_image_ref'])
def test_delete_instance_metadata_on_instance_destroy(self):
ctxt = context.get_admin_context()
# Create an instance with some metadata
values = {'metadata': {'host': 'foo', 'key1': 'meow'},
'system_metadata': {'original_image_ref': 'blah'}}
instance = db.instance_create(ctxt, values)
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
self.assertEqual('foo', instance_meta['host'])
self.assertEqual('meow', instance_meta['key1'])
db.instance_destroy(ctxt, instance['uuid'])
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
# Make sure instance metadata is deleted as well
self.assertEqual({}, instance_meta)
def test_instance_update_with_and_get_original(self):
instance = self.create_instance_with_args(vm_state='building')
(old_ref, new_ref) = db.instance_update_and_get_original(self.ctxt,
instance['uuid'], {'vm_state': 'needscoffee'})
self.assertEqual('building', old_ref['vm_state'])
self.assertEqual('needscoffee', new_ref['vm_state'])
def test_instance_update_and_get_original_metadata(self):
instance = self.create_instance_with_args()
columns_to_join = ['metadata']
(old_ref, new_ref) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'vm_state': 'needscoffee'},
columns_to_join=columns_to_join)
meta = utils.metadata_to_dict(new_ref['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(new_ref['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_update_and_get_original_metadata_none_join(self):
instance = self.create_instance_with_args()
(old_ref, new_ref) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'metadata': {'mk1': 'mv3'}})
meta = utils.metadata_to_dict(new_ref['metadata'])
self.assertEqual(meta, {'mk1': 'mv3'})
def test_instance_update_unique_name(self):
context1 = context.RequestContext('user1', 'p1')
context2 = context.RequestContext('user2', 'p2')
inst1 = self.create_instance_with_args(context=context1,
project_id='p1',
hostname='fake_name1')
inst2 = self.create_instance_with_args(context=context1,
project_id='p1',
hostname='fake_name2')
inst3 = self.create_instance_with_args(context=context2,
project_id='p2',
hostname='fake_name3')
# osapi_compute_unique_server_name_scope is unset so this should work:
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_name2'})
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_name1'})
# With scope 'global' any duplicate should fail.
self.flags(osapi_compute_unique_server_name_scope='global')
self.assertRaises(exception.InstanceExists,
db.instance_update,
context1,
inst2['uuid'],
{'hostname': 'fake_name1'})
self.assertRaises(exception.InstanceExists,
db.instance_update,
context2,
inst3['uuid'],
{'hostname': 'fake_name1'})
# But we should definitely be able to update our name if we aren't
# really changing it.
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_NAME'})
# With scope 'project' a duplicate in the project should fail:
self.flags(osapi_compute_unique_server_name_scope='project')
self.assertRaises(exception.InstanceExists, db.instance_update,
context1, inst2['uuid'], {'hostname': 'fake_NAME'})
# With scope 'project' a duplicate in a different project should work:
self.flags(osapi_compute_unique_server_name_scope='project')
db.instance_update(context2, inst3['uuid'], {'hostname': 'fake_NAME'})
def _test_instance_update_updates_metadata(self, metadata_type):
instance = self.create_instance_with_args()
def set_and_check(meta):
inst = db.instance_update(self.ctxt, instance['uuid'],
{metadata_type: dict(meta)})
_meta = utils.metadata_to_dict(inst[metadata_type])
self.assertEqual(meta, _meta)
meta = {'speed': '88', 'units': 'MPH'}
set_and_check(meta)
meta['gigawatts'] = '1.21'
set_and_check(meta)
del meta['gigawatts']
set_and_check(meta)
def test_security_group_in_use(self):
instance = db.instance_create(self.ctxt, dict(host='foo'))
values = [
{'instances': [instance]},
{'instances': []},
]
def test_instance_update_updates_system_metadata(self):
# Ensure that system_metadata is updated during instance_update
self._test_instance_update_updates_metadata('system_metadata')
def test_instance_update_updates_metadata(self):
# Ensure that metadata is updated during instance_update
self._test_instance_update_updates_metadata('metadata')
def test_instance_floating_address_get_all(self):
ctxt = context.get_admin_context()
instance1 = db.instance_create(ctxt, {'host': 'h1', 'hostname': 'n1'})
instance2 = db.instance_create(ctxt, {'host': 'h2', 'hostname': 'n2'})
fixed_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_addresses = ['2.1.1.1', '2.1.1.2', '2.1.1.3']
instance_uuids = [instance1['uuid'], instance1['uuid'],
instance2['uuid']]
for fixed_addr, float_addr, instance_uuid in zip(fixed_addresses,
float_addresses,
instance_uuids):
db.fixed_ip_create(ctxt, {'address': fixed_addr,
'instance_uuid': instance_uuid})
fixed_id = db.fixed_ip_get_by_address(ctxt, fixed_addr)['id']
db.floating_ip_create(ctxt,
{'address': float_addr,
'fixed_ip_id': fixed_id})
real_float_addresses = \
db.instance_floating_address_get_all(ctxt, instance_uuids[0])
self.assertEqual(set(float_addresses[:2]), set(real_float_addresses))
real_float_addresses = \
db.instance_floating_address_get_all(ctxt, instance_uuids[2])
self.assertEqual(set([float_addresses[2]]), set(real_float_addresses))
def test_instance_stringified_ips(self):
    """Access IPs passed as netaddr objects come back as strings."""
    def _assert_string_ips(inst):
        # Both columns must be plain strings, not netaddr objects.
        self.assertTrue(isinstance(inst['access_ip_v4'], basestring))
        self.assertTrue(isinstance(inst['access_ip_v6'], basestring))

    instance = self.create_instance_with_args()
    instance = db.instance_update(
        self.ctxt, instance['uuid'],
        {'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
         'access_ip_v6': netaddr.IPAddress('::1')})
    # The value returned by the update call itself ...
    _assert_string_ips(instance)
    # ... and the value read back from the database.
    instance = db.instance_get_by_uuid(self.ctxt, instance['uuid'])
    _assert_string_ips(instance)
class InstanceMetadataTestCase(test.TestCase):

    """Tests for db.api.instance_metadata_* methods."""

    def setUp(self):
        super(InstanceMetadataTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_instance_metadata_get(self):
        """Metadata supplied at create time is returned by the get call."""
        instance = db.instance_create(self.ctxt, {'metadata':
                                                  {'key': 'value'}})
        self.assertEqual({'key': 'value'}, db.instance_metadata_get(
                         self.ctxt, instance['uuid']))

    def test_instance_metadata_delete(self):
        """Deleting one key leaves the remaining metadata intact."""
        instance = db.instance_create(self.ctxt,
                                      {'metadata': {'key': 'val',
                                                    'key1': 'val1'}})
        db.instance_metadata_delete(self.ctxt, instance['uuid'], 'key1')
        self.assertEqual({'key': 'val'}, db.instance_metadata_get(
                         self.ctxt, instance['uuid']))

    def test_instance_metadata_update(self):
        """Update merges by default and replaces when delete=True."""
        instance = db.instance_create(self.ctxt, {'host': 'h1',
                                                  'project_id': 'p1',
                                                  'metadata':
                                                  {'key': 'value'}})

        # This should add new key/value pair
        metadata = db.instance_metadata_update(
            self.ctxt, instance['uuid'],
            {'new_key': 'new_value'}, False)
        metadata = db.instance_metadata_get(self.ctxt, instance['uuid'])
        self.assertEqual(metadata, {'key': 'value', 'new_key': 'new_value'})

        # This should leave only one key/value pair
        metadata = db.instance_metadata_update(
            self.ctxt, instance['uuid'],
            {'new_key': 'new_value'}, True)
        metadata = db.instance_metadata_get(self.ctxt, instance['uuid'])
        self.assertEqual(metadata, {'new_key': 'new_value'})
class ServiceTestCase(test.TestCase, ModelsObjectComparatorMixin):

    """Tests for db.api.service_* CRUD and query methods."""

    def setUp(self):
        super(ServiceTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_base_values(self):
        # Default column values shared by every service created in these
        # tests; individual tests override only what they care about.
        return {
            'host': 'fake_host',
            'binary': 'fake_binary',
            'topic': 'fake_topic',
            'report_count': 3,
            'disabled': False
        }

    def _create_service(self, values):
        """Create a service from the base values merged with *values*."""
        v = self._get_base_values()
        v.update(values)
        return db.service_create(self.ctxt, v)

    def test_service_create(self):
        """A created service gets an id and keeps the supplied columns."""
        service = self._create_service({})
        self.assertFalse(service['id'] is None)
        for key, value in self._get_base_values().iteritems():
            self.assertEqual(value, service[key])

    def test_service_destroy(self):
        """Destroying one service must not affect another."""
        service1 = self._create_service({})
        service2 = self._create_service({'host': 'fake_host2'})

        db.service_destroy(self.ctxt, service1['id'])
        self.assertRaises(exception.ServiceNotFound,
                          db.service_get, self.ctxt, service1['id'])
        self._assertEqualObjects(db.service_get(self.ctxt, service2['id']),
                                 service2, ignored_keys=['compute_node'])

    def test_service_update(self):
        """All updatable columns are persisted by service_update()."""
        service = self._create_service({})
        new_values = {
            'host': 'fake_host1',
            'binary': 'fake_binary1',
            'topic': 'fake_topic1',
            'report_count': 4,
            'disabled': True
        }
        db.service_update(self.ctxt, service['id'], new_values)
        updated_service = db.service_get(self.ctxt, service['id'])
        for key, value in new_values.iteritems():
            self.assertEqual(value, updated_service[key])

    def test_service_update_not_found_exception(self):
        self.assertRaises(exception.ServiceNotFound,
                          db.service_update, self.ctxt, 100500, {})

    def test_service_get(self):
        service1 = self._create_service({})
        self._create_service({'host': 'some_other_fake_host'})
        real_service1 = db.service_get(self.ctxt, service1['id'])
        self._assertEqualObjects(service1, real_service1,
                                 ignored_keys=['compute_node'])

    def test_service_get_with_compute_node(self):
        """service_get() joins in the service's compute node record."""
        service = self._create_service({})
        compute_values = dict(vcpus=2, memory_mb=1024, local_gb=2048,
                              vcpus_used=0, memory_mb_used=0,
                              local_gb_used=0, free_ram_mb=1024,
                              free_disk_gb=2048, hypervisor_type="xen",
                              hypervisor_version=1, cpu_info="",
                              running_vms=0, current_workload=0,
                              service_id=service['id'])
        compute = db.compute_node_create(self.ctxt, compute_values)
        real_service = db.service_get(self.ctxt, service['id'])
        real_compute = real_service['compute_node'][0]
        self.assertEqual(compute['id'], real_compute['id'])

    def test_service_get_not_found_exception(self):
        self.assertRaises(exception.ServiceNotFound,
                          db.service_get, self.ctxt, 100500)

    def test_service_get_by_host_and_topic(self):
        service1 = self._create_service({'host': 'host1', 'topic': 'topic1'})
        self._create_service({'host': 'host2', 'topic': 'topic2'})

        real_service1 = db.service_get_by_host_and_topic(self.ctxt,
                                                         host='host1',
                                                         topic='topic1')
        self._assertEqualObjects(service1, real_service1)

    def test_service_get_all(self):
        """service_get_all() honours its disabled filter argument."""
        values = [
            {'host': 'host1', 'topic': 'topic1'},
            {'host': 'host2', 'topic': 'topic2'},
            {'disabled': True}
        ]
        services = [self._create_service(vals) for vals in values]
        disabled_services = [services[-1]]
        non_disabled_services = services[:-1]

        compares = [
            (services, db.service_get_all(self.ctxt)),
            (disabled_services, db.service_get_all(self.ctxt, True)),
            (non_disabled_services, db.service_get_all(self.ctxt, False))
        ]
        for comp in compares:
            self._assertEqualListsOfObjects(*comp)

    def test_service_get_all_by_topic(self):
        values = [
            {'host': 'host1', 'topic': 't1'},
            {'host': 'host2', 'topic': 't1'},
            {'disabled': True, 'topic': 't1'},
            {'host': 'host3', 'topic': 't2'}
        ]
        services = [self._create_service(vals) for vals in values]
        # Only enabled services with the matching topic are expected.
        expected = services[:2]
        real = db.service_get_all_by_topic(self.ctxt, 't1')
        self._assertEqualListsOfObjects(expected, real)

    def test_service_get_all_by_host(self):
        values = [
            {'host': 'host1', 'topic': 't11', 'binary': 'b11'},
            {'host': 'host1', 'topic': 't12', 'binary': 'b12'},
            {'host': 'host2', 'topic': 't1'},
            {'host': 'host3', 'topic': 't1'}
        ]
        services = [self._create_service(vals) for vals in values]

        expected = services[:2]
        real = db.service_get_all_by_host(self.ctxt, 'host1')
        self._assertEqualListsOfObjects(expected, real)

    def test_service_get_by_compute_host(self):
        values = [
            {'host': 'host1', 'topic': CONF.compute_topic},
            {'host': 'host2', 'topic': 't1'},
            {'host': 'host3', 'topic': CONF.compute_topic}
        ]
        services = [self._create_service(vals) for vals in values]

        real_service = db.service_get_by_compute_host(self.ctxt, 'host1')
        self._assertEqualObjects(services[0], real_service,
                                 ignored_keys=['compute_node'])

        self.assertRaises(exception.ComputeHostNotFound,
                          db.service_get_by_compute_host,
                          self.ctxt, 'non-exists-host')

    def test_service_get_by_compute_host_not_found(self):
        self.assertRaises(exception.ComputeHostNotFound,
                          db.service_get_by_compute_host,
                          self.ctxt, 'non-exists-host')

    def test_service_get_by_args(self):
        values = [
            {'host': 'host1', 'binary': 'a'},
            {'host': 'host2', 'binary': 'b'}
        ]
        services = [self._create_service(vals) for vals in values]

        service1 = db.service_get_by_args(self.ctxt, 'host1', 'a')
        self._assertEqualObjects(services[0], service1)

        service2 = db.service_get_by_args(self.ctxt, 'host2', 'b')
        self._assertEqualObjects(services[1], service2)

    def test_service_get_by_args_not_found_exception(self):
        self.assertRaises(exception.HostBinaryNotFound,
                          db.service_get_by_args,
                          self.ctxt, 'non-exists-host', 'a')

    def test_service_binary_exists_exception(self):
        """Same host+binary with a different topic must be rejected."""
        db.service_create(self.ctxt, self._get_base_values())
        values = self._get_base_values()
        values.update({'topic': 'top1'})
        self.assertRaises(exception.ServiceBinaryExists, db.service_create,
                          self.ctxt, values)

    def test_service_topic_exists_exceptions(self):
        """Same host+topic with a different binary must be rejected."""
        db.service_create(self.ctxt, self._get_base_values())
        values = self._get_base_values()
        values.update({'binary': 'bin1'})
        self.assertRaises(exception.ServiceTopicExists, db.service_create,
                          self.ctxt, values)
class BaseInstanceTypeTestCase(test.TestCase, ModelsObjectComparatorMixin):

    """Shared fixture for instance-type (flavor) test cases."""

    def setUp(self):
        super(BaseInstanceTypeTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # Non-admin context; subclasses use it to check that private
        # (is_public=False) flavors are hidden from regular users.
        self.user_ctxt = context.RequestContext('user', 'user')

    def _get_base_values(self):
        # Default flavor columns; individual tests override as needed.
        return {
            'name': 'fake_name',
            'memory_mb': 512,
            'vcpus': 1,
            'root_gb': 10,
            'ephemeral_gb': 10,
            'flavorid': 'fake_flavor',
            'swap': 0,
            'rxtx_factor': 0.5,
            'vcpu_weight': 1,
            'disabled': False,
            'is_public': True
        }

    def _create_inst_type(self, values):
        """Create a flavor from the base values merged with *values*."""
        v = self._get_base_values()
        v.update(values)
        return db.flavor_create(self.ctxt, v)
class InstanceActionTestCase(test.TestCase, ModelsObjectComparatorMixin):

    """Tests for db.api instance-action and action-event methods."""

    # Columns set by the DB layer itself; excluded from value comparisons.
    IGNORED_FIELDS = [
        'id',
        'created_at',
        'updated_at',
        'deleted_at',
        'deleted'
    ]

    def setUp(self):
        super(InstanceActionTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _create_action_values(self, uuid, action='run_instance', ctxt=None):
        # Build the values dict accepted by db.action_start().
        if ctxt is None:
            ctxt = self.ctxt
        return {
            'action': action,
            'instance_uuid': uuid,
            'request_id': ctxt.request_id,
            'user_id': ctxt.user_id,
            'project_id': ctxt.project_id,
            'start_time': timeutils.utcnow(),
            'message': 'action-message'
        }

    def _create_event_values(self, uuid, event='schedule',
                             ctxt=None, extra=None):
        # Build the values dict accepted by db.action_event_start();
        # *extra* entries extend/override the defaults.
        if ctxt is None:
            ctxt = self.ctxt
        values = {
            'event': event,
            'instance_uuid': uuid,
            'request_id': ctxt.request_id,
            'start_time': timeutils.utcnow()
        }
        if extra is not None:
            values.update(extra)
        return values

    def _assertActionSaved(self, action, uuid):
        """Retrieve the action to ensure it was successfully added."""
        actions = db.actions_get(self.ctxt, uuid)
        self.assertEqual(1, len(actions))
        self._assertEqualObjects(action, actions[0])

    def _assertActionEventSaved(self, event, action_id):
        # Retrieve the event to ensure it was successfully added
        events = db.action_events_get(self.ctxt, action_id)
        self.assertEqual(1, len(events))
        self._assertEqualObjects(event, events[0],
                                 ['instance_uuid', 'request_id'])

    def test_instance_action_start(self):
        """Create an instance action."""
        uuid = str(stdlib_uuid.uuid4())

        action_values = self._create_action_values(uuid)
        action = db.action_start(self.ctxt, action_values)

        ignored_keys = self.IGNORED_FIELDS + ['finish_time']
        self._assertEqualObjects(action_values, action, ignored_keys)

        self._assertActionSaved(action, uuid)

    def test_instance_action_finish(self):
        """Finishing a started action persists its finish_time."""
        uuid = str(stdlib_uuid.uuid4())

        action_values = self._create_action_values(uuid)
        db.action_start(self.ctxt, action_values)

        action_values['finish_time'] = timeutils.utcnow()
        action = db.action_finish(self.ctxt, action_values)
        self._assertEqualObjects(action_values, action, self.IGNORED_FIELDS)

        self._assertActionSaved(action, uuid)

    def test_instance_action_finish_without_started_event(self):
        """Finishing an action that was never started must fail."""
        uuid = str(stdlib_uuid.uuid4())

        action_values = self._create_action_values(uuid)
        action_values['finish_time'] = timeutils.utcnow()
        self.assertRaises(exception.InstanceActionNotFound, db.action_finish,
                          self.ctxt, action_values)

    def test_instance_actions_get_by_instance(self):
        """Ensure we can get actions by UUID."""
        uuid1 = str(stdlib_uuid.uuid4())

        expected = []
        action_values = self._create_action_values(uuid1)
        action = db.action_start(self.ctxt, action_values)
        expected.append(action)

        action_values['action'] = 'resize'
        action = db.action_start(self.ctxt, action_values)
        expected.append(action)

        # Create some extra actions on a second instance; these must not
        # appear in the lookup for uuid1.
        uuid2 = str(stdlib_uuid.uuid4())
        ctxt2 = context.get_admin_context()
        action_values = self._create_action_values(uuid2, 'reboot', ctxt2)
        db.action_start(ctxt2, action_values)
        db.action_start(ctxt2, action_values)

        # Retrieve the action to ensure it was successfully added
        actions = db.actions_get(self.ctxt, uuid1)
        self._assertEqualListsOfObjects(expected, actions)

    def test_instance_action_get_by_instance_and_action(self):
        """Ensure we can get an action by instance UUID and action id."""
        ctxt2 = context.get_admin_context()
        uuid1 = str(stdlib_uuid.uuid4())
        uuid2 = str(stdlib_uuid.uuid4())

        action_values = self._create_action_values(uuid1)
        db.action_start(self.ctxt, action_values)
        action_values['action'] = 'resize'
        db.action_start(self.ctxt, action_values)

        action_values = self._create_action_values(uuid2, 'reboot', ctxt2)
        db.action_start(ctxt2, action_values)
        db.action_start(ctxt2, action_values)

        actions = db.actions_get(self.ctxt, uuid1)
        request_id = actions[0]['request_id']
        action = db.action_get_by_request_id(self.ctxt, uuid1, request_id)
        self.assertEqual('run_instance', action['action'])
        self.assertEqual(self.ctxt.request_id, action['request_id'])

    def test_instance_action_event_start(self):
        """Create an instance action event."""
        uuid = str(stdlib_uuid.uuid4())

        action_values = self._create_action_values(uuid)
        action = db.action_start(self.ctxt, action_values)

        event_values = self._create_event_values(uuid)
        event = db.action_event_start(self.ctxt, event_values)
        event_values['action_id'] = action['id']
        ignored = self.IGNORED_FIELDS + ['finish_time', 'traceback', 'result']
        self._assertEqualObjects(event_values, event, ignored)

        self._assertActionEventSaved(event, action['id'])

    def test_instance_action_event_start_without_action(self):
        """Starting an event for a non-existent action must fail."""
        uuid = str(stdlib_uuid.uuid4())

        event_values = self._create_event_values(uuid)
        self.assertRaises(exception.InstanceActionNotFound,
                          db.action_event_start, self.ctxt, event_values)

    def test_instance_action_event_finish_without_started_event(self):
        """Finishing an event that was never started must fail."""
        uuid = str(stdlib_uuid.uuid4())

        db.action_start(self.ctxt, self._create_action_values(uuid))

        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        self.assertRaises(exception.InstanceActionEventNotFound,
                          db.action_event_finish, self.ctxt, event_values)

    def test_instance_action_event_finish_without_action(self):
        """Finishing an event whose action does not exist must fail."""
        uuid = str(stdlib_uuid.uuid4())

        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        self.assertRaises(exception.InstanceActionNotFound,
                          db.action_event_finish, self.ctxt, event_values)

    def test_instance_action_event_finish_success(self):
        """Finish an instance action event."""
        uuid = str(stdlib_uuid.uuid4())

        action = db.action_start(self.ctxt, self._create_action_values(uuid))

        db.action_event_start(self.ctxt, self._create_event_values(uuid))

        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_finish(self.ctxt, event_values)

        self._assertActionEventSaved(event, action['id'])
        action = db.action_get_by_request_id(self.ctxt, uuid,
                                             self.ctxt.request_id)
        # A successful event must not flag the parent action as errored.
        self.assertNotEqual('Error', action['message'])

    def test_instance_action_event_finish_error(self):
        """Finish an instance action event with an error."""
        uuid = str(stdlib_uuid.uuid4())

        action = db.action_start(self.ctxt, self._create_action_values(uuid))

        db.action_event_start(self.ctxt, self._create_event_values(uuid))

        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Error'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_finish(self.ctxt, event_values)

        self._assertActionEventSaved(event, action['id'])
        action = db.action_get_by_request_id(self.ctxt, uuid,
                                             self.ctxt.request_id)
        # An errored event must mark the parent action's message.
        self.assertEqual('Error', action['message'])

    def test_instance_action_and_event_start_string_time(self):
        """Create an instance action and event with a string start_time."""
        uuid = str(stdlib_uuid.uuid4())

        action = db.action_start(self.ctxt, self._create_action_values(uuid))

        event_values = {'start_time': timeutils.strtime(timeutils.utcnow())}
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_start(self.ctxt, event_values)

        self._assertActionEventSaved(event, action['id'])

    def test_instance_action_event_get_by_id(self):
        """Get a specific instance action event."""
        ctxt2 = context.get_admin_context()
        uuid1 = str(stdlib_uuid.uuid4())
        uuid2 = str(stdlib_uuid.uuid4())

        action = db.action_start(self.ctxt,
                                 self._create_action_values(uuid1))

        db.action_start(ctxt2,
                        self._create_action_values(uuid2, 'reboot', ctxt2))

        event = db.action_event_start(self.ctxt,
                                      self._create_event_values(uuid1))

        event_values = self._create_event_values(uuid2, 'reboot', ctxt2)
        db.action_event_start(ctxt2, event_values)

        # Retrieve the event to ensure it was successfully added
        saved_event = db.action_event_get_by_id(self.ctxt,
                                                action['id'],
                                                event['id'])
        self._assertEqualObjects(event, saved_event,
                                 ['instance_uuid', 'request_id'])
class InstanceFaultTestCase(test.TestCase, ModelsObjectComparatorMixin):

    """Tests for db.api.instance_fault_* methods."""

    def setUp(self):
        super(InstanceFaultTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _create_fault_values(self, uuid, code=404):
        # Minimal values dict accepted by db.instance_fault_create().
        return {
            'message': 'message',
            'details': 'detail',
            'instance_uuid': uuid,
            'code': code,
            'host': 'localhost'
        }

    def test_instance_fault_create(self):
        """Ensure we can create an instance fault."""
        uuid = str(stdlib_uuid.uuid4())

        # Ensure no faults registered for this instance
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        self.assertEqual(0, len(faults[uuid]))

        # Create a fault
        fault_values = self._create_fault_values(uuid)
        fault = db.instance_fault_create(self.ctxt, fault_values)

        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(fault_values, fault, ignored_keys)

        # Retrieve the fault to ensure it was successfully added
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        self.assertEqual(1, len(faults[uuid]))
        self._assertEqualObjects(fault, faults[uuid][0])

    def test_instance_fault_get_by_instance(self):
        """Ensure we can retrieve faults for instance."""
        uuids = [str(stdlib_uuid.uuid4()), str(stdlib_uuid.uuid4())]
        fault_codes = [404, 500]
        expected = {}

        # Create faults
        for uuid in uuids:
            expected[uuid] = []
            for code in fault_codes:
                fault_values = self._create_fault_values(uuid, code)
                fault = db.instance_fault_create(self.ctxt, fault_values)
                expected[uuid].append(fault)

        # Ensure faults are saved
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, uuids)
        self.assertEqual(len(expected), len(faults))
        for uuid in uuids:
            self._assertEqualListsOfObjects(expected[uuid], faults[uuid])

    def test_instance_faults_get_by_instance_uuids_no_faults(self):
        """An instance with no faults maps to an empty list."""
        uuid = str(stdlib_uuid.uuid4())
        # An empty list (not None) is expected when no faults exist.
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        expected = {uuid: []}
        self.assertEqual(expected, faults)

    def test_instance_faults_get_by_instance_uuids_no_uuids(self):
        # Stubbing out Query.filter ensures the call short-circuits and
        # never issues a real DB filter when given no UUIDs.
        self.mox.StubOutWithMock(query.Query, 'filter')
        self.mox.ReplayAll()
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [])
        self.assertEqual({}, faults)
class InstanceTypeTestCase(BaseInstanceTypeTestCase):

    """Tests for db.api flavor (instance type) CRUD and query methods.

    Fix included: ``test_flavor_get_all_limit_sort``'s inner helper
    previously ignored its ``sort_key`` argument and hard-coded
    ``sort_key='name'`` / ``item['name']``, so only ordering by name was
    ever verified despite the loop over sixteen columns.  The helper now
    honours the requested column.
    """

    def test_flavor_create(self):
        """A created flavor gets an id and keeps the supplied columns."""
        inst_type = self._create_inst_type({})
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'extra_specs']

        self.assertFalse(inst_type['id'] is None)
        self._assertEqualObjects(inst_type, self._get_base_values(),
                                 ignored_keys)

    def test_instance_type_destroy(self):
        """Destroy removes the flavor and its extra specs, nothing else."""
        specs1 = {'a': '1', 'b': '2'}
        inst_type1 = self._create_inst_type({'name': 'name1', 'flavorid': 'a1',
                                             'extra_specs': specs1})
        specs2 = {'c': '4', 'd': '3'}
        inst_type2 = self._create_inst_type({'name': 'name2', 'flavorid': 'a2',
                                             'extra_specs': specs2})

        db.flavor_destroy(self.ctxt, 'name1')

        self.assertRaises(exception.InstanceTypeNotFound,
                          db.flavor_get, self.ctxt, inst_type1['id'])
        # The destroyed flavor's extra specs must be gone too.
        real_specs1 = db.flavor_extra_specs_get(self.ctxt,
                                                inst_type1['flavorid'])
        self._assertEqualObjects(real_specs1, {})
        # The other flavor is untouched.
        r_inst_type2 = db.flavor_get(self.ctxt, inst_type2['id'])
        self._assertEqualObjects(inst_type2, r_inst_type2, 'extra_specs')

    def test_instance_type_destroy_not_found(self):
        self.assertRaises(exception.InstanceTypeNotFound,
                          db.flavor_destroy, self.ctxt, 'nonexists')

    def test_flavor_create_duplicate_name(self):
        self._create_inst_type({})
        self.assertRaises(exception.InstanceTypeExists,
                          self._create_inst_type,
                          {'flavorid': 'some_random_flavor'})

    def test_flavor_create_duplicate_flavorid(self):
        self._create_inst_type({})
        self.assertRaises(exception.InstanceTypeIdExists,
                          self._create_inst_type,
                          {'name': 'some_random_name'})

    def test_flavor_create_with_extra_specs(self):
        extra_specs = dict(a='abc', b='def', c='ghi')
        inst_type = self._create_inst_type({'extra_specs': extra_specs})
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'extra_specs']

        self._assertEqualObjects(inst_type, self._get_base_values(),
                                 ignored_keys)
        self._assertEqualObjects(extra_specs, inst_type['extra_specs'])

    def test_instance_type_get_all(self):
        """flavor_get_all() applies every filter, alone and combined."""
        # NOTE(boris-42): Remove base instance types
        for it in db.flavor_get_all(self.ctxt):
            db.flavor_destroy(self.ctxt, it['name'])

        instance_types = [
            {'root_gb': 600, 'memory_mb': 100, 'disabled': True,
             'is_public': True, 'name': 'a1', 'flavorid': 'f1'},
            {'root_gb': 500, 'memory_mb': 200, 'disabled': True,
             'is_public': True, 'name': 'a2', 'flavorid': 'f2'},
            {'root_gb': 400, 'memory_mb': 300, 'disabled': False,
             'is_public': True, 'name': 'a3', 'flavorid': 'f3'},
            {'root_gb': 300, 'memory_mb': 400, 'disabled': False,
             'is_public': False, 'name': 'a4', 'flavorid': 'f4'},
            {'root_gb': 200, 'memory_mb': 500, 'disabled': True,
             'is_public': False, 'name': 'a5', 'flavorid': 'f5'},
            {'root_gb': 100, 'memory_mb': 600, 'disabled': True,
             'is_public': False, 'name': 'a6', 'flavorid': 'f6'}
        ]
        instance_types = [self._create_inst_type(it) for it in instance_types]

        # Python-side predicates mirroring the DB-side filter semantics.
        lambda_filters = {
            'min_memory_mb': lambda it, v: it['memory_mb'] >= v,
            'min_root_gb': lambda it, v: it['root_gb'] >= v,
            'disabled': lambda it, v: it['disabled'] == v,
            'is_public': lambda it, v: (v is None or it['is_public'] == v)
        }

        mem_filts = [{'min_memory_mb': x} for x in [100, 350, 550, 650]]
        root_filts = [{'min_root_gb': x} for x in [100, 350, 550, 650]]
        disabled_filts = [{'disabled': x} for x in [True, False]]
        is_public_filts = [{'is_public': x} for x in [True, False, None]]

        def assert_multi_filter_instance_type_get(filters=None):
            if filters is None:
                filters = {}

            expected_it = instance_types
            for name, value in filters.iteritems():
                filt = lambda it: lambda_filters[name](it, value)
                expected_it = filter(filt, expected_it)

            real_it = db.flavor_get_all(self.ctxt, filters=filters)
            self._assertEqualListsOfObjects(expected_it, real_it)

        # no filter
        assert_multi_filter_instance_type_get()

        # test only with one filter
        for filt in mem_filts:
            assert_multi_filter_instance_type_get(filt)
        for filt in root_filts:
            assert_multi_filter_instance_type_get(filt)
        for filt in disabled_filts:
            assert_multi_filter_instance_type_get(filt)
        for filt in is_public_filts:
            assert_multi_filter_instance_type_get(filt)

        # test all filters together
        for mem in mem_filts:
            for root in root_filts:
                for disabled in disabled_filts:
                    for is_public in is_public_filts:
                        filts = [f.items() for f in
                                 [mem, root, disabled, is_public]]
                        filts = dict(reduce(lambda x, y: x + y, filts, []))
                        assert_multi_filter_instance_type_get(filts)

    def test_flavor_get_all_limit_sort(self):
        """flavor_get_all() sorts correctly on every column, both ways."""
        def assert_sorted_by_key_dir(sort_key, asc=True):
            sort_dir = 'asc' if asc else 'desc'
            # FIX: previously hard-coded sort_key='name', silently ignoring
            # the requested column; now the argument is actually used.
            results = db.flavor_get_all(self.ctxt, sort_key=sort_key,
                                        sort_dir=sort_dir)
            # Manually sort the results as we would expect them
            expected_results = sorted(results,
                                      key=lambda item: item[sort_key],
                                      reverse=(not asc))
            self.assertEqual(expected_results, results)

        def assert_sorted_by_key_both_dir(sort_key):
            assert_sorted_by_key_dir(sort_key, True)
            assert_sorted_by_key_dir(sort_key, False)

        for attr in ['memory_mb', 'root_gb', 'deleted_at', 'name', 'deleted',
                     'created_at', 'ephemeral_gb', 'updated_at', 'disabled',
                     'vcpus', 'swap', 'rxtx_factor', 'is_public', 'flavorid',
                     'vcpu_weight', 'id']:
            assert_sorted_by_key_both_dir(attr)

    def test_flavor_get_all_limit(self):
        limited_flavors = db.flavor_get_all(self.ctxt, limit=2)
        self.assertEqual(2, len(limited_flavors))

    def test_flavor_get_all_list_marker(self):
        all_flavors = db.flavor_get_all(self.ctxt)

        # Set the 3rd result as the marker
        marker_flavorid = all_flavors[2]['flavorid']
        marked_flavors = db.flavor_get_all(self.ctxt, marker=marker_flavorid)
        # We expect everything /after/ the 3rd result
        expected_results = all_flavors[3:]
        self.assertEqual(expected_results, marked_flavors)

    def test_instance_type_get(self):
        inst_types = [{'name': 'abc', 'flavorid': '123'},
                      {'name': 'def', 'flavorid': '456'},
                      {'name': 'ghi', 'flavorid': '789'}]
        inst_types = [self._create_inst_type(t) for t in inst_types]

        for inst_type in inst_types:
            inst_type_by_id = db.flavor_get(self.ctxt, inst_type['id'])
            self._assertEqualObjects(inst_type, inst_type_by_id)

    def test_instance_type_get_non_public(self):
        """Private flavors are hidden until the project is granted access."""
        inst_type = self._create_inst_type({'name': 'abc', 'flavorid': '123',
                                            'is_public': False})

        # Admin can see it
        inst_type_by_id = db.flavor_get(self.ctxt, inst_type['id'])
        self._assertEqualObjects(inst_type, inst_type_by_id)

        # Regular user can not
        self.assertRaises(exception.InstanceTypeNotFound, db.flavor_get,
                          self.user_ctxt, inst_type['id'])

        # Regular user can see it after being granted access
        db.flavor_access_add(self.ctxt, inst_type['flavorid'],
                             self.user_ctxt.project_id)
        inst_type_by_id = db.flavor_get(self.user_ctxt, inst_type['id'])
        self._assertEqualObjects(inst_type, inst_type_by_id)

    def test_instance_type_get_by_name(self):
        inst_types = [{'name': 'abc', 'flavorid': '123'},
                      {'name': 'def', 'flavorid': '456'},
                      {'name': 'ghi', 'flavorid': '789'}]
        inst_types = [self._create_inst_type(t) for t in inst_types]

        for inst_type in inst_types:
            inst_type_by_name = db.flavor_get_by_name(self.ctxt,
                                                      inst_type['name'])
            self._assertEqualObjects(inst_type, inst_type_by_name)

    def test_instance_type_get_by_name_not_found(self):
        self._create_inst_type({})
        self.assertRaises(exception.InstanceTypeNotFoundByName,
                          db.flavor_get_by_name, self.ctxt, 'nonexists')

    def test_instance_type_get_by_name_non_public(self):
        """Same access rules as flavor_get(), but keyed by name."""
        inst_type = self._create_inst_type({'name': 'abc', 'flavorid': '123',
                                            'is_public': False})

        # Admin can see it
        inst_type_by_name = db.flavor_get_by_name(self.ctxt,
                                                  inst_type['name'])
        self._assertEqualObjects(inst_type, inst_type_by_name)

        # Regular user can not
        self.assertRaises(exception.InstanceTypeNotFoundByName,
                          db.flavor_get_by_name, self.user_ctxt,
                          inst_type['name'])

        # Regular user can see it after being granted access
        db.flavor_access_add(self.ctxt, inst_type['flavorid'],
                             self.user_ctxt.project_id)
        inst_type_by_name = db.flavor_get_by_name(self.user_ctxt,
                                                  inst_type['name'])
        self._assertEqualObjects(inst_type, inst_type_by_name)

    def test_instance_type_get_by_flavor_id(self):
        inst_types = [{'name': 'abc', 'flavorid': '123'},
                      {'name': 'def', 'flavorid': '456'},
                      {'name': 'ghi', 'flavorid': '789'}]
        inst_types = [self._create_inst_type(t) for t in inst_types]

        for inst_type in inst_types:
            params = (self.ctxt, inst_type['flavorid'])
            inst_type_by_flavorid = db.flavor_get_by_flavor_id(*params)
            self._assertEqualObjects(inst_type, inst_type_by_flavorid)

    def test_instance_type_get_by_flavor_not_found(self):
        self._create_inst_type({})
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_get_by_flavor_id,
                          self.ctxt, 'nonexists')

    def test_instance_type_get_by_flavor_id_non_public(self):
        """Same access rules as flavor_get(), but keyed by flavorid."""
        inst_type = self._create_inst_type({'name': 'abc', 'flavorid': '123',
                                            'is_public': False})

        # Admin can see it
        inst_type_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
                                                      inst_type['flavorid'])
        self._assertEqualObjects(inst_type, inst_type_by_fid)

        # Regular user can not
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_get_by_flavor_id, self.user_ctxt,
                          inst_type['flavorid'])

        # Regular user can see it after being granted access
        db.flavor_access_add(self.ctxt, inst_type['flavorid'],
                             self.user_ctxt.project_id)
        inst_type_by_fid = db.flavor_get_by_flavor_id(self.user_ctxt,
                                                      inst_type['flavorid'])
        self._assertEqualObjects(inst_type, inst_type_by_fid)

    def test_instance_type_get_by_flavor_id_deleted(self):
        """read_deleted='yes' exposes a destroyed flavor."""
        inst_type = self._create_inst_type({'name': 'abc', 'flavorid': '123'})

        db.flavor_destroy(self.ctxt, 'abc')

        inst_type_by_fid = db.flavor_get_by_flavor_id(
            self.ctxt, inst_type['flavorid'], read_deleted='yes')
        self.assertEqual(inst_type['id'], inst_type_by_fid['id'])
class InstanceTypeExtraSpecsTestCase(BaseInstanceTypeTestCase):

    """Tests for db.api.flavor_extra_specs_* methods."""

    def setUp(self):
        super(InstanceTypeExtraSpecsTestCase, self).setUp()
        values = ({'name': 'n1', 'flavorid': 'f1',
                   'extra_specs': dict(a='a', b='b', c='c')},
                  {'name': 'n2', 'flavorid': 'f2',
                   'extra_specs': dict(d='d', e='e', f='f')})

        # NOTE(boris-42): We have already tested flavor_create method
        #                 with extra_specs in InstanceTypeTestCase.
        self.inst_types = [self._create_inst_type(v) for v in values]

    def test_instance_type_extra_specs_get(self):
        """Stored extra specs round-trip through the get call."""
        for it in self.inst_types:
            real_specs = db.flavor_extra_specs_get(self.ctxt,
                                                   it['flavorid'])
            self._assertEqualObjects(it['extra_specs'], real_specs)

    def test_instance_type_extra_specs_get_item(self):
        """Individual spec keys can be fetched one at a time."""
        expected = dict(f1=dict(a='a', b='b', c='c'),
                        f2=dict(d='d', e='e', f='f'))

        for flavor, specs in expected.iteritems():
            for key, val in specs.iteritems():
                spec = db.flavor_extra_specs_get_item(self.ctxt, flavor,
                                                      key)
                self.assertEqual(spec[key], val)

    def test_instance_type_extra_specs_delete(self):
        """Deleting one spec key removes exactly that key."""
        for it in self.inst_types:
            specs = it['extra_specs']
            key = specs.keys()[0]
            del specs[key]
            db.flavor_extra_specs_delete(self.ctxt, it['flavorid'], key)
            real_specs = db.flavor_extra_specs_get(self.ctxt,
                                                   it['flavorid'])
            self._assertEqualObjects(it['extra_specs'], real_specs)

    def test_instance_type_extra_specs_delete_failed(self):
        """Deleting a non-existent spec key must raise."""
        for it in self.inst_types:
            self.assertRaises(exception.InstanceTypeExtraSpecsNotFound,
                              db.flavor_extra_specs_delete,
                              self.ctxt, it['flavorid'], 'dummy')

    def test_instance_type_extra_specs_update_or_create(self):
        """Update-or-create merges new keys with existing specs."""
        for it in self.inst_types:
            current_specs = it['extra_specs']
            current_specs.update(dict(b='b1', c='c1', d='d1', e='e1'))
            params = (self.ctxt, it['flavorid'], current_specs)
            db.flavor_extra_specs_update_or_create(*params)
            real_specs = db.flavor_extra_specs_get(self.ctxt,
                                                   it['flavorid'])
            self._assertEqualObjects(current_specs, real_specs)

    def test_instance_type_extra_specs_update_or_create_flavor_not_found(self):
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_extra_specs_update_or_create,
                          self.ctxt, 'nonexists', {})

    def test_instance_type_extra_specs_update_or_create_retry(self):
        # Force repeated DBDuplicateEntry to verify the retry loop gives
        # up (re-raises) after the requested number of attempts (5 here).
        def counted():
            def get_id(context, flavorid, session):
                get_id.counter += 1
                raise db_exc.DBDuplicateEntry
            get_id.counter = 0
            return get_id

        get_id = counted()
        self.stubs.Set(sqlalchemy_api,
                       '_instance_type_get_id_from_flavor', get_id)
        self.assertRaises(db_exc.DBDuplicateEntry, sqlalchemy_api.
                          flavor_extra_specs_update_or_create,
                          self.ctxt, 1, {}, 5)
        self.assertEqual(get_id.counter, 5)
class InstanceTypeAccessTestCase(BaseInstanceTypeTestCase):
def _create_inst_type_access(self, instance_type_id, project_id):
    """Grant *project_id* access to the given flavor and return the row."""
    return db.flavor_access_add(self.ctxt, instance_type_id,
                                project_id)
def test_instance_type_access_get_by_flavor_id(self):
    """Access records are returned grouped under their own flavor id."""
    flavor1 = self._create_inst_type({'name': 'n1', 'flavorid': 'f1'})
    flavor2 = self._create_inst_type({'name': 'n2', 'flavorid': 'f2'})

    expected1 = [self._create_inst_type_access(flavor1['flavorid'], 'pr1'),
                 self._create_inst_type_access(flavor1['flavorid'], 'pr2')]
    expected2 = [self._create_inst_type_access(flavor2['flavorid'], 'pr1')]

    for flavor, expected in ((flavor1, expected1), (flavor2, expected2)):
        actual = db.flavor_access_get_by_flavor_id(self.ctxt,
                                                   flavor['flavorid'])
        self._assertEqualListsOfObjects(expected, actual)
def test_instance_type_access_get_by_flavor_id_flavor_not_found(self):
    # NOTE(review): despite the test name, this calls
    # db.flavor_get_by_flavor_id rather than
    # db.flavor_access_get_by_flavor_id, so the access-lookup error path
    # may not be covered at all — verify which API should be under test.
    self.assertRaises(exception.FlavorNotFound,
                      db.flavor_get_by_flavor_id,
                      self.ctxt, 'nonexists')
def test_instance_type_access_add(self):
    """A new access row links the flavor's id to the project."""
    inst_type = self._create_inst_type({'flavorid': 'f1'})
    project_id = 'p1'

    access = self._create_inst_type_access(inst_type['flavorid'],
                                           project_id)
    # NOTE(boris-42): Check that instance_type_access_add doesn't fail and
    #                 returns correct value. This is enough because other
    #                 logic is checked by other methods.
    self.assertFalse(access['id'] is None)
    self.assertEqual(access['instance_type_id'], inst_type['id'])
    self.assertEqual(access['project_id'], project_id)
def test_instance_type_access_add_to_non_existing_flavor(self):
self.assertRaises(exception.FlavorNotFound,
self._create_inst_type_access,
'nonexists', 'does_not_matter')
def test_instance_type_access_add_duplicate_project_id_flavor(self):
inst_type = self._create_inst_type({'flavorid': 'f1'})
params = (inst_type['flavorid'], 'p1')
self._create_inst_type_access(*params)
self.assertRaises(exception.FlavorAccessExists,
self._create_inst_type_access, *params)
def test_instance_type_access_remove(self):
inst_types = ({'name': 'n1', 'flavorid': 'f1'},
{'name': 'n2', 'flavorid': 'f2'})
it1, it2 = tuple((self._create_inst_type(v) for v in inst_types))
access_it1 = [self._create_inst_type_access(it1['flavorid'], 'pr1'),
self._create_inst_type_access(it1['flavorid'], 'pr2')]
access_it2 = [self._create_inst_type_access(it2['flavorid'], 'pr1')]
db.flavor_access_remove(self.ctxt, it1['flavorid'],
access_it1[1]['project_id'])
for it, access_it in zip((it1, it2), (access_it1[:1], access_it2)):
params = (self.ctxt, it['flavorid'])
real_access_it = db.flavor_access_get_by_flavor_id(*params)
self._assertEqualListsOfObjects(access_it, real_access_it)
def test_instance_type_access_remove_flavor_not_found(self):
self.assertRaises(exception.FlavorNotFound,
db.flavor_access_remove,
self.ctxt, 'nonexists', 'does_not_matter')
def test_instance_type_access_remove_access_not_found(self):
inst_type = self._create_inst_type({'flavorid': 'f1'})
params = (inst_type['flavorid'], 'p1')
self._create_inst_type_access(*params)
self.assertRaises(exception.FlavorAccessNotFound,
db.flavor_access_remove,
self.ctxt, inst_type['flavorid'], 'p2')
def test_instance_type_access_removed_after_instance_type_destroy(self):
inst_type1 = self._create_inst_type({'flavorid': 'f1', 'name': 'n1'})
inst_type2 = self._create_inst_type({'flavorid': 'f2', 'name': 'n2'})
values = [
(inst_type1['flavorid'], 'p1'),
(inst_type1['flavorid'], 'p2'),
(inst_type2['flavorid'], 'p3')
]
for v in values:
self._create_inst_type_access(*v)
db.flavor_destroy(self.ctxt, inst_type1['name'])
p = (self.ctxt, inst_type1['flavorid'])
self.assertEqual(0, len(db.flavor_access_get_by_flavor_id(*p)))
p = (self.ctxt, inst_type2['flavorid'])
self.assertEqual(1, len(db.flavor_access_get_by_flavor_id(*p)))
db.flavor_destroy(self.ctxt, inst_type2['name'])
self.assertEqual(0, len(db.flavor_access_get_by_flavor_id(*p)))
class FixedIPTestCase(BaseInstanceTypeTestCase):
    """Tests for the fixed IP DB API methods."""

    def _timeout_test(self, ctxt, timeout, multi_host):
        """Create one instance/network plus four fixed IPs around *timeout*.

        Only the first IP (unallocated, on the network, older than the
        timeout) is eligible for timeout deallocation.
        """
        instance = db.instance_create(ctxt, dict(host='foo'))
        net = db.network_create_safe(ctxt, dict(multi_host=multi_host,
                                                host='bar'))
        old = timeout - datetime.timedelta(seconds=5)
        new = timeout + datetime.timedelta(seconds=5)

        # should deallocate
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=net['id'],
                                      updated_at=old))
        # still allocated
        db.fixed_ip_create(ctxt, dict(allocated=True,
                                      instance_uuid=instance['uuid'],
                                      network_id=net['id'],
                                      updated_at=old))
        # wrong network
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=None,
                                      updated_at=old))
        # too new
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=None,
                                      updated_at=new))

    def mock_db_query_first_to_raise_data_error_exception(self):
        """Stub Query.first() so the next call raises sqlalchemy DataError."""
        self.mox.StubOutWithMock(query.Query, 'first')
        query.Query.first().AndRaise(exc.DataError(mox.IgnoreArg(),
                                                   mox.IgnoreArg(),
                                                   mox.IgnoreArg()))
        self.mox.ReplayAll()

    def test_fixed_ip_disassociate_all_by_timeout_single_host(self):
        now = timeutils.utcnow()
        self._timeout_test(self.ctxt, now, False)
        # Single-host networks deallocate on the network's host ('bar'),
        # not the instance's host ('foo').
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'foo', now)
        self.assertEqual(result, 0)
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'bar', now)
        self.assertEqual(result, 1)

    def test_fixed_ip_disassociate_all_by_timeout_multi_host(self):
        now = timeutils.utcnow()
        self._timeout_test(self.ctxt, now, True)
        # Multi-host networks deallocate on the instance's host ('foo').
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'foo', now)
        self.assertEqual(result, 1)
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'bar', now)
        self.assertEqual(result, 0)

    def test_fixed_ip_get_by_floating_address(self):
        fixed_ip = db.fixed_ip_create(self.ctxt, {'address': '192.168.0.2'})
        values = {'address': '8.7.6.5',
                  'fixed_ip_id': fixed_ip['id']}
        floating = db.floating_ip_create(self.ctxt, values)['address']
        fixed_ip_ref = db.fixed_ip_get_by_floating_address(self.ctxt, floating)
        self._assertEqualObjects(fixed_ip, fixed_ip_ref)

    def test_fixed_ip_get_by_host(self):
        host_ips = {
            'host1': ['1.1.1.1', '1.1.1.2', '1.1.1.3'],
            'host2': ['1.1.1.4', '1.1.1.5'],
            'host3': ['1.1.1.6']
        }

        for host, ips in host_ips.items():
            for ip in ips:
                instance_uuid = self._create_instance(host=host)
                db.fixed_ip_create(self.ctxt, {'address': ip})
                db.fixed_ip_associate(self.ctxt, ip, instance_uuid)

        for host, ips in host_ips.items():
            ips_on_host = [fixed_ip['address'] for fixed_ip in
                           db.fixed_ip_get_by_host(self.ctxt, host)]
            self._assertEqualListsOfPrimitivesAsSets(ips_on_host, ips)

    def test_fixed_ip_get_by_network_host_not_found_exception(self):
        self.assertRaises(
            exception.FixedIpNotFoundForNetworkHost,
            db.fixed_ip_get_by_network_host,
            self.ctxt, 1, 'ignore')

    def test_fixed_ip_get_by_network_host_fixed_ip_found(self):
        db.fixed_ip_create(self.ctxt, dict(network_id=1, host='host'))

        fip = db.fixed_ip_get_by_network_host(self.ctxt, 1, 'host')

        self.assertEqual(1, fip['network_id'])
        self.assertEqual('host', fip['host'])

    def _create_instance(self, **kwargs):
        """Create an instance with the given attributes; return its uuid."""
        instance = db.instance_create(self.ctxt, kwargs)
        return instance['uuid']

    def test_fixed_ip_get_by_instance_fixed_ip_found(self):
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets([FIXED_IP_ADDRESS],
                                                 [ips_list[0].address])

    def test_fixed_ip_get_by_instance_multiple_fixed_ips_found(self):
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_2))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ip_get_by_instance_inappropriate_ignored(self):
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_2))

        # An IP belonging to a different instance must not be returned.
        another_instance = db.instance_create(self.ctxt, {})
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=another_instance['uuid'], address="192.168.1.7"))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ip_get_by_instance_not_found_exception(self):
        instance_uuid = self._create_instance()

        self.assertRaises(exception.FixedIpNotFoundForInstance,
                          db.fixed_ip_get_by_instance,
                          self.ctxt, instance_uuid)

    def test_fixed_ips_by_virtual_interface_fixed_ip_found(self):
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets([FIXED_IP_ADDRESS],
                                                 [ips_list[0].address])

    def test_fixed_ips_by_virtual_interface_multiple_fixed_ips_found(self):
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_2))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ips_by_virtual_interface_inappropriate_ignored(self):
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_2))

        # An IP on a different virtual interface must not be returned.
        another_vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=another_vif.id, address="192.168.1.7"))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ips_by_virtual_interface_no_ip_found(self):
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self.assertEqual(0, len(ips_list))

    def create_fixed_ip(self, **params):
        """Create a fixed IP (default address 192.168.0.1); return address."""
        default_params = {'address': '192.168.0.1'}
        default_params.update(params)
        return db.fixed_ip_create(self.ctxt, default_params)['address']

    def test_fixed_ip_associate_fails_if_ip_not_in_network(self):
        instance_uuid = self._create_instance()
        self.assertRaises(exception.FixedIpNotFoundForNetwork,
                          db.fixed_ip_associate,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_fails_if_ip_in_use(self):
        instance_uuid = self._create_instance()

        address = self.create_fixed_ip(instance_uuid=instance_uuid)
        self.assertRaises(exception.FixedIpAlreadyInUse,
                          db.fixed_ip_associate,
                          self.ctxt, address, instance_uuid)

    def test_fixed_ip_associate_succeeds(self):
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip(network_id=network['id'])
        db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                              network_id=network['id'])
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)

    def test_fixed_ip_associate_succeeds_and_sets_network(self):
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        # The IP starts without a network; associate should set it.
        address = self.create_fixed_ip()
        db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                              network_id=network['id'])
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)
        self.assertEqual(fixed_ip['network_id'], network['id'])

    def test_fixed_ip_associate_pool_invalid_uuid(self):
        instance_uuid = '123'
        self.assertRaises(exception.InvalidUUID, db.fixed_ip_associate_pool,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_pool_no_more_fixed_ips(self):
        instance_uuid = self._create_instance()
        self.assertRaises(exception.NoMoreFixedIps, db.fixed_ip_associate_pool,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_pool_succeeds(self):
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip(network_id=network['id'])
        db.fixed_ip_associate_pool(self.ctxt, network['id'], instance_uuid)
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)

    def test_fixed_ip_create_same_address(self):
        address = '192.168.1.5'
        params = {'address': address}
        db.fixed_ip_create(self.ctxt, params)
        self.assertRaises(exception.FixedIpExists, db.fixed_ip_create,
                          self.ctxt, params)

    def test_fixed_ip_create_success(self):
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': '192.168.1.5',
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }

        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        fixed_ip_data = db.fixed_ip_create(self.ctxt, param)
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)

    def test_fixed_ip_bulk_create_same_address(self):
        address_1 = '192.168.1.5'
        address_2 = '192.168.1.6'
        instance_uuid = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        params = [
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_2, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_1, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': 'localhost', 'address': address_2, 'allocated': True,
             'instance_uuid': instance_uuid, 'network_id': network_id_2,
             'virtual_interface_id': None},
        ]

        self.assertRaises(exception.FixedIpExists, db.fixed_ip_bulk_create,
                          self.ctxt, params)
        # In this case the transaction will be rolled back and none of the ips
        # will make it to the database.
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address, self.ctxt, address_1)
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address, self.ctxt, address_2)

    def test_fixed_ip_bulk_create_success(self):
        address_1 = '192.168.1.5'
        address_2 = '192.168.1.6'

        instance_uuid = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        params = [
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_1, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': 'localhost', 'address': address_2, 'allocated': True,
             'instance_uuid': instance_uuid, 'network_id': network_id_2,
             'virtual_interface_id': None}
        ]

        db.fixed_ip_bulk_create(self.ctxt, params)
        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        fixed_ip_data = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)

        # we have no `id` in incoming data so we can not use
        # _assertEqualListsOfObjects to compare incoming data and received
        # objects
        fixed_ip_data = sorted(fixed_ip_data, key=lambda i: i['network_id'])
        params = sorted(params, key=lambda i: i['network_id'])
        for param, ip in zip(params, fixed_ip_data):
            self._assertEqualObjects(param, ip, ignored_keys)

    def test_fixed_ip_disassociate(self):
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        db.fixed_ip_create(self.ctxt, param)

        db.fixed_ip_disassociate(self.ctxt, address)
        fixed_ip_data = db.fixed_ip_get_by_address(self.ctxt, address)
        ignored_keys = ['created_at', 'id', 'deleted_at',
                        'updated_at', 'instance_uuid']
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)
        self.assertIsNone(fixed_ip_data['instance_uuid'])

    def test_fixed_ip_get_not_found_exception(self):
        self.assertRaises(exception.FixedIpNotFound,
                          db.fixed_ip_get, self.ctxt, 0)

    def test_fixed_ip_get_success2(self):
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        fixed_ip_id = db.fixed_ip_create(self.ctxt, param)

        # Non-admin contexts are rejected by fixed_ip_get.
        self.ctxt.is_admin = False
        self.assertRaises(exception.NotAuthorized, db.fixed_ip_get,
                          self.ctxt, fixed_ip_id)

    def test_fixed_ip_get_success(self):
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        db.fixed_ip_create(self.ctxt, param)

        fixed_ip_id = db.fixed_ip_get_by_address(self.ctxt, address)['id']
        fixed_ip_data = db.fixed_ip_get(self.ctxt, fixed_ip_id)
        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)

    def test_fixed_ip_get_by_address_detailed_not_found_exception(self):
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address_detailed, self.ctxt,
                          '192.168.1.5')

    def test_fixed_ip_get_by_address_with_data_error_exception(self):
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.FixedIpInvalid,
                          db.fixed_ip_get_by_address_detailed, self.ctxt,
                          '192.168.1.6')

    def test_fixed_ip_get_by_address_detailed_sucsess(self):
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        db.fixed_ip_create(self.ctxt, param)

        fixed_ip_data = db.fixed_ip_get_by_address_detailed(self.ctxt, address)
        # fixed ip check here
        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        self._assertEqualObjects(param, fixed_ip_data[0], ignored_keys)

        # network model check here
        network_data = db.network_get(self.ctxt, network_id)
        self._assertEqualObjects(network_data, fixed_ip_data[1])

        # Instance check here
        instance_data = db.instance_get_by_uuid(self.ctxt, instance_uuid)
        # NOTE(review): these relationship keys differ between the two
        # fetch paths, so they are excluded from the comparison — the exact
        # reason is unclear from here; TODO confirm against the DB API.
        ignored_keys = ['info_cache', 'system_metadata',
                        'security_groups', 'metadata',
                        'pci_devices']
        self._assertEqualObjects(instance_data, fixed_ip_data[2], ignored_keys)

    def test_fixed_ip_update_not_found_for_address(self):
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_update, self.ctxt,
                          '192.168.1.5', {})

    def test_fixed_ip_update(self):
        instance_uuid_1 = self._create_instance()
        instance_uuid_2 = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        param_1 = {
            'reserved': True, 'deleted': 0, 'leased': True,
            'host': '192.168.133.1', 'address': '10.0.0.2',
            'allocated': True, 'instance_uuid': instance_uuid_1,
            'network_id': network_id_1, 'virtual_interface_id': '123',
        }

        param_2 = {
            'reserved': False, 'deleted': 0, 'leased': False,
            'host': '127.0.0.1', 'address': '10.0.0.3', 'allocated': False,
            'instance_uuid': instance_uuid_2, 'network_id': network_id_2,
            'virtual_interface_id': None
        }

        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        fixed_ip_addr = db.fixed_ip_create(self.ctxt, param_1)['address']
        db.fixed_ip_update(self.ctxt, fixed_ip_addr, param_2)
        fixed_ip_after_update = db.fixed_ip_get_by_address(self.ctxt,
                                                           param_2['address'])
        self._assertEqualObjects(param_2, fixed_ip_after_update, ignored_keys)
class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the floating IP DB API methods."""

    def setUp(self):
        super(FloatingIpTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_base_values(self):
        """Return default attribute values for a floating IP row."""
        return {
            'address': '1.1.1.1',
            'fixed_ip_id': None,
            'project_id': 'fake_project',
            'host': 'fake_host',
            'auto_assigned': False,
            'pool': 'fake_pool',
            'interface': 'fake_interface',
        }

    def mock_db_query_first_to_raise_data_error_exception(self):
        """Stub Query.first() so the next call raises sqlalchemy DataError."""
        self.mox.StubOutWithMock(query.Query, 'first')
        query.Query.first().AndRaise(exc.DataError(mox.IgnoreArg(),
                                                   mox.IgnoreArg(),
                                                   mox.IgnoreArg()))
        self.mox.ReplayAll()

    def _create_floating_ip(self, values):
        """Create a floating IP from base values overridden by *values*."""
        if not values:
            values = {}
        vals = self._get_base_values()
        vals.update(values)
        return db.floating_ip_create(self.ctxt, vals)

    def test_floating_ip_get(self):
        values = [{'address': '0.0.0.0'}, {'address': '1.1.1.1'}]
        floating_ips = [self._create_floating_ip(val) for val in values]

        for floating_ip in floating_ips:
            real_floating_ip = db.floating_ip_get(self.ctxt, floating_ip['id'])
            self._assertEqualObjects(floating_ip, real_floating_ip,
                                     ignored_keys=['fixed_ip'])

    def test_floating_ip_get_not_found(self):
        self.assertRaises(exception.FloatingIpNotFound,
                          db.floating_ip_get, self.ctxt, 100500)

    def test_floating_ip_get_with_long_id_not_found(self):
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.InvalidID,
                          db.floating_ip_get, self.ctxt, 123456789101112)

    def test_floating_ip_get_pools(self):
        values = [
            {'address': '0.0.0.0', 'pool': 'abc'},
            {'address': '1.1.1.1', 'pool': 'abc'},
            {'address': '2.2.2.2', 'pool': 'def'},
            {'address': '3.3.3.3', 'pool': 'ghi'},
        ]
        for val in values:
            self._create_floating_ip(val)
        # Each distinct pool name should be reported exactly once.
        expected_pools = [{'name': pool_name}
                          for pool_name in set(val['pool'] for val in values)]
        real_pools = db.floating_ip_get_pools(self.ctxt)
        self._assertEqualListsOfPrimitivesAsSets(real_pools, expected_pools)

    def test_floating_ip_allocate_address(self):
        pools = {
            'pool1': ['0.0.0.0', '1.1.1.1'],
            'pool2': ['2.2.2.2'],
            'pool3': ['3.3.3.3', '4.4.4.4', '5.5.5.5']
        }
        for pool, addresses in pools.items():
            for address in addresses:
                vals = {'pool': pool, 'address': address, 'project_id': None}
                self._create_floating_ip(vals)

        project_id = self._get_base_values()['project_id']
        for pool, addresses in pools.items():
            alloc_addrs = []
            for i in addresses:
                float_addr = db.floating_ip_allocate_address(self.ctxt,
                                                             project_id, pool)
                alloc_addrs.append(float_addr)
            # Allocation must hand out each address of the pool exactly once.
            self._assertEqualListsOfPrimitivesAsSets(alloc_addrs, addresses)

    def test_floating_ip_allocate_auto_assigned(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']

        float_ips = []
        for i in range(0, 2):
            float_ips.append(self._create_floating_ip(
                {"address": addresses[i]}))
        for i in range(2, 4):
            float_ips.append(self._create_floating_ip({"address": addresses[i],
                                                       "auto_assigned": True}))

        for i in range(0, 2):
            float_ip = db.floating_ip_get(self.ctxt, float_ips[i].id)
            self.assertFalse(float_ip.auto_assigned)
        for i in range(2, 4):
            float_ip = db.floating_ip_get(self.ctxt, float_ips[i].id)
            self.assertTrue(float_ip.auto_assigned)

    def test_floating_ip_allocate_address_no_more_floating_ips(self):
        self.assertRaises(exception.NoMoreFloatingIps,
                          db.floating_ip_allocate_address,
                          self.ctxt, 'any_project_id', 'no_such_pool')

    def test_floating_ip_allocate_not_authorized(self):
        ctxt = context.RequestContext(user_id='a', project_id='abc',
                                      is_admin=False)
        self.assertRaises(exception.NotAuthorized,
                          db.floating_ip_allocate_address,
                          ctxt, 'other_project_id', 'any_pool')

    def _get_existing_ips(self):
        """Return the addresses of all floating IPs currently stored."""
        return [ip['address'] for ip in db.floating_ip_get_all(self.ctxt)]

    def test_floating_ip_bulk_create(self):
        expected_ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
        db.floating_ip_bulk_create(self.ctxt,
                                   [{'address': ip} for ip in expected_ips])
        self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
                                                 expected_ips)

    def test_floating_ip_bulk_create_duplicate(self):
        ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
        db.floating_ip_bulk_create(self.ctxt,
                                   [{'address': ip} for ip in ips])
        self.assertRaises(exception.FloatingIpExists,
                          db.floating_ip_bulk_create,
                          self.ctxt,
                          [{'address': ip} for ip in ['1.1.1.5', '1.1.1.4']])
        # The failed bulk create must not have stored the new address.
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          db.floating_ip_get_by_address,
                          self.ctxt, '1.1.1.5')

    def test_floating_ip_bulk_destroy(self):
        ips_for_delete = []
        ips_for_non_delete = []

        def create_ips(i):
            return [{'address': '1.1.%s.%s' % (i, k)} for k in range(1, 256)]

        # NOTE(boris-42): Create more then 256 ip to check that
        #                 _ip_range_splitter works properly.
        for i in range(1, 3):
            ips_for_delete.extend(create_ips(i))
        ips_for_non_delete.extend(create_ips(3))

        db.floating_ip_bulk_create(self.ctxt,
                                   ips_for_delete + ips_for_non_delete)
        db.floating_ip_bulk_destroy(self.ctxt, ips_for_delete)

        expected_addresses = [ip['address'] for ip in ips_for_non_delete]
        self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
                                                 expected_addresses)

    def test_floating_ip_create(self):
        floating_ip = self._create_floating_ip({})
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']

        self.assertIsNotNone(floating_ip['id'])
        self._assertEqualObjects(floating_ip, self._get_base_values(),
                                 ignored_keys)

    def test_floating_ip_create_duplicate(self):
        self._create_floating_ip({})
        self.assertRaises(exception.FloatingIpExists,
                          self._create_floating_ip, {})

    def _create_fixed_ip(self, params):
        """Create a fixed IP (default address 192.168.0.1); return address."""
        default_params = {'address': '192.168.0.1'}
        default_params.update(params)
        return db.fixed_ip_create(self.ctxt, default_params)['address']

    def test_floating_ip_fixed_ip_associate(self):
        float_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        fixed_addresses = ['2.2.2.1', '2.2.2.2', '2.2.2.3']

        float_ips = [self._create_floating_ip({'address': address})
                     for address in float_addresses]
        fixed_addrs = [self._create_fixed_ip({'address': address})
                       for address in fixed_addresses]

        for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
            fixed_ip = db.floating_ip_fixed_ip_associate(self.ctxt,
                                                         float_ip.address,
                                                         fixed_addr, 'host')
            self.assertEqual(fixed_ip.address, fixed_addr)

            updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
            self.assertEqual(fixed_ip.id, updated_float_ip.fixed_ip_id)
            self.assertEqual('host', updated_float_ip.host)

        # Test that already allocated float_ip returns None
        result = db.floating_ip_fixed_ip_associate(self.ctxt,
                                                   float_addresses[0],
                                                   fixed_addresses[0], 'host')
        self.assertIsNone(result)

    def test_floating_ip_fixed_ip_associate_float_ip_not_found(self):
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          db.floating_ip_fixed_ip_associate,
                          self.ctxt, '10.10.10.10', 'some', 'some')

    def test_floating_ip_deallocate(self):
        values = {'address': '1.1.1.1', 'project_id': 'fake', 'host': 'fake'}
        float_ip = self._create_floating_ip(values)
        db.floating_ip_deallocate(self.ctxt, float_ip.address)

        updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
        self.assertIsNone(updated_float_ip.project_id)
        self.assertIsNone(updated_float_ip.host)
        self.assertFalse(updated_float_ip.auto_assigned)

    def test_floating_ip_destroy(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        float_ips = [self._create_floating_ip({'address': addr})
                     for addr in addresses]

        expected_len = len(addresses)
        for float_ip in float_ips:
            db.floating_ip_destroy(self.ctxt, float_ip.address)
            self.assertRaises(exception.FloatingIpNotFound,
                              db.floating_ip_get, self.ctxt, float_ip.id)
            expected_len -= 1
            if expected_len > 0:
                self.assertEqual(expected_len,
                                 len(db.floating_ip_get_all(self.ctxt)))
            else:
                self.assertRaises(exception.NoFloatingIpsDefined,
                                  db.floating_ip_get_all, self.ctxt)

    def test_floating_ip_disassociate(self):
        float_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        fixed_addresses = ['2.2.2.1', '2.2.2.2', '2.2.2.3']

        float_ips = [self._create_floating_ip({'address': address})
                     for address in float_addresses]
        fixed_addrs = [self._create_fixed_ip({'address': address})
                       for address in fixed_addresses]

        for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
            db.floating_ip_fixed_ip_associate(self.ctxt,
                                              float_ip.address,
                                              fixed_addr, 'host')

        for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
            fixed = db.floating_ip_disassociate(self.ctxt, float_ip.address)
            self.assertEqual(fixed.address, fixed_addr)
            updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
            self.assertIsNone(updated_float_ip.fixed_ip_id)
            self.assertIsNone(updated_float_ip.host)

    def test_floating_ip_disassociate_not_found(self):
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          db.floating_ip_disassociate, self.ctxt,
                          '11.11.11.11')

    def test_floating_ip_set_auto_assigned(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        float_ips = [self._create_floating_ip({'address': addr,
                                               'auto_assigned': False})
                     for addr in addresses]

        for i in range(2):
            db.floating_ip_set_auto_assigned(self.ctxt, float_ips[i].address)
        for i in range(2):
            float_ip = db.floating_ip_get(self.ctxt, float_ips[i].id)
            self.assertTrue(float_ip.auto_assigned)

        # The third IP was not updated and must stay unassigned.
        float_ip = db.floating_ip_get(self.ctxt, float_ips[2].id)
        self.assertFalse(float_ip.auto_assigned)

    def test_floating_ip_get_all(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        float_ips = [self._create_floating_ip({'address': addr})
                     for addr in addresses]
        self._assertEqualListsOfObjects(float_ips,
                                        db.floating_ip_get_all(self.ctxt))

    def test_floating_ip_get_all_not_found(self):
        self.assertRaises(exception.NoFloatingIpsDefined,
                          db.floating_ip_get_all, self.ctxt)

    def test_floating_ip_get_all_by_host(self):
        hosts = {
            'host1': ['1.1.1.1', '1.1.1.2'],
            'host2': ['2.1.1.1', '2.1.1.2'],
            'host3': ['3.1.1.1', '3.1.1.2', '3.1.1.3']
        }

        hosts_with_float_ips = {}
        for host, addresses in hosts.items():
            hosts_with_float_ips[host] = []
            for address in addresses:
                float_ip = self._create_floating_ip({'host': host,
                                                     'address': address})
                hosts_with_float_ips[host].append(float_ip)

        for host, float_ips in hosts_with_float_ips.items():
            real_float_ips = db.floating_ip_get_all_by_host(self.ctxt, host)
            self._assertEqualListsOfObjects(float_ips, real_float_ips)

    def test_floating_ip_get_all_by_host_not_found(self):
        self.assertRaises(exception.FloatingIpNotFoundForHost,
                          db.floating_ip_get_all_by_host,
                          self.ctxt, 'non_exists_host')

    def test_floating_ip_get_all_by_project(self):
        projects = {
            'pr1': ['1.1.1.1', '1.1.1.2'],
            'pr2': ['2.1.1.1', '2.1.1.2'],
            'pr3': ['3.1.1.1', '3.1.1.2', '3.1.1.3']
        }

        projects_with_float_ips = {}
        for project_id, addresses in projects.items():
            projects_with_float_ips[project_id] = []
            for address in addresses:
                float_ip = self._create_floating_ip({'project_id': project_id,
                                                     'address': address})
                projects_with_float_ips[project_id].append(float_ip)

        for project_id, float_ips in projects_with_float_ips.items():
            real_float_ips = db.floating_ip_get_all_by_project(self.ctxt,
                                                               project_id)
            self._assertEqualListsOfObjects(float_ips, real_float_ips,
                                            ignored_keys='fixed_ip')

    def test_floating_ip_get_all_by_project_not_authorized(self):
        ctxt = context.RequestContext(user_id='a', project_id='abc',
                                      is_admin=False)
        self.assertRaises(exception.NotAuthorized,
                          db.floating_ip_get_all_by_project,
                          ctxt, 'other_project')

    def test_floating_ip_get_by_address(self):
        addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
        float_ips = [self._create_floating_ip({'address': addr})
                     for addr in addresses]

        for float_ip in float_ips:
            real_float_ip = db.floating_ip_get_by_address(self.ctxt,
                                                          float_ip.address)
            self._assertEqualObjects(float_ip, real_float_ip,
                                     ignored_keys='fixed_ip')

    def test_floating_ip_get_by_address_not_found(self):
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          db.floating_ip_get_by_address,
                          self.ctxt, '20.20.20.20')

    def test_floating_ip_get_by_invalid_address(self):
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.InvalidIpAddressError,
                          db.floating_ip_get_by_address,
                          self.ctxt, 'non_exists_host')

    def test_floating_ip_get_by_fixed_address(self):
        fixed_float = [
            ('1.1.1.1', '2.2.2.1'),
            ('1.1.1.2', '2.2.2.2'),
            ('1.1.1.3', '2.2.2.3')
        ]

        for fixed_addr, float_addr in fixed_float:
            self._create_floating_ip({'address': float_addr})
            self._create_fixed_ip({'address': fixed_addr})
            db.floating_ip_fixed_ip_associate(self.ctxt, float_addr,
                                              fixed_addr, 'some_host')

        for fixed_addr, float_addr in fixed_float:
            float_ip = db.floating_ip_get_by_fixed_address(self.ctxt,
                                                           fixed_addr)
            self.assertEqual(float_addr, float_ip[0]['address'])

    def test_floating_ip_get_by_fixed_ip_id(self):
        fixed_float = [
            ('1.1.1.1', '2.2.2.1'),
            ('1.1.1.2', '2.2.2.2'),
            ('1.1.1.3', '2.2.2.3')
        ]

        for fixed_addr, float_addr in fixed_float:
            self._create_floating_ip({'address': float_addr})
            self._create_fixed_ip({'address': fixed_addr})
            db.floating_ip_fixed_ip_associate(self.ctxt, float_addr,
                                              fixed_addr, 'some_host')

        for fixed_addr, float_addr in fixed_float:
            fixed_ip = db.fixed_ip_get_by_address(self.ctxt, fixed_addr)
            float_ip = db.floating_ip_get_by_fixed_ip_id(self.ctxt,
                                                         fixed_ip['id'])
            self.assertEqual(float_addr, float_ip[0]['address'])

    def test_floating_ip_update(self):
        float_ip = self._create_floating_ip({})

        values = {
            'project_id': 'some_pr',
            'host': 'some_host',
            'auto_assigned': True,
            'interface': 'some_interface',
            'pool': 'some_pool'
        }
        db.floating_ip_update(self.ctxt, float_ip['address'], values)
        updated_float_ip = db.floating_ip_get(self.ctxt, float_ip['id'])
        self._assertEqualObjects(updated_float_ip, values,
                                 ignored_keys=['id', 'address', 'updated_at',
                                               'deleted_at', 'created_at',
                                               'deleted', 'fixed_ip_id',
                                               'fixed_ip'])

    def test_floating_ip_update_to_duplicate(self):
        float_ip1 = self._create_floating_ip({'address': '1.1.1.1'})
        float_ip2 = self._create_floating_ip({'address': '1.1.1.2'})

        self.assertRaises(exception.FloatingIpExists,
                          db.floating_ip_update,
                          self.ctxt, float_ip2['address'],
                          {'address': float_ip1['address']})
class InstanceDestroyConstraints(test.TestCase):
    """Tests for conditional instance destruction via db.constraint."""

    def test_destroy_with_equal_any_constraint_met(self):
        """Destroy succeeds when task_state matches an equal_any value."""
        admin_ctx = context.get_admin_context()
        inst = db.instance_create(admin_ctx, {'task_state': 'deleting'})
        db.instance_destroy(admin_ctx, inst['uuid'],
                            db.constraint(task_state=db.equal_any('deleting')))
        self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
                          admin_ctx, inst['uuid'])

    def test_destroy_with_equal_any_constraint_not_met(self):
        """Destroy fails when vm_state matches none of the allowed values."""
        admin_ctx = context.get_admin_context()
        inst = db.instance_create(admin_ctx, {'vm_state': 'resize'})
        self.assertRaises(exception.ConstraintNotMet, db.instance_destroy,
                          admin_ctx, inst['uuid'],
                          db.constraint(vm_state=db.equal_any('active',
                                                              'error')))
        # The instance must survive the failed destroy.
        survivor = db.instance_get_by_uuid(admin_ctx, inst['uuid'])
        self.assertFalse(survivor['deleted'])

    def test_destroy_with_not_equal_constraint_met(self):
        """Destroy succeeds when task_state avoids every not_equal value."""
        admin_ctx = context.get_admin_context()
        inst = db.instance_create(admin_ctx, {'task_state': 'deleting'})
        db.instance_destroy(admin_ctx, inst['uuid'],
                            db.constraint(task_state=db.not_equal('error',
                                                                  'resize')))
        self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
                          admin_ctx, inst['uuid'])

    def test_destroy_with_not_equal_constraint_not_met(self):
        """Destroy fails when vm_state equals one of the excluded values."""
        admin_ctx = context.get_admin_context()
        inst = db.instance_create(admin_ctx, {'vm_state': 'active'})
        self.assertRaises(exception.ConstraintNotMet, db.instance_destroy,
                          admin_ctx, inst['uuid'],
                          db.constraint(vm_state=db.not_equal('active',
                                                              'error')))
        # The instance must survive the failed destroy.
        survivor = db.instance_get_by_uuid(admin_ctx, inst['uuid'])
        self.assertFalse(survivor['deleted'])
class VolumeUsageDBApiTestCase(test.TestCase):
    """Tests for db.api.vol_usage_update / vol_get_usage_by_time."""

    def setUp(self):
        super(VolumeUsageDBApiTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)
        self.useFixture(test.TimeOverride())

    def test_vol_usage_update_no_totals_update(self):
        """Repeated updates without update_totals only move curr_* counters."""
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        start_time = now - datetime.timedelta(seconds=10)
        # Pin utcnow so curr_last_refreshed is predictable: one call per
        # vol_usage_update below.
        self.mox.StubOutWithMock(timeutils, 'utcnow')
        timeutils.utcnow().AndReturn(now)
        timeutils.utcnow().AndReturn(now)
        timeutils.utcnow().AndReturn(now)
        self.mox.ReplayAll()
        expected_vol_usages = [{'volume_id': u'1',
                                'instance_uuid': 'fake-instance-uuid1',
                                'project_id': 'fake-project-uuid1',
                                'user_id': 'fake-user-uuid1',
                                'curr_reads': 1000,
                                'curr_read_bytes': 2000,
                                'curr_writes': 3000,
                                'curr_write_bytes': 4000,
                                'curr_last_refreshed': now,
                                'tot_reads': 0,
                                'tot_read_bytes': 0,
                                'tot_writes': 0,
                                'tot_write_bytes': 0,
                                'tot_last_refreshed': None},
                               {'volume_id': u'2',
                                'instance_uuid': 'fake-instance-uuid2',
                                'project_id': 'fake-project-uuid2',
                                'user_id': 'fake-user-uuid2',
                                'curr_reads': 100,
                                'curr_read_bytes': 200,
                                'curr_writes': 300,
                                'curr_write_bytes': 400,
                                'tot_reads': 0,
                                'tot_read_bytes': 0,
                                'tot_writes': 0,
                                'tot_write_bytes': 0,
                                'tot_last_refreshed': None}]

        def _compare(vol_usage, expected):
            # Compare only the keys we care about.
            for key, value in expected.items():
                self.assertEqual(vol_usage[key], value)

        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)

        db.vol_usage_update(ctxt, u'1', rd_req=10, rd_bytes=20,
                            wr_req=30, wr_bytes=40,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            user_id='fake-user-uuid1',
                            availability_zone='fake-az')
        db.vol_usage_update(ctxt, u'2', rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid2',
                            project_id='fake-project-uuid2',
                            user_id='fake-user-uuid2',
                            availability_zone='fake-az')
        # A second update for volume 1 replaces its curr_* counters.
        db.vol_usage_update(ctxt, u'1', rd_req=1000, rd_bytes=2000,
                            wr_req=3000, wr_bytes=4000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            user_id='fake-user-uuid1',
                            availability_zone='fake-az')

        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 2)
        _compare(vol_usages[0], expected_vol_usages[0])
        _compare(vol_usages[1], expected_vol_usages[1])

    def test_vol_usage_update_totals_update(self):
        """update_totals=True folds curr_* into tot_* and resets curr_*."""
        ctxt = context.get_admin_context()
        now = datetime.datetime(1, 1, 1, 1, 0, 0)
        start_time = now - datetime.timedelta(seconds=10)
        # One stubbed utcnow per vol_usage_update call below.
        self.mox.StubOutWithMock(timeutils, 'utcnow')
        timeutils.utcnow().AndReturn(now)
        now1 = now + datetime.timedelta(minutes=1)
        timeutils.utcnow().AndReturn(now1)
        now2 = now + datetime.timedelta(minutes=2)
        timeutils.utcnow().AndReturn(now2)
        now3 = now + datetime.timedelta(minutes=3)
        timeutils.utcnow().AndReturn(now3)
        self.mox.ReplayAll()

        db.vol_usage_update(ctxt, u'1', rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az')
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 0)
        self.assertEqual(current_usage['curr_reads'], 100)

        db.vol_usage_update(ctxt, u'1', rd_req=200, rd_bytes=300,
                            wr_req=400, wr_bytes=500,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az',
                            update_totals=True)
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 200)
        self.assertEqual(current_usage['curr_reads'], 0)

        db.vol_usage_update(ctxt, u'1', rd_req=300, rd_bytes=400,
                            wr_req=500, wr_bytes=600,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid')
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 200)
        self.assertEqual(current_usage['curr_reads'], 300)

        db.vol_usage_update(ctxt, u'1', rd_req=400, rd_bytes=500,
                            wr_req=600, wr_bytes=700,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az',
                            update_totals=True)

        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)

        expected_vol_usages = {'volume_id': u'1',
                               'project_id': 'fake-project-uuid',
                               'user_id': 'fake-user-uuid',
                               'instance_uuid': 'fake-instance-uuid',
                               'availability_zone': 'fake-az',
                               'tot_reads': 600,
                               'tot_read_bytes': 800,
                               'tot_writes': 1000,
                               'tot_write_bytes': 1200,
                               'tot_last_refreshed': now3,
                               'curr_reads': 0,
                               'curr_read_bytes': 0,
                               'curr_writes': 0,
                               'curr_write_bytes': 0,
                               'curr_last_refreshed': now2}
        # Use assertEqual: assertEquals is a deprecated alias and the rest
        # of this file uses assertEqual.
        self.assertEqual(1, len(vol_usages))
        for key, value in expected_vol_usages.items():
            self.assertEqual(vol_usages[0][key], value, key)

    def test_vol_usage_update_when_blockdevicestats_reset(self):
        """A counter reset (reboot/crash) rolls curr_* into tot_* first."""
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        start_time = now - datetime.timedelta(seconds=10)

        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)

        db.vol_usage_update(ctxt, u'1',
                            rd_req=10000, rd_bytes=20000,
                            wr_req=30000, wr_bytes=40000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')

        # Instance rebooted or crashed. block device stats were reset and are
        # less then the previous values
        db.vol_usage_update(ctxt, u'1',
                            rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')

        db.vol_usage_update(ctxt, u'1',
                            rd_req=200, rd_bytes=300,
                            wr_req=400, wr_bytes=500,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')

        vol_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        expected_vol_usage = {'volume_id': u'1',
                              'instance_uuid': 'fake-instance-uuid1',
                              'project_id': 'fake-project-uuid1',
                              'availability_zone': 'fake-az',
                              'user_id': 'fake-user-uuid1',
                              'curr_reads': 200,
                              'curr_read_bytes': 300,
                              'curr_writes': 400,
                              'curr_write_bytes': 500,
                              'tot_reads': 10000,
                              'tot_read_bytes': 20000,
                              'tot_writes': 30000,
                              'tot_write_bytes': 40000}
        for key, value in expected_vol_usage.items():
            self.assertEqual(vol_usage[key], value, key)

    def test_vol_usage_update_totals_update_when_blockdevicestats_reset(self):
        """Counter reset combined with update_totals sums both periods."""
        # This is unlikely to happen, but could when a volume is detached
        # right after a instance has rebooted / recovered and before
        # the system polled and updated the volume usage cache table.
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        start_time = now - datetime.timedelta(seconds=10)

        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)

        db.vol_usage_update(ctxt, u'1',
                            rd_req=10000, rd_bytes=20000,
                            wr_req=30000, wr_bytes=40000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')

        # Instance rebooted or crashed. block device stats were reset and are
        # less then the previous values
        db.vol_usage_update(ctxt, u'1',
                            rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1',
                            update_totals=True)

        vol_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        expected_vol_usage = {'volume_id': u'1',
                              'instance_uuid': 'fake-instance-uuid1',
                              'project_id': 'fake-project-uuid1',
                              'availability_zone': 'fake-az',
                              'user_id': 'fake-user-uuid1',
                              'curr_reads': 0,
                              'curr_read_bytes': 0,
                              'curr_writes': 0,
                              'curr_write_bytes': 0,
                              'tot_reads': 10100,
                              'tot_read_bytes': 20200,
                              'tot_writes': 30300,
                              'tot_write_bytes': 40400}
        for key, value in expected_vol_usage.items():
            self.assertEqual(vol_usage[key], value, key)
class TaskLogTestCase(test.TestCase):
    """Tests for the db.api.task_log_* periodic-task audit records."""

    def setUp(self):
        super(TaskLogTestCase, self).setUp()
        self.context = context.get_admin_context()
        current = timeutils.utcnow()
        self.begin = current - datetime.timedelta(seconds=10)
        self.end = current - datetime.timedelta(seconds=5)
        self.task_name = 'fake-task-name'
        self.host = 'fake-host'
        self.message = 'Fake task message'
        # One running task log entry that every test can query.
        db.task_log_begin_task(self.context, self.task_name, self.begin,
                               self.end, self.host, message=self.message)

    def test_task_log_get(self):
        """The record created in setUp is retrievable field by field."""
        fetched = db.task_log_get(self.context, self.task_name, self.begin,
                                  self.end, self.host)
        expected = {'task_name': self.task_name,
                    'period_beginning': self.begin,
                    'period_ending': self.end,
                    'host': self.host,
                    'message': self.message}
        for field, value in expected.items():
            self.assertEqual(fetched[field], value)

    def test_task_log_get_all(self):
        """Filtering by state narrows the result set."""
        matches = db.task_log_get_all(self.context, self.task_name,
                                      self.begin, self.end, host=self.host)
        self.assertEqual(len(matches), 1)
        # An empty-string state matches no record.
        matches = db.task_log_get_all(self.context, self.task_name,
                                      self.begin, self.end, host=self.host,
                                      state='')
        self.assertEqual(len(matches), 0)

    def test_task_log_begin_task(self):
        """A task begun under a new name can be fetched back."""
        db.task_log_begin_task(self.context, 'fake', self.begin,
                               self.end, self.host, task_items=42,
                               message=self.message)
        fetched = db.task_log_get(self.context, 'fake', self.begin,
                                  self.end, self.host)
        self.assertEqual(fetched['task_name'], 'fake')

    def test_task_log_begin_task_duplicate(self):
        """Beginning the same task twice raises TaskAlreadyRunning."""
        args = (self.context, 'fake', self.begin, self.end, self.host)
        db.task_log_begin_task(*args, message=self.message)
        self.assertRaises(exception.TaskAlreadyRunning,
                          db.task_log_begin_task,
                          *args, message=self.message)

    def test_task_log_end_task(self):
        """Ending a task records its error count."""
        db.task_log_end_task(self.context, self.task_name, self.begin,
                             self.end, self.host, 1, message=self.message)
        fetched = db.task_log_get(self.context, self.task_name, self.begin,
                                  self.end, self.host)
        self.assertEqual(fetched['errors'], 1)

    def test_task_log_end_task_task_not_running(self):
        """Ending a task that was never begun raises TaskNotRunning."""
        self.assertRaises(exception.TaskNotRunning,
                          db.task_log_end_task, self.context, 'nonexistent',
                          self.begin, self.end, self.host, 42,
                          message=self.message)
class BlockDeviceMappingTestCase(test.TestCase):
    """Tests for db.api.block_device_mapping_* methods."""

    def setUp(self):
        super(BlockDeviceMappingTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # A single instance that most tests attach mappings to.
        self.instance = db.instance_create(self.ctxt, {})

    def _create_bdm(self, values):
        """Create a BDM for self.instance and return the stored row.

        Returns None if the created mapping cannot be found again by its
        device_name.
        """
        values.setdefault('instance_uuid', self.instance['uuid'])
        values.setdefault('device_name', 'fake_device')
        values.setdefault('source_type', 'volume')
        values.setdefault('destination_type', 'volume')
        block_dev = block_device.BlockDeviceDict(values)
        db.block_device_mapping_create(self.ctxt, block_dev, legacy=False)
        uuid = block_dev['instance_uuid']
        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        for bdm in bdms:
            if bdm['device_name'] == values['device_name']:
                return bdm

    def test_scrub_empty_str_values_no_effect(self):
        """Non-empty values are left untouched."""
        values = {'volume_size': 5}
        expected = copy.copy(values)
        sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
        self.assertEqual(values, expected)

    def test_scrub_empty_str_values_empty_string(self):
        """Empty-string values are removed."""
        values = {'volume_size': ''}
        sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
        self.assertEqual(values, {})

    def test_scrub_empty_str_values_empty_unicode(self):
        """Empty unicode values are removed as well."""
        values = {'volume_size': u''}
        sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
        self.assertEqual(values, {})

    def test_block_device_mapping_create(self):
        """A freshly created BDM is found again by the helper."""
        bdm = self._create_bdm({})
        # assertIsNotNone gives a clearer failure message than
        # assertFalse(bdm is None).
        self.assertIsNotNone(bdm)

    def test_block_device_mapping_update(self):
        """Updates are persisted and also returned from the update call."""
        bdm = self._create_bdm({})
        result = db.block_device_mapping_update(
                self.ctxt, bdm['id'], {'destination_type': 'moon'},
                legacy=False)
        uuid = bdm['instance_uuid']
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(bdm_real[0]['destination_type'], 'moon')
        # Also make sure the update call returned correct data
        self.assertEqual(dict(bdm_real[0].iteritems()),
                         dict(result.iteritems()))

    def test_block_device_mapping_update_or_create(self):
        """update_or_create updates by device_name, creates otherwise."""
        values = {
            'instance_uuid': self.instance['uuid'],
            'device_name': 'fake_name',
            'source_type': 'volume',
            'destination_type': 'volume'
        }
        # check create
        db.block_device_mapping_update_or_create(self.ctxt, values,
                                                 legacy=False)
        uuid = values['instance_uuid']
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        self.assertEqual(bdm_real[0]['device_name'], 'fake_name')

        # check update
        values['destination_type'] = 'camelot'
        db.block_device_mapping_update_or_create(self.ctxt, values,
                                                 legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        bdm_real = bdm_real[0]
        self.assertEqual(bdm_real['device_name'], 'fake_name')
        self.assertEqual(bdm_real['destination_type'], 'camelot')

        # check create without device_name
        bdm1 = dict(values)
        bdm1['device_name'] = None
        db.block_device_mapping_update_or_create(self.ctxt, bdm1, legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 2)
        bdm_real = bdm_real[1]
        # assertIsNone instead of assertEqual(..., None).
        self.assertIsNone(bdm_real['device_name'])

        # check create multiple devices without device_name
        bdm2 = dict(values)
        bdm2['device_name'] = None
        db.block_device_mapping_update_or_create(self.ctxt, bdm2, legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 3)
        bdm_real = bdm_real[2]
        self.assertIsNone(bdm_real['device_name'])

    def test_block_device_mapping_update_or_create_multiple_ephemeral(self):
        """Multiple ephemeral (blank) devices may coexist."""
        uuid = self.instance['uuid']
        values = {
            'instance_uuid': uuid,
            'source_type': 'blank',
            'guest_format': 'myformat',
        }

        bdm1 = dict(values)
        bdm1['device_name'] = '/dev/sdb'
        db.block_device_mapping_update_or_create(self.ctxt, bdm1, legacy=False)

        bdm2 = dict(values)
        bdm2['device_name'] = '/dev/sdc'
        db.block_device_mapping_update_or_create(self.ctxt, bdm2, legacy=False)

        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 2)
        for bdm, device_name in zip(bdm_real, ['/dev/sdb', '/dev/sdc']):
            self.assertEqual(bdm['device_name'], device_name)
            self.assertEqual(bdm['guest_format'], 'myformat')

    def test_block_device_mapping_update_or_create_check_remove_virt(self):
        """update_or_create replaces a previous swap device."""
        uuid = self.instance['uuid']
        values = {
            'instance_uuid': uuid,
            'source_type': 'blank',
            'destination_type': 'local',
            'guest_format': 'swap',
        }

        # check that old swap bdms are deleted on create
        val1 = dict(values)
        val1['device_name'] = 'device1'
        db.block_device_mapping_create(self.ctxt, val1, legacy=False)
        val2 = dict(values)
        val2['device_name'] = 'device2'
        db.block_device_mapping_update_or_create(self.ctxt, val2, legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        bdm_real = bdm_real[0]
        self.assertEqual(bdm_real['device_name'], 'device2')
        self.assertEqual(bdm_real['source_type'], 'blank')
        self.assertEqual(bdm_real['guest_format'], 'swap')
        db.block_device_mapping_destroy(self.ctxt, bdm_real['id'])

    def test_block_device_mapping_get_all_by_instance(self):
        """Only the requested instance's mappings are returned."""
        uuid1 = self.instance['uuid']
        uuid2 = db.instance_create(self.ctxt, {})['uuid']

        bmds_values = [{'instance_uuid': uuid1,
                        'device_name': 'first'},
                       {'instance_uuid': uuid2,
                        'device_name': 'second'},
                       {'instance_uuid': uuid2,
                        'device_name': 'third'}]

        for bdm in bmds_values:
            self._create_bdm(bdm)

        bmd = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid1)
        self.assertEqual(len(bmd), 1)
        self.assertEqual(bmd[0]['device_name'], 'first')

        bmd = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid2)
        self.assertEqual(len(bmd), 2)

    def test_block_device_mapping_destroy(self):
        """A destroyed mapping no longer shows up for its instance."""
        bdm = self._create_bdm({})
        db.block_device_mapping_destroy(self.ctxt, bdm['id'])
        bdm = db.block_device_mapping_get_all_by_instance(self.ctxt,
                                                          bdm['instance_uuid'])
        self.assertEqual(len(bdm), 0)

    def test_block_device_mapping_destroy_by_instance_and_volume(self):
        """Destroy by (instance, volume) removes only the matching BDM.

        NOTE: method name fixed from the earlier misspelled
        'destory...volumne' variant; no external callers reference it.
        """
        vol_id1 = '69f5c254-1a5b-4fff-acf7-cb369904f58f'
        vol_id2 = '69f5c254-1a5b-4fff-acf7-cb369904f59f'

        self._create_bdm({'device_name': 'fake1', 'volume_id': vol_id1})
        self._create_bdm({'device_name': 'fake2', 'volume_id': vol_id2})

        uuid = self.instance['uuid']
        db.block_device_mapping_destroy_by_instance_and_volume(self.ctxt, uuid,
                                                               vol_id1)
        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdms), 1)
        self.assertEqual(bdms[0]['device_name'], 'fake2')

    def test_block_device_mapping_destroy_by_instance_and_device(self):
        """Destroy by (instance, device) removes only the matching BDM."""
        self._create_bdm({'device_name': 'fake1'})
        self._create_bdm({'device_name': 'fake2'})

        uuid = self.instance['uuid']
        params = (self.ctxt, uuid, 'fake1')
        db.block_device_mapping_destroy_by_instance_and_device(*params)

        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdms), 1)
        self.assertEqual(bdms[0]['device_name'], 'fake2')

    def test_block_device_mapping_get_by_volume_id(self):
        """A mapping is retrievable by its volume id."""
        self._create_bdm({'volume_id': 'fake_id'})
        bdm = db.block_device_mapping_get_by_volume_id(self.ctxt, 'fake_id')
        self.assertEqual(bdm['volume_id'], 'fake_id')

    def test_block_device_mapping_get_by_volume_id_join_instance(self):
        """The columns_to_join argument loads the related instance."""
        self._create_bdm({'volume_id': 'fake_id'})
        bdm = db.block_device_mapping_get_by_volume_id(self.ctxt, 'fake_id',
                                                       ['instance'])
        self.assertEqual(bdm['volume_id'], 'fake_id')
        self.assertEqual(bdm['instance']['uuid'], self.instance['uuid'])
class AgentBuildTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.agent_build_* methods."""

    def setUp(self):
        super(AgentBuildTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_agent_build_create_and_get_all(self):
        """A created build shows up in agent_build_get_all."""
        self.assertEqual(0, len(db.agent_build_get_all(self.ctxt)))
        build = db.agent_build_create(self.ctxt, {'os': 'GNU/HURD'})
        listed = db.agent_build_get_all(self.ctxt)
        self.assertEqual(1, len(listed))
        self._assertEqualObjects(build, listed[0])

    def test_agent_build_get_by_triple(self):
        """Lookup by (hypervisor, os, architecture) matches exactly."""
        build = db.agent_build_create(self.ctxt, {'hypervisor': 'kvm',
                                                  'os': 'FreeBSD',
                                                  'architecture': 'x86_64'})
        self.assertIsNone(db.agent_build_get_by_triple(self.ctxt, 'kvm',
                                                       'FreeBSD', 'i386'))
        self._assertEqualObjects(build, db.agent_build_get_by_triple(
            self.ctxt, 'kvm', 'FreeBSD', 'x86_64'))

    def test_agent_build_destroy(self):
        """Destroying the only build empties the table."""
        build = db.agent_build_create(self.ctxt, {})
        self.assertEqual(1, len(db.agent_build_get_all(self.ctxt)))
        db.agent_build_destroy(self.ctxt, build.id)
        self.assertEqual(0, len(db.agent_build_get_all(self.ctxt)))

    def test_agent_build_update(self):
        """agent_build_update rewrites the stored fields."""
        build = db.agent_build_create(self.ctxt, {'os': 'HaikuOS'})
        db.agent_build_update(self.ctxt, build.id, {'os': 'ReactOS'})
        self.assertEqual('ReactOS', db.agent_build_get_all(self.ctxt)[0].os)

    def test_agent_build_destroy_destroyed(self):
        """Destroying an already-destroyed build raises."""
        build = db.agent_build_create(self.ctxt, {})
        db.agent_build_destroy(self.ctxt, build.id)
        self.assertRaises(exception.AgentBuildNotFound,
                          db.agent_build_destroy, self.ctxt, build.id)

    def test_agent_build_update_destroyed(self):
        """Updating a destroyed build raises AgentBuildNotFound."""
        build = db.agent_build_create(self.ctxt, {'os': 'HaikuOS'})
        db.agent_build_destroy(self.ctxt, build.id)
        self.assertRaises(exception.AgentBuildNotFound,
                          db.agent_build_update, self.ctxt, build.id,
                          {'os': 'OS/2'})

    def test_agent_build_exists(self):
        """Duplicate (hypervisor, os, architecture) triples are rejected."""
        values = {'hypervisor': 'kvm', 'os': 'FreeBSD',
                  'architecture': 'x86_64'}
        db.agent_build_create(self.ctxt, values)
        self.assertRaises(exception.AgentBuildExists, db.agent_build_create,
                          self.ctxt, values)

    def test_agent_build_get_all_by_hypervisor(self):
        """agent_build_get_all honours the hypervisor filter."""
        values = {'hypervisor': 'kvm', 'os': 'FreeBSD',
                  'architecture': 'x86_64'}
        created = db.agent_build_create(self.ctxt, values)
        actual = db.agent_build_get_all(self.ctxt, hypervisor='kvm')
        self._assertEqualListsOfObjects([created], actual)
class VirtualInterfaceTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.virtual_interface_* methods."""

    def setUp(self):
        super(VirtualInterfaceTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # A default instance and network shared by the tests below.
        self.instance_uuid = db.instance_create(self.ctxt, {})['uuid']
        values = {'host': 'localhost', 'project_id': 'project1'}
        self.network = db.network_create_safe(self.ctxt, values)

    def _get_base_values(self):
        """Return minimal valid values for creating a virtual interface."""
        return {
            'instance_uuid': self.instance_uuid,
            'address': 'fake_address',
            'network_id': self.network['id'],
            'uuid': str(stdlib_uuid.uuid4())
        }

    def mock_db_query_first_to_raise_data_error_exception(self):
        """Make Query.first() raise DataError to simulate a bad query."""
        self.mox.StubOutWithMock(query.Query, 'first')
        query.Query.first().AndRaise(exc.DataError(mox.IgnoreArg(),
                                                   mox.IgnoreArg(),
                                                   mox.IgnoreArg()))
        self.mox.ReplayAll()

    def _create_virt_interface(self, values):
        """Create a virtual interface from base values merged with values."""
        v = self._get_base_values()
        v.update(values)
        return db.virtual_interface_create(self.ctxt, v)

    def test_virtual_interface_create(self):
        """Creation assigns an id and stores the supplied values."""
        vif = self._create_virt_interface({})
        # assertIsNotNone is clearer than assertFalse(... is None).
        self.assertIsNotNone(vif['id'])
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'uuid']
        self._assertEqualObjects(vif, self._get_base_values(), ignored_keys)

    def test_virtual_interface_create_with_duplicate_address(self):
        """Reusing a uuid/address raises VirtualInterfaceCreateException."""
        vif = self._create_virt_interface({})
        self.assertRaises(exception.VirtualInterfaceCreateException,
                          self._create_virt_interface, {"uuid": vif['uuid']})

    def test_virtual_interface_get(self):
        """Interfaces are retrievable by id."""
        vifs = [self._create_virt_interface({'address': 'a'}),
                self._create_virt_interface({'address': 'b'})]

        for vif in vifs:
            real_vif = db.virtual_interface_get(self.ctxt, vif['id'])
            self._assertEqualObjects(vif, real_vif)

    def test_virtual_interface_get_by_address(self):
        """Interfaces are retrievable by address."""
        vifs = [self._create_virt_interface({'address': 'first'}),
                self._create_virt_interface({'address': 'second'})]
        for vif in vifs:
            real_vif = db.virtual_interface_get_by_address(self.ctxt,
                                                           vif['address'])
            self._assertEqualObjects(vif, real_vif)

    def test_virtual_interface_get_by_address_not_found(self):
        """An unknown address returns None."""
        self.assertIsNone(db.virtual_interface_get_by_address(self.ctxt,
                          "i.nv.ali.ip"))

    def test_virtual_interface_get_by_address_data_error_exception(self):
        """A DataError from the DB layer maps to InvalidIpAddressError."""
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.InvalidIpAddressError,
                          db.virtual_interface_get_by_address,
                          self.ctxt,
                          "i.nv.ali.ip")

    def test_virtual_interface_get_by_uuid(self):
        """Interfaces are retrievable by uuid."""
        vifs = [self._create_virt_interface({"address": "address_1"}),
                self._create_virt_interface({"address": "address_2"})]
        for vif in vifs:
            real_vif = db.virtual_interface_get_by_uuid(self.ctxt, vif['uuid'])
            self._assertEqualObjects(vif, real_vif)

    def test_virtual_interface_get_by_instance(self):
        """Interfaces are grouped correctly by owning instance."""
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        vifs1 = [self._create_virt_interface({'address': 'fake1'}),
                 self._create_virt_interface({'address': 'fake2'})]
        vifs2 = [self._create_virt_interface({'address': 'fake3',
                                              'instance_uuid': inst_uuid2})]
        vifs1_real = db.virtual_interface_get_by_instance(self.ctxt,
                                                          self.instance_uuid)
        vifs2_real = db.virtual_interface_get_by_instance(self.ctxt,
                                                          inst_uuid2)
        self._assertEqualListsOfObjects(vifs1, vifs1_real)
        self._assertEqualListsOfObjects(vifs2, vifs2_real)

    def test_virtual_interface_get_by_instance_and_network(self):
        """Lookup by (instance, network) returns the single matching vif."""
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        values = {'host': 'localhost', 'project_id': 'project2'}
        network_id = db.network_create_safe(self.ctxt, values)['id']

        vifs = [self._create_virt_interface({'address': 'fake1'}),
                self._create_virt_interface({'address': 'fake2',
                                             'network_id': network_id,
                                             'instance_uuid': inst_uuid2}),
                self._create_virt_interface({'address': 'fake3',
                                             'instance_uuid': inst_uuid2})]
        for vif in vifs:
            params = (self.ctxt, vif['instance_uuid'], vif['network_id'])
            r_vif = db.virtual_interface_get_by_instance_and_network(*params)
            self._assertEqualObjects(r_vif, vif)

    def test_virtual_interface_delete_by_instance(self):
        """Deleting by instance removes only that instance's interfaces."""
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']

        values = [dict(address='fake1'), dict(address='fake2'),
                  dict(address='fake3', instance_uuid=inst_uuid2)]
        for vals in values:
            self._create_virt_interface(vals)

        db.virtual_interface_delete_by_instance(self.ctxt, self.instance_uuid)

        real_vifs1 = db.virtual_interface_get_by_instance(self.ctxt,
                                                          self.instance_uuid)
        real_vifs2 = db.virtual_interface_get_by_instance(self.ctxt,
                                                          inst_uuid2)
        self.assertEqual(len(real_vifs1), 0)
        self.assertEqual(len(real_vifs2), 1)

    def test_virtual_interface_get_all(self):
        """get_all returns every created interface."""
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        values = [dict(address='fake1'), dict(address='fake2'),
                  dict(address='fake3', instance_uuid=inst_uuid2)]

        vifs = [self._create_virt_interface(val) for val in values]
        real_vifs = db.virtual_interface_get_all(self.ctxt)
        self._assertEqualListsOfObjects(vifs, real_vifs)
class NetworkTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.network_* methods."""
    def setUp(self):
        """Create an admin context shared by every test in this class."""
        super(NetworkTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _get_associated_fixed_ip(self, host, cidr, ip):
        """Create a network/instance pair with an associated fixed IP.

        Builds, in order: a network with *cidr*, an instance on *host*, a
        virtual interface for that instance on the network, and an allocated
        fixed IP *ip* bound to the interface, then associates the IP with
        the instance.  Returns the (network, instance) pair.
        """
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project1', 'cidr': cidr})
        # The freshly created network must not yet be in use on the host.
        self.assertFalse(db.network_in_use_on_host(self.ctxt, network.id,
            host))
        instance = db.instance_create(self.ctxt,
            {'project_id': 'project1', 'host': host})
        virtual_interface = db.virtual_interface_create(self.ctxt,
            {'instance_uuid': instance.uuid, 'network_id': network.id,
            'address': ip})
        db.fixed_ip_create(self.ctxt, {'address': ip,
            'network_id': network.id, 'allocated': True,
            'virtual_interface_id': virtual_interface.id})
        db.fixed_ip_associate(self.ctxt, ip, instance.uuid,
            network.id)
        return network, instance
def test_network_get_associated_fixed_ips(self):
network, instance = self._get_associated_fixed_ip('host.net',
'192.0.2.0/30', '192.0.2.1')
data = db.network_get_associated_fixed_ips(self.ctxt, network.id)
self.assertEqual(1, len(data))
self.assertEqual('192.0.2.1', data[0]['address'])
self.assertEqual('192.0.2.1', data[0]['vif_address'])
self.assertEqual(instance.uuid, data[0]['instance_uuid'])
self.assertTrue(data[0]['allocated'])
def test_network_create_safe(self):
values = {'host': 'localhost', 'project_id': 'project1'}
network = db.network_create_safe(self.ctxt, values)
self.assertEqual(36, len(network['uuid']))
db_network = db.network_get(self.ctxt, network['id'])
self._assertEqualObjects(network, db_network)
def test_network_create_with_duplicate_vlan(self):
values1 = {'host': 'localhost', 'project_id': 'project1', 'vlan': 1}
values2 = {'host': 'something', 'project_id': 'project1', 'vlan': 1}
db.network_create_safe(self.ctxt, values1)
self.assertRaises(exception.DuplicateVlan,
db.network_create_safe, self.ctxt, values2)
def test_network_delete_safe(self):
values = {'host': 'localhost', 'project_id': 'project1'}
network = db.network_create_safe(self.ctxt, values)
db_network = db.network_get(self.ctxt, network['id'])
values = {'network_id': network['id'], 'address': '192.168.1.5'}
address1 = db.fixed_ip_create(self.ctxt, values)['address']
values = {'network_id': network['id'],
'address': '192.168.1.6',
'allocated': True}
address2 = db.fixed_ip_create(self.ctxt, values)['address']
self.assertRaises(exception.NetworkInUse,
db.network_delete_safe, self.ctxt, network['id'])
db.fixed_ip_update(self.ctxt, address2, {'allocated': False})
network = db.network_delete_safe(self.ctxt, network['id'])
self.assertRaises(exception.FixedIpNotFoundForAddress,
db.fixed_ip_get_by_address, self.ctxt, address1)
ctxt = self.ctxt.elevated(read_deleted='yes')
fixed_ip = db.fixed_ip_get_by_address(ctxt, address1)
self.assertTrue(fixed_ip['deleted'])
def test_network_in_use_on_host(self):
values = {'host': 'foo', 'hostname': 'myname'}
instance = db.instance_create(self.ctxt, values)
values = {'address': '192.168.1.5', 'instance_uuid': instance['uuid']}
vif = db.virtual_interface_create(self.ctxt, values)
values = {'address': '192.168.1.6',
'network_id': 1,
'allocated': True,
'instance_uuid': instance['uuid'],
'virtual_interface_id': vif['id']}
db.fixed_ip_create(self.ctxt, values)
self.assertEqual(db.network_in_use_on_host(self.ctxt, 1, 'foo'), True)
self.assertEqual(db.network_in_use_on_host(self.ctxt, 1, 'bar'), False)
    def test_network_update_nonexistent(self):
        """Updating an unknown network id raises NetworkNotFound."""
        self.assertRaises(exception.NetworkNotFound,
                        db.network_update, self.ctxt, 'nonexistent', {})
def test_network_update_with_duplicate_vlan(self):
values1 = {'host': 'localhost', 'project_id': 'project1', 'vlan': 1}
values2 = {'host': 'something', 'project_id': 'project1', 'vlan': 2}
network_ref = db.network_create_safe(self.ctxt, values1)
db.network_create_safe(self.ctxt, values2)
self.assertRaises(exception.DuplicateVlan,
db.network_update, self.ctxt,
network_ref["id"], values2)
def test_network_update(self):
network = db.network_create_safe(self.ctxt, {'project_id': 'project1',
'vlan': 1, 'host': 'test.com'})
db.network_update(self.ctxt, network.id, {'vlan': 2})
network_new = db.network_get(self.ctxt, network.id)
self.assertEqual(2, network_new.vlan)
    def test_network_set_host_nonexistent_network(self):
        """Setting a host on an unknown network raises NetworkNotFound."""
        self.assertRaises(exception.NetworkNotFound,
                        db.network_set_host, self.ctxt, 'nonexistent', 'nonexistent')
def test_network_set_host_with_initially_no_host(self):
values = {'host': 'example.com', 'project_id': 'project1'}
network = db.network_create_safe(self.ctxt, values)
self.assertEqual(
db.network_set_host(self.ctxt, network.id, 'new.example.com'),
'example.com')
def test_network_set_host(self):
    """With no host assigned yet, network_set_host() stores the given
    host, returns it, and the assignment persists.
    """
    values = {'project_id': 'project1'}
    network = db.network_create_safe(self.ctxt, values)
    self.assertEqual(
        db.network_set_host(self.ctxt, network.id, 'example.com'),
        'example.com')
    self.assertEqual('example.com',
                     db.network_get(self.ctxt, network.id).host)
def test_network_get_all_by_host(self):
    """network_get_all_by_host() matches a network via its own host
    column, via a fixed IP's host, or via the host of an instance that
    holds a fixed IP on the network.
    """
    self.assertEqual([],
                     db.network_get_all_by_host(self.ctxt, 'example.com'))
    host = 'h1.example.com'
    # network with host set
    net1 = db.network_create_safe(self.ctxt, {'host': host})
    self._assertEqualListsOfObjects([net1],
                                    db.network_get_all_by_host(self.ctxt, host))
    # network with fixed ip with host set
    net2 = db.network_create_safe(self.ctxt, {})
    db.fixed_ip_create(self.ctxt, {'host': host, 'network_id': net2.id})
    # NOTE: the original assigned an unused `data` variable here,
    # issuing the same DB query twice; the dead assignment is removed.
    self._assertEqualListsOfObjects([net1, net2],
                                    db.network_get_all_by_host(self.ctxt, host))
    # network with instance with host set
    net3 = db.network_create_safe(self.ctxt, {})
    instance = db.instance_create(self.ctxt, {'host': host})
    vif = db.virtual_interface_create(self.ctxt,
                                      {'instance_uuid': instance.uuid})
    db.fixed_ip_create(self.ctxt, {'network_id': net3.id,
                                   'virtual_interface_id': vif.id})
    self._assertEqualListsOfObjects([net1, net2, net3],
                                    db.network_get_all_by_host(self.ctxt, host))
def test_network_get_by_cidr(self):
    """network_get_by_cidr() resolves both the IPv4 and IPv6 CIDR of
    the same network record.
    """
    cidr = '192.0.2.0/30'
    cidr_v6 = '2001:db8:1::/64'
    network = db.network_create_safe(self.ctxt,
        {'project_id': 'project1', 'cidr': cidr, 'cidr_v6': cidr_v6})
    self._assertEqualObjects(network,
                             db.network_get_by_cidr(self.ctxt, cidr))
    self._assertEqualObjects(network,
                             db.network_get_by_cidr(self.ctxt, cidr_v6))
def test_network_get_by_cidr_nonexistent(self):
    """An unknown CIDR raises NetworkNotFoundForCidr."""
    self.assertRaises(exception.NetworkNotFoundForCidr,
                      db.network_get_by_cidr, self.ctxt, '192.0.2.0/30')
def test_network_get_by_uuid(self):
    """network_get_by_uuid() returns the matching network record."""
    network = db.network_create_safe(self.ctxt,
                                     {'project_id': 'project_1'})
    self._assertEqualObjects(network,
                             db.network_get_by_uuid(self.ctxt, network.uuid))
def test_network_get_by_uuid_nonexistent(self):
    """An unknown UUID raises NetworkNotFoundForUUID."""
    self.assertRaises(exception.NetworkNotFoundForUUID,
                      db.network_get_by_uuid, self.ctxt, 'non-existent-uuid')
def test_network_get_all_by_uuids_no_networks(self):
    """No matching UUIDs raises NoNetworksFound rather than returning []."""
    self.assertRaises(exception.NoNetworksFound,
                      db.network_get_all_by_uuids, self.ctxt, ['non-existent-uuid'])
def test_network_get_all_by_uuids(self):
    """network_get_all_by_uuids() returns every requested network."""
    net1 = db.network_create_safe(self.ctxt, {})
    net2 = db.network_create_safe(self.ctxt, {})
    self._assertEqualListsOfObjects([net1, net2],
        db.network_get_all_by_uuids(self.ctxt, [net1.uuid, net2.uuid]))
def test_network_get_all_no_networks(self):
    """An empty networks table raises NoNetworksFound."""
    self.assertRaises(exception.NoNetworksFound,
                      db.network_get_all, self.ctxt)
def test_network_get_all(self):
    """network_get_all() returns every network; here exactly one."""
    created = db.network_create_safe(self.ctxt, {})
    fetched = db.network_get_all(self.ctxt)
    self.assertEqual(1, len(fetched))
    self._assertEqualObjects(created, fetched[0])
def test_network_get_all_admin_user(self):
    """With project_only=True an admin context still sees all networks,
    including those with no project and other projects' networks.
    """
    network1 = db.network_create_safe(self.ctxt, {})
    network2 = db.network_create_safe(self.ctxt,
                                      {'project_id': 'project1'})
    self._assertEqualListsOfObjects([network1, network2],
                                    db.network_get_all(self.ctxt,
                                                       project_only=True))
def test_network_get_all_normal_user(self):
    """With project_only=True a non-admin context only sees networks
    belonging to its own project.
    """
    normal_ctxt = context.RequestContext('fake', 'fake')
    db.network_create_safe(self.ctxt, {})
    db.network_create_safe(self.ctxt, {'project_id': 'project1'})
    # Only this network belongs to the normal user's project ('fake').
    network1 = db.network_create_safe(self.ctxt,
                                      {'project_id': 'fake'})
    network_db = db.network_get_all(normal_ctxt, project_only=True)
    self.assertEqual(1, len(network_db))
    self._assertEqualObjects(network1, network_db[0])
def test_network_get(self):
    """network_get() fetches an existing network and raises
    NetworkNotFound once the network has been deleted.
    """
    network = db.network_create_safe(self.ctxt, {})
    self._assertEqualObjects(db.network_get(self.ctxt, network.id),
                             network)
    db.network_delete_safe(self.ctxt, network.id)
    self.assertRaises(exception.NetworkNotFound,
                      db.network_get, self.ctxt, network.id)
def test_network_associate(self):
    """network_associate() attaches a project to an unowned network."""
    network = db.network_create_safe(self.ctxt, {})
    self.assertIsNone(network.project_id)
    db.network_associate(self.ctxt, "project1", network.id)
    self.assertEqual("project1", db.network_get(self.ctxt,
                                                network.id).project_id)
def test_network_diassociate(self):
    """network_disassociate() can clear the project and the host
    independently, controlled by its two boolean flags.
    """
    # NOTE(review): 'diassociate' in the test name is a typo for
    # 'disassociate'; kept as-is to avoid changing the test's identity.
    network = db.network_create_safe(self.ctxt,
        {'project_id': 'project1', 'host': 'test.net'})
    # disassociate project
    db.network_disassociate(self.ctxt, network.id, False, True)
    self.assertIsNone(db.network_get(self.ctxt, network.id).project_id)
    # disassociate host
    db.network_disassociate(self.ctxt, network.id, True, False)
    self.assertIsNone(db.network_get(self.ctxt, network.id).host)
def test_network_count_reserved_ips(self):
    """network_count_reserved_ips() counts only fixed IPs flagged
    as reserved on the network.
    """
    net = db.network_create_safe(self.ctxt, {})
    self.assertEqual(0, db.network_count_reserved_ips(self.ctxt, net.id))
    db.fixed_ip_create(self.ctxt, {'network_id': net.id,
                                   'reserved': True})
    self.assertEqual(1, db.network_count_reserved_ips(self.ctxt, net.id))
class KeyPairTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.key_pair_* methods."""
    def setUp(self):
        super(KeyPairTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _create_key_pair(self, values):
        # Helper: create a key pair record under the admin context.
        return db.key_pair_create(self.ctxt, values)
    def test_key_pair_create(self):
        """key_pair_create() persists the passed values and assigns an id."""
        param = {
            'name': 'test_1',
            'user_id': 'test_user_id_1',
            'public_key': 'test_public_key_1',
            'fingerprint': 'test_fingerprint_1'
        }
        key_pair = self._create_key_pair(param)
        self.assertTrue(key_pair['id'] is not None)
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(key_pair, param, ignored_keys)
    def test_key_pair_create_with_duplicate_name(self):
        """A second key pair with the same (user, name) raises KeyPairExists."""
        params = {'name': 'test_name', 'user_id': 'test_user_id'}
        self._create_key_pair(params)
        self.assertRaises(exception.KeyPairExists, self._create_key_pair,
                          params)
    def test_key_pair_get(self):
        """key_pair_get() resolves each pair by (user_id, name)."""
        params = [
            {'name': 'test_1', 'user_id': 'test_user_id_1'},
            {'name': 'test_2', 'user_id': 'test_user_id_2'},
            {'name': 'test_3', 'user_id': 'test_user_id_3'}
        ]
        key_pairs = [self._create_key_pair(p) for p in params]
        for key in key_pairs:
            real_key = db.key_pair_get(self.ctxt, key['user_id'], key['name'])
            self._assertEqualObjects(key, real_key)
    def test_key_pair_get_no_results(self):
        """Looking up a non-existent key pair raises KeypairNotFound."""
        param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, param['user_id'], param['name'])
    def test_key_pair_get_deleted(self):
        """Soft-deleted pairs are only visible with read_deleted='yes'."""
        param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
        key_pair_created = self._create_key_pair(param)
        db.key_pair_destroy(self.ctxt, param['user_id'], param['name'])
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, param['user_id'], param['name'])
        ctxt = self.ctxt.elevated(read_deleted='yes')
        key_pair_deleted = db.key_pair_get(ctxt, param['user_id'],
                                           param['name'])
        ignored_keys = ['deleted', 'created_at', 'updated_at', 'deleted_at']
        self._assertEqualObjects(key_pair_deleted, key_pair_created,
                                 ignored_keys)
        # Soft delete marks the row by writing its own id into 'deleted'.
        self.assertEqual(key_pair_deleted['deleted'], key_pair_deleted['id'])
    def test_key_pair_get_all_by_user(self):
        """key_pair_get_all_by_user() partitions pairs per user."""
        params = [
            {'name': 'test_1', 'user_id': 'test_user_id_1'},
            {'name': 'test_2', 'user_id': 'test_user_id_1'},
            {'name': 'test_3', 'user_id': 'test_user_id_2'}
        ]
        key_pairs_user_1 = [self._create_key_pair(p) for p in params
                            if p['user_id'] == 'test_user_id_1']
        key_pairs_user_2 = [self._create_key_pair(p) for p in params
                            if p['user_id'] == 'test_user_id_2']
        real_keys_1 = db.key_pair_get_all_by_user(self.ctxt, 'test_user_id_1')
        real_keys_2 = db.key_pair_get_all_by_user(self.ctxt, 'test_user_id_2')
        self._assertEqualListsOfObjects(key_pairs_user_1, real_keys_1)
        self._assertEqualListsOfObjects(key_pairs_user_2, real_keys_2)
    def test_key_pair_count_by_user(self):
        """key_pair_count_by_user() counts pairs per user."""
        params = [
            {'name': 'test_1', 'user_id': 'test_user_id_1'},
            {'name': 'test_2', 'user_id': 'test_user_id_1'},
            {'name': 'test_3', 'user_id': 'test_user_id_2'}
        ]
        for p in params:
            self._create_key_pair(p)
        count_1 = db.key_pair_count_by_user(self.ctxt, 'test_user_id_1')
        self.assertEqual(count_1, 2)
        count_2 = db.key_pair_count_by_user(self.ctxt, 'test_user_id_2')
        self.assertEqual(count_2, 1)
    def test_key_pair_destroy(self):
        """A destroyed key pair is no longer retrievable."""
        param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
        self._create_key_pair(param)
        db.key_pair_destroy(self.ctxt, param['user_id'], param['name'])
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, param['user_id'], param['name'])
    def test_key_pair_destroy_no_such_key(self):
        """Destroying a non-existent key pair raises KeypairNotFound."""
        param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
        self.assertRaises(exception.KeypairNotFound,
                          db.key_pair_destroy, self.ctxt,
                          param['user_id'], param['name'])
class QuotaTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.quota_* methods."""
    def setUp(self):
        super(QuotaTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def test_quota_create(self):
        """quota_create() stores resource, hard_limit and project."""
        quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
        self.assertEqual(quota.resource, 'resource')
        self.assertEqual(quota.hard_limit, 99)
        self.assertEqual(quota.project_id, 'project1')
    def test_quota_get(self):
        quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
        quota_db = db.quota_get(self.ctxt, 'project1', 'resource')
        self._assertEqualObjects(quota, quota_db)
    def test_quota_get_all_by_project(self):
        """Per-project listing maps each resource name to its limit."""
        for i in range(3):
            for j in range(3):
                db.quota_create(self.ctxt, 'proj%d' % i, 'resource%d' % j, j)
        for i in range(3):
            quotas_db = db.quota_get_all_by_project(self.ctxt, 'proj%d' % i)
            self.assertEqual(quotas_db, {'project_id': 'proj%d' % i,
                                         'resource0': 0,
                                         'resource1': 1,
                                         'resource2': 2})
    def test_quota_get_all_by_project_and_user(self):
        """Per-(project, user) listing maps resources to per-user limits."""
        for i in range(3):
            for j in range(3):
                db.quota_create(self.ctxt, 'proj%d' % i, 'resource%d' % j,
                                j - 1, user_id='user%d' % i)
        for i in range(3):
            quotas_db = db.quota_get_all_by_project_and_user(self.ctxt,
                                                             'proj%d' % i,
                                                             'user%d' % i)
            self.assertEqual(quotas_db, {'project_id': 'proj%d' % i,
                                         'user_id': 'user%d' % i,
                                         'resource0': -1,
                                         'resource1': 0,
                                         'resource2': 1})
    def test_quota_update(self):
        db.quota_create(self.ctxt, 'project1', 'resource1', 41)
        db.quota_update(self.ctxt, 'project1', 'resource1', 42)
        quota = db.quota_get(self.ctxt, 'project1', 'resource1')
        self.assertEqual(quota.hard_limit, 42)
        self.assertEqual(quota.resource, 'resource1')
        self.assertEqual(quota.project_id, 'project1')
    def test_quota_update_nonexistent(self):
        self.assertRaises(exception.ProjectQuotaNotFound,
                          db.quota_update, self.ctxt, 'project1', 'resource1', 42)
    def test_quota_get_nonexistent(self):
        self.assertRaises(exception.ProjectQuotaNotFound,
                          db.quota_get, self.ctxt, 'project1', 'resource1')
    def test_quota_reserve_all_resources(self):
        """quota_reserve() creates a reservation and a correct usage
        record for every reservable resource in one call.
        """
        quotas = {}
        deltas = {}
        reservable_resources = {}
        for i, resource in enumerate(quota.resources):
            if isinstance(resource, quota.ReservableResource):
                quotas[resource.name] = db.quota_create(self.ctxt, 'project1',
                                                        resource.name, 100)
                deltas[resource.name] = i
                reservable_resources[resource.name] = resource
        # Seed real usage so quota_reserve has something to sync against.
        usages = {'instances': 3, 'cores': 6, 'ram': 9}
        instances = []
        for i in range(3):
            instances.append(db.instance_create(self.ctxt,
                             {'vcpus': 2, 'memory_mb': 3,
                              'project_id': 'project1'}))
        usages['fixed_ips'] = 2
        network = db.network_create_safe(self.ctxt, {})
        for i in range(2):
            address = '192.168.0.%d' % i
            # (unused return value of fixed_ip_create dropped)
            db.fixed_ip_create(self.ctxt, {'project_id': 'project1',
                                           'address': address,
                                           'network_id': network['id']})
            db.fixed_ip_associate(self.ctxt, address,
                                  instances[0].uuid, network['id'])
        usages['floating_ips'] = 5
        for i in range(5):
            db.floating_ip_create(self.ctxt, {'project_id': 'project1'})
        usages['security_groups'] = 3
        for i in range(3):
            db.security_group_create(self.ctxt, {'project_id': 'project1'})
        reservations_uuids = db.quota_reserve(self.ctxt, reservable_resources,
                                              quotas, quotas, deltas, None,
                                              None, None, 'project1')
        # Materialize the keys: the collection is mutated below with
        # .remove(), and dict.keys() is a non-removable view on Python 3.
        resources_names = list(reservable_resources.keys())
        for reservation_uuid in reservations_uuids:
            reservation = db.reservation_get(self.ctxt, reservation_uuid)
            usage = db.quota_usage_get(self.ctxt, 'project1',
                                       reservation.resource)
            self.assertEqual(usage.in_use, usages[reservation.resource],
                             'Resource: %s' % reservation.resource)
            self.assertEqual(usage.reserved, deltas[reservation.resource])
            self.assertIn(reservation.resource, resources_names)
            resources_names.remove(reservation.resource)
        # Every reservable resource must have been covered exactly once.
        self.assertEqual(len(resources_names), 0)
    def test_quota_destroy_all_by_project(self):
        """Destroying all project quotas removes quotas, usages and
        reservations for the project.
        """
        reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
        db.quota_destroy_all_by_project(self.ctxt, 'project1')
        self.assertEqual(db.quota_get_all_by_project(self.ctxt, 'project1'),
                         {'project_id': 'project1'})
        self.assertEqual(db.quota_get_all_by_project_and_user(self.ctxt,
                         'project1', 'user1'),
                         {'project_id': 'project1', 'user_id': 'user1'})
        self.assertEqual(db.quota_usage_get_all_by_project(
                         self.ctxt, 'project1'),
                         {'project_id': 'project1'})
        for r in reservations:
            self.assertRaises(exception.ReservationNotFound,
                              db.reservation_get, self.ctxt, r)
    def test_quota_destroy_all_by_project_and_user(self):
        """Per-user destroy removes the user's quotas/usages but keeps
        project-scoped usages such as fixed_ips.
        """
        reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
        db.quota_destroy_all_by_project_and_user(self.ctxt, 'project1',
                                                 'user1')
        self.assertEqual(db.quota_get_all_by_project_and_user(self.ctxt,
                         'project1', 'user1'),
                         {'project_id': 'project1',
                          'user_id': 'user1'})
        self.assertEqual(db.quota_usage_get_all_by_project_and_user(
                         self.ctxt, 'project1', 'user1'),
                         {'project_id': 'project1',
                          'user_id': 'user1',
                          'fixed_ips': {'in_use': 2, 'reserved': 2}})
        for r in reservations:
            self.assertRaises(exception.ReservationNotFound,
                              db.reservation_get, self.ctxt, r)
    def test_quota_usage_get_nonexistent(self):
        self.assertRaises(exception.QuotaUsageNotFound, db.quota_usage_get,
                          self.ctxt, 'p1', 'nonexitent_resource')
    def test_quota_usage_get(self):
        _quota_reserve(self.ctxt, 'p1', 'u1')
        quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0')
        expected = {'resource': 'resource0', 'project_id': 'p1',
                    'in_use': 0, 'reserved': 0, 'total': 0}
        for key, value in expected.iteritems():
            self.assertEqual(value, quota_usage[key])
    def test_quota_usage_get_all_by_project(self):
        _quota_reserve(self.ctxt, 'p1', 'u1')
        expected = {'project_id': 'p1',
                    'resource0': {'in_use': 0, 'reserved': 0},
                    'resource1': {'in_use': 1, 'reserved': 1},
                    'fixed_ips': {'in_use': 2, 'reserved': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project(
                         self.ctxt, 'p1'))
    def test_quota_usage_get_all_by_project_and_user(self):
        _quota_reserve(self.ctxt, 'p1', 'u1')
        expected = {'project_id': 'p1',
                    'user_id': 'u1',
                    'resource0': {'in_use': 0, 'reserved': 0},
                    'resource1': {'in_use': 1, 'reserved': 1},
                    'fixed_ips': {'in_use': 2, 'reserved': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                         self.ctxt, 'p1', 'u1'))
    def test_quota_usage_update_nonexistent(self):
        self.assertRaises(exception.QuotaUsageNotFound, db.quota_usage_update,
                          self.ctxt, 'p1', 'u1', 'resource', in_use=42)
    def test_quota_usage_update(self):
        _quota_reserve(self.ctxt, 'p1', 'u1')
        db.quota_usage_update(self.ctxt, 'p1', 'u1', 'resource0', in_use=42,
                              reserved=43)
        quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0', 'u1')
        expected = {'resource': 'resource0', 'project_id': 'p1',
                    'user_id': 'u1', 'in_use': 42, 'reserved': 43, 'total': 85}
        for key, value in expected.iteritems():
            self.assertEqual(value, quota_usage[key])
    def test_quota_create_exists(self):
        db.quota_create(self.ctxt, 'project1', 'resource1', 41)
        self.assertRaises(exception.QuotaExists, db.quota_create, self.ctxt,
                          'project1', 'resource1', 42)
class QuotaClassTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.quota_class_* methods."""
    def setUp(self):
        super(QuotaClassTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def test_quota_class_get_default(self):
        """quota_class_get_default() returns the 'default' class with
        limits coerced to integers.
        """
        params = {
            'test_resource1': '10',
            'test_resource2': '20',
            'test_resource3': '30',
        }
        for res, limit in params.items():
            db.quota_class_create(self.ctxt, 'default', res, limit)
        defaults = db.quota_class_get_default(self.ctxt)
        self.assertEqual(defaults, dict(class_name='default',
                                        test_resource1=10,
                                        test_resource2=20,
                                        test_resource3=30))
    def test_quota_class_create(self):
        qc = db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
        self.assertEqual(qc.class_name, 'class name')
        self.assertEqual(qc.resource, 'resource')
        self.assertEqual(qc.hard_limit, 42)
    def test_quota_class_get(self):
        qc = db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
        qc_db = db.quota_class_get(self.ctxt, 'class name', 'resource')
        self._assertEqualObjects(qc, qc_db)
    def test_quota_class_get_nonexistent(self):
        self.assertRaises(exception.QuotaClassNotFound, db.quota_class_get,
                          self.ctxt, 'nonexistent', 'resource')
    def test_quota_class_get_all_by_name(self):
        """Per-class listing maps each resource name to its limit."""
        for i in range(3):
            for j in range(3):
                db.quota_class_create(self.ctxt, 'class%d' % i,
                                      'resource%d' % j, j)
        for i in range(3):
            classes = db.quota_class_get_all_by_name(self.ctxt, 'class%d' % i)
            self.assertEqual(classes, {'class_name': 'class%d' % i,
                             'resource0': 0, 'resource1': 1, 'resource2': 2})
    def test_quota_class_update(self):
        db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
        db.quota_class_update(self.ctxt, 'class name', 'resource', 43)
        self.assertEqual(db.quota_class_get(self.ctxt, 'class name',
                                            'resource').hard_limit, 43)
    def test_quota_class_update_nonexistent(self):
        self.assertRaises(exception.QuotaClassNotFound, db.quota_class_update,
                          self.ctxt, 'class name', 'resource', 42)
<|fim▁hole|>
def setUp(self):
    """Create three S3 image mappings from freshly generated UUIDs."""
    super(S3ImageTestCase, self).setUp()
    self.ctxt = context.get_admin_context()
    self.values = [uuidutils.generate_uuid() for i in xrange(3)]
    self.images = [db.s3_image_create(self.ctxt, uuid)
                   for uuid in self.values]
def test_s3_image_create(self):
    """Each created mapping keeps a valid UUID matching the input set."""
    for ref in self.images:
        self.assertTrue(uuidutils.is_uuid_like(ref.uuid))
    self.assertEqual(sorted(self.values),
                     sorted([ref.uuid for ref in self.images]))
def test_s3_image_get_by_uuid(self):
    """s3_image_get_by_uuid() round-trips each stored UUID."""
    for uuid in self.values:
        ref = db.s3_image_get_by_uuid(self.ctxt, uuid)
        self.assertTrue(uuidutils.is_uuid_like(ref.uuid))
        self.assertEqual(uuid, ref.uuid)
def test_s3_image_get(self):
    """s3_image_get() by numeric id returns the corresponding UUID."""
    self.assertEqual(sorted(self.values),
                     sorted([db.s3_image_get(self.ctxt, ref.id).uuid
                             for ref in self.images]))
def test_s3_image_get_not_found(self):
    """An unknown numeric id raises ImageNotFound."""
    self.assertRaises(exception.ImageNotFound, db.s3_image_get, self.ctxt,
                      100500)
def test_s3_image_get_by_uuid_not_found(self):
    """An unknown UUID raises ImageNotFound."""
    self.assertRaises(exception.ImageNotFound, db.s3_image_get_by_uuid,
                      self.ctxt, uuidutils.generate_uuid())
class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.compute_node_* methods."""
    _ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']
    def setUp(self):
        super(ComputeNodeTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.service_dict = dict(host='host1', binary='nova-compute',
                                 topic=CONF.compute_topic, report_count=1,
                                 disabled=False)
        self.service = db.service_create(self.ctxt, self.service_dict)
        self.compute_node_dict = dict(vcpus=2, memory_mb=1024, local_gb=2048,
                                      vcpus_used=0, memory_mb_used=0,
                                      local_gb_used=0, free_ram_mb=1024,
                                      free_disk_gb=2048, hypervisor_type="xen",
                                      hypervisor_version=1, cpu_info="",
                                      running_vms=0, current_workload=0,
                                      service_id=self.service['id'],
                                      disk_available_least=100,
                                      hypervisor_hostname='abracadabra104',
                                      host_ip='127.0.0.1',
                                      supported_instances='',
                                      pci_stats='')
        # add some random stats
        self.stats = dict(num_instances=3, num_proj_12345=2,
                          num_proj_23456=2, num_vm_building=3)
        self.compute_node_dict['stats'] = self.stats
        self.flags(reserved_host_memory_mb=0)
        self.flags(reserved_host_disk_mb=0)
        self.item = db.compute_node_create(self.ctxt, self.compute_node_dict)
    def _stats_as_dict(self, stats):
        # Convert the list of {key, value} stat rows into a plain dict.
        d = {}
        for s in stats:
            key = s['key']
            d[key] = s['value']
        return d
    def _stats_equal(self, stats, new_stats):
        # Stat values come back as strings; compare as ints.
        for k, v in stats.iteritems():
            self.assertEqual(v, int(new_stats[k]))
    def test_compute_node_create(self):
        self._assertEqualObjects(self.compute_node_dict, self.item,
                                 ignored_keys=self._ignored_keys + ['stats'])
        new_stats = self._stats_as_dict(self.item['stats'])
        self._stats_equal(self.stats, new_stats)
    def test_compute_node_get_all(self):
        """no_date_fields=True must strip timestamp columns from results."""
        date_fields = set(['created_at', 'updated_at',
                           'deleted_at', 'deleted'])
        for no_date_fields in [False, True]:
            nodes = db.compute_node_get_all(self.ctxt, no_date_fields)
            self.assertEqual(1, len(nodes))
            node = nodes[0]
            self._assertEqualObjects(self.compute_node_dict, node,
                                     ignored_keys=self._ignored_keys +
                                                  ['stats', 'service'])
            node_fields = set(node.keys())
            if no_date_fields:
                self.assertFalse(date_fields & node_fields)
            else:
                self.assertTrue(date_fields <= node_fields)
            new_stats = self._stats_as_dict(node['stats'])
            self._stats_equal(self.stats, new_stats)
    def test_compute_node_get_all_deleted_compute_node(self):
        # Create a service and compute node and ensure we can find its stats;
        # delete the service and compute node when done and loop again
        for x in range(2, 5):
            # Create a service
            service_data = self.service_dict.copy()
            service_data['host'] = 'host-%s' % x
            service = db.service_create(self.ctxt, service_data)
            # Create a compute node
            compute_node_data = self.compute_node_dict.copy()
            compute_node_data['service_id'] = service['id']
            compute_node_data['stats'] = self.stats.copy()
            compute_node_data['hypervisor_hostname'] = 'hypervisor-%s' % x
            node = db.compute_node_create(self.ctxt, compute_node_data)
            # Ensure the "new" compute node is found
            nodes = db.compute_node_get_all(self.ctxt, False)
            self.assertEqual(2, len(nodes))
            found = None
            for n in nodes:
                if n['id'] == node['id']:
                    found = n
                    break
            self.assertNotEqual(None, found)
            # Now ensure the match has stats!
            self.assertNotEqual(self._stats_as_dict(found['stats']), {})
            # Now delete the newly-created compute node to ensure the related
            # compute node stats are wiped in a cascaded fashion
            db.compute_node_delete(self.ctxt, node['id'])
            # Clean up the service
            db.service_destroy(self.ctxt, service['id'])
    def test_compute_node_get_all_mult_compute_nodes_one_service_entry(self):
        service_data = self.service_dict.copy()
        service_data['host'] = 'host2'
        service = db.service_create(self.ctxt, service_data)
        existing_node = dict(self.item.iteritems())
        existing_node['service'] = dict(self.service.iteritems())
        expected = [existing_node]
        for name in ['bm_node1', 'bm_node2']:
            compute_node_data = self.compute_node_dict.copy()
            compute_node_data['service_id'] = service['id']
            compute_node_data['stats'] = self.stats
            # BUGFIX: use the loop variable so each node gets a distinct
            # hypervisor_hostname; the original hard-coded 'bm_node_1',
            # giving both nodes the same name and defeating the loop.
            compute_node_data['hypervisor_hostname'] = name
            node = db.compute_node_create(self.ctxt, compute_node_data)
            node = dict(node.iteritems())
            node['service'] = dict(service.iteritems())
            expected.append(node)
        result = sorted(db.compute_node_get_all(self.ctxt, False),
                        key=lambda n: n['hypervisor_hostname'])
        self._assertEqualListsOfObjects(expected, result,
                                        ignored_keys=['stats'])
    def test_compute_node_get(self):
        compute_node_id = self.item['id']
        node = db.compute_node_get(self.ctxt, compute_node_id)
        self._assertEqualObjects(self.compute_node_dict, node,
                        ignored_keys=self._ignored_keys + ['stats', 'service'])
        new_stats = self._stats_as_dict(node['stats'])
        self._stats_equal(self.stats, new_stats)
    def test_compute_node_update(self):
        compute_node_id = self.item['id']
        stats = self._stats_as_dict(self.item['stats'])
        # change some values:
        stats['num_instances'] = 8
        stats['num_tribbles'] = 1
        values = {
            'vcpus': 4,
            'stats': stats,
        }
        item_updated = db.compute_node_update(self.ctxt, compute_node_id,
                                              values)
        self.assertEqual(4, item_updated['vcpus'])
        new_stats = self._stats_as_dict(item_updated['stats'])
        self._stats_equal(stats, new_stats)
    def test_compute_node_delete(self):
        compute_node_id = self.item['id']
        db.compute_node_delete(self.ctxt, compute_node_id)
        nodes = db.compute_node_get_all(self.ctxt)
        self.assertEqual(len(nodes), 0)
    def test_compute_node_search_by_hypervisor(self):
        """Substring search on hypervisor_hostname returns all matches."""
        nodes_created = []
        new_service = copy.copy(self.service_dict)
        for i in xrange(3):
            new_service['binary'] += str(i)
            new_service['topic'] += str(i)
            service = db.service_create(self.ctxt, new_service)
            self.compute_node_dict['service_id'] = service['id']
            self.compute_node_dict['hypervisor_hostname'] = 'testhost' + str(i)
            self.compute_node_dict['stats'] = self.stats
            node = db.compute_node_create(self.ctxt, self.compute_node_dict)
            nodes_created.append(node)
        nodes = db.compute_node_search_by_hypervisor(self.ctxt, 'host')
        self.assertEqual(3, len(nodes))
        self._assertEqualListsOfObjects(nodes_created, nodes,
                        ignored_keys=self._ignored_keys + ['stats', 'service'])
    def test_compute_node_statistics(self):
        stats = db.compute_node_statistics(self.ctxt)
        self.assertEqual(stats.pop('count'), 1)
        for k, v in stats.iteritems():
            self.assertEqual(v, self.item[k])
    def test_compute_node_not_found(self):
        self.assertRaises(exception.ComputeHostNotFound, db.compute_node_get,
                          self.ctxt, 100500)
    def test_compute_node_update_always_updates_updated_at(self):
        item_updated = db.compute_node_update(self.ctxt,
                self.item['id'], {})
        self.assertNotEqual(self.item['updated_at'],
                            item_updated['updated_at'])
    def test_compute_node_update_override_updated_at(self):
        # Update the record once so updated_at is set.
        first = db.compute_node_update(self.ctxt, self.item['id'],
                                       {'free_ram_mb': '12'})
        self.assertIsNotNone(first['updated_at'])
        # Update a second time. Make sure that the updated_at value we send
        # is overridden.
        second = db.compute_node_update(self.ctxt, self.item['id'],
                                        {'updated_at': first.updated_at,
                                         'free_ram_mb': '13'})
        self.assertNotEqual(first['updated_at'], second['updated_at'])
    def test_compute_node_stat_unchanged(self):
        # don't update unchanged stat values:
        stats = self.item['stats']
        stats_updated_at = dict([(stat['key'], stat['updated_at'])
                                  for stat in stats])
        stats_values = self._stats_as_dict(stats)
        new_values = {'stats': stats_values}
        compute_node_id = self.item['id']
        db.compute_node_update(self.ctxt, compute_node_id, new_values)
        updated_node = db.compute_node_get(self.ctxt, compute_node_id)
        updated_stats = updated_node['stats']
        for stat in updated_stats:
            self.assertEqual(stat['updated_at'], stats_updated_at[stat['key']])
    def test_compute_node_stat_prune(self):
        """prune_stats=True drops stats missing from the update dict."""
        for stat in self.item['stats']:
            if stat['key'] == 'num_instances':
                num_instance_stat = stat
                break
        values = {
            'stats': dict(num_instances=1)
        }
        db.compute_node_update(self.ctxt, self.item['id'], values,
                               prune_stats=True)
        item_updated = db.compute_node_get_all(self.ctxt)[0]
        self.assertEqual(1, len(item_updated['stats']))
        stat = item_updated['stats'][0]
        self.assertEqual(num_instance_stat['id'], stat['id'])
        self.assertEqual(num_instance_stat['key'], stat['key'])
        self.assertEqual(1, int(stat['value']))
class ProviderFwRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.provider_fw_rule_* methods."""
    def setUp(self):
        super(ProviderFwRuleTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.values = self._get_rule_values()
        self.rules = [db.provider_fw_rule_create(self.ctxt, rule)
                      for rule in self.values]
    def _get_rule_values(self):
        """Build one rule dict per sample CIDR (IPv4 and IPv6)."""
        cidr_samples = ['192.168.0.0/24', '10.1.2.3/32',
                        '2001:4f8:3:ba::/64',
                        '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128']
        values = []
        for i in xrange(len(cidr_samples)):
            rule = {}
            rule['protocol'] = 'foo' + str(i)
            rule['from_port'] = 9999 + i
            rule['to_port'] = 9898 + i
            rule['cidr'] = cidr_samples[i]
            values.append(rule)
        return values
    def test_provider_fw_rule_create(self):
        ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
                        'updated_at']
        for i, rule in enumerate(self.values):
            self._assertEqualObjects(self.rules[i], rule,
                                     ignored_keys=ignored_keys)
    def test_provider_fw_rule_get_all(self):
        self._assertEqualListsOfObjects(self.rules,
                                        db.provider_fw_rule_get_all(self.ctxt))
    def test_provider_fw_rule_destroy(self):
        """After destroying every rule, the listing is empty."""
        for rule in self.rules:
            db.provider_fw_rule_destroy(self.ctxt, rule.id)
        self.assertEqual([], db.provider_fw_rule_get_all(self.ctxt))
class CertificateTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.certificate_* methods."""
    def setUp(self):
        super(CertificateTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.created = self._certificates_create()
    def _get_certs_values(self):
        """Build three cert dicts, suffixing each value with 1..3."""
        base_values = {
            'user_id': 'user',
            'project_id': 'project',
            'file_name': 'filename'
        }
        return [dict((k, v + str(x)) for k, v in base_values.iteritems())
                for x in xrange(1, 4)]
    def _certificates_create(self):
        # Persist every generated cert and return the DB records.
        return [db.certificate_create(self.ctxt, cert)
                for cert in self._get_certs_values()]
    def test_certificate_create(self):
        ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
                        'updated_at']
        for i, cert in enumerate(self._get_certs_values()):
            self._assertEqualObjects(self.created[i], cert,
                                     ignored_keys=ignored_keys)
    def test_certificate_get_all_by_project(self):
        cert = db.certificate_get_all_by_project(self.ctxt,
                                                 self.created[1].project_id)
        self._assertEqualObjects(self.created[1], cert[0])
    def test_certificate_get_all_by_user(self):
        cert = db.certificate_get_all_by_user(self.ctxt,
                                              self.created[1].user_id)
        self._assertEqualObjects(self.created[1], cert[0])
    def test_certificate_get_all_by_user_and_project(self):
        cert = db.certificate_get_all_by_user_and_project(self.ctxt,
                           self.created[1].user_id, self.created[1].project_id)
        self._assertEqualObjects(self.created[1], cert[0])
class ConsoleTestCase(test.TestCase, ModelsObjectComparatorMixin):
def setUp(self):
super(ConsoleTestCase, self).setUp()
self.ctxt = context.get_admin_context()
pools_data = [
{'address': '192.168.10.10',
'username': 'user1',
'password': 'passwd1',
'console_type': 'type1',
'public_hostname': 'public_host1',
'host': 'host1',
'compute_host': 'compute_host1',
},
{'address': '192.168.10.11',
'username': 'user2',
'password': 'passwd2',
'console_type': 'type2',
'public_hostname': 'public_host2',
'host': 'host2',
'compute_host': 'compute_host2',
},
]
self.console_pools = [db.console_pool_create(self.ctxt, val)
for val in pools_data]
instance_uuid = uuidutils.generate_uuid()
db.instance_create(self.ctxt, {'uuid': instance_uuid})
self.console_data = [dict([('instance_name', 'name' + str(x)),
('instance_uuid', instance_uuid),
('password', 'pass' + str(x)),
('port', 7878 + x),
('pool_id', self.console_pools[x]['id'])])
for x in xrange(len(pools_data))]
self.consoles = [db.console_create(self.ctxt, val)
for val in self.console_data]
def test_console_create(self):
ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
'updated_at']
for console in self.consoles:
self.assertIsNotNone(console['id'])
self._assertEqualListsOfObjects(self.console_data, self.consoles,
ignored_keys=ignored_keys)
def test_console_get_by_id(self):
console = self.consoles[0]
console_get = db.console_get(self.ctxt, console['id'])
self._assertEqualObjects(console, console_get,
ignored_keys=['pool'])
def test_console_get_by_id_uuid(self):
console = self.consoles[0]
console_get = db.console_get(self.ctxt, console['id'],
console['instance_uuid'])
self._assertEqualObjects(console, console_get,
ignored_keys=['pool'])
def test_console_get_by_pool_instance(self):
console = self.consoles[0]
console_get = db.console_get_by_pool_instance(self.ctxt,
console['pool_id'], console['instance_uuid'])
self._assertEqualObjects(console, console_get,
ignored_keys=['pool'])
def test_console_get_all_by_instance(self):
instance_uuid = self.consoles[0]['instance_uuid']
consoles_get = db.console_get_all_by_instance(self.ctxt, instance_uuid)
self._assertEqualListsOfObjects(self.consoles, consoles_get)
def test_console_get_all_by_instance_with_pool(self):
instance_uuid = self.consoles[0]['instance_uuid']
consoles_get = db.console_get_all_by_instance(self.ctxt, instance_uuid,
columns_to_join=['pool'])
self._assertEqualListsOfObjects(self.consoles, consoles_get,
ignored_keys=['pool'])
self._assertEqualListsOfObjects([pool for pool in self.console_pools],
[c['pool'] for c in consoles_get])
def test_console_get_all_by_instance_empty(self):
consoles_get = db.console_get_all_by_instance(self.ctxt,
uuidutils.generate_uuid())
self.assertEqual(consoles_get, [])
def test_console_delete(self):
console_id = self.consoles[0]['id']
db.console_delete(self.ctxt, console_id)
self.assertRaises(exception.ConsoleNotFound, db.console_get,
self.ctxt, console_id)
def test_console_get_by_pool_instance_not_found(self):
self.assertRaises(exception.ConsoleNotFoundInPoolForInstance,
db.console_get_by_pool_instance, self.ctxt,
self.consoles[0]['pool_id'],
uuidutils.generate_uuid())
def test_console_get_not_found(self):
self.assertRaises(exception.ConsoleNotFound, db.console_get,
self.ctxt, 100500)
def test_console_get_not_found_instance(self):
    """A real console id paired with the wrong instance uuid is 'not found'."""
    real_id = self.consoles[0]['id']
    self.assertRaises(exception.ConsoleNotFoundForInstance,
                      db.console_get, self.ctxt, real_id,
                      uuidutils.generate_uuid())
class CellTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the cell_* DB API calls."""

    # DB-managed bookkeeping columns that no test should compare.
    _ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']

    def setUp(self):
        super(CellTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_cell_base_values(self):
        """Return a fresh template dict of cell values used by every test."""
        return {
            'name': 'myname',
            'api_url': 'apiurl',
            'transport_url': 'transporturl',
            'weight_offset': 0.5,
            'weight_scale': 1.5,
            'is_parent': True,
        }

    def _cell_value_modify(self, value, step):
        """Derive a distinct value of the same type, varying with ``step``.

        Checked in str -> float -> bool -> int order; bool must be tested
        before int because bool is a subclass of int.
        """
        if isinstance(value, str):
            return value + str(step)
        elif isinstance(value, float):
            return value + step + 0.6
        elif isinstance(value, bool):
            return bool(step % 2)
        elif isinstance(value, int):
            return value + step

    def _create_cells(self):
        """Create three cells with distinct values; return the dicts used."""
        test_values = []
        # range/items instead of the Py2-only xrange/iteritems — behaves
        # identically here and also runs on Python 3.
        for x in range(1, 4):
            modified_val = dict((k, self._cell_value_modify(v, x))
                                for k, v in
                                self._get_cell_base_values().items())
            db.cell_create(self.ctxt, modified_val)
            test_values.append(modified_val)
        return test_values

    def test_cell_create(self):
        """A created cell gets an id and round-trips its values."""
        cell = db.cell_create(self.ctxt, self._get_cell_base_values())
        # assertIsNotNone instead of assertFalse(... is None): same check,
        # much better failure message.
        self.assertIsNotNone(cell['id'])
        self._assertEqualObjects(cell, self._get_cell_base_values(),
                                 ignored_keys=self._ignored_keys)

    def test_cell_update(self):
        """Updating by name changes every supplied field."""
        db.cell_create(self.ctxt, self._get_cell_base_values())
        new_values = {
            'api_url': 'apiurl1',
            'transport_url': 'transporturl1',
            'weight_offset': 0.6,
            'weight_scale': 1.6,
            'is_parent': False,
        }
        test_cellname = self._get_cell_base_values()['name']
        updated_cell = db.cell_update(self.ctxt, test_cellname, new_values)
        # 'name' is the update key, not part of new_values, so skip it.
        self._assertEqualObjects(updated_cell, new_values,
                                 ignored_keys=self._ignored_keys + ['name'])

    def test_cell_delete(self):
        """Deleted cells can no longer be fetched by name."""
        new_cells = self._create_cells()
        for cell in new_cells:
            test_cellname = cell['name']
            db.cell_delete(self.ctxt, test_cellname)
            self.assertRaises(exception.CellNotFound, db.cell_get, self.ctxt,
                              test_cellname)

    def test_cell_get(self):
        """Each created cell is retrievable by name with matching values."""
        new_cells = self._create_cells()
        for cell in new_cells:
            cell_get = db.cell_get(self.ctxt, cell['name'])
            self._assertEqualObjects(cell_get, cell,
                                     ignored_keys=self._ignored_keys)

    def test_cell_get_all(self):
        """cell_get_all returns exactly the created cells."""
        new_cells = self._create_cells()
        cells = db.cell_get_all(self.ctxt)
        self.assertEqual(len(new_cells), len(cells))
        # Index the expected cells by name so results may come back in
        # any order.
        cells_byname = dict((newcell['name'], newcell)
                            for newcell in new_cells)
        for cell in cells:
            self._assertEqualObjects(cell, cells_byname[cell['name']],
                                     self._ignored_keys)

    def test_cell_get_not_found(self):
        """Fetching an unknown cell name raises CellNotFound."""
        self._create_cells()
        self.assertRaises(exception.CellNotFound, db.cell_get, self.ctxt,
                          'cellnotinbase')

    def test_cell_update_not_found(self):
        """Updating an unknown cell name raises CellNotFound."""
        self._create_cells()
        self.assertRaises(exception.CellNotFound, db.cell_update, self.ctxt,
                          'cellnotinbase', self._get_cell_base_values())

    def test_cell_create_exists(self):
        """Creating a second cell with the same name raises CellExists."""
        db.cell_create(self.ctxt, self._get_cell_base_values())
        self.assertRaises(exception.CellExists, db.cell_create,
                          self.ctxt, self._get_cell_base_values())
class ConsolePoolTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the console_pool_* DB API calls."""

    def setUp(self):
        super(ConsolePoolTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # Pools 2 and 3 share host/console_type so the "get all by host
        # type" test can match more than one row.
        self.test_console_pool_1 = {
            'address': '192.168.2.10',
            'username': 'user_1',
            'password': 'secret_123',
            'console_type': 'type_1',
            'public_hostname': 'public_hostname_123',
            'host': 'localhost',
            'compute_host': '127.0.0.1',
        }
        self.test_console_pool_2 = {
            'address': '192.168.2.11',
            'username': 'user_2',
            'password': 'secret_1234',
            'console_type': 'type_2',
            'public_hostname': 'public_hostname_1234',
            'host': '127.0.0.1',
            'compute_host': 'localhost',
        }
        self.test_console_pool_3 = {
            'address': '192.168.2.12',
            'username': 'user_3',
            'password': 'secret_12345',
            'console_type': 'type_2',
            'public_hostname': 'public_hostname_12345',
            'host': '127.0.0.1',
            'compute_host': '192.168.1.1',
        }

    def test_console_pool_create(self):
        """A created pool gets an id and round-trips its values."""
        console_pool = db.console_pool_create(
            self.ctxt, self.test_console_pool_1)
        # assertIsNotNone instead of assertTrue(... is not None): same
        # check, clearer failure message.
        self.assertIsNotNone(console_pool.get('id'))
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(
            console_pool, self.test_console_pool_1, ignored_keys)

    def test_console_pool_create_duplicate(self):
        """Creating the same pool twice raises ConsolePoolExists."""
        db.console_pool_create(self.ctxt, self.test_console_pool_1)
        self.assertRaises(exception.ConsolePoolExists, db.console_pool_create,
                          self.ctxt, self.test_console_pool_1)

    def test_console_pool_get_by_host_type(self):
        """Lookup by (compute_host, host, console_type) finds one pool."""
        params = [
            self.test_console_pool_1,
            self.test_console_pool_2,
        ]
        for p in params:
            db.console_pool_create(self.ctxt, p)
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id', 'consoles']
        cp = self.test_console_pool_1
        db_cp = db.console_pool_get_by_host_type(
            self.ctxt, cp['compute_host'], cp['host'], cp['console_type']
        )
        self._assertEqualObjects(cp, db_cp, ignored_keys)

    # NOTE: name keeps the historical "no_resuls" typo so the test id
    # stays stable for anyone selecting it by name.
    def test_console_pool_get_by_host_type_no_resuls(self):
        """No matching pool raises ConsolePoolNotFoundForHostType."""
        self.assertRaises(
            exception.ConsolePoolNotFoundForHostType,
            db.console_pool_get_by_host_type, self.ctxt, 'compute_host',
            'host', 'console_type')

    def test_console_pool_get_all_by_host_type(self):
        """All pools sharing a (host, console_type) pair are returned."""
        params = [
            self.test_console_pool_1,
            self.test_console_pool_2,
            self.test_console_pool_3,
        ]
        for p in params:
            db.console_pool_create(self.ctxt, p)
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id', 'consoles']
        cp = self.test_console_pool_2
        db_cp = db.console_pool_get_all_by_host_type(
            self.ctxt, cp['host'], cp['console_type'])
        # Pools 2 and 3 share host/console_type; pool 1 must be excluded.
        self._assertEqualListsOfObjects(
            db_cp, [self.test_console_pool_2, self.test_console_pool_3],
            ignored_keys)

    def test_console_pool_get_all_by_host_type_no_results(self):
        """No matching pools yields an empty list, not an error."""
        res = db.console_pool_get_all_by_host_type(
            self.ctxt, 'cp_host', 'cp_console_type')
        self.assertEqual([], res)
class DnsdomainTestCase(test.TestCase):
    """Tests for the dnsdomain_* DB API calls."""

    def setUp(self):
        super(DnsdomainTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.domain = 'test.domain'
        self.testzone = 'testzone'
        self.project = 'fake'

    def test_dnsdomain_register_for_zone(self):
        """Zone registration stores the domain with 'private' scope."""
        db.dnsdomain_register_for_zone(self.ctxt, self.domain, self.testzone)
        stored = db.dnsdomain_get(self.ctxt, self.domain)
        self.assertEqual(stored['domain'], self.domain)
        self.assertEqual(stored['availability_zone'], self.testzone)
        self.assertEqual(stored['scope'], 'private')

    def test_dnsdomain_register_for_project(self):
        """Project registration stores the domain with 'public' scope."""
        db.dnsdomain_register_for_project(self.ctxt, self.domain, self.project)
        stored = db.dnsdomain_get(self.ctxt, self.domain)
        self.assertEqual(stored['domain'], self.domain)
        self.assertEqual(stored['project_id'], self.project)
        self.assertEqual(stored['scope'], 'public')

    def test_dnsdomain_list(self):
        """Zone- and project-registered domains both appear in the listing."""
        zone_domain = 'test.domain.one'
        project_domain = 'test.domain.two'
        db.dnsdomain_register_for_zone(self.ctxt, zone_domain, self.testzone)
        db.dnsdomain_register_for_project(self.ctxt, project_domain,
                                          self.project)
        db_list = db.dnsdomain_list(self.ctxt)
        # Sort both sides: the listing order is not part of the contract.
        self.assertEqual(sorted([zone_domain, project_domain]),
                         sorted(db_list))

    def test_dnsdomain_unregister(self):
        """Unregistering removes the domain entirely."""
        db.dnsdomain_register_for_zone(self.ctxt, self.domain, self.testzone)
        db.dnsdomain_unregister(self.ctxt, self.domain)
        self.assertIsNone(db.dnsdomain_get(self.ctxt, self.domain))
class BwUsageTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the bw_usage_* (per-instance bandwidth accounting) calls."""

    # DB-managed bookkeeping columns that no test should compare.
    _ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']

    def setUp(self):
        super(BwUsageTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # Freeze time so the implicit 'last_refreshed' timestamps written
        # by bw_usage_update are predictable and equal to 'now' below.
        self.useFixture(test.TimeOverride())

    def test_bw_usage_get_by_uuids(self):
        """Bulk fetch by uuid list returns current, per-uuid usage rows."""
        now = timeutils.utcnow()
        start_period = now - datetime.timedelta(seconds=10)
        uuid3_refreshed = now - datetime.timedelta(seconds=5)
        # NOTE: the order of this list matches insertion order below, and
        # the fake_uuid2 entry reflects the *second* update of that row.
        expected_bw_usages = [{'uuid': 'fake_uuid1',
                               'mac': 'fake_mac1',
                               'start_period': start_period,
                               'bw_in': 100,
                               'bw_out': 200,
                               'last_ctr_in': 12345,
                               'last_ctr_out': 67890,
                               'last_refreshed': now},
                              {'uuid': 'fake_uuid2',
                               'mac': 'fake_mac2',
                               'start_period': start_period,
                               'bw_in': 200,
                               'bw_out': 300,
                               'last_ctr_in': 22345,
                               'last_ctr_out': 77890,
                               'last_refreshed': now},
                              {'uuid': 'fake_uuid3',
                               'mac': 'fake_mac3',
                               'start_period': start_period,
                               'bw_in': 400,
                               'bw_out': 500,
                               'last_ctr_in': 32345,
                               'last_ctr_out': 87890,
                               'last_refreshed': uuid3_refreshed}]
        bw_usages = db.bw_usage_get_by_uuids(self.ctxt,
                ['fake_uuid1', 'fake_uuid2'], start_period)
        # No matches
        self.assertEqual(len(bw_usages), 0)
        # Add 3 entries
        db.bw_usage_update(self.ctxt, 'fake_uuid1',
                'fake_mac1', start_period,
                100, 200, 12345, 67890)
        db.bw_usage_update(self.ctxt, 'fake_uuid2',
                'fake_mac2', start_period,
                100, 200, 42, 42)
        # Test explicit refreshed time
        db.bw_usage_update(self.ctxt, 'fake_uuid3',
                'fake_mac3', start_period,
                400, 500, 32345, 87890,
                last_refreshed=uuid3_refreshed)
        # Update 2nd entry
        db.bw_usage_update(self.ctxt, 'fake_uuid2',
                'fake_mac2', start_period,
                200, 300, 22345, 77890)
        bw_usages = db.bw_usage_get_by_uuids(self.ctxt,
                ['fake_uuid1', 'fake_uuid2', 'fake_uuid3'], start_period)
        self.assertEqual(len(bw_usages), 3)
        # Relies on rows coming back in insertion order — see NOTE above.
        for i, expected in enumerate(expected_bw_usages):
            self._assertEqualObjects(bw_usages[i], expected,
                                     ignored_keys=self._ignored_keys)

    def test_bw_usage_get(self):
        """Single-row fetch: None before any update, values after."""
        now = timeutils.utcnow()
        start_period = now - datetime.timedelta(seconds=10)
        expected_bw_usage = {'uuid': 'fake_uuid1',
                             'mac': 'fake_mac1',
                             'start_period': start_period,
                             'bw_in': 100,
                             'bw_out': 200,
                             'last_ctr_in': 12345,
                             'last_ctr_out': 67890,
                             'last_refreshed': now}
        bw_usage = db.bw_usage_get(self.ctxt, 'fake_uuid1', start_period,
                                   'fake_mac1')
        self.assertIsNone(bw_usage)
        db.bw_usage_update(self.ctxt, 'fake_uuid1',
                'fake_mac1', start_period,
                100, 200, 12345, 67890)
        bw_usage = db.bw_usage_get(self.ctxt, 'fake_uuid1', start_period,
                                   'fake_mac1')
        self._assertEqualObjects(bw_usage, expected_bw_usage,
                                 ignored_keys=self._ignored_keys)
class Ec2TestCase(test.TestCase):
    """Tests for the EC2 <-> uuid id-mapping DB API calls."""

    def setUp(self):
        super(Ec2TestCase, self).setUp()
        self.ctxt = context.RequestContext('fake_user', 'fake_project')

    def test_ec2_ids_not_found_are_printable(self):
        """NotFound errors must embed the offending id in their message."""
        def check_exc_format(method, value):
            try:
                method(self.ctxt, value)
            except exception.NotFound as exc:
                # assertIn instead of assertTrue(a in b): same check,
                # much better failure message.
                self.assertIn(unicode(value), unicode(exc))
            else:
                # The original version silently passed when the lookup
                # did NOT raise; fail explicitly instead.
                self.fail('%s did not raise NotFound for %r'
                          % (method.__name__, value))
        check_exc_format(db.get_ec2_volume_id_by_uuid, 'fake')
        check_exc_format(db.get_volume_uuid_by_ec2_id, 123456)
        check_exc_format(db.get_ec2_snapshot_id_by_uuid, 'fake')
        check_exc_format(db.get_snapshot_uuid_by_ec2_id, 123456)
        check_exc_format(db.get_ec2_instance_id_by_uuid, 'fake')
        check_exc_format(db.get_instance_uuid_by_ec2_id, 123456)

    def test_ec2_volume_create(self):
        """Creating a mapping assigns an id and stores the uuid."""
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(vol['id'])
        self.assertEqual(vol['uuid'], 'fake-uuid')

    def test_get_ec2_volume_id_by_uuid(self):
        """uuid -> EC2 id direction of the volume mapping."""
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        vol_id = db.get_ec2_volume_id_by_uuid(self.ctxt, 'fake-uuid')
        self.assertEqual(vol['id'], vol_id)

    def test_get_volume_uuid_by_ec2_id(self):
        """EC2 id -> uuid direction of the volume mapping."""
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        vol_uuid = db.get_volume_uuid_by_ec2_id(self.ctxt, vol['id'])
        self.assertEqual(vol_uuid, 'fake-uuid')

    def test_get_ec2_volume_id_by_uuid_not_found(self):
        self.assertRaises(exception.VolumeNotFound,
                          db.get_ec2_volume_id_by_uuid,
                          self.ctxt, 'uuid-not-present')

    def test_get_volume_uuid_by_ec2_id_not_found(self):
        self.assertRaises(exception.VolumeNotFound,
                          db.get_volume_uuid_by_ec2_id,
                          self.ctxt, 100500)

    def test_ec2_snapshot_create(self):
        """Creating a mapping assigns an id and stores the uuid."""
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(snap['id'])
        self.assertEqual(snap['uuid'], 'fake-uuid')

    def test_get_ec2_snapshot_id_by_uuid(self):
        """uuid -> EC2 id direction of the snapshot mapping."""
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        snap_id = db.get_ec2_snapshot_id_by_uuid(self.ctxt, 'fake-uuid')
        self.assertEqual(snap['id'], snap_id)

    def test_get_snapshot_uuid_by_ec2_id(self):
        """EC2 id -> uuid direction of the snapshot mapping."""
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        snap_uuid = db.get_snapshot_uuid_by_ec2_id(self.ctxt, snap['id'])
        self.assertEqual(snap_uuid, 'fake-uuid')

    def test_get_ec2_snapshot_id_by_uuid_not_found(self):
        self.assertRaises(exception.SnapshotNotFound,
                          db.get_ec2_snapshot_id_by_uuid,
                          self.ctxt, 'uuid-not-present')

    def test_get_snapshot_uuid_by_ec2_id_not_found(self):
        self.assertRaises(exception.SnapshotNotFound,
                          db.get_snapshot_uuid_by_ec2_id,
                          self.ctxt, 100500)

    def test_ec2_instance_create(self):
        """Creating a mapping assigns an id and stores the uuid."""
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(inst['id'])
        self.assertEqual(inst['uuid'], 'fake-uuid')

    def test_get_ec2_instance_id_by_uuid(self):
        """uuid -> EC2 id direction of the instance mapping."""
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        inst_id = db.get_ec2_instance_id_by_uuid(self.ctxt, 'fake-uuid')
        self.assertEqual(inst['id'], inst_id)

    def test_get_instance_uuid_by_ec2_id(self):
        """EC2 id -> uuid direction of the instance mapping."""
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        inst_uuid = db.get_instance_uuid_by_ec2_id(self.ctxt, inst['id'])
        self.assertEqual(inst_uuid, 'fake-uuid')

    def test_get_ec2_instance_id_by_uuid_not_found(self):
        self.assertRaises(exception.InstanceNotFound,
                          db.get_ec2_instance_id_by_uuid,
                          self.ctxt, 'uuid-not-present')

    def test_get_instance_uuid_by_ec2_id_not_found(self):
        self.assertRaises(exception.InstanceNotFound,
                          db.get_instance_uuid_by_ec2_id,
                          self.ctxt, 100500)
class ArchiveTestCase(test.TestCase):
    """Tests for archiving soft-deleted rows into the shadow_* tables."""

    def setUp(self):
        super(ArchiveTestCase, self).setUp()
        self.context = context.get_admin_context()
        self.engine = get_engine()
        self.conn = self.engine.connect()
        # Raw Table objects for the main tables exercised here and their
        # shadow counterparts.
        self.instance_id_mappings = db_utils.get_table(self.engine,
                                                       "instance_id_mappings")
        self.shadow_instance_id_mappings = db_utils.get_table(self.engine,
                                                "shadow_instance_id_mappings")
        self.dns_domains = db_utils.get_table(self.engine, "dns_domains")
        self.shadow_dns_domains = db_utils.get_table(self.engine,
                                                     "shadow_dns_domains")
        self.consoles = db_utils.get_table(self.engine, "consoles")
        self.console_pools = db_utils.get_table(self.engine, "console_pools")
        self.shadow_consoles = db_utils.get_table(self.engine,
                                                  "shadow_consoles")
        self.shadow_console_pools = db_utils.get_table(self.engine,
                                                       "shadow_console_pools")
        self.instances = db_utils.get_table(self.engine, "instances")
        self.shadow_instances = db_utils.get_table(self.engine,
                                                   "shadow_instances")
        # Marker values inserted by the tests; tearDown deletes rows that
        # carry them (also reused as dns 'domain' values below).
        self.uuidstrs = []
        for unused in range(6):
            self.uuidstrs.append(stdlib_uuid.uuid4().hex)
        self.ids = []
        # Which tables each cleanup pass in tearDown must sweep, keyed by
        # the column used to identify test rows (id / uuid / domain).
        self.id_tablenames_to_cleanup = set(["console_pools", "consoles"])
        self.uuid_tablenames_to_cleanup = set(["instance_id_mappings",
                                               "instances"])
        self.domain_tablenames_to_cleanup = set(["dns_domains"])

    def tearDown(self):
        super(ArchiveTestCase, self).tearDown()
        # Remove every row this test created from both the main tables and
        # their shadow tables, matching on id, uuid or domain respectively.
        for tablename in self.id_tablenames_to_cleanup:
            for name in [tablename, "shadow_" + tablename]:
                table = db_utils.get_table(self.engine, name)
                del_statement = table.delete(table.c.id.in_(self.ids))
                self.conn.execute(del_statement)
        for tablename in self.uuid_tablenames_to_cleanup:
            for name in [tablename, "shadow_" + tablename]:
                table = db_utils.get_table(self.engine, name)
                del_statement = table.delete(table.c.uuid.in_(self.uuidstrs))
                self.conn.execute(del_statement)
        for tablename in self.domain_tablenames_to_cleanup:
            for name in [tablename, "shadow_" + tablename]:
                table = db_utils.get_table(self.engine, name)
                del_statement = table.delete(table.c.domain.in_(self.uuidstrs))
                self.conn.execute(del_statement)

    def test_shadow_tables(self):
        """Every main table must have a schema-compatible shadow table."""
        metadata = MetaData(bind=self.engine)
        metadata.reflect()
        for table_name in metadata.tables:
            if table_name.startswith("shadow_"):
                # A shadow table itself only needs a matching main table.
                self.assertIn(table_name[7:], metadata.tables)
                continue
            self.assertTrue(db_utils.check_shadow_table(self.engine,
                                                        table_name))

    def test_archive_deleted_rows(self):
        """max_rows archives at most that many deleted rows per call."""
        # Add 6 rows to table
        for uuidstr in self.uuidstrs:
            ins_stmt = self.instance_id_mappings.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt)
        # Set 4 to deleted
        update_statement = self.instance_id_mappings.update().\
                where(self.instance_id_mappings.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        qiim = select([self.instance_id_mappings]).where(self.
                                instance_id_mappings.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 6 in main
        self.assertEqual(len(rows), 6)
        qsiim = select([self.shadow_instance_id_mappings]).\
                        where(self.shadow_instance_id_mappings.c.uuid.in_(
                                                                self.uuidstrs))
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 0 in shadow
        self.assertEqual(len(rows), 0)
        # Archive 2 rows
        db.archive_deleted_rows(self.context, max_rows=2)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 4 left in main
        self.assertEqual(len(rows), 4)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 2 in shadow
        self.assertEqual(len(rows), 2)
        # Archive 2 more rows
        db.archive_deleted_rows(self.context, max_rows=2)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 2 left in main
        self.assertEqual(len(rows), 2)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 4 in shadow
        self.assertEqual(len(rows), 4)
        # Try to archive more, but there are no deleted rows left.
        db.archive_deleted_rows(self.context, max_rows=2)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we still have 2 left in main
        self.assertEqual(len(rows), 2)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we still have 4 in shadow
        self.assertEqual(len(rows), 4)

    def test_archive_deleted_rows_for_every_uuid_table(self):
        """Run the per-table archive check against every model's table."""
        tablenames = []
        for model_class in models.__dict__.itervalues():
            if hasattr(model_class, "__tablename__"):
                tablenames.append(model_class.__tablename__)
        tablenames.sort()
        for tablename in tablenames:
            ret = self._test_archive_deleted_rows_for_one_uuid_table(tablename)
            if ret == 0:
                # Rows were inserted, so tearDown must clean this table too.
                self.uuid_tablenames_to_cleanup.add(tablename)

    def _test_archive_deleted_rows_for_one_uuid_table(self, tablename):
        """Exercise archive_deleted_rows_for_table on one uuid-keyed table.

        :returns: 0 on success, 1 if no uuid column, 2 if insert failed
        """
        main_table = db_utils.get_table(self.engine, tablename)
        if not hasattr(main_table.c, "uuid"):
            # Not a uuid table, so skip it.
            return 1
        shadow_table = db_utils.get_table(self.engine, "shadow_" + tablename)
        # Add 6 rows to table
        for uuidstr in self.uuidstrs:
            ins_stmt = main_table.insert().values(uuid=uuidstr)
            try:
                self.conn.execute(ins_stmt)
            except IntegrityError:
                # This table has constraints that require a table-specific
                # insert, so skip it.
                return 2
        # Set 4 to deleted
        update_statement = main_table.update().\
                where(main_table.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        qmt = select([main_table]).where(main_table.c.uuid.in_(
                                         self.uuidstrs))
        rows = self.conn.execute(qmt).fetchall()
        # Verify we have 6 in main
        self.assertEqual(len(rows), 6)
        qst = select([shadow_table]).\
                where(shadow_table.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qst).fetchall()
        # Verify we have 0 in shadow
        self.assertEqual(len(rows), 0)
        # Archive 2 rows
        db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
        # Verify we have 4 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 4)
        # Verify we have 2 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 2)
        # Archive 2 more rows
        db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
        # Verify we have 2 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 2)
        # Verify we have 4 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 4)
        # Try to archive more, but there are no deleted rows left.
        db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
        # Verify we still have 2 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 2)
        # Verify we still have 4 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 4)
        return 0

    def test_archive_deleted_rows_no_id_column(self):
        """Tables without an 'id' column (dns_domains) archive correctly."""
        uuidstr0 = self.uuidstrs[0]
        ins_stmt = self.dns_domains.insert().values(domain=uuidstr0)
        self.conn.execute(ins_stmt)
        update_statement = self.dns_domains.update().\
                           where(self.dns_domains.c.domain == uuidstr0).\
                           values(deleted=True)
        self.conn.execute(update_statement)
        qdd = select([self.dns_domains], self.dns_domains.c.domain ==
                                         uuidstr0)
        rows = self.conn.execute(qdd).fetchall()
        self.assertEqual(len(rows), 1)
        qsdd = select([self.shadow_dns_domains],
                      self.shadow_dns_domains.c.domain == uuidstr0)
        rows = self.conn.execute(qsdd).fetchall()
        self.assertEqual(len(rows), 0)
        db.archive_deleted_rows(self.context, max_rows=1)
        # The row must have moved from the main to the shadow table.
        rows = self.conn.execute(qdd).fetchall()
        self.assertEqual(len(rows), 0)
        rows = self.conn.execute(qsdd).fetchall()
        self.assertEqual(len(rows), 1)

    def test_archive_deleted_rows_fk_constraint(self):
        """Archiving respects FK order: children before their parents."""
        # consoles.pool_id depends on console_pools.id
        # SQLite doesn't enforce foreign key constraints without a pragma.
        dialect = self.engine.url.get_dialect()
        if dialect == sqlite.dialect:
            # We're seeing issues with foreign key support in SQLite 3.6.20
            # SQLAlchemy doesn't support it at all with < SQLite 3.6.19
            # It works fine in SQLite 3.7.
            # So return early to skip this test if running SQLite < 3.7
            import sqlite3
            tup = sqlite3.sqlite_version_info
            if tup[0] < 3 or (tup[0] == 3 and tup[1] < 7):
                self.skipTest(
                    'sqlite version too old for reliable SQLA foreign_keys')
            self.conn.execute("PRAGMA foreign_keys = ON")
        ins_stmt = self.console_pools.insert().values(deleted=1)
        result = self.conn.execute(ins_stmt)
        id1 = result.inserted_primary_key[0]
        self.ids.append(id1)
        ins_stmt = self.consoles.insert().values(deleted=1,
                                                 pool_id=id1)
        result = self.conn.execute(ins_stmt)
        id2 = result.inserted_primary_key[0]
        self.ids.append(id2)
        # The first try to archive console_pools should fail, due to FK.
        num = db.archive_deleted_rows_for_table(self.context, "console_pools")
        self.assertEqual(num, 0)
        # Then archiving consoles should work.
        num = db.archive_deleted_rows_for_table(self.context, "consoles")
        self.assertEqual(num, 1)
        # Then archiving console_pools should work.
        num = db.archive_deleted_rows_for_table(self.context, "console_pools")
        self.assertEqual(num, 1)

    def test_archive_deleted_rows_2_tables(self):
        """max_rows is a combined budget across multiple tables."""
        # Add 6 rows to each table
        for uuidstr in self.uuidstrs:
            ins_stmt = self.instance_id_mappings.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt)
            ins_stmt2 = self.instances.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt2)
        # Set 4 of each to deleted
        update_statement = self.instance_id_mappings.update().\
                where(self.instance_id_mappings.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        update_statement2 = self.instances.update().\
                where(self.instances.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement2)
        # Verify we have 6 in each main table
        qiim = select([self.instance_id_mappings]).where(
                         self.instance_id_mappings.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qiim).fetchall()
        self.assertEqual(len(rows), 6)
        qi = select([self.instances]).where(self.instances.c.uuid.in_(
                                            self.uuidstrs))
        rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(rows), 6)
        # Verify we have 0 in each shadow table
        qsiim = select([self.shadow_instance_id_mappings]).\
                        where(self.shadow_instance_id_mappings.c.uuid.in_(
                                                                self.uuidstrs))
        rows = self.conn.execute(qsiim).fetchall()
        self.assertEqual(len(rows), 0)
        qsi = select([self.shadow_instances]).\
                        where(self.shadow_instances.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(rows), 0)
        # Archive 7 rows, which should be 4 in one table and 3 in the other.
        db.archive_deleted_rows(self.context, max_rows=7)
        # Verify we have 5 left in the two main tables combined
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 5)
        # Verify we have 7 in the two shadow tables combined.
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 7)
        # Archive the remaining deleted rows.
        db.archive_deleted_rows(self.context, max_rows=1)
        # Verify we have 4 total left in both main tables.
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 4)
        # Verify we have 8 in shadow
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 8)
        # Try to archive more, but there are no deleted rows left.
        db.archive_deleted_rows(self.context, max_rows=500)
        # Verify we have 4 total left in both main tables.
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 4)
        # Verify we have 8 in shadow
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 8)
class InstanceGroupDBApiTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the instance_group_* DB API calls."""

    def setUp(self):
        super(InstanceGroupDBApiTestCase, self).setUp()
        self.user_id = 'fake_user'
        self.project_id = 'fake_project'
        self.context = context.RequestContext(self.user_id, self.project_id)

    def _get_default_values(self):
        """Return a minimal valid value dict for instance_group_create."""
        return {'name': 'fake_name',
                'user_id': self.user_id,
                'project_id': self.project_id}

    def _create_instance_group(self, context, values, policies=None,
                               metadata=None, members=None):
        """Thin wrapper so subclasses can create groups with extras."""
        return db.instance_group_create(context, values, policies=policies,
                                        metadata=metadata, members=members)

    def test_instance_group_create_no_key(self):
        """Without an explicit uuid, one is generated."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self.assertTrue(uuidutils.is_uuid_like(result['uuid']))

    def test_instance_group_create_with_key(self):
        """A caller-supplied uuid is stored verbatim."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)

    def test_instance_group_create_with_same_key(self):
        """Reusing a uuid raises InstanceGroupIdExists."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        # The first create succeeds; its return value is not needed.
        self._create_instance_group(self.context, values)
        self.assertRaises(exception.InstanceGroupIdExists,
                          self._create_instance_group, self.context, values)

    def test_instance_group_get(self):
        """A created group round-trips through instance_group_get."""
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        result2 = db.instance_group_get(self.context, result1['uuid'])
        self._assertEqualObjects(result1, result2)

    def test_instance_group_update_simple(self):
        """Updating scalar fields keeps the uuid and applies the values."""
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        values = {'name': 'new_name', 'user_id': 'new_user',
                  'project_id': 'new_project'}
        db.instance_group_update(self.context, result1['uuid'],
                                 values)
        result2 = db.instance_group_get(self.context, result1['uuid'])
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(result1['uuid'], result2['uuid'])
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result2, values, ignored_keys)

    def test_instance_group_delete(self):
        """Deleting twice raises InstanceGroupNotFound the second time."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        db.instance_group_delete(self.context, result['uuid'])
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_delete, self.context,
                          result['uuid'])

    def test_instance_group_get_nonexistent(self):
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_get,
                          self.context,
                          'nonexistent')

    def test_instance_group_delete_nonexistent(self):
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_delete,
                          self.context,
                          'nonexistent')

    def test_instance_group_get_all(self):
        """instance_group_get_all grows as groups are created."""
        groups = db.instance_group_get_all(self.context)
        self.assertEqual(0, len(groups))
        value = self._get_default_values()
        result1 = self._create_instance_group(self.context, value)
        groups = db.instance_group_get_all(self.context)
        self.assertEqual(1, len(groups))
        value = self._get_default_values()
        result2 = self._create_instance_group(self.context, value)
        groups = db.instance_group_get_all(self.context)
        results = [result1, result2]
        self._assertEqualListsOfObjects(results, groups)

    def test_instance_group_get_all_by_project_id(self):
        """Listing by project id filters to that project's groups."""
        groups = db.instance_group_get_all_by_project_id(self.context,
                                                         'invalid_project_id')
        self.assertEqual(0, len(groups))
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        groups = db.instance_group_get_all_by_project_id(self.context,
                                                         'fake_project')
        self.assertEqual(1, len(groups))
        values = self._get_default_values()
        values['project_id'] = 'new_project_id'
        result2 = self._create_instance_group(self.context, values)
        groups = db.instance_group_get_all(self.context)
        results = [result1, result2]
        self._assertEqualListsOfObjects(results, groups)
        projects = [{'name': 'fake_project', 'value': [result1]},
                    {'name': 'new_project_id', 'value': [result2]}]
        for project in projects:
            groups = db.instance_group_get_all_by_project_id(self.context,
                                                             project['name'])
            self._assertEqualListsOfObjects(project['value'], groups)

    def test_instance_group_update(self):
        """Updates can replace name, metadata, members and policies."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
        # group_id rather than 'id', which shadows the builtin.
        group_id = result['uuid']
        values = self._get_default_values()
        values['name'] = 'new_fake_name'
        db.instance_group_update(self.context, group_id, values)
        result = db.instance_group_get(self.context, group_id)
        self.assertEqual(result['name'], 'new_fake_name')
        # update metadata
        values = self._get_default_values()
        metadataInput = {'key11': 'value1',
                         'key12': 'value2'}
        values['metadata'] = metadataInput
        db.instance_group_update(self.context, group_id, values)
        result = db.instance_group_get(self.context, group_id)
        metadata = result['metadetails']
        self._assertEqualObjects(metadata, metadataInput)
        # update members
        values = self._get_default_values()
        members = ['instance_id1', 'instance_id2']
        values['members'] = members
        db.instance_group_update(self.context, group_id, values)
        result = db.instance_group_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(result['members'], members)
        # update policies
        values = self._get_default_values()
        policies = ['policy1', 'policy2']
        values['policies'] = policies
        db.instance_group_update(self.context, group_id, values)
        result = db.instance_group_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(result['policies'], policies)
        # test invalid ID
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_update, self.context,
                          'invalid_id', values)
class InstanceGroupMetadataDBApiTestCase(InstanceGroupDBApiTestCase):
    """Tests for instance group metadata handling.

    NOTE(review): test_instance_group_update and test_instance_group_delete
    below override the parent class's tests of the same names, so the
    parent versions do not run for this class — confirm that is intended.
    """

    def test_instance_group_metadata_on_create(self):
        """Metadata passed at create time is stored as 'metadetails'."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        metadata = {'key11': 'value1',
                    'key12': 'value2'}
        result = self._create_instance_group(self.context, values,
                                             metadata=metadata)
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self._assertEqualObjects(metadata, result['metadetails'])

    def test_instance_group_metadata_add(self):
        """Metadata added after creation is retrievable."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        # group_id rather than 'id', which shadows the builtin.
        group_id = result['uuid']
        metadata = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, {})
        metadata = {'key1': 'value1',
                    'key2': 'value2'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        metadata2 = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, metadata2)

    def test_instance_group_update(self):
        """Re-adding metadata merges new keys with the existing ones."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        metadata = {'key1': 'value1',
                    'key2': 'value2'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        metadata2 = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, metadata2)
        # check add with existing keys
        metadata = {'key1': 'value1',
                    'key2': 'value2',
                    'key3': 'value3'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        metadata3 = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, metadata3)

    def test_instance_group_delete(self):
        """Metadata keys can be deleted individually."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        metadata = {'key1': 'value1',
                    'key2': 'value2',
                    'key3': 'value3'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        metadata3 = db.instance_group_metadata_get(self.context, group_id)
        self._assertEqualObjects(metadata, metadata3)
        db.instance_group_metadata_delete(self.context, group_id, 'key1')
        metadata = db.instance_group_metadata_get(self.context, group_id)
        # assertNotIn instead of assertTrue(k not in d): same check,
        # clearer failure message.
        self.assertNotIn('key1', metadata)
        db.instance_group_metadata_delete(self.context, group_id, 'key2')
        metadata = db.instance_group_metadata_get(self.context, group_id)
        self.assertNotIn('key2', metadata)

    def test_instance_group_metadata_invalid_ids(self):
        """Metadata calls on unknown groups/keys raise the right errors."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_metadata_get,
                          self.context, 'invalid')
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_metadata_delete, self.context,
                          'invalidid', 'key1')
        metadata = {'key1': 'value1',
                    'key2': 'value2'}
        db.instance_group_metadata_add(self.context, group_id, metadata)
        self.assertRaises(exception.InstanceGroupMetadataNotFound,
                          db.instance_group_metadata_delete,
                          self.context, group_id, 'invalidkey')
class InstanceGroupMembersDBApiTestCase(InstanceGroupDBApiTestCase):
    """Tests for the instance-group members DB API."""

    def test_instance_group_members_on_create(self):
        """Members passed at create time are stored and returned."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        members = ['instance_id1', 'instance_id2']
        result = self._create_instance_group(self.context, values,
                                             members=members)
        # Columns generated by the DB layer are not compared.
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self._assertEqualListsOfPrimitivesAsSets(result['members'], members)

    def test_instance_group_members_add(self):
        """Members can be added to an initially-empty group."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        members = db.instance_group_members_get(self.context, group_id)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(members, [])
        members2 = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, group_id, members2)
        members = db.instance_group_members_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(members, members2)

    def test_instance_group_members_update(self):
        """Re-adding existing members is idempotent; new ones are added."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        members2 = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, group_id, members2)
        members = db.instance_group_members_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(members, members2)
        # check add with existing keys
        members3 = ['instance_id1', 'instance_id2', 'instance_id3']
        db.instance_group_members_add(self.context, group_id, members3)
        members = db.instance_group_members_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(members, members3)

    def test_instance_group_members_delete(self):
        """Members can be deleted one at a time."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        members3 = ['instance_id1', 'instance_id2', 'instance_id3']
        db.instance_group_members_add(self.context, group_id, members3)
        members = db.instance_group_members_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(members, members3)
        # Iterate over a copy since members3 is mutated inside the loop.
        for instance_id in members3[:]:
            db.instance_group_member_delete(self.context, group_id,
                                            instance_id)
            members3.remove(instance_id)
            members = db.instance_group_members_get(self.context, group_id)
            self._assertEqualListsOfPrimitivesAsSets(members, members3)

    def test_instance_group_members_invalid_ids(self):
        """Unknown group/member ids raise the appropriate NotFound."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_members_get,
                          self.context, 'invalid')
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_member_delete, self.context,
                          'invalidid', 'instance_id1')
        members = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, group_id, members)
        self.assertRaises(exception.InstanceGroupMemberNotFound,
                          db.instance_group_member_delete,
                          self.context, group_id, 'invalid_id')
class InstanceGroupPoliciesDBApiTestCase(InstanceGroupDBApiTestCase):
    """Tests for the instance-group policies DB API."""

    def test_instance_group_policies_on_create(self):
        """Policies passed at create time are stored and returned."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        policies = ['policy1', 'policy2']
        result = self._create_instance_group(self.context, values,
                                             policies=policies)
        # Columns generated by the DB layer are not compared.
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self._assertEqualListsOfPrimitivesAsSets(result['policies'], policies)

    def test_instance_group_policies_add(self):
        """Policies can be added to an initially-empty group."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        policies = db.instance_group_policies_get(self.context, group_id)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(policies, [])
        policies2 = ['policy1', 'policy2']
        db.instance_group_policies_add(self.context, group_id, policies2)
        policies = db.instance_group_policies_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(policies, policies2)

    def test_instance_group_policies_update(self):
        """Re-adding existing policies is idempotent; new ones are added."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        policies2 = ['policy1', 'policy2']
        db.instance_group_policies_add(self.context, group_id, policies2)
        policies = db.instance_group_policies_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(policies, policies2)
        policies3 = ['policy1', 'policy2', 'policy3']
        db.instance_group_policies_add(self.context, group_id, policies3)
        policies = db.instance_group_policies_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(policies, policies3)

    def test_instance_group_policies_delete(self):
        """Policies can be deleted one at a time."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        policies3 = ['policy1', 'policy2', 'policy3']
        db.instance_group_policies_add(self.context, group_id, policies3)
        policies = db.instance_group_policies_get(self.context, group_id)
        self._assertEqualListsOfPrimitivesAsSets(policies, policies3)
        # Iterate over a copy since policies3 is mutated inside the loop.
        for policy in policies3[:]:
            db.instance_group_policy_delete(self.context, group_id, policy)
            policies3.remove(policy)
            policies = db.instance_group_policies_get(self.context, group_id)
            self._assertEqualListsOfPrimitivesAsSets(policies, policies3)

    def test_instance_group_policies_invalid_ids(self):
        """Unknown group/policy ids raise the appropriate NotFound."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        group_id = result['uuid']
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_policies_get,
                          self.context, 'invalid')
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_policy_delete, self.context,
                          'invalidid', 'policy1')
        policies = ['policy1', 'policy2']
        db.instance_group_policies_add(self.context, group_id, policies)
        self.assertRaises(exception.InstanceGroupPolicyNotFound,
                          db.instance_group_policy_delete,
                          self.context, group_id, 'invalid_policy')
class PciDeviceDBApiTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the PCI-device DB API (get/update/destroy + admin checks)."""

    def setUp(self):
        super(PciDeviceDBApiTestCase, self).setUp()
        self.user_id = 'fake_user'
        self.project_id = 'fake_project'
        self.context = context.RequestContext(self.user_id, self.project_id)
        self.admin_context = context.get_admin_context()
        # Columns generated by the DB layer, ignored in object comparisons.
        self.ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                             'created_at']

    def _get_fake_pci_devs(self):
        """Return two fake PCI device dicts living on the same compute node."""
        return {'id': 3353,
                'compute_node_id': 1,
                'address': '0000:0f:08:07',
                'vendor_id': '8086',
                'product_id': '1520',
                'dev_type': 'type-VF',
                'dev_id': 'pci_0000:0f:08:07',
                'extra_info': None,
                'label': 'label_8086_1520',
                'status': 'available',
                'instance_uuid': '00000000-0000-0000-0000-000000000010',
                }, {'id': 3356,
                    'compute_node_id': 1,
                    'address': '0000:0f:03:07',
                    'vendor_id': '8083',
                    'product_id': '1523',
                    'dev_type': 'type-VF',
                    'dev_id': 'pci_0000:0f:08:07',
                    'extra_info': None,
                    'label': 'label_8086_1520',
                    'status': 'available',
                    'instance_uuid': '00000000-0000-0000-0000-000000000010',
                    }

    def _create_fake_pci_devs(self):
        """Persist both fake devices (pci_device_update acts as an upsert)."""
        v1, v2 = self._get_fake_pci_devs()
        db.pci_device_update(self.admin_context, v1['compute_node_id'],
                             v1['address'], v1)
        db.pci_device_update(self.admin_context, v2['compute_node_id'],
                             v2['address'], v2)
        return (v1, v2)

    def test_pci_device_get_by_addr(self):
        v1, v2 = self._create_fake_pci_devs()
        result = db.pci_device_get_by_addr(self.admin_context, 1,
                                           '0000:0f:08:07')
        self._assertEqualObjects(v1, result, self.ignored_keys)

    def test_pci_device_get_by_addr_not_found(self):
        self._create_fake_pci_devs()
        self.assertRaises(exception.PciDeviceNotFound,
                          db.pci_device_get_by_addr, self.admin_context,
                          1, '0000:0f:08:09')

    def test_pci_device_get_by_addr_low_priv(self):
        self._create_fake_pci_devs()
        self.assertRaises(exception.AdminRequired,
                          db.pci_device_get_by_addr,
                          self.context, 1, '0000:0f:08:07')

    def test_pci_device_get_by_id(self):
        v1, v2 = self._create_fake_pci_devs()
        result = db.pci_device_get_by_id(self.admin_context, 3353)
        # NOTE: a duplicate local ignored_keys list used to be defined here
        # but was never used; self.ignored_keys is the shared source of truth.
        self._assertEqualObjects(v1, result, self.ignored_keys)

    def test_pci_device_get_by_id_not_found(self):
        self._create_fake_pci_devs()
        self.assertRaises(exception.PciDeviceNotFoundById,
                          db.pci_device_get_by_id,
                          self.admin_context, 3354)

    def test_pci_device_get_by_id_low_priv(self):
        self._create_fake_pci_devs()
        # The id value is irrelevant here: the admin check fails first.
        self.assertRaises(exception.AdminRequired,
                          db.pci_device_get_by_id,
                          self.context, 3553)

    def test_pci_device_get_all_by_node(self):
        v1, v2 = self._create_fake_pci_devs()
        results = db.pci_device_get_all_by_node(self.admin_context, 1)
        self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)

    def test_pci_device_get_all_by_node_empty(self):
        v1, v2 = self._get_fake_pci_devs()
        results = db.pci_device_get_all_by_node(self.admin_context, 9)
        self.assertEqual(len(results), 0)

    def test_pci_device_get_all_by_node_low_priv(self):
        self._create_fake_pci_devs()
        self.assertRaises(exception.AdminRequired,
                          db.pci_device_get_all_by_node,
                          self.context, 1)

    def test_pci_device_get_by_instance_uuid(self):
        v1, v2 = self._get_fake_pci_devs()
        v1['status'] = 'allocated'
        v2['status'] = 'allocated'
        db.pci_device_update(self.admin_context, v1['compute_node_id'],
                             v1['address'], v1)
        db.pci_device_update(self.admin_context, v2['compute_node_id'],
                             v2['address'], v2)
        results = db.pci_device_get_all_by_instance_uuid(
            self.context,
            '00000000-0000-0000-0000-000000000010')
        self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)

    def test_pci_device_get_by_instance_uuid_check_status(self):
        # Only devices with status 'allocated' are returned; the claimed
        # device (v2) must be filtered out.
        v1, v2 = self._get_fake_pci_devs()
        v1['status'] = 'allocated'
        v2['status'] = 'claimed'
        db.pci_device_update(self.admin_context, v1['compute_node_id'],
                             v1['address'], v1)
        db.pci_device_update(self.admin_context, v2['compute_node_id'],
                             v2['address'], v2)
        results = db.pci_device_get_all_by_instance_uuid(
            self.context,
            '00000000-0000-0000-0000-000000000010')
        self._assertEqualListsOfObjects(results, [v1], self.ignored_keys)

    def test_pci_device_update(self):
        v1, v2 = self._get_fake_pci_devs()
        v1['status'] = 'allocated'
        db.pci_device_update(self.admin_context, v1['compute_node_id'],
                             v1['address'], v1)
        result = db.pci_device_get_by_addr(
            self.admin_context, 1, '0000:0f:08:07')
        self._assertEqualObjects(v1, result, self.ignored_keys)
        # A second update on the same address must overwrite, not insert.
        v1['status'] = 'claimed'
        db.pci_device_update(self.admin_context, v1['compute_node_id'],
                             v1['address'], v1)
        result = db.pci_device_get_by_addr(
            self.admin_context, 1, '0000:0f:08:07')
        self._assertEqualObjects(v1, result, self.ignored_keys)

    def test_pci_device_update_low_priv(self):
        v1, v2 = self._get_fake_pci_devs()
        self.assertRaises(exception.AdminRequired,
                          db.pci_device_update, self.context,
                          v1['compute_node_id'], v1['address'], v1)

    def test_pci_device_destroy(self):
        v1, v2 = self._create_fake_pci_devs()
        results = db.pci_device_get_all_by_node(self.admin_context, 1)
        self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)
        db.pci_device_destroy(self.admin_context, v1['compute_node_id'],
                              v1['address'])
        results = db.pci_device_get_all_by_node(self.admin_context, 1)
        self._assertEqualListsOfObjects(results, [v2], self.ignored_keys)

    def test_pci_device_destroy_exception(self):
        # Only v1 is persisted; destroying the never-created v2 must raise.
        v1, v2 = self._get_fake_pci_devs()
        db.pci_device_update(self.admin_context, v1['compute_node_id'],
                             v1['address'], v1)
        results = db.pci_device_get_all_by_node(self.admin_context, 1)
        self._assertEqualListsOfObjects(results, [v1], self.ignored_keys)
        self.assertRaises(exception.PciDeviceNotFound,
                          db.pci_device_destroy,
                          self.admin_context,
                          v2['compute_node_id'],
                          v2['address'])
class S3ImageTestCase(test.TestCase): |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""
Tests for django.core.servers.
"""
from __future__ import unicode_literals
import os
import socket
try:
from urllib.request import urlopen, HTTPError
except ImportError: # Python 2
from urllib2 import urlopen, HTTPError
from django.core.exceptions import ImproperlyConfigured
from django.test import LiveServerTestCase
from django.test.utils import override_settings
from django.utils.http import urlencode
from django.utils._os import upath
from .models import Person
# Directory containing this test module; media/static fixtures live below it.
TEST_ROOT = os.path.dirname(upath(__file__))

# Settings applied by LiveServerBase.setUpClass() via override_settings(),
# so the live server thread serves files from the test directory.
TEST_SETTINGS = {
    'MEDIA_URL': '/media/',
    'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'),
    'STATIC_URL': '/static/',
    'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'),
}
class LiveServerBase(LiveServerTestCase):
    """Common URLconf, fixtures and settings for the live-server tests."""

    urls = 'regressiontests.servers.urls'
    fixtures = ['testdata.json']

    @classmethod
    def setUpClass(cls):
        # Override settings before the parent class starts the live server
        # thread, so the thread sees the test MEDIA/STATIC configuration.
        cls.settings_override = override_settings(**TEST_SETTINGS)
        cls.settings_override.enable()
        super(LiveServerBase, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        # Restore original settings
        cls.settings_override.disable()
        super(LiveServerBase, cls).tearDownClass()

    def urlopen(self, url):
        # Convenience wrapper: fetch *url* relative to the live server root.
        return urlopen(self.live_server_url + url)
class LiveServerAddress(LiveServerBase):
    """
    Ensure that the address set in the environment variable is valid.
    Refs #2879.
    """

    @classmethod
    def setUpClass(cls):
        # Backup original environment variable so it can be restored below.
        address_predefined = 'DJANGO_LIVE_TEST_SERVER_ADDRESS' in os.environ
        old_address = os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS')
        # Just the host is not accepted
        cls.raises_exception('localhost', ImproperlyConfigured)
        # The host must be valid
        cls.raises_exception('blahblahblah:8081', socket.error)
        # The list of ports must be in a valid format
        cls.raises_exception('localhost:8081,', ImproperlyConfigured)
        cls.raises_exception('localhost:8081,blah', ImproperlyConfigured)
        cls.raises_exception('localhost:8081-', ImproperlyConfigured)
        cls.raises_exception('localhost:8081-blah', ImproperlyConfigured)
        cls.raises_exception('localhost:8081-8082-8083', ImproperlyConfigured)
        # If contrib.staticfiles isn't configured properly, the exception
        # should bubble up to the main thread.
        old_STATIC_URL = TEST_SETTINGS['STATIC_URL']
        TEST_SETTINGS['STATIC_URL'] = None
        cls.raises_exception('localhost:8081', ImproperlyConfigured)
        TEST_SETTINGS['STATIC_URL'] = old_STATIC_URL
        # Restore original environment variable
        if address_predefined:
            os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = old_address
        else:
            del os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS']

    @classmethod
    def raises_exception(cls, address, exception):
        # Start the live server with *address* and assert that doing so
        # raises *exception*; always tear the server down again.
        os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = address
        try:
            super(LiveServerAddress, cls).setUpClass()
            raise Exception("The line above should have raised an exception")
        except exception:
            pass
        finally:
            super(LiveServerAddress, cls).tearDownClass()

    def test_test_test(self):
        # Intentionally empty method so that the test is picked up by the
        # test runner and the overridden setUpClass() method is executed.
        pass
class LiveServerViews(LiveServerBase):
def test_404(self):
"""
Ensure that the LiveServerTestCase serves 404s.
Refs #2879.
"""
try:
self.urlopen('/')
except HTTPError as err:
self.assertEqual(err.code, 404, 'Expected 404 response')
else:
self.fail('Expected 404 response')
def test_view(self):
"""
Ensure that the LiveServerTestCase serves views.
Refs #2879.
"""
f = self.urlopen('/example_view/')<|fim▁hole|> Ensure that the LiveServerTestCase serves static files.
Refs #2879.
"""
f = self.urlopen('/static/example_static_file.txt')
self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file')
def test_media_files(self):
"""
Ensure that the LiveServerTestCase serves media files.
Refs #2879.
"""
f = self.urlopen('/media/example_media_file.txt')
self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file')
def test_environ(self):
f = self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'}))
self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read())
class LiveServerDatabase(LiveServerBase):
    """Database visibility between the test and the live server thread."""

    def test_fixtures_loaded(self):
        """
        Ensure that fixtures are properly loaded and visible to the
        live server thread.
        Refs #2879.
        """
        f = self.urlopen('/model_view/')
        self.assertEqual(f.read().splitlines(), [b'jane', b'robert'])

    def test_database_writes(self):
        """
        Ensure that data written to the database by a view can be read.
        Refs #2879.
        """
        # The view is expected to add 'emily'; ordering by pk preserves
        # insertion order for the comparison below.
        self.urlopen('/create_model_instance/')
        self.assertQuerysetEqual(
            Person.objects.all().order_by('pk'),
            ['jane', 'robert', 'emily'],
            lambda b: b.name
        )
def test_static_files(self):
""" |
<|file_name|>UserNotifications.tsx<|end_file_name|><|fim▁begin|>import React, { useState } from 'react';
import classNames from 'classnames';
import { Button, Classes, InputGroup } from '@blueprintjs/core';
import { FilterTerm, NotificationsState } from './types';
import { useNotificationsContext } from './userNotificationsContext';
import PubNotifications from './PubNotifications';
import UserNotificationPreferences from './UserNotificationPreferences';
type Props = {
state: NotificationsState;
};
const UserNotifications = (props: Props) => {
const { state } = props;
const { pubStates } = state;
const [filterTerm, setFilterTerm] = useState<FilterTerm>(null);
const [container, setContainer] = useState<null | HTMLDivElement>(null);
const [showingPreferences, setShowingPreferences] = useState(false);
const { actions } = useNotificationsContext();
<|fim▁hole|> return (
<UserNotificationPreferences
preferences={state.notificationPreferences}
onUpdatePreferences={actions.updateUserNotificationPreferences}
onClose={() => setShowingPreferences(false)}
/>
);
};
const renderNotifications = () => {
return (
<>
<div className="top-controls-bar">
<InputGroup
fill
leftIcon="search"
value={filterTerm || ''}
placeholder="Filter threads"
onChange={(evt: any) => setFilterTerm(evt.target.value)}
/>
<Button
minimal
className="preferences-button"
icon="settings"
onClick={() => setShowingPreferences(true)}
/>
</div>
<div className="pubs">
{pubStates.map((pubState) => (
<PubNotifications
state={pubState}
key={pubState.pub.id}
filterTerm={filterTerm}
container={container}
/>
))}
</div>
</>
);
};
return (
<div
ref={setContainer}
className={classNames(
'user-notifications-component',
Classes.CARD,
Classes.ELEVATION_2,
)}
>
{showingPreferences ? renderPreferences() : renderNotifications()}
</div>
);
};
export default UserNotifications;<|fim▁end|> | const renderPreferences = () => { |
<|file_name|>GEMglFeedbackBuffer.cpp<|end_file_name|><|fim▁begin|>////////////////////////////////////////////////////////
//
// GEM - Graphics Environment for Multimedia
//
// Implementation file
//
// Copyright (c) 2002-2011 IOhannes m zmölnig. forum::für::umläute. IEM. [email protected]
// [email protected]
// For information on usage and redistribution, and for a DISCLAIMER
// * OF ALL WARRANTIES, see the file, "GEM.LICENSE.TERMS"
//
// this file has been generated...
////////////////////////////////////////////////////////
#include "GEMglFeedbackBuffer.h"
CPPEXTERN_NEW_WITH_TWO_ARGS ( GEMglFeedbackBuffer , t_floatarg, A_DEFFLOAT, t_floatarg, A_DEFFLOAT);
/////////////////////////////////////////////////////////
//
// GEMglViewport
//
/////////////////////////////////////////////////////////
// Constructor
//
// Constructor: arg0 = buffer size (floats), arg1 = GL feedback type.
GEMglFeedbackBuffer :: GEMglFeedbackBuffer (t_floatarg arg0=128, t_floatarg arg1=0) :
		size(static_cast<GLsizei>(arg0)), type(static_cast<GLenum>(arg1))
{
	// never allocate a zero/negative-sized buffer; fall back to 128 floats
	len=(size>0)?size:128;
	buffer = new float[len];
	// inlets so the patch can change buffer size and feedback type at runtime
	m_inlet[0] = inlet_new(this->x_obj, &this->x_obj->ob_pd, &s_float, gensym("size"));
	m_inlet[1] = inlet_new(this->x_obj, &this->x_obj->ob_pd, &s_float, gensym("type"));
}
/////////////////////////////////////////////////////////
// Destructor
//
// Destructor: release the inlets and the feedback buffer.
GEMglFeedbackBuffer :: ~GEMglFeedbackBuffer () {
	inlet_free(m_inlet[0]);
	inlet_free(m_inlet[1]);
	// free the buffer allocated with new[] in the constructor/sizeMess();
	// previously this was never released, leaking 'len' floats per object
	delete [] buffer;
}
//////////////////
// extension check
// extension check: glFeedbackBuffer() requires at least OpenGL-1.1
bool GEMglFeedbackBuffer :: isRunnable(void) {
	if(GLEW_VERSION_1_1)return true;
	error("your system does not support OpenGL-1.1");
	return false;
}
/////////////////////////////////////////////////////////
// Render
//
// Render: install the feedback buffer; the collected data is currently
// not exposed to the patch (hence the error() note below).
void GEMglFeedbackBuffer :: render(GemState *state) {
	glFeedbackBuffer (size, type, buffer);
	error("i got data @ %X, but i don't know what to do with it!", buffer);
}
<|fim▁hole|>// Variables
//
// "size" message handler: grow the buffer if needed.
// NOTE: old buffer contents are discarded on growth; shrinking keeps
// the larger allocation.
void GEMglFeedbackBuffer :: sizeMess (t_float arg1) {	// FUN
	size = static_cast<GLsizei>(arg1);
	if (size>len){
		len=size;
		delete[]buffer;
		buffer = new float[len];
	}
	setModified();
}
// "type" message handler: set the GL feedback type enum.
void GEMglFeedbackBuffer :: typeMess (t_float arg1) {	// FUN
	type = static_cast<GLenum>(arg1);
	setModified();
}
/////////////////////////////////////////////////////////
// static member functions
//
// register the "size" and "type" message handlers with Pd
void GEMglFeedbackBuffer :: obj_setupCallback(t_class *classPtr) {
	 class_addmethod(classPtr, reinterpret_cast<t_method>(&GEMglFeedbackBuffer::sizeMessCallback),  	gensym("size"), A_DEFFLOAT, A_NULL);
	 class_addmethod(classPtr, reinterpret_cast<t_method>(&GEMglFeedbackBuffer::typeMessCallback),  	gensym("type"), A_DEFFLOAT, A_NULL);
}
// Pd-level trampoline into the sizeMess() member function
void GEMglFeedbackBuffer :: sizeMessCallback (void* data, t_floatarg arg0){
	GetMyClass(data)->sizeMess ( static_cast<t_float>(arg0));
}
// Pd-level trampoline into the typeMess() member function
void GEMglFeedbackBuffer :: typeMessCallback (void* data, t_floatarg arg0){
	GetMyClass(data)->typeMess ( static_cast<t_float>(arg0));
}
<|file_name|>issue-15858.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unsafe_destructor)]
static mut DROP_RAN: bool = false;
// Minimal trait carrying a lifetime parameter; used to parameterize
// Foo's Drop impl below.
trait Bar<'b> {
    fn do_something(&mut self);
}
struct BarImpl<'b>;
// Trivial implementation so main() can instantiate a Bar-bounded Foo.
impl<'b> Bar<'b> for BarImpl<'b> {
    fn do_something(&mut self) {}
}
struct Foo<B>;
// Drop for a type whose parameter carries a lifetime bound required
// #[unsafe_destructor] in this era of Rust; the flag records that the
// destructor actually ran (checked in main()).
#[unsafe_destructor]
impl<'b, B: Bar<'b>> Drop for Foo<B> {
    fn drop(&mut self) {
        unsafe {
            DROP_RAN = true;
        }
    }
}
fn main() {
    {
        // `_x` is dropped at the end of this inner scope ...
        let _x: Foo<BarImpl> = Foo;
    }
    unsafe {
        // ... so the Drop impl must have fired by now.
        assert_eq!(DROP_RAN, true);
    }
}
<|file_name|>NavGroup.js<|end_file_name|><|fim▁begin|>import React, {PropTypes, Component} from 'react';
import { NavGroup } from 'react-photonkit'
/*class NavGroup extends Component {
static propTypes = {
children: PropTypes.any
}
constructor(props) {
super(props);<|fim▁hole|> this.state = {};
}
render() {
return (
<nav className="nav-group">
{this.props.children}
</nav>
);
}
}*/
export default NavGroup<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Servo Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name="string_cache_plugin"]
#![crate_type="dylib"]
#![feature(plugin_registrar, quote, box_syntax)]
#![feature(rustc_private, slice_patterns)]
#![cfg_attr(test, deny(warnings))]
#![allow(unused_imports)] // for quotes
extern crate syntax;
extern crate rustc;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate mac;
extern crate string_cache_shared;
use rustc::plugin::Registry;
mod atom;
// NB: This needs to be public or we get a linker error.<|fim▁hole|> reg.register_macro("atom", atom::expand_atom);
reg.register_macro("ns", atom::expand_ns);
}<|fim▁end|> | #[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) { |
<|file_name|>test_mol2.py<|end_file_name|><|fim▁begin|>from Sire.Base import *
from Sire.IO import *
from Sire.Mol import *
from glob import glob
from nose.tools import assert_equal, assert_almost_equal
# Check that we have Mol2 support in this version of Sire.
has_mol2 = True
try:
    p = Mol2()
except Exception:
    # No Mol2 support. (Catch Exception rather than a bare except so that
    # SystemExit/KeyboardInterrupt are not silently swallowed.)
    has_mol2 = False
# General test of ability to read and write Mol2 files.
# All Mol2 files in the "../io/" directory are parsed.
# Once the input file is parsed we then check that the parser constructs a
# Sire Molecule from the parsed data. Following this, we then check that the
# parser can convert the molecule back into the correct data format, ready to
# be written to file.
def test_read_write(verbose=False):
    """Round-trip every Mol2 file in '../io/' through the parser."""
    if not has_mol2:
        return

    # Every Mol2 example file shipped with the test suite, exercised in
    # both the parallel and the serial code path.
    for filename in glob('../io/*mol2'):
        for parallel in (True, False):
            if verbose:
                print("Reading Mol2 file: %s" % filename)
                print("Parallel = %s" % parallel)

            # Parse the file into a Mol2 object.
            parser = Mol2(filename, {"parallel" : wrap(parallel)})

            if verbose:
                print("Constructing molecular system...")

            # Build a Sire molecular system from the parsed data.
            system = parser.toSystem()

            if verbose:
                print("Reconstructing Mol2 data from molecular system...")

            # Round-trip: re-parse the molecular system back into Mol2 form.
            parser = Mol2(system, {"parallel" : wrap(parallel)})

            if verbose:
                print("Passed!\n")
# Specific atom coordinate data validation test for file "../io/complex.mol2".
def test_atom_coords(verbose=False):
    """Validate atom coordinates parsed from '../io/complex.mol2'.

    One atom from each of the first five residues is checked against
    known reference coordinates, in parallel and serial parsing mode.
    """
    if not has_mol2:
        return

    # Test atoms: one per residue, residues 1..5.
    atoms = ["N", "CA", "C", "O", "CB"]

    # Reference coordinates for the atoms above.
    coords = [[ -2.9880, -2.0590, -2.6220],
              [ -3.8400, -2.0910, -7.4260],
              [ -6.4250, -3.9190, -10.9580],
              [ -6.1980, -6.0090, -14.2910],
              [ -9.8700, -6.5500, -15.2480]]

    # Test in parallel and serial mode.
    for use_par in [True, False]:
        if verbose:
            print("Reading Mol2 file: ../io/complex.mol2")
            print("Parallel = %s" % use_par)

        # Parse the Mol2 file.
        p = Mol2('../io/complex.mol2', {"parallel" : wrap(use_par)})

        if verbose:
            print("Constructing molecular system...")

        # Create a molecular system and get its first molecule.
        s = p.toSystem()
        m = s[MolIdx(0)]

        if verbose:
            print("Checking atomic coordinates...")

        # Atom i lives in residue i+1; iterate atoms and reference
        # coordinates together instead of indexing with range(len(...)).
        for resnum, (atom, coord) in enumerate(zip(atoms, coords), start=1):
            # Extract the atom and its parsed coordinates.
            a = m.atom(AtomName(atom) + ResNum(resnum))
            c = a.property("coordinates")

            # Validate each component against the known values.
            for axis in range(3):
                assert_almost_equal( c[axis], coord[axis] )

        if verbose:
            print("Passed!\n")
# Residue and chain validation test for file "../io/complex.mol2".
def test_residues(verbose=False):
    """Residue and chain validation test for '../io/complex.mol2'."""
    if not has_mol2:
        return

    # Test in parallel and serial mode.
    for use_par in [True, False]:
        if verbose:
            print("Reading Mol2 file: ../io/complex.mol2")
            print("Parallel = %s" % use_par)

        # Parse the Mol2 file.
        p = Mol2('../io/complex.mol2', {"parallel" : wrap(use_par)})

        if verbose:
            print("Constructing molecular system...")

        # Create a molecular system.
        s = p.toSystem()

        # Get the two molecules.
        m1 = s[MolIdx(0)]
        m2 = s[MolIdx(1)]

        # Get the chains from the molecules.
        c1 = m1.chains()
        c2 = m2.chains()

        if verbose:
            print("Checking chain and residue data...")

        # Check the number of chains in each molecule.
        assert_equal( len(c1), 3 )
        assert_equal( len(c2), 1 )

        # Check the number of residues in each chain of the first molecule.
        assert_equal( len(c1[0].residues()), 118 )
        assert_equal( len(c1[1].residues()), 114 )
        assert_equal( len(c1[2].residues()), 118 )

        # Check the number of residues in the single chain of the second molecule.
        assert_equal( len(c2[0].residues()), 1 )

        # Check some specific residue names in the first chain from the first molecule.
        # NOTE(review): the second and third assertions index c1[1] (the second
        # chain), which contradicts the comment above — verify intent.
        assert_equal( c1[0].residues()[0].name().toString(), "ResName('PRO1')" )
        assert_equal( c1[1].residues()[1].name().toString(), "ResName('MET2')" )
        assert_equal( c1[1].residues()[2].name().toString(), "ResName('PHE3')" )

        if verbose:
            print("Passed!\n")
if __name__ == "__main__":
    # Run the full suite verbosely when executed directly (not via nose).
    test_read_write(True)
    test_atom_coords(True)
    test_residues(True)
# parser can convert the molecule back into the correct data format, ready to
# be written to file. |
<|file_name|>parviewer.rs<|end_file_name|><|fim▁begin|>/*!
# ParView
*/
#![deny(non_camel_case_types)]
#![deny(unused_parens)]
#![deny(non_upper_case_globals)]
#![deny(unused_qualifications)]
#![deny(missing_docs)]
#![deny(unused_results)]
extern crate docopt;
extern crate parview;
extern crate serde;
use docopt::Docopt;
use serde::Deserialize;
use std::error::Error;
use std::path::Path;
use parview::{misc, Color, Config, Frame, Palette, Parviewer, TomlConfig, EPSILON};
use std::f32::consts::PI;
// Write the Docopt usage string.
const USAGE: &str = "
Usage: parview [options] [--] [<file>]
Options:
-h, --help Help and usage
-g, --generate Generate test_frames.json
-p, --palette FILE Use palette file (toml file), instead of default.
-c, --config FILE Use config file (toml file), instead of default.
Arguments:
<file> json file representing the frames. json.gz also accepted,
if the extension is \".gz\".
";
/// Command-line arguments parsed by Docopt from `USAGE`.
#[derive(Debug, Deserialize)]
struct Args {
    /// `-p, --palette FILE`: optional palette toml file.
    flag_palette: Option<String>,
    /// `-c, --config FILE`: optional config toml file.
    flag_config: Option<String>,
    /// `-g, --generate`: generate test frames instead of loading them.
    flag_generate: bool,
    /// `<file>`: frames json (or json.gz) path.
    arg_file: Option<String>,
}
fn run() -> Result<(), Box<dyn Error>> {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
let toml_config: TomlConfig = match args.flag_config {
None => Default::default(),
Some(ref fname) => {
let path: &Path = Path::new(&fname[..]);
misc::load_toml::<TomlConfig>(path)?
}
};
<|fim▁hole|> let fname: &str = match args.arg_file {
Some(ref s) => s,
None => "test_frame.json",
};
let path: &Path = Path::new(fname);
let (frames, palette): (Vec<Frame>, Palette) = if args.flag_generate {
let frames = misc::generate_frame(path)?;
let palette = match args.flag_palette {
None => Default::default(),
Some(fname) => {
let palette_path: &Path = Path::new(&fname[..]);
misc::generate_palette(palette_path)?
}
};
(frames, palette)
} else {
let frames = misc::deserialize_by_ext(path)?;
let palette = match args.flag_palette {
None => Default::default(),
Some(fname) => {
let palette_path: &Path = Path::new(&fname[..]);
misc::load_toml::<Palette>(palette_path)?
}
};
(frames, palette)
};
// println!("config: {:?}", config);
let mut viewer = Parviewer::new(frames, palette, config)?;
let _ = viewer.timer.at_least(toml_config.fps);
let text_color = Color(255, 255, 255);
viewer.run(|viewer, _| {
if toml_config.rotate.abs() > EPSILON {
let new_yaw = viewer.camera.yaw() + (toml_config.rotate * PI / 180.);
viewer.camera.set_yaw(new_yaw);
}
viewer.draw_frame_text(0., 0., text_color);
let dt = viewer.timer.get_dt();
let dt_text = if dt >= 0.6 || dt.abs() < 1e-6 || dt <= -0.6 {
format!("{}", dt)
} else if dt > 0. {
format!("1/{}", 1. / dt)
} else {
format!("-1/{}", -1. / dt)
};
let text = format!(
"t:{:6.2}, dt:{}, coloring: {}",
viewer.timer.get_time(),
dt_text,
viewer.palette.partials_string()
);
viewer.draw_text(&*text, 0., 1., text_color);
});
Ok(())
}
/// The main entry point.
pub fn main() {
if let Err(err) = run() {
println!("ERROR.");
misc::err_print(&*err);
}
}<|fim▁end|> | let config: Config = toml_config.to_parviewer_config();
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/*
* MIT License
*
* Copyright (c) 2018 Choko ([email protected])
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal<|fim▁hole|> * in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
export * from './build';<|fim▁end|> | |
<|file_name|>datelib.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2002 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set of classes and functions for dealing with dates and timestamps.
The BaseTimestamp and Timestamp are timezone-aware wrappers around Python
datetime.datetime class.
"""
import calendar
import copy
import datetime
import re
import sys
import time
import types
import warnings
from dateutil import parser
import pytz
_MICROSECONDS_PER_SECOND = 1000000
_MICROSECONDS_PER_SECOND_F = float(_MICROSECONDS_PER_SECOND)
def SecondsToMicroseconds(seconds):
  """Express a duration given in seconds as microseconds.

  Args:
    seconds: A number (int or float) of seconds.

  Returns:
    The equivalent duration in microseconds.
  """
  return _MICROSECONDS_PER_SECOND * seconds
def MicrosecondsToSeconds(microseconds):
  """Express a duration given in microseconds as (float) seconds.

  Args:
    microseconds: A number representing some duration of time measured in
      microseconds.

  Returns:
    A float representing the same duration of time measured in seconds.
  """
  # Force float division so integer inputs keep sub-second resolution.
  return float(microseconds) / _MICROSECONDS_PER_SECOND
def _GetCurrentTimeMicros():
  """Get the current wall-clock time in microseconds, in UTC.

  Returns:
    An int: the number of microseconds since the epoch.
  """
  now_seconds = time.time()
  return int(SecondsToMicroseconds(now_seconds))
def GetSecondsSinceEpoch(time_tuple):
  """Convert time_tuple (in UTC) to seconds since the epoch (also in UTC).

  Args:
    time_tuple: tuple with at least 6 items (year, month, day, hour,
      minute, second); any trailing items are ignored.

  Returns:
    Integer seconds since the epoch.
  """
  year, month, day, hour, minute, second = time_tuple[:6]
  # timegm treats the tuple as UTC (unlike time.mktime, which is local).
  return calendar.timegm((year, month, day, hour, minute, second, 0, 0, 0))
def GetTimeMicros(time_tuple):
  """Get a UTC time in microseconds from a python time tuple.

  Arguments:
    time_tuple: A (year, month, day, hour, minute, second) tuple (the
      python time tuple format) in the UTC time zone.

  Returns:
    An int: microseconds since the epoch represented by the input tuple.
  """
  whole_seconds = GetSecondsSinceEpoch(time_tuple)
  return int(SecondsToMicroseconds(whole_seconds))
def DatetimeToUTCMicros(date):
  """Converts a datetime object to microseconds since the epoch in UTC.

  Args:
    date: A datetime to convert.

  Returns:
    The number of microseconds since the epoch, in UTC, represented by
    the input datetime.
  """
  # calendar.timegm interprets the struct_time as UTC, which matches the
  # UTC tuple produced by utctimetuple(); time.mktime would wrongly apply
  # the local timezone here.  See
  # http://wiki.python.org/moin/WorkingWithTime and
  # http://docs.python.org/library/time.html for the conversion rules.
  whole_seconds = calendar.timegm(date.utctimetuple())
  # utctimetuple() drops sub-second resolution; add it back explicitly.
  return SecondsToMicroseconds(whole_seconds) + date.microsecond
def DatetimeToUTCMillis(date):
  """Converts a datetime object to milliseconds since the epoch in UTC.

  Args:
    date: A datetime to convert.

  Returns:
    The integer number of milliseconds since the epoch, in UTC,
    represented by the input datetime (sub-millisecond precision is
    truncated).
  """
  # Use floor division so the result stays an integer on Python 3 as
  # well; a bare "/" would return a float there while truncating to an
  # int on Python 2, silently changing the return type between versions.
  return DatetimeToUTCMicros(date) // 1000
def UTCMicrosToDatetime(micros, tz=None):
  """Converts a microsecond epoch time to a datetime object.

  Args:
    micros: A UTC time, expressed in microseconds since the epoch.
    tz: The desired tzinfo for the datetime object. If None, the
      datetime will be naive.

  Returns:
    The datetime represented by the input value.
  """
  # The conversion from micros to seconds for input into the
  # utcfromtimestamp function needs to be done as a float to make sure
  # we dont lose the sub-second resolution of the input time.
  # NOTE(review): datetime.utcfromtimestamp is deprecated as of Python
  # 3.12; a future cleanup could switch to
  # datetime.fromtimestamp(..., tz=pytz.utc) — confirm before changing.
  dt = datetime.datetime.utcfromtimestamp(
      micros / _MICROSECONDS_PER_SECOND_F)
  if tz is not None:
    # tz.fromutc (rather than astimezone) because dt is naive UTC here.
    dt = tz.fromutc(dt)
  return dt
def UTCMillisToDatetime(millis, tz=None):
  """Converts a millisecond epoch time to a datetime object.

  Args:
    millis: A UTC time, expressed in milliseconds since the epoch.
    tz: The desired tzinfo for the datetime object. If None, the
      datetime will be naive.

  Returns:
    The datetime represented by the input value.
  """
  # Delegate to the microsecond variant after scaling up.
  micros = millis * 1000
  return UTCMicrosToDatetime(micros, tz)
UTC = pytz.UTC
US_PACIFIC = pytz.timezone('US/Pacific')
class TimestampError(ValueError):
"""Generic timestamp-related error."""
pass
class TimezoneNotSpecifiedError(TimestampError):
"""This error is raised when timezone is not specified."""
pass
class TimeParseError(TimestampError):
"""This error is raised when we can't parse the input."""
pass
# TODO(user): this class needs to handle daylight better
class LocalTimezoneClass(datetime.tzinfo):
  """tzinfo implementation for the host machine's local timezone.

  Offsets come from the time module's process-wide settings
  (time.timezone / time.altzone), i.e. from the environment's TZ at
  interpreter start.
  """
  ZERO = datetime.timedelta(0)
  HOUR = datetime.timedelta(hours=1)
  # time.timezone is seconds *west* of UTC, hence the negation to get an
  # east-of-UTC offset.
  STDOFFSET = datetime.timedelta(seconds=-time.timezone)
  if time.daylight:
    DSTOFFSET = datetime.timedelta(seconds=-time.altzone)
  else:
    DSTOFFSET = STDOFFSET
  # Extra offset applied while DST is in effect (typically one hour).
  DSTDIFF = DSTOFFSET - STDOFFSET
  def utcoffset(self, dt):
    """datetime -> minutes east of UTC (negative for west of UTC)."""
    if self._isdst(dt):
      return self.DSTOFFSET
    else:
      return self.STDOFFSET
  def dst(self, dt):
    """datetime -> DST offset in minutes east of UTC."""
    if self._isdst(dt):
      return self.DSTDIFF
    else:
      return self.ZERO
  def tzname(self, dt):
    """datetime -> string name of time zone."""
    # time.tzname is (standard_name, dst_name); _isdst picks the index.
    return time.tzname[self._isdst(dt)]
  def _isdst(self, dt):
    """Return true if given datetime is within local DST."""
    # Round-trip through mktime/localtime so the platform's DST rules
    # decide; tm_isdst of -1 ("unknown") maps to False here.
    tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
          dt.weekday(), 0, -1)
    stamp = time.mktime(tt)
    tt = time.localtime(stamp)
    return tt.tm_isdst > 0
  def __repr__(self):
    """Return string '<Local>'."""
    return '<Local>'
  def localize(self, dt, unused_is_dst=False):
    """Convert naive time to local time."""
    # Mirrors the pytz localize() API so this class is interchangeable
    # with pytz timezones in this module; is_dst is accepted but unused.
    if dt.tzinfo is not None:
      raise ValueError('Not naive datetime (tzinfo is already set)')
    return dt.replace(tzinfo=self)
  def normalize(self, dt, unused_is_dst=False):
    """Correct the timezone information on the given datetime."""
    if dt.tzinfo is None:
      raise ValueError('Naive time - no tzinfo set')
    return dt.replace(tzinfo=self)
LocalTimezone = LocalTimezoneClass()
class BaseTimestamp(datetime.datetime):
"""Our kind of wrapper over datetime.datetime.
The objects produced by methods now, today, fromtimestamp, utcnow,
utcfromtimestamp are timezone-aware (with correct timezone).
We also overload __add__ and __sub__ method, to fix the result of arithmetic
operations.
"""
LocalTimezone = LocalTimezone
@classmethod
def AddLocalTimezone(cls, obj):
"""If obj is naive, add local timezone to it."""
if not obj.tzinfo:
return obj.replace(tzinfo=cls.LocalTimezone)
return obj
@classmethod
def Localize(cls, obj):
"""If obj is naive, localize it to cls.LocalTimezone."""
if not obj.tzinfo:
return cls.LocalTimezone.localize(obj)
return obj
def __add__(self, *args, **kwargs):
"""x.__add__(y) <==> x+y."""
r = super(BaseTimestamp, self).__add__(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
  def __sub__(self, *args, **kwargs):
    """x.__sub__(y) <==> x-y."""
    r = super(BaseTimestamp, self).__sub__(*args, **kwargs)
    # Subtracting a timedelta yields a datetime: re-wrap it so the result
    # keeps this subclass.  Subtracting another datetime yields a
    # timedelta, which is returned unchanged.
    if isinstance(r, datetime.datetime):
      return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
                        r.microsecond, r.tzinfo)
    return r
@classmethod
def now(cls, *args, **kwargs):
"""Get a timestamp corresponding to right now.
Args:
args: Positional arguments to pass to datetime.datetime.now().
kwargs: Keyword arguments to pass to datetime.datetime.now(). If tz is not
specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.AddLocalTimezone(
super(BaseTimestamp, cls).now(*args, **kwargs))
@classmethod
def today(cls):
"""Current BaseTimestamp.
Same as self.__class__.fromtimestamp(time.time()).
Returns:
New self.__class__.
"""
return cls.AddLocalTimezone(super(BaseTimestamp, cls).today())
@classmethod
def fromtimestamp(cls, *args, **kwargs):
"""Get a new localized timestamp from a POSIX timestamp.
Args:
args: Positional arguments to pass to datetime.datetime.fromtimestamp().
kwargs: Keyword arguments to pass to datetime.datetime.fromtimestamp().
If tz is not specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.Localize(
super(BaseTimestamp, cls).fromtimestamp(*args, **kwargs))
@classmethod
def utcnow(cls):
"""Return a new BaseTimestamp representing UTC day and time."""<|fim▁hole|> @classmethod
def utcfromtimestamp(cls, *args, **kwargs):
"""timestamp -> UTC datetime from a POSIX timestamp (like time.time())."""
return super(BaseTimestamp, cls).utcfromtimestamp(
*args, **kwargs).replace(tzinfo=pytz.utc)
@classmethod
def strptime(cls, date_string, format, tz=None):
"""Parse date_string according to format and construct BaseTimestamp.
Args:
date_string: string passed to time.strptime.
format: format string passed to time.strptime.
tz: if not specified, local timezone assumed.
Returns:
New BaseTimestamp.
"""
if tz is None:
return cls.Localize(cls(*(time.strptime(date_string, format)[:6])))
return tz.localize(cls(*(time.strptime(date_string, format)[:6])))
def astimezone(self, *args, **kwargs):
"""tz -> convert to time in new timezone tz."""
r = super(BaseTimestamp, self).astimezone(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
@classmethod
def FromMicroTimestamp(cls, ts):
"""Create new Timestamp object from microsecond UTC timestamp value.
Args:
ts: integer microsecond UTC timestamp
Returns:
New cls()
"""
return cls.utcfromtimestamp(ts/_MICROSECONDS_PER_SECOND_F)
def AsSecondsSinceEpoch(self):
"""Return number of seconds since epoch (timestamp in seconds)."""
return GetSecondsSinceEpoch(self.utctimetuple())
def AsMicroTimestamp(self):
"""Return microsecond timestamp constructed from this object."""
return (SecondsToMicroseconds(self.AsSecondsSinceEpoch()) +
self.microsecond)
@classmethod
def combine(cls, datepart, timepart, tz=None):
"""Combine date and time into timestamp, timezone-aware.
Args:
datepart: datetime.date
timepart: datetime.time
tz: timezone or None
Returns:
timestamp object
"""
result = super(BaseTimestamp, cls).combine(datepart, timepart)
if tz:
result = tz.localize(result)
return result
# Conversions from interval suffixes to number of seconds.
# (m => 60s, d => 86400s, etc)
_INTERVAL_CONV_DICT = {'s': 1}
_INTERVAL_CONV_DICT['m'] = 60 * _INTERVAL_CONV_DICT['s']
_INTERVAL_CONV_DICT['h'] = 60 * _INTERVAL_CONV_DICT['m']
_INTERVAL_CONV_DICT['d'] = 24 * _INTERVAL_CONV_DICT['h']
_INTERVAL_CONV_DICT['D'] = _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['w'] = 7 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['W'] = _INTERVAL_CONV_DICT['w']
_INTERVAL_CONV_DICT['M'] = 30 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['Y'] = 365 * _INTERVAL_CONV_DICT['d']
_INTERVAL_REGEXP = re.compile('^([0-9]+)([%s])?' % ''.join(_INTERVAL_CONV_DICT))
def ConvertIntervalToSeconds(interval):
  """Convert a formatted string representing an interval into seconds.

  A basic interval looks like "<number><suffix>" (e.g. "90s", "2h");
  several basic intervals may be chained together (e.g. "1h30m").

  Args:
    interval: String to interpret as an interval.

  Returns:
    An integer number of seconds represented by the interval string, or
    None if the interval string could not be decoded.
  """
  seconds = 0
  remaining = interval
  while remaining:
    m = _INTERVAL_REGEXP.match(remaining)
    if m is None:
      return None
    try:
      amount = int(m.group(1))
    except ValueError:
      return None
    unit = m.group(2)
    if unit:
      factor = _INTERVAL_CONV_DICT.get(unit)
      if not factor:
        return None
      amount *= factor
    seconds += amount
    # Consume the matched prefix and keep scanning.
    remaining = remaining[m.end(0):]
  return seconds
class Timestamp(BaseTimestamp):
  """This subclass contains methods to parse W3C and interval date spec.

  The interval date specification is in the form "1D", where "D" can be
  "s"econds "m"inutes "h"ours "D"ays "W"eeks "M"onths "Y"ears.
  """
  INTERVAL_CONV_DICT = _INTERVAL_CONV_DICT
  INTERVAL_REGEXP = _INTERVAL_REGEXP
  @classmethod
  def _StringToTime(cls, timestring, tz=None):
    """Use dateutil.parser to convert string into timestamp.

    dateutil.parser understands ISO8601 which is really handy.

    Args:
      timestring: string with datetime
      tz: optional timezone, if timezone is omitted from timestring.

    Returns:
      New Timestamp or None if unable to parse the timestring.
    """
    try:
      r = parser.parse(timestring)
    except ValueError:
      return None
    # Naive results get the caller's timezone, or the local one.
    if not r.tzinfo:
      r = (tz or cls.LocalTimezone).localize(r)
    result = cls(r.year, r.month, r.day, r.hour, r.minute, r.second,
                 r.microsecond, r.tzinfo)
    return result
  @classmethod
  def _IntStringToInterval(cls, timestring):
    """Parse interval date specification and create a timedelta object.

    Args:
      timestring: string interval.

    Returns:
      A datetime.timedelta representing the specified interval or None if
      unable to parse the timestring.
    """
    seconds = ConvertIntervalToSeconds(timestring)
    # NOTE(review): a zero-length interval ("0s") also yields None here
    # because 0 is falsy — confirm whether that is intended.
    return datetime.timedelta(seconds=seconds) if seconds else None
  @classmethod
  def FromString(cls, value, tz=None):
    """Create a Timestamp from a string.

    Args:
      value: String interval or datetime.
          e.g. "2013-01-05 13:00:00" or "1d"
      tz: optional timezone, if timezone is omitted from timestring.

    Returns:
      A new Timestamp: the parsed datetime, or, for an interval string,
      that interval subtracted from the current UTC time.

    Raises:
      TimeParseError if unable to parse value.
    """
    result = cls._StringToTime(value, tz=tz)
    if result:
      return result
    result = cls._IntStringToInterval(value)
    if result:
      return cls.utcnow() - result
    raise TimeParseError(value)
# What's written below is a clear python bug. I mean, okay, I can apply
# negative timezone to it and end result will be inconversible.
MAXIMUM_PYTHON_TIMESTAMP = Timestamp(
9999, 12, 31, 23, 59, 59, 999999, UTC)
# This is also a bug. It is called 32bit time_t. I hate it.
# This is fixed in 2.5, btw.
MAXIMUM_MICROSECOND_TIMESTAMP = 0x80000000 * _MICROSECONDS_PER_SECOND - 1
MAXIMUM_MICROSECOND_TIMESTAMP_AS_TS = Timestamp(2038, 1, 19, 3, 14, 7, 999999)<|fim▁end|> | return super(BaseTimestamp, cls).utcnow().replace(tzinfo=pytz.utc)
|
<|file_name|>issue-6919.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:iss.rs
// xfail-fast<|fim▁hole|>extern mod issue6919_3;
pub fn main() {
issue6919_3::D.k;
}<|fim▁end|> |
#[crate_id="issue-6919"]; |
<|file_name|>treePlotter.py<|end_file_name|><|fim▁begin|>import matplotlib
import matplotlib.pyplot as plt
decisionNoe = dict(boxstyle="sawtooth", fc="0.8")
leafNode = dict(boxstyle="round4", fc="0.8")
arrow_args = dict(arrowstyle="<-")
def plotNoe(nodeTxt, centerPt, parentPt, nodeType):
    """Draw one annotated node with an arrow from parentPt to centerPt.

    Coordinates are in axes-fraction units.  Relies on createPlot.ax1
    having been initialised by a prior createPlot() call.
    """
    createPlot.ax1.annotate(nodeTxt, xy=parentPt, xycoords='axes fraction',
                            xytext=centerPt, textcoords='axes fraction',
                            va="center", ha="center", bbox=nodeType,
                            arrowprops=arrow_args)
def createPlot():
    """Demo plot: draw one decision node and one leaf node.

    NOTE(review): this zero-argument version is shadowed by the later
    createPlot(inTree) definition in this module, and the 'Lea fNode'
    label looks like a typo for 'Leaf Node' — confirm before fixing.
    """
    fig = plt.figure(1, facecolor='white')
    fig.clf()
    # Stash the axes on the function object so plotNoe can reach it.
    createPlot.ax1 = plt.subplot(111, frameon=False)
    plotNoe('Decision Node', (0.5, 0.1), (0.1, 0.5), decisionNoe)
    plotNoe('Lea fNode', (0.8, 0.1), (0.3, 0.8), leafNode)
    plt.show()
def getNumLeafs(myTree):
    """Count the leaf nodes of a decision tree.

    Args:
        myTree: nested dict of the form {feature: {value: subtree-or-label}}.

    Returns:
        The number of leaf (non-dict) entries in the tree.
    """
    numLeafs = 0
    # next(iter(...)) works on both Python 2 and 3; the original
    # myTree.keys()[0] raises TypeError on Python 3, where keys() is a
    # non-indexable view.
    firstStr = next(iter(myTree))
    secondDict = myTree[firstStr]
    for key in secondDict:
        # isinstance is the idiomatic (and subclass-safe) type test.
        if isinstance(secondDict[key], dict):
            numLeafs += getNumLeafs(secondDict[key])
        else:
            numLeafs += 1
    return numLeafs
def getTreeDepth(myTree):
    """Return the depth (number of decision levels) of a decision tree.

    Args:
        myTree: nested dict of the form {feature: {value: subtree-or-label}}.

    Returns:
        The maximum number of decision nodes on any root-to-leaf path.
    """
    maxDepth = 0
    # next(iter(...)) works on both Python 2 and 3; the original
    # myTree.keys()[0] raises TypeError on Python 3, where keys() is a
    # non-indexable view.
    firstStr = next(iter(myTree))
    secondDict = myTree[firstStr]
    for key in secondDict:
        if isinstance(secondDict[key], dict):
            thisDepth = 1 + getTreeDepth(secondDict[key])
        else:
            thisDepth = 1
        if thisDepth > maxDepth:
            maxDepth = thisDepth
    return maxDepth
def retrieveTree(i):
    """Return one of two canned example trees for exercising the plotter."""
    cannedTrees = [
        {'no surfacing': {0: 'no', 1: {'flippers': {0: 'no', 1: 'yes'}}}},
        {'no surfacing': {0: 'no',
                          1: {'flippers': {0: {'head': {0: 'no', 1: 'yes'}},
                                           1: 'no'}}}},
    ]
    return cannedTrees[i]
def plotMidText(cntrPt, parentPt, txtString):
xMid = (parentPt[0] - cntrPt[0]) / 2.0 + cntrPt[0]<|fim▁hole|>
def plotTree(myTree, parentPt, nodeTxt):
    """Recursively draw myTree with its root attached at parentPt.

    Layout state lives on the function object (plotTree.totalW,
    plotTree.totalD, plotTree.xoff, plotTree.yoff), set up by
    createPlot(inTree); coordinates are axes fractions.
    """
    numLeafs = getNumLeafs(myTree)
    # next(iter(...)) instead of myTree.keys()[0]: compatible with
    # Python 3, where dict.keys() is a non-indexable view.  The unused
    # depth = getTreeDepth(myTree) local was dropped.
    firstStr = next(iter(myTree))
    cntrPt = (plotTree.xoff + (1.0 + float(numLeafs)) / 2.0 / plotTree.totalW,
              plotTree.yoff)
    plotMidText(cntrPt, parentPt, nodeTxt)
    plotNoe(firstStr, cntrPt, parentPt, decisionNoe)
    secondDict = myTree[firstStr]
    # Descend one level before drawing children.
    plotTree.yoff = plotTree.yoff - 1.0 / plotTree.totalD
    for key in secondDict:
        if isinstance(secondDict[key], dict):
            plotTree(secondDict[key], cntrPt, str(key))
        else:
            plotTree.xoff = plotTree.xoff + 1.0 / plotTree.totalW
            plotNoe(secondDict[key], (plotTree.xoff, plotTree.yoff), cntrPt,
                    leafNode)
            plotMidText((plotTree.xoff, plotTree.yoff), cntrPt, str(key))
    # Restore the vertical cursor for the caller's level.
    plotTree.yoff = plotTree.yoff + 1.0 / plotTree.totalD
def createPlot(inTree):
fig = plt.figure(1, facecolor='white')
fig.clf()
axprops = dict(xticks=[], yticks=[])
createPlot.ax1 = plt.subplot(111, frameon=False, **axprops)
plotTree.totalW = float(getNumLeafs(inTree))
plotTree.totalD = float(getTreeDepth(inTree))
plotTree.xoff = - 0.5 / plotTree.totalW
plotTree.yoff = 1.0
plotTree(inTree, (0.5, 1.0), '')
plt.show()<|fim▁end|> | yMid = (parentPt[1] - cntrPt[1]) / 2.0 + cntrPt[1]
createPlot.ax1.text(xMid, yMid, txtString, va="center", ha="center", rotation=30) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#<|fim▁hole|>#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test_sale
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. |
<|file_name|>test_v3.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import uuid
from keystoneauth1 import exceptions
from keystoneauth1 import loading
from keystoneauth1.tests.unit.loading import utils
class V3PasswordTests(utils.TestCase):
    """Option-loading tests for the 'v3password' auth plugin."""
    def setUp(self):
        super(V3PasswordTests, self).setUp()
        self.auth_url = uuid.uuid4().hex
    def create(self, **kwargs):
        """Load the v3password plugin from options, defaulting auth_url."""
        kwargs.setdefault('auth_url', self.auth_url)
        loader = loading.get_plugin_loader('v3password')
        return loader.load_from_options(**kwargs)
    def test_basic(self):
        # A fully-specified set of options round-trips onto the plugin.
        username = uuid.uuid4().hex
        user_domain_id = uuid.uuid4().hex
        password = uuid.uuid4().hex
        project_name = uuid.uuid4().hex
        project_domain_id = uuid.uuid4().hex
        p = self.create(username=username,
                        user_domain_id=user_domain_id,
                        project_name=project_name,
                        project_domain_id=project_domain_id,
                        password=password)
        # The password method is the plugin's first (and only) auth method.
        pw_method = p.auth_methods[0]
        self.assertEqual(username, pw_method.username)
        self.assertEqual(user_domain_id, pw_method.user_domain_id)
        self.assertEqual(password, pw_method.password)
        self.assertEqual(project_name, p.project_name)
        self.assertEqual(project_domain_id, p.project_domain_id)
    def test_without_user_domain(self):
        # Loading must fail with OptionError when no user domain is given.
        self.assertRaises(exceptions.OptionError,
                          self.create,
                          username=uuid.uuid4().hex,
                          password=uuid.uuid4().hex)
    def test_without_project_domain(self):
        # Loading must fail when project_name lacks a project domain.
        self.assertRaises(exceptions.OptionError,
                          self.create,
                          username=uuid.uuid4().hex,
                          password=uuid.uuid4().hex,
                          user_domain_id=uuid.uuid4().hex,
                          project_name=uuid.uuid4().hex)
class TOTPTests(utils.TestCase):
def setUp(self):
super(TOTPTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3totp')
return loader.load_from_options(**kwargs)
def test_basic(self):
username = uuid.uuid4().hex
user_domain_id = uuid.uuid4().hex
# passcode is 6 digits
passcode = ''.join(str(random.randint(0, 9)) for x in range(6))
project_name = uuid.uuid4().hex
project_domain_id = uuid.uuid4().hex
p = self.create(username=username,
user_domain_id=user_domain_id,
project_name=project_name,
project_domain_id=project_domain_id,
passcode=passcode)
totp_method = p.auth_methods[0]
self.assertEqual(username, totp_method.username)
self.assertEqual(user_domain_id, totp_method.user_domain_id)
self.assertEqual(passcode, totp_method.passcode)
self.assertEqual(project_name, p.project_name)
self.assertEqual(project_domain_id, p.project_domain_id)
def test_without_user_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
passcode=uuid.uuid4().hex)
def test_without_project_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
passcode=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex,
project_name=uuid.uuid4().hex)
class OpenIDConnectBaseTests(object):
plugin_name = None
def setUp(self):
super(OpenIDConnectBaseTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader(self.plugin_name)
return loader.load_from_options(**kwargs)
def test_base_options_are_there(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['client-id', 'client-secret', 'access-token-endpoint',
'access-token-type', 'openid-scope',
'discovery-endpoint']).issubset(
set([o.name for o in options]))
)
# openid-scope gets renamed into "scope"
self.assertIn('scope', [o.dest for o in options])
<|fim▁hole|>class OpenIDConnectClientCredentialsTests(OpenIDConnectBaseTests,
utils.TestCase):
plugin_name = "v3oidcclientcredentials"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['openid-scope']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
scope = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
scope = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectPasswordTests(OpenIDConnectBaseTests, utils.TestCase):
plugin_name = "v3oidcpassword"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['username', 'password', 'openid-scope']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
username = uuid.uuid4().hex
password = uuid.uuid4().hex
scope = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
scope = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(username=username,
password=password,
identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(username, oidc.username)
self.assertEqual(password, oidc.password)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectAuthCodeTests(OpenIDConnectBaseTests, utils.TestCase):
plugin_name = "v3oidcauthcode"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['redirect-uri', 'code']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
redirect_uri = uuid.uuid4().hex
authorization_code = uuid.uuid4().hex
scope = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(code=authorization_code,
redirect_uri=redirect_uri,
identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(redirect_uri, oidc.redirect_uri)
self.assertEqual(authorization_code, oidc.code)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectAccessToken(utils.TestCase):
plugin_name = "v3oidcaccesstoken"
def setUp(self):
super(OpenIDConnectAccessToken, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader(self.plugin_name)
return loader.load_from_options(**kwargs)
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['access-token']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
oidc = self.create(access_token=access_token,
identity_provider=identity_provider,
protocol=protocol)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token, oidc.access_token)
class V3TokenlessAuthTests(utils.TestCase):
def setUp(self):
super(V3TokenlessAuthTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3tokenlessauth')
return loader.load_from_options(**kwargs)
def test_basic(self):
domain_id = uuid.uuid4().hex
domain_name = uuid.uuid4().hex
project_id = uuid.uuid4().hex
project_name = uuid.uuid4().hex
project_domain_id = uuid.uuid4().hex
project_domain_name = uuid.uuid4().hex
tla = self.create(domain_id=domain_id,
domain_name=domain_name,
project_id=project_id,
project_name=project_name,
project_domain_id=project_domain_id,
project_domain_name=project_domain_name)
self.assertEqual(domain_id, tla.domain_id)
self.assertEqual(domain_name, tla.domain_name)
self.assertEqual(project_id, tla.project_id)
self.assertEqual(project_name, tla.project_name)
self.assertEqual(project_domain_id, tla.project_domain_id)
self.assertEqual(project_domain_name, tla.project_domain_name)
def test_missing_parameters(self):
self.assertRaises(exceptions.OptionError,
self.create,
domain_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
domain_name=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_name=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_name=None)
# only when a project_name is provided, project_domain_id will
# be use to uniquely identify the project. It's an invalid
# option when it's just by itself.
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_id=uuid.uuid4().hex)
# only when a project_name is provided, project_domain_name will
# be use to uniquely identify the project. It's an invalid
# option when it's just by itself.
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_name=uuid.uuid4().hex)
self.assertRaises(exceptions.OptionError,
self.create,
project_name=uuid.uuid4().hex)
class V3ApplicationCredentialTests(utils.TestCase):
def setUp(self):
super(V3ApplicationCredentialTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3applicationcredential')
return loader.load_from_options(**kwargs)
def test_basic(self):
id = uuid.uuid4().hex
secret = uuid.uuid4().hex
app_cred = self.create(application_credential_id=id,
application_credential_secret=secret)
ac_method = app_cred.auth_methods[0]
self.assertEqual(id, ac_method.application_credential_id)
self.assertEqual(secret, ac_method.application_credential_secret)
def test_with_name(self):
name = uuid.uuid4().hex
secret = uuid.uuid4().hex
username = uuid.uuid4().hex
user_domain_id = uuid.uuid4().hex
app_cred = self.create(application_credential_name=name,
application_credential_secret=secret,
username=username,
user_domain_id=user_domain_id)
ac_method = app_cred.auth_methods[0]
self.assertEqual(name, ac_method.application_credential_name)
self.assertEqual(secret, ac_method.application_credential_secret)
self.assertEqual(username, ac_method.username)
self.assertEqual(user_domain_id, ac_method.user_domain_id)
def test_without_user_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
application_credential_name=uuid.uuid4().hex,
username=uuid.uuid4().hex,
application_credential_secret=uuid.uuid4().hex)
def test_without_name_or_id(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex,
application_credential_secret=uuid.uuid4().hex)
def test_without_secret(self):
self.assertRaises(exceptions.OptionError,
self.create,
application_credential_id=uuid.uuid4().hex,
username=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex)<|fim▁end|> | |
<|file_name|>showroom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Module providing views for the folderish content page type"""
import json
import urllib
from Acquisition import aq_inner
from Products.Five.browser import BrowserView
from plone import api
from plone.i18n.normalizer.interfaces import IIDNormalizer
from zope.component import getUtility
from newe.sitecontent import utils
from newe.sitecontent.showroom import IShowRoom
from newe.sitecontent.project import IProject
class ShowRoomView(BrowserView):
""" Show room default view """
def render(self):
return self.index()
def __call__(self):
self.has_showrooms = len(self.showrooms()) > 0
self.has_subitems = len(self.subitems()) > 0
return self.render()
def showroom_content(self):
context = aq_inner(self.context)
template = context.restrictedTraverse('@@showroom-content')()
return template
def showrooms(self):
context = aq_inner(self.context)
return context.restrictedTraverse('@@folderListing')(
portal_type='newe.sitecontent.showroom',
review_state='published')
def projects(self):
context = aq_inner(self.context)
return context.restrictedTraverse('@@folderListing')(
portal_type='newe.sitecontent.project',
review_state='published')
def subitems(self):
""" A showroom containing other showrooms
should not list contained projects
"""
if self.has_showrooms:
return self.showrooms()
return self.projects()
def _project_assets(self, uuid):
project = api.content.get(UID=uuid)
data = getattr(project, 'assets')
if data is None:
data = dict()
return data
def _assets(self, uuid):
return json.loads(self._project_assets(uuid))
def has_preview_image(self, uuid):
""" Test if we have an available preview image """
if len(self._project_assets(uuid)):
assets = self._assets(uuid)
return len(assets['items']) > 0
return False
def get_preview_container(self, uuid):
data = self._assets(uuid)
items = data['items']
return items[0]
def rendered_preview_image(self, uuid):
item = api.content.get(UID=uuid)
return item.restrictedTraverse('@@stack-preview')()
def normalize_subject(self, subject):
""" Normalizer for project filter categories
This function is called by the isotope filter navigation
"""
normalizer = getUtility(IIDNormalizer)
return normalizer.normalize(subject)
def url_encode_subject_query(self, subject):
""" Quote subject query string """
return urllib.quote(subject)
def computed_class(self, uuid):
item = api.content.get(UID=uuid)
klass = 'app-card-{0}'.format(uuid)
subjects = item.Subject()
for subject in subjects:
pretty_subject = self.normalize_subject(subject)
klass = '{0} {1}'.format(klass, pretty_subject)
return klass
def available_filter(self):
context = aq_inner(self.context)
context_subjects = utils.keywords_filtered_by_context(context)
return context_subjects
def filter_map(self):
idx = 0
mapping = {}
for subject in self.available_filter():
idx += 1
mapping[subject] = idx
return mapping
def filter_map_keys(self):
return self.filter_map().keys()
def item_filter_category(self, uuid):
item = api.content.get(UID=uuid)
subjects = item.Subject()
filter_map = self.filter_map()
if len(subjects) > 1:
item_categories = list()
for subject in subjects:
item_categories.append(filter_map[subject])
return ', '.join(item_categories)
else:
return filter_map[subjects[0]]
class ShowRoomContentView(BrowserView):
""" Embeddable content card listing """
def __call__(self):
self.has_showrooms = len(self.showrooms()) > 0
return self.render()
def render(self):
return self.index()
@property
def traverse_subpath(self):
return self.subpath
def publishTraverse(self, request, name):
if not hasattr(self, 'subpath'):
self.subpath = []
self.subpath.append(name)
return self
def active_filter_category(self):
try:
active_category = self.traverse_subpath[0]
return active_category
except AttributeError:
return None
def contained_items(self, type_interface):
context = aq_inner(self.context)
query = dict(
context=context,
depth=1,
object_provides=type_interface,
portal_state='published',
sort_on='getObjPositionInParent'
)
if self.active_filter_category():
active_filter = self.active_filter_category()
for key, value in self.filter_map().items():
if str(value) == active_filter:
query['Subject'] = key
items = api.content.find(**query)
return items
def showrooms(self):
return self.contained_items(IShowRoom)
def projects(self):
return self.contained_items(IProject)
def subitems(self):
""" A showroom containing other showrooms
should not list contained projects
"""
if self.has_showrooms:
return self.showrooms()
return self.projects()
def _project_assets(self, uuid):
project = api.content.get(UID=uuid)
data = getattr(project, 'assets', None)
if not data:
data = dict()
return data
def _assets(self, uuid):
return json.loads(self._project_assets(uuid))
def has_preview_image(self, uuid):
""" Test if we have an available preview image """
if len(self._project_assets(uuid)):
assets = self._assets(uuid)<|fim▁hole|>
def get_preview_container(self, uuid):
data = self._assets(uuid)
items = data['items']
return items[0]
def rendered_preview_image(self, uuid):
item = api.content.get(UID=uuid)
return item.restrictedTraverse('@@stack-preview')()
def available_filter(self):
context = aq_inner(self.context)
context_subjects = utils.keywords_filtered_by_context(context)
return context_subjects
def filter_map(self):
idx = 0
mapping = {}
for subject in self.available_filter():
idx += 1
mapping[subject] = idx
return mapping
def filter_map_keys(self):
return self.filter_map().keys()
def normalize_subject(self, subject):
""" Normalizer for project filter categories
This function is called by the isotope filter navigation
"""
normalizer = getUtility(IIDNormalizer)
return normalizer.normalize(subject)
def computed_class(self, uuid):
item = api.content.get(UID=uuid)
klass = 'app-card-{0}'.format(uuid)
subjects = item.Subject()
for subject in subjects:
pretty_subject = self.normalize_subject(subject)
klass = '{0} {1}'.format(klass, pretty_subject)
return klass<|fim▁end|> | return len(assets['items']) > 0
return False |
<|file_name|>test_mechanism_odl.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013-2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Kyle Mestery, Cisco Systems, Inc.
import mock
import requests
from neutron.plugins.common import constants
from neutron.plugins.ml2 import config as config
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import mechanism_odl
from neutron.plugins.ml2 import plugin
from neutron.tests import base
from neutron.tests.unit import test_db_plugin as test_plugin
from neutron.tests.unit import testlib_api
PLUGIN_NAME = 'neutron.plugins.ml2.plugin.Ml2Plugin'
class OpenDaylightTestCase(test_plugin.NeutronDbPluginV2TestCase):
def setUp(self):
# Enable the test mechanism driver to ensure that
# we can successfully call through to all mechanism
# driver apis.
config.cfg.CONF.set_override('mechanism_drivers',
['logger', 'opendaylight'],
'ml2')
# Set URL/user/pass so init doesn't throw a cfg required error.
# They are not used in these tests since sendjson is overwritten.
config.cfg.CONF.set_override('url', 'http://127.0.0.1:9999', 'ml2_odl')
config.cfg.CONF.set_override('username', 'someuser', 'ml2_odl')
config.cfg.CONF.set_override('password', 'somepass', 'ml2_odl')
super(OpenDaylightTestCase, self).setUp(PLUGIN_NAME)
self.port_create_status = 'DOWN'
self.segment = {'api.NETWORK_TYPE': ""}
self.mech = mechanism_odl.OpenDaylightMechanismDriver()
mechanism_odl.OpenDaylightMechanismDriver.sendjson = (
self.check_sendjson)
def check_sendjson(self, method, urlpath, obj, ignorecodes=[]):
self.assertFalse(urlpath.startswith("http://"))
def test_check_segment(self):
"""Validate the check_segment call."""
self.segment[api.NETWORK_TYPE] = constants.TYPE_LOCAL
self.assertTrue(self.mech.check_segment(self.segment))
self.segment[api.NETWORK_TYPE] = constants.TYPE_FLAT
self.assertFalse(self.mech.check_segment(self.segment))
self.segment[api.NETWORK_TYPE] = constants.TYPE_VLAN
self.assertTrue(self.mech.check_segment(self.segment))
self.segment[api.NETWORK_TYPE] = constants.TYPE_GRE
self.assertTrue(self.mech.check_segment(self.segment))
self.segment[api.NETWORK_TYPE] = constants.TYPE_VXLAN
self.assertTrue(self.mech.check_segment(self.segment))
# Validate a network type not currently supported
self.segment[api.NETWORK_TYPE] = 'mpls'
self.assertFalse(self.mech.check_segment(self.segment))
class OpenDayLightMechanismConfigTests(testlib_api.SqlTestCase):
def _set_config(self, url='http://127.0.0.1:9999', username='someuser',
password='somepass'):
config.cfg.CONF.set_override('mechanism_drivers',
['logger', 'opendaylight'],
'ml2')
config.cfg.CONF.set_override('url', url, 'ml2_odl')
config.cfg.CONF.set_override('username', username, 'ml2_odl')
config.cfg.CONF.set_override('password', password, 'ml2_odl')
def _test_missing_config(self, **kwargs):
self._set_config(**kwargs)
self.assertRaises(config.cfg.RequiredOptError,
plugin.Ml2Plugin)
def test_valid_config(self):
self._set_config()
plugin.Ml2Plugin()
def test_missing_url_raises_exception(self):
self._test_missing_config(url=None)
def test_missing_username_raises_exception(self):
self._test_missing_config(username=None)
def test_missing_password_raises_exception(self):
self._test_missing_config(password=None)
class OpenDaylightMechanismTestBasicGet(test_plugin.TestBasicGet,
OpenDaylightTestCase):
pass
class OpenDaylightMechanismTestNetworksV2(test_plugin.TestNetworksV2,
OpenDaylightTestCase):
pass
class OpenDaylightMechanismTestSubnetsV2(test_plugin.TestSubnetsV2,
OpenDaylightTestCase):
pass
class OpenDaylightMechanismTestPortsV2(test_plugin.TestPortsV2,
OpenDaylightTestCase):
pass
class AuthMatcher(object):
def __eq__(self, obj):
return (obj.username == config.cfg.CONF.ml2_odl.username and
obj.password == config.cfg.CONF.ml2_odl.password)
class OpenDaylightMechanismDriverTestCase(base.BaseTestCase):
def setUp(self):
super(OpenDaylightMechanismDriverTestCase, self).setUp()
config.cfg.CONF.set_override('mechanism_drivers',
['logger', 'opendaylight'], 'ml2')
config.cfg.CONF.set_override('url', 'http://127.0.0.1:9999', 'ml2_odl')
config.cfg.CONF.set_override('username', 'someuser', 'ml2_odl')
config.cfg.CONF.set_override('password', 'somepass', 'ml2_odl')
self.mech = mechanism_odl.OpenDaylightMechanismDriver()
self.mech.initialize()
@staticmethod
def _get_mock_delete_resource_context():
current = {'id': '00000000-1111-2222-3333-444444444444'}
context = mock.Mock(current=current)
return context
_status_code_msgs = {
204: '',
401: '401 Client Error: Unauthorized',
403: '403 Client Error: Forbidden',
404: '404 Client Error: Not Found',
409: '409 Client Error: Conflict',
501: '501 Server Error: Not Implemented'
}
@classmethod
def _get_mock_request_response(cls, status_code):
response = mock.Mock(status_code=status_code)
response.raise_for_status = mock.Mock() if status_code < 400 else (
mock.Mock(side_effect=requests.exceptions.HTTPError(
cls._status_code_msgs[status_code])))
return response
def _test_delete_resource_postcommit(self, object_type, status_code,
exc_class=None):
self.mech.out_of_sync = False
method = getattr(self.mech, 'delete_%s_postcommit' % object_type)
context = self._get_mock_delete_resource_context()
request_response = self._get_mock_request_response(status_code)
with mock.patch('requests.request',
return_value=request_response) as mock_method:
if exc_class is not None:<|fim▁hole|> method(context)
url = '%s/%ss/%s' % (config.cfg.CONF.ml2_odl.url, object_type,
context.current['id'])
mock_method.assert_called_once_with(
'delete', url=url, headers={'Content-Type': 'application/json'},
data=None, auth=AuthMatcher(),
timeout=config.cfg.CONF.ml2_odl.timeout)
def test_delete_network_postcommit(self):
self._test_delete_resource_postcommit('network',
requests.codes.no_content)
for status_code in (requests.codes.unauthorized,
requests.codes.not_found,
requests.codes.conflict):
self._test_delete_resource_postcommit(
'network', status_code, requests.exceptions.HTTPError)
def test_delete_subnet_postcommit(self):
self._test_delete_resource_postcommit('subnet',
requests.codes.no_content)
for status_code in (requests.codes.unauthorized,
requests.codes.not_found,
requests.codes.conflict,
requests.codes.not_implemented):
self._test_delete_resource_postcommit(
'subnet', status_code, requests.exceptions.HTTPError)
def test_delete_port_postcommit(self):
self._test_delete_resource_postcommit('port',
requests.codes.no_content)
for status_code in (requests.codes.unauthorized,
requests.codes.forbidden,
requests.codes.not_found,
requests.codes.not_implemented):
self._test_delete_resource_postcommit(
'port', status_code, requests.exceptions.HTTPError)<|fim▁end|> | self.assertRaises(exc_class, method, context)
else: |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Copyright (C) 2014 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from httplib import FORBIDDEN, INTERNAL_SERVER_ERROR, OK
from json import dumps, loads
from logging import getLogger
from traceback import format_exc
# gevent
from gevent import sleep, spawn
# huTools
from huTools.structured import dict2xml
# Zato
from zato.common import DATA_FORMAT, PUB_SUB, ZATO_ERROR, ZATO_NONE, ZATO_OK
from zato.common.pubsub import ItemFull, PermissionDenied
from zato.common.util import get_basic_auth_credentials
from zato.server.connection.http_soap import BadRequest, Forbidden, TooManyRequests, Unauthorized
from zato.server.service import AsIs, Bool, Int, Service
from zato.server.service.internal import AdminService
logger_overflown = getLogger('zato_pubsub_overflown')
# ################################################################################################################################
class DeleteExpired(AdminService):
""" Invoked when a server is starting - periodically spawns a greenlet deleting expired messages.
"""
def _delete_expired(self):
self.logger.debug('Deleted expired messages %s', self.pubsub.impl.delete_expired())
def handle(self):
interval = float(self.server.fs_server_config.pubsub.delete_expired_interval)
while True:
self.logger.debug('Deleting expired messages, interval %rs', interval)
spawn(self._delete_expired)
sleep(interval)
# ################################################################################################################################
class InvokeCallbacks(AdminService):
""" Invoked when a server is starting - periodically spawns a greenlet invoking consumer URL callbacks.
"""
def _reject(self, msg_ids, sub_key, consumer, reason):
self.pubsub.reject(sub_key, msg_ids)
self.logger.error('Could not deliver messages `%s`, sub_key `%s` to `%s`, reason `%s`', msg_ids, sub_key, consumer, reason)
def _invoke_callbacks(self):
callback_consumers = list(self.pubsub.impl.get_callback_consumers())
self.logger.debug('Callback consumers found `%s`', callback_consumers)
for consumer in callback_consumers:
with self.lock(consumer.sub_key):
msg_ids = []
out = {
'status': ZATO_OK,
'results_count': 0,
'results': []
}
messages = self.pubsub.get(consumer.sub_key, get_format=PUB_SUB.GET_FORMAT.JSON.id)
for msg in messages:
msg_ids.append(msg['metadata']['msg_id'])
out['results_count'] += 1
out['results'].append(msg)
# messages is a generator so we still don't know if we had anything.
if msg_ids:
outconn = self.outgoing.plain_http[consumer.callback_name]
if outconn.config['data_format'] == DATA_FORMAT.XML:
out = dict2xml(out)
content_type = 'application/xml'
else:
out = dumps(out)
content_type = 'application/json'
try:
response = outconn.conn.post(self.cid, data=out, headers={'content-type': content_type})
except Exception, e:
self._reject(msg_ids, consumer.sub_key, consumer, format_exc(e))
else:
if response.status_code == OK:
self.pubsub.acknowledge(consumer.sub_key, msg_ids)
else:
self._reject(
msg_ids, consumer.sub_key, consumer, '`{}` `{}`'.format(response.status_code, response.text))
def handle(self):
# TODO: self.logger's name should be 'zato_pubsub' so it got logged to the same location
# the rest of pub/sub does.
interval = float(self.server.fs_server_config.pubsub.invoke_callbacks_interval)
while True:
self.logger.debug('Invoking pub/sub callbacks, interval %rs', interval)
spawn(self._invoke_callbacks)
sleep(interval)
# ################################################################################################################################
class MoveToTargetQueues(AdminService):
""" Invoked when a server is starting - periodically spawns a greenlet moving published messages to recipient queues.
"""
def _move_to_target_queues(self):
overflown = []
for item in self.pubsub.impl.move_to_target_queues():
for result, target_queue, msg_id in item:
if result == PUB_SUB.MOVE_RESULT.OVERFLOW:
self.logger.warn('Message overflow, queue:`%s`, msg_id:`%s`', target_queue, msg_id)
overflown.append((target_queue[target_queue.rfind(':')+1:], msg_id))
if overflown:
self.invoke_async(StoreOverflownMessages.get_name(), overflown, to_json_string=True)
self.logger.debug('Messages moved to target queues')
def handle(self):
interval = float(self.server.fs_server_config.pubsub.move_to_target_queues_interval)
while True:
self.logger.debug('Moving messages to target queues, interval %rs', interval)
spawn(self._move_to_target_queues)
sleep(interval)
# ################################################################################################################################
class StoreOverflownMessages(AdminService):
""" Stores on filesystem messages that were above a consumer's max backlog and marks them as rejected by the consumer.
"""
def handle(self):
acks = {}
for sub_key, msg_id in loads(self.request.payload):
logger_overflown.warn('%s - %s - %s', msg_id, self.pubsub.get_consumer_by_sub_key(sub_key).name,
self.pubsub.get_message(msg_id))
msg_ids = acks.setdefault(sub_key, [])
msg_ids.append(msg_id)
for consumer_sub_key, msg_ids in acks.iteritems():
self.pubsub.acknowledge(sub_key, msg_id)
# ################################################################################################################################
class RESTHandler(Service):
""" Handles calls to pub/sub from REST clients.
"""
class SimpleIO(object):
input_required = ('item_type', 'item')
input_optional = ('max', 'dir', 'format', 'mime_type', Int('priority'), Int('expiration'), AsIs('msg_id'),
Bool('ack'), Bool('reject'))
default = ZATO_NONE
use_channel_params_only = True
# ################################################################################################################################
def _raise_unauthorized(self):
raise Unauthorized(self.cid, 'You are not authorized to access this resource', 'Zato pub/sub')
def validate_input(self):
username, password = get_basic_auth_credentials(self.wsgi_environ.get('HTTP_AUTHORIZATION'))
if not username:
self._raise_unauthorized()
for item in self.server.worker_store.request_dispatcher.url_data.basic_auth_config.values():
if item.config.username == username and item.config.password == password:
client = item
break
else:
self._raise_unauthorized()
if self.request.input.item_type not in PUB_SUB.URL_ITEM_TYPE:
raise BadRequest(self.cid, 'None of the supported resources `{}` found in URL path'.format(
', '.join(PUB_SUB.URL_ITEM_TYPE)))
sub_key = self.wsgi_environ.get('HTTP_X_ZATO_PUBSUB_KEY', ZATO_NONE)
is_consumer = self.request.input.item_type == PUB_SUB.URL_ITEM_TYPE.MESSAGES.id
# Deletes don't access topics, they operate on messages.
if self.wsgi_environ['REQUEST_METHOD'] != 'DELETE':
if not self.pubsub.can_access_topic(client.config.id, self.request.input.item, is_consumer):
raise Forbidden(self.cid, 'You are not authorized to access this resource')
self.environ['sub_key'] = sub_key
self.environ['client_id'] = client.config.id
self.environ['format'] = self.request.input.format if self.request.input.format else PUB_SUB.GET_FORMAT.DEFAULT.id
self.environ['is_json'] = self.environ['format'] == PUB_SUB.GET_FORMAT.JSON.id
# ################################################################################################################################
def _set_payload_data(self, out, status_code=OK):
if self.environ['is_json']:
content_type = 'application/json'
out = dumps(out)
else:
content_type = 'application/xml'
out = dict2xml(out)
self.response.headers['Content-Type'] = content_type
self.response.payload = out
self.response.status_code = status_code
# ################################################################################################################################
def _handle_POST_topic(self):
""" Publishes a message on a topic.
"""
pub_data = {
'payload': self.request.raw_request,
'topic': self.request.input.item,
'mime_type': self.request.input.mime_type or self.wsgi_environ['CONTENT_TYPE'],
'priority': int(self.request.input.priority or PUB_SUB.DEFAULT_PRIORITY),
'expiration': int(self.request.input.expiration or PUB_SUB.DEFAULT_EXPIRATION),
'msg_id': self.request.input.msg_id,
'client_id': self.environ['client_id'],
}
self._set_payload_data({
'status': ZATO_OK,
'msg_id':self.pubsub.publish(**pub_data).msg.msg_id
})
# ################################################################################################################################
def _handle_POST_msg(self):
""" Returns messages from topics, either in JSON or XML.
"""
out = {
'status': ZATO_OK,
'results_count': 0,
'results': []
}
max_batch_size = int(self.request.input.max) if self.request.input.max else PUB_SUB.DEFAULT_GET_MAX_BATCH_SIZE
is_fifo = True if (self.request.input.dir == PUB_SUB.GET_DIR.FIFO or not self.request.input.dir) else False
try:
for item in self.pubsub.get(self.environ['sub_key'], max_batch_size, is_fifo, self.environ['format']):
if self.environ['is_json']:
out_item = item
else:
out_item = {'metadata': item.to_dict()}
out_item['payload'] = item.payload
out['results'].append(out_item)
out['results_count'] += 1
except ItemFull, e:
raise TooManyRequests(self.cid, e.msg)
else:
self._set_payload_data(out)
# ################################################################################################################################
def handle_POST(self):
try:
getattr(self, '_handle_POST_{}'.format(self.request.input.item_type))()
except Exception, e:
details, status_code = ('Permission denied', FORBIDDEN) if isinstance(e, PermissionDenied) else (e.message, INTERNAL_SERVER_ERROR)
self.logger.warn('Could not handle POST pub/sub (%s %s), e:`%s`', self.cid, details, format_exc(e))<|fim▁hole|> self._set_payload_data({'status': ZATO_ERROR, 'details':details}, status_code)
def handle_DELETE(self):
actions = ('ack', 'reject')
try:
self.request.input.require_any(*actions)
except ValueError:
raise BadRequest(self.cid, 'Missing state to set, should be one of `{}`'.format(', '.join(actions)))
if self.request.input.ack and self.request.input.reject:
raise BadRequest(self.cid, 'Cannot both acknowledge and reject a message')
func = self.pubsub.acknowledge if self.request.input.ack else self.pubsub.reject
result = func(self.environ['sub_key'], self.request.input.item)
if self.request.input.item in result:
status = ZATO_OK
details = ''
else:
status = ZATO_ERROR
details = 'Message not found `{}`'.format(self.request.input.item)
self._set_payload_data({'status': status, 'details':details})
# ################################################################################################################################<|fim▁end|> | |
<|file_name|>callback_this.js<|end_file_name|><|fim▁begin|>// @flow
class A {
x = [1, 2, 3];
y = 4;
foo() {
this.x = this.x.map(function (z) {
this.y; // error, function has wrong this
});
}
}<|fim▁hole|> x = [1, 2, 3];
y = 4;
foo() {
this.x = this.x.map(function (z) {
this.y; // ok, function gets passed correct this
}, this);
}
}
class C {
x = [1, 2, 3];
y = 4;
foo() {
this.x = this.x.map(z => {
this.y; // ok, arrow binds surrounding context this
});
}
}<|fim▁end|> |
class B { |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-11 22:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
<|fim▁hole|>
operations = [
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime', models.DateTimeField()),
('site_ended', models.CharField(choices=[('T', 'Terrorists'), ('CT', 'Counter-Terrorists')], max_length=255)),
('rounds_for', models.IntegerField()),
('rounds_against', models.IntegerField()),
],
),
migrations.CreateModel(
name='GamePlayer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('kills', models.IntegerField()),
('assists', models.IntegerField()),
('deaths', models.IntegerField()),
('mvps', models.IntegerField()),
('points', models.IntegerField()),
('game', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stats.Game')),
],
),
migrations.CreateModel(
name='Map',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('map_name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Player',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=255)),
('rank', models.CharField(blank=True, choices=[(1, 'Silver I'), (2, 'Silver II'), (3, 'Silver III'), (4, 'Silver IV'), (5, 'Silver Elite'), (6, 'Silver Elite Master'), (7, 'Gold Nova I'), (8, 'Gold Nova II'), (9, 'Gold Nova III'), (10, 'Gold Nova Master'), (11, 'Master Guardian I'), (12, 'Master Guardian II'), (13, 'Master Guardian Elite'), (14, 'Distinguished Master Guardian'), (15, 'Legendary Eagle'), (16, 'Legendary Eagle Master'), (17, 'Supreme Master First Class'), (18, 'The Global Elite')], max_length=255, null=True)),
],
),
migrations.AddField(
model_name='gameplayer',
name='player',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stats.Player'),
),
migrations.AddField(
model_name='game',
name='game_map',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='stats.Map'),
),
migrations.AddField(
model_name='game',
name='players',
field=models.ManyToManyField(through='stats.GamePlayer', to='stats.Player'),
),
]<|fim▁end|> | dependencies = [
] |
<|file_name|>main.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
global.app = () => {
return Tea;
}<|fim▁end|> | 'use strict'
import Tea from './modules/tea.core.js' |
<|file_name|>consistency.rs<|end_file_name|><|fim▁begin|>#[allow(non_camel_case_types)]
#[repr(C)]
pub enum CassConsistency {
ANY=0,
ONE=1,
TWO=2,<|fim▁hole|> LOCAL_QUORUM=6,
EACH_QUORUM=7,
SERIAL=8,
LOCAL_SERIAL=9,
LOCAL_ONE=10,
}
impl Copy for CassConsistency {}<|fim▁end|> | THREE=3,
QUORUM=4,
ALL=5, |
<|file_name|>servicebroker.go<|end_file_name|><|fim▁begin|>package cmd
import (
"errors"
"fmt"
"github.com/openshift/origin/pkg/client"
"github.com/openshift/origin/pkg/cmd/util/clientcmd"
servicebrokerapi "github.com/openshift/origin/pkg/servicebroker/api"
"github.com/spf13/cobra"
"io"
kcmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
"net/url"
"strings"
)
const (
newServiceBrokerLong = `
Create a new servicebroker for administrator
`
newServiceBrokerExample = `# Create a new servicebroker with [name username password url]
$ %[1]s mysql_servicebroker --username="username" --password="password" --url="127.0.0.1:8000"`
)
type NewServiceBrokerOptions struct {
Url string<|fim▁hole|> Password string
Client client.Interface
Out io.Writer
}
func NewCmdServiceBroker(fullName string, f *clientcmd.Factory, out io.Writer) *cobra.Command {
options := &NewServiceBrokerOptions{}
options.Out = out
cmd := &cobra.Command{
Use: "new-servicebroker NAME [--username=USERNAME] [--password=PASSWORD] [--url=URL]",
Short: "create a new servicebroker",
Long: newServiceBrokerLong,
Example: fmt.Sprintf(newServiceBrokerExample, fullName),
Run: func(cmd *cobra.Command, args []string) {
var err error
if err = options.complete(cmd, f); err != nil {
kcmdutil.CheckErr(err)
return
}
if options.Client, _, err = f.Clients(); err != nil {
kcmdutil.CheckErr(err)
}
if err := options.Run(); err != nil {
fmt.Printf("run err %s\n", err.Error())
} else {
fmt.Printf("create servicebroker %s success.\n", options.Name)
}
},
}
cmd.Flags().StringVar(&options.Url, "url", "", "ServiceBroker Url")
// cmd.Flags().StringVar(&options.Name, "name", "", "ServiceBroker Name")
cmd.Flags().StringVar(&options.UserName, "username", "", "ServiceBroker username")
cmd.Flags().StringVar(&options.Password, "password", "", "ServiceBroker Password")
return cmd
}
func (o *NewServiceBrokerOptions) complete(cmd *cobra.Command, f *clientcmd.Factory) error {
args := cmd.Flags().Args()
if len(args) == 0 {
cmd.Help()
return errors.New("must have exactly one argument")
}
o.Url = setUrl(o.Url)
_, err := url.Parse(o.Url)
if err != nil {
cmd.Help()
return errors.New("wrong param url format")
}
//o.Url = URL.Host
if len(o.Url) == 0 {
cmd.Help()
return errors.New("wrong param url format")
}
o.Name = args[0]
return nil
}
func (o *NewServiceBrokerOptions) Run() error {
_, err := o.Client.ServiceBrokers().Get(o.Name)
if err == nil {
return errors.New(fmt.Sprintf("servicebroker %s already exists", o.Name))
}
serviceBroker := &servicebrokerapi.ServiceBroker{}
serviceBroker.Spec.Name = o.Name
serviceBroker.Spec.Url = o.Url
serviceBroker.Spec.UserName = o.UserName
serviceBroker.Spec.Password = o.Password
serviceBroker.Annotations = make(map[string]string)
serviceBroker.Name = o.Name
serviceBroker.GenerateName = o.Name
serviceBroker.Status.Phase = servicebrokerapi.ServiceBrokerNew
_, err = o.Client.ServiceBrokers().Create(serviceBroker)
if err != nil {
return err
}
return nil
}
func setUrl(url string) string {
if !strings.HasPrefix(url, "http://") && !strings.HasPrefix(url, "https://") {
url = "http://" + url
}
return url
}<|fim▁end|> | Name string
UserName string |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import smtplib
import email.utils
from email.mime.text import MIMEText
import functools
import qiniu
try:
from config import AIRBB_HOST
except Exception, e:
print '==============no AIRBB_HOST set, ues airbb.xx as default==============', e
AIRBB_HOST = 'airbb.ml'
AIRBB_SUPPORT = 'support@%s' % AIRBB_HOST
def makes(s):
if type(s)==unicode:
return s.encode('utf8','ignore')
else:
return s
def makeu(s):<|fim▁hole|> else:
return s
def ex(func):
@functools.wraps(func)
def foo(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception, e:
print '===================send email ex================', e
return foo
@ex
def send_email(title, content, addr_to, addr_fr=None):
# Create the message
if not addr_fr:
addr_fr = ALANCER_SUPPORT
msg = MIMEText(makes(content), 'html')
msg['To'] = email.utils.formataddr(('Recipient', addr_to))
msg['From'] = email.utils.formataddr(('Airbb', addr_fr))
msg['Subject'] = title
try:
server = smtplib.SMTP('localhost')
except Exception, e:
print 'no SMTP service available', e
return
#server.set_debuglevel(True) # show communication with the server
try:
server.sendmail(addr_fr, [addr_to], msg.as_string())
finally:
server.quit()<|fim▁end|> | if type(s)==str:
return s.decode('utf8','ignore') |
<|file_name|>searchdata.js<|end_file_name|><|fim▁begin|>var indexSectionsWithContent =
{
0: "dfrsw~",
1: "drs",
2: "dfrs~",
3: "rs",
4: "w"
};
var indexSectionNames =
{
0: "all",
1: "classes",
2: "functions",<|fim▁hole|> 4: "pages"
};
var indexSectionLabels =
{
0: "All",
1: "Classes",
2: "Functions",
3: "Variables",
4: "Pages"
};<|fim▁end|> | 3: "variables", |
<|file_name|>defineProperty_test.js<|end_file_name|><|fim▁begin|>// ==========================================================================
// Project: SproutCore Metal
// Copyright: ©2011 Strobe Inc. and contributors.
// License: Licensed under MIT license (see license.js)
// ==========================================================================
require('sproutcore-metal');
// Return true when `keyName` is one of `obj`'s own enumerable properties
// (as observed by a plain for..in walk filtered by hasOwnProperty).
function isEnumerable(obj, keyName) {
  var own = [];
  var prop;
  for (prop in obj) {
    if (obj.hasOwnProperty(prop)) { own.push(prop); }
  }
  return own.indexOf(keyName) >= 0;
}
module("SC.platform.defineProperty()");
test("defining a simple property", function() {
var obj = {};
SC.platform.defineProperty(obj, 'foo', {
enumerable: true,
writable: true,
value: 'FOO'
});
equals(obj.foo, 'FOO', 'should have added property');
obj.foo = "BAR";
equals(obj.foo, 'BAR', 'writable defined property should be writable');
equals(isEnumerable(obj, 'foo'), true, 'foo should be enumerable');
});
test('defining a read only property', function() {
var obj = {};
SC.platform.defineProperty(obj, 'foo', {
enumerable: true,
writable: false,
value: 'FOO'
});
equals(obj.foo, 'FOO', 'should have added property');
obj.foo = "BAR";
if (SC.platform.defineProperty.isSimulated) {
equals(obj.foo, 'BAR', 'simulated defineProperty should silently work');
} else {
equals(obj.foo, 'FOO', 'real defined property should not be writable');
}
});
test('defining a non enumerable property', function() {<|fim▁hole|> value: 'FOO'
});
if (SC.platform.defineProperty.isSimulated) {
equals(isEnumerable(obj, 'foo'), true, 'simulated defineProperty will leave properties enumerable');
} else {
equals(isEnumerable(obj, 'foo'), false, 'real defineProperty will make property not-enumerable');
}
});
test('defining a getter/setter', function() {
var obj = {}, getCnt = 0, setCnt = 0, v = 'FOO';
var desc = {
enumerable: true,
get: function() { getCnt++; return v; },
set: function(val) { setCnt++; v = val; }
};
if (SC.platform.hasPropertyAccessors) {
SC.platform.defineProperty(obj, 'foo', desc);
equals(obj.foo, 'FOO', 'should return getter');
equals(getCnt, 1, 'should have invoked getter');
obj.foo = 'BAR';
equals(obj.foo, 'BAR', 'setter should have worked');
equals(setCnt, 1, 'should have invoked setter');
} else {
raises(function() {
SC.platform.defineProperty(obj, 'foo', desc);
}, Error, 'should throw exception if getters/setters not supported');
}
});
test('defining getter/setter along with writable', function() {
var obj ={};
raises(function() {
SC.platform.defineProperty(obj, 'foo', {
enumerable: true,
get: function() {},
set: function() {},
writable: true
});
}, Error, 'defining writable and get/set should throw exception');
});
test('defining getter/setter along with value', function() {
var obj ={};
raises(function() {
SC.platform.defineProperty(obj, 'foo', {
enumerable: true,
get: function() {},
set: function() {},
value: 'FOO'
});
}, Error, 'defining value and get/set should throw exception');
});<|fim▁end|> | var obj = {};
SC.platform.defineProperty(obj, 'foo', {
enumerable: false,
writable: true, |
<|file_name|>v2adapter.go<|end_file_name|><|fim▁begin|>package firehoseclient
import (
"code.cloudfoundry.org/go-loggregator"
"code.cloudfoundry.org/go-loggregator/conversion"
"code.cloudfoundry.org/go-loggregator/rpc/loggregator_v2"
"context"
"github.com/cloudfoundry/sonde-go/events"
)
type Streamer interface {
Stream(ctx context.Context, req *loggregator_v2.EgressBatchRequest) loggregator.EnvelopeStream
}
type V2Adapter struct {
streamer Streamer
}
// NewV2Adapter wraps the given Streamer in a V2Adapter so v2 envelope
// batches can be consumed through the v1 firehose-style interface.
func NewV2Adapter(s Streamer) V2Adapter {
	return V2Adapter{
		streamer: s,
	}
}
func (a V2Adapter) Firehose(subscriptionID string) chan *events.Envelope {
ctx := context.Background()
es := a.streamer.Stream(ctx, &loggregator_v2.EgressBatchRequest{
ShardId: subscriptionID,
Selectors: []*loggregator_v2.Selector{
{
Message: &loggregator_v2.Selector_Log{
Log: &loggregator_v2.LogSelector{},
},
},
{
Message: &loggregator_v2.Selector_Counter{
Counter: &loggregator_v2.CounterSelector{},
},
},
{
Message: &loggregator_v2.Selector_Event{
Event: &loggregator_v2.EventSelector{},
},
},
{
Message: &loggregator_v2.Selector_Gauge{
Gauge: &loggregator_v2.GaugeSelector{},
},
},
{
Message: &loggregator_v2.Selector_Timer{
Timer: &loggregator_v2.TimerSelector{},
},
},
},<|fim▁hole|> var msgs = make(chan *events.Envelope, 100)
go func() {
for ctx.Err() == nil {
for _, e := range es() {
for _, v1e := range conversion.ToV1(e) {
msgs <- v1e
}
}
}
}()
return msgs
}<|fim▁end|> | })
|
<|file_name|>local_target_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import bz2
import gzip
import os
import random
import shutil
import sys
from helpers import unittest
import mock
import luigi.format
from luigi import LocalTarget
from luigi.local_target import LocalFileSystem
from luigi.target import FileAlreadyExists, MissingParentDirectory
from target_test import FileSystemTargetTestMixin
import itertools
import io
from errno import EEXIST, EXDEV
class LocalTargetTest(unittest.TestCase, FileSystemTargetTestMixin):
PATH_PREFIX = '/tmp/test.txt'
def setUp(self):
self.path = self.PATH_PREFIX + '-' + str(self.id())
self.copy = self.PATH_PREFIX + '-copy-' + str(self.id())
if os.path.exists(self.path):
os.remove(self.path)
if os.path.exists(self.copy):
os.remove(self.copy)
def tearDown(self):
if os.path.exists(self.path):
os.remove(self.path)
if os.path.exists(self.copy):
os.remove(self.copy)
def create_target(self, format=None):
return LocalTarget(self.path, format=format)
def assertCleanUp(self, tmp_path=''):
self.assertFalse(os.path.exists(tmp_path))
def test_exists(self):
t = self.create_target()
p = t.open('w')
self.assertEqual(t.exists(), os.path.exists(self.path))
p.close()
self.assertEqual(t.exists(), os.path.exists(self.path))
@unittest.skipIf(tuple(sys.version_info) < (3, 4), 'only for Python>=3.4')
def test_pathlib(self):
"""Test work with pathlib.Path"""
import pathlib
path = pathlib.Path(self.path)
self.assertFalse(path.exists())
target = LocalTarget(path)
self.assertFalse(target.exists())
with path.open('w') as stream:
stream.write('test me')
self.assertTrue(target.exists())
def test_gzip_with_module(self):
t = LocalTarget(self.path, luigi.format.Gzip)
p = t.open('w')
test_data = b'test'
p.write(test_data)
print(self.path)
self.assertFalse(os.path.exists(self.path))
p.close()
self.assertTrue(os.path.exists(self.path))
# Using gzip module as validation
f = gzip.open(self.path, 'r')
self.assertTrue(test_data == f.read())
f.close()
# Verifying our own gzip reader
f = LocalTarget(self.path, luigi.format.Gzip).open('r')
self.assertTrue(test_data == f.read())
f.close()
def test_bzip2(self):
t = LocalTarget(self.path, luigi.format.Bzip2)
p = t.open('w')
test_data = b'test'
p.write(test_data)
print(self.path)
self.assertFalse(os.path.exists(self.path))
p.close()
self.assertTrue(os.path.exists(self.path))
# Using bzip module as validation
f = bz2.BZ2File(self.path, 'r')
self.assertTrue(test_data == f.read())
f.close()
# Verifying our own bzip2 reader
f = LocalTarget(self.path, luigi.format.Bzip2).open('r')
self.assertTrue(test_data == f.read())
f.close()
def test_copy(self):
t = LocalTarget(self.path)
f = t.open('w')
test_data = 'test'
f.write(test_data)
f.close()
self.assertTrue(os.path.exists(self.path))
self.assertFalse(os.path.exists(self.copy))
t.copy(self.copy)
self.assertTrue(os.path.exists(self.path))
self.assertTrue(os.path.exists(self.copy))
self.assertEqual(t.open('r').read(), LocalTarget(self.copy).open('r').read())
def test_move(self):
t = LocalTarget(self.path)
f = t.open('w')
test_data = 'test'
f.write(test_data)
f.close()
self.assertTrue(os.path.exists(self.path))
self.assertFalse(os.path.exists(self.copy))
t.move(self.copy)
self.assertFalse(os.path.exists(self.path))
self.assertTrue(os.path.exists(self.copy))
def test_move_across_filesystems(self):
t = LocalTarget(self.path)
with t.open('w') as f:
f.write('test_data')
def rename_across_filesystems(src, dst):
err = OSError()
err.errno = EXDEV
raise err
real_rename = os.rename
def mockrename(src, dst):
if '-across-fs' in src:
real_rename(src, dst)
else:
rename_across_filesystems(src, dst)
copy = '%s-across-fs' % self.copy
with mock.patch('os.rename', mockrename):
t.move(copy)
self.assertFalse(os.path.exists(self.path))
self.assertTrue(os.path.exists(copy))
self.assertEqual('test_data', LocalTarget(copy).open('r').read())
def test_format_chain(self):
UTF8WIN = luigi.format.TextFormat(encoding='utf8', newline='\r\n')
t = LocalTarget(self.path, UTF8WIN >> luigi.format.Gzip)
a = u'我é\nçф'
with t.open('w') as f:
f.write(a)
f = gzip.open(self.path, 'rb')
b = f.read()
f.close()
self.assertEqual(b'\xe6\x88\x91\xc3\xa9\r\n\xc3\xa7\xd1\x84', b)
def test_format_chain_reverse(self):
t = LocalTarget(self.path, luigi.format.UTF8 >> luigi.format.Gzip)
f = gzip.open(self.path, 'wb')
f.write(b'\xe6\x88\x91\xc3\xa9\r\n\xc3\xa7\xd1\x84')
f.close()
with t.open('r') as f:
b = f.read()
self.assertEqual(u'我é\nçф', b)
@mock.patch('os.linesep', '\r\n')
def test_format_newline(self):
t = LocalTarget(self.path, luigi.format.SysNewLine)
with t.open('w') as f:
f.write(b'a\rb\nc\r\nd')
with t.open('r') as f:
b = f.read()
with open(self.path, 'rb') as f:
c = f.read()
self.assertEqual(b'a\nb\nc\nd', b)
self.assertEqual(b'a\r\nb\r\nc\r\nd', c)
def theoretical_io_modes(
self,
rwax='rwax',
bt=['', 'b', 't'],
plus=['', '+']):
p = itertools.product(rwax, plus, bt)
return {''.join(c) for c in list(
itertools.chain.from_iterable(
[itertools.permutations(m) for m in p]))}
def valid_io_modes(self, *a, **kw):
modes = set()
t = LocalTarget(is_tmp=True)
t.open('w').close()
for mode in self.theoretical_io_modes(*a, **kw):
try:
io.FileIO(t.path, mode).close()
except ValueError:
pass
except IOError as err:
if err.errno == EEXIST:
modes.add(mode)
else:
raise
else:
modes.add(mode)
return modes
def valid_write_io_modes_for_luigi(self):
return self.valid_io_modes('w', plus=[''])
def valid_read_io_modes_for_luigi(self):
return self.valid_io_modes('r', plus=[''])
def invalid_io_modes_for_luigi(self):
return self.valid_io_modes().difference(
self.valid_write_io_modes_for_luigi(),
self.valid_read_io_modes_for_luigi())
def test_open_modes(self):
t = LocalTarget(is_tmp=True)
print('Valid write mode:', end=' ')
for mode in self.valid_write_io_modes_for_luigi():
print(mode, end=' ')
p = t.open(mode)
p.close()
print()
print('Valid read mode:', end=' ')
for mode in self.valid_read_io_modes_for_luigi():
print(mode, end=' ')
p = t.open(mode)
p.close()
print()
print('Invalid mode:', end=' ')
for mode in self.invalid_io_modes_for_luigi():
print(mode, end=' ')
self.assertRaises(Exception, t.open, mode)
print()
class LocalTargetCreateDirectoriesTest(LocalTargetTest):
    """Re-run the LocalTargetTest suite with targets whose parent
    directories do not exist yet, exercising implicit directory creation.
    """
    # Randomized paths avoid collisions between concurrent test runs.
    path = '/tmp/%s/xyz/test.txt' % random.randint(0, 999999999)
    copy = '/tmp/%s/xyz_2/copy.txt' % random.randint(0, 999999999)
class LocalTargetRelativeTest(LocalTargetTest):
    """Re-run the LocalTargetTest suite using relative file paths."""
    # We had a bug that caused relative file paths to fail, adding test for it
    path = 'test.txt'
    copy = 'copy.txt'
class TmpFileTest(unittest.TestCase):
    """Tests for LocalTarget(is_tmp=True) temporary-file behavior."""

    def test_tmp(self):
        """A tmp target only exists after close, and its underlying file
        is removed when the target object is garbage-collected."""
        t = LocalTarget(is_tmp=True)
        self.assertFalse(t.exists())
        self.assertFalse(os.path.exists(t.path))
        p = t.open('w')
        print('test', file=p)
        # Still atomic: nothing visible until the writer is closed.
        self.assertFalse(t.exists())
        self.assertFalse(os.path.exists(t.path))
        p.close()
        self.assertTrue(t.exists())
        self.assertTrue(os.path.exists(t.path))
        q = t.open('r')
        self.assertEqual(q.readline(), 'test\n')
        q.close()
        path = t.path
        del t  # should remove the underlying file
        self.assertFalse(os.path.exists(path))
class FileSystemTest(unittest.TestCase):
path = '/tmp/luigi-test-dir'
fs = LocalFileSystem()
def setUp(self):
if os.path.exists(self.path):
shutil.rmtree(self.path)
def tearDown(self):
self.setUp()
def test_copy(self):
src = os.path.join(self.path, 'src.txt')
dest = os.path.join(self.path, 'newdir', 'dest.txt')
LocalTarget(src).open('w').close()
self.fs.copy(src, dest)
self.assertTrue(os.path.exists(src))
self.assertTrue(os.path.exists(dest))
def test_mkdir(self):
testpath = os.path.join(self.path, 'foo/bar')
self.assertRaises(MissingParentDirectory, self.fs.mkdir, testpath, parents=False)
self.fs.mkdir(testpath)
self.assertTrue(os.path.exists(testpath))
self.assertTrue(self.fs.isdir(testpath))
self.assertRaises(FileAlreadyExists, self.fs.mkdir, testpath, raise_if_exists=True)
def test_exists(self):
self.assertFalse(self.fs.exists(self.path))
os.mkdir(self.path)
self.assertTrue(self.fs.exists(self.path))
self.assertTrue(self.fs.isdir(self.path))
<|fim▁hole|> pass
self.assertTrue([self.path + '/file'], list(self.fs.listdir(self.path + '/')))
def test_move_to_new_dir(self):
# Regression test for a bug in LocalFileSystem.move
src = os.path.join(self.path, 'src.txt')
dest = os.path.join(self.path, 'newdir', 'dest.txt')
LocalTarget(src).open('w').close()
self.fs.move(src, dest)
self.assertTrue(os.path.exists(dest))<|fim▁end|> | def test_listdir(self):
os.mkdir(self.path)
with open(self.path + '/file', 'w'): |
<|file_name|>map.js<|end_file_name|><|fim▁begin|>(function(){
'use strict';
Ns.views.map.Layout = Marionette.LayoutView.extend({
id: 'map-container',
template: '#map-layout-template',
regions: {
content: '#map-js',
legend: '#legend-js',
panels: '#map-panels',
toolbar: '#map-toolbar',
add: '#map-add-node-js',
details: '#map-details-js'
},
/**
* show regions
*/
onShow: function () {
this.content.show(new Ns.views.map.Content({ parent: this }));
},
/**
* loads map data
*/
loadMap: function () {
var options = { parent: this };
this.toolbar.show(new Ns.views.map.Toolbar(options));
this.panels.show(new Ns.views.map.Panels(options));
this.legend.show(new Ns.views.map.Legend(options));
this.content.currentView.initMapData();
},
/*
* show add node view
*/
addNode: function () {
this.reset();
// if not authenticated
if (Ns.db.user.isAuthenticated() === false) {
// show sign-in modal
$('#signin-modal').modal('show');
// listen to loggedin event and come back here
this.listenToOnce(Ns.db.user, 'loggedin', this.addNode);
return;
}
this.add.show(new Ns.views.map.Add({ parent: this }));
},
showNode: function(node) {
this.details.show(new Ns.views.node.Detail({ model: node, parent: this }));
},
showEditNode: function(node) {
// ensure is allowed to edit
if (node.get('can_edit')) {
this.showNode(node);
this.details.currentView.edit();
}
// otherwise go back to details
else {
Ns.router.navigate('nodes/' + node.id, { trigger: true });
}
},
/*
* resets to view initial state
*/
reset: function () {
this.content.currentView.closeLeafletPopup();
this.add.empty();
this.details.empty();
}
}, { // static methods
show: function (method, args) {
var view;
if (typeof Ns.body.currentView === 'undefined' || !(Ns.body.currentView instanceof Ns.views.map.Layout)) {
view = new Ns.views.map.Layout();
Ns.body.show(view);
}
else {
view = Ns.body.currentView;
view.reset();
}
// call method on Layout view is specified
if (method) { view[method].apply(view, args); }
},
/*
* Resize page elements so that the leaflet map
* takes most of the available space in the window
*/
resizeMap: function () {
var overlayContainer = $('#map-overlay-container'),
height,
selector,
width,
setWidth = false,
body = $('body');
body.css('overflow-x', 'hidden');
// map
if (!overlayContainer.length) {
height = $(window).height() - $('body > header').height();
selector = '#map-container, #map-toolbar';
}
// node details
else {
height = overlayContainer.height() + parseInt(overlayContainer.css('top'), 10);
selector = '#map-container';
}
// set new height
$(selector).height(height);
width = $(window).width();
// take in consideration #map-add-node-js if visible
if ($('#map-add-node-js').is(':visible')) {
width = width - $('#map-add-node-js').outerWidth();
setWidth = true;
}
// take in consideration map toolbar if visible
else if ($('#map-toolbar').is(':visible')) {
width = width - $('#map-toolbar').outerWidth();
setWidth = true;
}
// set width only if map toolbar is showing
if (setWidth){
$('#map').width(width);
}
else{
$('#map').attr('style', '');
}
body.attr('style', '');
// TODO: this is ugly!
// call leaflet invalidateSize() to download any gray spot
if (Ns.body.currentView instanceof Ns.views.map.Layout &&
Ns.body.currentView.content &&
typeof Ns.body.currentView.content.currentView.map !== 'undefined'){
Ns.body.currentView.content.currentView.map.invalidateSize();
}
}
});
Ns.views.map.Content = Marionette.ItemView.extend({
template: false,
collectionEvents: {
// populate map as items are added to collection
'add': 'addGeoModelToMap',
// remove items from map when models are removed
'remove': 'removeGeoModelFromMap'
},
initialize: function (options) {
this.parent = options.parent;
this.collection = new Ns.collections.Geo();
this.popUpNodeTemplate = _.template($('#map-popup-node-template').html());
// link tweak
this.popUpLinkTemplate = _.template($('#map-popup-link-template').html());
// reload data when user logs in or out
this.listenTo(Ns.db.user, 'loggedin loggedout', this.reloadMapData);
// bind to namespaced events
$(window).on('resize.map', _.bind(this.resize, this));
$(window).on('beforeunload.map', _.bind(this.storeMapProperties, this));
// cleanup eventual alerts
$.cleanupAlerts();
},
onShow: function () {
this.initMap();
},
onDestroy: function () {
// store current coordinates when changing view
this.storeMapProperties();
// unbind the namespaced events
$(window).off('beforeunload.map');
$(window).off('resize.map');
},
/*
* get current map coordinates (lat, lng, zoom)
*/
getMapProperties: function () {
var latLng = this.map.getCenter();
return {
lat: latLng.lat,
lng: latLng.lng,
zoom: this.map.getZoom(),
baseLayer: this.getCurrentBaseLayer()
};
},
/*
* store current map coordinates in localStorage
*/
storeMapProperties: function () {
localStorage.setObject('map', this.getMapProperties());
},
/*
* get latest stored coordinates or default ones
*/
rememberMapProperties: function () {
return localStorage.getObject('map') || Ns.settings.map;
},
/*
* resize window event
*/
resize: function () {
Ns.views.map.Layout.resizeMap();
// when narrowing the window to medium-small size and toolbar is hidden and any panel is still visible
if ($(window).width() <= 767 && $('#map-toolbar').is(':hidden') && $('.side-panel:visible').length) {
// close panel
$('.mask').trigger('click');
}
},
/*
* initialize leaflet map
*/
initMap: function () {
var self = this,
memory = this.rememberMapProperties();
this.resize();
// init map
this.map = $.loadDjangoLeafletMap();
// remember last coordinates
this.map.setView([memory.lat, memory.lng], memory.zoom, {
trackResize: true
});
// store baseLayers
this.baseLayers = {};
_.each(this.map.layerscontrol._layers, function (baseLayer) {
self.baseLayers[baseLayer.name] = baseLayer.layer;
// keep name reference
self.baseLayers[baseLayer.name].name = baseLayer.name;
});
// remember preferred baseLayer
if (memory.baseLayer) {
this.switchBaseLayer(memory.baseLayer);
}
// create (empty) clusters on map (will be filled by addGeoModelToMap)
this.createClusters();
},
/**
* changes base layer of the map, only if necessary
* (calling the same action twice has no effect)
*/
switchBaseLayer: function(name){
// ignore if name is undefined
if(typeof name === 'undefined'){ return; }
// remove all base layers that are not relevant
for (var key in this.baseLayers){
if (this.baseLayers[key].name !== name) {
this.map.removeLayer(this.baseLayers[key]);
}
}
// if the relevant layer is still not there add it
if (!this.map.hasLayer(this.baseLayers[name])) {
this.map.addLayer(this.baseLayers[name]);
}
},
/**
* returns name of the current map base layer
*/
getCurrentBaseLayer: function () {
for (var name in this.baseLayers){
if (Boolean(this.baseLayers[name]._map)) {
return name;
}
}
return null;
},
/*
* loads data from API
*/
initMapData: function () {
Ns.changeTitle(gettext('Map'));
Ns.menu.currentView.activate('map');
Ns.track();
Ns.state.onNodeClose = 'map'; // when a node-details is closed go back on map
this.parent.toolbar.$el.addClass('enabled');
this.resize();
// load cached data if present
if (Ns.db.geo.isEmpty() === false) {
this.collection.add(Ns.db.geo.models);
this.collection.trigger('ready');
}
// otherwise fetch from server
else {
this.fetchMapData();
}
// toggle legend group from map when visible attribute changes
this.listenTo(Ns.db.legend, 'change:visible', this.toggleLegendGroup);
// toggle layer data when visible attribute changes
this.listenTo(Ns.db.layers, 'change:visible', this.toggleLayerData);
},
/*
* fetch map data, merging changes if necessary
*/
fetchMapData: function () {
var self = this,
// will contain fresh data
geo = new Ns.collections.Geo(),
// will be used to fetch data to merge in geo
tmp = geo.clone(),
additionalGeoJson = Ns.settings.additionalGeoJsonUrls,
ready, fetch;
// will be called when all sources have been fetched
// we need to add 1 to account for the main geojson
ready = _.after(additionalGeoJson.length + 1, function () {
// reload models
self.collection.remove(self.collection.models);
self.collection.add(geo.models);
// cache geo collection
Ns.db.geo = self.collection;
// trigger ready event
self.collection.trigger('ready');
// unbind event
self.collection.off('sync', ready);
});
// fetch data and add it to collection
fetch = function () {
tmp.fetch().done(function () {
geo.add(tmp.models);
geo.trigger('sync');
});
};
geo.on('sync', ready);
// fetch data from API
fetch();
additionalGeoJson.forEach(function (url) {
tmp._url = url;
fetch();
});
// begin temporary tweak for links
if (Ns.settings.links) {
var links = new Ns.collections.Geo();
links._url = Ns.url('links.geojson');
links.fetch().done(function () {
geo.add(links.models);
geo.trigger('sync');
});
}
// end tweak
},
/**
* reload map data in the background
*/
reloadMapData: function () {
$.toggleLoading('hide');
// disable loading indicator while data gets refreshed
Ns.state.autoToggleLoading = false;
// fetch data
this.fetchMapData();
// re-enable loading indicator once data is refreshed
this.collection.once('ready', function(){ Ns.state.autoToggleLoading = true });
},
/**
* prepare empty Leaflet.MarkerCluster objects
*/
createClusters: function () {
var self = this,
legend;
// loop over each legend item
Ns.db.legend.forEach(function (legendModel) {
legend = legendModel.toJSON();
// group markers in clusters
var cluster = new L.MarkerClusterGroup({
iconCreateFunction: function (cluster) {
var count = cluster.getChildCount(),
// determine size with the last number of the exponential notation
// 0 for < 10, 1 for < 100, 2 for < 1000 and so on
size = count.toExponential().split('+')[1];
return L.divIcon({
html: count,
className: 'cluster cluster-size-' + size + ' marker-' + this.cssClass
});
},
polygonOptions: {
fillColor: legend.fill_color,
stroke: legend.stroke_width > 0,
weight: legend.stroke_width,
color: legend.stroke_color,
opacity: 0.4
},
cssClass: legend.slug,
chunkedLoading: true,
showCoverageOnHover: true,
zoomToBoundsOnClick: true,
removeOutsideVisibleBounds: true,
disableClusteringAtZoom: Ns.settings.disableClusteringAtZoom,
maxClusterRadius: Ns.settings.maxClusterRadius
});
// store reference
legendModel.cluster = cluster;
// show cluster only if corresponding legend item is visible
if(legend.visible){
self.map.addLayer(cluster);
}
});
},
/**
* returns options for the initialization of tooltip for leaflet layers
*/
tooltipOptions: function(data) {
return {
container: '#map-js',
placement: 'auto top',
title: data.name,
delay: { show: 600, hide: 0 }
}
},
/**
* adds a geo model to its cluster
* binds popup
* called whenever a model is added to the collection
*/
addGeoModelToMap: function (model) {
var self = this,
leafletLayer = model.get('leaflet'),
legend = model.get('legend'),
data = model.toJSON(),
layer = Ns.db.layers.get(data.layer),
// link tweak
template = model._type === 'node' ? this.popUpNodeTemplate : this.popUpLinkTemplate;
// bind leaflet popup
leafletLayer.bindPopup(template(data));
// mouse over / out events
leafletLayer.on({
mouseover: function (e) {
var l = e.target,
type = l.feature.geometry.type;
// opacity to 1
l.setStyle({ fillOpacity: 1 });
// bring to front
if (!L.Browser.ie && !L.Browser.opera && type === 'Point') {
l.bringToFront({ fillOpacity: 1 });
}
},
mouseout: function (e) {
e.target.setStyle({ fillOpacity: Ns.settings.leafletOptions.fillOpacity });
},
// when popup opens, change the URL fragment
popupopen: function (e) {
var fragment = Backbone.history.fragment;
// do this only if in general map view
if (fragment.indexOf('map') >= 0 && fragment.indexOf('nodes') < 0) {
Ns.router.navigate('map/' + data.slug);
}
// destroy container to avoid the chance that the tooltip
// might appear while showing the leaflet popup
$(e.target._container).tooltip('destroy');
},
// when popup closes
popupclose: function (e) {
// (and no new popup opens)
// URL fragment goes back to initial state
var fragment = Backbone.history.fragment;
setTimeout(function () {
// do this only if in general map view
if (self.map._popup === null && fragment.indexOf('map') >= 0 && fragment.indexOf('nodes') < 0) {
Ns.router.navigate('map');
}
}, 100);
// rebind tooltip (it has been destroyed in popupopen event)
$(e.target._container).tooltip(self.tooltipOptions(data));
},
add: function(e){
// create tootlip when leaflet layer is added to the view
$(e.target._container).tooltip(self.tooltipOptions(data));
},
remove: function(e){
// ensure tooltip is removed when layer is removed from map
$(e.target._container).tooltip('destroy');
}
});
// show on map only if corresponding nodeshot layer is visible
if (layer && layer.get('visible')) {
legend.cluster.addLayer(leafletLayer);
// avoid covering points
if (leafletLayer._map && leafletLayer.feature.geometry.type !== 'Point') {
leafletLayer.bringToBack();
}
}
},
/**
* remove geo model from its cluster
* called whenever a model is removed from the collection
*/
removeGeoModelFromMap: function (model) {
var cluster = model.get('legend').cluster;
cluster.removeLayer(model.get('leaflet'));
},
/*
* show / hide from map items of a legend group
*/
toggleLegendGroup: function (legend, visible) {
var method = (visible) ? 'addLayer' : 'removeLayer';
this.map[method](legend.cluster);
},
/*
* show / hide from map items of a legend group
*/
toggleLayerData: function (layer, visible) {
var geo = this.collection,
method = (visible) ? 'addLayers' : 'removeLayers',
l;
Ns.db.legend.forEach(function(legend){
l = geo.whereCollection({ legend: legend, layer: layer.id }).pluck('leaflet');
legend.cluster[method](l);
});
// needed to recalculate stats on legend
this.trigger('layer-toggled');
},
/*
* Open leaflet popup of the specified element
*/
openLeafletPopup: function (id) {
var collection = this.collection,
self = this,
leafletLayer;
// open leaflet pop up if ready
if (collection.length && typeof collection !== 'undefined') {
try {
leafletLayer = this.collection.get(id).get('leaflet');
} catch (e) {
$.createModal({
message: id + ' ' + gettext('not found'),
onClose: function () {
Ns.router.navigate('map');
}
});
return;
}
try {
leafletLayer.openPopup();
}
// clustering plugin hides leafletLayers when clustered or outside viewport
// so we have to zoom in and center the map
catch (e){
this.map.fitBounds(leafletLayer.getBounds());
leafletLayer.openPopup();
}
}
// if not ready wait for map.collectionReady and call again
else {
this.collection.once('ready', function () {
self.openLeafletPopup(id);
});
}
return;
},
/*
* Close leaflet popup if open
*/
closeLeafletPopup: function () {
var popup = $('#map-js .leaflet-popup-close-button');
if (popup.length) {
popup.get(0).click();
}
},
/*
* Go to specified latitude and longitude
*/
goToLatLng: function (latlng, zoom) {
latlng = latlng.split(',')
latlng = L.latLng(latlng[0], latlng[1]);
var self = this,
marker = L.marker(latlng);
// used in search address feature
if (!zoom) {
marker.addTo(this.map);
zoom = 18;
}
// go to marker and zoom in
this.map.setView(latlng, zoom);
// fade out marker
if (typeof(marker) !== 'undefined' && this.map.hasLayer(marker)) {
$([marker._icon, marker._shadow]).fadeOut(4000, function () {
self.map.removeLayer(marker);
});
}
}
});
Ns.views.map.Legend = Marionette.ItemView.extend({
id: 'map-legend',
className: 'overlay inverse',
template: '#map-legend-template',
ui: {
'close': 'a.icon-close'
},
events: {
'click @ui.close': 'toggleLegend',
'click li a': 'toggleGroup'
},
collectionEvents: {
// automatically render when toggling group or recounting
'change:visible counted': 'render'
},
initialize: function (options) {
this.parent = options.parent;
this.collection = Ns.db.legend;
this.legendButton = this.parent.toolbar.currentView.ui.legendButton;
// display count in legend
this.listenTo(this.parent.content.currentView.collection, 'ready', this.count);
this.listenTo(this.parent.content.currentView, 'layer-toggled', this.count);
},
onRender: function () {
// default is true
if (localStorage.getObject('legendOpen') === false) {
this.$el.hide();
} else {
this.legendButton.addClass('disabled');
}
},
/*
* calculate counts
*/
count: function () {
this.collection.forEach(function (legend) {
legend.set('count', legend.cluster.getLayers().length);
});
// trigger once all legend items have been counted
this.collection.trigger('counted');
},
/*
* open or close legend
*/
toggleLegend: function (e) {
e.preventDefault();
var legend = this.$el,
button = this.legendButton,
open;
if (legend.is(':visible')) {
legend.fadeOut(255);
button.removeClass('disabled');
button.tooltip('enable');
open = false;
} else {
legend.fadeIn(255);
button.addClass('disabled');
button.tooltip('disable').tooltip('hide');
open = true;
}
localStorage.setItem('legendOpen', open);
},
/*
* enable or disable something on the map
* by clicking on its related legend control
*/
toggleGroup: function (e) {
e.preventDefault();
var status = $(e.currentTarget).attr('data-status'),
item = this.collection.get(status);
item.set('visible', !item.get('visible'));
}
});
Ns.views.map.Toolbar = Marionette.ItemView.extend({
template: '#map-toolbar-template',
ui: {
'buttons': 'a',
'switchMapMode': '#btn-map-mode',
'legendButton': '#btn-legend',
'toolsButton': 'a.icon-tools',
'prefButton': 'a.icon-config',
'layersControl': 'a.icon-layer-2'
},
events: {
'click .icon-pin-add': 'addNode',
'click @ui.buttons': 'togglePanel',
'click @ui.switchMapMode': 'switchMapMode',
// siblings events
'click @ui.legendButton': 'toggleLegend'
},
initialize: function (options) {
this.parent = options.parent;
},
onRender: function () {
    var self = this;
    // init tooltips on every toolbar button
    this.ui.buttons.tooltip();
    // correction for map tools: while the tools panel is open it overlaps
    // the preferences button, so its tooltip is temporarily disabled
    this.ui.toolsButton.click(function (e) {
        var button = $(this),
            prefButton = self.ui.prefButton;
        if (button.hasClass('active')) {
            prefButton.tooltip('disable');
        } else {
            prefButton.tooltip('enable');
        }
    });
    // correction for map-filter: same idea for the layers panel, which
    // overlaps several sibling buttons
    this.ui.layersControl.click(function (e) {
        var button = $(this),
            otherButtons = self.$el.find('a.icon-config, a.icon-3d, a.icon-tools');
        if (button.hasClass('active')) {
            otherButtons.tooltip('disable');
        } else {
            otherButtons.tooltip('enable');
        }
    });
},
/*
 * show / hide map toolbar on narrow screens
 * (the toolbar is hidden by a CSS media-query on small viewports;
 * `target` is the button that triggers this toggle)
 */
toggleToolbar: function (e) {
    e.preventDefault();
    // shortcut
    var toolbar = this.parent.toolbar.$el,
        target = $(e.currentTarget);
    // show toolbar
    if (toolbar.is(':hidden')) {
        // just add display:block
        // which overrides css media-query
        toolbar.show();
        // overimpose the toggle button on the toolbar
        target.css('right', '-60px');
    }
    // hide toolbar
    else {
        // instead of using jQuery.hide() which would hide the toolbar also
        // if the user enlarged the screen, we clear the style attribute
        // which will cause the toolbar to be hidden only on narrow screens
        toolbar.attr('style', '');
        // close any open panel
        if ($('.side-panel:visible').length) {
            $('.mask').trigger('click');
        }
        // eliminate negative margin correction
        target.css('right', '0');
    }
    // the map must be re-measured after the layout change
    Ns.views.map.Layout.resizeMap();
},
/*
* proxy to call add node
*/
addNode: function (e) {
e.preventDefault();
this.parent.addNode();
},
/*
* redirects to Ns.views.map.Panels
*/
toggleLegend: function (e) {
this.parent.legend.currentView.toggleLegend(e);
},
/*
* redirects to Ns.views.map.Panels
*/
togglePanel: function (e) {
this.parent.panels.currentView.togglePanel(e);
},
/*
* toggle 3D or 2D map
*/
switchMapMode: function (e) {
e.preventDefault();
$.createModal({message: gettext('not implemented yet')});
}
});
Ns.views.map.Panels = Marionette.ItemView.extend({
template: '#map-panels-template',
ui: {
'switches': 'input.switch',
'scrollers': '.scroller',
'selects': '.selectpicker',
'tools': '.tool',
'distance': '#fn-map-tools .icon-ruler',
'area': '#fn-map-tools .icon-select-area',
'elevation': '#fn-map-tools .icon-elevation-profile'
},
events: {
'click #fn-map-tools .notImplemented': 'toggleToolNotImplemented',
'click @ui.distance': 'toggleDistance',
'click @ui.area': 'toggleArea',
'click @ui.elevation': 'toggleElevation',
'click #toggle-toolbar': 'toggleToolbar',
'change .js-base-layers input': 'switchBaseLayer',
'switch-change #fn-map-layers .toggle-layer-data': 'toggleLayer',
'switch-change #fn-map-layers .toggle-legend-data': 'toggleLegend'
},
initialize: function (options) {
this.parent = options.parent;
this.mapView = this.parent.content.currentView;
this.toolbarView = this.parent.toolbar.currentView;
this.toolbarButtons = this.toolbarView.ui.buttons;
// listen to legend change event
this.listenTo(Ns.db.legend, 'change:visible', this.syncLegendSwitch);
this.populateBaseLayers();
// init tools
if (Ns.settings.mapTools) {
this.tools = {
'distance': new L.Polyline.Measure(this.mapView.map),
'area': new L.Polygon.Measure(this.mapView.map),
'elevation': new L.Polyline.Elevation(this.mapView.map)
};
}
},
// populate this.baseLayers: one {checked, name} entry per configured base
// layer, in the order declared in the django-leaflet options, used by the
// template to render the base-layer radio buttons
populateBaseLayers: function () {
    var self = this,
        layer;
    this.baseLayers = [];
    // get ordering of baselayers from django-leaflet options
    this.mapView.map.options.djoptions.layers.forEach(function (layerConfig) {
        // layerConfig[0] is the layer name/key into mapView.baseLayers
        layer = self.mapView.baseLayers[layerConfig[0]];
        self.baseLayers.push({
            checked: Boolean(layer._map), // if _map is not null it means this is the active layer
            name: layer.name
        });
    });
},
serializeData: function(){
return {
'layers': Ns.db.layers.toJSON(),
'legend': Ns.db.legend.toJSON(),
'baseLayers': this.baseLayers
}
},
onRender: function () {
this.ui.tools.tooltip();
// activate switch
this.ui.switches.bootstrapSwitch().bootstrapSwitch('setSizeClass', 'switch-small');
// activate scroller
this.ui.scrollers.scroller({
trackMargin: 6
});
// fancy selects
this.ui.selects.selectpicker({
style: 'btn-special'
});
},
/*
 * show / hide toolbar panels
 * The panel id comes from the clicked button's data-panel attribute; a
 * click-mask is installed so clicking anywhere outside closes the panel.
 */
togglePanel: function (e) {
    e.preventDefault();
    var button = $(e.currentTarget),
        panelId = button.attr('data-panel'),
        panel = $('#' + panelId),
        self = this,
        // determine distance from top (align panel with its button)
        distanceFromTop = button.offset().top - $('body > header').eq(0).outerHeight(),
        preferencesHeight;
    // if the button has no associated panel there is nothing to do
    if (!panel.length) {
        return;
    }
    // hide any open tooltip
    $('#map-toolbar .tooltip').hide();
    panel.css('top', distanceFromTop);
    // adjust height of panel if marked as 'adjust-height'
    // (stretch it to the bottom of the toolbar minus a small margin)
    if (panel.hasClass('adjust-height')) {
        preferencesHeight = $('#map-toolbar').height() - distanceFromTop - 18;
        panel.height(preferencesHeight);
    }
    panel.fadeIn(25, function () {
        panel.find('.scroller').scroller('reset');
        button.addClass('active');
        button.tooltip('hide').tooltip('disable');
        // create a mask for easy closing
        $.mask(panel, function (e) {
            // close function
            if (panel.is(':visible')) {
                panel.hide();
                self.toolbarButtons.removeClass('active');
                button.tooltip('enable');
                // if clicking again on the same button avoid reopening the panel
                if ($(e.target).attr('data-panel') === panelId) {
                    e.stopPropagation();
                    e.preventDefault();
                }
            }
        });
    });
},
toggleToolNotImplemented: function (e) {
e.preventDefault();
$.createModal({ message: gettext('not implemented yet') });
return false;
},
/*
* toggle map tool
*/
toggleToolButton: function (e) {
var button = $(e.currentTarget),
active_buttons = $('#fn-map-tools .tool.active');
// if activating a tool
if (!button.hasClass('active')) {
// deactivate any other
active_buttons.trigger('click');
button.addClass('active')
.tooltip('hide')
.tooltip('disable');
return true;
// deactivate
} else {
button.removeClass('active')
.tooltip('enable')
.trigger('blur');
return false;
}
},
toggleDrawTool: function (toolName, e) {
var result = this.toggleToolButton(e),
tool = this.tools[toolName];
if (result) {
tool.enable();
// if tool is disabled with ESC or other ways
// sync the nodeshot UI
tool.once('disabled', function () {
this.toggleToolButton(e);
}, this);
}
else {
tool.off('disabled');
tool.disable();
}
},
toggleDistance: function (e) {
this.toggleDrawTool('distance', e);
},
toggleArea: function (e) {
this.toggleDrawTool('area', e);
},
toggleElevation: function (e) {
this.toggleDrawTool('elevation', e);
},
drawElevation: function (geojson) {
// local vars
var points = [],
self = this;
// the elevation API expects latitude, longitude, so we have to reverse our coords
geojson.geometry.coordinates.forEach(function(point){
points.push(point.reverse());
});
// query the elevation API
$.getJSON(Ns.url('elevation/'), {
// output is '<lat>,<lng>|<lat>,<lng>|<lat>,<lng'
path: points.join('|')
}).done(function(geojson){
// close tools panel
$('.mask').trigger('click');
// create control
var el = L.control.elevation({
position: 'bottomright',
width: 1020,
height: 299,
margins: {
top: 25,
right: 40,
bottom: 40,
left: 70
},
});
el.addTo(self.mapView.map);
var geojsonLayer = L.geoJson(geojson, {
onEachFeature: el.addData.bind(el),
style: function () {
return {
color: '#e6a1b3',
opacity: 0.7
}
}
}).addTo(self.mapView.map);
var close = $('<a href="#" class="icon-close"></a>');
$('#map-js .elevation.leaflet-control').append('<a href="#" class="icon-close"></a>');
$('#map-js .elevation.leaflet-control .icon-close').one('click', function (e) {
e.preventDefault();
self.mapView.map.removeControl(el);
self.mapView.map.removeLayer(geojsonLayer);
})
});
},
/*
* proxy to Ns.views.map.Toolbar.toggleToolbar
*/
toggleToolbar: function (e) {
this.toolbarView.toggleToolbar(e);
},
/**
* changes base layer of the map
* proxy to Ns.views.map.Content.switchBaseLayer
*/
switchBaseLayer: function (event) {
this.mapView.switchBaseLayer($(event.target).attr('data-name'));
},
/**
* hide / show layer data on map
*/
toggleLayer: function (event, data) {
var layer = Ns.db.layers.get(data.el.attr('data-slug'));
layer.set('visible', data.value);
},
/**
* hide / show legend data on map
*/
toggleLegend: function(event, data){
this.parent.legend.currentView.$('a[data-status=' + data.el.attr('data-slug') + ']').trigger('click');
},
/**
 * sync legend state with switches in panel
 * (bound in initialize to the Ns.db.legend 'change:visible' event)
 */
syncLegendSwitch: function(legend, state){
    var input = this.$('#map-control-legend-' + legend.get('slug'));
    // avoid feedback loops: only toggle when the switch is out of sync
    if(input.bootstrapSwitch('state') !== state){
        // second parameter indicates whether to skip triggering the switch event
        input.bootstrapSwitch('toggleState', true);
    }
}
});
Ns.views.map.Add = Marionette.ItemView.extend({
template: '#map-add-node-template',
tagName: 'article',
ui: {
'formContainer': '#add-node-form-container'
},
events: {
'click #add-node-form-container .btn-default': 'destroy',
'submit #add-node-form-container form': 'submitAddNode'
},
initialize: function (options) {
this.parent = options.parent;
// references to objects of other views
this.ext = {
legend: this.parent.legend.$el,
toolbar: this.parent.toolbar.$el,
map: this.parent.content.$el,
leafletMap: this.parent.content.currentView.map,
geo: this.parent.content.currentView.collection,
step1: $('#add-node-step1'),
step2: $('#add-node-step2')
};
// elements that must be hidden
this.hidden = $().add(this.ext.legend)<|fim▁hole|> .add(this.ext.toolbar)
.add(this.ext.map.find('.leaflet-control-attribution'));
// needed for toggleLeafletLayers
this.dimmed = false;
},
serializeData: function(){
return { 'layers': Ns.db.layers.toJSON() };
},
onShow: function () {
Ns.router.navigate('map/add');
Ns.changeTitle(gettext('Add node'));
Ns.track();
// go to step1 when collection is ready
if (this.ext.geo.length){
this.step1();
}
else {
this.listenToOnce(this.ext.geo, 'ready', this.step1);
}
// dynamic form
this.form = new Backbone.Form({
model: new Ns.models.Node(),
submitButton: gettext('Add node')
}).render();
this.ui.formContainer.html(this.form.$el);
this.$('input[type=checkbox]').bootstrapSwitch().bootstrapSwitch('setSizeClass', 'switch-small');
this.$('select').selectpicker({style: 'btn-special' });
},
/*
* when the view is destroyed the map is taken backto its original state
*/
onBeforeDestroy: function () {
this.closeAddNode();
// change url fragment but only if we are still on the map
if (Backbone.history.fragment.substr(0, 3) == 'map'){
Ns.router.navigate('map');
}
},
/*
* proxy to Ns.views.map.Layout.resizeMap
*/
resizeMap: function() {
Ns.views.map.Layout.resizeMap();
},
/*
* hide elements that are not needed when adding a new node
* show them back when finished
*/
toggleHidden: function(){
this.hidden.toggle();
this.resizeMap();
this.toggleLeafletLayers();
},
/*
* dim out leaflet layers from map when adding a new node
* reset default options when finished
* clusters are toggled (hidden and shown back) through an additional style tag in <head>
* because clusters are re-rendered whenever the map is moved or resized so inline changes
* do not persist when resizing or moving
*/
toggleLeafletLayers: function () {
var leafletOptions = Ns.settings.leafletOptions,
tmpOpacity = leafletOptions.temporaryOpacity,
clusterCss = $('#add-node-cluster-css'),
dimOut = !this.dimmed,
leaflet;
// dim out or reset all leaflet layers
this.ext.geo.forEach(function(model){
leaflet = model.get('leaflet');
if (dimOut) {
leaflet.options.opacity = tmpOpacity;
leaflet.options.fillOpacity = tmpOpacity;
leaflet.setStyle(leaflet.options);
}
else {
leaflet.options.opacity = leafletOptions.opacity;
leaflet.options.fillOpacity = leafletOptions.fillOpacity;
leaflet.setStyle(leaflet.options);
}
});
if (clusterCss.length === 0) {
$('head').append('<style id="add-node-cluster-css">.cluster{ display: none }</style>');
}
else{
clusterCss.remove();
}
// change dimmed state
this.dimmed = dimOut;
},
/*
* step1 of adding a new node
*/
step1: function (e) {
var self = this,
dialog = this.ext.step1,
dialog_dimensions = dialog.getHiddenDimensions();
// hide toolbar and enlarge map
this.toggleHidden();
// show step1
dialog.css({
width: dialog_dimensions.width+2,
right: 0
});
dialog.fadeIn(255);
// cancel
this.ext.step1.find('button').one('click', function () { self.destroy() });
// on map click (only once)
this.ext.leafletMap.once('click', function (e) {
dialog.fadeOut(255);
self.step2(e);
});
},
step2: function (e) {
var self = this,
dialog = this.ext.step2,
dialog_dimensions = dialog.getHiddenDimensions(),
map = this.ext.leafletMap,
callback,
latlng,
// draggable marker
marker = L.marker([e.latlng.lat, e.latlng.lng], {draggable: true}).addTo(map);
// keep a global reference
this.newNodeMarker = marker;
// set address on form
this.setAddressFromLatLng(e.latlng);
this.form.setValue('geometry', JSON.stringify(marker.toGeoJSON()));
this.setGeometryFromMarker(marker);
// update address when moving the marker
marker.on('dragend', function (event) {
latlng = event.target.getLatLng();
self.setAddressFromLatLng(latlng);
self.setGeometryFromMarker(event.target);
map.panTo(latlng);
});
// zoom in to marker
map.setView(marker.getLatLng(), 18, { animate: true });
// show step2
dialog = self.ext.step2,
dialog_dimensions = dialog.getHiddenDimensions();
dialog.css({
width: dialog_dimensions.width+2,
right: 0
});
dialog.fadeIn(255);
// bind cancel button once
this.ext.step2.find('.btn-default').one('click', function () { self.destroy() });
// bind confirm button once
this.ext.step2.find('.btn-success').one('click', function () {
callback = function () {
self.resizeMap();
map.panTo(marker._latlng);
};
dialog.fadeOut(255);
// show form with a nice animation
self.parent.add.$el.show().animate({ width: '+70%'}, {
duration: 400,
progress: callback,
complete: callback
});
});
},
/*
* submit new node
*/
submitAddNode: function (e) {
e.preventDefault();
var self = this,
form = this.form,
geojson = JSON.stringify(this.newNodeMarker.toGeoJSON().geometry),
errorList = this.$('.error-list'),
node = form.model,
errors = form.commit(),
geo;
if (errors) {
return false;
}
this.$('.help-block').text('').hide();
this.$('.error').removeClass('error');
this.$('.has-error').removeClass('has-error');
errorList.html('').hide();
node.save().done(function () {
// convert to Geo model
node = new Ns.models.Geo(node.toJSON());
// add to geo collection
self.ext.geo.add(node);
// destroy this view
self.destroy();
// open new node popup
node.get('leaflet').openPopup();
}).error(function (http) {
// TODO: make this reusable
var json = http.responseJSON,
key, input, errorContainer;
for (key in json) {
input = self.$('input[name=' + key + ']');
if (input.length) {
input.addClass('error');
errorContainer = input.parents('.form-group').find('.help-block');
errorContainer.text(json[key])
.removeClass('hidden')
.addClass('has-error')
.fadeIn(255);
} else {
errorList.show();
errorList.append('<li>' + json[key] + '</li>');
}
}
});
},
/*
 * cancel addNode operation
 * resets normal map functions: unbinds step handlers, removes the
 * temporary marker, hides both step dialogs and the form container
 */
closeAddNode: function () {
    var marker = this.newNodeMarker,
        container = this.parent.add.$el,
        map = this.ext.leafletMap,
        self = this,
        resetToOriginalState = function () {
            container.hide();
            // show hidden elements again (legend, toolbar, attribution)
            self.toggleHidden();
        };
    // unbind click events
    map.off('click');
    this.ext.step1.find('button').off('click');
    this.ext.step2.find('.btn-default').off('click');
    this.ext.step2.find('.btn-success').off('click');
    // remove the draggable marker if it was placed
    if (marker) {
        map.removeLayer(marker);
    }
    // hide step1 if necessary
    if (this.ext.step1.is(':visible')) {
        this.ext.step1.fadeOut(255);
    }
    // hide step2 if necessary
    if (this.ext.step2.is(':visible')) {
        this.ext.step2.fadeOut(255);
    }
    // if the form container is visible
    if (container.is(':visible')) {
        // hide it with a nice animation, resizing the map as it shrinks
        container.animate({ width: '0' }, {
            duration: 400,
            progress: function () {
                self.resizeMap();
                if (marker) { map.panTo(marker._latlng); }
            },
            complete: resetToOriginalState
        });
    }
    // otherwise reset the original state immediately
    else {
        resetToOriginalState();
    }
},
/*
* retrieve address from latlng through OSM Nominatim service
* and set it on the add node form
*/
setAddressFromLatLng: function (latlng) {
var self = this;
$.geocode({
lat: latlng.lat,
lon: latlng.lng,
callback: function(result){
self.form.setValue('address', result.display_name);
}
});
},
/**
* set geometry on model from marker geojson
*/
setGeometryFromMarker: function (marker) {
this.form.setValue('geometry', JSON.stringify(marker.toGeoJSON().geometry));
}
});
})();<|fim▁end|> | |
<|file_name|>read-modules.js<|end_file_name|><|fim▁begin|>/* global requirejs, require */
/*jslint node: true */
'use strict';
import Ember from 'ember';
import _keys from 'lodash/object/keys';
/*
This function looks through all files that have been loaded by Ember CLI and
finds the ones under /mirage/[factories, fixtures, scenarios, models]/, and exports
a hash containing the names of the files as keys and the data as values.
*/
export default function(prefix) {
let modules = ['factories', 'fixtures', 'scenarios', 'models', 'serializers'];
let mirageModuleRegExp = new RegExp(`^${prefix}/mirage/(${modules.join("|")})`);
let modulesMap = modules.reduce((memo, name) => {
memo[name] = {};
return memo;
}, {});
_keys(requirejs.entries).filter(function(key) {
return mirageModuleRegExp.test(key);
}).forEach(function(moduleName) {
if (moduleName.match('.jshint')) { // ignore autogenerated .jshint files
return;
}
let moduleParts = moduleName.split('/');
let moduleType = moduleParts[moduleParts.length - 2];
let moduleKey = moduleParts[moduleParts.length - 1];
Ember.assert('Subdirectories under ' + moduleType + ' are not supported',
moduleParts[moduleParts.length - 3] === 'mirage');<|fim▁hole|> if (moduleType === 'scenario'){
Ember.assert('Only scenario/default.js is supported at this time.',
moduleKey !== 'default');
}
let module = require(moduleName, null, null, true);
if (!module) { throw new Error(moduleName + ' must export a ' + moduleType); }
let data = module['default'];
modulesMap[moduleType][moduleKey] = data;
});
return modulesMap;
}<|fim▁end|> | |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#coding=utf-8
# Copyright 2007 Google Inc.
#<|fim▁hole|># you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import time
import dbcontroller as dc
import speak
import User
import logging
class MainHandler(webapp2.RequestHandler):
    """Webhook handler: refreshes the data source, renders it into text
    lines and posts each line to Twitter, best-effort."""

    def get(self):
        """Handle GET: refresh, render and tweet; always responds 'ok'."""
        items = dc.refresh()  # renamed: previously shadowed the builtin `list`
        lines = speak.speak(items)
        # imported lazily so importing this module does not require
        # twitter credentials to be configured
        import twitter
        for user in User.users:
            # NOTE(review): `user` is not used inside the loop, so the same
            # lines are sent once per user from the same account — confirm
            # whether per-user delivery was intended.
            for line in lines:
                logging.log(logging.INFO, u"twitter length is " +
                            str(len(line)))
                try:
                    twitter.sendMessage(line)
                except Exception:
                    # best-effort: log the failure and continue with the rest
                    # (narrowed from a bare except, which also swallowed
                    # SystemExit/KeyboardInterrupt)
                    logging.log(logging.WARNING, u"twitter send fail:" + line)
        return self.response.out.write('ok')
app = webapp2.WSGIApplication([
('/whyisme', MainHandler)
], debug=True)<|fim▁end|> | # Licensed under the Apache License, Version 2.0 (the "License"); |
<|file_name|>errors.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2016 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from discord.errors import DiscordException
__all__ = [ 'CommandError', 'MissingRequiredArgument', 'BadArgument',
'NoPrivateMessage', 'CheckFailure', 'CommandNotFound',
'DisabledCommand', 'CommandInvokeError', 'TooManyArguments',
'UserInputError', 'CommandOnCooldown' ]
class CommandError(DiscordException):
"""The base exception type for all command related errors.
This inherits from :exc:`discord.DiscordException`.
This exception and exceptions derived from it are handled
in a special way as they are caught and passed into a special event
from :class:`Bot`\, :func:`on_command_error`.
"""
def __init__(self, message=None, *args):<|fim▁hole|> else:
super().__init__(*args)
class UserInputError(CommandError):
"""The base exception type for errors that involve errors
regarding user input.
This inherits from :exc:`CommandError`.
"""
pass
class CommandNotFound(CommandError):
"""Exception raised when a command is attempted to be invoked
but no command under that name is found.
This is not raised for invalid subcommands, rather just the
initial main command that is attempted to be invoked.
"""
pass
class MissingRequiredArgument(UserInputError):
"""Exception raised when parsing a command and a parameter
that is required is not encountered.
"""
pass
class TooManyArguments(UserInputError):
"""Exception raised when the command was passed too many arguments and its
:attr:`Command.ignore_extra` attribute was not set to ``True``.
"""
pass
class BadArgument(UserInputError):
"""Exception raised when a parsing or conversion failure is encountered
on an argument to pass into a command.
"""
pass
class NoPrivateMessage(CommandError):
"""Exception raised when an operation does not work in private message
contexts.
"""
pass
class CheckFailure(CommandError):
"""Exception raised when the predicates in :attr:`Command.checks` have failed."""
pass
class DisabledCommand(CommandError):
"""Exception raised when the command being invoked is disabled."""
pass
class CommandInvokeError(CommandError):
    """Raised when the invoked command itself threw an exception.

    Attributes
    -----------
    original
        The exception originally raised by the command; also available
        through the standard ``__cause__`` attribute.
    """
    def __init__(self, e):
        self.original = e
        fmt = 'Command raised an exception: {0.__class__.__name__}: {0}'
        super().__init__(fmt.format(e))
class CommandOnCooldown(CommandError):
    """Exception raised when the command being invoked is on cooldown.

    Attributes
    -----------
    cooldown: Cooldown
        A class with attributes ``rate``, ``per``, and ``type`` similar to
        the :func:`cooldown` decorator.
    retry_after: float
        The amount of seconds to wait before you can retry again.
    """
    def __init__(self, cooldown, retry_after: float):
        # kept on the instance so error handlers can inspect/format them
        self.cooldown = cooldown
        self.retry_after = retry_after
        super().__init__('You are on cooldown. Try again in {:.2f}s'.format(retry_after))
# clean-up @everyone and @here mentions
m = message.replace('@everyone', '@\u200beveryone').replace('@here', '@\u200bhere')
super().__init__(m, *args) |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react'
const EditableText = React.createClass({
propTypes: {<|fim▁hole|> onSubmit: React.PropTypes.func.isRequired,
validator: React.PropTypes.func,
enableEditing: React.PropTypes.bool,
value: React.PropTypes.oneOfType([
React.PropTypes.string,
React.PropTypes.number,
])
},
getInitialState: function() {
return {
editing: false,
invalid: false,
newValue: this.props.value,
};
},
getDefaultProps: function() {
return {
enableEditing: true
};
},
edit() {
this.setState({editing: true});
},
handleChange(e) {
this.setState({newValue: e.target.value});
},
cancelEdit() {
this.setState({
editing: false,
invalid: false,
});
},
submit(e) {
e.preventDefault();
if (!this.props.validator || this.props.validator(this.state.newValue)) {
this.props.onSubmit(this.state.newValue);
this.cancelEdit();
} else {
this.setState({invalid: true});
}
},
renderInputForm() {
const inputForm = (
<form onSubmit={this.submit}
className={this.state.invalid ? 'has-error' : ''} >
<input type="text" autoFocus
onChange={this.handleChange}
onBlur={this.cancelEdit}
value={this.state.newValue}
className="form-control inline-editable" />
</form>
);
return inputForm;
},
render: function() {
    // editable mode: show either the input form (while editing) or a link
    // that starts editing; read-only mode: just render the plain value
    if (this.props.enableEditing) {
        return (
            <div className="inline-edit">
                {this.state.editing
                    ? this.renderInputForm()
                    : <a onClick={this.edit} title="Edit" className={'inline-editable'}>{this.props.value || 'Add'}</a>
                }
            </div>
        );
    } else {
        return (<span>{this.props.value}</span>);
    }
}
});
export default EditableText<|fim▁end|> | |
<|file_name|>top_level_interaction.js<|end_file_name|><|fim▁begin|>(function() {
function setUpTopLevelInteraction() {<|fim▁hole|> TopLevelInteraction.execute();
}
document.addEventListener("DOMContentLoaded", setUpTopLevelInteraction);
})();<|fim▁end|> | var TopLevelInteraction = new ITPHelper({
redirectUrl: document.body.dataset.redirectUrl,
});
|
<|file_name|>testSmS.py<|end_file_name|><|fim▁begin|>from twilio.rest import TwilioRestClient
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "AC433e7b0bec93dc5996e4fb80b1e56eec"
auth_token = "9cc9267fe09dab362d3be160f711a09d"
client = TwilioRestClient(account_sid, auth_token)
message = client.sms.messages.create(body="Jenny please?! I love you <3",
to="+14082186575", # Replace with your phone number
from_="++1415-795-2944") # Replace with your Twilio number<|fim▁hole|><|fim▁end|> | print message.sid |
<|file_name|>insert-data.ts<|end_file_name|><|fim▁begin|>export class Coordinates {<|fim▁hole|> public x: number[]
public y: number[]
constructor() {
this.x = [0]
this.y = [0]
}
}
export class Parameterisation {
public id: number
public width: number
public length: number
insert: Coordinates
circle: Coordinates
ellipse: Coordinates
constructor() {
this.insert = new Coordinates()
this.circle = new Coordinates()
this.ellipse = new Coordinates()
}
hash(input: string) {
let hash = 0, i: number, chr: number, len: number
if (input.length === 0) return hash
for (i = 0, len = input.length; i < len; i++) {
chr = input.charCodeAt(i)
hash = ((hash << 5) - hash) + chr
hash |= 0 // Convert to 32bit integer
}
return hash
}
insertUpdated() {
if (this.insert != null) {
this.id = this.hash(
'{"x":' + JSON.stringify(this.insert.x) + ',' +
'"y":' + JSON.stringify(this.insert.y) +
'}')
}
else {
throw new RangeError('Insert was not defined. Cannot run insert updated.')
}
}
reset() {
this.id = null
for (let key of ['insert', 'circle', 'ellipse']) {
this[key] = new Coordinates()
this[key].x = [0]
this[key].y = [0]
}
this.width = null
this.length = null
}
}
export class InsertData {
    public id: number
    public machine: string
    public energy: number
    public applicator: string
    // source-to-surface distance — units not visible here; confirm (likely cm)
    public ssd: number
    public measuredFactor: number
    public parameterisation: Parameterisation

    constructor(inputId?: number) {
        this.parameterisation = new Parameterisation()
        // default id is 0 when none supplied
        if (inputId != null) {
            this.id = inputId
        }
        else {
            this.id = 0
        }
    }

    // Clear every field except `id`, including the nested parameterisation.
    reset() {
        this.machine = null
        this.parameterisation.reset()
        this.energy = null
        this.applicator = null
        this.ssd = null
        this.measuredFactor = null
    }

    // Copy values from a plain object (e.g. deserialised JSON); a null
    // object resets this instance instead. The parameterisation id is
    // recomputed from the copied insert coordinates.
    fillFromObject(object: {}) {
        if (object == null) {
            this.reset()
        }
        else {
            for (let key of ['machine', 'energy', 'applicator', 'ssd', 'measuredFactor']) {
                this[key] = object[key]
            }
            this.parameterisation.insert = object['parameterisation'].insert
            this.parameterisation.insertUpdated()
            this.parameterisation.circle = object['parameterisation'].circle
            this.parameterisation.ellipse = object['parameterisation'].ellipse
            this.parameterisation.width = object['parameterisation'].width
            this.parameterisation.length = object['parameterisation'].length
        }
    }
}
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from Instanssi.common.auth import user_access_required
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.contrib import auth
from django.urls import reverse
from Instanssi.users.forms import OpenIDLoginForm, DjangoLoginForm, ProfileForm
from Instanssi.common.misc import get_url_local_path
AUTH_METHODS = [
# Short name, social-auth, friendly name
('facebook', 'facebook', 'Facebook'),
('google', 'google-oauth2', 'Google'),
('twitter', 'twitter', 'Twitter'),
('github', 'github', 'Github'),
('battlenet', 'battlenet-oauth2', 'Battle.net'),
('steam', 'steam', 'Steam'),
]
def login(request):
    """Render the login page or process a Django-auth login attempt.

    Already-authenticated users are redirected straight to their profile.
    The post-login redirect target is taken from ``?next=`` or from the
    local part of the HTTP referrer (``get_url_local_path`` guards against
    open redirects to external URLs).
    """
    if request.user.is_authenticated:
        return HttpResponseRedirect(reverse('users:profile'))
    # Get referer for redirect
    # Make sure that the referrer is a local path.
    if 'next' in request.GET:
        next_page = get_url_local_path(request.GET['next'])
    else:
        next_page = get_url_local_path(request.META.get('HTTP_REFERER', reverse('users:profile')))
    # Handle username/password (Django auth) form submission; on invalid
    # input the bound form is re-rendered with its errors.
    if request.method == "POST":
        djangoform = DjangoLoginForm(request.POST)
        if djangoform.is_valid():
            djangoform.login(request)
            return HttpResponseRedirect(djangoform.cleaned_data['next'])
    else:
        djangoform = DjangoLoginForm(next=next_page)
    # Openid login form
    # The form will be handled elsewhere; this is only for rendering the form.
    openidform = OpenIDLoginForm(next=next_page)
    # Render response
    return render(request, "users/login.html", {
        'djangoform': djangoform,
        'openidform': openidform,
        'next': next_page,
        'AUTH_METHODS': AUTH_METHODS
    })
def loggedout(request):
return render(request, "users/loggedout.html")
@user_access_required
def profile(request):
from social_django.models import DjangoStorage
if request.method == "POST":
profileform = ProfileForm(request.POST, instance=request.user, user=request.user)
if profileform.is_valid():
profileform.save()
return HttpResponseRedirect(reverse('users:profile'))
else:
profileform = ProfileForm(instance=request.user, user=request.user)
# Get all active providers for this user
active_providers = []
for social_auth in DjangoStorage.user.get_social_auth_for_user(request.user):
active_providers.append(social_auth.provider)
# Providers list<|fim▁hole|> return render(request, "users/profile.html", {
'profileform': profileform,
'active_providers': active_providers,
'AUTH_METHODS': methods
})
def logout(request):
    """Log the current user out and redirect to the logged-out page."""
    auth.logout(request)
    return HttpResponseRedirect(reverse('users:loggedout'))
for method in AUTH_METHODS:
methods.append(method + (method[1] in active_providers, ))
|
<|file_name|>DecisionCompiler.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.core.compiler;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import org.kie.dmn.api.core.DMNType;
import org.kie.dmn.api.core.ast.BusinessKnowledgeModelNode;
import org.kie.dmn.api.core.ast.DMNNode;
import org.kie.dmn.api.core.ast.DecisionNode;
import org.kie.dmn.api.core.ast.DecisionServiceNode;
import org.kie.dmn.api.core.ast.InputDataNode;
import org.kie.dmn.core.api.DMNExpressionEvaluator;
import org.kie.dmn.core.ast.DecisionNodeImpl;
import org.kie.dmn.core.impl.CompositeTypeImpl;
import org.kie.dmn.core.impl.DMNModelImpl;
import org.kie.dmn.core.util.Msg;
import org.kie.dmn.model.api.DRGElement;
import org.kie.dmn.model.api.Decision;
public class DecisionCompiler implements DRGElementCompiler {
@Override
public boolean accept(DRGElement de) {
return de instanceof Decision;
}
/**
 * Creates a {@link DecisionNodeImpl} for the given {@link Decision}, resolves its
 * result type from the variable's typeRef (falling back to the decision itself
 * when absent) and registers the node on the model. Reports an error and bails
 * out when the decision has no variable.
 */
@Override
public void compileNode(DRGElement de, DMNCompilerImpl compiler, DMNModelImpl model) {
    Decision decision = (Decision) de;
    DecisionNodeImpl dn = new DecisionNodeImpl( decision );
    if ( decision.getVariable() == null ) {
        DMNCompilerHelper.reportMissingVariable( model, de, decision, Msg.MISSING_VARIABLE_FOR_DECISION );
        return;
    }
    DMNCompilerHelper.checkVariableName( model, decision, decision.getName() );
    // the variable is guaranteed non-null past the early return above, so the
    // previous second null-check on getVariable() was redundant
    final DMNType type;
    if ( decision.getVariable().getTypeRef() != null ) {
        type = compiler.resolveTypeRef( model, decision, decision.getVariable(), decision.getVariable().getTypeRef() );
    } else {
        type = compiler.resolveTypeRef( model, decision, decision, null );
    }
    dn.setResultType( type );
    model.addDecision( dn );
}
@Override
public boolean accept(DMNNode node) {
return node instanceof DecisionNodeImpl;
}
@Override
public void compileEvaluator(DMNNode node, DMNCompilerImpl compiler, DMNCompilerContext ctx, DMNModelImpl model) {
DecisionNodeImpl di = (DecisionNodeImpl) node;
compiler.linkRequirements( model, di );
ctx.enterFrame();
try {
Map<String, DMNType> importedTypes = new HashMap<>();
for( DMNNode dep : di.getDependencies().values() ) {
if( dep instanceof DecisionNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
ctx.setVariable(dep.getName(), ((DecisionNode) dep).getResultType());
} else {
// then the Decision dependency is an imported Decision.<|fim▁hole|> CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((DecisionNode) dep).getResultType());
}
}
} else if( dep instanceof InputDataNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
ctx.setVariable(dep.getName(), ((InputDataNode) dep).getType());
} else {
// then the InputData dependency is an imported InputData.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((InputDataNode) dep).getType());
}
}
} else if( dep instanceof BusinessKnowledgeModelNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
// might need to create a DMNType for "functions" and replace the type here by that
ctx.setVariable(dep.getName(), ((BusinessKnowledgeModelNode) dep).getResultType());
} else {
// then the BKM dependency is an imported BKM.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((BusinessKnowledgeModelNode) dep).getResultType());
}
}
} else if (dep instanceof DecisionServiceNode) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
// might need to create a DMNType for "functions" and replace the type here by that
ctx.setVariable(dep.getName(), ((DecisionServiceNode) dep).getResultType());
} else {
// then the BKM dependency is an imported BKM.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((DecisionServiceNode) dep).getResultType());
}
}
}
}
for (Entry<String, DMNType> importedType : importedTypes.entrySet()) {
ctx.setVariable(importedType.getKey(), importedType.getValue());
}
DMNExpressionEvaluator evaluator = compiler.getEvaluatorCompiler().compileExpression( ctx, model, di, di.getName(), di.getDecision().getExpression() );
di.setEvaluator( evaluator );
} finally {
ctx.exitFrame();
}
}
}<|fim▁end|> | Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) { |
<|file_name|>config.js<|end_file_name|><|fim▁begin|>/*************************************************************
*
* MathJax/jax/output/HTML-CSS/config.js
*
* Initializes the HTML-CCS OutputJax (the main definition is in
* MathJax/jax/input/HTML-CSS/jax.js, which is loaded when needed).
*
* ---------------------------------------------------------------------
*
* Copyright (c) 2009-2011 Design Science, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
MathJax.OutputJax["HTML-CSS"] = MathJax.OutputJax({
id: "HTML-CSS",
version: "1.1.5",
directory: MathJax.OutputJax.directory + "/HTML-CSS",
extensionDir: MathJax.OutputJax.extensionDir + "/HTML-CSS",
autoloadDir: MathJax.OutputJax.directory + "/HTML-CSS/autoload",
fontDir: MathJax.OutputJax.directory + "/HTML-CSS/fonts", // font name added later
webfontDir: MathJax.OutputJax.fontDir + "/HTML-CSS", // font name added later
config: {
scale: 100, minScaleAdjust: 50,
availableFonts: ["STIX","TeX"],
preferredFont: "TeX",
webFont: "TeX",
imageFont: "TeX",
undefinedFamily: "STIXGeneral,'Arial Unicode MS',serif",
showMathMenu: true,
styles: {
".MathJax_Display": {
"text-align": "center",
margin: "1em 0em"
},
".MathJax .merror": {
"background-color": "#FFFF88",
color: "#CC0000",
border: "1px solid #CC0000",
padding: "1px 3px",
"font-family": "serif",
"font-style": "normal",
"font-size": "90%"
},
".MathJax_Preview": {color: "#888888"},
"#MathJax_Tooltip": {
"background-color": "InfoBackground", color: "InfoText",
border: "1px solid black",
"box-shadow": "2px 2px 5px #AAAAAA", // Opera 10.5
"-webkit-box-shadow": "2px 2px 5px #AAAAAA", // Safari 3 and Chrome
"-moz-box-shadow": "2px 2px 5px #AAAAAA", // Forefox 3.5
"-khtml-box-shadow": "2px 2px 5px #AAAAAA", // Konqueror
filter: "progid:DXImageTransform.Microsoft.dropshadow(OffX=2, OffY=2, Color='gray', Positive='true')", // IE
padding: "3px 4px"
}
}
}
});
if (MathJax.Hub.Browser.isMSIE && document.documentMode >= 9)
{delete MathJax.OutputJax["HTML-CSS"].config.styles["#MathJax_Tooltip"].filter}
if (!MathJax.Hub.config.delayJaxRegistration)
{MathJax.OutputJax["HTML-CSS"].Register("jax/mml")}
MathJax.Hub.Register.StartupHook("End Config",[function (HUB,HTMLCSS) {
var CONFIG = HUB.Insert({
//
// The minimum versions that HTML-CSS supports
//
minBrowserVersion: {
Firefox: 3.0,
Opera: 9.52,
MSIE: 6.0,
Chrome: 0.3,
<|fim▁hole|> },
//
// For unsupported browsers, put back these delimiters for the preview
//
inlineMathDelimiters: ['$','$'], // or ["",""] or ["\\(","\\)"]
displayMathDelimiters: ['$$','$$'], // or ["",""] or ["\\[","\\]"]
//
// For displayed math, insert <BR> for \n?
//
multilineDisplay: true,
//
// The function to call to display the math for unsupported browsers
//
minBrowserTranslate: function (script) {
var MJ = HUB.getJaxFor(script), text = ["[Math]"], delim;
var span = document.createElement("span",{className: "MathJax_Preview"});
if (MJ.inputJax.id === "TeX") {
if (MJ.root.Get("displaystyle")) {
delim = CONFIG.displayMathDelimiters;
text = [delim[0]+MJ.originalText+delim[1]];
if (CONFIG.multilineDisplay) text = text[0].split(/\n/);
} else {
delim = CONFIG.inlineMathDelimiters;
text = [delim[0]+MJ.originalText.replace(/^\s+/,"").replace(/\s+$/,"")+delim[1]];
}
}
for (var i = 0, m = text.length; i < m; i++) {
span.appendChild(document.createTextNode(text[i]));
if (i < m-1) {span.appendChild(document.createElement("br"))}
}
script.parentNode.insertBefore(span,script);
}
},(HUB.config["HTML-CSS"]||{}));
if (HUB.Browser.version !== "0.0" &&
!HUB.Browser.versionAtLeast(CONFIG.minBrowserVersion[HUB.Browser]||0.0)) {
HTMLCSS.Translate = CONFIG.minBrowserTranslate;
HUB.Config({showProcessingMessages: false});
MathJax.Message.Set("Your browser does not support MathJax",null,4000);
HUB.Startup.signal.Post("MathJax not supported");
}
},MathJax.Hub,MathJax.OutputJax["HTML-CSS"]]);
MathJax.OutputJax["HTML-CSS"].loadComplete("config.js");<|fim▁end|> | Safari: 2.0,
Konqueror: 4.0
|
<|file_name|>establishment.resolver.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { Resolve, ActivatedRouteSnapshot } from '@angular/router';
import { Node } from '../node/node';
import { RestService } from '../rest.service';
import { Http, Response } from '@angular/http';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/mergeMap';
import 'rxjs/add/operator/merge';
import 'rxjs/add/operator/concatAll';
@Injectable()
export class EstablishmentResolver implements Resolve<any> {
constructor(private restService: RestService, private http: Http) {
}
resolve(route: ActivatedRouteSnapshot): Observable<any> {
let id = route.params['id'];
let establishment;
return this.restService.get(id, 'establishments')
.flatMap(est => {
establishment = est;
return this.restService.getList('audits', {id_establishment: establishment.id, sort:'-date_start'});
})
.flatMap(audits => {
if (!audits || audits.length == 0) {
return Observable.create(observer => {
establishment.audits = [];
observer.next(establishment);
observer.complete()});
}
let done = 0;
return Observable.create(observer => {<|fim▁hole|> done++;
this.restService.get(audit.id_inquiryform, 'hist/inquiryforms').subscribe(iq => {
audit.inquiryform = iq;
})
if (audits.length == done) {
establishment.audits = audits;
observer.next(establishment);
observer.complete();
}
});
});
});
}
}<|fim▁end|> | audits.forEach(audit => { |
<|file_name|>slack.rs<|end_file_name|><|fim▁begin|>use std::sync::{Arc, Mutex};
use log::{debug, error, info};
use serde_derive::{Deserialize, Serialize};
use crate::util;
use crate::worker;
use octobot_lib::errors::*;
use octobot_lib::http_client::HTTPClient;
use octobot_lib::metrics::Metrics;
#[derive(Serialize, Clone, PartialEq, Eq, Debug)]
pub struct SlackAttachment {
pub text: String,
pub title: Option<String>,
pub title_link: Option<String>,
pub color: Option<String>,
pub mrkdwn_in: Option<Vec<String>>,
}
#[derive(Deserialize)]
struct SlackResponse {
ok: bool,
error: Option<String>,
}
impl SlackAttachment {
pub fn new(text: &str) -> SlackAttachment {
SlackAttachment {
text: text.to_string(),
title: None,
title_link: None,
color: None,
mrkdwn_in: None,
}
}
}
pub struct SlackAttachmentBuilder {
attachment: SlackAttachment,
}
impl SlackAttachmentBuilder {
pub fn new(text: &str) -> SlackAttachmentBuilder {
SlackAttachmentBuilder {
attachment: SlackAttachment::new(text),
}
}
pub fn text<S: Into<String>>(&mut self, value: S) -> &mut SlackAttachmentBuilder {
self.attachment.text = value.into();
self
}
pub fn markdown<S: Into<String>>(&mut self, value: S) -> &mut SlackAttachmentBuilder {
self.attachment.text = value.into();
self.attachment.mrkdwn_in = Some(vec!["text".into()]);
self
}
pub fn title<S: Into<String>>(&mut self, value: S) -> &mut SlackAttachmentBuilder {
self.attachment.title = Some(value.into());
self
}
pub fn title_link<S: Into<String>>(&mut self, value: S) -> &mut SlackAttachmentBuilder {
self.attachment.title_link = Some(value.into());
self
}
pub fn color<S: Into<String>>(&mut self, value: S) -> &mut SlackAttachmentBuilder {
self.attachment.color = Some(value.into());
self
}
pub fn build(&self) -> SlackAttachment {
self.attachment.clone()
}
}
#[derive(Serialize, Clone, PartialEq)]
struct SlackMessage {
text: String,
attachments: Vec<SlackAttachment>,
channel: String,
}
// the main object for sending messages to slack
struct Slack {
client: Arc<HTTPClient>,
recent_messages: Mutex<Vec<SlackMessage>>,
}
const TRIM_MESSAGES_AT: usize = 200;
const TRIM_MESSAGES_TO: usize = 20;
impl Slack {
pub fn new(bot_token: String, metrics: Arc<Metrics>) -> Slack {
let mut headers = reqwest::header::HeaderMap::new();
headers.append(
reqwest::header::AUTHORIZATION,
format!("Bearer {}", bot_token).parse().unwrap(),
);
let client = Arc::new(
HTTPClient::new_with_headers("https://slack.com/api", headers)
.unwrap()
.with_metrics(
metrics.slack_api_responses.clone(),
metrics.slack_api_duration.clone(),
),
);
Slack {
client,
recent_messages: Mutex::new(Vec::new()),
}
}
async fn send(&self, channel: &str, msg: &str, attachments: Vec<SlackAttachment>) {
let slack_msg = SlackMessage {
text: msg.to_string(),
attachments,
channel: channel.to_string(),
};<|fim▁hole|> if !self.is_unique(&slack_msg) {
info!("Skipping duplicate message to {}", channel);
return;
}
debug!("Sending message to #{}", channel);
let res: Result<SlackResponse> = self.client.post("/chat.postMessage", &slack_msg).await;
match res {
Ok(r) => {
if r.ok {
info!("Successfully sent slack message to {}", channel)
} else {
error!(
"Error sending slack message to {}: {}",
channel,
r.error.unwrap_or_default(),
)
}
}
Err(e) => error!("Error sending slack message to {}: {}", channel, e),
}
}
fn is_unique(&self, req: &SlackMessage) -> bool {
let mut recent_messages = self.recent_messages.lock().unwrap();
util::check_unique_event(
req.clone(),
&mut *recent_messages,
TRIM_MESSAGES_AT,
TRIM_MESSAGES_TO,
)
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct SlackRequest {
pub channel: String,
pub msg: String,
pub attachments: Vec<SlackAttachment>,
}
struct Runner {
slack: Arc<Slack>,
}
pub fn req(channel: &str, msg: &str, attachments: &[SlackAttachment]) -> SlackRequest {
SlackRequest {
channel: channel.into(),
msg: msg.into(),
attachments: attachments.into(),
}
}
pub fn new_runner(
bot_token: String,
metrics: Arc<Metrics>,
) -> Arc<dyn worker::Runner<SlackRequest>> {
Arc::new(Runner {
slack: Arc::new(Slack::new(bot_token, metrics)),
})
}
#[async_trait::async_trait]
impl worker::Runner<SlackRequest> for Runner {
async fn handle(&self, req: SlackRequest) {
self.slack
.send(&req.channel, &req.msg, req.attachments)
.await;
}
}<|fim▁end|> | |
<|file_name|>go.js<|end_file_name|><|fim▁begin|>/*
Language: Go
Author: Stephan Kountso aka StepLg <[email protected]>
Contributors: Evgeny Stepanischev <[email protected]>
Description: Google go language (golang). For info about language
Website: http://golang.org/
Category: common, system
*/
function(hljs) {
var GO_KEYWORDS = {
keyword:
'break default func interface select case map struct chan else goto package switch ' +
'const fallthrough if range type continue for import return var go defer ' +
'bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string uint8 ' +
'uint16 uint32 uint64 int uint uintptr rune',
literal:
'true false iota nil',
built_in:
'append cap close complex copy imag len make new panic print println real recover delete'
};
return {
aliases: ['golang'],
keywords: GO_KEYWORDS,
illegal: '</',
contains: [
hljs.C_LINE_COMMENT_MODE,
hljs.C_BLOCK_COMMENT_MODE,
{
className: 'string',
variants: [
hljs.QUOTE_STRING_MODE,
{begin: '\'', end: '[^\\\\]\''},
{begin: '`', end: '`'},<|fim▁hole|> ]
},
{
className: 'number',
variants: [
{begin: hljs.C_NUMBER_RE + '[i]', relevance: 1},
hljs.C_NUMBER_MODE
]
},
{
begin: /:=/ // relevance booster
},
{
className: 'function',
beginKeywords: 'func', end: '\\s*(\\{|$)', excludeEnd: true,
contains: [
hljs.TITLE_MODE,
{
className: 'params',
begin: /\(/, end: /\)/,
keywords: GO_KEYWORDS,
illegal: /["']/
}
]
}
]
};
}<|fim▁end|> | |
<|file_name|>properties.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Property classes for building wrapper classes for Pikov nodes.
We want to wrap our semantic graph with Python classes. This allows us to
interact with Python objects to modify the guid_map.
These classes encode the core types used in the semantic graph. When classes
use these properties, the guid_map is updated with the correct serialization
of the property.
"""
from .core import Int64Node, StringNode
class AbstractSemanticGraphProperty(object):
def __init__(self, label):
self._label = label
def from_node(self, obj, value):
raise NotImplementedError()
def to_node(self, value):
raise NotImplementedError()
def __get__(self, obj, type=None):
return self.from_node(obj, obj[self._label])
def __set__(self, obj, value):
obj[self._label] = self.to_node(value)
class UnspecifiedProperty(AbstractSemanticGraphProperty):
def from_node(self, obj, value):
obj._graph.get_value(obj, self._label)
def to_node(self, value):
# Value should already by a Node.
return value
class GuidProperty(AbstractSemanticGraphProperty):<|fim▁hole|>
def from_node(self, obj, value):
if value is None:
return None
return self._cls(obj._graph, guid=value.guid)
def to_node(self, value):
# Value should already by a GuidNode.
return value
def make_guid_property(wrapped):
def __init__(self, label):
GuidProperty.__init__(self, label, wrapped)
return type(
wrapped.__name__ + "Property",
(GuidProperty,),
{
"__init__": __init__,
}
)
class ScalarProperty(AbstractSemanticGraphProperty):
def from_node(self, obj, value):
if value is None:
return None
return value.value
class Int64Property(ScalarProperty):
def to_node(self, value):
if value is None:
return None
return Int64Node(value)
class StringProperty(ScalarProperty):
def to_node(self, value):
if value is None:
return None
return StringNode(value)<|fim▁end|> | def __init__(self, label, cls):
super().__init__(label)
self._cls = cls |
<|file_name|>bitcoin_ca.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ca" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Guncoin</source>
<translation>A prop de Guncoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>Guncoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The Guncoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Llibreta d'adreçes</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia la selecció actual al porta-retalls del sistema</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Guncoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Guncoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Guncoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Guncoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR GuncoinS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Guncoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your guncoins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/><|fim▁hole|> <location line="+280"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Show information about Guncoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Guncoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Guncoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Guncoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About Guncoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Guncoin addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Guncoin addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Guncoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Guncoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Guncoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Guncoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Guncoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Guncoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start Guncoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start Guncoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Guncoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the Guncoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Guncoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show Guncoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Guncoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Guncoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start guncoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Guncoin-Qt help message to get a list with possible Guncoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>Guncoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Guncoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the Guncoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Guncoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Guncoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Guncoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Guncoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Guncoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter Guncoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Guncoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Guncoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or guncoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: hitcoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: guncoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 4867 or testnet: 14891)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 4869 or testnet: 21101)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=guncoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Guncoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Guncoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Guncoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Guncoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Guncoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Guncoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Guncoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | <source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message> |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
#import unittest
import os
import sys
from functools import wraps
from django.conf import settings
from south.hacks import hacks
# Make sure skipping tests is available.
try:
# easiest and best is unittest included in Django>=1.3
from django.utils import unittest
except ImportError:
# earlier django... use unittest from stdlib
import unittest
# however, skipUnless was only added in Python 2.7;
# if not available, we need to do something else
try:
    # Python >= 2.7 (or Django's bundled unittest2) provides the real decorator.
    skipUnless = unittest.skipUnless #@UnusedVariable
except AttributeError:
    def skipUnless(condition, message):
        """Fallback for old unittest: run the decorated test method only
        when *condition* is true; otherwise report the skip on stdout,
        since no SkipTest exception exists on this Python."""
        def _decorate(test_method):
            @wraps(test_method)
            def _guarded(self):
                if not condition:
                    # No skip machinery available -- announce and bail out.
                    print("Skipping", test_method.__name__,"--", message)
                    return
                test_method(self)
            return _guarded
        return _decorate
# ditto for skipIf
try:
skipIf = unittest.skipIf #@UnusedVariable
except AttributeError:
def skipIf(condition, message):
def decorator(testfunc):
@wraps(testfunc)
def wrapper(self):
if condition:
print("Skipping", testfunc.__name__,"--", message)
else:
# Apply method
testfunc(self)
return wrapper
return decorator
# Add the tests directory so fakeapp is on sys.path
test_root = os.path.dirname(__file__)
sys.path.append(test_root)
# Note: the individual test files are imported below this.<|fim▁hole|> """
Base test class for tests that play with the INSTALLED_APPS setting at runtime.
"""
def create_fake_app(self, name):
class Fake:
pass
fake = Fake()
fake.__name__ = name
try:
fake.migrations = __import__(name + ".migrations", {}, {}, ['migrations'])
except ImportError:
pass
return fake
def setUp(self):
"""
Changes the Django environment so we can run tests against our test apps.
"""
if hasattr(self, 'installed_apps'):
hacks.store_app_cache_state()
hacks.set_installed_apps(self.installed_apps)
# Make sure dependencies are calculated for new apps
Migrations._dependencies_done = False
def tearDown(self):
"""
Undoes what setUp did.
"""
if hasattr(self, 'installed_apps'):
hacks.reset_installed_apps()
hacks.restore_app_cache_state()
# Try importing all tests if asked for (then we can run 'em)
try:
skiptest = settings.SKIP_SOUTH_TESTS
except:
skiptest = True
if not skiptest:
from south.tests.db import *
from south.tests.db_mysql import *
from south.tests.logic import *
from south.tests.autodetection import *
from south.tests.logger import *
from south.tests.inspector import *
from south.tests.freezer import *<|fim▁end|> |
class Monkeypatcher(unittest.TestCase):
|
<|file_name|>ParseUtil_4922813.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2003, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/* @test
@bug 4922813
@summary Check the new impl of encodePath will not cause regression
@key randomness
*/
import java.util.BitSet;
import java.io.File;
import java.util.Random;
import sun.net.www.ParseUtil;
public class ParseUtil_4922813 {
public static void main(String[] argv) throws Exception {
int num = 400;
while (num-- >= 0) {
String source = getTestSource();
String ec = sun.net.www.ParseUtil.encodePath(source);
String v117 = ParseUtil_V117.encodePath(source);
if (!ec.equals(v117)) {
throw new RuntimeException("Test Failed for : \n"
+ " source =<"
+ getUnicodeString(source)
+ ">");
}
}
}
static int maxCharCount = 200;
static int maxCodePoint = 0x10ffff;
static Random random;
static String getTestSource() {
if (random == null) {
long seed = System.currentTimeMillis();
random = new Random(seed);
}
String source = "";
int i = 0;
int count = random.nextInt(maxCharCount) + 1;
while (i < count) {
int codepoint = random.nextInt(127);
source = source + String.valueOf((char)codepoint);
codepoint = random.nextInt(0x7ff);
source = source + String.valueOf((char)codepoint);
codepoint = random.nextInt(maxCodePoint);
source = source + new String(Character.toChars(codepoint));
i += 3;
}
return source;
}
static String getUnicodeString(String s){
String unicodeString = "";
for(int j=0; j< s.length(); j++){
unicodeString += "0x"+ Integer.toString(s.charAt(j), 16);
}
return unicodeString;
}
}
class ParseUtil_V117 {
static BitSet encodedInPath;
static {
encodedInPath = new BitSet(256);
// Set the bits corresponding to characters that are encoded in the
// path component of a URI.
// These characters are reserved in the path segment as described in
// RFC2396 section 3.3.
encodedInPath.set('=');
encodedInPath.set(';');
encodedInPath.set('?');
encodedInPath.set('/');
// These characters are defined as excluded in RFC2396 section 2.4.3
// and must be escaped if they occur in the data part of a URI.
encodedInPath.set('#');
encodedInPath.set(' ');
encodedInPath.set('<');
encodedInPath.set('>');
encodedInPath.set('%');
encodedInPath.set('"');<|fim▁hole|> encodedInPath.set('}');
encodedInPath.set('|');
encodedInPath.set('\\');
encodedInPath.set('^');
encodedInPath.set('[');
encodedInPath.set(']');
encodedInPath.set('`');
// US ASCII control characters 00-1F and 7F.
for (int i=0; i<32; i++)
encodedInPath.set(i);
encodedInPath.set(127);
}
/**
* Constructs an encoded version of the specified path string suitable
* for use in the construction of a URL.
*
* A path separator is replaced by a forward slash. The string is UTF8
* encoded. The % escape sequence is used for characters that are above
* 0x7F or those defined in RFC2396 as reserved or excluded in the path
* component of a URL.
*/
public static String encodePath(String path) {
StringBuffer sb = new StringBuffer();
int n = path.length();
for (int i=0; i<n; i++) {
char c = path.charAt(i);
if (c == File.separatorChar)
sb.append('/');
else {
if (c <= 0x007F) {
if (encodedInPath.get(c))
escape(sb, c);
else
sb.append(c);
} else if (c > 0x07FF) {
escape(sb, (char)(0xE0 | ((c >> 12) & 0x0F)));
escape(sb, (char)(0x80 | ((c >> 6) & 0x3F)));
escape(sb, (char)(0x80 | ((c >> 0) & 0x3F)));
} else {
escape(sb, (char)(0xC0 | ((c >> 6) & 0x1F)));
escape(sb, (char)(0x80 | ((c >> 0) & 0x3F)));
}
}
}
return sb.toString();
}
/**
* Appends the URL escape sequence for the specified char to the
* specified StringBuffer.
*/
private static void escape(StringBuffer s, char c) {
s.append('%');
s.append(Character.forDigit((c >> 4) & 0xF, 16));
s.append(Character.forDigit(c & 0xF, 16));
}
}<|fim▁end|> | encodedInPath.set('{'); |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
<|fim▁hole|> @patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')<|fim▁end|> | |
<|file_name|>S12.14_A2.js<|end_file_name|><|fim▁begin|>// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* @name: S12.14_A2;
* @section: 12.14;
* @assertion: Throwing exception with "throw" and catching it with "try" statement;
* @description: Checking if execution of "catch" catches an exception thrown with "throw";
*/
// CHECK#1
try {
throw "catchme";
$ERROR('#1: throw "catchme" lead to throwing exception');
}
catch(e){}
// CHECK#2
var c2=0;
try{
try{
throw "exc";
$ERROR('#2.1: throw "exc" lead to throwing exception');
}finally{
c2=1;
}
}
catch(e){
if (c2!==1){
$ERROR('#2.2: "finally" block must be evaluated');
}<|fim▁hole|>}
// CHECK#3
var c3=0;
try{
throw "exc";
$ERROR('#3.1: throw "exc" lead to throwing exception');
}
catch(err){
var x3=1;
}
finally{
c3=1;
}
if (x3!==1){
$ERROR('#3.2: "catch" block must be evaluated');
}
if (c3!==1){
$ERROR('#3.3: "finally" block must be evaluated');
}<|fim▁end|> | |
<|file_name|>Snackbar.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { fetchIP } from '../../actions/fetchIP';
import { Snackbar } from 'react-toolbox';
import theme from './Snackbar.css';
class SnackbarComponent extends Component {
constructor(props) {
super(props);
this.state = {
active: false,
message: ''
};<|fim▁hole|> this.handleSnackbarClick = this.handleSnackbarClick.bind(this);
this.handleSnackbarTimeout = this.handleSnackbarTimeout.bind(this);
}
handleSnackbarClick = () => {
this.setState({active: false});
};
handleSnackbarTimeout = () => {
if (this._isMounted) {
this.setState({active: false});
}
this.props.fetchIP();
};
componentWillReceiveProps(props) {
this.setState({
active: props.error,
message: props.message
});
}
componentDidMount() {
this._isMounted = true;
}
componentWillUnmount() {
this._isMounted = false;
}
render() {
return (
<section>
<Snackbar
theme={theme}
action="Hide"
active={this.props.weather.error}
label={this.props.weather.message}
timeout={1500}
onClick={this.handleSnackbarClick}
onTimeout={this.handleSnackbarTimeout}
type="warning"
/>
</section>
);
}
}
function mapDispatchToProps(dispatch) {
return bindActionCreators({ fetchIP }, dispatch);
}
function mapStateToProps({ weather }) {
return { weather };
}
export default connect(mapStateToProps, mapDispatchToProps)(SnackbarComponent);<|fim▁end|> | |
<|file_name|>h264_parser_fuzzertest.cc<|end_file_name|><|fim▁begin|>// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stddef.h>
#include "base/numerics/safe_conversions.h"
#include "base/optional.h"
#include "media/video/h264_parser.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
static volatile size_t volatile_sink;
// Entry point for LibFuzzer.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
if (!size)
return 0;
media::H264Parser parser;
parser.SetStream(data, base::checked_cast<off_t>(size));
// Parse until the end of stream/unsupported stream/error in stream is
// found.
while (true) {
media::H264NALU nalu;
media::H264Parser::Result res = parser.AdvanceToNextNALU(&nalu);
if (res != media::H264Parser::kOk)<|fim▁hole|> case media::H264NALU::kIDRSlice:
case media::H264NALU::kNonIDRSlice: {
media::H264SliceHeader shdr;
res = parser.ParseSliceHeader(nalu, &shdr);
break;
}
case media::H264NALU::kSPS: {
int id;
res = parser.ParseSPS(&id);
if (res != media::H264Parser::kOk)
break;
const media::H264SPS* sps = parser.GetSPS(id);
if (!sps)
break;
// Also test the SPS helper methods. We make sure that the results are
// used so that the calls are not optimized away.
base::Optional<gfx::Size> coded_size = sps->GetCodedSize();
volatile_sink = coded_size.value_or(gfx::Size()).ToString().length();
base::Optional<gfx::Rect> visible_rect = sps->GetVisibleRect();
volatile_sink = visible_rect.value_or(gfx::Rect()).ToString().length();
break;
}
case media::H264NALU::kPPS: {
int id;
res = parser.ParsePPS(&id);
break;
}
case media::H264NALU::kSEIMessage: {
media::H264SEIMessage sei_msg;
res = parser.ParseSEI(&sei_msg);
break;
}
default:
// Skip any other NALU.
break;
}
if (res != media::H264Parser::kOk)
break;
}
return 0;
}<|fim▁end|> | break;
switch (nalu.nal_unit_type) { |
<|file_name|>run_transform_on_couchdb_docs.py<|end_file_name|><|fim▁begin|>'''This allows running a bit of code on couchdb docs.
code should take a json python object, modify it and hand back to the code
Not quite that slick yet, need way to pass in code or make this a decorator
'''
import importlib
from harvester.collection_registry_client import Collection
from harvester.couchdb_init import get_couchdb
COUCHDB_VIEW = 'all_provider_docs/by_provider_name'
def run_on_couchdb_by_collection(func, collection_key=None):
'''If collection_key is none, trying to grab all of docs and modify
func is a function that takes a couchdb doc in and returns it modified.
(can take long time - not recommended)
Function should return new document or None if no changes made
'''
_couchdb = get_couchdb()
v = _couchdb.view(COUCHDB_VIEW, include_docs='true', key=collection_key) \
if collection_key else _couchdb.view(COUCHDB_VIEW,
include_docs='true')
doc_ids = []
n = 0
for r in v:
n += 1
doc_new = func(r.doc)
if doc_new and doc_new != doc:
_couchdb.save(doc_new)
doc_ids.append(r.doc['_id'])
if n % 100 == 0:
print '{} docs ran. Last doc:{}\n'.format(n, r.doc['_id'])
return doc_ids
def run_on_couchdb_doc(docid, func):
'''Run on a doc, by doc id'''
_couchdb = get_couchdb()
doc = _couchdb[docid]
mod_name, func_name = func.rsplit('.', 1)
fmod = importlib.import_module(mod_name)
ffunc = getattr(fmod, func_name)
doc_new = ffunc(doc)
if doc_new and doc_new != doc:
_couchdb.save(doc_new)
return True
return False
C_CACHE = {}
def update_collection_description(doc):
cjson = doc['originalRecord']['collection'][0]
# get collection description
if 'description' not in cjson:
if cjson['@id'] in C_CACHE:
c = C_CACHE[cjson['@id']]
else:
c = Collection(url_api=cjson['@id'])
C_CACHE[cjson['@id']] = c
description = c['description'] if c['description'] else c['name']
print('DOC: {} DESCRIP: {}'.format(
doc['_id'], c['description'].encode('utf8')))
doc['originalRecord']['collection'][0]['description'] = description
doc['sourceResource']['collection'][0]['description'] = description<|fim▁hole|>def add_rights_and_type_to_collection(doc):
cjson = doc['originalRecord']['collection'][0]
# get collection description
if cjson['@id'] in C_CACHE:
c = C_CACHE[cjson['@id']]
else:
c = Collection(url_api=cjson['@id'])
C_CACHE[cjson['@id']] = c
doc['originalRecord']['collection'][0]['rights_status'] = c['rights_status']
doc['originalRecord']['collection'][0]['rights_statement'] = c['rights_statement']
doc['originalRecord']['collection'][0]['dcmi_type']=c['dcmi_type']
if 'collection' in doc['sourceResource']:
doc['sourceResource']['collection'][0]['rights_status'] = c['rights_status']
doc['sourceResource']['collection'][0]['rights_statement'] = c['rights_statement']
doc['sourceResource']['collection'][0]['dcmi_type'] = c['dcmi_type']
else:
doc['sourceResource']['collection'] = doc['originalRecord']['collection']
return doc<|fim▁end|> | return doc
|
<|file_name|>ChamferDistanceTransform2D.java<|end_file_name|><|fim▁begin|>/**
*
*/
package inra.ijpb.binary.distmap;
/**
* Specialization of DistanceTransform based on the use of a chamfer mask.
*
* Provides methods for retrieving the mask, and the normalization weight.
*
* @author dlegland
*/
public interface ChamferDistanceTransform2D extends DistanceTransform<|fim▁hole|> *
* @return the chamfer mask used by this distance transform algorithm.
*/
public ChamferMask2D mask();
}<|fim▁end|> | {
/**
* Return the chamfer mask used by this distance transform algorithm. |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub fn to_upper_first(input: &str) -> String {
let mut c = input.chars();
match c.next() {
None => String::new(),
Some(x) => x.to_uppercase().collect::<String>() + c.as_str()
}
}
pub fn to_lower_first(input: &str) -> String {
let mut c = input.chars();
match c.next() {<|fim▁hole|> }
}<|fim▁end|> | None => String::new(),
Some(x) => x.to_lowercase().collect::<String>() + c.as_str() |
<|file_name|>cpu.py<|end_file_name|><|fim▁begin|>"""Script to execute CPU and get a best move using Minimax algorithm"""
import copy<|fim▁hole|>OPP = [1, 0]
def eval_rc(board, player, glength, roc):
"""Returns row or column score"""
score_sum = 0
clone_board = board
if roc == "c":
clone_board = [[board[j][i] for j in xrange(glength)] for i in xrange(glength)]
for i in xrange(glength):
score = 0
if clone_board[i][0] == player:
score = 1
else:
score = -1
for j in xrange(1, glength):
if clone_board[i][j] == player and score > 0:
score = score * 10
elif board[i][j] == player and score < 0:
score = 0
break
elif board[i][j] == player:
score = 1
elif board[i][j] == OPP[player] and score < 0:
score = score * 10
elif board[i][j] == OPP[player] and score > 0:
score = 0
break
elif board[i][j] == OPP[player]:
score = 1
score_sum = score_sum + score
return score_sum
def eval_diags(board, player, glength):
"""Returns diagonal score"""
score = 0
if board[0][0] == player:
score = 1
elif board[0][0] == OPP[player]:
score = -1
for i in range(1, glength):
if board[i][i] == player and score > 0:
score = score * 10
elif board[i][i] == player and score < 0:
score = 0
break
elif board[i][i] == player:
score = 1
elif board[i][i] == OPP[player] and score < 0:
score = score * 10
elif board[i][i] == OPP[player] and score > 0:
score = 0
break
elif board[i][i] == OPP[player]:
score = 1
score_sum = score
score = 0
if board[glength - 1][0] == player:
score = 1
else:
score = -1
for i in range(1, glength):
if board[glength - i - 1][i] == player and score > 0:
score = score * 10
elif board[glength - i - 1][i] == player and score < 0:
score = 0
break
elif board[glength - i - 1][i] == player:
score = 1
elif board[glength - i - 1][i] == OPP[player] and score < 0:
score = score * 10
elif board[glength - i - 1][i] == OPP[player] and score > 0:
score = 0
break
elif board[glength - i - 1][i] == OPP[player]:
score = 1
score_sum = score_sum + score
return score_sum
def evaluate(board, player, glength):
"""Evaluates the score for the player based on horizontal, vertical and diagonal advantages"""
score = eval_rc(board, player, glength, "r")
score += eval_rc(board, player, glength, "c")
score += eval_diags(board, player, glength)
return score
def get_moves(board, glength):
"""Returns all possible moves"""
moves = []
for i in range(glength):
for j in range(glength):
if board[i][j] == -1:
moves = moves + [[i, j]]
return moves
def gen_board(board, player, pos):
"""Returns a new clone board by playing a move"""
new_board = copy.deepcopy(board)
new_board[pos[0]][pos[1]] = player
return new_board
def if_second_move(board, glength):
"""Returns True if it is the second move of the game, otherwise False"""
check = 0
for i in xrange(glength):
for j in xrange(glength):
if board[i][j] == 0 or board[i][j] == 1:
check += 1
if check > 1:
return False
return True
def minimax(board, player, depth, glength):
"""Returns the best move for the CPU by traversing
all best CPU and worst user moves with depth
"""
moves = get_moves(board, glength)
if not moves:
return None
if len(moves) == 1 or if_second_move(board, glength):
return moves[0]
best_move = moves[0]
best_score = 0.0
for move in moves:
clone_board = gen_board(board, player, move)
if win(clone_board, player, glength):
return move
for move in moves:
clone_board = gen_board(board, OPP[player], move)
if win(clone_board, OPP[player], glength):
return move
for move in moves:
clone_board = gen_board(board, player, move)
if win(clone_board, player, glength):
return move
score = min_play(clone_board, OPP[player], depth, glength)
if best_score < score:
best_score = score
best_move = move
return best_move
def min_play(board, player, depth, glength):
"""Returns the worst score for the player"""
moves = get_moves(board, glength)
if not moves or depth == 0:
return evaluate(board, player, glength)
best_score = float('inf')
for move in moves:
clone_board = gen_board(board, player, move)
if win(clone_board, player, glength):
return evaluate(clone_board, player, glength)
score = max_play(clone_board, OPP[player], depth - 1, glength)
if score < best_score:
best_score = score
return best_score
def max_play(board, player, depth, glength):
"""Returns the best score for the CPU"""
moves = get_moves(board, glength)
if not moves or depth == 0:
return evaluate(board, player, glength)
best_score = float('-inf')
for move in moves:
clone_board = gen_board(board, player, move)
if win(clone_board, player, glength):
return evaluate(clone_board, player, glength)
score = max_play(clone_board, OPP[player], depth - 1, glength)
if score > best_score:
best_score = score
return best_score<|fim▁end|> | from common import board_full, win
|
<|file_name|>RFTConfig.cpp<|end_file_name|><|fim▁begin|>/*
Copyright 2019 Equinor ASA.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <opm/parser/eclipse/EclipseState/Schedule/TimeMap.hpp>
#include <opm/parser/eclipse/EclipseState/Schedule/RFTConfig.hpp>
namespace Opm {
RFTConfig::RFTConfig(const TimeMap& time_map) :
tm(time_map)
{
}
bool RFTConfig::rft(const std::string& well_name, std::size_t report_step) const {
if (report_step >= this->tm.size())
throw std::invalid_argument("Invalid ");
const auto well_iter = this->well_open.find(well_name);
if (well_iter != this->well_open.end()) {
// A general "Output RFT when the well is opened" has been configured with WRFT
if (this->well_open_rft_time.first && this->well_open_rft_time.second <= report_step) {
if (well_iter->second == report_step)
return true;
}
// A FOPN setting has been configured with the WRFTPLT keyword
if (this->well_open_rft_name.count(well_name) > 0) {
if (well_iter->second == report_step)
return true;
}
}
if (this->rft_config.count(well_name) == 0)
return false;
auto rft_pair = this->rft_config.at(well_name)[report_step];
if (rft_pair.first == RFTConnections::YES)
return (rft_pair.second == report_step);
if (rft_pair.first == RFTConnections::NO)
return false;
if (rft_pair.first == RFTConnections::REPT)
return true;
if (rft_pair.first == RFTConnections::TIMESTEP)
return true;
return false;
}
bool RFTConfig::plt(const std::string& well_name, std::size_t report_step) const {
if (report_step >= this->tm.size())
throw std::invalid_argument("Invalid ");
if (this->plt_config.count(well_name) == 0)
return false;
auto plt_pair = this->plt_config.at(well_name)[report_step];
if (plt_pair.first == PLTConnections::YES)
return (plt_pair.second == report_step);
if (plt_pair.first == PLTConnections::NO)
return false;
if (plt_pair.first == PLTConnections::REPT)
return true;
if (plt_pair.first == PLTConnections::TIMESTEP)
return true;
return false;
}
void RFTConfig::updateRFT(const std::string& well_name, std::size_t report_step, RFTConnections::RFTEnum value) {
if (value == RFTConnections::FOPN)
this->setWellOpenRFT(well_name);
else {
if (this->rft_config.count(well_name) == 0) {
auto state = DynamicState<std::pair<RFTConnections::RFTEnum, std::size_t>>(this->tm, std::make_pair(RFTConnections::NO, 0));
this->rft_config.emplace( well_name, state );
}
this->rft_config.at(well_name).update(report_step, std::make_pair(value, report_step));
}
}
void RFTConfig::updatePLT(const std::string& well_name, std::size_t report_step, PLTConnections::PLTEnum value) {
if (this->plt_config.count(well_name) == 0) {
auto state = DynamicState<std::pair<PLTConnections::PLTEnum, std::size_t>>(this->tm, std::make_pair(PLTConnections::NO, 0));
this->plt_config.emplace( well_name, state );
}
this->plt_config.at(well_name).update(report_step, std::make_pair(value, report_step));
}
bool RFTConfig::getWellOpenRFT(const std::string& well_name, std::size_t report_step) const {
if (this->well_open_rft_name.count(well_name) > 0)
return true;
return (this->well_open_rft_time.first && this->well_open_rft_time.second <= report_step);
}
void RFTConfig::setWellOpenRFT(std::size_t report_step) {
this->well_open_rft_time = std::make_pair(true, report_step);
}
void RFTConfig::setWellOpenRFT(const std::string& well_name) {
this->well_open_rft_name.insert( well_name );
}
void RFTConfig::addWellOpen(const std::string& well_name, std::size_t report_step) {
if (this->well_open.count(well_name) == 0)
this->well_open[well_name] = report_step;
}
std::size_t RFTConfig::firstRFTOutput() const {
std::size_t first_rft = this->tm.size();
if (this->well_open_rft_time.first) {
// The WRFT keyword has been used to request RFT output at well open for all wells.
std::size_t rft_time = this->well_open_rft_time.second;
for (const auto& rft_pair : this->well_open) {
if (rft_pair.second >= rft_time)
first_rft = std::min(first_rft, rft_pair.second);
}
} else {
// Go through the individual wells and look for first open settings
for (const auto& rft_pair : this->well_open)
first_rft = std::min(first_rft, rft_pair.second);
}
for (const auto& rft_pair : this->plt_config) {
const auto& dynamic_state = rft_pair.second;
auto pred = [] (const std::pair<PLTConnections::PLTEnum, std::size_t>& ) { return false; };
int this_first_rft = dynamic_state.find_if(pred);
if (this_first_rft >= 0)
first_rft = std::min(first_rft, static_cast<std::size_t>(this_first_rft));
}
return first_rft;
}
bool RFTConfig::active(std::size_t report_step) const {
for (const auto& rft_pair : this->rft_config) {
if (this->rft(rft_pair.first, report_step))
return true;
}
for (const auto& plt_pair : this->plt_config) {
if (this->rft(plt_pair.first, report_step))
return true;
}
return false;
}
<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import logging
from contextlib import contextmanager
from ..errors import CompilerError
from .state import State
from .core import Context, Object, BoundObject, SoftScope, Scope, Type
from .module import Module
from .closure import Closure, ClosedLink, ClosedTarget
from .function import Function, FunctionType, FunctionInstance, Return
from .external_function import ExternalFunction
from .call import Call
from .method import Method, MethodType, MethodInstance
from .links import Link, BoundLink, ContextLink, Attribute
from .identifier import Identifier
from .modifiers import Constant, Reference
from .variable import Variable
from .assignment import Assignment
from .class_ import Class, Constructor
from .forward import ForwardObject, ForwardTarget
from .literal import Literal
from .branches import Loop, Break, Branch
from .void_type import VoidType
from .size_of import SizeOf
from . import stats
from . import util
from . import forward
def _verify(source, frontend, logger = logging.getLogger()):
logger.info("Parsing")
module = frontend.parse(source, logger)
logger.info("Verifying")<|fim▁hole|>
def compile(source, frontend, backend, logger = logging.getLogger(), opt_level = 0):
module = _verify(source, frontend, logger)
logger.info("Generating Code")
return backend.emit(module, logger, opt_level)
def run(source, frontend, backend, logger = logging.getLogger(), opt_level = 0):
module = _verify(source, frontend, logger)
logger.info("Running")
return backend.run(module)
def verify(module:Module, logger = logging.getLogger()):
# Set up the initial state before verifying
State.init(logger.getChild("lekvar"))
State.logger.info(module.context)
try:
module.verify()
except CompilerError as e:
e.format()
raise e
@contextmanager
def use(frontend, backend, logger = logging.getLogger()):
with useFrontend(frontend, logger), useBackend(backend, logger):
yield
@contextmanager
def useFrontend(frontend, logger = logging.getLogger()):
builtins = frontend.builtins(logger)
# Hack backend into frontend builtins
builtins.context.addChild(ForwardObject(builtins, "_builtins"))
try:
old_builtins = State.builtins
State.builtins = builtins
verify(builtins)
yield
finally:
State.builtins = old_builtins
@contextmanager
def useBackend(backend, logger = logging.getLogger()):
backend_builtins = backend.builtins(logger)
builtins = State.builtins.context["_builtins"]
with forward.target([(builtins, backend_builtins)], False):
yield<|fim▁end|> | verify(module, logger)
return module |
<|file_name|>WorkFrame.java<|end_file_name|><|fim▁begin|>package gui;
import java.awt.Dimension;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.Locale;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.WindowConstants;
public class WorkFrame extends JFrame {
/**
*
*/
private static final long serialVersionUID = -7783509968212805832L;
private final JFrame parent;
public static int width, height;
private static final File saveFile = new File("config" + File.separator + "windowResolutions");
/**
* used to save the resolution for specific panes
*/
private String name;
/**
*
* @param parent
* @param pane
* the content pane
* @param paneName
* required to save the panes resolution, should be unique
*/
public WorkFrame(JFrame parent, JComponent pane, String paneName) {
super();
setBounds(100, 100, WorkFrame.width, height);
setContentPane(pane);
setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
setLocationRelativeTo(null);
setResizable(true);
this.parent = parent;
name = paneName;
// must be called after the name is set!
Dimension dim = loadResolution();
if (dim != null)
setSize(dim);
addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e) {
saveResolution(getSize());
WorkFrame.this.parent.setEnabled(true);
WorkFrame.this.parent.setVisible(true);
}
});
parent.setEnabled(false);
parent.setVisible(false);
setVisible(true);
}
public void saveResolution(Dimension d) {
if (name == null) {
ErrorHandle.popUp("Name for this Window is not set");
}
try {
boolean found = false;
StringBuilder text = new StringBuilder();
NumberFormat formatter = new DecimalFormat("#0.00");
if (saveFile.exists()) {
BufferedReader read = new BufferedReader(new FileReader(saveFile));
String line;
while ((line = read.readLine()) != null) {
if (line.contains(name)) {
found = true;
line = name + ":" + formatter.format(d.getHeight()) + ":" + formatter.format(d.getWidth());
System.out.println(line);
}
text.append(line + System.lineSeparator());
}
read.close();
} else {
if (!saveFile.getParentFile().exists()) {
saveFile.getParentFile().mkdirs();
}
}
if (!saveFile.exists() || !found) {
text.append(name + ":" + formatter.format(d.getHeight()) + ":" + formatter.format(d.getWidth())
+ System.lineSeparator());
System.out.println(text.toString());
}
FileOutputStream fileOut = new FileOutputStream(saveFile);
fileOut.write(text.toString().getBytes());
fileOut.close();
} catch (IOException e) {
e.printStackTrace();
}
<|fim▁hole|> *
* @return saved Dimension if available
*/
/**
 * Load the previously saved window size for this pane from {@code saveFile}.
 *
 * Each line of the save file has the form {@code name:height:width}, with the
 * numbers written in the German locale (comma decimal separator) to match
 * {@code saveResolution(Dimension)}.
 *
 * @return the saved Dimension, or {@code null} if the file or a valid entry
 *         for this pane does not exist
 */
public Dimension loadResolution() {
    if (name == null) {
        ErrorHandle.popUp("Name for this Window is not set");
    }
    Dimension d = null;
    NumberFormat format = NumberFormat.getNumberInstance(Locale.GERMAN);
    format.setMaximumFractionDigits(2);
    if (!saveFile.exists())
        return d;
    // try-with-resources: the original leaked the reader when format.parse()
    // threw a ParseException inside the loop before close() was reached.
    try (BufferedReader read = new BufferedReader(new FileReader(saveFile))) {
        String line;
        while ((line = read.readLine()) != null) {
            // Match the exact entry; contains(name) also matched panes whose
            // name merely included this one as a substring.
            if (line.startsWith(name + ":")) {
                String[] str = line.split(":", 3);
                // Build into a local and assign only after both numbers parse,
                // so a ParseException no longer returns a bogus 0x0 Dimension.
                Dimension parsed = new Dimension();
                // stored order is height:width
                parsed.setSize(format.parse(str[2]).doubleValue(), format.parse(str[1]).doubleValue());
                return parsed;
            }
        }
    } catch (IOException | ParseException e) {
        e.printStackTrace();
    }
    return d;
}
}<|fim▁end|> | }
/**
|
<|file_name|>gdata-extension.js<|end_file_name|><|fim▁begin|>/*
Copyright 2010, Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above<|fim▁hole|> * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
var GdataExtension = {};
GdataExtension.isAuthorized = function() {
return $.cookie('authsub_token') !== null;
};
GdataExtension.showAuthorizationDialog = function(onAuthorized, onNotAuthorized) {
if (window.name) {
var windowName = window.name;
} else {
var windowName = "openrefine" + new Date().getTime();
window.name = windowName;
}
var callbackName = "cb" + new Date().getTime();
var callback = function(evt) {
delete window[callbackName];
if (GdataExtension.isAuthorized()) {
onAuthorized();
} else if (onNotAuthorized) {
onNotAuthorized();
}
window.setTimeout(function() { win.close(); }, 100);
};
window[callbackName] = callback;
var url = ModuleWirings['gdata'] + "authorize?winname=" + escape(windowName) + "&callback=" + escape(callbackName);
var win = window.open(url, "openrefinegdataauth", "resizable=1,width=800,height=600");
};<|fim▁end|> | copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution. |
<|file_name|>parsed-property.d.ts<|end_file_name|><|fim▁begin|>export interface ParsedProperty {
_name: string;
_type: string;
_targetProfiles?: string[];
_multiple?: boolean;
_required?: boolean;
<|fim▁hole|> _valueSetStrength?: string;
_valueSet?: string;
}<|fim▁end|> | _choice?: string;
_properties?: ParsedProperty[];
|
<|file_name|>string.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use eutil::slice_to_str;
use libc::{size_t, c_int, c_ushort,c_void};
use libc::types::os::arch::c95::wchar_t;
use mem::{new0,newarray0,delete,deletearray};
use std::char;
use std::mem;
use std::ptr;
use std::slice;
use std::string;
use types::{cef_string_utf16_t, cef_string_utf8_t, cef_string_wide_t};
use types::{cef_string_userfree_utf16_t, cef_string_userfree_utf8_t, cef_string_userfree_wide_t};
//cef_string
#[no_mangle]
extern "C" fn string_wide_dtor(str: *mut wchar_t) {
deletearray(str as *mut c_void)
}
#[no_mangle]
extern "C" fn string_utf8_dtor(str: *mut u8) {
deletearray(str as *mut c_void)
}
#[no_mangle]
extern "C" fn string_utf16_dtor(str: *mut c_ushort) {
deletearray(str as *mut c_void)
}
#[no_mangle]
pub extern "C" fn cef_string_userfree_wide_free(cs: *mut cef_string_userfree_wide_t) {
cef_string_wide_clear(cs);
delete(cs as *mut c_void)
}
#[no_mangle]
pub extern "C" fn cef_string_userfree_utf8_free(cs: *mut cef_string_userfree_utf8_t) {
cef_string_utf8_clear(cs);
delete(cs as *mut c_void)
}
#[no_mangle]
pub extern "C" fn cef_string_userfree_utf16_free(cs: *mut cef_string_userfree_utf16_t) {
cef_string_utf16_clear(cs);
delete(cs as *mut c_void)
}
#[no_mangle]
pub extern "C" fn cef_string_utf8_clear(cs: *mut cef_string_utf8_t) {
unsafe {
(*cs).dtor.map(|dtor| dtor((*cs).str));
(*cs).length = 0;
(*cs).str = 0 as *mut u8;
(*cs).dtor = mem::transmute(0 as *const u8);
}
}
#[no_mangle]
pub extern "C" fn cef_string_userfree_utf8_alloc() -> *mut cef_string_utf8_t {
#![inline(never)]
new0::<cef_string_utf8_t>(1)
}
#[no_mangle]
/// C API: populate `output` with a UTF-8 string of `src_len` bytes.
///
/// With `copy != 0` the bytes are duplicated into a freshly allocated,
/// NUL-terminated buffer owned by `output` (destructor installed); a null
/// or empty `src` simply leaves `output` cleared. With `copy == 0` the
/// struct borrows `src` directly and no destructor is installed.
/// Returns 1 on success, 0 only if the copy allocation fails.
pub extern "C" fn cef_string_utf8_set(src: *const u8, src_len: size_t, output: *mut cef_string_utf8_t, copy: c_int) -> c_int {
// Release whatever buffer `output` currently owns before reuse.
cef_string_utf8_clear(output);
unsafe {
if copy != 0 {
if !src.is_null() && src_len > 0 {
// +1 byte for the trailing NUL; newarray0 zero-initialises.
(*output).str = newarray0::<u8>(src_len + 1);
if (*output).str.is_null() {
return 0;
}
ptr::copy_memory((*output).str, src, src_len as uint);
(*output).length = src_len;
(*output).dtor = Some(string_utf8_dtor);
}
} else {
// Borrowing mode: alias the caller's buffer, null destructor.
(*output).str = mem::transmute(src);
(*output).length = src_len;
(*output).dtor = mem::transmute(0 as *const u8);
}
}
return 1;
}
#[no_mangle]
pub extern "C" fn cef_string_utf8_cmp(a: *const cef_string_utf8_t, b: *const cef_string_utf8_t) -> c_int {
unsafe {
slice::raw::buf_as_slice((*a).str as *const u8, (*a).length as uint, |astr:&[u8]| {
slice::raw::buf_as_slice((*b).str as *const u8, (*b).length as uint, |bstr:&[u8]| {
match astr.cmp(bstr) {
Less => -1,
Equal => 0,
Greater => 1
}
})
})
}
}
#[no_mangle]
pub extern "C" fn cef_string_utf8_to_utf16(src: *const u8, src_len: size_t, output: *mut cef_string_utf16_t) -> c_int {
slice_to_str(src, src_len as uint, |result| {
let conv = result.utf16_units().collect::<Vec<u16>>();
cef_string_utf16_set(conv.as_ptr(), conv.len() as size_t, output, 1);
1
})
}
#[no_mangle]
pub extern "C" fn cef_string_utf16_to_utf8(src: *const u16, src_len: size_t, output: *mut cef_string_utf8_t) -> c_int {
unsafe {
slice::raw::buf_as_slice(src, src_len as uint, |ustr| {
match string::String::from_utf16(ustr) {
Some(str) => {
cef_string_utf8_set(str.as_bytes().as_ptr(), str.len() as size_t, output, 1);
1 as c_int
},
None => 0 as c_int
}
})
}
}
#[no_mangle]
pub extern "C" fn cef_string_utf16_clear(cs: *mut cef_string_utf16_t) {
unsafe {
(*cs).dtor.map(|dtor| dtor((*cs).str));
(*cs).length = 0;
(*cs).str = 0 as *mut c_ushort;
(*cs).dtor = mem::transmute(0 as *const u8);
}
}
#[no_mangle]
pub extern "C" fn cef_string_userfree_utf16_alloc() -> *mut cef_string_utf16_t {
#![inline(never)]
new0::<cef_string_utf16_t>(1)
}
#[no_mangle]
pub extern "C" fn cef_string_utf16_set(src: *const c_ushort, src_len: size_t, output: *mut cef_string_utf16_t, copy: c_int) -> c_int {
cef_string_utf16_clear(output);
unsafe {
if copy != 0 {
if !src.is_null() && src_len > 0 {
(*output).str = newarray0::<c_ushort>(src_len + 1);
if (*output).str.is_null() {
return 0;
}
ptr::copy_memory((*output).str, src, src_len as uint);
(*output).length = src_len;
(*output).dtor = Some(string_utf16_dtor);
}
} else {
(*output).str = mem::transmute(src);
(*output).length = src_len;
(*output).dtor = mem::transmute(0 as *const u8);
}
}
return 1;
}
#[no_mangle]
pub extern "C" fn cef_string_utf16_cmp(a: *const cef_string_utf16_t, b: *const cef_string_utf16_t) -> c_int {
unsafe {
slice::raw::buf_as_slice(mem::transmute((*a).str), (*a).length as uint, |astr:&[u16]| {
slice::raw::buf_as_slice(mem::transmute((*b).str), (*b).length as uint, |bstr:&[u16]| {
match astr.cmp(bstr) {
Less => -1,
Equal => 0,
Greater => 1
}
})
})
}
}
#[no_mangle]
pub extern "C" fn cef_string_wide_clear(cs: *mut cef_string_wide_t) {
unsafe {
(*cs).dtor.map(|dtor| dtor((*cs).str));
(*cs).length = 0;
(*cs).str = 0 as *mut wchar_t;
(*cs).dtor = mem::transmute(0 as *const u8);
}
}
#[no_mangle]
pub extern "C" fn cef_string_userfree_wide_alloc() -> *mut cef_string_wide_t {
#![inline(never)]
new0::<cef_string_wide_t>(1)
}
#[no_mangle]
pub extern "C" fn cef_string_wide_set(src: *const wchar_t, src_len: size_t, output: *mut cef_string_wide_t, copy: c_int) -> c_int {
cef_string_wide_clear(output);
unsafe {
if copy != 0 {
if !src.is_null() && src_len > 0 {
(*output).str = newarray0::<wchar_t>(src_len + 1);
if (*output).str.is_null() {
return 0;
}
ptr::copy_memory((*output).str, src, src_len as uint);
(*output).length = src_len;
(*output).dtor = Some(string_wide_dtor);
}
} else {
(*output).str = mem::transmute(src);
(*output).length = src_len;
(*output).dtor = mem::transmute(0 as *const u8);
}
}
return 1;
}
#[no_mangle]
pub extern "C" fn cef_string_wide_cmp(a: *const cef_string_wide_t, b: *const cef_string_wide_t) -> c_int {
unsafe {
slice::raw::buf_as_slice((*a).str as *const wchar_t, (*a).length as uint, |astr:&[wchar_t]| {
slice::raw::buf_as_slice((*b).str as *const wchar_t, (*b).length as uint, |bstr:&[wchar_t]| {
match astr.cmp(bstr) {
Less => -1,
Equal => 0,
Greater => 1
}
})
})
}
}
#[no_mangle]
pub extern "C" fn cef_string_utf8_to_wide(src: *const u8, src_len: size_t, output: *mut cef_string_wide_t) -> c_int {
if mem::size_of::<wchar_t>() == mem::size_of::<u16>() {
return cef_string_utf8_to_utf16(src, src_len, output as *mut cef_string_utf16_t);
}
slice_to_str(src, src_len as uint, |result| {
let conv = result.chars().map(|c| c as u32).collect::<Vec<u32>>();
cef_string_wide_set(conv.as_ptr() as *const wchar_t, conv.len() as size_t, output, 1)
})
}
#[no_mangle]
pub extern "C" fn cef_string_wide_to_utf8(src: *const wchar_t, src_len: size_t, output: *mut cef_string_utf8_t) -> c_int {
if mem::size_of::<wchar_t>() == mem::size_of::<u16>() {
return cef_string_utf16_to_utf8(src as *const u16, src_len, output);
}
unsafe {
slice::raw::buf_as_slice(src, src_len as uint, |ustr| {
let conv = ustr.iter().map(|&c| char::from_u32(c as u32).unwrap_or('\uFFFD')).collect::<String>();
cef_string_utf8_set(conv.as_bytes().as_ptr(), conv.len() as size_t, output, 1)
})
}
}
#[no_mangle]
pub extern "C" fn cef_string_ascii_to_utf16(src: *const u8, src_len: size_t, output: *mut cef_string_utf16_t) -> c_int {
slice_to_str(src, src_len as uint, |result| {
let conv = result.utf16_units().collect::<Vec<u16>>();
cef_string_utf16_set(conv.as_ptr(), conv.len() as size_t, output, 1)
})
}
#[no_mangle]
pub extern "C" fn cef_string_ascii_to_wide(src: *const u8, src_len: size_t, output: *mut cef_string_wide_t) -> c_int {<|fim▁hole|> unsafe {
slice::raw::buf_as_slice(src, src_len as uint, |ustr| {
let conv = ustr.iter().map(|&c| c as u8).collect::<Vec<u8>>();
cef_string_wide_set(conv.as_ptr() as *const wchar_t, conv.len() as size_t, output, 1)
})
}
}<|fim▁end|> | |
<|file_name|>vultr_ssh_key_info.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2018, Yanis Guenane <[email protected]>
# (c) 2019, René Moser <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: vultr_ssh_key_info
short_description: Get infos about the Vultr SSH keys available.
description:
- Get infos about SSH keys available.
version_added: "2.9"
author:
- "Yanis Guenane (@Spredzy)"
- "René Moser (@resmo)"
extends_documentation_fragment: vultr
'''
EXAMPLES = r'''
- name: Get Vultr SSH keys infos
vultr_ssh_key_info:
register: result
- name: Print the infos
debug:
var: result.vultr_ssh_key_info
'''
RETURN = r'''
---
vultr_api:
description: Response from Vultr API with a few additions/modification
returned: success
type: complex
contains:
api_account:
description: Account used in the ini file to select the key
returned: success
type: str
sample: default
api_timeout:
description: Timeout used for the API requests
returned: success
type: int
sample: 60
api_retries:
description: Amount of max retries for the API requests
returned: success
type: int
sample: 5
api_endpoint:
description: Endpoint used for the API requests
returned: success
type: str
sample: "https://api.vultr.com"
vultr_ssh_key_info:
description: Response from Vultr API as list
returned: success
type: complex
contains:
id:
description: ID of the ssh key
returned: success
type: str
sample: 5904bc6ed9234
name:
description: Name of the ssh key
returned: success
type: str
sample: my ssh key
date_created:
description: Date the ssh key was created
returned: success
type: str
sample: "2017-08-26 12:47:48"
ssh_key:
description: SSH public key
returned: success
type: str
sample: "ssh-rsa AA... [email protected]"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vultr import (
Vultr,
vultr_argument_spec,
)
class AnsibleVultrSSHKeyInfo(Vultr):
def __init__(self, module):
super(AnsibleVultrSSHKeyInfo, self).__init__(module, "vultr_ssh_key_info")
self.returns = {<|fim▁hole|> 'date_created': dict(),
}
def get_sshkeys(self):
return self.api_query(path="/v1/sshkey/list")
def parse_keys_list(keys_list):
    """Flatten the API's SSHKEYID->key mapping into a plain list of keys.

    The Vultr /v1/sshkey/list endpoint returns a dict keyed by SSHKEYID;
    callers only need the values. Returns [] for a None or empty input.
    """
    if not keys_list:
        return []
    # list(values()) preserves order and avoids the original comprehension's
    # loop variable shadowing the builtin `id`.
    return list(keys_list.values())
def main():
# Entry point for the vultr_ssh_key_info Ansible module. No module-specific
# options: only the shared Vultr argument spec (api key, endpoint, retries,
# ...) is accepted, and check mode is trivially supported since the module
# only reads.
argument_spec = vultr_argument_spec()
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
# Query the API, flatten the id->key mapping into a list, and let
# get_result() wrap it in the standard vultr_api/vultr_ssh_key_info payload.
sshkey_info = AnsibleVultrSSHKeyInfo(module)
result = sshkey_info.get_result(parse_keys_list(sshkey_info.get_sshkeys()))
module.exit_json(**result)
if __name__ == '__main__':
main()<|fim▁end|> | 'SSHKEYID': dict(key='id'),
'name': dict(),
'ssh_key': dict(), |
<|file_name|>StringBufferHolder.java<|end_file_name|><|fim▁begin|>/**********************************************************************
Copyright (c) 2006 Erik Bengtson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
<|fim▁hole|> ...
**********************************************************************/
package org.jpox.samples.types.stringbuffer;
/**
* Object with a StringBuffer.
*/
public class StringBufferHolder
{
StringBuffer sb = new StringBuffer();
public StringBuffer getStringBuffer()
{
return sb;
}
public void setStringBuffer(StringBuffer sb)
{
this.sb = sb;
}
public void appendText(String text)
{
// Since DataNucleus doesn't support updates to the contents of a StringBuffer we replace it
// NOTE: sb.append(text) already mutates the old buffer in place; the copy
// exists solely so the field is reassigned to a *new* object, which is what
// the persistence layer detects as a change.
StringBuffer sb2 = new StringBuffer(sb.append(text));
sb = sb2;
}
public String getText()
{
return sb.toString();
}
}<|fim▁end|> | Contributors:
|
<|file_name|>StripeStream.hh<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information<|fim▁hole|> * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ORC_STRIPE_STREAM_HH
#define ORC_STRIPE_STREAM_HH
#include "orc/Int128.hh"
#include "orc/OrcFile.hh"
#include "orc/Reader.hh"
#include "Timezone.hh"
#include "TypeImpl.hh"
namespace orc {
class RowReaderImpl;
/**
* StripeStream Implementation
*/
class StripeStreamsImpl: public StripeStreams {
private:
const RowReaderImpl& reader;
const proto::StripeInformation& stripeInfo;
const proto::StripeFooter& footer;
const uint64_t stripeIndex;
const uint64_t stripeStart;
InputStream& input;
const Timezone& writerTimezone;
const Timezone& readerTimezone;
public:
StripeStreamsImpl(const RowReaderImpl& reader, uint64_t index,
const proto::StripeInformation& stripeInfo,
const proto::StripeFooter& footer,
uint64_t stripeStart,
InputStream& input,
const Timezone& writerTimezone,
const Timezone& readerTimezone);
virtual ~StripeStreamsImpl() override;
virtual const std::vector<bool> getSelectedColumns() const override;
virtual proto::ColumnEncoding getEncoding(uint64_t columnId
) const override;
virtual std::unique_ptr<SeekableInputStream>
getStream(uint64_t columnId,
proto::Stream_Kind kind,
bool shouldStream) const override;
MemoryPool& getMemoryPool() const override;
const Timezone& getWriterTimezone() const override;
const Timezone& getReaderTimezone() const override;
std::ostream* getErrorStream() const override;
bool getThrowOnHive11DecimalOverflow() const override;
int32_t getForcedScaleOnHive11Decimal() const override;
};
/**
* StreamInformation Implementation
*/
class StreamInformationImpl: public StreamInformation {
private:
StreamKind kind;
uint64_t column;
uint64_t offset;
uint64_t length;
public:
StreamInformationImpl(uint64_t _offset,
const proto::Stream& stream
): kind(static_cast<StreamKind>(stream.kind())),
column(stream.column()),
offset(_offset),
length(stream.length()) {
// PASS
}
~StreamInformationImpl() override;
StreamKind getKind() const override {
return kind;
}
uint64_t getColumnId() const override {
return column;
}
uint64_t getOffset() const override {
return offset;
}
uint64_t getLength() const override {
return length;
}
};
/**
* StripeInformation Implementation
*/
class StripeInformationImpl : public StripeInformation {
uint64_t offset;
uint64_t indexLength;
uint64_t dataLength;
uint64_t footerLength;
uint64_t numRows;
InputStream* stream;
MemoryPool& memory;
CompressionKind compression;
uint64_t blockSize;
mutable std::unique_ptr<proto::StripeFooter> stripeFooter;
void ensureStripeFooterLoaded() const;
public:
StripeInformationImpl(uint64_t _offset,
uint64_t _indexLength,
uint64_t _dataLength,
uint64_t _footerLength,
uint64_t _numRows,
InputStream* _stream,
MemoryPool& _memory,
CompressionKind _compression,
uint64_t _blockSize
) : offset(_offset),
indexLength(_indexLength),
dataLength(_dataLength),
footerLength(_footerLength),
numRows(_numRows),
stream(_stream),
memory(_memory),
compression(_compression),
blockSize(_blockSize) {
// PASS
}
virtual ~StripeInformationImpl() override {
// PASS
}
uint64_t getOffset() const override {
return offset;
}
uint64_t getLength() const override {
return indexLength + dataLength + footerLength;
}
uint64_t getIndexLength() const override {
return indexLength;
}
uint64_t getDataLength()const override {
return dataLength;
}
uint64_t getFooterLength() const override {
return footerLength;
}
uint64_t getNumberOfRows() const override {
return numRows;
}
uint64_t getNumberOfStreams() const override {
ensureStripeFooterLoaded();
return static_cast<uint64_t>(stripeFooter->streams_size());
}
std::unique_ptr<StreamInformation> getStreamInformation(uint64_t streamId
) const override;
ColumnEncodingKind getColumnEncoding(uint64_t colId) const override {
ensureStripeFooterLoaded();
return static_cast<ColumnEncodingKind>(stripeFooter->
columns(static_cast<int>(colId))
.kind());
}
uint64_t getDictionarySize(uint64_t colId) const override {
ensureStripeFooterLoaded();
return static_cast<ColumnEncodingKind>(stripeFooter->
columns(static_cast<int>(colId))
.dictionarysize());
}
const std::string& getWriterTimezone() const override {
ensureStripeFooterLoaded();
return stripeFooter->writertimezone();
}
};
}
#endif<|fim▁end|> | * regarding copyright ownership. The ASF licenses this file |
<|file_name|>Widgets.py<|end_file_name|><|fim▁begin|># This file is part of Pooky.
# Copyright (C) 2013 Fcrh <[email protected]>
#
# Pooky is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pooky is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pooky. If not, see <http://www.gnu.org/licenses/>.
from PyQt4 import QtGui
class SingletonWidget(QtGui.QWidget):
# Base widget that allows at most one live instance per (sub)class.
# The double-underscore attribute is name-mangled to
# _SingletonWidget__instance; because it is read and written via
# self.__class__, each subclass shadows the inherited None with its own
# instance, so subclasses are tracked independently.
__instance = None
def __init__(self, *args):
super().__init__(*args)
# A second construction of the same concrete class is a programming error.
if self.__class__.__instance is not None:
raise RuntimeError("Singleton check failed.")
else:
self.__class__.__instance = self
class Palette(SingletonWidget):
def __init__(self, *args):
super().__init__(*args)
class Preference(SingletonWidget):
def __init__(self, *args):
super().__init__(*args)
QtGui.QLabel('Almost Empty XD.', self)
self.resize(640, 480)
self.setWindowTitle('Preference')
class About(SingletonWidget):<|fim▁hole|> mainlayout = QtGui.QVBoxLayout()
mainlayout.addWidget(self.initContent(), True)
mainlayout.addLayout(self.initButtonLayout(), True)
self.setLayout(mainlayout)
self.setWindowTitle('About Pooky')
self.adjustSize()
def initButtonLayout(self):
btnlayout = QtGui.QHBoxLayout()
licenseBtn = QtGui.QPushButton('License')
def licenseCallBack():
raise RuntimeError("Not implement yet.")
licenseBtn.pressed.connect(licenseCallBack)
btnlayout.addWidget(licenseBtn)
closeBtn = QtGui.QPushButton('Close')
def closeCallBack():
self.lower()
self.hide()
closeBtn.pressed.connect(closeCallBack)
btnlayout.addWidget(closeBtn)
return btnlayout
def initContent(self):
return QtGui.QWidget()<|fim▁end|> |
def __init__(self, *args):
super().__init__(*args)
|
<|file_name|>test-2.0.0.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | console.log('load version 2.0.0'); |
<|file_name|>projects.js<|end_file_name|><|fim▁begin|><|fim▁hole|>export default () => <h1>Projects</h1><|fim▁end|> | |
<|file_name|>suite_data.go<|end_file_name|><|fim▁begin|>package suite_init
type SuiteData struct {
*StubsData
*SynchronizedSuiteCallbacksData
*WerfBinaryData
*ProjectNameData<|fim▁hole|>}
// SetupStubs wires the shared stubs fixture into the suite data.
// It always returns true (literal), so callers can use it inside
// boolean var-initialisation chains during suite setup.
func (data *SuiteData) SetupStubs(setupData *StubsData) bool {
data.StubsData = setupData
return true
}
func (data *SuiteData) SetupSynchronizedSuiteCallbacks(setupData *SynchronizedSuiteCallbacksData) bool {
data.SynchronizedSuiteCallbacksData = setupData
return true
}
func (data *SuiteData) SetupWerfBinary(setupData *WerfBinaryData) bool {
data.WerfBinaryData = setupData
return true
}
func (data *SuiteData) SetupProjectName(setupData *ProjectNameData) bool {
data.ProjectNameData = setupData
return true
}
func (data *SuiteData) SetupK8sDockerRegistry(setupData *K8sDockerRegistryData) bool {
data.K8sDockerRegistryData = setupData
return true
}
func (data *SuiteData) SetupTmp(setupData *TmpDirData) bool {
data.TmpDirData = setupData
return true
}
func (data *SuiteData) SetupContainerRegistryPerImplementation(setupData *ContainerRegistryPerImplementationData) bool {
data.ContainerRegistryPerImplementationData = setupData
return true
}<|fim▁end|> | *K8sDockerRegistryData
*TmpDirData
*ContainerRegistryPerImplementationData |
<|file_name|>filereader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use base64;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
use dom::bindings::codegen::Bindings::FileReaderBinding::{self, FileReaderConstants, FileReaderMethods};
use dom::bindings::codegen::UnionTypes::StringOrObject;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{MutNullableJS, Root};
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::blob::Blob;
use dom::domexception::{DOMErrorName, DOMException};
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::progressevent::ProgressEvent;
use dom_struct::dom_struct;
use encoding::all::UTF_8;
use encoding::label::encoding_from_whatwg_label;
use encoding::types::{DecoderTrap, EncodingRef};
use hyper::mime::{Attr, Mime};
use js::jsapi::Heap;
use js::jsapi::JSAutoCompartment;
use js::jsapi::JSContext;
use js::jsval::{self, JSVal};
use js::typedarray::{ArrayBuffer, CreateWith};
use script_thread::RunnableWrapper;
use servo_atoms::Atom;
use std::cell::Cell;
use std::ptr;
use std::sync::Arc;
use std::thread;
use task_source::TaskSource;
use task_source::file_reading::{FileReadingTaskSource, FileReadingRunnable, FileReadingTask};
#[derive(Clone, Copy, HeapSizeOf, JSTraceable, PartialEq)]
pub enum FileReaderFunction {
ReadAsText,
ReadAsDataUrl,
ReadAsArrayBuffer,
}
pub type TrustedFileReader = Trusted<FileReader>;
#[derive(Clone, HeapSizeOf)]
pub struct ReadMetaData {
pub blobtype: String,
pub label: Option<String>,
pub function: FileReaderFunction
}
impl ReadMetaData {
pub fn new(blobtype: String,
label: Option<String>, function: FileReaderFunction) -> ReadMetaData {
ReadMetaData {
blobtype: blobtype,
label: label,
function: function,
}
}
}
#[derive(Clone, Copy, HeapSizeOf, JSTraceable, PartialEq)]
pub struct GenerationId(u32);
#[repr(u16)]
#[derive(Clone, Copy, Debug, HeapSizeOf, JSTraceable, PartialEq)]
pub enum FileReaderReadyState {
Empty = FileReaderConstants::EMPTY,
Loading = FileReaderConstants::LOADING,
Done = FileReaderConstants::DONE,
}
#[derive(HeapSizeOf, JSTraceable)]
pub enum FileReaderResult {
ArrayBuffer(Heap<JSVal>),
String(DOMString),
}
#[dom_struct]
pub struct FileReader {
eventtarget: EventTarget,
ready_state: Cell<FileReaderReadyState>,
error: MutNullableJS<DOMException>,
result: DOMRefCell<Option<FileReaderResult>>,
generation_id: Cell<GenerationId>,
}
impl FileReader {
pub fn new_inherited() -> FileReader {
FileReader {
eventtarget: EventTarget::new_inherited(),
ready_state: Cell::new(FileReaderReadyState::Empty),
error: MutNullableJS::new(None),
result: DOMRefCell::new(None),
generation_id: Cell::new(GenerationId(0)),
}
}
pub fn new(global: &GlobalScope) -> Root<FileReader> {
reflect_dom_object(box FileReader::new_inherited(),
global, FileReaderBinding::Wrap)
}
pub fn Constructor(global: &GlobalScope) -> Fallible<Root<FileReader>> {
Ok(FileReader::new(global))
}
//https://w3c.github.io/FileAPI/#dfn-error-steps
pub fn process_read_error(filereader: TrustedFileReader, gen_id: GenerationId, error: DOMErrorName) {
let fr = filereader.root();
macro_rules! return_on_abort(
() => (
if gen_id != fr.generation_id.get() {
return
}
);
);
return_on_abort!();
// Step 1
fr.change_ready_state(FileReaderReadyState::Done);
*fr.result.borrow_mut() = None;
let exception = DOMException::new(&fr.global(), error);
fr.error.set(Some(&exception));
fr.dispatch_progress_event(atom!("error"), 0, None);
return_on_abort!();
// Step 3
fr.dispatch_progress_event(atom!("loadend"), 0, None);
return_on_abort!();
// Step 4
fr.terminate_ongoing_reading();
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
pub fn process_read_data(filereader: TrustedFileReader, gen_id: GenerationId) {
let fr = filereader.root();
macro_rules! return_on_abort(
() => (
if gen_id != fr.generation_id.get() {
return
}
);
);
return_on_abort!();
//FIXME Step 7 send current progress
fr.dispatch_progress_event(atom!("progress"), 0, None);
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
pub fn process_read(filereader: TrustedFileReader, gen_id: GenerationId) {
let fr = filereader.root();
macro_rules! return_on_abort(
() => (
if gen_id != fr.generation_id.get() {
return
}
);
);
return_on_abort!();
// Step 6
fr.dispatch_progress_event(atom!("loadstart"), 0, None);
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
#[allow(unsafe_code)]
pub fn process_read_eof(filereader: TrustedFileReader, gen_id: GenerationId,
data: ReadMetaData, blob_contents: Arc<Vec<u8>>) {
let fr = filereader.root();
macro_rules! return_on_abort(
() => (
if gen_id != fr.generation_id.get() {
return
}
);
);
return_on_abort!();
// Step 8.1
fr.change_ready_state(FileReaderReadyState::Done);
// Step 8.2
match data.function {
FileReaderFunction::ReadAsDataUrl =>
FileReader::perform_readasdataurl(&fr.result, data, &blob_contents),
FileReaderFunction::ReadAsText =>
FileReader::perform_readastext(&fr.result, data, &blob_contents),
FileReaderFunction::ReadAsArrayBuffer => {
let _ac = JSAutoCompartment::new(fr.global().get_cx(), *fr.reflector().get_jsobject());
FileReader::perform_readasarraybuffer(&fr.result, fr.global().get_cx(), data, &blob_contents)
},
};
// Step 8.3
fr.dispatch_progress_event(atom!("load"), 0, None);
return_on_abort!();
// Step 8.4
if fr.ready_state.get() != FileReaderReadyState::Loading {
fr.dispatch_progress_event(atom!("loadend"), 0, None);
}
return_on_abort!();
// Step 9
fr.terminate_ongoing_reading();
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
// Decodes the blob bytes to a string and stores it as this reader's result.
fn perform_readastext(result: &DOMRefCell<Option<FileReaderResult>>, data: ReadMetaData, blob_bytes: &[u8]) {
    let blob_label = &data.label;
    let blob_type = &data.blobtype;

    //https://w3c.github.io/FileAPI/#encoding-determination
    // Steps 1 & 2 & 3: the label passed to readAsText() wins, if it names a
    // known WHATWG encoding.
    let mut encoding = blob_label.as_ref()
        .map(|string| &**string)
        .and_then(encoding_from_whatwg_label);

    // Step 4 & 5: otherwise fall back to the charset parameter of the blob's
    // MIME type, if the type parses and carries one.
    encoding = encoding.or_else(|| {
        let resultmime = blob_type.parse::<Mime>().ok();
        resultmime.and_then(|Mime(_, _, ref parameters)| {
            parameters.iter()
                .find(|&&(ref k, _)| &Attr::Charset == k)
                .and_then(|&(_, ref v)| encoding_from_whatwg_label(&v.to_string()))
        })
    });

    // Step 6: final fallback is UTF-8.
    let enc = encoding.unwrap_or(UTF_8 as EncodingRef);

    let convert = blob_bytes;
    // Step 7: with DecoderTrap::Replace invalid sequences become U+FFFD, so
    // decode should not fail - presumably why unwrap() is considered safe here.
    let output = enc.decode(convert, DecoderTrap::Replace).unwrap();
    *result.borrow_mut() = Some(FileReaderResult::String(DOMString::from(output)));
}
//https://w3c.github.io/FileAPI/#dfn-readAsDataURL
// Encodes the blob bytes as a base64 data: URL and stores it as the result.
fn perform_readasdataurl(result: &DOMRefCell<Option<FileReaderResult>>, data: ReadMetaData, bytes: &[u8]) {
    let encoded = base64::encode(bytes);
    // An empty blob type yields "data:base64,..."; otherwise the MIME type is
    // spliced in between the scheme and the payload.
    let url = match data.blobtype.is_empty() {
        true => format!("data:base64,{}", encoded),
        false => format!("data:{};base64,{}", data.blobtype, encoded),
    };
    *result.borrow_mut() = Some(FileReaderResult::String(DOMString::from(url)));
}
// https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
// Copies the blob bytes into a fresh JS ArrayBuffer and stores it as the result.
#[allow(unsafe_code)]
fn perform_readasarraybuffer(result: &DOMRefCell<Option<FileReaderResult>>,
                             cx: *mut JSContext, _: ReadMetaData, bytes: &[u8]) {
    unsafe {
        // Root the new buffer so it survives any GC triggered below.
        rooted!(in(cx) let mut array_buffer = ptr::null_mut());
        assert!(ArrayBuffer::create(cx, CreateWith::Slice(bytes), array_buffer.handle_mut()).is_ok());

        // Install an empty Heap first, then set it in place - presumably so the
        // Heap is at its final address before its write barrier runs; confirm
        // against the Heap::set documentation.
        *result.borrow_mut() = Some(FileReaderResult::ArrayBuffer(Heap::default()));

        if let Some(FileReaderResult::ArrayBuffer(ref mut heap)) = *result.borrow_mut() {
            heap.set(jsval::ObjectValue(array_buffer.get()));
        };
    }
}
}
impl FileReaderMethods for FileReader {
// https://w3c.github.io/FileAPI/#dfn-onloadstart
event_handler!(loadstart, GetOnloadstart, SetOnloadstart);
// https://w3c.github.io/FileAPI/#dfn-onprogress
event_handler!(progress, GetOnprogress, SetOnprogress);
// https://w3c.github.io/FileAPI/#dfn-onload<|fim▁hole|> event_handler!(abort, GetOnabort, SetOnabort);
// https://w3c.github.io/FileAPI/#dfn-onerror
event_handler!(error, GetOnerror, SetOnerror);
// https://w3c.github.io/FileAPI/#dfn-onloadend
event_handler!(loadend, GetOnloadend, SetOnloadend);
// https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
fn ReadAsArrayBuffer(&self, blob: &Blob) -> ErrorResult {
self.read(FileReaderFunction::ReadAsArrayBuffer, blob, None)
}
// https://w3c.github.io/FileAPI/#dfn-readAsDataURL
fn ReadAsDataURL(&self, blob: &Blob) -> ErrorResult {
self.read(FileReaderFunction::ReadAsDataUrl, blob, None)
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
fn ReadAsText(&self, blob: &Blob, label: Option<DOMString>) -> ErrorResult {
self.read(FileReaderFunction::ReadAsText, blob, label)
}
// https://w3c.github.io/FileAPI/#dfn-abort
fn Abort(&self) {
// Step 2
if self.ready_state.get() == FileReaderReadyState::Loading {
self.change_ready_state(FileReaderReadyState::Done);
}
// Steps 1 & 3
*self.result.borrow_mut() = None;
let exception = DOMException::new(&self.global(), DOMErrorName::AbortError);
self.error.set(Some(&exception));
self.terminate_ongoing_reading();
// Steps 5 & 6
self.dispatch_progress_event(atom!("abort"), 0, None);
self.dispatch_progress_event(atom!("loadend"), 0, None);
}
// https://w3c.github.io/FileAPI/#dfn-error
fn GetError(&self) -> Option<Root<DOMException>> {
self.error.get()
}
#[allow(unsafe_code)]
// https://w3c.github.io/FileAPI/#dfn-result
unsafe fn GetResult(&self, _: *mut JSContext) -> Option<StringOrObject> {
self.result.borrow().as_ref().map(|r| match *r {
FileReaderResult::String(ref string) =>
StringOrObject::String(string.clone()),
FileReaderResult::ArrayBuffer(ref arr_buffer) => {
StringOrObject::Object(Heap::new((*arr_buffer.ptr.get()).to_object()))
}
})
}
// https://w3c.github.io/FileAPI/#dfn-readyState
fn ReadyState(&self) -> u16 {
self.ready_state.get() as u16
}
}
impl FileReader {
    // Fires a ProgressEvent of the given type at this FileReader. A `total`
    // of None means lengthComputable is false and total is reported as 0.
    fn dispatch_progress_event(&self, type_: Atom, loaded: u64, total: Option<u64>) {
        let progressevent = ProgressEvent::new(&self.global(),
            type_, EventBubbles::DoesNotBubble, EventCancelable::NotCancelable,
            total.is_some(), loaded, total.unwrap_or(0));
        progressevent.upcast::<Event>().fire(self.upcast());
    }

    // Invalidates all queued read tasks: each task captured the generation id
    // current when it was queued and bails out if the counter has moved on.
    fn terminate_ongoing_reading(&self) {
        let GenerationId(prev_id) = self.generation_id.get();
        self.generation_id.set(GenerationId(prev_id + 1));
    }

    // Common entry point for ReadAsArrayBuffer/ReadAsDataURL/ReadAsText:
    // snapshots the blob's bytes and queues the read steps from a dedicated thread.
    fn read(&self, function: FileReaderFunction, blob: &Blob, label: Option<DOMString>) -> ErrorResult {
        // Step 1: only one read may be in flight per FileReader.
        if self.ready_state.get() == FileReaderReadyState::Loading {
            return Err(Error::InvalidState);
        }

        // Step 2
        self.change_ready_state(FileReaderReadyState::Loading);

        // Step 3
        // NOTE(review): a failed get_bytes() is silently treated as an empty
        // blob rather than surfacing a read error - confirm this is intended.
        let blob_contents = Arc::new(blob.get_bytes().unwrap_or(vec![]));

        let type_ = blob.Type();

        let load_data = ReadMetaData::new(String::from(type_), label.map(String::from), function);

        // Trusted<> keeps a cross-thread reference back to this reader.
        let fr = Trusted::new(self);
        let gen_id = self.generation_id.get();

        let global = self.global();
        let wrapper = global.get_runnable_wrapper();
        let task_source = global.file_reading_task_source();

        thread::Builder::new().name("file reader async operation".to_owned()).spawn(move || {
            perform_annotated_read_operation(gen_id, load_data, blob_contents, fr, task_source, wrapper)
        }).expect("Thread spawning failed");

        Ok(())
    }

    fn change_ready_state(&self, state: FileReaderReadyState) {
        self.ready_state.set(state);
    }
}
// https://w3c.github.io/FileAPI/#thread-read-operation
fn perform_annotated_read_operation(gen_id: GenerationId,
data: ReadMetaData,
blob_contents: Arc<Vec<u8>>,
filereader: TrustedFileReader,
task_source: FileReadingTaskSource,
wrapper: RunnableWrapper) {
// Step 4
let task = FileReadingRunnable::new(FileReadingTask::ProcessRead(filereader.clone(), gen_id));
task_source.queue_with_wrapper(task, &wrapper).unwrap();
let task = FileReadingRunnable::new(FileReadingTask::ProcessReadData(filereader.clone(), gen_id));
task_source.queue_with_wrapper(task, &wrapper).unwrap();
let task = FileReadingRunnable::new(FileReadingTask::ProcessReadEOF(filereader, gen_id, data, blob_contents));
task_source.queue_with_wrapper(task, &wrapper).unwrap();
}<|fim▁end|> | event_handler!(load, GetOnload, SetOnload);
// https://w3c.github.io/FileAPI/#dfn-onabort |
<|file_name|>make.js<|end_file_name|><|fim▁begin|>import getDevTool from './devtool'
import getTarget from './target'
import getEntry from './entry'
import getOutput from './output'
import getResolve from './resolve'
import getResolveLoader from './resolveLoader'
import getModule from './module'
import getExternals from './externals'
import getPlugins from './plugins'
import getPostcss from './postcss'
import getNode from './node'
export default function make(name) {
if(typeof name !== 'string')
throw new Error('Name is required.')
return { name
, context: __dirname
, cache: true
, target: getTarget(name)
, devtool: getDevTool(name)
, entry: getEntry(name)
, output: getOutput(name)
, resolve: getResolve(name)
, resolveLoader: getResolveLoader(name)
, module: getModule(name)
, externals: getExternals(name)
, plugins: getPlugins(name)
<|fim▁hole|> , node: getNode(name)
, postcss: getPostcss(name)
}
}<|fim▁end|> | |
<|file_name|>dialogue.py<|end_file_name|><|fim▁begin|>import numpy as np
from numpy import cumsum, sum, searchsorted
from numpy.random import rand
import math
import utils
import core.sentence as sentence
import core.markovchain as mc
import logging
logger = logging.getLogger(__name__)
# Dialogue making class. Need to review where to return a string, where to return a list of tokens, etc.
# setters: list of speakers, pronouns, priors etc.
# random transitions
# Internal: build list of structures:
# e.g.{:speaker_name "Alice", :speaker_pronoun "she", :speaker_str "she", :speech_verb "said", :position "end"}
# Then end with fn that maps that out to a suitable string
# e.g. "<SPEECH>, she said."
# External bit then replaces <SPEECH> with a markov-chain-generated sentence (or several).
class dialogue_maker(object):
"""Class to handle creating dialogue based on a list of speakers and a sentence generator."""
    def __init__(self, names, pronouns, mc):
        """Set up speakers, random speaker-transition weights and speech-act priors.

        names/pronouns are parallel sequences describing the speakers; mc is
        the markov-chain sentence generator used to produce the speech text.
        """
        self.speakers = [{"name": n, "pronoun": p} for n, p in list(zip(names, pronouns))]
        self._transitions = self.make_transition_probs()
        # Speech verbs and their (unnormalised) selection weights: "said" dominates.
        self._speech_acts = ["said", "whispered", "shouted", "cried"]
        self._acts_transitions = [25, 2, 2, 2]
        self.mc = mc
        # self.seeds = seeds
        # Per-speaker target sentence length, drawn once at construction.
        self.target_len = np.random.randint(5, 50, size=len(names))  # rough words per sentence
    def make_transition_probs(self):
        """Make transition matrix between speakers, with random symmetric biases added in"""
        n = len(self.speakers)  # TODO why this line ???
        # Base affinities: random weights in 1..5, symmetrised so that A->B and
        # B->A start from the same mutual weight.
        transitions = np.random.randint(5, size=(n, n)) + 1
        transitions += transitions.transpose()
        # Boost roughly n/2 random (possibly repeated) speaker pairs, slightly
        # asymmetrically, so some exchanges dominate the conversation.
        for i in range(0, math.floor(n / 2)):
            s1 = np.random.randint(n)
            s2 = np.random.randint(n)
            transitions[s1][s2] += 10
            transitions[s2][s1] += 8
        return(transitions)

    def after(self, speaker_id):
        """Pick next person to speak"""
        # Weighted draw: sample a uniform point on the row's cumulative sum
        # (inverse-CDF sampling over the transition weights).
        row = self._transitions[speaker_id]
        sucessor = searchsorted(cumsum(row), rand() * sum(row))
        return sucessor

    def speaker_sequence(self, speaker_id, n):
        """Random walk through transitions matrix to produce a sequence of speaker ids"""
        seq = []
        for i in range(n):
            seq.append(speaker_id)
            speaker_id = self.after(speaker_id)
        return seq

    def speech_sequence(self, n):
        """Draw n speech verbs ("said", "whispered", ...) weighted by _acts_transitions."""
        speech_acts_seq = []
        next_speech_id = 0
        for i in range(n):
            # Same inverse-CDF sampling as in after(), over the act weights.
            next_speech_id = searchsorted(cumsum(self._acts_transitions), rand() * sum(self._acts_transitions))
            speech_acts_seq.append(self._speech_acts[next_speech_id])
        return speech_acts_seq
    def seq_to_names(self, sequence):
        """Map a sequence of speaker ids back to the speaker dicts."""
        return([self.speakers[id] for id in sequence])

    def make_speech_bits(self, seeds):
        """Build one speech fragment per seed word.

        Each fragment records who speaks, how ("said", ...), and the generated
        sentence tokens; 'paragraph' is refined later by simplify().
        """
        n = len(seeds)
        speaker_id = self.speaker_sequence(0, n)
        speech_acts_seq = self.speech_sequence(n)
        bits = []
        ss = sentence.SentenceMaker(self.mc)
        for i in range(n):
            # Sentence length is chosen per speaker, not per fragment.
            sent_toks = ss.generate_sentence_tokens([seeds[i]], self.target_len[speaker_id[i]])
            sent_toks = ss.polish_sentence(sent_toks)
            # NOTE(review): speech_acts_seq is indexed by speaker id, not by i,
            # so most of the n drawn speech acts are never used - confirm intent.
            bits.append({'speaker_name': self.speakers[speaker_id[i]]["name"],
                         'speech_act': speech_acts_seq[speaker_id[i]],
                         'seq_id': speaker_id[i],
                         'speech': sent_toks,
                         'paragraph': True})
        return(bits)

    def simplify(self, seq_map):
        "Take a sequence of speech parts and make more natural by removing name reptition etc."
        for i in range(0, len(seq_map)):
            seq_map[i]['speaker_str'] = seq_map[i]['speaker_name']  # default
            # Same speaker contiues:
            if i > 0 and seq_map[i]['seq_id'] == seq_map[i - 1]['seq_id']:
                # Consecutive turns by one speaker merge into the running quote.
                seq_map[i]['speaker_str'] = ""
                seq_map[i]['speech_act'] = ""
                seq_map[i]['paragraph'] = False
            else:
                # A-B-A alternation: the returning speaker needs no fresh
                # attribution, but still starts a new paragraph.
                if i > 1 and seq_map[i]['seq_id'] == seq_map[i - 2]['seq_id'] \
                        and seq_map[i]['seq_id'] != seq_map[i - 1]['seq_id']:
                    seq_map[i]['speaker_str'] = ""
                    seq_map[i]['speech_act'] = ""
                    seq_map[i]['paragraph'] = True
        return seq_map
def report_seq(self, seq_map):<|fim▁hole|> """Convert sequence of speeches to a tokens."""
sents = []
for i in range(0, len(seq_map)):
if seq_map[i]['paragraph']:
# text += "\n "
quote_start = '"'
else:
quote_start = ""
if i > len(seq_map) - 2 or seq_map[i + 1]['paragraph']:
quote_end = '"'
else:
quote_end = " "
if len(seq_map[i]['speech_act']) > 0:
speech_act = seq_map[i]['speech_act'] + ","
else:
speech_act = seq_map[i]['speech_act']
tokens = [utils.START_TOKEN]
tokens.append(seq_map[i]['speaker_str'])
tokens.append(speech_act)
tokens.append(quote_start)
tokens.extend(seq_map[i]['speech'][1:-1])
tokens.append(quote_end)
tokens.append(utils.END_TOKEN)
sents.append(tokens)
return sents
    def make_dialogue(self, seeds):
        """Returns a list of sentences, each being a list of tokens."""
        # Pipeline: generate raw speech bits, smooth attribution, render tokens.
        acts = self.make_speech_bits(seeds)
        seq_map = self.simplify(acts)
        sents = self.report_seq(seq_map)
        return(sents)
def dev():
import knowledge.names as names
mcW = mc.MarkovChain()
nm = names.NameMaker()
speakers = [nm.random_person() for i in range(1, 4)]
dm = dialogue_maker([n['name'] for n in speakers], [n['pronoun'] for n in speakers], mcW)
dlg = dm.make_dialogue(["dog", "run", "spot"])
print(dlg)<|fim▁end|> | |
<|file_name|>componentMethodsJsDocHandler.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
*/
import parseJsDoc from '../utils/parseJsDoc';
import type Documentation from '../Documentation';
// Merges two objects ignoring null/undefined.
// Returns null only when both inputs are null/undefined; otherwise obj2's
// non-null properties override obj1's.
function merge(obj1, obj2) {
  if (obj1 == null && obj2 == null) {
    return null;
  }
  const merged = Object.assign({}, obj1);
  for (const key in obj2) {
    const value = obj2[key];
    if (value == null) {
      continue;
    }
    merged[key] = value;
  }
  return merged;
}
/**
* Extract info from the methods jsdoc blocks. Must be run after
* flowComponentMethodsHandler.
*/
export default function componentMethodsJsDocHandler(
documentation: Documentation
) {
let methods = documentation.get('methods');
if (!methods) {
return;<|fim▁hole|> }
methods = methods.map(method => {
if (!method.docblock) {
return method;
}
const jsDoc = parseJsDoc(method.docblock);
const returns = merge(jsDoc.returns, method.returns);
const params = method.params.map(param => {
const jsDocParam = jsDoc.params.find(p => p.name === param.name);
return merge(jsDocParam, param);
});
return {
...method,
description: jsDoc.description || null,
returns,
params,
};
});
documentation.set('methods', methods);
}<|fim▁end|> | |
<|file_name|>Ref_EventConsumerBean.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2006 - 2012 LinogistiX GmbH
*
* www.linogistix.com
*
* Project myWMS-LOS
*/<|fim▁hole|>import de.linogistix.los.customization.LOSEventConsumerBean;
import de.linogistix.los.util.event.LOSEventConsumer;
/**
* @author krane
*
*/
@Stateless
public class Ref_EventConsumerBean extends LOSEventConsumerBean implements LOSEventConsumer {
}<|fim▁end|> | package de.linogistix.los.reference.customization.common;
import javax.ejb.Stateless;
|
<|file_name|>fullkey_array.cpp<|end_file_name|><|fim▁begin|>#include "fullkey_array.h"
#include "../../base.h"
namespace md5db
{
// Nothing to set up here: the per-bucket fullkey_t members are opened in open().
fullkey_array_t::fullkey_array_t ( )
{
}

// Make sure every bucket file is closed when the array is destroyed.
fullkey_array_t::~ fullkey_array_t ( )
{
    close ();
}
// Close every bucket file in turn.
void fullkey_array_t::close ( )
{
    for ( int i = 0; i < COUNT_OF ( m_fullkeys ); ++ i )
    {
        m_fullkeys[ i ].close ();
    }
}
bool fullkey_array_t::open (
const char * path<|fim▁hole|> )
{
if ( NULL == path || '\0' == * path )
{
LOG_ERROR ( "[md5db][fullkey][open]invalid path" );
return false;
}
G_APPTOOL->make_dir ( path );
char ph[ 260 ];
for ( int i = 0; i < COUNT_OF ( m_fullkeys ); ++ i )
{
strcpy ( ph, path );
G_APPTOOL->path_to_os ( ph );
if ( S_PATH_SEP_C != ph[ strlen ( ph ) - 1 ] )
{
strcat ( ph, S_PATH_SEP );
}
char t[ 32 ];
sprintf ( t, "%02X", ( int ) i );
strcat ( ph, t );
strcat ( ph, ".fullkey" );
if ( ! m_fullkeys[ i ].open ( ph, i ) )
{
LOG_ERROR ( "[md5db][fullkey][open][file=%s]open failed",
ph );
return false;
}
}
return true;
}
// Route an inner key to its bucket using the key's first byte, spreading keys
// across the m_fullkeys files by leading byte value.
// NOTE(review): inner_key_len is unused and the >= 16 sanity assert is
// commented out, so too-short keys are not detected here - confirm callers
// always pass a full-length key.
fullkey_t & fullkey_array_t::get_fullkey (
    const void *        inner_key,
    size_t              inner_key_len
)
{
    //assert ( inner_key_len >= 16 );
    unsigned char c = * ( ( const unsigned char * ) inner_key );
    return m_fullkeys[ c ];
}
// Append a comma-separated info dump of every bucket to the stream.
void fullkey_array_t::info (
    std::stringstream & ss
)
{
    size_t count = size ();
    for ( size_t i = 0; i < count; ++ i )
    {
        fullkey_t & p = item ( i );
        p.info ( ss );
        // Separator between buckets; none after the last one.
        if ( i < count - 1 )
        {
            ss << ",";
        }
    }
}
} // namespace md5db<|fim▁end|> | |
<|file_name|>_url.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-<|fim▁hole|><|fim▁end|> |
import root
import j |
<|file_name|>kendo.culture.ms.js<|end_file_name|><|fim▁begin|>/*
* Kendo UI v2014.3.1119 (http://www.telerik.com/kendo-ui)
* Copyright 2014 Telerik AD. All rights reserved.
*
* Kendo UI commercial licenses may be obtained at
* http://www.telerik.com/purchase/license-agreement/kendo-ui-complete
* If you do not own a commercial license, this file shall be governed by the trial license terms.
*/
(function(f, define){
define([], f);
})(function(){
(function( window, undefined ) {
var kendo = window.kendo || (window.kendo = { cultures: {} });
kendo.cultures["ms"] = {
name: "ms",
numberFormat: {
pattern: ["-n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
percent: {
pattern: ["-n %","n %"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "%"
},
currency: {
pattern: ["($n)","$n"],
decimals: 0,
",": ",",
".": ".",
groupSize: [3],
symbol: "RM"
}
},
calendars: {
standard: {
days: {
names: ["Ahad","Isnin","Selasa","Rabu","Khamis","Jumaat","Sabtu"],
namesAbbr: ["Ahad","Isnin","Sel","Rabu","Khamis","Jumaat","Sabtu"],
namesShort: ["A","I","S","R","K","J","S"]
},
months: {
names: ["Januari","Februari","Mac","April","Mei","Jun","Julai","Ogos","September","Oktober","November","Disember",""],
namesAbbr: ["Jan","Feb","Mac","Apr","Mei","Jun","Jul","Ogos","Sept","Okt","Nov","Dis",""]
},
AM: [""],<|fim▁hole|> PM: [""],
patterns: {
d: "dd/MM/yyyy",
D: "dd MMMM yyyy",
F: "dd MMMM yyyy H:mm:ss",
g: "dd/MM/yyyy H:mm",
G: "dd/MM/yyyy H:mm:ss",
m: "dd MMMM",
M: "dd MMMM",
s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
t: "H:mm",
T: "H:mm:ss",
u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
y: "MMMM yyyy",
Y: "MMMM yyyy"
},
"/": "/",
":": ":",
firstDay: 1
}
}
}
})(this);
return window.kendo;
}, typeof define == 'function' && define.amd ? define : function(_, f){ f(); });<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Collect an iterator into a slice.
//!
//! Rust comes with the `Iterator::collect` method for collecting an iterator's items into
//! a heap-allocated `Vec` or any other type that implements `FromIterator`, but there's
//! no way to collect items into a stack-allocated array without manually looping over the
//! iterator. This crates provides an alternative with `collect_slice` methods that
//! collect an iterator's items into a mutable slice (of a stack-allocated array or
//! otherwise.)
//!
//! The trait is automatically implemented for any type that implements `Iterator`.
//!
//! # Examples
//!
//! ```
//! use collect_slice::CollectSlice;
//!
//! let mut orig = [0; 8];
//! (0..8).map(|i| i * 2).collect_slice_checked(&mut orig[..]);
//! assert_eq!(orig, [0, 2, 4, 6, 8, 10, 12, 14]);
//!
//! let mut buf = [42; 8];
//! orig.iter()
//! .map(|&x| x + 10)
//! .collect_slice_checked(&mut buf[..]);
//! assert_eq!(buf, [10, 12, 14, 16, 18, 20, 22, 24]);
//! ```
//!
//! # Usage
//!
//! This crate can be used through cargo by adding it as a dependency in `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! collect_slice = "^1.2.0"
//! ```
//! and importing it in the crate root:
//!
//! ```
//! extern crate collect_slice;
//! ```
//! The provided methods can then be used by importing the trait within individual
//! modules:
//!
//! ```
//! use collect_slice::CollectSlice;
//! ```
/// An iterator that can collect into a slice.
pub trait CollectSlice: Iterator {
/// Loop through the iterator, sequentially writing items into the given slice until
/// either the iterator runs out of items or the slice fills up.
///
/// Return the number of items written.
///
/// # Examples
///
/// ```
/// use collect_slice::CollectSlice;
///
/// let mut buf = [0; 10];
///
/// // Fill a whole slice.
/// let count = (0..10).collect_slice(&mut buf[..]);
/// assert_eq!(count, 10);
/// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
///
/// // Write into a subslice
/// let count = (10..20).collect_slice(&mut buf[5..7]);
/// assert_eq!(count, 2);
/// assert_eq!(buf, [0, 1, 2, 3, 4, 10, 11, 7, 8, 9]);
///
/// // Only writes until iterator is exhausted.
/// let count = (8..10).collect_slice(&mut buf[..]);
/// assert_eq!(count, 2);
/// assert_eq!(buf, [8, 9, 2, 3, 4, 10, 11, 7, 8, 9]);
///
/// // Extra iterator items are ignored.
/// let count = (20..40).collect_slice(&mut buf[..]);
/// assert_eq!(count, 10);
/// assert_eq!(buf, [20, 21, 22, 23, 24, 25, 26, 27, 28, 29]);
/// ```
fn collect_slice(&mut self, slice: &mut [Self::Item]) -> usize;
/// Perform `collect_slice()` and panic if iterator yielded too few items to fill the
/// slice.
///
/// If this function succeeds, the number of items written equals the size of the
/// given slice.
///
/// # Examples
///
/// ```rust,should_panic
/// use collect_slice::CollectSlice;
///
/// let mut buf = [0; 10];
///
/// // Succeeds as long as entire slice is filled.
/// (0..20).collect_slice_fill(&mut buf[..]);
/// (0..5).collect_slice_fill(&mut buf[..5]);
///
/// // Panics otherwise!
/// (0..5).collect_slice_fill(&mut buf[..]);
/// ```
fn collect_slice_fill(&mut self, slice: &mut [Self::Item]) {
assert_eq!(self.collect_slice(slice), slice.len());
}
/// Perform `collect_slice()` and panic if the slice was too small to hold all the
/// items.
///
/// Return the number of items written.
///
/// # Examples
///
/// ```rust,should_panic
/// use collect_slice::CollectSlice;
///
/// let mut buf = [0; 10];
///
/// // Succeeds as long as iterator yields all its items.
/// let count = (0..10).collect_slice_exhaust(&mut buf[..]);
/// assert_eq!(count, 10);
/// let count = (0..5).collect_slice_exhaust(&mut buf[..]);
/// assert_eq!(count, 5);
///
/// // Panics otherwise!
/// (0..20).collect_slice_exhaust(&mut buf[..]);
///
/// ```
fn collect_slice_exhaust(&mut self, slice: &mut [Self::Item]) -> usize {
let count = self.collect_slice(slice);
assert!(self.next().is_none());
count
}
/// Perform `collect_slice()` and panic if there weren't enough items to fill up
/// the slice or the slice was too small to hold all the items.
///
/// If this function succeeds, the number of items written equals the size of the
/// given slice.
///
/// # Examples
///
/// ```
/// use collect_slice::CollectSlice;
///
/// // Succeeds as long as iteration count equals slice capacity.
/// let mut buf = [0; 10];
/// (0..10).collect_slice_checked(&mut buf[..]);
/// (0..5).collect_slice_checked(&mut buf[2..7]);
/// ```
/// ```rust,should_panic
/// use collect_slice::CollectSlice;
///
/// // Panics if iterator isn't exhausted!
/// let mut buf = [0; 10];
/// (0..20).collect_slice_checked(&mut buf[..]);
/// ```
/// ```rust,should_panic
/// use collect_slice::CollectSlice;
///
/// // Panics if slice isn't filled!
/// let mut buf = [0; 10];
/// (0..5).collect_slice_checked(&mut buf[..]);
/// ```
fn collect_slice_checked(&mut self, slice: &mut [Self::Item]) {
assert_eq!(self.collect_slice_exhaust(slice), slice.len());
}
}
impl<I: ?Sized> CollectSlice for I where I: Iterator {
    fn collect_slice(&mut self, slice: &mut [Self::Item]) -> usize {
        // Walk the slice and the iterator in lockstep, counting how many
        // slots were actually written before either side ran out.
        let mut written = 0;

        for slot in slice.iter_mut() {
            match self.next() {
                Some(item) => {
                    *slot = item;
                    written += 1;
                }
                None => break,
            }
        }

        written
    }
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_basic() {
let mut buf = [0; 5];
let count = (0..5).map(|i| {
i + 1
}).collect_slice(&mut buf[..]);
assert_eq!(count, 5);
assert_eq!(buf, [1, 2, 3, 4, 5]);
}
#[test]
fn test_under() {
let mut buf = [0; 5];
let count = (0..3).map(|i| {
i + 1
}).collect_slice(&mut buf[1..]);
assert_eq!(count, 3);
assert_eq!(buf, [0, 1, 2, 3, 0]);
}
#[test]
fn test_over() {
let mut buf = [0; 3];
let mut iter = (0..5).map(|i| {
i + 1
});
let count = iter.collect_slice(&mut buf[..]);
assert_eq!(count, 3);
assert_eq!(buf, [1, 2, 3]);
assert_eq!(iter.next().unwrap(), 4);
assert_eq!(iter.next().unwrap(), 5);
}
#[test]
fn test_checked() {
let mut buf = [0; 5];
(0..5).map(|i| {
i + 1
}).collect_slice_checked(&mut buf[..]);
assert_eq!(buf, [1, 2, 3, 4, 5]);
}
#[test]
#[should_panic]
fn test_checked_under() {
let mut buf = [0; 5];
(0..3).map(|i| {
i + 1
}).collect_slice_checked(&mut buf[..]);
}
#[test]
#[should_panic]
fn test_checked_over() {
let mut buf = [0; 3];
(0..5).map(|i| {
i + 1
}).collect_slice_checked(&mut buf[..]);
}
#[test]
fn test_exhaust() {
let mut buf = [0; 5];
(0..3).map(|i| {
i + 1
}).collect_slice_exhaust(&mut buf[..]);
assert_eq!(buf, [1, 2, 3, 0, 0]);
(0..5).map(|i| {
i + 1
}).collect_slice_exhaust(&mut buf[..]);
assert_eq!(buf, [1, 2, 3, 4, 5]);
}
#[test]<|fim▁hole|> #[should_panic]
fn test_exhaust_over() {
let mut buf = [0; 5];
(0..7).map(|i| {
i + 1
}).collect_slice_exhaust(&mut buf[..]);
}
#[test]
fn test_filled() {
let mut buf = [0; 5];
(0..5).map(|i| {
i + 1
}).collect_slice_fill(&mut buf[..]);
assert_eq!(buf, [1, 2, 3, 4, 5]);
(50..100).map(|i| {
i + 1
}).collect_slice_fill(&mut buf[..]);
assert_eq!(buf, [51, 52, 53, 54, 55]);
}
#[test]
#[should_panic]
fn test_filled_under() {
let mut buf = [0; 5];
(0..3).map(|i| {
i + 1
}).collect_slice_fill(&mut buf[..]);
}
#[test]
fn test_unsized() {
let mut buf = [0; 5];
let it: &mut Iterator<Item=_> = &mut (0..5).map(|i| {
i + 1
});
let count = <Iterator<Item=_> as CollectSlice>::collect_slice(it, &mut buf[..]);
assert_eq!(count, 5);
assert_eq!(buf, [1, 2, 3, 4, 5]);
}
}<|fim▁end|> | |
<|file_name|>live_game_header.js<|end_file_name|><|fim▁begin|>// !LOCNS:live_game
var model;
var handlers = {};
$(document).ready(function () {
function HeaderViewModel() {
var self = this;
self.active = ko.observable(true);
self.setup = function () {
$(window).focus(function() { self.active(true); });
$(window).blur(function() { self.active(false); });
};<|fim▁hole|> model = new HeaderViewModel();
// inject per scene mods
if (scene_mod_list['live_game_header'])
loadMods(scene_mod_list['live_game_header']);
// setup send/recv messages and signals
app.registerWithCoherent(model, handlers);
// Activates knockout.js
ko.applyBindings(model);
// run start up logic
model.setup();
});<|fim▁end|> | } |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-11 19:21
from __future__ import unicode_literals
<|fim▁hole|>import prosody.utils
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Prosody',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('host', models.TextField(default=prosody.utils.getProsodyDomain)),
('user', models.TextField(db_index=True)),
('store', models.TextField(db_index=True)),
('key', models.TextField(db_index=True)),
('type', models.TextField(default='string')),
('value', models.TextField()),
],
options={
'db_table': 'prosody',
},
),
migrations.AlterUniqueTogether(
name='prosody',
unique_together=set([('user', 'store', 'key')]),
),
]<|fim▁end|> | from django.db import migrations, models |
<|file_name|>AA2_add_miRNA_infor_miR-155_rev_seed.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import re
from operator import itemgetter
ref_file = open('../../../data/RNA-seq_miR-124_miR-155_transfected_HeLa/gene_exp_miR-155_overexpression_RefSeq_Rep_isoforms.diff','r')
input_file = open('../../../result/mirage_output_rev_seed_miR-155_vs_RefSeq_NM_2015-07-30.txt','r')
output_file = open('../../../result/mirage_output_rev_seed_miR-155_vs_RefSeq_NM_2015-07-30_miR-155_overexpression.result','w')
ref_dict = {}
header = ''
for line in ref_file:
line = line.rstrip()
data = line.split("\t")
if data[0] == 'gr_id':
header = line
continue
refid = data[2]
<|fim▁hole|> line = line.rstrip()
data = line.split("\t")
if data[0] == 'miRNA_name_id':
print(header,line, sep="\t",end="\n",file=output_file)
continue
refid = data[1]
if refid in ref_dict:
print(ref_dict[refid],line, sep="\t",end="\n",file=output_file)
ref_file.close()
input_file.close()
output_file.close()<|fim▁end|> | ref_dict[refid] = line
for line in input_file:
|
<|file_name|>IUserVerifier.ts<|end_file_name|><|fim▁begin|>interface IUserVerifier {<|fim▁hole|> verificationCode: string;
}
export = IUserVerifier;<|fim▁end|> | email: string; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from setuptools import setup, find_packages
from bang import VERSION
import os.path
ETC = os.path.join(os.path.dirname(__file__), 'etc')
with open(os.path.join(ETC, 'requirements.pip')) as f:
reqs = [l.strip() for l in f if '://' not in l]
reqs.append('distribute')
setup(
name='bang',
version=VERSION,
author='fr33jc',
author_email='[email protected]',
packages=find_packages(exclude=['tests']),
package_data={'bang': ['bang.wav']},<|fim▁hole|> url='https://github.com/fr33jc/bang',
install_requires=reqs,
scripts=['bin/bang'],
)<|fim▁end|> | license='GPLv3',
description='Server and cloud resource deployment automation',
platforms='POSIX', |
<|file_name|>resPromise.js<|end_file_name|><|fim▁begin|>'use strict';
// var Boom = require('boom');
var BPromise = require('bluebird');
module.exports = function (handler) {
return function (req, res) {
return new BPromise(function (resolve) {
resolve(handler(req, res));
})
.then(function (response) {
if (response === undefined) {<|fim▁hole|> // [KE] assume the response is handled, eg in a res.redirect
return;
}
if (response.isBoom) {
res.error(response);
return;
}
res.json(response);
}, res.error).catch(res.error);
};
};<|fim▁end|> | |
<|file_name|>app.constants.ts<|end_file_name|><|fim▁begin|>export const _appRoles: any = {
"_MEMBER": {
title: 'Members'
},
"_ADMIN": {
title: 'Admin'
},
"_SUPERADMIN": {
title: 'Super admin'
}
}
export const _appErrorCodes: any = {
"500": {
title: "Internal server error",
message: "Some internal server error has occured.",
icon: "/assets/imgs/svg/database.svg"
},
"403": {
title: "Unauthorized",
message: "You are not authorized to view this content.",
icon: "/assets/imgs/svg/padlock.svg"
},
"404": {
title: "Not found",
message: "We could not find the requested page.",
icon: "/assets/imgs/svg/broken-link.svg"
}
}<|fim▁hole|>
export const _filterTypes: any = {
"_RADIO": { name: "radio" },
"_SLIDER": { name: "slider" }
}
export const _attributeConstants: any = [
{
typeId: 1,
name: "Paying guest",
attributes: [{
attributeId: 9
}]
},
{
typeId: 2,
name: "Hostel",
attributes: [{
attributeId: 9
}]
},
{
typeId: 3,
name: "Rental flat",
attributes: [{
attributeId: 9
}]
}
]<|fim▁end|> | |
<|file_name|>test_selector_ts.py<|end_file_name|><|fim▁begin|>import math
import socket
import tempfile
import unittest
from contextlib import closing
import numpy as np
from shyft.api import (
Calendar, UtcPeriod,
DtsServer, DtsClient,
TimeAxis, TimeSeries, POINT_AVERAGE_VALUE, POINT_INSTANT_VALUE
)
from shyft.pyapi import fixed_tsv, windowed_percentiles_tsv, period_percentiles_tsv, selector_ts
def find_free_port() -> int:
    """Ask the OS for an ephemeral TCP port that is currently free.

    Binding to port 0 makes the kernel pick an unused port; the socket is
    closed again immediately so a server can be bound to that port afterwards.
    (Technique from SO https://stackoverflow.com/questions/1365265/on-localhost-how-to-pick-a-free-port-number)

    :return: available port number for use
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as probe:
        probe.bind(('', 0))
        _host, port = probe.getsockname()
        return port
class SelectorTsTestCase(unittest.TestCase):
    def setUp(self) -> None:
        # Spin up a real DTS server on an OS-assigned free port and connect a client.
        self.port = find_free_port()
        self.server = DtsServer()
        self.server.set_listening_port(self.port)
        self.server.start_async()
        self.client = DtsClient(rf'localhost:{self.port}')

    def tearDown(self) -> None:
        # Drop server state and the port so each test starts from scratch.
        # NOTE(review): self.client is left for GC to clean up - confirm
        # DtsClient needs no explicit teardown.
        self.server.clear()
        del self.server
        del self.port

    def test_fixed_tsv_empty(self) -> None:
        """Test that an empty TsVector is generated by fixed_tsv when given an empty sequence of values."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        tsv = fixed_tsv(period, [])
        self.assertEqual(len(tsv), 0)
<|fim▁hole|> def test_fixed_tsv_values(self) -> None:
"""Test that a TsVector with fixed constant values is generated by fixed_tsv when given
a sequence of values."""
cal = Calendar()
period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
values = [12, 15.5]
tsv = fixed_tsv(period, values)
self.assertEqual(len(tsv), 2)
for v, ts in zip(values, tsv):
for ts_v in ts.values:
self.assertEqual(ts_v, v)
    def test_windowed_percentiles_tsv_empty(self) -> None:
        """Test that an empty TsVector is generated by windowed_percentiles_tsv
        when given an empty sequence of percentiles."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
        # One week of hourly data spanning [-2, 2].
        data = np.linspace(-2, 2, 24*7)
        data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data, POINT_INSTANT_VALUE)
        # compute
        tsv = windowed_percentiles_tsv(
            data_ts, period,
            Calendar.HOUR, Calendar.HOUR,
            [],
            self.client, cal
        )
        self.assertEqual(len(tsv), 0)

    def test_windowed_percentiles_tsv_values(self) -> None:
        """Test that a TsVector is generated by windowed_percentiles_tsv with time-series
        fulfilling some properties of being percentiles of the data ts."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
        data = np.linspace(-2, 2, 24*7)
        data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data, POINT_INSTANT_VALUE)
        # compute
        percentiles = [0, 10, 50, 90, 100]
        tsv = windowed_percentiles_tsv(
            data_ts, period,
            3*Calendar.HOUR, 12*Calendar.HOUR,
            percentiles,
            self.client, cal
        )
        self.assertEqual(len(tsv), 5)
        # assert that the time-series have the correct properties for being percentile series
        for i in range(len(tsv[0])):
            # Percentile curves must be non-decreasing along the percentile
            # axis at every time point.
            prev_v = tsv[0].values[i]
            for j in range(len(percentiles)-1):
                v = tsv[j+1].values[i]
                # both values will be NaN at the end - that is ok
                if math.isnan(prev_v) and math.isnan(v):
                    continue
                # check that no larger percentile have values greater than lower percentiles
                self.assertLessEqual(prev_v, v)
                prev_v = v

    def test_period_percentiles_tsv_empty(self) -> None:
        """Test that an empty TsVector is generated by period_percentiles_tsv
        when given an empty sequence of percentiles."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
        data = np.linspace(-2, 2, 24*7)
        data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data, POINT_INSTANT_VALUE)
        # compute
        tsv = period_percentiles_tsv(
            data_ts, period,
            3*Calendar.HOUR, period,
            [],
            self.client, cal
        )
        self.assertEqual(len(tsv), 0)
def test_period_percentiles_tsv_values(self) -> None:
"""Test that a TsVector is generated by period_percentiles_tsv with time-series
fulfilling some properties of being percentiles of the data ts."""
cal = Calendar()
period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
data = np.linspace(-2, 2, 24*7)
data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data, POINT_INSTANT_VALUE)
# compute
percentiles = [0, 10, 50, 90, 100]
tsv = period_percentiles_tsv(
data_ts, period,
3*Calendar.HOUR, period,
percentiles,
self.client, cal
)
self.assertEqual(len(tsv), 5)
# assert that the time-series have the correct properties for being percentile series
for i in range(len(tsv[0])):
prev_v = tsv[0].values[i]
for j in range(len(percentiles)-1):
v = tsv[j+1].values[i]
# both values will be NaN at the end - that is ok
if math.isnan(prev_v) and math.isnan(v):
continue
# check that no larger percentile have values greater than lower percentiles
self.assertLessEqual(prev_v, v)
prev_v = v
def test_selector_ts(self) -> None:
"""Test that selector_ts constructs a time-series selects data from different time-series correctly."""
n = 24
cal = Calendar()
period = UtcPeriod(0, n*Calendar.HOUR)
data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, n), np.linspace(-10, 10, n), POINT_INSTANT_VALUE)
source_tss = [
TimeSeries(TimeAxis(0, Calendar.HOUR, n), 1.00*np.ones(n), POINT_INSTANT_VALUE),
TimeSeries(TimeAxis(0, Calendar.HOUR, n), 10.0*np.ones(n), POINT_INSTANT_VALUE),
TimeSeries(TimeAxis(0, Calendar.HOUR, n), 100.*np.ones(n), POINT_INSTANT_VALUE),
]
threshold_1 = -5
threshold_2 = 5
threshold_tss = [
TimeSeries(TimeAxis(0, Calendar.HOUR, n), threshold_1*np.ones(n), POINT_INSTANT_VALUE),
TimeSeries(TimeAxis(0, Calendar.HOUR, n), threshold_2*np.ones(n), POINT_INSTANT_VALUE),
]
ts = selector_ts(
data_ts, period, 2*Calendar.HOUR,
threshold_tss, source_tss,
POINT_AVERAGE_VALUE,
self.client, cal
)
self.assertEqual(len(data_ts), len(ts))
for dv, rv in zip(data_ts.values, ts.values):
if dv < threshold_1:
self.assertEqual(rv, source_tss[0].values[0])
elif threshold_1 <= dv < threshold_2:
self.assertEqual(rv, source_tss[1].values[0])
else:
self.assertEqual(rv, source_tss[2].values[0])<|fim▁end|> |