prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
//! Standalone server for socket_bench
//! ========================================
//!
//! You can run `socket_bench` across a real network by running this bench
//! server remotely. For example,
//!
//! `RUSTFLAGS="-Ctarget-cpu=skylake -Ctarget-feature=+aes,+sse2,+sse4.1,+ssse3" TCP_ADDR=/ip6/::1/tcp/12345 cargo run --release -p socket-bench-server`
//!
//! will run the socket bench server handling the remote_tcp benchmark. A
//! corresponding client would exercise this benchmark using
//!<|fim▁hole|>
use diem_logger::info;
use netcore::transport::tcp::TcpTransport;
use socket_bench_server::{build_tcp_noise_transport, start_stream_server, Args};
use tokio::runtime::Builder;
fn main() {
::diem_logger::Logger::new().init();
let args = Args::from_env();
let rt = Builder::new_multi_thread()
.worker_threads(32)
.enable_all()
.build()
.unwrap();
let executor = rt.handle();
if let Some(addr) = args.tcp_addr {
let addr = start_stream_server(&executor, TcpTransport::default(), addr);
info!("bench: tcp: listening on: {}", addr);
}
if let Some(addr) = args.tcp_noise_addr {
let addr = start_stream_server(&executor, build_tcp_noise_transport(), addr);
info!("bench: tcp+noise: listening on: {}", addr);
}
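    // Keep the main thread alive; the listeners keep running on the
    // runtime's worker threads until the process is killed.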
std::thread::park();
}<|fim▁end|> | //! `RUSTFLAGS="-Ctarget-cpu=skylake -Ctarget-feature=+aes,+sse2,+sse4.1,+ssse3" TCP_ADDR=/ip6/::1/tcp/12345 cargo x bench -p network remote_tcp` |
<|file_name|>test_vumi_app.py<|end_file_name|><|fim▁begin|>import base64
import json
from twisted.internet.defer import inlineCallbacks, DeferredQueue, returnValue
from twisted.web.http_headers import Headers
from twisted.web import http
from twisted.web.server import NOT_DONE_YET
from vumi.config import ConfigContext
from vumi.message import TransportUserMessage, TransportEvent
from vumi.tests.helpers import VumiTestCase
from vumi.tests.utils import MockHttpServer, LogCatcher
from vumi.transports.vumi_bridge.client import StreamingClient
from vumi.utils import http_request_full
from go.apps.http_api.resource import (
StreamResourceMixin, StreamingConversationResource)
from go.apps.tests.helpers import AppWorkerHelper
from go.apps.http_api.vumi_app import StreamingHTTPWorker
class TestStreamingHTTPWorker(VumiTestCase):
@inlineCallbacks
def setUp(self):
self.app_helper = self.add_helper(AppWorkerHelper(StreamingHTTPWorker))
self.config = {
'health_path': '/health/',
'web_path': '/foo',
'web_port': 0,
'metrics_prefix': 'metrics_prefix.',
'conversation_cache_ttl': 0,
}
self.app = yield self.app_helper.get_app_worker(self.config)
self.addr = self.app.webserver.getHost()
self.url = 'http://%s:%s%s' % (
self.addr.host, self.addr.port, self.config['web_path'])
conv_config = {
'http_api': {
'api_tokens': [
'token-1',
'token-2',
'token-3',
],
'metric_store': 'metric_store',
}
}
conversation = yield self.app_helper.create_conversation(
config=conv_config)
yield self.app_helper.start_conversation(conversation)
self.conversation = yield self.app_helper.get_conversation(
conversation.key)
self.auth_headers = {
'Authorization': ['Basic ' + base64.b64encode('%s:%s' % (
conversation.user_account.key, 'token-1'))],
}
self.client = StreamingClient()
# Mock server to test HTTP posting of inbound messages & events
self.mock_push_server = MockHttpServer(self.handle_request)
yield self.mock_push_server.start()
self.add_cleanup(self.mock_push_server.stop)
self.push_calls = DeferredQueue()
self._setup_wait_for_request()
self.add_cleanup(self._wait_for_requests)
def _setup_wait_for_request(self):
# Hackery to wait for the request to finish
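        # track_request is wrapped to count how many requests we expect to
        # complete; release_request is wrapped to push each completion onto a
        # queue, which _wait_for_requests drains during cleanup.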
self._req_state = {
'queue': DeferredQueue(),
'expected': 0,
}
orig_track = StreamingConversationResource.track_request
orig_release = StreamingConversationResource.release_request
def track_wrapper(*args, **kw):
self._req_state['expected'] += 1
return orig_track(*args, **kw)
def release_wrapper(*args, **kw):
return orig_release(*args, **kw).addCallback(
self._req_state['queue'].put)
self.patch(
StreamingConversationResource, 'track_request', track_wrapper)
self.patch(
StreamingConversationResource, 'release_request', release_wrapper)
@inlineCallbacks
def _wait_for_requests(self):
while self._req_state['expected'] > 0:
yield self._req_state['queue'].get()
self._req_state['expected'] -= 1
def handle_request(self, request):
self.push_calls.put(request)
return NOT_DONE_YET
@inlineCallbacks
def pull_message(self, count=1):
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
messages = DeferredQueue()
errors = DeferredQueue()
receiver = self.client.stream(
TransportUserMessage, messages.put, errors.put, url,
Headers(self.auth_headers))
received_messages = []
for msg_id in range(count):
yield self.app_helper.make_dispatch_inbound(
'in %s' % (msg_id,), message_id=str(msg_id),
conv=self.conversation)
recv_msg = yield messages.get()
received_messages.append(recv_msg)
receiver.disconnect()
returnValue((receiver, received_messages))
def assert_bad_request(self, response, reason):
self.assertEqual(response.code, http.BAD_REQUEST)
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
data = json.loads(response.delivered_body)
self.assertEqual(data, {
"success": False,
"reason": reason,
})
@inlineCallbacks
def test_proxy_buffering_headers_off(self):
# This is the default, but we patch it anyway to make sure we're
# testing the right thing should the default change.
self.patch(StreamResourceMixin, 'proxy_buffering', False)
receiver, received_messages = yield self.pull_message()
headers = receiver._response.headers
self.assertEqual(headers.getRawHeaders('x-accel-buffering'), ['no'])
@inlineCallbacks
def test_proxy_buffering_headers_on(self):
self.patch(StreamResourceMixin, 'proxy_buffering', True)
receiver, received_messages = yield self.pull_message()
headers = receiver._response.headers
self.assertEqual(headers.getRawHeaders('x-accel-buffering'), ['yes'])
@inlineCallbacks
def test_content_type(self):
receiver, received_messages = yield self.pull_message()
headers = receiver._response.headers
self.assertEqual(
headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
@inlineCallbacks
def test_messages_stream(self):
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
messages = DeferredQueue()
errors = DeferredQueue()
receiver = self.client.stream(
TransportUserMessage, messages.put, errors.put, url,
Headers(self.auth_headers))
msg1 = yield self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
msg2 = yield self.app_helper.make_dispatch_inbound(
'in 2', message_id='2', conv=self.conversation)
rm1 = yield messages.get()
rm2 = yield messages.get()
receiver.disconnect()
# Sometimes messages arrive out of order if we're hitting real redis.
rm1, rm2 = sorted([rm1, rm2], key=lambda m: m['message_id'])
self.assertEqual(msg1['message_id'], rm1['message_id'])
self.assertEqual(msg2['message_id'], rm2['message_id'])
        self.assertEqual(errors.pending, [])
@inlineCallbacks
def test_events_stream(self):
url = '%s/%s/events.json' % (self.url, self.conversation.key)
events = DeferredQueue()
errors = DeferredQueue()
        receiver = self.client.stream(TransportEvent, events.put,
                                      errors.put, url,
                                      Headers(self.auth_headers))
msg1 = yield self.app_helper.make_stored_outbound(
self.conversation, 'out 1', message_id='1')
ack1 = yield self.app_helper.make_dispatch_ack(
msg1, conv=self.conversation)
msg2 = yield self.app_helper.make_stored_outbound(
self.conversation, 'out 2', message_id='2')
ack2 = yield self.app_helper.make_dispatch_ack(
msg2, conv=self.conversation)
ra1 = yield events.get()
ra2 = yield events.get()
receiver.disconnect()
# Sometimes messages arrive out of order if we're hitting real redis.
if ra1['event_id'] != ack1['event_id']:
ra1, ra2 = ra2, ra1
self.assertEqual(ack1['event_id'], ra1['event_id'])
self.assertEqual(ack2['event_id'], ra2['event_id'])
        self.assertEqual(errors.pending, [])
@inlineCallbacks
def test_missing_auth(self):
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
queue = DeferredQueue()
receiver = self.client.stream(
TransportUserMessage, queue.put, queue.put, url)
response = yield receiver.get_response()
self.assertEqual(response.code, http.UNAUTHORIZED)
self.assertEqual(response.headers.getRawHeaders('www-authenticate'), [
'basic realm="Conversation Realm"'])
@inlineCallbacks
def test_invalid_auth(self):
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
queue = DeferredQueue()
headers = Headers({
'Authorization': ['Basic %s' % (base64.b64encode('foo:bar'),)],
})
receiver = self.client.stream(
TransportUserMessage, queue.put, queue.put, url, headers)
response = yield receiver.get_response()
self.assertEqual(response.code, http.UNAUTHORIZED)
self.assertEqual(response.headers.getRawHeaders('www-authenticate'), [
'basic realm="Conversation Realm"'])
@inlineCallbacks
def test_send_to(self):
msg = {
'to_addr': '+2345',
'content': 'foo',
'message_id': 'evil_id',
}
# TaggingMiddleware.add_tag_to_msg(msg, self.tag)
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
self.assertEqual(response.code, http.OK)
put_msg = json.loads(response.delivered_body)
[sent_msg] = self.app_helper.get_dispatched_outbound()
        self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
self.assertEqual(sent_msg['helper_metadata'], {
'go': {
'conversation_key': self.conversation.key,
'conversation_type': 'http_api',
'user_account': self.conversation.user_account.key,
},
})
# We do not respect the message_id that's been given.
self.assertNotEqual(sent_msg['message_id'], msg['message_id'])
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['to_addr'], msg['to_addr'])
self.assertEqual(sent_msg['from_addr'], None)
@inlineCallbacks
def test_send_to_within_content_length_limit(self):
self.conversation.config['http_api'].update({
'content_length_limit': 182,
})
yield self.conversation.save()
msg = {
'content': 'foo',
'to_addr': '+1234',
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
put_msg = json.loads(response.delivered_body)
self.assertEqual(response.code, http.OK)
[sent_msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
self.assertEqual(sent_msg['helper_metadata'], {
'go': {
'conversation_key': self.conversation.key,
'conversation_type': 'http_api',
'user_account': self.conversation.user_account.key,
},
})
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['session_event'], None)
self.assertEqual(sent_msg['to_addr'], '+1234')
self.assertEqual(sent_msg['from_addr'], None)
@inlineCallbacks
def test_send_to_content_too_long(self):
self.conversation.config['http_api'].update({
'content_length_limit': 10,
})
yield self.conversation.save()
msg = {
'content': "This message is longer than 10 characters.",
'to_addr': '+1234',
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(
url, json.dumps(msg), self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Payload content too long: 42 > 10")
@inlineCallbacks
def test_send_to_with_evil_content(self):
msg = {
'content': 0xBAD,
'to_addr': '+1234',
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Invalid or missing value for payload key 'content'")
@inlineCallbacks
def test_send_to_with_evil_to_addr(self):
msg = {
'content': 'good',
'to_addr': 1234,
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Invalid or missing value for payload key 'to_addr'")
@inlineCallbacks
def test_in_reply_to(self):
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
put_msg = json.loads(response.delivered_body)
self.assertEqual(response.code, http.OK)
[sent_msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
self.assertEqual(sent_msg['helper_metadata'], {
'go': {
'conversation_key': self.conversation.key,
'conversation_type': 'http_api',
'user_account': self.conversation.user_account.key,
},
})
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['session_event'], None)
self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr'])
self.assertEqual(sent_msg['from_addr'], '9292')
@inlineCallbacks
def test_in_reply_to_within_content_length_limit(self):
self.conversation.config['http_api'].update({
'content_length_limit': 182,
})
yield self.conversation.save()
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
put_msg = json.loads(response.delivered_body)
self.assertEqual(response.code, http.OK)
[sent_msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
self.assertEqual(sent_msg['helper_metadata'], {
'go': {
'conversation_key': self.conversation.key,
'conversation_type': 'http_api',
'user_account': self.conversation.user_account.key,
},
})
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['session_event'], None)
self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr'])
self.assertEqual(sent_msg['from_addr'], '9292')
@inlineCallbacks
def test_in_reply_to_content_too_long(self):
self.conversation.config['http_api'].update({
'content_length_limit': 10,
})
yield self.conversation.save()
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': "This message is longer than 10 characters.",
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(
url, json.dumps(msg), self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Payload content too long: 42 > 10")
@inlineCallbacks
def test_in_reply_to_with_evil_content(self):
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 0xBAD,
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Invalid or missing value for payload key 'content'")
@inlineCallbacks
def test_invalid_in_reply_to(self):
msg = {
'content': 'foo',
'in_reply_to': '1', # this doesn't exist
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(response, 'Invalid in_reply_to value')
@inlineCallbacks
def test_invalid_in_reply_to_with_missing_conversation_key(self):
# create a message with no conversation
inbound_msg = self.app_helper.make_inbound('in 1', message_id='msg-1')
vumi_api = self.app_helper.vumi_helper.get_vumi_api()
yield vumi_api.mdb.add_inbound_message(inbound_msg)
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
with LogCatcher(message='Invalid reply to message <Message .*>'
' which has no conversation key') as lc:
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
[error_log] = lc.messages()
self.assert_bad_request(response, "Invalid in_reply_to value")
self.assertTrue(inbound_msg['message_id'] in error_log)
@inlineCallbacks
def test_in_reply_to_with_evil_session_event(self):
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
'session_event': 0xBAD5E55104,
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(
response,
"Invalid or missing value for payload key 'session_event'")
self.assertEqual(self.app_helper.get_dispatched_outbound(), [])
@inlineCallbacks
def test_in_reply_to_with_evil_message_id(self):
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
'message_id': 'evil_id'
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(response.code, http.OK)
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
put_msg = json.loads(response.delivered_body)
[sent_msg] = self.app_helper.get_dispatched_outbound()
# We do not respect the message_id that's been given.
self.assertNotEqual(sent_msg['message_id'], msg['message_id'])
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr'])
self.assertEqual(sent_msg['from_addr'], '9292')
@inlineCallbacks
def test_metric_publishing(self):
metric_data = [
("vumi.test.v1", 1234, 'SUM'),
("vumi.test.v2", 3456, 'AVG'),
]
url = '%s/%s/metrics.json' % (self.url, self.conversation.key)
response = yield http_request_full(
url, json.dumps(metric_data), self.auth_headers, method='PUT')
self.assertEqual(response.code, http.OK)
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
prefix = "go.campaigns.test-0-user.stores.metric_store"
self.assertEqual(
self.app_helper.get_published_metrics(self.app),
[("%s.vumi.test.v1" % prefix, 1234),
("%s.vumi.test.v2" % prefix, 3456)])
@inlineCallbacks
def test_concurrency_limits(self):
config = yield self.app.get_config(None)
concurrency = config.concurrency_limit
queue = DeferredQueue()
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
max_receivers = [self.client.stream(
TransportUserMessage, queue.put, queue.put, url,
Headers(self.auth_headers)) for _ in range(concurrency)]
for i in range(concurrency):
msg = yield self.app_helper.make_dispatch_inbound(
'in %s' % (i,), message_id=str(i), conv=self.conversation)
received = yield queue.get()
self.assertEqual(msg['message_id'], received['message_id'])
maxed_out_resp = yield http_request_full(
url, method='GET', headers=self.auth_headers)
self.assertEqual(maxed_out_resp.code, 403)
self.assertTrue(
'Too many concurrent connections' in maxed_out_resp.delivered_body)
[r.disconnect() for r in max_receivers]
@inlineCallbacks
def test_disabling_concurrency_limit(self):
conv_resource = StreamingConversationResource(
self.app, self.conversation.key)
# negative concurrency limit disables it
ctxt = ConfigContext(user_account=self.conversation.user_account.key,
concurrency_limit=-1)
config = yield self.app.get_config(msg=None, ctxt=ctxt)
self.assertTrue(
(yield conv_resource.is_allowed(
config, self.conversation.user_account.key)))
@inlineCallbacks
def test_backlog_on_connect(self):
for i in range(10):
yield self.app_helper.make_dispatch_inbound(
'in %s' % (i,), message_id=str(i), conv=self.conversation)
queue = DeferredQueue()
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
receiver = self.client.stream(
TransportUserMessage, queue.put, queue.put, url,
Headers(self.auth_headers))
for i in range(10):
received = yield queue.get()
self.assertEqual(received['message_id'], str(i))
receiver.disconnect()
@inlineCallbacks
def test_health_response(self):
health_url = 'http://%s:%s%s' % (
self.addr.host, self.addr.port, self.config['health_path'])
response = yield http_request_full(health_url, method='GET')
self.assertEqual(response.delivered_body, '0')
yield self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
queue = DeferredQueue()
stream_url = '%s/%s/messages.json' % (self.url, self.conversation.key)
stream_receiver = self.client.stream(
TransportUserMessage, queue.put, queue.put, stream_url,
Headers(self.auth_headers))
yield queue.get()
response = yield http_request_full(health_url, method='GET')
self.assertEqual(response.delivered_body, '1')
stream_receiver.disconnect()
response = yield http_request_full(health_url, method='GET')
self.assertEqual(response.delivered_body, '0')
self.assertEqual(self.app.client_manager.clients, {
'sphex.stream.message.%s' % (self.conversation.key,): []
})
@inlineCallbacks
def test_post_inbound_message(self):
# Set the URL so stuff is HTTP Posted instead of streamed.
self.conversation.config['http_api'].update({
'push_message_url': self.mock_push_server.url,
})
yield self.conversation.save()
msg_d = self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
req = yield self.push_calls.get()
posted_json_data = req.content.read()
req.finish()
msg = yield msg_d
posted_msg = TransportUserMessage.from_json(posted_json_data)
self.assertEqual(posted_msg['message_id'], msg['message_id'])
@inlineCallbacks
def test_post_inbound_message_201_response(self):
# Set the URL so stuff is HTTP Posted instead of streamed.
self.conversation.config['http_api'].update({
'push_message_url': self.mock_push_server.url,
})
yield self.conversation.save()
with LogCatcher(message='Got unexpected response code') as lc:
msg_d = self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
req = yield self.push_calls.get()
req.setResponseCode(201)
req.finish()
yield msg_d
self.assertEqual(lc.messages(), [])
@inlineCallbacks
def test_post_inbound_message_500_response(self):
# Set the URL so stuff is HTTP Posted instead of streamed.
self.conversation.config['http_api'].update({
'push_message_url': self.mock_push_server.url,
})
yield self.conversation.save()
with LogCatcher(message='Got unexpected response code') as lc:
msg_d = self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
req = yield self.push_calls.get()
req.setResponseCode(500)<|fim▁hole|> req.finish()
yield msg_d
[warning_log] = lc.messages()
self.assertTrue(self.mock_push_server.url in warning_log)
self.assertTrue('500' in warning_log)
@inlineCallbacks
def test_post_inbound_event(self):
# Set the URL so stuff is HTTP Posted instead of streamed.
self.conversation.config['http_api'].update({
'push_event_url': self.mock_push_server.url,
})
yield self.conversation.save()
msg = yield self.app_helper.make_stored_outbound(
self.conversation, 'out 1', message_id='1')
event_d = self.app_helper.make_dispatch_ack(
msg, conv=self.conversation)
req = yield self.push_calls.get()
posted_json_data = req.content.read()
req.finish()
ack = yield event_d
self.assertEqual(TransportEvent.from_json(posted_json_data), ack)
@inlineCallbacks
def test_bad_urls(self):
        def assert_not_found(url, headers=None):
            d = http_request_full(url, method='GET', headers=headers or {})
            d.addCallback(lambda r: self.assertEqual(r.code, http.NOT_FOUND))
            return d
yield assert_not_found(self.url)
yield assert_not_found(self.url + '/')
yield assert_not_found('%s/%s' % (self.url, self.conversation.key),
headers=self.auth_headers)
yield assert_not_found('%s/%s/' % (self.url, self.conversation.key),
headers=self.auth_headers)
yield assert_not_found('%s/%s/foo' % (self.url, self.conversation.key),
headers=self.auth_headers)
@inlineCallbacks
def test_send_message_command(self):
yield self.app_helper.dispatch_command(
'send_message',
user_account_key=self.conversation.user_account.key,
conversation_key=self.conversation.key,
command_data={
u'batch_id': u'batch-id',
u'content': u'foo',
u'to_addr': u'to_addr',
u'msg_options': {
u'helper_metadata': {
u'tag': {
u'tag': [u'longcode', u'default10080']
}
},
u'from_addr': u'default10080',
}
})
[msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(msg.payload['to_addr'], "to_addr")
self.assertEqual(msg.payload['from_addr'], "default10080")
self.assertEqual(msg.payload['content'], "foo")
self.assertEqual(msg.payload['message_type'], "user_message")
self.assertEqual(
msg.payload['helper_metadata']['go']['user_account'],
self.conversation.user_account.key)
self.assertEqual(
msg.payload['helper_metadata']['tag']['tag'],
['longcode', 'default10080'])
@inlineCallbacks
def test_process_command_send_message_in_reply_to(self):
msg = yield self.app_helper.make_stored_inbound(
self.conversation, "foo")
yield self.app_helper.dispatch_command(
'send_message',
user_account_key=self.conversation.user_account.key,
conversation_key=self.conversation.key,
command_data={
u'batch_id': u'batch-id',
u'content': u'foo',
u'to_addr': u'to_addr',
u'msg_options': {
u'helper_metadata': {
u'tag': {
u'tag': [u'longcode', u'default10080']
}
},
u'transport_name': u'smpp_transport',
u'in_reply_to': msg['message_id'],
u'transport_type': u'sms',
u'from_addr': u'default10080',
}
})
[sent_msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(sent_msg['to_addr'], msg['from_addr'])
self.assertEqual(sent_msg['content'], 'foo')
self.assertEqual(sent_msg['in_reply_to'], msg['message_id'])<|fim▁end|> | |
<|file_name|>move_data.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
Data structures used for tracking moves. Please see the extensive
comments in the section "Moves and initialization" and in `doc.rs`.
*/
use std::cell::RefCell;
use std::rc::Rc;
use std::uint;
use std::collections::{HashMap, HashSet};
use middle::borrowck::*;
use middle::cfg;
use middle::dataflow::DataFlowContext;
use middle::dataflow::BitwiseOperator;
use middle::dataflow::DataFlowOperator;
use middle::expr_use_visitor as euv;
use middle::mem_categorization as mc;
use middle::ty;
use syntax::ast;
use syntax::ast_util;
use syntax::codemap::Span;
use util::ppaux::Repr;
pub struct MoveData {
/// Move paths. See section "Move paths" in `doc.rs`.
pub paths: RefCell<Vec<MovePath>>,
/// Cache of loan path to move path index, for easy lookup.
pub path_map: RefCell<HashMap<Rc<LoanPath>, MovePathIndex>>,
/// Each move or uninitialized variable gets an entry here.
pub moves: RefCell<Vec<Move>>,
/// Assignments to a variable, like `x = foo`. These are assigned
/// bits for dataflow, since we must track them to ensure that
/// immutable variables are assigned at most once along each path.
pub var_assignments: RefCell<Vec<Assignment>>,
/// Assignments to a path, like `x.f = foo`. These are not
/// assigned dataflow bits, but we track them because they still
/// kill move bits.
pub path_assignments: RefCell<Vec<Assignment>>,
/// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
pub assignee_ids: RefCell<HashSet<ast::NodeId>>,
}
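// Illustrative sketch (not part of the original source): given
//
//     fn f(a: Thing) {
//         let x = a;      // moves `a`, initializes `x`
//         let y = x.f;    // moves the path `x.f`, initializes `y`
//     }
//
// `paths` holds entries for `a`, `x`, and `x.f` (with `x` the parent of
// `x.f`), `moves` records the moves of `a` and `x.f`, and `var_assignments`
// records the initializing assignments to `x` and `y`.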
pub struct FlowedMoveData<'a> {
pub move_data: MoveData,
pub dfcx_moves: MoveDataFlow<'a>,
// We could (and maybe should, for efficiency) combine both move
// and assign data flow into one, but this way it's easier to
// distinguish the bits that correspond to moves and assignments.
pub dfcx_assign: AssignDataFlow<'a>
}
/// Index into `MoveData.paths`, used like a pointer
#[deriving(PartialEq)]
pub struct MovePathIndex(uint);
impl MovePathIndex {
fn get(&self) -> uint {
let MovePathIndex(v) = *self; v
}
}
impl Clone for MovePathIndex {
fn clone(&self) -> MovePathIndex {
MovePathIndex(self.get())
}
}
static InvalidMovePathIndex: MovePathIndex =
MovePathIndex(uint::MAX);
/// Index into `MoveData.moves`, used like a pointer
#[deriving(PartialEq)]
pub struct MoveIndex(uint);
impl MoveIndex {
fn get(&self) -> uint {
let MoveIndex(v) = *self; v
}
}
static InvalidMoveIndex: MoveIndex =
MoveIndex(uint::MAX);
pub struct MovePath {
/// Loan path corresponding to this move path
pub loan_path: Rc<LoanPath>,
/// Parent pointer, `InvalidMovePathIndex` if root
pub parent: MovePathIndex,
/// Head of linked list of moves to this path,
/// `InvalidMoveIndex` if not moved
pub first_move: MoveIndex,
/// First node in linked list of children, `InvalidMovePathIndex` if leaf
pub first_child: MovePathIndex,
/// Next node in linked list of parent's children (siblings),
/// `InvalidMovePathIndex` if none.
pub next_sibling: MovePathIndex,
}
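// Illustrative layout (indices hypothetical): for paths `a`, `a.b`, and `a.c`
// added in that order, the tree is encoded as
//
//     paths[0]: `a`    parent: Invalid  first_child: 2        next_sibling: Invalid
//     paths[1]: `a.b`  parent: 0        first_child: Invalid  next_sibling: Invalid
//     paths[2]: `a.c`  parent: 0        first_child: Invalid  next_sibling: 1
//
// Children form a singly linked list headed by `first_child`, newest child
// first (see `move_path` below).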
#[deriving(PartialEq)]
pub enum MoveKind {
Declared, // When declared, variables start out "moved".
MoveExpr, // Expression or binding that moves a variable
MovePat, // By-move binding
Captured // Closure creation that moves a value
}
pub struct Move {
/// Path being moved.
pub path: MovePathIndex,
/// id of node that is doing the move.
pub id: ast::NodeId,
/// Kind of move, for error messages.
pub kind: MoveKind,
/// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
pub next_move: MoveIndex
}
pub struct Assignment {
/// Path being assigned.
pub path: MovePathIndex,
/// id where assignment occurs
pub id: ast::NodeId,
/// span of node where assignment occurs
pub span: Span,
}
#[deriving(Clone)]
pub struct MoveDataFlowOperator;
pub type MoveDataFlow<'a> = DataFlowContext<'a, MoveDataFlowOperator>;
#[deriving(Clone)]
pub struct AssignDataFlowOperator;
pub type AssignDataFlow<'a> = DataFlowContext<'a, AssignDataFlowOperator>;
fn loan_path_is_precise(loan_path: &LoanPath) -> bool {
match *loan_path {
LpVar(_) | LpUpvar(_) => {
true
}
LpExtend(_, _, LpInterior(mc::InteriorElement(_))) => {
// Paths involving element accesses do not refer to a unique
// location, as there is no accurate tracking of the indices.
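            // For example, a move out of `v[i]` cannot safely be killed by a
            // later assignment to `v[j]`, since `i` and `j` may or may not
            // refer to the same element.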
false
}
LpExtend(ref lp_base, _, _) => {
loan_path_is_precise(&**lp_base)
}
}
}
impl MoveData {
pub fn new() -> MoveData {
MoveData {
paths: RefCell::new(Vec::new()),
path_map: RefCell::new(HashMap::new()),
moves: RefCell::new(Vec::new()),
path_assignments: RefCell::new(Vec::new()),
var_assignments: RefCell::new(Vec::new()),
assignee_ids: RefCell::new(HashSet::new()),
}
}
pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath> {
self.paths.borrow().get(index.get()).loan_path.clone()
}
fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
self.paths.borrow().get(index.get()).parent
}
fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
self.paths.borrow().get(index.get()).first_move
}
fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
self.paths.borrow().get(index.get()).first_child
}
fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
self.paths.borrow().get(index.get()).next_sibling
}
fn set_path_first_move(&self,
index: MovePathIndex,
first_move: MoveIndex) {
self.paths.borrow_mut().get_mut(index.get()).first_move = first_move
}
fn set_path_first_child(&self,
index: MovePathIndex,
first_child: MovePathIndex) {
self.paths.borrow_mut().get_mut(index.get()).first_child = first_child
}
fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
//! Type safe indexing operator
self.moves.borrow().get(index.get()).next_move
}
fn is_var_path(&self, index: MovePathIndex) -> bool {
//! True if `index` refers to a variable
self.path_parent(index) == InvalidMovePathIndex
}
pub fn move_path(&self,
tcx: &ty::ctxt,
lp: Rc<LoanPath>) -> MovePathIndex {
/*!
* Returns the existing move path index for `lp`, if any,
* and otherwise adds a new index for `lp` and any of its
* base paths that do not yet have an index.
*/
match self.path_map.borrow().find(&lp) {
Some(&index) => {
return index;
}
None => {}
}
let index = match *lp {
LpVar(..) | LpUpvar(..) => {
let index = MovePathIndex(self.paths.borrow().len());
self.paths.borrow_mut().push(MovePath {
loan_path: lp.clone(),
parent: InvalidMovePathIndex,
first_move: InvalidMoveIndex,
first_child: InvalidMovePathIndex,
next_sibling: InvalidMovePathIndex,
});
index
}
LpExtend(ref base, _, _) => {
let parent_index = self.move_path(tcx, base.clone());
let index = MovePathIndex(self.paths.borrow().len());
let next_sibling = self.path_first_child(parent_index);
self.set_path_first_child(parent_index, index);
self.paths.borrow_mut().push(MovePath {
loan_path: lp.clone(),
parent: parent_index,
first_move: InvalidMoveIndex,
first_child: InvalidMovePathIndex,
next_sibling: next_sibling,
});
index
}
};
debug!("move_path(lp={}, index={:?})",
lp.repr(tcx),
index);
assert_eq!(index.get(), self.paths.borrow().len() - 1);
self.path_map.borrow_mut().insert(lp, index);
return index;
}
fn existing_move_path(&self, lp: &Rc<LoanPath>)
-> Option<MovePathIndex> {
self.path_map.borrow().find_copy(lp)
}
fn existing_base_paths(&self, lp: &Rc<LoanPath>)
-> Vec<MovePathIndex> {
let mut result = vec!();
self.add_existing_base_paths(lp, &mut result);
result
}
fn add_existing_base_paths(&self, lp: &Rc<LoanPath>,
result: &mut Vec<MovePathIndex>) {
/*!
* Adds any existing move path indices for `lp` and any base
* paths of `lp` to `result`, but does not add new move paths<|fim▁hole|> match self.path_map.borrow().find_copy(lp) {
Some(index) => {
self.each_base_path(index, |p| {
result.push(p);
true
});
}
None => {
match **lp {
LpVar(..) | LpUpvar(..) => { }
LpExtend(ref b, _, _) => {
self.add_existing_base_paths(b, result);
}
}
}
}
}
pub fn add_move(&self,
tcx: &ty::ctxt,
lp: Rc<LoanPath>,
id: ast::NodeId,
kind: MoveKind) {
/*!
* Adds a new move entry for a move of `lp` that occurs at
* location `id` with kind `kind`.
*/
debug!("add_move(lp={}, id={:?}, kind={:?})",
lp.repr(tcx),
id,
kind);
let path_index = self.move_path(tcx, lp);
let move_index = MoveIndex(self.moves.borrow().len());
let next_move = self.path_first_move(path_index);
self.set_path_first_move(path_index, move_index);
self.moves.borrow_mut().push(Move {
path: path_index,
id: id,
kind: kind,
next_move: next_move
});
}
pub fn add_assignment(&self,
tcx: &ty::ctxt,
lp: Rc<LoanPath>,
assign_id: ast::NodeId,
span: Span,
assignee_id: ast::NodeId,
mode: euv::MutateMode) {
/*!
* Adds a new record for an assignment to `lp` that occurs at
* location `id` with the given `span`.
*/
debug!("add_assignment(lp={}, assign_id={:?}, assignee_id={:?}",
lp.repr(tcx), assign_id, assignee_id);
let path_index = self.move_path(tcx, lp.clone());
match mode {
euv::Init | euv::JustWrite => {
self.assignee_ids.borrow_mut().insert(assignee_id);
}
euv::WriteAndRead => { }
}
let assignment = Assignment {
path: path_index,
id: assign_id,
span: span,
};
if self.is_var_path(path_index) {
debug!("add_assignment[var](lp={}, assignment={}, path_index={:?})",
lp.repr(tcx), self.var_assignments.borrow().len(), path_index);
self.var_assignments.borrow_mut().push(assignment);
} else {
debug!("add_assignment[path](lp={}, path_index={:?})",
lp.repr(tcx), path_index);
self.path_assignments.borrow_mut().push(assignment);
}
}
fn add_gen_kills(&self,
tcx: &ty::ctxt,
dfcx_moves: &mut MoveDataFlow,
dfcx_assign: &mut AssignDataFlow) {
/*!
* Adds the gen/kills for the various moves and
* assignments into the provided data flow contexts.
* Moves are generated by moves and killed by assignments and
* scoping. Assignments are generated by assignment to variables and
* killed by scoping. See `doc.rs` for more details.
*/
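        // Illustrative sketch (not from the original source):
        //
        //     let mut x = a;   // var assignment: gen the assign bit for `x`
        //     let y = x;       // move: gen the move bit for path `x`
        //     x = b;           // this assignment kills the move bit for `x`
        //
        // so after `x = b` the earlier move of `x` is no longer in effect.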
for (i, move) in self.moves.borrow().iter().enumerate() {
dfcx_moves.add_gen(move.id, i);
}
for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
dfcx_assign.add_gen(assignment.id, i);
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
for assignment in self.path_assignments.borrow().iter() {
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
// Kill all moves related to a variable `x` when it goes out
// of scope:
for path in self.paths.borrow().iter() {
match *path.loan_path {
LpVar(id) => {
let kill_id = tcx.region_maps.var_scope(id);
let path = *self.path_map.borrow().get(&path.loan_path);
self.kill_moves(path, kill_id, dfcx_moves);
}
LpUpvar(ty::UpvarId { var_id: _, closure_expr_id }) => {
let kill_id = closure_to_block(closure_expr_id, tcx);
let path = *self.path_map.borrow().get(&path.loan_path);
self.kill_moves(path, kill_id, dfcx_moves);
}
LpExtend(..) => {}
}
}
// Kill all assignments when the variable goes out of scope:
for (assignment_index, assignment) in
self.var_assignments.borrow().iter().enumerate() {
match *self.path_loan_path(assignment.path) {
LpVar(id) => {
let kill_id = tcx.region_maps.var_scope(id);
dfcx_assign.add_kill(kill_id, assignment_index);
}
LpUpvar(ty::UpvarId { var_id: _, closure_expr_id }) => {
let kill_id = closure_to_block(closure_expr_id, tcx);
dfcx_assign.add_kill(kill_id, assignment_index);
}
LpExtend(..) => {
tcx.sess.bug("var assignment for non var path");
}
}
}
}
fn each_base_path(&self, index: MovePathIndex, f: |MovePathIndex| -> bool)
-> bool {
let mut p = index;
while p != InvalidMovePathIndex {
if !f(p) {
return false;
}
p = self.path_parent(p);
}
return true;
}
fn each_extending_path(&self,
index: MovePathIndex,
f: |MovePathIndex| -> bool)
-> bool {
if !f(index) {
return false;
}
let mut p = self.path_first_child(index);
while p != InvalidMovePathIndex {
if !self.each_extending_path(p, |x| f(x)) {
return false;
}
p = self.path_next_sibling(p);
}
return true;
}
fn each_applicable_move(&self,
index0: MovePathIndex,
f: |MoveIndex| -> bool)
-> bool {
let mut ret = true;
self.each_extending_path(index0, |index| {
let mut p = self.path_first_move(index);
while p != InvalidMoveIndex {
if !f(p) {
ret = false;
break;
}
p = self.move_next_move(p);
}
ret
});
ret
}
fn kill_moves(&self,
path: MovePathIndex,
kill_id: ast::NodeId,
dfcx_moves: &mut MoveDataFlow) {
// We can only perform kills for paths that refer to a unique location,
// since otherwise we may kill a move from one location with an
// assignment referring to another location.
let loan_path = self.path_loan_path(path);
if loan_path_is_precise(&*loan_path) {
self.each_applicable_move(path, |move_index| {
dfcx_moves.add_kill(kill_id, move_index.get());
true
});
}
}
}
impl<'a> FlowedMoveData<'a> {
pub fn new(move_data: MoveData,
tcx: &'a ty::ctxt,
cfg: &cfg::CFG,
id_range: ast_util::IdRange,
decl: &ast::FnDecl,
body: &ast::Block)
-> FlowedMoveData<'a> {
let mut dfcx_moves =
DataFlowContext::new(tcx,
"flowed_move_data_moves",
Some(decl),
cfg,
MoveDataFlowOperator,
id_range,
move_data.moves.borrow().len());
let mut dfcx_assign =
DataFlowContext::new(tcx,
"flowed_move_data_assigns",
Some(decl),
cfg,
AssignDataFlowOperator,
id_range,
move_data.var_assignments.borrow().len());
move_data.add_gen_kills(tcx, &mut dfcx_moves, &mut dfcx_assign);
dfcx_moves.add_kills_from_flow_exits(cfg);
dfcx_assign.add_kills_from_flow_exits(cfg);
dfcx_moves.propagate(cfg, body);
dfcx_assign.propagate(cfg, body);
FlowedMoveData {
move_data: move_data,
dfcx_moves: dfcx_moves,
dfcx_assign: dfcx_assign,
}
}
pub fn each_path_moved_by(&self,
id: ast::NodeId,
f: |&Move, &LoanPath| -> bool)
-> bool {
/*!
* Iterates through each path moved by `id`
*/
self.dfcx_moves.each_gen_bit(id, |index| {
let move = self.move_data.moves.borrow();
let move = move.get(index);
let moved_path = move.path;
f(move, &*self.move_data.path_loan_path(moved_path))
})
}
pub fn kind_of_move_of_path(&self,
id: ast::NodeId,
loan_path: &Rc<LoanPath>)
-> Option<MoveKind> {
//! Returns the kind of a move of `loan_path` by `id`, if one exists.
let mut ret = None;
for loan_path_index in self.move_data.path_map.borrow().find(&*loan_path).iter() {
self.dfcx_moves.each_gen_bit(id, |move_index| {
let move = self.move_data.moves.borrow();
let move = move.get(move_index);
if move.path == **loan_path_index {
ret = Some(move.kind);
false
} else {
true
}
});
}
ret
}
pub fn each_move_of(&self,
id: ast::NodeId,
loan_path: &Rc<LoanPath>,
f: |&Move, &LoanPath| -> bool)
-> bool {
/*!
* Iterates through each move of `loan_path` (or some base path
* of `loan_path`) that *may* have occurred on entry to `id` without
* an intervening assignment. In other words, any moves that
* would invalidate a reference to `loan_path` at location `id`.
*/
// Bad scenarios:
//
// 1. Move of `a.b.c`, use of `a.b.c`
// 2. Move of `a.b.c`, use of `a.b.c.d`
// 3. Move of `a.b.c`, use of `a` or `a.b`
//
// OK scenario:
//
// 4. move of `a.b.c`, use of `a.b.d`
let base_indices = self.move_data.existing_base_paths(loan_path);
if base_indices.is_empty() {
return true;
}
let opt_loan_path_index = self.move_data.existing_move_path(loan_path);
let mut ret = true;
self.dfcx_moves.each_bit_on_entry(id, |index| {
let move = self.move_data.moves.borrow();
let move = move.get(index);
let moved_path = move.path;
if base_indices.iter().any(|x| x == &moved_path) {
// Scenario 1 or 2: `loan_path` or some base path of
// `loan_path` was moved.
if !f(move, &*self.move_data.path_loan_path(moved_path)) {
ret = false;
}
} else {
for &loan_path_index in opt_loan_path_index.iter() {
let cont = self.move_data.each_base_path(moved_path, |p| {
if p == loan_path_index {
// Scenario 3: some extension of `loan_path`
// was moved
f(move, &*self.move_data.path_loan_path(moved_path))
} else {
true
}
});
if !cont { ret = false; break }
}
}
ret
})
}
pub fn is_assignee(&self,
id: ast::NodeId)
-> bool {
//! True if `id` is the id of the LHS of an assignment
self.move_data.assignee_ids.borrow().iter().any(|x| x == &id)
}
pub fn each_assignment_of(&self,
id: ast::NodeId,
loan_path: &Rc<LoanPath>,
f: |&Assignment| -> bool)
-> bool {
/*!
* Iterates through every assignment to `loan_path` that
* may have occurred on entry to `id`. `loan_path` must be
* a single variable.
*/
let loan_path_index = {
match self.move_data.existing_move_path(loan_path) {
Some(i) => i,
None => {
// if there were any assignments, it'd have an index
return true;
}
}
};
self.dfcx_assign.each_bit_on_entry(id, |index| {
let assignment = self.move_data.var_assignments.borrow();
let assignment = assignment.get(index);
if assignment.path == loan_path_index && !f(assignment) {
false
} else {
true
}
})
}
}
impl BitwiseOperator for MoveDataFlowOperator {
#[inline]
fn join(&self, succ: uint, pred: uint) -> uint {
succ | pred // moves from both preds are in scope
}
}
impl DataFlowOperator for MoveDataFlowOperator {
#[inline]
fn initial_value(&self) -> bool {
false // no loans in scope by default
}
}
impl BitwiseOperator for AssignDataFlowOperator {
#[inline]
fn join(&self, succ: uint, pred: uint) -> uint {
succ | pred // moves from both preds are in scope
}
}
impl DataFlowOperator for AssignDataFlowOperator {
#[inline]
fn initial_value(&self) -> bool {
false // no assignments in scope by default
}
}<|fim▁end|> | */
|
<|file_name|>test_extensions.py<|end_file_name|><|fim▁begin|>import argparse
import sys
import pytest
from pyscaffold import extensions
from pyscaffold.exceptions import ErrorLoadingExtension
from .extensions import __name__ as test_extensions_pkg
from .extensions.helpers import make_extension
if sys.version_info[:2] >= (3, 8):
# TODO: Import directly (no need for conditional) when `python_requires = >= 3.8`
from importlib.metadata import EntryPoint # pragma: no cover
else:
from importlib_metadata import EntryPoint # pragma: no cover
def test_extension():
parser = argparse.ArgumentParser()
extension = make_extension("MyExtension")
extension.augment_cli(parser)
opts = vars(parser.parse_args(["--my-extension"]))
assert opts["extensions"] == [extension]
def test_extension_append():
parser = argparse.ArgumentParser()
extension1 = make_extension("MyExtension1")
extension2 = make_extension("MyExtension2")
parser.set_defaults(extensions=[extension1])
<|fim▁hole|>
def test_include():
parser = argparse.ArgumentParser()
my_extensions = [make_extension(f"MyExtension{n}") for n in range(7)]
parser.add_argument("--opt", nargs=0, action=extensions.include(*my_extensions))
opts = vars(parser.parse_args(["--opt"]))
assert opts["extensions"] == my_extensions
def test_store_with():
parser = argparse.ArgumentParser()
my_extensions = [make_extension(f"MyExtension{n}") for n in range(7)]
parser.add_argument("--opt", action=extensions.store_with(*my_extensions))
opts = vars(parser.parse_args(["--opt", "42"]))
assert opts["extensions"] == my_extensions
assert opts["opt"] == "42"
def test_store_with_type():
parser = argparse.ArgumentParser()
my_extensions = [make_extension(f"MyExtension{n}") for n in range(7)]
parser.add_argument("--opt", type=int, action=extensions.store_with(*my_extensions))
opts = vars(parser.parse_args(["--opt", "42"]))
assert opts["extensions"] == my_extensions
assert opts["opt"] == 42
def test_load_from_entry_point__error():
# This module does not exist, so Python will have some trouble loading it
# EntryPoint(name, value, group)
fake = EntryPoint("fake", "pyscaffoldext.SOOOOO___fake___:Fake", "pyscaffold.cli")
with pytest.raises(ErrorLoadingExtension):
extensions.load_from_entry_point(fake)
def test_load_from_entry_point__old_api():
# The following module/class exists but uses an old version of the extensions API
# therefore, we should have a meaningful error when trying to load it.
entry = f"{test_extensions_pkg}.incompatible_v3_api_fake_extension:FakeExtension"
fake = EntryPoint("fake", entry, "pyscaffold.cli")
with pytest.raises(ErrorLoadingExtension):
extensions.load_from_entry_point(fake)
def test_iterate_entry_points():
ext_iter = extensions.iterate_entry_points()
assert hasattr(ext_iter, "__iter__")
ext_list = list(ext_iter)
name_list = [e.name for e in ext_list]
for ext in ("cirrus", "pre_commit", "no_skeleton", "namespace", "venv"):
assert ext in name_list
def test_list_from_entry_points():
# Should return a list with all the extensions registered in the entrypoints
ext_list = extensions.list_from_entry_points()
orig_len = len(ext_list)
assert all(isinstance(e, extensions.Extension) for e in ext_list)
name_list = [e.name for e in ext_list]
for ext in ("cirrus", "pre_commit", "no_skeleton", "namespace", "venv"):
assert ext in name_list
# a filtering function can be passed to avoid loading extensions that are not needed
ext_list = extensions.list_from_entry_points(filtering=lambda e: e.name != "cirrus")
name_list = [e.name for e in ext_list]
assert len(ext_list) == orig_len - 1
assert "cirrus" not in name_list<|fim▁end|> | extension2.augment_cli(parser)
opts = vars(parser.parse_args(["--my-extension2"]))
assert opts["extensions"] == [extension1, extension2]
|
<|file_name|>thrift_build.rs<|end_file_name|><|fim▁begin|>// @generated by autocargo
use std::env;
use std::fs;
use std::path::Path;
use thrift_compiler::Config;
#[rustfmt::skip]
fn main() {
// Rerun if this gets rewritten.
println!("cargo:rerun-if-changed=thrift_build.rs");
let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR env not provided");
let out_dir: &Path = out_dir.as_ref();
fs::write(
out_dir.join("cratemap"),
"eden_config crate",
).expect("Failed to write cratemap");
let conf = {
let mut conf = Config::from_env().expect("Failed to instantiate thrift_compiler::Config");
let path_from_manifest_to_base: &Path = "../../..".as_ref();
let cargo_manifest_dir =
env::var_os("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not provided");
let cargo_manifest_dir: &Path = cargo_manifest_dir.as_ref();
let base_path = cargo_manifest_dir
.join(path_from_manifest_to_base)
.canonicalize()
.expect("Failed to canonicalize base_path");<|fim▁hole|> #[cfg(windows)]
let base_path = Path::new(
base_path
.as_path()
.to_string_lossy()
.trim_start_matches(r"\\?\"),
)
.to_path_buf();
conf.base_path(base_path);
let options = "";
if !options.is_empty() {
conf.options(options);
}
let include_srcs = vec![
];
conf.include_srcs(include_srcs);
conf
};
conf
.run(&[
"eden_config.thrift"
])
.expect("Failed while running thrift compilation");
}<|fim▁end|> | // TODO: replace canonicalize() with std::path::absolute() when
// https://github.com/rust-lang/rust/pull/91673 is available (~Rust 1.60)
// and remove this block. |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod fast_math;<|fim▁hole|>mod tests;<|fim▁end|> |
#[allow(unused_imports)]
#[cfg(test)] |
<|file_name|>behaviour.js<|end_file_name|><|fim▁begin|>/*
Behaviour v1.1 by Ben Nolan, June 2005. Based largely on the work
of Simon Willison (see comments by Simon below).
Small fixes by J.Dobrowolski for Front Accounting May 2008
Description:
Uses css selectors to apply javascript behaviours to enable
unobtrusive javascript in html documents.
Usage:
var myrules = {
'b.someclass' : function(element){
element.onclick = function(){
alert(this.innerHTML);
}
},
'#someid u' : function(element){
element.onmouseover = function(){
this.innerHTML = "BLAH!";
}
}
};
Behaviour.register(myrules);
// Call Behaviour.apply() to re-apply the rules (if you
// update the dom, etc).
License:
This file is entirely BSD licensed.
More information:
http://ripcord.co.nz/behaviour/
*/
var Behaviour = {
list : new Array,
register : function(sheet){
Behaviour.list.push(sheet);
},
start : function(){
Behaviour.addLoadEvent(function(){
Behaviour.apply();
});
},
apply : function(){
		for (var h=0,sheet;sheet=Behaviour.list[h];h++){
			for (var selector in sheet){
				var sels = selector.split(',');
				for (var n = 0; n < sels.length; n++) {
					var list = document.getElementsBySelector(sels[n]);
					if (!list){
						continue;
					}
					for (var i=0,element;element=list[i];i++){
sheet[selector](element);
}
}
}
}
},
addLoadEvent : function(func){
var oldonload = window.onload;
if (typeof window.onload != 'function') {
window.onload = func;
} else {
window.onload = function() {
oldonload();
func();
}
}
}
}
Behaviour.start();
/*
The following code is Copyright (C) Simon Willison 2004.
document.getElementsBySelector(selector)
- returns an array of element objects from the current document
matching the CSS selector. Selectors can contain element names,
class names and ids and can be nested. For example:
   elements = document.getElementsBySelector('div#main p a.external')
Will return an array of all 'a' elements with 'external' in their
class attribute that are contained inside 'p' elements that are
contained inside the 'div' element which has id="main"
New in version 0.4: Support for CSS2 and CSS3 attribute selectors:
See http://www.w3.org/TR/css3-selectors/#attribute-selectors
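   For example (illustrative):

       elements = document.getElementsBySelector('input[name^="user"]')

   returns all input elements whose name attribute starts with "user".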
Version 0.4 - Simon Willison, March 25th 2003<|fim▁hole|> -- Works in Phoenix 0.5, Mozilla 1.3, Opera 7, Internet Explorer 6, Internet Explorer 5 on Windows
   -- Opera 7 fails on the '$=' (ends with) attribute selector (see the '$' case below)
*/
function getAllChildren(e) {
// Returns all children of element. Workaround required for IE5/Windows. Ugh.
return e.all ? e.all : e.getElementsByTagName('*');
}
document.getElementsBySelector = function(selector) {
// Attempt to fail gracefully in lesser browsers
if (!document.getElementsByTagName) {
return new Array();
}
// Split selector in to tokens
var tokens = selector.split(' ');
var currentContext = new Array(document);
for (var i = 0; i < tokens.length; i++) {
		var token = tokens[i].replace(/^\s+/,'').replace(/\s+$/,'');
if (token.indexOf('#') > -1) {
// Token is an ID selector
var bits = token.split('#');
var tagName = bits[0];
var id = bits[1];
var element = document.getElementById(id);
if (tagName && element.nodeName.toLowerCase() != tagName) {
// tag with that ID not found, return false
return new Array();
}
// Set currentContext to contain just this element
currentContext = new Array(element);
continue; // Skip to next token
}
if (token.indexOf('.') > -1) {
// Token contains a class selector
var bits = token.split('.');
var tagName = bits[0];
var className = bits[1];
if (!tagName) {
tagName = '*';
}
// Get elements matching tag, filter them for class selector
var found = new Array;
var foundCount = 0;
for (var h = 0; h < currentContext.length; h++) {
var elements;
if (tagName == '*') {
elements = getAllChildren(currentContext[h]);
} else {
elements = currentContext[h].getElementsByTagName(tagName);
}
for (var j = 0; j < elements.length; j++) {
found[foundCount++] = elements[j];
}
}
currentContext = new Array;
var currentContextIndex = 0;
for (var k = 0; k < found.length; k++) {
if (found[k].className && found[k].className.match(new RegExp('\\b'+className+'\\b'))) {
currentContext[currentContextIndex++] = found[k];
}
}
continue; // Skip to next token
}
// Code to deal with attribute selectors
/* Original reg expression /^(\w*)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
was replaced by new RegExp() cuz compressor fault */
if (token.match(new RegExp('^(\\w*)\\[(\\w+)([=~\\|\\^\\$\\*]?)=?"?([^\\]"]*)"?\\]$'))) {
var tagName = RegExp.$1;
var attrName = RegExp.$2;
var attrOperator = RegExp.$3;
var attrValue = RegExp.$4;
if (!tagName) {
tagName = '*';
}
// Grab all of the tagName elements within current context
var found = new Array;
var foundCount = 0;
for (var h = 0; h < currentContext.length; h++) {
var elements;
if (tagName == '*') {
elements = getAllChildren(currentContext[h]);
} else {
elements = currentContext[h].getElementsByTagName(tagName);
}
for (var j = 0; j < elements.length; j++) {
found[foundCount++] = elements[j];
}
}
currentContext = new Array;
var currentContextIndex = 0;
var checkFunction; // This function will be used to filter the elements
switch (attrOperator) {
case '=': // Equality
checkFunction = function(e) { return (e.getAttribute(attrName) == attrValue); };
break;
				case '~': // Match one of space separated words
checkFunction = function(e) { var a=e.getAttribute(attrName); return (a && a.match(new RegExp('\\b'+attrValue+'\\b'))); };
break;
case '|': // Match start with value followed by optional hyphen
checkFunction = function(e) { var a=e.getAttribute(attrName); return (a && a.match(new RegExp('^'+attrValue+'-?'))); };
break;
case '^': // Match starts with value
checkFunction = function(e) { var a=e.getAttribute(attrName); return (a && a.indexOf(attrValue) == 0); };
break;
case '$': // Match ends with value - fails with "Warning" in Opera 7
checkFunction = function(e) { var a=e.getAttribute(attrName); return (a && a.lastIndexOf(attrValue) == e.getAttribute(attrName).length - attrValue.length); };
break;
case '*': // Match contains value
checkFunction = function(e) { var a=e.getAttribute(attrName); return (a && a.indexOf(attrValue) > -1); };
break;
default :
// Just test for existence of attribute
checkFunction = function(e) { return e.getAttribute(attrName); };
}
currentContext = new Array;
var currentContextIndex = 0;
for (var k = 0; k < found.length; k++) {
if (checkFunction(found[k])) {
currentContext[currentContextIndex++] = found[k];
}
}
// alert('Attribute Selector: '+tagName+' '+attrName+' '+attrOperator+' '+attrValue);
continue; // Skip to next token
}
if (!currentContext[0]){
return;
}
// If we get here, token is JUST an element (not a class or ID selector)
tagName = token;
var found = new Array;
var foundCount = 0;
for (var h = 0; h < currentContext.length; h++) {
var elements = currentContext[h].getElementsByTagName(tagName);
for (var j = 0; j < elements.length; j++) {
found[foundCount++] = elements[j];
}
}
currentContext = found;
}
return currentContext;
}
/* That revolting regular expression explained
/^(\w+)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
\---/ \---/\-------------/ \-------/
| | | |
| | | The value
| | ~,|,^,$,* or =
| Attribute
Tag
*/<|fim▁end|> | |
<|file_name|>campaigns.py<|end_file_name|><|fim▁begin|># Copyright (c) 2020, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR<|fim▁hole|># CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from rest_framework import generics
from ..mixins import CampaignMixin
from .serializers import CampaignSerializer
LOGGER = logging.getLogger(__name__)
class CampaignAPIView(CampaignMixin, generics.RetrieveDestroyAPIView):
"""
Retrieves a campaign
Retrieves the details of a ``Campaign``.
**Tags**: survey
**Examples**
.. code-block:: http
GET /api/cowork/campaign/best-practices/ HTTP/1.1
responds
.. code-block:: json
{
"slug": "best-practices",
"account": "envconnect",
"title": "Assessment on Best Practices",
"active": true,
"quizz_mode": false,
"questions": [
{
"path": "/product-design",
"title": "Product Design",
"unit": "assessment-choices",
},
{
"path": "/packaging-design",
"title": "Packaging Design",
"unit": "assessment-choices",
}
]
}
"""
serializer_class = CampaignSerializer
def get_object(self):
return self.campaign
def delete(self, request, *args, **kwargs):
"""
Deletes a campaign
Removes a ``Campaign`` and all associated ``Sample``
from the database.
**Tags**: survey
**Examples**
.. code-block:: http
DELETE /api/cowork/campaign/best-practices/ HTTP/1.1
"""
#pylint:disable=useless-super-delegation
return super(CampaignAPIView, self).delete(request, *args, **kwargs)<|fim▁end|> | # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""PySnapSync client.
This package implements the pysnapsync client.
The package exports the following modules:
o `snapsync` main backup script.
See the module doc strings for more information.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division<|fim▁hole|>
from pysnapsync.client import snapsync
__all__ = [x.__name__.split(".")[-1] for x in [snapsync]]<|fim▁end|> | |
<|file_name|>DictLearner.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Thu Aug 20 12:01:18 2015
@author: Eric Dodds
Abstract dictionary learner.
Includes gradient descent on MSE energy function as a default learning method.
"""
import numpy as np
import pickle
# the try/except block avoids an issue with the cluster
try:
import matplotlib.pyplot as plt
from scipy import ndimage
from scipy.stats import skew
except ImportError:
print('Plotting and modulation plot unavailable.')
import StimSet
class DictLearner(object):
"""Abstract base class for dictionary learner objects. Provides some
default functions for loading data, plotting network properties,
and learning."""
def __init__(self, data, learnrate, nunits,
paramfile=None, theta=0, moving_avg_rate=0.001,
stimshape=None, datatype="image", batch_size=100, pca=None,
store_every=1):
self.nunits = nunits
self.batch_size = batch_size
self.learnrate = learnrate
self.paramfile = paramfile
<|fim▁hole|>
self._load_stims(data, datatype, stimshape, pca)
self.Q = self.rand_dict()
def initialize_stats(self):
nunits = self.nunits
self.corrmatrix_ave = np.zeros((nunits, nunits))
self.L0hist = np.array([])
self.L1hist = np.array([])
self.L2hist = np.array([])
self.L0acts = np.zeros(nunits)
self.L1acts = np.zeros(nunits)
self.L2acts = np.zeros(nunits)
self.errorhist = np.array([])
self.meanacts = np.zeros_like(self.L0acts)
def _load_stims(self, data, datatype, stimshape, pca):
if isinstance(data, StimSet.StimSet):
self.stims = data
elif datatype == "image" and pca is not None:
stimshape = stimshape or (16, 16)
self.stims = StimSet.PCvecSet(data, stimshape, pca,
self.batch_size)
elif datatype == "image":
stimshape = stimshape or (16, 16)
self.stims = StimSet.ImageSet(data, batch_size=self.batch_size,
buffer=20, stimshape=stimshape)
elif datatype == "spectro" and pca is not None:
if stimshape is None:
raise Exception("When using PC representations, \
you need to provide the shape of the original stimuli.")
self.stims = StimSet.SpectroPCSet(data, stimshape, pca,
self.batch_size)
elif datatype == "waveform" and pca is not None:
self.stims = StimSet.WaveformPCSet(data, stimshape, pca,
self.batch_size)
else:
raise ValueError("Specified data type not currently supported.")
def infer(self, data, infplot):
raise NotImplementedError
def test_inference(self, niter=None):
"""Show perfomance of infer() on a random batch."""
temp = self.niter
self.niter = niter or self.niter
X = self.stims.rand_stim()
s = self.infer(X, infplot=True)[0]
self.niter = temp
print("Final SNR: " + str(self.snr(X, s)))
return s
def generate_model(self, acts):
"""Reconstruct inputs using linear generative model."""
return np.dot(self.Q.T, acts)
def compute_errors(self, acts, X):
"""Given a batch of data and activities, compute the squared error between
the generative model and the original data.
Returns vector of mean squared errors."""
diffs = X - self.generate_model(acts)
return np.mean(diffs**2, axis=0)/np.mean(X**2, axis=0)
def smoothed_error(self, window_size=1000, start=0, end=-1):
"""Plots a moving average of the error history
with the given averaging window."""
window = np.ones(int(window_size))/float(window_size)
smoothed = np.convolve(self.errorhist[start:end], window, 'valid')
plt.plot(smoothed)
def progress_plot(self, window_size=1000, norm=1, start=0, end=-1):
"""Plots a moving average of the error and activity history
with the given averaging window."""
window = np.ones(int(window_size))/float(window_size)
smoothederror = np.convolve(self.errorhist[start:end], window, 'valid')
if norm == 2:
acthist = self.L2hist
elif norm == 0:
acthist = self.L0hist
else:
acthist = self.L1hist
smoothedactivity = np.convolve(acthist[start:end], window, 'valid')
plt.plot(smoothederror, 'b', smoothedactivity, 'g')
def snr(self, data, acts):
"""Returns signal-noise ratio for the given data and coefficients."""
sig = np.var(data, axis=0)
noise = np.var(data - self.Q.T.dot(acts), axis=0)
return np.mean(sig/noise)
def learn(self, data, coeffs, normalize=True):
"""Adjust dictionary elements according to gradient descent on the
mean-squared error energy function, optionally with an extra term to
increase orthogonality between basis functions. This term is
multiplied by the parameter theta.
Returns the mean-squared error."""
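        # A sketch of the step below: with residual R = X^T - A^T Q (A being
        # the coefficient matrix), the MSE gradient update is
        # Q <- Q + learnrate * A R.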
R = data.T - np.dot(coeffs.T, self.Q)
self.Q = self.Q + self.learnrate*np.dot(coeffs, R)
if self.theta != 0:
# Notice this is calculated using the Q after the mse learning rule
thetaterm = (self.Q - np.dot(self.Q, np.dot(self.Q.T, self.Q)))
self.Q = self.Q + self.theta*thetaterm
if normalize:
# force dictionary elements to be normalized
normmatrix = np.diag(1./np.sqrt(np.sum(self.Q*self.Q, 1)))
self.Q = normmatrix.dot(self.Q)
return np.mean(R**2)
def run(self, ntrials=1000, batch_size=None,
rate_decay=None, normalize=True):
batch_size = batch_size or self.stims.batch_size
for trial in range(ntrials):
X = self.stims.rand_stim(batch_size=batch_size)
acts, _, _ = self.infer(X)
thiserror = self.learn(X, acts, normalize)
if trial % self.store_every == 0:
if trial % 50 == 0 or self.store_every > 50:
print(trial)
self.store_statistics(acts, thiserror, batch_size)
if (trial % 1000 == 0 or trial+1 == ntrials) and trial != 0:
try:
print("Saving progress to " + self.paramfile)
self.save()
except (ValueError, TypeError) as er:
print('Failed to save parameters. ', er)
if rate_decay is not None:
self.adjust_rates(rate_decay)
def store_statistics(self, acts, thiserror, batch_size=None,
center_corr=True):
batch_size = batch_size or self.batch_size
self.L2acts = ((1-self.moving_avg_rate)*self.L2acts +
self.moving_avg_rate*(acts**2).mean(1))
self.L1acts = ((1-self.moving_avg_rate)*self.L1acts +
self.moving_avg_rate*np.abs(acts).mean(1))
L0means = np.mean(acts != 0, axis=1)
self.L0acts = ((1-self.moving_avg_rate)*self.L0acts +
self.moving_avg_rate*L0means)
means = acts.mean(1)
self.meanacts = ((1-self.moving_avg_rate)*self.meanacts +
self.moving_avg_rate*means)
self.errorhist = np.append(self.errorhist, thiserror)
self.L0hist = np.append(self.L0hist, np.mean(acts != 0))
self.L1hist = np.append(self.L1hist, np.mean(np.abs(acts)))
self.L2hist = np.append(self.L2hist, np.mean(acts**2))
return self.compute_corrmatrix(acts, thiserror,
means, center_corr, batch_size)
def compute_corrmatrix(self, acts, thiserror, means,
center_corr=True, batch_size=None):
batch_size = batch_size or self.batch_size
if center_corr:
actdevs = acts-means[:, np.newaxis]
corrmatrix = (actdevs).dot(actdevs.T)/batch_size
else:
corrmatrix = acts.dot(acts.T)/self.batch_size
self.corrmatrix_ave = ((1-self.moving_avg_rate)*self.corrmatrix_ave +
self.moving_avg_rate*corrmatrix)
return corrmatrix
def skewflip(self):
"""Set each dictionary element to minus itself if the skewness
of its linear projections on a large batch of data is negative."""
dots = np.dot(self.Q, self.stims.rand_stim(batch_size=10000))
mask = skew(dots, axis=1) < 0
self.Q[mask] = - self.Q[mask]
def show_dict(self, stimset=None, cmap='RdBu_r', subset=None,
layout='sqrt', savestr=None):
"""Plot an array of tiled dictionary elements.
The 0th element is in the top right."""
stimset = stimset or self.stims
if subset is not None:
indices = np.random.choice(self.Q.shape[0], subset)
Qs = self.Q[np.sort(indices)]
else:
Qs = self.Q
array = stimset.stimarray(Qs[::-1], layout=layout)
plt.figure()
arrayplot = plt.imshow(array, interpolation='nearest', cmap=cmap,
aspect='auto', origin='lower')
plt.axis('off')
plt.colorbar()
if savestr is not None:
plt.savefig(savestr, bbox_inches='tight')
return arrayplot
def tiled_dict(self, cmap='RdBu_r', layout='sqrt',
aspect='auto', savestr=None):
"""Nicer dictionary visualization.
Creates a matplotlib axis for each element, so very slow."""
self.stims.tiledplot(self.Q, cmap=cmap, layout=layout,
aspect=aspect, savestr=savestr)
def show_element(self, index, cmap='jet', labels=None, savestr=None):
elem = self.stims.stim_for_display(self.Q[index])
plt.figure()
plt.imshow(elem.T, interpolation='nearest', cmap=cmap,
aspect='auto', origin='lower')
if labels is None:
plt.axis('off')
else:
plt.colorbar()
if savestr is not None:
plt.savefig(savestr, bbox_inches='tight')
def rand_dict(self):
Q = np.random.randn(self.nunits, self.stims.datasize)
return (np.diag(1/np.sqrt(np.sum(Q**2, 1)))).dot(Q)
def adjust_rates(self, factor):
"""Multiply the learning rate by the given factor."""
self.learnrate = factor*self.learnrate
self.theta = factor*self.theta
def modulation_plot(self, usepeaks=False, **kwargs):
modcentroids = np.zeros((self.Q.shape[0], 2))
for ii in range(self.Q.shape[0]):
modspec = self.stims.modspec(self.Q[ii])
if usepeaks:
modcentroids[ii, 0] = np.argmax(np.mean(modspec, axis=1))
modcentroids[ii, 1] = np.argmax(np.mean(modspec, axis=0))
else:
modcentroids[ii] = ndimage.measurements.center_of_mass(modspec)
plt.scatter(modcentroids[:, 0], modcentroids[:, 1])
plt.title('Center of mass of modulation power spectrum \
of each dictionary element')
        # kwargs is a plain dict, so use item access; the original attribute
        # access (kwargs.xlabel) always raised and the labels were never set.
        if 'xlabel' in kwargs:
            plt.xlabel(kwargs['xlabel'])
        if 'ylabel' in kwargs:
            plt.ylabel(kwargs['ylabel'])
def sort_dict(self, batch_size=None,
plot=False, allstims=True, savestr=None):
"""Sorts the RFs in order by their usage on a batch. Default batch size
is 10 times the stored batch size. Usage means 1 for each stimulus for
which the element was used and 0 for the other stimuli, averaged over
stimuli."""
if allstims:
testX = self.stims.data.T
else:
batch_size = batch_size or 10*self.batch_size
testX = self.stims.rand_stim(batch_size)
means = np.mean(self.infer(testX)[0] != 0, axis=1)
sorter = np.argsort(means)
self.sort(means, sorter, plot, savestr)
return means[sorter]
def fast_sort(self, L1=False, plot=False, savestr=None):
"""Sorts RFs in order by moving average usage."""
if L1:
usages = self.L1acts
else:
usages = self.L0acts
sorter = np.argsort(usages)
self.sort(usages, sorter, plot, savestr)
return usages[sorter]
def sort(self, usages, sorter, plot=False, savestr=None):
self.Q = self.Q[sorter]
self.L0acts = self.L0acts[sorter]
self.L1acts = self.L1acts[sorter]
self.L2acts = self.L2acts[sorter]
self.meanacts = self.meanacts[sorter]
self.corrmatrix_ave = self.corrmatrix_ave[sorter]
self.corrmatrix_ave = self.corrmatrix_ave.T[sorter].T
if plot:
plt.figure()
plt.plot(usages[sorter])
plt.title('L0 Usage')
plt.xlabel('Dictionary index')
plt.ylabel('Fraction of stimuli')
if savestr is not None:
plt.savefig(savestr, format='png', bbox_inches='tight')
def load(self, filename=None):
if filename is None:
filename = self.paramfile
self.paramfile = filename
with open(filename, 'rb') as f:
self.Q, params, histories = pickle.load(f)
self.set_histories(histories)
self.set_params(params)
def set_params(self, params):
for key, val in params.items():
try:
getattr(self, key)
except AttributeError:
print('Unexpected parameter passed: ' + key)
setattr(self, key, val)
def get_param_list(self):
raise NotImplementedError
def save(self, filename=None):
filename = filename or self.paramfile
if filename is None:
raise ValueError("You need to input a filename.")
self.paramfile = filename
params = self.get_param_list()
histories = self.get_histories()
with open(filename, 'wb') as f:
pickle.dump([self.Q, params, histories], f)
def get_histories(self):
return {'errorhist': self.errorhist,
'L0hist': self.L0hist,
'L1hist': self.L1hist,
'L2hist': self.L2hist,
'corrmatrix_ave': self.corrmatrix_ave,
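                # 'L1' duplicates 'L1hist'; presumably kept so older saved
                # parameter files still load.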
'L1': self.L1hist,
'L0acts': self.L0acts,
'L1acts': self.L1acts,
'L2acts': self.L2acts,
'meanacts': self.meanacts}
def set_histories(self, histories):
if not isinstance(histories, dict):
self._old_set_histories(histories)
return
self.errorhist = histories['errorhist']
self.L0hist = histories['L0hist']
self.L1hist = histories['L1hist']
self.L2hist = histories['L2hist']
self.corrmatrix_ave = histories['corrmatrix_ave']
self.L1hist = histories['L1']
self.L0acts = histories['L0acts']
self.L1acts = histories['L1acts']
self.L2acts = histories['L2acts']
self.meanacts = histories['meanacts']
def _old_get_histories(self):
return (self.errorhist, self.meanacts, self.L0acts, self.L0hist,
self.L1acts, self.L1hist, self.L2hist, self.L2acts,
self.corrmatrix_ave)
def _old_set_histories(self, histories):
(self.errorhist, self.meanacts, self.L0acts, self.L0hist,
self.L1acts, self.L1hist, self.L2hist, self.L2acts,
self.corrmatrix_ave) = histories<|fim▁end|> | self.theta = theta
self.moving_avg_rate = moving_avg_rate
self.initialize_stats()
self.store_every = store_every
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2015-2020 The Wazo Authors (see the AUTHORS file)
# SPDX-License-Identifier: GPL-3.0-or-later
from wazo_auth.interfaces import (
BaseAuthenticationBackend,
BaseMetadata,
DEFAULT_XIVO_UUID,<|fim▁hole|>__all__ = [
'BaseAuthenticationBackend',
'BaseMetadata',
'DEFAULT_XIVO_UUID',
]<|fim▁end|> | )
|
<|file_name|>output.d.ts<|end_file_name|><|fim▁begin|>import { Data } from 'fake-types-lib-2/data';
<|fim▁hole|>
export {};<|fim▁end|> | export declare type MyData = Data | string; |
<|file_name|>histogram.ts<|end_file_name|><|fim▁begin|>import {JobState} from "../api/prow";
export class JobHistogram {
public start: number;
public end: number;
private data: JobSample[];
constructor() {
this.data = [];
this.start = Number.MAX_SAFE_INTEGER;
this.end = 0;
}
// add adds a sample to the histogram, filtering states that didn't result in success or clear
// failure, and updating the range of the histogram data.
public add(sample: JobSample) {
if (!(sample.state === "success" || sample.state === "failure" || sample.state === "error")) {
return;
}
if (sample.start < this.start) {
this.start = sample.start;
}
if (sample.start > this.end) {
this.end = sample.start;
}
this.data.push(sample);<|fim▁hole|> }
// buckets assigns all samples between start and end into cols buckets, sorted by
// start timestamp, while the buckets themselves are sorted by duration.
public buckets(start: number, end: number, cols: number): JobBuckets {
this.data.sort((s1, s2) => s1.start - s2.start);
const buckets: JobSample[][] = [[]];
const stride = (end - start) / cols;
let next = start + stride;
let max = 0;
this.data.forEach((sample) => {
if (sample.start < start || sample.start > end) {
return;
}
if (sample.duration > max) {
max = sample.duration;
}
if (sample.start < next || sample.start === end) {
buckets[buckets.length - 1].push(sample);
return;
}
const bucket = buckets[buckets.length - 1];
bucket.sort((s1, s2) => s1.duration - s2.duration);
next = next + stride;
while (next < sample.start) {
buckets.push([]);
next = next + stride;
}
buckets.push([sample]);
});
if (buckets.length > 0) {
const lastBucket = buckets[buckets.length - 1];
lastBucket.sort((s1, s2) => s1.duration - s2.duration);
}
while (buckets.length < cols) {
buckets.push([]);
}
return new JobBuckets(buckets, start, end, max);
}
// length returns the number of samples in the histogram.
public get length(): number {
return this.data.length;
}
}
export class JobSample {
constructor(public start: number,
public duration: number,
public state: JobState,
public row: number) {}
}
export class JobBuckets {
constructor(public data: JobSample[][],
public start: number,
public end: number,
public max: number) { }
public limitMaximum(maximum: number) {
if (this.max > maximum) {
this.max = maximum;
}
}
public linearChunks(bucket: JobSample[], rows: number): JobSample[][] {
const stride = Math.ceil((this.max) / rows);
const chunks: JobSample[][] = [];
chunks[0] = [];
let next = stride;
for (const sample of bucket) {
if (sample.duration <= next) {
chunks[chunks.length - 1].push(sample);
continue;
}
next = next + stride;
while (next < sample.duration) {
if (chunks.length > (rows - 1)) {
break;
}
chunks.push([]);
next = next + stride;
}
if (chunks.length > (rows - 1)) {
chunks[chunks.length - 1].push(sample);
} else {
chunks.push([sample]);
}
}
if (chunks.length > rows) {
throw new Error("invalid rows");
}
return chunks;
}
}<|fim▁end|> | |
<|file_name|>settingsView.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python
# -*-coding:Utf-8 -*
import os
import math
def GA_settings():
"""Provides the view for the user setting of the GA experiments and returns the settings set"""
options = {}
os.system("clear")
print('===== OPTIONS =====\n')
preset = int(raw_input(
"PRESET\n"
"Use preset ?\n"
"\n\n-> 1: Source based preset\n"
"\n-> 2: I WANT TO SET BY MYSELF\n"
))
os.system("clear")
if preset == 1:
options["iterations"] = int(10000)
options["stopFitness"] = float(0.95)
options["mode"] = 'real'
options['crossMode'] = 'randomMultiPoint'
options["maximalPopulation"] = int(50)
options["mutationMode"] = 'oneNucleotid'
options["mutationProbability"] = float(0.05)
options["verbose"] = False
options["initialPopulation"] = int(100)
options['selectionMode'] = 'tournament'
elif preset == 2:
print('BASICS')
x = int(raw_input('Stop Iterations Number:\n'))
options['iterations'] = int(x)
options['stopFitness'] = float(raw_input(
'Stop Fitness:\n'
))
os.system('clear')
print('SELECTION')
options['selectionMode'] = int(raw_input(
'\nSelection Method:\n'
'--> 1: Roulette method\n'
'--> 2: Tournament method\n'
'--> 3: Roulette without replacement method\n'
))
if options['selectionMode'] == 1:
options['selectionMode'] = 'roulette'
elif options['selectionMode'] == 2:
options['selectionMode'] = 'tournament'
elif options['selectionMode'] == 3:
options['selectionMode'] = 'rouletteWR'
os.system('clear')
print('CROSSOVER & MUTATIONS')
options['mode'] = int(raw_input(
'Mode:\n'
'-> 1: Binary mode\n'
'-> 2: Real mode\n'
))
if options['mode'] == 1:
options['mode'] = 'binary'
elif options['mode'] == 2:
options['mode'] = 'real'
options['crossMode'] = int(raw_input(
'Crossover Mode:\n'
'--> 1: random one point\n'
'--> 2: random multipoint\n'
))
if options['crossMode'] == 1:
options['crossMode'] = 'randomOnePoint'
elif options['crossMode'] == 2:
options['crossMode'] = 'randomMultiPoint'
options['mutationMode'] = int(raw_input(
'Mutation Mode:\n'
'-> 0: Swap mode\n'
            '-> 1: Each nucleotide has a chance to be mutated, one by one\n'
            '-> 2: At most 1 mutation per child\n'
))
if options['mutationMode'] == 0:
options['mutationMode'] = 'swap'
elif options['mutationMode'] == 1:
options['mutationMode'] = 'everyNucleotid'
elif options['mutationMode'] == 2:
options['mutationMode'] = 'oneNucleotid'
options['mutationProbability'] = float(raw_input(
'Mutation Probability Mode:\n'
'-> 0 < n < 1: Fixed Probability\n'
'-> 2: Random Probability, basically between 1/BitArraySize and 1/PopulationSize\n'
))
os.system('clear')
print("POPULATION")
options["maximalPopulation"] = int(raw_input(
"Maximal Population:\n"
"-> n > 0: elitist insertion, just keep n best individuals\n"
"-> Other: every individual is kept (can slow down the algorythm for several iterations)\n"
"-> WARNING: If you set maximal population = 1 WITH roulette without replacement"
", your computer will explode\n"
))
options["initialPopulation"] = int(raw_input("Initialise with how much individuals ?\n"))
os.system("clear")
print("\nVERBOSE")
options["verbose"] = int(raw_input(
"Verbose Mode\n"
"-> 1: Enabled\n"
"-> 0: Disabled\n"
))
if options['verbose'] == 0:
options['verbose'] = False
elif options['verbose'] == 1:
options['verbose'] = True
os.system("clear")
return options
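# Example of the dict GA_settings returns (values of the source-based preset):
# {'iterations': 10000, 'stopFitness': 0.95, 'mode': 'real',
#  'crossMode': 'randomMultiPoint', 'maximalPopulation': 50,
#  'mutationMode': 'oneNucleotid', 'mutationProbability': 0.05,
#  'verbose': False, 'initialPopulation': 100, 'selectionMode': 'tournament'}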
def ES_settings():
"""Provides the view for the user setting of the ES experiments and returns the settings set"""
os.system("clear")
print('===== OPTIONS =====\n')
options = {}
preset = int(raw_input(
"PRESET\n"
"Use preset ?\n"
"\n\n-> 1: Source based preset\n"
"\n-> 2: I WANT TO SET BY MYSELF\n"
))
os.system("clear")
if preset == 1:
options["iterations"] = int(1000)
options["stopFitness"] = float(0.95)
options["base"] = int(10)
options['verbose'] = False
options['selectionMode'] = int(1)
options['mutationMode'] = '2LRNS'
options['recombinationMode'] = 'weighted'
options['sigmaBoost'] = True
elif preset == 2:
print('\nBASICS')
x = int(raw_input('Stop Iterations Number:\n'))
options["iterations"] = int(x)
options['stopFitness'] = float(raw_input('\nStop Fitness:\n'))
print("\nGENERATIONS")
options["base"] = int(raw_input(
'n setting:\n'
'lambda (number of child from the father) = 8 * n\n'
'mu (number of best child selected to make new father) = lambda / 4\n'
't (global step size) = 1 / (n)^(1/2)\n'
'ti (component step size) = 1 / (n)^(1/4)\n'
))
print('RECOMBINATION')
options['recombinationMode'] = int(raw_input(
'Recombination mode:\n'
'1- Intermediate\n'
'2- Select Best\n'
'3- Weighted\n'
))
if options['recombinationMode'] == 1:
options['recombinationMode'] = 'intermediate'<|fim▁hole|> options['recombinationMode'] = 'best'
elif options['recombinationMode'] == 3:
options['recombinationMode'] = 'weighted'
print('MUTATION')
options['mutationMode'] = int(raw_input(
'Mutation mode:\n'
'1- 2 Learning Rates, N Sigmas\n'
'2- 1 Learning Rate, 1 Sigma\n'
))
if options['mutationMode'] == 1:
options['mutationMode'] = '2LRNS'
elif options['mutationMode'] == 2:
options['mutationMode'] = '1LR1S'
print('SIGMA BOOST')
options['sigmaBoost'] = int(raw_input(
'Allow sigma boost YOLO special feature ?\n'
'1- sigma nitro enabled\n'
'2- sigma nitro disabled\n'
))
if options['sigmaBoost'] == 1:
options['sigmaBoost'] = True
elif options['sigmaBoost'] == 2:
options['sigmaBoost'] = False
print("\nVERBOSE")
options["verbose"] = int(raw_input(
"Verbose Mode\n"
"-> 1: Enabled\n"
"-> 0: Disabled\n"
))
os.system("clear")
options['maximalPopulation'] = 2 * options['base']
options['childNumber'] = 8 * options['base']
options['globalLearningRate'] = 1.0 / pow(options['base'], 0.5)
options['localLearningRate'] = 1.0 / pow(options['base'], 0.25)
return options<|fim▁end|> | elif options['recombinationMode'] == 2: |
<|file_name|>permissions.ts<|end_file_name|><|fim▁begin|>import intersection from 'lodash/intersection';
import moment from 'moment';
import * as _ from 'underscore';
import { getSchema } from '../utils/getSchema';
class Group {
actions: Array<string>
constructor() {
this.actions = [];
}
can(actions: string|string[]) {
actions = Array.isArray(actions) ? actions : [actions];
this.actions = this.actions.concat(actions);
}
cannot(actions: string|string[]) {
actions = Array.isArray(actions) ? actions : [actions];
this.actions = _.difference(this.actions, actions);
}
}
export const userGroups: Record<string,Group> = {};
// Create a new group
export const createGroup = (groupName: string): Group => {
userGroups[groupName] = new Group();
return userGroups[groupName];
};
// get a list of a user's groups
export const userGetGroups = (user: UsersProfile|DbUser|null): Array<string> => {
if (!user) { // guests user
return ['guests'];
}
if (user.banned > moment().toDate()) { // banned users have no membership permissions
return ['guests'];
}
let userGroups: Array<string> = ['members'];
if (user.groups) { // custom groups
userGroups = userGroups.concat(user.groups);
}
if (userIsAdmin(user)) { // admin
userGroups.push('admins');<|fim▁hole|> return userGroups;
};
// Get a list of all the actions a user can perform
export const userGetActions = (user: UsersProfile|DbUser|null): Array<string> => {
let groups = userGetGroups(user);
if (!groups.includes('guests')) {
// always give everybody permission for guests actions, too
groups.push('guests');
}
let groupActions = groups.map(groupName => {
// note: make sure groupName corresponds to an actual group
const group = userGroups[groupName];
return group && group.actions;
});
return _.unique(_.flatten(groupActions));
};
// Check if a user is a member of a group
export const userIsMemberOf = (user: UsersCurrent|DbUser|null, group: string): boolean => {
const userGroups = userGetGroups(user);
for (let userGroup of userGroups) {
if (userGroup === group)
return true;
}
return false;
};
// Check if a user can perform at least one of the specified actions
export const userCanDo = (user: UsersProfile|DbUser|null, actionOrActions: string|Array<string>): boolean => {
const authorizedActions = userGetActions(user);
const actions = Array.isArray(actionOrActions) ? actionOrActions : [actionOrActions];
return userIsAdmin(user) || intersection(authorizedActions, actions).length > 0;
};
// Check if a user owns a document
export const userOwns = function (user: UsersMinimumInfo|DbUser|null, document: HasUserIdType|DbUser|UsersMinimumInfo|DbObject): boolean {
if (!user) {
// not logged in
return false;
}
if (!document) {
// no document specified
return false;
}
if ((document as HasUserIdType).userId) {
// case 1: document is a post or a comment, use userId to check
return user._id === (document as HasUserIdType).userId;
} else {
// case 2: document is a user, use _id or slug to check
const documentUser = document as (DbUser|UsersMinimumInfo);
return documentUser.slug ? user.slug === documentUser.slug : user._id === documentUser._id;
}
};
// Check if a user is an admin
export const userIsAdmin = function (user: UsersMinimumInfo|DbUser|null): boolean {
if (!user) return false;
return user.isAdmin;
};
export const isAdmin = userIsAdmin;
// Check if a user can view a field
export const userCanReadField = <T extends DbObject>(user: UsersCurrent|DbUser|null, field: CollectionFieldSpecification<T>, document: T): boolean => {
const canRead = field.canRead || field.viewableBy; //OpenCRUD backwards compatibility
if (canRead) {
if (typeof canRead === 'function') {
// if canRead is a function, execute it with user and document passed. it must return a boolean
return canRead(user, document);
} else if (typeof canRead === 'string') {
// if canRead is just a string, we assume it's the name of a group and pass it to isMemberOf
return canRead === 'guests' || userIsMemberOf(user, canRead);
} else if (Array.isArray(canRead) && canRead.length > 0) {
// if canRead is an array, we do a recursion on every item and return true if one of the items return true
return canRead.some(group => userCanReadField(user, { canRead: group }, document));
}
}
return false;
};
// @summary Get a list of fields viewable by a user
// @param {Object} user - The user performing the action
// @param {Object} collection - The collection
// @param {Object} document - Optionally, get a list for a specific document
const getViewableFields = function <T extends DbObject>(user: UsersCurrent|DbUser|null, collection: CollectionBase<T>, document: T): Set<string> {
const schema = getSchema(collection);
let result: Set<string> = new Set();
for (let fieldName of Object.keys(schema)) {
if (fieldName.indexOf('.$') > -1)
continue;
if (userCanReadField(user, schema[fieldName], document))
result.add(fieldName);
}
return result;
};
// For a given document or list of documents, keep only fields viewable by current user
// @param {Object} user - The user performing the action
// @param {Object} collection - The collection
// @param {Object} document - The document being returned by the resolver
// TODO: Integrate permissions-filtered DbObjects into the type system
export const restrictViewableFields = function <T extends DbObject>(user: UsersCurrent|DbUser|null, collection: CollectionBase<T>, docOrDocs: T|Array<T>): any {
if (!docOrDocs) return {};
const restrictDoc = (document: T) => {
// get array of all keys viewable by user
const viewableKeys: Set<string> = getViewableFields(user, collection, document);
// return a filtered document
const restrictedDocument: Record<string,any> = {};
for (let key of Object.keys(document)) {
if (viewableKeys.has(key))
restrictedDocument[key] = (document as any)[key];
}
return restrictedDocument;
};
return Array.isArray(docOrDocs) ? docOrDocs.map(restrictDoc) : restrictDoc(docOrDocs);
};
// Check if a user can submit a field
export const userCanCreateField = <T extends DbObject>(user: DbUser|UsersCurrent|null, field: CollectionFieldSpecification<T>): boolean => {
const canCreate = field.canCreate || field.insertableBy; //OpenCRUD backwards compatibility
if (canCreate) {
if (typeof canCreate === 'function') {
// if canCreate is a function, execute it with user and document passed. it must return a boolean
return canCreate(user);
} else if (typeof canCreate === 'string') {
// if canCreate is just a string, we assume it's the name of a group and pass it to isMemberOf
// note: if canCreate is 'guests' then anybody can create it
return canCreate === 'guests' || userIsMemberOf(user, canCreate);
} else if (Array.isArray(canCreate) && canCreate.length > 0) {
// if canCreate is an array, we do a recursion on every item and return true if one of the items return true
return canCreate.some(group => userCanCreateField(user, { canCreate: group }));
}
}
return false;
};
// Check if a user can edit a field
export const userCanUpdateField = <T extends DbObject>(user: DbUser|UsersCurrent|null, field: CollectionFieldSpecification<T>, document: Partial<T>): boolean => {
const canUpdate = field.canUpdate || field.editableBy; //OpenCRUD backwards compatibility
if (canUpdate) {
if (typeof canUpdate === 'function') {
// if canUpdate is a function, execute it with user and document passed. it must return a boolean
return canUpdate(user, document);
} else if (typeof canUpdate === 'string') {
// if canUpdate is just a string, we assume it's the name of a group and pass it to isMemberOf
// note: if canUpdate is 'guests' then anybody can create it
return canUpdate === 'guests' || userIsMemberOf(user, canUpdate);
} else if (Array.isArray(canUpdate) && canUpdate.length > 0) {
// if canUpdate is an array, we look at every item and return true if one of the items return true
return canUpdate.some(group => userCanUpdateField(user, { canUpdate: group }, document));
}
}
return false;
};
////////////////////
// Initialize //
////////////////////
// initialize the 3 out-of-the-box groups
export const guestsGroup = createGroup('guests'); // non-logged-in users
export const membersGroup = createGroup('members'); // regular users
const membersActions = [
'user.create',
'user.update.own',
// OpenCRUD backwards compatibility
'users.new',
'users.edit.own',
'users.remove.own',
];
userGroups.members.can(membersActions);
export const adminsGroup = createGroup('admins'); // admin users
const adminActions = [
'user.create',
'user.update.all',
'user.delete.all',
'setting.update',
// OpenCRUD backwards compatibility
'users.new',
'users.edit.all',
'users.remove.all',
'settings.edit',
];
userGroups.admins.can(adminActions);<|fim▁end|> | }
|
<|file_name|>application_test.go<|end_file_name|><|fim▁begin|>package zabbix_test
import (
"fmt"
"math/rand"
"reflect"
"testing"
. "."
)<|fim▁hole|> err := getAPI(t).ApplicationsCreate(apps)
if err != nil {
t.Fatal(err)
}
return &apps[0]
}
func DeleteApplication(app *Application, t *testing.T) {
err := getAPI(t).ApplicationsDelete(Applications{*app})
if err != nil {
t.Fatal(err)
}
}
func TestApplications(t *testing.T) {
api := getAPI(t)
group := CreateHostGroup(t)
defer DeleteHostGroup(group, t)
host := CreateHost(group, t)
defer DeleteHost(host, t)
app := CreateApplication(host, t)
if app.ApplicationId == "" {
t.Errorf("Id is empty: %#v", app)
}
app2 := CreateApplication(host, t)
if app2.ApplicationId == "" {
t.Errorf("Id is empty: %#v", app2)
}
if reflect.DeepEqual(app, app2) {
t.Errorf("Apps are equal:\n%#v\n%#v", app, app2)
}
apps, err := api.ApplicationsGet(Params{"hostids": host.HostId})
if err != nil {
t.Fatal(err)
}
if len(apps) != 2 {
t.Errorf("Failed to create apps: %#v", apps)
}
app2, err = api.ApplicationGetById(app.ApplicationId)
if err != nil {
t.Fatal(err)
}
app2.TemplateIds = nil
if !reflect.DeepEqual(app, app2) {
t.Errorf("Apps are not equal:\n%#v\n%#v", app, app2)
}
app2, err = api.ApplicationGetByHostIdAndName(host.HostId, app.Name)
if err != nil {
t.Fatal(err)
}
app2.TemplateIds = nil
if !reflect.DeepEqual(app, app2) {
t.Errorf("Apps are not equal:\n%#v\n%#v", app, app2)
}
DeleteApplication(app, t)
}<|fim▁end|> |
func CreateApplication(host *Host, t *testing.T) *Application {
apps := Applications{{HostId: host.HostId, Name: fmt.Sprintf("App %d for %s", rand.Int(), host.Host)}} |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>def uniquer(seq, idfun=None):
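    # Keeps the first occurrence of each item in seq; idfun (identity by
    # default) maps an item to the key used to detect duplicates.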
if idfun is None:
def idfun(x): return x
seen = {}<|fim▁hole|> marker = idfun(item)
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result<|fim▁end|> | result = []
for item in seq: |
<|file_name|>SchemaTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010, PostgreSQL Global Development Group
* See the LICENSE file in the project root for more information.
*/
package org.postgresql.test.jdbc4.jdbc41;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.postgresql.test.TestUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Properties;
public class SchemaTest {
private Connection _conn;
private boolean dropUserSchema;
@Before
public void setUp() throws Exception {
_conn = TestUtil.openDB();
Statement stmt = _conn.createStatement();
try {
stmt.execute("CREATE SCHEMA " + TestUtil.getUser());
dropUserSchema = true;
} catch (SQLException e) {
/* assume schema existed */
}
stmt.execute("CREATE SCHEMA schema1");
stmt.execute("CREATE SCHEMA schema2");
stmt.execute("CREATE SCHEMA \"schema 3\"");
stmt.execute("CREATE SCHEMA \"schema \"\"4\"");
stmt.execute("CREATE SCHEMA \"schema '5\"");
stmt.execute("CREATE SCHEMA \"schema ,6\"");
stmt.execute("CREATE SCHEMA \"UpperCase\"");
TestUtil.createTable(_conn, "schema1.table1", "id integer");
TestUtil.createTable(_conn, "schema2.table2", "id integer");
TestUtil.createTable(_conn, "\"UpperCase\".table3", "id integer");
TestUtil.createTable(_conn, "schema1.sptest", "id integer");
TestUtil.createTable(_conn, "schema2.sptest", "id varchar");
}
@After
public void tearDown() throws SQLException {
_conn.setAutoCommit(true);
_conn.setSchema(null);
Statement stmt = _conn.createStatement();
if (dropUserSchema) {
stmt.execute("DROP SCHEMA " + TestUtil.getUser() + " CASCADE");
}
stmt.execute("DROP SCHEMA schema1 CASCADE");
stmt.execute("DROP SCHEMA schema2 CASCADE");
stmt.execute("DROP SCHEMA \"schema 3\" CASCADE");
stmt.execute("DROP SCHEMA \"schema \"\"4\" CASCADE");
stmt.execute("DROP SCHEMA \"schema '5\" CASCADE");
stmt.execute("DROP SCHEMA \"schema ,6\"");
stmt.execute("DROP SCHEMA \"UpperCase\" CASCADE");
TestUtil.closeDB(_conn);
}
/**
* Test that what you set is what you get
*/
@Test
public void testGetSetSchema() throws SQLException {
_conn.setSchema("schema1");
assertEquals("schema1", _conn.getSchema());
_conn.setSchema("schema2");
assertEquals("schema2", _conn.getSchema());
_conn.setSchema("schema 3");
assertEquals("schema 3", _conn.getSchema());
_conn.setSchema("schema \"4");
assertEquals("schema \"4", _conn.getSchema());
_conn.setSchema("schema '5");
assertEquals("schema '5", _conn.getSchema());
_conn.setSchema("UpperCase");
assertEquals("UpperCase", _conn.getSchema());
}
/**
* Test that setting the schema allows to access objects of this schema without prefix, hide
* objects from other schemas but doesn't prevent to prefix-access to them.
*/
@Test
public void testUsingSchema() throws SQLException {
Statement stmt = _conn.createStatement();
try {
try {
_conn.setSchema("schema1");
stmt.executeQuery(TestUtil.selectSQL("table1", "*"));
stmt.executeQuery(TestUtil.selectSQL("schema2.table2", "*"));
try {
stmt.executeQuery(TestUtil.selectSQL("table2", "*"));
fail("Objects of schema2 should not be visible without prefix");
} catch (SQLException e) {
// expected
}
_conn.setSchema("schema2");
stmt.executeQuery(TestUtil.selectSQL("table2", "*"));
stmt.executeQuery(TestUtil.selectSQL("schema1.table1", "*"));
try {
stmt.executeQuery(TestUtil.selectSQL("table1", "*"));
fail("Objects of schema1 should not be visible without prefix");
} catch (SQLException e) {
// expected
}
_conn.setSchema("UpperCase");
stmt.executeQuery(TestUtil.selectSQL("table3", "*"));
stmt.executeQuery(TestUtil.selectSQL("schema1.table1", "*"));
try {
stmt.executeQuery(TestUtil.selectSQL("table1", "*"));
fail("Objects of schema1 should not be visible without prefix");
} catch (SQLException e) {
// expected
}
} catch (SQLException e) {
fail("Could not find expected schema elements: " + e.getMessage());
}
} finally {
try {
stmt.close();
} catch (SQLException e) {
}
}
}
/**
* Test that get schema returns the schema with the highest priority in the search path
*/
@Test
public void testMultipleSearchPath() throws SQLException {
execute("SET search_path TO schema1,schema2");
assertEquals("schema1", _conn.getSchema());
execute("SET search_path TO \"schema ,6\",schema2");
assertEquals("schema ,6", _conn.getSchema());
}
@Test
public void testSchemaInProperties() throws Exception {
Properties properties = new Properties();
properties.setProperty("currentSchema", "schema1");
Connection conn = TestUtil.openDB(properties);
try {
assertEquals("schema1", conn.getSchema());
Statement stmt = conn.createStatement();
stmt.executeQuery(TestUtil.selectSQL("table1", "*"));
stmt.executeQuery(TestUtil.selectSQL("schema2.table2", "*"));
try {
stmt.executeQuery(TestUtil.selectSQL("table2", "*"));
fail("Objects of schema2 should not be visible without prefix");
} catch (SQLException e) {
// expected<|fim▁hole|> }
} finally {
TestUtil.closeDB(conn);
}
}
@Test
public void testSchemaPath$User() throws Exception {
execute("SET search_path TO \"$user\",public,schema2");
assertEquals(TestUtil.getUser(), _conn.getSchema());
}
private void execute(String sql) throws SQLException {
Statement stmt = _conn.createStatement();
try {
stmt.execute(sql);
} finally {
try {
stmt.close();
} catch (SQLException e) {
}
}
}
@Test
public void testSearchPathPreparedStatementAutoCommitFalse() throws SQLException {
_conn.setAutoCommit(false);
testSearchPathPreparedStatement();
}
@Test
public void testSearchPathPreparedStatementAutoCommitTrue() throws SQLException {
testSearchPathPreparedStatement();
}
@Test
public void testSearchPathPreparedStatement() throws SQLException {
execute("set search_path to schema1,public");
PreparedStatement ps = _conn.prepareStatement("select * from sptest");
for (int i = 0; i < 10; i++) {
ps.execute();
}
assertColType(ps, "sptest should point to schema1.sptest, thus column type should be INT",
Types.INTEGER);
ps.close();
execute("set search_path to schema2,public");
ps = _conn.prepareStatement("select * from sptest");
assertColType(ps, "sptest should point to schema2.sptest, thus column type should be VARCHAR",
Types.VARCHAR);
ps.close();
}
private void assertColType(PreparedStatement ps, String message, int expected) throws SQLException {
ResultSet rs = ps.executeQuery();
ResultSetMetaData md = rs.getMetaData();
int columnType = md.getColumnType(1);
assertEquals(message,
expected, columnType);
rs.close();
}
}<|fim▁end|> | |
<|file_name|>limit.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
@Component({
selector: 'demo-pagination-limit',<|fim▁hole|>})
export class DemoPaginationLimitComponent {
public maxSize:number = 5;
public bigTotalItems:number = 175;
public bigCurrentPage:number = 1;
public numPages:number = 0;
public pageChanged(event:any):void {
console.log('Page changed to: ' + event.page);
console.log('Number items per page: ' + event.itemsPerPage);
}
}<|fim▁end|> | templateUrl: './limit.html' |
<|file_name|>highlight.py<|end_file_name|><|fim▁begin|># Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# highlight.py - highlight extension implementation file
#
# Copyright 2007-2009 Adam Hupp <[email protected]> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# The original module was split in an interface and an implementation
# file to defer pygments loading and speedup extension setup.
from __future__ import absolute_import
from edenscm.mercurial import demandimport, encoding, util
demandimport.ignore.extend(["pkgutil", "pkg_resources", "__main__"])
with demandimport.deactivated():
import pygments
import pygments.formatters
import pygments.lexers
import pygments.util
highlight = pygments.highlight
ClassNotFound = pygments.util.ClassNotFound
guess_lexer = pygments.lexers.guess_lexer
guess_lexer_for_filename = pygments.lexers.guess_lexer_for_filename
TextLexer = pygments.lexers.TextLexer
HtmlFormatter = pygments.formatters.HtmlFormatter
SYNTAX_CSS = '\n<link rel="stylesheet" href="{url}highlightcss" ' 'type="text/css" />'
def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
# append a <link ...> to the syntax highlighting css
old_header = tmpl.load("header")
if SYNTAX_CSS not in old_header:
new_header = old_header + SYNTAX_CSS
tmpl.cache["header"] = new_header
text = fctx.data()
if util.binary(text):
return
# str.splitlines() != unicode.splitlines() because "reasons"
for c in "\x0c\x1c\x1d\x1e":
if c in text:
text = text.replace(c, "")
# Pygments is best used with Unicode strings:
# <http://pygments.org/docs/unicode/>
text = text.decode(encoding.encoding, "replace")
# To get multi-line strings right, we can't format line-by-line
try:
lexer = guess_lexer_for_filename(fctx.path(), text[:1024], stripnl=False)
except (ClassNotFound, ValueError):
# guess_lexer will return a lexer if *any* lexer matches. There is
# no way to specify a minimum match score. This can give a high rate of
# false positives on files with an unknown filename pattern.
if guessfilenameonly:
return
try:
lexer = guess_lexer(text[:1024], stripnl=False)
except (ClassNotFound, ValueError):
# Don't highlight unknown files
return
# Don't highlight text files
if isinstance(lexer, TextLexer):
return
formatter = HtmlFormatter(nowrap=True, style=style)
colorized = highlight(text, lexer, formatter)
coloriter = (s.encode(encoding.encoding, "replace") for s in colorized.splitlines())
<|fim▁hole|>
oldl = tmpl.cache[field]
newl = oldl.replace("line|escape", "line|colorize")
tmpl.cache[field] = newl<|fim▁end|> | tmpl.filters["colorize"] = lambda x: next(coloriter) |
<|file_name|>MainTestingWeightedHistograms.py<|end_file_name|><|fim▁begin|># force floating point division. Can still use integer with //
from __future__ import division
# This file is used for importing the common utilities classes.
import numpy as np
import matplotlib.pyplot as plt
import sys<|fim▁hole|>from FitUtil.EnergyLandscapes.InverseWeierstrass.Python.Code import \
InverseWeierstrass,WeierstrassUtil,WeightedHistogram
def assert_all_digitization_correct(objs):
for o in objs:
_assert_digitization_correct(o)
def run():
fwd,rev = load_simulated_data(n=2)
assert_all_digitization_correct(fwd)
assert_all_digitization_correct(rev)
if __name__ == "__main__":
run()<|fim▁end|> |
sys.path.append("../../../../../../")
from Util import Test
from Util.Test import _f_assert,HummerData,load_simulated_data |
<|file_name|>test_tokenizer.py<|end_file_name|><|fim▁begin|>import sys
import os
import unittest
import cStringIO
import warnings
import re
try:
import json
except ImportError:
import simplejson as json
from support import html5lib_test_files
from html5lib.tokenizer import HTMLTokenizer
from html5lib import constants
class TokenizerTestParser(object):
def __init__(self, initialState, lastStartTag=None):
self.tokenizer = HTMLTokenizer
self._state = initialState
self._lastStartTag = lastStartTag
def parse(self, stream, encoding=None, innerHTML=False):
tokenizer = self.tokenizer(stream, encoding)
self.outputTokens = []
tokenizer.state = getattr(tokenizer, self._state)
if self._lastStartTag is not None:
tokenizer.currentToken = {"type": "startTag",
"name":self._lastStartTag}
types = dict((v,k) for k,v in constants.tokenTypes.iteritems())
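        # Map the numeric token types back to their names so each token can
        # be dispatched to the matching process<Name> handler defined below.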
for token in tokenizer:
getattr(self, 'process%s' % types[token["type"]])(token)
return self.outputTokens
def processDoctype(self, token):
self.outputTokens.append([u"DOCTYPE", token["name"], token["publicId"],
token["systemId"], token["correct"]])
def processStartTag(self, token):
self.outputTokens.append([u"StartTag", token["name"],
dict(token["data"][::-1]), token["selfClosing"]])
def processEmptyTag(self, token):
if token["name"] not in constants.voidElements:
self.outputTokens.append(u"ParseError")
self.outputTokens.append([u"StartTag", token["name"], dict(token["data"][::-1])])
def processEndTag(self, token):
self.outputTokens.append([u"EndTag", token["name"],
token["selfClosing"]])
def processComment(self, token):
self.outputTokens.append([u"Comment", token["data"]])
def processSpaceCharacters(self, token):
self.outputTokens.append([u"Character", token["data"]])
self.processSpaceCharacters = self.processCharacters
def processCharacters(self, token):
self.outputTokens.append([u"Character", token["data"]])
def processEOF(self, token):
pass
def processParseError(self, token):
self.outputTokens.append([u"ParseError", token["data"]])
def concatenateCharacterTokens(tokens):
outputTokens = []
for token in tokens:
if not "ParseError" in token and token[0] == "Character":
if (outputTokens and not "ParseError" in outputTokens[-1] and
outputTokens[-1][0] == "Character"):
outputTokens[-1][1] += token[1]
else:
outputTokens.append(token)
else:
outputTokens.append(token)
return outputTokens
def normalizeTokens(tokens):
# TODO: convert tests to reflect arrays
for i, token in enumerate(tokens):
if token[0] == u'ParseError':
tokens[i] = token[0]
return tokens
def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
ignoreErrors=False):
"""Test whether the test has passed or failed
If the ignoreErrorOrder flag is set to true we don't test the relative
positions of parse errors and non parse errors
"""
checkSelfClosing= False
for token in expectedTokens:
if (token[0] == "StartTag" and len(token) == 4
or token[0] == "EndTag" and len(token) == 3):
checkSelfClosing = True
break
if not checkSelfClosing:
for token in receivedTokens:
if token[0] == "StartTag" or token[0] == "EndTag":
token.pop()
if not ignoreErrorOrder and not ignoreErrors:
return expectedTokens == receivedTokens
else:
#Sort the tokens into two groups; non-parse errors and parse errors
tokens = {"expected":[[],[]], "received":[[],[]]}
for tokenType, tokenList in zip(tokens.keys(),
(expectedTokens, receivedTokens)):
for token in tokenList:
if token != "ParseError":
tokens[tokenType][0].append(token)
else:
if not ignoreErrors:
tokens[tokenType][1].append(token)
return tokens["expected"] == tokens["received"]
def unescape_test(test):
def decode(inp):
return inp.decode("unicode-escape")
test["input"] = decode(test["input"])
for token in test["output"]:
if token == "ParseError":
continue
else:
token[1] = decode(token[1])
if len(token) > 2:
for key, value in token[2]:
del token[2][key]
token[2][decode(key)] = decode(value)
return test
def runTokenizerTest(test):
#XXX - move this out into the setup function
#concatenate all consecutive character tokens into a single token
if 'doubleEscaped' in test:
test = unescape_test(test)
expected = concatenateCharacterTokens(test['output'])
if 'lastStartTag' not in test:
test['lastStartTag'] = None
outBuffer = cStringIO.StringIO()
stdout = sys.stdout
sys.stdout = outBuffer
parser = TokenizerTestParser(test['initialState'],
test['lastStartTag'])
tokens = parser.parse(test['input'])
tokens = concatenateCharacterTokens(tokens)
received = normalizeTokens(tokens)
errorMsg = u"\n".join(["\n\nInitial state:",
test['initialState'] ,
"\nInput:", unicode(test['input']),
"\nExpected:", unicode(expected),
"\nreceived:", unicode(tokens)])
errorMsg = errorMsg.encode("utf-8")
ignoreErrorOrder = test.get('ignoreErrorOrder', False)
assert tokensMatch(expected, received, ignoreErrorOrder), errorMsg
def _doCapitalize(match):
return match.group(1).upper()
_capitalizeRe = re.compile(r"\W+(\w)").sub
<|fim▁hole|> return s
def test_tokenizer():
for filename in html5lib_test_files('tokenizer', '*.test'):
tests = json.load(file(filename))
testName = os.path.basename(filename).replace(".test","")
if 'tests' in tests:
for index,test in enumerate(tests['tests']):
#Skip tests with a self closing flag
skip = False
if 'initialStates' not in test:
test["initialStates"] = ["Data state"]
for initialState in test["initialStates"]:
test["initialState"] = capitalize(initialState)
yield runTokenizerTest, test<|fim▁end|> | def capitalize(s):
s = s.lower()
s = _capitalizeRe(_doCapitalize, s) |
<|file_name|>meta.js<|end_file_name|><|fim▁begin|>var model = require('model');
var adapter = require('./..').adapter;
var Issue = function () {
this.adapter = adapter;
this.property('assignees','string');
this.property('htmlUrl','string');
this.property('number','number');
this.property('state','string');
this.property('title','string');
this.property('body','string');
this.property('user','object');<|fim▁hole|> this.property('comments','number');
this.property('pullRequest','object');
this.property('closedAt','date');
this.property('createdAt','date');
this.property('updatedAt','date');
this.property('trckrState','string');
this.property('trckrLastReview','date');
this.property('trckrPingback','string'); // ping me back in a specific future
};
model.register('Issue', Issue);<|fim▁end|> | this.property('labels','object');
this.property('assignee','object');
this.property('milestone','object'); |
<|file_name|>FullPushThroughReportV05.go<|end_file_name|><|fim▁begin|>package tsmt
import (
"encoding/xml"
"github.com/fgrid/iso20022"
)
type Document01800105 struct {
XMLName xml.Name `xml:"urn:iso:std:iso:20022:tech:xsd:tsmt.018.001.05 Document"`
Message *FullPushThroughReportV05 `xml:"FullPushThrghRpt"`
}
func (d *Document01800105) AddMessage() *FullPushThroughReportV05 {
d.Message = new(FullPushThroughReportV05)
return d.Message
}
// Scope
// The FullPushThroughReport message is sent by the matching application to a party involved in a transaction.
// This message is used to pass on information that the matching application has received from the submitter. The forwarded information can originate from an InitialBaselineSubmission or BaselineReSubmission or BaselineAmendmentRequest message.
// Usage
// The FullPushThroughReport message can be sent by the matching application to a party to convey
// - the details of an InitialBaselineSubmission message that it has obtained,or
// - the details of a BaselineResubmission message that it has obtained,or
// - the details of a BaselineAmendmentRequest message that it has obtained.
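// A usage sketch built from this file's constructors:
//   doc := new(Document01800105)
//   rpt := doc.AddMessage()
//   rpt.AddReportIdentification()
//   rpt.AddTransactionStatus()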
type FullPushThroughReportV05 struct {
// Identifies the report.
ReportIdentification *iso20022.MessageIdentification1 `xml:"RptId"`
// Unique identification assigned by the matching application to the transaction.
// This identification is to be used in any communication between the parties.
TransactionIdentification *iso20022.SimpleIdentificationInformation `xml:"TxId"`
// Unique identification assigned by the matching application to the baseline when it is established.
EstablishedBaselineIdentification *iso20022.DocumentIdentification3 `xml:"EstblishdBaselnId,omitempty"`
// Identifies the status of the transaction by means of a code.
TransactionStatus *iso20022.TransactionStatus4 `xml:"TxSts"`
// Reference to the transaction for the financial institution which submitted the baseline.
UserTransactionReference []*iso20022.DocumentIdentification5 `xml:"UsrTxRef,omitempty"`
// Specifies the type of report.
ReportPurpose *iso20022.ReportType1 `xml:"RptPurp"`
// Specifies the commercial details of the underlying transaction.
PushedThroughBaseline *iso20022.Baseline5 `xml:"PushdThrghBaseln"`
// Person to be contacted in the organisation of the buyer.
BuyerContactPerson []*iso20022.ContactIdentification1 `xml:"BuyrCtctPrsn,omitempty"`
// Person to be contacted in the organisation of the seller.
SellerContactPerson []*iso20022.ContactIdentification1 `xml:"SellrCtctPrsn,omitempty"`
// Person to be contacted in the buyer's bank.
BuyerBankContactPerson []*iso20022.ContactIdentification1 `xml:"BuyrBkCtctPrsn,omitempty"`<|fim▁hole|>
// Person to be contacted in another bank than the seller or buyer's bank.
OtherBankContactPerson []*iso20022.ContactIdentification3 `xml:"OthrBkCtctPrsn,omitempty"`
// Information on the next processing step required.
RequestForAction *iso20022.PendingActivity2 `xml:"ReqForActn,omitempty"`
}
func (f *FullPushThroughReportV05) AddReportIdentification() *iso20022.MessageIdentification1 {
f.ReportIdentification = new(iso20022.MessageIdentification1)
return f.ReportIdentification
}
func (f *FullPushThroughReportV05) AddTransactionIdentification() *iso20022.SimpleIdentificationInformation {
f.TransactionIdentification = new(iso20022.SimpleIdentificationInformation)
return f.TransactionIdentification
}
func (f *FullPushThroughReportV05) AddEstablishedBaselineIdentification() *iso20022.DocumentIdentification3 {
f.EstablishedBaselineIdentification = new(iso20022.DocumentIdentification3)
return f.EstablishedBaselineIdentification
}
func (f *FullPushThroughReportV05) AddTransactionStatus() *iso20022.TransactionStatus4 {
f.TransactionStatus = new(iso20022.TransactionStatus4)
return f.TransactionStatus
}
func (f *FullPushThroughReportV05) AddUserTransactionReference() *iso20022.DocumentIdentification5 {
newValue := new(iso20022.DocumentIdentification5)
f.UserTransactionReference = append(f.UserTransactionReference, newValue)
return newValue
}
func (f *FullPushThroughReportV05) AddReportPurpose() *iso20022.ReportType1 {
f.ReportPurpose = new(iso20022.ReportType1)
return f.ReportPurpose
}
func (f *FullPushThroughReportV05) AddPushedThroughBaseline() *iso20022.Baseline5 {
f.PushedThroughBaseline = new(iso20022.Baseline5)
return f.PushedThroughBaseline
}
func (f *FullPushThroughReportV05) AddBuyerContactPerson() *iso20022.ContactIdentification1 {
newValue := new(iso20022.ContactIdentification1)
f.BuyerContactPerson = append(f.BuyerContactPerson, newValue)
return newValue
}
func (f *FullPushThroughReportV05) AddSellerContactPerson() *iso20022.ContactIdentification1 {
newValue := new(iso20022.ContactIdentification1)
f.SellerContactPerson = append(f.SellerContactPerson, newValue)
return newValue
}
func (f *FullPushThroughReportV05) AddBuyerBankContactPerson() *iso20022.ContactIdentification1 {
newValue := new(iso20022.ContactIdentification1)
f.BuyerBankContactPerson = append(f.BuyerBankContactPerson, newValue)
return newValue
}
func (f *FullPushThroughReportV05) AddSellerBankContactPerson() *iso20022.ContactIdentification1 {
newValue := new(iso20022.ContactIdentification1)
f.SellerBankContactPerson = append(f.SellerBankContactPerson, newValue)
return newValue
}
func (f *FullPushThroughReportV05) AddOtherBankContactPerson() *iso20022.ContactIdentification3 {
newValue := new(iso20022.ContactIdentification3)
f.OtherBankContactPerson = append(f.OtherBankContactPerson, newValue)
return newValue
}
func (f *FullPushThroughReportV05) AddRequestForAction() *iso20022.PendingActivity2 {
f.RequestForAction = new(iso20022.PendingActivity2)
return f.RequestForAction
}<|fim▁end|> |
// Person to be contacted in the seller's bank.
SellerBankContactPerson []*iso20022.ContactIdentification1 `xml:"SellrBkCtctPrsn,omitempty"` |
<|file_name|>server-tcp.py<|end_file_name|><|fim▁begin|>import socket
import threading
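# an empty bind_ip binds the listener to all interfaces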
bind_ip = ""
bind_port = 60007
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((bind_ip, bind_port))<|fim▁hole|>print("[*] Listening on %s:%d" % (bind_ip, bind_port))
def handle_client(client_socket):
request = client_socket.recv(1024).decode()
print("[*] Received: %s" % request)
send_data = "ACK!"
client_socket.send(send_data.encode())
print(client_socket.getpeername())
client_socket.close()
while True:
client, addr = server.accept()
print("[*] Accepted connect from: %s:%d" % (addr[0], addr[1]))
client_handler = threading.Thread(target=handle_client, args=(client,))
client_handler.start()<|fim▁end|> | server.listen(5)
|
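A minimal client sketch to exercise the threaded server above (it assumes server-tcp.py is already listening on localhost:60007):
import socket

client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(("127.0.0.1", 60007))
client.send("hello".encode())
print("[*] Server replied: %s" % client.recv(4096).decode())  # expect "ACK!"
client.close()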
<|file_name|>Esempio.java<|end_file_name|><|fim▁begin|>package lambda;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
public class Esempio extends JFrame {
public Esempio()
{
init();<|fim▁hole|> private void init() {
BorderLayout b=new BorderLayout();
this.setLayout(b);
JButton button=new JButton("Ok");
this.add(button,BorderLayout.SOUTH);
this.setSize(400, 300);
this.setVisible(true);
// Azione is not defined anywhere in this file; register the lambda directly instead.
button.addActionListener(e -> System.out.println("ciao"));
}
public static void main(String[] args) {
Esempio e=new Esempio();
}
}<|fim▁end|> |
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask_wtf import FlaskForm # type: ignore
from wtforms import ( # type: ignore
StringField,
TextAreaField,
SubmitField,
FieldList,
FormField,
IntegerField,
HiddenField,
BooleanField,
)
from wtforms import validators
from data.models import VulnerabilityGitCommits, VulnerabilityResources
from data.models.base import db
class BaseForm(FlaskForm):
@property
def non_hidden_fields(self):
for field in self:
if isinstance(field, HiddenField):
continue
yield field
class ModelFieldList(FieldList):
def __init__(self, *args, **kwargs):
self.model = kwargs.pop("model", None)
super().__init__(*args, **kwargs)
if not self.model:
raise ValueError("ModelFieldList requires model to be set")
def populate_obj(self, obj, name):
if not hasattr(obj, name):
setattr(obj, name, [])
while len(getattr(obj, name)) < len(self.entries):
new_model = self.model()
db.session.add(new_model)
getattr(obj, name).append(new_model)
while len(getattr(obj, name)) > len(self.entries):
db.session.delete(getattr(obj, name).pop())
super().populate_obj(obj, name)
class CommitLinksForm(FlaskForm):
repo_url = StringField(
"Git Repo URL", validators=[validators.Optional(), validators.URL()]
)
commit_hash = StringField("Commit Hash", validators=[])
# Commit data is optional -> otherwise use: validators.DataRequired(),
commit_link = StringField(
"Main commit link", validators=[validators.Optional(), validators.URL()]
)
repo_name = StringField("Repository Name", validators=[])
class Meta:
csrf = False
class VulnerabilityResourcesForm(FlaskForm):
link = StringField("Link", validators=[validators.DataRequired(), validators.URL()])
class Meta:
csrf = False
class VulnerabilityDetailsForm(FlaskForm):
commits = ModelFieldList(
FormField(CommitLinksForm),
model=VulnerabilityGitCommits,
min_entries=1,
default=[VulnerabilityGitCommits],
)
# Changing the CVE ID is disabled for now.
# The filters argument is used to have Null fields instead of empty strings.
# This is important since the cve_id is supposed to be unique OR Null.
# cve_id = StringField(
# "CVE-ID",
# filters=[lambda x: x and str(x).upper().strip(), lambda x: x or None],
# validators=[
# validators.Optional(),
# validators.Regexp(r"^CVE-\d{4}-\d+$")
# ],
# )
comment = TextAreaField(
"High-Level Bug Overview", validators=[validators.DataRequired()]
)
resources = ModelFieldList(
FormField(VulnerabilityResourcesForm), model=VulnerabilityResources
)
submit = SubmitField("Propose change")
class VulnerabilityProposalReject(FlaskForm):
review_feedback = TextAreaField(
"Feedback what should be changed", validators=[validators.DataRequired()]
)
submit_reject = SubmitField("Ask for improvements")
class VulnerabilityProposalApprove(FlaskForm):
submit_approve = SubmitField("Approve proposal")
class VulnerabilityProposalAssign(FlaskForm):
submit_assign = SubmitField("Take review")
class VulnerabilityProposalUnassign(FlaskForm):
submit_unassign = SubmitField("Unassign from this review")
class VulnerabilityProposalPublish(FlaskForm):
submit_publish = SubmitField("Publish entry")
class VulnerabilityDeleteForm(FlaskForm):
delete_entry = IntegerField("Delete entry", [validators.DataRequired()])
submit = SubmitField()
class UserProfileForm(BaseForm):
full_name = StringField(
"Name",
description=(
'<small class="form-text text-muted">'<|fim▁hole|> "What should be shown next to your contributions.</small>"
),
)
hide_name = BooleanField("Hide Name")
profile_picture = StringField(
"Profile Picture URL", validators=[validators.Optional(), validators.URL()]
)
hide_picture = BooleanField("Hide Profile Picture")<|fim▁end|> | |
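ModelFieldList.populate_obj above adds or deletes rows so the SQLAlchemy collection matches the number of submitted sub-forms before the normal FieldList logic fills them in. A rough usage sketch follows; the view function and loader are illustrative, not part of this module:
def propose_change(vuln_id):
    form = VulnerabilityDetailsForm()
    if form.validate_on_submit():
        vuln = load_vulnerability(vuln_id)  # hypothetical loader
        # syncs vuln.commits with however many CommitLinksForm rows were submitted
        form.populate_obj(vuln)
        db.session.commit()
    return form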
<|file_name|>FCTRL2.py<|end_file_name|><|fim▁begin|>from sys import stdin as sin
list_index=[]
list=dict()<|fim▁hole|> f=1
# check if a value less than that has already been calculated
for i in range(1,n+1):
f*=i
return f
t=int(input())
for i in range(t):
n=int(sin.readline().rstrip())
print(fn(n))<|fim▁end|> | def fn(n): |
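The comment in FCTRL2.py hints at reusing previously computed factorials, but the list_index/list caches above are never consulted. A hypothetical memoized variant (not part of the submitted solution) could look like:
cache = {0: 1}  # cache[k] holds k!

def fact(n):
    k = max(i for i in cache if i <= n)  # largest factorial already computed
    f = cache[k]
    for i in range(k + 1, n + 1):
        f *= i
        cache[i] = f
    return f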
<|file_name|>readlog.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os<|fim▁hole|>import time
if len(sys.argv) < 2:
print("Please provide a server argument")
sys.exit(1)
def siginthandler(signum, stackframe):
sys.exit(-1)
signal.signal(signal.SIGINT, siginthandler)
logpath="/log"
if len(sys.argv) > 2:
logpath=sys.argv[2]
while True:
try:
idx = 0
time.sleep(1)
p = 2379
print("Connect to {}:{}".format(sys.argv[1], p))
keyval = etcd.Client(host=sys.argv[1], port=p)
while keyval:
res = keyval.watch(logpath, index=idx, recursive=True)
for e in res.leaves:
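# an event on the watched path itself means the log was reset: restart from index 0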
if e.key == logpath:
idx = 0
break
print(e.value)
idx = e.createdIndex+1
except Exception as e:
print(e)<|fim▁end|> | import sys
import etcd
import subprocess
import signal |
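readlog.py above blocks on a recursive watch under /log and prints every new value. A minimal matching producer, assuming the same python-etcd client (append=True creates in-order child keys that the watch will pick up):
import etcd

client = etcd.Client(host="127.0.0.1", port=2379)  # same port the reader connects to
client.write("/log", "hello from the producer", append=True)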
<|file_name|>cross_crate.rs<|end_file_name|><|fim▁begin|>// aux-build:const_evaluatable_lib.rs
// run-pass
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
extern crate const_evaluatable_lib;
fn user<T>() where [u8; std::mem::size_of::<T>() - 1]: Sized {
assert_eq!(const_evaluatable_lib::test1::<T>(), [0; std::mem::size_of::<T>() - 1]);<|fim▁hole|> user::<u32>();
user::<u64>();
}<|fim▁end|> | }
fn main() {
assert_eq!(const_evaluatable_lib::test1::<u32>(), [0; 3]); |
<|file_name|>workflow-type.get.js<|end_file_name|><|fim▁begin|><import resource="classpath:alfresco/site-webscripts/org/alfresco/components/workflow/workflow.lib.js">
var workflowDefinitions = getWorkflowDefinitions(),
filters = [];
if (workflowDefinitions)
{
for (var i = 0, il = workflowDefinitions.length; i < il; i++)
{
filters.push(
{
id: "workflowType",
data: workflowDefinitions[i].name,
label: workflowDefinitions[i].title
});<|fim▁hole|><|fim▁end|> | }
}
model.filters = filters; |
<|file_name|>closure.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Code for type-checking closure expressions.
use super::{check_fn, Expectation, FnCtxt};
use astconv;
use middle::region;
use middle::subst;
use middle::ty::{self, ToPolyTraitRef, Ty};
use std::cmp;
use syntax::abi;
use syntax::ast;
use syntax::ast_util;
pub fn check_expr_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
expr: &ast::Expr,
_capture: ast::CaptureClause,
decl: &'tcx ast::FnDecl,
body: &'tcx ast::Block,
expected: Expectation<'tcx>) {
debug!("check_expr_closure(expr={:?},expected={:?})",
expr,
expected);
// It's always helpful for inference if we know the kind of
// closure sooner rather than later, so first examine the expected
// type, and see if we can glean a closure kind from there.
let (expected_sig,expected_kind) = match expected.to_option(fcx) {
Some(ty) => deduce_expectations_from_expected_type(fcx, ty),
None => (None, None)
};
check_closure(fcx, expr, expected_kind, decl, body, expected_sig)
}
fn check_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
expr: &ast::Expr,
opt_kind: Option<ty::ClosureKind>,
decl: &'tcx ast::FnDecl,
body: &'tcx ast::Block,
expected_sig: Option<ty::FnSig<'tcx>>) {
let expr_def_id = ast_util::local_def(expr.id);
debug!("check_closure opt_kind={:?} expected_sig={:?}",
opt_kind,
expected_sig);
let mut fn_ty = astconv::ty_of_closure(
fcx,
ast::Unsafety::Normal,
decl,
abi::RustCall,
expected_sig);
let closure_type = ty::mk_closure(fcx.ccx.tcx,
expr_def_id,
fcx.ccx.tcx.mk_substs(
fcx.inh.param_env.free_substs.clone()));
fcx.write_ty(expr.id, closure_type);
let fn_sig =
ty::liberate_late_bound_regions(fcx.tcx(),
region::DestructionScopeData::new(body.id),
&fn_ty.sig);
check_fn(fcx.ccx,
ast::Unsafety::Normal,
expr.id,
&fn_sig,
decl,
expr.id,
&*body,
fcx.inh);
// Tuple up the arguments and insert the resulting function type into
// the `closures` table.
fn_ty.sig.0.inputs = vec![ty::mk_tup(fcx.tcx(), fn_ty.sig.0.inputs)];
debug!("closure for {:?} --> sig={:?} opt_kind={:?}",
expr_def_id,
fn_ty.sig,
opt_kind);
fcx.inh.closure_tys.borrow_mut().insert(expr_def_id, fn_ty);
match opt_kind {
Some(kind) => { fcx.inh.closure_kinds.borrow_mut().insert(expr_def_id, kind); }
None => { }
}
}
fn deduce_expectations_from_expected_type<'a,'tcx>(
fcx: &FnCtxt<'a,'tcx>,
expected_ty: Ty<'tcx>)
-> (Option<ty::FnSig<'tcx>>,Option<ty::ClosureKind>)
{
debug!("deduce_expectations_from_expected_type(expected_ty={:?})",
expected_ty);
match expected_ty.sty {
ty::TyTrait(ref object_type) => {<|fim▁hole|> .filter_map(|pb| deduce_sig_from_projection(fcx, pb))
.next();
let kind = fcx.tcx().lang_items.fn_trait_kind(object_type.principal_def_id());
(sig, kind)
}
ty::TyInfer(ty::TyVar(vid)) => {
deduce_expectations_from_obligations(fcx, vid)
}
_ => {
(None, None)
}
}
}
fn deduce_expectations_from_obligations<'a,'tcx>(
fcx: &FnCtxt<'a,'tcx>,
expected_vid: ty::TyVid)
-> (Option<ty::FnSig<'tcx>>, Option<ty::ClosureKind>)
{
let fulfillment_cx = fcx.inh.fulfillment_cx.borrow();
// Here `expected_ty` is known to be a type inference variable.
let expected_sig =
fulfillment_cx
.pending_obligations()
.iter()
.filter_map(|obligation| {
debug!("deduce_expectations_from_obligations: obligation.predicate={:?}",
obligation.predicate);
match obligation.predicate {
// Given a Projection predicate, we can potentially infer
// the complete signature.
ty::Predicate::Projection(ref proj_predicate) => {
let trait_ref = proj_predicate.to_poly_trait_ref();
self_type_matches_expected_vid(fcx, trait_ref, expected_vid)
.and_then(|_| deduce_sig_from_projection(fcx, proj_predicate))
}
_ => {
None
}
}
})
.next();
// Even if we can't infer the full signature, we may be able to
// infer the kind. This can occur if there is a trait-reference
// like `F : Fn<A>`. Note that due to subtyping we could encounter
// many viable options, so pick the most restrictive.
let expected_kind =
fulfillment_cx
.pending_obligations()
.iter()
.filter_map(|obligation| {
let opt_trait_ref = match obligation.predicate {
ty::Predicate::Projection(ref data) => Some(data.to_poly_trait_ref()),
ty::Predicate::Trait(ref data) => Some(data.to_poly_trait_ref()),
ty::Predicate::Equate(..) => None,
ty::Predicate::RegionOutlives(..) => None,
ty::Predicate::TypeOutlives(..) => None,
};
opt_trait_ref
.and_then(|trait_ref| self_type_matches_expected_vid(fcx, trait_ref, expected_vid))
.and_then(|trait_ref| fcx.tcx().lang_items.fn_trait_kind(trait_ref.def_id()))
})
.fold(None, pick_most_restrictive_closure_kind);
(expected_sig, expected_kind)
}
fn pick_most_restrictive_closure_kind(best: Option<ty::ClosureKind>,
cur: ty::ClosureKind)
-> Option<ty::ClosureKind>
{
match best {
None => Some(cur),
Some(best) => Some(cmp::min(best, cur))
}
}
/// Given a projection like "<F as Fn(X)>::Result == Y", we can deduce
/// everything we need to know about a closure.
fn deduce_sig_from_projection<'a,'tcx>(
fcx: &FnCtxt<'a,'tcx>,
projection: &ty::PolyProjectionPredicate<'tcx>)
-> Option<ty::FnSig<'tcx>>
{
let tcx = fcx.tcx();
debug!("deduce_sig_from_projection({:?})",
projection);
let trait_ref = projection.to_poly_trait_ref();
if tcx.lang_items.fn_trait_kind(trait_ref.def_id()).is_none() {
return None;
}
let arg_param_ty = *trait_ref.substs().types.get(subst::TypeSpace, 0);
let arg_param_ty = fcx.infcx().resolve_type_vars_if_possible(&arg_param_ty);
debug!("deduce_sig_from_projection: arg_param_ty {:?}", arg_param_ty);
let input_tys = match arg_param_ty.sty {
ty::TyTuple(ref tys) => { (*tys).clone() }
_ => { return None; }
};
debug!("deduce_sig_from_projection: input_tys {:?}", input_tys);
let ret_param_ty = projection.0.ty;
let ret_param_ty = fcx.infcx().resolve_type_vars_if_possible(&ret_param_ty);
debug!("deduce_sig_from_projection: ret_param_ty {:?}", ret_param_ty);
let fn_sig = ty::FnSig {
inputs: input_tys,
output: ty::FnConverging(ret_param_ty),
variadic: false
};
debug!("deduce_sig_from_projection: fn_sig {:?}", fn_sig);
Some(fn_sig)
}
fn self_type_matches_expected_vid<'a,'tcx>(
fcx: &FnCtxt<'a,'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>,
expected_vid: ty::TyVid)
-> Option<ty::PolyTraitRef<'tcx>>
{
let self_ty = fcx.infcx().shallow_resolve(trait_ref.self_ty());
debug!("self_type_matches_expected_vid(trait_ref={:?}, self_ty={:?})",
trait_ref,
self_ty);
match self_ty.sty {
ty::TyInfer(ty::TyVar(v)) if expected_vid == v => Some(trait_ref),
_ => None,
}
}<|fim▁end|> | let proj_bounds = object_type.projection_bounds_with_self_ty(fcx.tcx(),
fcx.tcx().types.err);
let sig = proj_bounds.iter() |
<|file_name|>modernizr.js<|end_file_name|><|fim▁begin|>/**
* # wrap/modernizr
*
* Wrap global instance for use in RequireJS modules
*
* > http://draeton.github.io/stitches<br/>
* > Copyright 2013 Matthew Cobbs<br/>
* > Licensed under the MIT license.
*/<|fim▁hole|>define(function () {
"use strict";
return Modernizr;
});<|fim▁end|> | |
<|file_name|>xinerama.rs<|end_file_name|><|fim▁begin|>/*
* This file generated automatically from xinerama.xml by r_client.py.
* Edit at your peril.
*/
//Make the compiler quiet
#![allow(unused_imports)]
#![allow(unused_unsafe)]
use std;
use libc::*;
use std::{mem,num,ptr,str};
use ffi::base::*;
use base;
use base::*;
use ffi;
use ffi::xinerama::*;
use std::option::Option;
use std::iter::Iterator;
use xproto;
pub struct ScreenInfo {pub base : base::Struct<screen_info> }
pub type ScreenInfoIterator = screen_info_iterator;
pub struct QueryVersionCookie<'s> { pub base : base::Cookie<'s, query_version_cookie> }
/** Opcode for xcb_xinerama_query_version. */
pub static XCB_XINERAMA_QUERY_VERSION : u8 = 0;
pub struct QueryVersionReply { base: base::Reply<query_version_reply> }
fn mk_reply_query_version_reply(reply:*mut query_version_reply) -> QueryVersionReply { QueryVersionReply { base : base::mk_reply(reply) } }
pub struct GetStateCookie<'s> { pub base : base::Cookie<'s, get_state_cookie> }
/** Opcode for xcb_xinerama_get_state. */
pub static XCB_XINERAMA_GET_STATE : u8 = 1;
pub struct GetStateReply { base: base::Reply<get_state_reply> }
fn mk_reply_get_state_reply(reply:*mut get_state_reply) -> GetStateReply { GetStateReply { base : base::mk_reply(reply) } }
pub struct GetScreenCountCookie<'s> { pub base : base::Cookie<'s, get_screen_count_cookie> }
/** Opcode for xcb_xinerama_get_screen_count. */
pub static XCB_XINERAMA_GET_SCREEN_COUNT : u8 = 2;
pub struct GetScreenCountReply { base: base::Reply<get_screen_count_reply> }
fn mk_reply_get_screen_count_reply(reply:*mut get_screen_count_reply) -> GetScreenCountReply { GetScreenCountReply { base : base::mk_reply(reply) } }
pub struct GetScreenSizeCookie<'s> { pub base : base::Cookie<'s, get_screen_size_cookie> }
/** Opcode for xcb_xinerama_get_screen_size. */
pub static XCB_XINERAMA_GET_SCREEN_SIZE : u8 = 3;
pub struct GetScreenSizeReply { base: base::Reply<get_screen_size_reply> }
fn mk_reply_get_screen_size_reply(reply:*mut get_screen_size_reply) -> GetScreenSizeReply { GetScreenSizeReply { base : base::mk_reply(reply) } }
pub struct IsActiveCookie<'s> { pub base : base::Cookie<'s, is_active_cookie> }
/** Opcode for xcb_xinerama_is_active. */
pub static XCB_XINERAMA_IS_ACTIVE : u8 = 4;
pub struct IsActiveReply { base: base::Reply<is_active_reply> }
fn mk_reply_is_active_reply(reply:*mut is_active_reply) -> IsActiveReply { IsActiveReply { base : base::mk_reply(reply) } }
pub struct QueryScreensCookie<'s> { pub base : base::Cookie<'s, query_screens_cookie> }<|fim▁hole|>/** Opcode for xcb_xinerama_query_screens. */
pub static XCB_XINERAMA_QUERY_SCREENS : u8 = 5;
impl ScreenInfo {
pub fn x_org(&mut self) -> i16 {
unsafe { accessor!(x_org -> i16, self.base.strct) }
}
pub fn y_org(&mut self) -> i16 {
unsafe { accessor!(y_org -> i16, self.base.strct) }
}
pub fn width(&mut self) -> u16 {
unsafe { accessor!(width -> u16, self.base.strct) }
}
pub fn height(&mut self) -> u16 {
unsafe { accessor!(height -> u16, self.base.strct) }
}
}
impl<'s, ScreenInfo> Iterator<&'s ScreenInfo> for ScreenInfoIterator {
fn next(&mut self) -> Option<&'s ScreenInfo> {
if self.rem == 0 { return None; }
unsafe {
let iter : *mut screen_info_iterator = mem::transmute(self);
let data = (*iter).data;
xcb_xinerama_screen_info_next(iter);
Some(mem::transmute(data))
}
}
}
pub fn QueryVersion<'r> (c : &'r Connection,
major : u8,
minor : u8) -> QueryVersionCookie<'r> {
unsafe {
let cookie = xcb_xinerama_query_version(c.get_raw_conn(),
major as u8, //1
minor as u8); //2
QueryVersionCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
pub fn QueryVersionUnchecked<'r> (c : &'r Connection,
major : u8,
minor : u8) -> QueryVersionCookie<'r> {
unsafe {
let cookie = xcb_xinerama_query_version_unchecked(c.get_raw_conn(),
major as u8, //1
minor as u8); //2
QueryVersionCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
impl QueryVersionReply {
pub fn major(&mut self) -> u16 {
unsafe { accessor!(major -> u16, (*self.base.reply)) }
}
pub fn minor(&mut self) -> u16 {
unsafe { accessor!(minor -> u16, (*self.base.reply)) }
}
}
impl_reply_cookie!(QueryVersionCookie<'s>, mk_reply_query_version_reply, QueryVersionReply, xcb_xinerama_query_version_reply);
pub fn GetState<'r> (c : &'r Connection,
window : xproto::Window) -> GetStateCookie<'r> {
unsafe {
let cookie = xcb_xinerama_get_state(c.get_raw_conn(),
window as ffi::xproto::window); //1
GetStateCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
pub fn GetStateUnchecked<'r> (c : &'r Connection,
window : xproto::Window) -> GetStateCookie<'r> {
unsafe {
let cookie = xcb_xinerama_get_state_unchecked(c.get_raw_conn(),
window as ffi::xproto::window); //1
GetStateCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
impl GetStateReply {
pub fn state(&mut self) -> u8 {
unsafe { accessor!(state -> u8, (*self.base.reply)) }
}
pub fn window(&mut self) -> xproto::Window {
unsafe { accessor!(window -> xproto::Window, (*self.base.reply)) }
}
}
impl_reply_cookie!(GetStateCookie<'s>, mk_reply_get_state_reply, GetStateReply, xcb_xinerama_get_state_reply);
pub fn GetScreenCount<'r> (c : &'r Connection,
window : xproto::Window) -> GetScreenCountCookie<'r> {
unsafe {
let cookie = xcb_xinerama_get_screen_count(c.get_raw_conn(),
window as ffi::xproto::window); //1
GetScreenCountCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
pub fn GetScreenCountUnchecked<'r> (c : &'r Connection,
window : xproto::Window) -> GetScreenCountCookie<'r> {
unsafe {
let cookie = xcb_xinerama_get_screen_count_unchecked(c.get_raw_conn(),
window as ffi::xproto::window); //1
GetScreenCountCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
impl GetScreenCountReply {
pub fn screen_count(&mut self) -> u8 {
unsafe { accessor!(screen_count -> u8, (*self.base.reply)) }
}
pub fn window(&mut self) -> xproto::Window {
unsafe { accessor!(window -> xproto::Window, (*self.base.reply)) }
}
}
impl_reply_cookie!(GetScreenCountCookie<'s>, mk_reply_get_screen_count_reply, GetScreenCountReply, xcb_xinerama_get_screen_count_reply);
pub fn GetScreenSize<'r> (c : &'r Connection,
window : xproto::Window,
screen : u32) -> GetScreenSizeCookie<'r> {
unsafe {
let cookie = xcb_xinerama_get_screen_size(c.get_raw_conn(),
window as ffi::xproto::window, //1
screen as u32); //2
GetScreenSizeCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
pub fn GetScreenSizeUnchecked<'r> (c : &'r Connection,
window : xproto::Window,
screen : u32) -> GetScreenSizeCookie<'r> {
unsafe {
let cookie = xcb_xinerama_get_screen_size_unchecked(c.get_raw_conn(),
window as ffi::xproto::window, //1
screen as u32); //2
GetScreenSizeCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
impl GetScreenSizeReply {
pub fn width(&mut self) -> u32 {
unsafe { accessor!(width -> u32, (*self.base.reply)) }
}
pub fn height(&mut self) -> u32 {
unsafe { accessor!(height -> u32, (*self.base.reply)) }
}
pub fn window(&mut self) -> xproto::Window {
unsafe { accessor!(window -> xproto::Window, (*self.base.reply)) }
}
pub fn screen(&mut self) -> u32 {
unsafe { accessor!(screen -> u32, (*self.base.reply)) }
}
}
impl_reply_cookie!(GetScreenSizeCookie<'s>, mk_reply_get_screen_size_reply, GetScreenSizeReply, xcb_xinerama_get_screen_size_reply);
pub fn IsActive<'r> (c : &'r Connection) -> IsActiveCookie<'r> {
unsafe {
let cookie = xcb_xinerama_is_active(c.get_raw_conn());
IsActiveCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
pub fn IsActiveUnchecked<'r> (c : &'r Connection) -> IsActiveCookie<'r> {
unsafe {
let cookie = xcb_xinerama_is_active_unchecked(c.get_raw_conn());
IsActiveCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
impl IsActiveReply {
pub fn state(&mut self) -> u32 {
unsafe { accessor!(state -> u32, (*self.base.reply)) }
}
}
impl_reply_cookie!(IsActiveCookie<'s>, mk_reply_is_active_reply, IsActiveReply, xcb_xinerama_is_active_reply);
pub struct QueryScreensReply { base: base::Reply<query_screens_reply> }
fn mk_reply_query_screens_reply(reply:*mut query_screens_reply) -> QueryScreensReply { QueryScreensReply { base : base::mk_reply(reply) } }
pub fn QueryScreens<'r> (c : &'r Connection) -> QueryScreensCookie<'r> {
unsafe {
let cookie = xcb_xinerama_query_screens(c.get_raw_conn());
QueryScreensCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
pub fn QueryScreensUnchecked<'r> (c : &'r Connection) -> QueryScreensCookie<'r> {
unsafe {
let cookie = xcb_xinerama_query_screens_unchecked(c.get_raw_conn());
QueryScreensCookie { base : Cookie {cookie:cookie,conn:c,checked:false}}
}
}
impl QueryScreensReply {
pub fn screen_info(&mut self) -> ScreenInfoIterator {
unsafe { accessor!(ScreenInfoIterator, xcb_xinerama_query_screens_screen_info_iterator, (*self.base.reply)) }
}
}
impl_reply_cookie!(QueryScreensCookie<'s>, mk_reply_query_screens_reply, QueryScreensReply, xcb_xinerama_query_screens_reply);<|fim▁end|> | |
<|file_name|>pulseaudiomixer.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2013-2014 Giulio Camuffo <[email protected]>
*
* This file is part of Orbital
*
* Orbital is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Orbital is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Orbital. If not, see <http://www.gnu.org/licenses/>.
*/
#include <QDebug>
#include <pulse/glib-mainloop.h>
#include <pulse/volume.h>
#include "pulseaudiomixer.h"
#include "client.h"
struct Sink
{
Sink() : index(0), volume{}, muted(false) {}
uint32_t index;
pa_cvolume volume;
bool muted;
};
PulseAudioMixer::PulseAudioMixer(Mixer *m)
: Backend()
, m_mixer(m)
, m_sink(new Sink)
{
}
PulseAudioMixer::~PulseAudioMixer()
{
delete m_sink;
cleanup();<|fim▁hole|> PulseAudioMixer *pulse = new PulseAudioMixer(mixer);
if (!(pulse->m_mainLoop = pa_glib_mainloop_new(nullptr))) {
qWarning("pa_mainloop_new() failed.");
delete pulse;
return nullptr;
}
pulse->m_mainloopApi = pa_glib_mainloop_get_api(pulse->m_mainLoop);
// pluseaudio tries to connect to X if DISPLAY is set. the problem is that if we have
// xwayland running it will try to connect to it, and we don't want that.
char *dpy = getenv("DISPLAY");
if (dpy) {
setenv("DISPLAY", "", 1);
}
pulse->m_context = pa_context_new(pulse->m_mainloopApi, nullptr);
if (dpy) {
setenv("DISPLAY", dpy, 1);
}
if (!pulse->m_context) {
qWarning("pa_context_new() failed.");
delete pulse;
return nullptr;
}
pa_context_set_state_callback(pulse->m_context, [](pa_context *c, void *ud) {
static_cast<PulseAudioMixer *>(ud)->contextStateCallback(c);
}, pulse);
if (pa_context_connect(pulse->m_context, nullptr, (pa_context_flags_t)0, nullptr) < 0) {
qWarning("pa_context_connect() failed: %s", pa_strerror(pa_context_errno(pulse->m_context)));
delete pulse;
return nullptr;
}
return pulse;
}
void PulseAudioMixer::contextStateCallback(pa_context *c)
{
switch (pa_context_get_state(c)) {
case PA_CONTEXT_CONNECTING:
case PA_CONTEXT_AUTHORIZING:
case PA_CONTEXT_SETTING_NAME:
break;
case PA_CONTEXT_READY:
pa_context_set_subscribe_callback(c, [](pa_context *c, pa_subscription_event_type_t t, uint32_t index, void *ud) {
static_cast<PulseAudioMixer *>(ud)->subscribeCallback(c, t, index);
}, this);
pa_context_subscribe(c, PA_SUBSCRIPTION_MASK_SINK, nullptr, nullptr);
pa_context_get_sink_info_list(c, [](pa_context *c, const pa_sink_info *i, int eol, void *ud) {
static_cast<PulseAudioMixer *>(ud)->sinkCallback(c, i, eol);
}, this);
break;
case PA_CONTEXT_TERMINATED:
cleanup();
break;
case PA_CONTEXT_FAILED:
default:
qWarning("Connection with the pulseaudio server failed: %s", pa_strerror(pa_context_errno(c)));
cleanup();
break;
}
}
void PulseAudioMixer::subscribeCallback(pa_context *c, pa_subscription_event_type_t t, uint32_t index)
{
switch (t & PA_SUBSCRIPTION_EVENT_FACILITY_MASK) {
case PA_SUBSCRIPTION_EVENT_SINK:
pa_context_get_sink_info_list(c, [](pa_context *c, const pa_sink_info *i, int eol, void *ud) {
static_cast<PulseAudioMixer *>(ud)->sinkCallback(c, i, eol);
}, this);
break;
default:
break;
}
}
void PulseAudioMixer::sinkCallback(pa_context *c, const pa_sink_info *i, int eol)
{
if (eol < 0) {
if (pa_context_errno(c) == PA_ERR_NOENTITY)
return;
qWarning() << "Sink callback failure";
return;
}
if (eol > 0) {
return;
}
m_sink->index = i->index;
if (m_sink->muted != (bool)i->mute) {
m_sink->muted = (bool)i->mute;
emit m_mixer->mutedChanged();
}
m_sink->volume = i->volume;
emit m_mixer->masterChanged();
}
void PulseAudioMixer::cleanup()
{
}
void PulseAudioMixer::getBoundaries(int *min, int *max) const
{
*min = PA_VOLUME_MUTED;
*max = PA_VOLUME_NORM;
}
void PulseAudioMixer::setRawVol(int vol)
{
if (!pa_channels_valid(m_sink->volume.channels)) {
qWarning("Cannot change Pulseaudio volume: invalid channels %d", m_sink->volume.channels);
return;
}
setMuted(false);
pa_cvolume_set(&m_sink->volume, m_sink->volume.channels, vol);
pa_context_set_sink_volume_by_index(m_context, m_sink->index, &m_sink->volume, nullptr, nullptr);
}
int PulseAudioMixer::rawVol() const
{
return pa_cvolume_avg(&m_sink->volume);
}
bool PulseAudioMixer::muted() const
{
return m_sink->muted;
}
void PulseAudioMixer::setMuted(bool muted)
{
pa_context_set_sink_mute_by_index(m_context, m_sink->index, muted, nullptr, nullptr);
}<|fim▁end|> | }
PulseAudioMixer *PulseAudioMixer::create(Mixer *mixer)
{ |
<|file_name|>ActivityWithRemarks.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { classes } from '@framework/Globals'
import { Lite } from '@framework/Signum.Entities'
import { CaseActivityMessage, CaseNotificationEntity, CaseNotificationOperation, CaseActivityEntity, WorkflowActivityEntity, CaseTagTypeEntity, CaseEntity } from '../Signum.Entities.Workflow'
import { FindOptions } from '@framework/Search'
import * as Finder from '@framework/Finder'
import * as AppContext from '@framework/AppContext'
import * as Operations from '@framework/Operations'
import ValueLineModal from '@framework/ValueLineModal'
import { AlertEntity } from '../../Alerts/Signum.Entities.Alerts'
import InlineCaseTags from './InlineCaseTags'
import { useAPI } from '@framework/Hooks'
export interface ActivityWithRemarks {
workflowActivity: Lite<WorkflowActivityEntity>;
case: Lite<CaseEntity>;
caseActivity: Lite<CaseActivityEntity>;
notification: Lite<CaseNotificationEntity>;
remarks: string | null
alerts: number;
tags: Array<CaseTagTypeEntity>;
}
export interface ActivityWithRemarksProps extends React.Props<ActivityWithRemarks> {
data: ActivityWithRemarks;
}
function useStateFromProps<T>(propsValue: T): [T, (newValue: T) => void] {
var [val, setVal] = React.useState(propsValue);
React.useEffect(() => {
setVal(propsValue);
}, [propsValue]);
return [val, setVal];
}
export default function ActivityWithRemarksComponent(p: ActivityWithRemarksProps) {
const [remarks, setRemarks] = useStateFromProps(p.data.remarks);
const [alerts, setAlerts] = useStateFromProps(p.data.alerts);
const tags = useAPI(() => Promise.resolve(p.data.tags), p.data.tags.map(t => t.id));
function handleAlertsClick(e: React.MouseEvent<any>) {
e.preventDefault();
var fo: FindOptions = {
queryName: AlertEntity,
filterOptions: [
{ token: AlertEntity.token(a => a.target), value: p.data.caseActivity },
{ token: AlertEntity.token().entity(e => e.recipient), value: AppContext.currentUser },
{ token: AlertEntity.token().entity().expression("CurrentState"), value: "Alerted" }
],
columnOptions: [{ token: AlertEntity.token(e => e.target) }],
columnOptionsMode: "Remove",
};
Finder.exploreOrNavigate(fo)
.then(() => Finder.getQueryValue(fo.queryName, fo.filterOptions!))
.then(alerts => setAlerts(alerts))
.done();
}
function handleRemarksClick(e: React.MouseEvent<any>) {
e.preventDefault();
ValueLineModal.show({
type: { name: "string" },
valueLineType: "TextArea",
title: CaseNotificationEntity.nicePropertyName(a => a.remarks),
message: CaseActivityMessage.PersonalRemarksForThisNotification.niceToString(),
labelText: undefined,
initialValue: remarks,
<|fim▁hole|> }).then(remarks => {
if (remarks === undefined)
return;
Operations.API.executeLite(p.data.notification, CaseNotificationOperation.SetRemarks, remarks)
.then(n => setRemarks(n.entity.remarks))
.done();
}).done();
}
return (
<span>
{p.data.workflowActivity.toStr}
<a href="#" onClick={handleRemarksClick} className={classes(
"case-icon",
!remarks && "case-icon-ghost")}>
<FontAwesomeIcon icon={remarks ? "comment-dots" : ["far", "comment"]} />
</a>
{alerts > 0 && " "}
{alerts > 0 && <a href="#" onClick={handleAlertsClick} style={{ color: "orange" }}>
<FontAwesomeIcon icon={"bell"} />
</a>}
<InlineCaseTags case={p.data.case} defaultTags={tags} />
</span>
);
}<|fim▁end|> | initiallyFocused: true
|
<|file_name|>using_pageobjects.py<|end_file_name|><|fim▁begin|>from behave import then, when
from bs4 import BeautifulSoup
from bs4.element import Tag
from pageobjects.pages import About, Welcome
@when(u'I instantiate the Welcome page object')
def new_pageobject(context):
context.page = Welcome(context)
@then(u'it provides a valid Beautiful Soup document')
def pageobject_works(context):
assert context.page.response.status_code == 200
assert context.page.request == context.page.response.request
assert isinstance(context.page.document, BeautifulSoup)
assert 'Test App: behave-django' == context.page.document.title.string, \
"unexpected title: %s" % context.page.document.title.string
<|fim▁hole|> context.about_link = context.page.get_link('about')
assert isinstance(context.about_link, Tag), \
"should be instance of %s (not %s)" % (
Tag.__name__, context.about_link.__class__.__name__)
@when('I call click() on the link')
def linkelement_click(context):
context.next_page = context.about_link.click()
@then('it loads a new PageObject')
def click_returns_pageobject(context):
assert About(context) == context.next_page<|fim▁end|> | @then(u'get_link() returns the link subdocument')
def getlink_subdocument(context): |
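The pageobjects.pages module these steps import is not shown; conceptually each page object wraps a Django test-client response in BeautifulSoup. A minimal sketch of that shape (hypothetical, assuming behave-django's context.test):
from bs4 import BeautifulSoup

class PageObject:
    path = "/"  # overridden by Welcome, About, ...

    def __init__(self, context):
        self.response = context.test.client.get(self.path)  # behave-django's TestCase client
        self.request = self.response.request
        self.document = BeautifulSoup(self.response.content, "html.parser")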
<|file_name|>crunchtime.js<|end_file_name|><|fim▁begin|>$.ajax({
async: false,
url: "http://api.visalus.com/ITReporting/SalesAnalytics/GetDataBySP/?SPName=usp_PROMO_ViCrunch3FF_JSON&?",
type: 'GET',
dataType: 'jsonp',
success: function (data) {
var result = JSON.stringify(data);
//alert(result);
var obj = jQuery.parseJSON(result);
var output="<table class='standings-table' width='100%'><tr class='head'><td align='center'>Rank</td><td>Name</td><td>Location</td><td align='center'>VIP Tier</td></tr>";
for (var i in obj) {
output+="<tr><td align='center'>" + obj[i].Rank + "</td><td>" + obj[i].Name + "</td><td>" + obj[i].Location + "</td><td align='center'>" + obj[i].VIPtier + "</td></tr>";
}
output+="</table>";
document.getElementById("crunchtime").innerHTML = output;
<|fim▁hole|> error: function (e) {
alert('fail');
}
});<|fim▁end|> | },
|
<|file_name|>search-model.ts<|end_file_name|><|fim▁begin|>export class SearchModel {<|fim▁hole|> }
public SearchField: string;
public SearchValue: string;
}<|fim▁end|> | constructor(key: string, value: string) {
this.SearchField = key;
this.SearchValue = value; |
<|file_name|>sig_key_pair.rs<|end_file_name|><|fim▁begin|>use super::{super::{hash,
PUBLIC_KEY_SUFFIX,
PUBLIC_SIG_KEY_VERSION,
SECRET_SIG_KEY_SUFFIX,
SECRET_SIG_KEY_VERSION},
get_key_revisions,
mk_key_filename,
mk_revision_string,
parse_name_with_rev,
read_key_bytes,
write_keypair_files,
KeyPair,
KeyType,
PairType,
TmpKeyfile};
use crate::error::{Error,
Result};
use sodiumoxide::{crypto::sign::{self,
ed25519::{PublicKey as SigPublicKey,
SecretKey as SigSecretKey}},
randombytes::randombytes};
use std::{fs,
path::{Path,
PathBuf}};
pub type SigKeyPair = KeyPair<SigPublicKey, SigSecretKey>;
impl SigKeyPair {
pub fn generate_pair_for_origin(name: &str) -> Self {
let revision = mk_revision_string();
let (pk, sk) = sign::gen_keypair();
Self::new(name.to_string(), revision, Some(pk), Some(sk))
}
/// Return a Vec of origin keys with a given name.
/// The newest key is listed first in the Vec.
pub fn get_pairs_for<P: AsRef<Path> + ?Sized>(name: &str,
cache_key_path: &P,
pair_type: Option<&PairType>)
-> Result<Vec<Self>> {
let revisions = get_key_revisions(name, cache_key_path.as_ref(), pair_type, &KeyType::Sig)?;
debug!("revisions = {:?}", &revisions);
let mut key_pairs = Vec::new();
for name_with_rev in &revisions {
debug!("Attempting to read key name_with_rev {} for {}",
name_with_rev, name);
let kp = Self::get_pair_for(name_with_rev, cache_key_path)?;
key_pairs.push(kp);
}
Ok(key_pairs)
}
pub fn get_pair_for<P: AsRef<Path> + ?Sized>(name_with_rev: &str,
cache_key_path: &P)
-> Result<Self> {
let (name, rev) = parse_name_with_rev(name_with_rev)?;
let pk = match Self::get_public_key(name_with_rev, cache_key_path.as_ref()) {
Ok(k) => Some(k),
Err(e) => {
// Not an error, just continue
debug!("Can't find public key for name_with_rev {}: {}",
name_with_rev, e);
None
}
};
let sk = match Self::get_secret_key(name_with_rev, cache_key_path.as_ref()) {
Ok(k) => Some(k),
Err(e) => {
// Not an error, just continue
debug!("Can't find secret key for name_with_rev {}: {}",
name_with_rev, e);
None
}
};
if pk == None && sk == None {
let msg = format!("No public or secret keys found for name_with_rev {}",
name_with_rev);
return Err(Error::CryptoError(msg));
}
Ok(SigKeyPair::new(name, rev, pk, sk))
}
pub fn get_latest_pair_for<P: AsRef<Path> + ?Sized>(name: &str,
cache_key_path: &P,
pair_type: Option<&PairType>)
-> Result<Self> {
let mut all = Self::get_pairs_for(name, cache_key_path, pair_type)?;
match all.len() {
0 => {
let msg = format!("No revisions found for {} sig key", name);
Err(Error::CryptoError(msg))
}
_ => Ok(all.remove(0)),
}
}
pub fn get_public_key_path<P: AsRef<Path> + ?Sized>(key_with_rev: &str,
cache_key_path: &P)
-> Result<PathBuf> {
let path = mk_key_filename(cache_key_path.as_ref(), key_with_rev, PUBLIC_KEY_SUFFIX);
if !path.is_file() {
return Err(Error::CryptoError(format!("No public key found at {}", path.display())));
}
Ok(path)
}
pub fn get_secret_key_path<P: AsRef<Path> + ?Sized>(key_with_rev: &str,
cache_key_path: &P)
-> Result<PathBuf> {
let path = mk_key_filename(cache_key_path.as_ref(), key_with_rev, SECRET_SIG_KEY_SUFFIX);
if !path.is_file() {
return Err(Error::CryptoError(format!("No secret key found at {}", path.display())));
}
Ok(path)
}
/// Writes a sig key (public or secret) to the key cache from the contents of a string slice.
///
/// The return is a `Result` of a `String` containing the key's name with revision.
///
/// # Examples
///
/// With a public key:
///
/// ```
/// extern crate habitat_core;
/// extern crate tempfile;
///
/// use habitat_core::crypto::{keys::PairType,
/// SigKeyPair};
/// use std::{fs::File,
/// io::Read};
/// use tempfile::Builder;
///
/// let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
/// let content = "SIG-PUB-1
/// unicorn-20160517220007
///
/// J+FGYVKgragA+dzQHCGORd2oLwCc2EvAnT9roz9BJh0=";
/// let key_path = cache.path().join("unicorn-20160517220007.pub");
///
/// let (pair, pair_type) = SigKeyPair::write_file_from_str(content, cache.path()).unwrap();
/// assert_eq!(pair_type, PairType::Public);
/// assert_eq!(pair.name_with_rev(), "unicorn-20160517220007");
/// assert!(key_path.is_file());
/// let mut f = File::open(key_path).unwrap();
/// let mut key_content = String::new();
/// f.read_to_string(&mut key_content).unwrap();
/// assert_eq!(&key_content, content);
/// ```
///
/// With a secret key:
///
/// ```
/// extern crate habitat_core;
/// extern crate tempfile;
///
/// use habitat_core::crypto::{keys::PairType,
/// SigKeyPair};
/// use std::{fs::File,
/// io::Read};
/// use tempfile::Builder;
///
/// let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
/// let content = "SIG-SEC-1
/// unicorn-20160517220007
///
/// jjQaaphB5+CHw7QzDWqMMuwhWmrrHH+SzQAgRrHfQ8sn4UZhUqCtqAD53NAcIY5F3agvAJzYS8CdP2ujP0EmHQ==";
/// let key_path = cache.path().join("unicorn-20160517220007.sig.key");
///
/// let (pair, pair_type) = SigKeyPair::write_file_from_str(content, cache.path()).unwrap();
/// assert_eq!(pair_type, PairType::Secret);
/// assert_eq!(pair.name_with_rev(), "unicorn-20160517220007");
/// assert!(key_path.is_file());
/// let mut f = File::open(key_path).unwrap();
/// let mut key_content = String::new();
/// f.read_to_string(&mut key_content).unwrap();
/// assert_eq!(&key_content, content);
/// ```
///
/// # Errors
///
/// * If there is a key version mismatch
/// * If the key version is missing
/// * If the key name with revision is missing
/// * If the key value (the Bas64 payload) is missing
/// * If the key file cannot be written to disk
/// * If an existing key is already installed, but the new content is different from the
/// existing
pub fn write_file_from_str<P: AsRef<Path> + ?Sized>(content: &str,
cache_key_path: &P)<|fim▁hole|> let suffix = match pair_type {
PairType::Public => PUBLIC_KEY_SUFFIX,
PairType::Secret => SECRET_SIG_KEY_SUFFIX,
};
let keyfile = mk_key_filename(cache_key_path.as_ref(), &name_with_rev, &suffix);
let tmpfile = {
let mut t = keyfile.clone();
t.set_file_name(format!("{}.{}",
&keyfile.file_name().unwrap().to_str().unwrap(),
&hex::encode(randombytes(6).as_slice())));
TmpKeyfile { path: t }
};
debug!("Writing temp key file {}", tmpfile.path.display());
match pair_type {
PairType::Public => {
write_keypair_files(Some(&tmpfile.path), Some(content.to_string()), None, None)?;
}
PairType::Secret => {
write_keypair_files(None, None, Some(&tmpfile.path), Some(content.to_string()))?;
}
}
if Path::new(&keyfile).is_file() {
let existing_hash = hash::hash_file(&keyfile)?;
let new_hash = hash::hash_file(&tmpfile.path)?;
if existing_hash != new_hash {
let msg = format!("Existing key file {} found but new version hash is different, \
failing to write new file over existing. ({} = {}, {} = {})",
keyfile.display(),
keyfile.display(),
existing_hash,
tmpfile.path.display(),
new_hash);
return Err(Error::CryptoError(msg));
} else {
// Otherwise, hashes match and we can skip writing over the existing file
debug!("New content hash matches existing file {} hash, removing temp key file \
{}.",
keyfile.display(),
tmpfile.path.display());
fs::remove_file(&tmpfile.path)?;
}
} else {
fs::rename(&tmpfile.path, keyfile)?;
}
Ok((Self::get_pair_for(&name_with_rev, cache_key_path)?, pair_type))
}
pub fn to_public_string(&self) -> Result<String> {
match self.public {
Some(pk) => {
Ok(format!("{}\n{}\n\n{}",
PUBLIC_SIG_KEY_VERSION,
self.name_with_rev(),
&base64::encode(&pk[..])))
}
None => {
Err(Error::CryptoError(format!("No public key present for {}",
self.name_with_rev())))
}
}
}
pub fn to_secret_string(&self) -> Result<String> {
match self.secret {
Some(ref sk) => {
Ok(format!("{}\n{}\n\n{}",
SECRET_SIG_KEY_VERSION,
self.name_with_rev(),
&base64::encode(&sk[..])))
}
None => {
Err(Error::CryptoError(format!("No secret key present for {}",
self.name_with_rev())))
}
}
}
pub fn to_pair_files<P: AsRef<Path> + ?Sized>(&self, path: &P) -> Result<()> {
let public_keyfile = mk_key_filename(path, self.name_with_rev(), PUBLIC_KEY_SUFFIX);
let secret_keyfile = mk_key_filename(path, self.name_with_rev(), SECRET_SIG_KEY_SUFFIX);
debug!("public sig keyfile = {}", public_keyfile.display());
debug!("secret sig keyfile = {}", secret_keyfile.display());
write_keypair_files(Some(&public_keyfile),
Some(self.to_public_string()?),
Some(&secret_keyfile),
Some(self.to_secret_string()?))
}
fn get_public_key(key_with_rev: &str, cache_key_path: &Path) -> Result<SigPublicKey> {
let public_keyfile = mk_key_filename(cache_key_path, key_with_rev, PUBLIC_KEY_SUFFIX);
let bytes = read_key_bytes(&public_keyfile)?;
match SigPublicKey::from_slice(&bytes) {
Some(sk) => Ok(sk),
None => {
Err(Error::CryptoError(format!("Can't read sig public key \
for {}",
key_with_rev)))
}
}
}
fn get_secret_key(key_with_rev: &str, cache_key_path: &Path) -> Result<SigSecretKey> {
let secret_keyfile = mk_key_filename(cache_key_path, key_with_rev, SECRET_SIG_KEY_SUFFIX);
let bytes = read_key_bytes(&secret_keyfile)?;
match SigSecretKey::from_slice(&bytes) {
Some(sk) => Ok(sk),
None => {
Err(Error::CryptoError(format!("Can't read sig secret key \
for {}",
key_with_rev)))
}
}
}
}
#[cfg(test)]
mod test {
use std::{fs::{self,
File},
io::Read};
use tempfile::Builder;
use super::{super::{super::test_support::*,
PairType},
SigKeyPair};
static VALID_KEY: &str = "origin-key-valid-20160509190508.sig.key";
static VALID_PUB: &str = "origin-key-valid-20160509190508.pub";
static VALID_NAME_WITH_REV: &str = "origin-key-valid-20160509190508";
#[test]
fn empty_struct() {
let pair = SigKeyPair::new("grohl".to_string(), "201604051449".to_string(), None, None);
assert_eq!(pair.name, "grohl");
assert_eq!(pair.rev, "201604051449");
assert_eq!(pair.name_with_rev(), "grohl-201604051449");
assert_eq!(pair.public, None);
assert!(pair.public().is_err(),
"Empty pair should not have a public key");
assert_eq!(pair.secret, None);
assert!(pair.secret().is_err(),
"Empty pair should not have a secret key");
}
#[test]
fn generated_origin_pair() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let pair = SigKeyPair::generate_pair_for_origin("unicorn");
pair.to_pair_files(cache.path()).unwrap();
assert_eq!(pair.name, "unicorn");
assert!(pair.public().is_ok(),
"Generated pair should have a public key");
assert!(pair.secret().is_ok(),
"Generated pair should have a public key");
assert!(cache.path()
.join(format!("{}.pub", pair.name_with_rev()))
.exists());
assert!(cache.path()
.join(format!("{}.sig.key", pair.name_with_rev()))
.exists());
}
#[test]
fn get_pairs_for() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let pairs = SigKeyPair::get_pairs_for("unicorn", cache.path(), None).unwrap();
assert_eq!(pairs.len(), 0);
SigKeyPair::generate_pair_for_origin("unicorn").to_pair_files(cache.path())
.unwrap();
let pairs = SigKeyPair::get_pairs_for("unicorn", cache.path(), None).unwrap();
assert_eq!(pairs.len(), 1);
match wait_until_ok(|| {
let p = SigKeyPair::generate_pair_for_origin("unicorn");
p.to_pair_files(cache.path())?;
Ok(())
}) {
Some(pair) => pair,
None => panic!("Failed to generate another keypair after waiting"),
};
let pairs = SigKeyPair::get_pairs_for("unicorn", cache.path(), None).unwrap();
assert_eq!(pairs.len(), 2);
// We should not include another named key in the count
SigKeyPair::generate_pair_for_origin("dragon").to_pair_files(cache.path())
.unwrap();
let pairs = SigKeyPair::get_pairs_for("unicorn", cache.path(), None).unwrap();
assert_eq!(pairs.len(), 2);
// We should be able to count public and private keys separately
let pairs =
SigKeyPair::get_pairs_for("unicorn", cache.path(), Some(&PairType::Secret)).unwrap();
assert_eq!(pairs.len(), 2);
let pairs =
SigKeyPair::get_pairs_for("unicorn", cache.path(), Some(&PairType::Public)).unwrap();
assert_eq!(pairs.len(), 2);
}
#[test]
fn get_pair_for() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let p1 = SigKeyPair::generate_pair_for_origin("unicorn");
p1.to_pair_files(cache.path()).unwrap();
let p2 = match wait_until_ok(|| {
let p = SigKeyPair::generate_pair_for_origin("unicorn");
p.to_pair_files(cache.path())?;
Ok(p)
}) {
Some(pair) => pair,
None => panic!("Failed to generate another keypair after waiting"),
};
let p1_fetched = SigKeyPair::get_pair_for(&p1.name_with_rev(), cache.path()).unwrap();
assert_eq!(p1.name, p1_fetched.name);
assert_eq!(p1.rev, p1_fetched.rev);
let p2_fetched = SigKeyPair::get_pair_for(&p2.name_with_rev(), cache.path()).unwrap();
assert_eq!(p2.name, p2_fetched.name);
assert_eq!(p2.rev, p2_fetched.rev);
}
#[test]
#[should_panic(expected = "No public or secret keys found for")]
fn get_pair_for_nonexistent() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::get_pair_for("nope-nope-20160405144901", cache.path()).unwrap();
}
#[test]
fn get_latest_pair_for_single() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let pair = SigKeyPair::generate_pair_for_origin("unicorn");
pair.to_pair_files(cache.path()).unwrap();
let latest = SigKeyPair::get_latest_pair_for("unicorn", cache.path(), None).unwrap();
assert_eq!(latest.name, pair.name);
assert_eq!(latest.rev, pair.rev);
}
#[test]
fn get_latest_pair_for_multiple() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::generate_pair_for_origin("unicorn").to_pair_files(cache.path())
.unwrap();
let p2 = match wait_until_ok(|| {
let p = SigKeyPair::generate_pair_for_origin("unicorn");
p.to_pair_files(cache.path())?;
Ok(p)
}) {
Some(pair) => pair,
None => panic!("Failed to generate another keypair after waiting"),
};
let latest = SigKeyPair::get_latest_pair_for("unicorn", cache.path(), None).unwrap();
assert_eq!(latest.name, p2.name);
assert_eq!(latest.rev, p2.rev);
}
#[test]
fn get_latest_pair_for_secret() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let p = SigKeyPair::generate_pair_for_origin("unicorn");
p.to_pair_files(cache.path()).unwrap();
let latest = SigKeyPair::get_latest_pair_for("unicorn",
cache.path(),
Some(&PairType::Secret)).unwrap();
assert_eq!(latest.name, p.name);
assert_eq!(latest.rev, p.rev);
}
#[test]
fn get_latest_pair_for_public() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let p = SigKeyPair::generate_pair_for_origin("unicorn");
p.to_pair_files(cache.path()).unwrap();
let latest = SigKeyPair::get_latest_pair_for("unicorn",
cache.path(),
Some(&PairType::Public)).unwrap();
assert_eq!(latest.name, p.name);
assert_eq!(latest.rev, p.rev);
}
#[test]
#[should_panic(expected = "No revisions found for")]
fn get_latest_pair_for_nonexistent() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::get_latest_pair_for("nope-nope", cache.path(), None).unwrap();
}
#[test]
fn get_public_key_path() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
fs::copy(fixture(&format!("keys/{}", VALID_PUB)),
cache.path().join(VALID_PUB)).unwrap();
let result = SigKeyPair::get_public_key_path(VALID_NAME_WITH_REV, cache.path()).unwrap();
assert_eq!(result, cache.path().join(VALID_PUB));
}
#[test]
#[should_panic(expected = "No public key found at")]
fn get_public_key_path_nonexistent() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::get_public_key_path(VALID_NAME_WITH_REV, cache.path()).unwrap();
}
#[test]
fn get_secret_key_path() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
fs::copy(fixture(&format!("keys/{}", VALID_KEY)),
cache.path().join(VALID_KEY)).unwrap();
let result = SigKeyPair::get_secret_key_path(VALID_NAME_WITH_REV, cache.path()).unwrap();
assert_eq!(result, cache.path().join(VALID_KEY));
}
#[test]
#[should_panic(expected = "No secret key found at")]
fn get_secret_key_path_nonexistent() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::get_secret_key_path(VALID_NAME_WITH_REV, cache.path()).unwrap();
}
#[test]
fn write_file_from_str_secret() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let content = fixture_as_string(&format!("keys/{}", VALID_KEY));
let new_key_file = cache.path().join(VALID_KEY);
assert_eq!(new_key_file.is_file(), false);
let (pair, pair_type) = SigKeyPair::write_file_from_str(&content, cache.path()).unwrap();
assert_eq!(pair_type, PairType::Secret);
assert_eq!(pair.name_with_rev(), VALID_NAME_WITH_REV);
assert!(new_key_file.is_file());
let new_content = {
let mut new_content_file = File::open(new_key_file).unwrap();
let mut new_content = String::new();
new_content_file.read_to_string(&mut new_content).unwrap();
new_content
};
assert_eq!(new_content, content);
}
#[test]
fn write_file_from_str_public() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let content = fixture_as_string(&format!("keys/{}", VALID_PUB));
let new_key_file = cache.path().join(VALID_PUB);
assert_eq!(new_key_file.is_file(), false);
let (pair, pair_type) = SigKeyPair::write_file_from_str(&content, cache.path()).unwrap();
assert_eq!(pair_type, PairType::Public);
assert_eq!(pair.name_with_rev(), VALID_NAME_WITH_REV);
assert!(new_key_file.is_file());
let new_content = {
let mut new_content_file = File::open(new_key_file).unwrap();
let mut new_content = String::new();
new_content_file.read_to_string(&mut new_content).unwrap();
new_content
};
assert_eq!(new_content, content);
}
#[test]
fn write_file_from_str_with_existing_identical_secret() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let content = fixture_as_string(&format!("keys/{}", VALID_KEY));
let new_key_file = cache.path().join(VALID_KEY);
// install the key into the cache
fs::copy(fixture(&format!("keys/{}", VALID_KEY)), &new_key_file).unwrap();
let (pair, pair_type) = SigKeyPair::write_file_from_str(&content, cache.path()).unwrap();
assert_eq!(pair_type, PairType::Secret);
assert_eq!(pair.name_with_rev(), VALID_NAME_WITH_REV);
assert!(new_key_file.is_file());
}
#[test]
fn write_file_from_str_with_existing_identical_public() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let content = fixture_as_string(&format!("keys/{}", VALID_PUB));
let new_key_file = cache.path().join(VALID_PUB);
// install the key into the cache
fs::copy(fixture(&format!("keys/{}", VALID_PUB)), &new_key_file).unwrap();
let (pair, pair_type) = SigKeyPair::write_file_from_str(&content, cache.path()).unwrap();
assert_eq!(pair_type, PairType::Public);
assert_eq!(pair.name_with_rev(), VALID_NAME_WITH_REV);
assert!(new_key_file.is_file());
}
#[test]
#[should_panic(expected = "Unsupported key version")]
fn write_file_from_str_unsupported_version_secret() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let content = fixture_as_string("keys/origin-key-invalid-version-20160518021451.sig.key");
SigKeyPair::write_file_from_str(&content, cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "Unsupported key version")]
fn write_file_from_str_unsupported_version_public() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let content = fixture_as_string("keys/origin-key-invalid-version-20160518021451.pub");
SigKeyPair::write_file_from_str(&content, cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "write_key_from_str:1 Malformed key string")]
fn write_file_from_str_missing_version() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::write_file_from_str("", cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "write_key_from_str:2 Malformed key string")]
fn write_file_from_str_missing_name_secret() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::write_file_from_str("SIG-SEC-1\n", cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "write_key_from_str:2 Malformed key string")]
fn write_file_from_str_missing_name_public() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::write_file_from_str("SIG-PUB-1\n", cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "write_key_from_str:3 Malformed key string")]
fn write_file_from_str_missing_key_secret() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::write_file_from_str("SIG-SEC-1\nim-in-trouble-123\n", cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "write_key_from_str:3 Malformed key string")]
fn write_file_from_str_missing_key_public() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::write_file_from_str("SIG-PUB-1\nim-in-trouble-123\n", cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "write_key_from_str:3 Malformed key string")]
fn write_file_from_str_invalid_key_secret() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::write_file_from_str("SIG-SEC-1\norigin-key-valid-20160509190508\n\\
nc29tZXRoaW5n%",
cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "write_key_from_str:3 Malformed key string")]
fn write_file_from_str_invalid_key_public() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
SigKeyPair::write_file_from_str("SIG-PUB-1\nim-in-trouble-123\n\nc29tZXRoaW5n%",
cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "Existing key file")]
fn write_file_from_str_key_exists_but_hashes_differ_secret() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let key = fixture("keys/origin-key-valid-20160509190508.sig.key");
fs::copy(key,
cache.path().join("origin-key-valid-20160509190508.sig.key")).unwrap();
let k = "SIG-SEC-1\norigin-key-valid-20160509190508\n\nc29tZXRoaW5n";
SigKeyPair::write_file_from_str(k, cache.path()).unwrap();
}
#[test]
#[should_panic(expected = "Existing key file")]
fn write_file_from_str_key_exists_but_hashes_differ_public() {
let cache = Builder::new().prefix("key_cache").tempdir().unwrap();
let key = fixture("keys/origin-key-valid-20160509190508.pub");
fs::copy(key,
cache.path().join("origin-key-valid-20160509190508.pub")).unwrap();
let k = "SIG-PUB-1\norigin-key-valid-20160509190508\n\nc29tZXRoaW5n";
SigKeyPair::write_file_from_str(k, cache.path()).unwrap();
}
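// For reference, the parser exercised above expects a three-part key string —
// a format-version line, a name-with-revision line, a blank line, then the
// base64 body. A minimal well-formed example (a sketch inferred from the
// malformed-input cases above; the canonical on-disk format is defined by the
// key-writing code elsewhere in this crate):
//
//   SIG-SEC-1
//   origin-key-valid-20160509190508
//
//   c29tZXRoaW5n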
}<|fim▁end|> | -> Result<(Self, PairType)> {
let (pair_type, name_with_rev, _) = super::parse_key_str(content)?; |
<|file_name|>EventListActivity.java<|end_file_name|><|fim▁begin|>package epsi.md4.com.epsicalendar.activities;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import android.widget.Toast;
import java.util.List;
import epsi.md4.com.epsicalendar.Common;
import epsi.md4.com.epsicalendar.R;
import epsi.md4.com.epsicalendar.adapters.EventItemAdapter;
import epsi.md4.com.epsicalendar.beans.Event;
import epsi.md4.com.epsicalendar.ws.ApiClient;
import retrofit.Callback;
import retrofit.Response;
import retrofit.Retrofit;
public class EventListActivity extends AppCompatActivity implements AdapterView.OnItemClickListener {
public static final int EVENT_FORM_ACTIVITY_REQUEST_CODE = 1;
public static final String TAG = EventListActivity.class.getName();
public static final String EXTRA_EVENT_ID = "EXTRA_EVENT_ID";
private ListView mList;
private ApiClient mApiClient;
private SharedPreferences mSharedPrefs;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Init view
setContentView(R.layout.activity_event_list);
// Init fields
this.mList = (ListView) findViewById(R.id.event_list_view);
this.mSharedPrefs = getSharedPreferences(Common.PREFS_SCOPE, Context.MODE_PRIVATE);
mApiClient = new ApiClient(this);
}
/**
* Refreshes the event list from the API and rebinds the list adapter
*/
private void refreshData() {
mApiClient.listEvents().enqueue(new Callback<List<Event>>() {
@Override
public void onResponse(Response<List<Event>> response, Retrofit retrofit) {
Log.v(TAG, String.format("listEvents.response: %d", response.code()));
if (response.isSuccess()) {
EventItemAdapter eventItemAdapter = new EventItemAdapter(EventListActivity.this, response.body());
mList.setOnItemClickListener(EventListActivity.this);
mList.setAdapter(eventItemAdapter);
} else {
Log.e(TAG, "can't get events from api ");
}
}
@Override
public void onFailure(Throwable t) {
Log.e(TAG, String.format("Error: %s", t.getMessage()));
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == EVENT_FORM_ACTIVITY_REQUEST_CODE) { // same request code passed to startActivityForResult()
if (resultCode == EventFormActivity.RESULT_OK) {
Log.d(TAG, "new event, refreshing list");
refreshData();
} else if (resultCode == EventFormActivity.RESULT_CANCELED) {
Log.d(TAG, "operation cancelled");
}
}
}
public void onClickAddEvent(View view) {
Intent intent = new Intent(EventListActivity.this, EventFormActivity.class);
this.startActivityForResult(intent, EVENT_FORM_ACTIVITY_REQUEST_CODE);
}
@Override
protected void onResume() {
super.onResume();
Log.v(TAG, String.format("prefs.USER_EMAIL_KEY = %s", mSharedPrefs.getString(Common.USER_EMAIL_KEY, "")));
if (mSharedPrefs.getString(Common.USER_EMAIL_KEY, "").equals("")) {
Intent intent = new Intent(this, UserFormActivity.class);
startActivity(intent);
} else {
refreshData();
}
}
public void onClickDisconnect(View view) {
mApiClient.logout().enqueue(new Callback<Void>() {
@Override
public void onResponse(Response<Void> response, Retrofit retrofit) {
localDisconnect();
onResume();
}<|fim▁hole|> public void onFailure(Throwable t) {
Toast.makeText(EventListActivity.this, "Can not logout", Toast.LENGTH_SHORT).show();
Log.e(TAG, String.format("Error while logging out: %s", t.getLocalizedMessage()));
}
});
}
private void localDisconnect() {
Log.v(TAG, String.format("Clearing %s prefs", Common.PREFS_SCOPE));
SharedPreferences.Editor edit = this.mSharedPrefs.edit();
edit.clear();
edit.apply();
}
/**
* Listener for list item clicks: opens the detail screen for the chosen event
*
* @param parent the AdapterView where the click happened
* @param view the item view that was clicked
* @param position the position of the view in the adapter
* @param id the row id of the item that was clicked
*/
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
Event clickedItem = (Event) parent.getItemAtPosition(position);
Log.v(TAG, clickedItem.toString());
Intent intent = new Intent(this, EventItemActivity.class);
intent.putExtra(EXTRA_EVENT_ID, clickedItem.getId().toString());
startActivity(intent);
}
}<|fim▁end|> |
@Override |
<|file_name|>test_603_vec2.py<|end_file_name|><|fim▁begin|># Copyright (c) 2019-2020 Manfred Moitzi
# License: MIT License
import pytest
import math
import pickle
# Import from 'ezdxf.math._vector' to test Python implementation
from ezdxf.math._vector import Vec2, Vec3
from ezdxf.acc import USE_C_EXT
all_vec_classes = [Vec2, Vec3]
vec2_only = [Vec2]
if USE_C_EXT:
from ezdxf.acc.vector import Vec2 as CVec2
all_vec_classes.append(CVec2)
vec2_only.append(CVec2)
# Vec2 is a subset of Vec3; Vec3 can do everything Vec2 can do, but not every
# operation has the same result for 2D and 3D.
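# For example (mirroring test_compatible_to_vector below):
# Vec3(Vec2(1, 2)) == (1, 2, 0), while Vec2(Vec3(1, 2, 3)) drops the z-axis.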
@pytest.fixture(params=all_vec_classes)
def vcls(request):
return request.param
@pytest.fixture(params=vec2_only)
def vec2(request):
return request.param
def test_init_tuple(vcls):
v = vcls((2, 3))
assert v.x == 2
assert v.y == 3
def test_empty_init(vcls):
v = vcls()
assert v.x == 0.
assert v.y == 0.
def test_init_vec2(vcls):
v = Vec2(vcls(2, 3))
assert v.x == 2
assert v.y == 3
def test_compatible_to_vector():
v = Vec3(Vec2(1, 2))
assert v == (1, 2, 0)
v = Vec2(Vec3(1, 2, 3))
assert v.x == 1
assert v.y == 2
def test_vec3(vec2):
v = vec2(1, 2)
assert len(v) == 2
v3 = v.vec3
assert len(v3) == 3<|fim▁hole|> v = vec2(1.123, 2.123)
v2 = v.round(1)
assert v2 == (1.1, 2.1)
def test_from_angle(vcls):
angle = math.radians(50)
length = 3.0
assert vcls.from_angle(angle, length) == vcls(
(math.cos(angle) * length, math.sin(angle) * length)
)
def test_vec2_as_tuple(vec2):
v = vec2(1, 2)
assert v[0] == 1
assert v[1] == 2
with pytest.raises(IndexError):
_ = v[2]
# negative indices not supported
with pytest.raises(IndexError):
_ = v[-1]
def test_iter(vcls):
assert sum(vcls(1, 2)) == 3
def test_deep_copy():
import copy
v = Vec2(1, 2)
l1 = [v, v, v]
l2 = copy.copy(l1)
assert l2[0] is l2[1]
assert l2[1] is l2[2]
assert l2[0] is v
# Vec3, CVec2 and CVec3 are immutable and do not create copies of themselves!
l3 = copy.deepcopy(l1)
assert l3[0] is l3[1]
assert l3[1] is l3[2]
assert l3[0] is not v
def test_get_angle(vcls):
v = vcls(3, 3)
assert math.isclose(v.angle_deg, 45)
assert math.isclose(v.angle, math.radians(45))
def test_compare_vectors(vcls):
v1 = vcls(1, 2)
assert v1 == v1
v2 = vcls(2, 3)
assert v2 > v1
assert v1 < v2
def test_is_close(vcls):
v1 = vcls(421846.9857097387, -36908.41493252139)
v2 = vcls(421846.9857097387, -36908.41493252141)
assert v1.isclose(v2) is True
def test_is_null(vcls):
v = vcls(0, 0)
assert v.is_null is True
v1 = vcls(23.56678, 56678.56778) * (1.0 / 14.5667)
v2 = vcls(23.56678, 56678.56778) / 14.5667
assert (v2 - v1).is_null
def test_is_not_null_default_abs_tol(vcls):
assert vcls(1e-11, 0).is_null is False
def test_is_null_default_abs_tol(vcls):
assert vcls(1e-12, 0).is_null is True
def test_bool(vcls):
v = vcls((0, 0))
assert bool(v) is False
v1 = vcls(23.56678, 56678.56778) * (1.0 / 14.5667)
v2 = vcls(23.56678, 56678.56778) / 14.5667
result = v2 - v1
assert bool(result) is False
# current rel_tol=1e-9
assert not vcls(1e-8, 0).is_null
def test_magnitude(vcls):
v = vcls(3, 4)
assert math.isclose(abs(v), 5)
assert math.isclose(v.magnitude, 5)
def test_normalize(vcls):
v = vcls(2, 0)
assert v.normalize() == (1, 0)
def test_normalize_to_length(vcls):
v = vcls(2, 0)
assert v.normalize(4) == (4, 0)
def test_orthogonal_ccw(vcls):
v = vcls(3, 4)
assert v.orthogonal() == (-4, 3)
def test_orthogonal_cw(vcls):
v = vcls(3, 4)
assert v.orthogonal(False) == (4, -3)
def test_negative(vcls):
v = vcls(2, 3)
assert -v == (-2, -3)
def test_add_vector(vcls):
assert vcls(2, 3) + vcls(7, 7) == (9, 10)
def test_add_vec3(vec2):
assert vec2(2, 3) + Vec3(7, 7) == (9, 10)
def test_iadd_vector(vec2):
v = Vec2(2, 3)
v += Vec2(7, 7)
assert v == (9, 10)
def test_add_scalar_type_error(vcls):
with pytest.raises(TypeError):
vcls(1, 1) + 1
def test_iadd_scalar_type_error(vcls):
v = vcls(2, 3)
with pytest.raises(TypeError):
v += 1
def test_radd_scalar_type_error(vcls):
with pytest.raises(TypeError):
1 + vcls(1, 1)
def test_radd_tuple_type_error(vec2):
with pytest.raises(TypeError):
(1, 1) + vec2(1, 1)
def test_sub_vector(vcls):
assert vcls(2, 3) - vcls(7, 7) == (-5, -4)
def test_isub_vector(vec2):
v = Vec2(2, 3)
v -= Vec2(7, 7)
assert v == (-5, -4)
def test_sub_vec3(vec2):
assert vec2(2, 3) - Vec3(7, 7) == (-5, -4)
def test_sub_scalar_type_error(vcls):
with pytest.raises(TypeError):
vcls(1, 1) - 1
def test_isub_scalar_type_error(vcls):
v = vcls(2, 3)
with pytest.raises(TypeError):
v -= 1
def test_rsub_tuple(vec2):
with pytest.raises(TypeError):
(2, 3) - vec2(7, 7)
def test_rsub_scalar_type_error(vcls):
with pytest.raises(TypeError):
1 - vcls(1, 1)
def test_mul_scalar(vcls):
v = vcls(2, 3)
assert v * 2 == (4, 6)
def test_imul_scalar(vcls):
v = vcls(2, 3)
v *= 2
assert v == (4, 6)
def test_rmul_scalar(vcls):
assert 2 * vcls(2, 3) == (4, 6)
def test_mul_tuple_type_error(vcls):
with pytest.raises(TypeError):
vcls(2, 3) * (2, 2)
def test_rmul_tuple_type_error(vcls):
with pytest.raises(TypeError):
(2, 2) * vcls(2, 3)
def test_imul_tuple_type_error(vcls):
v = vcls(2, 3)
with pytest.raises(TypeError):
v *= (2, 2)
def test_div_scalar(vcls):
v = vcls(2, 3)
assert v / 2 == (1, 1.5)
def test_idiv_scalar(vcls):
v = vcls(2, 3)
v /= 2
assert v == (1, 1.5)
def test_dot_product(vcls):
v1 = vcls(2, 7)
v2 = vcls(3, 9)
assert math.isclose(v1.dot(v2), 69)
def test_angle_deg(vcls):
assert math.isclose(vcls((0, 1)).angle_deg, 90)
assert math.isclose(vcls((0, -1)).angle_deg, -90)
assert math.isclose(vcls((1, 1)).angle_deg, 45)
assert math.isclose(vcls((-1, 1)).angle_deg, 135)
def test_angle_between(vcls):
v1 = vcls(0, 1)
v2 = vcls(1, 1)
angle = v1.angle_between(v2)
assert math.isclose(angle, math.pi / 4)
# reverse order, same result
angle = v2.angle_between(v1)
assert math.isclose(angle, math.pi / 4)
@pytest.mark.parametrize(
"v1, v2",
[
[(1, 0), (0, 0)],
[(0, 0), (1, 0)],
[(0, 0), (0, 0)],
],
)
def test_angle_between_null_vector(vcls, v1, v2):
with pytest.raises(ZeroDivisionError):
vcls(v1).angle_between(vcls(v2))
def test_angle_between_outside_domain():
v1 = Vec3(721.046967113573, 721.0469671135688, 0.0)
v2 = Vec3(-721.0469671135725, -721.0469671135688, 0.0)
angle = v1.angle_between(v2)
assert math.isclose(angle, math.pi)
# reverse order, same result
angle = v2.angle_between(v1)
assert math.isclose(angle, math.pi)
def test_rotate(vcls):
assert vcls(2, 2).rotate_deg(90).isclose(vcls(-2, 2))
def test_lerp(vcls):
v1 = vcls(1, 1)
v2 = vcls(4, 4)
assert v1.lerp(v2, 0.5) == (2.5, 2.5)
assert v1.lerp(v2, 0) == (1, 1)
assert v1.lerp(v2, 1) == (4, 4)
def test_project(vcls):
v = vcls(10, 0)
assert v.project(vcls(5, 0)) == (5, 0)
assert v.project(vcls(5, 5)) == (5, 0)
assert v.project(vcls(5, 5)) == (5, 0)
v = vcls(10, 10)
assert v.project(vcls(10, 0)).isclose(vcls(5, 5))
def test_det(vec2):
assert vec2(1, 0).det(vec2(0, 1)) == 1
assert vec2(0, 1).det(vec2(1, 0)) == -1
def test_sum(vcls):
assert vcls.sum([]).is_null is True
assert vcls.sum([vcls(1, 1)]) == (1, 1)
assert vcls.sum([vcls(1, 1), vcls(2, 2)]) == (3, 3)
def test_picklable(vec2):
for v in [vec2((1, 2.5)), vec2(1, 2.5)]:
pickled_v = pickle.loads(pickle.dumps(v))
assert v == pickled_v
assert type(v) is type(pickled_v)<|fim▁end|> | assert v3 == (1, 2, 0)
def test_round(vec2): |
<|file_name|>idea.js<|end_file_name|><|fim▁begin|>var keystone = require('keystone'),
async = require('async');
exports = module.exports = function(req, res) {
var view = new keystone.View(req, res),
locals = res.locals;
// Init locals
locals.section = 'ideas';
locals.page.title = 'Ideas - Evilcome';
locals.filters = {
category: req.params.category
};
locals.data = {
posts: [],
categories: []
};
// Load all categories
view.on('init', function(next) {
keystone.list('PostCategory').model.find().sort('name').exec(function(err, results) {
if (err || !results.length) {
return next(err);
}
locals.data.categories = results;
// Load the counts for each category
async.each(locals.data.categories, function(category, next) {
keystone.list('Post').model.count().where('categories').in([category.id]).exec(function(err, count) {
category.postCount = count;
next(err);
});
}, function(err) {
next(err);
});
});
});
// Load the current category filter
view.on('init', function(next) {
if (req.params.category) {
keystone.list('PostCategory').model.findOne({ key: locals.filters.category }).exec(function(err, result) {
locals.data.category = result;
next(err);
});
} else {
next();
}
});
// Load the posts<|fim▁hole|>
if (locals.data.category) {
q.where('categories').in([locals.data.category]);
}
q.exec(function(err, results) {
locals.data.posts = results;
next(err);
});
});
// Render the view
view.render('site/idea');
}<|fim▁end|> | view.on('init', function(next) {
var q = keystone.list('Post').model.find().where('state', 'published').sort('-publishedDate').populate('author categories'); |
<|file_name|>3fab9480c190_professor_instructor.py<|end_file_name|><|fim▁begin|>"""professor => instructor
Revision ID: 3fab9480c190
Revises: 31ded1f6ad6
Create Date: 2014-02-17 00:56:12.566690
"""
# revision identifiers, used by Alembic.
revision = '3fab9480c190'
down_revision = '31ded1f6ad6'
from alembic import op
import sqlalchemy as sa
metadata = sa.MetaData()
role = sa.Table('Role', metadata,
sa.Column('id', sa.Integer()),
sa.Column('name', sa.String(20)),<|fim▁hole|>)
def upgrade():
update_stmt = role.update().where(role.c.name == 'professor').values(name = 'instructor')
op.execute(update_stmt)
def downgrade():
update_stmt = role.update().where(role.c.name == 'instructor').values(name = 'professor')
op.execute(update_stmt)<|fim▁end|> | |
<|file_name|>fortios_system_replacemsg_traffic_quota.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_replacemsg_traffic_quota
short_description: Replacement messages in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system_replacemsg feature and traffic_quota category.
Examples include all parameters, and values need to be adjusted to data sources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_replacemsg_traffic_quota:
description:
- Replacement messages.
default: null
type: dict
suboptions:
buffer:
description:
- Message string.
type: str
format:
description:
- Format flag.
type: str
choices:
- none
- text
- html
- wml
header:
description:
- Header flag.
type: str
choices:
- none
- http
- 8bit
msg_type:
description:
- Message type.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Replacement messages.
fortios_system_replacemsg_traffic_quota:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_replacemsg_traffic_quota:
buffer: "<your_own_value>"
format: "none"
header: "none"
msg_type: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_system_replacemsg_traffic_quota_data(json):
option_list = ['buffer', 'format', 'header',
'msg_type']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
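# Quick illustration (not part of the module): unknown keys and None values
# are dropped, e.g. {'buffer': 'x', 'format': None, 'other': 1} -> {'buffer': 'x'}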
def underscore_to_hyphen(data):
if isinstance(data, list):
for i, elem in enumerate(data):
data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
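# Quick illustration (not part of the module):
# underscore_to_hyphen({'msg_type': 'quota'}) -> {'msg-type': 'quota'}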
def system_replacemsg_traffic_quota(data, fos):
vdom = data['vdom']
state = data['state']
system_replacemsg_traffic_quota_data = data['system_replacemsg_traffic_quota']
filtered_data = underscore_to_hyphen(filter_system_replacemsg_traffic_quota_data(system_replacemsg_traffic_quota_data))
if state == "present":
return fos.set('system.replacemsg',
'traffic-quota',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('system.replacemsg',
'traffic-quota',
mkey=filtered_data['msg-type'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system_replacemsg(data, fos):
if data['system_replacemsg_traffic_quota']:
resp = system_replacemsg_traffic_quota(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"system_replacemsg_traffic_quota": {
"required": False, "type": "dict", "default": None,
"options": {
"buffer": {"required": False, "type": "str"},
"format": {"required": False, "type": "str",
"choices": ["none", "text", "html",
"wml"]},
"header": {"required": False, "type": "str",
"choices": ["none", "http", "8bit"]},
"msg_type": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_system_replacemsg(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_system_replacemsg(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()<|fim▁end|> | #!/usr/bin/python |
<|file_name|>GeoFenceService.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Thomas Hoffmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.j4velin.wifiAutoOff;<|fim▁hole|>
import com.google.android.gms.location.FusedLocationProviderApi;
import com.google.android.gms.location.Geofence;
import com.google.android.gms.location.GeofencingEvent;
import com.google.android.gms.maps.model.LatLng;
public class GeoFenceService extends IntentService {
public GeoFenceService() {
super("WiFiAutomaticGeoFenceService");
}
@Override
protected void onHandleIntent(final Intent intent) {
if (intent == null) return;
if (intent.hasExtra(FusedLocationProviderApi.KEY_LOCATION_CHANGED)) {
android.location.Location loc = (android.location.Location) intent.getExtras()
.get(FusedLocationProviderApi.KEY_LOCATION_CHANGED);
if (BuildConfig.DEBUG) Logger.log("Location update received " + loc);
Database db = Database.getInstance(this);
if (db.inRangeOfLocation(loc)) {
sendBroadcast(new Intent(this, Receiver.class)
.setAction(Receiver.LOCATION_ENTERED_ACTION));
}
db.close();
} else {
GeofencingEvent geofencingEvent = GeofencingEvent.fromIntent(intent);
// First check for errors
if (geofencingEvent.hasError()) {
// Log the error code reported by the geofencing event
if (BuildConfig.DEBUG) Logger.log("Location Services error: " +
Integer.toString(geofencingEvent.getErrorCode()));
} else {
// Test that a valid transition was reported
if (geofencingEvent.getGeofenceTransition() == Geofence.GEOFENCE_TRANSITION_ENTER) {
Database db = Database.getInstance(this);
for (Geofence gf : geofencingEvent.getTriggeringGeofences()) {
if (BuildConfig.DEBUG) Logger.log("geofence entered: " + gf.getRequestId());
String[] data = gf.getRequestId().split("@");
LatLng ll = new LatLng(Double.parseDouble(data[0]),
Double.parseDouble(data[1]));
String name = db.getNameForLocation(ll);
if (name != null) {
sendBroadcast(new Intent(this, Receiver.class)
.setAction(Receiver.LOCATION_ENTERED_ACTION)
.putExtra(Receiver.EXTRA_LOCATION_NAME, name));
break;
}
}
db.close();
}
}
}
}
}<|fim▁end|> |
import android.app.IntentService;
import android.content.Intent; |
<|file_name|>ClassVirtualFieldExpr.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
*
* Free Software Foundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Scott Ferguson
*/
package com.caucho.quercus.expr;
import java.io.IOException;
import java.util.ArrayList;
import com.caucho.quercus.Location;
import com.caucho.quercus.env.Env;
import com.caucho.quercus.env.MethodIntern;
import com.caucho.quercus.env.NullValue;
import com.caucho.quercus.env.StringValue;
import com.caucho.quercus.env.QuercusClass;
import com.caucho.quercus.env.Value;
import com.caucho.quercus.env.Var;<|fim▁hole|> * Represents a PHP static field reference.
*/
public class ClassVirtualFieldExpr extends AbstractVarExpr {
private static final L10N L = new L10N(ClassVirtualFieldExpr.class);
protected final StringValue _varName;
public ClassVirtualFieldExpr(String varName)
{
_varName = MethodIntern.intern(varName);
}
//
// function call creation
//
/**
* Creates a function call expression
*/
@Override
public Expr createCall(QuercusParser parser,
Location location,
ArrayList<Expr> args)
throws IOException
{
ExprFactory factory = parser.getExprFactory();
Expr var = parser.createVar(_varName.toString());
return factory.createClassVirtualMethodCall(location, var, args);
}
/**
* Evaluates the expression.
*
* @param env the calling environment.
*
* @return the expression value.
*/
@Override
public Value eval(Env env)
{
Value qThis = env.getThis();
QuercusClass qClass = qThis != null ? qThis.getQuercusClass() : null;
if (qClass == null) {
env.error(L.l("No calling class found for '{0}'", this));
return NullValue.NULL;
}
return qClass.getStaticFieldValue(env, _varName);
}
/**
* Evaluates the expression.
*
* @param env the calling environment.
*
* @return the expression value.
*/
@Override
public Var evalVar(Env env)
{
Value qThis = env.getThis();
QuercusClass qClass = qThis != null ? qThis.getQuercusClass() : null;
if (qClass == null) {
env.error(L.l("No calling class found for '{0}'", this));
return NullValue.NULL.toVar();
}
return qClass.getStaticFieldVar(env, _varName);
}
/**
* Evaluates the expression.
*
* @param env the calling environment.
*
* @return the expression value.
*/
@Override
public Value evalAssignRef(Env env, Value value)
{
Value qThis = env.getThis();
QuercusClass qClass = qThis != null ? qThis.getQuercusClass() : null;
if (qClass == null) {
env.error(L.l("No calling class found for '{0}'", this));
return NullValue.NULL.toVar();
}
return qClass.setStaticFieldRef(env, _varName, value);
}
/**
* Unsets the static field, which is not supported: raises an error.
*
* @param env the calling environment.
*/
public void evalUnset(Env env)
{
env.error(getLocation(),
L.l("{0}::${1}: Cannot unset static variables.",
env.getCallingClass().getName(), _varName));
}
public String toString()
{
return "static::$" + _varName;
}
}<|fim▁end|> | import com.caucho.quercus.parser.QuercusParser;
import com.caucho.util.L10N;
/** |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import sys,os
os.environ["EPICS_CA_ADDR_LIST"] = "192.168.82.10"
os.environ["EPICS_CA_MAX_ARRAY_BYTES"] = "100000000"
<|fim▁hole|>a = VIMC.velaINJMagnetController(True,False)
a.switchONpsu('SOL')
#print(a.isON('HVCOR'))
print(a.getRI('SOL'))
#print(a.getILockStates('HVxhfjsfhk01'))
a.switchONpsu('SOL')
#print(a.isON('HVCOR'))
print(a.isON('SOL'))<|fim▁end|> | import velaINJMagnetControl as VIMC
|
<|file_name|>Fn_execConvPrint.js<|end_file_name|><|fim▁begin|>(function() {
var AS = this, Fn = AS.Fn;
// assign
$.extend(Fn, {
execConvPrint: execConvPrint
});
return;
function execConvPrint() {
var b_FPR = AS.Bo.FPR;
var fn = null;
try {
if(b_FPR.Value('convfn') == "function(i, f, a){\n}")
throw Error('Function is not modified');
fn = eval('(' + (b_FPR.Value('convfn') || null) + ')');
if(typeof fn != 'function')
throw Error('Not function.');
} catch(e) {
return b_FPR.error(e);
}
if(execConvPrint.last) // TODO remove
console.log(execConvPrint.last);
var fncs = b_FPR.Value('items[func]');
var args = b_FPR.Value('items[args]');
var memo = {};
fncs.forEach(function(func, i) {<|fim▁hole|> var a = null;
try {
a = eval('(' + args[i] + ')');
} catch(e) {
return console.log('JSON.parse fail No.' + i, args[i]);
}
var nval = fn.call(b_FPR, i, func, $.extend(true, [], a));
nval && (function() {
console.log('changing idx[' + i + ']', a, nval);
b_FPR.Value('items[args][' + i + ']', JSON.stringify(nval));
memo[i] = {}, memo[i].func = func, memo[i].args = a;
})();
});
execConvPrint.last = memo;
b_FPR.notice('変換完了', Fn.noticeOpt()); // notice text: "conversion complete"
}
}).call(window.AppSpace);<|fim▁end|> | |
<|file_name|>spline.py<|end_file_name|><|fim▁begin|>import numpy as np
# f(x) = a*x*x*x + b*x*x + c*x + d
# f'(x) = 3*a*x*x + 2*b*x + c
#
# d = x0
# c = dx0
# a + b + c + d = x1<|fim▁hole|># a = x1 - x0 - dx0 - b
#
# 3*a + 2*b + dx0 = dx1
# 3*a + 2*b = dx1 - dx0
# 3*(x1 - x0 - dx0 - b) + 2*b = dx1 - dx0
# -3*b + 2*b = dx1 - dx0 - 3*(x1 - x0 - dx0)
# b = -dx1 + dx0 + 3*(x1 - x0 - dx0)
#
# a = x1 - x0 - dx0 - b   (substituting b from the line above)
def cubic_spline_coeffs(p0, v0, p1, v1):
d = p0
c = v0
b = -v1 + v0 + 3*(p1 - p0 - v0)
a = p1 - p0 - v0 - b
return [a,b,c,d]
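# Sanity check of the algebra above (plain arithmetic, no extra deps):
# with p0=0, v0=0, p1=1, v1=0 we get a=-2, b=3, c=0, d=0, so
# f(1) = a+b+c+d = 1 and f'(1) = 3*a+2*b+c = 0, as required.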
def cubic_spline_coeffs_list(ps, vs):
return [ cubic_spline_coeffs(ps[i], vs[i], ps[i+1], vs[i+1]) for i in range(len(ps)-1) ]
def cubic_spline(N, ps=None, vs=None, coeffs_list=None):
t = np.linspace(0,1,N)[np.newaxis].T
if coeffs_list is None:
coeffs_list = cubic_spline_coeffs_list(ps, vs)
vs = []
for a,b,c,d in coeffs_list:
v = a*t**3 + b*t**2 + c*t + d
vs.append(v)
return np.concatenate(vs).T
if __name__ == "__main__":
import matplotlib.pyplot as plt
Np=4
Nt=100
p = np.random.random((Np,2))*2-1
v = np.random.random((Np,2))*2-1
xy = cubic_spline(Nt, p, v)  # N comes first: cubic_spline(N, ps, vs)
plt.plot(xy[0,:], xy[1,:])
plt.show()<|fim▁end|> | # 3*a + 2*b + c = dx1
#
# a + b + dx0 + x0 = x1
# a + b = x1 - x0 - dx0 |
<|file_name|>PILTest.py<|end_file_name|><|fim▁begin|>'''
Created on May 21, 2016
@author: zlp
'''
# import Image
hello = 100
class PILTest(object):
hello = 200
def __init__(self, name):
self._name = name
def printName(self):
print self._name
def printHelloInClass(self):
print self.hello
class2 = PILTest('hahah')
# print 'Print hello value: %d' % hello
# class2.printHelloInClass()
# def make_counter():
# count = 0
# def counter():
# nonlocal count
# count += 1
# return count
# return counter
# yield and testing
def fib(max):
a,b = 0,1
while a<max:
yield a
a,b = b,a+b
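# e.g. list(fib(10)) -> [0, 1, 1, 2, 3, 5, 8]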
class Fib2(object):
def __init__(self,max):
self.__max = max
def __iter__(self):
self.a = 0
self.b = 1
return self
def next(self):
result = self.a
if result > self.__max:
raise StopIteration
self.a, self.b = self.b, self.a+self.b
return result
# for i in fib(100):
# print i
#
# for i in Fib2(100):
# print i
#
# a = [1,2,3,4]
# b = [2,3]
#
# for i in zip(a,b):
# print i
# w,h = im.size
params = dict(user='user', password='password', database='database', host='host', port='port')
defaults = dict(use_unicode=True, charset='utf8', collation='utf8_general_ci', autocommit=False)
for k,v in defaults.iteritems():
pass
# print k,v
def outside(func):
def wrapper(*args, **kw):
print 'Outside shell'
return func(*args,**kw)
return wrapper
@outside
def log(func):
def wrapper(*args, **kw):
print 'Hello'
return func(*args, **kw)
return wrapper
@log
def simplePrint(str1):
print 'This is Simple ' + str1
print simplePrint('hahaha')
def func1(a,b):
'''
What are you thinking?
>>> func1(1,2)
10
>>> func1(2,4)
9
'''
return 10
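# NOTE: the second doctest above expects 9 but func1 always returns 10,
# so doctest.testmod() below will report that example as a failure.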
import doctest
doctest.testmod()
class Test1(object):
name = 'haha'
def __init__(self,name2):
self.name2 = name2
t1 = Test1('xiaozhu')
t2 = Test1('xiaoYu')
# print t1.name
# print t1.name2
# print t2.name<|fim▁hole|># print t2.name
print 'T1\'s name is ',Test1.name<|fim▁end|> | # print t2.name2
# t1.name = 'HEIHEI!'
# print t1.name |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#coding:utf-8
import requests
url="http://sendcloud.sohu.com/webapi/mail.send.json"
#files={ "file1": (u"1.pdf", open(u"1.pdf", "rb")),
# "file2": (u"2.pdf", open(u"2.pdf", "rb"))}
# Unlike the account used to log in to the SendCloud site, you must create a sending sub-account in the console; mail can only be sent with that sub-account and its password.
params = {"api_user": "bargetor_test_C9Lnuz", \
"api_key" : "va1NbZRs1VIQPk1b",\
"to" : "[email protected]", \
"from" : "[email protected]", \<|fim▁hole|> "html": "你太棒了!你已成功的从SendCloud发送了一封测试邮件,接下来快登录前台去完善账户信息吧!" \
}
r = requests.post(url, files={}, data=params)
print r.text<|fim▁end|> | "fromname" : "SendCloud测试邮件", \
"subject" : "来自SendCloud的第一封邮件!", \ |
<|file_name|>tabs.component.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
angular.module('components.tabs')
.component('tabs', {
templateUrl: 'components/tabs/tabs.html',
controller: 'TabsController',
bindings: {
classes: '@?',
selected: '<?'
},
transclude: true
})
.component('tab', {<|fim▁hole|> controller: 'TabController',
bindings: {
label: '@'
},
require: {
tabs: '^^'
},
transclude: true
});
})();<|fim▁end|> | templateUrl: 'components/tabs/tab.html', |
<|file_name|>mockfileinfo.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Generated via: mockgen os FileInfo
// Edited to include required boilerplate
// Source: os (interfaces: FileInfo)
package mock_os
import (
os "os"
time "time"
gomock "github.com/golang/mock/gomock"
)
// Mock of FileInfo interface
type MockFileInfo struct {
ctrl *gomock.Controller
recorder *_MockFileInfoRecorder
}
// Recorder for MockFileInfo (not exported)
type _MockFileInfoRecorder struct {
mock *MockFileInfo
}
func NewMockFileInfo(ctrl *gomock.Controller) *MockFileInfo {
mock := &MockFileInfo{ctrl: ctrl}
mock.recorder = &_MockFileInfoRecorder{mock}
return mock
}
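// Typical use in a test (a sketch; identifiers depend on the consuming test):
//
//   ctrl := gomock.NewController(t)
//   defer ctrl.Finish()
//   fi := mock_os.NewMockFileInfo(ctrl)
//   fi.EXPECT().Name().Return("some-file")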
func (_m *MockFileInfo) EXPECT() *_MockFileInfoRecorder {
return _m.recorder
}
func (_m *MockFileInfo) IsDir() bool {
ret := _m.ctrl.Call(_m, "IsDir")
ret0, _ := ret[0].(bool)
return ret0
}
func (_mr *_MockFileInfoRecorder) IsDir() *gomock.Call {
return _mr.mock.ctrl.RecordCall(_mr.mock, "IsDir")
}
func (_m *MockFileInfo) ModTime() time.Time {
ret := _m.ctrl.Call(_m, "ModTime")
ret0, _ := ret[0].(time.Time)
return ret0
}
func (_mr *_MockFileInfoRecorder) ModTime() *gomock.Call {
return _mr.mock.ctrl.RecordCall(_mr.mock, "ModTime")
}
func (_m *MockFileInfo) Mode() os.FileMode {
ret := _m.ctrl.Call(_m, "Mode")
ret0, _ := ret[0].(os.FileMode)
return ret0
}
func (_mr *_MockFileInfoRecorder) Mode() *gomock.Call {
return _mr.mock.ctrl.RecordCall(_mr.mock, "Mode")
}
func (_m *MockFileInfo) Name() string {
ret := _m.ctrl.Call(_m, "Name")
ret0, _ := ret[0].(string)
return ret0
}
func (_mr *_MockFileInfoRecorder) Name() *gomock.Call {
return _mr.mock.ctrl.RecordCall(_mr.mock, "Name")
}
func (_m *MockFileInfo) Size() int64 {
ret := _m.ctrl.Call(_m, "Size")
ret0, _ := ret[0].(int64)
return ret0<|fim▁hole|>}
func (_m *MockFileInfo) Sys() interface{} {
ret := _m.ctrl.Call(_m, "Sys")
ret0, _ := ret[0].(interface{})
return ret0
}
func (_mr *_MockFileInfoRecorder) Sys() *gomock.Call {
return _mr.mock.ctrl.RecordCall(_mr.mock, "Sys")
}<|fim▁end|> | }
func (_mr *_MockFileInfoRecorder) Size() *gomock.Call {
return _mr.mock.ctrl.RecordCall(_mr.mock, "Size") |
<|file_name|>test_random.py<|end_file_name|><|fim▁begin|>import unittest
import unittest.mock
import random
import time
import pickle
import warnings
from functools import partial
from math import log, exp, pi, fsum, sin
from test import support
class TestBasicOps:
# Superclass with tests common to all generators.
# Subclasses must arrange for self.gen to retrieve the Random instance
# to be tested.
def randomlist(self, n):
"""Helper function to make a list of random numbers"""
return [self.gen.random() for i in range(n)]
def test_autoseed(self):
self.gen.seed()
state1 = self.gen.getstate()
time.sleep(0.1)
self.gen.seed() # different seeds at different times
state2 = self.gen.getstate()
self.assertNotEqual(state1, state2)
def test_saverestore(self):
N = 1000
self.gen.seed()
state = self.gen.getstate()
randseq = self.randomlist(N)
self.gen.setstate(state) # should regenerate the same sequence
self.assertEqual(randseq, self.randomlist(N))
def test_seedargs(self):
# Seed value with a negative hash.
class MySeed(object):
def __hash__(self):
return -1729
for arg in [None, 0, 0, 1, 1, -1, -1, 10**20, -(10**20),
3.14, 1+2j, 'a', tuple('abc'), MySeed()]:
self.gen.seed(arg)
for arg in [list(range(3)), dict(one=1)]:
self.assertRaises(TypeError, self.gen.seed, arg)
self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4)
self.assertRaises(TypeError, type(self.gen), [])
@unittest.mock.patch('random._urandom') # os.urandom
def test_seed_when_randomness_source_not_found(self, urandom_mock):
# Random.seed() uses time.time() when an operating system specific
# randomness source is not found. To test this on machines were it
# exists, run the above test, test_seedargs(), again after mocking
# os.urandom() so that it raises the exception expected when the
# randomness source is not available.
urandom_mock.side_effect = NotImplementedError
self.test_seedargs()
def test_shuffle(self):
shuffle = self.gen.shuffle
lst = []
shuffle(lst)
self.assertEqual(lst, [])
lst = [37]
shuffle(lst)
self.assertEqual(lst, [37])
seqs = [list(range(n)) for n in range(10)]
shuffled_seqs = [list(range(n)) for n in range(10)]
for shuffled_seq in shuffled_seqs:
shuffle(shuffled_seq)
for (seq, shuffled_seq) in zip(seqs, shuffled_seqs):
self.assertEqual(len(seq), len(shuffled_seq))
self.assertEqual(set(seq), set(shuffled_seq))
# The above tests all would pass if the shuffle was a
# no-op. The following non-deterministic test covers that. It
# asserts that the shuffled sequence of 1000 distinct elements
# must be different from the original one. Although there is
# mathematically a non-zero probability that this could
# actually happen in a genuinely random shuffle, it is
# completely negligible, given that the number of possible
# permutations of 1000 objects is 1000! (factorial of 1000),
# which is considerably larger than the number of atoms in the
# universe...
lst = list(range(1000))
shuffled_lst = list(range(1000))
shuffle(shuffled_lst)
self.assertTrue(lst != shuffled_lst)
shuffle(lst)
self.assertTrue(lst != shuffled_lst)
def test_choice(self):
choice = self.gen.choice
with self.assertRaises(IndexError):
choice([])
self.assertEqual(choice([50]), 50)
self.assertIn(choice([25, 75]), [25, 75])
def test_sample(self):
# For the entire allowable range of 0 <= k <= N, validate that
# the sample is of the correct length and contains only unique items
N = 100
population = range(N)
for k in range(N+1):
s = self.gen.sample(population, k)
self.assertEqual(len(s), k)
uniq = set(s)
self.assertEqual(len(uniq), k)
self.assertTrue(uniq <= set(population))
self.assertEqual(self.gen.sample([], 0), []) # test edge case N==k==0
# Exception raised if size of sample exceeds that of population
self.assertRaises(ValueError, self.gen.sample, population, N+1)
def test_sample_distribution(self):
# For the entire allowable range of 0 <= k <= N, validate that
# sample generates all possible permutations
n = 5
pop = range(n)
trials = 10000 # large num prevents false negatives without slowing normal case
def factorial(n):
if n == 0:
return 1
return n * factorial(n - 1)
for k in range(n):
expected = factorial(n) // factorial(n-k)<|fim▁hole|> for i in range(trials):
perms[tuple(self.gen.sample(pop, k))] = None
if len(perms) == expected:
break
else:
self.fail()
def test_sample_inputs(self):
# SF bug #801342 -- population can be any iterable defining __len__()
self.gen.sample(set(range(20)), 2)
self.gen.sample(range(20), 2)
self.gen.sample(range(20), 2)
self.gen.sample(str('abcdefghijklmnopqrst'), 2)
self.gen.sample(tuple('abcdefghijklmnopqrst'), 2)
def test_sample_on_dicts(self):
self.assertRaises(TypeError, self.gen.sample, dict.fromkeys('abcdef'), 2)
def test_gauss(self):
# Ensure that the seed() method initializes all the hidden state. In
# particular, through 2.2.1 it failed to reset a piece of state used
# by (and only by) the .gauss() method.
for seed in 1, 12, 123, 1234, 12345, 123456, 654321:
self.gen.seed(seed)
x1 = self.gen.random()
y1 = self.gen.gauss(0, 1)
self.gen.seed(seed)
x2 = self.gen.random()
y2 = self.gen.gauss(0, 1)
self.assertEqual(x1, x2)
self.assertEqual(y1, y2)
def test_pickling(self):
state = pickle.dumps(self.gen)
origseq = [self.gen.random() for i in range(10)]
newgen = pickle.loads(state)
restoredseq = [newgen.random() for i in range(10)]
self.assertEqual(origseq, restoredseq)
def test_bug_1727780(self):
# verify that version-2-pickles can be loaded
# fine, whether they are created on 32-bit or 64-bit
# platforms, and that version-3-pickles load fine.
files = [("randv2_32.pck", 780),
("randv2_64.pck", 866),
("randv3.pck", 343)]
for file, value in files:
f = open(support.findfile(file),"rb")
r = pickle.load(f)
f.close()
self.assertEqual(int(r.random()*1000), value)
def test_bug_9025(self):
# Had problem with an uneven distribution in int(n*random())
# Verify the fix by checking that distributions fall within expectations.
n = 100000
randrange = self.gen.randrange
k = sum(randrange(6755399441055744) % 3 == 2 for i in range(n))
self.assertTrue(0.30 < k/n < .37, (k/n))
try:
random.SystemRandom().random()
except NotImplementedError:
SystemRandom_available = False
else:
SystemRandom_available = True
@unittest.skipUnless(SystemRandom_available, "random.SystemRandom not available")
class SystemRandom_TestBasicOps(TestBasicOps, unittest.TestCase):
gen = random.SystemRandom()
def test_autoseed(self):
# Doesn't need to do anything except not fail
self.gen.seed()
def test_saverestore(self):
self.assertRaises(NotImplementedError, self.gen.getstate)
self.assertRaises(NotImplementedError, self.gen.setstate, None)
def test_seedargs(self):
# Doesn't need to do anything except not fail
self.gen.seed(100)
def test_gauss(self):
self.gen.gauss_next = None
self.gen.seed(100)
self.assertEqual(self.gen.gauss_next, None)
def test_pickling(self):
self.assertRaises(NotImplementedError, pickle.dumps, self.gen)
def test_53_bits_per_float(self):
# This should pass whenever a C double has 53 bit precision.
span = 2 ** 53
cum = 0
for i in range(100):
cum |= int(self.gen.random() * span)
self.assertEqual(cum, span-1)
def test_bigrand(self):
# The randrange routine should build-up the required number of bits
# in stages so that all bit positions are active.
span = 2 ** 500
cum = 0
for i in range(100):
r = self.gen.randrange(span)
self.assertTrue(0 <= r < span)
cum |= r
self.assertEqual(cum, span-1)
def test_bigrand_ranges(self):
for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
start = self.gen.randrange(2 ** (i-2))
stop = self.gen.randrange(2 ** i)
if stop <= start:
continue
self.assertTrue(start <= self.gen.randrange(start, stop) < stop)
def test_rangelimits(self):
for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
self.assertEqual(set(range(start,stop)),
set([self.gen.randrange(start,stop) for i in range(100)]))
def test_randrange_nonunit_step(self):
rint = self.gen.randrange(0, 10, 2)
self.assertIn(rint, (0, 2, 4, 6, 8))
rint = self.gen.randrange(0, 2, 2)
self.assertEqual(rint, 0)
def test_randrange_errors(self):
raises = partial(self.assertRaises, ValueError, self.gen.randrange)
# Empty range
raises(3, 3)
raises(-721)
raises(0, 100, -12)
# Non-integer start/stop
raises(3.14159)
raises(0, 2.71828)
# Zero and non-integer step
raises(0, 42, 0)
raises(0, 42, 3.14159)
def test_genrandbits(self):
# Verify ranges
for k in range(1, 1000):
self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)
# Verify all bits active
getbits = self.gen.getrandbits
for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
cum = 0
for i in range(100):
cum |= getbits(span)
self.assertEqual(cum, 2**span-1)
# Verify argument checking
self.assertRaises(TypeError, self.gen.getrandbits)
self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
self.assertRaises(ValueError, self.gen.getrandbits, 0)
self.assertRaises(ValueError, self.gen.getrandbits, -1)
self.assertRaises(TypeError, self.gen.getrandbits, 10.1)
def test_randbelow_logic(self, _log=log, int=int):
# check bitcount transition points: 2**i and 2**(i+1)-1
# show that: k = int(1.001 + _log(n, 2))
# is equal to or one greater than the number of bits in n
for i in range(1, 1000):
n = 1 << i # check an exact power of two
numbits = i+1
k = int(1.00001 + _log(n, 2))
self.assertEqual(k, numbits)
self.assertEqual(n, 2**(k-1))
n += n - 1 # check 1 below the next power of two
k = int(1.00001 + _log(n, 2))
self.assertIn(k, [numbits, numbits+1])
self.assertTrue(2**k > n > 2**(k-2))
n -= n >> 15 # check a little farther below the next power of two
k = int(1.00001 + _log(n, 2))
self.assertEqual(k, numbits) # note the stronger assertion
self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion
class MersenneTwister_TestBasicOps(TestBasicOps, unittest.TestCase):
gen = random.Random()
def test_guaranteed_stable(self):
# These sequences are guaranteed to stay the same across versions of python
self.gen.seed(3456147, version=1)
self.assertEqual([self.gen.random().hex() for i in range(4)],
['0x1.ac362300d90d2p-1', '0x1.9d16f74365005p-1',
'0x1.1ebb4352e4c4dp-1', '0x1.1a7422abf9c11p-1'])
self.gen.seed("the quick brown fox", version=2)
self.assertEqual([self.gen.random().hex() for i in range(4)],
['0x1.1239ddfb11b7cp-3', '0x1.b3cbb5c51b120p-4',
'0x1.8c4f55116b60fp-1', '0x1.63eb525174a27p-1'])
def test_setstate_first_arg(self):
self.assertRaises(ValueError, self.gen.setstate, (1, None, None))
def test_setstate_middle_arg(self):
# Wrong type, s/b tuple
self.assertRaises(TypeError, self.gen.setstate, (2, None, None))
# Wrong length, s/b 625
self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None))
# Wrong type, s/b tuple of 625 ints
self.assertRaises(TypeError, self.gen.setstate, (2, ('a',)*625, None))
# Last element s/b an int also
self.assertRaises(TypeError, self.gen.setstate, (2, (0,)*624+('a',), None))
# Little trick to make "tuple(x % (2**32) for x in internalstate)"
# raise ValueError. I cannot think of a simple way to achieve this, so
# I am opting for using a generator as the middle argument of setstate
# which attempts to cast a NaN to integer.
state_values = self.gen.getstate()[1]
state_values = list(state_values)
state_values[-1] = float('nan')
state = (int(x) for x in state_values)
self.assertRaises(TypeError, self.gen.setstate, (2, state, None))
def test_referenceImplementation(self):
# Compare the python implementation with results from the original
# code. Create 2000 53-bit precision random floats. Compare only
# the last ten entries to show that the independent implementations
# are tracking. Here is the main() function needed to create the
# list of expected random numbers:
# void main(void){
# int i;
# unsigned long init[4]={61731, 24903, 614, 42143}, length=4;
# init_by_array(init, length);
# for (i=0; i<2000; i++) {
# printf("%.15f ", genrand_res53());
# if (i%5==4) printf("\n");
# }
# }
expected = [0.45839803073713259,
0.86057815201978782,
0.92848331726782152,
0.35932681119782461,
0.081823493762449573,
0.14332226470169329,
0.084297823823520024,
0.53814864671831453,
0.089215024911993401,
0.78486196105372907]
self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
actual = self.randomlist(2000)[-10:]
for a, e in zip(actual, expected):
self.assertAlmostEqual(a,e,places=14)
def test_strong_reference_implementation(self):
# Like test_referenceImplementation, but checks for exact bit-level
# equality. This should pass on any box where C double contains
# at least 53 bits of precision (the underlying algorithm suffers
# no rounding errors -- all results are exact).
from math import ldexp
expected = [0x0eab3258d2231f,
0x1b89db315277a5,
0x1db622a5518016,
0x0b7f9af0d575bf,
0x029e4c4db82240,
0x04961892f5d673,
0x02b291598e4589,
0x11388382c15694,
0x02dad977c9e1fe,
0x191d96d4d334c6]
self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
actual = self.randomlist(2000)[-10:]
for a, e in zip(actual, expected):
self.assertEqual(int(ldexp(a, 53)), e)
def test_long_seed(self):
# This is most interesting to run in debug mode, just to make sure
# nothing blows up. Under the covers, a dynamically resized array
# is allocated, consuming space proportional to the number of bits
# in the seed. Unfortunately, that's a quadratic-time algorithm,
# so don't make this horribly big.
seed = (1 << (10000 * 8)) - 1 # about 10K bytes
self.gen.seed(seed)
def test_53_bits_per_float(self):
# This should pass whenever a C double has 53 bit precision.
span = 2 ** 53
cum = 0
for i in range(100):
cum |= int(self.gen.random() * span)
self.assertEqual(cum, span-1)
def test_bigrand(self):
# The randrange routine should build-up the required number of bits
# in stages so that all bit positions are active.
span = 2 ** 500
cum = 0
for i in range(100):
r = self.gen.randrange(span)
self.assertTrue(0 <= r < span)
cum |= r
self.assertEqual(cum, span-1)
def test_bigrand_ranges(self):
for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
start = self.gen.randrange(2 ** (i-2))
stop = self.gen.randrange(2 ** i)
if stop <= start:
continue
self.assertTrue(start <= self.gen.randrange(start, stop) < stop)
def test_rangelimits(self):
for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
self.assertEqual(set(range(start,stop)),
set([self.gen.randrange(start,stop) for i in range(100)]))
def test_genrandbits(self):
# Verify cross-platform repeatability
self.gen.seed(1234567)
self.assertEqual(self.gen.getrandbits(100),
97904845777343510404718956115)
# Verify ranges
for k in range(1, 1000):
self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)
# Verify all bits active
getbits = self.gen.getrandbits
for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
cum = 0
for i in range(100):
cum |= getbits(span)
self.assertEqual(cum, 2**span-1)
# Verify argument checking
self.assertRaises(TypeError, self.gen.getrandbits)
self.assertRaises(TypeError, self.gen.getrandbits, 'a')
self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
self.assertRaises(ValueError, self.gen.getrandbits, 0)
self.assertRaises(ValueError, self.gen.getrandbits, -1)
def test_randbelow_logic(self, _log=log, int=int):
# check bitcount transition points: 2**i and 2**(i+1)-1
# show that: k = int(1.001 + _log(n, 2))
# is equal to or one greater than the number of bits in n
for i in range(1, 1000):
n = 1 << i # check an exact power of two
numbits = i+1
k = int(1.00001 + _log(n, 2))
self.assertEqual(k, numbits)
self.assertEqual(n, 2**(k-1))
n += n - 1 # check 1 below the next power of two
k = int(1.00001 + _log(n, 2))
self.assertIn(k, [numbits, numbits+1])
self.assertTrue(2**k > n > 2**(k-2))
n -= n >> 15 # check a little farther below the next power of two
k = int(1.00001 + _log(n, 2))
self.assertEqual(k, numbits) # note the stronger assertion
self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion
@unittest.mock.patch('random.Random.random')
def test_randbelow_overridden_random(self, random_mock):
# Random._randbelow() can only use random() when the built-in one
# has been overridden but no new getrandbits() method was supplied.
random_mock.side_effect = random.SystemRandom().random
maxsize = 1<<random.BPF
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
# Population range too large (n >= maxsize)
self.gen._randbelow(maxsize+1, maxsize = maxsize)
self.gen._randbelow(5640, maxsize = maxsize)
# This might be going too far to test a single line, but because of our
# noble aim of achieving 100% test coverage we need to write a case in
# which the following line in Random._randbelow() gets executed:
#
# rem = maxsize % n
# limit = (maxsize - rem) / maxsize
# r = random()
# while r >= limit:
# r = random() # <== *This line* <==<
#
# Therefore, to guarantee that the while loop is executed at least
# once, we need to mock random() so that it returns a number greater
# than 'limit' the first time it gets called.
n = 42
epsilon = 0.01
limit = (maxsize - (maxsize % n)) / maxsize
random_mock.side_effect = [limit + epsilon, limit - epsilon]
self.gen._randbelow(n, maxsize = maxsize)
def test_randrange_bug_1590891(self):
start = 1000000000000
stop = -100000000000000000000
step = -200
x = self.gen.randrange(start, stop, step)
self.assertTrue(stop < x <= start)
self.assertEqual((x+stop)%step, 0)
def gamma(z, sqrt2pi=(2.0*pi)**0.5):
# Reflection to right half of complex plane
if z < 0.5:
return pi / sin(pi*z) / gamma(1.0-z)
# Lanczos approximation with g=7
az = z + (7.0 - 0.5)
return az ** (z-0.5) / exp(az) * sqrt2pi * fsum([
0.9999999999995183,
676.5203681218835 / z,
-1259.139216722289 / (z+1.0),
771.3234287757674 / (z+2.0),
-176.6150291498386 / (z+3.0),
12.50734324009056 / (z+4.0),
-0.1385710331296526 / (z+5.0),
0.9934937113930748e-05 / (z+6.0),
0.1659470187408462e-06 / (z+7.0),
])
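# Sanity check for the Lanczos approximation above (mathematical facts,
# not tied to this test suite): gamma(1.0) ~= 1.0 and gamma(5.0) ~= 24.0 (= 4!).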
class TestDistributions(unittest.TestCase):
def test_zeroinputs(self):
# Verify that distributions can handle a series of zero inputs
g = random.Random()
x = [g.random() for i in range(50)] + [0.0]*5
g.random = x[:].pop; g.uniform(1,10)
g.random = x[:].pop; g.paretovariate(1.0)
g.random = x[:].pop; g.expovariate(1.0)
g.random = x[:].pop; g.weibullvariate(1.0, 1.0)
g.random = x[:].pop; g.vonmisesvariate(1.0, 1.0)
g.random = x[:].pop; g.normalvariate(0.0, 1.0)
g.random = x[:].pop; g.gauss(0.0, 1.0)
g.random = x[:].pop; g.lognormvariate(0.0, 1.0)
g.random = x[:].pop; g.vonmisesvariate(0.0, 1.0)
g.random = x[:].pop; g.gammavariate(0.01, 1.0)
g.random = x[:].pop; g.gammavariate(1.0, 1.0)
g.random = x[:].pop; g.gammavariate(200.0, 1.0)
g.random = x[:].pop; g.betavariate(3.0, 3.0)
g.random = x[:].pop; g.triangular(0.0, 1.0, 1.0/3.0)
def test_avg_std(self):
# Use integration to test distribution average and standard deviation.
# Only works for distributions which do not consume variates in pairs
g = random.Random()
N = 5000
x = [i/float(N) for i in range(1,N)]
for variate, args, mu, sigmasqrd in [
(g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12),
(g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0),
(g.expovariate, (1.5,), 1/1.5, 1/1.5**2),
(g.vonmisesvariate, (1.23, 0), pi, pi**2/3),
(g.paretovariate, (5.0,), 5.0/(5.0-1),
5.0/((5.0-1)**2*(5.0-2))),
(g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0),
gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]:
g.random = x[:].pop
y = []
for i in range(len(x)):
try:
y.append(variate(*args))
except IndexError:
pass
s1 = s2 = 0
for e in y:
s1 += e
s2 += (e - mu) ** 2
N = len(y)
self.assertAlmostEqual(s1/N, mu, places=2,
msg='%s%r' % (variate.__name__, args))
self.assertAlmostEqual(s2/(N-1), sigmasqrd, places=2,
msg='%s%r' % (variate.__name__, args))
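# Example of the closed forms used above (added illustration): for
# uniform(a, b) the mean is (a + b) / 2 and the variance (b - a)**2 / 12,
# so uniform(1.0, 10.0) should average 5.5 with variance 81 / 12 = 6.75.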
def test_constant(self):
g = random.Random()
N = 100
for variate, args, expected in [
(g.uniform, (10.0, 10.0), 10.0),
(g.triangular, (10.0, 10.0), 10.0),
(g.triangular, (10.0, 10.0, 10.0), 10.0),
(g.expovariate, (float('inf'),), 0.0),
(g.vonmisesvariate, (3.0, float('inf')), 3.0),
(g.gauss, (10.0, 0.0), 10.0),
(g.lognormvariate, (0.0, 0.0), 1.0),
(g.lognormvariate, (-float('inf'), 0.0), 0.0),
(g.normalvariate, (10.0, 0.0), 10.0),
(g.paretovariate, (float('inf'),), 1.0),
(g.weibullvariate, (10.0, float('inf')), 10.0),
(g.weibullvariate, (0.0, 10.0), 0.0),
]:
for i in range(N):
self.assertEqual(variate(*args), expected)
def test_von_mises_range(self):
# Issue 17149: von mises variates were not consistently in the
# range [0, 2*PI].
g = random.Random()
N = 100
for mu in 0.0, 0.1, 3.1, 6.2:
for kappa in 0.0, 2.3, 500.0:
for _ in range(N):
sample = g.vonmisesvariate(mu, kappa)
self.assertTrue(
0 <= sample <= random.TWOPI,
msg=("vonmisesvariate({}, {}) produced a result {} out"
" of range [0, 2*pi]").format(mu, kappa, sample))
def test_von_mises_large_kappa(self):
# Issue #17141: vonmisesvariate() would hang for large kappas
random.vonmisesvariate(0, 1e15)
random.vonmisesvariate(0, 1e100)
def test_gammavariate_errors(self):
# Both alpha and beta must be > 0.0
self.assertRaises(ValueError, random.gammavariate, -1, 3)
self.assertRaises(ValueError, random.gammavariate, 0, 2)
self.assertRaises(ValueError, random.gammavariate, 2, 0)
self.assertRaises(ValueError, random.gammavariate, 1, -3)
@unittest.mock.patch('random.Random.random')
def test_gammavariate_full_code_coverage(self, random_mock):
# There are three different possibilities in the current implementation
# of random.gammavariate(), depending on the value of 'alpha'. What we
# are going to do here is to fix the values returned by random() to
# generate test cases that provide 100% line coverage of the method.
# #1: alpha > 1.0: we want the first random number to be outside the
# [1e-7, .9999999] range, so that the continue statement executes
# once. The values of u1 and u2 will be 0.5 and 0.3, respectively.
random_mock.side_effect = [1e-8, 0.5, 0.3]
returned_value = random.gammavariate(1.1, 2.3)
self.assertAlmostEqual(returned_value, 2.53)
# #2: alpha == 1: first random number less than 1e-7 so that the body
# of the while loop executes once. Then random.random() returns 0.45,
# which causes while to stop looping and the algorithm to terminate.
random_mock.side_effect = [1e-8, 0.45]
returned_value = random.gammavariate(1.0, 3.14)
self.assertAlmostEqual(returned_value, 2.507314166123803)
# #3: 0 < alpha < 1. This is the most complex region of code to cover,
# as there are multiple if-else statements. Let's take a look at the
# source code, and determine the values that we need accordingly:
#
# while 1:
# u = random()
# b = (_e + alpha)/_e
# p = b*u
# if p <= 1.0: # <=== (A)
# x = p ** (1.0/alpha)
# else: # <=== (B)
# x = -_log((b-p)/alpha)
# u1 = random()
# if p > 1.0: # <=== (C)
# if u1 <= x ** (alpha - 1.0): # <=== (D)
# break
# elif u1 <= _exp(-x): # <=== (E)
# break
# return x * beta
#
# First, we want (A) to be True. For that we need that:
# b*random() <= 1.0
# r1 = random() <= 1.0 / b
#
# We now get to the second if-else branch, and here, since p <= 1.0,
# (C) is False and we take the elif branch, (E). For it to be True,
# so that the break is executed, we need that:
# r2 = random() <= _exp(-x)
# r2 <= _exp(-(p ** (1.0/alpha)))
# r2 <= _exp(-((b*r1) ** (1.0/alpha)))
_e = random._e
_exp = random._exp
_log = random._log
alpha = 0.35
beta = 1.45
b = (_e + alpha)/_e
epsilon = 0.01
r1 = 0.8859296441566 # 1.0 / b
r2 = 0.3678794411714 # _exp(-((b*r1) ** (1.0/alpha)))
# These four "random" values result in the following trace:
# (A) True, (E) False --> [next iteration of while]
# (A) True, (E) True --> [while loop breaks]
random_mock.side_effect = [r1, r2 + epsilon, r1, r2]
returned_value = random.gammavariate(alpha, beta)
self.assertAlmostEqual(returned_value, 1.4499999999997544)
# Let's now make (A) be False. If this is the case, when we get to the
# second if-else 'p' is greater than 1, so (C) evaluates to True. We
# now encounter a second if statement, (D), which in order to execute
# must satisfy the following condition:
# r2 <= x ** (alpha - 1.0)
# r2 <= (-_log((b-p)/alpha)) ** (alpha - 1.0)
# r2 <= (-_log((b-(b*r1))/alpha)) ** (alpha - 1.0)
r1 = 0.8959296441566 # (1.0 / b) + epsilon -- so that (A) is False
r2 = 0.9445400408898141
# And these four values result in the following trace:
# (B) and (C) True, (D) False --> [next iteration of while]
# (B) and (C) True, (D) True --> [while loop breaks]
random_mock.side_effect = [r1, r2 + epsilon, r1, r2]
returned_value = random.gammavariate(alpha, beta)
self.assertAlmostEqual(returned_value, 1.5830349561760781)
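def test_gammavariate_mocked_constants(self):
    # Added illustration (not in the original suite): recompute the two
    # hardcoded "random" values used above from their defining formulas,
    # so the arithmetic in the comments can be checked directly.
    alpha = 0.35
    b = (random._e + alpha) / random._e
    self.assertAlmostEqual(1.0 / b, 0.8859296441566, places=10)
    # b * (1.0 / b) is 1.0 to within rounding, so r2 reduces to exp(-1).
    self.assertAlmostEqual(random._exp(-((b * (1.0 / b)) ** (1.0 / alpha))),
                           0.3678794411714, places=10)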
@unittest.mock.patch('random.Random.gammavariate')
def test_betavariate_return_zero(self, gammavariate_mock):
# betavariate() returns zero when the Gamma distribution
# that it uses internally returns this same value.
gammavariate_mock.return_value = 0.0
self.assertEqual(0.0, random.betavariate(2.71828, 3.14159))
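# Added note (illustration, based on CPython's implementation at the time):
# betavariate(alpha, beta) first draws y = gammavariate(alpha, 1.0) and
# returns 0.0 outright when y == 0, which is the branch exercised above.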
class TestModule(unittest.TestCase):
def testMagicConstants(self):
self.assertAlmostEqual(random.NV_MAGICCONST, 1.71552776992141)
self.assertAlmostEqual(random.TWOPI, 6.28318530718)
self.assertAlmostEqual(random.LOG4, 1.38629436111989)
self.assertAlmostEqual(random.SG_MAGICCONST, 2.50407739677627)
def test__all__(self):
# tests validity but not completeness of the __all__ list
self.assertTrue(set(random.__all__) <= set(dir(random)))
def test_random_subclass_with_kwargs(self):
# SF bug #1486663 -- this used to erroneously raise a TypeError
class Subclass(random.Random):
def __init__(self, newarg=None):
random.Random.__init__(self)
Subclass(newarg=1)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | perms = {} |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""tictactoe URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:<|fim▁hole|>Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from tictactoe import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('tictactoe/', include('tictactoe.game.urls'), name='game'),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# serving static files like this should not be done in production<|fim▁end|> | Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home') |
<|file_name|>pipeline.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use compositing::CompositionPipeline;
use compositing::CompositorProxy;
use compositing::compositor_thread::Msg as CompositorMsg;
use devtools_traits::{DevtoolsControlMsg, ScriptToDevtoolsControlMsg};
use euclid::scale_factor::ScaleFactor;
use euclid::size::TypedSize2D;
#[cfg(not(target_os = "windows"))]
use gaol;
use gfx::font_cache_thread::FontCacheThread;
use gfx::paint_thread::{LayoutToPaintMsg, PaintThread};
use gfx_traits::ChromeToPaintMsg;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use ipc_channel::router::ROUTER;
use layers::geometry::DevicePixel;
use layout_traits::LayoutThreadFactory;
use msg::constellation_msg::{FrameId, FrameType, LoadData, PipelineId, PipelineNamespaceId};
use net_traits::{IpcSend, ResourceThreads};
use net_traits::bluetooth_thread::BluetoothMethodMsg;
use net_traits::image_cache_thread::ImageCacheThread;
use profile_traits::mem as profile_mem;
use profile_traits::time;
use script_traits::{ConstellationControlMsg, InitialScriptState, MozBrowserEvent};
use script_traits::{LayoutControlMsg, LayoutMsg, NewLayoutInfo, SWManagerMsg, SWManagerSenders, ScriptMsg};
use script_traits::{ScriptThreadFactory, TimerEventRequest, WindowSizeData};
use std::collections::HashMap;
use std::env;
use std::ffi::OsStr;
use std::io::Error as IOError;
use std::process;
use std::sync::mpsc::{Sender, channel};
use style_traits::{PagePx, ViewportPx};
use url::Url;
use util;
use util::ipc::OptionalIpcSender;
use util::opts::{self, Opts};
use util::prefs::{PREFS, Pref};
use webrender_traits;
pub enum ChildProcess {
#[cfg(not(target_os = "windows"))]
Sandboxed(gaol::platform::process::Process),
#[cfg(not(target_os = "windows"))]
Unsandboxed(process::Child),
}
/// A uniquely-identifiable pipeline of script thread, layout thread, and paint thread.
pub struct Pipeline {
pub id: PipelineId,
pub parent_info: Option<(PipelineId, FrameType)>,
pub script_chan: IpcSender<ConstellationControlMsg>,
/// A channel to layout, for performing reflows and shutdown.
pub layout_chan: IpcSender<LayoutControlMsg>,
/// A channel to the compositor.
pub compositor_proxy: Box<CompositorProxy + 'static + Send>,
pub chrome_to_paint_chan: Sender<ChromeToPaintMsg>,
/// URL corresponding to the most recently-loaded page.
pub url: Url,
/// The title of the most recently-loaded page.
pub title: Option<String>,
pub size: Option<TypedSize2D<f32, PagePx>>,
/// Whether this pipeline is currently running animations. Pipelines that are running
/// animations cause composites to be continually scheduled.
pub running_animations: bool,
pub children: Vec<FrameId>,
/// Whether this pipeline is considered distinct from public pipelines.
pub is_private: bool,
/// Whether this pipeline should be treated as visible for the purposes of scheduling and
/// resource management.
pub visible: bool,
/// Frame that contains this Pipeline. Can be `None` if the pipeline is not apart of the
/// frame tree.
pub frame: Option<FrameId>,
}
/// Initial setup data needed to construct a pipeline.
///
/// *DO NOT* add any Senders to this unless you absolutely know what you're doing, or pcwalton will
/// have to rewrite your code. Use IPC senders instead.
pub struct InitialPipelineState {
/// The ID of the pipeline to create.
pub id: PipelineId,
/// The ID of the parent pipeline and frame type, if any.
/// If `None`, this is the root.
pub parent_info: Option<(PipelineId, FrameType)>,
/// A channel to the associated constellation.
pub constellation_chan: IpcSender<ScriptMsg>,
/// A channel for the layout thread to send messages to the constellation.
pub layout_to_constellation_chan: IpcSender<LayoutMsg>,
/// A channel to schedule timer events.
pub scheduler_chan: IpcSender<TimerEventRequest>,
/// A channel to the compositor.
pub compositor_proxy: Box<CompositorProxy + 'static + Send>,
/// A channel to the developer tools, if applicable.
pub devtools_chan: Option<Sender<DevtoolsControlMsg>>,
/// A channel to the bluetooth thread.
pub bluetooth_thread: IpcSender<BluetoothMethodMsg>,
/// A channel to the service worker manager thread
pub swmanager_thread: IpcSender<SWManagerMsg>,
/// A channel to the image cache thread.
pub image_cache_thread: ImageCacheThread,
/// A channel to the font cache thread.
pub font_cache_thread: FontCacheThread,
/// Channels to the resource-related threads.
pub resource_threads: ResourceThreads,
/// A channel to the time profiler thread.
pub time_profiler_chan: time::ProfilerChan,
/// A channel to the memory profiler thread.
pub mem_profiler_chan: profile_mem::ProfilerChan,
/// Information about the initial window size.
pub window_size: Option<TypedSize2D<f32, PagePx>>,
/// Information about the device pixel ratio.
pub device_pixel_ratio: ScaleFactor<f32, ViewportPx, DevicePixel>,
/// A channel to the script thread, if applicable. If this is `Some`,
/// then `parent_info` must also be `Some`.
pub script_chan: Option<IpcSender<ConstellationControlMsg>>,
/// Information about the page to load.
pub load_data: LoadData,
/// The ID of the pipeline namespace for this script thread.
pub pipeline_namespace_id: PipelineNamespaceId,
/// Pipeline visibility is inherited from parent
pub parent_visibility: Option<bool>,
/// Optional webrender api (if enabled).
pub webrender_api_sender: Option<webrender_traits::RenderApiSender>,
/// Whether this pipeline is considered private.
pub is_private: bool,
}
impl Pipeline {
/// Starts a paint thread, layout thread, and possibly a script thread, in
/// a new process if requested.
pub fn spawn<Message, LTF, STF>(state: InitialPipelineState)
-> Result<(Pipeline, Option<ChildProcess>), IOError>
where LTF: LayoutThreadFactory<Message=Message>,
STF: ScriptThreadFactory<Message=Message>
{
// Note: we allow channel creation to panic, since recovering from this
// probably requires a general low-memory strategy.
let (layout_to_paint_chan, layout_to_paint_port) = util::ipc::optional_ipc_channel();
let (chrome_to_paint_chan, chrome_to_paint_port) = channel();
let (pipeline_chan, pipeline_port) = ipc::channel()
.expect("Pipeline main chan");;
let (layout_content_process_shutdown_chan, layout_content_process_shutdown_port) =
ipc::channel().expect("Pipeline layout content shutdown chan");
let (script_chan, content_ports) = match state.script_chan {
Some(script_chan) => {
let (parent_pipeline_id, frame_type) =
state.parent_info.expect("script_pipeline != None but parent_info == None");
let new_layout_info = NewLayoutInfo {
parent_pipeline_id: parent_pipeline_id,
new_pipeline_id: state.id,
frame_type: frame_type,
load_data: state.load_data.clone(),
paint_chan: layout_to_paint_chan.clone().to_opaque(),
pipeline_port: pipeline_port,
layout_to_constellation_chan: state.layout_to_constellation_chan.clone(),
content_process_shutdown_chan: layout_content_process_shutdown_chan.clone(),
layout_threads: PREFS.get("layout.threads").as_u64().expect("count") as usize,
};
if let Err(e) = script_chan.send(ConstellationControlMsg::AttachLayout(new_layout_info)) {
warn!("Sending to script during pipeline creation failed ({})", e);
}
(script_chan, None)
}
None => {
let (script_chan, script_port) = ipc::channel().expect("Pipeline script chan");
(script_chan, Some((script_port, pipeline_port)))
}
};
PaintThread::create(state.id,
state.load_data.url.clone(),
chrome_to_paint_chan.clone(),
layout_to_paint_port,
chrome_to_paint_port,
state.compositor_proxy.clone_compositor_proxy(),
state.font_cache_thread.clone(),
state.time_profiler_chan.clone(),
state.mem_profiler_chan.clone());
let mut child_process = None;
if let Some((script_port, pipeline_port)) = content_ports {
// Route messages coming from content to devtools as appropriate.
let script_to_devtools_chan = state.devtools_chan.as_ref().map(|devtools_chan| {
let (script_to_devtools_chan, script_to_devtools_port) = ipc::channel()
.expect("Pipeline script to devtools chan");
let devtools_chan = (*devtools_chan).clone();
ROUTER.add_route(script_to_devtools_port.to_opaque(), box move |message| {
match message.to::<ScriptToDevtoolsControlMsg>() {
Err(e) => error!("Cast to ScriptToDevtoolsControlMsg failed ({}).", e),
Ok(message) => if let Err(e) = devtools_chan.send(DevtoolsControlMsg::FromScript(message)) {
warn!("Sending to devtools failed ({})", e)
},
}
});
script_to_devtools_chan
});
let device_pixel_ratio = state.device_pixel_ratio;
let window_size = state.window_size.map(|size| {
WindowSizeData {
visible_viewport: size,
initial_viewport: size * ScaleFactor::new(1.0),
device_pixel_ratio: device_pixel_ratio,
}
});
let (script_content_process_shutdown_chan, script_content_process_shutdown_port) =
ipc::channel().expect("Pipeline script content process shutdown chan");
let unprivileged_pipeline_content = UnprivilegedPipelineContent {
id: state.id,
parent_info: state.parent_info,
constellation_chan: state.constellation_chan,
scheduler_chan: state.scheduler_chan,
devtools_chan: script_to_devtools_chan,
bluetooth_thread: state.bluetooth_thread,
swmanager_thread: state.swmanager_thread,
image_cache_thread: state.image_cache_thread,
font_cache_thread: state.font_cache_thread,
resource_threads: state.resource_threads,
time_profiler_chan: state.time_profiler_chan,
mem_profiler_chan: state.mem_profiler_chan,
window_size: window_size,
layout_to_constellation_chan: state.layout_to_constellation_chan,
script_chan: script_chan.clone(),
load_data: state.load_data.clone(),
script_port: script_port,
opts: (*opts::get()).clone(),
prefs: PREFS.cloned(),
layout_to_paint_chan: layout_to_paint_chan,
pipeline_port: pipeline_port,
pipeline_namespace_id: state.pipeline_namespace_id,
layout_content_process_shutdown_chan: layout_content_process_shutdown_chan,
layout_content_process_shutdown_port: layout_content_process_shutdown_port,
script_content_process_shutdown_chan: script_content_process_shutdown_chan,
script_content_process_shutdown_port: script_content_process_shutdown_port,
webrender_api_sender: state.webrender_api_sender,
};
// Spawn the child process.
//
// Yes, that's all there is to it!
if opts::multiprocess() {
child_process = Some(try!(unprivileged_pipeline_content.spawn_multiprocess()));
} else {
unprivileged_pipeline_content.start_all::<Message, LTF, STF>(false);
}
}
let pipeline = Pipeline::new(state.id,
state.parent_info,
script_chan,
pipeline_chan,
state.compositor_proxy,
chrome_to_paint_chan,
state.is_private,
state.load_data.url,
state.window_size,
state.parent_visibility.unwrap_or(true));
pipeline.notify_visibility();
<|fim▁hole|> parent_info: Option<(PipelineId, FrameType)>,
script_chan: IpcSender<ConstellationControlMsg>,
layout_chan: IpcSender<LayoutControlMsg>,
compositor_proxy: Box<CompositorProxy + 'static + Send>,
chrome_to_paint_chan: Sender<ChromeToPaintMsg>,
is_private: bool,
url: Url,
size: Option<TypedSize2D<f32, PagePx>>,
visible: bool)
-> Pipeline {
Pipeline {
id: id,
parent_info: parent_info,
script_chan: script_chan,
layout_chan: layout_chan,
compositor_proxy: compositor_proxy,
chrome_to_paint_chan: chrome_to_paint_chan,
url: url,
title: None,
children: vec!(),
size: size,
running_animations: false,
visible: visible,
is_private: is_private,
frame: None,
}
}
pub fn grant_paint_permission(&self) {
let _ = self.chrome_to_paint_chan.send(ChromeToPaintMsg::PaintPermissionGranted);
}
pub fn revoke_paint_permission(&self) {
debug!("pipeline revoking paint channel paint permission");
let _ = self.chrome_to_paint_chan.send(ChromeToPaintMsg::PaintPermissionRevoked);
}
pub fn exit(&self) {
debug!("pipeline {:?} exiting", self.id);
// The compositor wants to know when pipelines shut down too.
// It may still have messages to process from these other threads
// before they can be safely shut down.
// It's OK for the constellation to block on the compositor,
// since the compositor never blocks on the constellation.
if let Ok((sender, receiver)) = ipc::channel() {
self.compositor_proxy.send(CompositorMsg::PipelineExited(self.id, sender));
if let Err(e) = receiver.recv() {
warn!("Sending exit message failed ({}).", e);
}
}
// Script thread handles shutting down layout, and layout handles shutting down the painter.
// For now, if the script thread has failed, we give up on clean shutdown.
if let Err(e) = self.script_chan.send(ConstellationControlMsg::ExitPipeline(self.id)) {
warn!("Sending script exit message failed ({}).", e);
}
}
pub fn freeze(&self) {
if let Err(e) = self.script_chan.send(ConstellationControlMsg::Freeze(self.id)) {
warn!("Sending freeze message failed ({}).", e);
}
}
pub fn thaw(&self) {
if let Err(e) = self.script_chan.send(ConstellationControlMsg::Thaw(self.id)) {
warn!("Sending freeze message failed ({}).", e);
}
}
pub fn force_exit(&self) {
if let Err(e) = self.script_chan.send(ConstellationControlMsg::ExitPipeline(self.id)) {
warn!("Sending script exit message failed ({}).", e);
}
if let Err(e) = self.chrome_to_paint_chan.send(ChromeToPaintMsg::Exit) {
warn!("Sending paint exit message failed ({}).", e);
}
if let Err(e) = self.layout_chan.send(LayoutControlMsg::ExitNow) {
warn!("Sending layout exit message failed ({}).", e);
}
}
pub fn to_sendable(&self) -> CompositionPipeline {
CompositionPipeline {
id: self.id.clone(),
script_chan: self.script_chan.clone(),
layout_chan: self.layout_chan.clone(),
chrome_to_paint_chan: self.chrome_to_paint_chan.clone(),
}
}
pub fn add_child(&mut self, frame_id: FrameId) {
self.children.push(frame_id);
}
pub fn remove_child(&mut self, frame_id: FrameId) {
match self.children.iter().position(|id| *id == frame_id) {
None => return warn!("Pipeline remove child already removed ({:?}).", frame_id),
Some(index) => self.children.remove(index),
};
}
pub fn trigger_mozbrowser_event(&self,
child_id: Option<PipelineId>,
event: MozBrowserEvent) {
assert!(PREFS.is_mozbrowser_enabled());
let event = ConstellationControlMsg::MozBrowserEvent(self.id,
child_id,
event);
if let Err(e) = self.script_chan.send(event) {
warn!("Sending mozbrowser event to script failed ({}).", e);
}
}
fn notify_visibility(&self) {
self.script_chan.send(ConstellationControlMsg::ChangeFrameVisibilityStatus(self.id, self.visible))
.expect("Pipeline script chan");
self.compositor_proxy.send(CompositorMsg::PipelineVisibilityChanged(self.id, self.visible));
}
pub fn change_visibility(&mut self, visible: bool) {
if visible == self.visible {
return;
}
self.visible = visible;
self.notify_visibility();
}
}
#[derive(Deserialize, Serialize)]
pub struct UnprivilegedPipelineContent {
id: PipelineId,
parent_info: Option<(PipelineId, FrameType)>,
constellation_chan: IpcSender<ScriptMsg>,
layout_to_constellation_chan: IpcSender<LayoutMsg>,
scheduler_chan: IpcSender<TimerEventRequest>,
devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
bluetooth_thread: IpcSender<BluetoothMethodMsg>,
swmanager_thread: IpcSender<SWManagerMsg>,
image_cache_thread: ImageCacheThread,
font_cache_thread: FontCacheThread,
resource_threads: ResourceThreads,
time_profiler_chan: time::ProfilerChan,
mem_profiler_chan: profile_mem::ProfilerChan,
window_size: Option<WindowSizeData>,
script_chan: IpcSender<ConstellationControlMsg>,
load_data: LoadData,
script_port: IpcReceiver<ConstellationControlMsg>,
layout_to_paint_chan: OptionalIpcSender<LayoutToPaintMsg>,
opts: Opts,
prefs: HashMap<String, Pref>,
pipeline_port: IpcReceiver<LayoutControlMsg>,
pipeline_namespace_id: PipelineNamespaceId,
layout_content_process_shutdown_chan: IpcSender<()>,
layout_content_process_shutdown_port: IpcReceiver<()>,
script_content_process_shutdown_chan: IpcSender<()>,
script_content_process_shutdown_port: IpcReceiver<()>,
webrender_api_sender: Option<webrender_traits::RenderApiSender>,
}
impl UnprivilegedPipelineContent {
pub fn start_all<Message, LTF, STF>(self, wait_for_completion: bool)
where LTF: LayoutThreadFactory<Message=Message>,
STF: ScriptThreadFactory<Message=Message>
{
let layout_pair = STF::create(InitialScriptState {
id: self.id,
parent_info: self.parent_info,
control_chan: self.script_chan.clone(),
control_port: self.script_port,
constellation_chan: self.constellation_chan,
scheduler_chan: self.scheduler_chan,
bluetooth_thread: self.bluetooth_thread,
resource_threads: self.resource_threads,
image_cache_thread: self.image_cache_thread.clone(),
time_profiler_chan: self.time_profiler_chan.clone(),
mem_profiler_chan: self.mem_profiler_chan.clone(),
devtools_chan: self.devtools_chan,
window_size: self.window_size,
pipeline_namespace_id: self.pipeline_namespace_id,
content_process_shutdown_chan: self.script_content_process_shutdown_chan,
}, self.load_data.clone());
LTF::create(self.id,
self.load_data.url,
self.parent_info.is_some(),
layout_pair,
self.pipeline_port,
self.layout_to_constellation_chan,
self.script_chan,
self.layout_to_paint_chan,
self.image_cache_thread,
self.font_cache_thread,
self.time_profiler_chan,
self.mem_profiler_chan,
self.layout_content_process_shutdown_chan,
self.webrender_api_sender,
self.prefs.get("layout.threads").expect("exists").value()
.as_u64().expect("count") as usize);
if wait_for_completion {
let _ = self.script_content_process_shutdown_port.recv();
let _ = self.layout_content_process_shutdown_port.recv();
}
}
#[cfg(not(target_os = "windows"))]
pub fn spawn_multiprocess(self) -> Result<ChildProcess, IOError> {
use gaol::sandbox::{self, Sandbox, SandboxMethods};
use ipc_channel::ipc::IpcOneShotServer;
use sandboxing::content_process_sandbox_profile;
impl CommandMethods for sandbox::Command {
fn arg<T>(&mut self, arg: T)
where T: AsRef<OsStr> {
self.arg(arg);
}
fn env<T, U>(&mut self, key: T, val: U)
where T: AsRef<OsStr>, U: AsRef<OsStr> {
self.env(key, val);
}
}
// Note that this function can panic, due to process creation,
// avoiding this panic would require a mechanism for dealing
// with low-resource scenarios.
let (server, token) =
IpcOneShotServer::<IpcSender<UnprivilegedPipelineContent>>::new()
.expect("Failed to create IPC one-shot server.");
// If there is a sandbox, use the `gaol` API to create the child process.
let child_process = if opts::get().sandbox {
let mut command = sandbox::Command::me().expect("Failed to get current sandbox.");
self.setup_common(&mut command, token);
let profile = content_process_sandbox_profile();
ChildProcess::Sandboxed(Sandbox::new(profile).start(&mut command)
.expect("Failed to start sandboxed child process!"))
} else {
let path_to_self = env::current_exe()
.expect("Failed to get current executor.");
let mut child_process = process::Command::new(path_to_self);
self.setup_common(&mut child_process, token);
ChildProcess::Unsandboxed(child_process.spawn()
.expect("Failed to start unsandboxed child process!"))
};
let (_receiver, sender) = server.accept().expect("Server failed to accept.");
try!(sender.send(self));
Ok(child_process)
}
#[cfg(target_os = "windows")]
pub fn spawn_multiprocess(self) -> Result<ChildProcess, IOError> {
error!("Multiprocess is not supported on Windows.");
process::exit(1);
}
#[cfg(not(windows))]
fn setup_common<C: CommandMethods>(&self, command: &mut C, token: String) {
C::arg(command, "--content-process");
C::arg(command, token);
if let Ok(value) = env::var("RUST_BACKTRACE") {
C::env(command, "RUST_BACKTRACE", value);
}
if let Ok(value) = env::var("RUST_LOG") {
C::env(command, "RUST_LOG", value);
}
}
pub fn constellation_chan(&self) -> IpcSender<ScriptMsg> {
self.constellation_chan.clone()
}
pub fn opts(&self) -> Opts {
self.opts.clone()
}
pub fn prefs(&self) -> HashMap<String, Pref> {
self.prefs.clone()
}
pub fn swmanager_senders(&self) -> SWManagerSenders {
SWManagerSenders {
swmanager_sender: self.swmanager_thread.clone(),
resource_sender: self.resource_threads.sender()
}
}
}
trait CommandMethods {
fn arg<T>(&mut self, arg: T)
where T: AsRef<OsStr>;
fn env<T, U>(&mut self, key: T, val: U)
where T: AsRef<OsStr>, U: AsRef<OsStr>;
}
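// Added illustration (not part of Servo): a toy CommandMethods impl showing
// how setup_common stays agnostic about the concrete command type.
// `RecordingCommand` is a hypothetical name introduced for this sketch.
#[cfg(test)]
struct RecordingCommand(Vec<String>);
#[cfg(test)]
impl CommandMethods for RecordingCommand {
    fn arg<T>(&mut self, arg: T)
        where T: AsRef<OsStr> {
        // Record the argument instead of forwarding it to a real process.
        self.0.push(arg.as_ref().to_string_lossy().into_owned());
    }
    fn env<T, U>(&mut self, key: T, val: U)
        where T: AsRef<OsStr>, U: AsRef<OsStr> {
        self.0.push(format!("{}={}",
                            key.as_ref().to_string_lossy(),
                            val.as_ref().to_string_lossy()));
    }
}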
impl CommandMethods for process::Command {
fn arg<T>(&mut self, arg: T)
where T: AsRef<OsStr> {
self.arg(arg);
}
fn env<T, U>(&mut self, key: T, val: U)
where T: AsRef<OsStr>, U: AsRef<OsStr> {
self.env(key, val);
}
}<|fim▁end|> | Ok((pipeline, child_process))
}
fn new(id: PipelineId, |
<|file_name|>spell.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 Tuukka Turto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Module for spell objects
"""
from pyherc.aspects import log_debug, log_info
from pyherc.data.effects import EffectsCollection
class Spell():
"""
Class to represent spells
.. versionadded:: 0.9
"""
@log_debug
def __init__(self):
"""
Default constructor
"""
self.targets = []
self.effects = EffectsCollection()
self.spirit = 0
@log_debug
def add_effect_handle(self, handle):
"""
Add effect handle
:param handle: effect handle to add
:type handle: EffectHandle
"""
self.effects.add_effect_handle(handle)
@log_debug
def get_effect_handles(self, trigger=None):
"""
Get effect handles
:param trigger: optional trigger type
:type trigger: string
:returns: effect handles
:rtype: [EffectHandle]
"""
return self.effects.get_effect_handles(trigger)
@log_debug
def remove_effect_handle(self, handle):
"""
Remove given handle
:param handle: handle to remove
:type handle: EffectHandle
"""
self.effects.remove_effect_handle(handle)<|fim▁hole|>
@log_info
def cast(self, effects_factory):
"""
Cast the spell
:param effects_factory: factory for creating effects
:type effects_factory: EffectsFactory
"""
handles = self.effects.get_effect_handles('on spell hit')
effects = []
targets = (x.target for x in self.targets
if x.target)
for target in targets:
for handle in handles:
effects.append(effects_factory(key=handle.effect,
target=target))
for effect in effects:
if not effect.duration or effect.duration <= 0:
effect.trigger()
else:
effect.target.add_effect(effect)<|fim▁end|> | |
<|file_name|>attribution.py<|end_file_name|><|fim▁begin|>from kik.resource import Resource
class Attribution(Resource):
"""
Parent class for all attribution types
"""
pass
class CustomAttribution(Attribution):
"""
Attribution class for custom attributions, as documented at `<https://dev.kik.com/#/docs/messaging#attribution>`_
Usage:
>>> from kik.messages import CustomAttribution, LinkMessage
>>> message = LinkMessage()
>>> message.attribution = CustomAttribution(
>>> name='A Name',
>>> icon_url='http://foo.bar/anicon'
>>> )
"""
def __init__(self, name=None, icon_url=None):
self.name = name
self.icon_url = icon_url
@classmethod
def property_mapping(cls):
return {
'name': 'name',
'icon_url': 'iconUrl'
}
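# Added note (illustration): property_mapping drives serialization, so an
# instance built as CustomAttribution(name='A Name', icon_url='http://foo.bar/anicon')
# is emitted with the JSON keys "name" and "iconUrl".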
class PresetAttribution(Attribution):
"""
Attribution class for the preset attribution types (e.g. "gallery" or "camera")
"""
def __init__(self, preset_name):
self.preset_name = preset_name
def to_json(self):
return self.preset_name
class PresetAttributions(object):
"""
List of preset attribution types.
Valid only on :class:`PictureMessage <kik.messages.PictureMessage>` and<|fim▁hole|> :cvar GALLERY: Makes the message appear to be from a user's gallery.
:vartype GALLERY: kik.message.attribution.PresetAttribution
:cvar CAMERA: Makes the message appear to be from a camera.
:vartype CAMERA: kik.message.attribution.PresetAttribution
Usage:
>>> from kik.messages import PresetAttributions, PictureMessage
>>> message = PictureMessage()
>>> message.attribution = PresetAttributions.CAMERA
"""
GALLERY = PresetAttribution('gallery')
CAMERA = PresetAttribution('camera')<|fim▁end|> | :class:`VideoMessage <kik.messages.VideoMessage>`.
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# tacker documentation build configuration file, created by
# sphinx-quickstart on Tue May 31 19:07:30 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'oslosphinx',
'reno.sphinxext'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Tacker Release Notes'
copyright = u'2016, Tacker Developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
import pbr.version
tacker_version = pbr.version.VersionInfo('tacker')
release = tacker_version.version_string_with_vcs()
version = tacker_version.canonical_version_string()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to
# use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'tackerdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'TackerReleaseNotes.tex',
u'Tacker Release Notes Documentation',
u'Tacker Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
<|fim▁hole|># If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'tackerreleasenotes', u'Tacker Release Notes Documentation',
[u'Tacker Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'TackerReleaseNotes', u'Tacker Release Notes Documentation',
u'Tacker Developers', 'TackerReleaseNotes',
'Tacker Project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'<|fim▁end|> | |
<|file_name|>RenderOverlayHandler.java<|end_file_name|><|fim▁begin|>package TFC.Handlers.Client;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.ScaledResolution;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraftforge.client.event.RenderGameOverlayEvent;
import net.minecraftforge.client.event.RenderGameOverlayEvent.ElementType;
import net.minecraftforge.event.ForgeSubscribe;
import org.lwjgl.opengl.GL11;
import TFC.Core.TFC_Climate;
import TFC.Core.TFC_Settings;
import TFC.Core.Player.PlayerManagerTFC;
import TFC.Core.Player.TFC_PlayerClient;
import TFC.Food.FoodStatsTFC;
import TFC.Items.Tools.ItemChisel;
import TFC.Items.Tools.ItemCustomHoe;
public class RenderOverlayHandler
{
@ForgeSubscribe
public void render(RenderGameOverlayEvent.Pre event)
{
if(event.type == ElementType.HEALTH || event.type == ElementType.FOOD)
{
event.setCanceled(true);
}
}
@ForgeSubscribe
public void render(RenderGameOverlayEvent.Post event)
{
if(event.type == ElementType.HEALTH || event.type == ElementType.FOOD)
{
event.setCanceled(true);
}
ScaledResolution sr = event.resolution;
int healthRowHeight = sr.getScaledHeight() - 39;
int armorRowHeight = healthRowHeight - 10;
TFC_PlayerClient playerclient = ((TFC.Core.Player.TFC_PlayerClient)Minecraft.getMinecraft().thePlayer.getPlayerBase("TFC Player Client"));
if(playerclient != null)
{
<|fim▁hole|> Minecraft.getMinecraft().renderEngine.bindTexture("/bioxx/icons.png");
this.drawTexturedModalRect(sr.getScaledWidth() / 2-91, healthRowHeight, 0, 0, 90, 10);
float maxHealth = playerclient.getMaxHealth();
float percentHealth = Minecraft.getMinecraft().thePlayer.getHealth()/maxHealth;
this.drawTexturedModalRect(sr.getScaledWidth() / 2-91, healthRowHeight, 0, 9, (int) (90*percentHealth), 9);
//Draw Food and Water
FoodStatsTFC foodstats = playerclient.getFoodStatsTFC();
int foodLevel = foodstats.getFoodLevel();
int preFoodLevel = foodstats.getPrevFoodLevel();
float waterLevel = foodstats.waterLevel;
float percentFood = foodLevel/100f;
float percentWater = waterLevel/foodstats.getMaxWater(Minecraft.getMinecraft().thePlayer);
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
//GL11.glBindTexture(GL11.GL_TEXTURE_2D, Minecraft.getMinecraft().renderEngine.getTexture("/bioxx/icons.png"));
this.drawTexturedModalRect(sr.getScaledWidth() / 2, healthRowHeight, 0, 18, 90, 5);
if(playerclient.guishowFoodRestoreAmount)
{
float percentFood2 = Math.min(percentFood + playerclient.guiFoodRestoreAmount/100f, 1);
GL11.glColor4f(0.0F, 0.6F, 0.0F, 0.3F);
//GL11.glBindTexture(GL11.GL_TEXTURE_2D, Minecraft.getMinecraft().renderEngine.getTexture("/bioxx/icons.png"));
this.drawTexturedModalRect(sr.getScaledWidth() / 2, healthRowHeight, 0, 23, (int) (90*(percentFood2)), 5);
}
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
//GL11.glBindTexture(GL11.GL_TEXTURE_2D, Minecraft.getMinecraft().renderEngine.getTexture("/bioxx/icons.png"));
this.drawTexturedModalRect(sr.getScaledWidth() / 2, healthRowHeight, 0, 23, (int) (90*percentFood), 5);
this.drawTexturedModalRect(sr.getScaledWidth() / 2, healthRowHeight+5, 0, 28, 90, 5);
this.drawTexturedModalRect(sr.getScaledWidth() / 2, healthRowHeight+5, 0, 33, (int) (90*percentWater), 5);
//Render Tool Mode
if(Minecraft.getMinecraft().thePlayer.inventory.getCurrentItem() != null &&
Minecraft.getMinecraft().thePlayer.inventory.getCurrentItem().getItem() instanceof ItemCustomHoe)
{
int mode = PlayerManagerTFC.getInstance().getClientPlayer().hoeMode;
this.drawTexturedModalRect(sr.getScaledWidth() / 2 + 95, sr.getScaledHeight() - 21, 0+(20*mode), 38, 20, 20);
}
else if(Minecraft.getMinecraft().thePlayer.inventory.getCurrentItem() != null &&
Minecraft.getMinecraft().thePlayer.inventory.getCurrentItem().getItem() instanceof ItemChisel)
{
int mode = PlayerManagerTFC.getInstance().getClientPlayer().ChiselMode;
this.drawTexturedModalRect(sr.getScaledWidth() / 2 + 95, sr.getScaledHeight() - 21, 0+(20*mode), 58, 20, 20);
}
}
Minecraft.getMinecraft().renderEngine.resetBoundTexture();
}
@ForgeSubscribe
public void renderText(RenderGameOverlayEvent.Text event)
{
if(Minecraft.getMinecraft().gameSettings.showDebugInfo || TFC_Settings.enableDebugMode)
{
EntityPlayer player = Minecraft.getMinecraft().thePlayer;
int xCoord = (int)player.posX;
int yCoord = (int)player.posY;
int zCoord = (int)player.posZ;
event.left.add(String.format("rain: %.0f, temp: %.2f, evt: %.3f", new Object[] {
TFC_Climate.getRainfall(xCoord, yCoord, zCoord),
TFC_Climate.getHeightAdjustedTemp(xCoord, yCoord, zCoord),
TFC_Climate.manager.getEVTLayerAt(xCoord, zCoord).floatdata1}));
event.left.add("Health: " + player.getHealth());
}
}
public void drawTexturedModalRect(int par1, int par2, int par3, int par4, int par5, int par6)
{
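// Added note: 0.00390625F == 1/256, the scale factor that maps texel
// coordinates on the standard 256x256 texture atlas into normalized UV space.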
float f = 0.00390625F;
float f1 = 0.00390625F;
Tessellator tessellator = Tessellator.instance;
tessellator.startDrawingQuads();
tessellator.addVertexWithUV(par1 + 0, par2 + par6, 0.0, (par3 + 0) * f, (par4 + par6) * f1);
tessellator.addVertexWithUV(par1 + par5, par2 + par6, 0.0, (par3 + par5) * f, (par4 + par6) * f1);
tessellator.addVertexWithUV(par1 + par5, par2 + 0, 0.0, (par3 + par5) * f, (par4 + 0) * f1);
tessellator.addVertexWithUV(par1 + 0, par2 + 0, 0.0, (par3 + 0) * f, (par4 + 0) * f1);
tessellator.draw();
}
}<|fim▁end|> | //Draw Health
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
//GL11.glBindTexture(GL11.GL_TEXTURE_2D, Minecraft.getMinecraft().renderEngine.getTexture("/bioxx/icons.png"));
|
<|file_name|>test_util.py<|end_file_name|><|fim▁begin|>##
# Copyright (c) 2006-2013 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from caldavclientlibrary.protocol.http.util import parsequoted
from caldavclientlibrary.protocol.http.util import parsetoken
from caldavclientlibrary.protocol.http.util import parseStatusLine
import unittest
class TestParseQuoted(unittest.TestCase):
def testParseQuotedOK(self):
data = {
"\"\"" : ("", ""),
"\"quoted\"" : ("quoted", ""),
"\"quoted words\"" : ("quoted words", ""),
"\"quoting a \\\"word\\\"\"" : ("quoting a \"word\"", ""),
"\"\" after" : ("", "after"),
"\"quoted\" after" : ("quoted", "after"),
"\"quoted words\" after" : ("quoted words", "after"),
"\"quoting a \\\"word\\\"\" after" : ("quoting a \"word\"", "after"),
"\"quoting a \\\"word\\\" after\" after": ("quoting a \"word\" after", "after"),
"\"quoted\"after" : ("quoted", "after"),
"\"" : ("", ""),
"\"unterminated" : ("unterminated", ""),
"\"unterminated words" : ("unterminated words", ""),
"\"unterminated a \\\"word\\\"" : ("unterminated a \"word\"", ""),
}
for input, result in data.iteritems():
self.assertEqual(parsequoted(input), result)
def testParseQuotedBAD(self):
data = (
"",
"unquoted",
"unquoted \"quoted\"",
)
for input in data:
self.assertRaises(AssertionError, parsequoted, input)
class TestParseToken(unittest.TestCase):
def testParseTokenOK(self):
<|fim▁hole|> "unquoted words" : ("unquoted", "words"),
"unquoted words" : ("unquoted", "words"),
"unquoting a \"word\"" : ("unquoting", "a \"word\""),
"unquoted\twords" : ("unquoted", "words"),
"unquoting\ta \"word\"" : ("unquoting", "a \"word\""),
"unquoted: words" : ("unquoted", "words"),
"unquoting: a \"word\"" : ("unquoting", "a \"word\""),
"\"\"" : ("", ""),
"\"quoted\"" : ("quoted", ""),
"\"quoted words\"" : ("quoted words", ""),
"\"quoting a \\\"word\\\"\"" : ("quoting a \"word\"", ""),
"\"\" after" : ("", "after"),
"\"quoted\" after" : ("quoted", "after"),
"\"quoted words\" after" : ("quoted words", "after"),
"\"quoting a \\\"word\\\"\" after" : ("quoting a \"word\"", "after"),
"\"quoting a \\\"word\\\" after\" after": ("quoting a \"word\" after", "after"),
"\"quoted\"after" : ("quoted", "after"),
"\"" : ("", ""),
"\"unterminated" : ("unterminated", ""),
"\"unterminated words" : ("unterminated words", ""),
"\"unterminated a \\\"word\\\"" : ("unterminated a \"word\"", ""),
}
for input, result in data.iteritems():
self.assertEqual(parsetoken(input, " \t:"), result)
class TestParseStatusLine(unittest.TestCase):
def testParseTokenOK(self):
self.assertEqual(parseStatusLine("HTTP/1.1 200 OK"), 200)
def testParseTokenBadStatus(self):
self.assertEqual(parseStatusLine("HTTP/1.2 2001 OK"), 0)
def testParseTokenBadVersion(self):
self.assertEqual(parseStatusLine("HTTP/1.2 200 OK"), 0)
def testParseTokenBadNumber(self):
self.assertEqual(parseStatusLine("HTTP/1.1 OK"), 0)
def testParseTokenBad(self):
self.assertEqual(parseStatusLine("HTTP/1.1"), 0)<|fim▁end|> | data = {
"" : ("", ""),
"unquoted" : ("unquoted", ""), |
<|file_name|>misc.py<|end_file_name|><|fim▁begin|>'''
Created on 21.04.2015
@author: marscher
'''
from __future__ import absolute_import
"""Miscellaneous classes/functions/etc."""
import os
import struct
import ctypes
if os.name != 'nt':
import fcntl
import termios
else:
import ctypes.wintypes
DEFAULT_TERMINAL_WIDTH = None
class _WindowsCSBI(object):
"""Interfaces with Windows CONSOLE_SCREEN_BUFFER_INFO API/DLL calls. Gets info for stderr and stdout.
References:
https://code.google.com/p/colorama/issues/detail?id=47.
pytest's py project: py/_io/terminalwriter.py.
Class variables:
CSBI -- ConsoleScreenBufferInfo class/struct (not instance, the class definition itself) defined in _define_csbi().
HANDLE_STDERR -- GetStdHandle() return integer for stderr.
HANDLE_STDOUT -- GetStdHandle() return integer for stdout.
WINDLL -- my own loaded instance of ctypes.WinDLL.
"""
CSBI = None
HANDLE_STDERR = None
HANDLE_STDOUT = None
WINDLL = ctypes.LibraryLoader(getattr(ctypes, 'WinDLL', None))<|fim▁hole|> """Defines structs and populates _WindowsCSBI.CSBI."""
if _WindowsCSBI.CSBI is not None:
return
class COORD(ctypes.Structure):
"""Windows COORD structure. http://msdn.microsoft.com/en-us/library/windows/desktop/ms682119"""
_fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
class SmallRECT(ctypes.Structure):
"""Windows SMALL_RECT structure. http://msdn.microsoft.com/en-us/library/windows/desktop/ms686311"""
_fields_ = [('Left', ctypes.c_short), ('Top', ctypes.c_short), ('Right', ctypes.c_short),
('Bottom', ctypes.c_short)]
class ConsoleScreenBufferInfo(ctypes.Structure):
"""Windows CONSOLE_SCREEN_BUFFER_INFO structure.
http://msdn.microsoft.com/en-us/library/windows/desktop/ms682093
"""
_fields_ = [
('dwSize', COORD),
('dwCursorPosition', COORD),
('wAttributes', ctypes.wintypes.WORD),
('srWindow', SmallRECT),
('dwMaximumWindowSize', COORD)
]
_WindowsCSBI.CSBI = ConsoleScreenBufferInfo
@staticmethod
def initialize():
"""Initializes the WINDLL resource and populated the CSBI class variable."""
_WindowsCSBI._define_csbi()
_WindowsCSBI.HANDLE_STDERR = _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.WINDLL.kernel32.GetStdHandle(-12)
_WindowsCSBI.HANDLE_STDOUT = _WindowsCSBI.HANDLE_STDOUT or _WindowsCSBI.WINDLL.kernel32.GetStdHandle(-11)
if _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.argtypes:
return
_WindowsCSBI.WINDLL.kernel32.GetStdHandle.argtypes = [ctypes.wintypes.DWORD]
_WindowsCSBI.WINDLL.kernel32.GetStdHandle.restype = ctypes.wintypes.HANDLE
_WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.restype = ctypes.wintypes.BOOL
_WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.argtypes = [
ctypes.wintypes.HANDLE, ctypes.POINTER(_WindowsCSBI.CSBI)
]
@staticmethod
def get_info(handle):
"""Get information about this current console window (for Microsoft Windows only).
Raises IOError if attempt to get information fails (if there is no console window).
Don't forget to call _WindowsCSBI.initialize() once in your application before calling this method.
Positional arguments:
handle -- either _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.HANDLE_STDOUT.
Returns:
Dictionary with different integer values. Keys are:
buffer_width -- width of the buffer (Screen Buffer Size in cmd.exe layout tab).
buffer_height -- height of the buffer (Screen Buffer Size in cmd.exe layout tab).
terminal_width -- width of the terminal window.
terminal_height -- height of the terminal window.
bg_color -- current background color (http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088).
fg_color -- current text color code.
"""
# Query Win32 API.
csbi = _WindowsCSBI.CSBI()
try:
if not _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo(handle, ctypes.byref(csbi)):
raise IOError('Unable to get console screen buffer info from win32 API.')
except ctypes.ArgumentError:
raise IOError('Unable to get console screen buffer info from win32 API.')
# Parse data.
result = dict(
buffer_width=int(csbi.dwSize.X - 1),
buffer_height=int(csbi.dwSize.Y),
terminal_width=int(csbi.srWindow.Right - csbi.srWindow.Left),
terminal_height=int(csbi.srWindow.Bottom - csbi.srWindow.Top),
bg_color=int(csbi.wAttributes & 240),
fg_color=int(csbi.wAttributes % 16),
)
return result
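# Added usage sketch (illustration only; Windows-specific, and the values
# depend on the live console):
#
#   _WindowsCSBI.initialize()
#   info = _WindowsCSBI.get_info(_WindowsCSBI.HANDLE_STDOUT)
#   print(info['terminal_width'], info['terminal_height'])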
def terminal_width():
"""Returns the terminal's width (number of character columns)."""
try:
if os.name == 'nt':
_WindowsCSBI.initialize()
return _WindowsCSBI.get_info(_WindowsCSBI.HANDLE_STDOUT)['terminal_width']
return struct.unpack('hhhh', fcntl.ioctl(0, termios.TIOCGWINSZ, '\000' * 8))[1]
except IOError:
return 80<|fim▁end|> |
@staticmethod
def _define_csbi(): |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate serde_derive;
use std::collections::VecDeque;
use std::env;
use std::fs::File;
use std::io::{BufReader, Read};
use std::sync::{Arc, Mutex};
use std::thread::{self, sleep};
use std::time::Duration;
use std::time::{SystemTime, UNIX_EPOCH};
use async_h1::client;
use async_std::net::{TcpListener, TcpStream};
use async_std::prelude::*;
use async_std::task;
use base64::encode;
use http_types::{headers, Method, Request, Response, StatusCode, Url};
use serde::Deserialize;
use serde_json::Deserializer;
use toml;
use rand::{thread_rng, Rng};
#[async_std::main]
async fn main() -> http_types::Result<()> {
let argv: Vec<String> = env::args().collect();
// Guard: config missing
if argv.len() != 2 {
panic!("Config argument missing");
}
// Generating block
// Baseline: 150 for miner, 150 for faucet
let config = ConfigFile::from_path(&argv[1]);
let block_time = Duration::from_millis(config.network.block_time);
let mut num_blocks = 0;
<|fim▁hole|> println!("Bootstrapping chain");
let now = match SystemTime::now().duration_since(UNIX_EPOCH) {
Ok(dur) => dur,
Err(err) => err.duration(),
}
.as_secs() as u64;
let genesis_timestamp = if env::var("DYNAMIC_GENESIS_TIMESTAMP") == Ok("1".into()) {
println!(
"INFO: detected DYNAMIC_GENESIS_TIMESTAMP, will set the genesis timestamp to {}",
now
);
now.clone()
} else {
match std::env::var("STATIC_GENESIS_TIMESTAMP") {
Ok(val) => match val.parse::<u64>() {
Ok(val) => val,
Err(err) => {
println!("WARN: parsing STATIC_GENESIS_TIMESTAMP failed ({:?}), falling back on {}", err, config.network.genesis_timestamp);
config.network.genesis_timestamp
}
},
_ => config.network.genesis_timestamp,
}
};
let time_since_genesis = now - genesis_timestamp;
// If the testnet crashed, we need to generate a chain that would be
// longer that the previous chain.
let num_blocks_required = time_since_genesis / block_time.as_secs();
let num_blocks_for_miner = 150 + num_blocks_required;
let num_blocks_for_faucet = 150;
// Generate blocks for the network faucet
let faucet_address = config.network.faucet_address.clone();
generate_blocks(num_blocks_for_faucet, faucet_address, &config).await;
// Generate blocks for the network miner
let miner_address = config.network.miner_address.clone();
generate_blocks(num_blocks_for_miner, miner_address, &config).await;
num_blocks = num_blocks_for_miner + num_blocks_for_faucet;
// By blocking here, we ensure that the http server does not start
// serving requests with a bitcoin chain still being constructed.
while is_chain_bootstrap_required(&config).await? {
println!("Waiting on initial blocks to be available");
let backoff = Duration::from_millis(1_000);
sleep(backoff)
}
}
// Start a loop in a separate thread, generating new blocks
// on a given frequence (coming from config).
let boot_height = num_blocks;
let block_height_reader = Arc::new(Mutex::new(num_blocks));
let block_height_writer = block_height_reader.clone();
let conf = config.clone();
thread::spawn(move || {
let miner_address = conf.network.miner_address.clone();
loop {
let delay = {
let mut block_height = block_height_writer.lock().unwrap();
let effective_height = *block_height - num_blocks;
*block_height += 1;
let block_time = conf.get_block_time_at_height(effective_height);
let will_ignore = conf.should_ignore_transactions(effective_height);
let behavior = if will_ignore {
"buffering"
} else {
"accepting"
};
println!(
"Assembled block {}. Will be {} incoming transactions for the next {}ms, then assemble block {}.",
*block_height, behavior, block_time, *block_height + 1
);
block_time
};
async_std::task::block_on(async {
generate_blocks(1, miner_address.clone(), &conf).await;
});
thread::sleep(Duration::from_millis(delay));
}
});
    // Bind a TCP listener and build the server URL.
let bind_addr = config.network.rpc_bind.clone();
let listener = TcpListener::bind(bind_addr).await?;
let addr = format!("http://{}", listener.local_addr()?);
println!("Listening on {}", addr);
// For each incoming TCP connection, spawn a task and call `accept`.
let mut incoming = listener.incoming();
let mut buffered_requests = VecDeque::new();
while let Some(stream) = incoming.next().await {
let block_height = block_height_reader.lock().unwrap();
let effective_block_height = *block_height - boot_height;
let should_ignore_txs = config.should_ignore_transactions(effective_block_height - 1);
let stream = stream?;
let addr = addr.clone();
if should_ignore_txs {
            // Acknowledge with 200 OK, but defer the actual processing
println!("Buffering request from {}", stream.peer_addr()?);
async_h1::accept(&addr, stream.clone(), |_| async {
Ok(Response::new(StatusCode::Ok))
})
.await?;
// Enqueue request
buffered_requests.push_back((addr, stream));
} else {
// Dequeue all the requests we've been buffering
while let Some((addr, stream)) = buffered_requests.pop_front() {
let config = config.clone();
task::spawn(async move {
println!(
"Dequeuing buffered request from {}",
stream.peer_addr().unwrap()
);
if let Err(err) = accept(addr, stream, &config).await {
eprintln!("{}", err);
}
});
}
// Then handle the request
let config = config.clone();
task::spawn(async move {
println!("Handling request from {}", stream.peer_addr().unwrap());
if let Err(err) = accept(addr, stream, &config).await {
eprintln!("{}", err);
}
});
}
}
Ok(())
}
// Take a TCP stream, and convert it into sequential HTTP request / response pairs.
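// Illustrative request (sketch only: the bind address is assumed, and the
// method must appear in the config's `whitelisted_rpc_calls`):
//
//   curl -X POST http://127.0.0.1:18443/ -H 'Content-Type: application/json' \
//        -d '{"jsonrpc":"2.0","id":0,"method":"getblockhash","params":[0]}'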
async fn accept(addr: String, stream: TcpStream, config: &ConfigFile) -> http_types::Result<()> {
async_h1::accept(&addr, stream.clone(), |mut req| async {
match (
req.method(),
req.url().path(),
req.header(&headers::CONTENT_TYPE),
) {
(Method::Get, "/ping", Some(_content_type)) => Ok(Response::new(StatusCode::Ok)),
(Method::Post, "/", Some(_content_types)) => {
let (res, buffer) = async_std::task::block_on(async move {
let mut buffer = Vec::new();
let mut body = req.take_body();
let res = body.read_to_end(&mut buffer).await;
(res, buffer)
});
// Guard: can't be read
if res.is_err() {
return Ok(Response::new(StatusCode::MethodNotAllowed));
}
let mut deserializer = Deserializer::from_slice(&buffer);
// Guard: can't be parsed
let rpc_req: RPCRequest = match RPCRequest::deserialize(&mut deserializer) {
Ok(rpc_req) => rpc_req,
_ => return Ok(Response::new(StatusCode::MethodNotAllowed)),
};
println!("{:?}", rpc_req);
let authorized_methods = &config.network.whitelisted_rpc_calls;
// Guard: unauthorized method
if !authorized_methods.contains(&rpc_req.method) {
return Ok(Response::new(StatusCode::MethodNotAllowed));
}
// Forward the request
let stream = TcpStream::connect(config.network.bitcoind_rpc_host.clone()).await?;
let body = serde_json::to_vec(&rpc_req).unwrap();
let req = build_request(&config, body);
let response = match client::connect(stream.clone(), req).await {
Ok(ref mut res) => {
let mut response = Response::new(res.status());
let _ = response.append_header("Content-Type", "application/json");
response.set_body(res.take_body());
response
}
Err(err) => {
println!("Unable to reach host: {:?}", err);
return Ok(Response::new(StatusCode::MethodNotAllowed));
}
};
Ok(response)
}
_ => Ok(Response::new(StatusCode::MethodNotAllowed)),
}
})
.await?;
Ok(())
}
async fn is_chain_bootstrap_required(config: &ConfigFile) -> http_types::Result<bool> {
let req = RPCRequest::is_chain_bootstrapped();
let mut backoff: f64 = 1.0;
let mut rng = thread_rng();
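    // Illustrative growth: each pass multiplies the delay by a factor in
    // [2, 3) (doubling plus uniform jitter), so starting from 1.0s the
    // successive delays land in roughly [2,3), [4,9), [8,27) seconds, and are
    // capped at 60s by the `.min(60.0)` below.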
let mut resp = loop {
backoff = (2.0 * backoff + (backoff * rng.gen_range(0.0, 1.0))).min(60.0);
let duration = Duration::from_millis((backoff * 1_000.0) as u64);
let stream = match TcpStream::connect(config.network.bitcoind_rpc_host.clone()).await {
Ok(stream) => stream,
Err(e) => {
println!(
"Error while trying to connect to {}: {:?}",
config.network.bitcoind_rpc_host, e
);
sleep(duration);
continue;
}
};
let body = serde_json::to_vec(&req).unwrap();
let response = client::connect(stream, build_request(&config, body)).await;
match response {
Ok(response) => {
break response;
}
Err(e) => {
println!("Error: {:?}", e);
sleep(duration);
}
};
};
let (res, buffer) = async_std::task::block_on(async move {
let mut buffer = Vec::new();
let mut body = resp.take_body();
let res = body.read_to_end(&mut buffer).await;
(res, buffer)
});
// Guard: can't be read
if res.is_err() {
panic!("Chain height could not be determined")
}
let mut deserializer = Deserializer::from_slice(&buffer);
// Guard: can't be parsed
let rpc_resp: RPCResult = match RPCResult::deserialize(&mut deserializer) {
Ok(rpc_req) => rpc_req,
_ => panic!("Chain height could not be determined"),
};
match (rpc_resp.result, rpc_resp.error) {
(Some(_), None) => return Ok(false),
(None, Some(error)) => {
if let Some(keys) = error.as_object() {
if let Some(message) = keys.get("message") {
if let Some(message) = message.as_str() {
if message == "Block height out of range" {
return Ok(true);
}
}
}
}
}
(_, _) => {}
}
panic!("Chain height could not be determined")
}
async fn generate_blocks(blocks_count: u64, address: String, config: &ConfigFile) {
let rpc_addr = config.network.bitcoind_rpc_host.clone();
let rpc_req = RPCRequest::generate_next_block_req(blocks_count, address);
let stream = match TcpStream::connect(rpc_addr).await {
Ok(stream) => stream,
Err(err) => {
println!("ERROR: connection failed - {:?}", err);
return;
}
};
let body = match serde_json::to_vec(&rpc_req) {
Ok(body) => body,
Err(err) => {
println!("ERROR: serialization failed - {:?}", err);
return;
}
};
let req = build_request(&config, body);
match client::connect(stream.clone(), req).await {
Ok(_) => {}
Err(err) => {
println!("ERROR: rpc invokation failed - {:?}", err);
return;
}
};
}
fn build_request(config: &ConfigFile, body: Vec<u8>) -> Request {
let url = Url::parse(&format!("http://{}/", config.network.bitcoind_rpc_host)).unwrap();
let mut req = Request::new(Method::Post, url);
req.append_header("Authorization", config.network.authorization_token())
.unwrap();
req.append_header("Content-Type", "application/json")
.unwrap();
req.append_header("Host", format!("{}", config.network.bitcoind_rpc_host))
.unwrap();
req.set_body(body);
req
}
#[derive(Debug, Clone, Deserialize, Serialize)]
/// JSONRPC Request
pub struct RPCRequest {
/// The name of the RPC call
pub method: String,
/// Parameters to the RPC call
pub params: serde_json::Value,
/// Identifier for this Request, which should appear in the response
pub id: serde_json::Value,
/// jsonrpc field, MUST be "2.0"
pub jsonrpc: serde_json::Value,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct RPCResult {
/// The error returned by the RPC call
pub error: Option<serde_json::Value>,
/// The value returned by the RPC call
pub result: Option<serde_json::Value>,
}
impl RPCRequest {
pub fn generate_next_block_req(blocks_count: u64, address: String) -> RPCRequest {
RPCRequest {
method: "generatetoaddress".to_string(),
params: serde_json::Value::Array(vec![blocks_count.into(), address.into()]),
id: 0.into(),
jsonrpc: "2.0".to_string().into(),
}
}
pub fn is_chain_bootstrapped() -> RPCRequest {
RPCRequest {
method: "getblockhash".to_string(),
params: serde_json::Value::Array(vec![200.into()]),
id: 0.into(),
jsonrpc: "2.0".to_string().into(),
}
}
}
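// Illustrative sketch of the wire format (the address value is assumed): the
// request built by `RPCRequest::generate_next_block_req(1, addr)` serializes
// to a JSON-RPC 2.0 payload along these lines:
//
//   {"method":"generatetoaddress","params":[1,"bcrt1q..."],"id":0,"jsonrpc":"2.0"}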
#[derive(Debug, Clone, Deserialize)]
pub struct ConfigFile {
/// Regtest node
network: NetworkConfig,
/// List of blocks config
blocks: Vec<BlocksRangeConfig>,
}
impl ConfigFile {
pub fn from_path(path: &str) -> ConfigFile {
let path = File::open(path).unwrap();
let mut config_reader = BufReader::new(path);
let mut config = vec![];
config_reader.read_to_end(&mut config).unwrap();
toml::from_slice(&config[..]).unwrap()
}
pub fn should_ignore_transactions(&self, block_height: u64) -> bool {
match self.get_blocks_config_at_height(block_height) {
Some(conf) => conf.ignore_txs,
None => false,
}
}
pub fn get_block_time_at_height(&self, block_height: u64) -> u64 {
match self.get_blocks_config_at_height(block_height) {
Some(conf) => conf.block_time,
None => self.network.block_time,
}
}
pub fn get_blocks_config_at_height(&self, block_height: u64) -> Option<&BlocksRangeConfig> {
if self.blocks.len() == 0 {
return None;
}
let mut cursor = 0;
for block in self.blocks.iter() {
if block_height >= cursor && block_height < (cursor + block.count) {
return Some(block);
}
cursor += block.count;
}
return None;
}
}
#[derive(Debug, Clone, Deserialize)]
pub struct NetworkConfig {
    /// Proxy's bind address (host:port)
rpc_bind: String,
    /// Duration between blocks, in milliseconds
block_time: u64,
/// Address receiving coinbases and mining fee
miner_address: String,
/// Address receiving coinbases and mining fee
faucet_address: String,
/// RPC address used by bitcoind
bitcoind_rpc_host: String,
/// Credential - username
bitcoind_rpc_user: String,
/// Credential - password
bitcoind_rpc_pass: String,
    /// Used for deriving how many blocks the chain should already contain
genesis_timestamp: u64,
/// List of whitelisted RPC calls
whitelisted_rpc_calls: Vec<String>,
}
impl NetworkConfig {
pub fn authorization_token(&self) -> String {
let token = encode(format!(
"{}:{}",
self.bitcoind_rpc_user, self.bitcoind_rpc_pass
));
format!("Basic {}", token)
}
}
#[derive(Debug, Clone, Deserialize)]
pub struct BlocksRangeConfig {
/// Number of blocks to mine
count: u64,
    /// Delay between blocks, in milliseconds
    block_time: u64,
    /// Should incoming transactions be buffered (ignored) rather than included in the next block
ignore_txs: bool,
}<|fim▁end|> | if is_chain_bootstrap_required(&config).await? { |
<|file_name|>JDBCFeeder.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2014 Jörg Prante
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xbib.elasticsearch.plugin.jdbc.feeder;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.metrics.MeterMetric;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.JsonSettingsLoader;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.river.RiverName;
import org.xbib.elasticsearch.plugin.jdbc.RiverRunnable;
import org.xbib.elasticsearch.plugin.jdbc.classloader.uri.URIClassLoader;
import org.xbib.elasticsearch.plugin.jdbc.client.Ingest;
import org.xbib.elasticsearch.plugin.jdbc.client.IngestFactory;
import org.xbib.elasticsearch.plugin.jdbc.client.transport.BulkTransportClient;
import org.xbib.elasticsearch.plugin.jdbc.cron.CronExpression;
import org.xbib.elasticsearch.plugin.jdbc.cron.CronThreadPoolExecutor;
import org.xbib.elasticsearch.plugin.jdbc.state.RiverStatesMetaData;
import org.xbib.elasticsearch.plugin.jdbc.util.RiverServiceLoader;
import org.xbib.elasticsearch.river.jdbc.RiverFlow;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Reader;
import java.io.Writer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.collect.Lists.newLinkedList;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
* Standalone feeder for JDBC
*/
public class JDBCFeeder {
private final static ESLogger logger = ESLoggerFactory.getLogger("JDBCFeeder");
/**
     * Register the metadata factory with Elasticsearch so that
     * ClusterStateResponse objects carrying RiverStatesMetaData can be decoded
*/
static {
MetaData.registerFactory(RiverStatesMetaData.TYPE, RiverStatesMetaData.FACTORY);
}
protected Reader reader;
protected Writer writer;
protected PrintStream printStream;
protected IngestFactory ingestFactory;
/**
* This ingest is the client for the river flow state operations
*/
private Ingest ingest;
private RiverFlow riverFlow;
private List<Map<String, Object>> definitions;
private ThreadPoolExecutor threadPoolExecutor;
private volatile Thread feederThread;
private volatile boolean closed;
/**
* Constructor for running this from command line
*/
public JDBCFeeder() {
Runtime.getRuntime().addShutdownHook(shutdownHook());
}<|fim▁hole|>
public void exec() throws Exception {
readFrom(new InputStreamReader(System.in, "UTF-8"))
.writeTo(new OutputStreamWriter(System.out, "UTF-8"))
.errorsTo(System.err)
.start();
}
@SuppressWarnings("unchecked")
public JDBCFeeder readFrom(Reader reader) {
this.reader = reader;
try {
Map<String, Object> map = XContentFactory.xContent(XContentType.JSON).createParser(reader).mapOrderedAndClose();
Settings settings = settingsBuilder()
.put(new JsonSettingsLoader().load(jsonBuilder().map(map).string()))
.build();
this.definitions = newLinkedList();
Object pipeline = map.get("jdbc");
if (pipeline instanceof Map) {
definitions.add((Map<String, Object>) pipeline);
}
if (pipeline instanceof List) {
definitions.addAll((List<Map<String, Object>>) pipeline);
}
// before running, create the river flow
createRiverFlow(map, settings);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return this;
}
protected RiverFlow createRiverFlow(Map<String, Object> spec, Settings settings) throws IOException {
String strategy = XContentMapValues.nodeStringValue(spec.get("strategy"), "simple");
this.riverFlow = RiverServiceLoader.newRiverFlow(strategy);
logger.debug("strategy {}: river flow class {}, spec = {} settings = {}",
strategy, riverFlow.getClass().getName(), spec, settings.getAsMap());
this.ingestFactory = createIngestFactory(settings);
        // our private ingest, needed for having a client in the river flow
this.ingest = ingestFactory.create();
riverFlow.setRiverName(new RiverName("jdbc", "feeder"))
.setSettings(settings)
.setClient(ingest.client())
.setIngestFactory(ingestFactory)
.setMetric(new MeterMetric(Executors.newScheduledThreadPool(1), TimeUnit.SECONDS))
.setQueue(new ConcurrentLinkedDeque<Map<String, Object>>());
return riverFlow;
}
public JDBCFeeder writeTo(Writer writer) {
this.writer = writer;
return this;
}
public JDBCFeeder errorsTo(PrintStream printStream) {
this.printStream = printStream;
return this;
}
public JDBCFeeder start() throws Exception {
this.closed = false;
if (ingest.getConnectedNodes().isEmpty()) {
throw new IOException("no nodes connected, can't continue");
}
this.feederThread = new Thread(new RiverRunnable(riverFlow, definitions));
List<Future<?>> futures = schedule(feederThread);
// wait for all threads to finish
for (Future<?> future : futures) {
future.get();
}
ingest.shutdown();
return this;
}
private List<Future<?>> schedule(Thread thread) {
Settings settings = riverFlow.getSettings();
String[] schedule = settings.getAsArray("schedule");
List<Future<?>> futures = newLinkedList();
Long seconds = settings.getAsTime("interval", TimeValue.timeValueSeconds(0)).seconds();
if (schedule != null && schedule.length > 0) {
CronThreadPoolExecutor cronThreadPoolExecutor =
new CronThreadPoolExecutor(settings.getAsInt("threadpoolsize", 1));
for (String cron : schedule) {
futures.add(cronThreadPoolExecutor.schedule(thread, new CronExpression(cron)));
}
this.threadPoolExecutor = cronThreadPoolExecutor;
logger.debug("scheduled feeder instance with cron expressions {}", Arrays.asList(schedule));
} else if (seconds > 0L) {
ScheduledThreadPoolExecutor scheduledThreadPoolExecutor =
new ScheduledThreadPoolExecutor(settings.getAsInt("threadpoolsize", 4));
futures.add(scheduledThreadPoolExecutor.scheduleAtFixedRate(thread, 0L, seconds, TimeUnit.SECONDS));
logger.debug("scheduled feeder instance at fixed rate of {} seconds", seconds);
this.threadPoolExecutor = scheduledThreadPoolExecutor;
} else {
this.threadPoolExecutor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>());
futures.add(threadPoolExecutor.submit(thread));
logger.debug("started feeder instance");
}
return futures;
}
/**
* Shut down feeder instance by Ctrl-C
*
* @return shutdown thread
*/
public Thread shutdownHook() {
return new Thread() {
public void run() {
try {
shutdown();
} catch (Exception e) {
e.printStackTrace(printStream);
}
}
};
}
public synchronized void shutdown() throws Exception {
if (closed) {
return;
}
closed = true;
if (threadPoolExecutor != null) {
threadPoolExecutor.shutdownNow();
threadPoolExecutor = null;
}
if (feederThread != null) {
feederThread.interrupt();
}
if (!ingest.isShutdown()) {
ingest.shutdown();
}
reader.close();
writer.close();
printStream.close();
}
private IngestFactory createIngestFactory(final Settings settings) {
return new IngestFactory() {
@Override
public Ingest create() {
Integer maxbulkactions = settings.getAsInt("max_bulk_actions", 10000);
Integer maxconcurrentbulkrequests = settings.getAsInt("max_concurrent_bulk_requests",
Runtime.getRuntime().availableProcessors() * 2);
ByteSizeValue maxvolume = settings.getAsBytesSize("max_bulk_volume", ByteSizeValue.parseBytesSizeValue("10m"));
TimeValue maxrequestwait = settings.getAsTime("max_request_wait", TimeValue.timeValueSeconds(60));
TimeValue flushinterval = settings.getAsTime("flush_interval", TimeValue.timeValueSeconds(5));
File home = new File(settings.get("home", "."));
BulkTransportClient ingest = new BulkTransportClient();
Settings clientSettings = ImmutableSettings.settingsBuilder()
.put("cluster.name", settings.get("elasticsearch.cluster", "elasticsearch"))
.put("host", settings.get("elasticsearch.host", "localhost"))
.put("port", settings.getAsInt("elasticsearch.port", 9300))
.put("sniff", settings.getAsBoolean("elasticsearch.sniff", false))
.put("name", "feeder") // prevents lookup of names.txt, we don't have it, and marks this node as "feeder". See also module load skipping in JDBCRiverPlugin
.put("client.transport.ignore_cluster_name", true) // ignore cluster name setting
.put("client.transport.ping_timeout", settings.getAsTime("elasticsearch.timeout", TimeValue.timeValueSeconds(10))) // ping timeout
.put("client.transport.nodes_sampler_interval", settings.getAsTime("elasticsearch.timeout", TimeValue.timeValueSeconds(5))) // for sniff sampling
.put("path.plugins", ".dontexist") // pointing to a non-exiting folder means, this disables loading site plugins
// adding our custom class loader is tricky, actions may not be registered to ActionService
.classLoader(getClassLoader(getClass().getClassLoader(), home))
.build();
ingest.maxActionsPerBulkRequest(maxbulkactions)
.maxConcurrentBulkRequests(maxconcurrentbulkrequests)
.maxVolumePerBulkRequest(maxvolume)
.maxRequestWait(maxrequestwait)
.flushIngestInterval(flushinterval)
.newClient(clientSettings);
return ingest;
}
};
}
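
    // Illustrative sketch of the settings this factory consumes. The keys are
    // taken from the getters above; the values shown match the in-code
    // defaults, except "home", whose path is an assumption:
    //
    //   { "elasticsearch": { "cluster": "elasticsearch", "host": "localhost", "port": 9300 },
    //     "max_bulk_actions": 10000, "flush_interval": "5s",
    //     "home": "/usr/share/elasticsearch" }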
/**
     * We have to add Elasticsearch to our classpath, but exclude all JVM plugins
     * when starting our TransportClient.
     *
     * @param parent the parent class loader
     * @param home   the Elasticsearch home directory (ES_HOME)
     * @return a custom class loader with our dependencies
*/
private ClassLoader getClassLoader(ClassLoader parent, File home) {
URIClassLoader classLoader = new URIClassLoader(parent);
File[] libs = new File(home + "/lib").listFiles();
if (libs != null) {
for (File file : libs) {
if (file.getName().toLowerCase().endsWith(".jar")) {
classLoader.addURI(file.toURI());
}
}
}
return classLoader;
}
}<|fim▁end|> | |
<|file_name|>bottomSheet.js<|end_file_name|><|fim▁begin|>/*!
* Angular Material Design
* https://github.com/angular/material
* @license MIT
* v1.1.1-master-f6dedff
*/
(function( window, angular, undefined ){
"use strict";
/**
* @ngdoc module
* @name material.components.bottomSheet
* @description
* BottomSheet
*/
MdBottomSheetDirective['$inject'] = ["$mdBottomSheet"];
MdBottomSheetProvider['$inject'] = ["$$interimElementProvider"];
angular
.module('material.components.bottomSheet', [
'material.core',
'material.components.backdrop'
])
.directive('mdBottomSheet', MdBottomSheetDirective)
.provider('$mdBottomSheet', MdBottomSheetProvider);
/* ngInject */
function MdBottomSheetDirective($mdBottomSheet) {
return {
restrict: 'E',
link : function postLink(scope, element) {
element.addClass('_md'); // private md component indicator for styling
// When navigation force destroys an interimElement, then
// listen and $destroy() that interim instance...
scope.$on('$destroy', function() {
$mdBottomSheet.destroy();
});
}
};
}
/**
* @ngdoc service
* @name $mdBottomSheet
* @module material.components.bottomSheet
*
* @description
* `$mdBottomSheet` opens a bottom sheet over the app and provides a simple promise API.
*
* ## Restrictions
*
* - The bottom sheet's template must have an outer `<md-bottom-sheet>` element.
* - Add the `md-grid` class to the bottom sheet for a grid layout.
* - Add the `md-list` class to the bottom sheet for a list layout.
*
* @usage
* <hljs lang="html">
* <div ng-controller="MyController">
* <md-button ng-click="openBottomSheet()">
* Open a Bottom Sheet!
* </md-button>
* </div>
* </hljs>
* <hljs lang="js">
* var app = angular.module('app', ['ngMaterial']);
* app.controller('MyController', function($scope, $mdBottomSheet) {
* $scope.openBottomSheet = function() {
* $mdBottomSheet.show({
* template: '<md-bottom-sheet>Hello!</md-bottom-sheet>'
* });
* };
* });
* </hljs>
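 *
 * The returned promise can be consumed as sketched below (the handler
 * bodies are illustrative, not part of the API):
 * <hljs lang="js">
 * $mdBottomSheet.show({
 *   template: '<md-bottom-sheet>Hello!</md-bottom-sheet>'
 * }).then(function(result) {
 *   // resolved via $mdBottomSheet.hide(result)
 * }, function() {
 *   // rejected via $mdBottomSheet.cancel()
 * });
 * </hljs>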
*/
/**
* @ngdoc method
* @name $mdBottomSheet#show
*
* @description
* Show a bottom sheet with the specified options.
*
* @param {object} options An options object, with the following properties:
*
* - `templateUrl` - `{string=}`: The url of an html template file that will
* be used as the content of the bottom sheet. Restrictions: the template must
* have an outer `md-bottom-sheet` element.
* - `template` - `{string=}`: Same as templateUrl, except this is an actual
* template string.
* - `scope` - `{object=}`: the scope to link the template / controller to. If none is specified, it will create a new child scope.
* This scope will be destroyed when the bottom sheet is removed unless `preserveScope` is set to true.
* - `preserveScope` - `{boolean=}`: whether to preserve the scope when the element is removed. Default is false
* - `controller` - `{string=}`: The controller to associate with this bottom sheet.
* - `locals` - `{string=}`: An object containing key/value pairs. The keys will
* be used as names of values to inject into the controller. For example,
* `locals: {three: 3}` would inject `three` into the controller with the value
* of 3.
* - `clickOutsideToClose` - `{boolean=}`: Whether the user can click outside the bottom sheet to
* close it. Default true.
* - `bindToController` - `{boolean=}`: When set to true, the locals will be bound to the controller instance.
 *   - `disableBackdrop` - `{boolean=}`: When set to true, the bottom sheet will not show a backdrop.
* - `escapeToClose` - `{boolean=}`: Whether the user can press escape to close the bottom sheet.
* Default true.
* - `resolve` - `{object=}`: Similar to locals, except it takes promises as values
* and the bottom sheet will not open until the promises resolve.
* - `controllerAs` - `{string=}`: An alias to assign the controller to on the scope.
* - `parent` - `{element=}`: The element to append the bottom sheet to. The `parent` may be a `function`, `string`,
* `object`, or null. Defaults to appending to the body of the root element (or the root element) of the application.
* e.g. angular.element(document.getElementById('content')) or "#content"
* - `disableParentScroll` - `{boolean=}`: Whether to disable scrolling while the bottom sheet is open.
* Default true.
*
* @returns {promise} A promise that can be resolved with `$mdBottomSheet.hide()` or
* rejected with `$mdBottomSheet.cancel()`.
*/
/**
* @ngdoc method
* @name $mdBottomSheet#hide
*
* @description
* Hide the existing bottom sheet and resolve the promise returned from
 * `$mdBottomSheet.show()`. This call will close the most recently opened/current bottom sheet (if any).
*
* @param {*=} response An argument for the resolved promise.
*
*/
/**
* @ngdoc method
* @name $mdBottomSheet#cancel
*
* @description
* Hide the existing bottom sheet and reject the promise returned from
* `$mdBottomSheet.show()`.
*
* @param {*=} response An argument for the rejected promise.
*
*/
function MdBottomSheetProvider($$interimElementProvider) {
  bottomSheetDefaults['$inject'] = ["$animate", "$mdConstant", "$mdUtil", "$mdTheming", "$mdBottomSheet", "$rootElement", "$mdGesture", "$log"];
  // how fast we need to flick down to close the sheet, pixels/ms
  var CLOSING_VELOCITY = 0.5;
var PADDING = 80; // same as css
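  // Worked example (numbers assumed): flicking downward at 0.6 px/ms exceeds
  // CLOSING_VELOCITY (0.5), so onDragEnd below closes the sheet even when the
  // drag covered only a short distance, provided it moved downward at all.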
return $$interimElementProvider('$mdBottomSheet')
.setDefaults({
methods: ['disableParentScroll', 'escapeToClose', 'clickOutsideToClose'],
options: bottomSheetDefaults
});
/* ngInject */
function bottomSheetDefaults($animate, $mdConstant, $mdUtil, $mdTheming, $mdBottomSheet, $rootElement,
$mdGesture, $log) {
var backdrop;
return {
themable: true,
onShow: onShow,
onRemove: onRemove,
disableBackdrop: false,
escapeToClose: true,
clickOutsideToClose: true,
disableParentScroll: true
};
function onShow(scope, element, options, controller) {
element = $mdUtil.extractElementByName(element, 'md-bottom-sheet');
// prevent tab focus or click focus on the bottom-sheet container
element.attr('tabindex',"-1");
      // Once the md-bottom-sheet has `ng-cloak` applied on its template, the opening animation will not work properly.
      // This is a very common problem, so we have to notify the developer about it.
if (element.hasClass('ng-cloak')) {
var message = '$mdBottomSheet: using `<md-bottom-sheet ng-cloak >` will affect the bottom-sheet opening animations.';
$log.warn( message, element[0] );
}
if (!options.disableBackdrop) {
// Add a backdrop that will close on click
backdrop = $mdUtil.createBackdrop(scope, "md-bottom-sheet-backdrop md-opaque");
        // Prevent mouse focus on backdrop; ONLY programmatic focus allowed.
        // This allows clicks on the backdrop to propagate to the $rootElement and
        // ESC key events to be detected properly.
backdrop[0].tabIndex = -1;
if (options.clickOutsideToClose) {
backdrop.on('click', function() {
$mdUtil.nextTick($mdBottomSheet.cancel,true);
});
}
$mdTheming.inherit(backdrop, options.parent);
$animate.enter(backdrop, options.parent, null);
}
var bottomSheet = new BottomSheet(element, options.parent);
options.bottomSheet = bottomSheet;
$mdTheming.inherit(bottomSheet.element, options.parent);
if (options.disableParentScroll) {
options.restoreScroll = $mdUtil.disableScrollAround(bottomSheet.element, options.parent);
}
return $animate.enter(bottomSheet.element, options.parent, backdrop)
.then(function() {
var focusable = $mdUtil.findFocusTarget(element) || angular.element(
element[0].querySelector('button') ||
element[0].querySelector('a') ||
element[0].querySelector($mdUtil.prefixer('ng-click', true))
) || backdrop;
if (options.escapeToClose) {
options.rootElementKeyupCallback = function(e) {
if (e.keyCode === $mdConstant.KEY_CODE.ESCAPE) {
$mdUtil.nextTick($mdBottomSheet.cancel,true);
}
};
$rootElement.on('keyup', options.rootElementKeyupCallback);
focusable && focusable.focus();
}
});
}
function onRemove(scope, element, options) {
var bottomSheet = options.bottomSheet;
if (!options.disableBackdrop) $animate.leave(backdrop);
return $animate.leave(bottomSheet.element).then(function() {
if (options.disableParentScroll) {
options.restoreScroll();
delete options.restoreScroll;
}
bottomSheet.cleanup();
});
}
/**
* BottomSheet class to apply bottom-sheet behavior to an element
*/
function BottomSheet(element, parent) {
var deregister = $mdGesture.register(parent, 'drag', { horizontal: false });
parent.on('$md.dragstart', onDragStart)
.on('$md.drag', onDrag)
.on('$md.dragend', onDragEnd);
return {
element: element,
cleanup: function cleanup() {
deregister();
parent.off('$md.dragstart', onDragStart);
parent.off('$md.drag', onDrag);
parent.off('$md.dragend', onDragEnd);
}
};
function onDragStart(ev) {
// Disable transitions on transform so that it feels fast
element.css($mdConstant.CSS.TRANSITION_DURATION, '0ms');
}
function onDrag(ev) {
var transform = ev.pointer.distanceY;
if (transform < 5) {<|fim▁hole|> transform = Math.max(-PADDING, transform / 2);
}
element.css($mdConstant.CSS.TRANSFORM, 'translate3d(0,' + (PADDING + transform) + 'px,0)');
}
function onDragEnd(ev) {
if (ev.pointer.distanceY > 0 &&
(ev.pointer.distanceY > 20 || Math.abs(ev.pointer.velocityY) > CLOSING_VELOCITY)) {
var distanceRemaining = element.prop('offsetHeight') - ev.pointer.distanceY;
var transitionDuration = Math.min(distanceRemaining / ev.pointer.velocityY * 0.75, 500);
element.css($mdConstant.CSS.TRANSITION_DURATION, transitionDuration + 'ms');
$mdUtil.nextTick($mdBottomSheet.cancel,true);
} else {
element.css($mdConstant.CSS.TRANSITION_DURATION, '');
element.css($mdConstant.CSS.TRANSFORM, '');
}
}
}
}
}
})(window, window.angular);<|fim▁end|> | // Slow down drag when trying to drag up, and stop after PADDING |
<|file_name|>acl.go<|end_file_name|><|fim▁begin|>// Copyright 2018 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
<|fim▁hole|>import (
"context"
"strings"
"go.chromium.org/luci/common/data/stringset"
"go.chromium.org/luci/common/errors"
"go.chromium.org/luci/server/auth"
api "go.chromium.org/luci/cipd/api/cipd/v1"
)
// impliedRoles defines what roles are "inherited" by other roles, e.g.
// WRITERs are automatically READERs, so hasRole(..., READER) should return true
// for WRITERs too.
//
// The format is "role -> {role itself} + set of roles implied by it, perhaps
// indirectly".
//
// If a role is missing from this map, it assumed to not be implying any roles.
var impliedRoles = map[api.Role][]api.Role{
api.Role_READER: {api.Role_READER},
api.Role_WRITER: {api.Role_WRITER, api.Role_READER},
api.Role_OWNER: {api.Role_OWNER, api.Role_WRITER, api.Role_READER},
}
// impliedRolesRev is reverse of impliedRoles mapping.
//
// The format is "role -> {role itself} + set of roles that inherit it, perhaps
// indirectly".
//
// If a role is missing from this map, it assumed to not be inherited by
// anything.
var impliedRolesRev = map[api.Role]map[api.Role]struct{}{
api.Role_READER: roleSet(api.Role_READER, api.Role_WRITER, api.Role_OWNER),
api.Role_WRITER: roleSet(api.Role_WRITER, api.Role_OWNER),
api.Role_OWNER: roleSet(api.Role_OWNER),
}
func roleSet(roles ...api.Role) map[api.Role]struct{} {
m := make(map[api.Role]struct{}, len(roles))
for _, r := range roles {
m[r] = struct{}{}
}
return m
}
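
// exampleWriterSatisfiesReader is an illustrative sketch (not referenced by
// production code) of how the two tables compose: a caller granted WRITER
// passes a READER check, because impliedRolesRev[READER] contains WRITER.
func exampleWriterSatisfiesReader() bool {
	_, ok := impliedRolesRev[api.Role_READER][api.Role_WRITER]
	return ok // true: WRITER inherits READER
}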
// hasRole checks whether the current caller has the given role in any of the
// supplied PrefixMetadata objects.
//
// It understands the role inheritance defined by the impliedRoles map.
//
// 'metas' is metadata for some prefix and all parent prefixes. It is expected
// to be ordered by the prefix length (shortest first). Ordering is not really
// used now, but it may change in the future.
//
// Returns only transient errors.
func hasRole(c context.Context, metas []*api.PrefixMetadata, role api.Role) (bool, error) {
caller := string(auth.CurrentIdentity(c)) // e.g. "user:[email protected]"
// E.g. if 'role' is READER, 'roles' will be {READER, WRITER, OWNER}.
roles := impliedRolesRev[role]
if roles == nil {
roles = roleSet(role)
}
	// Enumerate the set of principals that have any of the requested roles in any
	// of the prefixes. Exit early on a direct match, otherwise proceed
// to more expensive group membership checks. Note that we don't use isInACL
// here because we want to postpone all group checks until the very end,
// checking memberships in all groups mentioned in 'metas' at once.
groups := stringset.New(10) // 10 is picked arbitrarily
for _, meta := range metas {
for _, acl := range meta.Acls {
if _, ok := roles[acl.Role]; !ok {
continue // not the role we are interested in
}
for _, p := range acl.Principals {
if p == caller {
return true, nil // the caller was specified in ACLs explicitly
}
// Is this a reference to a group?
if s := strings.SplitN(p, ":", 2); len(s) == 2 && s[0] == "group" {
groups.Add(s[1])
}
}
}
}
yes, err := auth.IsMember(c, groups.ToSlice()...)
if err != nil {
return false, errors.Annotate(err, "failed to check group memberships when checking ACLs for role %s", role).Err()
}
return yes, nil
}
// rolesInPrefix returns the union of roles the caller has across the
// supplied PrefixMetadata objects.
//
// It understands the role inheritance defined by the impliedRoles map.
//
// Returns only transient errors.
func rolesInPrefix(c context.Context, metas []*api.PrefixMetadata) ([]api.Role, error) {
roles := roleSet()
for _, meta := range metas {
for _, acl := range meta.Acls {
if _, ok := roles[acl.Role]; ok {
continue // seen this role already
}
switch yes, err := isInACL(c, acl); {
case err != nil:
return nil, err
case yes:
// Add acl.Role and all roles implied by it to 'roles' set.
for _, r := range impliedRoles[acl.Role] {
roles[r] = struct{}{}
}
}
}
}
// Arrange the result in the order of Role enum definition.
out := make([]api.Role, 0, len(roles))
for r := api.Role_READER; r <= api.Role_OWNER; r++ {
if _, ok := roles[r]; ok {
out = append(out, r)
}
}
return out, nil
}
// isInACL is true if the caller is in the given access control list.
func isInACL(c context.Context, acl *api.PrefixMetadata_ACL) (bool, error) {
caller := string(auth.CurrentIdentity(c)) // e.g. "user:[email protected]"
var groups []string
for _, p := range acl.Principals {
if p == caller {
return true, nil // the caller was specified in ACLs explicitly
}
if s := strings.SplitN(p, ":", 2); len(s) == 2 && s[0] == "group" {
groups = append(groups, s[1])
}
}
yes, err := auth.IsMember(c, groups...)
if err != nil {
return false, errors.Annotate(err, "failed to check group memberships when checking ACLs").Err()
}
return yes, nil
}<|fim▁end|> | package repo
|
<|file_name|>demo1_8.py<|end_file_name|><|fim▁begin|>from callback_event import *
def getOddNumber(k, getEvenNumber):
    return 1 + getEvenNumber(k)


def main():
    k = 1
    i = getOddNumber(k, double)
    print(i)
    i = getOddNumber(k, quadruple)
print(i)<|fim▁hole|>if __name__=="__main__":main()<|fim▁end|> | i=getOddNumber(k,lambda x:x*8)
print(i)
|
<|file_name|>view.rs<|end_file_name|><|fim▁begin|>// +--------------------------------------------------------------------------+
// | Copyright 2016 Matthew D. Steele <[email protected]> |
// | |
// | This file is part of System Syzygy. |
// | |
// | System Syzygy is free software: you can redistribute it and/or modify it |
// | under the terms of the GNU General Public License as published by the |
// | Free Software Foundation, either version 3 of the License, or (at your |
// | option) any later version. |
// | |
// | System Syzygy is distributed in the hope that it will be useful, but |
// | WITHOUT ANY WARRANTY; without even the implied warranty of |
// | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
// | General Public License for details. |
// | |
// | You should have received a copy of the GNU General Public License along |
// | with System Syzygy. If not, see <http://www.gnu.org/licenses/>. |
// +--------------------------------------------------------------------------+
use std::cmp;
use super::scenes;
use crate::elements::{FadeStyle, PuzzleCmd, PuzzleCore, PuzzleView};
use crate::gui::{
Action, Canvas, Element, Event, Point, Rect, Resources, Sound, Sprite,
};
use crate::modes::SOLVED_INFO_TEXT;
use crate::save::{AtticState, Game, PuzzleState};
// ========================================================================= //
pub struct View {
core: PuzzleCore<(i32, i32)>,
grid: AtticGrid,
}
impl View {
pub fn new(
resources: &mut Resources,
visible: Rect,
state: &AtticState,
) -> View {
let mut core = {
let fade = (FadeStyle::LeftToRight, FadeStyle::LeftToRight);
let intro = scenes::compile_intro_scene(resources);
let outro = scenes::compile_outro_scene(resources);
PuzzleCore::new(resources, visible, state, fade, intro, outro)
};
core.add_extra_scene(scenes::compile_argony_midscene(resources));
core.add_extra_scene(scenes::compile_mezure_midscene(resources));
View { core, grid: AtticGrid::new(resources, state) }
}
}
impl Element<Game, PuzzleCmd> for View {
fn draw(&self, game: &Game, canvas: &mut Canvas) {
let state = &game.a_light_in_the_attic;
self.core.draw_back_layer(canvas);
self.core.draw_middle_layer(canvas);
self.grid.draw(state, canvas);
self.core.draw_front_layer(canvas, state);
}
fn handle_event(
&mut self,
event: &Event,<|fim▁hole|> ) -> Action<PuzzleCmd> {
let state = &mut game.a_light_in_the_attic;
let mut action = self.core.handle_event(event, state);
if !action.should_stop() {
let subaction = self.grid.handle_event(event, state);
if let Some(&position) = subaction.value() {
action.also_play_sound(Sound::device_rotate());
state.toggle(position);
if state.is_solved() {
self.core.begin_outro_scene();
action = action.and_return(PuzzleCmd::Save);
} else {
self.core.push_undo(position);
}
}
action.merge(subaction.but_no_value());
}
if !action.should_stop() {
self.core.begin_character_scene_on_click(event);
}
action
}
}
impl PuzzleView for View {
fn info_text(&self, game: &Game) -> &'static str {
if game.a_light_in_the_attic.is_solved() {
SOLVED_INFO_TEXT
} else {
INFO_BOX_TEXT
}
}
fn undo(&mut self, game: &mut Game) {
if let Some(position) = self.core.pop_undo() {
game.a_light_in_the_attic.toggle(position);
}
}
fn redo(&mut self, game: &mut Game) {
if let Some(position) = self.core.pop_redo() {
game.a_light_in_the_attic.toggle(position);
}
}
fn reset(&mut self, game: &mut Game) {
self.core.clear_undo_redo();
game.a_light_in_the_attic.reset();
}
fn solve(&mut self, game: &mut Game) {
game.a_light_in_the_attic.solve();
self.core.begin_outro_scene();
}
fn drain_queue(&mut self) {
for (index, enable) in self.core.drain_queue() {
self.grid.toggles[index as usize].set_hilight(enable != 0);
}
}
}
// ========================================================================= //
const LIGHTS_TOP: i32 = 56;
const LIGHTS_LEFT: i32 = 296;
const TOGGLE_MAX_LIGHT_RADIUS: i32 = 12;
pub struct AtticGrid {
toggles: Vec<ToggleLight>,
passives: Vec<PassiveLight>,
}
impl AtticGrid {
pub fn new(resources: &mut Resources, state: &AtticState) -> AtticGrid {
AtticGrid {
toggles: vec![
ToggleLight::new(resources, state, (1, 1), 'C'),
ToggleLight::new(resources, state, (2, 1), 'Z'),
ToggleLight::new(resources, state, (3, 1), 'H'),
ToggleLight::new(resources, state, (4, 1), 'A'),
ToggleLight::new(resources, state, (1, 2), 'U'),
ToggleLight::new(resources, state, (2, 2), 'V'),
ToggleLight::new(resources, state, (3, 2), 'X'),
ToggleLight::new(resources, state, (4, 2), 'S'),
ToggleLight::new(resources, state, (1, 3), 'J'),
ToggleLight::new(resources, state, (2, 3), 'T'),
ToggleLight::new(resources, state, (3, 3), 'I'),
ToggleLight::new(resources, state, (4, 3), 'K'),
ToggleLight::new(resources, state, (1, 4), 'Y'),
ToggleLight::new(resources, state, (2, 4), 'O'),
ToggleLight::new(resources, state, (3, 4), 'L'),
ToggleLight::new(resources, state, (4, 4), 'N'),
],
passives: vec![
PassiveLight::new(resources, state, (1, 0)),
PassiveLight::new(resources, state, (2, 0)),
PassiveLight::new(resources, state, (3, 0)),
PassiveLight::new(resources, state, (4, 0)),
PassiveLight::new(resources, state, (1, 5)),
PassiveLight::new(resources, state, (2, 5)),
PassiveLight::new(resources, state, (3, 5)),
PassiveLight::new(resources, state, (4, 5)),
PassiveLight::new(resources, state, (0, 1)),
PassiveLight::new(resources, state, (0, 2)),
PassiveLight::new(resources, state, (0, 3)),
PassiveLight::new(resources, state, (0, 4)),
PassiveLight::new(resources, state, (5, 1)),
PassiveLight::new(resources, state, (5, 2)),
PassiveLight::new(resources, state, (5, 3)),
PassiveLight::new(resources, state, (5, 4)),
],
}
}
pub fn do_not_show_corner_lights(&mut self) {
for toggle in self.toggles.iter_mut() {
toggle.frame_on = toggle.frame_off.clone();
}
}
}
impl Element<AtticState, (i32, i32)> for AtticGrid {
fn draw(&self, state: &AtticState, canvas: &mut Canvas) {
self.passives.draw(state, canvas);
self.toggles.draw(state, canvas);
}
fn handle_event(
&mut self,
event: &Event,
state: &mut AtticState,
) -> Action<(i32, i32)> {
let mut action = self.toggles.handle_event(event, state);
if !action.should_stop() {
action.merge(self.passives.handle_event(event, state));
}
action
}
}
// ========================================================================= //
struct ToggleLight {
frame_off: Sprite,
frame_on: Sprite,
label: Sprite,
position: (i32, i32),
light_radius: i32,
hilight: bool,
}
impl ToggleLight {
fn new(
resources: &mut Resources,
state: &AtticState,
position: (i32, i32),
label: char,
) -> ToggleLight {
let sprites = resources.get_sprites("light/toggle");
ToggleLight {
frame_off: sprites[0].clone(),
frame_on: sprites[1].clone(),
label: resources.get_font("block").glyph(label).sprite().clone(),
position,
light_radius: if state.is_lit(position) {
TOGGLE_MAX_LIGHT_RADIUS
} else {
0
},
hilight: false,
}
}
fn rect(&self) -> Rect {
let (col, row) = self.position;
Rect::new(LIGHTS_LEFT + 32 * col, LIGHTS_TOP + 32 * row, 32, 32)
}
fn set_hilight(&mut self, hilight: bool) {
self.hilight = hilight;
}
}
impl Element<AtticState, (i32, i32)> for ToggleLight {
fn draw(&self, state: &AtticState, canvas: &mut Canvas) {
let mut canvas = canvas.subcanvas(self.rect());
draw_light(
&mut canvas,
self.light_radius,
TOGGLE_MAX_LIGHT_RADIUS,
self.hilight,
);
let center = canvas.rect().center();
canvas.draw_sprite_centered(&self.label, center);
let frame = if state.is_toggled(self.position) {
&self.frame_on
} else {
&self.frame_off
};
canvas.draw_sprite_centered(frame, center);
}
fn handle_event(
&mut self,
event: &Event,
state: &mut AtticState,
) -> Action<(i32, i32)> {
match event {
&Event::ClockTick => tick_radius(
state.is_lit(self.position),
&mut self.light_radius,
TOGGLE_MAX_LIGHT_RADIUS,
),
&Event::MouseDown(pt)
if self.rect().contains_point(pt) && !state.is_solved() =>
{
Action::redraw().and_return(self.position)
}
_ => Action::ignore(),
}
}
}
// ========================================================================= //
const PASSIVE_MAX_LIGHT_RADIUS: i32 = 11;
struct PassiveLight {
frame: Sprite,
position: (i32, i32),
light_radius: i32,
}
impl PassiveLight {
fn new(
resources: &mut Resources,
state: &AtticState,
position: (i32, i32),
) -> PassiveLight {
let sprites = resources.get_sprites("light/toggle");
let (col, row) = position;
let sprite_index = if col == 5 {
2
} else if row == 0 {
3
} else if col == 0 {
4
} else {
5
};
PassiveLight {
frame: sprites[sprite_index].clone(),
position,
light_radius: if state.is_lit(position) {
PASSIVE_MAX_LIGHT_RADIUS
} else {
0
},
}
}
fn rect(&self) -> Rect {
let (col, row) = self.position;
Rect::new(LIGHTS_LEFT + 32 * col, LIGHTS_TOP + 32 * row, 32, 32)
}
}
impl Element<AtticState, (i32, i32)> for PassiveLight {
fn draw(&self, _: &AtticState, canvas: &mut Canvas) {
let mut canvas = canvas.subcanvas(self.rect());
draw_light(
&mut canvas,
self.light_radius,
PASSIVE_MAX_LIGHT_RADIUS,
false,
);
let center = canvas.rect().center();
canvas.draw_sprite_centered(&self.frame, center);
}
fn handle_event(
&mut self,
event: &Event,
state: &mut AtticState,
) -> Action<(i32, i32)> {
match event {
&Event::ClockTick => tick_radius(
state.is_lit(self.position),
&mut self.light_radius,
PASSIVE_MAX_LIGHT_RADIUS,
),
_ => Action::ignore(),
}
}
}
// ========================================================================= //
fn light_rect(center: Point, radius: i32) -> Rect {
Rect::new(
center.x() - radius,
center.y() - radius,
2 * radius as u32,
2 * radius as u32,
)
}
fn draw_light(canvas: &mut Canvas, radius: i32, max: i32, hilight: bool) {
let center = canvas.rect().center();
if hilight {
canvas.fill_rect((255, 64, 255), light_rect(center, max));
} else {
if radius < max {
canvas.fill_rect((0, 0, 32), light_rect(center, max));
}
if radius > 0 {
canvas.fill_rect((255, 255, 192), light_rect(center, radius));
}
}
}
fn tick_radius<A>(lit: bool, radius: &mut i32, max: i32) -> Action<A> {
if lit {
if *radius < max {
*radius = cmp::min(max, *radius + 3);
return Action::redraw();
}
    } else if *radius > 0 {
        *radius = cmp::max(0, *radius - 3);
        return Action::redraw();
    }
Action::ignore()
}
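
// Illustrative trace: a toggle light switching on animates its radius
// 0 -> 3 -> 6 -> 9 -> 12 across four clock ticks (one `tick_radius` call and
// one redraw per tick) until it reaches TOGGLE_MAX_LIGHT_RADIUS.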
// ========================================================================= //
const INFO_BOX_TEXT: &str = "\
Your goal is to turn all thirty-two lights ON.
$M{Tapp}{Click}ing on one of the lights labelled with a
letter will toggle some of the nearby lights.
The letter labels give a hint as to which other
lights will be toggled by $M{tapp}{click}ing on that light.
$M{Tap}{Click} on a character in the scene to hear their
words of wisdom.";
// ========================================================================= //<|fim▁end|> | game: &mut Game, |
<|file_name|>CustomizationPolicy.java<|end_file_name|><|fim▁begin|>// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.vmwarecloudsimple.models;
import com.azure.resourcemanager.vmwarecloudsimple.fluent.models.CustomizationPolicyInner;
/** An immutable client-side representation of CustomizationPolicy. */
public interface CustomizationPolicy {
/**
* Gets the id property: Customization policy azure id.
*
* @return the id value.
*/
String id();
/**
* Gets the location property: Azure region.
*
* @return the location value.
*/
String location();
<|fim▁hole|> *
* @return the name value.
*/
String name();
/**
     * Gets the type property: the resource type.
*
* @return the type value.
*/
String type();
/**
* Gets the description property: Policy description.
*
* @return the description value.
*/
String description();
/**
* Gets the privateCloudId property: The Private cloud id.
*
* @return the privateCloudId value.
*/
String privateCloudId();
/**
* Gets the specification property: Detailed customization policy specification.
*
* @return the specification value.
*/
CustomizationSpecification specification();
/**
* Gets the typePropertiesType property: The type of customization (Linux or Windows).
*
* @return the typePropertiesType value.
*/
CustomizationPolicyPropertiesType typePropertiesType();
/**
* Gets the version property: Policy version.
*
* @return the version value.
*/
String version();
/**
* Gets the inner com.azure.resourcemanager.vmwarecloudsimple.fluent.models.CustomizationPolicyInner object.
*
* @return the inner object.
*/
CustomizationPolicyInner innerModel();
}<|fim▁end|> | /**
* Gets the name property: Customization policy name. |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub mod intel;<|fim▁end|> | |
<|file_name|>test_user.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""Test class for Users UI
:Requirement: User
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: UI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import random
from fauxfactory import gen_string
from nailgun import entities
from robottelo import ssh
from robottelo.config import settings
from robottelo.constants import (
DEFAULT_ORG,
LANGUAGES,
LDAP_ATTR,
LDAP_SERVER_TYPE,
ROLES,
TIMEZONES,
)
from robottelo.datafactory import (
filtered_datapoint,
invalid_emails_list,
invalid_names_list,
invalid_values_list,
valid_data_list,
valid_emails_list,
)
from robottelo.decorators import (
skip_if_not_set,
stubbed,
tier1,
tier2,
tier3,
)
from robottelo.test import UITestCase
from robottelo.ui.factory import make_user, make_usergroup, set_context
from robottelo.ui.locators import common_locators, locators, tab_locators
from robottelo.ui.session import Session
from robozilla.decorators import skip_if_bug_open
@filtered_datapoint
def valid_strings(len1=10):
"""Generates a list of all the input strings, (excluding html)"""
return [
gen_string('alpha', 5),
gen_string('alpha', len1),
u'{0}-{1}'.format(gen_string('alpha', 4),
gen_string('alpha', 4)),
u'{0}-{1}'.format(gen_string('alpha', 4),
gen_string('alpha', 4)),
u'նորօգտվող-{0}'.format(gen_string('alpha', 2)),
u'新用戶-{0}'.format(gen_string('alpha', 2)),
u'новогопользоват-{0}'.format(gen_string('alpha', 2)),
u'uusikäyttäjä-{0}'.format(gen_string('alpha', 2)),
u'νέοςχρήστης-{0}'.format(gen_string('alpha', 2)),
]
class UserTestCase(UITestCase):
"""Implements Users tests in UI"""
@classmethod
def setUpClass(cls):
super(UserTestCase, cls).setUpClass()
# Check whether necessary plug-ins are installed for server instance
result = ssh.command(
'rpm -qa | grep rubygem-foreman_remote_execution'
)
if result.return_code != 0:
ROLES.remove('Remote Execution Manager')
ROLES.remove('Remote Execution User')
@tier1
def test_positive_create_with_username(self):
"""Create User for all variations of Username
:id: 2acc8c7d-cb14-4eda-98f9-fb379950f2f5
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
with Session(self) as session:
for user_name in valid_strings():
with self.subTest(user_name):
make_user(session, username=user_name)
self.assertIsNotNone(self.user.search(user_name))
@tier1
def test_positive_create_with_first_name(self):
"""Create User for all variations of First Name
:id: dd398cd6-821e-4b0e-a111-22d5a6eeafd8
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
with Session(self) as session:
for first_name in valid_strings():
with self.subTest(first_name):
name = gen_string('alpha')
make_user(session, username=name, first_name=first_name)
self.user.validate_user(name, 'firstname', first_name)
@tier1
def test_positive_create_with_surname(self):
"""Create User for all variations of Surname
:id: 0a2dc093-0cd1-41eb-99cd-79935c74563f
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
with Session(self) as session:
for last_name in valid_strings(50):
with self.subTest(last_name):
name = gen_string('alpha')
make_user(session, username=name, last_name=last_name)
self.user.validate_user(name, 'lastname', last_name)
@tier1
def test_positive_create_with_email(self):
"""Create User for all variations of Email Address
:id: 1c6c0f50-401c-4b7d-9795-97a1be3806f8
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
with Session(self) as session:
for email in valid_emails_list():
with self.subTest(email):
name = gen_string('alpha')
make_user(session, username=name, email=email)
self.user.validate_user(name, 'email', email)
@tier1
def test_positive_create_with_description(self):
"""Create User for all variations of Description
:id: eebeb6d3-c99f-4dc2-991c-0e8268187110
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
with Session(self) as session:
for description in valid_data_list():
with self.subTest(description):
name = gen_string('alpha')
make_user(session, username=name, description=description)
self.user.validate_user(
name, 'description', description, False
)
@tier1
def test_positive_create_with_language(self):
"""Create User for all variations of Language
:id: 1c5581a8-79ae-40a6-8052-f47be2d4c5eb
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
with Session(self) as session:
for language in LANGUAGES:
with self.subTest(language):
name = gen_string('alpha')
make_user(session, username=name, locale=language)
self.user.validate_user(name, 'language', language, False)
@tier1
def test_positive_create_with_password(self):
"""Create User for all variations of Password
:id: 83d6efe0-7526-465c-9c97-5673c7736fc4
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
test_data = valid_strings()
extra_passwords = (
u'foo@!#$^&*( ) {0}'.format(gen_string('alpha', 2)),
u'bar+{{}}|\"?hi {0}'.format(gen_string('alpha', 2)),
)
test_data.extend(extra_passwords)
with Session(self) as session:
for password in test_data:
with self.subTest(password):
name = gen_string('alpha')
make_user(
session,
username=name,
password1=password,
password2=password,
)
self.assertIsNotNone(self.user.search(name))
@tier1
def test_positive_create_admin(self):
"""Create an Admin user
:id: 9bf56045-1026-435c-bf4c-623e160582d5
:expectedresults: Admin User is created successfully
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=user_name, admin=True)
self.assertIsNotNone(self.user.search(user_name))
@tier1
def test_positive_create_with_one_role(self):
"""Create User with one role
:id: 6d6c795e-8b46-4f0f-84e1-f7e22add6173
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
name = gen_string('alpha')
role = entities.Role().create()
with Session(self) as session:
make_user(session, username=name, roles=[role.name], edit=True)
self.user.click(self.user.search(name))
self.user.click(tab_locators['users.tab_roles'])
element = self.user.wait_until_element(
common_locators['entity_deselect'] % role.name)
self.assertIsNotNone(element)
@tier2
def test_positive_create_with_multiple_roles(self):
"""Create User with multiple roles
:id: d3cc4434-25ca-4465-8878-42495390c17b
:expectedresults: User is created successfully
:CaseLevel: Integration
"""
name = gen_string('alpha')
role1 = gen_string('alpha')
role2 = gen_string('alpha')
for role in [role1, role2]:
entities.Role(name=role).create()
with Session(self) as session:
make_user(session, username=name, roles=[role1, role2], edit=True)
self.user.click(self.user.search(name))
self.user.click(tab_locators['users.tab_roles'])
for role in [role1, role2]:
self.assertIsNotNone(
self.user.wait_until_element(
common_locators['entity_deselect'] % role
))
@tier2
def test_positive_create_with_all_roles(self):
"""Create User and assign all available roles to it
:id: 814593ca-1566-45ea-9eff-e880183b1ee3
:expectedresults: User is created successfully
:CaseLevel: Integration
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name, roles=ROLES, edit=True)
self.user.click(self.user.search(name))
self.user.click(tab_locators['users.tab_roles'])
for role in ROLES:
self.assertIsNotNone(self.user.wait_until_element(
common_locators['entity_deselect'] % role))
@tier1
def test_positive_create_with_one_org(self):
"""Create User associated to one Org
:id: 830bc5fc-e773-466c-9b38-4f33a2c1d05e
:expectedresults: User is created successfully
:CaseImportance: Critical
"""
name = gen_string('alpha')
org_name = gen_string('alpha')
entities.Organization(name=org_name).create()
with Session(self) as session:
make_user(
session, username=name, organizations=[org_name], edit=True)
self.user.click(self.user.search(name))
self.user.click(tab_locators['users.tab_organizations'])
element = self.user.wait_until_element(
common_locators['entity_deselect'] % org_name)
self.assertIsNotNone(element)
@tier2
def test_positive_create_with_multiple_orgs(self):
"""Create User associated to multiple Orgs
:id: d74c0284-3995-4a4a-8746-00858282bf5d
:expectedresults: User is created successfully
:CaseLevel: Integration
"""
name = gen_string('alpha')
org_name1 = gen_string('alpha')
org_name2 = gen_string('alpha')
for org_name in [org_name1, org_name2]:
entities.Organization(name=org_name).create()
with Session(self) as session:
set_context(session, org=DEFAULT_ORG)
make_user(
session,
username=name,
organizations=[org_name1, org_name2],
edit=True,
)
self.user.search_and_click(name)
self.user.click(tab_locators['users.tab_organizations'])
for org_name in [org_name1, org_name2, DEFAULT_ORG]:
element = self.user.wait_until_element(
common_locators['entity_deselect'] % org_name)
self.assertIsNotNone(element)
@tier1
def test_positive_create_with_default_org(self):
"""Create User and has default organization associated with it
:id: 3d51dead-9053-427d-8292-c42e87ed6289
:expectedresults: User is created with default Org selected.
:CaseImportance: Critical
"""
name = gen_string('alpha')
org_name = gen_string('alpha')
entities.Organization(name=org_name).create()
with Session(self) as session:
make_user(session, username=name, organizations=[org_name],
edit=True, default_org=org_name)
self.user.search_and_click(name)
self.user.click(tab_locators['users.tab_organizations'])
element = session.nav.wait_until_element(
common_locators['entity_deselect'] % org_name)
self.assertIsNotNone(element)
# Check that default organization value was really chosen
self.assertEqual(org_name, session.nav.find_element(
locators['users.default_org_value']).text)
@tier1
def test_positive_create_with_default_location(self):
"""Create User and associate a default Location.
:id: 952a0be5-d393-49a2-8fd9-f6dfcc31f762
:expectedresults: User is created with default Location selected.
:CaseImportance: Critical
"""
name = gen_string('alpha')
loc_name = gen_string('alpha')
entities.Location(name=loc_name).create()
with Session(self) as session:
make_user(session, username=name, locations=[loc_name],
edit=True, default_loc=loc_name)
self.user.search_and_click(name)
self.user.click(tab_locators['users.tab_locations'])
element = session.nav.wait_until_element(
common_locators['entity_deselect'] % loc_name)
self.assertIsNotNone(element)
# Check that default location value was really chosen
self.assertEqual(loc_name, session.nav.find_element(
locators['users.default_loc_value']).text)
@tier1
def test_negative_create(self):
"""Enter all User creation details and Cancel
:id: 2774be2f-303e-498f-8072-80462f33c52e
:expectedresults: User is not created
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(
session,
username=user_name,
first_name=gen_string('alpha'),
last_name=gen_string('alpha'),
email=u'{0}@example.com'.format(gen_string('numeric')),
submit=False,
)
self.assertIsNone(self.user.search(user_name))
@tier1
def test_negative_create_with_invalid_name(self):
"""Create User with invalid User Name
:id: 31bbe350-0275-4aaf-99ec-3f77bfd4ba00
:expectedresults: User is not created. Appropriate error shown.
:CaseImportance: Critical
"""
with Session(self) as session:
for user_name in invalid_values_list(interface='ui'):
with self.subTest(user_name):
make_user(session, username=user_name)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_create_with_invalid_firstname(self):
"""Create User with invalid FirstName
:id: 21525bf2-4de9-43f0-8c92-b2fad1fdc944
:expectedresults: User is not created. Appropriate error shown.
:CaseImportance: Critical
"""
with Session(self) as session:
# invalid_values_list is not used here because first name is an
# optional field
for first_name in invalid_names_list():
with self.subTest(first_name):
make_user(
session,
username=gen_string('alpha'),
first_name=first_name,
)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_create_with_invalid_surname(self):
"""Create User with invalid Surname
:id: 47d9e8be-3b29-4a56-85d7-898145b5b034
:expectedresults: User is not created. Appropriate error shown.
:CaseImportance: Critical
"""
with Session(self) as session:
# invalid_values_list is not used here because sur name is an
# optional field
for last_name in invalid_names_list():
with self.subTest(last_name):
make_user(
session,
username=gen_string('alpha'),
last_name=last_name,
)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_create_with_invalid_emails(self):
"""Create User with invalid Email Address
:id: 36511b82-e070-41ea-81fa-6e29faa9da1c
:expectedresults: User is not created. Appropriate error shown.
:CaseImportance: Critical
"""
with Session(self) as session:
for email in invalid_emails_list():
with self.subTest(email):
name = gen_string('alpha')
make_user(session, username=name, email=email)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_create_with_blank_auth(self):
"""Create User with blank value for 'Authorized by' field
:id: 68f670ed-ac6e-4052-889c-6671d659e510
:expectedresults: User is not created. Appropriate error shown.
:CaseImportance: Critical
"""
with Session(self) as session:
make_user(session, username=gen_string('alpha'), authorized_by='')
self.assertIsNotNone(
self.user.wait_until_element(common_locators['haserror']))
@tier1
def test_negative_create_with_wrong_pass_confirmation(self):
"""Create User with non-matching values in Password and verify
:id: f818e5fc-b378-4bc7-afa8-18b23ee05053
:expectedresults: User is not created. Appropriate error shown.
:CaseImportance: Critical
"""
with Session(self) as session:
make_user(
session,
username=gen_string('alpha'),
password1=gen_string('alpha'),
password2=gen_string('alpha'),
)
self.assertIsNotNone(
self.user.wait_until_element(common_locators['haserror']))
@tier1
def test_positive_search_by_usergroup(self):
"""Create few users and assign them to usergroup. Perform search for
users by usergroup they are assigned to
:id: dceebf68-8d82-4214-9829-350830a78cdd
:expectedresults: Necessary users can be found and no error raised
:BZ: 1395667
:CaseImportance: Critical
"""
group_name = gen_string('alpha')
org = entities.Organization().create()
# Create new users
user_names = [
entities.User(organization=[org]).create().login
for _ in range(2)
]
with Session(self) as session:
make_usergroup(
session,
name=group_name,
users=user_names,
org=org.name,
)
for user_name in user_names:
self.assertIsNotNone(
self.user.search(
user_name,
_raw_query='usergroup = {}'.format(group_name)
)
)
self.assertIsNone(
self.user.wait_until_element(
common_locators['haserror'], timeout=3)
)
@tier1
def test_positive_update_username(self):
"""Update Username in User
:id: 4ecb2816-9bef-4089-86a0-02d7d065cdb1
:expectedresults: User is updated successfully
:CaseImportance: Critical
"""
name = gen_string('alpha')
password = gen_string('alpha')
with Session(self) as session:
# Role Site meaning 'Site Manager' here
make_user(
session,
username=name,
password1=password,
password2=password,
edit=True,
roles=['Site'],
)
for new_username in valid_strings():
with self.subTest(new_username):
with Session(self):
self.user.update(name, new_username)
self.assertIsNotNone(
self.user.search(new_username))
self.login.logout()
self.login.login(new_username, password)
self.assertTrue(self.login.is_logged())
name = new_username # for next iteration
@tier1
def test_positive_update_firstname(self):
"""Update first name in User
:id: 03ef8a7f-2bf1-4314-b0cd-a7a6acfc17ea
:expectedresults: User is updated successfully
:CaseImportance: Critical
"""
first_name = gen_string('alpha')
new_first_name = gen_string('alpha')
username = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username, first_name=first_name)
self.user.update(username, first_name=new_first_name)
self.user.validate_user(username, 'firstname', new_first_name)
@tier1
def test_positive_update_surname(self):
"""Update surname in User
:id: 0326d221-28b0-4a6b-934e-b67ee6c9f696
:expectedresults: User is updated successfully
:CaseImportance: Critical
"""
last_name = gen_string('alpha')
new_last_name = gen_string('alpha')
username = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username, last_name=last_name)
self.user.update(username, last_name=new_last_name)
self.user.validate_user(username, 'lastname', new_last_name)
@tier1
def test_positive_update_email(self):
"""Update Email Address in User
:id: e48314b7-2a49-48ec-896d-af7bf427b1c4
:expectedresults: User is updated successfully
:CaseImportance: Critical
"""
email = u'{0}@example.com'.format(gen_string('alpha'))
new_email = u'{0}@myexample.com'.format(gen_string('alpha'))
username = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username, email=email)
self.user.update(username, email=new_email)
self.user.validate_user(username, 'email', new_email)
@tier1
def test_positive_update_description(self):
"""Update Description in User
:id: f08ee305-0e0b-4df0-82d9-d10edcfa66c0
:expectedresults: User is updated successfully
:CaseImportance: Critical
"""
username = gen_string('alpha')
description = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username, description=description)
for new_description in valid_data_list():
with self.subTest(new_description):
self.user.update(username, description=new_description)
self.user.validate_user(
username, 'description', new_description, False
)
@tier1
def test_positive_update_language(self):
"""Update Language in User
:id: 64b6a90e-0d4c-4a55-a4bd-7347010e39f2
:expectedresults: User is updated successfully
:CaseImportance: Critical
"""
locale = random.choice(list(LANGUAGES.keys()))
username = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username)
self.user.update(username, locale=locale)
self.user.validate_user(username, 'language', locale, False)
@tier1
def test_positive_update_password(self):
"""Update password for a user
:id: db57c3bc-4fae-4ee7-bf6d-8e0bcc7fd55c
:expectedresults: User password is updated successfully
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
new_password = gen_string('alpha')
with Session(self) as session:
# Role 'Site' meaning 'Site Manager' here
make_user(session, username=user_name, edit=True, roles=['Site'])
self.user.update(
user_name,
new_password=new_password,
password_confirmation=new_password,
)
self.login.logout()
self.login.login(user_name, new_password)
self.assertTrue(self.login.is_logged())
@tier1
def test_positive_update_to_non_admin(self):
"""Convert an user from an admin user to non-admin user
:id: b41cbcf8-d819-4daa-b217-a4812541dca3
:expectedresults: User is updated and has proper admin role value
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=user_name, admin=True)
self.assertIsNotNone(self.user.search(user_name))
self.assertFalse(
self.user.user_admin_role_toggle(user_name, False))
@tier1
def test_positive_update_to_admin(self):
"""Convert a user to an admin user
:id: d3cdda62-1384-4b49-97a3-0c66764583bb
:expectedresults: User is updated and has proper admin role value
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=user_name, admin=False)
self.assertIsNotNone(self.user.search(user_name))
self.assertTrue(self.user.user_admin_role_toggle(user_name, True))
@tier1
def test_positive_update_role(self):
"""Update role for a user
:id: 2a13529c-3863-403b-a319-9569ca1287cb
:expectedresults: User role is updated
:CaseImportance: Critical
"""
strategy, value = common_locators['entity_deselect']
name = gen_string('alpha')
role_name = entities.Role().create().name
with Session(self) as session:
make_user(session, username=name)
self.user.click(self.user.search(name))
self.user.click(tab_locators['users.tab_roles'])
self.assertIsNone(
self.user.wait_until_element((strategy, value % role_name)))
self.user.update(name, new_roles=[role_name])
self.user.click(self.user.search(name))
self.user.click(tab_locators['users.tab_roles'])
self.assertIsNotNone(
self.user.wait_until_element((strategy, value % role_name)))
@tier2
def test_positive_update_with_multiple_roles(self):
"""Update User with multiple roles
:id: 127fb368-09fd-4f10-8319-566a1bcb5cd2
:expectedresults: User is updated successfully
:CaseLevel: Integration
"""
name = gen_string('alpha')
role_names = [
entities.Role().create().name
for _ in range(3)
]
with Session(self) as session:
make_user(session, username=name)
self.user.update(name, new_roles=role_names)
self.user.search_and_click(name)
self.user.click(tab_locators['users.tab_roles'])
for role in role_names:
self.assertIsNotNone(
self.user.wait_until_element(
common_locators['entity_deselect'] % role)
)
@tier2
def test_positive_update_with_all_roles(self):
"""Update User with all roles
:id: cd7a9cfb-a700-45f2-a11d-bba6be3c810d
:expectedresults: User is updated successfully
:CaseLevel: Integration
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name)
self.user.update(name, new_roles=ROLES)
self.user.search_and_click(name)
self.user.click(tab_locators['users.tab_roles'])
for role in ROLES:
self.assertIsNotNone(
self.user.wait_until_element(
common_locators['entity_deselect'] % role)
)
@tier1
def test_positive_update_org(self):
"""Assign a User to an Org
:id: d891e54b-76bf-4537-8eb9-c3f8832e4c2c
:expectedresults: User is updated successfully
:CaseImportance: Critical
"""
name = gen_string('alpha')
org_name = gen_string('alpha')
entities.Organization(name=org_name).create()
with Session(self) as session:
make_user(session, username=name)
self.user.update(name, new_organizations=[org_name])
self.user.search_and_click(name)
self.user.click(tab_locators['users.tab_organizations'])
element = self.user.wait_until_element(
common_locators['entity_deselect'] % org_name)
self.assertIsNotNone(element)
@tier2
def test_positive_update_orgs(self):
"""Assign a User to multiple Orgs
:id: a207188d-1ad1-4ff1-9906-bae1d91104fd
:expectedresults: User is updated
:CaseLevel: Integration
"""
name = gen_string('alpha')
org_names = [
entities.Organization().create().name
for _ in range(3)
]
with Session(self) as session:
make_user(session, username=name)
self.user.update(name, new_organizations=org_names)
self.user.click(self.user.search(name))
self.user.click(tab_locators['users.tab_organizations'])
for org in org_names:
self.assertIsNotNone(
self.user.wait_until_element(
common_locators['entity_deselect'] % org)
)
@tier1
def test_negative_update_username(self):
"""Update invalid Username in an User
:id: 7019461e-13c6-4761-b3e9-4df81abcd0f9
:expectedresults: User is not updated. Appropriate error shown.
:CaseImportance: Critical
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name)
for new_user_name in invalid_names_list():
with self.subTest(new_user_name):
self.user.update(name, new_username=new_user_name)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_update_firstname(self):
"""Update invalid Firstname in an User
:id: 1e3945d1-5b47-45ca-aff9-3ddd44688e6b
:expectedresults: User is not updated. Appropriate error shown.
:CaseImportance: Critical
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name)
for new_first_name in invalid_names_list():
with self.subTest(new_first_name):
self.user.update(name, first_name=new_first_name)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_update_surname(self):
"""Update invalid Surname in an User
:id: 14033c1f-4c7e-4ee5-8ffc-76c4dd672cc1
:expectedresults: User is not updated. Appropriate error shown.
:CaseImportance: Critical
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name)
for new_surname in invalid_names_list():
with self.subTest(new_surname):
self.user.update(name, last_name=new_surname)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_update_email(self):
"""Update invalid Email Address in an User
:id: 6aec3816-16ca-487a-b0f1-a5c1fbc3e0a3
:expectedresults: User is not updated. Appropriate error shown.
:CaseImportance: Critical
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name)
for new_email in invalid_emails_list():
with self.subTest(new_email):
self.user.update(name, email=new_email)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_update_password(self):
"""Update different values in Password and verify fields
:id: ab4a5dbf-70c2-4adc-b948-bc350329e166
:Steps:
1. Create User
2. Update the password by entering different values in Password and
verify fields
:expectedresults: User is not updated. Appropriate error shown.
:CaseImportance: Critical
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name)
self.user.update(
name,
new_password=gen_string('alphanumeric'),
password_confirmation=gen_string('alphanumeric'),
)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_update_password_empty_confirmation(self):
"""Update user password without providing confirmation value
:id: c2b569c9-8120-4125-8bfe-61324a881395
:Steps:
1. Create User
2. Update the password by entering value only in Password field
:expectedresults: User is not updated. Appropriate error shown.
:CaseImportance: Critical
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name)
self.user.update(
name,
new_password=gen_string('alphanumeric'),
password_confirmation='',
)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
@tier1
def test_negative_update(self):
"""[UI ONLY] Attempt to update User info and Cancel
:id: 56c8ea13-4add-4a51-8428-9d9f9ddde33e
:expectedresults: User is not updated.
:CaseImportance: Critical
"""
new_first_name = gen_string('alpha')
new_last_name = gen_string('alpha')
username = gen_string('alpha')
new_username = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username)
self.user.update(
username,
new_username=new_username,
first_name=new_first_name,
last_name=new_last_name,
submit=False,
)
self.assertIsNotNone(self.user.search(username))
self.assertIsNone(self.user.search(new_username))
@tier1
def test_positive_delete_user(self):
"""Delete an existing User
:id: 49534eda-f8ea-404e-9714-a8d0d2210979
:expectedresults: User is deleted successfully
:CaseImportance: Critical
"""
user_name = gen_string('alphanumeric')
with Session(self) as session:
make_user(session, username=user_name)
self.user.delete(user_name)
@tier1
def test_positive_delete_admin(self):
"""Delete an admin user
:id: afda171a-b464-461f-93ce-96d770935200
:expectedresults: User is deleted
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=user_name, admin=True)
self.assertIsNotNone(self.user.search(user_name))
self.user.delete(user_name)
@tier1
def test_negative_delete_user(self):
"""[UI ONLY] Attempt to delete an User and cancel
:id: 43aed0c0-a3c3-4044-addc-910dc29e4f37
:expectedresults: User is not deleted
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=user_name)
self.assertIsNotNone(self.user.search(user_name))
self.user.delete(user_name, really=False)
@stubbed()
@tier3
def test_positive_end_to_end(self):
"""Create User and perform different operations
:id: 57f7054e-2865-4ab8-bc2b-e300a8dacee5
:Steps:
1. Create User
2. Login with the new user
3. Upload Subscriptions
4. Provision Systems
5. Add/Remove Users
6. Add/Remove Orgs
7. Delete the User
:expectedresults: All actions passed
:caseautomation: notautomated
:CaseLevel: System
"""
@stubbed()
@tier3
def test_positive_end_to_end_without_org(self):
"""Create User with no Org assigned and attempt different
operations
:id: 36b6d667-59cc-4442-aa40-c029bdb2b534
:Steps:
1. Create User. Do not assign any Org
2. Login with the new user
3. Attempt to Upload Subscriptions
4. Attempt to Provision Systems
5. Attempt to Add/Remove Users
6. Attempt to Add/Remove Orgs
:expectedresults: All actions failed since the User is not assigned to
any Org
:caseautomation: notautomated
:CaseLevel: System
"""
@tier1
def test_positive_set_timezone(self):
"""Set a new timezone for the user
:id: 3219c245-2914-4412-8df1-72e041a58a9f
:Steps:
1. Navigate to Administer -> Users
2. Click on the User
3. Select the Timezone Dropdown list
4. Try to apply some timezone
:expectedresults: User should be able to change timezone
:CaseImportance: Critical
"""
with Session(self) as session:
for timezone in TIMEZONES:
with self.subTest(timezone):
name = gen_string('alpha')
make_user(session, username=name, timezone=timezone)
self.user.validate_user(name, 'timezone', timezone, False)
@stubbed()
@tier1
def test_positive_dashboard_shows_new_time(self):
"""Check if the Dashboard shows the time according to the new
timezone set
:id: c2d80855-631c-46f6-8950-c296df8c0cbe
:Steps:
1. Change the timezone for a user in Administer -> Users tab
2. Navigate to Monitor -> Dashboard
3. The left corner displays time according to the new timezone set
:expectedresults: Dashboard UI displays new time based on the new
timezone
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@tier2
def test_positive_logfiles_shows_new_time(self):
"""Check if the logfiles reflect the new timezone set by
the user
:id: b687182b-9d4f-4ff4-9f19-1b6ae3c126ad
:Steps:
1. Change the timezone for the user in Administer -> Users tab
2. Try to modify content view or environment so that the changes
are reflected in log file
3. Check if log file shows the new timezone set
:expectedresults: Logfiles display time according to changed timezone
:caseautomation: notautomated
:CaseLevel: Integration
"""
@stubbed()
@tier2
def test_positive_mails_for_new_timezone(self):
"""Check if the mails are received according to new
timezone set by the user
:id: ab34dd9d-4fc1-43f1-b40a-b0ebf0802887
:Steps:
1. Change the timezone for the user in Administer -> Users tab
2. Navigate to Administer -> Users tab
3. Make sure 'Mail Enabled' is checked under Email Preferences
4. Send daily/weekly/monthly mails
:expectedresults: Emails are sent according to new timezone set
:caseautomation: notautomated
:CaseLevel: Integration
"""
@stubbed()
@tier1
def test_positive_parameters_tab_access_with_edit_params(self):
"""Check if non admin users with edit_params permission can access
parameters tab on organization details screen
:id: 086ea8bf-2219-425e-acf4-d2ba59a77ee9
:BZ: 1354572
:Steps:
1. Create a Role in Administer -> Roles
2. On Role creation set Resource type to Parameters
3. On Role creation add permission edit_params
4. On Role creation set Resource type to Organization
5. On Role creation add permissions edit_organizations and
view_organizations
6. Create a non admin user in Administer -> Users
7. Add previous role to this user
8. Login with previous user credentials
9. Go to Organization -> Manage Organizations
10. Choose Default Organization
11. Assert "Parameters" tab is present
:expectedresults: Parameters tab visible to users with edit_params
permission
:caseautomation: notautomated
:CaseImportance: Critical
"""
@stubbed()
@tier1
def test_negative_parameters_tab_access_without_edit_params(self):
"""Check if non admin users without edit_params permission can not
access parameters tab on organization details screen
:id: eac65b64-16d4-4df5-8402-e58ddb31050d
:BZ: 1354572
:Steps:
1. Create a Role in Administer -> Roles
2. On Role creation set Resource type to Organization
3. On Role creation add permissions edit_organizations and
view_organizations
4. Create a non admin user in Administer -> Users
5. Add previous role to this user
6. Login with previous user credentials
7. Go to Organization -> Manage Organizations
8. Choose Default Organization
9. Assert "Parameters" tab is not present
:expectedresults: Parameters tab not visible to users with no
edit_params permission
:caseautomation: notautomated
:CaseImportance: Critical
"""
class ActiveDirectoryUserTestCase(UITestCase):
"""Implements Active Directory feature tests for user in UI."""
@classmethod
@skip_if_not_set('ldap')
def setUpClass(cls): # noqa
super(ActiveDirectoryUserTestCase, cls).setUpClass()
cls.ldap_user_name = settings.ldap.username
cls.ldap_user_passwd = settings.ldap.password
cls.base_dn = settings.ldap.basedn
cls.group_base_dn = settings.ldap.grpbasedn
cls.ldap_hostname = settings.ldap.hostname
cls.usergroup_name = gen_string('alpha')
authsource_attrs = entities.AuthSourceLDAP(
onthefly_register=True,
account=cls.ldap_user_name,
account_password=cls.ldap_user_passwd,
base_dn=cls.base_dn,
groups_base=cls.group_base_dn,
attr_firstname=LDAP_ATTR['firstname'],
attr_lastname=LDAP_ATTR['surname'],
attr_login=LDAP_ATTR['login_ad'],
server_type=LDAP_SERVER_TYPE['API']['ad'],
attr_mail=LDAP_ATTR['mail'],
name=gen_string('alpha'),
host=cls.ldap_hostname,
tls=False,
port='389',
).create()
cls.ldap_server_name = authsource_attrs.name
@tier2
def test_positive_create_in_ldap_mode(self):
"""Create User in ldap mode
:id: 0668b2ca-831e-4568-94fb-80e45dd7d001
:expectedresults: User is created without specifying the password
:CaseLevel: Integration
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(
session,
username=user_name,
authorized_by='LDAP-' + self.ldap_server_name,
password1='',
password2='',
)
self.assertIsNotNone(self.user.search(user_name))
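# Illustrative sanity check (not part of robottelo): the LDAP settings
# consumed by the auth source above can be verified from a shell before
# running the test; hostname, bind DN, password and base DN are the
# placeholders from settings.ldap:
#
#   ldapsearch -x -H ldap://<ldap_hostname>:389 \
#       -D "<ldap_user_name>" -w "<ldap_user_passwd>" -b "<base_dn>"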
class SshKeyInUserTestCase(UITestCase):
"""Implements the SSH Key in User Tests"""
@stubbed
@tier2
def test_positive_ssh_key_tab_presence(self):
"""SSH keys tab presence in User details page
:id: a0c77cc1-0484-4290-b4b3-87ab3d0bde56
:steps:
1. Go to Administer -> Users
2. Attempt to create new user from Super admin
3. Verify SSH Keys tab in user details page
:expectedresults: New user details page should have a tab of SSH Keys
"""
@stubbed
@tier2
def test_positive_ssh_key_tab_presence_Super_Admin(self):
"""SSH keys tab presence in Super Admin details page
:id: 72dc8c6e-3627-436a-adf3-f32d09b2f1c7
:steps:
1. Go to Administer -> Users
2. Edit Super Admin user details page
3. Verify SSH Keys tab in Super Admin user details page
:expectedresults: Super Admin user details page should have a tab of
SSH Keys
"""
@stubbed
@tier1
@skip_if_bug_open('bugzilla', 1465389)
def test_positive_create_ssh_key(self):
"""SSH Key can be added while creating a new user
:id: e608f1b2-2ca4-4c32-8a70-47bed63e8b09
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Add SSH Key in SSH Keys tab before saving the user
4. Save the new User
:expectedresults: New user should be added with SSH key
"""
@stubbed
@tier1
def test_positive_create_ssh_key_super_admin(self):
"""SSH Key can be added to Super Admin user details page
:id: 31388483-35f5-4828-82e9-9305a76e712d
:steps:
1. Go to Administer -> Users
2. Edit Super Admin user details page
3. Add SSH Key in SSH Keys tab
4. Save the changes of Super Admin user
:expectedresults: Super Admin should be saved with SSH key
"""
@stubbed
@tier1
@skip_if_bug_open('bugzilla', 1465389)
def test_positive_create_multiple_ssh_keys(self):
"""Multiple SSH Keys can be added while creating a new user
:id: 6552194f-63ff-4a6e-9784-5b3dc1772fd5
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Add multiple SSH Keys in SSH Keys tab before saving the user
4. Save the new User
:expectedresults: New user should be added with multiple SSH keys
"""
@stubbed
@tier1
def test_positive_create_multiple_ssh_keys_super_admin(self):
"""Multiple SSH Keys can be added to Super admin user details page
:id: 267cea76-0b75-4b37-a04f-dc3659cab409
:steps:
1. Go to Administer -> Users
2. Edit Super Admin user details page
3. Add multiple SSH Keys in SSH Keys tab
4. Save the changes of Super Admin user
:expectedresults: Super Admin should be saved with multiple SSH keys
"""
@stubbed
@tier1
def test_negative_create_ssh_key(self):
"""Invalid ssh key can not be added in User details page
:id: a815cd8b-142e-4743-b95a-c922def193f6
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Attempt to add invalid string as SSH Key in SSH Keys tab
before saving the user
:expectedresults: Invalid SSH key should not be added in user details
page
"""
@stubbed
@tier1
def test_negative_create_invalid_ssh_key(self):
""""Invalid SSH key can not be added to user and corresponding error
notification displays
:id: ea613925-75a0-421c-b02b-e61ce2fe0d84
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Attempt to add invalid string as SSH Key in SSH Keys tab
before saving the user, e.g. 'blabla'
:expectedresults:
1. SSH Key should not be added to user
2. Satellite should show 'Fingerprint could not be generated'
error notification
"""
@stubbed
@tier1
def test_negative_create_too_long_length_ssh_key(self):
"""SSH key with too long length can not be added to user and
corresponding error notification displays
:id: 2a3bb547-a073-4de6-85a7-20ace85992a2
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Attempt to add invalid length of SSH Key in SSH Keys tab
before saving the user
:expectedresults:
1. SSH Key should not be added to user
2. Satellite should show 'Length could not be calculated'
error notification
"""
@stubbed
@tier3
def test_positive_ssh_key_to_pxe_discovered_host(self):
"""Satellite automatically adds SSH key of user to the provisioned host
that is discovered with PXE
:id: 86598125-6ca1-4147-920f-b5e2e9ad8ccd
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP, DNS and TFTP
3. Enable foreman plugin discovery
4. Install and enable discovery service plugin.
5. Update PXELinux global default template with satellite
capsule url and ONTIMEOUT to discovery
6. Build the PXE default template from Hosts -> Provisioning
templates
7. Update Satellite Kickstart Default provisioning template,
inherit 'create_users' snippet
8. Create Host Group to provision the host
9. Boot a blank bare metal host in a network<|fim▁hole|> 12. Choose to provision and choose name, taxonomies and
Hostgroup
13. Check IP, MAC fields and Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
14. Check Host -> Operating System details populated
automatically
15. Resolve PXELinux details page- 'kickstart default PXELinux'
and provision Template - 'Satellite Kickstart Default'
16. Submit these changes for provisioning and wait for
provisioning to complete.
17. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned
host
"""
@stubbed
@tier3
def test_positive_ssh_key_to_pxeless_provisioned_host(self):
"""Satellite automatically adds SSH key of user to the PXELess
provisioned host
:id: edbbafbd-5a82-4f27-ab93-2aa88d3a3353
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP and DNS
3. Update Satellite Kickstart Default provisioning template,
inherit 'create_users' snippet
4. Create Host Group to provision the host
5. Attempt to create a new host from Hosts -> New Host
6. Choose name, taxonomies and Hostgroup
7. Check IP, Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
8. Enter the bare metal host mac in interface tab
9. Check Host -> Operating System details populated
automatically
10. Resolve bootdisk template-'Boot disk iPXE - host',
kexec template='Discovery Red Hat kexec' and
provision Template - 'Satellite Kickstart Default'.
11. Submit these changes
12. After creating this host entry, Download the Generic boot disk
from this host -> boot disk -> Generic Image
13. Flash the Generic boot disk in some bootable device
14. Boot the Provisionable host from above media device
15. Wait for host to connect to Satellite, start installation,
finish installation, post installation configurations
16. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned host
"""
@stubbed
@tier3
def test_positive_ssh_key_to_pxeless_discovered_host(self):
"""Satellite automatically adds SSH key of user to the provisioned
host that is discovered with PXELess
:id: a3a7fcd8-9efd-4863-ac81-48c1a2cdb61b
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP, DNS and TFTP
3. Update Satellite Kickstart Default provisioning template,
inherit 'create_users' snippet
4. Create Host Group to provision the host
5. Enable foreman plugin discovery through satellite installer
6. Install and enable discovery service plugin
7. Flash the discovery ISO in some bootable device
8. Boot the provisionable host from bootable device
9. In host, Choose Discovery with DHCP
10. Choose primary network interface that connects to the satellite
11. Provide integrated capsule/external capsule server url
12. Set connection type to Proxy
13. Don't provide any custom facts
14. Wait for satellite to discover the host in Hosts -> Discovered
Hosts
15. Choose to provision and choose name, taxonomies and Hostgroup
16. Check IP, MAC fields and Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
17. Check Host -> Operating System details populated
automatically
18. Resolve kexec Template- 'Discovery Red Hat kexec' and
provision Template - 'Satellite Kickstart Default'.
19. Submit these changes for provisioning and wait for
provisioning to complete.
20. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned host
"""
@stubbed
@tier3
def test_positive_ssh_key_in_network_based_provisioned_host(self):
"""Satellite automatically adds SSH key of user onto the host
provisioned using network based method
:id: ff2efc2a-02d0-4e2e-90d4-be2562fe384e
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP, DNS
3. Create libvirt/RHEVM/VMWare Compute Resource on satellite
4. Create suitable compute profile for the chosen CR
5. Update Satellite Kickstart Default provisioning template,
inherit 'create_users' snippet
6. Create Host Group for provisioning the host
7. Attempt to create a new host from Hosts -> New Host
8. Choose name, taxonomies and Hostgroup
9. Select the chosen (in step 3) CR in 'deploy on' option
10. Check IP value and Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
11. Leave MAC Address blank to be assigned by CR
12. Check Host -> Operating System details populated
automatically, also choose Network Based provisioning
13. Choose appropriate Virtual Machine details
14. Submit these changes for provisioning and wait for provisioning
to complete
15. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned host
"""
@stubbed
@tier3
def test_positive_ssh_key_in_image_based_provisioned_host(self):
"""Satellite automatically adds SSH key of user onto the host
provisioned using image based method
:id: 470f7142-c805-43c3-b0cc-02bd380f098b
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP, DNS
3. Create EC2/Openstack/VMware/libvirt/RHEV Compute Resource on
satellite
4. Create suitable compute profile for the chosen CR
5. Update Satellite Kickstart Default Finish provisioning template,
inherit 'create_users' snippet
6. Create Host Group for provisioning the host
7. Attempt to create a new host from Hosts -> New Host
8. Choose name, taxonomies and Hostgroup
9. Select the chosen (in step 3) CR in 'deploy on' option
10. Check IP value and Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
11. Leave MAC Address blank to be assigned by CR
12. Check Host -> Operating System details populated
automatically, also choose Image Based provisioning
13. Choose appropriate Virtual Machine details
14. Submit these changes for provisioning and wait for provisioning
to complete
15. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned host
"""
@stubbed
@tier3
def test_negative_invalid_ssh_key_access_to_provisioned_host(self):
""" Satellite user cannot password-less access with invalid ssh key
:id: 13f2d109-d15e-4fee-ae49-7ce3b27efd17
:steps:
1. Create user with an SSH public key which does not match the
private key of the user (i.e. a wrong public key)
2. Update Satellite Kickstart Default template, inherit
'create_users' snippet
3. Provision a host on libvirt CR with above user
4. Attempt to ssh access the provisioned host from satellite server
:expectedresults: User should not be able to get password-less access to
the provisioned host having a wrong, non-matching public key
"""
@stubbed
@tier3
def test_positive_multiple_key_types_access_to_provisioned_host(self):
""" Satellite automatically adds supported multiple type of SSH key of
user onto the host provisioned
:id: 1532df12-e0a5-4da6-9e28-5d2eba98f0af
:steps:
1. Create user with any supported type of SSH key (rsa, dsa,
ed25519, ecdsa)
2. Update Satellite Kickstart Default template, inherit
'create_users' snippet
3. Provision a host on libvirt CR with above user
4. Attempt to ssh access the provisioned host from satellite server
:expectedresults:
1. User should be able to password-less access to provisioned
host using any supported type of ssh keys
2. Satellite should automatically add any supported type of SSH key
to provisioned host
"""
@stubbed
@tier1
def test_positive_delete_ssh_key(self):
"""Satellite Admin can delete ssh key from user
:id: e4df559d-3f01-4dfb-a847-ae5f7d91ef90
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Add SSH Key in SSH Keys tab before saving the user
4. Save the new User
5. Edit the user created above and delete the ssh-key from user
:expectedresults: SSH key should be deleted from user
"""<|fim▁end|> | 10. Wait for successful Discovery Status on bare metal host
11. In Hosts -> Discovered Hosts, find the above discovered
host |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
class Snippet(models.Model):<|fim▁hole|>
class Meta:
pass
def __unicode__(self):
return self.snippet<|fim▁end|> | """A text snippet. Not meant for use by anyone other than a designer"""
name = models.CharField(max_length=255)
snippet = models.TextField(blank=True) |
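# Usage sketch (not part of the original file): creating and reading a
# snippet through the ORM.
#
#   Snippet.objects.create(name='footer', snippet='(c) Example Corp')
#   Snippet.objects.get(name='footer').snippet  # -> '(c) Example Corp'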
<|file_name|>entity-action.ts<|end_file_name|><|fim▁begin|>import { BreezeEnum} from './enum';
/** EntityAction is an 'Enum' containing all of the valid actions that can occur to an 'Entity'.
*/
export class EntityAction extends BreezeEnum {
/** Entity was attached via an AttachEntity call. */
static Attach = new EntityAction( { _isAttach: true });
/** Entity was attached as a result of a query. */
static AttachOnQuery = new EntityAction({ _isAttach: true});
/** Entity was attached as a result of an import. */
static AttachOnImport = new EntityAction({ _isAttach: true});
/** Entity was detached */
static Detach = new EntityAction( { _isDetach: true });
/** Properties on the entity were merged as a result of a query. */
static MergeOnQuery = new EntityAction({ _isModification: true });
/** Properties on the entity were merged as a result of an import. */
static MergeOnImport = new EntityAction({ _isModification: true });
/** Properties on the entity were merged as a result of a save */
static MergeOnSave = new EntityAction({ _isModification: true });
/** A property on the entity was changed. */
static PropertyChange = new EntityAction({ _isModification: true});
/** The EntityState of the entity was changed. */
static EntityStateChange = new EntityAction();
/** AcceptChanges was called on the entity, or its entityState was set to Unmodified. */
static AcceptChanges = new EntityAction();
/** RejectChanges was called on the entity. */
static RejectChanges = new EntityAction({ _isModification: true});
/** The EntityManager was cleared. All entities detached. */
static Clear = new EntityAction({ _isDetach: true});
/** @hidden @internal */
_isAttach?: boolean;
/** @hidden @internal */
_isDetach?: boolean;
/** @hidden @internal */
_isModification: boolean;
/** Is this an 'attach' operation? ( Attach, AttachOnQuery or AttachOnImport) */
isAttach() {
return !!this._isAttach;
}
/** Is this a 'detach' operation? ( Detach, Clear) */
isDetach() {
return !!this._isDetach;
}<|fim▁hole|> }
}
EntityAction.resolveSymbols();<|fim▁end|> | /** Is this a 'modification' operation? ( PropertyChange, MergeOnQuery, MergeOnSave, MergeOnImport, RejectChanges) */
isModification() {
return !!this._isModification; |
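// Usage sketch (an assumption, not part of this module): classify an action
// reported by an entityChanged event using the predicate helpers above.
export function describeAction(action: EntityAction): string {
  if (action.isAttach()) { return 'attach'; }
  if (action.isDetach()) { return 'detach'; }
  if (action.isModification()) { return 'modification'; }
  return 'other';
}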
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for jobboardscraper project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""<|fim▁hole|>
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jobboardscraper.settings")
application = get_wsgi_application()<|fim▁end|> |
import os |
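# Typical invocation (illustrative; bind address and port are assumptions):
#
#   gunicorn jobboardscraper.wsgi:application --bind 0.0.0.0:8000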
<|file_name|>balancer_conn_wrappers_test.go<|end_file_name|><|fim▁begin|>/*
*
* Copyright 2019 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package grpc
import (
"fmt"
"testing"
"google.golang.org/grpc/balancer"
"google.golang.org/grpc/connectivity"
"google.golang.org/grpc/resolver"
"google.golang.org/grpc/resolver/manual"
)
var _ balancer.V2Balancer = &funcBalancer{}
type funcBalancer struct {
updateClientConnState func(s balancer.ClientConnState) error
}
func (*funcBalancer) HandleSubConnStateChange(balancer.SubConn, connectivity.State) {
panic("unimplemented") // v1 API
}
func (*funcBalancer) HandleResolvedAddrs([]resolver.Address, error) {
panic("unimplemented") // v1 API
}
func (b *funcBalancer) UpdateClientConnState(s balancer.ClientConnState) error {
return b.updateClientConnState(s)
}
func (*funcBalancer) ResolverError(error) {}
func (*funcBalancer) UpdateSubConnState(balancer.SubConn, balancer.SubConnState) {
panic("unimplemented") // we never have sub-conns
}
func (*funcBalancer) Close() {}
type funcBalancerBuilder struct {
name string
instance *funcBalancer<|fim▁hole|>func (b *funcBalancerBuilder) Build(balancer.ClientConn, balancer.BuildOptions) balancer.Balancer {
return b.instance
}
func (b *funcBalancerBuilder) Name() string { return b.name }
// TestBalancerErrorResolverPolling injects balancer errors and verifies
// ResolveNow is called on the resolver with the appropriate backoff strategy
// being consulted between ResolveNow calls.
func (s) TestBalancerErrorResolverPolling(t *testing.T) {
// The test balancer will return ErrBadResolverState iff the
// ClientConnState contains no addresses.
fb := &funcBalancer{
updateClientConnState: func(s balancer.ClientConnState) error {
if len(s.ResolverState.Addresses) == 0 {
return balancer.ErrBadResolverState
}
return nil
},
}
const balName = "BalancerErrorResolverPolling"
balancer.Register(&funcBalancerBuilder{name: balName, instance: fb})
testResolverErrorPolling(t,
func(r *manual.Resolver) {
// No addresses so the balancer will fail.
r.CC.UpdateState(resolver.State{})
}, func(r *manual.Resolver) {
// UpdateState will block if ResolveNow is being called (which blocks on
// rn), so call it in a goroutine. Include some address so the balancer
// will be happy.
go r.CC.UpdateState(resolver.State{Addresses: []resolver.Address{{Addr: "x"}}})
},
WithDefaultServiceConfig(fmt.Sprintf(`{ "loadBalancingConfig": [{"%v": {}}] }`, balName)))
}<|fim▁end|> | }
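// Illustrative only (not part of this test): a client would normally select a
// registered balancer through the default service config, e.g.
//
//	cc, err := grpc.Dial(target, grpc.WithInsecure(),
//		grpc.WithDefaultServiceConfig(`{"loadBalancingConfig": [{"round_robin": {}}]}`))
//
// The test above exercises the same code path directly via a manual resolver.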
|
<|file_name|>SleepingSplitterTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 The BtrPlace Authors. All rights reserved.
* Use of this source code is governed by a LGPL-style
* license that can be found in the LICENSE.txt file.
*/
package org.btrplace.scheduler.runner.disjoint.splitter;
import gnu.trove.map.hash.TIntIntHashMap;
import org.btrplace.model.DefaultModel;
import org.btrplace.model.Instance;
import org.btrplace.model.Model;
import org.btrplace.model.Node;
import org.btrplace.model.VM;
import org.btrplace.model.constraint.MinMTTR;
import org.btrplace.model.constraint.Sleeping;
import org.btrplace.scheduler.runner.disjoint.Instances;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Unit tests for {@link SleepingSplitter}.
*
* @author Fabien Hermenier
*/
public class SleepingSplitterTest {
@Test
public void simpleTest() {
SleepingSplitter splitter = new SleepingSplitter();
List<Instance> instances = new ArrayList<>();
Model origin = new DefaultModel();
Node n1 = origin.newNode();
Node n2 = origin.newNode();
VM vm1 = origin.newVM();
VM vm2 = origin.newVM();
VM vm3 = origin.newVM();
VM vm4 = origin.newVM();
/*
* READY: vm1
* n1 vm2
* n2 (vm3) vm4
*/
origin.getMapping().addOnlineNode(n1);
origin.getMapping().addReadyVM(vm1);
origin.getMapping().addRunningVM(vm2, n1);
origin.getMapping().addOnlineNode(n2);
origin.getMapping().addSleepingVM(vm3, n2);
origin.getMapping().addRunningVM(vm4, n2);
Model m0 = new DefaultModel();
m0.newNode(n1.id());
m0.newVM(vm1.id());
m0.newVM(vm2.id());
m0.getMapping().addOnlineNode(n1);
m0.getMapping().addReadyVM(vm1);
m0.getMapping().addRunningVM(vm2, n1);
Model m1 = new DefaultModel();
m1.newNode(n2.id());
m1.newVM(vm3.id());
m1.newVM(vm4.id());
m1.getMapping().addOnlineNode(n2);
m1.getMapping().addSleepingVM(vm3, n2);
m1.getMapping().addRunningVM(vm4, n2);
<|fim▁hole|> instances.add(new Instance(m0, new ArrayList<>(), new MinMTTR()));
instances.add(new Instance(m1, new ArrayList<>(), new MinMTTR()));
Set<VM> all = new HashSet<>(m0.getMapping().getAllVMs());
all.addAll(m1.getMapping().getAllVMs());
TIntIntHashMap index = Instances.makeVMIndex(instances);
//Only VMs in m0
Sleeping single = new Sleeping(vm2);
Assert.assertTrue(splitter.split(single, null, instances, index, new TIntIntHashMap()));
Assert.assertTrue(instances.get(0).getSatConstraints().contains(single));
Assert.assertFalse(instances.get(1).getSatConstraints().contains(single));
}
}<|fim▁end|> | |
<|file_name|>x86.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::error::{Error, Result};
pub unsafe fn syscall0(mut a: usize) -> Result<usize> {
asm!("int 0x80"
: "={eax}"(a)
: "{eax}"(a)
: "memory"
: "intel", "volatile");<|fim▁hole|> Error::demux(a)
}
pub unsafe fn syscall1(mut a: usize, b: usize) -> Result<usize> {
asm!("int 0x80"
: "={eax}"(a)
: "{eax}"(a), "{ebx}"(b)
: "memory"
: "intel", "volatile");
Error::demux(a)
}
// Clobbers all registers - special for clone
pub unsafe fn syscall1_clobber(mut a: usize, b: usize) -> Result<usize> {
asm!("int 0x80"
: "={eax}"(a)
: "{eax}"(a), "{ebx}"(b)
: "memory", "ebx", "ecx", "edx", "esi", "edi"
: "intel", "volatile");
Error::demux(a)
}
pub unsafe fn syscall2(mut a: usize, b: usize, c: usize) -> Result<usize> {
asm!("int 0x80"
: "={eax}"(a)
: "{eax}"(a), "{ebx}"(b), "{ecx}"(c)
: "memory"
: "intel", "volatile");
Error::demux(a)
}
pub unsafe fn syscall3(mut a: usize, b: usize, c: usize, d: usize) -> Result<usize> {
asm!("int 0x80"
: "={eax}"(a)
: "{eax}"(a), "{ebx}"(b), "{ecx}"(c), "{edx}"(d)
: "memory"
: "intel", "volatile");
Error::demux(a)
}
pub unsafe fn syscall4(mut a: usize, b: usize, c: usize, d: usize, e: usize) -> Result<usize> {
asm!("int 0x80"
: "={eax}"(a)
: "{eax}"(a), "{ebx}"(b), "{ecx}"(c), "{edx}"(d), "{esi}"(e)
: "memory"
: "intel", "volatile");
Error::demux(a)
}
pub unsafe fn syscall5(mut a: usize, b: usize, c: usize, d: usize, e: usize, f: usize)
-> Result<usize> {
asm!("int 0x80"
: "={eax}"(a)
: "{eax}"(a), "{ebx}"(b), "{ecx}"(c), "{edx}"(d), "{esi}"(e), "{edi}"(f)
: "memory"
: "intel", "volatile");
Error::demux(a)
}<|fim▁end|> | |
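// Usage sketch (an assumption, not part of this module): wrapping a raw
// syscall in a typed helper. `SYS_WRITE` is a placeholder value for
// illustration only; the real number comes from this crate's syscall table.
const SYS_WRITE: usize = 4; // hypothetical number
pub fn write(fd: usize, buf: &[u8]) -> Result<usize> {
    unsafe { syscall3(SYS_WRITE, fd, buf.as_ptr() as usize, buf.len()) }
}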
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub mod vga; |
<|file_name|>server.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from flask import Flask, request
import nltk
import json
from nltk_contrib import timex
import time
import sys
import getopt
USAGE = """
nltk-rest [--port -p <port>] [--verbose -v] [--units -u] [--help -h]
Expose NLTK over REST as a server using Python Flask. Submit content to the
`/nltk` endpoint in the REST body request.
-h, --help Prints this message.
-p, --port Sets the port for the REST server, default is 8881.
-v, --verbose Enables verbose logging.
-u, --units Enable parser to extract measurements from text.
"""
Verbose = 0
Port = 8881 #default port
Units = 0
def echo2(*s): sys.stderr.write('server.py [NLTK]: ' + ' '.join(map(str, s)) + '\n')
def die(*s): echo2('ERROR:', *s); sys.exit(1)  # error helper used by main() below
app = Flask(__name__)
@app.route('/')
def status():
msg = '''
<html><head><title>NLTK REST Server</title></head><body><h3>NLTK REST server</h3>
<p>This app exposes the Python <a href="http://nltk.org/">Natural Language Toolkit (NLTK)</a>
as a REST server.</p>
<h2>Status: Running</h2>
<p>More apps from the <a href="//irds.usc.edu/">USC Information Retrieval & Data Science Group</a>.</p>
'''
return msg
@app.route('/nltk', methods=["PUT", "POST"])
def namedEntityRecognizer():
echo2("Performing NER on incoming stream")
content = request.stream.read()
if Verbose:
echo2("Incoming content is "+content)
start = time.time()
date_time = timex.tag(content)
tokenized = nltk.word_tokenize(content.decode("utf-8"))
tagged = nltk.pos_tag(tokenized)
namedEnt = nltk.ne_chunk(tagged, binary=True)
names = extract_entity_names(namedEnt, 'NE')
names.extend(date_time)
result = {"result" : "success", "names" : names}
if Units:
grammar = '''unit: {<CD><NNS>?<NN.*>?},
unit: {<CD><JJ>?<NN.*>}
'''
parser = nltk.RegexpParser(grammar)
units = extract_entity_names(parser.parse(tagged),'unit')
result['units'] = units
jsonDoc = json.dumps(result, sort_keys=True, indent=4, separators=(',', ': '))
end = time.time()
print "NER took "+str(end - start)+" seconds"
return jsonDoc
# Based on example from:
# https://gist.github.com/onyxfish/322906<|fim▁hole|> if hasattr(t, 'label') and t.label:
if t.label() == label:
entity_names.append(' '.join([child[0] for child in t]))
else:
for child in t:
entity_names.extend(extract_entity_names(child, label))
return entity_names
def main(argv=None):
"""Run NLTK REST server from command line according to USAGE."""
global Verbose
global Units
if argv is None:
argv = sys.argv
try:
opts, argv = getopt.getopt(argv[1:], 'hp:vu',
['help', 'port=', 'verbose', 'units'])
except getopt.GetoptError, (msg, bad_opt):
die("%s error: Bad option: %s, %s" % (argv[0], bad_opt, msg))
port = Port
for opt, val in opts:
if opt in ('-h', '--help'): echo2(USAGE); sys.exit()
elif opt in ('--port'): port = int(val)
elif opt in ('-v', '--verbose'): Verbose = 1
elif opt in ('-u', '--units'): Units = 1
else: die(USAGE)
app.run(debug=Verbose, port=port)
if __name__ == '__main__':
main(sys.argv)<|fim▁end|> | def extract_entity_names(t, label):
entity_names = [] |
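# Example client call (illustrative; assumes the default port 8881):
#
#   curl -X POST --data "Alan Turing was born on 23 June 1912." \
#        http://localhost:8881/nltk
#
# The response is a JSON document such as
# {"result": "success", "names": ["Alan Turing", ...]}.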
<|file_name|>CallLogListItemHelper.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.dialer.calllog;
import android.content.Context;
import android.content.res.Resources;
import android.provider.CallLog.Calls;
import android.text.SpannableStringBuilder;
import android.text.TextUtils;
import android.util.Log;
import com.android.contacts.common.CallUtil;
import com.android.dialer.PhoneCallDetails;
import com.android.dialer.PhoneCallDetailsHelper;
import com.android.dialer.R;
/**
* Helper class to fill in the views of a call log entry.
*/
/* package */ class CallLogListItemHelper {
private static final String TAG = "CallLogListItemHelper";
/** Helper for populating the details of a phone call. */
private final PhoneCallDetailsHelper mPhoneCallDetailsHelper;
/** Helper for handling phone numbers. */
private final PhoneNumberDisplayHelper mPhoneNumberHelper;
/** Resources to look up strings. */
private final Resources mResources;
/**
* Creates a new helper instance.
*
* @param phoneCallDetailsHelper used to set the details of a phone call
* @param phoneNumberHelper used to process phone number
*/
public CallLogListItemHelper(PhoneCallDetailsHelper phoneCallDetailsHelper,
PhoneNumberDisplayHelper phoneNumberHelper, Resources resources) {
mPhoneCallDetailsHelper = phoneCallDetailsHelper;
mPhoneNumberHelper = phoneNumberHelper;
mResources = resources;
}
/**
* Sets the name, label, and number for a contact.
*
* @param context The application context.
* @param views the views to populate
* @param details the details of a phone call needed to fill in the data
*/
public void setPhoneCallDetails(
Context context, CallLogListItemViews views, PhoneCallDetails details) {
mPhoneCallDetailsHelper.setPhoneCallDetails(views.phoneCallDetailsViews, details);
// Set the accessibility text for the contact badge
views.quickContactView.setContentDescription(getContactBadgeDescription(details));
// Set the primary action accessibility description
views.primaryActionView.setContentDescription(getCallDescription(context, details));
// Cache name or number of caller. Used when setting the content descriptions of buttons
// when the actions ViewStub is inflated.
views.nameOrNumber = this.getNameOrNumber(details);
}
/**
* Sets the accessibility descriptions for the action buttons in the action button ViewStub.
*
* @param views The views associated with the current call log entry.
*/
public void setActionContentDescriptions(CallLogListItemViews views) {
if (views.nameOrNumber == null) {
Log.e(TAG, "setActionContentDescriptions; name or number is null.");
}
// Calling expandTemplate with a null parameter will cause a NullPointerException.
// Although we don't expect a null name or number, it is best to protect against it.
CharSequence nameOrNumber = views.nameOrNumber == null ? "" : views.nameOrNumber;
views.callBackButtonView.setContentDescription(
TextUtils.expandTemplate(
mResources.getString(R.string.description_call_back_action), nameOrNumber));
views.videoCallButtonView.setContentDescription(
TextUtils.expandTemplate(
mResources.getString(R.string.description_video_call_action),
nameOrNumber));
views.voicemailButtonView.setContentDescription(
TextUtils.expandTemplate(
mResources.getString(R.string.description_voicemail_action), nameOrNumber));
views.detailsButtonView.setContentDescription(
TextUtils.expandTemplate(
mResources.getString(R.string.description_details_action), nameOrNumber));
}
/**
* Returns the accessibility description for the contact badge for a call log entry.
*
* @param details Details of call.
* @return Accessibility description.
*/
private CharSequence getContactBadgeDescription(PhoneCallDetails details) {
return mResources.getString(R.string.description_contact_details, getNameOrNumber(details));
}
/**
* Returns the accessibility description of the "return call/call" action for a call log
* entry.
* Accessibility text is a combination of:
* {Voicemail Prefix}. {Number of Calls}. {Caller information} {Phone Account}.
* If most recent call is a voicemail, {Voicemail Prefix} is "New Voicemail.", otherwise "".
*
* If more than one call for the caller, {Number of Calls} is:
* "{number of calls} calls.", otherwise "".
*
* The {Caller Information} references the most recent call associated with the caller.
* For incoming calls:
* If missed call: Missed call from {Name/Number} {Call Type} {Call Time}.
* If answered call: Answered call from {Name/Number} {Call Type} {Call Time}.
*
* For outgoing calls:
     * If outgoing: Call to {Name/Number} {Call Type} {Call Time}.
*
* Where:
* {Name/Number} is the name or number of the caller (as shown in call log).
     * {Call Type} is the contact phone number type (e.g. mobile) or location.
* {Call Time} is the time since the last call for the contact occurred.
*
* The {Phone Account} refers to the account/SIM through which the call was placed or received
* in multi-SIM devices.
*
* Examples:
* 3 calls. New Voicemail. Missed call from Joe Smith mobile 2 hours ago on SIM 1.
*
* 2 calls. Answered call from John Doe mobile 1 hour ago.
*
* @param context The application context.
* @param details Details of call.
* @return Return call action description.
*/
public CharSequence getCallDescription(Context context, PhoneCallDetails details) {
int lastCallType = getLastCallType(details.callTypes);
boolean isVoiceMail = lastCallType == Calls.VOICEMAIL_TYPE;
// Get the name or number of the caller.
final CharSequence nameOrNumber = getNameOrNumber(details);
// Get the call type or location of the caller; null if not applicable
final CharSequence typeOrLocation = mPhoneCallDetailsHelper.getCallTypeOrLocation(details);<|fim▁hole|> SpannableStringBuilder callDescription = new SpannableStringBuilder();
// Prepend the voicemail indication.
if (isVoiceMail) {
callDescription.append(mResources.getString(R.string.description_new_voicemail));
}
// Add number of calls if more than one.
if (details.callTypes.length > 1) {
callDescription.append(mResources.getString(R.string.description_num_calls,
details.callTypes.length));
}
// If call had video capabilities, add the "Video Call" string.
if ((details.features & Calls.FEATURES_VIDEO) == Calls.FEATURES_VIDEO &&
CallUtil.isVideoEnabled(context)) {
callDescription.append(mResources.getString(R.string.description_video_call));
}
int stringID = getCallDescriptionStringID(details);
String accountLabel = PhoneAccountUtils.getAccountLabel(context, details.accountHandle);
// Use chosen string resource to build up the message.
CharSequence onAccountLabel = accountLabel == null
? ""
: TextUtils.expandTemplate(
mResources.getString(R.string.description_phone_account),
accountLabel);
callDescription.append(
TextUtils.expandTemplate(
mResources.getString(stringID),
nameOrNumber,
// If no type or location can be determined, sub in empty string.
typeOrLocation == null ? "" : typeOrLocation,
timeOfCall,
onAccountLabel));
return callDescription;
}
/**
* Determine the appropriate string ID to describe a call for accessibility purposes.
*
* @param details Call details.
* @return String resource ID to use.
*/
public int getCallDescriptionStringID(PhoneCallDetails details) {
int lastCallType = getLastCallType(details.callTypes);
int stringID;
if (lastCallType == Calls.VOICEMAIL_TYPE || lastCallType == Calls.MISSED_TYPE) {
//Message: Missed call from <NameOrNumber>, <TypeOrLocation>, <TimeOfCall>,
//<PhoneAccount>.
stringID = R.string.description_incoming_missed_call;
} else if (lastCallType == Calls.INCOMING_TYPE) {
//Message: Answered call from <NameOrNumber>, <TypeOrLocation>, <TimeOfCall>,
//<PhoneAccount>.
stringID = R.string.description_incoming_answered_call;
} else {
//Message: Call to <NameOrNumber>, <TypeOrLocation>, <TimeOfCall>, <PhoneAccount>.
stringID = R.string.description_outgoing_call;
}
return stringID;
}
/**
* Determine the call type for the most recent call.
* @param callTypes Call types to check.
* @return Call type.
*/
private int getLastCallType(int[] callTypes) {
if (callTypes.length > 0) {
return callTypes[0];
} else {
return Calls.MISSED_TYPE;
}
}
/**
* Return the name or number of the caller specified by the details.
* @param details Call details
* @return the name (if known) of the caller, otherwise the formatted number.
*/
private CharSequence getNameOrNumber(PhoneCallDetails details) {
final CharSequence recipient;
if (!TextUtils.isEmpty(details.name)) {
recipient = details.name;
} else {
recipient = mPhoneNumberHelper.getDisplayNumber(details.accountHandle,
details.number, details.numberPresentation, details.formattedNumber);
}
return recipient;
}
}<|fim▁end|> |
// Get the time/date of the call
final CharSequence timeOfCall = mPhoneCallDetailsHelper.getCallDate(details);
|
<|file_name|>edit.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
"fmt"
"github.com/spf13/cobra"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/kubernetes/pkg/kubectl/cmd/templates"
cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
"k8s.io/kubernetes/pkg/kubectl/cmd/util/editor"
"k8s.io/kubernetes/pkg/kubectl/util/i18n"
)
var (
editLong = templates.LongDesc(i18n.T(`
Edit a resource from the default editor.
The edit command allows you to directly edit any API resource you can retrieve via the
command line tools. It will open the editor defined by your KUBE_EDITOR, or EDITOR
environment variables, or fall back to 'vi' for Linux or 'notepad' for Windows.
You can edit multiple objects, although changes are applied one at a time. The command
accepts filenames as well as command line arguments, although the files you point to must
be previously saved versions of resources.
Editing is done with the API version used to fetch the resource.
To edit using a specific API version, fully-qualify the resource, version, and group.
The default format is YAML. To edit in JSON, specify "-o json".
The flag --windows-line-endings can be used to force Windows line endings,
otherwise the default for your operating system will be used.
In the event an error occurs while updating, a temporary file will be created on disk<|fim▁hole|>
editExample = templates.Examples(i18n.T(`
# Edit the service named 'docker-registry':
kubectl edit svc/docker-registry
# Use an alternative editor
KUBE_EDITOR="nano" kubectl edit svc/docker-registry
# Edit the job 'myjob' in JSON using the v1 API format:
kubectl edit job.v1.batch/myjob -o json
# Edit the deployment 'mydeployment' in YAML and save the modified config in its annotation:
kubectl edit deployment/mydeployment -o yaml --save-config`))
)
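// NewCmdEdit creates the "edit" command, wiring editor.NewEditOptions and its
// validation, record, print, and filename flags onto a cobra.Command.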
func NewCmdEdit(f cmdutil.Factory, ioStreams genericclioptions.IOStreams) *cobra.Command {
o := editor.NewEditOptions(editor.NormalEditMode, ioStreams)
o.ValidateOptions = cmdutil.ValidateOptions{EnableValidation: true}
cmd := &cobra.Command{
Use: "edit (RESOURCE/NAME | -f FILENAME)",
DisableFlagsInUseLine: true,
Short: i18n.T("Edit a resource on the server"),
Long: editLong,
		Example: editExample,
Run: func(cmd *cobra.Command, args []string) {
if err := o.Complete(f, args, cmd); err != nil {
cmdutil.CheckErr(err)
}
if err := o.Run(); err != nil {
cmdutil.CheckErr(err)
}
},
}
// bind flag structs
o.RecordFlags.AddFlags(cmd)
o.PrintFlags.AddFlags(cmd)
usage := "to use to edit the resource"
cmdutil.AddFilenameOptionFlags(cmd, &o.FilenameOptions, usage)
cmdutil.AddValidateOptionFlags(cmd, &o.ValidateOptions)
cmd.Flags().BoolVarP(&o.OutputPatch, "output-patch", "", o.OutputPatch, "Output the patch if the resource is edited.")
cmd.Flags().BoolVar(&o.WindowsLineEndings, "windows-line-endings", o.WindowsLineEndings,
"Defaults to the line ending native to your platform.")
cmdutil.AddApplyAnnotationVarFlags(cmd, &o.ApplyAnnotation)
cmdutil.AddIncludeUninitializedFlag(cmd)
return cmd
}<|fim▁end|> | that contains your unapplied changes. The most common error when updating a resource
is another editor changing the resource on the server. When this occurs, you will have
to apply your changes to the newer version of the resource, or update your temporary
saved copy to include the latest resource version.`)) |
<|file_name|>db.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
class DB(object):
"""DB is a generic class for SQL Database"""
def __init__(self, table_prefix=''):
self.table_prefix = table_prefix
def stringify(self, val):
"""Get a unicode from a value"""
# If raw string, go in unicode
if isinstance(val, str):
val = val.decode('utf8', 'ignore').replace("'", "''")
elif isinstance(val, unicode):
val = val.replace("'", "''")
else: # other type, we can str
val = unicode(str(val))
val = val.replace("'", "''")
return val
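    # Example: create_insert_query('hosts', {'host_name': u'srv1'}) yields,
    # modulo spacing: INSERT INTO hosts (host_name ) VALUES ('srv1' )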
def create_insert_query(self, table, data):
"""Create a INSERT query in table with all data of data (a dict)"""
query = u"INSERT INTO %s " % (self.table_prefix + table)<|fim▁hole|> i = 0 # f or the ',' problem... look like C here...
for prop in data:
i += 1
val = data[prop]
# Boolean must be catch, because we want 0 or 1, not True or False
if isinstance(val, bool):
if val:
val = 1
else:
val = 0
# Get a string of the value
val = self.stringify(val)
if i == 1:
props_str = props_str + u"%s " % prop
values_str = values_str + u"'%s' " % val
else:
props_str = props_str + u", %s " % prop
values_str = values_str + u", '%s' " % val
# Ok we've got data, let's finish the query
props_str = props_str + u' )'
values_str = values_str + u' )'
query = query + props_str + u' VALUES' + values_str
return query
def create_update_query(self, table, data, where_data):
"""Create a update query of table with data, and use where data for
the WHERE clause
"""
query = u"UPDATE %s set " % (self.table_prefix + table)
# First data manage
query_follow = ''
i = 0 # for the , problem...
for prop in data:
# Do not need to update a property that is in where
# it is even dangerous, will raise a warning
if prop not in where_data:
i += 1
val = data[prop]
# Boolean must be catch, because we want 0 or 1, not True or False
if isinstance(val, bool):
if val:
val = 1
else:
val = 0
# Get a string of the value
val = self.stringify(val)
if i == 1:
query_follow += u"%s='%s' " % (prop, val)
else:
query_follow += u", %s='%s' " % (prop, val)
# Ok for data, now WHERE, same things
where_clause = u" WHERE "
i = 0 # For the 'and' problem
for prop in where_data:
i += 1
val = where_data[prop]
# Boolean must be catch, because we want 0 or 1, not True or False
if isinstance(val, bool):
if val:
val = 1
else:
val = 0
# Get a string of the value
val = self.stringify(val)
if i == 1:
where_clause += u"%s='%s' " % (prop, val)
else:
where_clause += u"and %s='%s' " % (prop, val)
query = query + query_follow + where_clause
return query
def fetchone(self):
"""Just get an entry"""
return self.db_cursor.fetchone()
def fetchall(self):
"""Get all entry"""
return self.db_cursor.fetchall()<|fim▁end|> | props_str = u' ('
values_str = u' (' |
<|file_name|>mailer.js<|end_file_name|><|fim▁begin|>// mailer.js
var nodemailer = require('nodemailer');
var smtpTransport = nodemailer.createTransport("SMTP", {
service: "Mandrill",
debug: true,
auth: {
user: "[email protected]",
pass: "k-AdDVcsNJ9oj8QYATVNGQ"
}
});
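// Builds and sends the account-activation email; the verification link embeds
// the one-time token and username as query parameters.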
exports.sendEmailConfirmation = function(emailaddress, username, firstname, expiremoment, token){
var mailOptions = {
from: "[email protected]", // sender address
to: emailaddress, // list of receivers
subject: "Confirm email and start Ativinos", // Subject line
text: 'Hi '<|fim▁hole|> +firstname+
', your account, '
+username+
', will be enabled after you confirm your email. Your account will be deleted by '
+ expiremoment +
' if you do not verify email before then. To verify your email, visit http://www.ativinos.com/emailverify?token='
+ token +
'&username='
+ username,
html: 'Hi '
+firstname+
', your account, '
+username+
', will be enabled after you confirm your email. Your account will be deleted by '
+ expiremoment +
' if you do not verify email before then. To verify your email, visit <a href="http://www.ativinos.com/emailverify?token='
+ token +
'&username='
+ username +
'">http://www.ativinos.com/emailverify?token='
+ token +
'&username='
+ username +
'</a>',
}
smtpTransport.sendMail(mailOptions, function(error, response){
if(error){
console.log(error);
}
})
}<|fim▁end|> | |
<|file_name|>ZoomToSelectedFeatures.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2008-2011 The Open Planning Project
*
* Published under the BSD license.
* See https://github.com/opengeo/gxp/raw/master/license.txt for the full text
* of the license.
*/
/**
* @requires plugins/ZoomToExtent.js
*/
/** api: (define)
* module = gxp.plugins
* class = ZoomToSelectedFeatures
*/
/** api: (extends)
* plugins/ZoomToExtent.js
*/
Ext.namespace("gxp.plugins");
/** api: constructor
* .. class:: ZoomToSelectedFeatures(config)
*
* Plugin for zooming to the extent of selected features
*/
gxp.plugins.ZoomToSelectedFeatures = Ext.extend(gxp.plugins.ZoomToExtent, {
/** api: ptype = gxp_zoomtoselectedfeatures */
ptype: "gxp_zoomtoselectedfeatures",<|fim▁hole|> /** api: config[menuText]
* ``String``
* Text for zoom menu item (i18n).
*/
menuText: "Zoom to selected features",
/** api: config[tooltip]
* ``String``
* Text for zoom action tooltip (i18n).
*/
tooltip: "Zoom to selected features",
/** api: config[featureManager]
* ``String`` id of the :class:`gxp.plugins.FeatureManager` to look for
* selected features
*/
/** api: config[closest]
* ``Boolean`` Find the zoom level that most closely fits the specified
* extent. Note that this may result in a zoom that does not exactly
* contain the entire extent. Default is false.
*/
closest: false,
/** private: property[iconCls]
*/
iconCls: "gxp-icon-zoom-to",
/** private: method[extent]
*/
extent: function() {
var layer = this.target.tools[this.featureManager].featureLayer;
var bounds, geom, extent, features = layer.selectedFeatures;
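        // Union the bounding boxes of every selected feature that has a geometry.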
for (var i=features.length-1; i>=0; --i) {
geom = features[i].geometry;
if (geom) {
extent = geom.getBounds();
if (bounds) {
bounds.extend(extent);
} else {
bounds = extent.clone();
}
}
        }
return bounds;
},
/** api: method[addActions]
*/
addActions: function() {
var actions = gxp.plugins.ZoomToSelectedFeatures.superclass.addActions.apply(this, arguments);
actions[0].disable();
var layer = this.target.tools[this.featureManager].featureLayer;
layer.events.on({
"featureselected": function() {
actions[0].isDisabled() && actions[0].enable();
},
"featureunselected": function() {
layer.selectedFeatures.length == 0 && actions[0].disable();
}
});
return actions;
}
});
Ext.preg(gxp.plugins.ZoomToSelectedFeatures.prototype.ptype, gxp.plugins.ZoomToSelectedFeatures);<|fim▁end|> | |
<|file_name|>TrafficRoutingType.cpp<|end_file_name|><|fim▁begin|>/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/codedeploy/model/TrafficRoutingType.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>
using namespace Aws::Utils;
namespace Aws
{<|fim▁hole|> namespace CodeDeploy
{
namespace Model
{
namespace TrafficRoutingTypeMapper
{
static const int TimeBasedCanary_HASH = HashingUtils::HashString("TimeBasedCanary");
static const int TimeBasedLinear_HASH = HashingUtils::HashString("TimeBasedLinear");
static const int AllAtOnce_HASH = HashingUtils::HashString("AllAtOnce");
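        // Map a routing-type name to its enum value by comparing precomputed string
        // hashes; unrecognized names are stored in the overflow container so they round-trip.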
TrafficRoutingType GetTrafficRoutingTypeForName(const Aws::String& name)
{
int hashCode = HashingUtils::HashString(name.c_str());
if (hashCode == TimeBasedCanary_HASH)
{
return TrafficRoutingType::TimeBasedCanary;
}
else if (hashCode == TimeBasedLinear_HASH)
{
return TrafficRoutingType::TimeBasedLinear;
}
else if (hashCode == AllAtOnce_HASH)
{
return TrafficRoutingType::AllAtOnce;
}
EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
if(overflowContainer)
{
overflowContainer->StoreOverflow(hashCode, name);
return static_cast<TrafficRoutingType>(hashCode);
}
return TrafficRoutingType::NOT_SET;
}
Aws::String GetNameForTrafficRoutingType(TrafficRoutingType enumValue)
{
switch(enumValue)
{
case TrafficRoutingType::TimeBasedCanary:
return "TimeBasedCanary";
case TrafficRoutingType::TimeBasedLinear:
return "TimeBasedLinear";
case TrafficRoutingType::AllAtOnce:
return "AllAtOnce";
default:
EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
if(overflowContainer)
{
return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
}
return {};
}
}
} // namespace TrafficRoutingTypeMapper
} // namespace Model
} // namespace CodeDeploy
} // namespace Aws<|fim▁end|> | |
<|file_name|>LexToken.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Michael Biggs. See the COPYING file at the top-level
# directory of this distribution and at http://shok.io/code/copyright.html
# Tokens that come from the Lexer are either pairs or tuples:
# colno:type
# colno:type:value<|fim▁hole|> ttype = ''
tvalue = ''
def __init__(self, tokenstr):
t = tokenstr.split(':')
if len(t) < 2 or len(t) > 3:
raise Exception("invalid token: %s" % t)
self.colno = t[0]
self.ttype = t[1]
if len(t) == 3:
self.tvalue = t[2]
def __repr__(self):
if '' == self.tvalue:
return "<%s:%s>" % (self.colno, self.ttype)
else:
return "<%s:%s:%s>" % (self.colno, self.ttype, self.tvalue)
def NewlineToken():
return LexToken('0:NEWL')<|fim▁end|> | class LexToken:
colno = 0 |
<|file_name|>FeatureTestObject2.java<|end_file_name|><|fim▁begin|>package org.elasticsearch.painless;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with<|fim▁hole|> * this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */
public class FeatureTestObject2 {
public FeatureTestObject2() {super();}
public static int staticNumberArgument(int injected, int userArgument) {
return injected * userArgument;
}
public static int staticNumberArgument2(int userArgument1, int userArgument2) {
return userArgument1 * userArgument2;
}
}<|fim▁end|> | |
<|file_name|>sniff.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
__author__="kebo"
__date__ ="$2009-11-5 11:15:55$"
import pcap
import sys
import string
import time
import socket
import struct
import getopt
protocols={socket.IPPROTO_TCP:'tcp',
socket.IPPROTO_UDP:'udp',
socket.IPPROTO_ICMP:'icmp'}
node = None
mb = None
decoder = None
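# Frame a payload for stdout: 2-byte big-endian length header followed by the raw bytes.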
def send(payload):
sz = len(payload)
header= struct.pack("!h", sz)
return sys.stdout.write( header + payload )
def print_packet(pktlen, data, timestamp):<|fim▁hole|> global mb
if not data:
return
#send(data)
#print data
#print timestamp
print '\n%s.%f' % (time.strftime('%H:%M',time.localtime(timestamp)),timestamp % 60)
if __name__=='__main__':
p = pcap.pcapObject()
#dev = pcap.lookupdev()
dev = "eth0"
net, mask = pcap.lookupnet(dev)
# note: to_ms does nothing on linux
p.open_live(dev, 1600, 0, 100)
#p.dump_open('dumpfile')
p.setfilter(string.join(["tcp","port 22"],' '), 0, 0)
# try-except block to catch keyboard interrupt. Failure to shut
# down cleanly can result in the interface not being taken out of promisc.
# mode
#p.setnonblock(1)
try:
while 1:
p.dispatch(1, print_packet)
except KeyboardInterrupt:
print '%s' % sys.exc_type
print 'shutting down'
print '%d packets received, %d packets dropped, %d packets dropped by interface' % p.stats()<|fim▁end|> | |
<|file_name|>classes_4.js<|end_file_name|><|fim▁begin|>var searchData=
[
['fan2para',['Fan2Para',['../classrisa_1_1cuda_1_1_fan2_para.html',1,'risa::cuda']]],
['filter',['Filter',['../classrisa_1_1cuda_1_1_filter.html',1,'risa::cuda']]]<|fim▁hole|><|fim▁end|> | ]; |
<|file_name|>rollingupdatecluster.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"fmt"
"io"
"os"
"strconv"
"strings"
"time"
"github.com/spf13/cobra"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/client-go/kubernetes"
_ "k8s.io/client-go/plugin/pkg/client/auth"
"k8s.io/klog"
"k8s.io/kops/cmd/kops/util"
api "k8s.io/kops/pkg/apis/kops"
"k8s.io/kops/pkg/cloudinstances"
"k8s.io/kops/pkg/featureflag"
"k8s.io/kops/pkg/instancegroups"
"k8s.io/kops/pkg/pretty"
"k8s.io/kops/upup/pkg/fi/cloudup"
"k8s.io/kops/util/pkg/tables"
"k8s.io/kubernetes/pkg/kubectl/util/i18n"
"k8s.io/kubernetes/pkg/kubectl/util/templates"
)
var (
rollingupdateLong = pretty.LongDesc(i18n.T(`
This command updates a kubernetes cluster to match the cloud and kops specifications.
To perform a rolling update, you need to update the cloud resources first with the command
` + pretty.Bash("kops update cluster") + `.
If rolling-update does not report that the cluster needs to be rolled, you can force the cluster to be
rolled with the force flag. Rolling update drains and validates the cluster by default. A cluster is
deemed validated when all required nodes are running and all pods in the kube-system namespace are operational.
When a node is deleted, rolling-update sleeps the interval for the node type, and then tries for the same period
of time for the cluster to be validated. For instance, setting --master-interval=3m causes rolling-update
to wait for 3 minutes after a master is rolled, and another 3 minutes for the cluster to stabilize and pass
validation.
Note: terraform users will need to run all of the following commands from the same directory
` + pretty.Bash("kops update cluster --target=terraform") + ` then ` + pretty.Bash("terraform plan") + ` then
` + pretty.Bash("terraform apply") + ` prior to running ` + pretty.Bash("kops rolling-update cluster") + `.`))
rollingupdateExample = templates.Examples(i18n.T(`
# Preview a rolling-update.
kops rolling-update cluster
# Roll the currently selected kops cluster with defaults.
# Nodes will be drained and the cluster will be validated between node replacement.
kops rolling-update cluster --yes
# Roll the k8s-cluster.example.com kops cluster,
# do not fail if the cluster does not validate,
# wait 8 min to create new node, and wait at least
# 8 min to validate the cluster.
kops rolling-update cluster k8s-cluster.example.com --yes \
--fail-on-validate-error="false" \
--master-interval=8m \
--node-interval=8m
# Roll the k8s-cluster.example.com kops cluster,
# do not validate the cluster because of the cloudonly flag.
# Force the entire cluster to roll, even if rolling update
# reports that the cluster does not need to be rolled.
kops rolling-update cluster k8s-cluster.example.com --yes \
--cloudonly \
--force
# Roll the k8s-cluster.example.com kops cluster,
# only roll the node instancegroup,
# use the new drain and validate functionality.
kops rolling-update cluster k8s-cluster.example.com --yes \
--fail-on-validate-error="false" \
--node-interval 8m \
--instance-group nodes
`))
rollingupdateShort = i18n.T(`Rolling update a cluster.`)
)
// RollingUpdateOptions is the command Object for a Rolling Update.
type RollingUpdateOptions struct {
Yes bool
Force bool
CloudOnly bool
// The following two variables are when kops is validating a cluster
// during a rolling update.
// FailOnDrainError fail rolling-update if drain errors.
FailOnDrainError bool
// FailOnValidate fail the cluster rolling-update when the cluster
// does not validate, after a validation period.
FailOnValidate bool
// PostDrainDelay is the duration of a pause after a drain operation
PostDrainDelay time.Duration
// ValidationTimeout is the timeout for validation to succeed after the drain and pause
ValidationTimeout time.Duration
// MasterInterval is the minimum time to wait after stopping a master node. This does not include drain and validate time.
MasterInterval time.Duration
// NodeInterval is the minimum time to wait after stopping a (non-master) node. This does not include drain and validate time.
NodeInterval time.Duration
// BastionInterval is the minimum time to wait after stopping a bastion. This does not include drain and validate time.
BastionInterval time.Duration
// Interactive rolling-update prompts user to continue after each instances is updated.
Interactive bool
ClusterName string
// InstanceGroups is the list of instance groups to rolling-update;
// if not specified, all instance groups will be updated
InstanceGroups []string
// InstanceGroupRoles is the list of roles we should rolling-update
// if not specified, all instance groups will be updated
InstanceGroupRoles []string
}
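// InitDefaults sets conservative defaults for a rolling update: a dry-run unless
// --yes is given, 15-second restart intervals, and a 15-minute validation timeout.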
func (o *RollingUpdateOptions) InitDefaults() {
o.Yes = false
o.Force = false
o.CloudOnly = false
o.FailOnDrainError = false
o.FailOnValidate = true
o.MasterInterval = 15 * time.Second
o.NodeInterval = 15 * time.Second
o.BastionInterval = 15 * time.Second
o.Interactive = false
o.PostDrainDelay = 5 * time.Second
o.ValidationTimeout = 15 * time.Minute
}
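// NewCmdRollingUpdateCluster builds the "kops rolling-update cluster" cobra command and binds its flags.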
func NewCmdRollingUpdateCluster(f *util.Factory, out io.Writer) *cobra.Command {
var options RollingUpdateOptions
options.InitDefaults()
cmd := &cobra.Command{
Use: "cluster",
Short: rollingupdateShort,
Long: rollingupdateLong,
Example: rollingupdateExample,
}
cmd.Flags().BoolVarP(&options.Yes, "yes", "y", options.Yes, "Perform rolling update immediately, without --yes rolling-update executes a dry-run")
cmd.Flags().BoolVar(&options.Force, "force", options.Force, "Force rolling update, even if no changes")
cmd.Flags().BoolVar(&options.CloudOnly, "cloudonly", options.CloudOnly, "Perform rolling update without confirming progress with k8s")
cmd.Flags().DurationVar(&options.ValidationTimeout, "validation-timeout", options.ValidationTimeout, "Maximum time to wait for a cluster to validate")
cmd.Flags().DurationVar(&options.MasterInterval, "master-interval", options.MasterInterval, "Time to wait between restarting masters")
cmd.Flags().DurationVar(&options.NodeInterval, "node-interval", options.NodeInterval, "Time to wait between restarting nodes")
cmd.Flags().DurationVar(&options.BastionInterval, "bastion-interval", options.BastionInterval, "Time to wait between restarting bastions")
cmd.Flags().DurationVar(&options.PostDrainDelay, "post-drain-delay", options.PostDrainDelay, "Time to wait after draining each node")
cmd.Flags().BoolVarP(&options.Interactive, "interactive", "i", options.Interactive, "Prompt to continue after each instance is updated")
cmd.Flags().StringSliceVar(&options.InstanceGroups, "instance-group", options.InstanceGroups, "List of instance groups to update (defaults to all if not specified)")
cmd.Flags().StringSliceVar(&options.InstanceGroupRoles, "instance-group-roles", options.InstanceGroupRoles, "If specified, only instance groups of the specified role will be updated (e.g. Master,Node,Bastion)")
if featureflag.DrainAndValidateRollingUpdate.Enabled() {
cmd.Flags().BoolVar(&options.FailOnDrainError, "fail-on-drain-error", true, "The rolling-update will fail if draining a node fails.")
cmd.Flags().BoolVar(&options.FailOnValidate, "fail-on-validate-error", true, "The rolling-update will fail if the cluster fails to validate.")
}
cmd.Run = func(cmd *cobra.Command, args []string) {
err := rootCommand.ProcessArgs(args)
if err != nil {
exitWithError(err)
return
}
clusterName := rootCommand.ClusterName()
if clusterName == "" {
exitWithError(fmt.Errorf("--name is required"))
return
}
options.ClusterName = clusterName
err = RunRollingUpdateCluster(f, os.Stdout, &options)
if err != nil {
exitWithError(err)
return
}
}
return cmd
}
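// RunRollingUpdateCluster previews or performs a rolling update across the cluster's instance groups.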
func RunRollingUpdateCluster(f *util.Factory, out io.Writer, options *RollingUpdateOptions) error {
clientset, err := f.Clientset()
if err != nil {
return err
}
cluster, err := GetCluster(f, options.ClusterName)
if err != nil {
return err
}
contextName := cluster.ObjectMeta.Name
clientGetter := genericclioptions.NewConfigFlags()
clientGetter.Context = &contextName
config, err := clientGetter.ToRESTConfig()
if err != nil {
return fmt.Errorf("cannot load kubecfg settings for %q: %v", contextName, err)
}
var nodes []v1.Node
var k8sClient kubernetes.Interface
if !options.CloudOnly {
k8sClient, err = kubernetes.NewForConfig(config)
if err != nil {
return fmt.Errorf("cannot build kube client for %q: %v", contextName, err)
}
nodeList, err := k8sClient.CoreV1().Nodes().List(metav1.ListOptions{})
if err != nil {
fmt.Fprintf(os.Stderr, "Unable to reach the kubernetes API.\n")
fmt.Fprintf(os.Stderr, "Use --cloudonly to do a rolling-update without confirming progress with the k8s API\n\n")
return fmt.Errorf("error listing nodes in cluster: %v", err)
}
if nodeList != nil {
nodes = nodeList.Items
}
}
list, err := clientset.InstanceGroupsFor(cluster).List(metav1.ListOptions{})
if err != nil {
return err
}
var instanceGroups []*api.InstanceGroup
for i := range list.Items {
instanceGroups = append(instanceGroups, &list.Items[i])
}
warnUnmatched := true
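	// If specific instance groups were requested, restrict the update to those and fail on unknown names.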
if len(options.InstanceGroups) != 0 {
var filtered []*api.InstanceGroup
for _, instanceGroupName := range options.InstanceGroups {
var found *api.InstanceGroup
for _, ig := range instanceGroups {
if ig.ObjectMeta.Name == instanceGroupName {
found = ig
break
}
}
if found == nil {
return fmt.Errorf("InstanceGroup %q not found", instanceGroupName)
}
filtered = append(filtered, found)
}
instanceGroups = filtered
// Don't warn if we find more ASGs than IGs
warnUnmatched = false
}
if len(options.InstanceGroupRoles) != 0 {
var filtered []*api.InstanceGroup
for _, ig := range instanceGroups {
for _, role := range options.InstanceGroupRoles {
if ig.Spec.Role == api.InstanceGroupRole(strings.Title(strings.ToLower(role))) {
filtered = append(filtered, ig)
continue
}
}
}
instanceGroups = filtered
// Don't warn if we find more ASGs than IGs
warnUnmatched = false
}
cloud, err := cloudup.BuildCloud(cluster)
if err != nil {
return err
}
groups, err := cloud.GetCloudGroups(cluster, instanceGroups, warnUnmatched, nodes)
if err != nil {
return err
}
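	// Render a summary table of each cloud instance group and how many of its instances still need updating.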
{
t := &tables.Table{}
t.AddColumn("NAME", func(r *cloudinstances.CloudInstanceGroup) string {
return r.InstanceGroup.ObjectMeta.Name
})
t.AddColumn("STATUS", func(r *cloudinstances.CloudInstanceGroup) string {
return r.Status()
})
t.AddColumn("NEEDUPDATE", func(r *cloudinstances.CloudInstanceGroup) string {
return strconv.Itoa(len(r.NeedUpdate))
})
t.AddColumn("READY", func(r *cloudinstances.CloudInstanceGroup) string {
return strconv.Itoa(len(r.Ready))
})
t.AddColumn("MIN", func(r *cloudinstances.CloudInstanceGroup) string {
return strconv.Itoa(r.MinSize)
})
t.AddColumn("MAX", func(r *cloudinstances.CloudInstanceGroup) string {
return strconv.Itoa(r.MaxSize)
})
t.AddColumn("NODES", func(r *cloudinstances.CloudInstanceGroup) string {
var nodes []*v1.Node<|fim▁hole|> }
}
for _, i := range r.NeedUpdate {
if i.Node != nil {
nodes = append(nodes, i.Node)
}
}
return strconv.Itoa(len(nodes))
})
var l []*cloudinstances.CloudInstanceGroup
for _, v := range groups {
l = append(l, v)
}
columns := []string{"NAME", "STATUS", "NEEDUPDATE", "READY", "MIN", "MAX"}
if !options.CloudOnly {
columns = append(columns, "NODES")
}
err := t.Render(l, out, columns...)
if err != nil {
return err
}
}
needUpdate := false
for _, group := range groups {
if len(group.NeedUpdate) != 0 {
needUpdate = true
}
}
if !needUpdate && !options.Force {
fmt.Printf("\nNo rolling-update required.\n")
return nil
}
if !options.Yes {
fmt.Printf("\nMust specify --yes to rolling-update.\n")
return nil
}
if featureflag.DrainAndValidateRollingUpdate.Enabled() {
klog.V(2).Infof("Rolling update with drain and validate enabled.")
}
d := &instancegroups.RollingUpdateCluster{
MasterInterval: options.MasterInterval,
NodeInterval: options.NodeInterval,
BastionInterval: options.BastionInterval,
Interactive: options.Interactive,
Force: options.Force,
Cloud: cloud,
K8sClient: k8sClient,
ClientGetter: clientGetter,
FailOnDrainError: options.FailOnDrainError,
FailOnValidate: options.FailOnValidate,
CloudOnly: options.CloudOnly,
ClusterName: options.ClusterName,
PostDrainDelay: options.PostDrainDelay,
ValidationTimeout: options.ValidationTimeout,
}
return d.RollingUpdate(groups, cluster, list)
}<|fim▁end|> | for _, i := range r.Ready {
if i.Node != nil {
nodes = append(nodes, i.Node) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Zeobuilder is an extensible GUI-toolkit for molecular model construction.
# Copyright (C) 2007 - 2012 Toon Verstraelen <[email protected]>, Center
# for Molecular Modeling (CMM), Ghent University, Ghent, Belgium; all rights
# reserved unless otherwise stated.
#<|fim▁hole|># as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# In addition to the regulations of the GNU General Public License,
# publications and communications based in parts on this program or on
# parts of this program are required to cite the following article:
#
# "ZEOBUILDER: a GUI toolkit for the construction of complex molecules on the
# nanoscale with building blocks", Toon Verstraelen, Veronique Van Speybroeck
# and Michel Waroquier, Journal of Chemical Information and Modeling, Vol. 48
# (7), 1530-1541, 2008
# DOI:10.1021/ci8000748
#
# Zeobuilder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
import read, edit, faulty, group, composed, optional<|fim▁end|> | # This file is part of Zeobuilder.
#
# Zeobuilder is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License |
<|file_name|>text.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
# Source: Chapter02/TransferLearning.py from PacktPublishing/Intelligent-Projects-Using-Python
__author__ = 'Santanu Pattanayak'
import numpy as np
np.random.seed(1000)
import os
import glob
import cv2
import datetime
import pandas as pd
import time
import warnings
warnings.filterwarnings("ignore")
from sklearn.model_selection import KFold
from sklearn.metrics import cohen_kappa_score
from keras.models import Sequential,Model
from keras.layers.core import Dense, Dropout, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers import GlobalMaxPooling2D,GlobalAveragePooling2D
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping
from keras.utils import np_utils
from sklearn.metrics import log_loss
import keras
from keras import __version__ as keras_version
from keras.applications.inception_v3 import InceptionV3
from keras.applications.resnet50 import ResNet50
from keras.applications.vgg16 import VGG16
from keras.preprocessing.image import ImageDataGenerator
from keras import optimizers
from keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger, Callback
from keras.applications.resnet50 import preprocess_input
import h5py
import argparse
from sklearn.externals import joblib
import json
class TransferLearning:
def __init__(self):
parser = argparse.ArgumentParser(description='Process the inputs')
parser.add_argument('--path',help='image directory')
parser.add_argument('--class_folders',help='class images folder names')
parser.add_argument('--dim',type=int,help='Image dimensions to process')
parser.add_argument('--lr',type=float,help='learning rate',default=1e-4)
parser.add_argument('--batch_size',type=int,help='batch size')
parser.add_argument('--epochs',type=int,help='no of epochs to train')
parser.add_argument('--initial_layers_to_freeze',type=int,help='the initial layers to freeze')
parser.add_argument('--model',help='Standard Model to load',default='InceptionV3')
parser.add_argument('--folds',type=int,help='num of cross validation folds',default=5)
parser.add_argument('--outdir',help='output directory')
args = parser.parse_args()
self.path = args.path
self.class_folders = json.loads(args.class_folders)
self.dim = int(args.dim)
self.lr = float(args.lr)
self.batch_size = int(args.batch_size)
self.epochs = int(args.epochs)
self.initial_layers_to_freeze = int(args.initial_layers_to_freeze)
self.model = args.model
self.folds = int(args.folds)
self.outdir = args.outdir
def get_im_cv2(self,path,dim=224):
img = cv2.imread(path)
        resized = cv2.resize(img, (dim,dim), interpolation=cv2.INTER_LINEAR)
return resized
# Pre Process the Images based on the ImageNet pre-trained model Image transformation
def pre_process(self,img):
img[:,:,0] = img[:,:,0] - 103.939
img[:,:,1] = img[:,:,0] - 116.779
img[:,:,2] = img[:,:,0] - 123.68
return img
# Function to build X, y in numpy format based on the train/validation datasets
def read_data(self,class_folders,path,num_class,dim,train_val='train'):
print(train_val)
train_X,train_y = [],[]
for c in class_folders:
path_class = path + str(train_val) + '/' + str(c)
file_list = os.listdir(path_class)
for f in file_list:
img = self.get_im_cv2(path_class + '/' + f)
img = self.pre_process(img)
train_X.append(img)
label = int(c.split('class')[1])
train_y.append(int(label))
train_y = keras.utils.np_utils.to_categorical(np.array(train_y),num_class)
return np.array(train_X),train_y
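    # InceptionV3 model for transfer learning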
def inception_pseudo(self,dim=224,freeze_layers=30,full_freeze='N'):
model = InceptionV3(weights='imagenet',include_top=False)
x = model.output
x = GlobalAveragePooling2D()(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
out = Dense(5,activation='softmax')(x)
model_final = Model(input = model.input,outputs=out)
if full_freeze != 'N':
for layer in model.layers[0:freeze_layers]:
layer.trainable = False
return model_final
# ResNet50 Model for transfer Learning
def resnet_pseudo(self,dim=224,freeze_layers=10,full_freeze='N'):
model = ResNet50(weights='imagenet',include_top=False)
x = model.output
x = GlobalAveragePooling2D()(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
out = Dense(5,activation='softmax')(x)
model_final = Model(input = model.input,outputs=out)
if full_freeze != 'N':
for layer in model.layers[0:freeze_layers]:
layer.trainable = False
return model_final
# VGG16 Model for transfer Learning
def VGG16_pseudo(self,dim=224,freeze_layers=10,full_freeze='N'):
model = VGG16(weights='imagenet',include_top=False)
x = model.output
x = GlobalAveragePooling2D()(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.5)(x)
out = Dense(5,activation='softmax')(x)
model_final = Model(input = model.input,outputs=out)
if full_freeze != 'N':
for layer in model.layers[0:freeze_layers]:
layer.trainable = False
return model_final
def train_model(self,train_X,train_y,n_fold=5,batch_size=16,epochs=40,dim=224,lr=1e-5,model='ResNet50'):
model_save_dest = {}
k = 0
kf = KFold(n_splits=n_fold, random_state=0, shuffle=True)
for train_index, test_index in kf.split(train_X):
k += 1
X_train,X_test = train_X[train_index],train_X[test_index]
y_train, y_test = train_y[train_index],train_y[test_index]
            if model == 'ResNet50':
model_final = self.resnet_pseudo(dim=224,freeze_layers=10,full_freeze='N')
if model == 'VGG16':
model_final = self.VGG16_pseudo(dim=224,freeze_layers=10,full_freeze='N')
if model == 'InceptionV3':
model_final = self.inception_pseudo(dim=224,freeze_layers=10,full_freeze='N')
datagen = ImageDataGenerator(
horizontal_flip = True,
vertical_flip = True,
width_shift_range = 0.1,
height_shift_range = 0.1,
channel_shift_range=0,
zoom_range = 0.2,
rotation_range = 20)
adam = optimizers.Adam(lr=lr, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)
model_final.compile(optimizer=adam, loss=["categorical_crossentropy"],metrics=['accuracy'])
reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.50,
patience=3, min_lr=0.000001)
callbacks = [
EarlyStopping(monitor='val_loss', patience=10, mode='min', verbose=1),
CSVLogger('keras-5fold-run-01-v1-epochs_ib.log', separator=',', append=False),reduce_lr,
ModelCheckpoint(
'kera1-5fold-run-01-v1-fold-' + str('%02d' % (k + 1)) + '-run-' + str('%02d' % (1 + 1)) + '.check',
monitor='val_loss', mode='min',
save_best_only=True,
verbose=1)]
model_final.fit_generator(datagen.flow(X_train,y_train, batch_size=batch_size),
steps_per_epoch=X_train.shape[0]/batch_size,epochs=epochs,verbose=1,
validation_data=(X_test,y_test),callbacks=callbacks,
class_weight={0:0.012,1:0.12,2:0.058,3:0.36,4:0.43})
model_name = 'kera1-5fold-run-01-v1-fold-' + str('%02d' % (k + 1)) + '-run-' + str('%02d' % (1 + 1)) + '.check'
del model_final
f = h5py.File(model_name, 'r+')
del f['optimizer_weights']
f.close()
model_final = keras.models.load_model(model_name)
model_name1 = self.outdir + str(model) + '___' + str(k)
model_final.save(model_name1)
model_save_dest[k] = model_name1
return model_save_dest
# Hold out dataset validation function
def inference_validation(self,test_X,test_y,model_save_dest,n_class=5,folds=5):
pred = np.zeros((len(test_X),n_class))
for k in range(1,folds + 1):
model = keras.models.load_model(model_save_dest[k])
pred = pred + model.predict(test_X)
pred = pred/(1.0*folds)
pred_class = np.argmax(pred,axis=1)
act_class = np.argmax(test_y,axis=1)
accuracy = np.sum([pred_class == act_class])*1.0/len(test_X)
kappa = cohen_kappa_score(pred_class,act_class,weights='quadratic')
return pred_class,accuracy,kappa
def main(self):
start_time = time.time()
print('Data Processing..')
self.num_class = len(self.class_folders)
train_X,train_y = self.read_data(self.class_folders,self.path,self.num_class,self.dim,train_val='train')
self.model_save_dest = self.train_model(train_X,train_y,n_fold=self.folds,batch_size=self.batch_size,
epochs=self.epochs,dim=self.dim,lr=self.lr,model=self.model)
print("Model saved to dest:",self.model_save_dest)
test_X,test_y = self.read_data(self.class_folders,self.path,self.num_class,self.dim,train_val='validation')
_,accuracy,kappa = self.inference_validation(test_X,test_y,self.model_save_dest,n_class=self.num_class,folds=self.folds)
joblib.dump(self.model_save_dest,self.outdir + "dict_model.pkl")
print("-----------------------------------------------------")
print("Kappa score:", kappa)
print("accuracy:", accuracy)
print("End of training")
print("-----------------------------------------------------")
print("Processing Time",time.time() - start_time,' secs')
if __name__ == "__main__":
obj = TransferLearning()
obj.main()
<|fim▁end|>
<|file_name|>fetch.py<|end_file_name|><|fim▁begin|>import re
from time import sleep
from .settings import settings
def get_parsed_mentions(raw_text):
regex = re.compile(r"@([\w\.]+)")
    return regex.findall(raw_text)
def get_parsed_hashtags(raw_text):
regex = re.compile(r"#(\w+)")
    return regex.findall(raw_text)
def fetch_mentions(raw_text, dict_obj):
    if not settings.fetch_mentions:
        return
    mentions = get_parsed_mentions(raw_text)
if mentions:
dict_obj["mentions"] = mentions
def fetch_hashtags(raw_text, dict_obj):
    if not settings.fetch_hashtags:
        return
    hashtags = get_parsed_hashtags(raw_text)
if hashtags:
dict_obj["hashtags"] = hashtags
def fetch_datetime(browser, dict_post):
ele_datetime = browser.find_one(".eo2As .c-Yi7 ._1o9PC")
datetime = ele_datetime.get_attribute("datetime")
dict_post["datetime"] = datetime
def fetch_imgs(browser, dict_post):
img_urls = set()
while True:
ele_imgs = browser.find("._97aPb img", waittime=10)
if isinstance(ele_imgs, list):
for ele_img in ele_imgs:
img_urls.add(ele_img.get_attribute("src"))
else:
break
next_photo_btn = browser.find_one("._6CZji .coreSpriteRightChevron")
if next_photo_btn:
next_photo_btn.click()
sleep(0.3)
else:
break
dict_post["img_urls"] = list(img_urls)
def fetch_likes_plays(browser, dict_post):
if not settings.fetch_likes_plays:
return
likes = None
el_likes = browser.find_one(".Nm9Fw > * > span")<|fim▁hole|> dict_post["views"] = int(el_plays.text.replace(",", "").replace(".", ""))
el_see_likes.click()
el_likes = browser.find_one(".vJRqr > span")
likes = el_likes.text
browser.find_one(".QhbhU").click()
elif el_likes is not None:
likes = el_likes.text
dict_post["likes"] = (
int(likes.replace(",", "").replace(".", "")) if likes is not None else 0
)
def fetch_likers(browser, dict_post):
if not settings.fetch_likers:
return
like_info_btn = browser.find_one(".EDfFK ._0mzm-.sqdOP")
like_info_btn.click()
likers = {}
liker_elems_css_selector = ".Igw0E ._7UhW9.xLCgt a"
likers_elems = list(browser.find(liker_elems_css_selector))
last_liker = None
while likers_elems:
for ele in likers_elems:
likers[ele.get_attribute("href")] = ele.get_attribute("title")
if last_liker == likers_elems[-1]:
break
last_liker = likers_elems[-1]
last_liker.location_once_scrolled_into_view
sleep(0.6)
likers_elems = list(browser.find(liker_elems_css_selector))
dict_post["likers"] = list(likers.values())
close_btn = browser.find_one(".WaOAr button")
close_btn.click()
def fetch_caption(browser, dict_post):
ele_comments = browser.find(".eo2As .gElp9")
if len(ele_comments) > 0:
temp_element = browser.find("span",ele_comments[0])
for element in temp_element:
if element.text not in ['Verified',''] and 'caption' not in dict_post:
dict_post["caption"] = element.text
fetch_mentions(dict_post.get("caption",""), dict_post)
fetch_hashtags(dict_post.get("caption",""), dict_post)
def fetch_comments(browser, dict_post):
if not settings.fetch_comments:
return
show_more_selector = "button .glyphsSpriteCircle_add__outline__24__grey_9"
show_more = browser.find_one(show_more_selector)
while show_more:
show_more.location_once_scrolled_into_view
show_more.click()
sleep(0.3)
show_more = browser.find_one(show_more_selector)
show_comment_btns = browser.find(".EizgU")
for show_comment_btn in show_comment_btns:
show_comment_btn.location_once_scrolled_into_view
show_comment_btn.click()
sleep(0.3)
ele_comments = browser.find(".eo2As .gElp9")
comments = []
for els_comment in ele_comments[1:]:
author = browser.find_one(".FPmhX", els_comment).text
temp_element = browser.find("span", els_comment)
for element in temp_element:
if element.text not in ['Verified','']:
comment = element.text
comment_obj = {"author": author, "comment": comment}
fetch_mentions(comment, comment_obj)
fetch_hashtags(comment, comment_obj)
comments.append(comment_obj)
if comments:
dict_post["comments"] = comments
def fetch_initial_comment(browser, dict_post):
comments_elem = browser.find_one("ul.XQXOT")
first_post_elem = browser.find_one(".ZyFrc", comments_elem)
caption = browser.find_one("span", first_post_elem)
if caption:
dict_post["description"] = caption.text
def fetch_details(browser, dict_post):
if not settings.fetch_details:
return
browser.open_new_tab(dict_post["key"])
username = browser.find_one("a.ZIAjV")
location = browser.find_one("a.O4GlU")
if username:
dict_post["username"] = username.text
if location:
dict_post["location"] = location.text
fetch_initial_comment(browser, dict_post)
browser.close_current_tab()<|fim▁end|> | el_see_likes = browser.find_one(".vcOH2")
if el_see_likes is not None:
el_plays = browser.find_one(".vcOH2 > span") |
<|file_name|>nico.js<|end_file_name|><|fim▁begin|>var path = require('path');
var package = require('./package');
var webpack = require('webpack');
var ProgressPlugin = require('webpack/lib/ProgressPlugin');
var inspect = require('util').inspect;
var Busboy = require('busboy');
var chalk = require('chalk');
var webpackMiddleware = require('webpack-dev-middleware');
var webpackConfig = require('./webpack.config');
var webpackCompiler = webpack(webpackConfig);
var handler;
webpackCompiler.apply(new ProgressPlugin(function(percentage, msg) {
var stream = process.stderr;
if (stream.isTTY && percentage < 0.71) {
stream.cursorTo(0);
stream.write('📦 ' + chalk.magenta(msg));
stream.clearLine(1);
} else if (percentage === 1) {
console.log(chalk.green('\nwebpack: bundle build is now finished.'));
}
}));
// {{ settings for nico
exports.site = {
name: package.title,
description: package.description,
repo: package.repository.url,
issues: package.bugs.url
};
// PRODUCTION
if (process.env.NODE_ENV === 'PRODUCTION') {
exports.minimized = '.min';
}
exports.package = package;
exports.theme = 'site';
exports.source = process.cwd();
exports.output = path.join(process.cwd(), '_site');
exports.permalink = '{{directory}}/{{filename}}';
exports.ignorefilter = function(filepath, subdir) {
var extname = path.extname(filepath);
if (extname === '.tmp' || extname === '.bak') {
return false;
}
if (/\.DS_Store/.test(filepath)) {
return false;
}
if (/^(_site|_theme|node_modules|site|\.idea)/.test(subdir)) {
return false;
}
return true;
};
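// Dev-server middlewares: a stub multipart upload endpoint (via busboy) and
// webpack-dev-middleware for serving the bundled assets.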
exports.middlewares = [
{
name: 'upload',
filter: /upload\.do?$/,
handle: function(req, res, next) {
if (req.method === 'POST') {
var busboy = new Busboy({headers: req.headers});
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
console.log('File [' + fieldname + ']: filename: ' + filename + ', encoding: ' + encoding + ', mimetype: ' + mimetype);
file.on('data', function(data) {
console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
});
file.on('end', function() {
console.log('File [' + fieldname + '] Finished');
});
});
busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
console.log('Field [' + fieldname + ']: value: ' + inspect(val));
});
busboy.on('finish', function() {
console.log('Done parsing form!');
//res.writeHead(303, { Connection: 'close', Location: '/' });
res.end(JSON.stringify({
'status': 'success',
'url': '/example.file'
}));
});
req.pipe(busboy);
}
}
},
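  // Build the webpack-dev-middleware handler lazily and memoize it, so the
  // compiler is wired up only on the first matching request.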
{
name: 'webpackDevMiddleware',
filter: /\.(js|css)(\.map)?$/,
handle: function(req, res, next) {<|fim▁hole|> watchOptions: {
aggregateTimeout: 300,
poll: true
},
noInfo: true
});
try {
return handler(req, res, next);
} catch(e) {}
}
}];
exports.writers = [
'nico-jsx.PageWriter',
'nico-jsx.StaticWriter',
'nico-jsx.FileWriter'
];
// end settings }}
process.on('uncaughtException', function(err) {
console.log(err);
});<|fim▁end|> | handler = handler || webpackMiddleware(webpackCompiler, {
publicPath: '/dist/',
lazy: false, |
<|file_name|>lint-stability.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:lint_stability.rs
// aux-build:inherited_stability.rs
// aux-build:stability_cfg1.rs
// aux-build:stability_cfg2.rs
// ignore-tidy-linelength
#![deny(deprecated)]
#![allow(dead_code)]
#![feature(staged_api)]
#![staged_api]
#[macro_use]
extern crate lint_stability;
mod cross_crate {
extern crate stability_cfg1;
extern crate stability_cfg2; //~ WARNING: use of unstable library feature
use lint_stability::*;
fn test() {
let foo = MethodTester;
deprecated(); //~ ERROR use of deprecated item
foo.method_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated(); //~ ERROR use of deprecated item
deprecated_text(); //~ ERROR use of deprecated item: text
foo.method_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
deprecated_unstable(); //~ ERROR use of deprecated item<|fim▁hole|> foo.trait_deprecated_unstable(); //~ ERROR use of deprecated item
//~^ WARNING use of unstable library feature
deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ WARNING use of unstable library feature
foo.method_deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ WARNING use of unstable library feature
foo.trait_deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ WARNING use of unstable library feature
unstable(); //~ WARNING use of unstable library feature
foo.method_unstable(); //~ WARNING use of unstable library feature
foo.trait_unstable(); //~ WARNING use of unstable library feature
unstable_text(); //~ WARNING use of unstable library feature 'test_feature': text
foo.method_unstable_text(); //~ WARNING use of unstable library feature 'test_feature': text
foo.trait_unstable_text(); //~ WARNING use of unstable library feature 'test_feature': text
stable();
foo.method_stable();
foo.trait_stable();
stable_text();
foo.method_stable_text();
foo.trait_stable_text();
let _ = DeprecatedStruct { i: 0 }; //~ ERROR use of deprecated item
let _ = DeprecatedUnstableStruct { i: 0 }; //~ ERROR use of deprecated item
//~^ WARNING use of unstable library feature
let _ = UnstableStruct { i: 0 }; //~ WARNING use of unstable library feature
let _ = StableStruct { i: 0 };
let _ = DeprecatedUnitStruct; //~ ERROR use of deprecated item
let _ = DeprecatedUnstableUnitStruct; //~ ERROR use of deprecated item
//~^ WARNING use of unstable library feature
let _ = UnstableUnitStruct; //~ WARNING use of unstable library feature
let _ = StableUnitStruct;
let _ = Enum::DeprecatedVariant; //~ ERROR use of deprecated item
let _ = Enum::DeprecatedUnstableVariant; //~ ERROR use of deprecated item
//~^ WARNING use of unstable library feature
let _ = Enum::UnstableVariant; //~ WARNING use of unstable library feature
let _ = Enum::StableVariant;
let _ = DeprecatedTupleStruct (1); //~ ERROR use of deprecated item
let _ = DeprecatedUnstableTupleStruct (1); //~ ERROR use of deprecated item
//~^ WARNING use of unstable library feature
let _ = UnstableTupleStruct (1); //~ WARNING use of unstable library feature
let _ = StableTupleStruct (1);
// At the moment, the lint checker only checks stability
// in the arguments of macros.
// Eventually, we will want to lint the contents of the
// macro in the module *defining* it. Also, stability levels
// on macros themselves are not yet linted.
macro_test_arg!(deprecated_text()); //~ ERROR use of deprecated item: text
macro_test_arg!(deprecated_unstable_text()); //~ ERROR use of deprecated item: text
//~^ WARNING use of unstable library feature
macro_test_arg!(macro_test_arg!(deprecated_text())); //~ ERROR use of deprecated item: text
}
fn test_method_param<F: Trait>(foo: F) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_deprecated_unstable(); //~ ERROR use of deprecated item
//~^ WARNING use of unstable library feature
foo.trait_deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ WARNING use of unstable library feature
foo.trait_unstable(); //~ WARNING use of unstable library feature
foo.trait_unstable_text(); //~ WARNING use of unstable library feature 'test_feature': text
foo.trait_stable();
}
fn test_method_object(foo: &Trait) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_deprecated_unstable(); //~ ERROR use of deprecated item
//~^ WARNING use of unstable library feature
foo.trait_deprecated_unstable_text(); //~ ERROR use of deprecated item: text
//~^ WARNING use of unstable library feature
foo.trait_unstable(); //~ WARNING use of unstable library feature
foo.trait_unstable_text(); //~ WARNING use of unstable library feature 'test_feature': text
foo.trait_stable();
}
struct S;
impl UnstableTrait for S { } //~ WARNING use of unstable library feature
trait LocalTrait : UnstableTrait { } //~ WARNING use of unstable library feature
impl Trait for S {
fn trait_stable(&self) {}
fn trait_unstable(&self) {} //~ WARNING use of unstable library feature
}
}
mod inheritance {
extern crate inherited_stability; //~ WARNING: use of unstable library feature
use self::inherited_stability::*; //~ WARNING: use of unstable library feature
fn test_inheritance() {
unstable(); //~ WARNING use of unstable library feature
stable();
stable_mod::unstable(); //~ WARNING use of unstable library feature
stable_mod::stable();
unstable_mod::deprecated(); //~ ERROR use of deprecated item
unstable_mod::unstable(); //~ WARNING use of unstable library feature
let _ = Unstable::UnstableVariant; //~ WARNING use of unstable library feature
let _ = Unstable::StableVariant;
let x: usize = 0;
x.unstable(); //~ WARNING use of unstable library feature
x.stable();
}
}
mod this_crate {
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
pub fn deprecated() {}
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0", reason = "text")]
pub fn deprecated_text() {}
#[unstable(feature = "test_feature")]
pub fn unstable() {}
#[unstable(feature = "test_feature", reason = "text")]
pub fn unstable_text() {}
#[stable(feature = "rust1", since = "1.0.0")]
pub fn stable() {}
#[stable(feature = "rust1", since = "1.0.0", reason = "text")]
pub fn stable_text() {}
#[stable(feature = "rust1", since = "1.0.0")]
pub struct MethodTester;
impl MethodTester {
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
pub fn method_deprecated(&self) {}
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0", reason = "text")]
pub fn method_deprecated_text(&self) {}
#[unstable(feature = "test_feature")]
pub fn method_unstable(&self) {}
#[unstable(feature = "test_feature", reason = "text")]
pub fn method_unstable_text(&self) {}
#[stable(feature = "rust1", since = "1.0.0")]
pub fn method_stable(&self) {}
#[stable(feature = "rust1", since = "1.0.0", reason = "text")]
pub fn method_stable_text(&self) {}
}
pub trait Trait {
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
fn trait_deprecated(&self) {}
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0", reason = "text")]
fn trait_deprecated_text(&self) {}
#[unstable(feature = "test_feature")]
fn trait_unstable(&self) {}
#[unstable(feature = "test_feature", reason = "text")]
fn trait_unstable_text(&self) {}
#[stable(feature = "rust1", since = "1.0.0")]
fn trait_stable(&self) {}
#[stable(feature = "rust1", since = "1.0.0", reason = "text")]
fn trait_stable_text(&self) {}
}
impl Trait for MethodTester {}
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
pub struct DeprecatedStruct { i: isize }
#[unstable(feature = "test_feature")]
pub struct UnstableStruct { i: isize }
#[stable(feature = "rust1", since = "1.0.0")]
pub struct StableStruct { i: isize }
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
pub struct DeprecatedUnitStruct;
#[unstable(feature = "test_feature")]
pub struct UnstableUnitStruct;
#[stable(feature = "rust1", since = "1.0.0")]
pub struct StableUnitStruct;
pub enum Enum {
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
DeprecatedVariant,
#[unstable(feature = "test_feature")]
UnstableVariant,
#[stable(feature = "rust1", since = "1.0.0")]
StableVariant,
}
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
pub struct DeprecatedTupleStruct(isize);
#[unstable(feature = "test_feature")]
pub struct UnstableTupleStruct(isize);
#[stable(feature = "rust1", since = "1.0.0")]
pub struct StableTupleStruct(isize);
fn test() {
// Only the deprecated cases of the following should generate
// errors, because other stability attributes now have meaning
// only *across* crates, not within a single crate.
let foo = MethodTester;
deprecated(); //~ ERROR use of deprecated item
foo.method_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated(); //~ ERROR use of deprecated item
deprecated_text(); //~ ERROR use of deprecated item: text
foo.method_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
unstable();
foo.method_unstable();
foo.trait_unstable();
unstable_text();
foo.method_unstable_text();
foo.trait_unstable_text();
stable();
foo.method_stable();
foo.trait_stable();
stable_text();
foo.method_stable_text();
foo.trait_stable_text();
let _ = DeprecatedStruct { i: 0 }; //~ ERROR use of deprecated item
let _ = UnstableStruct { i: 0 };
let _ = StableStruct { i: 0 };
let _ = DeprecatedUnitStruct; //~ ERROR use of deprecated item
let _ = UnstableUnitStruct;
let _ = StableUnitStruct;
let _ = Enum::DeprecatedVariant; //~ ERROR use of deprecated item
let _ = Enum::UnstableVariant;
let _ = Enum::StableVariant;
let _ = DeprecatedTupleStruct (1); //~ ERROR use of deprecated item
let _ = UnstableTupleStruct (1);
let _ = StableTupleStruct (1);
}
fn test_method_param<F: Trait>(foo: F) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_unstable();
foo.trait_unstable_text();
foo.trait_stable();
}
fn test_method_object(foo: &Trait) {
foo.trait_deprecated(); //~ ERROR use of deprecated item
foo.trait_deprecated_text(); //~ ERROR use of deprecated item: text
foo.trait_unstable();
foo.trait_unstable_text();
foo.trait_stable();
}
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
fn test_fn_body() {
fn fn_in_body() {}
fn_in_body();
}
impl MethodTester {
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
fn test_method_body(&self) {
fn fn_in_body() {}
fn_in_body();
}
}
#[unstable(feature = "test_feature")]
#[deprecated(since = "1.0.0")]
pub trait DeprecatedTrait {
fn dummy(&self) { }
}
struct S;
impl DeprecatedTrait for S { } //~ ERROR use of deprecated item
trait LocalTrait : DeprecatedTrait { } //~ ERROR use of deprecated item
}
fn main() {}<|fim▁end|> | //~^ WARNING use of unstable library feature
foo.method_deprecated_unstable(); //~ ERROR use of deprecated item
//~^ WARNING use of unstable library feature |
<|file_name|>1243.py<|end_file_name|><|fim▁begin|>from typing import List<|fim▁hole|> def transformArray2(self, arr: List[int]) -> List[int]:
while True:
arr2 = [a for a in arr]
changed = 0
for id in range(1, len(arr) - 1):
l = arr[id - 1]
r = arr[id + 1]
m = arr[id]
if l > m and r > m:
m += 1
changed += 1
elif l < m and r < m:
m -= 1
changed += 1
arr2[id] = m
arr = arr2
if changed == 0:
break
return arr
def transformArray(self, A):
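# One-liner variant: (a > b < c) is 1 when b is a strict local minimum (so b
# is incremented) and (a < b > c) is 1 when b is a strict local maximum (so b
# is decremented); 100 rounds is assumed to be enough for convergence.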
for _ in range(100):
A = A[:1] + [b + (a > b < c) - (a < b > c) for a, b, c in zip(A, A[1:], A[2:])] + A[-1:]
return A
if __name__ == '__main__':
assert Solution().transformArray([6, 2, 3, 4]) == [6, 3, 3, 4]
assert Solution().transformArray([1, 6, 3, 4, 3, 5]) == [1, 4, 4, 4, 4, 5]<|fim▁end|> |
class Solution: |
<|file_name|>builtin_compare.go<|end_file_name|><|fim▁begin|>// Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package expression
import (
"math"
"github.com/pingcap/errors"
"github.com/pingcap/parser/ast"
"github.com/pingcap/parser/mysql"
"github.com/pingcap/parser/opcode"
"github.com/pingcap/parser/terror"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/types/json"
"github.com/pingcap/tidb/util/chunk"
"github.com/pingcap/tipb/go-tipb"
)
var (
_ functionClass = &coalesceFunctionClass{}
_ functionClass = &greatestFunctionClass{}
_ functionClass = &leastFunctionClass{}
_ functionClass = &intervalFunctionClass{}
_ functionClass = &compareFunctionClass{}
)
var (
_ builtinFunc = &builtinCoalesceIntSig{}
_ builtinFunc = &builtinCoalesceRealSig{}
_ builtinFunc = &builtinCoalesceDecimalSig{}
_ builtinFunc = &builtinCoalesceStringSig{}
_ builtinFunc = &builtinCoalesceTimeSig{}
_ builtinFunc = &builtinCoalesceDurationSig{}
_ builtinFunc = &builtinGreatestIntSig{}
_ builtinFunc = &builtinGreatestRealSig{}
_ builtinFunc = &builtinGreatestDecimalSig{}
_ builtinFunc = &builtinGreatestStringSig{}
_ builtinFunc = &builtinGreatestTimeSig{}
_ builtinFunc = &builtinLeastIntSig{}
_ builtinFunc = &builtinLeastRealSig{}
_ builtinFunc = &builtinLeastDecimalSig{}
_ builtinFunc = &builtinLeastStringSig{}
_ builtinFunc = &builtinLeastTimeSig{}
_ builtinFunc = &builtinIntervalIntSig{}
_ builtinFunc = &builtinIntervalRealSig{}
_ builtinFunc = &builtinLTIntSig{}
_ builtinFunc = &builtinLTRealSig{}
_ builtinFunc = &builtinLTDecimalSig{}
_ builtinFunc = &builtinLTStringSig{}
_ builtinFunc = &builtinLTDurationSig{}
_ builtinFunc = &builtinLTTimeSig{}
_ builtinFunc = &builtinLEIntSig{}
_ builtinFunc = &builtinLERealSig{}
_ builtinFunc = &builtinLEDecimalSig{}
_ builtinFunc = &builtinLEStringSig{}
_ builtinFunc = &builtinLEDurationSig{}
_ builtinFunc = &builtinLETimeSig{}
_ builtinFunc = &builtinGTIntSig{}
_ builtinFunc = &builtinGTRealSig{}
_ builtinFunc = &builtinGTDecimalSig{}
_ builtinFunc = &builtinGTStringSig{}
_ builtinFunc = &builtinGTTimeSig{}
_ builtinFunc = &builtinGTDurationSig{}
_ builtinFunc = &builtinGEIntSig{}
_ builtinFunc = &builtinGERealSig{}
_ builtinFunc = &builtinGEDecimalSig{}
_ builtinFunc = &builtinGEStringSig{}
_ builtinFunc = &builtinGETimeSig{}
_ builtinFunc = &builtinGEDurationSig{}
_ builtinFunc = &builtinNEIntSig{}
_ builtinFunc = &builtinNERealSig{}
_ builtinFunc = &builtinNEDecimalSig{}
_ builtinFunc = &builtinNEStringSig{}
_ builtinFunc = &builtinNETimeSig{}
_ builtinFunc = &builtinNEDurationSig{}
_ builtinFunc = &builtinNullEQIntSig{}
_ builtinFunc = &builtinNullEQRealSig{}
_ builtinFunc = &builtinNullEQDecimalSig{}
_ builtinFunc = &builtinNullEQStringSig{}
_ builtinFunc = &builtinNullEQTimeSig{}
_ builtinFunc = &builtinNullEQDurationSig{}
)
// coalesceFunctionClass returns the first non-NULL value in the list,
// or NULL if there are no non-NULL values.
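// For example, COALESCE(NULL, 1, 2) returns 1 and COALESCE(NULL, NULL)
// returns NULL.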
type coalesceFunctionClass struct {
baseFunctionClass
}
func (c *coalesceFunctionClass) getFunction(ctx sessionctx.Context, args []Expression) (sig builtinFunc, err error) {
if err = c.verifyArgs(args); err != nil {
return nil, errors.Trace(err)
}
fieldTps := make([]*types.FieldType, 0, len(args))
for _, arg := range args {
fieldTps = append(fieldTps, arg.GetType())
}
// Use the aggregated field type as retType.
resultFieldType := types.AggFieldType(fieldTps)
resultEvalType := types.AggregateEvalType(fieldTps, &resultFieldType.Flag)
retEvalTp := resultFieldType.EvalType()
fieldEvalTps := make([]types.EvalType, 0, len(args))
for range args {
fieldEvalTps = append(fieldEvalTps, retEvalTp)
}
bf := newBaseBuiltinFuncWithTp(ctx, args, retEvalTp, fieldEvalTps...)
bf.tp.Flag |= resultFieldType.Flag
resultFieldType.Flen, resultFieldType.Decimal = 0, types.UnspecifiedLength
// Set retType to BINARY(0) if all arguments are of type NULL.
if resultFieldType.Tp == mysql.TypeNull {
types.SetBinChsClnFlag(bf.tp)
} else {
maxIntLen := 0
maxFlen := 0
// Find the max length of field in `maxFlen`,
// and max integer-part length in `maxIntLen`.
for _, argTp := range fieldTps {
if argTp.Decimal > resultFieldType.Decimal {
resultFieldType.Decimal = argTp.Decimal
}
argIntLen := argTp.Flen
if argTp.Decimal > 0 {
argIntLen -= argTp.Decimal + 1
}
// Reduce the sign bit if it is a signed integer/decimal
if !mysql.HasUnsignedFlag(argTp.Flag) {
argIntLen--
}
if argIntLen > maxIntLen {
maxIntLen = argIntLen
}
if argTp.Flen > maxFlen || argTp.Flen == types.UnspecifiedLength {
maxFlen = argTp.Flen
}
}
// For integer, field length = maxIntLen + (1/0 for sign bit)
// For decimal, field length = maxIntLen + maxDecimal + 1 (decimal point) + (1/0 for sign bit)
if resultEvalType == types.ETInt || resultEvalType == types.ETDecimal {
resultFieldType.Flen = maxIntLen + resultFieldType.Decimal
if resultFieldType.Decimal > 0 {
resultFieldType.Flen++
}
if !mysql.HasUnsignedFlag(resultFieldType.Flag) {
resultFieldType.Flen++
}
bf.tp = resultFieldType
} else {
bf.tp.Flen = maxFlen
}
// Cap the field length at the maximum decimal width.
if bf.tp.Flen > mysql.MaxDecimalWidth {
bf.tp.Flen = mysql.MaxDecimalWidth
}
}
switch retEvalTp {
case types.ETInt:
sig = &builtinCoalesceIntSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_CoalesceInt)
case types.ETReal:
sig = &builtinCoalesceRealSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_CoalesceReal)
case types.ETDecimal:
sig = &builtinCoalesceDecimalSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_CoalesceDecimal)
case types.ETString:
sig = &builtinCoalesceStringSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_CoalesceString)
case types.ETDatetime, types.ETTimestamp:
sig = &builtinCoalesceTimeSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_CoalesceTime)
case types.ETDuration:
bf.tp.Decimal, err = getExpressionFsp(ctx, args[0])
if err != nil {
return nil, errors.Trace(err)
}
sig = &builtinCoalesceDurationSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_CoalesceDuration)
}
return sig, nil
}
// builtinCoalesceIntSig is a builtin function coalesce signature which returns type int
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_coalesce
type builtinCoalesceIntSig struct {
baseBuiltinFunc
}
func (b *builtinCoalesceIntSig) Clone() builtinFunc {
newSig := &builtinCoalesceIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinCoalesceIntSig) evalInt(row chunk.Row) (res int64, isNull bool, err error) {
for _, a := range b.getArgs() {
res, isNull, err = a.EvalInt(b.ctx, row)
if err != nil || !isNull {
break
}
}
return res, isNull, errors.Trace(err)
}
// builtinCoalesceRealSig is a builtin function coalesce signature which returns type real
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_coalesce
type builtinCoalesceRealSig struct {
baseBuiltinFunc
}
func (b *builtinCoalesceRealSig) Clone() builtinFunc {
newSig := &builtinCoalesceRealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinCoalesceRealSig) evalReal(row chunk.Row) (res float64, isNull bool, err error) {
for _, a := range b.getArgs() {
res, isNull, err = a.EvalReal(b.ctx, row)
if err != nil || !isNull {
break
}
}
return res, isNull, errors.Trace(err)
}
// builtinCoalesceDecimalSig is a builtin function coalesce signature which returns type decimal
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_coalesce
type builtinCoalesceDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinCoalesceDecimalSig) Clone() builtinFunc {
newSig := &builtinCoalesceDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinCoalesceDecimalSig) evalDecimal(row chunk.Row) (res *types.MyDecimal, isNull bool, err error) {
for _, a := range b.getArgs() {
res, isNull, err = a.EvalDecimal(b.ctx, row)
if err != nil || !isNull {
break
}
}
return res, isNull, errors.Trace(err)
}
// builtinCoalesceStringSig is a builtin function coalesce signature which returns type string
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_coalesce
type builtinCoalesceStringSig struct {
baseBuiltinFunc
}
func (b *builtinCoalesceStringSig) Clone() builtinFunc {
newSig := &builtinCoalesceStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinCoalesceStringSig) evalString(row chunk.Row) (res string, isNull bool, err error) {
for _, a := range b.getArgs() {
res, isNull, err = a.EvalString(b.ctx, row)
if err != nil || !isNull {
break
}
}
return res, isNull, errors.Trace(err)
}
// builtinCoalesceTimeSig is a builtin function coalesce signature which returns type time
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_coalesce
type builtinCoalesceTimeSig struct {
baseBuiltinFunc
}
func (b *builtinCoalesceTimeSig) Clone() builtinFunc {
newSig := &builtinCoalesceTimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinCoalesceTimeSig) evalTime(row chunk.Row) (res types.Time, isNull bool, err error) {
for _, a := range b.getArgs() {
res, isNull, err = a.EvalTime(b.ctx, row)
if err != nil || !isNull {
break
}
}
return res, isNull, errors.Trace(err)
}
// builtinCoalesceDurationSig is a builtin function coalesce signature which returns type duration
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_coalesce
type builtinCoalesceDurationSig struct {
baseBuiltinFunc
}
func (b *builtinCoalesceDurationSig) Clone() builtinFunc {
newSig := &builtinCoalesceDurationSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinCoalesceDurationSig) evalDuration(row chunk.Row) (res types.Duration, isNull bool, err error) {
for _, a := range b.getArgs() {
res, isNull, err = a.EvalDuration(b.ctx, row)
if err != nil || !isNull {
break
}
}
return res, isNull, errors.Trace(err)
}
// temporalWithDateAsNumEvalType makes DATE, DATETIME, TIMESTAMP pretend to be numbers rather than strings.
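// For example, a DATETIME(3) value (fractional seconds, so Decimal > 0) is
// compared as a decimal, while a DATE value is compared as an int.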
func temporalWithDateAsNumEvalType(argTp *types.FieldType) (argEvalType types.EvalType, isStr bool, isTemporalWithDate bool) {
argEvalType = argTp.EvalType()
isStr, isTemporalWithDate = argEvalType.IsStringKind(), types.IsTemporalWithDate(argTp.Tp)
if !isTemporalWithDate {
return
}
if argTp.Decimal > 0 {
argEvalType = types.ETDecimal
} else {
argEvalType = types.ETInt
}
return
}
// getCmpTp4MinMax gets compare type for GREATEST and LEAST.
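// When every argument is a string and at least one of them carries a date
// part, the values are compared as DATETIME; otherwise the aggregated
// numeric/string compare type is used.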
func getCmpTp4MinMax(args []Expression) (argTp types.EvalType) {
datetimeFound, isAllStr := false, true
cmpEvalType, isStr, isTemporalWithDate := temporalWithDateAsNumEvalType(args[0].GetType())
if !isStr {
isAllStr = false
}
if isTemporalWithDate {
datetimeFound = true
}
lft := args[0].GetType()
for i := range args {
rft := args[i].GetType()
var tp types.EvalType
tp, isStr, isTemporalWithDate = temporalWithDateAsNumEvalType(rft)
if isTemporalWithDate {
datetimeFound = true
}
if !isStr {
isAllStr = false
}
cmpEvalType = getBaseCmpType(cmpEvalType, tp, lft, rft)
lft = rft
}
argTp = cmpEvalType
if cmpEvalType.IsStringKind() {
argTp = types.ETString
}
if isAllStr && datetimeFound {
argTp = types.ETDatetime
}
return argTp
}
type greatestFunctionClass struct {
baseFunctionClass
}
func (c *greatestFunctionClass) getFunction(ctx sessionctx.Context, args []Expression) (sig builtinFunc, err error) {
if err = c.verifyArgs(args); err != nil {
return nil, errors.Trace(err)
}
tp, cmpAsDatetime := getCmpTp4MinMax(args), false
if tp == types.ETDatetime {
cmpAsDatetime = true
tp = types.ETString
}
argTps := make([]types.EvalType, len(args))
for i := range args {
argTps[i] = tp
}
bf := newBaseBuiltinFuncWithTp(ctx, args, tp, argTps...)
if cmpAsDatetime {
tp = types.ETDatetime
}
switch tp {
case types.ETInt:
sig = &builtinGreatestIntSig{bf}
case types.ETReal:
sig = &builtinGreatestRealSig{bf}
case types.ETDecimal:
sig = &builtinGreatestDecimalSig{bf}
case types.ETString:
sig = &builtinGreatestStringSig{bf}
case types.ETDatetime:
sig = &builtinGreatestTimeSig{bf}
}
return sig, nil
}
type builtinGreatestIntSig struct {
baseBuiltinFunc
}
func (b *builtinGreatestIntSig) Clone() builtinFunc {
newSig := &builtinGreatestIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalInt evals a builtinGreatestIntSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_greatest
func (b *builtinGreatestIntSig) evalInt(row chunk.Row) (max int64, isNull bool, err error) {
max, isNull, err = b.args[0].EvalInt(b.ctx, row)
if isNull || err != nil {
return max, isNull, errors.Trace(err)
}
for i := 1; i < len(b.args); i++ {
var v int64
v, isNull, err = b.args[i].EvalInt(b.ctx, row)
if isNull || err != nil {
return max, isNull, errors.Trace(err)
}
if v > max {
max = v
}
}
return
}
type builtinGreatestRealSig struct {
baseBuiltinFunc
}
func (b *builtinGreatestRealSig) Clone() builtinFunc {
newSig := &builtinGreatestRealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalReal evals a builtinGreatestRealSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_greatest
func (b *builtinGreatestRealSig) evalReal(row chunk.Row) (max float64, isNull bool, err error) {
max, isNull, err = b.args[0].EvalReal(b.ctx, row)
if isNull || err != nil {
return max, isNull, errors.Trace(err)
}
for i := 1; i < len(b.args); i++ {
var v float64
v, isNull, err = b.args[i].EvalReal(b.ctx, row)
if isNull || err != nil {
return max, isNull, errors.Trace(err)
}
if v > max {
max = v
}
}
return
}
type builtinGreatestDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinGreatestDecimalSig) Clone() builtinFunc {
newSig := &builtinGreatestDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalDecimal evals a builtinGreatestDecimalSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_greatest
func (b *builtinGreatestDecimalSig) evalDecimal(row chunk.Row) (max *types.MyDecimal, isNull bool, err error) {
max, isNull, err = b.args[0].EvalDecimal(b.ctx, row)
if isNull || err != nil {
return max, isNull, errors.Trace(err)
}
for i := 1; i < len(b.args); i++ {
var v *types.MyDecimal
v, isNull, err = b.args[i].EvalDecimal(b.ctx, row)
if isNull || err != nil {
return max, isNull, errors.Trace(err)
}
if v.Compare(max) > 0 {
max = v
}
}
return
}
type builtinGreatestStringSig struct {
baseBuiltinFunc
}
func (b *builtinGreatestStringSig) Clone() builtinFunc {
newSig := &builtinGreatestStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalString evals a builtinGreatestStringSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_greatest
func (b *builtinGreatestStringSig) evalString(row chunk.Row) (max string, isNull bool, err error) {
max, isNull, err = b.args[0].EvalString(b.ctx, row)
if isNull || err != nil {
return max, isNull, errors.Trace(err)
}
for i := 1; i < len(b.args); i++ {
var v string
v, isNull, err = b.args[i].EvalString(b.ctx, row)
if isNull || err != nil {
return max, isNull, errors.Trace(err)
}
if types.CompareString(v, max) > 0 {
max = v
}
}
return
}
type builtinGreatestTimeSig struct {
baseBuiltinFunc
}
func (b *builtinGreatestTimeSig) Clone() builtinFunc {
newSig := &builtinGreatestTimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalString evals a builtinGreatestTimeSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_greatest
func (b *builtinGreatestTimeSig) evalString(row chunk.Row) (_ string, isNull bool, err error) {
var (
v string
t types.Time
)
max := types.ZeroDatetime
sc := b.ctx.GetSessionVars().StmtCtx
for i := 0; i < len(b.args); i++ {
v, isNull, err = b.args[i].EvalString(b.ctx, row)
if isNull || err != nil {
return "", true, errors.Trace(err)
}
t, err = types.ParseDatetime(sc, v)
if err != nil {
if err = handleInvalidTimeError(b.ctx, err); err != nil {
return v, true, errors.Trace(err)
}
continue
}
if t.Compare(max) > 0 {
max = t
}
}
return max.String(), false, nil
}
type leastFunctionClass struct {
baseFunctionClass
}
func (c *leastFunctionClass) getFunction(ctx sessionctx.Context, args []Expression) (sig builtinFunc, err error) {
if err = c.verifyArgs(args); err != nil {
return nil, errors.Trace(err)
}
tp, cmpAsDatetime := getCmpTp4MinMax(args), false
if tp == types.ETDatetime {
cmpAsDatetime = true
tp = types.ETString
}
argTps := make([]types.EvalType, len(args))
for i := range args {
argTps[i] = tp
}
bf := newBaseBuiltinFuncWithTp(ctx, args, tp, argTps...)
if cmpAsDatetime {
tp = types.ETDatetime
}
switch tp {
case types.ETInt:
sig = &builtinLeastIntSig{bf}
case types.ETReal:
sig = &builtinLeastRealSig{bf}
case types.ETDecimal:
sig = &builtinLeastDecimalSig{bf}
case types.ETString:
sig = &builtinLeastStringSig{bf}
case types.ETDatetime:
sig = &builtinLeastTimeSig{bf}
}
return sig, nil
}
type builtinLeastIntSig struct {
baseBuiltinFunc
}
func (b *builtinLeastIntSig) Clone() builtinFunc {
newSig := &builtinLeastIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalInt evals a builtinLeastIntSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_least
func (b *builtinLeastIntSig) evalInt(row chunk.Row) (min int64, isNull bool, err error) {
min, isNull, err = b.args[0].EvalInt(b.ctx, row)
if isNull || err != nil {
return min, isNull, errors.Trace(err)
}
for i := 1; i < len(b.args); i++ {
var v int64
v, isNull, err = b.args[i].EvalInt(b.ctx, row)
if isNull || err != nil {
return min, isNull, errors.Trace(err)
}
if v < min {
min = v
}
}
return
}
type builtinLeastRealSig struct {
baseBuiltinFunc
}
func (b *builtinLeastRealSig) Clone() builtinFunc {
newSig := &builtinLeastRealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalReal evals a builtinLeastRealSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_least
func (b *builtinLeastRealSig) evalReal(row chunk.Row) (min float64, isNull bool, err error) {
min, isNull, err = b.args[0].EvalReal(b.ctx, row)
if isNull || err != nil {
return min, isNull, errors.Trace(err)
}
for i := 1; i < len(b.args); i++ {
var v float64
v, isNull, err = b.args[i].EvalReal(b.ctx, row)
if isNull || err != nil {
return min, isNull, errors.Trace(err)
}
if v < min {
min = v
}
}
return
}
type builtinLeastDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinLeastDecimalSig) Clone() builtinFunc {
newSig := &builtinLeastDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalDecimal evals a builtinLeastDecimalSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_least
func (b *builtinLeastDecimalSig) evalDecimal(row chunk.Row) (min *types.MyDecimal, isNull bool, err error) {
min, isNull, err = b.args[0].EvalDecimal(b.ctx, row)
if isNull || err != nil {
return min, isNull, errors.Trace(err)
}
for i := 1; i < len(b.args); i++ {
var v *types.MyDecimal
v, isNull, err = b.args[i].EvalDecimal(b.ctx, row)
if isNull || err != nil {
return min, isNull, errors.Trace(err)
}
if v.Compare(min) < 0 {
min = v
}
}
return
}
type builtinLeastStringSig struct {
baseBuiltinFunc
}
func (b *builtinLeastStringSig) Clone() builtinFunc {
newSig := &builtinLeastStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalString evals a builtinLeastStringSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_least
func (b *builtinLeastStringSig) evalString(row chunk.Row) (min string, isNull bool, err error) {
min, isNull, err = b.args[0].EvalString(b.ctx, row)
if isNull || err != nil {
return min, isNull, errors.Trace(err)
}
for i := 1; i < len(b.args); i++ {
var v string
v, isNull, err = b.args[i].EvalString(b.ctx, row)
if isNull || err != nil {
return min, isNull, errors.Trace(err)
}
if types.CompareString(v, min) < 0 {
min = v
}
}
return
}
type builtinLeastTimeSig struct {
baseBuiltinFunc
}
func (b *builtinLeastTimeSig) Clone() builtinFunc {
newSig := &builtinLeastTimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalString evals a builtinLeastTimeSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_least
func (b *builtinLeastTimeSig) evalString(row chunk.Row) (res string, isNull bool, err error) {
var (
v string
t types.Time
)
min := types.Time{
Time: types.MaxDatetime,
Type: mysql.TypeDatetime,
Fsp: types.MaxFsp,
}
findInvalidTime := false
sc := b.ctx.GetSessionVars().StmtCtx
for i := 0; i < len(b.args); i++ {
v, isNull, err = b.args[i].EvalString(b.ctx, row)
if isNull || err != nil {
return "", true, errors.Trace(err)
}
t, err = types.ParseDatetime(sc, v)
if err != nil {
if err = handleInvalidTimeError(b.ctx, err); err != nil {
return v, true, errors.Trace(err)
} else if !findInvalidTime {
res = v
findInvalidTime = true
}
}
if t.Compare(min) < 0 {
min = t
}
}
if !findInvalidTime {
res = min.String()
}
return res, false, nil
}
type intervalFunctionClass struct {
baseFunctionClass
}
func (c *intervalFunctionClass) getFunction(ctx sessionctx.Context, args []Expression) (builtinFunc, error) {
if err := c.verifyArgs(args); err != nil {
return nil, errors.Trace(err)
}
allInt := true
for i := range args {
if args[i].GetType().EvalType() != types.ETInt {
allInt = false
}
}
argTps, argTp := make([]types.EvalType, 0, len(args)), types.ETReal
if allInt {
argTp = types.ETInt
}
for range args {
argTps = append(argTps, argTp)
}
bf := newBaseBuiltinFuncWithTp(ctx, args, types.ETInt, argTps...)
var sig builtinFunc
if allInt {
sig = &builtinIntervalIntSig{bf}
} else {
sig = &builtinIntervalRealSig{bf}
}
return sig, nil
}
<|fim▁hole|>}
func (b *builtinIntervalIntSig) Clone() builtinFunc {
newSig := &builtinIntervalIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalInt evals a builtinIntervalIntSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_interval
func (b *builtinIntervalIntSig) evalInt(row chunk.Row) (int64, bool, error) {
args0, isNull, err := b.args[0].EvalInt(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
if isNull {
return -1, false, nil
}
idx, err := b.binSearch(args0, mysql.HasUnsignedFlag(b.args[0].GetType().Flag), b.args[1:], row)
return int64(idx), err != nil, errors.Trace(err)
}
// binSearch is a binary search method.
// All arguments are treated as integers.
// It is required that args[0] < args[1] < args[2] < ... < args[n] for this function to work correctly.
// This is because a binary search is used (very fast).
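// For example, per the MySQL docs, INTERVAL(23, 1, 15, 17, 30, 44, 200)
// returns 3 because 17 <= 23 < 30.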
func (b *builtinIntervalIntSig) binSearch(target int64, isUint1 bool, args []Expression, row chunk.Row) (_ int, err error) {
i, j, cmp := 0, len(args), false
for i < j {
mid := i + (j-i)/2
v, isNull, err1 := args[mid].EvalInt(b.ctx, row)
if err1 != nil {
err = err1
break
}
if isNull {
v = target
}
isUint2 := mysql.HasUnsignedFlag(args[mid].GetType().Flag)
switch {
case !isUint1 && !isUint2:
cmp = target < v
case isUint1 && isUint2:
cmp = uint64(target) < uint64(v)
case !isUint1 && isUint2:
cmp = target < 0 || uint64(target) < uint64(v)
case isUint1 && !isUint2:
cmp = v > 0 && uint64(target) < uint64(v)
}
if !cmp {
i = mid + 1
} else {
j = mid
}
}
return i, errors.Trace(err)
}
type builtinIntervalRealSig struct {
baseBuiltinFunc
}
func (b *builtinIntervalRealSig) Clone() builtinFunc {
newSig := &builtinIntervalRealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
// evalInt evals a builtinIntervalRealSig.
// See http://dev.mysql.com/doc/refman/5.7/en/comparison-operators.html#function_interval
func (b *builtinIntervalRealSig) evalInt(row chunk.Row) (int64, bool, error) {
args0, isNull, err := b.args[0].EvalReal(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
if isNull {
return -1, false, nil
}
idx, err := b.binSearch(args0, b.args[1:], row)
return int64(idx), err != nil, errors.Trace(err)
}
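// binSearch performs the same ordered lookup as the integer version above;
// a NULL element is treated as not greater than the target.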
func (b *builtinIntervalRealSig) binSearch(target float64, args []Expression, row chunk.Row) (_ int, err error) {
i, j := 0, len(args)
for i < j {
mid := i + (j-i)/2
v, isNull, err1 := args[mid].EvalReal(b.ctx, row)
if err1 != nil {
err = err1
break
}
if isNull {
i = mid + 1
} else if cmp := target < v; !cmp {
i = mid + 1
} else {
j = mid
}
}
return i, errors.Trace(err)
}
type compareFunctionClass struct {
baseFunctionClass
op opcode.Op
}
// getBaseCmpType gets the EvalType that the two args will be treated as when comparing.
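// For example, int vs. int compares as ETInt, int vs. decimal as ETDecimal,
// string vs. string as ETString, and everything else falls back to ETReal.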
func getBaseCmpType(lhs, rhs types.EvalType, lft, rft *types.FieldType) types.EvalType {
if lft.Tp == mysql.TypeUnspecified || rft.Tp == mysql.TypeUnspecified {
if lft.Tp == rft.Tp {
return types.ETString
}
if lft.Tp == mysql.TypeUnspecified {
lhs = rhs
} else {
rhs = lhs
}
}
if lhs.IsStringKind() && rhs.IsStringKind() {
return types.ETString
} else if (lhs == types.ETInt || lft.Hybrid()) && (rhs == types.ETInt || rft.Hybrid()) {
return types.ETInt
} else if ((lhs == types.ETInt || lft.Hybrid()) || lhs == types.ETDecimal) &&
((rhs == types.ETInt || rft.Hybrid()) || rhs == types.ETDecimal) {
return types.ETDecimal
}
return types.ETReal
}
// GetAccurateCmpType uses more complex logic than getBaseCmpType to decide the EvalType
// under which the two args are compared with each other.
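// For example, a string compared with a JSON value is compared as JSON,
// and a string compared with a DATETIME value is compared as DATETIME.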
func GetAccurateCmpType(lhs, rhs Expression) types.EvalType {
lhsFieldType, rhsFieldType := lhs.GetType(), rhs.GetType()
lhsEvalType, rhsEvalType := lhsFieldType.EvalType(), rhsFieldType.EvalType()
cmpType := getBaseCmpType(lhsEvalType, rhsEvalType, lhsFieldType, rhsFieldType)
if (lhsEvalType.IsStringKind() && rhsFieldType.Tp == mysql.TypeJSON) ||
(lhsFieldType.Tp == mysql.TypeJSON && rhsEvalType.IsStringKind()) {
cmpType = types.ETJson
} else if cmpType == types.ETString && (types.IsTypeTime(lhsFieldType.Tp) || types.IsTypeTime(rhsFieldType.Tp)) {
// date[time] <cmp> date[time]
// string <cmp> date[time]
// compare as time
if lhsFieldType.Tp == rhsFieldType.Tp {
cmpType = lhsFieldType.EvalType()
} else {
cmpType = types.ETDatetime
}
} else if lhsFieldType.Tp == mysql.TypeDuration && rhsFieldType.Tp == mysql.TypeDuration {
// duration <cmp> duration
// compare as duration
cmpType = types.ETDuration
} else if cmpType == types.ETReal || cmpType == types.ETString {
_, isLHSConst := lhs.(*Constant)
_, isRHSConst := rhs.(*Constant)
if (lhsEvalType == types.ETDecimal && !isLHSConst && rhsEvalType.IsStringKind() && isRHSConst) ||
(rhsEvalType == types.ETDecimal && !isRHSConst && lhsEvalType.IsStringKind() && isLHSConst) {
/*
<non-const decimal expression> <cmp> <const string expression>
or
<const string expression> <cmp> <non-const decimal expression>
Do comparison as decimal rather than float, in order not to lose precision.
*/
cmpType = types.ETDecimal
} else if isTemporalColumn(lhs) && isRHSConst ||
isTemporalColumn(rhs) && isLHSConst {
/*
<temporal column> <cmp> <non-temporal constant>
or
<non-temporal constant> <cmp> <temporal column>
Convert the constant to temporal type.
*/
col, isLHSColumn := lhs.(*Column)
if !isLHSColumn {
col = rhs.(*Column)
}
if col.GetType().Tp == mysql.TypeDuration {
cmpType = types.ETDuration
} else {
cmpType = types.ETDatetime
}
}
}
return cmpType
}
// isTemporalColumn checks if an expression is a temporal column;
// a temporal column is either a time column or a duration column.
func isTemporalColumn(expr Expression) bool {
ft := expr.GetType()
if _, isCol := expr.(*Column); !isCol {
return false
}
if !types.IsTypeTime(ft.Tp) && ft.Tp != mysql.TypeDuration {
return false
}
return true
}
// tryToConvertConstantInt tries to convert a constant of some other type to an int constant.
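// If the conversion to int fails, the constant is returned unchanged and
// isAlwaysFalse reports whether the failure was an overflow, letting the
// caller fold the comparison to a constant.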
func tryToConvertConstantInt(ctx sessionctx.Context, isUnsigned bool, con *Constant) (_ *Constant, isAlwaysFalse bool) {
if con.GetType().EvalType() == types.ETInt {
return con, false
}
dt, err := con.Eval(chunk.Row{})
if err != nil {
return con, false
}
sc := ctx.GetSessionVars().StmtCtx
fieldType := types.NewFieldType(mysql.TypeLonglong)
if isUnsigned {
fieldType.Flag |= mysql.UnsignedFlag
}
dt, err = dt.ConvertTo(sc, fieldType)
if err != nil {
return con, terror.ErrorEqual(err, types.ErrOverflow)
}
return &Constant{
Value: dt,
RetType: fieldType,
DeferredExpr: con.DeferredExpr,
}, false
}
// RefineComparedConstant changes a non-integer constant argument to its ceiling or floor result according to the given op.
// isAlwaysFalse indicates that the constant cannot match any int value, so the comparison can be folded to a constant.
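// For example, with op LT the constant 1.1 becomes CEIL(1.1) = 2; with op LE
// it becomes FLOOR(1.1) = 1; and for EQ, "int_col = 1.1" is reported as
// always false.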
func RefineComparedConstant(ctx sessionctx.Context, isUnsigned bool, con *Constant, op opcode.Op) (_ *Constant, isAlwaysFalse bool) {
dt, err := con.Eval(chunk.Row{})
if err != nil {
return con, false
}
sc := ctx.GetSessionVars().StmtCtx
intFieldType := types.NewFieldType(mysql.TypeLonglong)
if isUnsigned {
intFieldType.Flag |= mysql.UnsignedFlag
}
var intDatum types.Datum
intDatum, err = dt.ConvertTo(sc, intFieldType)
if err != nil {
return con, terror.ErrorEqual(err, types.ErrOverflow)
}
c, err := intDatum.CompareDatum(sc, &con.Value)
if err != nil {
return con, false
}
if c == 0 {
return &Constant{
Value: intDatum,
RetType: intFieldType,
DeferredExpr: con.DeferredExpr,
}, false
}
switch op {
case opcode.LT, opcode.GE:
resultExpr := NewFunctionInternal(ctx, ast.Ceil, types.NewFieldType(mysql.TypeUnspecified), con)
if resultCon, ok := resultExpr.(*Constant); ok {
return tryToConvertConstantInt(ctx, isUnsigned, resultCon)
}
case opcode.LE, opcode.GT:
resultExpr := NewFunctionInternal(ctx, ast.Floor, types.NewFieldType(mysql.TypeUnspecified), con)
if resultCon, ok := resultExpr.(*Constant); ok {
return tryToConvertConstantInt(ctx, isUnsigned, resultCon)
}
case opcode.NullEQ, opcode.EQ:
switch con.RetType.EvalType() {
// An integer value equal or NULL-safe equal to a float value which contains
// non-zero decimal digits is definitely false.
// e.g.,
// 1. "integer = 1.1" is definitely false.
// 2. "integer <=> 1.1" is definitely false.
case types.ETReal, types.ETDecimal:
return con, true
case types.ETString:
// We try to convert the string constant to double.
// If the double result equals the int result, we can return the int result;
// otherwise, the compare function will be false.
var doubleDatum types.Datum
doubleDatum, err = dt.ConvertTo(sc, types.NewFieldType(mysql.TypeDouble))
if err != nil {
return con, false
}
if c, err = doubleDatum.CompareDatum(sc, &intDatum); err != nil {
return con, false
}
if c != 0 {
return con, true
}
return &Constant{
Value: intDatum,
RetType: intFieldType,
DeferredExpr: con.DeferredExpr,
}, false
}
}
return con, false
}
// refineArgs will rewrite the arguments if the compare expression is `int column <cmp> non-int constant` or
// `non-int constant <cmp> int column`. E.g., `a < 1.1` will be rewritten to `a < 2`.
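// The mirrored form `1.1 <cmp> a` is handled by flipping the operator via
// symmetricOp before refining the constant.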
func (c *compareFunctionClass) refineArgs(ctx sessionctx.Context, args []Expression) []Expression {
arg0Type, arg1Type := args[0].GetType(), args[1].GetType()
arg0IsInt := arg0Type.EvalType() == types.ETInt
arg1IsInt := arg1Type.EvalType() == types.ETInt
arg0, arg0IsCon := args[0].(*Constant)
arg1, arg1IsCon := args[1].(*Constant)
isAlways, finalArg0, finalArg1 := false, args[0], args[1]
// int non-constant [cmp] non-int constant
if arg0IsInt && !arg0IsCon && !arg1IsInt && arg1IsCon {
finalArg1, isAlways = RefineComparedConstant(ctx, mysql.HasUnsignedFlag(arg0Type.Flag), arg1, c.op)
}
// non-int constant [cmp] int non-constant
if arg1IsInt && !arg1IsCon && !arg0IsInt && arg0IsCon {
finalArg0, isAlways = RefineComparedConstant(ctx, mysql.HasUnsignedFlag(arg1Type.Flag), arg0, symmetricOp[c.op])
}
if !isAlways {
return []Expression{finalArg0, finalArg1}
}
switch c.op {
case opcode.LT, opcode.LE:
// This will always be true.
return []Expression{Zero.Clone(), One.Clone()}
case opcode.EQ, opcode.NullEQ, opcode.GT, opcode.GE:
// This will always be false.
return []Expression{One.Clone(), Zero.Clone()}
}
return args
}
// getFunction sets compare built-in function signatures for various types.
func (c *compareFunctionClass) getFunction(ctx sessionctx.Context, rawArgs []Expression) (sig builtinFunc, err error) {
if err = c.verifyArgs(rawArgs); err != nil {
return nil, errors.Trace(err)
}
args := c.refineArgs(ctx, rawArgs)
cmpType := GetAccurateCmpType(args[0], args[1])
sig, err = c.generateCmpSigs(ctx, args, cmpType)
return sig, errors.Trace(err)
}
// generateCmpSigs generates compare function signatures.
func (c *compareFunctionClass) generateCmpSigs(ctx sessionctx.Context, args []Expression, tp types.EvalType) (sig builtinFunc, err error) {
bf := newBaseBuiltinFuncWithTp(ctx, args, types.ETInt, tp, tp)
if tp == types.ETJson {
// In compare, if we cast string to JSON, we shouldn't parse it.
for i := range args {
args[i].GetType().Flag &= ^mysql.ParseToJSONFlag
}
}
bf.tp.Flen = 1
switch tp {
case types.ETInt:
switch c.op {
case opcode.LT:
sig = &builtinLTIntSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LTInt)
case opcode.LE:
sig = &builtinLEIntSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LEInt)
case opcode.GT:
sig = &builtinGTIntSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GTInt)
case opcode.EQ:
sig = &builtinEQIntSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_EQInt)
case opcode.GE:
sig = &builtinGEIntSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GEInt)
case opcode.NE:
sig = &builtinNEIntSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NEInt)
case opcode.NullEQ:
sig = &builtinNullEQIntSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NullEQInt)
}
case types.ETReal:
switch c.op {
case opcode.LT:
sig = &builtinLTRealSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LTReal)
case opcode.LE:
sig = &builtinLERealSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LEReal)
case opcode.GT:
sig = &builtinGTRealSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GTReal)
case opcode.GE:
sig = &builtinGERealSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GEReal)
case opcode.EQ:
sig = &builtinEQRealSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_EQReal)
case opcode.NE:
sig = &builtinNERealSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NEReal)
case opcode.NullEQ:
sig = &builtinNullEQRealSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NullEQReal)
}
case types.ETDecimal:
switch c.op {
case opcode.LT:
sig = &builtinLTDecimalSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LTDecimal)
case opcode.LE:
sig = &builtinLEDecimalSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LEDecimal)
case opcode.GT:
sig = &builtinGTDecimalSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GTDecimal)
case opcode.GE:
sig = &builtinGEDecimalSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GEDecimal)
case opcode.EQ:
sig = &builtinEQDecimalSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_EQDecimal)
case opcode.NE:
sig = &builtinNEDecimalSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NEDecimal)
case opcode.NullEQ:
sig = &builtinNullEQDecimalSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NullEQDecimal)
}
case types.ETString:
switch c.op {
case opcode.LT:
sig = &builtinLTStringSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LTString)
case opcode.LE:
sig = &builtinLEStringSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LEString)
case opcode.GT:
sig = &builtinGTStringSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GTString)
case opcode.GE:
sig = &builtinGEStringSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GEString)
case opcode.EQ:
sig = &builtinEQStringSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_EQString)
case opcode.NE:
sig = &builtinNEStringSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NEString)
case opcode.NullEQ:
sig = &builtinNullEQStringSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NullEQString)
}
case types.ETDuration:
switch c.op {
case opcode.LT:
sig = &builtinLTDurationSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LTDuration)
case opcode.LE:
sig = &builtinLEDurationSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LEDuration)
case opcode.GT:
sig = &builtinGTDurationSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GTDuration)
case opcode.GE:
sig = &builtinGEDurationSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GEDuration)
case opcode.EQ:
sig = &builtinEQDurationSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_EQDuration)
case opcode.NE:
sig = &builtinNEDurationSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NEDuration)
case opcode.NullEQ:
sig = &builtinNullEQDurationSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NullEQDuration)
}
case types.ETDatetime, types.ETTimestamp:
switch c.op {
case opcode.LT:
sig = &builtinLTTimeSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LTTime)
case opcode.LE:
sig = &builtinLETimeSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LETime)
case opcode.GT:
sig = &builtinGTTimeSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GTTime)
case opcode.GE:
sig = &builtinGETimeSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GETime)
case opcode.EQ:
sig = &builtinEQTimeSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_EQTime)
case opcode.NE:
sig = &builtinNETimeSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NETime)
case opcode.NullEQ:
sig = &builtinNullEQTimeSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NullEQTime)
}
case types.ETJson:
switch c.op {
case opcode.LT:
sig = &builtinLTJSONSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LTJson)
case opcode.LE:
sig = &builtinLEJSONSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_LEJson)
case opcode.GT:
sig = &builtinGTJSONSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GTJson)
case opcode.GE:
sig = &builtinGEJSONSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_GEJson)
case opcode.EQ:
sig = &builtinEQJSONSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_EQJson)
case opcode.NE:
sig = &builtinNEJSONSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NEJson)
case opcode.NullEQ:
sig = &builtinNullEQJSONSig{bf}
sig.setPbCode(tipb.ScalarFuncSig_NullEQJson)
}
}
return
}
type builtinLTIntSig struct {
baseBuiltinFunc
}
func (b *builtinLTIntSig) Clone() builtinFunc {
newSig := &builtinLTIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLTIntSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLT(compareInt(b.ctx, b.args, row))
}
type builtinLTRealSig struct {
baseBuiltinFunc
}
func (b *builtinLTRealSig) Clone() builtinFunc {
newSig := &builtinLTRealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLTRealSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLT(compareReal(b.ctx, b.args, row))
}
type builtinLTDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinLTDecimalSig) Clone() builtinFunc {
newSig := &builtinLTDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLTDecimalSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLT(compareDecimal(b.ctx, b.args, row))
}
type builtinLTStringSig struct {
baseBuiltinFunc
}
func (b *builtinLTStringSig) Clone() builtinFunc {
newSig := &builtinLTStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLTStringSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLT(compareString(b.args, row, b.ctx))
}
type builtinLTDurationSig struct {
baseBuiltinFunc
}
func (b *builtinLTDurationSig) Clone() builtinFunc {
newSig := &builtinLTDurationSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLTDurationSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLT(compareDuration(b.args, row, b.ctx))
}
type builtinLTTimeSig struct {
baseBuiltinFunc
}
func (b *builtinLTTimeSig) Clone() builtinFunc {
newSig := &builtinLTTimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLTTimeSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLT(compareTime(b.ctx, b.args, row))
}
type builtinLTJSONSig struct {
baseBuiltinFunc
}
func (b *builtinLTJSONSig) Clone() builtinFunc {
newSig := &builtinLTJSONSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLTJSONSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLT(compareJSON(b.ctx, b.args, row))
}
type builtinLEIntSig struct {
baseBuiltinFunc
}
func (b *builtinLEIntSig) Clone() builtinFunc {
newSig := &builtinLEIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLEIntSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLE(compareInt(b.ctx, b.args, row))
}
type builtinLERealSig struct {
baseBuiltinFunc
}
func (b *builtinLERealSig) Clone() builtinFunc {
newSig := &builtinLERealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLERealSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLE(compareReal(b.ctx, b.args, row))
}
type builtinLEDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinLEDecimalSig) Clone() builtinFunc {
newSig := &builtinLEDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLEDecimalSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLE(compareDecimal(b.ctx, b.args, row))
}
type builtinLEStringSig struct {
baseBuiltinFunc
}
func (b *builtinLEStringSig) Clone() builtinFunc {
newSig := &builtinLEStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLEStringSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLE(compareString(b.args, row, b.ctx))
}
type builtinLEDurationSig struct {
baseBuiltinFunc
}
func (b *builtinLEDurationSig) Clone() builtinFunc {
newSig := &builtinLEDurationSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLEDurationSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLE(compareDuration(b.args, row, b.ctx))
}
type builtinLETimeSig struct {
baseBuiltinFunc
}
func (b *builtinLETimeSig) Clone() builtinFunc {
newSig := &builtinLETimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLETimeSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLE(compareTime(b.ctx, b.args, row))
}
type builtinLEJSONSig struct {
baseBuiltinFunc
}
func (b *builtinLEJSONSig) Clone() builtinFunc {
newSig := &builtinLEJSONSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinLEJSONSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfLE(compareJSON(b.ctx, b.args, row))
}
type builtinGTIntSig struct {
baseBuiltinFunc
}
func (b *builtinGTIntSig) Clone() builtinFunc {
newSig := &builtinGTIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGTIntSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGT(compareInt(b.ctx, b.args, row))
}
type builtinGTRealSig struct {
baseBuiltinFunc
}
func (b *builtinGTRealSig) Clone() builtinFunc {
newSig := &builtinGTRealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGTRealSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGT(compareReal(b.ctx, b.args, row))
}
type builtinGTDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinGTDecimalSig) Clone() builtinFunc {
newSig := &builtinGTDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGTDecimalSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGT(compareDecimal(b.ctx, b.args, row))
}
type builtinGTStringSig struct {
baseBuiltinFunc
}
func (b *builtinGTStringSig) Clone() builtinFunc {
newSig := &builtinGTStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGTStringSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGT(compareString(b.args, row, b.ctx))
}
type builtinGTDurationSig struct {
baseBuiltinFunc
}
func (b *builtinGTDurationSig) Clone() builtinFunc {
newSig := &builtinGTDurationSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGTDurationSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGT(compareDuration(b.args, row, b.ctx))
}
type builtinGTTimeSig struct {
baseBuiltinFunc
}
func (b *builtinGTTimeSig) Clone() builtinFunc {
newSig := &builtinGTTimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGTTimeSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGT(compareTime(b.ctx, b.args, row))
}
type builtinGTJSONSig struct {
baseBuiltinFunc
}
func (b *builtinGTJSONSig) Clone() builtinFunc {
newSig := &builtinGTJSONSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGTJSONSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGT(compareJSON(b.ctx, b.args, row))
}
type builtinGEIntSig struct {
baseBuiltinFunc
}
func (b *builtinGEIntSig) Clone() builtinFunc {
newSig := &builtinGEIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGEIntSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGE(compareInt(b.ctx, b.args, row))
}
type builtinGERealSig struct {
baseBuiltinFunc
}
func (b *builtinGERealSig) Clone() builtinFunc {
newSig := &builtinGERealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGERealSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGE(compareReal(b.ctx, b.args, row))
}
type builtinGEDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinGEDecimalSig) Clone() builtinFunc {
newSig := &builtinGEDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGEDecimalSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGE(compareDecimal(b.ctx, b.args, row))
}
type builtinGEStringSig struct {
baseBuiltinFunc
}
func (b *builtinGEStringSig) Clone() builtinFunc {
newSig := &builtinGEStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGEStringSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGE(compareString(b.args, row, b.ctx))
}
type builtinGEDurationSig struct {
baseBuiltinFunc
}
func (b *builtinGEDurationSig) Clone() builtinFunc {
newSig := &builtinGEDurationSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGEDurationSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGE(compareDuration(b.args, row, b.ctx))
}
type builtinGETimeSig struct {
baseBuiltinFunc
}
func (b *builtinGETimeSig) Clone() builtinFunc {
newSig := &builtinGETimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGETimeSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGE(compareTime(b.ctx, b.args, row))
}
type builtinGEJSONSig struct {
baseBuiltinFunc
}
func (b *builtinGEJSONSig) Clone() builtinFunc {
newSig := &builtinGEJSONSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinGEJSONSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfGE(compareJSON(b.ctx, b.args, row))
}
type builtinEQIntSig struct {
baseBuiltinFunc
}
func (b *builtinEQIntSig) Clone() builtinFunc {
newSig := &builtinEQIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinEQIntSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfEQ(compareInt(b.ctx, b.args, row))
}
type builtinEQRealSig struct {
baseBuiltinFunc
}
func (b *builtinEQRealSig) Clone() builtinFunc {
newSig := &builtinEQRealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinEQRealSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfEQ(compareReal(b.ctx, b.args, row))
}
type builtinEQDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinEQDecimalSig) Clone() builtinFunc {
newSig := &builtinEQDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinEQDecimalSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfEQ(compareDecimal(b.ctx, b.args, row))
}
type builtinEQStringSig struct {
baseBuiltinFunc
}
func (b *builtinEQStringSig) Clone() builtinFunc {
newSig := &builtinEQStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinEQStringSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfEQ(compareString(b.args, row, b.ctx))
}
type builtinEQDurationSig struct {
baseBuiltinFunc
}
func (b *builtinEQDurationSig) Clone() builtinFunc {
newSig := &builtinEQDurationSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinEQDurationSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfEQ(compareDuration(b.args, row, b.ctx))
}
type builtinEQTimeSig struct {
baseBuiltinFunc
}
func (b *builtinEQTimeSig) Clone() builtinFunc {
newSig := &builtinEQTimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinEQTimeSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfEQ(compareTime(b.ctx, b.args, row))
}
type builtinEQJSONSig struct {
baseBuiltinFunc
}
func (b *builtinEQJSONSig) Clone() builtinFunc {
newSig := &builtinEQJSONSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinEQJSONSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfEQ(compareJSON(b.ctx, b.args, row))
}
type builtinNEIntSig struct {
baseBuiltinFunc
}
func (b *builtinNEIntSig) Clone() builtinFunc {
newSig := &builtinNEIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNEIntSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfNE(compareInt(b.ctx, b.args, row))
}
type builtinNERealSig struct {
baseBuiltinFunc
}
func (b *builtinNERealSig) Clone() builtinFunc {
newSig := &builtinNERealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNERealSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfNE(compareReal(b.ctx, b.args, row))
}
type builtinNEDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinNEDecimalSig) Clone() builtinFunc {
newSig := &builtinNEDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNEDecimalSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfNE(compareDecimal(b.ctx, b.args, row))
}
type builtinNEStringSig struct {
baseBuiltinFunc
}
func (b *builtinNEStringSig) Clone() builtinFunc {
newSig := &builtinNEStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNEStringSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfNE(compareString(b.args, row, b.ctx))
}
type builtinNEDurationSig struct {
baseBuiltinFunc
}
func (b *builtinNEDurationSig) Clone() builtinFunc {
newSig := &builtinNEDurationSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNEDurationSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfNE(compareDuration(b.args, row, b.ctx))
}
type builtinNETimeSig struct {
baseBuiltinFunc
}
func (b *builtinNETimeSig) Clone() builtinFunc {
newSig := &builtinNETimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNETimeSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfNE(compareTime(b.ctx, b.args, row))
}
type builtinNEJSONSig struct {
baseBuiltinFunc
}
func (b *builtinNEJSONSig) Clone() builtinFunc {
newSig := &builtinNEJSONSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNEJSONSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
return resOfNE(compareJSON(b.ctx, b.args, row))
}
type builtinNullEQIntSig struct {
baseBuiltinFunc
}
func (b *builtinNullEQIntSig) Clone() builtinFunc {
newSig := &builtinNullEQIntSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNullEQIntSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := b.args[0].EvalInt(b.ctx, row)
if err != nil {
return 0, isNull0, errors.Trace(err)
}
arg1, isNull1, err := b.args[1].EvalInt(b.ctx, row)
if err != nil {
return 0, isNull1, errors.Trace(err)
}
isUnsigned0, isUnsigned1 := mysql.HasUnsignedFlag(b.args[0].GetType().Flag), mysql.HasUnsignedFlag(b.args[1].GetType().Flag)
var res int64
switch {
case isNull0 && isNull1:
res = 1
case isNull0 != isNull1:
break
case isUnsigned0 && isUnsigned1 && types.CompareUint64(uint64(arg0), uint64(arg1)) == 0:
res = 1
case !isUnsigned0 && !isUnsigned1 && types.CompareInt64(arg0, arg1) == 0:
res = 1
case isUnsigned0 && !isUnsigned1:
if arg1 < 0 || uint64(arg0) > math.MaxInt64 {
break
}
if types.CompareInt64(arg0, arg1) == 0 {
res = 1
}
case !isUnsigned0 && isUnsigned1:
if arg0 < 0 || uint64(arg1) > math.MaxInt64 {
break
}
if types.CompareInt64(arg0, arg1) == 0 {
res = 1
}
}
return res, false, nil
}
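// Note (illustrative, not part of the original source): the NullEQ
// signatures implement MySQL's NULL-safe equality operator <=>, which never
// returns NULL: two NULLs compare equal, NULL against non-NULL yields 0, and
// otherwise it behaves like =. For example, in MySQL:
//
//	SELECT NULL <=> NULL; -- 1
//	SELECT NULL <=> 1;    -- 0
//	SELECT 1 <=> 1;       -- 1
//
// That is why every branch above returns isNull == false.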
type builtinNullEQRealSig struct {
baseBuiltinFunc
}
func (b *builtinNullEQRealSig) Clone() builtinFunc {
newSig := &builtinNullEQRealSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNullEQRealSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := b.args[0].EvalReal(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
arg1, isNull1, err := b.args[1].EvalReal(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
var res int64
switch {
case isNull0 && isNull1:
res = 1
case isNull0 != isNull1:
break
case types.CompareFloat64(arg0, arg1) == 0:
res = 1
}
return res, false, nil
}
type builtinNullEQDecimalSig struct {
baseBuiltinFunc
}
func (b *builtinNullEQDecimalSig) Clone() builtinFunc {
newSig := &builtinNullEQDecimalSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNullEQDecimalSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := b.args[0].EvalDecimal(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
arg1, isNull1, err := b.args[1].EvalDecimal(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
var res int64
switch {
case isNull0 && isNull1:
res = 1
case isNull0 != isNull1:
break
case arg0.Compare(arg1) == 0:
res = 1
}
return res, false, nil
}
type builtinNullEQStringSig struct {
baseBuiltinFunc
}
func (b *builtinNullEQStringSig) Clone() builtinFunc {
newSig := &builtinNullEQStringSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNullEQStringSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := b.args[0].EvalString(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
arg1, isNull1, err := b.args[1].EvalString(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
var res int64
switch {
case isNull0 && isNull1:
res = 1
case isNull0 != isNull1:
break
case types.CompareString(arg0, arg1) == 0:
res = 1
}
return res, false, nil
}
type builtinNullEQDurationSig struct {
baseBuiltinFunc
}
func (b *builtinNullEQDurationSig) Clone() builtinFunc {
newSig := &builtinNullEQDurationSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNullEQDurationSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := b.args[0].EvalDuration(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
arg1, isNull1, err := b.args[1].EvalDuration(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
var res int64
switch {
case isNull0 && isNull1:
res = 1
case isNull0 != isNull1:
break
case arg0.Compare(arg1) == 0:
res = 1
}
return res, false, nil
}
type builtinNullEQTimeSig struct {
baseBuiltinFunc
}
func (b *builtinNullEQTimeSig) Clone() builtinFunc {
newSig := &builtinNullEQTimeSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNullEQTimeSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := b.args[0].EvalTime(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
arg1, isNull1, err := b.args[1].EvalTime(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
var res int64
switch {
case isNull0 && isNull1:
res = 1
case isNull0 != isNull1:
break
case arg0.Compare(arg1) == 0:
res = 1
}
return res, false, nil
}
type builtinNullEQJSONSig struct {
baseBuiltinFunc
}
func (b *builtinNullEQJSONSig) Clone() builtinFunc {
newSig := &builtinNullEQJSONSig{}
newSig.cloneFrom(&b.baseBuiltinFunc)
return newSig
}
func (b *builtinNullEQJSONSig) evalInt(row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := b.args[0].EvalJSON(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
arg1, isNull1, err := b.args[1].EvalJSON(b.ctx, row)
if err != nil {
return 0, true, errors.Trace(err)
}
var res int64
switch {
case isNull0 && isNull1:
res = 1
case isNull0 != isNull1:
break
default:
cmpRes := json.CompareBinary(arg0, arg1)
if cmpRes == 0 {
res = 1
}
}
return res, false, nil
}
func resOfLT(val int64, isNull bool, err error) (int64, bool, error) {
if isNull || err != nil {
return 0, isNull, errors.Trace(err)
}
if val < 0 {
val = 1
} else {
val = 0
}
return val, false, nil
}
func resOfLE(val int64, isNull bool, err error) (int64, bool, error) {
if isNull || err != nil {
return 0, isNull, errors.Trace(err)
}
if val <= 0 {
val = 1
} else {
val = 0
}
return val, false, nil
}
func resOfGT(val int64, isNull bool, err error) (int64, bool, error) {
if isNull || err != nil {
return 0, isNull, errors.Trace(err)
}
if val > 0 {
val = 1
} else {
val = 0
}
return val, false, nil
}
func resOfGE(val int64, isNull bool, err error) (int64, bool, error) {
if isNull || err != nil {
return 0, isNull, errors.Trace(err)
}
if val >= 0 {
val = 1
} else {
val = 0
}
return val, false, nil
}
func resOfEQ(val int64, isNull bool, err error) (int64, bool, error) {
if isNull || err != nil {
return 0, isNull, errors.Trace(err)
}
if val == 0 {
val = 1
} else {
val = 0
}
return val, false, nil
}
func resOfNE(val int64, isNull bool, err error) (int64, bool, error) {
if isNull || err != nil {
return 0, isNull, errors.Trace(err)
}
if val != 0 {
val = 1
} else {
val = 0
}
return val, false, nil
}
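// Note (sketch): the resOfXX adapters above map the three-way result of the
// compareXX helpers below (-1, 0 or 1, with NULL and errors passed through)
// onto a SQL boolean. For a < b, for instance:
//
//	compare result -1 -> resOfLT yields 1 (true)
//	compare result  0 -> resOfLT yields 0 (false)
//	compare result +1 -> resOfLT yields 0 (false)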
func compareInt(ctx sessionctx.Context, args []Expression, row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := args[0].EvalInt(ctx, row)
if isNull0 || err != nil {
return 0, isNull0, errors.Trace(err)
}
arg1, isNull1, err := args[1].EvalInt(ctx, row)
if isNull1 || err != nil {
return 0, isNull1, errors.Trace(err)
}
isUnsigned0, isUnsigned1 := mysql.HasUnsignedFlag(args[0].GetType().Flag), mysql.HasUnsignedFlag(args[1].GetType().Flag)
var res int
switch {
case isUnsigned0 && isUnsigned1:
res = types.CompareUint64(uint64(arg0), uint64(arg1))
case isUnsigned0 && !isUnsigned1:
if arg1 < 0 || uint64(arg0) > math.MaxInt64 {
res = 1
} else {
res = types.CompareInt64(arg0, arg1)
}
case !isUnsigned0 && isUnsigned1:
if arg0 < 0 || uint64(arg1) > math.MaxInt64 {
res = -1
} else {
res = types.CompareInt64(arg0, arg1)
}
case !isUnsigned0 && !isUnsigned1:
res = types.CompareInt64(arg0, arg1)
}
return int64(res), false, nil
}
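// Note (hypothetical values): the mixed-signedness branches keep the result
// correct without widening both operands. If arg0 carries the unsigned value
// 2 and arg1 holds the signed value -1, the arg1 < 0 test settles the
// ordering as 1 (greater) before types.CompareInt64 could misread the raw
// bit patterns; likewise an unsigned value above math.MaxInt64 is strictly
// greater than any signed value, so those branches short-circuit.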
func compareString(args []Expression, row chunk.Row, ctx sessionctx.Context) (val int64, isNull bool, err error) {
arg0, isNull0, err := args[0].EvalString(ctx, row)
if isNull0 || err != nil {
return 0, isNull0, errors.Trace(err)
}
arg1, isNull1, err := args[1].EvalString(ctx, row)
if isNull1 || err != nil {
return 0, isNull1, errors.Trace(err)
}
return int64(types.CompareString(arg0, arg1)), false, nil
}
func compareReal(ctx sessionctx.Context, args []Expression, row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := args[0].EvalReal(ctx, row)
if isNull0 || err != nil {
return 0, isNull0, errors.Trace(err)
}
arg1, isNull1, err := args[1].EvalReal(ctx, row)
if isNull1 || err != nil {
return 0, isNull1, errors.Trace(err)
}
return int64(types.CompareFloat64(arg0, arg1)), false, nil
}
func compareDecimal(ctx sessionctx.Context, args []Expression, row chunk.Row) (val int64, isNull bool, err error) {
arg0, isNull0, err := args[0].EvalDecimal(ctx, row)
if isNull0 || err != nil {
return 0, isNull0, errors.Trace(err)
}
arg1, isNull1, err := args[1].EvalDecimal(ctx, row)
if isNull1 || err != nil {
return 0, isNull1, errors.Trace(err)
}
return int64(arg0.Compare(arg1)), false, nil
}
func compareTime(ctx sessionctx.Context, args []Expression, row chunk.Row) (int64, bool, error) {
arg0, isNull0, err := args[0].EvalTime(ctx, row)
if isNull0 || err != nil {
return 0, isNull0, errors.Trace(err)
}
arg1, isNull1, err := args[1].EvalTime(ctx, row)
if isNull1 || err != nil {
return 0, isNull1, errors.Trace(err)
}
return int64(arg0.Compare(arg1)), false, nil
}
func compareDuration(args []Expression, row chunk.Row, ctx sessionctx.Context) (int64, bool, error) {
arg0, isNull0, err := args[0].EvalDuration(ctx, row)
if isNull0 || err != nil {
return 0, isNull0, errors.Trace(err)
}
arg1, isNull1, err := args[1].EvalDuration(ctx, row)
if isNull1 || err != nil {
return 0, isNull1, errors.Trace(err)
}
return int64(arg0.Compare(arg1)), false, nil
}
func compareJSON(ctx sessionctx.Context, args []Expression, row chunk.Row) (int64, bool, error) {
arg0, isNull0, err := args[0].EvalJSON(ctx, row)
if isNull0 || err != nil {
return 0, isNull0, errors.Trace(err)
}
arg1, isNull1, err := args[1].EvalJSON(ctx, row)
if isNull1 || err != nil {
return 0, isNull1, errors.Trace(err)
}
return int64(json.CompareBinary(arg0, arg1)), false, nil
}<|fim▁end|> | type builtinIntervalIntSig struct {
baseBuiltinFunc |
<|file_name|>digraph.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use super::{HashSet, HashMap};
use super::{Node, NodeIndex, NodeIndexSet, NodeIndices, Graph, GraphError, GraphResult};
/// Directed graph. Allows loops.
pub struct Digraph<T> {
nodes: HashMap<NodeIndex, Node<T>>,
in_adj: HashMap<NodeIndex, NodeIndexSet>,
out_adj: HashMap<NodeIndex, NodeIndexSet>,
num_nodes: uint,
}
impl<T: fmt::Show> fmt::Show for Digraph<T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::FormatError> {
try!(write!(fmt, "{{"));
for n in self.nodes.values() {
try!(write!(fmt, " {} ", n.data));
}
try!(write!(fmt, "}} "));
for (&i, adj) in self.out_adj.iter() {
for &j in adj.iter() {
try!(write!(fmt, "({}, {})", self.get_node(i).data,
self.get_node(j).data));
}
}
Ok(())
}
}
impl<T> Digraph<T> {
pub fn new() -> Digraph<T> {
Digraph { nodes: HashMap::new(),
in_adj: HashMap::new(),
out_adj: HashMap::new(),
num_nodes: 0 }
}
pub fn out_degree(&self, ind: NodeIndex) -> GraphResult<uint> {
if !self.nodes.contains_key(&ind) {
Err(GraphError::invalid_index(ind))
} else {
Ok(self.get_out_adj(ind).len())
}
}
pub fn in_degree(&self, ind: NodeIndex) -> GraphResult<uint> {
// check key presence rather than an index bound: indices are not
// compacted after remove_node, so ind < num_nodes is not a valid test
if !self.nodes.contains_key(&ind) {
Err(GraphError::invalid_index(ind))
} else {
Ok(self.get_in_adj(ind).len())
}
}
fn in_adj(&self, i: NodeIndex) -> NodeIndices {
FromIterator::from_iter(self.get_in_adj(i).iter().map(|&x| x))
}
pub fn get_node<'a>(&'a self, i: NodeIndex) -> &'a Node<T> {
self.nodes.find(&i).unwrap()
}
pub fn get_in_adj<'a>(&'a self, i: NodeIndex) -> &'a NodeIndexSet {
self.in_adj.find(&i).unwrap()
}
pub fn get_out_adj<'a>(&'a self, i: NodeIndex) -> &'a NodeIndexSet {
self.out_adj.find(&i).unwrap()
}
// return true if j is an out-neighbor of i, and false otherwise
pub fn is_out_adj_to(&self, i: NodeIndex, j: NodeIndex) -> bool {
self.get_out_adj(i).contains(&j)
}
}<|fim▁hole|>
impl<T: Clone> Digraph<T> {
/// Returns a new graph induced by a set of node indices
pub fn induced_subgraph(&self, nodes: &NodeIndexSet) -> Digraph<T> {
let mut new = Digraph::new();
let mut ind_map = HashMap::new(); // maps old indices to new
for &ind in nodes.iter() {
let new_ind = new.add_node(self.get_node(ind).data.clone());
ind_map.insert(ind, new_ind);
}
for i in nodes.iter() {
let actual_i = *ind_map.find(i).unwrap();
for j in self.get_in_adj(*i).iter() {
if nodes.contains(j) {
new.add_edge(*ind_map.find(j).unwrap(), actual_i);
}
}
for j in self.get_out_adj(*i).iter() {
if nodes.contains(j) {
new.add_edge(actual_i, *ind_map.find(j).unwrap());
}
}
}
new
}
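// Usage sketch for induced_subgraph (hypothetical node values):
//
//     let mut g = Digraph::new();
//     let a = g.add_node("a");
//     let b = g.add_node("b");
//     let c = g.add_node("c");
//     g.add_edge(a, b);
//     g.add_edge(b, c);
//     let mut keep = HashSet::new();
//     keep.insert(a);
//     keep.insert(b);
//     let sub = g.induced_subgraph(&keep);
//     // sub keeps the edge a -> b; b -> c is dropped because c is absent.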
// Returns the transpose of the graph
pub fn transpose(&self) -> Digraph<T> {
let mut new = Digraph::new();
let mut ind_map = HashMap::new(); // maps old indices to new
for n in self.nodes.values() {
let new_ind = new.add_node(n.data.clone());
ind_map.insert(n.index, new_ind);
}
for &i in self.nodes.keys() {
let actual_i = *ind_map.find(&i).unwrap();
for j in self.get_in_adj(i).iter() {
new.add_edge(actual_i, *ind_map.find(j).unwrap());
}
for j in self.get_out_adj(i).iter() {
new.add_edge(*ind_map.find(j).unwrap(), actual_i);
}
}
new
}
}
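// Note: transpose copies node data unchanged and reverses every edge, so for
// the remapped indices the transpose contains j -> i exactly when the
// original contains i -> j. Illustrative check:
//
//     let t = g.transpose();
//     // every edge a -> b of g appears as b -> a in t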
impl<T> Graph<T> for Digraph<T> {
fn add_node(&mut self, val: T) -> NodeIndex {
let ind = self.num_nodes;
self.nodes.insert(ind, Node { data: val, index: ind });
self.in_adj.insert(ind, HashSet::new());
self.out_adj.insert(ind, HashSet::new());
self.num_nodes += 1;
ind
}
fn add_edge(&mut self, i: NodeIndex, j: NodeIndex) -> GraphResult<bool> {
if !self.nodes.contains_key(&i) {
Err(GraphError::invalid_index(i))
} else if !self.nodes.contains_key(&j) {
Err(GraphError::invalid_index(j))
} else {
if self.get_out_adj(i).contains(&j) {
Ok(false)
} else {
self.out_adj.find_mut(&i).unwrap().insert(j);
self.in_adj.find_mut(&j).unwrap().insert(i);
Ok(true)
}
}
}
fn remove_node(&mut self, i: NodeIndex) -> GraphResult<()> {
if !self.nodes.contains_key(&i) {
Err(GraphError::invalid_index(i))
} else {
self.nodes.remove(&i);
for j in self.reachable(i) {
self.in_adj.find_mut(&j).unwrap().remove(&i);
}
self.out_adj.remove(&i);
for j in self.in_adj(i) {
self.out_adj.find_mut(&j).unwrap().remove(&i);
}
self.in_adj.remove(&i);
self.num_nodes -= 1;
Ok(())
}
}
fn remove_edge(&mut self, i: NodeIndex, j: NodeIndex) -> GraphResult<bool> {
if !self.nodes.contains_key(&i) {
Err(GraphError::invalid_index(i))
} else if !self.nodes.contains_key(&j) {
Err(GraphError::invalid_index(j))
} else if self.is_out_adj_to(i, j) {
// drop the edge from both adjacency maps and report that it existed
self.out_adj.find_mut(&i).unwrap().remove(&j);
self.in_adj.find_mut(&j).unwrap().remove(&i);
Ok(true)
} else {
Ok(false)
}
}
fn reachable(&self, i: NodeIndex) -> NodeIndices {
FromIterator::from_iter(self.get_out_adj(i).iter().map(|&x| x))
}
fn num_nodes(&self) -> uint {
self.num_nodes
}
fn node_indices(&self) -> NodeIndices {
FromIterator::from_iter(self.nodes.keys().map(|&x| x))
}
}<|fim▁end|> | |
<|file_name|>video.rs<|end_file_name|><|fim▁begin|>// This file is part of zinc64.
// Copyright (c) 2016-2019 Sebastian Jastrzebski. All rights reserved.
// Licensed under the GPLv3. See LICENSE file in the project root for full license text.
#![cfg_attr(feature = "cargo-clippy", allow(clippy::transmute_ptr_to_ptr))]
use std::rc::Rc;
use std::result::Result;
use cgmath;
use cgmath::num_traits::zero;
use cgmath::{vec2, Vector2};
use zinc64_core::{Shared, VideoOutput};
use crate::app::AppState;
use crate::framework::Context;
use crate::gfx::{gl, sprite, Color, Rect, RectI};
pub struct VideoBuffer {
dim: (usize, usize),
palette: [u32; 16],
pixels: Vec<u32>,
}
impl VideoBuffer {
pub fn new(width: u32, height: u32, palette: [u32; 16]) -> VideoBuffer {
VideoBuffer {
dim: (width as usize, height as usize),
palette,
pixels: vec![0; (width * height) as usize],
}
}
pub fn get_pixel_data(&self) -> &[u8] {
unsafe {
let len = self.pixels.len() * core::mem::size_of::<u32>();
core::slice::from_raw_parts(self.pixels.as_ptr() as *const u8, len)<|fim▁hole|> }
}
}
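// Note: the unsafe block in get_pixel_data is a plain reinterpretation of
// the u32 pixel buffer as bytes (length scaled by size_of::<u32>()). It is
// sound here because the slice borrows self and u32 has no padding, though
// the byte order of each packed pixel follows the host's endianness.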
impl VideoOutput for VideoBuffer {
fn get_dimension(&self) -> (usize, usize) {
self.dim
}
fn reset(&mut self) {
for pixel in self.pixels.iter_mut() {
*pixel = 0x00;
}
}
fn write(&mut self, index: usize, color: u8) {
self.pixels[index] = self.palette[color as usize];
}
}
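// Sketch of the expected write path (hypothetical coordinates): the emulator
// core calls write(y * width + x, c) with a 4-bit C64 color index c; the
// palette lookup stores a packed 32-bit color, and get_pixel_data later
// exposes the whole buffer as raw bytes for the texture upload in
// VideoRenderer::render.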
pub struct VideoRenderer {
// Dependencies
video_buffer: Shared<VideoBuffer>,
// Resources
batch: sprite::Batch,
texture: Rc<gl::Texture>,
}
impl VideoRenderer {
pub fn build(ctx: &mut Context, state: &mut AppState) -> Result<VideoRenderer, String> {
let screen_size = state.c64.get_config().model.frame_buffer_size;
let viewport_offset = state.c64.get_config().model.viewport_offset;
let viewport_size = state.c64.get_config().model.viewport_size;
let video_buffer = state.video_buffer.clone();
let viewport = Rect::new(
vec2(viewport_offset.0 as f32, viewport_offset.1 as f32),
vec2(viewport_size.0 as f32, viewport_size.1 as f32),
);
let window_size = ctx.platform.windowed_context.window().inner_size();
info!("Renderer viewport {:?}", viewport);
let gl = &mut ctx.platform.gl;
let texture_size = vec2(screen_size.0, screen_size.1).cast::<i32>().unwrap();
let texture = Rc::new(gl.create_texture(texture_size)?);
let mut batch = sprite::Batch::new(gl, 1)?;
batch.set_projection(gl, viewport, false);
batch.set_viewport(
gl,
RectI::new(
zero(),
Vector2::new(window_size.width as i32, window_size.height as i32),
),
);
let renderer = VideoRenderer {
video_buffer,
batch,
texture,
};
Ok(renderer)
}
pub fn update_viewport(&mut self, ctx: &mut Context, width: i32, height: i32) {
self.batch.set_viewport(
&mut ctx.platform.gl,
RectI::new(zero(), vec2(width, height)),
);
}
pub fn render(&mut self, ctx: &mut Context) -> Result<(), String> {
let gl = &mut ctx.platform.gl;
let tex_size = self.texture.size.cast::<f32>().unwrap();
gl.set_texture_data(&self.texture, self.video_buffer.borrow().get_pixel_data());
gl.clear(Color::BLACK);
self.batch.begin(gl, Some(self.texture.clone()));
self.batch.push(
gl,
Rect::from_points(zero(), tex_size),
Rect::from_points(zero(), vec2(1.0, 1.0)),
Color::WHITE,
);
self.batch.end(gl);
Ok(())
}
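// Note: render re-uploads the full frame texture on every call and draws it
// as a single textured quad; at C64 resolution this whole-frame streaming is
// simple, and tracking dirty regions is likely unnecessary.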
}<|fim▁end|> |