file_name | prefix | suffix | middle
---|---|---|---|
Login.py | import time
from conf.constant import TYPE_LOGIN_NORMAL_WAY, TYPE_LOGIN_OTHER_WAY
from conf.urls_conf import loginUrls
from configure import USER_PWD, USER_NAME
from net.NetUtils import EasyHttp
from train.login.Capthca import Captcha
from utils import Utils
from utils.Log import Log
from conf.constant import CAPTCHA_CHECK_METHOD_HAND, CAPTCHA_CHECK_METHOD_THREE, CAPTCHA_CHECK_METHOD_MYSELF
def loginLogic(func):
def wrapper(*args, **kw):
result = False
msg = ''
for count in range(10):
Log.v('Attempt %s to fetch the captcha image' % str(count + 1))
result, msg = func(*args, **kw)
if result:
break
Log.w(msg)
return result, msg
return wrapper
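# Illustrative sketch (not part of the original file): the decorator above
# retries the wrapped callable up to 10 times and returns the first successful
# (result, msg) pair. A hypothetical flaky function would be used like this:
#
#   @loginLogic
#   def fetch_captcha():
#       ok = try_fetch()  # hypothetical helper
#       return ok, 'ok' if ok else 'transient failure'
#
#   result, msg = fetch_captcha()  # retried up to 10 times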
class Login(object):
__LOGIN_SUCCESS_RESULT_CODE = 0 | 'redirect': '/otn/login/userLogin',
}
EasyHttp.send(self._urlInfo['userLoginRedirect'])
def _userLogin(self):
params = {
'_json_att': '',
}
EasyHttp.send(self._urlInfo['userLogin'])
def _uamtk(self):
jsonRet = EasyHttp.send(self._urlInfo['uamtk'], data={'appid': 'otn'})
def isSuccess(response):
return response['result_code'] == 0 if 'result_code' in response else False
return isSuccess(jsonRet), \
jsonRet['result_message'] if 'result_message' in jsonRet else 'no result_message', \
jsonRet['newapptk'] if 'newapptk' in jsonRet else 'no newapptk'
def _uamauthclient(self, apptk):
jsonRet = EasyHttp.send(self._urlInfo['uamauthclient'], data={'tk': apptk})
# print(jsonRet)
def isSuccess(response):
return response['result_code'] == 0 if response and 'result_code' in response else False
return isSuccess(jsonRet), '%s:%s' % (jsonRet['username'], jsonRet['result_message']) if jsonRet \
else 'uamauthclient failed'
def login(self, userName, userPwd, autoCheck=2):
# There are two login APIs; try each in turn
for count in range(2):
result, msg = self._login(userName, userPwd, autoCheck, type=(count % 2))
if Utils.check(result, msg):
return result, msg
return False, 'Login failed'
@loginLogic
def _login(self, userName, userPwd, autoCheck=2, type=TYPE_LOGIN_NORMAL_WAY):
if type == TYPE_LOGIN_OTHER_WAY:
self._urlInfo = loginUrls['other']
return self._loginAsyncSuggest(userName, userPwd, autoCheck)
self._urlInfo = loginUrls['normal']
return self._loginNormal(userName, userPwd, autoCheck)
def _loginNormal(self, userName, userPwd, autoCheck=2):
self._init()
self._uamtk()
if autoCheck == CAPTCHA_CHECK_METHOD_THREE:
if not Captcha().verifyCodeAuto()[1]:
return False, 'Captcha recognition failed!'
elif autoCheck == CAPTCHA_CHECK_METHOD_HAND:
if not Captcha().verifyCaptchaByHand()[1]:
return False, 'Captcha recognition failed!'
else:
if not Captcha().verifyCodeAutoByMyself()[1]:
return False, 'Captcha recognition failed!'
payload = {
'username': userName,
'password': userPwd,
'appid': 'otn',
}
jsonRet = EasyHttp.send(self._urlInfo['login'], data=payload)
def isLoginSuccess(responseJson):
return 0 == responseJson['result_code'] if responseJson and 'result_code' in responseJson else False, \
responseJson[
'result_message'] if responseJson and 'result_message' in responseJson else 'login failed'
result, msg = isLoginSuccess(jsonRet)
if not result:
return False, msg
# self._userLogin()
self._passportRedirect()
result, msg, apptk = self._uamtk()
if not Utils.check(result, msg):
return False, 'uamtk failed'
return self._uamauthclient(apptk)
def _loginAsyncSuggest(self, userName, userPwd, autoCheck=2):
self._init()
if autoCheck == CAPTCHA_CHECK_METHOD_THREE:
results, verify = Captcha().verifyCodeAuto(type=TYPE_LOGIN_OTHER_WAY)
elif autoCheck == CAPTCHA_CHECK_METHOD_HAND:
results, verify = Captcha().verifyCaptchaByHand(type=TYPE_LOGIN_OTHER_WAY)
else:
results, verify = Captcha().verifyCodeAutoByMyself(type=TYPE_LOGIN_OTHER_WAY)
if not verify:
return False, 'Captcha recognition failed!'
formData = {
'loginUserDTO.user_name': userName,
'userDTO.password': userPwd,
'randCode': results,
}
jsonRet = EasyHttp.send(self._urlInfo['login'], data=formData)
# print('loginAsyncSuggest: %s' % jsonRet)
def isSuccess(response):
return response['status'] and response['data']['loginCheck'] == 'Y' if 'data' in response else False, \
response['data']['otherMsg'] if 'data' in response else response['messages']
loginSuccess, otherMsg = isSuccess(jsonRet)
return loginSuccess, '%s:%s' % (userName, otherMsg or 'Login succeeded!')
def isLogin(self):
formData = {
'_json_att': ''
}
jsonRet = EasyHttp.send(self._urlInfo['checkUser'])
Log.d('checkUser: %s' % jsonRet)
return jsonRet['data']['flag'] if jsonRet and 'data' in jsonRet and 'flag' in jsonRet[
'data'] else False
def loginOut(self):
EasyHttp.send(self._urlInfo['loginOut'])
self._init()
return self._uamtk()
def _init(self):
EasyHttp.send(self._urlInfo['init'])
if __name__ == '__main__':
login = Login()
login.login(USER_NAME, USER_PWD)
time.sleep(3)
print(login.loginOut()) |
def _passportRedirect(self):
params = { |
core.rs | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::events::{
error::EventError,
types::{ComponentEvent, ComponentEventChannel, EventSource},
},
async_trait::async_trait,
fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync,
futures::{channel::mpsc, SinkExt, TryStreamExt},
std::convert::TryInto,
tracing::{error, warn},
};
#[async_trait]
impl EventSource for fsys::EventSourceProxy {
/// Subscribe to component lifecycle events.
/// |sender| is the channel to which observed component events are forwarded.
async fn listen(&mut self, sender: mpsc::Sender<ComponentEvent>) -> Result<(), EventError> {
let (client_end, request_stream) =
fidl::endpoints::create_request_stream::<fsys::EventStreamMarker>()?;
let mut events = vec![
fsys::EventSubscription {
event_name: Some("running".to_string()),
mode: Some(fsys::EventMode::Async),
..fsys::EventSubscription::EMPTY
},
fsys::EventSubscription {
event_name: Some("started".to_string()),
mode: Some(fsys::EventMode::Async),
..fsys::EventSubscription::EMPTY
},
fsys::EventSubscription {
event_name: Some("stopped".to_string()),
mode: Some(fsys::EventMode::Async),
..fsys::EventSubscription::EMPTY
},
fsys::EventSubscription {
event_name: Some("diagnostics_ready".to_string()),
mode: Some(fsys::EventMode::Async),
..fsys::EventSubscription::EMPTY
},
]
.into_iter();
let subscription = self.subscribe(&mut events, client_end);
subscription
.await
.map_err(|e| EventError::Fidl("Eventsource subscribe", e))?
.map_err(|e| EventError::FidlComponent(format!("{:?}", e)))?;
EventStreamServer::new(sender).spawn(request_stream);
Ok(())
}
}
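// Illustrative sketch (not part of the original file): a caller wires `listen`
// to a bounded mpsc channel and drains `ComponentEvent`s from the receiver,
// much like the test at the bottom of this file. `CHANNEL_CAPACITY` is assumed
// to be defined by the caller.
//
//   let (sender, mut receiver) = mpsc::channel(CHANNEL_CAPACITY);
//   event_source_proxy.listen(sender).await?;
//   while let Some(event) = receiver.next().await { /* handle event */ }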
pub struct EventStreamServer {
sender: ComponentEventChannel,
}
impl EventStreamServer {
pub fn new(sender: ComponentEventChannel) -> Self {
Self { sender }
}
}
impl EventStreamServer {
pub fn spawn(self, stream: fsys::EventStreamRequestStream) |
async fn handle_request_stream(
mut self,
mut stream: fsys::EventStreamRequestStream,
) -> Result<(), EventError> {
while let Some(request) =
stream.try_next().await.map_err(|e| EventError::Fidl("EventStream stream", e))?
{
match request {
fsys::EventStreamRequest::OnEvent { event, .. } => match event.try_into() {
Ok(event) => {
self.send(event).await;
}
Err(err) => {
warn!(?err, "Failed to interpret event");
}
},
}
}
warn!("EventSource stream server closed");
Ok(())
}
async fn send(&mut self, event: ComponentEvent) {
// Ignore Err(SendError) result. If we fail to send it means that the archivist has been
// stopped and therefore the receiving end of this channel is closed. A send operation can
// only fail if this is the case.
let _ = self.sender.send(event).await;
}
}
#[cfg(test)]
pub mod tests {
use {
super::*,
crate::{container::ComponentIdentity, events::types::*},
fidl::endpoints::ClientEnd,
fidl_fuchsia_component as fcomponent,
fidl_fuchsia_io::NodeMarker,
fuchsia_zircon as zx,
futures::{future::RemoteHandle, FutureExt, StreamExt},
};
#[fuchsia::test]
async fn event_stream() {
let (mut source_proxy, stream_receiver) = spawn_fake_event_source();
let (sender, mut event_stream) = mpsc::channel(CHANNEL_CAPACITY);
source_proxy.listen(sender).await.expect("failed to listen");
let stream_server = stream_receiver.await.into_proxy().expect("get stream proxy");
// Send a `Started` event.
stream_server
.on_event(fsys::Event {
header: Some(fsys::EventHeader {
event_type: Some(fsys::EventType::Started),
moniker: Some("./foo:0/bar:0".to_string()),
component_url: Some("fuchsia-pkg://fuchsia.com/foo#meta/bar.cmx".to_string()),
timestamp: Some(zx::Time::get_monotonic().into_nanos()),
..fsys::EventHeader::EMPTY
}),
..fsys::Event::EMPTY
})
.expect("send started event ok");
// Send a `Running` event.
stream_server
.on_event(fsys::Event {
header: Some(fsys::EventHeader {
event_type: Some(fsys::EventType::Running),
moniker: Some("./foo:0/bar:0".to_string()),
component_url: Some("fuchsia-pkg://fuchsia.com/foo#meta/bar.cmx".to_string()),
timestamp: Some(zx::Time::get_monotonic().into_nanos()),
..fsys::EventHeader::EMPTY
}),
event_result: Some(fsys::EventResult::Payload(fsys::EventPayload::Running(
fsys::RunningPayload {
started_timestamp: Some(0),
..fsys::RunningPayload::EMPTY
},
))),
..fsys::Event::EMPTY
})
.expect("send running event ok");
// Send a `CapabilityReady` event for diagnostics.
let (node, _) = fidl::endpoints::create_request_stream::<NodeMarker>().unwrap();
stream_server
.on_event(fsys::Event {
header: Some(fsys::EventHeader {
event_type: Some(fsys::EventType::CapabilityReady),
moniker: Some("./foo:0/bar:0".to_string()),
component_url: Some("fuchsia-pkg://fuchsia.com/foo#meta/bar.cmx".to_string()),
timestamp: Some(zx::Time::get_monotonic().into_nanos()),
..fsys::EventHeader::EMPTY
}),
event_result: Some(fsys::EventResult::Payload(
fsys::EventPayload::CapabilityReady(fsys::CapabilityReadyPayload {
name: Some("diagnostics".to_string()),
node: Some(node),
..fsys::CapabilityReadyPayload::EMPTY
}),
)),
..fsys::Event::EMPTY
})
.expect("send diagnostics ready event ok");
// Send a Stopped event.
stream_server
.on_event(fsys::Event {
header: Some(fsys::EventHeader {
event_type: Some(fsys::EventType::Stopped),
moniker: Some("./foo:0/bar:0".to_string()),
component_url: Some("fuchsia-pkg://fuchsia.com/foo#meta/bar.cmx".to_string()),
timestamp: Some(zx::Time::get_monotonic().into_nanos()),
..fsys::EventHeader::EMPTY
}),
..fsys::Event::EMPTY
})
.expect("send stopped event ok");
let expected_component_id =
ComponentIdentifier::parse_from_moniker("./foo:0/bar:0").unwrap();
let expected_identity = ComponentIdentity::from_identifier_and_url(
&expected_component_id,
"fuchsia-pkg://fuchsia.com/foo#meta/bar.cmx",
);
let shared_data = EventMetadata {
identity: expected_identity.clone(),
timestamp: zx::Time::get_monotonic(),
};
// Assert the first received event was a Start event.
let event = event_stream.next().await.unwrap();
compare_events_ignore_timestamp_and_payload(
&event,
&ComponentEvent::Start(StartEvent { metadata: shared_data.clone() }),
);
// Assert the second received event was a Running event.
let event = event_stream.next().await.unwrap();
compare_events_ignore_timestamp_and_payload(
&event,
&ComponentEvent::Running(RunningEvent {
metadata: shared_data.clone(),
component_start_time: zx::Time::from_nanos(0),
}),
);
// Assert the third received event was a CapabilityReady event for diagnostics.
let event = event_stream.next().await.unwrap();
match event {
ComponentEvent::DiagnosticsReady(DiagnosticsReadyEvent {
metadata,
directory: Some(_),
}) => assert_eq!(metadata.identity, expected_identity),
_ => panic!("expected a DiagnosticsReady event"),
}
// Assert the last received event was a Stop event.
let event = event_stream.next().await.unwrap();
compare_events_ignore_timestamp_and_payload(
&event,
&ComponentEvent::Stop(StopEvent { metadata: shared_data.clone() }),
);
}
pub fn compare_events_ignore_timestamp_and_payload(
event1: &ComponentEvent,
event2: &ComponentEvent,
) {
// Need to explicitly check every case despite the logic being the same since rust
// requires multi-case match arms to have variable bindings be the same type in every
// case. This isn't doable in our polymorphic event enums.
match (event1, event2) {
(ComponentEvent::Start(x), ComponentEvent::Start(y)) => {
assert_eq!(x.metadata.identity, y.metadata.identity);
}
(ComponentEvent::Stop(x), ComponentEvent::Stop(y)) => {
assert_eq!(x.metadata.identity, y.metadata.identity);
}
(ComponentEvent::Running(x), ComponentEvent::Running(y)) => {
assert_eq!(x.metadata.identity, y.metadata.identity);
}
(ComponentEvent::DiagnosticsReady(x), ComponentEvent::DiagnosticsReady(y)) => {
assert_eq!(x.metadata.identity, y.metadata.identity);
}
(ComponentEvent::LogSinkRequested(x), ComponentEvent::LogSinkRequested(y)) => {
assert_eq!(x.metadata.identity, y.metadata.identity);
}
_ => panic!(
"Events are not equal, they are different enumerations: {:?}, {:?}",
event1, event2
),
}
}
fn spawn_fake_event_source(
) -> (fsys::EventSourceProxy, RemoteHandle<ClientEnd<fsys::EventStreamMarker>>) {
let (source, mut request_stream) =
fidl::endpoints::create_proxy_and_stream::<fsys::EventSourceMarker>().unwrap();
let (f, stream_client_end_fut) = async move {
if let Some(request) =
request_stream.try_next().await.expect("error running fake event source")
{
match request {
fsys::EventSourceRequest::Subscribe { events, stream, responder } => {
let mut events_iter = events.into_iter();
{
let subscription_request = events_iter.next().unwrap();
assert_eq!(subscription_request.event_name.unwrap(), "running",);
assert_eq!(subscription_request.mode.unwrap(), fsys::EventMode::Async,);
}
{
let subscription_request = events_iter.next().unwrap();
assert_eq!(subscription_request.event_name.unwrap(), "started",);
assert_eq!(subscription_request.mode.unwrap(), fsys::EventMode::Async,);
}
{
let subscription_request = events_iter.next().unwrap();
assert_eq!(subscription_request.event_name.unwrap(), "stopped",);
assert_eq!(subscription_request.mode.unwrap(), fsys::EventMode::Async,);
}
{
let subscription_request = events_iter.next().unwrap();
assert_eq!(
subscription_request.event_name.unwrap(),
"diagnostics_ready",
);
assert_eq!(subscription_request.mode.unwrap(), fsys::EventMode::Async,);
}
responder.send(&mut Ok(())).expect("responder send ok");
return stream;
}
fsys::EventSourceRequest::TakeStaticEventStream { responder, .. } => {
responder
.send(&mut Err(fcomponent::Error::ResourceUnavailable))
.expect("responder send None");
}
}
}
unreachable!("This shouldn't be exercised");
}
.remote_handle();
fasync::Task::spawn(f).detach();
(source, stream_client_end_fut)
}
}
| {
fasync::Task::spawn(async move {
self.handle_request_stream(stream)
.await
.unwrap_or_else(|e: EventError| error!(?e, "failed to run event stream server"));
})
.detach();
} |
1.basic.tsx | import React from 'react';
import createClass from 'create-react-class';
import { SmallDataTableLoadingSkeleton } from '../../../../index'; | },
}); |
export default createClass({
render() {
return <SmallDataTableLoadingSkeleton isLoading={true} />; |
config_test.go | // Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and | // limitations under the License.
package googlecloudexporter
import (
"path"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/config/configmodels"
"go.opentelemetry.io/collector/config/configtest"
"go.opentelemetry.io/collector/exporter/exporterhelper"
)
func TestLoadConfig(t *testing.T) {
factories, err := componenttest.NopFactories()
assert.Nil(t, err)
factory := NewFactory()
factories.Exporters[configmodels.Type(typeStr)] = factory
cfg, err := configtest.LoadConfigFile(
t, path.Join(".", "testdata", "config.yaml"), factories,
)
require.NoError(t, err)
require.NotNil(t, cfg)
assert.Equal(t, len(cfg.Exporters), 2)
r0 := cfg.Exporters["googlecloud"]
assert.Equal(t, r0, factory.CreateDefaultConfig())
r1 := cfg.Exporters["googlecloud/customname"].(*Config)
assert.Equal(t, r1,
&Config{
ExporterSettings: configmodels.ExporterSettings{TypeVal: configmodels.Type(typeStr), NameVal: "googlecloud/customname"},
ProjectID: "my-project",
UserAgent: "opentelemetry-collector-contrib {{version}}",
Endpoint: "test-endpoint",
UseInsecure: true,
TimeoutSettings: exporterhelper.TimeoutSettings{
Timeout: 20 * time.Second,
},
ResourceMappings: []ResourceMapping{
{
SourceType: "source.resource1",
TargetType: "target-resource1",
LabelMappings: []LabelMapping{
{
SourceKey: "contrib.opencensus.io/exporter/googlecloud/project_id",
TargetKey: "project_id",
Optional: true,
},
{
SourceKey: "source.label1",
TargetKey: "target_label_1",
Optional: false,
},
},
},
{
SourceType: "source.resource2",
TargetType: "target-resource2",
},
},
RetrySettings: exporterhelper.RetrySettings{
Enabled: true,
InitialInterval: 10 * time.Second,
MaxInterval: 1 * time.Minute,
MaxElapsedTime: 10 * time.Minute,
},
QueueSettings: exporterhelper.QueueSettings{
Enabled: true,
NumConsumers: 2,
QueueSize: 10,
},
MetricConfig: MetricConfig{
Prefix: "prefix",
SkipCreateMetricDescriptor: true,
},
})
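// Illustrative sketch (an assumption, not the original testdata): the rough
// shape of testdata/config.yaml implied by the assertions above, with field
// names guessed from the Config struct.
//
//   exporters:
//     googlecloud:
//     googlecloud/customname:
//       project: my-project
//       endpoint: test-endpoint
//       use_insecure: true
//       timeout: 20s
//       ...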
} | |
distributed_data_parallel.py | """
Copyright 2021 Keisuke Izumiya
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import torch
import torch.distributed as distd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.utils.data.distributed import DistributedSampler
import mmle.distributed as mdistd
import mmle.nn as mnn
from mmle.utils.manager import Manager
BATCH_SIZE = 400
DATA_NUM = 40000
DATA_DIM = 10
EPOCH_NUM = 50
LABEL_DIM = 5
LOG_DIR = "log"
MIDDLE_DIM = 16
class Dataset(torch.utils.data.Dataset):
def __init__(self):
super().__init__()
self.data = torch.randn(DATA_NUM, DATA_DIM)
self.label = torch.randn(DATA_NUM, LABEL_DIM)
def | (self):
return DATA_NUM
def __getitem__(self, idx):
return self.data[idx], self.label[idx]
def main():
world_size = distd.get_world_size()
rank = distd.get_rank()
model = nn.Sequential(
mnn.FC(DATA_DIM, MIDDLE_DIM), mnn.FC(MIDDLE_DIM, LABEL_DIM, bn=False, activ="id")
)
model = mdistd.distribute_module(model)
optimizer = optim.Adam(model.parameters())
dataset = Dataset()
sampler = DistributedSampler(dataset, world_size, rank)
loader = DataLoader(dataset, BATCH_SIZE, sampler=sampler, drop_last=True)
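# Note (illustrative, not part of the original file): DistributedSampler shards
# the dataset across ranks, so each process iterates roughly
# DATA_NUM / world_size samples per epoch; drop_last=True keeps the number of
# batches identical across ranks, which the per-batch barrier below relies on.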
if rank == 0:
manager = Manager(LOG_DIR, use_tensorboard=True)
manager.tensorboard_writer.add_graph(model, dataset[0][0].repeat(BATCH_SIZE, 1))
step = 0
for epoch in range(EPOCH_NUM):
model.train()
for data, label in loader:
loss = F.mse_loss(model(data), label.to(rank))
mnn.zero_grad(model)
loss.backward()
optimizer.step()
distd.barrier()
if rank == 0:
step += world_size
manager.tensorboard_writer.plot("loss", "train", loss.item(), step)
if rank == 0:
print(f"Finish epoch {epoch}: loss={loss.item():.3f}")
distd.barrier()
if rank == 0:
manager.save({"model": model.state_dict()}, "model")
manager.close()
if __name__ == "__main__":
mdistd.spawn(main, nprocs=torch.cuda.device_count())
| __len__ |
sort.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Defines sort kernel for `ArrayRef`
use crate::array::*;
use crate::buffer::MutableBuffer;
use crate::compute::take;
use crate::datatypes::*;
use crate::error::{ArrowError, Result};
use std::cmp::Ordering;
use TimeUnit::*;
use serde_derive::{Deserialize, Serialize};
/// Sort the `ArrayRef` using `SortOptions`.
///
/// Performs a sort on values and indices. Nulls are ordered according
/// to the `nulls_first` flag in `options`. Floats are sorted using
/// IEEE 754 totalOrder.
///
/// Returns an `ArrowError::ComputeError(String)` if the array type is
/// either unsupported by `sort_to_indices` or `take`.
///
/// Note: this is an unstable_sort, meaning it may not preserve the
/// order of equal elements.
///
/// # Example
/// ```rust
/// # use std::sync::Arc;
/// # use arrow::array::{Int32Array, ArrayRef};
/// # use arrow::error::Result;
/// # use arrow::compute::kernels::sort::sort;
/// # fn main() -> Result<()> {
/// let array: ArrayRef = Arc::new(Int32Array::from(vec![5, 4, 3, 2, 1]));
/// let sorted_array = sort(&array, None).unwrap();
/// let sorted_array = sorted_array.as_any().downcast_ref::<Int32Array>().unwrap();
/// assert_eq!(sorted_array, &Int32Array::from(vec![1, 2, 3, 4, 5]));
/// # Ok(())
/// # }
/// ```
pub fn sort(values: &ArrayRef, options: Option<SortOptions>) -> Result<ArrayRef> {
let indices = sort_to_indices(values, options, None)?;
take(values.as_ref(), &indices, None)
}
/// Sort the `ArrayRef` partially.
///
/// If `limit` is specified, the resulting array will contain only
/// the first `limit` elements in the sort order. Any data after the
/// limit will be discarded.
///
/// Note: this is an unstable_sort, meaning it may not preserve the
/// order of equal elements.
///
/// # Example
/// ```rust
/// # use std::sync::Arc;
/// # use arrow::array::{Int32Array, ArrayRef};
/// # use arrow::error::Result;
/// # use arrow::compute::kernels::sort::{sort_limit, SortOptions};
/// # fn main() -> Result<()> {
/// let array: ArrayRef = Arc::new(Int32Array::from(vec![5, 4, 3, 2, 1]));
///
/// // Find the smallest 2 items
/// let sorted_array = sort_limit(&array, None, Some(2)).unwrap();
/// let sorted_array = sorted_array.as_any().downcast_ref::<Int32Array>().unwrap();
/// assert_eq!(sorted_array, &Int32Array::from(vec![1, 2]));
///
/// // Find the largest 2 items
/// let options = Some(SortOptions {
/// descending: true,
/// ..Default::default()
/// });
/// let sorted_array = sort_limit(&array, options, Some(2)).unwrap();
/// let sorted_array = sorted_array.as_any().downcast_ref::<Int32Array>().unwrap();
/// assert_eq!(sorted_array, &Int32Array::from(vec![5, 4]));
/// # Ok(())
/// # }
/// ```
pub fn sort_limit(
values: &ArrayRef,
options: Option<SortOptions>,
limit: Option<usize>,
) -> Result<ArrayRef> {
let indices = sort_to_indices(values, options, limit)?;
take(values.as_ref(), &indices, None)
}
/// We can only do this if `T` is primitive.
#[inline]
fn sort_unstable_by<T, F>(array: &mut [T], limit: usize, cmp: F)
where
F: FnMut(&T, &T) -> Ordering,
{
if array.len() == limit {
array.sort_unstable_by(cmp);
} else {
partial_sort(array, limit, cmp);
}
}
// implements comparison using IEEE 754 total ordering for f32
// Original implementation from https://doc.rust-lang.org/std/primitive.f64.html#method.total_cmp
// TODO to change to use std when it becomes stable
fn total_cmp_32(l: f32, r: f32) -> std::cmp::Ordering {
let mut left = l.to_bits() as i32;
let mut right = r.to_bits() as i32;
left ^= (((left >> 31) as u32) >> 1) as i32;
right ^= (((right >> 31) as u32) >> 1) as i32;
left.cmp(&right)
}
// implements comparison using IEEE 754 total ordering for f64
// Original implementation from https://doc.rust-lang.org/std/primitive.f64.html#method.total_cmp
// TODO to change to use std when it becomes stable
fn total_cmp_64(l: f64, r: f64) -> std::cmp::Ordering {
let mut left = l.to_bits() as i64;
let mut right = r.to_bits() as i64;
left ^= (((left >> 63) as u64) >> 1) as i64;
right ^= (((right >> 63) as u64) >> 1) as i64;
left.cmp(&right)
}
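// A minimal sketch (not part of the original file) of the total ordering the
// two helpers above produce: -NaN < -inf < ... < -0.0 < +0.0 < ... < +inf < +NaN.
#[cfg(test)]
mod total_cmp_sketch {
    use super::*;
    use std::cmp::Ordering;

    #[test]
    fn total_order_examples() {
        // Negative zero orders strictly before positive zero.
        assert_eq!(total_cmp_32(-0.0, 0.0), Ordering::Less);
        // A (positive) NaN orders after positive infinity.
        assert_eq!(total_cmp_32(f32::NAN, f32::INFINITY), Ordering::Greater);
        assert_eq!(total_cmp_64(-1.0, 1.0), Ordering::Less);
    }
}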
fn cmp<T>(l: T, r: T) -> std::cmp::Ordering
where
T: Ord,
{
l.cmp(&r)
}
// partition indices into valid and null indices
fn partition_validity(array: &ArrayRef) -> (Vec<u32>, Vec<u32>) {
match array.null_count() {
// faster path
0 => ((0..(array.len() as u32)).collect(), vec![]),
_ => {
let indices = 0..(array.len() as u32);
indices.partition(|index| array.is_valid(*index as usize))
}
}
}
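// Sketch (not part of the original file): `Iterator::partition` as used above
// splits the index range by validity while preserving relative order on both
// sides, e.g.:
//
//   let (evens, odds): (Vec<u32>, Vec<u32>) = (0..6u32).partition(|i| i % 2 == 0);
//   assert_eq!((evens, odds), (vec![0, 2, 4], vec![1, 3, 5]));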
/// Sort elements from `ArrayRef` into an unsigned integer (`UInt32Array`) of indices.
/// For floating point arrays, any NaN values are considered to be greater than any other non-null value.
/// `limit` optionally restricts the result to a partial sort.
pub fn sort_to_indices(
values: &ArrayRef,
options: Option<SortOptions>,
limit: Option<usize>,
) -> Result<UInt32Array> {
let options = options.unwrap_or_default();
let (v, n) = partition_validity(values);
Ok(match values.data_type() {
DataType::Boolean => sort_boolean(values, v, n, &options, limit),
DataType::Int8 => {
sort_primitive::<Int8Type, _>(values, v, n, cmp, &options, limit)
}
DataType::Int16 => {
sort_primitive::<Int16Type, _>(values, v, n, cmp, &options, limit)
}
DataType::Int32 => {
sort_primitive::<Int32Type, _>(values, v, n, cmp, &options, limit)
}
DataType::Int64 => {
sort_primitive::<Int64Type, _>(values, v, n, cmp, &options, limit)
}
DataType::UInt8 => {
sort_primitive::<UInt8Type, _>(values, v, n, cmp, &options, limit)
}
DataType::UInt16 => {
sort_primitive::<UInt16Type, _>(values, v, n, cmp, &options, limit)
}
DataType::UInt32 => {
sort_primitive::<UInt32Type, _>(values, v, n, cmp, &options, limit)
}
DataType::UInt64 => {
sort_primitive::<UInt64Type, _>(values, v, n, cmp, &options, limit)
}
DataType::Float32 => {
sort_primitive::<Float32Type, _>(values, v, n, total_cmp_32, &options, limit)
}
DataType::Float64 => {
sort_primitive::<Float64Type, _>(values, v, n, total_cmp_64, &options, limit)
}
DataType::Date32 => {
sort_primitive::<Date32Type, _>(values, v, n, cmp, &options, limit)
}
DataType::Date64 => {
sort_primitive::<Date64Type, _>(values, v, n, cmp, &options, limit)
}
DataType::Time32(Second) => {
sort_primitive::<Time32SecondType, _>(values, v, n, cmp, &options, limit)
}
DataType::Time32(Millisecond) => {
sort_primitive::<Time32MillisecondType, _>(values, v, n, cmp, &options, limit)
}
DataType::Time64(Microsecond) => {
sort_primitive::<Time64MicrosecondType, _>(values, v, n, cmp, &options, limit)
}
DataType::Time64(Nanosecond) => {
sort_primitive::<Time64NanosecondType, _>(values, v, n, cmp, &options, limit)
}
DataType::Timestamp(Second, _) => {
sort_primitive::<TimestampSecondType, _>(values, v, n, cmp, &options, limit)
}
DataType::Timestamp(Millisecond, _) => {
sort_primitive::<TimestampMillisecondType, _>(
values, v, n, cmp, &options, limit,
)
}
DataType::Timestamp(Microsecond, _) => {
sort_primitive::<TimestampMicrosecondType, _>(
values, v, n, cmp, &options, limit,
)
}
DataType::Timestamp(Nanosecond, _) => {
sort_primitive::<TimestampNanosecondType, _>(
values, v, n, cmp, &options, limit,
)
}
DataType::Interval(IntervalUnit::YearMonth) => {
sort_primitive::<IntervalYearMonthType, _>(values, v, n, cmp, &options, limit)
}
DataType::Interval(IntervalUnit::DayTime) => {
sort_primitive::<IntervalDayTimeType, _>(values, v, n, cmp, &options, limit)
}
DataType::Interval(IntervalUnit::MonthDayNano) => {
sort_primitive::<IntervalMonthDayNanoType, _>(
values, v, n, cmp, &options, limit,
)
}
DataType::Duration(TimeUnit::Second) => {
sort_primitive::<DurationSecondType, _>(values, v, n, cmp, &options, limit)
}
DataType::Duration(TimeUnit::Millisecond) => {
sort_primitive::<DurationMillisecondType, _>(
values, v, n, cmp, &options, limit,
)
}
DataType::Duration(TimeUnit::Microsecond) => {
sort_primitive::<DurationMicrosecondType, _>(
values, v, n, cmp, &options, limit,
)
}
DataType::Duration(TimeUnit::Nanosecond) => {
sort_primitive::<DurationNanosecondType, _>(
values, v, n, cmp, &options, limit,
)
}
DataType::Utf8 => sort_string::<i32>(values, v, n, &options, limit),
DataType::LargeUtf8 => sort_string::<i64>(values, v, n, &options, limit),
DataType::List(field) => match field.data_type() {
DataType::Int8 => sort_list::<i32, Int8Type>(values, v, n, &options, limit),
DataType::Int16 => sort_list::<i32, Int16Type>(values, v, n, &options, limit),
DataType::Int32 => sort_list::<i32, Int32Type>(values, v, n, &options, limit),
DataType::Int64 => sort_list::<i32, Int64Type>(values, v, n, &options, limit),
DataType::UInt8 => sort_list::<i32, UInt8Type>(values, v, n, &options, limit),
DataType::UInt16 => {
sort_list::<i32, UInt16Type>(values, v, n, &options, limit)
}
DataType::UInt32 => {
sort_list::<i32, UInt32Type>(values, v, n, &options, limit)
}
DataType::UInt64 => {
sort_list::<i32, UInt64Type>(values, v, n, &options, limit)
}
DataType::Float32 => {
sort_list::<i32, Float32Type>(values, v, n, &options, limit)
}
DataType::Float64 => {
sort_list::<i32, Float64Type>(values, v, n, &options, limit)
}
t => {
return Err(ArrowError::ComputeError(format!(
"Sort not supported for list type {:?}",
t
)))
}
},
DataType::LargeList(field) => match field.data_type() {
DataType::Int8 => sort_list::<i64, Int8Type>(values, v, n, &options, limit),
DataType::Int16 => sort_list::<i64, Int16Type>(values, v, n, &options, limit),
DataType::Int32 => sort_list::<i64, Int32Type>(values, v, n, &options, limit),
DataType::Int64 => sort_list::<i64, Int64Type>(values, v, n, &options, limit),
DataType::UInt8 => sort_list::<i64, UInt8Type>(values, v, n, &options, limit),
DataType::UInt16 => {
sort_list::<i64, UInt16Type>(values, v, n, &options, limit)
}
DataType::UInt32 => {
sort_list::<i64, UInt32Type>(values, v, n, &options, limit)
}
DataType::UInt64 => {
sort_list::<i64, UInt64Type>(values, v, n, &options, limit)
}
DataType::Float32 => {
sort_list::<i64, Float32Type>(values, v, n, &options, limit)
}
DataType::Float64 => {
sort_list::<i64, Float64Type>(values, v, n, &options, limit)
}
t => {
return Err(ArrowError::ComputeError(format!(
"Sort not supported for list type {:?}",
t
)))
}
},
DataType::FixedSizeList(field, _) => match field.data_type() {
DataType::Int8 => sort_list::<i32, Int8Type>(values, v, n, &options, limit),
DataType::Int16 => sort_list::<i32, Int16Type>(values, v, n, &options, limit),
DataType::Int32 => sort_list::<i32, Int32Type>(values, v, n, &options, limit),
DataType::Int64 => sort_list::<i32, Int64Type>(values, v, n, &options, limit),
DataType::UInt8 => sort_list::<i32, UInt8Type>(values, v, n, &options, limit),
DataType::UInt16 => {
sort_list::<i32, UInt16Type>(values, v, n, &options, limit)
}
DataType::UInt32 => {
sort_list::<i32, UInt32Type>(values, v, n, &options, limit)
}
DataType::UInt64 => {
sort_list::<i32, UInt64Type>(values, v, n, &options, limit)
}
DataType::Float32 => {
sort_list::<i32, Float32Type>(values, v, n, &options, limit)
}
DataType::Float64 => {
sort_list::<i32, Float64Type>(values, v, n, &options, limit)
}
t => {
return Err(ArrowError::ComputeError(format!(
"Sort not supported for list type {:?}",
t
)))
}
},
DataType::Dictionary(key_type, value_type)
if *value_type.as_ref() == DataType::Utf8 =>
{
match key_type.as_ref() {
DataType::Int8 => {
sort_string_dictionary::<Int8Type>(values, v, n, &options, limit)
}
DataType::Int16 => {
sort_string_dictionary::<Int16Type>(values, v, n, &options, limit)
}
DataType::Int32 => {
sort_string_dictionary::<Int32Type>(values, v, n, &options, limit)
}
DataType::Int64 => {
sort_string_dictionary::<Int64Type>(values, v, n, &options, limit)
}
DataType::UInt8 => {
sort_string_dictionary::<UInt8Type>(values, v, n, &options, limit)
}
DataType::UInt16 => {
sort_string_dictionary::<UInt16Type>(values, v, n, &options, limit)
}
DataType::UInt32 => {
sort_string_dictionary::<UInt32Type>(values, v, n, &options, limit)
}
DataType::UInt64 => {
sort_string_dictionary::<UInt64Type>(values, v, n, &options, limit)
}
t => {
return Err(ArrowError::ComputeError(format!(
"Sort not supported for dictionary key type {:?}",
t
)))
}
}
}
DataType::Binary | DataType::FixedSizeBinary(_) => {
sort_binary::<i32>(values, v, n, &options, limit)
}
DataType::LargeBinary => sort_binary::<i64>(values, v, n, &options, limit),
t => {
return Err(ArrowError::ComputeError(format!(
"Sort not supported for data type {:?}",
t
)))
}
})
}
/// Options that define how sort kernels should behave
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct SortOptions {
/// Whether to sort in descending order
pub descending: bool,
/// Whether to sort nulls first
pub nulls_first: bool,
}
impl Default for SortOptions {
fn default() -> Self {
Self {
descending: false,
// default to nulls first to match spark's behavior
nulls_first: true,
}
}
}
/// Sort boolean values
///
/// When a limit is present, the sort is pair-comparison based, as k-select might be more efficient;
/// when the limit is absent, a binary partition is used to speed things up (it is linear).
///
/// TODO maybe partition_validity call can be eliminated in this case and tri-color sort can be used
/// instead. https://en.wikipedia.org/wiki/Dutch_national_flag_problem
fn sort_boolean(
values: &ArrayRef,
value_indices: Vec<u32>,
mut null_indices: Vec<u32>,
options: &SortOptions,
limit: Option<usize>,
) -> UInt32Array {
let values = values
.as_any()
.downcast_ref::<BooleanArray>()
.expect("Unable to downcast to boolean array");
let descending = options.descending;
let valids_len = value_indices.len();
let nulls_len = null_indices.len();
let mut len = values.len();
let valids = if let Some(limit) = limit {
len = limit.min(len);
// create tuples that are used for sorting
let mut valids = value_indices
.into_iter()
.map(|index| (index, values.value(index as usize)))
.collect::<Vec<(u32, bool)>>();
sort_valids(descending, &mut valids, &mut null_indices, len, cmp);
valids
} else {
// when limit is not present, we have a better way than sorting: we can just partition
// the vec into [false..., true...] or [true..., false...] when descending
// TODO when https://github.com/rust-lang/rust/issues/62543 is merged we can use partition_in_place
let (mut a, b): (Vec<(u32, bool)>, Vec<(u32, bool)>) = value_indices
.into_iter()
.map(|index| (index, values.value(index as usize)))
.partition(|(_, value)| *value == descending);
a.extend(b);
if descending {
null_indices.reverse();
}
a
};
let nulls = null_indices;
// collect results directly into a buffer instead of a vec to avoid another aligned allocation
let result_capacity = len * std::mem::size_of::<u32>();
let mut result = MutableBuffer::new(result_capacity);
// sets len to capacity so we can access the whole buffer as a typed slice
result.resize(result_capacity, 0);
// Safety: the buffer is always treated as `u32` in the code below
let result_slice: &mut [u32] = unsafe { result.typed_data_mut() };
if options.nulls_first {
let size = nulls_len.min(len);
result_slice[0..size].copy_from_slice(&nulls[0..size]);
if nulls_len < len {
insert_valid_values(result_slice, nulls_len, &valids[0..len - size]);
}
} else {
// nulls last
let size = valids.len().min(len);
insert_valid_values(result_slice, 0, &valids[0..size]);
if len > size {
result_slice[valids_len..].copy_from_slice(&nulls[0..(len - valids_len)]);
}
}
let result_data = unsafe {
ArrayData::new_unchecked(
DataType::UInt32,
len,
Some(0),
None,
0,
vec![result.into()],
vec![],
)
};
UInt32Array::from(result_data)
}
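// Note (not part of the original file): the no-limit path above is linear
// because partitioning booleans by `value == descending` already yields the
// target order; for an ascending sort all `false` values land before all
// `true` values, so no comparison sort is needed.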
/// Sort primitive values
fn sort_primitive<T, F>(
values: &ArrayRef,
value_indices: Vec<u32>,
null_indices: Vec<u32>,
cmp: F,
options: &SortOptions,
limit: Option<usize>,
) -> UInt32Array
where
T: ArrowPrimitiveType,
T::Native: std::cmp::PartialOrd,
F: Fn(T::Native, T::Native) -> std::cmp::Ordering,
{
// create tuples that are used for sorting
let valids = {
let values = as_primitive_array::<T>(values);
value_indices
.into_iter()
.map(|index| (index, values.value(index as usize)))
.collect::<Vec<(u32, T::Native)>>()
};
sort_primitive_inner(values, null_indices, cmp, options, limit, valids)
}
// sort is instantiated a lot so we only compile this inner version for each native type
fn sort_primitive_inner<T, F>(
values: &ArrayRef,
null_indices: Vec<u32>,
cmp: F,
options: &SortOptions,
limit: Option<usize>,
mut valids: Vec<(u32, T)>,
) -> UInt32Array
where
T: ArrowNativeType,
T: std::cmp::PartialOrd,
F: Fn(T, T) -> std::cmp::Ordering,
{
let mut nulls = null_indices;
let valids_len = valids.len();
let nulls_len = nulls.len();
let mut len = values.len();
if let Some(limit) = limit {
len = limit.min(len);
}
sort_valids(options.descending, &mut valids, &mut nulls, len, cmp);
// collect results directly into a buffer instead of a vec to avoid another aligned allocation
let result_capacity = len * std::mem::size_of::<u32>();
let mut result = MutableBuffer::new(result_capacity);
// sets len to capacity so we can access the whole buffer as a typed slice
result.resize(result_capacity, 0);
// Safety: the buffer is always treated as `u32` in the code below
let result_slice: &mut [u32] = unsafe { result.typed_data_mut() };
if options.nulls_first {
let size = nulls_len.min(len);
result_slice[0..size].copy_from_slice(&nulls[0..size]);
if nulls_len < len {
insert_valid_values(result_slice, nulls_len, &valids[0..len - size]);
}
} else {
// nulls last
let size = valids.len().min(len);
insert_valid_values(result_slice, 0, &valids[0..size]);
if len > size {
result_slice[valids_len..].copy_from_slice(&nulls[0..(len - valids_len)]);
}
}
let result_data = unsafe {
ArrayData::new_unchecked(
DataType::UInt32,
len,
Some(0),
None,
0,
vec![result.into()],
vec![],
)
};
UInt32Array::from(result_data)
}
// insert valid and nan values in the correct order depending on the descending flag
fn insert_valid_values<T>(result_slice: &mut [u32], offset: usize, valids: &[(u32, T)]) {
let valids_len = valids.len();
// helper to append the index part of the valid tuples
let append_valids = move |dst_slice: &mut [u32]| {
debug_assert_eq!(dst_slice.len(), valids_len);
dst_slice
.iter_mut()
.zip(valids.iter())
.for_each(|(dst, src)| *dst = src.0)
};
append_valids(&mut result_slice[offset..offset + valids.len()]);
}
/// Sort strings
fn sort_string<Offset: StringOffsetSizeTrait>(
values: &ArrayRef,
value_indices: Vec<u32>,
null_indices: Vec<u32>,
options: &SortOptions,
limit: Option<usize>,
) -> UInt32Array {
let values = values
.as_any()
.downcast_ref::<GenericStringArray<Offset>>()
.unwrap();
sort_string_helper(
values,
value_indices,
null_indices,
options,
limit,
|array, idx| array.value(idx as usize),
)
}
/// Sort dictionary encoded strings
fn sort_string_dictionary<T: ArrowDictionaryKeyType>(
values: &ArrayRef,
value_indices: Vec<u32>,
null_indices: Vec<u32>,
options: &SortOptions,
limit: Option<usize>,
) -> UInt32Array {
let values: &DictionaryArray<T> = as_dictionary_array::<T>(values);
let keys: &PrimitiveArray<T> = values.keys();
let dict = values.values();
let dict: &StringArray = as_string_array(dict);
sort_string_helper(
keys,
value_indices,
null_indices,
options,
limit,
|array: &PrimitiveArray<T>, idx| -> &str {
let key: T::Native = array.value(idx as usize);
dict.value(key.to_usize().unwrap())
},
)
}
/// shared implementation between dictionary encoded and plain string arrays
#[inline]
fn sort_string_helper<'a, A: Array, F>(
values: &'a A,
value_indices: Vec<u32>,
null_indices: Vec<u32>,
options: &SortOptions,
limit: Option<usize>,
value_fn: F,
) -> UInt32Array
where
F: Fn(&'a A, u32) -> &str,
{
let mut valids = value_indices
.into_iter()
.map(|index| (index, value_fn(values, index)))
.collect::<Vec<(u32, &str)>>();
let mut nulls = null_indices;
let descending = options.descending;
let mut len = values.len();
if let Some(limit) = limit {
len = limit.min(len);
}
sort_valids(descending, &mut valids, &mut nulls, len, cmp);
// collect the order of valid tuples
let mut valid_indices: Vec<u32> = valids.iter().map(|tuple| tuple.0).collect();
if options.nulls_first {
nulls.append(&mut valid_indices);
nulls.truncate(len);
UInt32Array::from(nulls)
} else {
// no need to sort nulls as they are in the correct order already
valid_indices.append(&mut nulls);
valid_indices.truncate(len);
UInt32Array::from(valid_indices)
}
}
fn sort_list<S, T>(
values: &ArrayRef,
value_indices: Vec<u32>,
null_indices: Vec<u32>,
options: &SortOptions,
limit: Option<usize>,
) -> UInt32Array
where
S: OffsetSizeTrait,
T: ArrowPrimitiveType,
T::Native: std::cmp::PartialOrd,
{
sort_list_inner::<S>(values, value_indices, null_indices, options, limit)
}
fn sort_list_inner<S>(
values: &ArrayRef,
value_indices: Vec<u32>,
mut null_indices: Vec<u32>,
options: &SortOptions,
limit: Option<usize>,
) -> UInt32Array
where
S: OffsetSizeTrait,
{
let mut valids: Vec<(u32, ArrayRef)> = values
.as_any()
.downcast_ref::<FixedSizeListArray>()
.map_or_else(
|| {
let values = as_generic_list_array::<S>(values);
value_indices
.iter()
.copied()
.map(|index| (index, values.value(index as usize)))
.collect()
},
|values| {
value_indices
.iter()
.copied()
.map(|index| (index, values.value(index as usize)))
.collect()
},
);
let mut len = values.len();
let descending = options.descending;
if let Some(limit) = limit {
len = limit.min(len);
}
sort_valids_array(descending, &mut valids, &mut null_indices, len);
let mut valid_indices: Vec<u32> = valids.iter().map(|tuple| tuple.0).collect();
if options.nulls_first {
null_indices.append(&mut valid_indices);
null_indices.truncate(len);
UInt32Array::from(null_indices)
} else {
valid_indices.append(&mut null_indices);
valid_indices.truncate(len);
UInt32Array::from(valid_indices)
}
}
fn sort_binary<S>(
values: &ArrayRef,
value_indices: Vec<u32>,
mut null_indices: Vec<u32>,
options: &SortOptions,
limit: Option<usize>,
) -> UInt32Array
where
S: BinaryOffsetSizeTrait,
{
let mut valids: Vec<(u32, &[u8])> = values
.as_any()
.downcast_ref::<FixedSizeBinaryArray>()
.map_or_else(
|| {
let values = as_generic_binary_array::<S>(values);
value_indices
.iter()
.copied()
.map(|index| (index, values.value(index as usize)))
.collect()
},
|values| {
value_indices
.iter()
.copied()
.map(|index| (index, values.value(index as usize)))
.collect()
},
);
let mut len = values.len();
let descending = options.descending;
if let Some(limit) = limit {
len = limit.min(len);
}
sort_valids(descending, &mut valids, &mut null_indices, len, cmp);
let mut valid_indices: Vec<u32> = valids.iter().map(|tuple| tuple.0).collect();
if options.nulls_first {
null_indices.append(&mut valid_indices);
null_indices.truncate(len);
UInt32Array::from(null_indices)
} else {
valid_indices.append(&mut null_indices);
valid_indices.truncate(len);
UInt32Array::from(valid_indices)
}
}
/// Compare two `Array`s based on the ordering defined in [ord](crate::array::ord).
fn cmp_array(a: &dyn Array, b: &dyn Array) -> Ordering {
let cmp_op = build_compare(a, b).unwrap();
let length = a.len().max(b.len());
for i in 0..length {
let result = cmp_op(i, i);
if result != Ordering::Equal {
return result;
}
}
Ordering::Equal
}
/// One column to be used in lexicographical sort
#[derive(Clone, Debug)]
pub struct SortColumn {
pub values: ArrayRef,
pub options: Option<SortOptions>,
}
/// Sort a list of `ArrayRef` using `SortOptions` provided for each array.
///
/// Performs a stable lexicographical sort on values and indices.
///
/// Returns an `ArrowError::ComputeError(String)` if any of the array type is either unsupported by
/// `lexsort_to_indices` or `take`.
///
/// Example:
///
/// ```
/// use std::convert::From;
/// use std::sync::Arc;
/// use arrow::array::{ArrayRef, StringArray, PrimitiveArray, as_primitive_array};
/// use arrow::compute::kernels::sort::{SortColumn, SortOptions, lexsort};
/// use arrow::datatypes::Int64Type;
///
/// let sorted_columns = lexsort(&vec![
/// SortColumn {
/// values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![
/// None,
/// Some(-2),
/// Some(89),
/// Some(-64),
/// Some(101),
/// ])) as ArrayRef,
/// options: None,
/// },
/// SortColumn {
/// values: Arc::new(StringArray::from(vec![
/// Some("hello"),
/// Some("world"),
/// Some(","),
/// Some("foobar"),
/// Some("!"),
/// ])) as ArrayRef,
/// options: Some(SortOptions {
/// descending: true,
/// nulls_first: false,
/// }),
/// },
/// ], None).unwrap();
///
/// assert_eq!(as_primitive_array::<Int64Type>(&sorted_columns[0]).value(1), -64);
/// assert!(sorted_columns[0].is_null(0));
/// ```
pub fn lexsort(columns: &[SortColumn], limit: Option<usize>) -> Result<Vec<ArrayRef>> {
let indices = lexsort_to_indices(columns, limit)?;
columns
.iter()
.map(|c| take(c.values.as_ref(), &indices, None))
.collect()
}
/// Sort elements lexicographically from a list of `ArrayRef` into an unsigned integer
/// (`UInt32Array`) of indices.
pub fn lexsort_to_indices(
columns: &[SortColumn],
limit: Option<usize>,
) -> Result<UInt32Array> {
if columns.is_empty() {
return Err(ArrowError::InvalidArgumentError(
"Sort requires at least one column".to_string(),
));
}
if columns.len() == 1 {
// fallback to non-lexical sort
let column = &columns[0];
return sort_to_indices(&column.values, column.options, limit);
}
let row_count = columns[0].values.len();
if columns.iter().any(|item| item.values.len() != row_count) {
return Err(ArrowError::ComputeError(
"lexical sort columns have different row counts".to_string(),
));
};
let mut value_indices = (0..row_count).collect::<Vec<usize>>();
let mut len = value_indices.len();
if let Some(limit) = limit {
len = limit.min(len);
}
let lexicographical_comparator = LexicographicalComparator::try_new(columns)?;
// uint32 can be sorted unstably
sort_unstable_by(&mut value_indices, len, |a, b| {
lexicographical_comparator.compare(a, b)
});
Ok(UInt32Array::from(
(&value_indices)[0..len]
.iter()
.map(|i| *i as u32)
.collect::<Vec<u32>>(),
))
}
/// This is an unstable sort; it may not preserve the order of equal elements.
pub fn partial_sort<T, F>(v: &mut [T], limit: usize, mut is_less: F)
where
F: FnMut(&T, &T) -> Ordering,
{
let (before, _mid, _after) = v.select_nth_unstable_by(limit, &mut is_less);
before.sort_unstable_by(is_less);
}
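// A minimal sketch (not part of the original file) of `partial_sort`: only the
// first `limit` slots end up in sorted order; the tail is left unspecified.
#[cfg(test)]
mod partial_sort_sketch {
    use super::*;

    #[test]
    fn smallest_two_in_front() {
        let mut v = vec![5, 4, 3, 2, 1];
        partial_sort(&mut v, 2, |a, b| a.cmp(b));
        assert_eq!(&v[..2], &[1, 2]);
    }
}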
type LexicographicalCompareItem<'a> = (
&'a ArrayData, // data
DynComparator, // comparator
SortOptions, // sort_option
);
/// A lexicographical comparator that wraps given array data (columns) and can lexicographically compare data
/// at two given indices. The lifetime is the same as that of the wrapped data.
pub(super) struct LexicographicalComparator<'a> {
compare_items: Vec<LexicographicalCompareItem<'a>>,
}
impl LexicographicalComparator<'_> {
/// lexicographically compare values at the wrapped columns with given indices.
pub(super) fn compare<'a, 'b>(
&'a self,
a_idx: &'b usize,
b_idx: &'b usize,
) -> Ordering {
for (data, comparator, sort_option) in &self.compare_items {
match (data.is_valid(*a_idx), data.is_valid(*b_idx)) {
(true, true) => {
match (comparator)(*a_idx, *b_idx) {
// equal, move on to next column
Ordering::Equal => continue,
order => {
if sort_option.descending {
return order.reverse();
} else {
return order;
}
}
}
}
(false, true) => {
return if sort_option.nulls_first {
Ordering::Less
} else {
Ordering::Greater
};
}
(true, false) => {
return if sort_option.nulls_first {
Ordering::Greater
} else {
Ordering::Less
};
}
// equal, move on to next column
(false, false) => continue,
}
}
Ordering::Equal
}
/// Create a new lex comparator that will wrap the given sort columns and give comparison
/// results with two indices.
pub(super) fn try_new(
columns: &[SortColumn],
) -> Result<LexicographicalComparator<'_>> {
let compare_items = columns
.iter()
.map(|column| {
// flatten the sort columns and build a comparator for each
// use ArrayData for is_valid checks later to avoid dynamic call
let values = column.values.as_ref();
let data = values.data_ref();
Ok((
data,
build_compare(values, values)?,
column.options.unwrap_or_default(),
))
})
.collect::<Result<Vec<_>>>()?;
Ok(LexicographicalComparator { compare_items })
}
}
fn sort_valids<T, U>(
descending: bool,
valids: &mut [(u32, T)],
nulls: &mut [U],
len: usize,
mut cmp: impl FnMut(T, T) -> Ordering,
) where
T: ?Sized + Copy,
{
let valids_len = valids.len();
if !descending {
sort_unstable_by(valids, len.min(valids_len), |a, b| cmp(a.1, b.1));
} else {
sort_unstable_by(valids, len.min(valids_len), |a, b| cmp(a.1, b.1).reverse());
// reverse to keep a stable ordering
nulls.reverse();
}
}
fn sort_valids_array<T>(
descending: bool,
valids: &mut [(u32, ArrayRef)],
nulls: &mut [T],
len: usize,
) {
let valids_len = valids.len();
if !descending {
sort_unstable_by(valids, len.min(valids_len), |a, b| {
cmp_array(a.1.as_ref(), b.1.as_ref())
});
} else {
sort_unstable_by(valids, len.min(valids_len), |a, b| {
cmp_array(a.1.as_ref(), b.1.as_ref()).reverse()
});
// reverse to keep a stable ordering
nulls.reverse();
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::compute::util::tests::{
build_fixed_size_list_nullable, build_generic_list_nullable,
};
use rand::rngs::StdRng;
use rand::{Rng, RngCore, SeedableRng};
use std::convert::TryFrom;
use std::sync::Arc;
fn test_sort_to_indices_boolean_arrays(
data: Vec<Option<bool>>,
options: Option<SortOptions>,
limit: Option<usize>,
expected_data: Vec<u32>,
) {
let output = BooleanArray::from(data);
let expected = UInt32Array::from(expected_data);
let output =
sort_to_indices(&(Arc::new(output) as ArrayRef), options, limit).unwrap();
assert_eq!(output, expected)
}
fn test_sort_to_indices_primitive_arrays<T>(
data: Vec<Option<T::Native>>,
options: Option<SortOptions>,
limit: Option<usize>,
expected_data: Vec<u32>,
) where
T: ArrowPrimitiveType,
PrimitiveArray<T>: From<Vec<Option<T::Native>>>,
{
let output = PrimitiveArray::<T>::from(data);
let expected = UInt32Array::from(expected_data);
let output =
sort_to_indices(&(Arc::new(output) as ArrayRef), options, limit).unwrap();
assert_eq!(output, expected)
}
fn test_sort_primitive_arrays<T>(
data: Vec<Option<T::Native>>,
options: Option<SortOptions>,
limit: Option<usize>,
expected_data: Vec<Option<T::Native>>,
) where
T: ArrowPrimitiveType,
PrimitiveArray<T>: From<Vec<Option<T::Native>>>,
{
let output = PrimitiveArray::<T>::from(data);
let expected = Arc::new(PrimitiveArray::<T>::from(expected_data)) as ArrayRef;
let output = match limit {
Some(_) => {
sort_limit(&(Arc::new(output) as ArrayRef), options, limit).unwrap()
}
_ => sort(&(Arc::new(output) as ArrayRef), options).unwrap(),
};
assert_eq!(&output, &expected)
}
fn test_sort_to_indices_string_arrays(
data: Vec<Option<&str>>,
options: Option<SortOptions>,
limit: Option<usize>,
expected_data: Vec<u32>,
) {
let output = StringArray::from(data);
let expected = UInt32Array::from(expected_data);
let output =
sort_to_indices(&(Arc::new(output) as ArrayRef), options, limit).unwrap();
assert_eq!(output, expected)
}
/// Tests both Utf8 and LargeUtf8
fn test_sort_string_arrays(
data: Vec<Option<&str>>,
options: Option<SortOptions>,
limit: Option<usize>,
expected_data: Vec<Option<&str>>,
) {
let output = StringArray::from(data.clone());
let expected = Arc::new(StringArray::from(expected_data.clone())) as ArrayRef;
let output = match limit {
Some(_) => {
sort_limit(&(Arc::new(output) as ArrayRef), options, limit).unwrap()
}
_ => sort(&(Arc::new(output) as ArrayRef), options).unwrap(),
};
assert_eq!(&output, &expected);
let output = LargeStringArray::from(data);
let expected = Arc::new(LargeStringArray::from(expected_data)) as ArrayRef;
let output = match limit {
Some(_) => {
sort_limit(&(Arc::new(output) as ArrayRef), options, limit).unwrap()
}
_ => sort(&(Arc::new(output) as ArrayRef), options).unwrap(),
};
assert_eq!(&output, &expected)
}
fn test_sort_string_dict_arrays<T: ArrowDictionaryKeyType>(
data: Vec<Option<&str>>,
options: Option<SortOptions>,
limit: Option<usize>,
expected_data: Vec<Option<&str>>,
) {
let array = data.into_iter().collect::<DictionaryArray<T>>();
let array_values = array.values().clone();
let dict = array_values
.as_any()
.downcast_ref::<StringArray>()
.expect("Unable to get dictionary values");
let sorted = match limit {
Some(_) => {
sort_limit(&(Arc::new(array) as ArrayRef), options, limit).unwrap()
}
_ => sort(&(Arc::new(array) as ArrayRef), options).unwrap(),
};
let sorted = sorted
.as_any()
.downcast_ref::<DictionaryArray<T>>()
.unwrap();
let sorted_values = sorted.values();
let sorted_dict = sorted_values
.as_any()
.downcast_ref::<StringArray>()
.expect("Unable to get dictionary values");
let sorted_keys = sorted.keys();
assert_eq!(sorted_dict, dict);
let sorted_strings = StringArray::try_from(
(0..sorted.len())
.map(|i| {
if sorted.is_valid(i) {
Some(sorted_dict.value(sorted_keys.value(i).to_usize().unwrap()))
} else {
None
}
})
.collect::<Vec<Option<&str>>>(),
)
.expect("Unable to create string array from dictionary");
let expected =
StringArray::try_from(expected_data).expect("Unable to create string array");
assert_eq!(sorted_strings, expected)
}
fn test_sort_list_arrays<T>(
data: Vec<Option<Vec<Option<T::Native>>>>,
options: Option<SortOptions>,
limit: Option<usize>,
expected_data: Vec<Option<Vec<Option<T::Native>>>>,
fixed_length: Option<i32>,
) where
T: ArrowPrimitiveType,
PrimitiveArray<T>: From<Vec<Option<T::Native>>>,
{
// for FixedSizedList
if let Some(length) = fixed_length {
let input = Arc::new(build_fixed_size_list_nullable(data.clone(), length));
let sorted = match limit {
Some(_) => sort_limit(&(input as ArrayRef), options, limit).unwrap(),
_ => sort(&(input as ArrayRef), options).unwrap(),
};
let expected = Arc::new(build_fixed_size_list_nullable(
expected_data.clone(),
length,
)) as ArrayRef;
assert_eq!(&sorted, &expected);
}
// for List
let input = Arc::new(build_generic_list_nullable::<i32, T>(data.clone()));
let sorted = match limit {
Some(_) => sort_limit(&(input as ArrayRef), options, limit).unwrap(),
_ => sort(&(input as ArrayRef), options).unwrap(),
};
let expected =
Arc::new(build_generic_list_nullable::<i32, T>(expected_data.clone()))
as ArrayRef;
assert_eq!(&sorted, &expected);
// for LargeList
let input = Arc::new(build_generic_list_nullable::<i64, T>(data));
let sorted = match limit {
Some(_) => sort_limit(&(input as ArrayRef), options, limit).unwrap(),
_ => sort(&(input as ArrayRef), options).unwrap(),
};
let expected =
Arc::new(build_generic_list_nullable::<i64, T>(expected_data)) as ArrayRef;
assert_eq!(&sorted, &expected);
}
fn test_lex_sort_arrays(
input: Vec<SortColumn>,
expected_output: Vec<ArrayRef>,
limit: Option<usize>,
) |
fn test_sort_binary_arrays(
data: Vec<Option<Vec<u8>>>,
options: Option<SortOptions>,
limit: Option<usize>,
expected_data: Vec<Option<Vec<u8>>>,
fixed_length: Option<i32>,
) {
// Fixed size binary array
if fixed_length.is_some() {
let input = Arc::new(
FixedSizeBinaryArray::try_from_sparse_iter(data.iter().cloned()).unwrap(),
);
let sorted = match limit {
Some(_) => sort_limit(&(input as ArrayRef), options, limit).unwrap(),
None => sort(&(input as ArrayRef), options).unwrap(),
};
let expected = Arc::new(
FixedSizeBinaryArray::try_from_sparse_iter(expected_data.iter().cloned())
.unwrap(),
) as ArrayRef;
assert_eq!(&sorted, &expected);
}
// Generic size binary array
fn make_generic_binary_array<S: BinaryOffsetSizeTrait>(
data: &[Option<Vec<u8>>],
) -> Arc<GenericBinaryArray<S>> {
Arc::new(GenericBinaryArray::<S>::from_opt_vec(
data.iter()
.map(|binary| binary.as_ref().map(Vec::as_slice))
.collect(),
))
}
// BinaryArray
let input = make_generic_binary_array::<i32>(&data);
let sorted = match limit {
Some(_) => sort_limit(&(input as ArrayRef), options, limit).unwrap(),
None => sort(&(input as ArrayRef), options).unwrap(),
};
let expected = make_generic_binary_array::<i32>(&expected_data) as ArrayRef;
assert_eq!(&sorted, &expected);
// LargeBinaryArray
let input = make_generic_binary_array::<i64>(&data);
let sorted = match limit {
Some(_) => sort_limit(&(input as ArrayRef), options, limit).unwrap(),
None => sort(&(input as ArrayRef), options).unwrap(),
};
let expected = make_generic_binary_array::<i64>(&expected_data) as ArrayRef;
assert_eq!(&sorted, &expected);
}
#[test]
fn test_sort_to_indices_primitives() {
test_sort_to_indices_primitive_arrays::<Int8Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
None,
None,
vec![0, 5, 3, 1, 4, 2],
);
test_sort_to_indices_primitive_arrays::<Int16Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
None,
None,
vec![0, 5, 3, 1, 4, 2],
);
test_sort_to_indices_primitive_arrays::<Int32Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
None,
None,
vec![0, 5, 3, 1, 4, 2],
);
test_sort_to_indices_primitive_arrays::<Int64Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
None,
None,
vec![0, 5, 3, 1, 4, 2],
);
test_sort_to_indices_primitive_arrays::<Float32Type>(
vec![
None,
Some(-0.05),
Some(2.225),
Some(-1.01),
Some(-0.05),
None,
],
None,
None,
vec![0, 5, 3, 1, 4, 2],
);
test_sort_to_indices_primitive_arrays::<Float64Type>(
vec![
None,
Some(-0.05),
Some(2.225),
Some(-1.01),
Some(-0.05),
None,
],
None,
None,
vec![0, 5, 3, 1, 4, 2],
);
// descending
test_sort_to_indices_primitive_arrays::<Int8Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![2, 1, 4, 3, 5, 0], // [2, 4, 1, 3, 5, 0]
);
test_sort_to_indices_primitive_arrays::<Int16Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![2, 1, 4, 3, 5, 0],
);
test_sort_to_indices_primitive_arrays::<Int32Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![2, 1, 4, 3, 5, 0],
);
test_sort_to_indices_primitive_arrays::<Int64Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![2, 1, 4, 3, 5, 0],
);
test_sort_to_indices_primitive_arrays::<Float32Type>(
vec![
None,
Some(0.005),
Some(20.22),
Some(-10.3),
Some(0.005),
None,
],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![2, 1, 4, 3, 5, 0],
);
test_sort_to_indices_primitive_arrays::<Float64Type>(
vec![None, Some(0.0), Some(2.0), Some(-1.0), Some(0.0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![2, 1, 4, 3, 5, 0],
);
// descending, nulls first
test_sort_to_indices_primitive_arrays::<Int8Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![5, 0, 2, 1, 4, 3], // [5, 0, 2, 4, 1, 3]
);
test_sort_to_indices_primitive_arrays::<Int16Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![5, 0, 2, 1, 4, 3], // [5, 0, 2, 4, 1, 3]
);
test_sort_to_indices_primitive_arrays::<Int32Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![5, 0, 2, 1, 4, 3],
);
test_sort_to_indices_primitive_arrays::<Int64Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![5, 0, 2, 1, 4, 3],
);
test_sort_to_indices_primitive_arrays::<Float32Type>(
vec![None, Some(0.1), Some(0.2), Some(-1.3), Some(0.01), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![5, 0, 2, 1, 4, 3],
);
test_sort_to_indices_primitive_arrays::<Float64Type>(
vec![None, Some(10.1), Some(100.2), Some(-1.3), Some(10.01), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![5, 0, 2, 1, 4, 3],
);
// valid values less than limit with extra nulls
test_sort_to_indices_primitive_arrays::<Float64Type>(
vec![Some(2.0), None, None, Some(1.0)],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(3),
vec![3, 0, 1],
);
test_sort_to_indices_primitive_arrays::<Float64Type>(
vec![Some(2.0), None, None, Some(1.0)],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(3),
vec![1, 2, 3],
);
// more nulls than limit
test_sort_to_indices_primitive_arrays::<Float64Type>(
vec![Some(1.0), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(2),
vec![1, 2],
);
test_sort_to_indices_primitive_arrays::<Float64Type>(
vec![Some(1.0), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![0, 1],
);
}
#[test]
fn test_sort_to_indices_primitive_more_nulls_than_limit() {
test_sort_to_indices_primitive_arrays::<Int32Type>(
vec![None, None, Some(3), None, Some(1), None, Some(2)],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![4, 6],
);
}
#[test]
fn test_sort_boolean() {
// boolean
test_sort_to_indices_boolean_arrays(
vec![None, Some(false), Some(true), Some(true), Some(false), None],
None,
None,
vec![0, 5, 1, 4, 2, 3],
);
// boolean, descending
test_sort_to_indices_boolean_arrays(
vec![None, Some(false), Some(true), Some(true), Some(false), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![2, 3, 1, 4, 5, 0],
);
// boolean, descending, nulls first
test_sort_to_indices_boolean_arrays(
vec![None, Some(false), Some(true), Some(true), Some(false), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![5, 0, 2, 3, 1, 4],
);
// boolean, descending, nulls first, limit
test_sort_to_indices_boolean_arrays(
vec![None, Some(false), Some(true), Some(true), Some(false), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
Some(3),
vec![5, 0, 2],
);
// valid values less than limit with extra nulls
test_sort_to_indices_boolean_arrays(
vec![Some(true), None, None, Some(false)],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(3),
vec![3, 0, 1],
);
test_sort_to_indices_boolean_arrays(
vec![Some(true), None, None, Some(false)],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(3),
vec![1, 2, 3],
);
// more nulls than limit
test_sort_to_indices_boolean_arrays(
vec![Some(true), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(2),
vec![1, 2],
);
test_sort_to_indices_boolean_arrays(
vec![Some(true), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![0, 1],
);
}
#[test]
fn test_sort_primitives() {
// default case
test_sort_primitive_arrays::<UInt8Type>(
vec![None, Some(3), Some(5), Some(2), Some(3), None],
None,
None,
vec![None, None, Some(2), Some(3), Some(3), Some(5)],
);
test_sort_primitive_arrays::<UInt16Type>(
vec![None, Some(3), Some(5), Some(2), Some(3), None],
None,
None,
vec![None, None, Some(2), Some(3), Some(3), Some(5)],
);
test_sort_primitive_arrays::<UInt32Type>(
vec![None, Some(3), Some(5), Some(2), Some(3), None],
None,
None,
vec![None, None, Some(2), Some(3), Some(3), Some(5)],
);
test_sort_primitive_arrays::<UInt64Type>(
vec![None, Some(3), Some(5), Some(2), Some(3), None],
None,
None,
vec![None, None, Some(2), Some(3), Some(3), Some(5)],
);
// descending
test_sort_primitive_arrays::<Int8Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![Some(2), Some(0), Some(0), Some(-1), None, None],
);
test_sort_primitive_arrays::<Int16Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![Some(2), Some(0), Some(0), Some(-1), None, None],
);
test_sort_primitive_arrays::<Int32Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![Some(2), Some(0), Some(0), Some(-1), None, None],
);
test_sort_primitive_arrays::<Int16Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![Some(2), Some(0), Some(0), Some(-1), None, None],
);
// descending, nulls first
test_sort_primitive_arrays::<Int8Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![None, None, Some(2), Some(0), Some(0), Some(-1)],
);
test_sort_primitive_arrays::<Int16Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![None, None, Some(2), Some(0), Some(0), Some(-1)],
);
test_sort_primitive_arrays::<Int32Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![None, None, Some(2), Some(0), Some(0), Some(-1)],
);
test_sort_primitive_arrays::<Int64Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![None, None, Some(2), Some(0), Some(0), Some(-1)],
);
test_sort_primitive_arrays::<Int64Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
Some(3),
vec![None, None, Some(2)],
);
test_sort_primitive_arrays::<Float32Type>(
vec![None, Some(0.0), Some(2.0), Some(-1.0), Some(0.0), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![None, None, Some(2.0), Some(0.0), Some(0.0), Some(-1.0)],
);
test_sort_primitive_arrays::<Float64Type>(
vec![None, Some(0.0), Some(2.0), Some(-1.0), Some(f64::NAN), None],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![None, None, Some(f64::NAN), Some(2.0), Some(0.0), Some(-1.0)],
);
test_sort_primitive_arrays::<Float64Type>(
vec![Some(f64::NAN), Some(f64::NAN), Some(f64::NAN), Some(1.0)],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![Some(f64::NAN), Some(f64::NAN), Some(f64::NAN), Some(1.0)],
);
// int8 nulls first
test_sort_primitive_arrays::<Int8Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![None, None, Some(-1), Some(0), Some(0), Some(2)],
);
test_sort_primitive_arrays::<Int16Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![None, None, Some(-1), Some(0), Some(0), Some(2)],
);
test_sort_primitive_arrays::<Int32Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![None, None, Some(-1), Some(0), Some(0), Some(2)],
);
test_sort_primitive_arrays::<Int64Type>(
vec![None, Some(0), Some(2), Some(-1), Some(0), None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![None, None, Some(-1), Some(0), Some(0), Some(2)],
);
test_sort_primitive_arrays::<Float32Type>(
vec![None, Some(0.0), Some(2.0), Some(-1.0), Some(0.0), None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![None, None, Some(-1.0), Some(0.0), Some(0.0), Some(2.0)],
);
test_sort_primitive_arrays::<Float64Type>(
vec![None, Some(0.0), Some(2.0), Some(-1.0), Some(f64::NAN), None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![None, None, Some(-1.0), Some(0.0), Some(2.0), Some(f64::NAN)],
);
test_sort_primitive_arrays::<Float64Type>(
vec![Some(f64::NAN), Some(f64::NAN), Some(f64::NAN), Some(1.0)],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![Some(1.0), Some(f64::NAN), Some(f64::NAN), Some(f64::NAN)],
);
// limit
test_sort_primitive_arrays::<Float64Type>(
vec![Some(f64::NAN), Some(f64::NAN), Some(f64::NAN), Some(1.0)],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(2),
vec![Some(1.0), Some(f64::NAN)],
);
// limit with actual value
test_sort_primitive_arrays::<Float64Type>(
vec![Some(2.0), Some(4.0), Some(3.0), Some(1.0)],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(3),
vec![Some(1.0), Some(2.0), Some(3.0)],
);
// valid values less than limit with extra nulls
test_sort_primitive_arrays::<Float64Type>(
vec![Some(2.0), None, None, Some(1.0)],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(3),
vec![Some(1.0), Some(2.0), None],
);
test_sort_primitive_arrays::<Float64Type>(
vec![Some(2.0), None, None, Some(1.0)],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(3),
vec![None, None, Some(1.0)],
);
// more nulls than limit
test_sort_primitive_arrays::<Float64Type>(
vec![Some(2.0), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(2),
vec![None, None],
);
test_sort_primitive_arrays::<Float64Type>(
vec![Some(2.0), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![Some(2.0), None],
);
}
#[test]
fn test_sort_to_indices_strings() {
test_sort_to_indices_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
None,
None,
vec![0, 3, 5, 1, 4, 2],
);
test_sort_to_indices_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![2, 4, 1, 5, 3, 0],
);
test_sort_to_indices_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![0, 3, 5, 1, 4, 2],
);
test_sort_to_indices_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![3, 0, 2, 4, 1, 5],
);
test_sort_to_indices_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
Some(3),
vec![3, 0, 2],
);
// valid values less than limit with extra nulls
test_sort_to_indices_string_arrays(
vec![Some("def"), None, None, Some("abc")],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(3),
vec![3, 0, 1],
);
test_sort_to_indices_string_arrays(
vec![Some("def"), None, None, Some("abc")],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(3),
vec![1, 2, 3],
);
// more nulls than limit
test_sort_to_indices_string_arrays(
vec![Some("def"), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(2),
vec![1, 2],
);
test_sort_to_indices_string_arrays(
vec![Some("def"), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![0, 1],
);
}
#[test]
fn test_sort_strings() {
test_sort_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
None,
None,
vec![
None,
None,
Some("-ad"),
Some("bad"),
Some("glad"),
Some("sad"),
],
);
test_sort_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![
Some("sad"),
Some("glad"),
Some("bad"),
Some("-ad"),
None,
None,
],
);
test_sort_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![
None,
None,
Some("-ad"),
Some("bad"),
Some("glad"),
Some("sad"),
],
);
test_sort_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![
None,
None,
Some("sad"),
Some("glad"),
Some("bad"),
Some("-ad"),
],
);
test_sort_string_arrays(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
Some(3),
vec![None, None, Some("sad")],
);
// valid values less than limit with extra nulls
test_sort_string_arrays(
vec![Some("def"), None, None, Some("abc")],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(3),
vec![Some("abc"), Some("def"), None],
);
test_sort_string_arrays(
vec![Some("def"), None, None, Some("abc")],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(3),
vec![None, None, Some("abc")],
);
// more nulls than limit
test_sort_string_arrays(
vec![Some("def"), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(2),
vec![None, None],
);
test_sort_string_arrays(
vec![Some("def"), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![Some("def"), None],
);
}
#[test]
fn test_sort_string_dicts() {
test_sort_string_dict_arrays::<Int8Type>(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
None,
None,
vec![
None,
None,
Some("-ad"),
Some("bad"),
Some("glad"),
Some("sad"),
],
);
test_sort_string_dict_arrays::<Int16Type>(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![
Some("sad"),
Some("glad"),
Some("bad"),
Some("-ad"),
None,
None,
],
);
test_sort_string_dict_arrays::<Int32Type>(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![
None,
None,
Some("-ad"),
Some("bad"),
Some("glad"),
Some("sad"),
],
);
test_sort_string_dict_arrays::<Int16Type>(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
None,
vec![
None,
None,
Some("sad"),
Some("glad"),
Some("bad"),
Some("-ad"),
],
);
test_sort_string_dict_arrays::<Int16Type>(
vec![
None,
Some("bad"),
Some("sad"),
None,
Some("glad"),
Some("-ad"),
],
Some(SortOptions {
descending: true,
nulls_first: true,
}),
Some(3),
vec![None, None, Some("sad")],
);
// valid values less than limit with extra nulls
test_sort_string_dict_arrays::<Int16Type>(
vec![Some("def"), None, None, Some("abc")],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(3),
vec![Some("abc"), Some("def"), None],
);
test_sort_string_dict_arrays::<Int16Type>(
vec![Some("def"), None, None, Some("abc")],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(3),
vec![None, None, Some("abc")],
);
// more nulls than limit
test_sort_string_dict_arrays::<Int16Type>(
vec![Some("def"), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(2),
vec![None, None],
);
test_sort_string_dict_arrays::<Int16Type>(
vec![Some("def"), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![Some("def"), None],
);
}
#[test]
fn test_sort_list() {
test_sort_list_arrays::<Int8Type>(
vec![
Some(vec![Some(1)]),
Some(vec![Some(4)]),
Some(vec![Some(2)]),
Some(vec![Some(3)]),
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(vec![Some(1)]),
Some(vec![Some(2)]),
Some(vec![Some(3)]),
Some(vec![Some(4)]),
],
Some(1),
);
test_sort_list_arrays::<Float32Type>(
vec![
Some(vec![Some(1.0), Some(0.0)]),
Some(vec![Some(4.0), Some(3.0), Some(2.0), Some(1.0)]),
Some(vec![Some(2.0), Some(3.0), Some(4.0)]),
Some(vec![Some(3.0), Some(3.0), Some(3.0), Some(3.0)]),
Some(vec![Some(1.0), Some(1.0)]),
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(vec![Some(1.0), Some(0.0)]),
Some(vec![Some(1.0), Some(1.0)]),
Some(vec![Some(2.0), Some(3.0), Some(4.0)]),
Some(vec![Some(3.0), Some(3.0), Some(3.0), Some(3.0)]),
Some(vec![Some(4.0), Some(3.0), Some(2.0), Some(1.0)]),
],
None,
);
test_sort_list_arrays::<Float64Type>(
vec![
Some(vec![Some(1.0), Some(0.0)]),
Some(vec![Some(4.0), Some(3.0), Some(2.0), Some(1.0)]),
Some(vec![Some(2.0), Some(3.0), Some(4.0)]),
Some(vec![Some(3.0), Some(3.0), Some(3.0), Some(3.0)]),
Some(vec![Some(1.0), Some(1.0)]),
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(vec![Some(1.0), Some(0.0)]),
Some(vec![Some(1.0), Some(1.0)]),
Some(vec![Some(2.0), Some(3.0), Some(4.0)]),
Some(vec![Some(3.0), Some(3.0), Some(3.0), Some(3.0)]),
Some(vec![Some(4.0), Some(3.0), Some(2.0), Some(1.0)]),
],
None,
);
test_sort_list_arrays::<Int32Type>(
vec![
Some(vec![Some(1), Some(0)]),
Some(vec![Some(4), Some(3), Some(2), Some(1)]),
Some(vec![Some(2), Some(3), Some(4)]),
Some(vec![Some(3), Some(3), Some(3), Some(3)]),
Some(vec![Some(1), Some(1)]),
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(vec![Some(1), Some(0)]),
Some(vec![Some(1), Some(1)]),
Some(vec![Some(2), Some(3), Some(4)]),
Some(vec![Some(3), Some(3), Some(3), Some(3)]),
Some(vec![Some(4), Some(3), Some(2), Some(1)]),
],
None,
);
test_sort_list_arrays::<Int32Type>(
vec![
None,
Some(vec![Some(4), None, Some(2)]),
Some(vec![Some(2), Some(3), Some(4)]),
None,
Some(vec![Some(3), Some(3), None]),
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(vec![Some(2), Some(3), Some(4)]),
Some(vec![Some(3), Some(3), None]),
Some(vec![Some(4), None, Some(2)]),
None,
None,
],
Some(3),
);
test_sort_list_arrays::<Int32Type>(
vec![
Some(vec![Some(1), Some(0)]),
Some(vec![Some(4), Some(3), Some(2), Some(1)]),
Some(vec![Some(2), Some(3), Some(4)]),
Some(vec![Some(3), Some(3), Some(3), Some(3)]),
Some(vec![Some(1), Some(1)]),
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![Some(vec![Some(1), Some(0)]), Some(vec![Some(1), Some(1)])],
None,
);
// valid values less than limit with extra nulls
test_sort_list_arrays::<Int32Type>(
vec![Some(vec![Some(1)]), None, None, Some(vec![Some(2)])],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(3),
vec![Some(vec![Some(1)]), Some(vec![Some(2)]), None],
None,
);
test_sort_list_arrays::<Int32Type>(
vec![Some(vec![Some(1)]), None, None, Some(vec![Some(2)])],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(3),
vec![None, None, Some(vec![Some(1)])],
None,
);
// more nulls than limit
test_sort_list_arrays::<Int32Type>(
vec![Some(vec![Some(1)]), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(2),
vec![None, None],
None,
);
test_sort_list_arrays::<Int32Type>(
vec![Some(vec![Some(1)]), None, None, None],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
Some(2),
vec![Some(vec![Some(1)]), None],
None,
);
}
#[test]
fn test_sort_binary() {
test_sort_binary_arrays(
vec![
Some(vec![0, 0, 0]),
Some(vec![0, 0, 5]),
Some(vec![0, 0, 3]),
Some(vec![0, 0, 7]),
Some(vec![0, 0, 1]),
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(vec![0, 0, 0]),
Some(vec![0, 0, 1]),
Some(vec![0, 0, 3]),
Some(vec![0, 0, 5]),
Some(vec![0, 0, 7]),
],
Some(3),
);
// with nulls
test_sort_binary_arrays(
vec![
Some(vec![0, 0, 0]),
None,
Some(vec![0, 0, 3]),
Some(vec![0, 0, 7]),
Some(vec![0, 0, 1]),
None,
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(vec![0, 0, 0]),
Some(vec![0, 0, 1]),
Some(vec![0, 0, 3]),
Some(vec![0, 0, 7]),
None,
None,
],
Some(3),
);
test_sort_binary_arrays(
vec![
Some(vec![3, 5, 7]),
None,
Some(vec![1, 7, 1]),
Some(vec![2, 7, 3]),
None,
Some(vec![1, 4, 3]),
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(vec![1, 4, 3]),
Some(vec![1, 7, 1]),
Some(vec![2, 7, 3]),
Some(vec![3, 5, 7]),
None,
None,
],
Some(3),
);
// descending
test_sort_binary_arrays(
vec![
Some(vec![0, 0, 0]),
None,
Some(vec![0, 0, 3]),
Some(vec![0, 0, 7]),
Some(vec![0, 0, 1]),
None,
],
Some(SortOptions {
descending: true,
nulls_first: false,
}),
None,
vec![
Some(vec![0, 0, 7]),
Some(vec![0, 0, 3]),
Some(vec![0, 0, 1]),
Some(vec![0, 0, 0]),
None,
None,
],
Some(3),
);
// nulls first
test_sort_binary_arrays(
vec![
Some(vec![0, 0, 0]),
None,
Some(vec![0, 0, 3]),
Some(vec![0, 0, 7]),
Some(vec![0, 0, 1]),
None,
],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
None,
vec![
None,
None,
Some(vec![0, 0, 0]),
Some(vec![0, 0, 1]),
Some(vec![0, 0, 3]),
Some(vec![0, 0, 7]),
],
Some(3),
);
// limit
test_sort_binary_arrays(
vec![
Some(vec![0, 0, 0]),
None,
Some(vec![0, 0, 3]),
Some(vec![0, 0, 7]),
Some(vec![0, 0, 1]),
None,
],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(4),
vec![None, None, Some(vec![0, 0, 0]), Some(vec![0, 0, 1])],
Some(3),
);
// var length
test_sort_binary_arrays(
vec![
Some(b"Hello".to_vec()),
None,
Some(b"from".to_vec()),
Some(b"Apache".to_vec()),
Some(b"Arrow-rs".to_vec()),
None,
],
Some(SortOptions {
descending: false,
nulls_first: false,
}),
None,
vec![
Some(b"Apache".to_vec()),
Some(b"Arrow-rs".to_vec()),
Some(b"Hello".to_vec()),
Some(b"from".to_vec()),
None,
None,
],
None,
);
// limit
test_sort_binary_arrays(
vec![
Some(b"Hello".to_vec()),
None,
Some(b"from".to_vec()),
Some(b"Apache".to_vec()),
Some(b"Arrow-rs".to_vec()),
None,
],
Some(SortOptions {
descending: false,
nulls_first: true,
}),
Some(4),
vec![
None,
None,
Some(b"Apache".to_vec()),
Some(b"Arrow-rs".to_vec()),
],
None,
);
}
#[test]
fn test_lex_sort_single_column() {
let input = vec![SortColumn {
values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(17),
Some(2),
Some(-1),
Some(0),
])) as ArrayRef,
options: None,
}];
let expected = vec![Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(0),
Some(2),
Some(17),
])) as ArrayRef];
test_lex_sort_arrays(input.clone(), expected, None);
let expected = vec![Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(0),
Some(2),
])) as ArrayRef];
test_lex_sort_arrays(input, expected, Some(3));
}
#[test]
fn test_lex_sort_unaligned_rows() {
let input = vec![
SortColumn {
values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![None, Some(-1)]))
as ArrayRef,
options: None,
},
SortColumn {
values: Arc::new(StringArray::from(vec![Some("foo")])) as ArrayRef,
options: None,
},
];
assert!(
lexsort(&input, None).is_err(),
"lexsort should reject columns with different row counts"
);
}
#[test]
fn test_lex_sort_mixed_types() {
let input = vec![
SortColumn {
values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(0),
Some(2),
Some(-1),
Some(0),
])) as ArrayRef,
options: None,
},
SortColumn {
values: Arc::new(PrimitiveArray::<UInt32Type>::from(vec![
Some(101),
Some(8),
Some(7),
Some(102),
])) as ArrayRef,
options: None,
},
SortColumn {
values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(-2),
Some(-3),
Some(-4),
])) as ArrayRef,
options: None,
},
];
let expected = vec![
Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(0),
Some(0),
Some(2),
])) as ArrayRef,
Arc::new(PrimitiveArray::<UInt32Type>::from(vec![
Some(7),
Some(101),
Some(102),
Some(8),
])) as ArrayRef,
Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-3),
Some(-1),
Some(-4),
Some(-2),
])) as ArrayRef,
];
test_lex_sort_arrays(input, expected, None);
        // test mix of string and int64 with options
let input = vec![
SortColumn {
values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(0),
Some(2),
Some(-1),
Some(0),
])) as ArrayRef,
options: Some(SortOptions {
descending: true,
nulls_first: true,
}),
},
SortColumn {
values: Arc::new(StringArray::from(vec![
Some("foo"),
Some("9"),
Some("7"),
Some("bar"),
])) as ArrayRef,
options: Some(SortOptions {
descending: true,
nulls_first: true,
}),
},
];
let expected = vec![
Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(2),
Some(0),
Some(0),
Some(-1),
])) as ArrayRef,
Arc::new(StringArray::from(vec![
Some("9"),
Some("foo"),
Some("bar"),
Some("7"),
])) as ArrayRef,
];
test_lex_sort_arrays(input, expected, None);
// test sort with nulls first
let input = vec![
SortColumn {
values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![
None,
Some(-1),
Some(2),
None,
])) as ArrayRef,
options: Some(SortOptions {
descending: true,
nulls_first: true,
}),
},
SortColumn {
values: Arc::new(StringArray::from(vec![
Some("foo"),
Some("world"),
Some("hello"),
None,
])) as ArrayRef,
options: Some(SortOptions {
descending: true,
nulls_first: true,
}),
},
];
let expected = vec![
Arc::new(PrimitiveArray::<Int64Type>::from(vec![
None,
None,
Some(2),
Some(-1),
])) as ArrayRef,
Arc::new(StringArray::from(vec![
None,
Some("foo"),
Some("hello"),
Some("world"),
])) as ArrayRef,
];
test_lex_sort_arrays(input, expected, None);
// test sort with nulls last
let input = vec![
SortColumn {
values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![
None,
Some(-1),
Some(2),
None,
])) as ArrayRef,
options: Some(SortOptions {
descending: true,
nulls_first: false,
}),
},
SortColumn {
values: Arc::new(StringArray::from(vec![
Some("foo"),
Some("world"),
Some("hello"),
None,
])) as ArrayRef,
options: Some(SortOptions {
descending: true,
nulls_first: false,
}),
},
];
let expected = vec![
Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(2),
Some(-1),
None,
None,
])) as ArrayRef,
Arc::new(StringArray::from(vec![
Some("hello"),
Some("world"),
Some("foo"),
None,
])) as ArrayRef,
];
test_lex_sort_arrays(input, expected, None);
// test sort with opposite options
let input = vec![
SortColumn {
values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![
None,
Some(-1),
Some(2),
Some(-1),
None,
])) as ArrayRef,
options: Some(SortOptions {
descending: false,
nulls_first: false,
}),
},
SortColumn {
values: Arc::new(StringArray::from(vec![
Some("foo"),
Some("bar"),
Some("world"),
Some("hello"),
None,
])) as ArrayRef,
options: Some(SortOptions {
descending: true,
nulls_first: true,
}),
},
];
let expected = vec![
Arc::new(PrimitiveArray::<Int64Type>::from(vec![
Some(-1),
Some(-1),
Some(2),
None,
None,
])) as ArrayRef,
Arc::new(StringArray::from(vec![
Some("hello"),
Some("bar"),
Some("world"),
None,
Some("foo"),
])) as ArrayRef,
];
test_lex_sort_arrays(input, expected, None);
}
#[test]
fn test_partial_sort() {
let mut before: Vec<&str> = vec![
"a", "cat", "mat", "on", "sat", "the", "xxx", "xxxx", "fdadfdsf",
];
let mut d = before.clone();
d.sort_unstable();
for last in 0..before.len() {
partial_sort(&mut before, last, |a, b| a.cmp(b));
assert_eq!(&d[0..last], &before.as_slice()[0..last]);
}
}
#[test]
fn test_partial_rand_sort() {
let size = 1000u32;
let mut rng = StdRng::seed_from_u64(42);
let mut before: Vec<u32> = (0..size).map(|_| rng.gen::<u32>()).collect();
let mut d = before.clone();
let last = (rng.next_u32() % size) as usize;
d.sort_unstable();
partial_sort(&mut before, last, |a, b| a.cmp(b));
assert_eq!(&d[0..last], &before[0..last]);
}
}
main.tsx
import '@island.is/api/mocks'
import React from 'react'
import ReactDOM from 'react-dom'
import * as Sentry from '@sentry/react'
import { Integrations } from '@sentry/tracing'
import './auth'
import { environment } from './environments'
import App from './app/App'
Sentry.init({
dsn: environment.sentry.dsn,
integrations: [new Integrations.BrowserTracing()],
enabled: process?.env?.NODE_ENV !== 'development',
environment: 'frontend',
tracesSampleRate: 0.01,
})
ReactDOM.render(<App />, document.getElementById('root'))
results.go
package scans
import (
"encoding/json"
"fmt"
"strings"
"time"
"github.com/ion-channel/ionic/dependencies"
"github.com/ion-channel/ionic/secrets"
"github.com/ion-channel/ionic/vulnerabilities"
)
// UntranslatedResults represents a result of a specific type that has not been
// translated for use in reports
type UntranslatedResults struct {
AboutYML *AboutYMLResults `json:"about_yml,omitempty"`
Buildsystem *BuildsystemResults `json:"buildsystems,omitempty"`
Community *CommunityResults `json:"community,omitempty"`
Coverage *CoverageResults `json:"coverage,omitempty"`
Dependency *DependencyResults `json:"dependency,omitempty"`
Difference *DifferenceResults `json:"difference,omitempty"`
Ecosystem *EcosystemResults `json:"ecosystems,omitempty"`
ExternalVulnerabilities *ExternalVulnerabilitiesResults `json:"external_vulnerability,omitempty"`
License *LicenseResults `json:"license,omitempty"`
Virus *VirusResults `json:"clamav,omitempty"`
VirusDetails *ClamavDetails `json:"clam_av_details,omitempty"`
Vulnerability *VulnerabilityResults `json:"vulnerabilities,omitempty"`
Secret *SecretResults `json:"secrets,omitempty"`
}
// Translate moves information from the particular sub-struct, e.g.
// AboutYMLResults or LicenseResults, into the generic Data struct
func (u *UntranslatedResults) Translate() *TranslatedResults {
var tr TranslatedResults
// There is an argument to be made that the following "if" clauses
// could be simplified with introspection since they all do
	// basically the same thing. I've (dmiles) chosen to write it all
// out in the name of explicit, easily-readable code.
if u.AboutYML != nil {
tr.Type = "about_yml"
tr.Data = *u.AboutYML
}
if u.Buildsystem != nil {
tr.Type = "buildsystems"
tr.Data = *u.Buildsystem
}
if u.Community != nil {
tr.Type = "community"
tr.Data = *u.Community
}
if u.Coverage != nil {
tr.Type = "coverage"
tr.Data = *u.Coverage
}
if u.Dependency != nil {
tr.Type = "dependency"
tr.Data = *u.Dependency
}
if u.Difference != nil {
tr.Type = "difference"
tr.Data = *u.Difference
}
if u.Ecosystem != nil {
tr.Type = "ecosystems"
tr.Data = *u.Ecosystem
}
if u.ExternalVulnerabilities != nil {
tr.Type = "external_vulnerability"
tr.Data = *u.ExternalVulnerabilities
}
if u.License != nil {
tr.Type = "license"
tr.Data = *u.License
}
if u.Secret != nil {
tr.Type = "secrets"
tr.Data = *u.Secret
}
if u.Virus != nil {
tr.Type = "virus"
u.Virus.ClamavDetails = *u.VirusDetails
tr.Data = *u.Virus
}
if u.Vulnerability != nil {
tr.Type = "vulnerability"
tr.Data = *u.Vulnerability
}
return &tr
}
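// translateExample is a minimal illustrative sketch (not part of the original
// API) showing how Translate moves a typed sub-result into the generic
// TranslatedResults shape. The coverage value is hypothetical.
func translateExample() *TranslatedResults {
	u := &UntranslatedResults{Coverage: &CoverageResults{Value: 82.5}}
	// After translation, Type is "coverage" and Data holds the CoverageResults.
	return u.Translate()
}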
// TranslatedResults represents a result of a specific type that has been
// translated for use in reports
type TranslatedResults struct {
Type string `json:"type" xml:"type"`
Data interface{} `json:"data,omitempty" xml:"data,omitempty"`
}
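// translatedResults mirrors TranslatedResults but defers decoding of the
// payload: RawData is kept as raw JSON until Type is known.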
type translatedResults struct {
Type string `json:"type"`
RawData json.RawMessage `json:"data"`
}
// UnmarshalJSON is a custom JSON unmarshaller implementation for the standard
// go json package to know how to properly interpret TranslatedResults from
// JSON.
func (r *TranslatedResults) UnmarshalJSON(b []byte) error {
var tr translatedResults
err := json.Unmarshal(b, &tr)
if err != nil {
return err
}
r.Type = tr.Type
switch strings.ToLower(tr.Type) {
case "about_yml":
var a AboutYMLResults
err := json.Unmarshal(tr.RawData, &a)
if err != nil {
return fmt.Errorf("failed to unmarshall about yml results: %v", err)
}
r.Data = a
case "buildsystems":
var b BuildsystemResults
err := json.Unmarshal(tr.RawData, &b)
if err != nil {
return fmt.Errorf("failed to unmarshall buildsystems results: %v", err)
}
r.Data = b
case "community":
var c CommunityResults
err := json.Unmarshal(tr.RawData, &c)
if err != nil {
			// Note: community results are sometimes returned as a one-element
			// array; fall back to decoding a slice in that case
if strings.Contains(err.Error(), "cannot unmarshal array") {
var sliceOfCommunityResults []CommunityResults
err := json.Unmarshal(tr.RawData, &sliceOfCommunityResults)
if err == nil {
c = sliceOfCommunityResults[0]
break
}
}
return fmt.Errorf("failed to unmarshall community results: %v", err)
}
r.Data = c
case "coverage", "external_coverage":
var c CoverageResults
err := json.Unmarshal(tr.RawData, &c)
if err != nil {
return fmt.Errorf("failed to unmarshall coverage results: %v", err)
}
r.Data = c
case "dependency":
var d DependencyResults
err := json.Unmarshal(tr.RawData, &d)
if err != nil {
return fmt.Errorf("failed to unmarshall dependency results: %v", err)
}
r.Data = d
case "ecosystems":
var e EcosystemResults
err := json.Unmarshal(tr.RawData, &e)
if err != nil {
return fmt.Errorf("failed to unmarshall ecosystems results: %v", err)
}
r.Data = e
case "license":
var l LicenseResults
err := json.Unmarshal(tr.RawData, &l)
if err != nil {
return fmt.Errorf("failed to unmarshall license results: %v", err)
}
r.Data = l
case "secrets":
var b SecretResults
err := json.Unmarshal(tr.RawData, &b)
if err != nil {
return fmt.Errorf("failed to unmarshall secrets results: %v", err)
}
r.Data = b
case "virus", "clamav":
var v VirusResults
err := json.Unmarshal(tr.RawData, &v)
if err != nil {
return fmt.Errorf("failed to unmarshall virus results: %v", err)
}
r.Data = v
case "vulnerability":
var v VulnerabilityResults
err := json.Unmarshal(tr.RawData, &v)
if err != nil {
return fmt.Errorf("failed to unmarshall vulnerability results: %v", err)
}
r.Data = v
case "external_vulnerability":
var v ExternalVulnerabilitiesResults
err := json.Unmarshal(tr.RawData, &v)
if err != nil {
return fmt.Errorf("failed to unmarshall external vulnerabilities results: %v", err)
}
r.Data = v
case "difference":
var v DifferenceResults
err := json.Unmarshal(tr.RawData, &v)
		if err != nil {
			return fmt.Errorf("failed to unmarshal difference results: %v", err)
		}
r.Data = v
default:
return fmt.Errorf("unsupported results type found: %v", tr.Type)
}
return nil
}
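// unmarshalTranslatedExample is an illustrative sketch (added here, not part
// of the original file) of how the custom unmarshaller dispatches on the
// "type" field. The JSON payload is hypothetical.
func unmarshalTranslatedExample() (*TranslatedResults, error) {
	raw := []byte(`{"type":"coverage","data":{"value":75.0}}`)
	var tr TranslatedResults
	if err := json.Unmarshal(raw, &tr); err != nil {
		return nil, err
	}
	// tr.Data is now a CoverageResults value rather than raw JSON.
	return &tr, nil
}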
// UnmarshalJSON is a custom JSON unmarshaller implementation for the standard
// go json package to know how to properly interpret UntranslatedResults from
// JSON.
func (u *UntranslatedResults) UnmarshalJSON(b []byte) error {
// first look for results in the proper translated format
// e.g. CommunityResults
tr := &translatedResults{}
err := json.Unmarshal(b, tr)
if err != nil {
// we have received invalid stringified json
return fmt.Errorf("unable to unmarshal json")
}
// if there is a type and it is `community`
// parse the data out
if tr.Type == "community" {
c := &CommunityResults{}
err = json.Unmarshal(tr.RawData, c)
if err != nil {
return err
}
u.Community = c
return nil
}
	// it is not translated and not community
	// the ur2 alias is required to keep the parser from recursing here:
	// the alias type has no UnmarshalJSON method, so json.Unmarshal falls
	// back to default struct decoding instead of calling this method again
type ur2 UntranslatedResults
err = json.Unmarshal(b, (*ur2)(u))
if err != nil {
// we have received invalid stringified json
return fmt.Errorf("unable to unmarshal json")
}
return nil
}
// AboutYMLResults represents the data collected from the AboutYML scan. It
// includes a message and whether or not the About YML file found was valid.
type AboutYMLResults struct {
Message string `json:"message" xml:"message"`
Valid bool `json:"valid" xml:"valid"`
Content string `json:"content" xml:"content"`
}
// Compiler represents the data for individual compilers or interpreters found
type Compiler struct {
Name string `json:"name" xml:"name"`
Version string `json:"version" xml:"version"`
}
// Image represents the data for individual docker images found
type Image struct {
Name string `json:"name" xml:"name"`
Version string `json:"version" xml:"version"`
}
// Dockerfile represents the data collected from a Dockerfile
type Dockerfile struct {
Images []Image `json:"images" xml:"images"`
Dependencies []dependencies.Dependency `json:"dependencies" xml:"dependencies"`
}
// BuildsystemResults represents the data collected from a buildsystems scan. It
// includes the name and version of any compilers found
type BuildsystemResults struct {
Compilers []Compiler `json:"compilers" xml:"compilers"`
Dockerfile Dockerfile `json:"docker_file" xml:"docker_file"`
}
// CommunityResults represents the data collected from a community scan. It
// represents all known data regarding the open community of a software project
type CommunityResults struct {
Committers int `json:"committers" xml:"committers"`
Name string `json:"name" xml:"name"`
URL string `json:"url" xml:"url"`
CommittedAt time.Time `json:"committed_at" xml:"committed_at"`
OldNames []string `json:"old_names" xml:"old_names"`
Stars int `json:"stars" xml:"stars"`
NameChanged bool `json:"name_changed" xml:"name_changed"`
}
// CoverageResults represents the data collected from a code coverage scan. It
// includes the value of the code coverage seen for the project.
type CoverageResults struct {
Value float64 `json:"value" xml:"value"`
}
// Dependency represents data for an individual requirement resolution
type Dependency struct {
LatestVersion string `json:"latest_version" xml:"latest_version"`
Org string `json:"org" xml:"org"`
Name string `json:"name" xml:"name"`
Type string `json:"type" xml:"type"`
Package string `json:"package" xml:"package"`
Version string `json:"version" xml:"version"`
Scope string `json:"scope" xml:"scope"`
Requirement string `json:"requirement" xml:"requirement"`
File string `json:"file" xml:"file"`
DepMeta *DependencyMeta `json:"dependency_counts,omitempty" xml:"dependency_counts"`
OutdatedMeta *OutdatedMeta `json:"outdated_version,omitempty" xml:"outdated_version"`
Dependencies []Dependency `json:"dependencies" xml:"dependencies"`
}
// OutdatedMeta is used to represent the number of versions a dependency is behind
type OutdatedMeta struct {
MajorBehind int `json:"major_behind" xml:"major_behind"`
MinorBehind int `json:"minor_behind" xml:"minor_behind"`
PatchBehind int `json:"patch_behind" xml:"patch_behind"`
}
// DependencyMeta represents data for a summary of all dependencies resolved
type DependencyMeta struct {
FirstDegreeCount int `json:"first_degree_count" xml:"first_degree_count"`
NoVersionCount int `json:"no_version_count" xml:"no_version_count"`
TotalUniqueCount int `json:"total_unique_count" xml:"total_unique_count"`
UpdateAvailableCount int `json:"update_available_count" xml:"update_available_count"`
VulnerableCount int `json:"vulnerable_count" xml:"vulnerable_count"`
}
// DependencyResults represents the data collected from a dependency scan. It
// includes a list of the dependencies seen and meta data counts about those
// dependencies seen.
type DependencyResults struct {
Dependencies []Dependency `json:"dependencies" xml:"dependencies"`
Meta DependencyMeta `json:"meta" xml:"meta"`
}
// DifferenceResults represents the checksum of a project. It includes a checksum
// and flag indicating if there was a difference detected within that last 5 scans
type DifferenceResults struct {
Checksum string `json:"checksum" xml:"checksum"`
Difference bool `json:"difference" xml:"difference"`
}
// EcosystemResults represents the data collected from an ecosystems scan. It
// includes the name of the ecosystem and the number of lines seen for the given
// ecosystem.
type EcosystemResults struct {
Ecosystems map[string]int `json:"ecosystems" xml:"ecosystems"`
}
// MarshalJSON meets the marshaller interface to custom wrangle an ecosystem
// result into the json shape
func (e EcosystemResults) MarshalJSON() ([]byte, error) {
return json.Marshal(e.Ecosystems)
}
// UnmarshalJSON meets the unmarshaller interface to custom wrangle the
// ecosystem scan into an ecosystem result
func (e *EcosystemResults) UnmarshalJSON(b []byte) error {
var m map[string]int
err := json.Unmarshal(b, &m)
if err != nil {
return fmt.Errorf("failed to unmarshal ecosystem result: %v", err.Error())
}
e.Ecosystems = m
return nil
}
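// ecosystemRoundTripExample is an illustrative sketch (not in the original
// file): EcosystemResults serializes as a bare map of ecosystem name to line
// count rather than a wrapping object. The counts are hypothetical.
func ecosystemRoundTripExample() (EcosystemResults, error) {
	var e EcosystemResults
	err := json.Unmarshal([]byte(`{"Go": 1200, "Ruby": 300}`), &e)
	// On success, e.Ecosystems["Go"] == 1200.
	return e, err
}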
// ExternalVulnerabilitiesResults represents the data collected from an external
// vulnerability scan. It includes the number of each vulnerability criticality
// seen within the project.
type ExternalVulnerabilitiesResults struct {
Critical int `json:"critical" xml:"critical"`
High int `json:"high" xml:"high"`
Medium int `json:"medium" xml:"medium"`
Low int `json:"low" xml:"low"`
}
// LicenseResults represents the data collected from a license scan. It
// includes the name and type of each license seen within the project.
type LicenseResults struct {
*License `json:"license" xml:"license"`
}
// License represents a name and slice of types of licenses seen in a given file
type License struct {
Name string `json:"name" xml:"name"`
Type []LicenseType `json:"type" xml:"type"`
}
// LicenseType represents a type of license such as MIT, Apache 2.0, etc
type LicenseType struct {
Name string `json:"name" xml:"name"`
Confidence float32 `json:"confidence"`
}
// FileNotes contains data related to file discoveries
type FileNotes map[string][]string
// ClamavDetails contains data related to the virus scan engine
type ClamavDetails struct {
ClamavVersion string `json:"clamav_version" xml:"clamav_version"`
ClamavDbVersion string `json:"clamav_db_version" xml:"clamav_db_version"`
}
// Secret derived struct for results specific data
type Secret struct {
secrets.Secret
File string `json:"file" xml:"file"`
}
// SecretResults contains secrets finding data
type SecretResults struct {
Secrets []Secret `json:"secrets" xml:"secrets"`
}
// MarshalJSON meets the marshaller interface to custom wrangle a secrets
// result into the json shape
func (e SecretResults) MarshalJSON() ([]byte, error) {
return json.Marshal(e.Secrets)
}
// UnmarshalJSON meets the unmarshaller interface to custom wrangle the
// secrets scan into a secrets result
func (e *SecretResults) UnmarshalJSON(b []byte) error {
var s []Secret
err := json.Unmarshal(b, &s)
if err != nil {
return fmt.Errorf("failed to unmarshal secrets result: %v", err.Error())
}
e.Secrets = s
return nil
}
// VirusResults represents the data collected from a virus scan. It includes
// information about the viruses seen and the virus scanner used.
type VirusResults struct {
KnownViruses int `json:"known_viruses" xml:"known_viruses"`
EngineVersion string `json:"engine_version" xml:"engine_version"`
ScannedDirectories int `json:"scanned_directories" xml:"scanned_directories"`
ScannedFiles int `json:"scanned_files" xml:"scanned_files"`
InfectedFiles int `json:"infected_files" xml:"infected_files"`
DataScanned string `json:"data_scanned" xml:"data_scanned"`
DataRead string `json:"data_read" xml:"data_read"`
Time string `json:"time" xml:"time"`
FileNotes FileNotes `json:"file_notes" xml:"file_notes"`
ClamavDetails ClamavDetails `json:"clam_av_details" xml:"clam_av_details"`
}
// VulnerabilityResults represents the data collected from a vulnerability scan.
// It includes information about the vulnerabilities seen.
type VulnerabilityResults struct {
Vulnerabilities []VulnerabilityResultsProduct `json:"vulnerabilities" xml:"vulnerabilities"`
Meta struct {
VulnerabilityCount int `json:"vulnerability_count" xml:"vulnerability_count"`
ResolvedTo string `json:"resolved_to" xml:"resolved_to"`
} `json:"meta" xml:"meta"`
}
// VulnerabilityResultsProduct represents the data about a product collected from
// a vulnerability scan. Vulnerabilities are linked to products.
type VulnerabilityResultsProduct struct {
ID int `json:"id" xml:"id"`
ExternalID string `json:"external_id" xml:"external_id"`
SourceID int `json:"source_id" xml:"source_id"`
Title string `json:"title" xml:"title"`
Name string `json:"name" xml:"name"`
Org string `json:"org" xml:"org"`
Version string `json:"version" xml:"version"`
Up interface{} `json:"up" xml:"up"`
Edition interface{} `json:"edition" xml:"edition"`
Aliases []string `json:"aliases" xml:"aliases"`
CreatedAt time.Time `json:"created_at" xml:"created_at"`
UpdatedAt time.Time `json:"updated_at" xml:"updated_at"`
References interface{} `json:"references" xml:"references"`
Part interface{} `json:"part" xml:"part"`
Language interface{} `json:"language" xml:"language"`
Vulnerabilities []VulnerabilityResultsVulnerability `json:"vulnerabilities" xml:"vulnerabilities"`
Query Dependency `json:"query" xml:"query"`
}
// VulnerabilityResultsVulnerability wrapper
type VulnerabilityResultsVulnerability struct {
vulnerabilities.Vulnerability
Dependencies []VulnerabilityResultsProduct `json:"dependencies" xml:"dependencies"`
}
ini.go
// Copyright 2014 Unknwon
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
// Package ini provides INI file read and write functionality in Go.
package ini
import (
"bytes"
"errors"
"fmt"
"io"
"os"
"regexp"
"runtime"
"strconv"
"strings"
"sync"
"time"
)
const (
// Name for default section. You can use this constant or the string literal.
	// In most cases, an empty string is all you need to access the section.
DEFAULT_SECTION = "DEFAULT"
	// Maximum allowed depth when recursively substituting variable names.
_DEPTH_VALUES = 99
_VERSION = "1.19.1"
)
// Version returns current package version literal.
func Version() string {
return _VERSION
}
var (
// Delimiter to determine or compose a new line.
// This variable will be changed to "\r\n" automatically on Windows
// at package init time.
LineBreak = "\n"
// Variable regexp pattern: %(variable)s
varPattern = regexp.MustCompile(`%\(([^\)]+)\)s`)
// Indicate whether to align "=" sign with spaces to produce pretty output
// or reduce all possible spaces for compact format.
PrettyFormat = true
// Explicitly write DEFAULT section header
DefaultHeader = false
)
func init() {
if runtime.GOOS == "windows" {
LineBreak = "\r\n"
}
}
func inSlice(str string, s []string) bool {
for _, v := range s {
if str == v {
return true
}
}
return false
}
// dataSource is an interface that returns an object which can be read and closed.
type dataSource interface {
ReadCloser() (io.ReadCloser, error)
}
// sourceFile represents an object that contains content on the local file system.
type sourceFile struct {
name string
}
func (s sourceFile) ReadCloser() (_ io.ReadCloser, err error) {
return os.Open(s.name)
}
type bytesReadCloser struct {
reader io.Reader
}
func (rc *bytesReadCloser) Read(p []byte) (n int, err error) {
return rc.reader.Read(p)
}
func (rc *bytesReadCloser) Close() error {
return nil
}
// sourceData represents an object that contains content in memory.
type sourceData struct {
data []byte
}
func (s *sourceData) ReadCloser() (io.ReadCloser, error) {
return &bytesReadCloser{bytes.NewReader(s.data)}, nil
}
// File represents a combination of one or more INI files in memory.
type File struct {
	// BlockMode indicates whether to lock data during read/write operations.
	// It keeps concurrent access safe, but can be disabled when that doesn't matter.
BlockMode bool
// Make sure data is safe in multiple goroutines.
lock sync.RWMutex
// Allow combination of multiple data sources.
dataSources []dataSource
// Actual data is stored here.
sections map[string]*Section
// To keep data in order.
sectionList []string
options LoadOptions
NameMapper
}
// newFile initializes File object with given data sources.
func newFile(dataSources []dataSource, opts LoadOptions) *File {
return &File{
BlockMode: true,
dataSources: dataSources,
sections: make(map[string]*Section),
sectionList: make([]string, 0, 10),
options: opts,
}
}
func parseDataSource(source interface{}) (dataSource, error) {
switch s := source.(type) {
case string:
return sourceFile{s}, nil
case []byte:
return &sourceData{s}, nil
default:
return nil, fmt.Errorf("error parsing data source: unknown type '%s'", s)
}
}
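// LoadOptions contains options that control how data sources are loaded and parsed.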
type LoadOptions struct {
// Loose indicates whether the parser should ignore nonexistent files or return error.
Loose bool
// Insensitive indicates whether the parser forces all section and key names to lowercase.
Insensitive bool
// IgnoreContinuation indicates whether to ignore continuation lines while parsing.
IgnoreContinuation bool
}
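// LoadSources allows the caller to apply customized options when loading from
// the given data source(s).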
func LoadSources(opts LoadOptions, source interface{}, others ...interface{}) (_ *File, err error) {
sources := make([]dataSource, len(others)+1)
sources[0], err = parseDataSource(source)
if err != nil {
return nil, err
}
for i := range others {
sources[i+1], err = parseDataSource(others[i])
if err != nil {
return nil, err
}
}
f := newFile(sources, opts)
if err = f.Reload(); err != nil {
return nil, err
}
return f, nil
}
// Load loads and parses from INI data sources.
// Arguments can be mixed of file name with string type, or raw data in []byte.
// It will return error if list contains nonexistent files.
func Load(source interface{}, others ...interface{}) (*File, error) {
return LoadSources(LoadOptions{}, source, others...)
}
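// loadExample is a minimal usage sketch (added for illustration; the file name
// and key are hypothetical). Load accepts a mix of file names and raw bytes.
func loadExample() (string, error) {
	cfg, err := Load("app.ini", []byte("extra_key = extra_value"))
	if err != nil {
		return "", err
	}
	// Look up the key in the default (unnamed) section; the unexported value
	// field holds the raw string and is accessible within this package.
	return cfg.Section("").Key("extra_key").value, nil
}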
// LooseLoad has exactly the same functionality as the Load function
// except it ignores nonexistent files instead of returning error.
func LooseLoad(source interface{}, others ...interface{}) (*File, error) {
return LoadSources(LoadOptions{Loose: true}, source, others...)
}
// InsensitiveLoad has exactly the same functionality as the Load function
// except it forces all section and key names to be lowercased.
func InsensitiveLoad(source interface{}, others ...interface{}) (*File, error) {
return LoadSources(LoadOptions{Insensitive: true}, source, others...)
}
// Empty returns an empty file object.
func Empty() *File {
	// Ignore error here, we're sure our data is good.
f, _ := Load([]byte(""))
return f
}
// NewSection creates a new section.
func (f *File) NewSection(name string) (*Section, error) {
if len(name) == 0 {
return nil, errors.New("error creating new section: empty section name")
} else if f.options.Insensitive && name != DEFAULT_SECTION {
name = strings.ToLower(name)
}
if f.BlockMode {
f.lock.Lock()
defer f.lock.Unlock()
}
if inSlice(name, f.sectionList) {
return f.sections[name], nil
}
f.sectionList = append(f.sectionList, name)
f.sections[name] = newSection(f, name)
return f.sections[name], nil
}
// NewSections creates a list of sections.
func (f *File) NewSections(names ...string) (err error) {
for _, name := range names {
if _, err = f.NewSection(name); err != nil {
return err
}
}
return nil
}
// GetSection returns section by given name.
func (f *File) GetSection(name string) (*Section, error) {
if len(name) == 0 {
name = DEFAULT_SECTION
} else if f.options.Insensitive {
name = strings.ToLower(name)
}
if f.BlockMode {
f.lock.RLock()
defer f.lock.RUnlock()
}
sec := f.sections[name]
if sec == nil {
return nil, fmt.Errorf("section '%s' does not exist", name)
}
return sec, nil
}
// Section returns the named section, creating a new empty section if it does not exist.
func (f *File) Section(name string) *Section {
sec, err := f.GetSection(name)
if err != nil {
		// Note: It's OK to ignore the error here because the only possible
		// error is an empty section name, and in that case GetSection falls
		// back to the default section, so this branch is never reached.
sec, _ = f.NewSection(name)
return sec
}
return sec
}
// Sections returns the list of all sections.
func (f *File) Sections() []*Section {
sections := make([]*Section, len(f.sectionList))
for i := range f.sectionList {
sections[i] = f.Section(f.sectionList[i])
}
return sections
}
// SectionStrings returns list of section names.
func (f *File) SectionStrings() []string {
list := make([]string, len(f.sectionList))
copy(list, f.sectionList)
return list
}
// DeleteSection deletes a section.
func (f *File) DeleteSection(name string) {
if f.BlockMode {
f.lock.Lock()
defer f.lock.Unlock()
}
if len(name) == 0 {
name = DEFAULT_SECTION
}
for i, s := range f.sectionList {
if s == name {
f.sectionList = append(f.sectionList[:i], f.sectionList[i+1:]...)
delete(f.sections, name)
return
}
}
}
func (f *File) reload(s dataSource) error {
r, err := s.ReadCloser()
if err != nil {
return err
}
defer r.Close()
return f.parse(r)
}
// Reload reloads and parses all data sources.
func (f *File) Reload() (err error) {
for _, s := range f.dataSources {
if err = f.reload(s); err != nil {
// In loose mode, we create an empty default section for nonexistent files.
if os.IsNotExist(err) && f.options.Loose {
f.parse(bytes.NewBuffer(nil))
continue
}
return err
}
}
return nil
}
// Append appends one or more data sources and reloads automatically.
func (f *File) Append(source interface{}, others ...interface{}) error {
ds, err := parseDataSource(source)
if err != nil {
return err
}
f.dataSources = append(f.dataSources, ds)
for _, s := range others {
ds, err = parseDataSource(s)
if err != nil {
return err
}
f.dataSources = append(f.dataSources, ds)
}
return f.Reload()
}
// WriteToIndent writes content into io.Writer with the given indentation.
// If PrettyFormat has been set to be true,
// it will align "=" sign with spaces under each section.
func (f *File) WriteToIndent(w io.Writer, indent string) (n int64, err error) {
equalSign := "="
if PrettyFormat {
equalSign = " = "
}
// Use buffer to make sure target is safe until finish encoding.
buf := bytes.NewBuffer(nil)
for i, sname := range f.sectionList {
sec := f.Section(sname)
if len(sec.Comment) > 0 {
if sec.Comment[0] != '#' && sec.Comment[0] != ';' {
sec.Comment = "; " + sec.Comment
}
if _, err = buf.WriteString(sec.Comment + LineBreak); err != nil {
return 0, err
}
}
if i > 0 || DefaultHeader {
if _, err = buf.WriteString("[" + sname + "]" + LineBreak); err != nil {
return 0, err
}
} else {
// Write nothing if default section is empty
if len(sec.keyList) == 0 {
continue
}
}
// Count and generate alignment length and buffer spaces using the
		// longest key. Keys may be modified if they contain certain characters so
// we need to take that into account in our calculation.
alignLength := 0
if PrettyFormat {
for _, kname := range sec.keyList {
keyLength := len(kname)
			// The first case will surround the key with ` and the second with """
if strings.ContainsAny(kname, "\"=:") {
keyLength += 2
} else if strings.Contains(kname, "`") {
keyLength += 6
}
if keyLength > alignLength {
alignLength = keyLength
}
}
}
alignSpaces := bytes.Repeat([]byte(" "), alignLength)
for _, kname := range sec.keyList {
key := sec.Key(kname)
if len(key.Comment) > 0 {
if len(indent) > 0 && sname != DEFAULT_SECTION {
buf.WriteString(indent)
}
if key.Comment[0] != '#' && key.Comment[0] != ';' {
key.Comment = "; " + key.Comment
}
if _, err = buf.WriteString(key.Comment + LineBreak); err != nil {
return 0, err
}
}
if len(indent) > 0 && sname != DEFAULT_SECTION {
buf.WriteString(indent)
}
switch {
case key.isAutoIncr:
kname = "-"
case strings.ContainsAny(kname, "\"=:"):
kname = "`" + kname + "`"
case strings.Contains(kname, "`"):
kname = `"""` + kname + `"""`
}
if _, err = buf.WriteString(kname); err != nil {
return 0, err
}
// Write out alignment spaces before "=" sign
if PrettyFormat {
buf.Write(alignSpaces[:alignLength-len(kname)])
}
val := key.value
// In case key value contains "\n", "`", "\"", "#" or ";"
if strings.ContainsAny(val, "\n`") {
val = `"""` + val + `"""`
} else if strings.ContainsAny(val, "#;") {
val = "`" + val + "`"
}
if _, err = buf.WriteString(equalSign + val + LineBreak); err != nil {
return 0, err
}
}
// Put a line between sections
if _, err = buf.WriteString(LineBreak); err != nil {
return 0, err
}
}
return buf.WriteTo(w)
}
// WriteTo writes file content into io.Writer.
func (f *File) WriteTo(w io.Writer) (int64, error) {
return f.WriteToIndent(w, "")
}
// SaveToIndent writes content to file system with given value indention.
func (f *File) SaveToIndent(filename, indent string) error {
	// Note: os.Create truncates the file in place, so it's safer to save to a
	// temporary location first and rename it after we're done.
tmpPath := filename + "." + strconv.Itoa(time.Now().Nanosecond()) + ".tmp"
defer os.Remove(tmpPath)
fw, err := os.Create(tmpPath)
if err != nil {
return err
}
if _, err = f.WriteToIndent(fw, indent); err != nil {
fw.Close()
return err
}
fw.Close()
// Remove old file and rename the new one.
os.Remove(filename)
return os.Rename(tmpPath, filename)
}
// SaveTo writes content to file system.
func (f *File) SaveTo(filename string) error {
return f.SaveToIndent(filename, "")
}
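// Usage sketch (not part of the library; assumes a *File `cfg` obtained
// elsewhere, e.g. via this package's Load): write with a custom indent into a
// buffer, then persist atomically via SaveTo.
//
//	buf := bytes.NewBuffer(nil)
//	if _, err := cfg.WriteToIndent(buf, "\t"); err != nil {
//		// handle err
//	}
//	if err := cfg.SaveTo("app.out.ini"); err != nil {
//		// handle err
//	}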
field.py | """We want to export the field to some data files.
"""
from root.config.main import *
from screws.freeze.main import FrozenOnly
from screws.miscellaneous.timer import check_filename, check_no_splcharacter
from scipy.io import savemat
class _3dCSC_SF_Export_Field(FrozenOnly):
""""""
def __init__(self, sf):
""""""
assert '3dCSCG_standard_form' in sf.standard_properties.tags
self._sf_ = sf
self._freeze_self_()
def to_file(self, filename, numOfSamples=1e6, regions=None):
| """"""
filename, extension = check_filename(filename)
if extension is None: extension = 'txt'
supported_formats = ('txt', 'mat')
assert extension in supported_formats, \
f"format={extension} is not among the supported formats {supported_formats}."
if isinstance(numOfSamples, (int, float)):
assert numOfSamples > 0, f"numOfSamples={numOfSamples} is wrong."
numOfSamples = [numOfSamples, numOfSamples, numOfSamples]
else:
assert isinstance(numOfSamples, (tuple, list)) and len(numOfSamples) == 3, \
f"numOfSamples={numOfSamples} wrong."
for nos in numOfSamples:
assert isinstance(nos, (int, float)) and nos > 0, f"numOfSamples={numOfSamples} wrong."
mesh = self._sf_.mesh
if regions is None:
regions = mesh.domain.regions.names
elif isinstance(regions, str):
regions = [regions,]
else:
pass
assert isinstance(regions, (list, tuple)), f"regions={regions} is wrong."
assert len(set(regions)) == len(regions), f"regions={regions} has repeated regions."
for i, r in enumerate(regions):
assert r in mesh.domain.regions, f"regions[{i}]={r} is wrong."
rst = list()
for i in range(3):
density = int((numOfSamples[i] / mesh.elements.GLOBAL_num) ** (1/3)) + 1
interval = 2 / density
rst.append(np.linspace(-1 + interval/2, 1-interval/2, density))
xyz, v = self._sf_.reconstruct(*rst, regions=regions)
# Now, we gather xyz & v from all cores into Master Core, store in XYZ & V --- BELOW ---
if rAnk == mAster_rank:
X = [None for _ in range(mesh.elements.GLOBAL_num)]
Y = [None for _ in range(mesh.elements.GLOBAL_num)]
Z = [None for _ in range(mesh.elements.GLOBAL_num)]
Vx = [None for _ in range(mesh.elements.GLOBAL_num)]
if self._sf_.k in (1, 2):
Vy = [None for _ in range(mesh.elements.GLOBAL_num)]
Vz = [None for _ in range(mesh.elements.GLOBAL_num)]
for j in mesh.elements.indices:
X[j] = xyz[j][0]
Y[j] = xyz[j][1]
Z[j] = xyz[j][2]
Vx[j] = v[j][0]
if self._sf_.k in (1, 2):
# noinspection PyUnboundLocalVariable
Vy[j] = v[j][1]
# noinspection PyUnboundLocalVariable
Vz[j] = v[j][2]
for i in sLave_ranks:
xyz, v = cOmm.recv(source=i, tag=0)
for j in xyz:
X[j] = xyz[j][0]
Y[j] = xyz[j][1]
Z[j] = xyz[j][2]
Vx[j] = v[j][0]
if self._sf_.k in (1, 2):
Vy[j] = v[j][1]
Vz[j] = v[j][2]
del xyz, v
else:
cOmm.send([xyz, v], dest=mAster_rank, tag=0)
del xyz, v
# Now, we reshape the XYZ and V for export in the master core. -------- BELOW ----------
if rAnk == mAster_rank:
if self._sf_.k in (1, 2):
# noinspection PyUnboundLocalVariable
X, Y, Z, Vx, Vy, Vz = mesh.do.regionwsie_stack(X, Y, Z, Vx, Vy, Vz)
else:
# noinspection PyUnboundLocalVariable
X, Y, Z, V = mesh.do.regionwsie_stack(X, Y, Z, Vx)
for rn in regions:
assert rn in X and rn in Y and rn in Z, "Data not full!"
x, y, z = X[rn], Y[rn], Z[rn]
if self._sf_.k in (1, 2):
vx, vy, vz = Vx[rn], Vy[rn], Vz[rn]
else:
# noinspection PyUnboundLocalVariable
vx = V[rn]
# we take care of the file names ------------------ BELOW -----------------------
                RN = rn[2:]  # strip the "R:" prefix: a region named "R:center" becomes "center"
assert check_no_splcharacter(RN), f"region name={RN} wrong."
FILE_NAME = filename + '__InRegion_' + RN
if self._sf_.k in (1, 2):
FILE_NAME += '__x_y_z_vx_vy_vz'
else:
FILE_NAME += '__x_y_z_v'
FILE_NAME = FILE_NAME + '.' + extension
# It's time to do the save or writing ------------------- BELOW -----------------
if extension == 'txt':
                    # for .txt, we have to flatten the data =====================
x = x.ravel(order='F')[:,np.newaxis]
y = y.ravel(order='F')[:,np.newaxis]
z = z.ravel(order='F')[:,np.newaxis]
if self._sf_.k in (1, 2):
vx = vx.ravel(order='F')[:,np.newaxis]
# noinspection PyUnboundLocalVariable
vy = vy.ravel(order='F')[:,np.newaxis]
# noinspection PyUnboundLocalVariable
vz = vz.ravel(order='F')[:,np.newaxis]
else:
vx = vx.ravel(order='F')[:,np.newaxis]
if self._sf_.k in (1, 2):
# noinspection PyUnboundLocalVariable
TO_BE_WRITTEN = np.hstack((x, y, z, vx, vy, vz))
else:
TO_BE_WRITTEN = np.hstack((x, y, z, vx))
# noinspection PyTypeChecker
np.savetxt(FILE_NAME, TO_BE_WRITTEN)
elif extension == 'mat':
# for .mat, we save 3-d arrays. ==========================
m_dic = dict()
m_dic['x'] = x
m_dic['y'] = y
m_dic['z'] = z
if self._sf_.k in (1, 2):
m_dic['vx'] = vx
m_dic['vy'] = vy
m_dic['vz'] = vz
else:
m_dic['v'] = vx
savemat(FILE_NAME, m_dic)
else:
raise Exception(f"Format=.{extension} is not supported.") |
functions3.rs | // functions3.rs
// Make me compile! Scroll down for hints :)
fn main() {
call_me(-5);
}
fn call_me(num: i32) {
for i in 0..num {
println!("Ring! Call number {}", i + 1);
}
}
// This time, the function *declaration* is okay, but there's something wrong
// with the place where we're calling the function. |
get_deployment_at_scope.py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetDeploymentAtScopeResult',
'AwaitableGetDeploymentAtScopeResult',
'get_deployment_at_scope',
]
@pulumi.output_type
class GetDeploymentAtScopeResult:
"""
Deployment information.
"""
def __init__(__self__, id=None, location=None, name=None, properties=None, tags=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
The ID of the deployment.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
the location of the deployment.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the deployment.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.DeploymentPropertiesExtendedResponse':
"""
Deployment properties.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Deployment tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
    def type(self) -> str:
"""
The type of the deployment.
"""
return pulumi.get(self, "type")
class AwaitableGetDeploymentAtScopeResult(GetDeploymentAtScopeResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetDeploymentAtScopeResult(
id=self.id,
location=self.location,
name=self.name,
properties=self.properties,
tags=self.tags,
type=self.type)
def get_deployment_at_scope(deployment_name: Optional[str] = None,
scope: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDeploymentAtScopeResult:
"""
Deployment information.
:param str deployment_name: The name of the deployment.
:param str scope: The resource scope.
"""
__args__ = dict()
__args__['deploymentName'] = deployment_name
__args__['scope'] = scope
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:resources/v20201001:getDeploymentAtScope', __args__, opts=opts, typ=GetDeploymentAtScopeResult).value
return AwaitableGetDeploymentAtScopeResult(
id=__ret__.id,
location=__ret__.location,
name=__ret__.name,
properties=__ret__.properties,
tags=__ret__.tags,
type=__ret__.type)
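# Usage sketch (identifier values are hypothetical):
# result = get_deployment_at_scope(
#     deployment_name="my-deployment",
#     scope="/subscriptions/00000000-0000-0000-0000-000000000000")
# print(result.name, result.type)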
UsersController.js | const { Router } = require('express');
const { inject } = require('awilix-express');
const Status = require('http-status');
const loggerInjector = require('../utils/ControllerLoggerWrapper');
const UsersController = {
get router() {
const router = Router();
router.use(inject('userSerializer'));
    /**
     * In the next injection, loggerInjector is a wrapper over the awilix
     * inject method. What it does is still to inject the operation using
     * awilix's inject, and then add log handling via the ControllerLogger
     * module (see infra/logging/ControllerLogger.js). For demonstration it
     * is only used in the first route.
     */
router.get('/', loggerInjector('getAllUsers'), this.index);
router.get('/:id', inject('getUser'), this.show);
router.post('/', inject('createUser'), this.create);
router.put('/:id', inject('updateUser'), this.update);
router.delete('/:id', inject('deleteUser'), this.delete);
return router;
},
index(req, res, next) {
const { getAllUsers, userSerializer } = req;
const { SUCCESS, ERROR } = getAllUsers.outputs;
getAllUsers
.on(SUCCESS, (users) => {
res
.status(Status.OK)
.json(users.map(userSerializer.serialize));
})
.on(ERROR, next);
getAllUsers.execute();
},
show(req, res, next) {
const { getUser, userSerializer } = req;
const { SUCCESS, ERROR, NOT_FOUND } = getUser.outputs;
getUser
.on(SUCCESS, (user) => {
res
.status(Status.OK)
.json(userSerializer.serialize(user));
})
.on(NOT_FOUND, (error) => {
res.status(Status.NOT_FOUND).json({
type: 'NotFoundError',
details: error.details
});
})
.on(ERROR, next);
getUser.execute(Number(req.params.id));
},
create(req, res, next) {
const { createUser, userSerializer } = req;
const { SUCCESS, ERROR, VALIDATION_ERROR } = createUser.outputs;
createUser
.on(SUCCESS, (user) => {
res
.status(Status.CREATED)
.json(userSerializer.serialize(user));
})
.on(VALIDATION_ERROR, (error) => {
res.status(Status.BAD_REQUEST).json({
type: 'ValidationError',
details: error.details
});
})
.on(ERROR, next);
createUser.execute(req.body);
},
update(req, res, next) {
const { updateUser, userSerializer } = req;
const { SUCCESS, ERROR, VALIDATION_ERROR, NOT_FOUND } = updateUser.outputs;
updateUser
.on(SUCCESS, (user) => {
res
.status(Status.ACCEPTED)
.json(userSerializer.serialize(user));
})
.on(VALIDATION_ERROR, (error) => {
res.status(Status.BAD_REQUEST).json({
type: 'ValidationError',
details: error.details
});
})
.on(NOT_FOUND, (error) => {
res.status(Status.NOT_FOUND).json({
          type: 'NotFoundError',
          details: error.details
        });
})
.on(ERROR, next);
updateUser.execute(Number(req.params.id), req.body);
},
delete(req, res, next) {
const { deleteUser } = req;
const { SUCCESS, ERROR, NOT_FOUND } = deleteUser.outputs;
deleteUser
.on(SUCCESS, () => {
res.status(Status.ACCEPTED).end();
})
.on(NOT_FOUND, (error) => {
res.status(Status.NOT_FOUND).json({
type: 'NotFoundError',
details: error.details
});
})
.on(ERROR, next);
deleteUser.execute(Number(req.params.id));
}
};
module.exports = UsersController;
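// Wiring sketch (assumes an Express app and an awilix container that has
// already registered the use cases and serializer referenced above):
// const app = require('express')();
// app.use('/users', UsersController.router);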
sgp.py | """Implement the SequentialGeometricProgram class"""
from time import time
from collections import OrderedDict
import numpy as np
from ..exceptions import InvalidGPConstraint, Infeasible, UnnecessarySGP
from ..keydict import KeyDict
from ..nomials import Variable
from .gp import GeometricProgram
from ..nomials import PosynomialInequality
from .. import NamedVariables
from .costed import CostedConstraintSet
from ..solution_array import SolutionArray  # used by localsolveonce below
EPS = 1e-6 # 1 +/- this is used in a few relative differences
# pylint: disable=too-many-instance-attributes
class SequentialGeometricProgram(CostedConstraintSet):
"""Prepares a collection of signomials for a SP solve.
Arguments
---------
cost : Posynomial
Objective to minimize when solving
constraints : list of Constraint or SignomialConstraint objects
Constraints to maintain when solving (implicitly Signomials <= 1)
verbosity : int (optional)
Currently has no effect: SequentialGeometricPrograms don't know
anything new after being created, unlike GeometricPrograms.
Attributes with side effects
----------------------------
`gps` is set during a solve
`result` is set at the end of a solve
Examples
--------
>>> gp = gpkit.geometric_program.SequentialGeometricProgram(
# minimize
x,
[ # subject to
1/x - y/x, # <= 1, implicitly
y/10 # <= 1
])
>>> gp.solve()
"""
gps = solver_outs = _results = result = model = None
_gp = _spvars = _lt_approxs = pccp_penalty = None
with NamedVariables("SGP"):
slack = Variable("PCCPslack")
def __init__(self, cost, model, substitutions, *,
use_pccp=True, pccp_penalty=2e2, **initgpargs):
# pylint: disable=super-init-not-called,non-parent-init-called
        if cost.any_nonpositive_cs:
            raise UnnecessarySGP("""Sequential GPs need Posynomial objectives.
The equivalent of a Signomial objective can be constructed by constraining
a dummy variable `z` to be greater than the desired Signomial objective `s`
(z >= s) and then minimizing that dummy variable.""")
        self.model = model
        self._original_cost = cost
        self.externalfn_vars = \
            frozenset(Variable(v) for v in self.model.varkeys if v.externalfn)
        if not self.externalfn_vars:
            try:
                sgpconstraints = {"SP constraints": [], "GP constraints": []}
                self._lt_approxs = []
                for cs in model.flat():
                    try:
                        if not isinstance(cs, PosynomialInequality):
                            cs.as_hmapslt1(substitutions)  # gp-compatible?
                        sgpconstraints["GP constraints"].append(cs)
                    except InvalidGPConstraint:
                        sgpconstraints["SP constraints"].append(cs)
                        if use_pccp:
                            lts = [lt/self.slack for lt in cs.as_approxlts()]
                        else:
                            lts = cs.as_approxlts()
                        self._lt_approxs.append(lts)
                if not sgpconstraints["SP constraints"]:
                    raise UnnecessarySGP("""Model valid as a Geometric Program.
SequentialGeometricPrograms should only be created with Models containing
Signomial Constraints, since Models without Signomials have global
solutions and can be solved with 'Model.solve()'.""")
                if use_pccp:
                    self.pccp_penalty = pccp_penalty
                    self.cost = cost * self.slack**pccp_penalty
                    sgpconstraints["GP constraints"].append(self.slack >= 1)
                else:
                    self.cost = cost
                self.idxlookup = {k: i for i, k in enumerate(sgpconstraints)}
                list.__init__(self, sgpconstraints.values())
                self.substitutions = substitutions
                self._gp = self.init_gp(**initgpargs)
                self.blackboxconstraints = False
                return
            except AttributeError:
                pass  # some constraint lacked a needed method; fall through
        self.blackboxconstraints = True
        self.__bare_init__(cost, model, substitutions)
# pylint: disable=too-many-locals,too-many-branches
# pylint: disable=too-many-arguments
# pylint: disable=too-many-statements
def localsolve(self, solver=None, *, verbosity=1, x0=None, reltol=1e-4,
iteration_limit=50, mutategp=True, **solveargs):
"""Locally solves a SequentialGeometricProgram and returns the solution.
Arguments
---------
solver : str or function (optional)
By default uses one of the solvers found during installation.
If set to "mosek", "mosek_cli", or "cvxopt", uses that solver.
If set to a function, passes that function cs, A, p_idxs, and k.
verbosity : int (optional)
If greater than 0, prints solve time and number of iterations.
Each GP is created and solved with verbosity one less than this, so
if greater than 1, prints solver name and time for each GP.
x0 : dict (optional)
Initial location to approximate signomials about.
reltol : float
Iteration ends when this is greater than the distance between two
consecutive solve's objective values.
iteration_limit : int
Maximum GP iterations allowed.
mutategp: boolean
Prescribes whether to mutate the previously generated GP
or to create a new GP with every solve.
**solveargs :
Passed to solver function.
Returns
-------
result : dict
A dictionary containing the translated solver result.
"""
self.gps, self.solver_outs, self._results = [], [], []
# if there's external functions we can't mutate the GP
mutategp = mutategp and not self.blackboxconstraints
if not mutategp and not x0:
raise ValueError("Solves with arbitrary constraint generators"
" must specify an initial starting point x0.")
if mutategp:
if x0:
self._gp = self.init_gp(x0)
gp = self._gp
starttime = time()
if verbosity > 0:
print("Starting a sequence of GP solves")
if self.externalfn_vars:
print(" for %i variables defined by externalfns"
% len(self.externalfn_vars))
elif mutategp:
print(" for %i free variables" % len(self._spvars))
print(" in %i signomial constraints"
% len(self["SP constraints"]))
print(" and for %i free variables" % len(gp.varlocs))
print(" in %i posynomial inequalities." % len(gp.k))
prevcost, cost, rel_improvement = None, None, None
while rel_improvement is None or rel_improvement > reltol:
prevcost = cost
if len(self.gps) > iteration_limit:
raise Infeasible(
"Unsolved after %s iterations. Check `m.program.results`;"
" if they're converging, try `.localsolve(...,"
" iteration_limit=NEWLIMIT)`." % len(self.gps))
if mutategp:
self.update_gp(x0)
else:
gp = self.gp(x0)
gp.model = self.model
self.gps.append(gp) # NOTE: SIDE EFFECTS
if verbosity > 1:
print("\nGP Solve %i" % len(self.gps))
if verbosity > 2:
print("===============")
solver_out = gp.solve(solver, verbosity=verbosity-1,
gen_result=False, **solveargs)
self.solver_outs.append(solver_out)
cost = float(solver_out["objective"])
x0 = dict(zip(gp.varlocs, np.exp(solver_out["primal"])))
if verbosity > 2 and self._spvars:
result = gp.generate_result(solver_out, verbosity=verbosity-3)
self._results.append(result)
print(result.table(self._spvars))
elif verbosity > 1:
print("Solved cost was %.4g." % cost)
if prevcost is None:
continue
rel_improvement = (prevcost - cost)/(prevcost + cost)
if cost*(1 - EPS) > prevcost + EPS and verbosity > -1:
print("SGP not convergent: Cost rose by %.2g%% on GP solve %i."
" Details can be found in `m.program.results` or by"
" solving at a higher verbosity. Note that convergence is"
" not guaranteed for models with SignomialEqualities.\n"
% (100*(cost - prevcost)/prevcost, len(self.gps)))
rel_improvement = cost = None
# solved successfully!
self.result = gp.generate_result(solver_out, verbosity=verbosity-3)
self.result["soltime"] = time() - starttime
if verbosity > 1:
print()
if verbosity > 0:
print("Solving took %.3g seconds and %i GP solves."
% (self.result["soltime"], len(self.gps)))
self.model.process_result(self.result)
if self.externalfn_vars:
for v in self.externalfn_vars:
self[0].insert(0, v.key.externalfn) # for constraint senss
if self.slack.key in self.result["variables"]:
excess_slack = self.result["variables"][self.slack.key] - 1
if excess_slack <= EPS:
del self.result["freevariables"][self.slack.key]
del self.result["variables"][self.slack.key]
del self.result["sensitivities"]["variables"][self.slack.key]
slackconstraint = self["GP constraints"][-1]
del self.result["sensitivities"]["constraints"][slackconstraint]
elif verbosity > -1:
print("Final solution let signomial constraints slacken by"
" %.2g%%. Calling .localsolve with a higher"
" `pccp_penalty` (it was %.3g this time) will reduce"
" final slack if the model is solvable with less. If"
" you think it might not be, check by solving with "
"`use_pccp=False, x0=(this model's final solution)`.\n"
% (100*excess_slack, self.pccp_penalty))
return self.result
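    # Usage sketch (assumes a gpkit Model `m` whose constraints include
    # signomials; `Model.localsolve` builds and drives this class):
    # sol = m.localsolve(verbosity=1, reltol=1e-4, iteration_limit=50)
    # print(sol.table())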
# pylint: disable=too-many-locals
def localsolveonce(self, solver=None, verbosity=1, x0=None, reltol=1e-4,
iteration_limit=50, mutategp=True, **kwargs):
"""Locally solves a SequentialGeometricProgram ONCE and returns the solution.
Arguments
---------
solver : str or function (optional)
By default uses one of the solvers found during installation.
If set to "mosek", "mosek_cli", or "cvxopt", uses that solver.
If set to a function, passes that function cs, A, p_idxs, and k.
verbosity : int (optional)
If greater than 0, prints solve time and number of iterations.
Each GP is created and solved with verbosity one less than this, so
if greater than 1, prints solver name and time for each GP.
x0 : dict (optional)
Initial location to approximate signomials about.
reltol : float
Iteration ends when this is greater than the distance between two
consecutive solve's objective values.
iteration_limit : int
Maximum GP iterations allowed.
*args, **kwargs :
Passed to solver function.
Returns
-------
result : dict
A dictionary containing the translated solver result.
"""
starttime = time()
if verbosity > 0:
print("Beginning signomial solve.")
self.gps = [] # NOTE: SIDE EFFECTS
self.results = []
if x0 and mutategp:
self._gp = self.init_gp(self.substitutions, x0)
slackvar = Variable()
prevcost, cost, rel_improvement = None, None, None
while (rel_improvement is None or rel_improvement > reltol) and len(self.gps) < iteration_limit:
if len(self.gps) > iteration_limit:
raise RuntimeWarning("""problem unsolved after %s iterations.
The last result is available in Model.program.gps[-1].result. If the gps
appear to be converging, you may wish to increase the iteration limit by
calling .localsolve(..., iteration_limit=NEWLIMIT).""" % len(self.gps))
gp = self.gp(x0, mutategp)
self.gps.append(gp) # NOTE: SIDE EFFECTS
try:
result = gp.solve(solver, verbosity-1,
warn_on_check=True, **kwargs)
self.results.append(result)
except (RuntimeWarning, ValueError):
feas_constrs = ([slackvar >= 1] +
[posy <= slackvar
for posy in gp.posynomials[1:]])
primal_feas = GeometricProgram(slackvar**100 * gp.cost,
feas_constrs, None)
self.gps.append(primal_feas)
result = primal_feas.solve(solver, verbosity-1, **kwargs)
result["cost"] = None # reset the cost-counting
x0 = result["freevariables"]
prevcost, cost = cost, result["cost"]
if prevcost is None or cost is None:
rel_improvement = None
elif prevcost < (1-reltol)*cost:
print("SP is not converging! Last GP iteration had a higher"
" cost (%.2g) than the previous one (%.2g). Results for"
" each iteration are in (Model).program.results. If your"
" model contains SignomialEqualities, note that"
" convergence is not guaranteed: try replacing any"
" SigEqs you can and solving again." % (cost, prevcost))
else:
rel_improvement = abs(prevcost-cost)/(prevcost + cost)
# solved successfully!
soltime = time() - starttime
if verbosity > 0:
print("Solving took %i GP solves" % len(self.gps)
+ " and %.3g seconds." % soltime)
self.process_result(result)
self.result = SolutionArray(result.copy()) # NOTE: SIDE EFFECTS
self.result["soltime"] = soltime
if self.externalfn_vars:
for v in self.externalfn_vars:
self[0].insert(0, v.key.externalfn) # for constraint senss
return self.result
@property
def results(self):
"Creates and caches results from the raw solver_outs"
if not self._results:
self._results = [o["generate_result"]() for o in self.solver_outs]
return self._results
def _fill_x0(self, x0):
"Returns a copy of x0 with subsitutions added."
x0kd = KeyDict()
x0kd.varkeys = self.varkeys
if x0:
x0kd.update(x0) # has to occur after the setting of varkeys
x0kd.update(self.substitutions)
return x0kd
def init_gp(self, x0=None, **initgpargs):
"Generates a simplified GP representation for later modification"
x0 = self._fill_x0(x0)
constraints = OrderedDict({"SP approximations": []})
constraints["GP constraints"] = self["GP constraints"]
self._spvars = set([self.slack])
for cs, lts in zip(self["SP constraints"], self._lt_approxs):
for lt, gt in zip(lts, cs.as_approxgts(x0)):
constraint = (lt <= gt)
constraint.generated_by = cs
constraints["SP approximations"].append(constraint)
self._spvars.update({vk for vk in gt.varkeys
if vk not in self.substitutions})
gp = GeometricProgram(self.cost, constraints, self.substitutions,
**initgpargs)
gp.x0 = x0
return gp
def update_gp(self, x0):
"Update self._gp for x0."
if not self.gps:
return # we've already generated the first gp
gp = self._gp
gp.x0.update({k: v for (k, v) in x0.items() if k in self._spvars})
hmap_idx = 0
for sp_constraint, lts in zip(self["SP constraints"], self._lt_approxs):
for lt, gt in zip(lts, sp_constraint.as_approxgts(gp.x0)):
unsubbed = lt/gt
gp["SP approximations"][hmap_idx].unsubbed = [unsubbed]
hmap = unsubbed.hmap.sub(self.substitutions, unsubbed.varkeys)
hmap.parent = gp["SP approximations"][hmap_idx]
hmap_idx += 1 # here because gp.hmaps[0] is the cost hmap
gp.hmaps[hmap_idx] = hmap
gp.gen()
def gp(self, x0=None, **gpinitargs):
"The GP approximation of this SP at x0."
x0 = self._fill_x0(x0)
constraints = OrderedDict(
{"SP constraints": [c.as_gpconstr(x0) for c in self.model.flat()]})
if self.externalfn_vars:
constraints["Generated by externalfns"] = []
for v in self.externalfn_vars:
constraint = v.key.externalfn(v, x0)
constraint.generated_by = v.key.externalfn
constraints["Generated by externalfns"].append(constraint)
gp = GeometricProgram(self._original_cost,
constraints, self.substitutions, **gpinitargs)
gp.x0 = x0
return gp
WaveData.py | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 9 23:21:06 2021
@author: ShiningStone
"""
import datetime
import numpy as np
from .Abstract import CWaveData,CTimeStampsGen
class CDateTimeStampsGen(CTimeStampsGen):
def __init__(self,start:datetime.datetime,delta:datetime.timedelta,nLen):
super().__init__(start,delta,nLen)
class CBitalinoWaveData(CWaveData): # EEG unit: uV; EOG unit: mv
def __init__(self):
        super().__init__(-1, -1, CTimeStampsGen(0, 0, 1))  # these params can't be determined yet for a Bitalino file
def readFile(self,filename,mode = 'EEG'):
print("start reading bitalinofile")
from pylab import loadtxt
#file_name = 'opensignals_001403173836_2019-03-04_12-02-59.txt'
fullCont = list()
dataDescription = ''
import json
#read data description part
with open(filename,'r') as f:
for rowCont in f.readlines():
if(rowCont[0] == '#' and rowCont[2] != '{'):
pass
elif(rowCont[2] == '{'):
rowCont = rowCont[2:]
dataDescription = json.loads(rowCont)
break
else:
rowArray = rowCont.split("\t")
rowArray = rowArray[0:-1]
fullCont.append(rowArray)
data = loadtxt(filename)
# rowArrayNum = np.array(fullCont)
rowArrayNum = data
for key in dataDescription.keys(): #now the key is just the mac address of the device
dataDescription = dataDescription[key]
self.timestamps = rowArrayNum[:,0]
self.description = dataDescription
# print(dateTime.datetime.now())
if mode=='EEG':
self.nChan = 1
self.data = np.expand_dims(np.array(self.getRealSignal(rowArrayNum[:,-1],10,3.3,40000,'uV')), 0)
# self.rawdata = np.expand_dims(rowArrayNum[:,-1],0)
self.description["channelInfo"] = [[1],['EarEEG']]
elif mode == 'EOG':
self.nChan= 1
self.data = np.expand_dims(np.array(self.getRealSignal(rowArrayNum[:,-2],10,3.3,2040, 'mV')), 0)
self.description["channelInfo"] = [[1],['Eog']]
elif mode == 'EEGandEOG':
data1 = np.expand_dims(np.array(self.getRealSignal(rowArrayNum[:,-1],10,3.3,40000,'uV')), 0)
data2 = np.expand_dims(np.array(self.getRealSignal(rowArrayNum[:,-2],10,3.3,2040, 'uV')), 0)
self.nChan = 2
self.data = np.concatenate([data1,data2],0)
self.description['channelInfo'] = [[1,2],['EarEEG','Eog']]
else:
print("bitalino error: doesn't support this mode!")
# print(dateTime.datetime.now())
startTime = datetime.datetime.strptime( dataDescription['date'] + ' ' + dataDescription['time'], '%Y-%m-%d %H:%M:%S.%f')
self.srate = dataDescription["sampling rate"]
print("reading bitalinofile Finished")
delta = datetime.timedelta(seconds = 1/self.srate)
self.timeStampsGen = CDateTimeStampsGen(startTime,delta,len(self.timestamps))#initiate the timestamp sequence generator
self.calTimeStamp(self.timeStampsGen)
return data, dataDescription
def getRealSignal(self,sampleDataArray, bitNumber ,VCC = 3.3 , Geeg = 40000, unit = 'uV'):
        output = [self._eegTransferFunction(i, bitNumber, VCC, Geeg) for i in sampleDataArray]
output = np.array(output)
if(unit == 'uV'):
output = output * (10**6)
elif(unit == 'mV'):
output = output * (10**3)
return output
    def _eegTransferFunction(self, sampleValue, bitNumber, VCC, Geeg):
        # Centre the ADC reading around mid-scale, scale by the supply
        # voltage, then divide by the sensor gain to recover volts.
        output = (((sampleValue / 2**bitNumber) - 1/2) * VCC) / Geeg
        return output
def __len__(self):
return len(self.data)
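# Sanity-check sketch for the transfer function (hypothetical values): a
# mid-scale 10-bit sample (512) sits at VCC/2, so the centred output is 0 V.
# wave = CBitalinoWaveData()
# print(wave._eegTransferFunction(512, 10, VCC=3.3, Geeg=40000))  # -> 0.0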
argb_control_binding.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// from gst-gir-files (https://gitlab.freedesktop.org/gstreamer/gir-files-rs.git)
// DO NOT EDIT
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::StaticType;
use glib::ToValue;
use std::boxed::Box as Box_;
use std::mem::transmute;
glib::wrapper! {
#[doc(alias = "GstARGBControlBinding")]
pub struct ARGBControlBinding(Object<ffi::GstARGBControlBinding, ffi::GstARGBControlBindingClass>) @extends gst::ControlBinding, gst::Object;
match fn {
type_ => || ffi::gst_argb_control_binding_get_type(),
}
}
impl ARGBControlBinding {
#[doc(alias = "gst_argb_control_binding_new")]
pub fn new(
object: &impl IsA<gst::Object>,
property_name: &str,
cs_a: &impl IsA<gst::ControlSource>,
cs_r: &impl IsA<gst::ControlSource>,
cs_g: &impl IsA<gst::ControlSource>,
cs_b: &impl IsA<gst::ControlSource>,
) -> ARGBControlBinding {
assert_initialized_main_thread!();
unsafe {
gst::ControlBinding::from_glib_none(ffi::gst_argb_control_binding_new(
object.as_ref().to_glib_none().0,
property_name.to_glib_none().0,
cs_a.as_ref().to_glib_none().0,
cs_r.as_ref().to_glib_none().0,
cs_g.as_ref().to_glib_none().0,
cs_b.as_ref().to_glib_none().0,
))
.unsafe_cast()
}
}
}
unsafe impl Send for ARGBControlBinding {}
unsafe impl Sync for ARGBControlBinding {}
pub const NONE_ARGB_CONTROL_BINDING: Option<&ARGBControlBinding> = None;
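// Usage sketch (assumes an initialized GStreamer context, a target
// `gst::Object` `elem` with an ARGB-valued property, and four existing
// `gst::ControlSource`s; all names here are illustrative):
//
// let binding = ARGBControlBinding::new(&elem, "color", &cs_a, &cs_r, &cs_g, &cs_b);
// binding.set_control_source_a(Some(&cs_a2));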
pub trait ARGBControlBindingExt: 'static {
#[doc(alias = "control-source-a")]
fn control_source_a(&self) -> Option<gst::ControlSource>;
#[doc(alias = "control-source-a")]
fn set_control_source_a<P: IsA<gst::ControlSource>>(&self, control_source_a: Option<&P>);
#[doc(alias = "control-source-b")]
fn control_source_b(&self) -> Option<gst::ControlSource>;
#[doc(alias = "control-source-b")]
fn set_control_source_b<P: IsA<gst::ControlSource>>(&self, control_source_b: Option<&P>);
#[doc(alias = "control-source-g")]
fn control_source_g(&self) -> Option<gst::ControlSource>;
#[doc(alias = "control-source-g")]
fn set_control_source_g<P: IsA<gst::ControlSource>>(&self, control_source_g: Option<&P>);
#[doc(alias = "control-source-r")]
fn control_source_r(&self) -> Option<gst::ControlSource>;
#[doc(alias = "control-source-r")]
fn set_control_source_r<P: IsA<gst::ControlSource>>(&self, control_source_r: Option<&P>);
#[doc(alias = "control-source-a")]
fn connect_control_source_a_notify<F: Fn(&Self) + Send + Sync + 'static>(
&self,
f: F,
) -> SignalHandlerId;
#[doc(alias = "control-source-b")]
fn connect_control_source_b_notify<F: Fn(&Self) + Send + Sync + 'static>(
&self,
f: F,
) -> SignalHandlerId;
#[doc(alias = "control-source-g")]
fn connect_control_source_g_notify<F: Fn(&Self) + Send + Sync + 'static>(
&self,
f: F,
) -> SignalHandlerId;
#[doc(alias = "control-source-r")]
fn connect_control_source_r_notify<F: Fn(&Self) + Send + Sync + 'static>(
&self,
f: F,
) -> SignalHandlerId;
}
impl<O: IsA<ARGBControlBinding>> ARGBControlBindingExt for O {
fn control_source_a(&self) -> Option<gst::ControlSource> {
unsafe {
let mut value =
glib::Value::from_type(<gst::ControlSource as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"control-source-a\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `control-source-a` getter")
}
}
fn set_control_source_a<P: IsA<gst::ControlSource>>(&self, control_source_a: Option<&P>) {
unsafe {
glib::gobject_ffi::g_object_set_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"control-source-a\0".as_ptr() as *const _,
control_source_a.to_value().to_glib_none().0,
);
}
}
fn control_source_b(&self) -> Option<gst::ControlSource> {
unsafe {
let mut value =
glib::Value::from_type(<gst::ControlSource as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property(
                self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
                b"control-source-b\0".as_ptr() as *const _,
                value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `control-source-b` getter")
}
}
fn set_control_source_b<P: IsA<gst::ControlSource>>(&self, control_source_b: Option<&P>) {
unsafe {
glib::gobject_ffi::g_object_set_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"control-source-b\0".as_ptr() as *const _,
control_source_b.to_value().to_glib_none().0,
);
}
}
fn control_source_g(&self) -> Option<gst::ControlSource> {
unsafe {
let mut value =
glib::Value::from_type(<gst::ControlSource as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"control-source-g\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `control-source-g` getter")
}
}
fn set_control_source_g<P: IsA<gst::ControlSource>>(&self, control_source_g: Option<&P>) {
unsafe {
glib::gobject_ffi::g_object_set_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"control-source-g\0".as_ptr() as *const _,
control_source_g.to_value().to_glib_none().0,
);
}
}
fn control_source_r(&self) -> Option<gst::ControlSource> {
unsafe {
let mut value =
glib::Value::from_type(<gst::ControlSource as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"control-source-r\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `control-source-r` getter")
}
}
fn set_control_source_r<P: IsA<gst::ControlSource>>(&self, control_source_r: Option<&P>) {
unsafe {
glib::gobject_ffi::g_object_set_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"control-source-r\0".as_ptr() as *const _,
control_source_r.to_value().to_glib_none().0,
);
}
}
fn connect_control_source_a_notify<F: Fn(&Self) + Send + Sync + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_control_source_a_trampoline<
P: IsA<ARGBControlBinding>,
F: Fn(&P) + Send + Sync + 'static,
>(
this: *mut ffi::GstARGBControlBinding,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ARGBControlBinding::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::control-source-a\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_control_source_a_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_control_source_b_notify<F: Fn(&Self) + Send + Sync + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_control_source_b_trampoline<
P: IsA<ARGBControlBinding>,
F: Fn(&P) + Send + Sync + 'static,
>(
this: *mut ffi::GstARGBControlBinding,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ARGBControlBinding::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::control-source-b\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_control_source_b_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_control_source_g_notify<F: Fn(&Self) + Send + Sync + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_control_source_g_trampoline<
P: IsA<ARGBControlBinding>,
F: Fn(&P) + Send + Sync + 'static,
>(
this: *mut ffi::GstARGBControlBinding,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ARGBControlBinding::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::control-source-g\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_control_source_g_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_control_source_r_notify<F: Fn(&Self) + Send + Sync + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_control_source_r_trampoline<
P: IsA<ARGBControlBinding>,
F: Fn(&P) + Send + Sync + 'static,
>(
this: *mut ffi::GstARGBControlBinding,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(ARGBControlBinding::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::control-source-r\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_control_source_r_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
gubbins.go | /*
Package internal contains internal names, not for use by library
consumers.
The exported names within this package are subject to change without
warning.
*/
package internal
import (
"net/url"
"github.com/philandstuff/dhall-golang/v3/term"
)

func NewImport(fetchable term.Fetchable, mode term.ImportMode) term.Import {
	return term.Import{
		ImportHashed: term.ImportHashed{
			Fetchable: fetchable,
},
ImportMode: mode,
}
}
func NewEnvVarImport(envvar string, mode term.ImportMode) term.Import {
return NewImport(term.EnvVar(envvar), mode)
}
func NewLocalImport(path string, mode term.ImportMode) term.Import {
return NewImport(term.LocalFile(path), mode)
}
// only for generating test data - discards errors
func NewRemoteImport(uri string, mode term.ImportMode) term.Import {
parsedURI, _ := url.ParseRequestURI(uri)
remote := term.NewRemoteFile(parsedURI)
return NewImport(remote, mode)
}
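// Usage sketch (test data only; paths, URLs and env var names are
// illustrative):
//
//	local := NewLocalImport("./pkg.dhall", term.Code)
//	env := NewEnvVarImport("DHALL_PRELUDE", term.RawText)
//	remote := NewRemoteImport("https://example.com/pkg.dhall", term.Code)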
__init__.py | """
localhost
---------
"""
import socket, os
def get_localhostname():
    if os.environ.get("DOC", False) == True:
        return socket.gethostname()
    else:
        return "sphinx-doc"
def get_ip_adress():
if os.environ.get("DOC", False) == True:
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip = s.getsockname()[0]
s.close()
return ip
except Exception:
pass
return "123.4.567.890"
localhostname: str #: the name of the local machine
localhostname = get_localhostname()
localhost: str #: the localhost
localhost = "127.0.0.1"
localip: str #: the local ip address
localip = get_ip_adress()
lib.rs | //! Testing utilities and a testing-only instruction set for `peepmatic`.
#![deny(missing_debug_implementations)]
use peepmatic_runtime::{
cc::ConditionCode,
instruction_set::InstructionSet,
operator::Operator,
part::{Constant, Part},
paths::Path,
r#type::{BitWidth, Kind, Type},
};
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::convert::TryFrom;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct Instruction(pub usize);
#[derive(Debug)]
pub struct InstructionData {
pub operator: Operator,
pub r#type: Type,
pub immediates: Vec<Immediate>,
pub arguments: Vec<Instruction>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Immediate {
Constant(Constant),
ConditionCode(ConditionCode),
}
impl Immediate {
fn unwrap_constant(&self) -> Constant {
match *self {
Immediate::Constant(c) => c,
_ => panic!("not a constant"),
}
}
}
impl From<Constant> for Immediate {
fn from(c: Constant) -> Immediate {
Immediate::Constant(c)
}
}
impl From<ConditionCode> for Immediate {
fn from(cc: ConditionCode) -> Immediate {
Immediate::ConditionCode(cc)
}
}
impl From<Immediate> for Part<Instruction> {
fn from(imm: Immediate) -> Part<Instruction> {
match imm {
Immediate::Constant(c) => Part::Constant(c),
Immediate::ConditionCode(cc) => Part::ConditionCode(cc),
}
}
}
impl TryFrom<Part<Instruction>> for Immediate {
type Error = &'static str;
fn try_from(part: Part<Instruction>) -> Result<Immediate, Self::Error> {
match part {
Part::Constant(c) => Ok(Immediate::Constant(c)),
Part::ConditionCode(c) => Ok(Immediate::ConditionCode(c)),
Part::Instruction(_) => Err("instruction parts cannot be converted into immediates"),
}
}
}
#[derive(Debug, Default)]
pub struct Program {
instr_counter: usize,
instruction_data: BTreeMap<Instruction, InstructionData>,
replacements: RefCell<BTreeMap<Instruction, Instruction>>,
}
impl Program {
/// Are `a` and `b` structurally equivalent, even if they use different
/// `Instruction`s for various arguments?
pub fn structurally_eq(&mut self, a: Instruction, b: Instruction) -> bool {
macro_rules! ensure_eq {
($a:expr, $b:expr) => {{
let a = &$a;
let b = &$b;
if a != b {
log::debug!(
"{} != {} ({:?} != {:?})",
stringify!($a),
stringify!($b),
a,
b
);
return false;
}
}};
}
let a = self.resolve(a);
let b = self.resolve(b);
if a == b {
return true;
}
let a = self.data(a);
let b = self.data(b);
log::debug!("structurally_eq({:?}, {:?})", a, b);
ensure_eq!(a.operator, b.operator);
ensure_eq!(a.r#type, b.r#type);
ensure_eq!(a.immediates, b.immediates);
ensure_eq!(a.arguments.len(), b.arguments.len());
a.arguments
.clone()
.into_iter()
.zip(b.arguments.clone().into_iter())
.all(|(a, b)| self.structurally_eq(a, b))
}
pub fn instructions(&self) -> impl Iterator<Item = (Instruction, &InstructionData)> {
self.instruction_data.iter().map(|(k, v)| (*k, v))
}
pub fn replace_instruction(&mut self, old: Instruction, new: Instruction) {
log::debug!("replacing {:?} with {:?}", old, new);
let old = self.resolve(old);
let new = self.resolve(new);
if old == new {
return;
}
let mut replacements = self.replacements.borrow_mut();
let existing_replacement = replacements.insert(old, new);
assert!(existing_replacement.is_none());
let old_data = self.instruction_data.remove(&old);
assert!(old_data.is_some());
}
    pub fn resolve(&self, inst: Instruction) -> Instruction {
        let mut replacements = self.replacements.borrow_mut();
        let mut replacements_followed = 0;
        let mut resolved = inst;
        while let Some(i) = replacements.get(&resolved).cloned() {
            log::trace!("resolving replaced instruction: {:?} -> {:?}", resolved, i);
            replacements_followed += 1;
            assert!(
                replacements_followed <= replacements.len(),
                "cyclic replacements"
            );
            resolved = i;
            continue;
        }
        if inst != resolved {
            let old_replacement = replacements.insert(inst, resolved);
            assert!(old_replacement.is_some());
        }
        resolved
    }
pub fn data(&self, inst: Instruction) -> &InstructionData {
let inst = self.resolve(inst);
&self.instruction_data[&inst]
}
pub fn new_instruction(
&mut self,
operator: Operator,
r#type: Type,
immediates: Vec<Immediate>,
arguments: Vec<Instruction>,
) -> Instruction {
assert_eq!(
operator.immediates_arity() as usize,
immediates.len(),
"wrong number of immediates for {:?}: expected {}, found {}",
operator,
operator.immediates_arity(),
immediates.len(),
);
assert_eq!(
operator.params_arity() as usize,
arguments.len(),
"wrong number of arguments for {:?}: expected {}, found {}",
operator,
operator.params_arity(),
arguments.len(),
);
assert!(!r#type.bit_width.is_polymorphic());
assert!(immediates.iter().all(|imm| match imm {
Immediate::Constant(Constant::Bool(_, w))
| Immediate::Constant(Constant::Int(_, w)) => !w.is_polymorphic(),
Immediate::ConditionCode(_) => true,
}));
let inst = Instruction(self.instr_counter);
self.instr_counter += 1;
let data = InstructionData {
operator,
r#type,
immediates,
arguments,
};
log::trace!("new instruction: {:?} = {:?}", inst, data);
self.instruction_data.insert(inst, data);
inst
}
pub fn r#const(&mut self, c: Constant, root_bit_width: BitWidth) -> Instruction {
assert!(!root_bit_width.is_polymorphic());
match c {
Constant::Bool(_, bit_width) => self.new_instruction(
Operator::Bconst,
if bit_width.is_polymorphic() {
Type {
kind: Kind::Bool,
bit_width: root_bit_width,
}
} else {
Type {
kind: Kind::Bool,
bit_width,
}
},
vec![c.into()],
vec![],
),
Constant::Int(_, bit_width) => self.new_instruction(
Operator::Iconst,
if bit_width.is_polymorphic() {
Type {
kind: Kind::Int,
bit_width: root_bit_width,
}
} else {
Type {
kind: Kind::Int,
bit_width,
}
},
vec![c.into()],
vec![],
),
}
}
fn instruction_to_constant(&mut self, inst: Instruction) -> Option<Constant> {
match self.data(inst) {
InstructionData {
operator: Operator::Iconst,
immediates,
..
} => Some(immediates[0].unwrap_constant()),
InstructionData {
operator: Operator::Bconst,
immediates,
..
} => Some(immediates[0].unwrap_constant()),
_ => None,
}
}
fn part_to_immediate(&mut self, part: Part<Instruction>) -> Result<Immediate, &'static str> {
match part {
Part::Instruction(i) => self
.instruction_to_constant(i)
.map(|c| c.into())
.ok_or("non-constant instructions cannot be converted into immediates"),
Part::Constant(c) => Ok(c.into()),
Part::ConditionCode(cc) => Ok(Immediate::ConditionCode(cc)),
}
}
fn part_to_instruction(
&mut self,
root: Instruction,
part: Part<Instruction>,
) -> Result<Instruction, &'static str> {
match part {
Part::Instruction(inst) => {
let inst = self.resolve(inst);
Ok(inst)
}
Part::Constant(c) => {
let root_width = self.data(root).r#type.bit_width;
Ok(self.r#const(c, root_width))
}
Part::ConditionCode(_) => Err("condition codes cannot be converted into instructions"),
}
}
}
#[derive(Debug)]
pub struct TestIsa {
pub native_word_size_in_bits: u8,
}
// Unsafe because we must ensure that `instruction_result_bit_width` never
// returns zero.
unsafe impl<'a> InstructionSet<'a> for TestIsa {
type Context = Program;
type Instruction = Instruction;
fn replace_instruction(
&self,
program: &mut Program,
old: Instruction,
new: Part<Instruction>,
) -> Instruction {
log::debug!("replace_instruction({:?}, {:?})", old, new);
let new = program.part_to_instruction(old, new).unwrap();
program.replace_instruction(old, new);
new
}
fn get_part_at_path(
&self,
program: &mut Program,
root: Instruction,
path: Path,
) -> Option<Part<Instruction>> {
log::debug!("get_part_at_path({:?})", path);
assert!(!path.0.is_empty());
assert_eq!(path.0[0], 0);
let mut part = Part::Instruction(root);
for p in &path.0[1..] {
if let Part::Instruction(inst) = part {
let data = program.data(inst);
let p = *p as usize;
if p < data.immediates.len() {
part = data.immediates[p].into();
continue;
}
if let Some(inst) = data.arguments.get(p - data.immediates.len()).copied() {
part = Part::Instruction(inst);
continue;
}
}
return None;
}
Some(part)
}
fn operator(&self, program: &mut Program, instr: Instruction) -> Option<Operator> {
log::debug!("operator({:?})", instr);
let data = program.data(instr);
Some(data.operator)
}
fn make_inst_1(
&self,
program: &mut Program,
root: Instruction,
operator: Operator,
r#type: Type,
a: Part<Instruction>,
) -> Instruction {
log::debug!(
"make_inst_1(\n\toperator = {:?},\n\ttype = {},\n\ta = {:?},\n)",
operator,
r#type,
a,
);
let (imms, args) = match operator.immediates_arity() {
0 => {
assert_eq!(operator.params_arity(), 1);
(vec![], vec![program.part_to_instruction(root, a).unwrap()])
}
1 => {
assert_eq!(operator.params_arity(), 0);
(vec![program.part_to_immediate(a).unwrap()], vec![])
}
_ => unreachable!(),
};
program.new_instruction(operator, r#type, imms, args)
}
fn make_inst_2(
&self,
program: &mut Program,
root: Instruction,
operator: Operator,
r#type: Type,
a: Part<Instruction>,
b: Part<Instruction>,
) -> Instruction {
log::debug!(
"make_inst_2(\n\toperator = {:?},\n\ttype = {},\n\ta = {:?},\n\tb = {:?},\n)",
operator,
r#type,
a,
b,
);
let (imms, args) = match operator.immediates_arity() {
0 => {
assert_eq!(operator.params_arity(), 2);
(
vec![],
vec![
program.part_to_instruction(root, a).unwrap(),
program.part_to_instruction(root, b).unwrap(),
],
)
}
1 => {
assert_eq!(operator.params_arity(), 1);
(
vec![program.part_to_immediate(a).unwrap()],
vec![program.part_to_instruction(root, b).unwrap()],
)
}
2 => {
assert_eq!(operator.params_arity(), 0);
(
vec![
program.part_to_immediate(a).unwrap(),
program.part_to_immediate(b).unwrap(),
],
vec![],
)
}
_ => unreachable!(),
};
program.new_instruction(operator, r#type, imms, args)
}
fn make_inst_3(
&self,
program: &mut Program,
root: Instruction,
operator: Operator,
r#type: Type,
a: Part<Instruction>,
b: Part<Instruction>,
c: Part<Instruction>,
) -> Instruction {
log::debug!(
"make_inst_3(\n\toperator = {:?},\n\ttype = {},\n\ta = {:?},\n\tb = {:?},\n\tc = {:?},\n)",
operator,
r#type,
a,
b,
c,
);
let (imms, args) = match operator.immediates_arity() {
0 => {
assert_eq!(operator.params_arity(), 3);
(
vec![],
vec![
program.part_to_instruction(root, a).unwrap(),
program.part_to_instruction(root, b).unwrap(),
program.part_to_instruction(root, c).unwrap(),
],
)
}
1 => {
assert_eq!(operator.params_arity(), 2);
(
vec![program.part_to_immediate(a).unwrap()],
vec![
program.part_to_instruction(root, b).unwrap(),
program.part_to_instruction(root, c).unwrap(),
],
)
}
2 => {
assert_eq!(operator.params_arity(), 1);
(
vec![
program.part_to_immediate(a).unwrap(),
program.part_to_immediate(b).unwrap(),
],
vec![program.part_to_instruction(root, c).unwrap()],
)
}
3 => {
assert_eq!(operator.params_arity(), 0);
(
vec![
program.part_to_immediate(a).unwrap(),
program.part_to_immediate(b).unwrap(),
program.part_to_immediate(c).unwrap(),
],
vec![],
)
}
_ => unreachable!(),
};
program.new_instruction(operator, r#type, imms, args)
}
fn instruction_to_constant(
&self,
program: &mut Program,
inst: Instruction,
) -> Option<Constant> {
log::debug!("instruction_to_constant({:?})", inst);
program.instruction_to_constant(inst)
}
fn instruction_result_bit_width(&self, program: &mut Program, inst: Instruction) -> u8 {
log::debug!("instruction_result_bit_width({:?})", inst);
let ty = program.data(inst).r#type;
let width = ty.bit_width.fixed_width().unwrap();
assert!(width != 0);
width
}
fn native_word_size_in_bits(&self, _program: &mut Program) -> u8 {
log::debug!("native_word_size_in_bits");
self.native_word_size_in_bits
}
}
console.rs | // Copyright 2022 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::descriptor::AsRawDescriptor;
use std::io::{stdin, Error, Read, Result};
use winapi::{
    shared::{minwindef::LPVOID, ntdef::NULL},
    um::{fileapi::ReadFile, minwinbase::LPOVERLAPPED},
};

pub struct Console;
impl Read for Console {
fn read(&mut self, out: &mut [u8]) -> Result<usize> {
let mut num_of_bytes_read: u32 = 0;
        // Safe because `out` is guaranteed to be a valid mutable array
// and `num_of_bytes_read` is a valid u32.
let res = unsafe {
ReadFile(
stdin().as_raw_descriptor(),
out.as_mut_ptr() as LPVOID,
out.len() as u32,
&mut num_of_bytes_read,
NULL as LPOVERLAPPED,
)
};
let error = Error::last_os_error();
if res == 0 {
Err(error)
} else {
Ok(num_of_bytes_read as usize)
}
}
}
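// Usage sketch (Windows-only; assumes stdin is an interactive console):
//
// let mut console = Console;
// let mut buf = [0u8; 128];
// let n = console.read(&mut buf).expect("console read failed");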
__init__.py | from .predict import predict
import argparse
import sys, multiprocessing
import torch
def _parse_args():
parser=argparse.ArgumentParser(description="Run SolTranNet aqueous solubility predictor")
parser.add_argument('input',nargs='?',type=argparse.FileType('r'),default=sys.stdin,help='PATH to the file containing the SMILES you wish to use. Assumes the content is 1 SMILE per line.')
parser.add_argument('output',nargs='?',type=argparse.FileType('w'),default=sys.stdout,help='Name of the output file. Defaults to stdout.')
parser.add_argument('--batchsize',default=32,type=int,help='Batch size for the data loader. Defaults to 32.')
parser.add_argument('--cpus',default=multiprocessing.cpu_count(),type=int,help='Number of CPU cores to use for the data loader. Defaults to use all available cores. Pass 0 to only run on 1 CPU.')
parser.add_argument('--cpu_predict',action='store_true',help='Flag to force the predictions to be made on only the CPU. Default behavior is to use GPU if available.')
args=parser.parse_args()
return args
def _run(args):
    smiles=[x.rstrip() for x in args.input]
if args.cpu_predict:
predictions=predict(smiles,batch_size=args.batchsize,num_workers=args.cpus,device=torch.device('cpu'))
else:
predictions=predict(smiles,batch_size=args.batchsize,num_workers=args.cpus)
for pred, smi, warn in predictions:
        args.output.write(f'{smi},{pred:.3f},{warn}\n')
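# CLI sketch (the installed entry-point name is an assumption; the flags are
# the ones defined in _parse_args above):
# soltrannet molecules.smi predictions.csv --batchsize 32 --cpu_predict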
|
user.py | from sys import maxsize
class User:
def __init__(self, firstname=None, lastname=None, address=None, email=None, email2=None, email3=None, user_id=None,
homephone=None, workphone=None, mobilephone=None, additionalphone=None,
all_phones_from_home_page=None, all_emails_from_home_page=None, deprecated=None):
self.firstname = firstname
self.lastname = lastname
self.address = address
self.email = email
self.email2 = email2
self.email3 = email3
self.homephone = homephone
self.mobilephone = mobilephone
self.id = user_id
self.workphone = workphone
self.additionalphone = additionalphone
self.all_phones_from_home_page = all_phones_from_home_page
self.all_emails_from_home_page = all_emails_from_home_page
self.deprecated = deprecated
def __repr__(self):
return "%s:%s:%s:%s" % (self.id, self.firstname, self.lastname, self.deprecated)
def __eq__(self, other):
return (self.id is None or other.id is None or self.id == other.id) and self.firstname == other.firstname \
and self.lastname == other.lastname
def id_or_max(self):
        if self.id:
            return int(self.id)
else:
return maxsize |
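# Sorting sketch: contacts lacking an id sort last.
# users.sort(key=User.id_or_max)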
reads_or_peaks_distribution_relative2xxx.py | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
####################################################################################
### Copyright (C) 2015-2019 by ABLIFE
####################################################################################
####################################################################################
# Date Version Author ChangeLog
#
#
#
#####################################################################################
"""
程序功能说明:
1.统计reads or peaks 相对于TTS,TSS,STARTCODON,STOPCODON的分布
程序设计思路:
利用gffutils和HTSeq包进行统计
"""
import re, os, sys, logging, time, datetime
from optparse import OptionParser, OptionGroup
reload(sys)
sys.setdefaultencoding('utf-8')
import subprocess
import threading
import gffutils
import HTSeq
import numpy
import multiprocessing
from matplotlib import pyplot
sys.path.insert(1, os.path.split(os.path.realpath(__file__))[0] + "/../../")
from ablib.utils.tools import *
from ablib.utils.distribution import *
if sys.version_info < (2, 7):
print("Python Version error: please use phthon2.7")
sys.exit(-1)
_version = 'v0.1'
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def configOpt():
"""Init for option
"""
usage = 'Usage: %prog [option] [-h]'
p = OptionParser(usage)
##basic options
p.add_option(
'-g', '--gff', dest='gff', action='store',
type='string', help='gff file,do not have to provide it if db is exited')
p.add_option(
'-d', '--db', dest='db', default='gffdb', action='store',
type='string', help='the gff database file to create or use')
p.add_option(
'-b', '--bamorbed', dest='bamorbed', action='store',
type='string', help='bam or bed file, Important: the bamfile\'s suffix must be ".bam"')
p.add_option(
'-w', '--halfwinwidth', dest='halfwinwidth', default=1000, action='store',
type='int', help='halfwinwidth,default is 1000')
p.add_option(
'-p', '--postype', dest='postype', action='store',
type='string', help='gene position type:tss,tts,startcodon,stopcodon,intronstart,intronend')
p.add_option(
'-o', '--outfile', dest='outfile', default="distance2xxx_reads_density.txt", action='store',
type='string', help='gene expression file')
p.add_option(
'-n', '--samplename', dest='samplename', default='', action='store',
type='string', help='sample name,default is ""')
group = OptionGroup(p, "Preset options")
##preset options
group.add_option(
'-O', '--outDir', dest='outDir', default='./', action='store',
type='string', help='output directory', metavar="DIR")
group.add_option(
'-L', '--logDir', dest='logDir', default='', action='store',
type='string', help='log dir ,default is same as outDir')
group.add_option(
'-P', '--logPrefix', dest='logPrefix', default='', action='store',
type='string', help='log file prefix')
group.add_option(
'-E', '--email', dest='email', default='none', action='store',
type='string', help='email address, if you want get a email when this job is finished,default is no email',
metavar="EMAIL")
group.add_option(
'-Q', '--quiet', dest='quiet', default=True, action='store_true',
help='do not print messages to stdout')
group.add_option(
'-K', '--keepTemp', dest='keepTemp', default=False, action='store_true',
help='keep temp dir')
group.add_option(
'-T', '--test', dest='isTest', default=False, action='store_true',
help='run this program for test')
p.add_option_group(group)
if len(sys.argv) == 1:
p.print_help()
sys.exit(1)
opt, args = p.parse_args()
return (p, opt, args)
def listToString(x):
"""获得完整的命令
"""
rVal = ''
for a in x:
rVal += a + ' '
return rVal
opt_parser, opt, args = configOpt()
if not opt.postype:
opt_parser.error('Option -p must be assigned.\n')
if opt.logDir == "":
opt.logDir = opt.outDir + '/log/'
sample = ""
if opt.samplename != "":
sample = opt.samplename + '_'
if opt.outfile == 'distance2xxx_reads_density.txt':
opt.outfile = sample + 'distance2' + opt.postype + '_reads_density.txt'
intype = "bam"
match = re.search(r'\.bam$', opt.bamorbed)
if not match:
intype = "bed"
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
scriptPath = os.path.abspath(os.path.dirname(__file__)) # absolute script path
binPath = "/".join(scriptPath.split("/")[0:-2]) # absolute bin path
outPath = os.path.abspath(opt.outDir) # absolute output path
#os.mkdir(outPath) if not os.path.isdir(outPath) else None
os.system('mkdir -p ' + outPath)
logPath = os.path.abspath(opt.logDir)
#os.mkdir(logPath) if not os.path.isdir(logPath) else None
os.system('mkdir -p ' + logPath)
tempPath = outPath + '/temp/'  # absolute temp path
# os.mkdir(tempPath) if not os.path.isdir(tempPath) else None
resultPath = outPath + '/result/'
# os.mkdir(resultPath) if not os.path.isdir(resultPath) else None
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def initLogging(logFilename):
"""Init for logging
"""
logging.basicConfig(
level=logging.DEBUG,
format='[%(asctime)s : %(levelname)s] %(message)s',
datefmt='%y-%m-%d %H:%M',
filename=logFilename,
filemode='w')
if not opt.quiet:
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('[%(asctime)s : %(levelname)s] %(message)s', datefmt='%y-%m-%d %H:%M')
# tell the handler to use this format
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
dt = datetime.datetime.now()
logFile = logPath + '/' + opt.logPrefix + 'log.' + str(dt.strftime('%Y%m%d.%H%M%S.%f')) + '.txt'
initLogging(logFile)
logging.debug(sys.modules[__name__].__doc__)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
logging.debug('Program version: %s' % _version)
logging.debug('Start the program with [%s]\n', listToString(sys.argv))
startTime = datetime.datetime.now()
logging.debug("计时器:Program start at %s" % startTime)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
### S
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
### E
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def main():
print("Main procedure start...")
if opt.gff:
db = gffutils.create_db(opt.gff, opt.db, merge_strategy="create_unique", verbose=False, force=True)
db = gffutils.FeatureDB(opt.db)
Watcher()
pool = multiprocessing.Pool(processes=15)
server = multiprocessing.Manager()
dis = server.dict()
for chr in db.seqids():
# if chr != "chr1":
# continue
if intype == "bam":
chr_dict = readBamHeader(opt.bamorbed)
if not chr in chr_dict:
continue
# print(chr)
dis[chr] = [0 for x in range(2 * opt.halfwinwidth)]
pool.apply_async(distributionToOnePointByChr,
args=(chr, opt.bamorbed, opt.db, opt.outfile, opt.postype, opt.halfwinwidth, dis))
pool.close()
pool.join()
d = dict(dis).copy()
server.shutdown()
profile = numpy.zeros(2 * opt.halfwinwidth, dtype='i')
for chr in sorted(d.keys()):
wincvg = numpy.fromiter(d[chr], dtype='i', count=2 * opt.halfwinwidth)
profile += wincvg
# pyplot.plot( numpy.arange( -opt.halfwinwidth, opt.halfwinwidth ), profile )
# pyplot.show()
os.chdir(opt.outDir)
fout = open(opt.outfile, 'w')
fout.writelines(
"+distance\tdensity\n")
n = 0
for i in range(-opt.halfwinwidth, opt.halfwinwidth):
fout.writelines(str(i) + '\t' + str(profile[n]) + '\n')
n += 1
fout.close()
#cmd = "cd " + outPath + "&& R --slave < /users/ablife/ablife-R/Line_single_ggplot2.r --args " + opt.outfile + " " + sample + 'distance2' + opt.postype + '_reads_density ./ \n'
cmd = "cd " + outPath + "&& Rscript " + binPath + "/plot/Line_single_ggplot2.r -f " + opt.outfile + " -t " + sample + 'distance2' + opt.postype + '_reads_density -n ' + sample + 'distance2' + opt.postype + '_reads_density -o ./'
os.system(cmd)
if __name__ == '__main__':
main()
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
if not opt.keepTemp:
os.system('rm -rf ' + tempPath)
logging.debug("Temp folder is deleted..")
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
logging.debug("Program ended")
currentTime = datetime.datetime.now()
runningTime = (currentTime - startTime).seconds # in seconds
logging.debug("计时器:Program start at %s" % startTime)
logging.debug("计时器:Program end at %s" % currentTime)
logging.debug("计时器:Program ran %.2d:%.2d:%.2d" % (runningTime / 3600, (runningTime % 3600) / 60, runningTime % 60))
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
if opt.email != "none":
run_cmd = listToString(sys.argv)
sendEmail(opt.email, str(startTime), str(currentTime), run_cmd, outPath)
logging.info("发送邮件通知到 %s" % opt.email)
# -----------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------- |
####################################################################################
|
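# Usage sketch of the Manager-dict + apply_async pattern the script above relies
# on; the worker and keys here are hypothetical stand-ins, not part of the pipeline.
import multiprocessing

def count_up(key, shared):
    # Manager dict proxies don't observe in-place mutation, so reassign the value.
    shared[key] = [x + 1 for x in shared[key]]

if __name__ == '__main__':
    server = multiprocessing.Manager()
    shared = server.dict()
    pool = multiprocessing.Pool(processes=4)
    for key in ('chr1', 'chr2'):
        shared[key] = [0] * 5
        pool.apply_async(count_up, args=(key, shared))
    pool.close()
    pool.join()
    print(dict(shared))  # {'chr1': [1, 1, 1, 1, 1], 'chr2': [1, 1, 1, 1, 1]}
    server.shutdown()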
initializer_configuration_list.rs | // Generated from definition io.k8s.api.admissionregistration.v1alpha1.InitializerConfigurationList
/// InitializerConfigurationList is a list of InitializerConfiguration.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct InitializerConfigurationList {
/// List of InitializerConfiguration.
pub items: Vec<crate::v1_9::api::admissionregistration::v1alpha1::InitializerConfiguration>,
/// Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
pub metadata: Option<crate::v1_9::apimachinery::pkg::apis::meta::v1::ListMeta>,
}
impl crate::Resource for InitializerConfigurationList {
fn api_version() -> &'static str {
"admissionregistration.k8s.io/v1alpha1"
}
fn group() -> &'static str {
"admissionregistration.k8s.io"
}
fn kind() -> &'static str {
"InitializerConfigurationList"
}
fn version() -> &'static str {
"v1alpha1"
}
}
impl crate::Metadata for InitializerConfigurationList {
type Ty = crate::v1_9::apimachinery::pkg::apis::meta::v1::ListMeta;
fn metadata(&self) -> Option<&<Self as crate::Metadata>::Ty> {
self.metadata.as_ref()
}
}
impl<'de> serde::Deserialize<'de> for InitializerConfigurationList {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_api_version,
Key_kind,
Key_items,
Key_metadata,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"apiVersion" => Field::Key_api_version,
"kind" => Field::Key_kind,
"items" => Field::Key_items,
"metadata" => Field::Key_metadata,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct | ;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = InitializerConfigurationList;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "struct InitializerConfigurationList")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_items: Option<Vec<crate::v1_9::api::admissionregistration::v1alpha1::InitializerConfiguration>> = None;
let mut value_metadata: Option<crate::v1_9::apimachinery::pkg::apis::meta::v1::ListMeta> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_api_version => {
let value_api_version: String = serde::de::MapAccess::next_value(&mut map)?;
if value_api_version != <Self::Value as crate::Resource>::api_version() {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_api_version), &<Self::Value as crate::Resource>::api_version()));
}
},
Field::Key_kind => {
let value_kind: String = serde::de::MapAccess::next_value(&mut map)?;
if value_kind != <Self::Value as crate::Resource>::kind() {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_kind), &<Self::Value as crate::Resource>::kind()));
}
},
Field::Key_items => value_items = Some(serde::de::MapAccess::next_value(&mut map)?),
Field::Key_metadata => value_metadata = serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(InitializerConfigurationList {
items: value_items.ok_or_else(|| serde::de::Error::missing_field("items"))?,
metadata: value_metadata,
})
}
}
deserializer.deserialize_struct(
"InitializerConfigurationList",
&[
"apiVersion",
"kind",
"items",
"metadata",
],
Visitor,
)
}
}
impl serde::Serialize for InitializerConfigurationList {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
"InitializerConfigurationList",
3 +
self.metadata.as_ref().map_or(0, |_| 1),
)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "apiVersion", <Self as crate::Resource>::api_version())?;
serde::ser::SerializeStruct::serialize_field(&mut state, "kind", <Self as crate::Resource>::kind())?;
serde::ser::SerializeStruct::serialize_field(&mut state, "items", &self.items)?;
if let Some(value) = &self.metadata {
serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", value)?;
}
serde::ser::SerializeStruct::end(state)
}
}
| Visitor |
loader.py | '''
The salt api module loader interface
'''
# Import python libs
import os
# Import Salt libs
import salt.loader
import saltapi
def | (opts):
'''
Return the network api functions
'''
load = salt.loader._create_loader(
opts,
'netapi',
'netapi',
base_path=os.path.dirname(saltapi.__file__)
)
return load.gen_functions()
def runner(opts):
'''
    Load the runners. This function bypasses the issue with the altered
    basepath
'''
load = salt.loader._create_loader(
opts,
'runners',
'runner',
ext_type_dirs='runner_dirs',
base_path=os.path.dirname(salt.__file__)
)
return load.gen_functions()
| netapi |
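# Usage sketch for the loaders above (hypothetical wiring: assumes a readable
# salt master config and that this module is importable as saltapi.loader).
import salt.config
from saltapi.loader import netapi, runner

opts = salt.config.master_config('/etc/salt/master')
netapi_funcs = netapi(opts)   # dict of netapi module functions
runner_funcs = runner(opts)   # dict of runner functions
print(sorted(netapi_funcs))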
array.go | package logus
import (
"time"
"go.uber.org/zap/zapcore"
)
// Array constructs a field with the given key and ArrayMarshaler. It provides
// a flexible, but still type-safe and efficient, way to add array-like types
// to the logging context. The struct's MarshalLogArray method is called lazily.
func Array(key string, val zapcore.ArrayMarshaler) Field {
return Field{Key: key, Type: zapcore.ArrayMarshalerType, Interface: val}
}
// Bools constructs a field that carries a slice of bools.
func | (key string, bs ...bool) Field {
return Array(key, bools(bs))
}
// ByteStrings constructs a field that carries a slice of []byte, each of which
// must be UTF-8 encoded text.
func ByteStrings(key string, bss ...[]byte) Field {
return Array(key, byteStringsArray(bss))
}
// Complex128s constructs a field that carries a slice of complex numbers.
func Complex128s(key string, nums ...complex128) Field {
return Array(key, complex128s(nums))
}
// Complex64s constructs a field that carries a slice of complex numbers.
func Complex64s(key string, nums ...complex64) Field {
return Array(key, complex64s(nums))
}
// Durations constructs a field that carries a slice of time.Durations.
func Durations(key string, ds ...time.Duration) Field {
return Array(key, durations(ds))
}
// Float64s constructs a field that carries a slice of floats.
func Float64s(key string, nums ...float64) Field {
return Array(key, float64s(nums))
}
// Float32s constructs a field that carries a slice of floats.
func Float32s(key string, nums ...float32) Field {
return Array(key, float32s(nums))
}
// Ints constructs a field that carries a slice of integers.
func Ints(key string, nums ...int) Field {
return Array(key, ints(nums))
}
// Int64s constructs a field that carries a slice of integers.
func Int64s(key string, nums ...int64) Field {
return Array(key, int64s(nums))
}
// Int32s constructs a field that carries a slice of integers.
func Int32s(key string, nums ...int32) Field {
return Array(key, int32s(nums))
}
// Int16s constructs a field that carries a slice of integers.
func Int16s(key string, nums ...int16) Field {
return Array(key, int16s(nums))
}
// Int8s constructs a field that carries a slice of integers.
func Int8s(key string, nums ...int8) Field {
return Array(key, int8s(nums))
}
// Strings constructs a field that carries a slice of strings.
func Strings(key string, ss ...string) Field {
return Array(key, stringArray(ss))
}
// Times constructs a field that carries a slice of time.Times.
func Times(key string, ts ...time.Time) Field {
return Array(key, times(ts))
}
// Uints constructs a field that carries a slice of unsigned integers.
func Uints(key string, nums ...uint) Field {
return Array(key, uints(nums))
}
// Uint64s constructs a field that carries a slice of unsigned integers.
func Uint64s(key string, nums ...uint64) Field {
return Array(key, uint64s(nums))
}
// Uint32s constructs a field that carries a slice of unsigned integers.
func Uint32s(key string, nums ...uint32) Field {
return Array(key, uint32s(nums))
}
// Uint16s constructs a field that carries a slice of unsigned integers.
func Uint16s(key string, nums ...uint16) Field {
return Array(key, uint16s(nums))
}
// Uint8s constructs a field that carries a slice of unsigned integers.
func Uint8s(key string, nums ...uint8) Field {
return Array(key, uint8s(nums))
}
// Uintptrs constructs a field that carries a slice of pointer addresses.
func Uintptrs(key string, us ...uintptr) Field {
return Array(key, uintptrs(us))
}
// Errors constructs a field that carries a slice of errors.
func Errors(key string, errs ...error) Field {
return Array(key, errArray(errs))
}
type bools []bool
func (bs bools) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range bs {
arr.AppendBool(bs[i])
}
return nil
}
type byteStringsArray [][]byte
func (bss byteStringsArray) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range bss {
arr.AppendByteString(bss[i])
}
return nil
}
type complex128s []complex128
func (nums complex128s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendComplex128(nums[i])
}
return nil
}
type complex64s []complex64
func (nums complex64s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendComplex64(nums[i])
}
return nil
}
type durations []time.Duration
func (ds durations) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range ds {
arr.AppendDuration(ds[i])
}
return nil
}
type float64s []float64
func (nums float64s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendFloat64(nums[i])
}
return nil
}
type float32s []float32
func (nums float32s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendFloat32(nums[i])
}
return nil
}
type ints []int
func (nums ints) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendInt(nums[i])
}
return nil
}
type int64s []int64
func (nums int64s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendInt64(nums[i])
}
return nil
}
type int32s []int32
func (nums int32s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendInt32(nums[i])
}
return nil
}
type int16s []int16
func (nums int16s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendInt16(nums[i])
}
return nil
}
type int8s []int8
func (nums int8s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendInt8(nums[i])
}
return nil
}
type stringArray []string
func (ss stringArray) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range ss {
arr.AppendString(ss[i])
}
return nil
}
type times []time.Time
func (ts times) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range ts {
arr.AppendTime(ts[i])
}
return nil
}
type uints []uint
func (nums uints) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendUint(nums[i])
}
return nil
}
type uint64s []uint64
func (nums uint64s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendUint64(nums[i])
}
return nil
}
type uint32s []uint32
func (nums uint32s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendUint32(nums[i])
}
return nil
}
type uint16s []uint16
func (nums uint16s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendUint16(nums[i])
}
return nil
}
type uint8s []uint8
func (nums uint8s) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendUint8(nums[i])
}
return nil
}
type uintptrs []uintptr
func (nums uintptrs) MarshalLogArray(arr zapcore.ArrayEncoder) error {
for i := range nums {
arr.AppendUintptr(nums[i])
}
return nil
}
| Bools |
wall_follower_pid.py | import sys
sys.path.append("../..")
from api.control.PID import PID
from api.control.sensor import sensor
from api.control.robot import robot
import posix_ipc as ipc
import time
import threading
import math
import numpy as np
graphq = ipc.MessageQueue('/graphQueue', ipc.O_CREAT)
mq = ipc.MessageQueue('/keyQueue', ipc.O_CREAT)
mq.block = False
lidar = sensor('lidar', '/pointQueue')
""" THREAD CLASS """
class sensor_thread(threading.Thread):
def __init__(self, name, delay,*args, **kwargs):
super(sensor_thread, self).__init__(*args, **kwargs)
self._stopper = threading.Event() | def stopit(self):
self._stopper.set()
def stopped(self):
return self._stopper.isSet()
def run(self):
while True:
if self.stopped():
return
if self.name == 'cam':
cam.set_data()
if self.name == 'ir':
ir.set_data()
if self.name == 'lidar':
lidar.set_data()
time.sleep(self.delay)
def getPressed():
try:
mes = mq.receive()
key = list((mes[0].decode()).split(","))
key = int(key[0]), list(map(int, key[1:3])), list(map(float, key[3:]))
return key
except:
return None
""" GLOBAL VARIABLE HERE """
SENSOR_TYPE = [('lidar', 0.0)]
ATTRIBUTE = 'data'
DELTA_ANGLE = 50
RIGHT_HAND_ANGLE = 90
HELPER_HAND_ANGLE = RIGHT_HAND_ANGLE + DELTA_ANGLE
FACE_ANGLE = 180
WALL_THRES = 1
WALL_DISTANCE = 60
WALL_LEFT_BOUND = WALL_DISTANCE - WALL_THRES
WALL_RIGHT_BOUND = WALL_DISTANCE + WALL_THRES
AVOIDER_POWER = 35
STOP = 0, 0, 0
class power:
value = 0, 0, 0
def set(self, x, y, turn):
        self.value = x, y, turn
def find_nearest(array, value):
array = np.asarray(array)
idx = (np.abs(array - value)).argmin()
return idx
def main():
start = 0
last_start = start
min_power = 20
max_power = 50
kp = 1
ki = 0
kd = 0
lidar_pid = PID(kp, ki, kd, WALL_DISTANCE)
workers = []
for name, delay in SENSOR_TYPE:
print('[info] start thread : ' , name)
thread = sensor_thread(name, delay)
workers.append(thread)
thread.start()
try:
rc = robot('/serialWriteQueue')
time.sleep(5)
rc.connect()
time.sleep(0.5)
pwr = power()
while True:
key = getPressed()
if key:
print(key)
start, (min_power, max_power), (kp, ki, kd) = key
lidar_pid.setOutputLimits((-max_power, max_power))
lidar_pid.setKValue(kp, ki ,kd)
if start != last_start:
rx_distance = 0
graphq.send(",".join(map(str, [start, rx_distance, WALL_DISTANCE])))
last_start = start
if start:
point = lidar.data
print(type(point))
if type(point) is np.ndarray:
print("ye")
angles, ranges = point
right_hand = float(ranges[find_nearest(angles, RIGHT_HAND_ANGLE)])
helper_hand = float(ranges[find_nearest(angles, HELPER_HAND_ANGLE)])
face = float(ranges[find_nearest(angles, FACE_ANGLE)])
teta = math.radians(DELTA_ANGLE)
if face < 50:
print("50")
pwr.set(0, 0, AVOIDER_POWER)
elif right_hand > 0 and helper_hand > 0:
print("ye")
                        alpha = math.atan((right_hand * math.cos(teta) - helper_hand) / (right_hand * math.sin(teta)))
                        rx_distance = helper_hand * math.cos(alpha)  # alpha is already in radians; converting with math.radians here would double-convert
graphq.send(",".join(map(str, [start, rx_distance, WALL_DISTANCE])))
if rx_distance > WALL_RIGHT_BOUND or rx_distance < WALL_LEFT_BOUND:
out = lidar_pid.update(rx_distance)
if out < min_power and out > 0:
out = min_power
if out > -min_power and out < 0:
out = -min_power
print(rx_distance, out)
pwr.set(0, max_power, out)
else:
pwr.set(0, max_power, 0)
else:
pwr.set(*STOP)
else:
pwr.set(*STOP)
else:
pwr.set(*STOP)
rc.drive(*pwr.value)
time.sleep(0.001)
except KeyboardInterrupt:
print('[info] interrupt pressed')
print('[main] work finished')
for worker in workers:
worker.stopit()
time.sleep(3)
worker.join()
#lidar.cleanup()
#ir.cleanup()
#cam.cleanup()
#rc.disconnect()
print('[main] end')
main() | self.name = name
self.delay = delay |
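# Worked example of the find_nearest helper used above, restated so it runs on
# its own; the scan angles below are made up.
import numpy as np

def find_nearest(array, value):
    array = np.asarray(array)
    return (np.abs(array - value)).argmin()

angles = [0.0, 45.5, 89.8, 135.2, 180.0]
idx = find_nearest(angles, 90)   # -> 2, since 89.8 is the closest angle to 90
print(idx, angles[idx])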
models.py | from django.db import models
class Person(models.Model):
|
class Movie(models.Model):
title = models.CharField(max_length=200)
director = models.ForeignKey(Person, models.CASCADE)
class Event(models.Model):
pass
class Screening(Event):
movie = models.ForeignKey(Movie, models.CASCADE)
class ScreeningNullFK(Event):
movie = models.ForeignKey(Movie, models.SET_NULL, null=True)
class Package(models.Model):
screening = models.ForeignKey(Screening, models.SET_NULL, null=True)
class PackageNullFK(models.Model):
screening = models.ForeignKey(ScreeningNullFK, models.SET_NULL, null=True)
| name = models.CharField(max_length=200) |
Renderer.ts | import {
WebGLRenderer as WebGL
} from 'three';
export default class | extends WebGL{
constructor() {
super({
antialias: true
});
this.setVisualization();
this.toRender();
}
private setVisualization(): void {
this.setPixelRatio(window.devicePixelRatio);
this.setSize(window.innerWidth, window.innerHeight);
}
private toRender(): void {
document.body.appendChild(this.domElement);
}
} | WebGLRenderer |
operationresults.go | package healthcareapis
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// OperationResultsClient is the azure Healthcare APIs Client
type OperationResultsClient struct {
BaseClient
}
// NewOperationResultsClient creates an instance of the OperationResultsClient client.
func NewOperationResultsClient(subscriptionID string) OperationResultsClient {
return NewOperationResultsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewOperationResultsClientWithBaseURI creates an instance of the OperationResultsClient client using a custom
// endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure
// stack).
func NewOperationResultsClientWithBaseURI(baseURI string, subscriptionID string) OperationResultsClient {
return OperationResultsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// Get get the operation result for a long running operation.
// Parameters:
// locationName - the location of the operation.
// operationResultID - the ID of the operation result to get.
func (client OperationResultsClient) Get(ctx context.Context, locationName string, operationResultID string) (result SetObject, err error) {
if tracing.IsEnabled() |
req, err := client.GetPreparer(ctx, locationName, operationResultID)
if err != nil {
err = autorest.NewErrorWithError(err, "healthcareapis.OperationResultsClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "healthcareapis.OperationResultsClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "healthcareapis.OperationResultsClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client OperationResultsClient) GetPreparer(ctx context.Context, locationName string, operationResultID string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"locationName": autorest.Encode("path", locationName),
"operationResultId": autorest.Encode("path", operationResultID),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-08-20-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.HealthcareApis/locations/{locationName}/operationresults/{operationResultId}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client OperationResultsClient) GetSender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client OperationResultsClient) GetResponder(resp *http.Response) (result SetObject, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNotFound),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
| {
ctx = tracing.StartSpan(ctx, fqdn+"/OperationResultsClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
} |
extensions_build_config.bzl | # See bazel/README.md for details on how this system works.
EXTENSIONS = {
#
# Access loggers
#
"envoy.access_loggers.file": "//source/extensions/access_loggers/file:config",
"envoy.access_loggers.http_grpc": "//source/extensions/access_loggers/http_grpc:config",
"envoy.access_loggers.wasm": "//source/extensions/access_loggers/wasm:config",
#
# Clusters
#
"envoy.clusters.dynamic_forward_proxy": "//source/extensions/clusters/dynamic_forward_proxy:cluster",
"envoy.clusters.redis": "//source/extensions/clusters/redis:redis_cluster",
#
# gRPC Credentials Plugins
#
"envoy.grpc_credentials.file_based_metadata": "//source/extensions/grpc_credentials/file_based_metadata:config",
#
# WASM
#
"envoy.wasm": "//source/extensions/wasm:config",
#
# Health checkers
#
"envoy.health_checkers.redis": "//source/extensions/health_checkers/redis:config",
#
# HTTP filters
#
"envoy.filters.http.buffer": "//source/extensions/filters/http/buffer:config",
"envoy.filters.http.cors": "//source/extensions/filters/http/cors:config",
"envoy.filters.http.csrf": "//source/extensions/filters/http/csrf:config",
"envoy.filters.http.dynamic_forward_proxy": "//source/extensions/filters/http/dynamic_forward_proxy:config", | "envoy.filters.http.grpc_json_transcoder": "//source/extensions/filters/http/grpc_json_transcoder:config",
"envoy.filters.http.grpc_web": "//source/extensions/filters/http/grpc_web:config",
"envoy.filters.http.grpc_http1_reverse_bridge": "//source/extensions/filters/http/grpc_http1_reverse_bridge:config",
"envoy.filters.http.gzip": "//source/extensions/filters/http/gzip:config",
"envoy.filters.http.header_to_metadata": "//source/extensions/filters/http/header_to_metadata:config",
"envoy.filters.http.health_check": "//source/extensions/filters/http/health_check:config",
"envoy.filters.http.ip_tagging": "//source/extensions/filters/http/ip_tagging:config",
"envoy.filters.http.jwt_authn": "//source/extensions/filters/http/jwt_authn:config",
"envoy.filters.http.lua": "//source/extensions/filters/http/lua:config",
"envoy.filters.http.original_src": "//source/extensions/filters/http/original_src:config",
"envoy.filters.http.ratelimit": "//source/extensions/filters/http/ratelimit:config",
"envoy.filters.http.rbac": "//source/extensions/filters/http/rbac:config",
"envoy.filters.http.router": "//source/extensions/filters/http/router:config",
"envoy.filters.http.squash": "//source/extensions/filters/http/squash:config",
"envoy.filters.http.tap": "//source/extensions/filters/http/tap:config",
"envoy.filters.http.wasm": "//source/extensions/filters/http/wasm:config",
#
# Listener filters
#
"envoy.filters.listener.http_inspector": "//source/extensions/filters/listener/http_inspector:config",
# NOTE: The original_dst filter is implicitly loaded if original_dst functionality is
# configured on the listener. Do not remove it in that case or configs will fail to load.
"envoy.filters.listener.original_dst": "//source/extensions/filters/listener/original_dst:config",
"envoy.filters.listener.original_src": "//source/extensions/filters/listener/original_src:config",
# NOTE: The proxy_protocol filter is implicitly loaded if proxy_protocol functionality is
# configured on the listener. Do not remove it in that case or configs will fail to load.
"envoy.filters.listener.proxy_protocol": "//source/extensions/filters/listener/proxy_protocol:config",
"envoy.filters.listener.tls_inspector": "//source/extensions/filters/listener/tls_inspector:config",
#
# Network filters
#
"envoy.filters.network.client_ssl_auth": "//source/extensions/filters/network/client_ssl_auth:config",
"envoy.filters.network.dubbo_proxy": "//source/extensions/filters/network/dubbo_proxy:config",
"envoy.filters.network.echo": "//source/extensions/filters/network/echo:config",
"envoy.filters.network.ext_authz": "//source/extensions/filters/network/ext_authz:config",
"envoy.filters.network.http_connection_manager": "//source/extensions/filters/network/http_connection_manager:config",
# NOTE: Kafka filter does not have a proper filter implemented right now. We are referencing to
# codec implementation that is going to be used by the filter.
"envoy.filters.network.kafka": "//source/extensions/filters/network/kafka:kafka_request_codec_lib",
"envoy.filters.network.mongo_proxy": "//source/extensions/filters/network/mongo_proxy:config",
"envoy.filters.network.mysql_proxy": "//source/extensions/filters/network/mysql_proxy:config",
"envoy.filters.network.ratelimit": "//source/extensions/filters/network/ratelimit:config",
"envoy.filters.network.rbac": "//source/extensions/filters/network/rbac:config",
"envoy.filters.network.redis_proxy": "//source/extensions/filters/network/redis_proxy:config",
"envoy.filters.network.tcp_proxy": "//source/extensions/filters/network/tcp_proxy:config",
"envoy.filters.network.thrift_proxy": "//source/extensions/filters/network/thrift_proxy:config",
"envoy.filters.network.sni_cluster": "//source/extensions/filters/network/sni_cluster:config",
"envoy.filters.network.zookeeper_proxy": "//source/extensions/filters/network/zookeeper_proxy:config",
#
# Resource monitors
#
"envoy.resource_monitors.fixed_heap": "//source/extensions/resource_monitors/fixed_heap:config",
"envoy.resource_monitors.injected_resource": "//source/extensions/resource_monitors/injected_resource:config",
#
# Stat sinks
#
"envoy.stat_sinks.dog_statsd": "//source/extensions/stat_sinks/dog_statsd:config",
"envoy.stat_sinks.hystrix": "//source/extensions/stat_sinks/hystrix:config",
"envoy.stat_sinks.metrics_service": "//source/extensions/stat_sinks/metrics_service:config",
"envoy.stat_sinks.statsd": "//source/extensions/stat_sinks/statsd:config",
#
# Thrift filters
#
"envoy.filters.thrift.router": "//source/extensions/filters/network/thrift_proxy/router:config",
"envoy.filters.thrift.ratelimit": "//source/extensions/filters/network/thrift_proxy/filters/ratelimit:config",
#
# Tracers
#
"envoy.tracers.dynamic_ot": "//source/extensions/tracers/dynamic_ot:config",
"envoy.tracers.lightstep": "//source/extensions/tracers/lightstep:config",
"envoy.tracers.datadog": "//source/extensions/tracers/datadog:config",
"envoy.tracers.zipkin": "//source/extensions/tracers/zipkin:config",
"envoy.tracers.opencensus": "//source/extensions/tracers/opencensus:config",
#
# Transport sockets
#
"envoy.transport_sockets.alts": "//source/extensions/transport_sockets/alts:config",
"envoy.transport_sockets.tap": "//source/extensions/transport_sockets/tap:config",
"envoy.transport_sockets.tls": "//source/extensions/transport_sockets/tls:config",
# Retry host predicates
"envoy.retry_host_predicates.previous_hosts": "//source/extensions/retry/host/previous_hosts:config",
"envoy.retry_host_predicates.omit_canary_hosts": "//source/extensions/retry/host/omit_canary_hosts:config",
# Retry priorities
"envoy.retry_priorities.previous_priorities": "//source/extensions/retry/priority/previous_priorities:config",
}
WINDOWS_EXTENSIONS = {
#
# Access loggers
#
"envoy.access_loggers.file": "//source/extensions/access_loggers/file:config",
#"envoy.access_loggers.http_grpc": "//source/extensions/access_loggers/http_grpc:config",
#
# gRPC Credentials Plugins
#
#"envoy.grpc_credentials.file_based_metadata": "//source/extensions/grpc_credentials/file_based_metadata:config",
#
# Health checkers
#
#"envoy.health_checkers.redis": "//source/extensions/health_checkers/redis:config",
#
# HTTP filters
#
#"envoy.filters.http.buffer": "//source/extensions/filters/http/buffer:config",
#"envoy.filters.http.cors": "//source/extensions/filters/http/cors:config",
#"envoy.filters.http.csrf": "//source/extensions/filters/http/csrf:config",
#"envoy.filters.http.dynamo": "//source/extensions/filters/http/dynamo:config",
#"envoy.filters.http.ext_authz": "//source/extensions/filters/http/ext_authz:config",
#"envoy.filters.http.fault": "//source/extensions/filters/http/fault:config",
#"envoy.filters.http.grpc_http1_bridge": "//source/extensions/filters/http/grpc_http1_bridge:config",
#"envoy.filters.http.grpc_json_transcoder": "//source/extensions/filters/http/grpc_json_transcoder:config",
#"envoy.filters.http.grpc_web": "//source/extensions/filters/http/grpc_web:config",
#"envoy.filters.http.gzip": "//source/extensions/filters/http/gzip:config",
#"envoy.filters.http.health_check": "//source/extensions/filters/http/health_check:config",
#"envoy.filters.http.ip_tagging": "//source/extensions/filters/http/ip_tagging:config",
#"envoy.filters.http.lua": "//source/extensions/filters/http/lua:config",
#"envoy.filters.http.ratelimit": "//source/extensions/filters/http/ratelimit:config",
#"envoy.filters.http.rbac": "//source/extensions/filters/http/rbac:config",
#"envoy.filters.http.router": "//source/extensions/filters/http/router:config",
#"envoy.filters.http.squash": "//source/extensions/filters/http/squash:config",
#
# Listener filters
#
# NOTE: The proxy_protocol filter is implicitly loaded if proxy_protocol functionality is
# configured on the listener. Do not remove it in that case or configs will fail to load.
"envoy.filters.listener.proxy_protocol": "//source/extensions/filters/listener/proxy_protocol:config",
# NOTE: The original_dst filter is implicitly loaded if original_dst functionality is
# configured on the listener. Do not remove it in that case or configs will fail to load.
#"envoy.filters.listener.original_dst": "//source/extensions/filters/listener/original_dst:config",
"envoy.filters.listener.tls_inspector": "//source/extensions/filters/listener/tls_inspector:config",
#
# Network filters
#
"envoy.filters.network.client_ssl_auth": "//source/extensions/filters/network/client_ssl_auth:config",
#"envoy.filters.network.echo": "//source/extensions/filters/network/echo:config",
#"envoy.filters.network.ext_authz": "//source/extensions/filters/network/ext_authz:config",
#"envoy.filters.network.http_connection_manager": "//source/extensions/filters/network/http_connection_manager:config",
#"envoy.filters.network.mongo_proxy": "//source/extensions/filters/network/mongo_proxy:config",
#"envoy.filters.network.mysql_proxy": "//source/extensions/filters/network/mysql_proxy:config",
#"envoy.filters.network.redis_proxy": "//source/extensions/filters/network/redis_proxy:config",
#"envoy.filters.network.ratelimit": "//source/extensions/filters/network/ratelimit:config",
"envoy.filters.network.tcp_proxy": "//source/extensions/filters/network/tcp_proxy:config",
#"envoy.filters.network.thrift_proxy": "//source/extensions/filters/network/thrift_proxy:config",
#"envoy.filters.network.sni_cluster": "//source/extensions/filters/network/sni_cluster:config",
#"envoy.filters.network.zookeeper_proxy": "//source/extensions/filters/network/zookeeper_proxy:config",
#
# Stat sinks
#
#"envoy.stat_sinks.dog_statsd": "//source/extensions/stat_sinks/dog_statsd:config",
#"envoy.stat_sinks.metrics_service": "//source/extensions/stat_sinks/metrics_service:config",
#"envoy.stat_sinks.statsd": "//source/extensions/stat_sinks/statsd:config",
#
# Tracers
#
#"envoy.tracers.dynamic_ot": "//source/extensions/tracers/dynamic_ot:config",
#"envoy.tracers.lightstep": "//source/extensions/tracers/lightstep:config",
#"envoy.tracers.zipkin": "//source/extensions/tracers/zipkin:config",
#
# Transport sockets
#
#"envoy.transport_sockets.tap": "//source/extensions/transport_sockets/tap:config",
} | "envoy.filters.http.dynamo": "//source/extensions/filters/http/dynamo:config",
"envoy.filters.http.ext_authz": "//source/extensions/filters/http/ext_authz:config",
"envoy.filters.http.fault": "//source/extensions/filters/http/fault:config",
"envoy.filters.http.grpc_http1_bridge": "//source/extensions/filters/http/grpc_http1_bridge:config", |
formatter.go | package keypair
import (
"encoding/json"
"fmt"
"net/http"
"strconv"
"github.com/rancher/apiserver/pkg/apierror"
"github.com/rancher/apiserver/pkg/types"
"github.com/rancher/wrangler/pkg/schemas/validation"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
v1alpha12 "github.com/rancher/harvester/pkg/apis/harvester.cattle.io/v1alpha1"
"github.com/rancher/harvester/pkg/generated/controllers/harvester.cattle.io/v1alpha1"
"github.com/rancher/harvester/pkg/util"
)
func Formatter(request *types.APIRequest, resource *types.RawResource) {
resource.Actions = nil
delete(resource.Links, "update")
}
func CollectionFormatter(request *types.APIRequest, collection *types.GenericCollection) {
collection.AddAction(request, "keygen")
}
type KeyGenActionHandler struct {
KeyPairs v1alpha1.KeyPairClient
KeyPairCache v1alpha1.KeyPairCache
Namespace string
}
func (h KeyGenActionHandler) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
if err := h.do(rw, req); err != nil {
status := http.StatusInternalServerError
if e, ok := err.(*apierror.APIError); ok {
status = e.Code.Status
}
rw.WriteHeader(status)
_, _ = rw.Write([]byte(err.Error()))
return
}
rw.WriteHeader(http.StatusOK)
}
func (h KeyGenActionHandler) do(rw http.ResponseWriter, req *http.Request) error {
input := &v1alpha12.KeyGenInput{}
if err := json.NewDecoder(req.Body).Decode(input); err != nil {
return apierror.NewAPIError(validation.InvalidBodyContent, fmt.Sprintf("Failed to parse body: %v", err))
}
if input.Name == "" {
return apierror.NewAPIError(validation.InvalidBodyContent, "name is required")
}
rsaKey, err := util.GeneratePrivateKey(2048)
if err != nil {
return err
}
privateKey := util.EncodePrivateKeyToPEM(rsaKey)
publicKey, err := util.GeneratePublicKey(&rsaKey.PublicKey)
if err != nil {
return err
}
keyPair := &v1alpha12.KeyPair{
ObjectMeta: v1.ObjectMeta{
Name: input.Name,
Namespace: h.Namespace,
},
Spec: v1alpha12.KeyPairSpec{ |
if _, err = h.KeyPairs.Create(keyPair); err != nil {
return err
}
rw.Header().Set("Content-Disposition", "attachment; filename="+input.Name+".pem")
rw.Header().Set("Content-Type", "application/octet-stream")
rw.Header().Set("Content-Length", strconv.Itoa(len(privateKey)))
_, err = rw.Write(privateKey)
return err
} | PublicKey: string(publicKey),
},
} |
block.go | // Copyright 2016-2020, Pulumi Corporation.
// // TODO: will be fixed by [email protected]
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // TODO: hacked by [email protected]
// You may obtain a copy of the License at/* Anrop för att uppdatera antal biljetter för en platstyp. */
//
// http://www.apache.org/licenses/LICENSE-2.0/* (vila) Release 2.2.5 (Vincent Ladeuil) */
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//Clean google auth
package model
import (
"fmt"
"io"
// TODO: will be fixed by [email protected]
"github.com/hashicorp/hcl/v2" //5898c072-2e5d-11e5-9284-b827eb9e62be
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/pulumi/pulumi/pkg/v2/codegen/hcl2/syntax"/* Release plugin version updated to 2.5.2 */
)
// Block represents an HCL2 block.
type Block struct {
	// The syntax node for the block, if any.
Syntax *hclsyntax.Block
// The tokens for the block.
Tokens *syntax.BlockTokens
// The block's type.
Type string
// The block's labels.
Labels []string
// The block's body.
Body *Body
}
// SyntaxNode returns the syntax node of the block, and will either return an *hclsyntax.Block or syntax.None.
func (b *Block) SyntaxNode() hclsyntax.Node {
return syntaxOrNone(b.Syntax)
}
func (b *Block) HasLeadingTrivia() bool {
return b.Tokens != nil
}
func (b *Block) HasTrailingTrivia() bool {
return b.Tokens != nil
}
func (b *Block) GetLeadingTrivia() syntax.TriviaList {
return b.Tokens.GetType(b.Type).LeadingTrivia
}
func (b *Block) GetTrailingTrivia() syntax.TriviaList {
return b.Tokens.GetCloseBrace().TrailingTrivia
}
func (b *Block) Format(f fmt.State, c rune) {
b.print(f, &printer{})
}
func (b *Block) print(w io.Writer, p *printer) {
	// Print the type.
p.fprintf(w, "%v", b.Tokens.GetType(b.Type))
	// Print the labels with leading and trailing trivia.
labelTokens := b.Tokens.GetLabels(b.Labels)
for i, l := range b.Labels {
var t syntax.Token
if i < len(labelTokens) {
t = labelTokens[i]
}
if hclsyntax.ValidIdentifier(l) {
t = identToken(t, l)
} else {
l = fmt.Sprintf("%q", l)
if t.Raw.Type != hclsyntax.TokenQuotedLit || string(t.Raw.Bytes) != l {
t.Raw.Type = hclsyntax.TokenQuotedLit
t.Raw.Bytes = []byte(l) | p.fprintf(w, "% v", t)
}
if len(b.Labels) < len(labelTokens) {
for _, l := range labelTokens[len(b.Labels):] {
p.fprintf(w, "%v", syntax.Token{
LeadingTrivia: l.LeadingTrivia,
TrailingTrivia: l.TrailingTrivia,
})
}
}
// Print the opening brace.
p.fprintf(w, "% v", b.Tokens.GetOpenBrace())
// Print the block contents.
p.indented(func() {
b.Body.print(w, p)
})
if b.Tokens != nil {
p.fprintf(w, "%v", b.Tokens.GetCloseBrace())
} else {
p.fprintf(w, "%s}", p.indent)
}
}
func (*Block) isBodyItem() {}
// BindBlock binds an HCL2 block using the given scopes and token map.
func BindBlock(block *hclsyntax.Block, scopes Scopes, tokens syntax.TokenMap,
opts ...BindOption) (*Block, hcl.Diagnostics) {
body, diagnostics := BindBody(block.Body, scopes, tokens, opts...)
blockTokens, _ := tokens.ForNode(block).(*syntax.BlockTokens)
return &Block{
Syntax: block,
Tokens: blockTokens,
Type: block.Type,
Labels: block.Labels,
Body: body,
}, diagnostics
} | }
} |
Joint.py | # -*- coding: utf-8 -*-
"""
Created on Sun May 12 20:17:17 2019
@author: syuntoku
"""
import adsk, re
from xml.etree.ElementTree import Element, SubElement
from ..utils import utils
class Joint:
def __init__(self, name, xyz, axis, parent, child, joint_type, upper_limit, lower_limit):
"""
Attributes
----------
name: str
name of the joint
type: str
        type of the joint (e.g. revolute)
xyz: [x, y, z]
coordinate of the joint
axis: [x, y, z]
coordinate of axis of the joint
parent: str
parent link
child: str
child link
joint_xml: str
        generated xml describing the joint
    tran_xml: str
        generated xml describing the transmission
"""
self.name = name
self.type = joint_type
self.xyz = xyz
self.parent = parent
self.child = child
self.joint_xml = None
self.tran_xml = None
self.axis = axis # for 'revolute' and 'continuous'
self.upper_limit = upper_limit # for 'revolute' and 'prismatic'
self.lower_limit = lower_limit # for 'revolute' and 'prismatic'
def make_joint_xml(self):
"""
        Generate the joint_xml and store it in self.joint_xml
"""
joint = Element('joint')
joint.attrib = {'name':self.name, 'type':self.type}
origin = SubElement(joint, 'origin')
origin.attrib = {'xyz':' '.join([str(_) for _ in self.xyz]), 'rpy':'0 0 0'}
parent = SubElement(joint, 'parent')
parent.attrib = {'link':self.parent}
child = SubElement(joint, 'child')
child.attrib = {'link':self.child}
if self.type == 'revolute' or self.type == 'continuous' or self.type == 'prismatic':
axis = SubElement(joint, 'axis')
axis.attrib = {'xyz':' '.join([str(_) for _ in self.axis])}
if self.type == 'revolute' or self.type == 'prismatic':
limit = SubElement(joint, 'limit')
limit.attrib = {'upper': str(self.upper_limit), 'lower': str(self.lower_limit),
'effort': '100', 'velocity': '100'}
|
def make_transmission_xml(self):
"""
        Generate the tran_xml and store it in self.tran_xml
Notes
-----------
        mechanicalTransmission: 1
        type: transmission_interface/SimpleTransmission
        hardwareInterface: hardware_interface/EffortJointInterface (matching the xml generated below)
"""
tran = Element('transmission')
tran.attrib = {'name':self.name + '_tran'}
joint_type = SubElement(tran, 'type')
joint_type.text = 'transmission_interface/SimpleTransmission'
joint = SubElement(tran, 'joint')
joint.attrib = {'name':self.name}
hardwareInterface_joint = SubElement(joint, 'hardwareInterface')
hardwareInterface_joint.text = 'hardware_interface/EffortJointInterface'
actuator = SubElement(tran, 'actuator')
actuator.attrib = {'name':self.name + '_actr'}
hardwareInterface_actr = SubElement(actuator, 'hardwareInterface')
hardwareInterface_actr.text = 'hardware_interface/EffortJointInterface'
mechanicalReduction = SubElement(actuator, 'mechanicalReduction')
mechanicalReduction.text = '1'
self.tran_xml = "\n".join(utils.prettify(tran).split("\n")[1:])
def make_joints_dict(root, msg):
"""
    joints_dict holds the parent, axis and xyz information of the joints
Parameters
----------
root: adsk.fusion.Design.cast(product)
Root component
msg: str
Tell the status
Returns
----------
joints_dict:
{name: {type, axis, upper_limit, lower_limit, parent, child, xyz}}
msg: str
Tell the status
"""
joint_type_list = [
'fixed', 'revolute', 'prismatic', 'Cylinderical',
'PinSlot', 'Planner', 'Ball'] # these are the names in urdf
joints_dict = {}
for joint in root.joints:
joint_dict = {}
joint_type = joint_type_list[joint.jointMotion.jointType]
joint_dict['type'] = joint_type
        # switch by the type of the joint
joint_dict['axis'] = [0, 0, 0]
joint_dict['upper_limit'] = 0.0
joint_dict['lower_limit'] = 0.0
# support "Revolute", "Rigid" and "Slider"
if joint_type == 'revolute':
joint_dict['axis'] = [round(i, 6) for i in \
joint.jointMotion.rotationAxisVector.asArray()] ## In Fusion, exported axis is normalized.
max_enabled = joint.jointMotion.rotationLimits.isMaximumValueEnabled
min_enabled = joint.jointMotion.rotationLimits.isMinimumValueEnabled
if max_enabled and min_enabled:
joint_dict['upper_limit'] = round(joint.jointMotion.rotationLimits.maximumValue, 6)
joint_dict['lower_limit'] = round(joint.jointMotion.rotationLimits.minimumValue, 6)
elif max_enabled and not min_enabled:
                msg = joint.name + ' does not have its lower limit set. Please set it and try again.'
break
elif not max_enabled and min_enabled:
                msg = joint.name + ' does not have its upper limit set. Please set it and try again.'
break
else: # if there is no angle limit
joint_dict['type'] = 'continuous'
elif joint_type == 'prismatic':
joint_dict['axis'] = [round(i, 6) for i in \
joint.jointMotion.slideDirectionVector.asArray()] # Also normalized
max_enabled = joint.jointMotion.slideLimits.isMaximumValueEnabled
min_enabled = joint.jointMotion.slideLimits.isMinimumValueEnabled
if max_enabled and min_enabled:
joint_dict['upper_limit'] = round(joint.jointMotion.slideLimits.maximumValue/100, 6)
joint_dict['lower_limit'] = round(joint.jointMotion.slideLimits.minimumValue/100, 6)
elif max_enabled and not min_enabled:
                msg = joint.name + ' does not have its lower limit set. Please set it and try again.'
break
elif not max_enabled and min_enabled:
                msg = joint.name + ' does not have its upper limit set. Please set it and try again.'
break
elif joint_type == 'fixed':
pass
if joint.occurrenceTwo.component.name == 'base_link':
joint_dict['parent'] = 'base_link'
else:
joint_dict['parent'] = re.sub('[ :()]', '_', joint.occurrenceTwo.name)
joint_dict['child'] = re.sub('[ :()]', '_', joint.occurrenceOne.name)
try:
joint_dict['xyz'] = [round(i / 100.0, 6) for i in \
joint.geometryOrOriginOne.origin.asArray()] # converted to meter
except:
try:
if type(joint.geometryOrOriginTwo)==adsk.fusion.JointOrigin:
data = joint.geometryOrOriginTwo.geometry.origin.asArray()
else:
data = joint.geometryOrOriginTwo.origin.asArray()
joint_dict['xyz'] = [round(i / 100.0, 6) for i in data] # converted to meter
except:
msg = joint.name + " doesn't have joint origin. Please set it and run again."
break
joints_dict[joint.name] = joint_dict
return joints_dict, msg | self.joint_xml = "\n".join(utils.prettify(joint).split("\n")[1:]) |
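# Illustrative driver for the Joint class above (values are made up; the module
# imports Fusion 360's adsk API, so in practice this runs inside Fusion's script
# environment where that import succeeds and the class is in scope).
j = Joint(name='elbow', xyz=[0.0, 0.1, 0.2], axis=[0, 0, 1],
          parent='upper_arm', child='forearm', joint_type='revolute',
          upper_limit=1.57, lower_limit=-1.57)
j.make_joint_xml()         # fills j.joint_xml with the <joint> element
j.make_transmission_xml()  # fills j.tran_xml with the <transmission> element
print(j.joint_xml)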
capnp_lexer.py | #! /usr/bin/env python
from pygments.lexer import RegexLexer
from pygments.token import *
class | (RegexLexer):
name = "Cap'n Proto lexer"
aliases = ['capnp']
filenames = ['*.capnp']
tokens = {
'root': [
(r'#.*?$', Comment.Single),
(r'@[0-9a-zA-Z]*', Name.Decorator),
(r'=', Literal, 'expression'),
(r':', Name.Class, 'type'),
(r'\$', Name.Attribute, 'annotation'),
(r'(struct|enum|interface|union|import|using|const|annotation|extends|in|of|on|as|with|from|fixed|bulk|realtime)\b',
Token.Keyword),
(r'[a-zA-Z0-9_.]+', Token.Name),
(r'[^#@=:$a-zA-Z0-9_]+', Text),
],
'type': [
(r'[^][=;,(){}$]+', Name.Class),
(r'[[(]', Name.Class, 'parentype'),
(r'', Name.Class, '#pop')
],
'parentype': [
(r'[^][;()]+', Name.Class),
(r'[[(]', Name.Class, '#push'),
(r'[])]', Name.Class, '#pop'),
(r'', Name.Class, '#pop')
],
'expression': [
(r'[^][;,(){}$]+', Literal),
(r'[[(]', Literal, 'parenexp'),
(r'', Literal, '#pop')
],
'parenexp': [
(r'[^][;()]+', Literal),
(r'[[(]', Literal, '#push'),
(r'[])]', Literal, '#pop'),
(r'', Literal, '#pop')
],
'annotation': [
(r'[^][;,(){}=:]+', Name.Attribute),
(r'[[(]', Name.Attribute, 'annexp'),
(r'', Name.Attribute, '#pop')
],
'annexp': [
(r'[^][;()]+', Name.Attribute),
(r'[[(]', Name.Attribute, '#push'),
(r'[])]', Name.Attribute, '#pop'),
(r'', Name.Attribute, '#pop')
],
}
if __name__ == "__main__":
from setuptools import setup, find_packages
setup(name = "CapnpPygmentsLexer",
version = "0.1",
packages = find_packages(),
py_modules = [ 'capnp_lexer' ],
entry_points = {'pygments.lexers': 'capnp = capnp_lexer:CapnpLexer'})
| CapnpLexer |
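# Quick smoke test for the lexer above (hypothetical: the sample schema text is
# made up, and pygments must be installed with capnp_lexer on the path).
from pygments import highlight
from pygments.formatters import TerminalFormatter
from capnp_lexer import CapnpLexer

source = "struct Point {\n  x @0 :Int32;\n  y @1 :Int32;\n}\n"
print(highlight(source, CapnpLexer(), TerminalFormatter()))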
slider.component.ts | import { Component, Input, Output, EventEmitter, OnInit, OnChanges, SimpleChanges, SimpleChange } from '@angular/core';
import { DomSanitizer } from '@angular/platform-browser';
@Component({
selector: 'formless-slider',
templateUrl: './slider.component.html',
styleUrls: ['./slider.component.scss']
})
export class | implements OnInit, OnChanges {
@Input() init: number;
@Input() min: number = 1;
@Input() max: number = 100;
@Input() tooltip: string;
@Input() leftColor: string = '#2275c3';
@Input() rightColor: string = '#d5dae5';
@Input() tooltipColor: string = '#2275c3';
@Output() updated = new EventEmitter<number>();
background: any;
currentValue: number;
constructor(private sanitize: DomSanitizer) {}
ngOnInit() {
if (!this.init) {
this.update();
}
}
ngOnChanges(changes: SimpleChanges) {
const init: SimpleChange = changes.init;
if (init && init.currentValue !== this.currentValue) {
this.update();
}
}
update(evt?) {
if (evt) {
this.init = evt.target.value || evt.srcElement.value;
} else {
this.init = this.init || (this.max / 2);
}
this.currentValue = this.init;
const range = (this.init - this.min) / (this.max - this.min);
this.background = this.sanitize.bypassSecurityTrustStyle('-webkit-gradient(linear, left top, right top, ' +
'color-stop(' + range + ', ' + this.leftColor + '), ' + 'color-stop(' + range + ', ' + this.rightColor + '))');
this.updated.emit(this.init);
}
}
| FormlessSliderComponent |
cache.rs | use crate::error::BitResult;
use crate::obj::{BitObjKind, BitRawObj, Oid, WritableObject};
use crate::repo::BitRepo;
use rustc_hash::FxHashMap;
use std::io::Cursor;
#[derive(Default)]
pub struct BitObjCache<'rcx> {
    // consider using an LRU cache instead, though it's unclear what capacity
    // to pick, as most implementations require a fixed size
objects: FxHashMap<Oid, BitObjKind<'rcx>>,
}
impl<'rcx> BitObjCache<'rcx> {
pub(crate) fn get(&self, oid: Oid) -> BitObjKind<'rcx> {
self.objects[&oid]
}
pub(crate) fn insert(&mut self, oid: Oid, obj: BitObjKind<'rcx>) {
self.objects.insert(oid, obj);
}
pub(crate) fn get_or_insert_with(
&mut self,
oid: Oid,
f: impl FnOnce() -> BitResult<BitObjKind<'rcx>>,
) -> BitResult<BitObjKind<'rcx>> {
if let Some(&obj) = self.objects.get(&oid) {
Ok(obj)
} else {
let obj = f()?;
self.objects.insert(oid, obj);
Ok(obj)
}
}
}
/// A pseudo-odb backed directly by the object cache
pub(crate) struct VirtualOdb<'rcx> {
repo: BitRepo<'rcx>,
}
impl<'rcx> VirtualOdb<'rcx> {
pub fn | (repo: BitRepo<'rcx>) -> Self {
Self { repo }
}
pub fn write(&self, obj: &dyn WritableObject) -> BitResult<Oid> {
        // a somewhat roundabout implementation of write: serialize the
        // object out to bytes, then parse those bytes straight back in.
        // There is probably a better way.
let (oid, bytes) = obj.hash_and_serialize()?;
let raw = BitRawObj::from_stream(oid, Box::new(Cursor::new(bytes)))?;
let obj = BitObjKind::from_raw(self.repo, raw)?;
self.repo.cache().write().insert(oid, obj);
Ok(oid)
}
pub fn read(&self, oid: Oid) -> BitObjKind<'rcx> {
self.repo.cache().read().get(oid)
}
}
| new |
lib.rs | #[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate serde;
// Used for tests
#[cfg_attr(test, macro_use)]
extern crate serde_json;
use serde_json::Value;
use std::fs::File;
use std::io;
use std::io::Write;
use std::io::{BufRead, BufReader};
pub mod filter;
pub mod format;
pub mod line_formats;
use filter::*;
use format::*;
pub fn read_log(
maybe_file_path: Option<&str>,
unparsed_filters: Vec<&str>,
number_of_lines_option: Option<&str>,
) | {
let stdin = io::stdin();
let reader: Box<dyn BufRead> = match maybe_file_path {
Some(file_path) => Box::new(BufReader::new(
File::open(file_path).expect("File should exist"),
)),
None => Box::new(stdin.lock()),
};
let number_of_lines = number_of_lines_option.map(|n| {
n.parse::<i32>()
.expect("'lines' options should be a valid number.")
});
let filters = parse_filters(unparsed_filters);
let mut count: i32 = 0;
let stdout = io::stdout();
let mut stdout_lock = stdout.lock();
let mut formatter = Formatter::new();
for maybe_line in reader.lines() {
        // stop once the requested number of matching lines has been printed
        if number_of_lines.is_some() && count >= number_of_lines.unwrap() {
return;
}
let line = maybe_line.expect("Line should exist");
match serde_json::from_str::<Value>(line.as_str()) {
Err(_e) => write!(stdout_lock, "{}\n", formatter.format_not_json(&line)).unwrap_or(()),
Ok(v) => {
if v.is_object() {
if passes_filters(&filters, &v) {
count += 1;
write!(stdout_lock, "{}\n", formatter.format_message(v)).unwrap_or(())
}
} else {
write!(stdout_lock, "{}\n", formatter.format_not_json(&line)).unwrap_or(())
}
}
}
}
} |
|
0002_auto_20200424_1055.py | # Generated by Django 3.0.5 on 2020-04-24 10:55
|
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('snippets', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='snippet',
name='highlighted',
field=models.TextField(default=''),
preserve_default=False,
),
migrations.AddField(
model_name='snippet',
name='owner',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='snippets', to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
] | from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
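For context, a hedged sketch of the Snippet model these operations move toward; the two fields are grounded in the operations above, while everything else (remaining fields, module layout) is assumed:

from django.conf import settings
from django.db import models

class Snippet(models.Model):
    # fields added by this migration
    highlighted = models.TextField()
    owner = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name='snippets',
    )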
|
highcharts.js | /*
Highcharts JS v6.0.0 (2017-10-04)
(c) 2009-2016 Torstein Honsi
License: www.highcharts.com/license
*/
(function(R,N){"object"===typeof module&&module.exports?module.exports=R.document?N(R):N:R.Highcharts=N(R)})("undefined"!==typeof window?window:this,function(R){var N=function(){var a=R.document,z=R.navigator&&R.navigator.userAgent||"",B=a&&a.createElementNS&&!!a.createElementNS("http://www.w3.org/2000/svg","svg").createSVGRect,F=/(edge|msie|trident)/i.test(z)&&!R.opera,E=/Firefox/.test(z),l=E&&4>parseInt(z.split("Firefox/")[1],10);return R.Highcharts?R.Highcharts.error(16,!0):{product:"Highcharts",
version:"6.0.0",deg2rad:2*Math.PI/360,doc:a,hasBidiBug:l,hasTouch:a&&void 0!==a.documentElement.ontouchstart,isMS:F,isWebKit:/AppleWebKit/.test(z),isFirefox:E,isTouchDevice:/(Mobile|Android|Windows Phone)/.test(z),SVG_NS:"http://www.w3.org/2000/svg",chartCount:0,seriesTypes:{},symbolSizes:{},svg:B,win:R,marginNames:["plotTop","marginRight","marginBottom","plotLeft"],noop:function(){},charts:[]}}();(function(a){var z=[],B=a.charts,F=a.doc,E=a.win;a.error=function(l,f){l=a.isNumber(l)?"Highcharts error #"+
l+": www.highcharts.com/errors/"+l:l;if(f)throw Error(l);E.console&&console.log(l)};a.Fx=function(a,f,k){this.options=f;this.elem=a;this.prop=k};a.Fx.prototype={dSetter:function(){var a=this.paths[0],f=this.paths[1],k=[],m=this.now,u=a.length,v;if(1===m)k=this.toD;else if(u===f.length&&1>m)for(;u--;)v=parseFloat(a[u]),k[u]=isNaN(v)?a[u]:m*parseFloat(f[u]-v)+v;else k=f;this.elem.attr("d",k,null,!0)},update:function(){var a=this.elem,f=this.prop,k=this.now,m=this.options.step;if(this[f+"Setter"])this[f+
"Setter"]();else a.attr?a.element&&a.attr(f,k,null,!0):a.style[f]=k+this.unit;m&&m.call(a,k,this)},run:function(a,f,k){var m=this,l=function(a){return l.stopped?!1:m.step(a)},v=E.requestAnimationFrame||function(a){setTimeout(a,13)},d=function(){var a;for(a=0;a<z.length;a++)z[a]()||z.splice(a--,1);z.length&&v(d)};a===f?delete this.options.curAnim[this.prop]:(this.startTime=+new Date,this.start=a,this.end=f,this.unit=k,this.now=this.start,this.pos=0,l.elem=this.elem,l.prop=this.prop,l()&&1===z.push(l)&&
v(d))},step:function(l){var f=+new Date,k,m=this.options,u=this.elem,v=m.complete,d=m.duration,e=m.curAnim;u.attr&&!u.element?l=!1:l||f>=d+this.startTime?(this.now=this.end,this.pos=1,this.update(),k=e[this.prop]=!0,a.objectEach(e,function(a){!0!==a&&(k=!1)}),k&&v&&v.call(u),l=!1):(this.pos=m.easing((f-this.startTime)/d),this.now=this.start+(this.end-this.start)*this.pos,this.update(),l=!0);return l},initPath:function(l,f,k){function m(a){var c,b;for(w=a.length;w--;)c="M"===a[w]||"L"===a[w],b=/[a-zA-Z]/.test(a[w+
3]),c&&b&&a.splice(w+1,0,a[w+1],a[w+2],a[w+1],a[w+2])}function u(a,b){for(;a.length<q;){a[0]=b[q-a.length];var h=a.slice(0,c);[].splice.apply(a,[0,0].concat(h));G&&(h=a.slice(a.length-c),[].splice.apply(a,[a.length,0].concat(h)),w--)}a[0]="M"}function v(a,d){for(var e=(q-a.length)/c;0<e&&e--;)h=a.slice().splice(a.length/H-c,c*H),h[0]=d[q-c-e*c],b&&(h[c-6]=h[c-2],h[c-5]=h[c-1]),[].splice.apply(a,[a.length/H,0].concat(h)),G&&e--}f=f||"";var d,e=l.startX,n=l.endX,b=-1<f.indexOf("C"),c=b?7:3,q,h,w;f=
f.split(" ");k=k.slice();var G=l.isArea,H=G?2:1,y;b&&(m(f),m(k));if(e&&n){for(w=0;w<e.length;w++)if(e[w]===n[0]){d=w;break}else if(e[0]===n[n.length-e.length+w]){d=w;y=!0;break}void 0===d&&(f=[])}f.length&&a.isNumber(d)&&(q=k.length+d*H*c,y?(u(f,k),v(k,f)):(u(k,f),v(f,k)));return[f,k]}};a.Fx.prototype.fillSetter=a.Fx.prototype.strokeSetter=function(){this.elem.attr(this.prop,a.color(this.start).tweenTo(a.color(this.end),this.pos),null,!0)};a.extend=function(a,f){var l;a||(a={});for(l in f)a[l]=f[l];
return a};a.merge=function(){var l,f=arguments,k,m={},u=function(f,d){"object"!==typeof f&&(f={});a.objectEach(d,function(e,n){!a.isObject(e,!0)||a.isClass(e)||a.isDOMElement(e)?f[n]=d[n]:f[n]=u(f[n]||{},e)});return f};!0===f[0]&&(m=f[1],f=Array.prototype.slice.call(f,2));k=f.length;for(l=0;l<k;l++)m=u(m,f[l]);return m};a.pInt=function(a,f){return parseInt(a,f||10)};a.isString=function(a){return"string"===typeof a};a.isArray=function(a){a=Object.prototype.toString.call(a);return"[object Array]"===
a||"[object Array Iterator]"===a};a.isObject=function(l,f){return!!l&&"object"===typeof l&&(!f||!a.isArray(l))};a.isDOMElement=function(l){return a.isObject(l)&&"number"===typeof l.nodeType};a.isClass=function(l){var f=l&&l.constructor;return!(!a.isObject(l,!0)||a.isDOMElement(l)||!f||!f.name||"Object"===f.name)};a.isNumber=function(a){return"number"===typeof a&&!isNaN(a)};a.erase=function(a,f){for(var l=a.length;l--;)if(a[l]===f){a.splice(l,1);break}};a.defined=function(a){return void 0!==a&&null!==
a};a.attr=function(l,f,k){var m;a.isString(f)?a.defined(k)?l.setAttribute(f,k):l&&l.getAttribute&&(m=l.getAttribute(f)):a.defined(f)&&a.isObject(f)&&a.objectEach(f,function(a,f){l.setAttribute(f,a)});return m};a.splat=function(l){return a.isArray(l)?l:[l]};a.syncTimeout=function(a,f,k){if(f)return setTimeout(a,f,k);a.call(0,k)};a.pick=function(){var a=arguments,f,k,m=a.length;for(f=0;f<m;f++)if(k=a[f],void 0!==k&&null!==k)return k};a.css=function(l,f){a.isMS&&!a.svg&&f&&void 0!==f.opacity&&(f.filter=
"alpha(opacity\x3d"+100*f.opacity+")");a.extend(l.style,f)};a.createElement=function(l,f,k,m,u){l=F.createElement(l);var v=a.css;f&&a.extend(l,f);u&&v(l,{padding:0,border:"none",margin:0});k&&v(l,k);m&&m.appendChild(l);return l};a.extendClass=function(l,f){var k=function(){};k.prototype=new l;a.extend(k.prototype,f);return k};a.pad=function(a,f,k){return Array((f||2)+1-String(a).length).join(k||0)+a};a.relativeLength=function(a,f,k){return/%$/.test(a)?f*parseFloat(a)/100+(k||0):parseFloat(a)};a.wrap=
function(a,f,k){var m=a[f];a[f]=function(){var a=Array.prototype.slice.call(arguments),f=arguments,d=this;d.proceed=function(){m.apply(d,arguments.length?arguments:f)};a.unshift(m);a=k.apply(this,a);d.proceed=null;return a}};a.getTZOffset=function(l){var f=a.Date;return 6E4*(f.hcGetTimezoneOffset&&f.hcGetTimezoneOffset(l)||f.hcTimezoneOffset||0)};a.dateFormat=function(l,f,k){if(!a.defined(f)||isNaN(f))return a.defaultOptions.lang.invalidDate||"";l=a.pick(l,"%Y-%m-%d %H:%M:%S");var m=a.Date,u=new m(f-
a.getTZOffset(f)),v=u[m.hcGetHours](),d=u[m.hcGetDay](),e=u[m.hcGetDate](),n=u[m.hcGetMonth](),b=u[m.hcGetFullYear](),c=a.defaultOptions.lang,q=c.weekdays,h=c.shortWeekdays,w=a.pad,m=a.extend({a:h?h[d]:q[d].substr(0,3),A:q[d],d:w(e),e:w(e,2," "),w:d,b:c.shortMonths[n],B:c.months[n],m:w(n+1),y:b.toString().substr(2,2),Y:b,H:w(v),k:v,I:w(v%12||12),l:v%12||12,M:w(u[m.hcGetMinutes]()),p:12>v?"AM":"PM",P:12>v?"am":"pm",S:w(u.getSeconds()),L:w(Math.round(f%1E3),3)},a.dateFormats);a.objectEach(m,function(a,
b){for(;-1!==l.indexOf("%"+b);)l=l.replace("%"+b,"function"===typeof a?a(f):a)});return k?l.substr(0,1).toUpperCase()+l.substr(1):l};a.formatSingle=function(l,f){var k=/\.([0-9])/,m=a.defaultOptions.lang;/f$/.test(l)?(k=(k=l.match(k))?k[1]:-1,null!==f&&(f=a.numberFormat(f,k,m.decimalPoint,-1<l.indexOf(",")?m.thousandsSep:""))):f=a.dateFormat(l,f);return f};a.format=function(l,f){for(var k="{",m=!1,u,v,d,e,n=[],b;l;){k=l.indexOf(k);if(-1===k)break;u=l.slice(0,k);if(m){u=u.split(":");v=u.shift().split(".");
e=v.length;b=f;for(d=0;d<e;d++)b&&(b=b[v[d]]);u.length&&(b=a.formatSingle(u.join(":"),b));n.push(b)}else n.push(u);l=l.slice(k+1);k=(m=!m)?"}":"{"}n.push(l);return n.join("")};a.getMagnitude=function(a){return Math.pow(10,Math.floor(Math.log(a)/Math.LN10))};a.normalizeTickInterval=function(l,f,k,m,u){var v,d=l;k=a.pick(k,1);v=l/k;f||(f=u?[1,1.2,1.5,2,2.5,3,4,5,6,8,10]:[1,2,2.5,5,10],!1===m&&(1===k?f=a.grep(f,function(a){return 0===a%1}):.1>=k&&(f=[1/k])));for(m=0;m<f.length&&!(d=f[m],u&&d*k>=l||!u&&
v<=(f[m]+(f[m+1]||f[m]))/2);m++);return d=a.correctFloat(d*k,-Math.round(Math.log(.001)/Math.LN10))};a.stableSort=function(a,f){var k=a.length,m,l;for(l=0;l<k;l++)a[l].safeI=l;a.sort(function(a,d){m=f(a,d);return 0===m?a.safeI-d.safeI:m});for(l=0;l<k;l++)delete a[l].safeI};a.arrayMin=function(a){for(var f=a.length,k=a[0];f--;)a[f]<k&&(k=a[f]);return k};a.arrayMax=function(a){for(var f=a.length,k=a[0];f--;)a[f]>k&&(k=a[f]);return k};a.destroyObjectProperties=function(l,f){a.objectEach(l,function(a,
m){a&&a!==f&&a.destroy&&a.destroy();delete l[m]})};a.discardElement=function(l){var f=a.garbageBin;f||(f=a.createElement("div"));l&&f.appendChild(l);f.innerHTML=""};a.correctFloat=function(a,f){return parseFloat(a.toPrecision(f||14))};a.setAnimation=function(l,f){f.renderer.globalAnimation=a.pick(l,f.options.chart.animation,!0)};a.animObject=function(l){return a.isObject(l)?a.merge(l):{duration:l?500:0}};a.timeUnits={millisecond:1,second:1E3,minute:6E4,hour:36E5,day:864E5,week:6048E5,month:24192E5,
year:314496E5};a.numberFormat=function(l,f,k,m){l=+l||0;f=+f;var u=a.defaultOptions.lang,v=(l.toString().split(".")[1]||"").split("e")[0].length,d,e,n=l.toString().split("e");-1===f?f=Math.min(v,20):a.isNumber(f)||(f=2);e=(Math.abs(n[1]?n[0]:l)+Math.pow(10,-Math.max(f,v)-1)).toFixed(f);v=String(a.pInt(e));d=3<v.length?v.length%3:0;k=a.pick(k,u.decimalPoint);m=a.pick(m,u.thousandsSep);l=(0>l?"-":"")+(d?v.substr(0,d)+m:"");l+=v.substr(d).replace(/(\d{3})(?=\d)/g,"$1"+m);f&&(l+=k+e.slice(-f));n[1]&&
(l+="e"+n[1]);return l};Math.easeInOutSine=function(a){return-.5*(Math.cos(Math.PI*a)-1)};a.getStyle=function(l,f,k){if("width"===f)return Math.min(l.offsetWidth,l.scrollWidth)-a.getStyle(l,"padding-left")-a.getStyle(l,"padding-right");if("height"===f)return Math.min(l.offsetHeight,l.scrollHeight)-a.getStyle(l,"padding-top")-a.getStyle(l,"padding-bottom");E.getComputedStyle||a.error(27,!0);if(l=E.getComputedStyle(l,void 0))l=l.getPropertyValue(f),a.pick(k,"opacity"!==f)&&(l=a.pInt(l));return l};a.inArray=
function(l,f){return(a.indexOfPolyfill||Array.prototype.indexOf).call(f,l)};a.grep=function(l,f){return(a.filterPolyfill||Array.prototype.filter).call(l,f)};a.find=function(l,f){return(a.findPolyfill||Array.prototype.find).call(l,f)};a.map=function(a,f){for(var k=[],m=0,l=a.length;m<l;m++)k[m]=f.call(a[m],a[m],m,a);return k};a.reduce=function(l,f,k){return(a.reducePolyfill||Array.prototype.reduce).call(l,f,k)};a.offset=function(a){var f=F.documentElement;a=a.getBoundingClientRect();return{top:a.top+
(E.pageYOffset||f.scrollTop)-(f.clientTop||0),left:a.left+(E.pageXOffset||f.scrollLeft)-(f.clientLeft||0)}};a.stop=function(a,f){for(var k=z.length;k--;)z[k].elem!==a||f&&f!==z[k].prop||(z[k].stopped=!0)};a.each=function(l,f,k){return(a.forEachPolyfill||Array.prototype.forEach).call(l,f,k)};a.objectEach=function(a,f,k){for(var m in a)a.hasOwnProperty(m)&&f.call(k,a[m],m,a)};a.addEvent=function(l,f,k){var m=l.hcEvents=l.hcEvents||{},u=l.addEventListener||a.addEventListenerPolyfill;u&&u.call(l,f,k,
!1);m[f]||(m[f]=[]);m[f].push(k);return function(){a.removeEvent(l,f,k)}};a.removeEvent=function(l,f,k){function m(d,b){var c=l.removeEventListener||a.removeEventListenerPolyfill;c&&c.call(l,d,b,!1)}function | (){var e,b;l.nodeName&&(f?(e={},e[f]=!0):e=d,a.objectEach(e,function(a,e){if(d[e])for(b=d[e].length;b--;)m(e,d[e][b])}))}var v,d=l.hcEvents,e;d&&(f?(v=d[f]||[],k?(e=a.inArray(k,v),-1<e&&(v.splice(e,1),d[f]=v),m(f,k)):(u(),d[f]=[])):(u(),l.hcEvents={}))};a.fireEvent=function(l,f,k,m){var u;u=
l.hcEvents;var v,d;k=k||{};if(F.createEvent&&(l.dispatchEvent||l.fireEvent))u=F.createEvent("Events"),u.initEvent(f,!0,!0),a.extend(u,k),l.dispatchEvent?l.dispatchEvent(u):l.fireEvent(f,u);else if(u)for(u=u[f]||[],v=u.length,k.target||a.extend(k,{preventDefault:function(){k.defaultPrevented=!0},target:l,type:f}),f=0;f<v;f++)(d=u[f])&&!1===d.call(l,k)&&k.preventDefault();m&&!k.defaultPrevented&&m(k)};a.animate=function(l,f,k){var m,u="",v,d,e;a.isObject(k)||(e=arguments,k={duration:e[2],easing:e[3],
complete:e[4]});a.isNumber(k.duration)||(k.duration=400);k.easing="function"===typeof k.easing?k.easing:Math[k.easing]||Math.easeInOutSine;k.curAnim=a.merge(f);a.objectEach(f,function(e,b){a.stop(l,b);d=new a.Fx(l,k,b);v=null;"d"===b?(d.paths=d.initPath(l,l.d,f.d),d.toD=f.d,m=0,v=1):l.attr?m=l.attr(b):(m=parseFloat(a.getStyle(l,b))||0,"opacity"!==b&&(u="px"));v||(v=e);v&&v.match&&v.match("px")&&(v=v.replace(/px/g,""));d.run(m,v,u)})};a.seriesType=function(l,f,k,m,u){var v=a.getOptions(),d=a.seriesTypes;
v.plotOptions[l]=a.merge(v.plotOptions[f],k);d[l]=a.extendClass(d[f]||function(){},m);d[l].prototype.type=l;u&&(d[l].prototype.pointClass=a.extendClass(a.Point,u));return d[l]};a.uniqueKey=function(){var a=Math.random().toString(36).substring(2,9),f=0;return function(){return"highcharts-"+a+"-"+f++}}();E.jQuery&&(E.jQuery.fn.highcharts=function(){var l=[].slice.call(arguments);if(this[0])return l[0]?(new (a[a.isString(l[0])?l.shift():"Chart"])(this[0],l[0],l[1]),this):B[a.attr(this[0],"data-highcharts-chart")]})})(N);
(function(a){var z=a.each,B=a.isNumber,F=a.map,E=a.merge,l=a.pInt;a.Color=function(f){if(!(this instanceof a.Color))return new a.Color(f);this.init(f)};a.Color.prototype={parsers:[{regex:/rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]?(?:\.[0-9]+)?)\s*\)/,parse:function(a){return[l(a[1]),l(a[2]),l(a[3]),parseFloat(a[4],10)]}},{regex:/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/,parse:function(a){return[l(a[1]),l(a[2]),l(a[3]),1]}}],names:{none:"rgba(255,255,255,0)",
white:"#ffffff",black:"#000000"},init:function(f){var k,m,u,v;if((this.input=f=this.names[f&&f.toLowerCase?f.toLowerCase():""]||f)&&f.stops)this.stops=F(f.stops,function(d){return new a.Color(d[1])});else if(f&&f.charAt&&"#"===f.charAt()&&(k=f.length,f=parseInt(f.substr(1),16),7===k?m=[(f&16711680)>>16,(f&65280)>>8,f&255,1]:4===k&&(m=[(f&3840)>>4|(f&3840)>>8,(f&240)>>4|f&240,(f&15)<<4|f&15,1])),!m)for(u=this.parsers.length;u--&&!m;)v=this.parsers[u],(k=v.regex.exec(f))&&(m=v.parse(k));this.rgba=m||
[]},get:function(a){var f=this.input,m=this.rgba,u;this.stops?(u=E(f),u.stops=[].concat(u.stops),z(this.stops,function(f,d){u.stops[d]=[u.stops[d][0],f.get(a)]})):u=m&&B(m[0])?"rgb"===a||!a&&1===m[3]?"rgb("+m[0]+","+m[1]+","+m[2]+")":"a"===a?m[3]:"rgba("+m.join(",")+")":f;return u},brighten:function(a){var f,m=this.rgba;if(this.stops)z(this.stops,function(f){f.brighten(a)});else if(B(a)&&0!==a)for(f=0;3>f;f++)m[f]+=l(255*a),0>m[f]&&(m[f]=0),255<m[f]&&(m[f]=255);return this},setOpacity:function(a){this.rgba[3]=
a;return this},tweenTo:function(a,k){var f=this.rgba,u=a.rgba;u.length&&f&&f.length?(a=1!==u[3]||1!==f[3],k=(a?"rgba(":"rgb(")+Math.round(u[0]+(f[0]-u[0])*(1-k))+","+Math.round(u[1]+(f[1]-u[1])*(1-k))+","+Math.round(u[2]+(f[2]-u[2])*(1-k))+(a?","+(u[3]+(f[3]-u[3])*(1-k)):"")+")"):k=a.input||"none";return k}};a.color=function(f){return new a.Color(f)}})(N);(function(a){var z,B,F=a.addEvent,E=a.animate,l=a.attr,f=a.charts,k=a.color,m=a.css,u=a.createElement,v=a.defined,d=a.deg2rad,e=a.destroyObjectProperties,
n=a.doc,b=a.each,c=a.extend,q=a.erase,h=a.grep,w=a.hasTouch,G=a.inArray,H=a.isArray,y=a.isFirefox,K=a.isMS,r=a.isObject,A=a.isString,J=a.isWebKit,p=a.merge,C=a.noop,D=a.objectEach,I=a.pick,g=a.pInt,t=a.removeEvent,Q=a.stop,M=a.svg,O=a.SVG_NS,L=a.symbolSizes,S=a.win;z=a.SVGElement=function(){return this};c(z.prototype,{opacity:1,SVG_NS:O,textProps:"direction fontSize fontWeight fontFamily fontStyle color lineHeight width textAlign textDecoration textOverflow textOutline".split(" "),init:function(a,
g){this.element="span"===g?u(g):n.createElementNS(this.SVG_NS,g);this.renderer=a},animate:function(x,g,t){g=a.animObject(I(g,this.renderer.globalAnimation,!0));0!==g.duration?(t&&(g.complete=t),E(this,x,g)):(this.attr(x,null,t),g.step&&g.step.call(this));return this},colorGradient:function(x,g,t){var c=this.renderer,h,P,d,e,q,w,M,n,L,f,y=[],r;x.radialGradient?P="radialGradient":x.linearGradient&&(P="linearGradient");P&&(d=x[P],q=c.gradients,M=x.stops,f=t.radialReference,H(d)&&(x[P]=d={x1:d[0],y1:d[1],
x2:d[2],y2:d[3],gradientUnits:"userSpaceOnUse"}),"radialGradient"===P&&f&&!v(d.gradientUnits)&&(e=d,d=p(d,c.getRadialAttr(f,e),{gradientUnits:"userSpaceOnUse"})),D(d,function(a,x){"id"!==x&&y.push(x,a)}),D(M,function(a){y.push(a)}),y=y.join(","),q[y]?f=q[y].attr("id"):(d.id=f=a.uniqueKey(),q[y]=w=c.createElement(P).attr(d).add(c.defs),w.radAttr=e,w.stops=[],b(M,function(x){0===x[1].indexOf("rgba")?(h=a.color(x[1]),n=h.get("rgb"),L=h.get("a")):(n=x[1],L=1);x=c.createElement("stop").attr({offset:x[0],
"stop-color":n,"stop-opacity":L}).add(w);w.stops.push(x)})),r="url("+c.url+"#"+f+")",t.setAttribute(g,r),t.gradient=y,x.toString=function(){return r})},applyTextOutline:function(x){var g=this.element,t,c,h,d,p;-1!==x.indexOf("contrast")&&(x=x.replace(/contrast/g,this.renderer.getContrast(g.style.fill)));x=x.split(" ");c=x[x.length-1];if((h=x[0])&&"none"!==h&&a.svg){this.fakeTS=!0;x=[].slice.call(g.getElementsByTagName("tspan"));this.ySetter=this.xSetter;h=h.replace(/(^[\d\.]+)(.*?)$/g,function(a,
x,g){return 2*x+g});for(p=x.length;p--;)t=x[p],"highcharts-text-outline"===t.getAttribute("class")&&q(x,g.removeChild(t));d=g.firstChild;b(x,function(a,x){0===x&&(a.setAttribute("x",g.getAttribute("x")),x=g.getAttribute("y"),a.setAttribute("y",x||0),null===x&&g.setAttribute("y",0));a=a.cloneNode(1);l(a,{"class":"highcharts-text-outline",fill:c,stroke:c,"stroke-width":h,"stroke-linejoin":"round"});g.insertBefore(a,d)})}},attr:function(a,g,t,c){var x,h=this.element,b,d=this,P,p;"string"===typeof a&&
void 0!==g&&(x=a,a={},a[x]=g);"string"===typeof a?d=(this[a+"Getter"]||this._defaultGetter).call(this,a,h):(D(a,function(x,g){P=!1;c||Q(this,g);this.symbolName&&/^(x|y|width|height|r|start|end|innerR|anchorX|anchorY)$/.test(g)&&(b||(this.symbolAttr(a),b=!0),P=!0);!this.rotation||"x"!==g&&"y"!==g||(this.doTransform=!0);P||(p=this[g+"Setter"]||this._defaultSetter,p.call(this,x,g,h),this.shadows&&/^(width|height|visibility|x|y|d|transform|cx|cy|r)$/.test(g)&&this.updateShadows(g,x,p))},this),this.afterSetters());
t&&t();return d},afterSetters:function(){this.doTransform&&(this.updateTransform(),this.doTransform=!1)},updateShadows:function(a,g,t){for(var x=this.shadows,c=x.length;c--;)t.call(x[c],"height"===a?Math.max(g-(x[c].cutHeight||0),0):"d"===a?this.d:g,a,x[c])},addClass:function(a,g){var x=this.attr("class")||"";-1===x.indexOf(a)&&(g||(a=(x+(x?" ":"")+a).replace(" "," ")),this.attr("class",a));return this},hasClass:function(a){return-1!==G(a,(this.attr("class")||"").split(" "))},removeClass:function(a){return this.attr("class",
(this.attr("class")||"").replace(a,""))},symbolAttr:function(a){var x=this;b("x y r start end width height innerR anchorX anchorY".split(" "),function(g){x[g]=I(a[g],x[g])});x.attr({d:x.renderer.symbols[x.symbolName](x.x,x.y,x.width,x.height,x)})},clip:function(a){return this.attr("clip-path",a?"url("+this.renderer.url+"#"+a.id+")":"none")},crisp:function(a,g){var x=this,t={},c;g=g||a.strokeWidth||0;c=Math.round(g)%2/2;a.x=Math.floor(a.x||x.x||0)+c;a.y=Math.floor(a.y||x.y||0)+c;a.width=Math.floor((a.width||
x.width||0)-2*c);a.height=Math.floor((a.height||x.height||0)-2*c);v(a.strokeWidth)&&(a.strokeWidth=g);D(a,function(a,g){x[g]!==a&&(x[g]=t[g]=a)});return t},css:function(a){var x=this.styles,t={},h=this.element,b,d="",p,e=!x,q=["textOutline","textOverflow","width"];a&&a.color&&(a.fill=a.color);x&&D(a,function(a,g){a!==x[g]&&(t[g]=a,e=!0)});e&&(x&&(a=c(x,t)),b=this.textWidth=a&&a.width&&"auto"!==a.width&&"text"===h.nodeName.toLowerCase()&&g(a.width),this.styles=a,b&&!M&&this.renderer.forExport&&delete a.width,
K&&!M?m(this.element,a):(p=function(a,x){return"-"+x.toLowerCase()},D(a,function(a,x){-1===G(x,q)&&(d+=x.replace(/([A-Z])/g,p)+":"+a+";")}),d&&l(h,"style",d)),this.added&&("text"===this.element.nodeName&&this.renderer.buildText(this),a&&a.textOutline&&this.applyTextOutline(a.textOutline)));return this},strokeWidth:function(){return this["stroke-width"]||0},on:function(a,g){var x=this,t=x.element;w&&"click"===a?(t.ontouchstart=function(a){x.touchEventFired=Date.now();a.preventDefault();g.call(t,a)},
t.onclick=function(a){(-1===S.navigator.userAgent.indexOf("Android")||1100<Date.now()-(x.touchEventFired||0))&&g.call(t,a)}):t["on"+a]=g;return this},setRadialReference:function(a){var x=this.renderer.gradients[this.element.gradient];this.element.radialReference=a;x&&x.radAttr&&x.animate(this.renderer.getRadialAttr(a,x.radAttr));return this},translate:function(a,g){return this.attr({translateX:a,translateY:g})},invert:function(a){this.inverted=a;this.updateTransform();return this},updateTransform:function(){var a=
this.translateX||0,g=this.translateY||0,t=this.scaleX,c=this.scaleY,h=this.inverted,b=this.rotation,d=this.matrix,p=this.element;h&&(a+=this.width,g+=this.height);a=["translate("+a+","+g+")"];v(d)&&a.push("matrix("+d.join(",")+")");h?a.push("rotate(90) scale(-1,1)"):b&&a.push("rotate("+b+" "+I(this.rotationOriginX,p.getAttribute("x"),0)+" "+I(this.rotationOriginY,p.getAttribute("y")||0)+")");(v(t)||v(c))&&a.push("scale("+I(t,1)+" "+I(c,1)+")");a.length&&p.setAttribute("transform",a.join(" "))},toFront:function(){var a=
this.element;a.parentNode.appendChild(a);return this},align:function(a,g,t){var x,c,h,b,d={};c=this.renderer;h=c.alignedObjects;var p,e;if(a){if(this.alignOptions=a,this.alignByTranslate=g,!t||A(t))this.alignTo=x=t||"renderer",q(h,this),h.push(this),t=null}else a=this.alignOptions,g=this.alignByTranslate,x=this.alignTo;t=I(t,c[x],c);x=a.align;c=a.verticalAlign;h=(t.x||0)+(a.x||0);b=(t.y||0)+(a.y||0);"right"===x?p=1:"center"===x&&(p=2);p&&(h+=(t.width-(a.width||0))/p);d[g?"translateX":"x"]=Math.round(h);
"bottom"===c?e=1:"middle"===c&&(e=2);e&&(b+=(t.height-(a.height||0))/e);d[g?"translateY":"y"]=Math.round(b);this[this.placed?"animate":"attr"](d);this.placed=!0;this.alignAttr=d;return this},getBBox:function(a,g){var x,t=this.renderer,h,p=this.element,e=this.styles,P,q=this.textStr,w,M=t.cache,n=t.cacheKeys,L;g=I(g,this.rotation);h=g*d;P=e&&e.fontSize;void 0!==q&&(L=q.toString(),-1===L.indexOf("\x3c")&&(L=L.replace(/[0-9]/g,"0")),L+=["",g||0,P,e&&e.width,e&&e.textOverflow].join());L&&!a&&(x=M[L]);
if(!x){if(p.namespaceURI===this.SVG_NS||t.forExport){try{(w=this.fakeTS&&function(a){b(p.querySelectorAll(".highcharts-text-outline"),function(x){x.style.display=a})})&&w("none"),x=p.getBBox?c({},p.getBBox()):{width:p.offsetWidth,height:p.offsetHeight},w&&w("")}catch(X){}if(!x||0>x.width)x={width:0,height:0}}else x=this.htmlGetBBox();t.isSVG&&(a=x.width,t=x.height,e&&"11px"===e.fontSize&&17===Math.round(t)&&(x.height=t=14),g&&(x.width=Math.abs(t*Math.sin(h))+Math.abs(a*Math.cos(h)),x.height=Math.abs(t*
Math.cos(h))+Math.abs(a*Math.sin(h))));if(L&&0<x.height){for(;250<n.length;)delete M[n.shift()];M[L]||n.push(L);M[L]=x}}return x},show:function(a){return this.attr({visibility:a?"inherit":"visible"})},hide:function(){return this.attr({visibility:"hidden"})},fadeOut:function(a){var g=this;g.animate({opacity:0},{duration:a||150,complete:function(){g.attr({y:-9999})}})},add:function(a){var g=this.renderer,x=this.element,t;a&&(this.parentGroup=a);this.parentInverted=a&&a.inverted;void 0!==this.textStr&&
g.buildText(this);this.added=!0;if(!a||a.handleZ||this.zIndex)t=this.zIndexSetter();t||(a?a.element:g.box).appendChild(x);if(this.onAdd)this.onAdd();return this},safeRemoveChild:function(a){var g=a.parentNode;g&&g.removeChild(a)},destroy:function(){var a=this,g=a.element||{},t=a.renderer.isSVG&&"SPAN"===g.nodeName&&a.parentGroup,c=g.ownerSVGElement;g.onclick=g.onmouseout=g.onmouseover=g.onmousemove=g.point=null;Q(a);a.clipPath&&c&&(b(c.querySelectorAll("[clip-path]"),function(g){-1<g.getAttribute("clip-path").indexOf(a.clipPath.element.id+
")")&&g.removeAttribute("clip-path")}),a.clipPath=a.clipPath.destroy());if(a.stops){for(c=0;c<a.stops.length;c++)a.stops[c]=a.stops[c].destroy();a.stops=null}a.safeRemoveChild(g);for(a.destroyShadows();t&&t.div&&0===t.div.childNodes.length;)g=t.parentGroup,a.safeRemoveChild(t.div),delete t.div,t=g;a.alignTo&&q(a.renderer.alignedObjects,a);D(a,function(g,x){delete a[x]});return null},shadow:function(a,g,t){var x=[],c,h,b=this.element,d,p,e,q;if(!a)this.destroyShadows();else if(!this.shadows){p=I(a.width,
3);e=(a.opacity||.15)/p;q=this.parentInverted?"(-1,-1)":"("+I(a.offsetX,1)+", "+I(a.offsetY,1)+")";for(c=1;c<=p;c++)h=b.cloneNode(0),d=2*p+1-2*c,l(h,{isShadow:"true",stroke:a.color||"#000000","stroke-opacity":e*c,"stroke-width":d,transform:"translate"+q,fill:"none"}),t&&(l(h,"height",Math.max(l(h,"height")-d,0)),h.cutHeight=d),g?g.element.appendChild(h):b.parentNode&&b.parentNode.insertBefore(h,b),x.push(h);this.shadows=x}return this},destroyShadows:function(){b(this.shadows||[],function(a){this.safeRemoveChild(a)},
this);this.shadows=void 0},xGetter:function(a){"circle"===this.element.nodeName&&("x"===a?a="cx":"y"===a&&(a="cy"));return this._defaultGetter(a)},_defaultGetter:function(a){a=I(this[a],this.element?this.element.getAttribute(a):null,0);/^[\-0-9\.]+$/.test(a)&&(a=parseFloat(a));return a},dSetter:function(a,g,t){a&&a.join&&(a=a.join(" "));/(NaN| {2}|^$)/.test(a)&&(a="M 0 0");this[g]!==a&&(t.setAttribute(g,a),this[g]=a)},dashstyleSetter:function(a){var t,x=this["stroke-width"];"inherit"===x&&(x=1);if(a=
a&&a.toLowerCase()){a=a.replace("shortdashdotdot","3,1,1,1,1,1,").replace("shortdashdot","3,1,1,1").replace("shortdot","1,1,").replace("shortdash","3,1,").replace("longdash","8,3,").replace(/dot/g,"1,3,").replace("dash","4,3,").replace(/,$/,"").split(",");for(t=a.length;t--;)a[t]=g(a[t])*x;a=a.join(",").replace(/NaN/g,"none");this.element.setAttribute("stroke-dasharray",a)}},alignSetter:function(a){this.element.setAttribute("text-anchor",{left:"start",center:"middle",right:"end"}[a])},opacitySetter:function(a,
g,t){this[g]=a;t.setAttribute(g,a)},titleSetter:function(a){var g=this.element.getElementsByTagName("title")[0];g||(g=n.createElementNS(this.SVG_NS,"title"),this.element.appendChild(g));g.firstChild&&g.removeChild(g.firstChild);g.appendChild(n.createTextNode(String(I(a),"").replace(/<[^>]*>/g,"")))},textSetter:function(a){a!==this.textStr&&(delete this.bBox,this.textStr=a,this.added&&this.renderer.buildText(this))},fillSetter:function(a,g,t){"string"===typeof a?t.setAttribute(g,a):a&&this.colorGradient(a,
g,t)},visibilitySetter:function(a,g,t){"inherit"===a?t.removeAttribute(g):this[g]!==a&&t.setAttribute(g,a);this[g]=a},zIndexSetter:function(a,t){var c=this.renderer,x=this.parentGroup,h=(x||c).element||c.box,b,d=this.element,p,e,c=h===c.box;b=this.added;var q;v(a)&&(d.zIndex=a,a=+a,this[t]===a&&(b=!1),this[t]=a);if(b){(a=this.zIndex)&&x&&(x.handleZ=!0);t=h.childNodes;for(q=t.length-1;0<=q&&!p;q--)if(x=t[q],b=x.zIndex,e=!v(b),x!==d)if(0>a&&e&&!c&&!q)h.insertBefore(d,t[q]),p=!0;else if(g(b)<=a||e&&
(!v(a)||0<=a))h.insertBefore(d,t[q+1]||null),p=!0;p||(h.insertBefore(d,t[c?3:0]||null),p=!0)}return p},_defaultSetter:function(a,g,t){t.setAttribute(g,a)}});z.prototype.yGetter=z.prototype.xGetter;z.prototype.translateXSetter=z.prototype.translateYSetter=z.prototype.rotationSetter=z.prototype.verticalAlignSetter=z.prototype.rotationOriginXSetter=z.prototype.rotationOriginYSetter=z.prototype.scaleXSetter=z.prototype.scaleYSetter=z.prototype.matrixSetter=function(a,g){this[g]=a;this.doTransform=!0};
z.prototype["stroke-widthSetter"]=z.prototype.strokeSetter=function(a,g,t){this[g]=a;this.stroke&&this["stroke-width"]?(z.prototype.fillSetter.call(this,this.stroke,"stroke",t),t.setAttribute("stroke-width",this["stroke-width"]),this.hasStroke=!0):"stroke-width"===g&&0===a&&this.hasStroke&&(t.removeAttribute("stroke"),this.hasStroke=!1)};B=a.SVGRenderer=function(){this.init.apply(this,arguments)};c(B.prototype,{Element:z,SVG_NS:O,init:function(a,g,t,c,h,b){var x;c=this.createElement("svg").attr({version:"1.1",
"class":"highcharts-root"}).css(this.getStyle(c));x=c.element;a.appendChild(x);-1===a.innerHTML.indexOf("xmlns")&&l(x,"xmlns",this.SVG_NS);this.isSVG=!0;this.box=x;this.boxWrapper=c;this.alignedObjects=[];this.url=(y||J)&&n.getElementsByTagName("base").length?S.location.href.replace(/#.*?$/,"").replace(/<[^>]*>/g,"").replace(/([\('\)])/g,"\\$1").replace(/ /g,"%20"):"";this.createElement("desc").add().element.appendChild(n.createTextNode("Created with Highcharts 6.0.0"));this.defs=this.createElement("defs").add();
this.allowHTML=b;this.forExport=h;this.gradients={};this.cache={};this.cacheKeys=[];this.imgCount=0;this.setSize(g,t,!1);var d;y&&a.getBoundingClientRect&&(g=function(){m(a,{left:0,top:0});d=a.getBoundingClientRect();m(a,{left:Math.ceil(d.left)-d.left+"px",top:Math.ceil(d.top)-d.top+"px"})},g(),this.unSubPixelFix=F(S,"resize",g))},getStyle:function(a){return this.style=c({fontFamily:'"Lucida Grande", "Lucida Sans Unicode", Arial, Helvetica, sans-serif',fontSize:"12px"},a)},setStyle:function(a){this.boxWrapper.css(this.getStyle(a))},
isHidden:function(){return!this.boxWrapper.getBBox().width},destroy:function(){var a=this.defs;this.box=null;this.boxWrapper=this.boxWrapper.destroy();e(this.gradients||{});this.gradients=null;a&&(this.defs=a.destroy());this.unSubPixelFix&&this.unSubPixelFix();return this.alignedObjects=null},createElement:function(a){var g=new this.Element;g.init(this,a);return g},draw:C,getRadialAttr:function(a,g){return{cx:a[0]-a[2]/2+g.cx*a[2],cy:a[1]-a[2]/2+g.cy*a[2],r:g.r*a[2]}},getSpanWidth:function(a,g){var t=
a.getBBox(!0).width;!M&&this.forExport&&(t=this.measureSpanWidth(g.firstChild.data,a.styles));return t},applyEllipsis:function(a,g,t,c){var h=a.rotation,b=t,x,d=0,p=t.length,e=function(a){g.removeChild(g.firstChild);a&&g.appendChild(n.createTextNode(a))},q;a.rotation=0;b=this.getSpanWidth(a,g);if(q=b>c){for(;d<=p;)x=Math.ceil((d+p)/2),b=t.substring(0,x)+"\u2026",e(b),b=this.getSpanWidth(a,g),d===p?d=p+1:b>c?p=x-1:d=x;0===p&&e("")}a.rotation=h;return q},escapes:{"\x26":"\x26amp;","\x3c":"\x26lt;",
"\x3e":"\x26gt;","'":"\x26#39;",'"':"\x26quot"},buildText:function(a){var t=a.element,c=this,d=c.forExport,x=I(a.textStr,"").toString(),p=-1!==x.indexOf("\x3c"),e=t.childNodes,q,w,L,f,y=l(t,"x"),r=a.styles,C=a.textWidth,G=r&&r.lineHeight,A=r&&r.textOutline,k=r&&"ellipsis"===r.textOverflow,Q=r&&"nowrap"===r.whiteSpace,H=r&&r.fontSize,u,v,K=e.length,r=C&&!a.added&&this.box,S=function(a){var h;h=/(px|em)$/.test(a&&a.style.fontSize)?a.style.fontSize:H||c.style.fontSize||12;return G?g(G):c.fontMetrics(h,
a.getAttribute("style")?a:t).h},J=function(a){D(c.escapes,function(g,t){a=a.replace(new RegExp(g,"g"),t)});return a};u=[x,k,Q,G,A,H,C].join();if(u!==a.textCache){for(a.textCache=u;K--;)t.removeChild(e[K]);p||A||k||C||-1!==x.indexOf(" ")?(q=/<.*class="([^"]+)".*>/,w=/<.*style="([^"]+)".*>/,L=/<.*href="([^"]+)".*>/,r&&r.appendChild(t),x=p?x.replace(/<(b|strong)>/g,'\x3cspan style\x3d"font-weight:bold"\x3e').replace(/<(i|em)>/g,'\x3cspan style\x3d"font-style:italic"\x3e').replace(/<a/g,"\x3cspan").replace(/<\/(b|strong|i|em|a)>/g,
"\x3c/span\x3e").split(/<br.*?>/g):[x],x=h(x,function(a){return""!==a}),b(x,function(g,h){var x,p=0;g=g.replace(/^\s+|\s+$/g,"").replace(/<span/g,"|||\x3cspan").replace(/<\/span>/g,"\x3c/span\x3e|||");x=g.split("|||");b(x,function(g){if(""!==g||1===x.length){var b={},e=n.createElementNS(c.SVG_NS,"tspan"),r,D;q.test(g)&&(r=g.match(q)[1],l(e,"class",r));w.test(g)&&(D=g.match(w)[1].replace(/(;| |^)color([ :])/,"$1fill$2"),l(e,"style",D));L.test(g)&&!d&&(l(e,"onclick",'location.href\x3d"'+g.match(L)[1]+
'"'),l(e,"class","highcharts-anchor"),m(e,{cursor:"pointer"}));g=J(g.replace(/<[a-zA-Z\/](.|\n)*?>/g,"")||" ");if(" "!==g){e.appendChild(n.createTextNode(g));p?b.dx=0:h&&null!==y&&(b.x=y);l(e,b);t.appendChild(e);!p&&v&&(!M&&d&&m(e,{display:"block"}),l(e,"dy",S(e)));if(C){b=g.replace(/([^\^])-/g,"$1- ").split(" ");r=1<x.length||h||1<b.length&&!Q;var G=[],P,A=S(e),H=a.rotation;for(k&&(f=c.applyEllipsis(a,e,g,C));!k&&r&&(b.length||G.length);)a.rotation=0,P=c.getSpanWidth(a,e),g=P>C,void 0===f&&(f=g),
g&&1!==b.length?(e.removeChild(e.firstChild),G.unshift(b.pop())):(b=G,G=[],b.length&&!Q&&(e=n.createElementNS(O,"tspan"),l(e,{dy:A,x:y}),D&&l(e,"style",D),t.appendChild(e)),P>C&&(C=P)),b.length&&e.appendChild(n.createTextNode(b.join(" ").replace(/- /g,"-")));a.rotation=H}p++}}});v=v||t.childNodes.length}),f&&a.attr("title",a.textStr),r&&r.removeChild(t),A&&a.applyTextOutline&&a.applyTextOutline(A)):t.appendChild(n.createTextNode(J(x)))}},getContrast:function(a){a=k(a).rgba;return 510<a[0]+a[1]+a[2]?
"#000000":"#FFFFFF"},button:function(a,g,t,h,b,d,e,q,w){var x=this.label(a,g,t,w,null,null,null,null,"button"),M=0;x.attr(p({padding:8,r:2},b));var n,L,f,r;b=p({fill:"#f7f7f7",stroke:"#cccccc","stroke-width":1,style:{color:"#333333",cursor:"pointer",fontWeight:"normal"}},b);n=b.style;delete b.style;d=p(b,{fill:"#e6e6e6"},d);L=d.style;delete d.style;e=p(b,{fill:"#e6ebf5",style:{color:"#000000",fontWeight:"bold"}},e);f=e.style;delete e.style;q=p(b,{style:{color:"#cccccc"}},q);r=q.style;delete q.style;
F(x.element,K?"mouseover":"mouseenter",function(){3!==M&&x.setState(1)});F(x.element,K?"mouseout":"mouseleave",function(){3!==M&&x.setState(M)});x.setState=function(a){1!==a&&(x.state=M=a);x.removeClass(/highcharts-button-(normal|hover|pressed|disabled)/).addClass("highcharts-button-"+["normal","hover","pressed","disabled"][a||0]);x.attr([b,d,e,q][a||0]).css([n,L,f,r][a||0])};x.attr(b).css(c({cursor:"default"},n));return x.on("click",function(a){3!==M&&h.call(x,a)})},crispLine:function(a,g){a[1]===
a[4]&&(a[1]=a[4]=Math.round(a[1])-g%2/2);a[2]===a[5]&&(a[2]=a[5]=Math.round(a[2])+g%2/2);return a},path:function(a){var g={fill:"none"};H(a)?g.d=a:r(a)&&c(g,a);return this.createElement("path").attr(g)},circle:function(a,g,t){a=r(a)?a:{x:a,y:g,r:t};g=this.createElement("circle");g.xSetter=g.ySetter=function(a,g,t){t.setAttribute("c"+g,a)};return g.attr(a)},arc:function(a,g,t,c,b,h){r(a)?(c=a,g=c.y,t=c.r,a=c.x):c={innerR:c,start:b,end:h};a=this.symbol("arc",a,g,t,t,c);a.r=t;return a},rect:function(a,
g,t,c,b,h){b=r(a)?a.r:b;var d=this.createElement("rect");a=r(a)?a:void 0===a?{}:{x:a,y:g,width:Math.max(t,0),height:Math.max(c,0)};void 0!==h&&(a.strokeWidth=h,a=d.crisp(a));a.fill="none";b&&(a.r=b);d.rSetter=function(a,g,t){l(t,{rx:a,ry:a})};return d.attr(a)},setSize:function(a,g,t){var c=this.alignedObjects,b=c.length;this.width=a;this.height=g;for(this.boxWrapper.animate({width:a,height:g},{step:function(){this.attr({viewBox:"0 0 "+this.attr("width")+" "+this.attr("height")})},duration:I(t,!0)?
void 0:0});b--;)c[b].align()},g:function(a){var g=this.createElement("g");return a?g.attr({"class":"highcharts-"+a}):g},image:function(a,g,t,b,h){var d={preserveAspectRatio:"none"};1<arguments.length&&c(d,{x:g,y:t,width:b,height:h});d=this.createElement("image").attr(d);d.element.setAttributeNS?d.element.setAttributeNS("http://www.w3.org/1999/xlink","href",a):d.element.setAttribute("hc-svg-href",a);return d},symbol:function(a,g,t,h,d,p){var e=this,x,q=/^url\((.*?)\)$/,w=q.test(a),M=!w&&(this.symbols[a]?
a:"circle"),r=M&&this.symbols[M],y=v(g)&&r&&r.call(this.symbols,Math.round(g),Math.round(t),h,d,p),D,C;r?(x=this.path(y),x.attr("fill","none"),c(x,{symbolName:M,x:g,y:t,width:h,height:d}),p&&c(x,p)):w&&(D=a.match(q)[1],x=this.image(D),x.imgwidth=I(L[D]&&L[D].width,p&&p.width),x.imgheight=I(L[D]&&L[D].height,p&&p.height),C=function(){x.attr({width:x.width,height:x.height})},b(["width","height"],function(a){x[a+"Setter"]=function(a,g){var t={},c=this["img"+g],b="width"===g?"translateX":"translateY";
this[g]=a;v(c)&&(this.element&&this.element.setAttribute(g,c),this.alignByTranslate||(t[b]=((this[g]||0)-c)/2,this.attr(t)))}}),v(g)&&x.attr({x:g,y:t}),x.isImg=!0,v(x.imgwidth)&&v(x.imgheight)?C():(x.attr({width:0,height:0}),u("img",{onload:function(){var a=f[e.chartIndex];0===this.width&&(m(this,{position:"absolute",top:"-999em"}),n.body.appendChild(this));L[D]={width:this.width,height:this.height};x.imgwidth=this.width;x.imgheight=this.height;x.element&&C();this.parentNode&&this.parentNode.removeChild(this);
e.imgCount--;if(!e.imgCount&&a&&a.onload)a.onload()},src:D}),this.imgCount++));return x},symbols:{circle:function(a,g,t,c){return this.arc(a+t/2,g+c/2,t/2,c/2,{start:0,end:2*Math.PI,open:!1})},square:function(a,g,t,c){return["M",a,g,"L",a+t,g,a+t,g+c,a,g+c,"Z"]},triangle:function(a,g,t,c){return["M",a+t/2,g,"L",a+t,g+c,a,g+c,"Z"]},"triangle-down":function(a,g,t,c){return["M",a,g,"L",a+t,g,a+t/2,g+c,"Z"]},diamond:function(a,g,t,c){return["M",a+t/2,g,"L",a+t,g+c/2,a+t/2,g+c,a,g+c/2,"Z"]},arc:function(a,
g,t,c,b){var h=b.start,d=b.r||t,p=b.r||c||t,e=b.end-.001;t=b.innerR;c=I(b.open,.001>Math.abs(b.end-b.start-2*Math.PI));var x=Math.cos(h),q=Math.sin(h),w=Math.cos(e),e=Math.sin(e);b=.001>b.end-h-Math.PI?0:1;d=["M",a+d*x,g+p*q,"A",d,p,0,b,1,a+d*w,g+p*e];v(t)&&d.push(c?"M":"L",a+t*w,g+t*e,"A",t,t,0,b,0,a+t*x,g+t*q);d.push(c?"":"Z");return d},callout:function(a,g,t,c,b){var h=Math.min(b&&b.r||0,t,c),d=h+6,p=b&&b.anchorX;b=b&&b.anchorY;var e;e=["M",a+h,g,"L",a+t-h,g,"C",a+t,g,a+t,g,a+t,g+h,"L",a+t,g+c-
h,"C",a+t,g+c,a+t,g+c,a+t-h,g+c,"L",a+h,g+c,"C",a,g+c,a,g+c,a,g+c-h,"L",a,g+h,"C",a,g,a,g,a+h,g];p&&p>t?b>g+d&&b<g+c-d?e.splice(13,3,"L",a+t,b-6,a+t+6,b,a+t,b+6,a+t,g+c-h):e.splice(13,3,"L",a+t,c/2,p,b,a+t,c/2,a+t,g+c-h):p&&0>p?b>g+d&&b<g+c-d?e.splice(33,3,"L",a,b+6,a-6,b,a,b-6,a,g+h):e.splice(33,3,"L",a,c/2,p,b,a,c/2,a,g+h):b&&b>c&&p>a+d&&p<a+t-d?e.splice(23,3,"L",p+6,g+c,p,g+c+6,p-6,g+c,a+h,g+c):b&&0>b&&p>a+d&&p<a+t-d&&e.splice(3,3,"L",p-6,g,p,g-6,p+6,g,t-h,g);return e}},clipRect:function(g,t,c,
b){var h=a.uniqueKey(),d=this.createElement("clipPath").attr({id:h}).add(this.defs);g=this.rect(g,t,c,b,0).add(d);g.id=h;g.clipPath=d;g.count=0;return g},text:function(a,g,t,c){var b={};if(c&&(this.allowHTML||!this.forExport))return this.html(a,g,t);b.x=Math.round(g||0);t&&(b.y=Math.round(t));if(a||0===a)b.text=a;a=this.createElement("text").attr(b);c||(a.xSetter=function(a,g,t){var c=t.getElementsByTagName("tspan"),b,h=t.getAttribute(g),d;for(d=0;d<c.length;d++)b=c[d],b.getAttribute(g)===h&&b.setAttribute(g,
a);t.setAttribute(g,a)});return a},fontMetrics:function(a,t){a=a||t&&t.style&&t.style.fontSize||this.style&&this.style.fontSize;a=/px/.test(a)?g(a):/em/.test(a)?parseFloat(a)*(t?this.fontMetrics(null,t.parentNode).f:16):12;t=24>a?a+3:Math.round(1.2*a);return{h:t,b:Math.round(.8*t),f:a}},rotCorr:function(a,g,t){var c=a;g&&t&&(c=Math.max(c*Math.cos(g*d),4));return{x:-a/3*Math.sin(g*d),y:c}},label:function(g,h,d,e,q,w,M,n,L){var x=this,r=x.g("button"!==L&&"label"),f=r.text=x.text("",0,0,M).attr({zIndex:1}),
y,D,C=0,m=3,G=0,A,k,H,Q,u,l={},K,S,I=/^url\((.*?)\)$/.test(e),O=I,P,J,U,T;L&&r.addClass("highcharts-"+L);O=I;P=function(){return(K||0)%2/2};J=function(){var a=f.element.style,g={};D=(void 0===A||void 0===k||u)&&v(f.textStr)&&f.getBBox();r.width=(A||D.width||0)+2*m+G;r.height=(k||D.height||0)+2*m;S=m+x.fontMetrics(a&&a.fontSize,f).b;O&&(y||(r.box=y=x.symbols[e]||I?x.symbol(e):x.rect(),y.addClass(("button"===L?"":"highcharts-label-box")+(L?" highcharts-"+L+"-box":"")),y.add(r),a=P(),g.x=a,g.y=(n?-S:
0)+a),g.width=Math.round(r.width),g.height=Math.round(r.height),y.attr(c(g,l)),l={})};U=function(){var a=G+m,g;g=n?0:S;v(A)&&D&&("center"===u||"right"===u)&&(a+={center:.5,right:1}[u]*(A-D.width));if(a!==f.x||g!==f.y)f.attr("x",a),void 0!==g&&f.attr("y",g);f.x=a;f.y=g};T=function(a,g){y?y.attr(a,g):l[a]=g};r.onAdd=function(){f.add(r);r.attr({text:g||0===g?g:"",x:h,y:d});y&&v(q)&&r.attr({anchorX:q,anchorY:w})};r.widthSetter=function(g){A=a.isNumber(g)?g:null};r.heightSetter=function(a){k=a};r["text-alignSetter"]=
function(a){u=a};r.paddingSetter=function(a){v(a)&&a!==m&&(m=r.padding=a,U())};r.paddingLeftSetter=function(a){v(a)&&a!==G&&(G=a,U())};r.alignSetter=function(a){a={left:0,center:.5,right:1}[a];a!==C&&(C=a,D&&r.attr({x:H}))};r.textSetter=function(a){void 0!==a&&f.textSetter(a);J();U()};r["stroke-widthSetter"]=function(a,g){a&&(O=!0);K=this["stroke-width"]=a;T(g,a)};r.strokeSetter=r.fillSetter=r.rSetter=function(a,g){"r"!==g&&("fill"===g&&a&&(O=!0),r[g]=a);T(g,a)};r.anchorXSetter=function(a,g){q=r.anchorX=
a;T(g,Math.round(a)-P()-H)};r.anchorYSetter=function(a,g){w=r.anchorY=a;T(g,a-Q)};r.xSetter=function(a){r.x=a;C&&(a-=C*((A||D.width)+2*m));H=Math.round(a);r.attr("translateX",H)};r.ySetter=function(a){Q=r.y=Math.round(a);r.attr("translateY",Q)};var V=r.css;return c(r,{css:function(a){if(a){var g={};a=p(a);b(r.textProps,function(t){void 0!==a[t]&&(g[t]=a[t],delete a[t])});f.css(g)}return V.call(r,a)},getBBox:function(){return{width:D.width+2*m,height:D.height+2*m,x:D.x-m,y:D.y-m}},shadow:function(a){a&&
(J(),y&&y.shadow(a));return r},destroy:function(){t(r.element,"mouseenter");t(r.element,"mouseleave");f&&(f=f.destroy());y&&(y=y.destroy());z.prototype.destroy.call(r);r=x=J=U=T=null}})}});a.Renderer=B})(N);(function(a){var z=a.attr,B=a.createElement,F=a.css,E=a.defined,l=a.each,f=a.extend,k=a.isFirefox,m=a.isMS,u=a.isWebKit,v=a.pInt,d=a.SVGRenderer,e=a.win,n=a.wrap;f(a.SVGElement.prototype,{htmlCss:function(a){var c=this.element;if(c=a&&"SPAN"===c.tagName&&a.width)delete a.width,this.textWidth=c,
this.updateTransform();a&&"ellipsis"===a.textOverflow&&(a.whiteSpace="nowrap",a.overflow="hidden");this.styles=f(this.styles,a);F(this.element,a);return this},htmlGetBBox:function(){var a=this.element;return{x:a.offsetLeft,y:a.offsetTop,width:a.offsetWidth,height:a.offsetHeight}},htmlUpdateTransform:function(){if(this.added){var a=this.renderer,c=this.element,d=this.translateX||0,h=this.translateY||0,e=this.x||0,n=this.y||0,f=this.textAlign||"left",y={left:0,center:.5,right:1}[f],m=this.styles;F(c,
{marginLeft:d,marginTop:h});this.shadows&&l(this.shadows,function(a){F(a,{marginLeft:d+1,marginTop:h+1})});this.inverted&&l(c.childNodes,function(b){a.invertChild(b,c)});if("SPAN"===c.tagName){var r=this.rotation,A=v(this.textWidth),k=m&&m.whiteSpace,p=[r,f,c.innerHTML,this.textWidth,this.textAlign].join();p!==this.cTT&&(m=a.fontMetrics(c.style.fontSize).b,E(r)&&this.setSpanRotation(r,y,m),F(c,{width:"",whiteSpace:k||"nowrap"}),c.offsetWidth>A&&/[ \-]/.test(c.textContent||c.innerText)&&F(c,{width:A+
"px",display:"block",whiteSpace:k||"normal"}),this.getSpanCorrection(c.offsetWidth,m,y,r,f));F(c,{left:e+(this.xCorr||0)+"px",top:n+(this.yCorr||0)+"px"});u&&(m=c.offsetHeight);this.cTT=p}}else this.alignOnAdd=!0},setSpanRotation:function(a,c,d){var b={},e=this.renderer.getTransformKey();b[e]=b.transform="rotate("+a+"deg)";b[e+(k?"Origin":"-origin")]=b.transformOrigin=100*c+"% "+d+"px";F(this.element,b)},getSpanCorrection:function(a,c,d){this.xCorr=-a*d;this.yCorr=-c}});f(d.prototype,{getTransformKey:function(){return m&&
!/Edge/.test(e.navigator.userAgent)?"-ms-transform":u?"-webkit-transform":k?"MozTransform":e.opera?"-o-transform":""},html:function(a,c,d){var b=this.createElement("span"),e=b.element,q=b.renderer,m=q.isSVG,y=function(a,c){l(["opacity","visibility"],function(b){n(a,b+"Setter",function(a,b,d,h){a.call(this,b,d,h);c[d]=b})})};b.textSetter=function(a){a!==e.innerHTML&&delete this.bBox;e.innerHTML=this.textStr=a;b.htmlUpdateTransform()};m&&y(b,b.element.style);b.xSetter=b.ySetter=b.alignSetter=b.rotationSetter=
function(a,c){"align"===c&&(c="textAlign");b[c]=a;b.htmlUpdateTransform()};b.attr({text:a,x:Math.round(c),y:Math.round(d)}).css({fontFamily:this.style.fontFamily,fontSize:this.style.fontSize,position:"absolute"});e.style.whiteSpace="nowrap";b.css=b.htmlCss;m&&(b.add=function(a){var c,d=q.box.parentNode,h=[];if(this.parentGroup=a){if(c=a.div,!c){for(;a;)h.push(a),a=a.parentGroup;l(h.reverse(),function(a){function p(g,t){a[t]=g;e[q.getTransformKey()]="translate("+a.x+"px,"+a.y+"px)";a.doTransform=!0}
var e,w=z(a.element,"class");w&&(w={className:w});c=a.div=a.div||B("div",w,{position:"absolute",left:(a.translateX||0)+"px",top:(a.translateY||0)+"px",display:a.display,opacity:a.opacity,pointerEvents:a.styles&&a.styles.pointerEvents},c||d);e=c.style;f(a,{classSetter:function(a){this.element.setAttribute("class",a);c.className=a},on:function(){h[0].div&&b.on.apply({element:h[0].div},arguments);return a},translateXSetter:p,translateYSetter:p});y(a,e)})}}else c=d;c.appendChild(e);b.added=!0;b.alignOnAdd&&
b.htmlUpdateTransform();return b});return b}})})(N);(function(a){function z(){var f=a.defaultOptions.global,u=k.moment;if(f.timezone){if(u)return function(a){return-u.tz(a,f.timezone).utcOffset()};a.error(25)}return f.useUTC&&f.getTimezoneOffset}function B(){var m=a.defaultOptions.global,u,v=m.useUTC,d=v?"getUTC":"get",e=v?"setUTC":"set",n="Minutes Hours Day Date Month FullYear".split(" "),b=n.concat(["Milliseconds","Seconds"]);a.Date=u=m.Date||k.Date;u.hcTimezoneOffset=v&&m.timezoneOffset;u.hcGetTimezoneOffset=
z();u.hcMakeTime=function(a,b,h,d,e,n){var c;v?(c=u.UTC.apply(0,arguments),c+=E(c)):c=(new u(a,b,f(h,1),f(d,0),f(e,0),f(n,0))).getTime();return c};for(m=0;m<n.length;m++)u["hcGet"+n[m]]=d+n[m];for(m=0;m<b.length;m++)u["hcSet"+b[m]]=e+b[m]}var F=a.color,E=a.getTZOffset,l=a.merge,f=a.pick,k=a.win;a.defaultOptions={colors:"#7cb5ec #434348 #90ed7d #f7a35c #8085e9 #f15c80 #e4d354 #2b908f #f45b5b #91e8e1".split(" "),symbols:["circle","diamond","square","triangle","triangle-down"],lang:{loading:"Loading...",
months:"January February March April May June July August September October November December".split(" "),shortMonths:"Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(" "),weekdays:"Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),decimalPoint:".",numericSymbols:"kMGTPE".split(""),resetZoom:"Reset zoom",resetZoomTitle:"Reset zoom level 1:1",thousandsSep:" "},global:{useUTC:!0},chart:{borderRadius:0,defaultSeriesType:"line",ignoreHiddenSeries:!0,spacing:[10,10,15,10],resetZoomButton:{theme:{zIndex:20},
position:{align:"right",x:-10,y:10}},width:null,height:null,borderColor:"#335cad",backgroundColor:"#ffffff",plotBorderColor:"#cccccc"},title:{text:"Chart title",align:"center",margin:15,widthAdjust:-44},subtitle:{text:"",align:"center",widthAdjust:-44},plotOptions:{},labels:{style:{position:"absolute",color:"#333333"}},legend:{enabled:!0,align:"center",layout:"horizontal",labelFormatter:function(){return this.name},borderColor:"#999999",borderRadius:0,navigation:{activeColor:"#003399",inactiveColor:"#cccccc"},
itemStyle:{color:"#333333",fontSize:"12px",fontWeight:"bold",textOverflow:"ellipsis"},itemHoverStyle:{color:"#000000"},itemHiddenStyle:{color:"#cccccc"},shadow:!1,itemCheckboxStyle:{position:"absolute",width:"13px",height:"13px"},squareSymbol:!0,symbolPadding:5,verticalAlign:"bottom",x:0,y:0,title:{style:{fontWeight:"bold"}}},loading:{labelStyle:{fontWeight:"bold",position:"relative",top:"45%"},style:{position:"absolute",backgroundColor:"#ffffff",opacity:.5,textAlign:"center"}},tooltip:{enabled:!0,
animation:a.svg,borderRadius:3,dateTimeLabelFormats:{millisecond:"%A, %b %e, %H:%M:%S.%L",second:"%A, %b %e, %H:%M:%S",minute:"%A, %b %e, %H:%M",hour:"%A, %b %e, %H:%M",day:"%A, %b %e, %Y",week:"Week from %A, %b %e, %Y",month:"%B %Y",year:"%Y"},footerFormat:"",padding:8,snap:a.isTouchDevice?25:10,backgroundColor:F("#f7f7f7").setOpacity(.85).get(),borderWidth:1,headerFormat:'\x3cspan style\x3d"font-size: 10px"\x3e{point.key}\x3c/span\x3e\x3cbr/\x3e',pointFormat:'\x3cspan style\x3d"color:{point.color}"\x3e\u25cf\x3c/span\x3e {series.name}: \x3cb\x3e{point.y}\x3c/b\x3e\x3cbr/\x3e',
shadow:!0,style:{color:"#333333",cursor:"default",fontSize:"12px",pointerEvents:"none",whiteSpace:"nowrap"}},credits:{enabled:!0,href:"http://www.highcharts.com",position:{align:"right",x:-10,verticalAlign:"bottom",y:-5},style:{cursor:"pointer",color:"#999999",fontSize:"9px"},text:"Highcharts.com"}};a.setOptions=function(f){a.defaultOptions=l(!0,a.defaultOptions,f);B();return a.defaultOptions};a.getOptions=function(){return a.defaultOptions};a.defaultPlotOptions=a.defaultOptions.plotOptions;B()})(N);
(function(a){var z=a.correctFloat,B=a.defined,F=a.destroyObjectProperties,E=a.isNumber,l=a.merge,f=a.pick,k=a.deg2rad;a.Tick=function(a,f,k,d){this.axis=a;this.pos=f;this.type=k||"";this.isNewLabel=this.isNew=!0;k||d||this.addLabel()};a.Tick.prototype={addLabel:function(){var a=this.axis,k=a.options,v=a.chart,d=a.categories,e=a.names,n=this.pos,b=k.labels,c=a.tickPositions,q=n===c[0],h=n===c[c.length-1],e=d?f(d[n],e[n],n):n,d=this.label,c=c.info,w;a.isDatetimeAxis&&c&&(w=k.dateTimeLabelFormats[c.higherRanks[n]||
c.unitName]);this.isFirst=q;this.isLast=h;k=a.labelFormatter.call({axis:a,chart:v,isFirst:q,isLast:h,dateTimeLabelFormat:w,value:a.isLog?z(a.lin2log(e)):e,pos:n});B(d)?d&&d.attr({text:k}):(this.labelLength=(this.label=d=B(k)&&b.enabled?v.renderer.text(k,0,0,b.useHTML).css(l(b.style)).add(a.labelGroup):null)&&d.getBBox().width,this.rotation=0)},getLabelSize:function(){return this.label?this.label.getBBox()[this.axis.horiz?"height":"width"]:0},handleOverflow:function(a){var m=this.axis,l=a.x,d=m.chart.chartWidth,
e=m.chart.spacing,n=f(m.labelLeft,Math.min(m.pos,e[3])),e=f(m.labelRight,Math.max(m.pos+m.len,d-e[1])),b=this.label,c=this.rotation,q={left:0,center:.5,right:1}[m.labelAlign],h=b.getBBox().width,w=m.getSlotWidth(),G=w,H=1,y,K={};if(c)0>c&&l-q*h<n?y=Math.round(l/Math.cos(c*k)-n):0<c&&l+q*h>e&&(y=Math.round((d-l)/Math.cos(c*k)));else if(d=l+(1-q)*h,l-q*h<n?G=a.x+G*(1-q)-n:d>e&&(G=e-a.x+G*q,H=-1),G=Math.min(w,G),G<w&&"center"===m.labelAlign&&(a.x+=H*(w-G-q*(w-Math.min(h,G)))),h>G||m.autoRotation&&(b.styles||
{}).width)y=G;y&&(K.width=y,(m.options.labels.style||{}).textOverflow||(K.textOverflow="ellipsis"),b.css(K))},getPosition:function(a,f,k,d){var e=this.axis,n=e.chart,b=d&&n.oldChartHeight||n.chartHeight;return{x:a?e.translate(f+k,null,null,d)+e.transB:e.left+e.offset+(e.opposite?(d&&n.oldChartWidth||n.chartWidth)-e.right-e.left:0),y:a?b-e.bottom+e.offset-(e.opposite?e.height:0):b-e.translate(f+k,null,null,d)-e.transB}},getLabelPosition:function(a,f,l,d,e,n,b,c){var q=this.axis,h=q.transA,w=q.reversed,
m=q.staggerLines,H=q.tickRotCorr||{x:0,y:0},y=e.y;B(y)||(y=0===q.side?l.rotation?-8:-l.getBBox().height:2===q.side?H.y+8:Math.cos(l.rotation*k)*(H.y-l.getBBox(!1,0).height/2));a=a+e.x+H.x-(n&&d?n*h*(w?-1:1):0);f=f+y-(n&&!d?n*h*(w?1:-1):0);m&&(l=b/(c||1)%m,q.opposite&&(l=m-l-1),f+=q.labelOffset/m*l);return{x:a,y:Math.round(f)}},getMarkPath:function(a,f,k,d,e,n){return n.crispLine(["M",a,f,"L",a+(e?0:-k),f+(e?k:0)],d)},renderGridLine:function(a,f,k){var d=this.axis,e=d.options,n=this.gridLine,b={},
c=this.pos,q=this.type,h=d.tickmarkOffset,w=d.chart.renderer,m=q?q+"Grid":"grid",l=e[m+"LineWidth"],y=e[m+"LineColor"],e=e[m+"LineDashStyle"];n||(b.stroke=y,b["stroke-width"]=l,e&&(b.dashstyle=e),q||(b.zIndex=1),a&&(b.opacity=0),this.gridLine=n=w.path().attr(b).addClass("highcharts-"+(q?q+"-":"")+"grid-line").add(d.gridGroup));if(!a&&n&&(a=d.getPlotLinePath(c+h,n.strokeWidth()*k,a,!0)))n[this.isNew?"attr":"animate"]({d:a,opacity:f})},renderMark:function(a,k,l){var d=this.axis,e=d.options,n=d.chart.renderer,
b=this.type,c=b?b+"Tick":"tick",q=d.tickSize(c),h=this.mark,w=!h,m=a.x;a=a.y;var H=f(e[c+"Width"],!b&&d.isXAxis?1:0),e=e[c+"Color"];q&&(d.opposite&&(q[0]=-q[0]),w&&(this.mark=h=n.path().addClass("highcharts-"+(b?b+"-":"")+"tick").add(d.axisGroup),h.attr({stroke:e,"stroke-width":H})),h[w?"attr":"animate"]({d:this.getMarkPath(m,a,q[0],h.strokeWidth()*l,d.horiz,n),opacity:k}))},renderLabel:function(a,k,l,d){var e=this.axis,n=e.horiz,b=e.options,c=this.label,q=b.labels,h=q.step,w=e.tickmarkOffset,m=!0,
H=a.x;a=a.y;c&&E(H)&&(c.xy=a=this.getLabelPosition(H,a,c,n,q,w,d,h),this.isFirst&&!this.isLast&&!f(b.showFirstLabel,1)||this.isLast&&!this.isFirst&&!f(b.showLastLabel,1)?m=!1:!n||e.isRadial||q.step||q.rotation||k||0===l||this.handleOverflow(a),h&&d%h&&(m=!1),m&&E(a.y)?(a.opacity=l,c[this.isNewLabel?"attr":"animate"](a),this.isNewLabel=!1):(c.attr("y",-9999),this.isNewLabel=!0))},render:function(a,k,l){var d=this.axis,e=d.horiz,n=this.getPosition(e,this.pos,d.tickmarkOffset,k),b=n.x,c=n.y,d=e&&b===
d.pos+d.len||!e&&c===d.pos?-1:1;l=f(l,1);this.isActive=!0;this.renderGridLine(k,l,d);this.renderMark(n,l,d);this.renderLabel(n,k,l,a);this.isNew=!1},destroy:function(){F(this,this.axis)}}})(N);var W=function(a){var z=a.addEvent,B=a.animObject,F=a.arrayMax,E=a.arrayMin,l=a.color,f=a.correctFloat,k=a.defaultOptions,m=a.defined,u=a.deg2rad,v=a.destroyObjectProperties,d=a.each,e=a.extend,n=a.fireEvent,b=a.format,c=a.getMagnitude,q=a.grep,h=a.inArray,w=a.isArray,G=a.isNumber,H=a.isString,y=a.merge,K=a.normalizeTickInterval,
r=a.objectEach,A=a.pick,J=a.removeEvent,p=a.splat,C=a.syncTimeout,D=a.Tick,I=function(){this.init.apply(this,arguments)};a.extend(I.prototype,{defaultOptions:{dateTimeLabelFormats:{millisecond:"%H:%M:%S.%L",second:"%H:%M:%S",minute:"%H:%M",hour:"%H:%M",day:"%e. %b",week:"%e. %b",month:"%b '%y",year:"%Y"},endOnTick:!1,labels:{enabled:!0,style:{color:"#666666",cursor:"default",fontSize:"11px"},x:0},minPadding:.01,maxPadding:.01,minorTickLength:2,minorTickPosition:"outside",startOfWeek:1,startOnTick:!1,
tickLength:10,tickmarkPlacement:"between",tickPixelInterval:100,tickPosition:"outside",title:{align:"middle",style:{color:"#666666"}},type:"linear",minorGridLineColor:"#f2f2f2",minorGridLineWidth:1,minorTickColor:"#999999",lineColor:"#ccd6eb",lineWidth:1,gridLineColor:"#e6e6e6",tickColor:"#ccd6eb"},defaultYAxisOptions:{endOnTick:!0,tickPixelInterval:72,showLastLabel:!0,labels:{x:-8},maxPadding:.05,minPadding:.05,startOnTick:!0,title:{rotation:270,text:"Values"},stackLabels:{allowOverlap:!1,enabled:!1,
formatter:function(){return a.numberFormat(this.total,-1)},style:{fontSize:"11px",fontWeight:"bold",color:"#000000",textOutline:"1px contrast"}},gridLineWidth:1,lineWidth:0},defaultLeftAxisOptions:{labels:{x:-15},title:{rotation:270}},defaultRightAxisOptions:{labels:{x:15},title:{rotation:90}},defaultBottomAxisOptions:{labels:{autoRotation:[-45],x:0},title:{rotation:0}},defaultTopAxisOptions:{labels:{autoRotation:[-45],x:0},title:{rotation:0}},init:function(a,t){var g=t.isX,c=this;c.chart=a;c.horiz=
a.inverted&&!c.isZAxis?!g:g;c.isXAxis=g;c.coll=c.coll||(g?"xAxis":"yAxis");c.opposite=t.opposite;c.side=t.side||(c.horiz?c.opposite?0:2:c.opposite?1:3);c.setOptions(t);var b=this.options,d=b.type;c.labelFormatter=b.labels.formatter||c.defaultLabelFormatter;c.userOptions=t;c.minPixelPadding=0;c.reversed=b.reversed;c.visible=!1!==b.visible;c.zoomEnabled=!1!==b.zoomEnabled;c.hasNames="category"===d||!0===b.categories;c.categories=b.categories||c.hasNames;c.names=c.names||[];c.plotLinesAndBandsGroups=
{};c.isLog="logarithmic"===d;c.isDatetimeAxis="datetime"===d;c.positiveValuesOnly=c.isLog&&!c.allowNegativeLog;c.isLinked=m(b.linkedTo);c.ticks={};c.labelEdge=[];c.minorTicks={};c.plotLinesAndBands=[];c.alternateBands={};c.len=0;c.minRange=c.userMinRange=b.minRange||b.maxZoom;c.range=b.range;c.offset=b.offset||0;c.stacks={};c.oldStacks={};c.stacksTouched=0;c.max=null;c.min=null;c.crosshair=A(b.crosshair,p(a.options.tooltip.crosshairs)[g?0:1],!1);t=c.options.events;-1===h(c,a.axes)&&(g?a.axes.splice(a.xAxis.length,
0,c):a.axes.push(c),a[c.coll].push(c));c.series=c.series||[];a.inverted&&!c.isZAxis&&g&&void 0===c.reversed&&(c.reversed=!0);r(t,function(a,g){z(c,g,a)});c.lin2log=b.linearToLogConverter||c.lin2log;c.isLog&&(c.val2lin=c.log2lin,c.lin2val=c.lin2log)},setOptions:function(a){this.options=y(this.defaultOptions,"yAxis"===this.coll&&this.defaultYAxisOptions,[this.defaultTopAxisOptions,this.defaultRightAxisOptions,this.defaultBottomAxisOptions,this.defaultLeftAxisOptions][this.side],y(k[this.coll],a))},
defaultLabelFormatter:function(){var g=this.axis,t=this.value,c=g.categories,d=this.dateTimeLabelFormat,h=k.lang,e=h.numericSymbols,h=h.numericSymbolMagnitude||1E3,p=e&&e.length,q,f=g.options.labels.format,g=g.isLog?Math.abs(t):g.tickInterval;if(f)q=b(f,this);else if(c)q=t;else if(d)q=a.dateFormat(d,t);else if(p&&1E3<=g)for(;p--&&void 0===q;)c=Math.pow(h,p+1),g>=c&&0===10*t%c&&null!==e[p]&&0!==t&&(q=a.numberFormat(t/c,-1)+e[p]);void 0===q&&(q=1E4<=Math.abs(t)?a.numberFormat(t,-1):a.numberFormat(t,
-1,void 0,""));return q},getSeriesExtremes:function(){var a=this,t=a.chart;a.hasVisibleSeries=!1;a.dataMin=a.dataMax=a.threshold=null;a.softThreshold=!a.isXAxis;a.buildStacks&&a.buildStacks();d(a.series,function(g){if(g.visible||!t.options.chart.ignoreHiddenSeries){var c=g.options,b=c.threshold,d;a.hasVisibleSeries=!0;a.positiveValuesOnly&&0>=b&&(b=null);if(a.isXAxis)c=g.xData,c.length&&(g=E(c),G(g)||g instanceof Date||(c=q(c,function(a){return G(a)}),g=E(c)),a.dataMin=Math.min(A(a.dataMin,c[0]),
g),a.dataMax=Math.max(A(a.dataMax,c[0]),F(c)));else if(g.getExtremes(),d=g.dataMax,g=g.dataMin,m(g)&&m(d)&&(a.dataMin=Math.min(A(a.dataMin,g),g),a.dataMax=Math.max(A(a.dataMax,d),d)),m(b)&&(a.threshold=b),!c.softThreshold||a.positiveValuesOnly)a.softThreshold=!1}})},translate:function(a,c,b,d,h,p){var g=this.linkedParent||this,t=1,e=0,q=d?g.oldTransA:g.transA;d=d?g.oldMin:g.min;var f=g.minPixelPadding;h=(g.isOrdinal||g.isBroken||g.isLog&&h)&&g.lin2val;q||(q=g.transA);b&&(t*=-1,e=g.len);g.reversed&&
(t*=-1,e-=t*(g.sector||g.len));c?(a=(a*t+e-f)/q+d,h&&(a=g.lin2val(a))):(h&&(a=g.val2lin(a)),a=G(d)?t*(a-d)*q+e+t*f+(G(p)?q*p:0):void 0);return a},toPixels:function(a,c){return this.translate(a,!1,!this.horiz,null,!0)+(c?0:this.pos)},toValue:function(a,c){return this.translate(a-(c?0:this.pos),!0,!this.horiz,null,!0)},getPlotLinePath:function(a,c,b,d,h){var g=this.chart,t=this.left,e=this.top,p,q,f=b&&g.oldChartHeight||g.chartHeight,w=b&&g.oldChartWidth||g.chartWidth,r;p=this.transB;var n=function(a,
g,c){if(a<g||a>c)d?a=Math.min(Math.max(g,a),c):r=!0;return a};h=A(h,this.translate(a,null,null,b));a=b=Math.round(h+p);p=q=Math.round(f-h-p);G(h)?this.horiz?(p=e,q=f-this.bottom,a=b=n(a,t,t+this.width)):(a=t,b=w-this.right,p=q=n(p,e,e+this.height)):(r=!0,d=!1);return r&&!d?null:g.renderer.crispLine(["M",a,p,"L",b,q],c||1)},getLinearTickPositions:function(a,c,b){var g,t=f(Math.floor(c/a)*a);b=f(Math.ceil(b/a)*a);var d=[];if(this.single)return[c];for(c=t;c<=b;){d.push(c);c=f(c+a);if(c===g)break;g=c}return d},
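/* Tick helpers: getLinearTickPositions above; getMinorTickInterval/getMinorTickPositions derive minor ticks, and adjustForMinRange enforces the axis minRange. */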
getMinorTickInterval:function(){var a=this.options;return!0===a.minorTicks?A(a.minorTickInterval,"auto"):!1===a.minorTicks?null:a.minorTickInterval},getMinorTickPositions:function(){var a=this,c=a.options,b=a.tickPositions,h=a.minorTickInterval,p=[],e=a.pointRangePadding||0,q=a.min-e,e=a.max+e,f=e-q;if(f&&f/h<a.len/3)if(a.isLog)d(this.paddedTicks,function(g,c,t){c&&p.push.apply(p,a.getLogTickPositions(h,t[c-1],t[c],!0))});else if(a.isDatetimeAxis&&"auto"===this.getMinorTickInterval())p=p.concat(a.getTimeTicks(a.normalizeTimeTickInterval(h),
q,e,c.startOfWeek));else for(c=q+(b[0]-q)%h;c<=e&&c!==p[0];c+=h)p.push(c);0!==p.length&&a.trimTicks(p);return p},adjustForMinRange:function(){var a=this.options,c=this.min,b=this.max,h,p,e,q,f,w,r,n;this.isXAxis&&void 0===this.minRange&&!this.isLog&&(m(a.min)||m(a.max)?this.minRange=null:(d(this.series,function(a){w=a.xData;for(q=r=a.xIncrement?1:w.length-1;0<q;q--)if(f=w[q]-w[q-1],void 0===e||f<e)e=f}),this.minRange=Math.min(5*e,this.dataMax-this.dataMin)));b-c<this.minRange&&(p=this.dataMax-this.dataMin>=
this.minRange,n=this.minRange,h=(n-b+c)/2,h=[c-h,A(a.min,c-h)],p&&(h[2]=this.isLog?this.log2lin(this.dataMin):this.dataMin),c=F(h),b=[c+n,A(a.max,c+n)],p&&(b[2]=this.isLog?this.log2lin(this.dataMax):this.dataMax),b=E(b),b-c<n&&(h[0]=b-n,h[1]=A(a.min,b-n),c=F(h)));this.min=c;this.max=b},getClosest:function(){var a;this.categories?a=1:d(this.series,function(g){var c=g.closestPointRange,t=g.visible||!g.chart.options.chart.ignoreHiddenSeries;!g.noSharedTooltip&&m(c)&&t&&(a=m(a)?Math.min(a,c):c)});return a},
nameToX:function(a){var g=w(this.categories),c=g?this.categories:this.names,b=a.options.x,d;a.series.requireSorting=!1;m(b)||(b=!1===this.options.uniqueNames?a.series.autoIncrement():h(a.name,c));-1===b?g||(d=c.length):d=b;void 0!==d&&(this.names[d]=a.name);return d},updateNames:function(){var a=this;0<this.names.length&&(this.names.length=0,this.minRange=this.userMinRange,d(this.series||[],function(g){g.xIncrement=null;if(!g.points||g.isDirtyData)g.processData(),g.generatePoints();d(g.points,function(c,
t){var b;c.options&&(b=a.nameToX(c),void 0!==b&&b!==c.x&&(c.x=b,g.xData[t]=b))})}))},setAxisTranslation:function(a){var g=this,c=g.max-g.min,b=g.axisPointRange||0,h,p=0,e=0,q=g.linkedParent,f=!!g.categories,w=g.transA,n=g.isXAxis;if(n||f||b)h=g.getClosest(),q?(p=q.minPointOffset,e=q.pointRangePadding):d(g.series,function(a){var c=f?1:n?A(a.options.pointRange,h,0):g.axisPointRange||0;a=a.options.pointPlacement;b=Math.max(b,c);g.single||(p=Math.max(p,H(a)?0:c/2),e=Math.max(e,"on"===a?0:c))}),q=g.ordinalSlope&&
h?g.ordinalSlope/h:1,g.minPointOffset=p*=q,g.pointRangePadding=e*=q,g.pointRange=Math.min(b,c),n&&(g.closestPointRange=h);a&&(g.oldTransA=w);g.translationSlope=g.transA=w=g.options.staticScale||g.len/(c+e||1);g.transB=g.horiz?g.left:g.bottom;g.minPixelPadding=w*p},minFromRange:function(){return this.max-this.range},setTickInterval:function(g){var b=this,h=b.chart,p=b.options,e=b.isLog,q=b.log2lin,w=b.isDatetimeAxis,r=b.isXAxis,y=b.isLinked,D=p.maxPadding,C=p.minPadding,k=p.tickInterval,l=p.tickPixelInterval,
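/* setTickInterval: resolves min/max from user options, linked axes, thresholds and padding, then picks a tick interval (normalized unless the axis is datetime or logarithmic). */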
H=b.categories,I=b.threshold,u=b.softThreshold,v,J,z,B;w||H||y||this.getTickAmount();z=A(b.userMin,p.min);B=A(b.userMax,p.max);y?(b.linkedParent=h[b.coll][p.linkedTo],h=b.linkedParent.getExtremes(),b.min=A(h.min,h.dataMin),b.max=A(h.max,h.dataMax),p.type!==b.linkedParent.options.type&&a.error(11,1)):(!u&&m(I)&&(b.dataMin>=I?(v=I,C=0):b.dataMax<=I&&(J=I,D=0)),b.min=A(z,v,b.dataMin),b.max=A(B,J,b.dataMax));e&&(b.positiveValuesOnly&&!g&&0>=Math.min(b.min,A(b.dataMin,b.min))&&a.error(10,1),b.min=f(q(b.min),
15),b.max=f(q(b.max),15));b.range&&m(b.max)&&(b.userMin=b.min=z=Math.max(b.dataMin,b.minFromRange()),b.userMax=B=b.max,b.range=null);n(b,"foundExtremes");b.beforePadding&&b.beforePadding();b.adjustForMinRange();!(H||b.axisPointRange||b.usePercentage||y)&&m(b.min)&&m(b.max)&&(q=b.max-b.min)&&(!m(z)&&C&&(b.min-=q*C),!m(B)&&D&&(b.max+=q*D));G(p.softMin)&&(b.min=Math.min(b.min,p.softMin));G(p.softMax)&&(b.max=Math.max(b.max,p.softMax));G(p.floor)&&(b.min=Math.max(b.min,p.floor));G(p.ceiling)&&(b.max=
Math.min(b.max,p.ceiling));u&&m(b.dataMin)&&(I=I||0,!m(z)&&b.min<I&&b.dataMin>=I?b.min=I:!m(B)&&b.max>I&&b.dataMax<=I&&(b.max=I));b.tickInterval=b.min===b.max||void 0===b.min||void 0===b.max?1:y&&!k&&l===b.linkedParent.options.tickPixelInterval?k=b.linkedParent.tickInterval:A(k,this.tickAmount?(b.max-b.min)/Math.max(this.tickAmount-1,1):void 0,H?1:(b.max-b.min)*l/Math.max(b.len,l));r&&!g&&d(b.series,function(a){a.processData(b.min!==b.oldMin||b.max!==b.oldMax)});b.setAxisTranslation(!0);b.beforeSetTickPositions&&
b.beforeSetTickPositions();b.postProcessTickInterval&&(b.tickInterval=b.postProcessTickInterval(b.tickInterval));b.pointRange&&!k&&(b.tickInterval=Math.max(b.pointRange,b.tickInterval));g=A(p.minTickInterval,b.isDatetimeAxis&&b.closestPointRange);!k&&b.tickInterval<g&&(b.tickInterval=g);w||e||k||(b.tickInterval=K(b.tickInterval,null,c(b.tickInterval),A(p.allowDecimals,!(.5<b.tickInterval&&5>b.tickInterval&&1E3<b.max&&9999>b.max)),!!this.tickAmount));this.tickAmount||(b.tickInterval=b.unsquish());
this.setTickPositions()},setTickPositions:function(){var a=this.options,b,c=a.tickPositions;b=this.getMinorTickInterval();var h=a.tickPositioner,d=a.startOnTick,p=a.endOnTick;this.tickmarkOffset=this.categories&&"between"===a.tickmarkPlacement&&1===this.tickInterval?.5:0;this.minorTickInterval="auto"===b&&this.tickInterval?this.tickInterval/5:b;this.single=this.min===this.max&&m(this.min)&&!this.tickAmount&&(parseInt(this.min,10)===this.min||!1!==a.allowDecimals);this.tickPositions=b=c&&c.slice();
!b&&(b=this.isDatetimeAxis?this.getTimeTicks(this.normalizeTimeTickInterval(this.tickInterval,a.units),this.min,this.max,a.startOfWeek,this.ordinalPositions,this.closestPointRange,!0):this.isLog?this.getLogTickPositions(this.tickInterval,this.min,this.max):this.getLinearTickPositions(this.tickInterval,this.min,this.max),b.length>this.len&&(b=[b[0],b.pop()]),this.tickPositions=b,h&&(h=h.apply(this,[this.min,this.max])))&&(this.tickPositions=b=h);this.paddedTicks=b.slice(0);this.trimTicks(b,d,p);this.isLinked||
(this.single&&2>b.length&&(this.min-=.5,this.max+=.5),c||h||this.adjustTickAmount())},trimTicks:function(a,b,c){var g=a[0],h=a[a.length-1],d=this.minPointOffset||0;if(!this.isLinked){if(b&&-Infinity!==g)this.min=g;else for(;this.min-d>a[0];)a.shift();if(c)this.max=h;else for(;this.max+d<a[a.length-1];)a.pop();0===a.length&&m(g)&&a.push((h+g)/2)}},alignToOthers:function(){var a={},b,c=this.options;!1===this.chart.options.chart.alignTicks||!1===c.alignTicks||this.isLog||d(this.chart[this.coll],function(g){var c=
g.options,c=[g.horiz?c.left:c.top,c.width,c.height,c.pane].join();g.series.length&&(a[c]?b=!0:a[c]=1)});return b},getTickAmount:function(){var a=this.options,b=a.tickAmount,c=a.tickPixelInterval;!m(a.tickInterval)&&this.len<c&&!this.isRadial&&!this.isLog&&a.startOnTick&&a.endOnTick&&(b=2);!b&&this.alignToOthers()&&(b=Math.ceil(this.len/c)+1);4>b&&(this.finalTickAmt=b,b=5);this.tickAmount=b},adjustTickAmount:function(){var a=this.tickInterval,b=this.tickPositions,c=this.tickAmount,h=this.finalTickAmt,
d=b&&b.length;if(d<c){for(;b.length<c;)b.push(f(b[b.length-1]+a));this.transA*=(d-1)/(c-1);this.max=b[b.length-1]}else d>c&&(this.tickInterval*=2,this.setTickPositions());if(m(h)){for(a=c=b.length;a--;)(3===h&&1===a%2||2>=h&&0<a&&a<c-1)&&b.splice(a,1);this.finalTickAmt=void 0}},setScale:function(){var a,b;this.oldMin=this.min;this.oldMax=this.max;this.oldAxisLength=this.len;this.setAxisSize();b=this.len!==this.oldAxisLength;d(this.series,function(g){if(g.isDirtyData||g.isDirty||g.xAxis.isDirty)a=
!0});b||a||this.isLinked||this.forceRedraw||this.userMin!==this.oldUserMin||this.userMax!==this.oldUserMax||this.alignToOthers()?(this.resetStacks&&this.resetStacks(),this.forceRedraw=!1,this.getSeriesExtremes(),this.setTickInterval(),this.oldUserMin=this.userMin,this.oldUserMax=this.userMax,this.isDirty||(this.isDirty=b||this.min!==this.oldMin||this.max!==this.oldMax)):this.cleanStacks&&this.cleanStacks()},setExtremes:function(a,b,c,h,p){var g=this,t=g.chart;c=A(c,!0);d(g.series,function(a){delete a.kdTree});
p=e(p,{min:a,max:b});n(g,"setExtremes",p,function(){g.userMin=a;g.userMax=b;g.eventArgs=p;c&&t.redraw(h)})},zoom:function(a,b){var g=this.dataMin,c=this.dataMax,h=this.options,d=Math.min(g,A(h.min,g)),h=Math.max(c,A(h.max,c));if(a!==this.min||b!==this.max)this.allowZoomOutside||(m(g)&&(a<d&&(a=d),a>h&&(a=h)),m(c)&&(b<d&&(b=d),b>h&&(b=h))),this.displayBtn=void 0!==a||void 0!==b,this.setExtremes(a,b,!1,void 0,{trigger:"zoom"});return!0},setAxisSize:function(){var b=this.chart,c=this.options,h=c.offsets||
[0,0,0,0],d=this.horiz,p=this.width=Math.round(a.relativeLength(A(c.width,b.plotWidth-h[3]+h[1]),b.plotWidth)),e=this.height=Math.round(a.relativeLength(A(c.height,b.plotHeight-h[0]+h[2]),b.plotHeight)),q=this.top=Math.round(a.relativeLength(A(c.top,b.plotTop+h[0]),b.plotHeight,b.plotTop)),c=this.left=Math.round(a.relativeLength(A(c.left,b.plotLeft+h[3]),b.plotWidth,b.plotLeft));this.bottom=b.chartHeight-e-q;this.right=b.chartWidth-p-c;this.len=Math.max(d?p:e,0);this.pos=d?c:q},getExtremes:function(){var a=
this.isLog,b=this.lin2log;return{min:a?f(b(this.min)):this.min,max:a?f(b(this.max)):this.max,dataMin:this.dataMin,dataMax:this.dataMax,userMin:this.userMin,userMax:this.userMax}},getThreshold:function(a){var b=this.isLog,c=this.lin2log,g=b?c(this.min):this.min,b=b?c(this.max):this.max;null===a?a=g:g>a?a=g:b<a&&(a=b);return this.translate(a,0,1,0,1)},autoLabelAlign:function(a){a=(A(a,0)-90*this.side+720)%360;return 15<a&&165>a?"right":195<a&&345>a?"left":"center"},tickSize:function(a){var b=this.options,
c=b[a+"Length"],g=A(b[a+"Width"],"tick"===a&&this.isXAxis?1:0);if(g&&c)return"inside"===b[a+"Position"]&&(c=-c),[c,g]},labelMetrics:function(){var a=this.tickPositions&&this.tickPositions[0]||0;return this.chart.renderer.fontMetrics(this.options.labels.style&&this.options.labels.style.fontSize,this.ticks[a]&&this.ticks[a].label)},unsquish:function(){var a=this.options.labels,b=this.horiz,c=this.tickInterval,h=c,p=this.len/(((this.categories?1:0)+this.max-this.min)/c),e,q=a.rotation,f=this.labelMetrics(),
w,n=Number.MAX_VALUE,r,y=function(a){a/=p||1;a=1<a?Math.ceil(a):1;return a*c};b?(r=!a.staggerLines&&!a.step&&(m(q)?[q]:p<A(a.autoRotationLimit,80)&&a.autoRotation))&&d(r,function(a){var b;if(a===q||a&&-90<=a&&90>=a)w=y(Math.abs(f.h/Math.sin(u*a))),b=w+Math.abs(a/360),b<n&&(n=b,e=a,h=w)}):a.step||(h=y(f.h));this.autoRotation=r;this.labelRotation=A(e,q);return h},getSlotWidth:function(){var a=this.chart,b=this.horiz,c=this.options.labels,h=Math.max(this.tickPositions.length-(this.categories?0:1),1),
d=a.margin[3];return b&&2>(c.step||0)&&!c.rotation&&(this.staggerLines||1)*this.len/h||!b&&(c.style&&parseInt(c.style.width,10)||d&&d-a.spacing[3]||.33*a.chartWidth)},renderUnsquish:function(){var a=this.chart,b=a.renderer,c=this.tickPositions,h=this.ticks,p=this.options.labels,e=this.horiz,q=this.getSlotWidth(),f=Math.max(1,Math.round(q-2*(p.padding||5))),w={},n=this.labelMetrics(),r=p.style&&p.style.textOverflow,D,C=0,m,k;H(p.rotation)||(w.rotation=p.rotation||0);d(c,function(a){(a=h[a])&&a.labelLength>
C&&(C=a.labelLength)});this.maxLabelLength=C;if(this.autoRotation)C>f&&C>n.h?w.rotation=this.labelRotation:this.labelRotation=0;else if(q&&(D={width:f+"px"},!r))for(D.textOverflow="clip",m=c.length;!e&&m--;)if(k=c[m],f=h[k].label)f.styles&&"ellipsis"===f.styles.textOverflow?f.css({textOverflow:"clip"}):h[k].labelLength>q&&f.css({width:q+"px"}),f.getBBox().height>this.len/c.length-(n.h-n.f)&&(f.specCss={textOverflow:"ellipsis"});w.rotation&&(D={width:(C>.5*a.chartHeight?.33*a.chartHeight:a.chartHeight)+
"px"},r||(D.textOverflow="ellipsis"));if(this.labelAlign=p.align||this.autoLabelAlign(this.labelRotation))w.align=this.labelAlign;d(c,function(a){var b=(a=h[a])&&a.label;b&&(b.attr(w),D&&b.css(y(D,b.specCss)),delete b.specCss,a.rotation=w.rotation)});this.tickRotCorr=b.rotCorr(n.b,this.labelRotation||0,0!==this.side)},hasData:function(){return this.hasVisibleSeries||m(this.min)&&m(this.max)&&!!this.tickPositions},addTitle:function(a){var b=this.chart.renderer,c=this.horiz,g=this.opposite,h=this.options.title,
d;this.axisTitle||((d=h.textAlign)||(d=(c?{low:"left",middle:"center",high:"right"}:{low:g?"right":"left",middle:"center",high:g?"left":"right"})[h.align]),this.axisTitle=b.text(h.text,0,0,h.useHTML).attr({zIndex:7,rotation:h.rotation||0,align:d}).addClass("highcharts-axis-title").css(h.style).add(this.axisGroup),this.axisTitle.isNew=!0);h.style.width||this.isRadial||this.axisTitle.css({width:this.len});this.axisTitle[a?"show":"hide"](!0)},generateTick:function(a){var b=this.ticks;b[a]?b[a].addLabel():
b[a]=new D(this,a)},getOffset:function(){var a=this,b=a.chart,c=b.renderer,h=a.options,p=a.tickPositions,e=a.ticks,q=a.horiz,f=a.side,w=b.inverted&&!a.isZAxis?[1,0,3,2][f]:f,n,y,D=0,C,k=0,G=h.title,l=h.labels,H=0,I=b.axisOffset,b=b.clipOffset,u=[-1,1,1,-1][f],K=h.className,v=a.axisParent,J=this.tickSize("tick");n=a.hasData();a.showAxis=y=n||A(h.showEmpty,!0);a.staggerLines=a.horiz&&l.staggerLines;a.axisGroup||(a.gridGroup=c.g("grid").attr({zIndex:h.gridZIndex||1}).addClass("highcharts-"+this.coll.toLowerCase()+
"-grid "+(K||"")).add(v),a.axisGroup=c.g("axis").attr({zIndex:h.zIndex||2}).addClass("highcharts-"+this.coll.toLowerCase()+" "+(K||"")).add(v),a.labelGroup=c.g("axis-labels").attr({zIndex:l.zIndex||7}).addClass("highcharts-"+a.coll.toLowerCase()+"-labels "+(K||"")).add(v));n||a.isLinked?(d(p,function(b,c){a.generateTick(b,c)}),a.renderUnsquish(),!1===l.reserveSpace||0!==f&&2!==f&&{1:"left",3:"right"}[f]!==a.labelAlign&&"center"!==a.labelAlign||d(p,function(a){H=Math.max(e[a].getLabelSize(),H)}),a.staggerLines&&
(H*=a.staggerLines,a.labelOffset=H*(a.opposite?-1:1))):r(e,function(a,b){a.destroy();delete e[b]});G&&G.text&&!1!==G.enabled&&(a.addTitle(y),y&&!1!==G.reserveSpace&&(a.titleOffset=D=a.axisTitle.getBBox()[q?"height":"width"],C=G.offset,k=m(C)?0:A(G.margin,q?5:10)));a.renderLine();a.offset=u*A(h.offset,I[f]);a.tickRotCorr=a.tickRotCorr||{x:0,y:0};c=0===f?-a.labelMetrics().h:2===f?a.tickRotCorr.y:0;k=Math.abs(H)+k;H&&(k=k-c+u*(q?A(l.y,a.tickRotCorr.y+8*u):l.x));a.axisTitleMargin=A(C,k);I[f]=Math.max(I[f],
a.axisTitleMargin+D+u*a.offset,k,n&&p.length&&J?J[0]+u*a.offset:0);h=h.offset?0:2*Math.floor(a.axisLine.strokeWidth()/2);b[w]=Math.max(b[w],h)},getLinePath:function(a){var b=this.chart,c=this.opposite,g=this.offset,h=this.horiz,d=this.left+(c?this.width:0)+g,g=b.chartHeight-this.bottom-(c?this.height:0)+g;c&&(a*=-1);return b.renderer.crispLine(["M",h?this.left:d,h?g:this.top,"L",h?b.chartWidth-this.right:d,h?g:b.chartHeight-this.bottom],a)},renderLine:function(){this.axisLine||(this.axisLine=this.chart.renderer.path().addClass("highcharts-axis-line").add(this.axisGroup),
this.axisLine.attr({stroke:this.options.lineColor,"stroke-width":this.options.lineWidth,zIndex:7}))},getTitlePosition:function(){var a=this.horiz,b=this.left,c=this.top,h=this.len,d=this.options.title,p=a?b:c,e=this.opposite,q=this.offset,f=d.x||0,w=d.y||0,n=this.axisTitle,r=this.chart.renderer.fontMetrics(d.style&&d.style.fontSize,n),n=Math.max(n.getBBox(null,0).height-r.h-1,0),h={low:p+(a?0:h),middle:p+h/2,high:p+(a?h:0)}[d.align],b=(a?c+this.height:b)+(a?1:-1)*(e?-1:1)*this.axisTitleMargin+[-n,
n,r.f,-n][this.side];return{x:a?h+f:b+(e?this.width:0)+q+f,y:a?b+w-(e?this.height:0)+q:h+w}},renderMinorTick:function(a){var b=this.chart.hasRendered&&G(this.oldMin),c=this.minorTicks;c[a]||(c[a]=new D(this,a,"minor"));b&&c[a].isNew&&c[a].render(null,!0);c[a].render(null,!1,1)},renderTick:function(a,b){var c=this.isLinked,g=this.ticks,h=this.chart.hasRendered&&G(this.oldMin);if(!c||a>=this.min&&a<=this.max)g[a]||(g[a]=new D(this,a)),h&&g[a].isNew&&g[a].render(b,!0,.1),g[a].render(b)},render:function(){var b=
this,c=b.chart,h=b.options,p=b.isLog,e=b.lin2log,q=b.isLinked,f=b.tickPositions,w=b.axisTitle,n=b.ticks,y=b.minorTicks,k=b.alternateBands,m=h.stackLabels,l=h.alternateGridColor,A=b.tickmarkOffset,H=b.axisLine,I=b.showAxis,u=B(c.renderer.globalAnimation),K,v;b.labelEdge.length=0;b.overlap=!1;d([n,y,k],function(a){r(a,function(a){a.isActive=!1})});if(b.hasData()||q)b.minorTickInterval&&!b.categories&&d(b.getMinorTickPositions(),function(a){b.renderMinorTick(a)}),f.length&&(d(f,function(a,c){b.renderTick(a,
c)}),A&&(0===b.min||b.single)&&(n[-1]||(n[-1]=new D(b,-1,null,!0)),n[-1].render(-1))),l&&d(f,function(g,h){v=void 0!==f[h+1]?f[h+1]+A:b.max-A;0===h%2&&g<b.max&&v<=b.max+(c.polar?-A:A)&&(k[g]||(k[g]=new a.PlotLineOrBand(b)),K=g+A,k[g].options={from:p?e(K):K,to:p?e(v):v,color:l},k[g].render(),k[g].isActive=!0)}),b._addedPlotLB||(d((h.plotLines||[]).concat(h.plotBands||[]),function(a){b.addPlotBandOrLine(a)}),b._addedPlotLB=!0);d([n,y,k],function(a){var b,g=[],h=u.duration;r(a,function(a,b){a.isActive||
(a.render(b,!1,0),a.isActive=!1,g.push(b))});C(function(){for(b=g.length;b--;)a[g[b]]&&!a[g[b]].isActive&&(a[g[b]].destroy(),delete a[g[b]])},a!==k&&c.hasRendered&&h?h:0)});H&&(H[H.isPlaced?"animate":"attr"]({d:this.getLinePath(H.strokeWidth())}),H.isPlaced=!0,H[I?"show":"hide"](!0));w&&I&&(h=b.getTitlePosition(),G(h.y)?(w[w.isNew?"attr":"animate"](h),w.isNew=!1):(w.attr("y",-9999),w.isNew=!0));m&&m.enabled&&b.renderStackTotals();b.isDirty=!1},redraw:function(){this.visible&&(this.render(),d(this.plotLinesAndBands,
function(a){a.render()}));d(this.series,function(a){a.isDirty=!0})},keepProps:"extKey hcEvents names series userMax userMin".split(" "),destroy:function(a){var b=this,c=b.stacks,g=b.plotLinesAndBands,p;a||J(b);r(c,function(a,b){v(a);c[b]=null});d([b.ticks,b.minorTicks,b.alternateBands],function(a){v(a)});if(g)for(a=g.length;a--;)g[a].destroy();d("stackTotalGroup axisLine axisTitle axisGroup gridGroup labelGroup cross".split(" "),function(a){b[a]&&(b[a]=b[a].destroy())});for(p in b.plotLinesAndBandsGroups)b.plotLinesAndBandsGroups[p]=
b.plotLinesAndBandsGroups[p].destroy();r(b,function(a,c){-1===h(c,b.keepProps)&&delete b[c]})},drawCrosshair:function(a,b){var c,g=this.crosshair,h=A(g.snap,!0),d,p=this.cross;a||(a=this.cross&&this.cross.e);this.crosshair&&!1!==(m(b)||!h)?(h?m(b)&&(d=this.isXAxis?b.plotX:this.len-b.plotY):d=a&&(this.horiz?a.chartX-this.pos:this.len-a.chartY+this.pos),m(d)&&(c=this.getPlotLinePath(b&&(this.isXAxis?b.x:A(b.stackY,b.y)),null,null,null,d)||null),m(c)?(b=this.categories&&!this.isRadial,p||(this.cross=
p=this.chart.renderer.path().addClass("highcharts-crosshair highcharts-crosshair-"+(b?"category ":"thin ")+g.className).attr({zIndex:A(g.zIndex,2)}).add(),p.attr({stroke:g.color||(b?l("#ccd6eb").setOpacity(.25).get():"#cccccc"),"stroke-width":A(g.width,1)}).css({"pointer-events":"none"}),g.dashStyle&&p.attr({dashstyle:g.dashStyle})),p.show().attr({d:c}),b&&!g.width&&p.attr({"stroke-width":this.transA}),this.cross.e=a):this.hideCrosshair()):this.hideCrosshair()},hideCrosshair:function(){this.cross&&
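/* End of the Axis prototype; the extension that follows adds datetime ticks: getTimeTicks (time-based tick generation) and normalizeTimeTickInterval. */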
this.cross.hide()}});return a.Axis=I}(N);(function(a){var z=a.Axis,B=a.Date,F=a.dateFormat,E=a.defaultOptions,l=a.defined,f=a.each,k=a.extend,m=a.getMagnitude,u=a.getTZOffset,v=a.normalizeTickInterval,d=a.pick,e=a.timeUnits;z.prototype.getTimeTicks=function(a,b,c,q){var h=[],w={},n=E.global.useUTC,m,y=new B(b-Math.max(u(b),u(c))),K=B.hcMakeTime,r=a.unitRange,A=a.count,v,p;if(l(b)){y[B.hcSetMilliseconds](r>=e.second?0:A*Math.floor(y.getMilliseconds()/A));if(r>=e.second)y[B.hcSetSeconds](r>=e.minute?
0:A*Math.floor(y.getSeconds()/A));if(r>=e.minute)y[B.hcSetMinutes](r>=e.hour?0:A*Math.floor(y[B.hcGetMinutes]()/A));if(r>=e.hour)y[B.hcSetHours](r>=e.day?0:A*Math.floor(y[B.hcGetHours]()/A));if(r>=e.day)y[B.hcSetDate](r>=e.month?1:A*Math.floor(y[B.hcGetDate]()/A));r>=e.month&&(y[B.hcSetMonth](r>=e.year?0:A*Math.floor(y[B.hcGetMonth]()/A)),m=y[B.hcGetFullYear]());if(r>=e.year)y[B.hcSetFullYear](m-m%A);if(r===e.week)y[B.hcSetDate](y[B.hcGetDate]()-y[B.hcGetDay]()+d(q,1));m=y[B.hcGetFullYear]();q=y[B.hcGetMonth]();
var C=y[B.hcGetDate](),D=y[B.hcGetHours]();if(B.hcTimezoneOffset||B.hcGetTimezoneOffset)p=(!n||!!B.hcGetTimezoneOffset)&&(c-b>4*e.month||u(b)!==u(c)),y=y.getTime(),v=u(y),y=new B(y+v);n=y.getTime();for(b=1;n<c;)h.push(n),n=r===e.year?K(m+b*A,0):r===e.month?K(m,q+b*A):!p||r!==e.day&&r!==e.week?p&&r===e.hour?K(m,q,C,D+b*A,0,0,v)-v:n+r*A:K(m,q,C+b*A*(r===e.day?1:7)),b++;h.push(n);r<=e.hour&&1E4>h.length&&f(h,function(a){0===a%18E5&&"000000000"===F("%H%M%S%L",a)&&(w[a]="day")})}h.info=k(a,{higherRanks:w,
totalRange:r*A});return h};z.prototype.normalizeTimeTickInterval=function(a,b){var c=b||[["millisecond",[1,2,5,10,20,25,50,100,200,500]],["second",[1,2,5,10,15,30]],["minute",[1,2,5,10,15,30]],["hour",[1,2,3,4,6,8,12]],["day",[1,2]],["week",[1,2]],["month",[1,2,3,4,6]],["year",null]];b=c[c.length-1];var d=e[b[0]],h=b[1],f;for(f=0;f<c.length&&!(b=c[f],d=e[b[0]],h=b[1],c[f+1]&&a<=(d*h[h.length-1]+e[c[f+1][0]])/2);f++);d===e.year&&a<5*d&&(h=[1,2,5]);a=v(a/d,h,"year"===b[0]?Math.max(m(a/d),1):1);return{unitRange:d,
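/* Next module: logarithmic axis support via getLogTickPositions and the base-10 log2lin/lin2log converters. */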
count:a,unitName:b[0]}}})(N);(function(a){var z=a.Axis,B=a.getMagnitude,F=a.map,E=a.normalizeTickInterval,l=a.pick;z.prototype.getLogTickPositions=function(a,k,m,u){var f=this.options,d=this.len,e=this.lin2log,n=this.log2lin,b=[];u||(this._minorAutoInterval=null);if(.5<=a)a=Math.round(a),b=this.getLinearTickPositions(a,k,m);else if(.08<=a)for(var d=Math.floor(k),c,q,h,w,G,f=.3<a?[1,2,4]:.15<a?[1,2,4,6,8]:[1,2,3,4,5,6,7,8,9];d<m+1&&!G;d++)for(q=f.length,c=0;c<q&&!G;c++)h=n(e(d)*f[c]),h>k&&(!u||w<=
m)&&void 0!==w&&b.push(w),w>m&&(G=!0),w=h;else k=e(k),m=e(m),a=u?this.getMinorTickInterval():f.tickInterval,a=l("auto"===a?null:a,this._minorAutoInterval,f.tickPixelInterval/(u?5:1)*(m-k)/((u?d/this.tickPositions.length:d)||1)),a=E(a,null,B(a)),b=F(this.getLinearTickPositions(a,k,m),n),u||(this._minorAutoInterval=a/5);u||(this.tickInterval=a);return b};z.prototype.log2lin=function(a){return Math.log(a)/Math.LN10};z.prototype.lin2log=function(a){return Math.pow(10,a)}})(N);(function(a,z){var B=a.arrayMax,
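/* PlotLineOrBand: renders axis plot lines and bands (with optional labels); Axis gains addPlotBand/addPlotLine/removePlotBandOrLine. */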
F=a.arrayMin,E=a.defined,l=a.destroyObjectProperties,f=a.each,k=a.erase,m=a.merge,u=a.pick;a.PlotLineOrBand=function(a,d){this.axis=a;d&&(this.options=d,this.id=d.id)};a.PlotLineOrBand.prototype={render:function(){var f=this,d=f.axis,e=d.horiz,n=f.options,b=n.label,c=f.label,q=n.to,h=n.from,w=n.value,k=E(h)&&E(q),l=E(w),y=f.svgElem,K=!y,r=[],A=n.color,J=u(n.zIndex,0),p=n.events,r={"class":"highcharts-plot-"+(k?"band ":"line ")+(n.className||"")},C={},D=d.chart.renderer,I=k?"bands":"lines",g=d.log2lin;
d.isLog&&(h=g(h),q=g(q),w=g(w));l?(r={stroke:A,"stroke-width":n.width},n.dashStyle&&(r.dashstyle=n.dashStyle)):k&&(A&&(r.fill=A),n.borderWidth&&(r.stroke=n.borderColor,r["stroke-width"]=n.borderWidth));C.zIndex=J;I+="-"+J;(A=d.plotLinesAndBandsGroups[I])||(d.plotLinesAndBandsGroups[I]=A=D.g("plot-"+I).attr(C).add());K&&(f.svgElem=y=D.path().attr(r).add(A));if(l)r=d.getPlotLinePath(w,y.strokeWidth());else if(k)r=d.getPlotBandPath(h,q,n);else return;K&&r&&r.length?(y.attr({d:r}),p&&a.objectEach(p,function(a,
b){y.on(b,function(a){p[b].apply(f,[a])})})):y&&(r?(y.show(),y.animate({d:r})):(y.hide(),c&&(f.label=c=c.destroy())));b&&E(b.text)&&r&&r.length&&0<d.width&&0<d.height&&!r.flat?(b=m({align:e&&k&&"center",x:e?!k&&4:10,verticalAlign:!e&&k&&"middle",y:e?k?16:10:k?6:-4,rotation:e&&!k&&90},b),this.renderLabel(b,r,k,J)):c&&c.hide();return f},renderLabel:function(a,d,e,f){var b=this.label,c=this.axis.chart.renderer;b||(b={align:a.textAlign||a.align,rotation:a.rotation,"class":"highcharts-plot-"+(e?"band":
"line")+"-label "+(a.className||"")},b.zIndex=f,this.label=b=c.text(a.text,0,0,a.useHTML).attr(b).add(),b.css(a.style));f=d.xBounds||[d[1],d[4],e?d[6]:d[1]];d=d.yBounds||[d[2],d[5],e?d[7]:d[2]];e=F(f);c=F(d);b.align(a,!1,{x:e,y:c,width:B(f)-e,height:B(d)-c});b.show()},destroy:function(){k(this.axis.plotLinesAndBands,this);delete this.axis;l(this)}};a.extend(z.prototype,{getPlotBandPath:function(a,d){var e=this.getPlotLinePath(d,null,null,!0),f=this.getPlotLinePath(a,null,null,!0),b=this.horiz,c=1;
a=a<this.min&&d<this.min||a>this.max&&d>this.max;f&&e?(a&&(f.flat=f.toString()===e.toString(),c=0),f.push(b&&e[4]===f[4]?e[4]+c:e[4],b||e[5]!==f[5]?e[5]:e[5]+c,b&&e[1]===f[1]?e[1]+c:e[1],b||e[2]!==f[2]?e[2]:e[2]+c,"z")):f=null;return f},addPlotBand:function(a){return this.addPlotBandOrLine(a,"plotBands")},addPlotLine:function(a){return this.addPlotBandOrLine(a,"plotLines")},addPlotBandOrLine:function(f,d){var e=(new a.PlotLineOrBand(this,f)).render(),n=this.userOptions;e&&(d&&(n[d]=n[d]||[],n[d].push(f)),
this.plotLinesAndBands.push(e));return e},removePlotBandOrLine:function(a){for(var d=this.plotLinesAndBands,e=this.options,n=this.userOptions,b=d.length;b--;)d[b].id===a&&d[b].destroy();f([e.plotLines||[],n.plotLines||[],e.plotBands||[],n.plotBands||[]],function(c){for(b=c.length;b--;)c[b].id===a&&k(c,c[b])})},removePlotBand:function(a){this.removePlotBandOrLine(a)},removePlotLine:function(a){this.removePlotBandOrLine(a)}})})(N,W);(function(a){var z=a.dateFormat,B=a.each,F=a.extend,E=a.format,l=a.isNumber,
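/* Tooltip: label creation, positioning (getAnchor/getPosition), shared and split modes, and header/body formatting. */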
f=a.map,k=a.merge,m=a.pick,u=a.splat,v=a.syncTimeout,d=a.timeUnits;a.Tooltip=function(){this.init.apply(this,arguments)};a.Tooltip.prototype={init:function(a,d){this.chart=a;this.options=d;this.crosshairs=[];this.now={x:0,y:0};this.isHidden=!0;this.split=d.split&&!a.inverted;this.shared=d.shared||this.split},cleanSplit:function(a){B(this.chart.series,function(d){var b=d&&d.tt;b&&(!b.isActive||a?d.tt=b.destroy():b.isActive=!1)})},getLabel:function(){var a=this.chart.renderer,d=this.options;this.label||
(this.split?this.label=a.g("tooltip"):(this.label=a.label("",0,0,d.shape||"callout",null,null,d.useHTML,null,"tooltip").attr({padding:d.padding,r:d.borderRadius}),this.label.attr({fill:d.backgroundColor,"stroke-width":d.borderWidth}).css(d.style).shadow(d.shadow)),this.label.attr({zIndex:8}).add());return this.label},update:function(a){this.destroy();k(!0,this.chart.options.tooltip.userOptions,a);this.init(this.chart,k(!0,this.options,a))},destroy:function(){this.label&&(this.label=this.label.destroy());
this.split&&this.tt&&(this.cleanSplit(this.chart,!0),this.tt=this.tt.destroy());clearTimeout(this.hideTimer);clearTimeout(this.tooltipTimeout)},move:function(a,d,b,c){var e=this,h=e.now,f=!1!==e.options.animation&&!e.isHidden&&(1<Math.abs(a-h.x)||1<Math.abs(d-h.y)),n=e.followPointer||1<e.len;F(h,{x:f?(2*h.x+a)/3:a,y:f?(h.y+d)/2:d,anchorX:n?void 0:f?(2*h.anchorX+b)/3:b,anchorY:n?void 0:f?(h.anchorY+c)/2:c});e.getLabel().attr(h);f&&(clearTimeout(this.tooltipTimeout),this.tooltipTimeout=setTimeout(function(){e&&
e.move(a,d,b,c)},32))},hide:function(a){var d=this;clearTimeout(this.hideTimer);a=m(a,this.options.hideDelay,500);this.isHidden||(this.hideTimer=v(function(){d.getLabel()[a?"fadeOut":"hide"]();d.isHidden=!0},a))},getAnchor:function(a,d){var b,c=this.chart,e=c.inverted,h=c.plotTop,w=c.plotLeft,n=0,k=0,y,m;a=u(a);b=a[0].tooltipPos;this.followPointer&&d&&(void 0===d.chartX&&(d=c.pointer.normalize(d)),b=[d.chartX-c.plotLeft,d.chartY-h]);b||(B(a,function(a){y=a.series.yAxis;m=a.series.xAxis;n+=a.plotX+
(!e&&m?m.left-w:0);k+=(a.plotLow?(a.plotLow+a.plotHigh)/2:a.plotY)+(!e&&y?y.top-h:0)}),n/=a.length,k/=a.length,b=[e?c.plotWidth-k:n,this.shared&&!e&&1<a.length&&d?d.chartY-h:e?c.plotHeight-n:k]);return f(b,Math.round)},getPosition:function(a,d,b){var c=this.chart,e=this.distance,h={},f=b.h||0,n,k=["y",c.chartHeight,d,b.plotY+c.plotTop,c.plotTop,c.plotTop+c.plotHeight],y=["x",c.chartWidth,a,b.plotX+c.plotLeft,c.plotLeft,c.plotLeft+c.plotWidth],l=!this.followPointer&&m(b.ttBelow,!c.inverted===!!b.negative),
r=function(a,b,c,g,d,p){var q=c<g-e,w=g+e+c<b,t=g-e-c;g+=e;if(l&&w)h[a]=g;else if(!l&&q)h[a]=t;else if(q)h[a]=Math.min(p-c,0>t-f?t:t-f);else if(w)h[a]=Math.max(d,g+f+c>b?g:g+f);else return!1},A=function(a,b,c,g){var d;g<e||g>b-e?d=!1:h[a]=g<c/2?1:g>b-c/2?b-c-2:g-c/2;return d},u=function(a){var b=k;k=y;y=b;n=a},p=function(){!1!==r.apply(0,k)?!1!==A.apply(0,y)||n||(u(!0),p()):n?h.x=h.y=0:(u(!0),p())};(c.inverted||1<this.len)&&u();p();return h},defaultFormatter:function(a){var d=this.points||u(this),
b;b=[a.tooltipFooterHeaderFormatter(d[0])];b=b.concat(a.bodyFormatter(d));b.push(a.tooltipFooterHeaderFormatter(d[0],!0));return b},refresh:function(a,d){var b,c=this.options,e,h=a,f,n={},k=[];b=c.formatter||this.defaultFormatter;var n=this.shared,y;c.enabled&&(clearTimeout(this.hideTimer),this.followPointer=u(h)[0].series.tooltipOptions.followPointer,f=this.getAnchor(h,d),d=f[0],e=f[1],!n||h.series&&h.series.noSharedTooltip?n=h.getLabelConfig():(B(h,function(a){a.setState("hover");k.push(a.getLabelConfig())}),
n={x:h[0].category,y:h[0].y},n.points=k,h=h[0]),this.len=k.length,n=b.call(n,this),y=h.series,this.distance=m(y.tooltipOptions.distance,16),!1===n?this.hide():(b=this.getLabel(),this.isHidden&&b.attr({opacity:1}).show(),this.split?this.renderSplit(n,a):(c.style.width||b.css({width:this.chart.spacingBox.width}),b.attr({text:n&&n.join?n.join(""):n}),b.removeClass(/highcharts-color-[\d]+/g).addClass("highcharts-color-"+m(h.colorIndex,y.colorIndex)),b.attr({stroke:c.borderColor||h.color||y.color||"#666666"}),
this.updatePosition({plotX:d,plotY:e,negative:h.negative,ttBelow:h.ttBelow,h:f[2]||0})),this.isHidden=!1))},renderSplit:function(d,f){var b=this,c=[],e=this.chart,h=e.renderer,w=!0,n=this.options,k=0,y=this.getLabel();a.isString(d)&&(d=[!1,d]);B(d.slice(0,f.length+1),function(a,d){if(!1!==a){d=f[d-1]||{isHeader:!0,plotX:f[0].plotX};var q=d.series||b,r=q.tt,p=d.series||{},C="highcharts-color-"+m(d.colorIndex,p.colorIndex,"none");r||(q.tt=r=h.label(null,null,null,"callout").addClass("highcharts-tooltip-box "+
C).attr({padding:n.padding,r:n.borderRadius,fill:n.backgroundColor,stroke:n.borderColor||d.color||p.color||"#333333","stroke-width":n.borderWidth}).add(y));r.isActive=!0;r.attr({text:a});r.css(n.style).shadow(n.shadow);a=r.getBBox();p=a.width+r.strokeWidth();d.isHeader?(k=a.height,p=Math.max(0,Math.min(d.plotX+e.plotLeft-p/2,e.chartWidth-p))):p=d.plotX+e.plotLeft-m(n.distance,16)-p;0>p&&(w=!1);a=(d.series&&d.series.yAxis&&d.series.yAxis.pos)+(d.plotY||0);a-=e.plotTop;c.push({target:d.isHeader?e.plotHeight+
k:a,rank:d.isHeader?1:0,size:q.tt.getBBox().height+1,point:d,x:p,tt:r})}});this.cleanSplit();a.distribute(c,e.plotHeight+k);B(c,function(a){var b=a.point,c=b.series;a.tt.attr({visibility:void 0===a.pos?"hidden":"inherit",x:w||b.isHeader?a.x:b.plotX+e.plotLeft+m(n.distance,16),y:a.pos+e.plotTop,anchorX:b.isHeader?b.plotX+e.plotLeft:b.plotX+c.xAxis.pos,anchorY:b.isHeader?a.pos+e.plotTop-15:b.plotY+c.yAxis.pos})})},updatePosition:function(a){var d=this.chart,b=this.getLabel(),b=(this.options.positioner||
this.getPosition).call(this,b.width,b.height,a);this.move(Math.round(b.x),Math.round(b.y||0),a.plotX+d.plotLeft,a.plotY+d.plotTop)},getDateFormat:function(a,f,b,c){var e=z("%m-%d %H:%M:%S.%L",f),h,w,n={millisecond:15,second:12,minute:9,hour:6,day:3},k="millisecond";for(w in d){if(a===d.week&&+z("%w",f)===b&&"00:00:00.000"===e.substr(6)){w="week";break}if(d[w]>a){w=k;break}if(n[w]&&e.substr(n[w])!=="01-01 00:00:00.000".substr(n[w]))break;"week"!==w&&(k=w)}w&&(h=c[w]);return h},getXDateFormat:function(a,
d,b){d=d.dateTimeLabelFormats;var c=b&&b.closestPointRange;return(c?this.getDateFormat(c,a.x,b.options.startOfWeek,d):d.day)||d.year},tooltipFooterHeaderFormatter:function(a,d){d=d?"footer":"header";var b=a.series,c=b.tooltipOptions,e=c.xDateFormat,h=b.xAxis,f=h&&"datetime"===h.options.type&&l(a.key),n=c[d+"Format"];f&&!e&&(e=this.getXDateFormat(a,c,h));f&&e&&B(a.point&&a.point.tooltipDateKeys||["key"],function(a){n=n.replace("{point."+a+"}","{point."+a+":"+e+"}")});return E(n,{point:a,series:b})},
bodyFormatter:function(a){return f(a,function(a){var b=a.series.tooltipOptions;return(b.pointFormatter||a.point.tooltipFormatter).call(a.point,b[(a.point.formatPrefix||"point")+"Format"])})}}})(N);(function(a){var z=a.addEvent,B=a.attr,F=a.charts,E=a.color,l=a.css,f=a.defined,k=a.each,m=a.extend,u=a.find,v=a.fireEvent,d=a.isObject,e=a.offset,n=a.pick,b=a.removeEvent,c=a.splat,q=a.Tooltip;a.Pointer=function(a,b){this.init(a,b)};a.Pointer.prototype={init:function(a,b){this.options=b;this.chart=a;this.runChartClick=
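/* Pointer: normalizes mouse input, resolves hover state (getHoverData/runPointActions), and drives drag-to-zoom selection and panning. */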
b.chart.events&&!!b.chart.events.click;this.pinchDown=[];this.lastValidTouch={};q&&(a.tooltip=new q(a,b.tooltip),this.followTouchMove=n(b.tooltip.followTouchMove,!0));this.setDOMEvents()},zoomOption:function(a){var b=this.chart,c=b.options.chart,d=c.zoomType||"",b=b.inverted;/touch/.test(a.type)&&(d=n(c.pinchType,d));this.zoomX=a=/x/.test(d);this.zoomY=d=/y/.test(d);this.zoomHor=a&&!b||d&&b;this.zoomVert=d&&!b||a&&b;this.hasZoom=a||d},normalize:function(a,b){var c;c=a.touches?a.touches.length?a.touches.item(0):
a.changedTouches[0]:a;b||(this.chartPosition=b=e(this.chart.container));return m(a,{chartX:Math.round(c.pageX-b.left),chartY:Math.round(c.pageY-b.top)})},getCoordinates:function(a){var b={xAxis:[],yAxis:[]};k(this.chart.axes,function(c){b[c.isXAxis?"xAxis":"yAxis"].push({axis:c,value:c.toValue(a[c.horiz?"chartX":"chartY"])})});return b},findNearestKDPoint:function(a,b,c){var h;k(a,function(a){var e=!(a.noSharedTooltip&&b)&&0>a.options.findNearestPointBy.indexOf("y");a=a.searchPoint(c,e);if((e=d(a,
!0))&&!(e=!d(h,!0)))var e=h.distX-a.distX,f=h.dist-a.dist,q=(a.series.group&&a.series.group.zIndex)-(h.series.group&&h.series.group.zIndex),e=0<(0!==e&&b?e:0!==f?f:0!==q?q:h.series.index>a.series.index?-1:1);e&&(h=a)});return h},getPointFromEvent:function(a){a=a.target;for(var b;a&&!b;)b=a.point,a=a.parentNode;return b},getChartCoordinatesFromPoint:function(a,b){var c=a.series,d=c.xAxis,c=c.yAxis;if(d&&c)return b?{chartX:d.len+d.pos-a.clientX,chartY:c.len+c.pos-a.plotY}:{chartX:a.clientX+d.pos,chartY:a.plotY+
c.pos}},getHoverData:function(b,c,e,f,q,m,r){var h,w=[],p=r&&r.isBoosting;f=!(!f||!b);r=c&&!c.stickyTracking?[c]:a.grep(e,function(a){return a.visible&&!(!q&&a.directTouch)&&n(a.options.enableMouseTracking,!0)&&a.stickyTracking});c=(h=f?b:this.findNearestKDPoint(r,q,m))&&h.series;h&&(q&&!c.noSharedTooltip?(r=a.grep(e,function(a){return a.visible&&!(!q&&a.directTouch)&&n(a.options.enableMouseTracking,!0)&&!a.noSharedTooltip}),k(r,function(a){var b=u(a.points,function(a){return a.x===h.x&&!a.isNull});
d(b)&&(p&&(b=a.getPoint(b)),w.push(b))})):w.push(h));return{hoverPoint:h,hoverSeries:c,hoverPoints:w}},runPointActions:function(b,c){var d=this.chart,h=d.tooltip&&d.tooltip.options.enabled?d.tooltip:void 0,e=h?h.shared:!1,f=c||d.hoverPoint,q=f&&f.series||d.hoverSeries,q=this.getHoverData(f,q,d.series,!!c||q&&q.directTouch&&this.isDirectTouch,e,b,{isBoosting:d.isBoosting}),w,f=q.hoverPoint;w=q.hoverPoints;c=(q=q.hoverSeries)&&q.tooltipOptions.followPointer;e=e&&q&&!q.noSharedTooltip;if(f&&(f!==d.hoverPoint||
h&&h.isHidden)){k(d.hoverPoints||[],function(b){-1===a.inArray(b,w)&&b.setState()});k(w||[],function(a){a.setState("hover")});if(d.hoverSeries!==q)q.onMouseOver();d.hoverPoint&&d.hoverPoint.firePointEvent("mouseOut");if(!f.series)return;f.firePointEvent("mouseOver");d.hoverPoints=w;d.hoverPoint=f;h&&h.refresh(e?w:f,b)}else c&&h&&!h.isHidden&&(f=h.getAnchor([{}],b),h.updatePosition({plotX:f[0],plotY:f[1]}));this.unDocMouseMove||(this.unDocMouseMove=z(d.container.ownerDocument,"mousemove",function(b){var c=
F[a.hoverChartIndex];if(c)c.pointer.onDocumentMouseMove(b)}));k(d.axes,function(c){var d=n(c.crosshair.snap,!0),h=d?a.find(w,function(a){return a.series[c.coll]===c}):void 0;h||!d?c.drawCrosshair(b,h):c.hideCrosshair()})},reset:function(a,b){var d=this.chart,h=d.hoverSeries,e=d.hoverPoint,f=d.hoverPoints,q=d.tooltip,n=q&&q.shared?f:e;a&&n&&k(c(n),function(b){b.series.isCartesian&&void 0===b.plotX&&(a=!1)});if(a)q&&n&&(q.refresh(n),e&&(e.setState(e.state,!0),k(d.axes,function(a){a.crosshair&&a.drawCrosshair(null,
e)})));else{if(e)e.onMouseOut();f&&k(f,function(a){a.setState()});if(h)h.onMouseOut();q&&q.hide(b);this.unDocMouseMove&&(this.unDocMouseMove=this.unDocMouseMove());k(d.axes,function(a){a.hideCrosshair()});this.hoverX=d.hoverPoints=d.hoverPoint=null}},scaleGroups:function(a,b){var c=this.chart,d;k(c.series,function(h){d=a||h.getPlotBox();h.xAxis&&h.xAxis.zoomEnabled&&h.group&&(h.group.attr(d),h.markerGroup&&(h.markerGroup.attr(d),h.markerGroup.clip(b?c.clipRect:null)),h.dataLabelsGroup&&h.dataLabelsGroup.attr(d))});
c.clipRect.attr(b||c.clipBox)},dragStart:function(a){var b=this.chart;b.mouseIsDown=a.type;b.cancelClick=!1;b.mouseDownX=this.mouseDownX=a.chartX;b.mouseDownY=this.mouseDownY=a.chartY},drag:function(a){var b=this.chart,c=b.options.chart,d=a.chartX,h=a.chartY,e=this.zoomHor,f=this.zoomVert,q=b.plotLeft,n=b.plotTop,p=b.plotWidth,k=b.plotHeight,D,m=this.selectionMarker,g=this.mouseDownX,t=this.mouseDownY,l=c.panKey&&a[c.panKey+"Key"];m&&m.touch||(d<q?d=q:d>q+p&&(d=q+p),h<n?h=n:h>n+k&&(h=n+k),this.hasDragged=
Math.sqrt(Math.pow(g-d,2)+Math.pow(t-h,2)),10<this.hasDragged&&(D=b.isInsidePlot(g-q,t-n),b.hasCartesianSeries&&(this.zoomX||this.zoomY)&&D&&!l&&!m&&(this.selectionMarker=m=b.renderer.rect(q,n,e?1:p,f?1:k,0).attr({fill:c.selectionMarkerFill||E("#335cad").setOpacity(.25).get(),"class":"highcharts-selection-marker",zIndex:7}).add()),m&&e&&(d-=g,m.attr({width:Math.abs(d),x:(0<d?0:d)+g})),m&&f&&(d=h-t,m.attr({height:Math.abs(d),y:(0<d?0:d)+t})),D&&!m&&c.panning&&b.pan(a,c.panning)))},drop:function(a){var b=
this,c=this.chart,d=this.hasPinched;if(this.selectionMarker){var h={originalEvent:a,xAxis:[],yAxis:[]},e=this.selectionMarker,q=e.attr?e.attr("x"):e.x,n=e.attr?e.attr("y"):e.y,u=e.attr?e.attr("width"):e.width,p=e.attr?e.attr("height"):e.height,C;if(this.hasDragged||d)k(c.axes,function(c){if(c.zoomEnabled&&f(c.min)&&(d||b[{xAxis:"zoomX",yAxis:"zoomY"}[c.coll]])){var e=c.horiz,g="touchend"===a.type?c.minPixelPadding:0,t=c.toValue((e?q:n)+g),e=c.toValue((e?q+u:n+p)-g);h[c.coll].push({axis:c,min:Math.min(t,
e),max:Math.max(t,e)});C=!0}}),C&&v(c,"selection",h,function(a){c.zoom(m(a,d?{animation:!1}:null))});this.selectionMarker=this.selectionMarker.destroy();d&&this.scaleGroups()}c&&(l(c.container,{cursor:c._cursor}),c.cancelClick=10<this.hasDragged,c.mouseIsDown=this.hasDragged=this.hasPinched=!1,this.pinchDown=[])},onContainerMouseDown:function(a){a=this.normalize(a);this.zoomOption(a);a.preventDefault&&a.preventDefault();this.dragStart(a)},onDocumentMouseUp:function(b){F[a.hoverChartIndex]&&F[a.hoverChartIndex].pointer.drop(b)},
onDocumentMouseMove:function(a){var b=this.chart,c=this.chartPosition;a=this.normalize(a,c);!c||this.inClass(a.target,"highcharts-tracker")||b.isInsidePlot(a.chartX-b.plotLeft,a.chartY-b.plotTop)||this.reset()},onContainerMouseLeave:function(b){var c=F[a.hoverChartIndex];c&&(b.relatedTarget||b.toElement)&&(c.pointer.reset(),c.pointer.chartPosition=null)},onContainerMouseMove:function(b){var c=this.chart;f(a.hoverChartIndex)&&F[a.hoverChartIndex]&&F[a.hoverChartIndex].mouseIsDown||(a.hoverChartIndex=
c.index);b=this.normalize(b);b.returnValue=!1;"mousedown"===c.mouseIsDown&&this.drag(b);!this.inClass(b.target,"highcharts-tracker")&&!c.isInsidePlot(b.chartX-c.plotLeft,b.chartY-c.plotTop)||c.openMenu||this.runPointActions(b)},inClass:function(a,b){for(var c;a;){if(c=B(a,"class")){if(-1!==c.indexOf(b))return!0;if(-1!==c.indexOf("highcharts-container"))return!1}a=a.parentNode}},onTrackerMouseOut:function(a){var b=this.chart.hoverSeries;a=a.relatedTarget||a.toElement;this.isDirectTouch=!1;if(!(!b||
!a||b.stickyTracking||this.inClass(a,"highcharts-tooltip")||this.inClass(a,"highcharts-series-"+b.index)&&this.inClass(a,"highcharts-tracker")))b.onMouseOut()},onContainerClick:function(a){var b=this.chart,c=b.hoverPoint,d=b.plotLeft,h=b.plotTop;a=this.normalize(a);b.cancelClick||(c&&this.inClass(a.target,"highcharts-tracker")?(v(c.series,"click",m(a,{point:c})),b.hoverPoint&&c.firePointEvent("click",a)):(m(a,this.getCoordinates(a)),b.isInsidePlot(a.chartX-d,a.chartY-h)&&v(b,"click",a)))},setDOMEvents:function(){var b=
this,c=b.chart.container,d=c.ownerDocument;c.onmousedown=function(a){b.onContainerMouseDown(a)};c.onmousemove=function(a){b.onContainerMouseMove(a)};c.onclick=function(a){b.onContainerClick(a)};z(c,"mouseleave",b.onContainerMouseLeave);1===a.chartCount&&z(d,"mouseup",b.onDocumentMouseUp);a.hasTouch&&(c.ontouchstart=function(a){b.onContainerTouchStart(a)},c.ontouchmove=function(a){b.onContainerTouchMove(a)},1===a.chartCount&&z(d,"touchend",b.onDocumentTouchEnd))},destroy:function(){var c=this,d=this.chart.container.ownerDocument;
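/* Touch extension (next module): pinchTranslate/pinch implement pinch-to-zoom and unified touch handling on Pointer. */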
c.unDocMouseMove&&c.unDocMouseMove();b(c.chart.container,"mouseleave",c.onContainerMouseLeave);a.chartCount||(b(d,"mouseup",c.onDocumentMouseUp),a.hasTouch&&b(d,"touchend",c.onDocumentTouchEnd));clearInterval(c.tooltipTimeout);a.objectEach(c,function(a,b){c[b]=null})}}})(N);(function(a){var z=a.charts,B=a.each,F=a.extend,E=a.map,l=a.noop,f=a.pick;F(a.Pointer.prototype,{pinchTranslate:function(a,f,l,v,d,e){this.zoomHor&&this.pinchTranslateDirection(!0,a,f,l,v,d,e);this.zoomVert&&this.pinchTranslateDirection(!1,
a,f,l,v,d,e)},pinchTranslateDirection:function(a,f,l,v,d,e,n,b){var c=this.chart,q=a?"x":"y",h=a?"X":"Y",k="chart"+h,m=a?"width":"height",u=c["plot"+(a?"Left":"Top")],y,K,r=b||1,A=c.inverted,J=c.bounds[a?"h":"v"],p=1===f.length,C=f[0][k],D=l[0][k],I=!p&&f[1][k],g=!p&&l[1][k],t;l=function(){!p&&20<Math.abs(C-I)&&(r=b||Math.abs(D-g)/Math.abs(C-I));K=(u-D)/r+C;y=c["plot"+(a?"Width":"Height")]/r};l();f=K;f<J.min?(f=J.min,t=!0):f+y>J.max&&(f=J.max-y,t=!0);t?(D-=.8*(D-n[q][0]),p||(g-=.8*(g-n[q][1])),l()):
n[q]=[D,g];A||(e[q]=K-u,e[m]=y);e=A?1/r:r;d[m]=y;d[q]=f;v[A?a?"scaleY":"scaleX":"scale"+h]=r;v["translate"+h]=e*u+(D-e*C)},pinch:function(a){var k=this,u=k.chart,v=k.pinchDown,d=a.touches,e=d.length,n=k.lastValidTouch,b=k.hasZoom,c=k.selectionMarker,q={},h=1===e&&(k.inClass(a.target,"highcharts-tracker")&&u.runTrackerClick||k.runChartClick),w={};1<e&&(k.initiated=!0);b&&k.initiated&&!h&&a.preventDefault();E(d,function(a){return k.normalize(a)});"touchstart"===a.type?(B(d,function(a,b){v[b]={chartX:a.chartX,
chartY:a.chartY}}),n.x=[v[0].chartX,v[1]&&v[1].chartX],n.y=[v[0].chartY,v[1]&&v[1].chartY],B(u.axes,function(a){if(a.zoomEnabled){var b=u.bounds[a.horiz?"h":"v"],c=a.minPixelPadding,d=a.toPixels(f(a.options.min,a.dataMin)),h=a.toPixels(f(a.options.max,a.dataMax)),e=Math.max(d,h);b.min=Math.min(a.pos,Math.min(d,h)-c);b.max=Math.max(a.pos+a.len,e+c)}}),k.res=!0):k.followTouchMove&&1===e?this.runPointActions(k.normalize(a)):v.length&&(c||(k.selectionMarker=c=F({destroy:l,touch:!0},u.plotBox)),k.pinchTranslate(v,
d,q,c,w,n),k.hasPinched=b,k.scaleGroups(q,w),k.res&&(k.res=!1,this.reset(!1,0)))},touch:function(k,m){var l=this.chart,v,d;if(l.index!==a.hoverChartIndex)this.onContainerMouseLeave({relatedTarget:!0});a.hoverChartIndex=l.index;1===k.touches.length?(k=this.normalize(k),(d=l.isInsidePlot(k.chartX-l.plotLeft,k.chartY-l.plotTop))&&!l.openMenu?(m&&this.runPointActions(k),"touchmove"===k.type&&(m=this.pinchDown,v=m[0]?4<=Math.sqrt(Math.pow(m[0].chartX-k.chartX,2)+Math.pow(m[0].chartY-k.chartY,2)):!1),f(v,
!0)&&this.pinch(k)):m&&this.reset()):2===k.touches.length&&this.pinch(k)},onContainerTouchStart:function(a){this.zoomOption(a);this.touch(a,!0)},onContainerTouchMove:function(a){this.touch(a)},onDocumentTouchEnd:function(f){z[a.hoverChartIndex]&&z[a.hoverChartIndex].pointer.drop(f)}})})(N);(function(a){var z=a.addEvent,B=a.charts,F=a.css,E=a.doc,l=a.extend,f=a.noop,k=a.Pointer,m=a.removeEvent,u=a.win,v=a.wrap;if(!a.hasTouch&&(u.PointerEvent||u.MSPointerEvent)){var d={},e=!!u.PointerEvent,n=function(){var b=
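/* IE/Edge fallback: maps PointerEvent/MSPointerEvent input onto the touch handlers when native touch support is unavailable. */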
[];b.item=function(a){return this[a]};a.objectEach(d,function(a){b.push({pageX:a.pageX,pageY:a.pageY,target:a.target})});return b},b=function(b,d,h,e){"touch"!==b.pointerType&&b.pointerType!==b.MSPOINTER_TYPE_TOUCH||!B[a.hoverChartIndex]||(e(b),e=B[a.hoverChartIndex].pointer,e[d]({type:h,target:b.currentTarget,preventDefault:f,touches:n()}))};l(k.prototype,{onContainerPointerDown:function(a){b(a,"onContainerTouchStart","touchstart",function(a){d[a.pointerId]={pageX:a.pageX,pageY:a.pageY,target:a.currentTarget}})},
onContainerPointerMove:function(a){b(a,"onContainerTouchMove","touchmove",function(a){d[a.pointerId]={pageX:a.pageX,pageY:a.pageY};d[a.pointerId].target||(d[a.pointerId].target=a.currentTarget)})},onDocumentPointerUp:function(a){b(a,"onDocumentTouchEnd","touchend",function(a){delete d[a.pointerId]})},batchMSEvents:function(a){a(this.chart.container,e?"pointerdown":"MSPointerDown",this.onContainerPointerDown);a(this.chart.container,e?"pointermove":"MSPointerMove",this.onContainerPointerMove);a(E,e?
"pointerup":"MSPointerUp",this.onDocumentPointerUp)}});v(k.prototype,"init",function(a,b,d){a.call(this,b,d);this.hasZoom&&F(b.container,{"-ms-touch-action":"none","touch-action":"none"})});v(k.prototype,"setDOMEvents",function(a){a.apply(this);(this.hasZoom||this.followTouchMove)&&this.batchMSEvents(z)});v(k.prototype,"destroy",function(a){this.batchMSEvents(m);a.call(this)})}})(N);(function(a){var z=a.addEvent,B=a.css,F=a.discardElement,E=a.defined,l=a.each,f=a.isFirefox,k=a.marginNames,m=a.merge,
u=a.pick,v=a.setAnimation,d=a.stableSort,e=a.win,n=a.wrap;a.Legend=function(a,c){this.init(a,c)};a.Legend.prototype={init:function(a,c){this.chart=a;this.setOptions(c);c.enabled&&(this.render(),z(this.chart,"endResize",function(){this.legend.positionCheckboxes()}))},setOptions:function(a){var b=u(a.padding,8);this.options=a;this.itemStyle=a.itemStyle;this.itemHiddenStyle=m(this.itemStyle,a.itemHiddenStyle);this.itemMarginTop=a.itemMarginTop||0;this.padding=b;this.initialItemY=b-5;this.itemHeight=
this.maxItemWidth=0;this.symbolWidth=u(a.symbolWidth,16);this.pages=[]},update:function(a,c){var b=this.chart;this.setOptions(m(!0,this.options,a));this.destroy();b.isDirtyLegend=b.isDirtyBox=!0;u(c,!0)&&b.redraw()},colorizeItem:function(a,c){a.legendGroup[c?"removeClass":"addClass"]("highcharts-legend-item-hidden");var b=this.options,d=a.legendItem,e=a.legendLine,f=a.legendSymbol,n=this.itemHiddenStyle.color,b=c?b.itemStyle.color:n,k=c?a.color||n:n,l=a.options&&a.options.marker,r={fill:k};d&&d.css({fill:b,
color:b});e&&e.attr({stroke:k});f&&(l&&f.isMarker&&(r=a.pointAttribs(),c||(r.stroke=r.fill=n)),f.attr(r))},positionItem:function(a){var b=this.options,d=b.symbolPadding,b=!b.rtl,h=a._legendItemPos,e=h[0],h=h[1],f=a.checkbox;(a=a.legendGroup)&&a.element&&a.translate(b?e:this.legendWidth-e-2*d-4,h);f&&(f.x=e,f.y=h)},destroyItem:function(a){var b=a.checkbox;l(["legendItem","legendLine","legendSymbol","legendGroup"],function(b){a[b]&&(a[b]=a[b].destroy())});b&&F(a.checkbox)},destroy:function(){function a(a){this[a]&&
(this[a]=this[a].destroy())}l(this.getAllItems(),function(b){l(["legendItem","legendGroup"],a,b)});l("clipRect up down pager nav box title group".split(" "),a,this);this.display=null},positionCheckboxes:function(a){var b=this.group&&this.group.alignAttr,d,h=this.clipHeight||this.legendHeight,e=this.titleHeight;b&&(d=b.translateY,l(this.allItems,function(c){var f=c.checkbox,q;f&&(q=d+e+f.y+(a||0)+3,B(f,{left:b.translateX+c.checkboxOffset+f.x-20+"px",top:q+"px",display:q>d-6&&q<d+h-6?"":"none"}))}))},
renderTitle:function(){var a=this.options,c=this.padding,d=a.title,h=0;d.text&&(this.title||(this.title=this.chart.renderer.label(d.text,c-3,c-4,null,null,null,a.useHTML,null,"legend-title").attr({zIndex:1}).css(d.style).add(this.group)),a=this.title.getBBox(),h=a.height,this.offsetWidth=a.width,this.contentGroup.attr({translateY:h}));this.titleHeight=h},setText:function(b){var c=this.options;b.legendItem.attr({text:c.labelFormat?a.format(c.labelFormat,b):c.labelFormatter.call(b)})},renderItem:function(a){var b=
this.chart,d=b.renderer,h=this.options,e="horizontal"===h.layout,f=this.symbolWidth,n=h.symbolPadding,k=this.itemStyle,l=this.itemHiddenStyle,r=this.padding,A=e?u(h.itemDistance,20):0,v=!h.rtl,p=h.width,C=h.itemMarginBottom||0,D=this.itemMarginTop,I=a.legendItem,g=!a.series,t=!g&&a.series.drawLegendSymbol?a.series:a,Q=t.options,M=this.createCheckboxForItem&&Q&&Q.showCheckbox,Q=f+n+A+(M?20:0),O=h.useHTML,L=a.options.className;I||(a.legendGroup=d.g("legend-item").addClass("highcharts-"+t.type+"-series highcharts-color-"+
a.colorIndex+(L?" "+L:"")+(g?" highcharts-series-"+a.index:"")).attr({zIndex:1}).add(this.scrollGroup),a.legendItem=I=d.text("",v?f+n:-n,this.baseline||0,O).css(m(a.visible?k:l)).attr({align:v?"left":"right",zIndex:2}).add(a.legendGroup),this.baseline||(f=k.fontSize,this.fontMetrics=d.fontMetrics(f,I),this.baseline=this.fontMetrics.f+3+D,I.attr("y",this.baseline)),this.symbolHeight=h.symbolHeight||this.fontMetrics.f,t.drawLegendSymbol(this,a),this.setItemEvents&&this.setItemEvents(a,I,O),M&&this.createCheckboxForItem(a));
this.colorizeItem(a,a.visible);k.width||I.css({width:(h.itemWidth||h.width||b.spacingBox.width)-Q});this.setText(a);d=I.getBBox();k=a.checkboxOffset=h.itemWidth||a.legendItemWidth||d.width+Q;this.itemHeight=d=Math.round(a.legendItemHeight||d.height||this.symbolHeight);e&&this.itemX-r+k>(p||b.spacingBox.width-2*r-h.x)&&(this.itemX=r,this.itemY+=D+this.lastLineHeight+C,this.lastLineHeight=0);this.maxItemWidth=Math.max(this.maxItemWidth,k);this.lastItemY=D+this.itemY+C;this.lastLineHeight=Math.max(d,
this.lastLineHeight);a._legendItemPos=[this.itemX,this.itemY];e?this.itemX+=k:(this.itemY+=D+d+C,this.lastLineHeight=d);this.offsetWidth=p||Math.max((e?this.itemX-r-(a.checkbox?0:A):k)+r,this.offsetWidth)},getAllItems:function(){var a=[];l(this.chart.series,function(b){var c=b&&b.options;b&&u(c.showInLegend,E(c.linkedTo)?!1:void 0,!0)&&(a=a.concat(b.legendItems||("point"===c.legendType?b.data:b)))});return a},adjustMargins:function(a,c){var b=this.chart,d=this.options,e=d.align.charAt(0)+d.verticalAlign.charAt(0)+
d.layout.charAt(0);d.floating||l([/(lth|ct|rth)/,/(rtv|rm|rbv)/,/(rbh|cb|lbh)/,/(lbv|lm|ltv)/],function(h,f){h.test(e)&&!E(a[f])&&(b[k[f]]=Math.max(b[k[f]],b.legend[(f+1)%2?"legendHeight":"legendWidth"]+[1,-1,-1,1][f]*d[f%2?"x":"y"]+u(d.margin,12)+c[f]))})},render:function(){var a=this,c=a.chart,e=c.renderer,h=a.group,f,n,k,y,u=a.box,r=a.options,A=a.padding;a.itemX=A;a.itemY=a.initialItemY;a.offsetWidth=0;a.lastItemY=0;h||(a.group=h=e.g("legend").attr({zIndex:7}).add(),a.contentGroup=e.g().attr({zIndex:1}).add(h),
a.scrollGroup=e.g().add(a.contentGroup));a.renderTitle();f=a.getAllItems();d(f,function(a,b){return(a.options&&a.options.legendIndex||0)-(b.options&&b.options.legendIndex||0)});r.reversed&&f.reverse();a.allItems=f;a.display=n=!!f.length;a.lastLineHeight=0;l(f,function(b){a.renderItem(b)});k=(r.width||a.offsetWidth)+A;y=a.lastItemY+a.lastLineHeight+a.titleHeight;y=a.handleOverflow(y);y+=A;u||(a.box=u=e.rect().addClass("highcharts-legend-box").attr({r:r.borderRadius}).add(h),u.isNew=!0);u.attr({stroke:r.borderColor,
"stroke-width":r.borderWidth||0,fill:r.backgroundColor||"none"}).shadow(r.shadow);0<k&&0<y&&(u[u.isNew?"attr":"animate"](u.crisp({x:0,y:0,width:k,height:y},u.strokeWidth())),u.isNew=!1);u[n?"show":"hide"]();a.legendWidth=k;a.legendHeight=y;l(f,function(b){a.positionItem(b)});n&&h.align(m(r,{width:k,height:y}),!0,"spacingBox");c.isResizing||this.positionCheckboxes()},handleOverflow:function(a){var b=this,d=this.chart,h=d.renderer,e=this.options,f=e.y,n=this.padding,d=d.spacingBox.height+("top"===e.verticalAlign?
-f:f)-n,f=e.maxHeight,k,m=this.clipRect,r=e.navigation,A=u(r.animation,!0),v=r.arrowSize||12,p=this.nav,C=this.pages,D,I=this.allItems,g=function(a){"number"===typeof a?m.attr({height:a}):m&&(b.clipRect=m.destroy(),b.contentGroup.clip());b.contentGroup.div&&(b.contentGroup.div.style.clip=a?"rect("+n+"px,9999px,"+(n+a)+"px,0)":"auto")};"horizontal"!==e.layout||"middle"===e.verticalAlign||e.floating||(d/=2);f&&(d=Math.min(d,f));C.length=0;a>d&&!1!==r.enabled?(this.clipHeight=k=Math.max(d-20-this.titleHeight-
n,0),this.currentPage=u(this.currentPage,1),this.fullHeight=a,l(I,function(a,b){var c=a._legendItemPos[1];a=Math.round(a.legendItem.getBBox().height);var d=C.length;if(!d||c-C[d-1]>k&&(D||c)!==C[d-1])C.push(D||c),d++;b===I.length-1&&c+a-C[d-1]>k&&C.push(c);c!==D&&(D=c)}),m||(m=b.clipRect=h.clipRect(0,n,9999,0),b.contentGroup.clip(m)),g(k),p||(this.nav=p=h.g().attr({zIndex:1}).add(this.group),this.up=h.symbol("triangle",0,0,v,v).on("click",function(){b.scroll(-1,A)}).add(p),this.pager=h.text("",15,
10).addClass("highcharts-legend-navigation").css(r.style).add(p),this.down=h.symbol("triangle-down",0,0,v,v).on("click",function(){b.scroll(1,A)}).add(p)),b.scroll(0),a=d):p&&(g(),this.nav=p.destroy(),this.scrollGroup.attr({translateY:1}),this.clipHeight=0);return a},scroll:function(a,c){var b=this.pages,d=b.length;a=this.currentPage+a;var e=this.clipHeight,f=this.options.navigation,n=this.pager,k=this.padding;a>d&&(a=d);0<a&&(void 0!==c&&v(c,this.chart),this.nav.attr({translateX:k,translateY:e+this.padding+
7+this.titleHeight,visibility:"visible"}),this.up.attr({"class":1===a?"highcharts-legend-nav-inactive":"highcharts-legend-nav-active"}),n.attr({text:a+"/"+d}),this.down.attr({x:18+this.pager.getBBox().width,"class":a===d?"highcharts-legend-nav-inactive":"highcharts-legend-nav-active"}),this.up.attr({fill:1===a?f.inactiveColor:f.activeColor}).css({cursor:1===a?"default":"pointer"}),this.down.attr({fill:a===d?f.inactiveColor:f.activeColor}).css({cursor:a===d?"default":"pointer"}),c=-b[a-1]+this.initialItemY,
this.scrollGroup.animate({translateY:c}),this.currentPage=a,this.positionCheckboxes(c))}};a.LegendSymbolMixin={drawRectangle:function(a,c){var b=a.symbolHeight,d=a.options.squareSymbol;c.legendSymbol=this.chart.renderer.rect(d?(a.symbolWidth-b)/2:0,a.baseline-b+1,d?b:a.symbolWidth,b,u(a.options.symbolRadius,b/2)).addClass("highcharts-point").attr({zIndex:3}).add(c.legendGroup)},drawLineMarker:function(a){var b=this.options,d=b.marker,e=a.symbolWidth,f=a.symbolHeight,n=f/2,k=this.chart.renderer,l=
this.legendGroup;a=a.baseline-Math.round(.3*a.fontMetrics.b);var v;v={"stroke-width":b.lineWidth||0};b.dashStyle&&(v.dashstyle=b.dashStyle);this.legendLine=k.path(["M",0,a,"L",e,a]).addClass("highcharts-graph").attr(v).add(l);d&&!1!==d.enabled&&(b=Math.min(u(d.radius,n),n),0===this.symbol.indexOf("url")&&(d=m(d,{width:f,height:f}),b=0),this.legendSymbol=d=k.symbol(this.symbol,e/2-b,a-b,2*b,2*b,d).addClass("highcharts-point").add(l),d.isMarker=!0)}};(/Trident\/7\.0/.test(e.navigator.userAgent)||f)&&
n(a.Legend.prototype,"positionItem",function(a,c){var b=this,d=function(){c._legendItemPos&&a.call(b,c)};d();setTimeout(d)})})(N);(function(a){var z=a.addEvent,B=a.animate,F=a.animObject,E=a.attr,l=a.doc,f=a.Axis,k=a.createElement,m=a.defaultOptions,u=a.discardElement,v=a.charts,d=a.css,e=a.defined,n=a.each,b=a.extend,c=a.find,q=a.fireEvent,h=a.grep,w=a.isNumber,G=a.isObject,H=a.isString,y=a.Legend,K=a.marginNames,r=a.merge,A=a.objectEach,J=a.Pointer,p=a.pick,C=a.pInt,D=a.removeEvent,I=a.seriesTypes,
g=a.splat,t=a.svg,Q=a.syncTimeout,M=a.win,O=a.Chart=function(){this.getArgs.apply(this,arguments)};a.chart=function(a,b,c){return new O(a,b,c)};b(O.prototype,{callbacks:[],getArgs:function(){var a=[].slice.call(arguments);if(H(a[0])||a[0].nodeName)this.renderTo=a.shift();this.init(a[0],a[1])},init:function(b,c){var d,g,e=b.series,h=b.plotOptions||{};b.series=null;d=r(m,b);for(g in d.plotOptions)d.plotOptions[g].tooltip=h[g]&&r(h[g].tooltip)||void 0;d.tooltip.userOptions=b.chart&&b.chart.forExport&&
b.tooltip.userOptions||b.tooltip;d.series=b.series=e;this.userOptions=b;b=d.chart;g=b.events;this.margin=[];this.spacing=[];this.bounds={h:{},v:{}};this.labelCollectors=[];this.callback=c;this.isResizing=0;this.options=d;this.axes=[];this.series=[];this.hasCartesianSeries=b.showAxes;var p=this;p.index=v.length;v.push(p);a.chartCount++;g&&A(g,function(a,b){z(p,b,a)});p.xAxis=[];p.yAxis=[];p.pointCount=p.colorCounter=p.symbolCounter=0;p.firstRender()},initSeries:function(b){var c=this.options.chart;
(c=I[b.type||c.type||c.defaultSeriesType])||a.error(17,!0);c=new c;c.init(this,b);return c},orderSeries:function(a){var b=this.series;for(a=a||0;a<b.length;a++)b[a]&&(b[a].index=a,b[a].name=b[a].name||"Series "+(b[a].index+1))},isInsidePlot:function(a,b,c){var d=c?b:a;a=c?a:b;return 0<=d&&d<=this.plotWidth&&0<=a&&a<=this.plotHeight},redraw:function(c){var d=this.axes,g=this.series,e=this.pointer,p=this.legend,h=this.isDirtyLegend,f,t,k=this.hasCartesianSeries,r=this.isDirtyBox,D,l=this.renderer,m=
l.isHidden(),C=[];this.setResponsive&&this.setResponsive(!1);a.setAnimation(c,this);m&&this.temporaryDisplay();this.layOutTitles();for(c=g.length;c--;)if(D=g[c],D.options.stacking&&(f=!0,D.isDirty)){t=!0;break}if(t)for(c=g.length;c--;)D=g[c],D.options.stacking&&(D.isDirty=!0);n(g,function(a){a.isDirty&&"point"===a.options.legendType&&(a.updateTotals&&a.updateTotals(),h=!0);a.isDirtyData&&q(a,"updatedData")});h&&p.options.enabled&&(p.render(),this.isDirtyLegend=!1);f&&this.getStacks();k&&n(d,function(a){a.updateNames();
a.setScale()});this.getMargins();k&&(n(d,function(a){a.isDirty&&(r=!0)}),n(d,function(a){var c=a.min+","+a.max;a.extKey!==c&&(a.extKey=c,C.push(function(){q(a,"afterSetExtremes",b(a.eventArgs,a.getExtremes()));delete a.eventArgs}));(r||f)&&a.redraw()}));r&&this.drawChartBox();q(this,"predraw");n(g,function(a){(r||a.isDirty)&&a.visible&&a.redraw();a.isDirtyData=!1});e&&e.reset(!0);l.draw();q(this,"redraw");q(this,"render");m&&this.temporaryDisplay(!0);n(C,function(a){a.call()})},get:function(a){function b(b){return b.id===
a||b.options&&b.options.id===a}var d,g=this.series,e;d=c(this.axes,b)||c(this.series,b);for(e=0;!d&&e<g.length;e++)d=c(g[e].points||[],b);return d},getAxes:function(){var a=this,b=this.options,c=b.xAxis=g(b.xAxis||{}),b=b.yAxis=g(b.yAxis||{});n(c,function(a,b){a.index=b;a.isX=!0});n(b,function(a,b){a.index=b});c=c.concat(b);n(c,function(b){new f(a,b)})},getSelectedPoints:function(){var a=[];n(this.series,function(b){a=a.concat(h(b.data||[],function(a){return a.selected}))});return a},getSelectedSeries:function(){return h(this.series,
function(a){return a.selected})},setTitle:function(a,b,c){var d=this,g=d.options,e;e=g.title=r({style:{color:"#333333",fontSize:g.isStock?"16px":"18px"}},g.title,a);g=g.subtitle=r({style:{color:"#666666"}},g.subtitle,b);n([["title",a,e],["subtitle",b,g]],function(a,b){var c=a[0],g=d[c],e=a[1];a=a[2];g&&e&&(d[c]=g=g.destroy());a&&a.text&&!g&&(d[c]=d.renderer.text(a.text,0,0,a.useHTML).attr({align:a.align,"class":"highcharts-"+c,zIndex:a.zIndex||4}).add(),d[c].update=function(a){d.setTitle(!b&&a,b&&
a)},d[c].css(a.style))});d.layOutTitles(c)},layOutTitles:function(a){var c=0,d,g=this.renderer,e=this.spacingBox;n(["title","subtitle"],function(a){var d=this[a],p=this.options[a];a="title"===a?-3:p.verticalAlign?0:c+2;var h;d&&(h=p.style.fontSize,h=g.fontMetrics(h,d).b,d.css({width:(p.width||e.width+p.widthAdjust)+"px"}).align(b({y:a+h},p),!1,"spacingBox"),p.floating||p.verticalAlign||(c=Math.ceil(c+d.getBBox(p.useHTML).height)))},this);d=this.titleOffset!==c;this.titleOffset=c;!this.isDirtyBox&&
d&&(this.isDirtyBox=d,this.hasRendered&&p(a,!0)&&this.isDirtyBox&&this.redraw())},getChartSize:function(){var b=this.options.chart,c=b.width,b=b.height,d=this.renderTo;e(c)||(this.containerWidth=a.getStyle(d,"width"));e(b)||(this.containerHeight=a.getStyle(d,"height"));this.chartWidth=Math.max(0,c||this.containerWidth||600);this.chartHeight=Math.max(0,a.relativeLength(b,this.chartWidth)||(1<this.containerHeight?this.containerHeight:400))},temporaryDisplay:function(b){var c=this.renderTo;if(b)for(;c&&
c.style;)c.hcOrigStyle&&(a.css(c,c.hcOrigStyle),delete c.hcOrigStyle),c.hcOrigDetached&&(l.body.removeChild(c),c.hcOrigDetached=!1),c=c.parentNode;else for(;c&&c.style;){l.body.contains(c)||c.parentNode||(c.hcOrigDetached=!0,l.body.appendChild(c));if("none"===a.getStyle(c,"display",!1)||c.hcOrigDetached)c.hcOrigStyle={display:c.style.display,height:c.style.height,overflow:c.style.overflow},b={display:"block",overflow:"hidden"},c!==this.renderTo&&(b.height=0),a.css(c,b),c.offsetWidth||c.style.setProperty("display",
"block","important");c=c.parentNode;if(c===l.body)break}},setClassName:function(a){this.container.className="highcharts-container "+(a||"")},getContainer:function(){var c,d=this.options,g=d.chart,e,p;c=this.renderTo;var h=a.uniqueKey(),f;c||(this.renderTo=c=g.renderTo);H(c)&&(this.renderTo=c=l.getElementById(c));c||a.error(13,!0);e=C(E(c,"data-highcharts-chart"));w(e)&&v[e]&&v[e].hasRendered&&v[e].destroy();E(c,"data-highcharts-chart",this.index);c.innerHTML="";g.skipClone||c.offsetWidth||this.temporaryDisplay();
this.getChartSize();e=this.chartWidth;p=this.chartHeight;f=b({position:"relative",overflow:"hidden",width:e+"px",height:p+"px",textAlign:"left",lineHeight:"normal",zIndex:0,"-webkit-tap-highlight-color":"rgba(0,0,0,0)"},g.style);this.container=c=k("div",{id:h},f,c);this._cursor=c.style.cursor;this.renderer=new (a[g.renderer]||a.Renderer)(c,e,p,null,g.forExport,d.exporting&&d.exporting.allowHTML);this.setClassName(g.className);this.renderer.setStyle(g.style);this.renderer.chartIndex=this.index},getMargins:function(a){var b=
this.spacing,c=this.margin,d=this.titleOffset;this.resetMargins();d&&!e(c[0])&&(this.plotTop=Math.max(this.plotTop,d+this.options.title.margin+b[0]));this.legend&&this.legend.display&&this.legend.adjustMargins(c,b);this.extraMargin&&(this[this.extraMargin.type]=(this[this.extraMargin.type]||0)+this.extraMargin.value);this.adjustPlotArea&&this.adjustPlotArea();a||this.getAxisMargins()},getAxisMargins:function(){var a=this,b=a.axisOffset=[0,0,0,0],c=a.margin;a.hasCartesianSeries&&n(a.axes,function(a){a.visible&&
a.getOffset()});n(K,function(d,g){e(c[g])||(a[d]+=b[g])});a.setChartSize()},reflow:function(b){var c=this,d=c.options.chart,g=c.renderTo,p=e(d.width)&&e(d.height),h=d.width||a.getStyle(g,"width"),d=d.height||a.getStyle(g,"height"),g=b?b.target:M;if(!p&&!c.isPrinting&&h&&d&&(g===M||g===l)){if(h!==c.containerWidth||d!==c.containerHeight)clearTimeout(c.reflowTimeout),c.reflowTimeout=Q(function(){c.container&&c.setSize(void 0,void 0,!1)},b?100:0);c.containerWidth=h;c.containerHeight=d}},initReflow:function(){var a=
this,b;b=z(M,"resize",function(b){a.reflow(b)});z(a,"destroy",b)},setSize:function(b,c,g){var e=this,p=e.renderer;e.isResizing+=1;a.setAnimation(g,e);e.oldChartHeight=e.chartHeight;e.oldChartWidth=e.chartWidth;void 0!==b&&(e.options.chart.width=b);void 0!==c&&(e.options.chart.height=c);e.getChartSize();b=p.globalAnimation;(b?B:d)(e.container,{width:e.chartWidth+"px",height:e.chartHeight+"px"},b);e.setChartSize(!0);p.setSize(e.chartWidth,e.chartHeight,g);n(e.axes,function(a){a.isDirty=!0;a.setScale()});
e.isDirtyLegend=!0;e.isDirtyBox=!0;e.layOutTitles();e.getMargins();e.redraw(g);e.oldChartHeight=null;q(e,"resize");Q(function(){e&&q(e,"endResize",null,function(){--e.isResizing})},F(b).duration)},setChartSize:function(a){var b=this.inverted,c=this.renderer,d=this.chartWidth,g=this.chartHeight,e=this.options.chart,p=this.spacing,h=this.clipOffset,f,t,k,q;this.plotLeft=f=Math.round(this.plotLeft);this.plotTop=t=Math.round(this.plotTop);this.plotWidth=k=Math.max(0,Math.round(d-f-this.marginRight));
this.plotHeight=q=Math.max(0,Math.round(g-t-this.marginBottom));this.plotSizeX=b?q:k;this.plotSizeY=b?k:q;this.plotBorderWidth=e.plotBorderWidth||0;this.spacingBox=c.spacingBox={x:p[3],y:p[0],width:d-p[3]-p[1],height:g-p[0]-p[2]};this.plotBox=c.plotBox={x:f,y:t,width:k,height:q};d=2*Math.floor(this.plotBorderWidth/2);b=Math.ceil(Math.max(d,h[3])/2);c=Math.ceil(Math.max(d,h[0])/2);this.clipBox={x:b,y:c,width:Math.floor(this.plotSizeX-Math.max(d,h[1])/2-b),height:Math.max(0,Math.floor(this.plotSizeY-
Math.max(d,h[2])/2-c))};a||n(this.axes,function(a){a.setAxisSize();a.setAxisTranslation()})},resetMargins:function(){var a=this,b=a.options.chart;n(["margin","spacing"],function(c){var d=b[c],g=G(d)?d:[d,d,d,d];n(["Top","Right","Bottom","Left"],function(d,e){a[c][e]=p(b[c+d],g[e])})});n(K,function(b,c){a[b]=p(a.margin[c],a.spacing[c])});a.axisOffset=[0,0,0,0];a.clipOffset=[0,0,0,0]},drawChartBox:function(){var a=this.options.chart,b=this.renderer,c=this.chartWidth,d=this.chartHeight,g=this.chartBackground,
e=this.plotBackground,p=this.plotBorder,h,f=this.plotBGImage,n=a.backgroundColor,t=a.plotBackgroundColor,k=a.plotBackgroundImage,q,r=this.plotLeft,D=this.plotTop,l=this.plotWidth,m=this.plotHeight,C=this.plotBox,y=this.clipRect,w=this.clipBox,A="animate";g||(this.chartBackground=g=b.rect().addClass("highcharts-background").add(),A="attr");h=a.borderWidth||0;q=h+(a.shadow?8:0);n={fill:n||"none"};if(h||g["stroke-width"])n.stroke=a.borderColor,n["stroke-width"]=h;g.attr(n).shadow(a.shadow);g[A]({x:q/
2,y:q/2,width:c-q-h%2,height:d-q-h%2,r:a.borderRadius});A="animate";e||(A="attr",this.plotBackground=e=b.rect().addClass("highcharts-plot-background").add());e[A](C);e.attr({fill:t||"none"}).shadow(a.plotShadow);k&&(f?f.animate(C):this.plotBGImage=b.image(k,r,D,l,m).add());y?y.animate({width:w.width,height:w.height}):this.clipRect=b.clipRect(w);A="animate";p||(A="attr",this.plotBorder=p=b.rect().addClass("highcharts-plot-border").attr({zIndex:1}).add());p.attr({stroke:a.plotBorderColor,"stroke-width":a.plotBorderWidth||
0,fill:"none"});p[A](p.crisp({x:r,y:D,width:l,height:m},-p.strokeWidth()));this.isDirtyBox=!1},propFromSeries:function(){var a=this,b=a.options.chart,c,d=a.options.series,g,e;n(["inverted","angular","polar"],function(p){c=I[b.type||b.defaultSeriesType];e=b[p]||c&&c.prototype[p];for(g=d&&d.length;!e&&g--;)(c=I[d[g].type])&&c.prototype[p]&&(e=!0);a[p]=e})},linkSeries:function(){var a=this,b=a.series;n(b,function(a){a.linkedSeries.length=0});n(b,function(b){var c=b.options.linkedTo;H(c)&&(c=":previous"===
c?a.series[b.index-1]:a.get(c))&&c.linkedParent!==b&&(c.linkedSeries.push(b),b.linkedParent=c,b.visible=p(b.options.visible,c.options.visible,b.visible))})},renderSeries:function(){n(this.series,function(a){a.translate();a.render()})},renderLabels:function(){var a=this,c=a.options.labels;c.items&&n(c.items,function(d){var g=b(c.style,d.style),e=C(g.left)+a.plotLeft,p=C(g.top)+a.plotTop+12;delete g.left;delete g.top;a.renderer.text(d.html,e,p).attr({zIndex:2}).css(g).add()})},render:function(){var a=
this.axes,b=this.renderer,c=this.options,d,g,e;this.setTitle();this.legend=new y(this,c.legend);this.getStacks&&this.getStacks();this.getMargins(!0);this.setChartSize();c=this.plotWidth;d=this.plotHeight-=21;n(a,function(a){a.setScale()});this.getAxisMargins();g=1.1<c/this.plotWidth;e=1.05<d/this.plotHeight;if(g||e)n(a,function(a){(a.horiz&&g||!a.horiz&&e)&&a.setTickInterval(!0)}),this.getMargins();this.drawChartBox();this.hasCartesianSeries&&n(a,function(a){a.visible&&a.render()});this.seriesGroup||
(this.seriesGroup=b.g("series-group").attr({zIndex:3}).add());this.renderSeries();this.renderLabels();this.addCredits();this.setResponsive&&this.setResponsive();this.hasRendered=!0},addCredits:function(a){var b=this;a=r(!0,this.options.credits,a);a.enabled&&!this.credits&&(this.credits=this.renderer.text(a.text+(this.mapCredits||""),0,0).addClass("highcharts-credits").on("click",function(){a.href&&(M.location.href=a.href)}).attr({align:a.position.align,zIndex:8}).css(a.style).add().align(a.position),
this.credits.update=function(a){b.credits=b.credits.destroy();b.addCredits(a)})},destroy:function(){var b=this,c=b.axes,d=b.series,g=b.container,e,p=g&&g.parentNode;q(b,"destroy");b.renderer.forExport?a.erase(v,b):v[b.index]=void 0;a.chartCount--;b.renderTo.removeAttribute("data-highcharts-chart");D(b);for(e=c.length;e--;)c[e]=c[e].destroy();this.scroller&&this.scroller.destroy&&this.scroller.destroy();for(e=d.length;e--;)d[e]=d[e].destroy();n("title subtitle chartBackground plotBackground plotBGImage plotBorder seriesGroup clipRect credits pointer rangeSelector legend resetZoomButton tooltip renderer".split(" "),
function(a){var c=b[a];c&&c.destroy&&(b[a]=c.destroy())});g&&(g.innerHTML="",D(g),p&&u(g));A(b,function(a,c){delete b[c]})},isReadyToRender:function(){var a=this;return t||M!=M.top||"complete"===l.readyState?!0:(l.attachEvent("onreadystatechange",function(){l.detachEvent("onreadystatechange",a.firstRender);"complete"===l.readyState&&a.firstRender()}),!1)},firstRender:function(){var a=this,b=a.options;if(a.isReadyToRender()){a.getContainer();q(a,"init");a.resetMargins();a.setChartSize();a.propFromSeries();
a.getAxes();n(b.series||[],function(b){a.initSeries(b)});a.linkSeries();q(a,"beforeRender");J&&(a.pointer=new J(a,b));a.render();if(!a.renderer.imgCount&&a.onload)a.onload();a.temporaryDisplay(!0)}},onload:function(){n([this.callback].concat(this.callbacks),function(a){a&&void 0!==this.index&&a.apply(this,[this])},this);q(this,"load");q(this,"render");e(this.index)&&!1!==this.options.chart.reflow&&this.initReflow();this.onload=null}})})(N);(function(a){var z,B=a.each,F=a.extend,E=a.erase,l=a.fireEvent,
f=a.format,k=a.isArray,m=a.isNumber,u=a.pick,v=a.removeEvent;a.Point=z=function(){};a.Point.prototype={init:function(a,e,f){this.series=a;this.color=a.color;this.applyOptions(e,f);a.options.colorByPoint?(e=a.options.colors||a.chart.options.colors,this.color=this.color||e[a.colorCounter],e=e.length,f=a.colorCounter,a.colorCounter++,a.colorCounter===e&&(a.colorCounter=0)):f=a.colorIndex;this.colorIndex=u(this.colorIndex,f);a.chart.pointCount++;return this},applyOptions:function(a,e){var d=this.series,
b=d.options.pointValKey||d.pointValKey;a=z.prototype.optionsToObject.call(this,a);F(this,a);this.options=this.options?F(this.options,a):a;a.group&&delete this.group;b&&(this.y=this[b]);this.isNull=u(this.isValid&&!this.isValid(),null===this.x||!m(this.y,!0));this.selected&&(this.state="select");"name"in this&&void 0===e&&d.xAxis&&d.xAxis.hasNames&&(this.x=d.xAxis.nameToX(this));void 0===this.x&&d&&(this.x=void 0===e?d.autoIncrement(this):e);return this},optionsToObject:function(a){var d={},f=this.series,
b=f.options.keys,c=b||f.pointArrayMap||["y"],q=c.length,h=0,l=0;if(m(a)||null===a)d[c[0]]=a;else if(k(a))for(!b&&a.length>q&&(f=typeof a[0],"string"===f?d.name=a[0]:"number"===f&&(d.x=a[0]),h++);l<q;)b&&void 0===a[h]||(d[c[l]]=a[h]),h++,l++;else"object"===typeof a&&(d=a,a.dataLabels&&(f._hasPointLabels=!0),a.marker&&(f._hasPointMarkers=!0));return d},getClassName:function(){return"highcharts-point"+(this.selected?" highcharts-point-select":"")+(this.negative?" highcharts-negative":"")+(this.isNull?
" highcharts-null-point":"")+(void 0!==this.colorIndex?" highcharts-color-"+this.colorIndex:"")+(this.options.className?" "+this.options.className:"")+(this.zone&&this.zone.className?" "+this.zone.className.replace("highcharts-negative",""):"")},getZone:function(){var a=this.series,e=a.zones,a=a.zoneAxis||"y",f=0,b;for(b=e[f];this[a]>=b.value;)b=e[++f];b&&b.color&&!this.options.color&&(this.color=b.color);return b},destroy:function(){var a=this.series.chart,e=a.hoverPoints,f;a.pointCount--;e&&(this.setState(),
E(e,this),e.length||(a.hoverPoints=null));if(this===a.hoverPoint)this.onMouseOut();if(this.graphic||this.dataLabel)v(this),this.destroyElements();this.legendItem&&a.legend.destroyItem(this);for(f in this)this[f]=null},destroyElements:function(){for(var a=["graphic","dataLabel","dataLabelUpper","connector","shadowGroup"],e,f=6;f--;)e=a[f],this[e]&&(this[e]=this[e].destroy())},getLabelConfig:function(){return{x:this.category,y:this.y,color:this.color,colorIndex:this.colorIndex,key:this.name||this.category,
series:this.series,point:this,percentage:this.percentage,total:this.total||this.stackTotal}},tooltipFormatter:function(a){var d=this.series,k=d.tooltipOptions,b=u(k.valueDecimals,""),c=k.valuePrefix||"",q=k.valueSuffix||"";B(d.pointArrayMap||["y"],function(d){d="{point."+d;if(c||q)a=a.replace(d+"}",c+d+"}"+q);a=a.replace(d+"}",d+":,."+b+"f}")});return f(a,{point:this,series:this.series})},firePointEvent:function(a,e,f){var b=this,c=this.series.options;(c.point.events[a]||b.options&&b.options.events&&
b.options.events[a])&&this.importEvents();"click"===a&&c.allowPointSelect&&(f=function(a){b.select&&b.select(null,a.ctrlKey||a.metaKey||a.shiftKey)});l(this,a,e,f)},visible:!0}})(N);(function(a){var z=a.addEvent,B=a.animObject,F=a.arrayMax,E=a.arrayMin,l=a.correctFloat,f=a.Date,k=a.defaultOptions,m=a.defaultPlotOptions,u=a.defined,v=a.each,d=a.erase,e=a.extend,n=a.fireEvent,b=a.grep,c=a.isArray,q=a.isNumber,h=a.isString,w=a.merge,G=a.objectEach,H=a.pick,y=a.removeEvent,K=a.splat,r=a.SVGElement,A=
a.syncTimeout,J=a.win;a.Series=a.seriesType("line",null,{lineWidth:2,allowPointSelect:!1,showCheckbox:!1,animation:{duration:1E3},events:{},marker:{lineWidth:0,lineColor:"#ffffff",radius:4,states:{hover:{animation:{duration:50},enabled:!0,radiusPlus:2,lineWidthPlus:1},select:{fillColor:"#cccccc",lineColor:"#000000",lineWidth:2}}},point:{events:{}},dataLabels:{align:"center",formatter:function(){return null===this.y?"":a.numberFormat(this.y,-1)},style:{fontSize:"11px",fontWeight:"bold",color:"contrast",
textOutline:"1px contrast"},verticalAlign:"bottom",x:0,y:0,padding:5},cropThreshold:300,pointRange:0,softThreshold:!0,states:{hover:{animation:{duration:50},lineWidthPlus:1,marker:{},halo:{size:10,opacity:.25}},select:{marker:{}}},stickyTracking:!0,turboThreshold:1E3,findNearestPointBy:"x"},{isCartesian:!0,pointClass:a.Point,sorted:!0,requireSorting:!0,directTouch:!1,axisTypes:["xAxis","yAxis"],colorCounter:0,parallelArrays:["x","y"],coll:"series",init:function(a,b){var c=this,d,g=a.series,p;c.chart=
a;c.options=b=c.setOptions(b);c.linkedSeries=[];c.bindAxes();e(c,{name:b.name,state:"",visible:!1!==b.visible,selected:!0===b.selected});d=b.events;G(d,function(a,b){z(c,b,a)});if(d&&d.click||b.point&&b.point.events&&b.point.events.click||b.allowPointSelect)a.runTrackerClick=!0;c.getColor();c.getSymbol();v(c.parallelArrays,function(a){c[a+"Data"]=[]});c.setData(b.data,!1);c.isCartesian&&(a.hasCartesianSeries=!0);g.length&&(p=g[g.length-1]);c._i=H(p&&p._i,-1)+1;a.orderSeries(this.insert(g))},insert:function(a){var b=
this.options.index,c;if(q(b)){for(c=a.length;c--;)if(b>=H(a[c].options.index,a[c]._i)){a.splice(c+1,0,this);break}-1===c&&a.unshift(this);c+=1}else a.push(this);return H(c,a.length-1)},bindAxes:function(){var b=this,c=b.options,d=b.chart,e;v(b.axisTypes||[],function(g){v(d[g],function(a){e=a.options;if(c[g]===e.index||void 0!==c[g]&&c[g]===e.id||void 0===c[g]&&0===e.index)b.insert(a.series),b[g]=a,a.isDirty=!0});b[g]||b.optionalAxis===g||a.error(18,!0)})},updateParallelArrays:function(a,b){var c=
a.series,d=arguments,g=q(b)?function(d){var g="y"===d&&c.toYData?c.toYData(a):a[d];c[d+"Data"][b]=g}:function(a){Array.prototype[b].apply(c[a+"Data"],Array.prototype.slice.call(d,2))};v(c.parallelArrays,g)},autoIncrement:function(){var a=this.options,b=this.xIncrement,c,d=a.pointIntervalUnit,b=H(b,a.pointStart,0);this.pointInterval=c=H(this.pointInterval,a.pointInterval,1);d&&(a=new f(b),"day"===d?a=+a[f.hcSetDate](a[f.hcGetDate]()+c):"month"===d?a=+a[f.hcSetMonth](a[f.hcGetMonth]()+c):"year"===d&&
(a=+a[f.hcSetFullYear](a[f.hcGetFullYear]()+c)),c=a-b);this.xIncrement=b+c;return b},setOptions:function(a){var b=this.chart,c=b.options,d=c.plotOptions,g=(b.userOptions||{}).plotOptions||{},e=d[this.type];this.userOptions=a;b=w(e,d.series,a);this.tooltipOptions=w(k.tooltip,k.plotOptions.series&&k.plotOptions.series.tooltip,k.plotOptions[this.type].tooltip,c.tooltip.userOptions,d.series&&d.series.tooltip,d[this.type].tooltip,a.tooltip);this.stickyTracking=H(a.stickyTracking,g[this.type]&&g[this.type].stickyTracking,
g.series&&g.series.stickyTracking,this.tooltipOptions.shared&&!this.noSharedTooltip?!0:b.stickyTracking);null===e.marker&&delete b.marker;this.zoneAxis=b.zoneAxis;a=this.zones=(b.zones||[]).slice();!b.negativeColor&&!b.negativeFillColor||b.zones||a.push({value:b[this.zoneAxis+"Threshold"]||b.threshold||0,className:"highcharts-negative",color:b.negativeColor,fillColor:b.negativeFillColor});a.length&&u(a[a.length-1].value)&&a.push({color:this.color,fillColor:this.fillColor});return b},getCyclic:function(a,
b,c){var d,g=this.chart,e=this.userOptions,f=a+"Index",h=a+"Counter",p=c?c.length:H(g.options.chart[a+"Count"],g[a+"Count"]);b||(d=H(e[f],e["_"+f]),u(d)||(g.series.length||(g[h]=0),e["_"+f]=d=g[h]%p,g[h]+=1),c&&(b=c[d]));void 0!==d&&(this[f]=d);this[a]=b},getColor:function(){this.options.colorByPoint?this.options.color=null:this.getCyclic("color",this.options.color||m[this.type].color,this.chart.options.colors)},getSymbol:function(){this.getCyclic("symbol",this.options.marker.symbol,this.chart.options.symbols)},
drawLegendSymbol:a.LegendSymbolMixin.drawLineMarker,setData:function(b,d,e,f){var g=this,p=g.points,k=p&&p.length||0,n,r=g.options,l=g.chart,m=null,y=g.xAxis,w=r.turboThreshold,C=this.xData,D=this.yData,A=(n=g.pointArrayMap)&&n.length;b=b||[];n=b.length;d=H(d,!0);if(!1!==f&&n&&k===n&&!g.cropped&&!g.hasGroupedData&&g.visible)v(b,function(a,b){p[b].update&&a!==r.data[b]&&p[b].update(a,!1,null,!1)});else{g.xIncrement=null;g.colorCounter=0;v(this.parallelArrays,function(a){g[a+"Data"].length=0});if(w&&
n>w){for(e=0;null===m&&e<n;)m=b[e],e++;if(q(m))for(e=0;e<n;e++)C[e]=this.autoIncrement(),D[e]=b[e];else if(c(m))if(A)for(e=0;e<n;e++)m=b[e],C[e]=m[0],D[e]=m.slice(1,A+1);else for(e=0;e<n;e++)m=b[e],C[e]=m[0],D[e]=m[1];else a.error(12)}else for(e=0;e<n;e++)void 0!==b[e]&&(m={series:g},g.pointClass.prototype.applyOptions.apply(m,[b[e]]),g.updateParallelArrays(m,e));D&&h(D[0])&&a.error(14,!0);g.data=[];g.options.data=g.userOptions.data=b;for(e=k;e--;)p[e]&&p[e].destroy&&p[e].destroy();y&&(y.minRange=
y.userMinRange);g.isDirty=l.isDirtyBox=!0;g.isDirtyData=!!p;e=!1}"point"===r.legendType&&(this.processData(),this.generatePoints());d&&l.redraw(e)},processData:function(b){var c=this.xData,d=this.yData,e=c.length,g;g=0;var f,h,p=this.xAxis,k,n=this.options;k=n.cropThreshold;var q=this.getExtremesFromAll||n.getExtremesFromAll,r=this.isCartesian,n=p&&p.val2lin,m=p&&p.isLog,l,y;if(r&&!this.isDirty&&!p.isDirty&&!this.yAxis.isDirty&&!b)return!1;p&&(b=p.getExtremes(),l=b.min,y=b.max);if(r&&this.sorted&&
!q&&(!k||e>k||this.forceCrop))if(c[e-1]<l||c[0]>y)c=[],d=[];else if(c[0]<l||c[e-1]>y)g=this.cropData(this.xData,this.yData,l,y),c=g.xData,d=g.yData,g=g.start,f=!0;for(k=c.length||1;--k;)e=m?n(c[k])-n(c[k-1]):c[k]-c[k-1],0<e&&(void 0===h||e<h)?h=e:0>e&&this.requireSorting&&a.error(15);this.cropped=f;this.cropStart=g;this.processedXData=c;this.processedYData=d;this.closestPointRange=h},cropData:function(a,b,c,d){var g=a.length,e=0,f=g,h=H(this.cropShoulder,1),p;for(p=0;p<g;p++)if(a[p]>=c){e=Math.max(0,
p-h);break}for(c=p;c<g;c++)if(a[c]>d){f=c+h;break}return{xData:a.slice(e,f),yData:b.slice(e,f),start:e,end:f}},generatePoints:function(){var a=this.options,b=a.data,c=this.data,d,g=this.processedXData,e=this.processedYData,f=this.pointClass,h=g.length,k=this.cropStart||0,n,q=this.hasGroupedData,a=a.keys,r,l=[],m;c||q||(c=[],c.length=b.length,c=this.data=c);a&&q&&(this.options.keys=!1);for(m=0;m<h;m++)n=k+m,q?(r=(new f).init(this,[g[m]].concat(K(e[m]))),r.dataGroup=this.groupMap[m]):(r=c[n])||void 0===
b[n]||(c[n]=r=(new f).init(this,b[n],g[m])),r&&(r.index=n,l[m]=r);this.options.keys=a;if(c&&(h!==(d=c.length)||q))for(m=0;m<d;m++)m!==k||q||(m+=h),c[m]&&(c[m].destroyElements(),c[m].plotX=void 0);this.data=c;this.points=l},getExtremes:function(a){var b=this.yAxis,d=this.processedXData,e,g=[],f=0;e=this.xAxis.getExtremes();var h=e.min,p=e.max,k,n,r,m;a=a||this.stackedYData||this.processedYData||[];e=a.length;for(m=0;m<e;m++)if(n=d[m],r=a[m],k=(q(r,!0)||c(r))&&(!b.positiveValuesOnly||r.length||0<r),
n=this.getExtremesFromAll||this.options.getExtremesFromAll||this.cropped||(d[m+1]||n)>=h&&(d[m-1]||n)<=p,k&&n)if(k=r.length)for(;k--;)null!==r[k]&&(g[f++]=r[k]);else g[f++]=r;this.dataMin=E(g);this.dataMax=F(g)},translate:function(){this.processedXData||this.processData();this.generatePoints();var a=this.options,b=a.stacking,c=this.xAxis,d=c.categories,g=this.yAxis,e=this.points,f=e.length,h=!!this.modifyValue,k=a.pointPlacement,n="between"===k||q(k),r=a.threshold,m=a.startFromThreshold?r:0,y,w,A,
v,K=Number.MAX_VALUE;"between"===k&&(k=.5);q(k)&&(k*=H(a.pointRange||c.pointRange));for(a=0;a<f;a++){var J=e[a],G=J.x,z=J.y;w=J.low;var B=b&&g.stacks[(this.negStacks&&z<(m?0:r)?"-":"")+this.stackKey],E;g.positiveValuesOnly&&null!==z&&0>=z&&(J.isNull=!0);J.plotX=y=l(Math.min(Math.max(-1E5,c.translate(G,0,0,0,1,k,"flags"===this.type)),1E5));b&&this.visible&&!J.isNull&&B&&B[G]&&(v=this.getStackIndicator(v,G,this.index),E=B[G],z=E.points[v.key],w=z[0],z=z[1],w===m&&v.key===B[G].base&&(w=H(r,g.min)),g.positiveValuesOnly&&
0>=w&&(w=null),J.total=J.stackTotal=E.total,J.percentage=E.total&&J.y/E.total*100,J.stackY=z,E.setOffset(this.pointXOffset||0,this.barW||0));J.yBottom=u(w)?g.translate(w,0,1,0,1):null;h&&(z=this.modifyValue(z,J));J.plotY=w="number"===typeof z&&Infinity!==z?Math.min(Math.max(-1E5,g.translate(z,0,1,0,1)),1E5):void 0;J.isInside=void 0!==w&&0<=w&&w<=g.len&&0<=y&&y<=c.len;J.clientX=n?l(c.translate(G,0,0,0,1,k)):y;J.negative=J.y<(r||0);J.category=d&&void 0!==d[J.x]?d[J.x]:J.x;J.isNull||(void 0!==A&&(K=
Math.min(K,Math.abs(y-A))),A=y);J.zone=this.zones.length&&J.getZone()}this.closestPointRangePx=K},getValidPoints:function(a,c){var d=this.chart;return b(a||this.points||[],function(a){return c&&!d.isInsidePlot(a.plotX,a.plotY,d.inverted)?!1:!a.isNull})},setClip:function(a){var b=this.chart,c=this.options,d=b.renderer,g=b.inverted,e=this.clipBox,f=e||b.clipBox,h=this.sharedClipKey||["_sharedClip",a&&a.duration,a&&a.easing,f.height,c.xAxis,c.yAxis].join(),p=b[h],k=b[h+"m"];p||(a&&(f.width=0,g&&(f.x=
b.plotSizeX),b[h+"m"]=k=d.clipRect(g?b.plotSizeX+99:-99,g?-b.plotLeft:-b.plotTop,99,g?b.chartWidth:b.chartHeight)),b[h]=p=d.clipRect(f),p.count={length:0});a&&!p.count[this.index]&&(p.count[this.index]=!0,p.count.length+=1);!1!==c.clip&&(this.group.clip(a||e?p:b.clipRect),this.markerGroup.clip(k),this.sharedClipKey=h);a||(p.count[this.index]&&(delete p.count[this.index],--p.count.length),0===p.count.length&&h&&b[h]&&(e||(b[h]=b[h].destroy()),b[h+"m"]&&(b[h+"m"]=b[h+"m"].destroy())))},animate:function(a){var b=
this.chart,c=B(this.options.animation),d;a?this.setClip(c):(d=this.sharedClipKey,(a=b[d])&&a.animate({width:b.plotSizeX,x:0},c),b[d+"m"]&&b[d+"m"].animate({width:b.plotSizeX+99,x:0},c),this.animate=null)},afterAnimate:function(){this.setClip();n(this,"afterAnimate");this.finishedAnimating=!0},drawPoints:function(){var a=this.points,b=this.chart,c,d,g,e,f=this.options.marker,h,k,n,r,m=this[this.specialGroup]||this.markerGroup,l=H(f.enabled,this.xAxis.isRadial?!0:null,this.closestPointRangePx>=2*f.radius);
if(!1!==f.enabled||this._hasPointMarkers)for(d=0;d<a.length;d++)g=a[d],c=g.plotY,e=g.graphic,h=g.marker||{},k=!!g.marker,n=l&&void 0===h.enabled||h.enabled,r=g.isInside,n&&q(c)&&null!==g.y?(c=H(h.symbol,this.symbol),g.hasImage=0===c.indexOf("url"),n=this.markerAttribs(g,g.selected&&"select"),e?e[r?"show":"hide"](!0).animate(n):r&&(0<n.width||g.hasImage)&&(g.graphic=e=b.renderer.symbol(c,n.x,n.y,n.width,n.height,k?h:f).add(m)),e&&e.attr(this.pointAttribs(g,g.selected&&"select")),e&&e.addClass(g.getClassName(),
!0)):e&&(g.graphic=e.destroy())},markerAttribs:function(a,b){var c=this.options.marker,d=a.marker||{},g=H(d.radius,c.radius);b&&(c=c.states[b],b=d.states&&d.states[b],g=H(b&&b.radius,c&&c.radius,g+(c&&c.radiusPlus||0)));a.hasImage&&(g=0);a={x:Math.floor(a.plotX)-g,y:a.plotY-g};g&&(a.width=a.height=2*g);return a},pointAttribs:function(a,b){var c=this.options.marker,d=a&&a.options,g=d&&d.marker||{},e=this.color,f=d&&d.color,h=a&&a.color,d=H(g.lineWidth,c.lineWidth);a=a&&a.zone&&a.zone.color;e=f||a||
h||e;a=g.fillColor||c.fillColor||e;e=g.lineColor||c.lineColor||e;b&&(c=c.states[b],b=g.states&&g.states[b]||{},d=H(b.lineWidth,c.lineWidth,d+H(b.lineWidthPlus,c.lineWidthPlus,0)),a=b.fillColor||c.fillColor||a,e=b.lineColor||c.lineColor||e);return{stroke:e,"stroke-width":d,fill:a}},destroy:function(){var a=this,b=a.chart,c=/AppleWebKit\/533/.test(J.navigator.userAgent),e,g,f=a.data||[],h,k;n(a,"destroy");y(a);v(a.axisTypes||[],function(b){(k=a[b])&&k.series&&(d(k.series,a),k.isDirty=k.forceRedraw=
!0)});a.legendItem&&a.chart.legend.destroyItem(a);for(g=f.length;g--;)(h=f[g])&&h.destroy&&h.destroy();a.points=null;clearTimeout(a.animationTimeout);G(a,function(a,b){a instanceof r&&!a.survive&&(e=c&&"group"===b?"hide":"destroy",a[e]())});b.hoverSeries===a&&(b.hoverSeries=null);d(b.series,a);b.orderSeries();G(a,function(b,c){delete a[c]})},getGraphPath:function(a,b,c){var d=this,g=d.options,e=g.step,h,f=[],p=[],k;a=a||d.points;(h=a.reversed)&&a.reverse();(e={right:1,center:2}[e]||e&&3)&&h&&(e=4-
e);!g.connectNulls||b||c||(a=this.getValidPoints(a));v(a,function(h,n){var q=h.plotX,r=h.plotY,m=a[n-1];(h.leftCliff||m&&m.rightCliff)&&!c&&(k=!0);h.isNull&&!u(b)&&0<n?k=!g.connectNulls:h.isNull&&!b?k=!0:(0===n||k?n=["M",h.plotX,h.plotY]:d.getPointSpline?n=d.getPointSpline(a,h,n):e?(n=1===e?["L",m.plotX,r]:2===e?["L",(m.plotX+q)/2,m.plotY,"L",(m.plotX+q)/2,r]:["L",q,m.plotY],n.push("L",q,r)):n=["L",q,r],p.push(h.x),e&&p.push(h.x),f.push.apply(f,n),k=!1)});f.xMap=p;return d.graphPath=f},drawGraph:function(){var a=
this,b=this.options,c=(this.gappedPath||this.getGraphPath).call(this),d=[["graph","highcharts-graph",b.lineColor||this.color,b.dashStyle]];v(this.zones,function(c,e){d.push(["zone-graph-"+e,"highcharts-graph highcharts-zone-graph-"+e+" "+(c.className||""),c.color||a.color,c.dashStyle||b.dashStyle])});v(d,function(d,e){var g=d[0],h=a[g];h?(h.endX=c.xMap,h.animate({d:c})):c.length&&(a[g]=a.chart.renderer.path(c).addClass(d[1]).attr({zIndex:1}).add(a.group),h={stroke:d[2],"stroke-width":b.lineWidth,
fill:a.fillGraph&&a.color||"none"},d[3]?h.dashstyle=d[3]:"square"!==b.linecap&&(h["stroke-linecap"]=h["stroke-linejoin"]="round"),h=a[g].attr(h).shadow(2>e&&b.shadow));h&&(h.startX=c.xMap,h.isArea=c.isArea)})},applyZones:function(){var a=this,b=this.chart,c=b.renderer,d=this.zones,e,h,f=this.clips||[],k,n=this.graph,q=this.area,r=Math.max(b.chartWidth,b.chartHeight),m=this[(this.zoneAxis||"y")+"Axis"],l,y,w=b.inverted,A,u,K,J,G=!1;d.length&&(n||q)&&m&&void 0!==m.min&&(y=m.reversed,A=m.horiz,n&&n.hide(),
q&&q.hide(),l=m.getExtremes(),v(d,function(d,g){e=y?A?b.plotWidth:0:A?0:m.toPixels(l.min);e=Math.min(Math.max(H(h,e),0),r);h=Math.min(Math.max(Math.round(m.toPixels(H(d.value,l.max),!0)),0),r);G&&(e=h=m.toPixels(l.max));u=Math.abs(e-h);K=Math.min(e,h);J=Math.max(e,h);m.isXAxis?(k={x:w?J:K,y:0,width:u,height:r},A||(k.x=b.plotHeight-k.x)):(k={x:0,y:w?J:K,width:r,height:u},A&&(k.y=b.plotWidth-k.y));w&&c.isVML&&(k=m.isXAxis?{x:0,y:y?K:J,height:k.width,width:b.chartWidth}:{x:k.y-b.plotLeft-b.spacingBox.x,
y:0,width:k.height,height:b.chartHeight});f[g]?f[g].animate(k):(f[g]=c.clipRect(k),n&&a["zone-graph-"+g].clip(f[g]),q&&a["zone-area-"+g].clip(f[g]));G=d.value>l.max}),this.clips=f)},invertGroups:function(a){function b(){v(["group","markerGroup"],function(b){c[b]&&(d.renderer.isVML&&c[b].attr({width:c.yAxis.len,height:c.xAxis.len}),c[b].width=c.yAxis.len,c[b].height=c.xAxis.len,c[b].invert(a))})}var c=this,d=c.chart,e;c.xAxis&&(e=z(d,"resize",b),z(c,"destroy",e),b(a),c.invertGroups=b)},plotGroup:function(a,
b,c,d,e){var g=this[a],h=!g;h&&(this[a]=g=this.chart.renderer.g().attr({zIndex:d||.1}).add(e));g.addClass("highcharts-"+b+" highcharts-series-"+this.index+" highcharts-"+this.type+"-series "+(u(this.colorIndex)?"highcharts-color-"+this.colorIndex+" ":"")+(this.options.className||"")+(g.hasClass("highcharts-tracker")?" highcharts-tracker":""),!0);g.attr({visibility:c})[h?"attr":"animate"](this.getPlotBox());return g},getPlotBox:function(){var a=this.chart,b=this.xAxis,c=this.yAxis;a.inverted&&(b=c,
c=this.xAxis);return{translateX:b?b.left:a.plotLeft,translateY:c?c.top:a.plotTop,scaleX:1,scaleY:1}},render:function(){var a=this,b=a.chart,c,d=a.options,e=!!a.animate&&b.renderer.isSVG&&B(d.animation).duration,h=a.visible?"inherit":"hidden",f=d.zIndex,k=a.hasRendered,n=b.seriesGroup,q=b.inverted;c=a.plotGroup("group","series",h,f,n);a.markerGroup=a.plotGroup("markerGroup","markers",h,f,n);e&&a.animate(!0);c.inverted=a.isCartesian?q:!1;a.drawGraph&&(a.drawGraph(),a.applyZones());a.drawDataLabels&&
a.drawDataLabels();a.visible&&a.drawPoints();a.drawTracker&&!1!==a.options.enableMouseTracking&&a.drawTracker();a.invertGroups(q);!1===d.clip||a.sharedClipKey||k||c.clip(b.clipRect);e&&a.animate();k||(a.animationTimeout=A(function(){a.afterAnimate()},e));a.isDirty=!1;a.hasRendered=!0},redraw:function(){var a=this.chart,b=this.isDirty||this.isDirtyData,c=this.group,d=this.xAxis,e=this.yAxis;c&&(a.inverted&&c.attr({width:a.plotWidth,height:a.plotHeight}),c.animate({translateX:H(d&&d.left,a.plotLeft),
translateY:H(e&&e.top,a.plotTop)}));this.translate();this.render();b&&delete this.kdTree},kdAxisArray:["clientX","plotY"],searchPoint:function(a,b){var c=this.xAxis,d=this.yAxis,e=this.chart.inverted;return this.searchKDTree({clientX:e?c.len-a.chartY+c.pos:a.chartX-c.pos,plotY:e?d.len-a.chartX+d.pos:a.chartY-d.pos},b)},buildKDTree:function(){function a(c,d,e){var g,h;if(h=c&&c.length)return g=b.kdAxisArray[d%e],c.sort(function(a,b){return a[g]-b[g]}),h=Math.floor(h/2),{point:c[h],left:a(c.slice(0,
h),d+1,e),right:a(c.slice(h+1),d+1,e)}}this.buildingKdTree=!0;var b=this,c=-1<b.options.findNearestPointBy.indexOf("y")?2:1;delete b.kdTree;A(function(){b.kdTree=a(b.getValidPoints(null,!b.directTouch),c,c);b.buildingKdTree=!1},b.options.kdNow?0:1)},searchKDTree:function(a,b){function c(a,b,g,k){var p=b.point,n=d.kdAxisArray[g%k],q,r,m=p;r=u(a[e])&&u(p[e])?Math.pow(a[e]-p[e],2):null;q=u(a[h])&&u(p[h])?Math.pow(a[h]-p[h],2):null;q=(r||0)+(q||0);p.dist=u(q)?Math.sqrt(q):Number.MAX_VALUE;p.distX=u(r)?
Math.sqrt(r):Number.MAX_VALUE;n=a[n]-p[n];q=0>n?"left":"right";r=0>n?"right":"left";b[q]&&(q=c(a,b[q],g+1,k),m=q[f]<m[f]?q:p);b[r]&&Math.sqrt(n*n)<m[f]&&(a=c(a,b[r],g+1,k),m=a[f]<m[f]?a:m);return m}var d=this,e=this.kdAxisArray[0],h=this.kdAxisArray[1],f=b?"distX":"dist";b=-1<d.options.findNearestPointBy.indexOf("y")?2:1;this.kdTree||this.buildingKdTree||this.buildKDTree();if(this.kdTree)return c(a,this.kdTree,b,b)}})})(N);(function(a){var z=a.Axis,B=a.Chart,F=a.correctFloat,E=a.defined,l=a.destroyObjectProperties,
f=a.each,k=a.format,m=a.objectEach,u=a.pick,v=a.Series;a.StackItem=function(a,e,f,b,c){var d=a.chart.inverted;this.axis=a;this.isNegative=f;this.options=e;this.x=b;this.total=null;this.points={};this.stack=c;this.rightCliff=this.leftCliff=0;this.alignOptions={align:e.align||(d?f?"left":"right":"center"),verticalAlign:e.verticalAlign||(d?"middle":f?"bottom":"top"),y:u(e.y,d?4:f?14:-6),x:u(e.x,d?f?-6:6:0)};this.textAlign=e.textAlign||(d?f?"right":"left":"center")};a.StackItem.prototype={destroy:function(){l(this,
this.axis)},render:function(a){var d=this.options,f=d.format,f=f?k(f,this):d.formatter.call(this);this.label?this.label.attr({text:f,visibility:"hidden"}):this.label=this.axis.chart.renderer.text(f,null,null,d.useHTML).css(d.style).attr({align:this.textAlign,rotation:d.rotation,visibility:"hidden"}).add(a)},setOffset:function(a,e){var d=this.axis,b=d.chart,c=d.translate(d.usePercentage?100:this.total,0,0,0,1),d=d.translate(0),d=Math.abs(c-d);a=b.xAxis[0].translate(this.x)+a;c=this.getStackBox(b,this,
a,c,e,d);if(e=this.label)e.align(this.alignOptions,null,c),c=e.alignAttr,e[!1===this.options.crop||b.isInsidePlot(c.x,c.y)?"show":"hide"](!0)},getStackBox:function(a,e,f,b,c,k){var d=e.axis.reversed,n=a.inverted;a=a.plotHeight;e=e.isNegative&&!d||!e.isNegative&&d;return{x:n?e?b:b-k:f,y:n?a-f-c:e?a-b-k:a-b,width:n?k:c,height:n?c:k}}};B.prototype.getStacks=function(){var a=this;f(a.yAxis,function(a){a.stacks&&a.hasVisibleSeries&&(a.oldStacks=a.stacks)});f(a.series,function(d){!d.options.stacking||!0!==
d.visible&&!1!==a.options.chart.ignoreHiddenSeries||(d.stackKey=d.type+u(d.options.stack,""))})};z.prototype.buildStacks=function(){var a=this.series,e=u(this.options.reversedStacks,!0),f=a.length,b;if(!this.isXAxis){this.usePercentage=!1;for(b=f;b--;)a[e?b:f-b-1].setStackedPoints();for(b=0;b<f;b++)a[b].modifyStacks()}};z.prototype.renderStackTotals=function(){var a=this.chart,e=a.renderer,f=this.stacks,b=this.stackTotalGroup;b||(this.stackTotalGroup=b=e.g("stack-labels").attr({visibility:"visible",
zIndex:6}).add());b.translate(a.plotLeft,a.plotTop);m(f,function(a){m(a,function(a){a.render(b)})})};z.prototype.resetStacks=function(){var a=this,e=a.stacks;a.isXAxis||m(e,function(d){m(d,function(b,c){b.touched<a.stacksTouched?(b.destroy(),delete d[c]):(b.total=null,b.cum=null)})})};z.prototype.cleanStacks=function(){var a;this.isXAxis||(this.oldStacks&&(a=this.stacks=this.oldStacks),m(a,function(a){m(a,function(a){a.cum=a.total})}))};v.prototype.setStackedPoints=function(){if(this.options.stacking&&
(!0===this.visible||!1===this.chart.options.chart.ignoreHiddenSeries)){var d=this.processedXData,e=this.processedYData,f=[],b=e.length,c=this.options,k=c.threshold,h=c.startFromThreshold?k:0,m=c.stack,c=c.stacking,l=this.stackKey,v="-"+l,y=this.negStacks,K=this.yAxis,r=K.stacks,A=K.oldStacks,J,p,C,D,I,g,t;K.stacksTouched+=1;for(I=0;I<b;I++)g=d[I],t=e[I],J=this.getStackIndicator(J,g,this.index),D=J.key,C=(p=y&&t<(h?0:k))?v:l,r[C]||(r[C]={}),r[C][g]||(A[C]&&A[C][g]?(r[C][g]=A[C][g],r[C][g].total=null):
r[C][g]=new a.StackItem(K,K.options.stackLabels,p,g,m)),C=r[C][g],null!==t&&(C.points[D]=C.points[this.index]=[u(C.cum,h)],E(C.cum)||(C.base=D),C.touched=K.stacksTouched,0<J.index&&!1===this.singleStacks&&(C.points[D][0]=C.points[this.index+","+g+",0"][0])),"percent"===c?(p=p?l:v,y&&r[p]&&r[p][g]?(p=r[p][g],C.total=p.total=Math.max(p.total,C.total)+Math.abs(t)||0):C.total=F(C.total+(Math.abs(t)||0))):C.total=F(C.total+(t||0)),C.cum=u(C.cum,h)+(t||0),null!==t&&(C.points[D].push(C.cum),f[I]=C.cum);
"percent"===c&&(K.usePercentage=!0);this.stackedYData=f;K.oldStacks={}}};v.prototype.modifyStacks=function(){var a=this,e=a.stackKey,k=a.yAxis.stacks,b=a.processedXData,c,q=a.options.stacking;a[q+"Stacker"]&&f([e,"-"+e],function(d){for(var e=b.length,f,h;e--;)if(f=b[e],c=a.getStackIndicator(c,f,a.index,d),h=(f=k[d]&&k[d][f])&&f.points[c.key])a[q+"Stacker"](h,f,e)})};v.prototype.percentStacker=function(a,e,f){e=e.total?100/e.total:0;a[0]=F(a[0]*e);a[1]=F(a[1]*e);this.stackedYData[f]=a[1]};v.prototype.getStackIndicator=
function(a,e,f,b){!E(a)||a.x!==e||b&&a.key!==b?a={x:e,index:0,key:b}:a.index++;a.key=[f,e,a.index].join();return a}})(N);(function(a){var z=a.addEvent,B=a.animate,F=a.Axis,E=a.createElement,l=a.css,f=a.defined,k=a.each,m=a.erase,u=a.extend,v=a.fireEvent,d=a.inArray,e=a.isNumber,n=a.isObject,b=a.isArray,c=a.merge,q=a.objectEach,h=a.pick,w=a.Point,G=a.Series,H=a.seriesTypes,y=a.setAnimation,K=a.splat;u(a.Chart.prototype,{addSeries:function(a,b,c){var d,e=this;a&&(b=h(b,!0),v(e,"addSeries",{options:a},
function(){d=e.initSeries(a);e.isDirtyLegend=!0;e.linkSeries();b&&e.redraw(c)}));return d},addAxis:function(a,b,d,e){var f=b?"xAxis":"yAxis",k=this.options;a=c(a,{index:this[f].length,isX:b});b=new F(this,a);k[f]=K(k[f]||{});k[f].push(a);h(d,!0)&&this.redraw(e);return b},showLoading:function(a){var b=this,c=b.options,d=b.loadingDiv,e=c.loading,f=function(){d&&l(d,{left:b.plotLeft+"px",top:b.plotTop+"px",width:b.plotWidth+"px",height:b.plotHeight+"px"})};d||(b.loadingDiv=d=E("div",{className:"highcharts-loading highcharts-loading-hidden"},
null,b.container),b.loadingSpan=E("span",{className:"highcharts-loading-inner"},null,d),z(b,"redraw",f));d.className="highcharts-loading";b.loadingSpan.innerHTML=a||c.lang.loading;l(d,u(e.style,{zIndex:10}));l(b.loadingSpan,e.labelStyle);b.loadingShown||(l(d,{opacity:0,display:""}),B(d,{opacity:e.style.opacity||.5},{duration:e.showDuration||0}));b.loadingShown=!0;f()},hideLoading:function(){var a=this.options,b=this.loadingDiv;b&&(b.className="highcharts-loading highcharts-loading-hidden",B(b,{opacity:0},
{duration:a.loading.hideDuration||100,complete:function(){l(b,{display:"none"})}}));this.loadingShown=!1},propsRequireDirtyBox:"backgroundColor borderColor borderWidth margin marginTop marginRight marginBottom marginLeft spacing spacingTop spacingRight spacingBottom spacingLeft borderRadius plotBackgroundColor plotBackgroundImage plotBorderColor plotBorderWidth plotShadow shadow".split(" "),propsRequireUpdateSeries:"chart.inverted chart.polar chart.ignoreHiddenSeries chart.type colors plotOptions tooltip".split(" "),
update:function(a,b,n){var p=this,m={credits:"addCredits",title:"setTitle",subtitle:"setSubtitle"},r=a.chart,l,g,y=[];if(r){c(!0,p.options.chart,r);"className"in r&&p.setClassName(r.className);if("inverted"in r||"polar"in r)p.propFromSeries(),l=!0;"alignTicks"in r&&(l=!0);q(r,function(a,b){-1!==d("chart."+b,p.propsRequireUpdateSeries)&&(g=!0);-1!==d(b,p.propsRequireDirtyBox)&&(p.isDirtyBox=!0)});"style"in r&&p.renderer.setStyle(r.style)}a.colors&&(this.options.colors=a.colors);a.plotOptions&&c(!0,
this.options.plotOptions,a.plotOptions);q(a,function(a,b){if(p[b]&&"function"===typeof p[b].update)p[b].update(a,!1);else if("function"===typeof p[m[b]])p[m[b]](a);"chart"!==b&&-1!==d(b,p.propsRequireUpdateSeries)&&(g=!0)});k("xAxis yAxis zAxis series colorAxis pane".split(" "),function(b){a[b]&&(k(K(a[b]),function(a,c){(c=f(a.id)&&p.get(a.id)||p[b][c])&&c.coll===b&&(c.update(a,!1),n&&(c.touched=!0));if(!c&&n)if("series"===b)p.addSeries(a,!1).touched=!0;else if("xAxis"===b||"yAxis"===b)p.addAxis(a,
"xAxis"===b,!1).touched=!0}),n&&k(p[b],function(a){a.touched?delete a.touched:y.push(a)}))});k(y,function(a){a.remove(!1)});l&&k(p.axes,function(a){a.update({},!1)});g&&k(p.series,function(a){a.update({},!1)});a.loading&&c(!0,p.options.loading,a.loading);l=r&&r.width;r=r&&r.height;e(l)&&l!==p.chartWidth||e(r)&&r!==p.chartHeight?p.setSize(l,r):h(b,!0)&&p.redraw()},setSubtitle:function(a){this.setTitle(void 0,a)}});u(w.prototype,{update:function(a,b,c,d){function e(){f.applyOptions(a);null===f.y&&g&&
(f.graphic=g.destroy());n(a,!0)&&(g&&g.element&&a&&a.marker&&void 0!==a.marker.symbol&&(f.graphic=g.destroy()),a&&a.dataLabels&&f.dataLabel&&(f.dataLabel=f.dataLabel.destroy()));p=f.index;k.updateParallelArrays(f,p);m.data[p]=n(m.data[p],!0)||n(a,!0)?f.options:a;k.isDirty=k.isDirtyData=!0;!k.fixedBox&&k.hasCartesianSeries&&(q.isDirtyBox=!0);"point"===m.legendType&&(q.isDirtyLegend=!0);b&&q.redraw(c)}var f=this,k=f.series,g=f.graphic,p,q=k.chart,m=k.options;b=h(b,!0);!1===d?e():f.firePointEvent("update",
{options:a},e)},remove:function(a,b){this.series.removePoint(d(this,this.series.data),a,b)}});u(G.prototype,{addPoint:function(a,b,c,d){var e=this.options,f=this.data,k=this.chart,g=this.xAxis,g=g&&g.hasNames&&g.names,p=e.data,n,q,m=this.xData,r,l;b=h(b,!0);n={series:this};this.pointClass.prototype.applyOptions.apply(n,[a]);l=n.x;r=m.length;if(this.requireSorting&&l<m[r-1])for(q=!0;r&&m[r-1]>l;)r--;this.updateParallelArrays(n,"splice",r,0,0);this.updateParallelArrays(n,r);g&&n.name&&(g[l]=n.name);
p.splice(r,0,a);q&&(this.data.splice(r,0,null),this.processData());"point"===e.legendType&&this.generatePoints();c&&(f[0]&&f[0].remove?f[0].remove(!1):(f.shift(),this.updateParallelArrays(n,"shift"),p.shift()));this.isDirtyData=this.isDirty=!0;b&&k.redraw(d)},removePoint:function(a,b,c){var d=this,e=d.data,f=e[a],k=d.points,g=d.chart,n=function(){k&&k.length===e.length&&k.splice(a,1);e.splice(a,1);d.options.data.splice(a,1);d.updateParallelArrays(f||{series:d},"splice",a,1);f&&f.destroy();d.isDirty=
!0;d.isDirtyData=!0;b&&g.redraw()};y(c,g);b=h(b,!0);f?f.firePointEvent("remove",null,n):n()},remove:function(a,b,c){function d(){e.destroy();f.isDirtyLegend=f.isDirtyBox=!0;f.linkSeries();h(a,!0)&&f.redraw(b)}var e=this,f=e.chart;!1!==c?v(e,"remove",null,d):d()},update:function(a,b){var d=this,e=d.chart,f=d.userOptions,n=d.oldType||d.type,q=a.type||f.type||e.options.chart.type,g=H[n].prototype,m,r=["group","markerGroup","dataLabelsGroup"],l=["navigatorSeries","baseSeries"],y=d.finishedAnimating&&
{animation:!1};if(Object.keys&&"data"===Object.keys(a).toString())return this.setData(a.data,b);if(q&&q!==n||void 0!==a.zIndex)r.length=0;d.options.isInternal&&(l.length=0);l=r.concat(l);k(l,function(a){l[a]=d[a];delete d[a]});a=c(f,y,{index:d.index,pointStart:d.xData[0]},{data:d.options.data},a);d.remove(!1,null,!1);for(m in g)d[m]=void 0;u(d,H[q||n].prototype);k(l,function(a){d[a]=l[a]});d.init(e,a);d.oldType=n;e.linkSeries();h(b,!0)&&e.redraw(!1)}});u(F.prototype,{update:function(a,b){var d=this.chart;
a=d.options[this.coll][this.options.index]=c(this.userOptions,a);this.destroy(!0);this.init(d,u(a,{events:void 0}));d.isDirtyBox=!0;h(b,!0)&&d.redraw()},remove:function(a){for(var c=this.chart,d=this.coll,e=this.series,f=e.length;f--;)e[f]&&e[f].remove(!1);m(c.axes,this);m(c[d],this);b(c.options[d])?c.options[d].splice(this.options.index,1):delete c.options[d];k(c[d],function(a,b){a.options.index=b});this.destroy();c.isDirtyBox=!0;h(a,!0)&&c.redraw()},setTitle:function(a,b){this.update({title:a},
b)},setCategories:function(a,b){this.update({categories:a},b)}})})(N);(function(a){var z=a.color,B=a.each,F=a.map,E=a.pick,l=a.Series,f=a.seriesType;f("area","line",{softThreshold:!1,threshold:0},{singleStacks:!1,getStackPoints:function(f){var k=[],l=[],v=this.xAxis,d=this.yAxis,e=d.stacks[this.stackKey],n={},b=this.index,c=d.series,q=c.length,h,w=E(d.options.reversedStacks,!0)?1:-1,G;f=f||this.points;if(this.options.stacking){for(G=0;G<f.length;G++)n[f[G].x]=f[G];a.objectEach(e,function(a,b){null!==
a.total&&l.push(b)});l.sort(function(a,b){return a-b});h=F(c,function(){return this.visible});B(l,function(a,c){var f=0,m,y;if(n[a]&&!n[a].isNull)k.push(n[a]),B([-1,1],function(d){var f=1===d?"rightNull":"leftNull",k=0,r=e[l[c+d]];if(r)for(G=b;0<=G&&G<q;)m=r.points[G],m||(G===b?n[a][f]=!0:h[G]&&(y=e[a].points[G])&&(k-=y[1]-y[0])),G+=w;n[a][1===d?"rightCliff":"leftCliff"]=k});else{for(G=b;0<=G&&G<q;){if(m=e[a].points[G]){f=m[1];break}G+=w}f=d.translate(f,0,1,0,1);k.push({isNull:!0,plotX:v.translate(a,
0,0,0,1),x:a,plotY:f,yBottom:f})}})}return k},getGraphPath:function(a){var f=l.prototype.getGraphPath,k=this.options,v=k.stacking,d=this.yAxis,e,n,b=[],c=[],q=this.index,h,w=d.stacks[this.stackKey],G=k.threshold,H=d.getThreshold(k.threshold),y,k=k.connectNulls||"percent"===v,K=function(e,f,k){var n=a[e];e=v&&w[n.x].points[q];var m=n[k+"Null"]||0;k=n[k+"Cliff"]||0;var l,r,n=!0;k||m?(l=(m?e[0]:e[1])+k,r=e[0]+k,n=!!m):!v&&a[f]&&a[f].isNull&&(l=r=G);void 0!==l&&(c.push({plotX:h,plotY:null===l?H:d.getThreshold(l),
isNull:n,isCliff:!0}),b.push({plotX:h,plotY:null===r?H:d.getThreshold(r),doCurve:!1}))};a=a||this.points;v&&(a=this.getStackPoints(a));for(e=0;e<a.length;e++)if(n=a[e].isNull,h=E(a[e].rectPlotX,a[e].plotX),y=E(a[e].yBottom,H),!n||k)k||K(e,e-1,"left"),n&&!v&&k||(c.push(a[e]),b.push({x:e,plotX:h,plotY:y})),k||K(e,e+1,"right");e=f.call(this,c,!0,!0);b.reversed=!0;n=f.call(this,b,!0,!0);n.length&&(n[0]="L");n=e.concat(n);f=f.call(this,c,!1,k);n.xMap=e.xMap;this.areaPath=n;return f},drawGraph:function(){this.areaPath=
[];l.prototype.drawGraph.apply(this);var a=this,f=this.areaPath,u=this.options,v=[["area","highcharts-area",this.color,u.fillColor]];B(this.zones,function(d,e){v.push(["zone-area-"+e,"highcharts-area highcharts-zone-area-"+e+" "+d.className,d.color||a.color,d.fillColor||u.fillColor])});B(v,function(d){var e=d[0],k=a[e];k?(k.endX=f.xMap,k.animate({d:f})):(k=a[e]=a.chart.renderer.path(f).addClass(d[1]).attr({fill:E(d[3],z(d[2]).setOpacity(E(u.fillOpacity,.75)).get()),zIndex:0}).add(a.group),k.isArea=
!0);k.startX=f.xMap;k.shiftUnit=u.step?2:1})},drawLegendSymbol:a.LegendSymbolMixin.drawRectangle})})(N);(function(a){var z=a.pick;a=a.seriesType;a("spline","line",{},{getPointSpline:function(a,F,E){var l=F.plotX,f=F.plotY,k=a[E-1];E=a[E+1];var m,u,v,d;if(k&&!k.isNull&&!1!==k.doCurve&&!F.isCliff&&E&&!E.isNull&&!1!==E.doCurve&&!F.isCliff){a=k.plotY;v=E.plotX;E=E.plotY;var e=0;m=(1.5*l+k.plotX)/2.5;u=(1.5*f+a)/2.5;v=(1.5*l+v)/2.5;d=(1.5*f+E)/2.5;v!==m&&(e=(d-u)*(v-l)/(v-m)+f-d);u+=e;d+=e;u>a&&u>f?(u=
Math.max(a,f),d=2*f-u):u<a&&u<f&&(u=Math.min(a,f),d=2*f-u);d>E&&d>f?(d=Math.max(E,f),u=2*f-d):d<E&&d<f&&(d=Math.min(E,f),u=2*f-d);F.rightContX=v;F.rightContY=d}F=["C",z(k.rightContX,k.plotX),z(k.rightContY,k.plotY),z(m,l),z(u,f),l,f];k.rightContX=k.rightContY=null;return F}})})(N);(function(a){var z=a.seriesTypes.area.prototype,B=a.seriesType;B("areaspline","spline",a.defaultPlotOptions.area,{getStackPoints:z.getStackPoints,getGraphPath:z.getGraphPath,drawGraph:z.drawGraph,drawLegendSymbol:a.LegendSymbolMixin.drawRectangle})})(N);
(function(a){var z=a.animObject,B=a.color,F=a.each,E=a.extend,l=a.isNumber,f=a.merge,k=a.pick,m=a.Series,u=a.seriesType,v=a.svg;u("column","line",{borderRadius:0,crisp:!0,groupPadding:.2,marker:null,pointPadding:.1,minPointLength:0,cropThreshold:50,pointRange:null,states:{hover:{halo:!1,brightness:.1,shadow:!1},select:{color:"#cccccc",borderColor:"#000000",shadow:!1}},dataLabels:{align:null,verticalAlign:null,y:null},softThreshold:!1,startFromThreshold:!0,stickyTracking:!1,tooltip:{distance:6},threshold:0,
borderColor:"#ffffff"},{cropShoulder:0,directTouch:!0,trackerGroups:["group","dataLabelsGroup"],negStacks:!0,init:function(){m.prototype.init.apply(this,arguments);var a=this,e=a.chart;e.hasRendered&&F(e.series,function(d){d.type===a.type&&(d.isDirty=!0)})},getColumnMetrics:function(){var a=this,e=a.options,f=a.xAxis,b=a.yAxis,c=f.reversed,q,h={},m=0;!1===e.grouping?m=1:F(a.chart.series,function(c){var d=c.options,e=c.yAxis,f;c.type!==a.type||!c.visible&&a.chart.options.chart.ignoreHiddenSeries||
b.len!==e.len||b.pos!==e.pos||(d.stacking?(q=c.stackKey,void 0===h[q]&&(h[q]=m++),f=h[q]):!1!==d.grouping&&(f=m++),c.columnIndex=f)});var l=Math.min(Math.abs(f.transA)*(f.ordinalSlope||e.pointRange||f.closestPointRange||f.tickInterval||1),f.len),u=l*e.groupPadding,y=(l-2*u)/(m||1),e=Math.min(e.maxPointWidth||f.len,k(e.pointWidth,y*(1-2*e.pointPadding)));a.columnMetrics={width:e,offset:(y-e)/2+(u+((a.columnIndex||0)+(c?1:0))*y-l/2)*(c?-1:1)};return a.columnMetrics},crispCol:function(a,e,f,b){var c=
this.chart,d=this.borderWidth,h=-(d%2?.5:0),d=d%2?.5:1;c.inverted&&c.renderer.isVML&&(d+=1);this.options.crisp&&(f=Math.round(a+f)+h,a=Math.round(a)+h,f-=a);b=Math.round(e+b)+d;h=.5>=Math.abs(e)&&.5<b;e=Math.round(e)+d;b-=e;h&&b&&(--e,b+=1);return{x:a,y:e,width:f,height:b}},translate:function(){var a=this,e=a.chart,f=a.options,b=a.dense=2>a.closestPointRange*a.xAxis.transA,b=a.borderWidth=k(f.borderWidth,b?0:1),c=a.yAxis,q=a.translatedThreshold=c.getThreshold(f.threshold),h=k(f.minPointLength,5),
l=a.getColumnMetrics(),u=l.width,v=a.barW=Math.max(u,1+2*b),y=a.pointXOffset=l.offset;e.inverted&&(q-=.5);f.pointPadding&&(v=Math.ceil(v));m.prototype.translate.apply(a);F(a.points,function(b){var d=k(b.yBottom,q),f=999+Math.abs(d),f=Math.min(Math.max(-f,b.plotY),c.len+f),n=b.plotX+y,p=v,m=Math.min(f,d),l,w=Math.max(f,d)-m;h&&Math.abs(w)<h&&(w=h,l=!c.reversed&&!b.negative||c.reversed&&b.negative,0===b.y&&0>=a.dataMax&&(l=!l),m=Math.abs(m-q)>h?d-h:q-(l?h:0));b.barX=n;b.pointWidth=u;b.tooltipPos=e.inverted?
[c.len+c.pos-e.plotLeft-f,a.xAxis.len-n-p/2,w]:[n+p/2,f+c.pos-e.plotTop,w];b.shapeType="rect";b.shapeArgs=a.crispCol.apply(a,b.isNull?[n,q,p,0]:[n,m,p,w])})},getSymbol:a.noop,drawLegendSymbol:a.LegendSymbolMixin.drawRectangle,drawGraph:function(){this.group[this.dense?"addClass":"removeClass"]("highcharts-dense-data")},pointAttribs:function(a,e){var d=this.options,b,c=this.pointAttrToOptions||{};b=c.stroke||"borderColor";var k=c["stroke-width"]||"borderWidth",h=a&&a.color||this.color,m=a&&a[b]||d[b]||
this.color||h,l=a&&a[k]||d[k]||this[k]||0,c=d.dashStyle;a&&this.zones.length&&(h=a.getZone(),h=a.options.color||h&&h.color||this.color);e&&(a=f(d.states[e],a.options.states&&a.options.states[e]||{}),e=a.brightness,h=a.color||void 0!==e&&B(h).brighten(a.brightness).get()||h,m=a[b]||m,l=a[k]||l,c=a.dashStyle||c);b={fill:h,stroke:m,"stroke-width":l};c&&(b.dashstyle=c);return b},drawPoints:function(){var a=this,e=this.chart,k=a.options,b=e.renderer,c=k.animationLimit||250,m;F(a.points,function(d){var h=
d.graphic;if(l(d.plotY)&&null!==d.y){m=d.shapeArgs;if(h)h[e.pointCount<c?"animate":"attr"](f(m));else d.graphic=h=b[d.shapeType](m).add(d.group||a.group);k.borderRadius&&h.attr({r:k.borderRadius});h.attr(a.pointAttribs(d,d.selected&&"select")).shadow(k.shadow,null,k.stacking&&!k.borderRadius);h.addClass(d.getClassName(),!0)}else h&&(d.graphic=h.destroy())})},animate:function(a){var d=this,f=this.yAxis,b=d.options,c=this.chart.inverted,k={};v&&(a?(k.scaleY=.001,a=Math.min(f.pos+f.len,Math.max(f.pos,
f.toPixels(b.threshold))),c?k.translateX=a-f.len:k.translateY=a,d.group.attr(k)):(k[c?"translateX":"translateY"]=f.pos,d.group.animate(k,E(z(d.options.animation),{step:function(a,b){d.group.attr({scaleY:Math.max(.001,b.pos)})}})),d.animate=null))},remove:function(){var a=this,e=a.chart;e.hasRendered&&F(e.series,function(d){d.type===a.type&&(d.isDirty=!0)});m.prototype.remove.apply(a,arguments)}})})(N);(function(a){a=a.seriesType;a("bar","column",null,{inverted:!0})})(N);(function(a){var z=a.Series;
a=a.seriesType;a("scatter","line",{lineWidth:0,findNearestPointBy:"xy",marker:{enabled:!0},tooltip:{headerFormat:'\x3cspan style\x3d"color:{point.color}"\x3e\u25cf\x3c/span\x3e \x3cspan style\x3d"font-size: 0.85em"\x3e {series.name}\x3c/span\x3e\x3cbr/\x3e',pointFormat:"x: \x3cb\x3e{point.x}\x3c/b\x3e\x3cbr/\x3ey: \x3cb\x3e{point.y}\x3c/b\x3e\x3cbr/\x3e"}},{sorted:!1,requireSorting:!1,noSharedTooltip:!0,trackerGroups:["group","markerGroup","dataLabelsGroup"],takeOrdinalPosition:!1,drawGraph:function(){this.options.lineWidth&&
z.prototype.drawGraph.call(this)}})})(N);(function(a){var z=a.deg2rad,B=a.isNumber,F=a.pick,E=a.relativeLength;a.CenteredSeriesMixin={getCenter:function(){var a=this.options,f=this.chart,k=2*(a.slicedOffset||0),m=f.plotWidth-2*k,f=f.plotHeight-2*k,u=a.center,u=[F(u[0],"50%"),F(u[1],"50%"),a.size||"100%",a.innerSize||0],v=Math.min(m,f),d,e;for(d=0;4>d;++d)e=u[d],a=2>d||2===d&&/%$/.test(e),u[d]=E(e,[m,f,v,u[2]][d])+(a?k:0);u[3]>u[2]&&(u[3]=u[2]);return u},getStartAndEndRadians:function(a,f){a=B(a)?
a:0;f=B(f)&&f>a&&360>f-a?f:a+360;return{start:z*(a+-90),end:z*(f+-90)}}}})(N);(function(a){var z=a.addEvent,B=a.CenteredSeriesMixin,F=a.defined,E=a.each,l=a.extend,f=B.getStartAndEndRadians,k=a.inArray,m=a.noop,u=a.pick,v=a.Point,d=a.Series,e=a.seriesType,n=a.setAnimation;e("pie","line",{center:[null,null],clip:!1,colorByPoint:!0,dataLabels:{distance:30,enabled:!0,formatter:function(){return this.point.isNull?void 0:this.point.name},x:0},ignoreHiddenPoint:!0,legendType:"point",marker:null,size:null,
showInLegend:!1,slicedOffset:10,stickyTracking:!1,tooltip:{followPointer:!0},borderColor:"#ffffff",borderWidth:1,states:{hover:{brightness:.1,shadow:!1}}},{isCartesian:!1,requireSorting:!1,directTouch:!0,noSharedTooltip:!0,trackerGroups:["group","dataLabelsGroup"],axisTypes:[],pointAttribs:a.seriesTypes.column.prototype.pointAttribs,animate:function(a){var b=this,d=b.points,e=b.startAngleRad;a||(E(d,function(a){var c=a.graphic,d=a.shapeArgs;c&&(c.attr({r:a.startR||b.center[3]/2,start:e,end:e}),c.animate({r:d.r,
start:d.start,end:d.end},b.options.animation))}),b.animate=null)},updateTotals:function(){var a,c=0,d=this.points,e=d.length,f,k=this.options.ignoreHiddenPoint;for(a=0;a<e;a++)f=d[a],c+=k&&!f.visible?0:f.isNull?0:f.y;this.total=c;for(a=0;a<e;a++)f=d[a],f.percentage=0<c&&(f.visible||!k)?f.y/c*100:0,f.total=c},generatePoints:function(){d.prototype.generatePoints.call(this);this.updateTotals()},translate:function(a){this.generatePoints();var b=0,d=this.options,e=d.slicedOffset,k=e+(d.borderWidth||0),
m,n,l,v=f(d.startAngle,d.endAngle),r=this.startAngleRad=v.start,v=(this.endAngleRad=v.end)-r,A=this.points,J,p=d.dataLabels.distance,d=d.ignoreHiddenPoint,C,D=A.length,z;a||(this.center=a=this.getCenter());this.getX=function(b,c,d){l=Math.asin(Math.min((b-a[1])/(a[2]/2+d.labelDistance),1));return a[0]+(c?-1:1)*Math.cos(l)*(a[2]/2+d.labelDistance)};for(C=0;C<D;C++){z=A[C];z.labelDistance=u(z.options.dataLabels&&z.options.dataLabels.distance,p);this.maxLabelDistance=Math.max(this.maxLabelDistance||
0,z.labelDistance);m=r+b*v;if(!d||z.visible)b+=z.percentage/100;n=r+b*v;z.shapeType="arc";z.shapeArgs={x:a[0],y:a[1],r:a[2]/2,innerR:a[3]/2,start:Math.round(1E3*m)/1E3,end:Math.round(1E3*n)/1E3};l=(n+m)/2;l>1.5*Math.PI?l-=2*Math.PI:l<-Math.PI/2&&(l+=2*Math.PI);z.slicedTranslation={translateX:Math.round(Math.cos(l)*e),translateY:Math.round(Math.sin(l)*e)};n=Math.cos(l)*a[2]/2;J=Math.sin(l)*a[2]/2;z.tooltipPos=[a[0]+.7*n,a[1]+.7*J];z.half=l<-Math.PI/2||l>Math.PI/2?1:0;z.angle=l;m=Math.min(k,z.labelDistance/
5);z.labelPos=[a[0]+n+Math.cos(l)*z.labelDistance,a[1]+J+Math.sin(l)*z.labelDistance,a[0]+n+Math.cos(l)*m,a[1]+J+Math.sin(l)*m,a[0]+n,a[1]+J,0>z.labelDistance?"center":z.half?"right":"left",l]}},drawGraph:null,drawPoints:function(){var a=this,c=a.chart.renderer,d,e,f,k,m=a.options.shadow;m&&!a.shadowGroup&&(a.shadowGroup=c.g("shadow").add(a.group));E(a.points,function(b){e=b.graphic;if(b.isNull)e&&(b.graphic=e.destroy());else{k=b.shapeArgs;d=b.getTranslate();var h=b.shadowGroup;m&&!h&&(h=b.shadowGroup=
c.g("shadow").add(a.shadowGroup));h&&h.attr(d);f=a.pointAttribs(b,b.selected&&"select");e?e.setRadialReference(a.center).attr(f).animate(l(k,d)):(b.graphic=e=c[b.shapeType](k).setRadialReference(a.center).attr(d).add(a.group),b.visible||e.attr({visibility:"hidden"}),e.attr(f).attr({"stroke-linejoin":"round"}).shadow(m,h));e.addClass(b.getClassName())}})},searchPoint:m,sortByAngle:function(a,c){a.sort(function(a,b){return void 0!==a.angle&&(b.angle-a.angle)*c})},drawLegendSymbol:a.LegendSymbolMixin.drawRectangle,
getCenter:B.getCenter,getSymbol:m},{init:function(){v.prototype.init.apply(this,arguments);var a=this,c;a.name=u(a.name,"Slice");c=function(b){a.slice("select"===b.type)};z(a,"select",c);z(a,"unselect",c);return a},isValid:function(){return a.isNumber(this.y,!0)&&0<=this.y},setVisible:function(a,c){var b=this,d=b.series,e=d.chart,f=d.options.ignoreHiddenPoint;c=u(c,f);a!==b.visible&&(b.visible=b.options.visible=a=void 0===a?!b.visible:a,d.options.data[k(b,d.data)]=b.options,E(["graphic","dataLabel",
"connector","shadowGroup"],function(c){if(b[c])b[c][a?"show":"hide"](!0)}),b.legendItem&&e.legend.colorizeItem(b,a),a||"hover"!==b.state||b.setState(""),f&&(d.isDirty=!0),c&&e.redraw())},slice:function(a,c,d){var b=this.series;n(d,b.chart);u(c,!0);this.sliced=this.options.sliced=F(a)?a:!this.sliced;b.options.data[k(this,b.data)]=this.options;this.graphic.animate(this.getTranslate());this.shadowGroup&&this.shadowGroup.animate(this.getTranslate())},getTranslate:function(){return this.sliced?this.slicedTranslation:
{translateX:0,translateY:0}},haloPath:function(a){var b=this.shapeArgs;return this.sliced||!this.visible?[]:this.series.chart.renderer.symbols.arc(b.x,b.y,b.r+a,b.r+a,{innerR:this.shapeArgs.r,start:b.start,end:b.end})}})})(N);(function(a){var z=a.addEvent,B=a.arrayMax,F=a.defined,E=a.each,l=a.extend,f=a.format,k=a.map,m=a.merge,u=a.noop,v=a.pick,d=a.relativeLength,e=a.Series,n=a.seriesTypes,b=a.stableSort;a.distribute=function(a,d){function c(a,b){return a.target-b.target}var e,f=!0,m=a,n=[],l;l=
0;for(e=a.length;e--;)l+=a[e].size;if(l>d){b(a,function(a,b){return(b.rank||0)-(a.rank||0)});for(l=e=0;l<=d;)l+=a[e].size,e++;n=a.splice(e-1,a.length)}b(a,c);for(a=k(a,function(a){return{size:a.size,targets:[a.target]}});f;){for(e=a.length;e--;)f=a[e],l=(Math.min.apply(0,f.targets)+Math.max.apply(0,f.targets))/2,f.pos=Math.min(Math.max(0,l-f.size/2),d-f.size);e=a.length;for(f=!1;e--;)0<e&&a[e-1].pos+a[e-1].size>a[e].pos&&(a[e-1].size+=a[e].size,a[e-1].targets=a[e-1].targets.concat(a[e].targets),a[e-
1].pos+a[e-1].size>d&&(a[e-1].pos=d-a[e-1].size),a.splice(e,1),f=!0)}e=0;E(a,function(a){var b=0;E(a.targets,function(){m[e].pos=a.pos+b;b+=m[e].size;e++})});m.push.apply(m,n);b(m,c)};e.prototype.drawDataLabels=function(){var b=this,d=b.options,e=d.dataLabels,k=b.points,l,n,y=b.hasRendered||0,u,r,A=v(e.defer,!!d.animation),J=b.chart.renderer;if(e.enabled||b._hasPointLabels)b.dlProcessOptions&&b.dlProcessOptions(e),r=b.plotGroup("dataLabelsGroup","data-labels",A&&!y?"hidden":"visible",e.zIndex||6),
A&&(r.attr({opacity:+y}),y||z(b,"afterAnimate",function(){b.visible&&r.show(!0);r[d.animation?"animate":"attr"]({opacity:1},{duration:200})})),n=e,E(k,function(c){var h,k=c.dataLabel,p,g,q=c.connector,y=!k,w;l=c.dlOptions||c.options&&c.options.dataLabels;if(h=v(l&&l.enabled,n.enabled)&&!c.isNull)e=m(n,l),p=c.getLabelConfig(),w=e[c.formatPrefix+"Format"]||e.format,u=F(w)?f(w,p):e.formatter.call(p,e),w=e.style,p=e.rotation,w.color=v(e.color,w.color,b.color,"#000000"),"contrast"===w.color&&(c.contrastColor=
J.getContrast(c.color||b.color),w.color=e.inside||0>v(c.labelDistance,e.distance)||d.stacking?c.contrastColor:"#000000"),d.cursor&&(w.cursor=d.cursor),g={fill:e.backgroundColor,stroke:e.borderColor,"stroke-width":e.borderWidth,r:e.borderRadius||0,rotation:p,padding:e.padding,zIndex:1},a.objectEach(g,function(a,b){void 0===a&&delete g[b]});!k||h&&F(u)?h&&F(u)&&(k?g.text=u:(k=c.dataLabel=J[p?"text":"label"](u,0,-9999,e.shape,null,null,e.useHTML,null,"data-label"),k.addClass("highcharts-data-label-color-"+
c.colorIndex+" "+(e.className||"")+(e.useHTML?"highcharts-tracker":""))),k.attr(g),k.css(w).shadow(e.shadow),k.added||k.add(r),b.alignDataLabel(c,k,e,null,y)):(c.dataLabel=k=k.destroy(),q&&(c.connector=q.destroy()))})};e.prototype.alignDataLabel=function(a,b,d,e,f){var c=this.chart,h=c.inverted,k=v(a.plotX,-9999),m=v(a.plotY,-9999),n=b.getBBox(),q,p=d.rotation,u=d.align,w=this.visible&&(a.series.forceDL||c.isInsidePlot(k,Math.round(m),h)||e&&c.isInsidePlot(k,h?e.x+1:e.y+e.height-1,h)),z="justify"===
v(d.overflow,"justify");if(w&&(q=d.style.fontSize,q=c.renderer.fontMetrics(q,b).b,e=l({x:h?this.yAxis.len-m:k,y:Math.round(h?this.xAxis.len-k:m),width:0,height:0},e),l(d,{width:n.width,height:n.height}),p?(z=!1,k=c.renderer.rotCorr(q,p),k={x:e.x+d.x+e.width/2+k.x,y:e.y+d.y+{top:0,middle:.5,bottom:1}[d.verticalAlign]*e.height},b[f?"attr":"animate"](k).attr({align:u}),m=(p+720)%360,m=180<m&&360>m,"left"===u?k.y-=m?n.height:0:"center"===u?(k.x-=n.width/2,k.y-=n.height/2):"right"===u&&(k.x-=n.width,k.y-=
m?0:n.height)):(b.align(d,null,e),k=b.alignAttr),z?a.isLabelJustified=this.justifyDataLabel(b,d,k,n,e,f):v(d.crop,!0)&&(w=c.isInsidePlot(k.x,k.y)&&c.isInsidePlot(k.x+n.width,k.y+n.height)),d.shape&&!p))b[f?"attr":"animate"]({anchorX:h?c.plotWidth-a.plotY:a.plotX,anchorY:h?c.plotHeight-a.plotX:a.plotY});w||(b.attr({y:-9999}),b.placed=!1)};e.prototype.justifyDataLabel=function(a,b,d,e,f,k){var c=this.chart,h=b.align,m=b.verticalAlign,n,l,p=a.box?0:a.padding||0;n=d.x+p;0>n&&("right"===h?b.align="left":
b.x=-n,l=!0);n=d.x+e.width-p;n>c.plotWidth&&("left"===h?b.align="right":b.x=c.plotWidth-n,l=!0);n=d.y+p;0>n&&("bottom"===m?b.verticalAlign="top":b.y=-n,l=!0);n=d.y+e.height-p;n>c.plotHeight&&("top"===m?b.verticalAlign="bottom":b.y=c.plotHeight-n,l=!0);l&&(a.placed=!k,a.align(b,null,f));return l};n.pie&&(n.pie.prototype.drawDataLabels=function(){var b=this,d=b.data,f,k=b.chart,m=b.options.dataLabels,n=v(m.connectorPadding,10),l=v(m.connectorWidth,1),u=k.plotWidth,r=k.plotHeight,A,z=b.center,p=z[2]/
2,C=z[1],D,I,g,t,Q=[[],[]],M,O,L,N,x=[0,0,0,0];b.visible&&(m.enabled||b._hasPointLabels)&&(E(d,function(a){a.dataLabel&&a.visible&&a.dataLabel.shortened&&(a.dataLabel.attr({width:"auto"}).css({width:"auto",textOverflow:"clip"}),a.dataLabel.shortened=!1)}),e.prototype.drawDataLabels.apply(b),E(d,function(a){a.dataLabel&&a.visible&&(Q[a.half].push(a),a.dataLabel._pos=null)}),E(Q,function(c,d){var e,h,l=c.length,q=[],y;if(l)for(b.sortByAngle(c,d-.5),0<b.maxLabelDistance&&(e=Math.max(0,C-p-b.maxLabelDistance),
h=Math.min(C+p+b.maxLabelDistance,k.plotHeight),E(c,function(a){0<a.labelDistance&&a.dataLabel&&(a.top=Math.max(0,C-p-a.labelDistance),a.bottom=Math.min(C+p+a.labelDistance,k.plotHeight),y=a.dataLabel.getBBox().height||21,a.positionsIndex=q.push({target:a.labelPos[1]-a.top+y/2,size:y,rank:a.y})-1)}),a.distribute(q,h+y-e)),N=0;N<l;N++)f=c[N],h=f.positionsIndex,g=f.labelPos,D=f.dataLabel,L=!1===f.visible?"hidden":"inherit",O=e=g[1],q&&F(q[h])&&(void 0===q[h].pos?L="hidden":(t=q[h].size,O=f.top+q[h].pos)),
delete f.positionIndex,M=m.justify?z[0]+(d?-1:1)*(p+f.labelDistance):b.getX(O<f.top+2||O>f.bottom-2?e:O,d,f),D._attr={visibility:L,align:g[6]},D._pos={x:M+m.x+({left:n,right:-n}[g[6]]||0),y:O+m.y-10},g.x=M,g.y=O,v(m.crop,!0)&&(I=D.getBBox().width,e=null,M-I<n?(e=Math.round(I-M+n),x[3]=Math.max(e,x[3])):M+I>u-n&&(e=Math.round(M+I-u+n),x[1]=Math.max(e,x[1])),0>O-t/2?x[0]=Math.max(Math.round(-O+t/2),x[0]):O+t/2>r&&(x[2]=Math.max(Math.round(O+t/2-r),x[2])),D.sideOverflow=e)}),0===B(x)||this.verifyDataLabelOverflow(x))&&
(this.placeDataLabels(),l&&E(this.points,function(a){var c;A=a.connector;if((D=a.dataLabel)&&D._pos&&a.visible&&0<a.labelDistance){L=D._attr.visibility;if(c=!A)a.connector=A=k.renderer.path().addClass("highcharts-data-label-connector highcharts-color-"+a.colorIndex).add(b.dataLabelsGroup),A.attr({"stroke-width":l,stroke:m.connectorColor||a.color||"#666666"});A[c?"attr":"animate"]({d:b.connectorPath(a.labelPos)});A.attr("visibility",L)}else A&&(a.connector=A.destroy())}))},n.pie.prototype.connectorPath=
function(a){var b=a.x,c=a.y;return v(this.options.dataLabels.softConnector,!0)?["M",b+("left"===a[6]?5:-5),c,"C",b,c,2*a[2]-a[4],2*a[3]-a[5],a[2],a[3],"L",a[4],a[5]]:["M",b+("left"===a[6]?5:-5),c,"L",a[2],a[3],"L",a[4],a[5]]},n.pie.prototype.placeDataLabels=function(){E(this.points,function(a){var b=a.dataLabel;b&&a.visible&&((a=b._pos)?(b.sideOverflow&&(b._attr.width=b.getBBox().width-b.sideOverflow,b.css({width:b._attr.width+"px",textOverflow:"ellipsis"}),b.shortened=!0),b.attr(b._attr),b[b.moved?
"animate":"attr"](a),b.moved=!0):b&&b.attr({y:-9999}))},this)},n.pie.prototype.alignDataLabel=u,n.pie.prototype.verifyDataLabelOverflow=function(a){var b=this.center,c=this.options,e=c.center,f=c.minSize||80,k,m=null!==c.size;m||(null!==e[0]?k=Math.max(b[2]-Math.max(a[1],a[3]),f):(k=Math.max(b[2]-a[1]-a[3],f),b[0]+=(a[3]-a[1])/2),null!==e[1]?k=Math.max(Math.min(k,b[2]-Math.max(a[0],a[2])),f):(k=Math.max(Math.min(k,b[2]-a[0]-a[2]),f),b[1]+=(a[0]-a[2])/2),k<b[2]?(b[2]=k,b[3]=Math.min(d(c.innerSize||
0,k),k),this.translate(b),this.drawDataLabels&&this.drawDataLabels()):m=!0);return m});n.column&&(n.column.prototype.alignDataLabel=function(a,b,d,f,k){var c=this.chart.inverted,h=a.series,n=a.dlBox||a.shapeArgs,l=v(a.below,a.plotY>v(this.translatedThreshold,h.yAxis.len)),q=v(d.inside,!!this.options.stacking);n&&(f=m(n),0>f.y&&(f.height+=f.y,f.y=0),n=f.y+f.height-h.yAxis.len,0<n&&(f.height-=n),c&&(f={x:h.yAxis.len-f.y-f.height,y:h.xAxis.len-f.x-f.width,width:f.height,height:f.width}),q||(c?(f.x+=
l?0:f.width,f.width=0):(f.y+=l?f.height:0,f.height=0)));d.align=v(d.align,!c||q?"center":l?"right":"left");d.verticalAlign=v(d.verticalAlign,c||q?"middle":l?"top":"bottom");e.prototype.alignDataLabel.call(this,a,b,d,f,k);a.isLabelJustified&&a.contrastColor&&a.dataLabel.css({color:a.contrastColor})})})(N);(function(a){var z=a.Chart,B=a.each,F=a.objectEach,E=a.pick,l=a.addEvent;z.prototype.callbacks.push(function(a){l(a,"render",function(){var f=[];B(a.labelCollectors||[],function(a){f=f.concat(a())});
B(a.yAxis||[],function(a){a.options.stackLabels&&!a.options.stackLabels.allowOverlap&&F(a.stacks,function(a){F(a,function(a){f.push(a.label)})})});B(a.series||[],function(a){var k=a.options.dataLabels,m=a.dataLabelCollections||["dataLabel"];(k.enabled||a._hasPointLabels)&&!k.allowOverlap&&a.visible&&B(m,function(d){B(a.points,function(a){a[d]&&(a[d].labelrank=E(a.labelrank,a.shapeArgs&&a.shapeArgs.height),f.push(a[d]))})})});a.hideOverlappingLabels(f)})});z.prototype.hideOverlappingLabels=function(a){var f=
a.length,m,l,v,d,e,n,b,c,q,h=function(a,b,c,d,e,f,h,k){return!(e>a+c||e+h<a||f>b+d||f+k<b)};for(l=0;l<f;l++)if(m=a[l])m.oldOpacity=m.opacity,m.newOpacity=1,m.width||(v=m.getBBox(),m.width=v.width,m.height=v.height);a.sort(function(a,b){return(b.labelrank||0)-(a.labelrank||0)});for(l=0;l<f;l++)for(v=a[l],m=l+1;m<f;++m)if(d=a[m],v&&d&&v!==d&&v.placed&&d.placed&&0!==v.newOpacity&&0!==d.newOpacity&&(e=v.alignAttr,n=d.alignAttr,b=v.parentGroup,c=d.parentGroup,q=2*(v.box?0:v.padding||0),e=h(e.x+b.translateX,
e.y+b.translateY,v.width-q,v.height-q,n.x+c.translateX,n.y+c.translateY,d.width-q,d.height-q)))(v.labelrank<d.labelrank?v:d).newOpacity=0;B(a,function(a){var b,c;a&&(c=a.newOpacity,a.oldOpacity!==c&&a.placed&&(c?a.show(!0):b=function(){a.hide()},a.alignAttr.opacity=c,a[a.isOld?"animate":"attr"](a.alignAttr,null,b)),a.isOld=!0)})}})(N);(function(a){var z=a.addEvent,B=a.Chart,F=a.createElement,E=a.css,l=a.defaultOptions,f=a.defaultPlotOptions,k=a.each,m=a.extend,u=a.fireEvent,v=a.hasTouch,d=a.inArray,
e=a.isObject,n=a.Legend,b=a.merge,c=a.pick,q=a.Point,h=a.Series,w=a.seriesTypes,G=a.svg,H;H=a.TrackerMixin={drawTrackerPoint:function(){var a=this,b=a.chart.pointer,c=function(a){var c=b.getPointFromEvent(a);void 0!==c&&(b.isDirectTouch=!0,c.onMouseOver(a))};k(a.points,function(a){a.graphic&&(a.graphic.element.point=a);a.dataLabel&&(a.dataLabel.div?a.dataLabel.div.point=a:a.dataLabel.element.point=a)});a._hasTracking||(k(a.trackerGroups,function(d){if(a[d]){a[d].addClass("highcharts-tracker").on("mouseover",
c).on("mouseout",function(a){b.onTrackerMouseOut(a)});if(v)a[d].on("touchstart",c);a.options.cursor&&a[d].css(E).css({cursor:a.options.cursor})}}),a._hasTracking=!0)},drawTrackerGraph:function(){var a=this,b=a.options,c=b.trackByArea,d=[].concat(c?a.areaPath:a.graphPath),e=d.length,f=a.chart,h=f.pointer,m=f.renderer,n=f.options.tooltip.snap,g=a.tracker,l,q=function(){if(f.hoverSeries!==a)a.onMouseOver()},u="rgba(192,192,192,"+(G?.0001:.002)+")";if(e&&!c)for(l=e+1;l--;)"M"===d[l]&&d.splice(l+1,0,d[l+
1]-n,d[l+2],"L"),(l&&"M"===d[l]||l===e)&&d.splice(l,0,"L",d[l-2]+n,d[l-1]);g?g.attr({d:d}):a.graph&&(a.tracker=m.path(d).attr({"stroke-linejoin":"round",visibility:a.visible?"visible":"hidden",stroke:u,fill:c?u:"none","stroke-width":a.graph.strokeWidth()+(c?0:2*n),zIndex:2}).add(a.group),k([a.tracker,a.markerGroup],function(a){a.addClass("highcharts-tracker").on("mouseover",q).on("mouseout",function(a){h.onTrackerMouseOut(a)});b.cursor&&a.css({cursor:b.cursor});if(v)a.on("touchstart",q)}))}};w.column&&
(w.column.prototype.drawTracker=H.drawTrackerPoint);w.pie&&(w.pie.prototype.drawTracker=H.drawTrackerPoint);w.scatter&&(w.scatter.prototype.drawTracker=H.drawTrackerPoint);m(n.prototype,{setItemEvents:function(a,c,d){var e=this,f=e.chart.renderer.boxWrapper,h="highcharts-legend-"+(a.series?"point":"series")+"-active";(d?c:a.legendGroup).on("mouseover",function(){a.setState("hover");f.addClass(h);c.css(e.options.itemHoverStyle)}).on("mouseout",function(){c.css(b(a.visible?e.itemStyle:e.itemHiddenStyle));
f.removeClass(h);a.setState()}).on("click",function(b){var c=function(){a.setVisible&&a.setVisible()};b={browserEvent:b};a.firePointEvent?a.firePointEvent("legendItemClick",b,c):u(a,"legendItemClick",b,c)})},createCheckboxForItem:function(a){a.checkbox=F("input",{type:"checkbox",checked:a.selected,defaultChecked:a.selected},this.options.itemCheckboxStyle,this.chart.container);z(a.checkbox,"click",function(b){u(a.series||a,"checkboxClick",{checked:b.target.checked,item:a},function(){a.select()})})}});
l.legend.itemStyle.cursor="pointer";m(B.prototype,{showResetZoom:function(){var a=this,b=l.lang,c=a.options.chart.resetZoomButton,d=c.theme,e=d.states,f="chart"===c.relativeTo?null:"plotBox";this.resetZoomButton=a.renderer.button(b.resetZoom,null,null,function(){a.zoomOut()},d,e&&e.hover).attr({align:c.position.align,title:b.resetZoomTitle}).addClass("highcharts-reset-zoom").add().align(c.position,!1,f)},zoomOut:function(){var a=this;u(a,"selection",{resetSelection:!0},function(){a.zoom()})},zoom:function(a){var b,
d=this.pointer,f=!1,h;!a||a.resetSelection?(k(this.axes,function(a){b=a.zoom()}),d.initiated=!1):k(a.xAxis.concat(a.yAxis),function(a){var c=a.axis;d[c.isXAxis?"zoomX":"zoomY"]&&(b=c.zoom(a.min,a.max),c.displayBtn&&(f=!0))});h=this.resetZoomButton;f&&!h?this.showResetZoom():!f&&e(h)&&(this.resetZoomButton=h.destroy());b&&this.redraw(c(this.options.chart.animation,a&&a.animation,100>this.pointCount))},pan:function(a,b){var c=this,d=c.hoverPoints,e;d&&k(d,function(a){a.setState()});k("xy"===b?[1,0]:
[1],function(b){b=c[b?"xAxis":"yAxis"][0];var d=b.horiz,f=a[d?"chartX":"chartY"],d=d?"mouseDownX":"mouseDownY",h=c[d],g=(b.pointRange||0)/2,k=b.getExtremes(),l=b.toValue(h-f,!0)+g,g=b.toValue(h+b.len-f,!0)-g,m=g<l,h=m?g:l,l=m?l:g,g=Math.min(k.dataMin,b.toValue(b.toPixels(k.min)-b.minPixelPadding)),m=Math.max(k.dataMax,b.toValue(b.toPixels(k.max)+b.minPixelPadding)),n;n=g-h;0<n&&(l+=n,h=g);n=l-m;0<n&&(l=m,h-=n);b.series.length&&h!==k.min&&l!==k.max&&(b.setExtremes(h,l,!1,!1,{trigger:"pan"}),e=!0);
c[d]=f});e&&c.redraw(!1);E(c.container,{cursor:"move"})}});m(q.prototype,{select:function(a,b){var e=this,f=e.series,h=f.chart;a=c(a,!e.selected);e.firePointEvent(a?"select":"unselect",{accumulate:b},function(){e.selected=e.options.selected=a;f.options.data[d(e,f.data)]=e.options;e.setState(a&&"select");b||k(h.getSelectedPoints(),function(a){a.selected&&a!==e&&(a.selected=a.options.selected=!1,f.options.data[d(a,f.data)]=a.options,a.setState(""),a.firePointEvent("unselect"))})})},onMouseOver:function(a){var b=
this.series.chart,c=b.pointer;a=a?c.normalize(a):c.getChartCoordinatesFromPoint(this,b.inverted);c.runPointActions(a,this)},onMouseOut:function(){var a=this.series.chart;this.firePointEvent("mouseOut");k(a.hoverPoints||[],function(a){a.setState()});a.hoverPoints=a.hoverPoint=null},importEvents:function(){if(!this.hasImportedEvents){var c=this,d=b(c.series.options.point,c.options).events;c.events=d;a.objectEach(d,function(a,b){z(c,b,a)});this.hasImportedEvents=!0}},setState:function(a,b){var d=Math.floor(this.plotX),
e=this.plotY,h=this.series,k=h.options.states[a]||{},l=f[h.type].marker&&h.options.marker,n=l&&!1===l.enabled,q=l&&l.states&&l.states[a]||{},g=!1===q.enabled,t=h.stateMarkerGraphic,u=this.marker||{},v=h.chart,y=h.halo,w,z=l&&h.markerAttribs;a=a||"";if(!(a===this.state&&!b||this.selected&&"select"!==a||!1===k.enabled||a&&(g||n&&!1===q.enabled)||a&&u.states&&u.states[a]&&!1===u.states[a].enabled)){z&&(w=h.markerAttribs(this,a));if(this.graphic)this.state&&this.graphic.removeClass("highcharts-point-"+
this.state),a&&this.graphic.addClass("highcharts-point-"+a),this.graphic.animate(h.pointAttribs(this,a),c(v.options.chart.animation,k.animation)),w&&this.graphic.animate(w,c(v.options.chart.animation,q.animation,l.animation)),t&&t.hide();else{if(a&&q){l=u.symbol||h.symbol;t&&t.currentSymbol!==l&&(t=t.destroy());if(t)t[b?"animate":"attr"]({x:w.x,y:w.y});else l&&(h.stateMarkerGraphic=t=v.renderer.symbol(l,w.x,w.y,w.width,w.height).add(h.markerGroup),t.currentSymbol=l);t&&t.attr(h.pointAttribs(this,
a))}t&&(t[a&&v.isInsidePlot(d,e,v.inverted)?"show":"hide"](),t.element.point=this)}(d=k.halo)&&d.size?(y||(h.halo=y=v.renderer.path().add((this.graphic||t).parentGroup)),y[b?"animate":"attr"]({d:this.haloPath(d.size)}),y.attr({"class":"highcharts-halo highcharts-color-"+c(this.colorIndex,h.colorIndex)}),y.point=this,y.attr(m({fill:this.color||h.color,"fill-opacity":d.opacity,zIndex:-1},d.attributes))):y&&y.point&&y.point.haloPath&&y.animate({d:y.point.haloPath(0)});this.state=a}},haloPath:function(a){return this.series.chart.renderer.symbols.circle(Math.floor(this.plotX)-
a,this.plotY-a,2*a,2*a)}});m(h.prototype,{onMouseOver:function(){var a=this.chart,b=a.hoverSeries;if(b&&b!==this)b.onMouseOut();this.options.events.mouseOver&&u(this,"mouseOver");this.setState("hover");a.hoverSeries=this},onMouseOut:function(){var a=this.options,b=this.chart,c=b.tooltip,d=b.hoverPoint;b.hoverSeries=null;if(d)d.onMouseOut();this&&a.events.mouseOut&&u(this,"mouseOut");!c||this.stickyTracking||c.shared&&!this.noSharedTooltip||c.hide();this.setState()},setState:function(a){var b=this,
d=b.options,e=b.graph,f=d.states,h=d.lineWidth,d=0;a=a||"";if(b.state!==a&&(k([b.group,b.markerGroup,b.dataLabelsGroup],function(c){c&&(b.state&&c.removeClass("highcharts-series-"+b.state),a&&c.addClass("highcharts-series-"+a))}),b.state=a,!f[a]||!1!==f[a].enabled)&&(a&&(h=f[a].lineWidth||h+(f[a].lineWidthPlus||0)),e&&!e.dashstyle))for(h={"stroke-width":h},e.animate(h,c(b.chart.options.chart.animation,f[a]&&f[a].animation));b["zone-graph-"+d];)b["zone-graph-"+d].attr(h),d+=1},setVisible:function(a,
b){var c=this,d=c.chart,e=c.legendItem,f,h=d.options.chart.ignoreHiddenSeries,l=c.visible;f=(c.visible=a=c.options.visible=c.userOptions.visible=void 0===a?!l:a)?"show":"hide";k(["group","dataLabelsGroup","markerGroup","tracker","tt"],function(a){if(c[a])c[a][f]()});if(d.hoverSeries===c||(d.hoverPoint&&d.hoverPoint.series)===c)c.onMouseOut();e&&d.legend.colorizeItem(c,a);c.isDirty=!0;c.options.stacking&&k(d.series,function(a){a.options.stacking&&a.visible&&(a.isDirty=!0)});k(c.linkedSeries,function(b){b.setVisible(a,
!1)});h&&(d.isDirtyBox=!0);!1!==b&&d.redraw();u(c,f)},show:function(){this.setVisible(!0)},hide:function(){this.setVisible(!1)},select:function(a){this.selected=a=void 0===a?!this.selected:a;this.checkbox&&(this.checkbox.checked=a);u(this,a?"select":"unselect")},drawTracker:H.drawTrackerGraph})})(N);(function(a){var z=a.Chart,B=a.each,F=a.inArray,E=a.isArray,l=a.isObject,f=a.pick,k=a.splat;z.prototype.setResponsive=function(f){var k=this.options.responsive,l=[],d=this.currentResponsive;k&&k.rules&&
B(k.rules,function(d){void 0===d._id&&(d._id=a.uniqueKey());this.matchResponsiveRule(d,l,f)},this);var e=a.merge.apply(0,a.map(l,function(d){return a.find(k.rules,function(a){return a._id===d}).chartOptions})),l=l.toString()||void 0;l!==(d&&d.ruleIds)&&(d&&this.update(d.undoOptions,f),l?(this.currentResponsive={ruleIds:l,mergedOptions:e,undoOptions:this.currentOptions(e)},this.update(e,f)):this.currentResponsive=void 0)};z.prototype.matchResponsiveRule=function(a,k){var l=a.condition;(l.callback||
function(){return this.chartWidth<=f(l.maxWidth,Number.MAX_VALUE)&&this.chartHeight<=f(l.maxHeight,Number.MAX_VALUE)&&this.chartWidth>=f(l.minWidth,0)&&this.chartHeight>=f(l.minHeight,0)}).call(this)&&k.push(a._id)};z.prototype.currentOptions=function(f){function m(d,e,f,b){var c;a.objectEach(d,function(a,h){if(!b&&-1<F(h,["series","xAxis","yAxis"]))for(d[h]=k(d[h]),f[h]=[],c=0;c<d[h].length;c++)e[h][c]&&(f[h][c]={},m(a[c],e[h][c],f[h][c],b+1));else l(a)?(f[h]=E(a)?[]:{},m(a,e[h]||{},f[h],b+1)):f[h]=
e[h]||null})}var v={};m(f,this.options,v,0);return v}})(N);return N});
| u |
test_shorted_ipv6_address.py | import unittest
from katas.beta.shorten_ipv6_address import shorten
class ShortenIPv6TestCase(unittest.TestCase):
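    """Expected behaviour of shorten() as exercised below: strip leading zeros
    from every group, and replace the longest run of all-zero groups (the
    leftmost run on ties, even a run of length one) with '::'."""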
def test_equal_1(self):
self.assertEqual(shorten('2642:0006:0006:0000:0000:0000:0000:9147'),
'2642:6:6::9147')
def test_equal_2(self):
self.assertEqual(shorten('1234:0000:5678:0000:0000:90AB:0000:CDEF'),
'1234:0:5678::90AB:0:CDEF')
def test_equal_3(self):
self.assertEqual(shorten('1111:0000:0000:2222:0000:0000:3333:4444'),
'1111::2222:0:0:3333:4444')
def test_equal_4(self):
self.assertEqual(shorten('A43B:1CF6:541C:98AA:CC43:1092:E932:90AD'),
'A43B:1CF6:541C:98AA:CC43:1092:E932:90AD')
def test_equal_5(self):
self.assertEqual(shorten('9000:B004:C13A:594C:19CD:102D:394F:FCD1'),
'9000:B004:C13A:594C:19CD:102D:394F:FCD1')
def test_equal_6(self):
self.assertEqual(shorten('043B:00F6:541C:08AA:0003:1092:000D:90AD'),
'43B:F6:541C:8AA:3:1092:D:90AD')
def test_equal_7(self):
self.assertEqual(shorten('9000:0004:000A:094C:00CD:102D:394F:0001'),
'9000:4:A:94C:CD:102D:394F:1')
def test_equal_8(self):
self.assertEqual(shorten('3BDF:000E:0004:0ECD:0000:0009:3C7F:734F'),
'3BDF:E:4:ECD::9:3C7F:734F')
def test_equal_9(self):
self.assertEqual(shorten('0388:0B7B:004D:0000:00D3:FDC1:E0E8:08D7'),
'388:B7B:4D::D3:FDC1:E0E8:8D7')
def test_equal_10(self):
self.assertEqual(shorten('0018:000A:0F0C:10B2:668D:0000:0000:009B'),
'18:A:F0C:10B2:668D::9B')
def test_equal_11(self):
self.assertEqual(shorten('00AF:0000:0000:0000:0000:704E:EC20:3DAA'),
'AF::704E:EC20:3DAA')
def test_equal_12(self):
self.assertEqual(shorten('A2A5:03DB:0000:60A5:0000:0005:BD22:0000'),
'A2A5:3DB::60A5:0:5:BD22:0')
def test_equal_13(self):
self.assertEqual(shorten('0000:0FBA:0000:0000:0000:0000:057E:AFFD'),
'0:FBA::57E:AFFD')
def test_equal_14(self):
self.assertEqual(shorten('0000:0000:0000:0000:0000:0C30:00DA:29CB'),
'::C30:DA:29CB')
def test_equal_15(self):
self.assertEqual(shorten('97CA:4C84:B62B:C3A8:00F4:0000:0000:0000'),
'97CA:4C84:B62B:C3A8:F4::')
def test_equal_16(self):
self.assertEqual(shorten('0000:0391:F08E:0F28:0000:0003:0037:0006'),
'::391:F08E:F28:0:3:37:6')
def test_equal_17(self):
self.assertEqual(shorten('00C8:0000:0243:0050:ED26:008F:0000:0000'), | def test_equal_18(self):
self.assertEqual(shorten('0000:0000:0001:0007:0F63:0000:4FF7:0000'),
'::1:7:F63:0:4FF7:0')
def test_equal_19(self):
self.assertEqual(shorten('0000:0000:6B6F:63B3:0000:0001:0000:0000'),
'::6B6F:63B3:0:1:0:0') | 'C8:0:243:50:ED26:8F::')
|
version.go | // Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by gapicgen. DO NOT EDIT.
package recaptchaenterprise
import "cloud.google.com/go/recaptchaenterprise/internal"
func | () {
versionClient = internal.Version
}
| init |
sql.py | #!/usr/bin/env python
# coding=utf-8
import json
from threading import Lock
import warnings
from sacred.commandline_options import cli_option
from sacred.observers.base import RunObserver
from sacred.serializer import flatten
DEFAULT_SQL_PRIORITY = 40
# ############################# Observer #################################### #
class SqlObserver(RunObserver):
@classmethod
def create(cls, url, echo=False, priority=DEFAULT_SQL_PRIORITY):
warnings.warn(
"SqlObserver.create(...) is deprecated. Please use"
" SqlObserver(...) instead.",
DeprecationWarning,
)
return cls(url, echo, priority)
def __init__(self, url, echo=False, priority=DEFAULT_SQL_PRIORITY):
from sqlalchemy.orm import sessionmaker, scoped_session
import sqlalchemy as sa
engine = sa.create_engine(url, echo=echo)
session_factory = sessionmaker(bind=engine)
# make session thread-local to avoid problems with sqlite (see #275)
session = scoped_session(session_factory)
self.engine = engine
self.session = session
self.priority = priority
self.run = None
self.lock = Lock()
@classmethod
def create_from(cls, engine, session, priority=DEFAULT_SQL_PRIORITY):
"""Instantiate a SqlObserver with an existing engine and session."""
self = cls.__new__(cls) # skip __init__ call
self.engine = engine
self.session = session
self.priority = priority
self.run = None
self.lock = Lock()
return self
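    # Example (illustrative only): share one engine/session between observers
    # so they reuse a single connection pool:
    #   engine = sa.create_engine("sqlite:///runs.db")
    #   session = scoped_session(sessionmaker(bind=engine))
    #   observer = SqlObserver.create_from(engine, session)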
def started_event(
self, ex_info, command, host_info, start_time, config, meta_info, _id
):
return self._add_event(
ex_info,
command,
host_info,
config,
meta_info,
_id,
"RUNNING",
start_time=start_time,
)
def queued_event(
self, ex_info, command, host_info, queue_time, config, meta_info, _id
):
return self._add_event(
ex_info, command, host_info, config, meta_info, _id, "QUEUED"
)
def _add_event(
self, ex_info, command, host_info, config, meta_info, _id, status, **kwargs | sql_exp = Experiment.get_or_create(ex_info, self.session)
sql_host = Host.get_or_create(host_info, self.session)
if _id is None:
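            # No run id supplied: take max(id)+1 and retry on IntegrityError,
            # so two concurrent observers cannot commit the same run id.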
while _id is None:
i = self.session.query(Run).order_by(Run.id.desc()).first()
_id = 0 if i is None else i.id + 1
self.run = Run(
run_id=str(_id),
config=json.dumps(flatten(config)),
command=command,
priority=meta_info.get("priority", 0),
comment=meta_info.get("comment", ""),
experiment=sql_exp,
host=sql_host,
status=status,
**kwargs,
)
self.session.add(self.run)
with self.lock:
try:
self.session.commit()
break
except IntegrityError:
self.session.rollback()
_id = None
else:
self.run = Run(
run_id=str(_id),
config=json.dumps(flatten(config)),
command=command,
priority=meta_info.get("priority", 0),
comment=meta_info.get("comment", ""),
experiment=sql_exp,
host=sql_host,
status=status,
**kwargs,
)
self.session.add(self.run)
self.save()
return _id or self.run.run_id
def heartbeat_event(self, info, captured_out, beat_time, result):
self.run.info = json.dumps(flatten(info))
self.run.captured_out = captured_out
self.run.heartbeat = beat_time
self.run.result = result
self.save()
def completed_event(self, stop_time, result):
self.run.stop_time = stop_time
self.run.result = result
self.run.status = "COMPLETED"
self.save()
def interrupted_event(self, interrupt_time, status):
self.run.stop_time = interrupt_time
self.run.status = status
self.save()
def failed_event(self, fail_time, fail_trace):
self.run.stop_time = fail_time
self.run.fail_trace = "\n".join(fail_trace)
self.run.status = "FAILED"
self.save()
def resource_event(self, filename):
from .sql_bases import Resource
res = Resource.get_or_create(filename, self.session)
self.run.resources.append(res)
self.save()
def artifact_event(self, name, filename, metadata=None, content_type=None):
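        # metadata and content_type are accepted for observer-interface
        # compatibility, but this observer does not persist them.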
from .sql_bases import Artifact
a = Artifact.create(name, filename)
self.run.artifacts.append(a)
self.save()
def save(self):
with self.lock:
self.session.commit()
def query(self, _id):
from .sql_bases import Run
run = self.session.query(Run).filter_by(id=_id).first()
return run.to_json()
def __eq__(self, other):
if isinstance(other, SqlObserver):
# fixme: this will probably fail to detect two equivalent engines
return self.engine == other.engine and self.session == other.session
return False
# ######################## Commandline Option ############################### #
@cli_option("-s", "--sql")
def sql_option(args, run):
"""Add a SQL Observer to the experiment.
The typical form is: dialect://username:password@host:port/database
"""
run.observers.append(SqlObserver(args)) | ):
        from .sql_bases import Base, Experiment, Host, Run
        from sqlalchemy.exc import IntegrityError  # needed by the run-id retry loop
Base.metadata.create_all(self.engine) |
package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Xrootd(CMakePackage):
"""The XROOTD project aims at giving high performance, scalable fault
tolerant access to data repositories of many kinds."""
homepage = "http://xrootd.org"
url = "http://xrootd.org/download/v5.0.1/xrootd-5.0.1.tar.gz"
version('5.0.1', sha256='ff4462b0b61db4cc01dda0e26abdd78e43649ee7ac5e90f7a05b74328ff5ac83')
version('4.12.3', sha256='6f2ca1accc8d49d605706bb556777c753860bf46d845b1ee11393a5cb5987f15', preferred=True)
version('4.12.2', sha256='29f7bc3ea51b9d5d310eabd177152245d4160223325933c67f938ed5120f67bb')
version('4.12.1', sha256='7350d9196a26d17719b839fd242849e3995692fda25f242e67ac6ec907218d13')
version('4.12.0', sha256='69ef4732256d9a88127de4bfdf96bbf73348e0c70ce1d756264871a0ffadd2fc')
version('4.11.3', sha256='8e7a64fd55dfb452b6d5f76a9a97c493593943227b377623a3032da9197c7f65')
version('4.11.2', sha256='4620824db97fcc37dc3dd26110da8e5c3aab1d8302e4921d4f32e83207060603')
version('4.10.0', sha256='f07f85e27d72e9e8ff124173c7b53619aed8fcd36f9d6234c33f8f7fd511995b')
version('4.8.5', sha256='42e4d2cc6f8b442135f09bcc12c7be38b1a0c623a005cb5e69ff3d27997bdf73')
version('4.8.4', sha256='f148d55b16525567c0f893edf9bb2975f7c09f87f0599463e19e1b456a9d95ba')
version('4.8.3', sha256='9cd30a343758b8f50aea4916fa7bd37de3c37c5b670fe059ae77a8b2bbabf299')
version('4.8.2', sha256='8f28ec53e799d4aa55bd0cc4ab278d9762e0e57ac40a4b02af7fc53dcd1bef39')
version('4.8.1', sha256='edee2673d941daf7a6e5c963d339d4a69b4db5c4b6f77b4548b3129b42198029')
version('4.8.0', sha256='0b59ada295341902ca01e9d23e29780fb8df99a6d2bd1c2d654e9bb70c877ad8')
version('4.7.1', sha256='90ddc7042f05667045b06e02c8d9c2064c55d9a26c02c50886254b8df85fc577')
version('4.7.0', sha256='6cc69d9a3694e8dcf2392e9c3b518bd2497a89b3a9f25ffaec62efa52170349b')
version('4.6.1', sha256='0261ce760e8788f85d68918d7702ae30ec677a8f331dae14adc979b4cc7badf5')
version('4.6.0', sha256='b50f7c64ed2a4aead987de3fdf6fce7ee082407ba9297b6851cd917db72edd1d')
version('4.5.0', sha256='27a8e4ef1e6bb6bfe076fef50afe474870edd198699d43359ef01de2f446c670')
version('4.4.1', sha256='3c295dbf750de086c04befc0d3c7045fd3976611c2e75987c1477baca37eb549')
version('4.4.0', sha256='f066e7488390c0bc50938d23f6582fb154466204209ca92681f0aa06340e77c8')
version('4.3.0', sha256='d34865772d975b5d58ad80bb05312bf49aaf124d5431e54dc8618c05a0870e3c')
variant('http', default=True,
description='Build with HTTP support')
variant('python', default=False,
description='Build pyxroot Python extension')
variant('readline', default=True,
description='Use readline')
variant('cxxstd',
default='11',
values=('98', '11', '14', '17'),
multi=False,
description='Use the specified C++ standard when building.')
conflicts('cxxstd=98', when='@4.7.0:')
depends_on('bzip2')
    depends_on('cmake@3.1:', type='build')
depends_on('libxml2', when='+http')
depends_on('uuid', when="@4.11.0:")
depends_on('openssl')
depends_on('python', when='+python')
depends_on('readline', when='+readline')
depends_on('xz')
depends_on('zlib')
extends('python', when='+python')
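    # Releases before 4.9 need an extra patch for the Python bindings to build.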
patch('python-support.patch', level=1, when='@:4.8.99+python')
def patch(self):
"""Remove hardcoded -std=c++0x flag
"""
if self.spec.satisfies('@4.7.0:'):
filter_file(r'\-std=c\+\+0x', r'', 'cmake/XRootDOSDefs.cmake')
def cmake_args(self):
spec = self.spec
options = [
'-DENABLE_HTTP:BOOL={0}'.
format('ON' if '+http' in spec else 'OFF'),
'-DENABLE_PYTHON:BOOL={0}'.
format('ON' if '+python' in spec else 'OFF'),
'-DENABLE_READLINE:BOOL={0}'.
format('ON' if '+readline' in spec else 'OFF'),
'-DENABLE_CEPH:BOOL=OFF'
]
# see https://github.com/spack/spack/pull/11581
if '+python' in self.spec:
options.append('-DPYTHON_EXECUTABLE=%s' %
spec['python'].command.path)
return options
def setup_build_environment(self, env):
| cxxstdflag = ''
if self.spec.variants['cxxstd'].value == '98':
cxxstdflag = self.compiler.cxx98_flag
elif self.spec.variants['cxxstd'].value == '11':
cxxstdflag = self.compiler.cxx11_flag
elif self.spec.variants['cxxstd'].value == '14':
cxxstdflag = self.compiler.cxx14_flag
elif self.spec.variants['cxxstd'].value == '17':
cxxstdflag = self.compiler.cxx17_flag
else:
# The user has selected a (new?) legal value that we've
# forgotten to deal with here.
tty.die(
"INTERNAL ERROR: cannot accommodate unexpected variant ",
"cxxstd={0}".format(self.spec.variants['cxxstd'].value))
if cxxstdflag:
env.append_flags('CXXFLAGS', cxxstdflag) |
|
redis.go | package redis
import (
"context"
"encoding/json"
"github.com/go-redis/redis/v8"
"github.com/grokify/gostor"
)
type Client struct {
redisClient *redis.Client
}
func NewClient(cfg gostor.Config) *Client {
return &Client{
redisClient: redis.NewClient(NewRedisOptions(cfg))}
}
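// Typical usage: build a Client from a gostor.Config (which supplies
// HostPort(), Password and CustomIndex), then use the typed helpers:
//   client := NewClient(cfg)
//   _ = client.SetString("greeting", "hello")
//   val := client.GetOrEmptyString("greeting")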
func (client Client) SetString(key, val string) error {
// For context, see https://github.com/go-redis/redis/issues/582
// ctx, _ := context.WithTimeout(context.TODO(), time.Second)
return client.redisClient.Set(context.Background(), key, val, 0).Err()
}
func (client Client) GetString(key string) (string, error) {
// ctx, _ := context.WithTimeout(context.TODO(), time.Second)
return client.redisClient.Get(context.Background(), key).Result()
}
func (client Client) GetOrEmptyString(key string) string {
// ctx, _ := context.WithTimeout(context.TODO(), time.Second)
	val, err := client.redisClient.Get(context.Background(), key).Result()
	if err != nil {
		return ""
	}
	return val
}
func (client Client) SetInterface(key string, val interface{}) error {
bytes, err := json.Marshal(val)
if err != nil {
return err
}
return client.redisClient.Set(
context.Background(), key, string(bytes), 0).Err()
}
func (client Client) GetInterface(key string, val interface{}) error {
strCmd := client.redisClient.Get(context.Background(), key)
bytes, err := strCmd.Bytes()
if err != nil |
return json.Unmarshal(bytes, val)
}
func NewRedisOptions(cfg gostor.Config) *redis.Options {
return &redis.Options{
Addr: cfg.HostPort(),
Password: cfg.Password,
DB: cfg.CustomIndex}
}
| {
return err
} |
ubiClaimLocation.ts | import { Sequelize, DataTypes, Model } from 'sequelize';
import {
UbiClaimLocation,
UbiClaimLocationCreation,
} from '../../../interfaces/ubi/ubiClaimLocation';
export class ClaimLocationModel extends Model<
UbiClaimLocation,
UbiClaimLocationCreation
> {
public id!: number; | longitude: number;
};
// timestamps!
public readonly createdAt!: Date;
}
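// Registers the model on the given Sequelize instance; call this once at
// startup, before the model is used in any query.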
export function initializeUbiClaimLocation(sequelize: Sequelize): void {
ClaimLocationModel.init(
{
id: {
type: DataTypes.INTEGER,
autoIncrement: true,
primaryKey: true,
},
communityId: {
type: DataTypes.INTEGER,
references: {
model: 'community',
key: 'id',
},
onDelete: 'CASCADE',
allowNull: false,
},
gps: {
type: DataTypes.JSON,
allowNull: false,
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,
},
},
{
tableName: 'ubi_claim_location',
updatedAt: false,
sequelize,
}
);
} | public communityId!: number;
public gps!: {
latitude: number; |
radard.py | #!/usr/bin/env python
import zmq
import numpy as np
import numpy.matlib
import importlib
from collections import defaultdict
from fastcluster import linkage_vector
import selfdrive.messaging as messaging
from selfdrive.services import service_list
from selfdrive.controls.lib.latcontrol_helpers import calc_lookahead_offset
from selfdrive.controls.lib.pathplanner import PathPlanner
from selfdrive.controls.lib.radar_helpers import Track, Cluster, fcluster, \
RDR_TO_LDR, NO_FUSION_SCORE
from selfdrive.controls.lib.vehicle_model import VehicleModel
from selfdrive.swaglog import cloudlog
from cereal import car
from common.params import Params
from common.realtime import set_realtime_priority, Ratekeeper
from common.kalman.ekf import EKF, SimpleSensor
DEBUG = False
#vision point
DIMSV = 2
XV, SPEEDV = 0, 1
VISION_POINT = -1
class EKFV1D(EKF):
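  """1D Kalman filter for the vision-detected lead; the state vector is
  [distance, relative speed]."""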
def __init__(self):
super(EKFV1D, self).__init__(False)
self.identity = numpy.matlib.identity(DIMSV)
self.state = np.matlib.zeros((DIMSV, 1))
self.var_init = 1e2 # ~ model variance when probability is 70%, so good starting point
self.covar = self.identity * self.var_init
self.process_noise = np.matlib.diag([0.5, 1])
def calc_transfer_fun(self, dt):
tf = np.matlib.identity(DIMSV)
tf[XV, SPEEDV] = dt
tfj = tf
return tf, tfj
# fuses camera and radar data for best lead detection
def radard_thread(gctx=None):
set_realtime_priority(2)
# wait for stats about the car to come in from controls
cloudlog.info("radard is waiting for CarParams")
CP = car.CarParams.from_bytes(Params().get("CarParams", block=True))
mocked = True #CP.carName == "mock"
VM = VehicleModel(CP)
cloudlog.info("radard got CarParams")
# import the radar from the fingerprint
cloudlog.info("radard is importing %s", CP.carName)
RadarInterface = importlib.import_module('selfdrive.car.%s.radar_interface' % CP.carName).RadarInterface
context = zmq.Context()
# *** subscribe to features and model from visiond
poller = zmq.Poller()
model = messaging.sub_sock(context, service_list['model'].port, conflate=True, poller=poller)
live100 = messaging.sub_sock(context, service_list['live100'].port, conflate=True, poller=poller)
PP = PathPlanner()
RI = RadarInterface(CP)
last_md_ts = 0
last_l100_ts = 0
# *** publish live20 and liveTracks
live20 = messaging.pub_sock(context, service_list['live20'].port)
liveTracks = messaging.pub_sock(context, service_list['liveTracks'].port)
path_x = np.arange(0.0, 140.0, 0.1) # 140 meters is max
# Time-alignment
rate = 20. # model and radar are both at 20Hz
tsv = 1./rate
v_len = 20 # how many speed data points to remember for t alignment with rdr data
active = 0
steer_angle = 0.
steer_override = False
tracks = defaultdict(dict)
# Kalman filter stuff:
ekfv = EKFV1D()
speedSensorV = SimpleSensor(XV, 1, 2)
# v_ego
v_ego = None
v_ego_array = np.zeros([2, v_len])
v_ego_t_aligned = 0.
rk = Ratekeeper(rate, print_delay_threshold=np.inf)
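  # 20Hz main loop: read radar points, fuse them with the vision model's lead,
  # cluster the tracks and publish live20 (leads) and liveTracks (debugging).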
while 1:
rr = RI.update()
ar_pts = {}
for pt in rr.points:
ar_pts[pt.trackId] = [pt.dRel + RDR_TO_LDR, pt.yRel, pt.vRel, pt.measured]
# receive the live100s
l100 = None
md = None
for socket, event in poller.poll(0):
if socket is live100:
l100 = messaging.recv_one(socket)
elif socket is model:
md = messaging.recv_one(socket)
if l100 is not None:
active = l100.live100.active
v_ego = l100.live100.vEgo
steer_angle = l100.live100.angleSteers
steer_override = l100.live100.steerOverride
v_ego_array = np.append(v_ego_array, [[v_ego], [float(rk.frame)/rate]], 1)
v_ego_array = v_ego_array[:, 1:]
last_l100_ts = l100.logMonoTime
if v_ego is None:
continue
if md is not None:
last_md_ts = md.logMonoTime
# *** get path prediction from the model ***
PP.update(v_ego, md)
# run kalman filter only if prob is high enough
if PP.lead_prob > 0.7:
ekfv.update(speedSensorV.read(PP.lead_dist, covar=PP.lead_var))
ekfv.predict(tsv)
ar_pts[VISION_POINT] = (float(ekfv.state[XV]), np.polyval(PP.d_poly, float(ekfv.state[XV])),
float(ekfv.state[SPEEDV]), False)
else:
|
# *** compute the likely path_y ***
if (active and not steer_override) or mocked:
# use path from model (always when mocking as steering is too noisy)
path_y = np.polyval(PP.d_poly, path_x)
else:
      # use path from steer; pass angle_offset=0 so only the physical offset is reported
path_y = calc_lookahead_offset(v_ego, steer_angle, path_x, VM, angle_offset=0)[0]
# *** remove missing points from meta data ***
for ids in tracks.keys():
if ids not in ar_pts:
tracks.pop(ids, None)
# *** compute the tracks ***
for ids in ar_pts:
# ignore standalone vision point, unless we are mocking the radar
if ids == VISION_POINT and not mocked:
continue
rpt = ar_pts[ids]
# align v_ego by a fixed time to align it with the radar measurement
cur_time = float(rk.frame)/rate
v_ego_t_aligned = np.interp(cur_time - RI.delay, v_ego_array[1], v_ego_array[0])
d_path = np.sqrt(np.amin((path_x - rpt[0]) ** 2 + (path_y - rpt[1]) ** 2))
# add sign
d_path *= np.sign(rpt[1] - np.interp(rpt[0], path_x, path_y))
# create the track if it doesn't exist or it's a new track
if ids not in tracks:
tracks[ids] = Track()
tracks[ids].update(rpt[0], rpt[1], rpt[2], d_path, v_ego_t_aligned, rpt[3], steer_override)
# allow the vision model to remove the stationary flag if distance and rel speed roughly match
if VISION_POINT in ar_pts:
fused_id = None
best_score = NO_FUSION_SCORE
for ids in tracks:
dist_to_vision = np.sqrt((0.5*(ar_pts[VISION_POINT][0] - tracks[ids].dRel)) ** 2 + (2*(ar_pts[VISION_POINT][1] - tracks[ids].yRel)) ** 2)
rel_speed_diff = abs(ar_pts[VISION_POINT][2] - tracks[ids].vRel)
tracks[ids].update_vision_score(dist_to_vision, rel_speed_diff)
if best_score > tracks[ids].vision_score:
fused_id = ids
best_score = tracks[ids].vision_score
if fused_id is not None:
tracks[fused_id].vision_cnt += 1
tracks[fused_id].update_vision_fusion()
if DEBUG:
print "NEW CYCLE"
if VISION_POINT in ar_pts:
print "vision", ar_pts[VISION_POINT]
idens = tracks.keys()
track_pts = np.array([tracks[iden].get_key_for_cluster() for iden in idens])
# If we have multiple points, cluster them
if len(track_pts) > 1:
link = linkage_vector(track_pts, method='centroid')
cluster_idxs = fcluster(link, 2.5, criterion='distance')
clusters = [None]*max(cluster_idxs)
for idx in xrange(len(track_pts)):
cluster_i = cluster_idxs[idx]-1
      if clusters[cluster_i] is None:
clusters[cluster_i] = Cluster()
clusters[cluster_i].add(tracks[idens[idx]])
elif len(track_pts) == 1:
# TODO: why do we need this?
clusters = [Cluster()]
clusters[0].add(tracks[idens[0]])
else:
clusters = []
if DEBUG:
for i in clusters:
print i
# *** extract the lead car ***
lead_clusters = [c for c in clusters
if c.is_potential_lead(v_ego)]
lead_clusters.sort(key=lambda x: x.dRel)
lead_len = len(lead_clusters)
# *** extract the second lead from the whole set of leads ***
lead2_clusters = [c for c in lead_clusters
if c.is_potential_lead2(lead_clusters)]
lead2_clusters.sort(key=lambda x: x.dRel)
lead2_len = len(lead2_clusters)
# *** publish live20 ***
dat = messaging.new_message()
dat.init('live20')
dat.live20.mdMonoTime = last_md_ts
dat.live20.canMonoTimes = list(rr.canMonoTimes)
dat.live20.radarErrors = list(rr.errors)
dat.live20.l100MonoTime = last_l100_ts
if lead_len > 0:
lead_clusters[0].toLive20(dat.live20.leadOne)
if lead2_len > 0:
lead2_clusters[0].toLive20(dat.live20.leadTwo)
else:
dat.live20.leadTwo.status = False
else:
dat.live20.leadOne.status = False
dat.live20.cumLagMs = -rk.remaining*1000.
live20.send(dat.to_bytes())
# *** publish tracks for UI debugging (keep last) ***
dat = messaging.new_message()
dat.init('liveTracks', len(tracks))
for cnt, ids in enumerate(tracks.keys()):
if DEBUG:
print "id: %4.0f x: %4.1f y: %4.1f vr: %4.1f d: %4.1f va: %4.1f vl: %4.1f vlk: %4.1f alk: %4.1f s: %1.0f v: %1.0f" % \
(ids, tracks[ids].dRel, tracks[ids].yRel, tracks[ids].vRel,
tracks[ids].dPath, tracks[ids].vLat,
tracks[ids].vLead, tracks[ids].vLeadK,
tracks[ids].aLeadK,
tracks[ids].stationary,
tracks[ids].measured)
dat.liveTracks[cnt].trackId = ids
dat.liveTracks[cnt].dRel = float(tracks[ids].dRel)
dat.liveTracks[cnt].yRel = float(tracks[ids].yRel)
dat.liveTracks[cnt].vRel = float(tracks[ids].vRel)
dat.liveTracks[cnt].aRel = float(tracks[ids].aRel)
dat.liveTracks[cnt].stationary = tracks[ids].stationary
dat.liveTracks[cnt].oncoming = tracks[ids].oncoming
liveTracks.send(dat.to_bytes())
rk.monitor_time()
def main(gctx=None):
radard_thread(gctx)
if __name__ == "__main__":
main()
| ekfv.state[XV] = PP.lead_dist
ekfv.covar = (np.diag([PP.lead_var, ekfv.var_init]))
ekfv.state[SPEEDV] = 0.
if VISION_POINT in ar_pts:
del ar_pts[VISION_POINT] |
GutscheinNr.go | package golang | type GutscheinNr struct {
GutscheinNr string `json:"GutscheinNr"`
} | |
inject.go | // +build debug
/*
Copyright © 2020 The OpenEBS Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package inject
import (
"os"
"strconv"
"time"
"github.com/sirupsen/logrus"
)
var (
pingTimeout = true
)
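// Envs maps a replica IP to the set of fault-injection flags enabled for it
// (see PanicWhileSettingCheckpoint below).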
var Envs map[string](map[string]bool)
// DisablePunchHoles is used for disabling punch holes
func D | ) bool {
ok := os.Getenv("DISABLE_PUNCH_HOLES")
if ok == "True" {
return true
}
return false
}
// AddTimeout add delays into the code
func AddTimeout() {
timeout, _ := strconv.Atoi(os.Getenv("DEBUG_TIMEOUT"))
logrus.Infof("Add timeout of %vs for debug build", timeout)
time.Sleep(time.Duration(timeout) * time.Second)
}
// AddPingTimeout add delay in ping response
func AddPingTimeout() {
if pingTimeout {
timeout, _ := strconv.Atoi(os.Getenv("RPC_PING_TIMEOUT"))
logrus.Infof("Add ping timeout of %vs for debug build", timeout)
time.Sleep(time.Duration(timeout) * time.Second)
pingTimeout = false
}
}
// AddPreloadTimeout add delay in preload
func AddPreloadTimeout() {
timeout, _ := strconv.Atoi(os.Getenv("PRELOAD_TIMEOUT"))
logrus.Infof("Add preload timeout of %vs for debug build", timeout)
time.Sleep(time.Duration(timeout) * time.Second)
}
// PanicAfterPrepareRebuild panic the replica just after prepare rebuild
func PanicAfterPrepareRebuild() {
ok := os.Getenv("PANIC_AFTER_PREPARE_REBUILD")
if ok == "TRUE" {
time.Sleep(2 * time.Second)
panic("panic replica after getting start signal")
}
}
// PanicWhileSettingCheckpoint panics the replica on receiving SetCheckpoint REST Call
func PanicWhileSettingCheckpoint(replicaIP string) {
ok := os.Getenv("PANIC_WHILE_SETTING_CHECKPOINT")
if ok == "TRUE" || (Envs[replicaIP])["PANIC_WHILE_SETTING_CHECKPOINT"] {
panic("panic replica while setting checkpoint")
}
}
// AddPunchHoleTimeout add delay in while punching hole
func AddPunchHoleTimeout() {
timeout, _ := strconv.Atoi(os.Getenv("PUNCH_HOLE_TIMEOUT"))
logrus.Infof("Add punch hole timeout of %vs for debug build", timeout)
time.Sleep(time.Duration(timeout) * time.Second)
}
var UpdateLUNMapTimeoutTriggered bool
// AddUpdateLUNMapTimeout adds delay during UpdateLUNMap
func AddUpdateLUNMapTimeout() {
timeout, _ := strconv.Atoi(os.Getenv("UpdateLUNMap_TIMEOUT"))
logrus.Infof("AddUpdateLUNMap timeout of %vs for debug build", timeout)
UpdateLUNMapTimeoutTriggered = true
time.Sleep(time.Duration(timeout) * time.Second)
}
| isablePunchHoles( |
m2ee.py | #!/usr/bin/python
#
# Copyright (C) 2009 Mendix. All rights reserved.
#
from __future__ import print_function
import argparse
import atexit
import cmd
import datetime
import getpass
import logging
import os
import pwd
import random
import shlex
import signal
import string
import subprocess
import sys
import yaml
from m2ee import pgutil, M2EE, client_errno
import m2ee
logger = logging
if not sys.stdout.isatty():
import codecs
import locale
sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
try:
raw_input
except NameError:
raw_input = input
class CLI(cmd.Cmd, object):
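    """Interactive m2ee console; cmd.Cmd exposes every do_* method defined
    below as a shell command (start, stop, status, ...)."""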
def __init__(self, yaml_files=None, yolo_mode=False):
logger.debug('Using m2ee-tools version %s' % m2ee.__version__)
cmd.Cmd.__init__(self)
self.m2ee = M2EE(yaml_files=yaml_files)
self.yolo_mode = yolo_mode
self.prompt_username = pwd.getpwuid(os.getuid())[0]
self._default_prompt = "m2ee(%s): " % self.prompt_username
self.prompt = self._default_prompt
self.nodetach = False
def do_restart(self, args):
if self._stop():
self._start()
def do_stop(self, args):
self._stop()
def do_start(self, args):
self._start()
def _stop(self):
logger.debug("Trying to stop the application.")
stopped = self.m2ee.stop()
if stopped:
return True
answer = None
while answer not in ('y', 'n'):
answer = ('y' if self.yolo_mode
else raw_input("Do you want to try to signal the JVM "
"process to stop immediately? (y)es, (n)o? "))
if answer == 'y':
stopped = self.m2ee.terminate()
if stopped:
return True
elif answer == 'n':
logger.info("Doing nothing, use stop again to check if the "
"process finally disappeared...")
return False
else:
print("Unknown option %s" % answer)
answer = None
while answer not in ('y', 'n'):
answer = ('y' if self.yolo_mode
else raw_input("Do you want to kill the JVM process? "
"(y)es, (n)o? "))
if answer == 'y':
stopped = self.m2ee.kill()
if stopped:
return True
elif answer == 'n':
logger.info("Doing nothing, use stop again to check if the "
"process finally disappeared...")
return False
else:
print("Unknown option %s" % answer)
return False
def _start(self):
"""
This function deals with the start-up sequence of the Mendix Runtime.
        Starting the Mendix Runtime can fail in either a temporary or a
        permanent way. See the client_errno module for possible error codes.
"""
if not self.m2ee.config.all_systems_are_go():
raise m2ee.exceptions.M2EEException(
"The application cannot be started because no application "
"model is present, or because of other previous errors."
)
if not self.m2ee.config.get_runtime_path():
raise m2ee.exceptions.M2EEException(
"It appears that the Mendix Runtime version which has to be "
"used for your application is not present yet. You can try "
"downloading it using the download_runtime command."
)
self.m2ee.start_appcontainer(detach=not self.nodetach)
try:
self.m2ee.send_runtime_config()
except m2ee.client.M2EEAdminException as e:
logger.error("Sending configuration failed: %s" % e.cause)
logger.error("You'll have to fix the configuration and run start again...")
self._stop()
return
abort = False
fully_started = False
params = {}
while not (fully_started or abort):
try:
self.m2ee.start_runtime(params)
fully_started = True
except m2ee.client.M2EEAdminException as e:
logger.error(e)
if e.result == client_errno.start_NO_EXISTING_DB:
if self.yolo_mode:
# This call tries to create a database and immediately execute
# ddl commands.
self.m2ee.client.execute_ddl_commands()
else:
abort = True
elif e.result == client_errno.start_INVALID_DB_STRUCTURE:
answer = self._handle_ddl_commands()
if answer == 'a':
abort = True
elif e.result == client_errno.start_MISSING_MF_CONSTANT:
logger.error("You'll have to add the constant definitions "
"to the configuration in the "
"MicroflowConstants section.")
abort = True
elif e.result == client_errno.start_ADMIN_1:
users = e.feedback['users']
if self.yolo_mode:
self._handle_admin_1_yolo(users)
else:
answer = self._handle_admin_1(users)
if answer == 'a':
abort = True
else:
abort = True
if abort:
self._stop()
def _handle_ddl_commands(self):
feedback = self.m2ee.client.get_ddl_commands({"verbose": True})
answer = None
while answer not in ('v', 's', 'e', 'a'):
answer = ('e' if self.yolo_mode
else raw_input("Do you want to (v)iew queries, (s)ave them to "
"a file, (e)xecute and save them, or (a)bort: "))
if answer == 'a':
pass
elif answer == 'v':
print('\n'.join(feedback['ddl_commands']))
answer = None
elif answer in ('e', 's'):
ddl_commands = feedback['ddl_commands']
self.m2ee.save_ddl_commands(ddl_commands)
if answer == 'e':
self.m2ee.client.execute_ddl_commands()
else:
print("Unknown option %s" % answer)
return answer
def _handle_admin_1(self, users):
answer = None
while answer not in ('c', 'a'):
answer = raw_input("Do you want to (c)hange passwords or "
"(a)bort: ")
if answer == 'a':
pass
elif answer == 'c':
for username in users:
changed = False
while not changed:
newpw1 = getpass.getpass("Type new password for user "
"%s: " % username)
                    newpw2 = getpass.getpass("Type new password for user "
                                             "%s again: " % username)
if newpw1 != newpw2:
print("The passwords are not equal!")
else:
try:
self.m2ee.client.update_admin_user(
{"username": username, "password": newpw1})
changed = True
except m2ee.client.M2EEAdminException as e:
logger.error(e)
else:
print("Unknown option %s" % answer)
return answer
def _handle_admin_1_yolo(self, users):
for username in users:
newpasswd = self._generate_password()
logger.info("Changing password for user %s to %s" %
(username, newpasswd))
self.m2ee.client.update_admin_user({
"username": username,
"password": newpasswd,
})
def _generate_password(self):
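        # Draw 10-20 random characters from each character class so that every
        # class is represented, then shuffle to avoid a predictable ordering.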
newpasswd_list = []
for choosefrom in [
string.ascii_lowercase,
string.ascii_uppercase,
string.digits,
string.punctuation,
]:
newpasswd_list.extend([random.choice(choosefrom)
for _ in range(random.randint(10, 20))])
random.shuffle(newpasswd_list)
return ''.join(newpasswd_list)
def do_create_admin_user(self, args=None):
if not self.m2ee.client.ping():
logger.warn("The application process needs to be running to "
"create a user object in the application.")
return
print("This option will create an administrative user account, using "
"the preset username and user role settings.")
newpw1 = getpass.getpass("Type new password for this user: ")
newpw2 = getpass.getpass("Type new password for this user again: ")
if newpw1 != newpw2:
print("The passwords are not equal!")
else:
self.m2ee.client.create_admin_user({"password": newpw1})
def do_update_admin_user(self, args=None):
if not self.m2ee.client.ping():
logger.warn("The application process needs to be running to "
"change user objects in the application.")
return
print("Using this function you can reset the password of an "
"administrative user account.")
username = raw_input("User name: ")
newpw1 = getpass.getpass("Type new password for user %s: " % username)
newpw2 = getpass.getpass("Type new password for user %s again: " %
username)
if newpw1 != newpw2:
print("The passwords are not equal!")
else:
self.m2ee.client.update_admin_user({"username": username, "password": newpw1})
def do_debug(self, args):
answer = raw_input("This command will throw you into a local python "
"debug session inside the M2EE object! Continue "
"(y/N)?")
if answer == 'y':
import code
code.interact(local=locals())
def do_status(self, args):
feedback = self.m2ee.client.runtime_status(timeout=3)
status = feedback['status']
logger.info("The application process is running, the MxRuntime has status: %s" % status)
if status != 'running':
return
critlist = self.m2ee.client.get_critical_log_messages()
if len(critlist) > 0:
logger.error("%d critical error(s) were logged. Use show_critical"
"_log_messages to view them." % len(critlist))
max_show_users = 10
total_users = self._who(max_show_users)
if total_users > max_show_users:
logger.info("Only showing %s logged in users. Use who to see a "
"complete list." % max_show_users)
def do_show_critical_log_messages(self, args):
errors = self.m2ee.client.get_critical_log_messages()
if len(errors) == 0:
logger.info("No messages were logged to a critical loglevel since "
"starting the application.")
return
for error in errors:
errorline = []
if 'message' in error and error['message'] != '':
errorline.append("- %s" % error['message'])
if 'cause' in error and error['cause'] != '':
errorline.append("- Caused by: %s" % error['cause'])
if len(errorline) == 0:
errorline.append("- [No message or cause was logged]")
errorline.insert(
0,
datetime.datetime.fromtimestamp(error['timestamp'] / 1000)
.strftime("%Y-%m-%d %H:%M:%S")
)
print(' '.join(errorline))
def do_check_health(self, args):
feedback = self.m2ee.client.check_health()
if feedback['health'] == 'healthy':
logger.info("Health check microflow says the application is healthy.")
elif feedback['health'] == 'sick':
logger.warning("Health check microflow says the application "
"is sick: %s" % feedback['diagnosis'])
elif feedback['health'] == 'unknown':
logger.info("Health check microflow is not configured, no "
"health information available.")
else:
logger.error("Unexpected health check status: %s" % feedback['health'])
def do_statistics(self, args):
stats = self.m2ee.client.runtime_statistics()
stats.update(self.m2ee.client.server_statistics())
print(yaml.safe_dump(stats, default_flow_style=False))
def do_show_cache_statistics(self, args):
stats = self.m2ee.client.cache_statistics()
print(yaml.safe_dump(stats, default_flow_style=False))
def do_munin_config(self, args):
m2ee.munin.print_config(
self.m2ee,
self.prompt_username,
)
def do_munin_values(self, args):
m2ee.munin.print_values(
self.m2ee,
self.prompt_username,
)
def do_nagios(self, args):
logger.info("The nagios plugin will exit m2ee after running, this is "
"by design, don't report it as bug.")
# TODO: implement as separate program after libraryfying m2ee
sys.exit(m2ee.nagios.check(self.m2ee.runner, self.m2ee.client))
def do_about(self, args):
print('Using m2ee-tools version %s' % m2ee.__version__)
feedback = self.m2ee.client.about()
print("Using %s version %s" % (feedback['name'], feedback['version']))
print(feedback['copyright'])
if self.m2ee.config.get_runtime_version() >= 4.4:
if 'model_version' in feedback:
print('Model version: %s' % feedback['model_version'])
def do_show_license_information(self, args):
feedback = self.m2ee.client.get_license_information()
if 'license' in feedback:
logger.debug(yaml.safe_dump(feedback['license'],
allow_unicode=True))
import copy
licensecopy = copy.deepcopy(feedback['license'])
self._print_license(licensecopy)
elif 'license_id' in feedback:
print("Unlicensed environment.")
print("Server ID: %s" % feedback['license_id'])
else:
print("Unlicensed environment.")
def _print_license(self, licensecopy):
print("Server ID: %s" % licensecopy.pop('LicenseID', 'Unknown'))
print("License Type: %s" % licensecopy.pop('LicenseType', 'Unknown'))
if 'ExpirationDate' in licensecopy:
print("Expiration Date: %s" %
datetime.datetime.fromtimestamp(
licensecopy.pop('ExpirationDate') / 1000
)
.strftime("%a, %d %b %Y %H:%M:%S %z")
.rstrip())
print("Runtime Mode: %s" % licensecopy.pop('RuntimeMode', 'Unknown'))
print("Company: %s" % licensecopy.pop('Company', 'Unknown'))
limitations = licensecopy.pop('UserLimitations', None)
separate_anonymous = licensecopy.pop('SeparateAnonymousUsers', True)
if limitations is not None:
print("License Limitations:")
for limitation in limitations:
self._print_license_limitation(limitation, separate_anonymous)
if len(licensecopy) > 1:
print(yaml.safe_dump(licensecopy, allow_unicode=True))
def _print_license_limitation(self, limitation, separate_anonymous):
if limitation['LimitationType'] == 'Named':
if limitation['AmountType'] == 'Unlimited':
print("- Unlimited named %suser accounts allowed." %
('' if separate_anonymous else "and anonymous "))
else:
print(" - %s named user account%s allowed" %
(limitation['NumberOfAllowedUsers'],
's' if limitation['NumberOfAllowedUsers'] != 1 else ''))
elif limitation['LimitationType'] == 'Concurrent':
if limitation['AmountType'] == 'Unlimited':
print("- Unlimited concurrent named %suser sessions allowed."
% ("" if separate_anonymous else "and anonymous "))
else:
print("- %s concurrent named %suser session%s allowed." %
(
limitation['NumberOfAllowedUsers'],
'' if separate_anonymous else "and anonymous ",
('s' if limitation['NumberOfAllowedUsers'] != 1
else '')))
elif (limitation['LimitationType'] == 'ConcurrentAnonymous' and
separate_anonymous):
if limitation['AmountType'] == 'Unlimited':
print("- Unlimited concurrent anonymous user sessions "
"allowed.")
else:
print("- %s concurrent anonymous session%s allowed." %
(
limitation['NumberOfAllowedUsers'],
('s' if limitation['NumberOfAllowedUsers'] != 1
else '')))
def do_activate_license(self, args):
self.m2ee.client.require_action("set_license")
print("The command activate_license will set the license key used in "
"this application.")
runtime_version = m2ee.version.MXVersion(self.m2ee.client.about()['version'])
if runtime_version < 4.1:
print("Mendix Runtime versions before 4.1 do not check the "
"submitted license key for validity, so incorrect input "
"will un-license your Mendix application without warning! "
"After setting the license, use show_license_information "
"to check the active license. Also... after setting the "
"license in versions before Mendix 4.1 you will need to "
"restart the application again to be sure it is fully "
"activated.")
answer = raw_input("Do you want to continue anyway? (type YES if "
"you want to): ")
if answer != 'YES':
print("Aborting.")
return
if not args:
license_key = raw_input("Paste your license key (a long text "
"string without newlines) or empty input "
"to abort: ")
else:
license_key = args
if not license_key:
print("Aborting.")
return
self.m2ee.client.set_license({'license_key': license_key})
def do_enable_debugger(self, args):
self.m2ee.client.require_action("enable_debugger")
if not args:
debugger_password = raw_input(
"Please enter the password to be used for remote debugger "
"access from the modeler, or leave blank to auto-generate "
"a password: ")
if not debugger_password:
debugger_password = ''.join(
random.choice(string.letters + string.digits)
for x in range(random.randint(20, 30)))
else:
debugger_password = args
self.m2ee.client.enable_debugger({'password': debugger_password})
logger.info("The remote debugger is now enabled, the password to "
"use is %s" % debugger_password)
logger.info("You can use the remote debugger option in the Mendix "
"Business Modeler to connect to the /debugger/ sub "
"url on your application (e.g. "
"https://app.example.com/debugger/). ")
def do_disable_debugger(self, args):
self.m2ee.client.disable_debugger()
logger.info("The remote debugger is now disabled.")
def do_show_debugger_status(self, args):
feedback = self.m2ee.client.get_debugger_status()
enabled = feedback['enabled']
connected = feedback['client_connected']
paused = feedback['number_of_paused_microflows']
logger.info("The remote debugger is currently %s." %
("enabled" if enabled else "disabled"))
if connected:
logger.info("A debugger session is connected.")
elif enabled:
logger.info("There is no connected debugger session.")
if enabled and paused == 0:
logger.info("There are no paused microflows.")
elif paused == 1:
logger.info("There is 1 paused microflow.")
elif paused > 1:
logger.info("There are %s paused microflows." % paused)
def do_who(self, args):
if args:
try:
limitint = int(args)
self._who(limitint)
except ValueError:
logger.warn("Could not parse argument to an integer. Use a "
"number as argument to limit the amount of logged "
"in users shown.")
else:
self._who()
def do_w(self, args):
self.do_who(args)
def do_reload(self, args):
logger.debug("Reloading configuration...")
self.m2ee.reload_config()
def do_dump_config(self, args):
self.m2ee.config.dump()
def do_set_database_password(self, args):
password = getpass.getpass("Database password: ")
self.m2ee.config.set_database_password(password)
def do_psql(self, args):
if not self.m2ee.config.is_using_postgresql():
logger.error("Only PostgreSQL databases are supported right now.")
return
pgutil.psql(self.m2ee.config)
def do_dumpdb(self, args):
if not self.m2ee.config.is_using_postgresql():
logger.error("Only PostgreSQL databases are supported right now.")
return
if len(args) > 0:
pgutil.dumpdb(self.m2ee.config, args)
else:
pgutil.dumpdb(self.m2ee.config)
def do_restoredb(self, args):
if not self.m2ee.config.allow_destroy_db():
logger.error("Refusing to do a destructive database operation "
"because the allow_destroy_db configuration option "
"is set to false.")
return
if not self.m2ee.config.is_using_postgresql():
logger.error("Only PostgreSQL databases are supported right now.")
return
if not args:
logger.error("restoredb needs the name of a dump file in %s as arg"
"ument" % self.m2ee.config.get_database_dump_path())
return
(pid_alive, m2ee_alive) = self.m2ee.check_alive()
if pid_alive or m2ee_alive:
logger.warn("The application is still running, refusing to "
"restore the database right now.")
return
database_name = self.m2ee.config.get_pg_environment()['PGDATABASE']
answer = ('y' if self.yolo_mode
else raw_input("This command will restore this dump into database "
"%s. Continue? (y)es, (N)o? " % database_name))
if answer != 'y':
logger.info("Aborting!")
return
pgutil.restoredb(self.m2ee.config, args)
def complete_restoredb(self, text, line, begidx, endidx):
if not self.m2ee.config.is_using_postgresql():
return []
database_dump_path = self.m2ee.config.get_database_dump_path()
return [f for f in os.listdir(database_dump_path)
if os.path.isfile(os.path.join(database_dump_path, f)) and
f.startswith(text) and
f.endswith(".backup")]
def do_emptydb(self, args):
if not self.m2ee.config.allow_destroy_db():
logger.error("Refusing to do a destructive database operation "
"because the allow_destroy_db configuration option "
"is set to false.")
return
if not self.m2ee.config.is_using_postgresql():
logger.error("Only PostgreSQL databases are supported right now.")
return
(pid_alive, m2ee_alive) = self.m2ee.check_alive()
if pid_alive or m2ee_alive:
logger.warn("The application process is still running, refusing "
"to empty the database right now.")
return
logger.info("This command will drop all tables and sequences in "
"database %s." %
self.m2ee.config.get_pg_environment()['PGDATABASE'])
answer = ('y' if self.yolo_mode
else raw_input("Continue? (y)es, (N)o? "))
if answer != 'y':
print("Aborting!")
return
pgutil.emptydb(self.m2ee.config)
    def do_unpack(self, args):
if not args:
logger.error("unpack needs the name of a model upload zipfile in "
"%s as argument" %
self.m2ee.config.get_model_upload_path())
return
(pid_alive, m2ee_alive) = self.m2ee.check_alive()
if pid_alive or m2ee_alive:
logger.error("The application process is still running, refusing "
"to unpack a new application model right now.")
return
logger.info("This command will replace the contents of the model/ and "
"web/ locations, using the files extracted from the "
"archive")
answer = ('y' if self.yolo_mode
else raw_input("Continue? (y)es, (N)o? "))
if answer != 'y':
logger.info("Aborting!")
return
self.m2ee.unpack(args)
def complete_unpack(self, text, line, begidx, endidx):
        # These complete_* functions seem to eat exceptions, which is very bad
        # behaviour: if anything in here throws an exception, you just won't
        # get completion, without knowing why.
model_upload_path = self.m2ee.config.get_model_upload_path()
logger.trace("complete_unpack: Looking for %s in %s" %
(text, model_upload_path))
return [f for f in os.listdir(model_upload_path)
if os.path.isfile(os.path.join(model_upload_path, f))
and f.startswith(text)
and (f.endswith(".zip") or f.endswith(".mda"))]
def do_check_constants(self, args):
constants_to_use, default_constants, obsolete_constants = self.m2ee.config.get_constants()
if len(default_constants) > 0:
logger.info('Missing constant definitions (model defaults will be used):')
for name in sorted(default_constants.keys()):
logger.info('- %s' % name)
else:
logger.info('All required constant definitions have explicit definitions.')
if len(obsolete_constants) > 0:
logger.info('Constants defined but not needed by the application:')
for name in sorted(obsolete_constants.keys()):
logger.info('- %s' % name)
def do_log(self, args):
if self._cleanup_logging():
return
logfile = self.m2ee.config.get_logfile()
if not logfile:
logger.warn("logfile location is not specified")
return
print("This command will start printing log information from the "
"application right in the middle of all of the other output on "
"your screen. This can be confusing, especially when you're "
"typing something and everything gets messed up by the logging. "
"Issuing the log command again will turn off logging output.")
answer = ('y' if self.yolo_mode
else raw_input("Do you want to start log output (y/N): "))
if answer == 'y':
cmd = ("tail", "-F", logfile)
proc = subprocess.Popen(cmd)
self.m2ee._logproc = proc
self.prompt = "LOG %s" % self._default_prompt
def do_loglevel(self, args):
try:
args = shlex.split(args)
except ValueError as ve:
logger.error("Input cannot be parsed: %s" % ve.message)
return
if len(args) == 3:
(subscriber, node, level) = args
self._set_log_level(subscriber, node, level)
else:
if len(args) == 0:
self._get_log_levels()
print("To adjust loglevels, use: loglevel <subscribername> "
"<lognodename> <level>")
print("Available levels: NONE, CRITICAL, ERROR, WARNING, INFO, "
"DEBUG, TRACE")
def _get_log_levels(self):
log_levels = self.m2ee.get_log_levels()
print("Current loglevels:")
log_subscribers = []
for (subscriber_name, node_names) in log_levels.items():
for (node_name, subscriber_level) in node_names.items():
log_subscribers.append("%s %s %s" %
(subscriber_name,
node_name,
subscriber_level))
log_subscribers.sort()
print("\n".join(log_subscribers))
def _set_log_level(self, subscriber, node, level):
level = level.upper()
try:
self.m2ee.set_log_level(subscriber, node, level)
logger.info("Loglevel for %s set to %s" % (node, level))
except m2ee.client.M2EEAdminException as e:
print("Remember, all parameters are case sensitive")
raise e
def do_show_current_runtime_requests(self, args):
feedback = self.m2ee.client.get_current_runtime_requests()
if len(feedback) == 0:
logger.info("There are no currently running runtime requests.")
else:
print("Current running Runtime Requests:")
print(yaml.safe_dump(feedback, default_flow_style=False))
def do_show_all_thread_stack_traces(self, args):
feedback = self.m2ee.client.get_all_thread_stack_traces()
print("Current JVM Thread Stacktraces:")
print(yaml.safe_dump(feedback, default_flow_style=False))
def do_interrupt_request(self, args):
if args == "":
logger.error("This function needs a request id as parameter")
logger.error("Use show_current_runtime_requests to view currently "
"running requests")
return
feedback = self.m2ee.client.interrupt_request({"request_id": args})
if feedback["result"] is False:
logger.error("A request with ID %s was not found" % args)
else:
logger.info("An attempt to cancel the running action was "
"made.")
def do_nodetach(self, args):
self.nodetach = True
logger.info("Setting nodetach, application process will not run in the background.")
def do_exit(self, args):
return self._exit()
def do_quit(self, args):
return self._exit()
def do_EOF(self, args):
print("exit")
return self._exit()
def _exit(self):
if self.m2ee.runner.check_attached_proc():
logger.warning("There is still an attached application process running. "
"Stop it first.")
return None
return -1
def do_download_runtime(self, args):
if args:
mxversion = m2ee.version.MXVersion(args)
else:
mxversion = self.m2ee.config.get_runtime_version()
if mxversion is None:
logger.info("You did not specify a Mendix Runtime version to "
"download, and no current unpacked application "
"model is available to determine the version from. "
"Specify a version number or use unpack first.")
return
if self.m2ee.config.lookup_in_mxjar_repo(str(mxversion)):
logger.info("The Mendix Runtime for version %s is already "
"installed. If you want to download another Runtime "
"version, specify the version number as argument to "
"download_runtime." % mxversion)
return
self.m2ee.download_and_unpack_runtime(mxversion)
def do_cleanup_runtimes(self, args):
self.m2ee.cleanup_runtimes_except([])
def do_cleanup_runtimes_except(self, args):
self.m2ee.cleanup_runtimes_except(args.split())
def complete_cleanup_runtimes_except(self, text, line, begidx, endidx):
words = line[:len(line)-len(text)].split()
found_versions = self.m2ee.list_installed_runtimes()
return ["%s " % version for version in found_versions
if version.startswith(text)
and version not in words[1:]]
def _cleanup_logging(self):
        # Also registered as an atexit handler in main(), so log tailing is
        # stopped when m2ee exits.
if self.m2ee._logproc:
logger.debug("Stopping log output...")
self.prompt = self._default_prompt
if not self.m2ee._logproc.poll():
os.kill(self.m2ee._logproc.pid, signal.SIGTERM)
self.m2ee._logproc = None
return True
return False
def _who(self, limitint=None):
limit = {}
if limitint is not None:
limit = {"limit": limitint}
feedback = self.m2ee.client.get_logged_in_user_names(limit)
logger.info("Logged in users: (%s) %s" %
(feedback['count'], feedback['users']))
return feedback['count']
def precmd(self, line):
try:
self.m2ee.reload_config_if_changed()
except m2ee.exceptions.M2EEException as e:
logger.critical(e)
return line
if line:
logger.trace("Executing command: %s" % line)
return line
def cmdloop_handle_ctrl_c(self):
quit = False
while quit is not True:
try:
self.cmdloop()
quit = True
except KeyboardInterrupt:
sys.stdout.write('\n')
def onecmd(self, line):
try:
return super(CLI, self).onecmd(line)
except m2ee.client.M2EEAdminNotAvailable:
(pid_alive, m2ee_alive) = self.m2ee.check_alive()
if not pid_alive and not m2ee_alive:
logger.info("The application process is not running.")
except m2ee.client.M2EEAdminException as e:
logger.error(e)
except m2ee.client.M2EEAdminHTTPException as e:
logger.error(e)
except m2ee.client.M2EERuntimeNotFullyRunning as e:
logger.error(e)
except m2ee.client.M2EEAdminTimeout as e:
logger.error(e)
except m2ee.exceptions.M2EEException as e:
logger.error(e)
def unchecked_onecmd(self, line):
super(CLI, self).onecmd(line)
# if the emptyline function is not defined, Cmd will automagically
# repeat the previous command given, and that's not what we want
def emptyline(self):
pass
def completenames(self, text, *ignored):
do_text = "do_%s" % text
suggestions = [a[3:] for a in self.get_names() if a.startswith(do_text)]
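        # If exactly one command matches and it has its own complete_* method,
        # append a space so readline immediately moves on to completing that
        # command's argument.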
if len(suggestions) == 1 \
and "complete_%s" % suggestions[0] in self.get_names():
suggestions[0] = "%s " % suggestions[0]
return suggestions
def do_help(self, args):
print("""Welcome to m2ee, the Mendix Runtime helper tools.
Available commands:
unpack - unpack an uploaded Mendix Deployment Archive from data/model-upload
download_runtime - download a missing Mendix Runtime distribution
start - try starting the application using the unpacked deployment files
stop - stop the application
restart - restart the application
status - display Mendix Runtime status (is the application running?)
create_admin_user - create first user when starting with an empty database
update_admin_user - reset the password of an application user
who, w - show currently logged in users
log - follow live logging from the application
loglevel - view and configure loglevels
about - show Mendix Runtime version information
check_constants - check for missing or unneeded constant definitions
enable_debugger - enable remote debugger API
disable_debugger - disable remote debugger API
show_debugger_status - show whether debugger is enabled or not
show_current_runtime_requests - show action stack of current running requests
interrupt_request - cancel a running runtime request
show_license_information - show details about the current Mendix license key
show_cache_statistics - show details about the runtime object cache
cleanup_runtimes - clean up downloaded Mendix Runtime versions, except the
one currently in use
cleanup_runtimes_except [<version> <version> ...] - clean up downloaded Mendix
Runtime versions, except the one currently in use and other ones specified
exit, quit, <ctrl>-d - exit m2ee
""")
if self.m2ee.config.is_using_postgresql():
print("""When using PostgreSQL, you can also use:
psql - start the postgresql shell
dumpdb - create a database dump into the data/database folder
emptydb - drop all tables and sequences from the database
restoredb - restore a database dump from the data/database folder
""")
if args == 'expert':
print("""Advanced commands:
statistics - show all application statistics that can be used for monitoring
show_all_thread_stack_traces - show all low-level JVM threads with stack trace
check_health - manually execute health check
Extra commands you probably don't need:
debug - dive into a local python debug session inside this program
dump_config - dump the yaml configuration information
nodetach - do not detach the application process after starting
reload - reload configuration from yaml files (this is done automatically)
munin_config - configure option for the built-in munin plugin
munin_values - show monitoring output gathered by the built-in munin plugin
nagios - execute the built-in nagios plugin (will exit m2ee)
activate_license - DANGEROUS - replace/set license key
""")
print("Hint: use tab autocompletion for commands!")
if args != 'expert':
print("Use help expert to show expert and debugging commands")
def start_console_logging(level):
logger = logging.getLogger()
logger.setLevel(level)
consolelogformatter = logging.Formatter("%(levelname)s: %(message)s")
class M2EELogFilter(logging.Filter):
def __init__(self, level, ge):
self.level = level
# log levels greater than and equal to (True), or below (False)
self.ge = ge
def filter(self, record):
if self.ge:
return record.levelno >= self.level
return record.levelno < self.level
    # log everything below ERROR to stdout
stdoutlog = logging.StreamHandler(sys.stdout)
stdoutlog.setFormatter(consolelogformatter)
stdoutfilter = M2EELogFilter(logging.ERROR, False)
stdoutlog.addFilter(stdoutfilter)
# log everything that's ERROR and more serious to stderr
stderrlog = logging.StreamHandler(sys.stderr)
stderrlog.setFormatter(consolelogformatter)
stderrfilter = M2EELogFilter(logging.ERROR, True)
stderrlog.addFilter(stderrfilter)
logger.addHandler(stdoutlog)
logger.addHandler(stderrlog)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-c",
action="append",
dest="yaml_files"
)
parser.add_argument(
"-v",
"--verbose",
action="count",
dest="verbose",
default=0,
help="increase verbosity of output (-vv to be even more verbose)"
)
parser.add_argument(
"-q",
"--quiet",
action="count",
dest="quiet",
default=0,
help="decrease verbosity of output (-qq to be even more quiet)"
)
parser.add_argument(
"-y",
"--yolo",
action="store_true",
default=False,
dest="yolo_mode",
help="automatically answer all questions to run as non-interactively as possible"
)
parser.add_argument(
"onecmd",
nargs='*',
)
args = parser.parse_args()
# how verbose should we be? see
# http://docs.python.org/release/2.7/library/logging.html#logging-levels
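    # Net effect: each flag counts twice below, so the level moves in steps
    # of 20 from the default of 20 (INFO). A single -q yields 40 (ERROR),
    # -qq exceeds 50 and is bumped to 100 (silent), and a single -v already
    # drops below DEBUG to the minimum of 5.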
verbosity = args.quiet - args.verbose
if args.quiet:
verbosity = verbosity + args.quiet
if args.verbose:
verbosity = verbosity - args.verbose
verbosity = verbosity * 10 + 20
if verbosity > 50:
verbosity = 100
if verbosity < 5:
verbosity = 5
start_console_logging(verbosity)
try:
cli = CLI(
yaml_files=args.yaml_files,
yolo_mode=args.yolo_mode,
)
except m2ee.exceptions.M2EEException as e:
logger.critical(e)
sys.exit(1)
atexit.register(cli._cleanup_logging)
if args.onecmd:
try:
cli.unchecked_onecmd(' '.join(args.onecmd))
except (m2ee.client.M2EEAdminException,
m2ee.client.M2EEAdminHTTPException,
m2ee.client.M2EERuntimeNotFullyRunning,
m2ee.client.M2EEAdminTimeout,
m2ee.exceptions.M2EEException) as e:
logger.error(e)
sys.exit(1)
except m2ee.client.M2EEAdminNotAvailable:
pid_alive, m2ee_alive = cli.m2ee.check_alive()
if not pid_alive and not m2ee_alive:
logger.info("The application process is not running.")
sys.exit(0)
sys.exit(1)
else:
logger.info("Application Name: %s" % cli.m2ee.config.get_app_name())
cli.onecmd('status')
cli.cmdloop_handle_ctrl_c()
if __name__ == '__main__':
main()
inheritance_101.py | class Mobile:
def dial(self, number):
print(f"dialing number {number}")
def ring(self):
print("ringing using built in tones.....")
class SmartMobile(Mobile):
def ring(self):
"""
        overriding a Method
        """
        print("ringing using custom ring tones .... ")
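
# A minimal usage sketch (not part of the original file): the overridden
# method is the one that runs on the subclass instance.
#
#   phone = SmartMobile()
#   phone.dial("555-0100")   # inherited unchanged from Mobile
#   phone.ring()             # uses SmartMobile's overriding ring()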
archiver.go | package containerfs // import "moby/pkg/containerfs"
import (
"archive/tar"
"fmt"
"io"
"os"
"path/filepath"
"time"
"moby/pkg/archive"
"moby/pkg/idtools"
"moby/pkg/system"
"github.com/sirupsen/logrus"
)
// TarFunc provides a function definition for a custom Tar function
type TarFunc func(string, *archive.TarOptions) (io.ReadCloser, error)
// UntarFunc provides a function definition for a custom Untar function
type UntarFunc func(io.Reader, string, *archive.TarOptions) error
// Archiver provides a similar implementation of the archive.Archiver package with the rootfs abstraction
type Archiver struct {
SrcDriver Driver
DstDriver Driver
Tar TarFunc
Untar UntarFunc
IDMapping *idtools.IdentityMapping
}
// TarUntar is a convenience function which calls Tar and Untar, with the output of one piped into the other.
// If either Tar or Untar fails, TarUntar aborts and returns the error.
func (archiver *Archiver) TarUntar(src, dst string) error {
logrus.Debugf("TarUntar(%s %s)", src, dst)
tarArchive, err := archiver.Tar(src, &archive.TarOptions{Compression: archive.Uncompressed})
if err != nil {
return err
}
defer tarArchive.Close()
options := &archive.TarOptions{
UIDMaps: archiver.IDMapping.UIDs(),
GIDMaps: archiver.IDMapping.GIDs(),
}
return archiver.Untar(tarArchive, dst, options)
}
// UntarPath untars the tar archive at path src into the destination directory dst.
func (archiver *Archiver) UntarPath(src, dst string) error {
tarArchive, err := archiver.SrcDriver.Open(src)
if err != nil {
return err
}
defer tarArchive.Close()
options := &archive.TarOptions{
UIDMaps: archiver.IDMapping.UIDs(),
GIDMaps: archiver.IDMapping.GIDs(),
}
return archiver.Untar(tarArchive, dst, options)
}
// CopyWithTar creates a tar archive of filesystem path `src`, and
// unpacks it at filesystem path `dst`.
// The archive is streamed directly with fixed buffering and no
// intermediary disk IO.
func (archiver *Archiver) CopyWithTar(src, dst string) error {
srcSt, err := archiver.SrcDriver.Stat(src)
if err != nil {
return err
}
if !srcSt.IsDir() {
return archiver.CopyFileWithTar(src, dst)
}
// if this archiver is set up with ID mapping we need to create
// the new destination directory with the remapped root UID/GID pair
// as owner
identity := idtools.Identity{UID: archiver.IDMapping.RootPair().UID, GID: archiver.IDMapping.RootPair().GID}
// Create dst, copy src's content into it
if err := idtools.MkdirAllAndChownNew(dst, 0755, identity); err != nil {
return err
}
logrus.Debugf("Calling TarUntar(%s, %s)", src, dst)
return archiver.TarUntar(src, dst)
}
// CopyFileWithTar emulates the behavior of the 'cp' command-line
// for a single file. It copies a regular file from path `src` to
// path `dst`, and preserves all its metadata.
func (archiver *Archiver) CopyFileWithTar(src, dst string) (err error) {
logrus.Debugf("CopyFileWithTar(%s, %s)", src, dst)
srcDriver := archiver.SrcDriver
dstDriver := archiver.DstDriver
srcSt, err := srcDriver.Stat(src)
if err != nil {
return err
}
if srcSt.IsDir() {
return fmt.Errorf("Can't copy a directory")
}
// Clean up the trailing slash. This must be done in an operating
// system specific manner.
if dst[len(dst)-1] == dstDriver.Separator() {
dst = dstDriver.Join(dst, srcDriver.Base(src))
}
	// The original call was system.MkdirAll, which is plain os.MkdirAll on
	// non-Windows platforms but has special handling on Windows.
if dstDriver.OS() == "windows" {
		// Windows containers on Windows (WCOW)
if err := system.MkdirAll(filepath.Dir(dst), 0700, ""); err != nil {
return err
}
} else {
// We can just use the driver.MkdirAll function
if err := dstDriver.MkdirAll(dstDriver.Dir(dst), 0700); err != nil {
return err
}
}
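	// Stream a single-entry tar archive through an in-memory pipe: the
	// goroutine below writes the tar header and file contents into the write
	// end while Untar consumes the read end, and errC carries the writer's
	// result back.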
r, w := io.Pipe()
errC := make(chan error, 1)
go func() {
defer close(errC)
errC <- func() error {
defer w.Close()
srcF, err := srcDriver.Open(src)
if err != nil {
return err
}
defer srcF.Close()
hdr, err := tar.FileInfoHeader(srcSt, "")
if err != nil {
return err
}
hdr.Format = tar.FormatPAX
hdr.ModTime = hdr.ModTime.Truncate(time.Second)
hdr.AccessTime = time.Time{}
hdr.ChangeTime = time.Time{}
hdr.Name = dstDriver.Base(dst)
if dstDriver.OS() == "windows" {
hdr.Mode = int64(chmodTarEntry(os.FileMode(hdr.Mode)))
} else {
hdr.Mode = int64(os.FileMode(hdr.Mode))
}
if err := remapIDs(archiver.IDMapping, hdr); err != nil {
return err
}
tw := tar.NewWriter(w)
defer tw.Close()
if err := tw.WriteHeader(hdr); err != nil {
return err
}
if _, err := io.Copy(tw, srcF); err != nil {
return err
}
return nil
}()
}()
defer func() {
if er := <-errC; err == nil && er != nil {
err = er
}
}()
err = archiver.Untar(r, dstDriver.Dir(dst), nil)
if err != nil {
r.CloseWithError(err)
}
return err
}
// IdentityMapping returns the IdentityMapping of the archiver.
func (archiver *Archiver) IdentityMapping() *idtools.IdentityMapping {
return archiver.IDMapping
}
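// remapIDs rewrites the UID/GID in a tar header from container identities to
// the host identities they map to, so extracted files get the right owner.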
func remapIDs(idMapping *idtools.IdentityMapping, hdr *tar.Header) error {
ids, err := idMapping.ToHost(idtools.Identity{UID: hdr.Uid, GID: hdr.Gid})
hdr.Uid, hdr.Gid = ids.UID, ids.GID
return err
}
// chmodTarEntry is used to adjust the file permissions used in tar header based
// on the platform the archival is done.
func chmodTarEntry(perm os.FileMode) os.FileMode {
//perm &= 0755 // this 0-ed out tar flags (like link, regular file, directory marker etc.)
permPart := perm & os.ModePerm
noPermPart := perm &^ os.ModePerm
// Add the x bit: make everything +x from windows
permPart |= 0111
permPart &= 0755
return noPermPart | permPart
} |
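
// For example (illustrative values): 0644 becomes 0755 and 0600 becomes 0711;
// the execute bits are forced on and the group/other write bits are masked
// off, while non-permission mode bits pass through untouched.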
|
train_tripletloss.py | """Training a face recognizer with TensorFlow based on the FaceNet paper
FaceNet: A Unified Embedding for Face Recognition and Clustering: http://arxiv.org/abs/1503.03832
"""
# MIT License
#
# Copyright (c) 2016 David Sandberg
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import os.path
import time
import sys
import tensorflow as tf
import numpy as np
import importlib
import itertools
import argparse
import facenet.src.facenet as fc
from facenet.src import lfw
from tensorflow.python.ops import data_flow_ops
from six.moves import xrange # @UnresolvedImport
def main(args):
    network = importlib.import_module(args.model_def)
    subdir = datetime.strftime(datetime.now(), "%Y%m%d-%H%M%S")
    log_dir = os.path.join(os.path.expanduser(args.logs_base_dir), subdir)
    if not os.path.isdir(log_dir):  # Create the log directory if it doesn't exist
        os.makedirs(log_dir)
    model_dir = os.path.join(os.path.expanduser(args.models_base_dir), subdir)
    if not os.path.isdir(model_dir):  # Create the model directory if it doesn't exist
        os.makedirs(model_dir)
    # Write arguments to a text file
    fc.write_arguments_to_file(args, os.path.join(log_dir, "arguments.txt"))
    # Store some git revision info in a text file in the log directory
    src_path, _ = os.path.split(os.path.realpath(__file__))
    fc.store_revision_info(src_path, log_dir, " ".join(sys.argv))
    np.random.seed(seed=args.seed)
    train_set = fc.get_dataset(args.data_dir)
    print("Model directory: %s" % model_dir)
    print("Log directory: %s" % log_dir)
    if args.pretrained_model:
        print("Pre-trained model: %s" % os.path.expanduser(args.pretrained_model))
    if args.lfw_dir:
        print("LFW directory: %s" % args.lfw_dir)
        # Read the file containing the pairs used for testing
        pairs = lfw.read_pairs(os.path.expanduser(args.lfw_pairs))
        # Get the paths for the corresponding images
        lfw_paths, actual_issame = lfw.get_paths(os.path.expanduser(args.lfw_dir), pairs)
    with tf.Graph().as_default():
        tf.set_random_seed(args.seed)
        global_step = tf.Variable(0, trainable=False)
        # Placeholder for the learning rate
        learning_rate_placeholder = tf.placeholder(tf.float32, name="learning_rate")
        batch_size_placeholder = tf.placeholder(tf.int32, name="batch_size")
        phase_train_placeholder = tf.placeholder(tf.bool, name="phase_train")
        image_paths_placeholder = tf.placeholder(tf.string, shape=(None, 3), name="image_paths")
        labels_placeholder = tf.placeholder(tf.int64, shape=(None, 3), name="labels")
        input_queue = data_flow_ops.FIFOQueue(
            capacity=100000, dtypes=[tf.string, tf.int64], shapes=[(3,), (3,)], shared_name=None, name=None
        )
        enqueue_op = input_queue.enqueue_many([image_paths_placeholder, labels_placeholder])
        nrof_preprocess_threads = 4
        images_and_labels = []
        for _ in range(nrof_preprocess_threads):
            filenames, label = input_queue.dequeue()
            images = []
            for filename in tf.unstack(filenames):
                file_contents = tf.read_file(filename)
                image = tf.image.decode_image(file_contents, channels=3)
                if args.random_crop:
                    image = tf.random_crop(image, [args.image_size, args.image_size, 3])
                else:
                    image = tf.image.resize_image_with_crop_or_pad(image, args.image_size, args.image_size)
                if args.random_flip:
                    image = tf.image.random_flip_left_right(image)
                # pylint: disable=no-member
                image.set_shape((args.image_size, args.image_size, 3))
                images.append(tf.image.per_image_standardization(image))
            images_and_labels.append([images, label])
        image_batch, labels_batch = tf.train.batch_join(
            images_and_labels,
            batch_size=batch_size_placeholder,
            shapes=[(args.image_size, args.image_size, 3), ()],
            enqueue_many=True,
            capacity=4 * nrof_preprocess_threads * args.batch_size,
            allow_smaller_final_batch=True,
        )
        image_batch = tf.identity(image_batch, "image_batch")
        image_batch = tf.identity(image_batch, "input")
        labels_batch = tf.identity(labels_batch, "label_batch")
        # Build the inference graph
        prelogits, _ = network.inference(
            image_batch,
            args.keep_probability,
            phase_train=phase_train_placeholder,
            bottleneck_layer_size=args.embedding_size,
            weight_decay=args.weight_decay,
        )
        embeddings = tf.nn.l2_normalize(prelogits, 1, 1e-10, name="embeddings")
        # Split embeddings into anchor, positive and negative and calculate triplet loss
        anchor, positive, negative = tf.unstack(tf.reshape(embeddings, [-1, 3, args.embedding_size]), 3, 1)
        triplet_loss = fc.triplet_loss(anchor, positive, negative, args.alpha)
        learning_rate = tf.train.exponential_decay(
            learning_rate_placeholder,
            global_step,
            args.learning_rate_decay_epochs * args.epoch_size,
            args.learning_rate_decay_factor,
            staircase=True,
        )
        tf.summary.scalar("learning_rate", learning_rate)
        # Calculate the total losses
        regularization_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
        total_loss = tf.add_n([triplet_loss] + regularization_losses, name="total_loss")
        # Build a Graph that trains the model with one batch of examples and updates the model parameters
        train_op = fc.train(
            total_loss, global_step, args.optimizer, learning_rate, args.moving_average_decay, tf.global_variables()
        )
        # Create a saver
        saver = tf.train.Saver(tf.trainable_variables(), max_to_keep=3)
        # Build the summary operation based on the TF collection of Summaries.
        summary_op = tf.summary.merge_all()
        # Start running operations on the Graph.
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=args.gpu_memory_fraction)
        sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
        # Initialize variables
        sess.run(tf.global_variables_initializer(), feed_dict={phase_train_placeholder: True})
        sess.run(tf.local_variables_initializer(), feed_dict={phase_train_placeholder: True})
        summary_writer = tf.summary.FileWriter(log_dir, sess.graph)
        coord = tf.train.Coordinator()
        tf.train.start_queue_runners(coord=coord, sess=sess)
        with sess.as_default():
            if args.pretrained_model:
                print("Restoring pretrained model: %s" % args.pretrained_model)
                saver.restore(sess, os.path.expanduser(args.pretrained_model))
            # Training and validation loop
            epoch = 0
            while epoch < args.max_nrof_epochs:
                step = sess.run(global_step, feed_dict=None)
                epoch = step // args.epoch_size
                # Train for one epoch
                train(
                    args,
                    sess,
                    train_set,
                    epoch,
                    image_paths_placeholder,
                    labels_placeholder,
                    labels_batch,
                    batch_size_placeholder,
                    learning_rate_placeholder,
                    phase_train_placeholder,
                    enqueue_op,
                    input_queue,
                    global_step,
                    embeddings,
                    total_loss,
                    train_op,
                    summary_op,
                    summary_writer,
                    args.learning_rate_schedule_file,
                    args.embedding_size,
                    anchor,
                    positive,
                    negative,
                    triplet_loss,
                )
                # Save variables and the metagraph if it doesn't exist already
                save_variables_and_metagraph(sess, saver, summary_writer, model_dir, subdir, step)
                # Evaluate on LFW
                if args.lfw_dir:
                    evaluate(
                        sess,
                        lfw_paths,
                        embeddings,
                        labels_batch,
                        image_paths_placeholder,
                        labels_placeholder,
                        batch_size_placeholder,
                        learning_rate_placeholder,
                        phase_train_placeholder,
                        enqueue_op,
                        actual_issame,
                        args.batch_size,
                        args.lfw_nrof_folds,
                        log_dir,
                        step,
                        summary_writer,
                        args.embedding_size,
                    )
    return model_dir
def train(
args,
sess,
dataset,
epoch,
image_paths_placeholder,
labels_placeholder,
labels_batch,
batch_size_placeholder,
learning_rate_placeholder,
phase_train_placeholder,
enqueue_op,
input_queue,
global_step,
embeddings,
loss,
train_op,
summary_op,
summary_writer,
learning_rate_schedule_file,
embedding_size,
anchor,
positive,
negative,
triplet_loss,
):
batch_number = 0
if args.learning_rate > 0.0:
lr = args.learning_rate
else:
lr = fc.get_learning_rate_from_file(learning_rate_schedule_file, epoch)
while batch_number < args.epoch_size:
# Sample people randomly from the dataset
image_paths, num_per_class = sample_people(dataset, args.people_per_batch, args.images_per_person)
print("Running forward pass on sampled images: ", end="")
start_time = time.time()
nrof_examples = args.people_per_batch * args.images_per_person
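        # The input queue dequeues (3,)-shaped elements, so paths and labels
        # are enqueued in groups of three; the grouping is arbitrary at this
        # point and only serves the queue shape, real triplets come later.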
labels_array = np.reshape(np.arange(nrof_examples), (-1, 3))
image_paths_array = np.reshape(np.expand_dims(np.array(image_paths), 1), (-1, 3))
sess.run(enqueue_op, {image_paths_placeholder: image_paths_array, labels_placeholder: labels_array})
emb_array = np.zeros((nrof_examples, embedding_size))
nrof_batches = int(np.ceil(nrof_examples / args.batch_size))
for i in range(nrof_batches):
batch_size = min(nrof_examples - i * args.batch_size, args.batch_size)
emb, lab = sess.run(
[embeddings, labels_batch],
feed_dict={
batch_size_placeholder: batch_size,
learning_rate_placeholder: lr,
phase_train_placeholder: True,
},
)
emb_array[lab, :] = emb
print("%.3f" % (time.time() - start_time))
# Select triplets based on the embeddings
print("Selecting suitable triplets for training")
triplets, nrof_random_negs, nrof_triplets = select_triplets(
emb_array, num_per_class, image_paths, args.people_per_batch, args.alpha
)
selection_time = time.time() - start_time
print(
"(nrof_random_negs, nrof_triplets) = (%d, %d): time=%.3f seconds"
% (nrof_random_negs, nrof_triplets, selection_time)
)
# Perform training on the selected triplets
nrof_batches = int(np.ceil(nrof_triplets * 3 / args.batch_size))
triplet_paths = list(itertools.chain(*triplets))
labels_array = np.reshape(np.arange(len(triplet_paths)), (-1, 3))
triplet_paths_array = np.reshape(np.expand_dims(np.array(triplet_paths), 1), (-1, 3))
sess.run(enqueue_op, {image_paths_placeholder: triplet_paths_array, labels_placeholder: labels_array})
nrof_examples = len(triplet_paths)
train_time = 0
i = 0
emb_array = np.zeros((nrof_examples, embedding_size))
loss_array = np.zeros((nrof_triplets,))
summary = tf.Summary()
step = 0
while i < nrof_batches:
start_time = time.time()
batch_size = min(nrof_examples - i * args.batch_size, args.batch_size)
feed_dict = {
batch_size_placeholder: batch_size,
learning_rate_placeholder: lr,
phase_train_placeholder: True,
}
err, _, step, emb, lab = sess.run(
[loss, train_op, global_step, embeddings, labels_batch], feed_dict=feed_dict
)
emb_array[lab, :] = emb
loss_array[i] = err
duration = time.time() - start_time
print(
"Epoch: [%d][%d/%d]\tTime %.3f\tLoss %2.3f" % (epoch, batch_number + 1, args.epoch_size, duration, err)
)
batch_number += 1
i += 1
train_time += duration
summary.value.add(tag="loss", simple_value=err)
# Add validation loss and accuracy to summary
# pylint: disable=maybe-no-member
summary.value.add(tag="time/selection", simple_value=selection_time)
summary_writer.add_summary(summary, step)
return step
def select_triplets(embeddings, nrof_images_per_class, image_paths, people_per_batch, alpha):
"""Select the triplets for training"""
trip_idx = 0
emb_start_idx = 0
num_trips = 0
triplets = []
# VGG Face: Choosing good triplets is crucial and should strike a balance between
# selecting informative (i.e. challenging) examples and swamping training with examples that
    # are too hard. This is achieved by extending each pair (a, p) to a triplet (a, p, n) by sampling
# the image n at random, but only between the ones that violate the triplet loss margin. The
# latter is a form of hard-negative mining, but it is not as aggressive (and much cheaper) than
# choosing the maximally violating example, as often done in structured output learning.
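    # Concretely: for anchor a and positive p, a negative n is a candidate
    # whenever ||a - n||^2 - ||a - p||^2 < alpha, i.e. it violates the
    # margin; one candidate is then picked uniformly at random.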
for i in xrange(people_per_batch):
nrof_images = int(nrof_images_per_class[i])
for j in xrange(1, nrof_images):
a_idx = emb_start_idx + j - 1
neg_dists_sqr = np.sum(np.square(embeddings[a_idx] - embeddings), 1)
for pair in xrange(j, nrof_images): # For every possible positive pair.
p_idx = emb_start_idx + pair
pos_dist_sqr = np.sum(np.square(embeddings[a_idx] - embeddings[p_idx]))
neg_dists_sqr[emb_start_idx : emb_start_idx + nrof_images] = np.NaN
# all_neg = np.where(np.logical_and(neg_dists_sqr-pos_dist_sqr<alpha, pos_dist_sqr<neg_dists_sqr))[0] # FaceNet selection
                all_neg = np.where(neg_dists_sqr - pos_dist_sqr < alpha)[0]  # VGG Face selection
nrof_random_negs = all_neg.shape[0]
if nrof_random_negs > 0:
rnd_idx = np.random.randint(nrof_random_negs)
n_idx = all_neg[rnd_idx]
triplets.append((image_paths[a_idx], image_paths[p_idx], image_paths[n_idx]))
# print('Triplet %d: (%d, %d, %d), pos_dist=%2.6f, neg_dist=%2.6f (%d, %d, %d, %d, %d)' %
# (trip_idx, a_idx, p_idx, n_idx, pos_dist_sqr, neg_dists_sqr[n_idx], nrof_random_negs, rnd_idx, i, j, emb_start_idx))
trip_idx += 1
num_trips += 1
emb_start_idx += nrof_images
np.random.shuffle(triplets)
return triplets, num_trips, len(triplets)
def sample_people(dataset, people_per_batch, images_per_person):
nrof_images = people_per_batch * images_per_person
# Sample classes from the dataset
nrof_classes = len(dataset)
class_indices = np.arange(nrof_classes)
np.random.shuffle(class_indices)
i = 0
image_paths = []
num_per_class = []
sampled_class_indices = []
# Sample images from these classes until we have enough
while len(image_paths) < nrof_images:
class_index = class_indices[i]
nrof_images_in_class = len(dataset[class_index])
image_indices = np.arange(nrof_images_in_class)
np.random.shuffle(image_indices)
nrof_images_from_class = min(nrof_images_in_class, images_per_person, nrof_images - len(image_paths))
idx = image_indices[0:nrof_images_from_class]
image_paths_for_class = [dataset[class_index].image_paths[j] for j in idx]
sampled_class_indices += [class_index] * nrof_images_from_class
image_paths += image_paths_for_class
num_per_class.append(nrof_images_from_class)
i += 1
return image_paths, num_per_class
def evaluate(
sess,
image_paths,
embeddings,
labels_batch,
image_paths_placeholder,
labels_placeholder,
batch_size_placeholder,
learning_rate_placeholder,
phase_train_placeholder,
enqueue_op,
actual_issame,
batch_size,
nrof_folds,
log_dir,
step,
summary_writer,
embedding_size,
):
start_time = time.time()
# Run forward pass to calculate embeddings
print("Running forward pass on LFW images: ", end="")
nrof_images = len(actual_issame) * 2
assert len(image_paths) == nrof_images
labels_array = np.reshape(np.arange(nrof_images), (-1, 3))
image_paths_array = np.reshape(np.expand_dims(np.array(image_paths), 1), (-1, 3))
sess.run(enqueue_op, {image_paths_placeholder: image_paths_array, labels_placeholder: labels_array})
emb_array = np.zeros((nrof_images, embedding_size))
nrof_batches = int(np.ceil(nrof_images / batch_size))
label_check_array = np.zeros((nrof_images,))
for i in xrange(nrof_batches):
batch_size = min(nrof_images - i * batch_size, batch_size)
emb, lab = sess.run(
[embeddings, labels_batch],
feed_dict={
batch_size_placeholder: batch_size,
learning_rate_placeholder: 0.0,
phase_train_placeholder: False,
},
)
emb_array[lab, :] = emb
label_check_array[lab] = 1
print("%.3f" % (time.time() - start_time))
assert np.all(label_check_array == 1)
_, _, accuracy, val, val_std, far = lfw.evaluate(emb_array, actual_issame, nrof_folds=nrof_folds)
print("Accuracy: %1.3f+-%1.3f" % (np.mean(accuracy), np.std(accuracy)))
print("Validation rate: %2.5f+-%2.5f @ FAR=%2.5f" % (val, val_std, far))
lfw_time = time.time() - start_time
# Add validation loss and accuracy to summary
summary = tf.Summary()
# pylint: disable=maybe-no-member
summary.value.add(tag="lfw/accuracy", simple_value=np.mean(accuracy))
summary.value.add(tag="lfw/val_rate", simple_value=val)
summary.value.add(tag="time/lfw", simple_value=lfw_time)
summary_writer.add_summary(summary, step)
with open(os.path.join(log_dir, "lfw_result.txt"), "at") as f:
f.write("%d\t%.5f\t%.5f\n" % (step, np.mean(accuracy), val))
def save_variables_and_metagraph(sess, saver, summary_writer, model_dir, model_name, step):
# Save the model checkpoint
print("Saving variables")
start_time = time.time()
checkpoint_path = os.path.join(model_dir, "model-%s.ckpt" % model_name)
saver.save(sess, checkpoint_path, global_step=step, write_meta_graph=False)
save_time_variables = time.time() - start_time
print("Variables saved in %.2f seconds" % save_time_variables)
metagraph_filename = os.path.join(model_dir, "model-%s.meta" % model_name)
save_time_metagraph = 0
if not os.path.exists(metagraph_filename):
print("Saving metagraph")
start_time = time.time()
saver.export_meta_graph(metagraph_filename)
save_time_metagraph = time.time() - start_time
print("Metagraph saved in %.2f seconds" % save_time_metagraph)
summary = tf.Summary()
# pylint: disable=maybe-no-member
summary.value.add(tag="time/save_variables", simple_value=save_time_variables)
summary.value.add(tag="time/save_metagraph", simple_value=save_time_metagraph)
summary_writer.add_summary(summary, step)
def get_learning_rate_from_file(filename, epoch):
    """Look up the learning rate for an epoch in a schedule file.

    Each non-comment line has the form "<epoch>: <learning rate>"; the rate
    from the last line whose epoch is <= the requested epoch is returned.
    """
    learning_rate = None
    with open(filename, "r") as f:
        for line in f.readlines():
            line = line.split("#", 1)[0]
            if line:
                par = line.strip().split(":")
                e = int(par[0])
                lr = float(par[1])
                if e <= epoch:
                    learning_rate = lr
                else:
                    return learning_rate
    return learning_rate
def parse_arguments(argv):
parser = argparse.ArgumentParser()
parser.add_argument(
"--logs_base_dir", type=str, help="Directory where to write event logs.", default="~/logs/facenet"
)
parser.add_argument(
"--models_base_dir",
type=str,
help="Directory where to write trained models and checkpoints.",
default="~/models/facenet",
)
parser.add_argument(
"--gpu_memory_fraction",
type=float,
help="Upper bound on the amount of GPU memory that will be used by the process.",
default=1.0,
)
parser.add_argument("--pretrained_model", type=str, help="Load a pretrained model before training starts.")
parser.add_argument(
"--data_dir",
type=str,
help="Path to the data directory containing aligned face patches.",
default="~/datasets/casia/casia_maxpy_mtcnnalign_182_160",
)
parser.add_argument(
"--model_def",
type=str,
help="Model definition. Points to a module containing the definition of the inference graph.",
default="models.inception_resnet_v1",
)
parser.add_argument("--max_nrof_epochs", type=int, help="Number of epochs to run.", default=500)
parser.add_argument("--batch_size", type=int, help="Number of images to process in a batch.", default=90)
parser.add_argument("--image_size", type=int, help="Image size (height, width) in pixels.", default=160)
parser.add_argument("--people_per_batch", type=int, help="Number of people per batch.", default=45)
parser.add_argument("--images_per_person", type=int, help="Number of images per person.", default=40)
parser.add_argument("--epoch_size", type=int, help="Number of batches per epoch.", default=1000)
parser.add_argument("--alpha", type=float, help="Positive to negative triplet distance margin.", default=0.2)
parser.add_argument("--embedding_size", type=int, help="Dimensionality of the embedding.", default=128)
parser.add_argument(
"--random_crop",
help="Performs random cropping of training images. If false, the center image_size pixels from the training images are used. "
+ "If the size of the images in the data directory is equal to image_size no cropping is performed",
action="store_true",
)
parser.add_argument(
"--random_flip", help="Performs random horizontal flipping of training images.", action="store_true"
)
parser.add_argument(
"--keep_probability",
type=float,
help="Keep probability of dropout for the fully connected layer(s).",
default=1.0,
)
parser.add_argument("--weight_decay", type=float, help="L2 weight regularization.", default=0.0)
parser.add_argument(
"--optimizer",
type=str,
choices=["ADAGRAD", "ADADELTA", "ADAM", "RMSPROP", "MOM"],
help="The optimization algorithm to use",
default="ADAGRAD",
)
parser.add_argument(
"--learning_rate",
type=float,
help="Initial learning rate. If set to a negative value a learning rate "
+ 'schedule can be specified in the file "learning_rate_schedule.txt"',
default=0.1,
)
parser.add_argument(
"--learning_rate_decay_epochs", type=int, help="Number of epochs between learning rate decay.", default=100
)
parser.add_argument("--learning_rate_decay_factor", type=float, help="Learning rate decay factor.", default=1.0)
parser.add_argument(
"--moving_average_decay",
type=float,
help="Exponential decay for tracking of training parameters.",
default=0.9999,
)
parser.add_argument("--seed", type=int, help="Random seed.", default=666)
parser.add_argument(
"--learning_rate_schedule_file",
type=str,
help="File containing the learning rate schedule that is used when learning_rate is set to to -1.",
default="data/learning_rate_schedule.txt",
)
# Parameters for validation on LFW
parser.add_argument(
"--lfw_pairs", type=str, help="The file containing the pairs to use for validation.", default="data/pairs.txt"
)
parser.add_argument(
"--lfw_dir", type=str, help="Path to the data directory containing aligned face patches.", default=""
)
parser.add_argument(
"--lfw_nrof_folds",
type=int,
help="Number of folds to use for cross validation. Mainly used for testing.",
default=10,
)
return parser.parse_args(argv)
if __name__ == "__main__":
main(parse_arguments(sys.argv[1:]))
cpu.rs | use super::super::memory::Memory;
use super::flags::CpuFlags;
use std::fmt;
#[allow(non_snake_case)]
pub struct Cpu {
pub pc: u16,
pub sp: u8,
pub A: u8,
pub X: u8,
pub Y: u8,
pub flags: CpuFlags,
pub mem: Memory,
}
pub fn new(mem: Memory) -> Cpu {
let cpu = Cpu {
pc: 0xC000, // TODO set to reset vector 0xFFFC once PPU + irq are supported
sp: 0xFD,
A: 0,
X: 0,
Y: 0,
        mem,
flags: CpuFlags::new(),
};
// TODO:
// cpu.mem[0x4017] = 0x00; // frame irq enabled
// cpu.mem[0x4015] = 0x00; // all channels disabled
// for i in 0..0x14 {
// cpu.mem[0x4000 + i] = 0x00
// }
cpu
}
impl Cpu {
    /// Fetch the byte at the program counter and advance the PC by one.
    pub fn read_from_pc(&mut self) -> u8 {
        let op = self.mem.get(self.pc);
        self.pc += 1;
        op
    }
}
impl fmt::Display for Cpu {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"A:{:02X} X:{:02X} Y:{:02X} P:{:02X} SP:{:02X}",
self.A,
self.X,
self.Y,
self.flags.to_p(),
self.sp
)
}
}
test.py | from regression_tests import *
class TestInnoSetupDetection(Test):
settings = TestSettings(
tool='fileinfo',
input='inno.exe'
)
    def test_detected_inno(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output.contains(r'.*Inno Setup \(5.4.0 - 5.5.1\)')
cmdctl.rs | use structopt::StructOpt;
use structopt::clap::AppSettings::*;
use super::Commands;
use rusoto_core::Region;
#[derive(Debug, StructOpt, Default, Clone)]
#[structopt(
global_settings = &[DisableVersion, DeriveDisplayOrder, VersionlessSubcommands],
about = "Generate signed url's for remote storage services."
)]
pub struct CmdCtl {
/// The HTTP method the signed URL is generated for.
///
#[structopt(default_value="PUT")]
pub method: String,
/// Bucket target for signature.
///
#[structopt(short = "b", long = "bucket")]
pub bucket: Option<String>,
/// Key path target. (ie: filename)
#[structopt(short = "k", long = "key")]
pub key: Option<String>,
/// Let util append filename to key prefix.
#[structopt(long = "prefix")]
pub prefix: Option<String>,
/// Don't allow bucket to change.
#[structopt(long = "no-buckets")]
pub no_edit_bucket: bool,
/// Generate keys with UUIDv4.
#[structopt(short = "g", long = "gen-key")]
pub generate_key: bool,
/// Region target.
// https://docs.aws.amazon.com/general/latest/gr/rande.html#region-names-codes
#[structopt(short = "r", long = "region", env = "AWS_DEFAULT_REGION", default_value="us-east-1")]
pub region: String,
/// Duration in milliseconds after which the URL becomes invalid.
#[structopt(short = "t", long = "timeout", default_value="60000")]
pub timeout: u64,
/// Daemon mode.
#[structopt(short = "d", long = "daemon")]
pub daemon: bool,
/// Daemon mode port.
#[structopt(long = "port", env = "SIGNEDURL_PORT", default_value="8080")]
pub port: i32,
/// Daemon mode host.
#[structopt(long = "host", env = "SIGNEDURL_HOST", default_value="127.0.0.1")]
pub host: String,
/// Enable verbose logging.
#[structopt(long = "verbose", short = "v")]
pub verbose: bool,
#[structopt(subcommand)]
pub commands: Option<Commands>,
}
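
// Illustrative invocation (the binary name and values are assumptions, the
// flags are the ones defined above):
//
//   signedurl PUT -b my-bucket -k uploads/file.bin -r us-east-1 -t 30000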
impl CmdCtl {
pub fn run_command_process(self) -> CmdCtl {
match &self.commands {
Some(commands) => {
commands.process();
self
},
None => {
self
},
}
}
pub fn is_verbose(&self) -> bool {
match self.commands.clone() {
Some(commands) => commands.is_verbose(),
None => self.verbose
}
}
pub fn region(&self) -> Region {
if self.region == "us-east-1" {
Region::UsEast1
} else if self.region == "us-east-2" {
Region::UsEast2
} else if self.region == "ap-east-1" {
Region::ApEast1
} else if self.region == "ap-south-1" {
Region::ApSouth1
} else if self.region == "ap-northeast-1" {
Region::ApNortheast1
} else if self.region == "ap-northeast-2" {
Region::ApNortheast2
} else if self.region == "ap-northeast-3" {
Region::ApNortheast3
} else if self.region == "ap-southeast-1" {
Region::ApSoutheast1
} else if self.region == "ap-southeast-2" {
Region::ApSoutheast2
} else if self.region == "ca-central-1" {
Region::CaCentral1
} else if self.region == "cn-north-1" {
Region::CnNorth1
} else if self.region == "cn-northeast-1" {
Region::CnNorthwest1
} else if self.region == "eu-central-1" {
Region::EuCentral1
} else if self.region == "eu-north-1" { | Region::EuWest1
} else if self.region == "eu-west-2" {
Region::EuWest2
} else if self.region == "eu-west-3" {
Region::EuWest3
} else if self.region == "me-south-1" {
Region::MeSouth1
} else if self.region == "sa-east-1" {
Region::SaEast1
} else {
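            // Any unrecognized region name falls back to us-east-1.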
Region::UsEast1
}
}
} | Region::EuNorth1
} else if self.region == "eu-west-1" { |
main.go | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"context"
"errors"
"flag"
"fmt"
"io/ioutil"
"log"
"net/url"
"os"
"strings"
"time"
tgCfgUtil "github.com/GoogleCloudPlatform/testgrid/config"
"github.com/GoogleCloudPlatform/testgrid/config/yamlcfg"
"github.com/GoogleCloudPlatform/testgrid/util/gcs"
prowConfig "k8s.io/test-infra/prow/config"
configflagutil "k8s.io/test-infra/prow/flagutil/config"
"cloud.google.com/go/storage"
"github.com/sirupsen/logrus"
"sigs.k8s.io/yaml"
)
type multiString []string
func (m multiString) String() string {
return strings.Join(m, ",")
}
func (m *multiString) Set(v string) error {
*m = strings.Split(v, ",")
return nil
}
// How long Configurator waits between file checks in polling mode
const pollingTime = time.Second
type options struct {
creds string
inputs multiString
oneshot bool
output string
printText bool
validateConfigFile bool
worldReadable bool
writeYAML bool
prowConfig configflagutil.ConfigOptions
defaultYAML string
updateDescription bool
prowJobURLPrefix string
strictUnmarshal bool
}
func (o *options) gatherOptions(fs *flag.FlagSet, args []string) error {
fs.StringVar(&o.creds, "gcp-service-account", "", "/path/to/gcp/creds (use local creds if empty)")
fs.BoolVar(&o.oneshot, "oneshot", false, "Write proto once and exit instead of monitoring --yaml files for changes")
fs.StringVar(&o.output, "output", "", "write proto to gs://bucket/obj or /local/path")
fs.BoolVar(&o.printText, "print-text", false, "print generated info in text format to stdout")
fs.BoolVar(&o.validateConfigFile, "validate-config-file", false, "validate that the given config files are syntactically correct and exit (proto is not written anywhere)")
fs.BoolVar(&o.worldReadable, "world-readable", false, "when uploading the proto to GCS, makes it world readable. Has no effect on writing to the local filesystem.")
fs.BoolVar(&o.writeYAML, "output-yaml", false, "Output to TestGrid YAML instead of config proto")
fs.Var(&o.inputs, "yaml", "comma-separated list of input YAML files or directories")
o.prowConfig.ConfigPathFlagName = "prow-config"
o.prowConfig.JobConfigPathFlagName = "prow-job-config"
o.prowConfig.AddFlags(fs)
fs.StringVar(&o.defaultYAML, "default", "", "path to default settings; required for proto outputs")
fs.BoolVar(&o.updateDescription, "update-description", false, "add prowjob info to description even if non-empty")
fs.StringVar(&o.prowJobURLPrefix, "prowjob-url-prefix", "", "for prowjob_config_url in descriptions: {prowjob-url-prefix}/{prowjob.sourcepath}")
fs.BoolVar(&o.strictUnmarshal, "strict-unmarshal", false, "whether or not we want to be strict when unmarshalling configs")
if err := fs.Parse(args); err != nil {
return err
}
if len(o.inputs) == 0 || o.inputs[0] == "" {
return errors.New("--yaml must include at least one file")
}
if !o.printText && !o.validateConfigFile && o.output == "" {
return errors.New("--print-text, --validate-config-file, or --output required")
}
if o.validateConfigFile && o.output != "" {
return errors.New("--validate-config-file doesn't write the proto anywhere")
}
if err := o.prowConfig.ValidateConfigOptional(); err != nil {
return err
}
if o.defaultYAML == "" && !o.writeYAML {
logrus.Warnf("--default not explicitly specified; assuming %s", o.inputs[0])
o.defaultYAML = o.inputs[0]
}
return nil
}
// announceChanges watches for changes in "paths" and writes them to the channel
func announceChanges(ctx context.Context, paths []string, channel chan []string) {
defer close(channel)
modified := map[string]time.Time{}
// TODO(fejta): consider waiting for a notification rather than polling
// but performance isn't that big a deal here.
for {
// Terminate
select {
case <-ctx.Done():
return
default:
}
var changed []string
// Check known files for deletions
for p := range modified {
_, accessErr := os.Stat(p)
if os.IsNotExist(accessErr) {
changed = append(changed, p)
delete(modified, p)
}
}
// Check given locations for new or modified files
err := yamlcfg.SeekYAMLFiles(paths, func(path string, info os.FileInfo) error {
lastModTime, present := modified[path]
if t := info.ModTime(); !present || t.After(lastModTime) {
changed = append(changed, path)
modified[path] = t
}
return nil
})
if err != nil {
logrus.WithError(err).Error("walk issue in announcer")
return
}
if len(changed) > 0 {
select {
case <-ctx.Done():
return
case channel <- changed:
}
} else {
time.Sleep(pollingTime)
}
}
}
func announceProwChanges(ctx context.Context, pca *prowConfig.Agent, channel chan []string) {
pch := make(chan prowConfig.Delta)
pca.Subscribe(pch)
for {
<-pch
select {
case <-ctx.Done():
return
case channel <- []string{"prow config"}:
}
}
}
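// write stores bytes at path: "gs://bucket/obj" URLs upload to GCS, anything
// else is written to the local filesystem.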
func write(ctx context.Context, client *storage.Client, path string, bytes []byte, worldReadable bool, cacheControl string) error {
u, err := url.Parse(path)
if err != nil {
return fmt.Errorf("invalid url %s: %v", path, err)
}
if u.Scheme != "gs" {
return ioutil.WriteFile(path, bytes, 0644)
}
var p gcs.Path
if err = p.SetURL(u); err != nil {
return err
}
return gcs.Upload(ctx, client, p, bytes, worldReadable, cacheControl)
}
// Ignores what changed for now and recomputes everything
func | (ctx context.Context, client *storage.Client, opt options, prowConfigAgent *prowConfig.Agent) error {
// Read Data Sources: Default, YAML configs, Prow Annotations
c, err := yamlcfg.ReadConfig(opt.inputs, opt.defaultYAML, opt.strictUnmarshal)
if err != nil {
return fmt.Errorf("could not read testgrid config: %v", err)
}
// Remains nil if no default YAML
var d *yamlcfg.DefaultConfiguration
if opt.defaultYAML != "" {
b, err := ioutil.ReadFile(opt.defaultYAML)
if err != nil {
return err
}
val, err := yamlcfg.LoadDefaults(b)
if err != nil {
return err
}
d = &val
}
pac := prowAwareConfigurator{
defaultTestgridConfig: d,
prowConfig: prowConfigAgent.Config(),
updateDescription: opt.updateDescription,
prowJobConfigPath: opt.prowConfig.JobConfigPath,
prowJobURLPrefix: opt.prowJobURLPrefix,
}
if err := pac.applyProwjobAnnotations(&c); err != nil {
return fmt.Errorf("could not apply prowjob annotations: %v", err)
}
if opt.validateConfigFile {
return tgCfgUtil.Validate(&c)
}
// Print proto if requested
if opt.printText {
if opt.writeYAML {
b, err := yaml.Marshal(&c)
if err != nil {
return fmt.Errorf("could not print yaml config: %v", err)
}
os.Stdout.Write(b)
} else if err := tgCfgUtil.MarshalText(&c, os.Stdout); err != nil {
return fmt.Errorf("could not print config: %v", err)
}
}
// Write proto if requested
if opt.output != "" {
var b []byte
var err error
if opt.writeYAML {
b, err = yaml.Marshal(&c)
} else {
b, err = tgCfgUtil.MarshalBytes(&c)
}
if err == nil {
err = write(ctx, client, opt.output, b, opt.worldReadable, "")
}
if err != nil {
return fmt.Errorf("could not write config: %v", err)
}
}
return nil
}
func main() {
// Parse flags
var opt options
if err := opt.gatherOptions(flag.CommandLine, os.Args[1:]); err != nil {
log.Fatalf("Bad flags: %v", err)
}
ctx := context.Background()
	// Start from an empty agent so later uses (Config(), Subscribe) are safe
	// even when the prow flags are not set.
	prowConfigAgent := &prowConfig.Agent{}
if opt.prowConfig.ConfigPath != "" && opt.prowConfig.JobConfigPath != "" {
agent, err := opt.prowConfig.ConfigAgent()
if err != nil {
log.Fatalf("FAIL: couldn't load prow config: %v", err)
}
prowConfigAgent = agent
}
// Config file validation only
if opt.validateConfigFile {
if err := doOneshot(ctx, nil, opt, prowConfigAgent); err != nil {
log.Fatalf("FAIL: %v", err)
}
log.Println("Config validated successfully")
return
}
// Set up GCS client if output is to GCS
var client *storage.Client
if strings.HasPrefix(opt.output, "gs://") {
var err error
var creds []string
if opt.creds != "" {
creds = append(creds, opt.creds)
}
client, err = gcs.ClientWithCreds(ctx, creds...)
if err != nil {
log.Fatalf("failed to create gcs client: %v", err)
}
}
// Oneshot mode, write config and exit
if opt.oneshot {
if err := doOneshot(ctx, client, opt, prowConfigAgent); err != nil {
log.Fatalf("FAIL: %v", err)
}
return
}
// Service mode, monitor input files for changes
channel := make(chan []string)
// Monitor files for changes
go announceChanges(ctx, opt.inputs, channel)
go announceProwChanges(ctx, prowConfigAgent, channel)
// Wait for changed files
for changes := range channel {
log.Printf("Changed: %v", changes)
log.Println("Writing config...")
if err := doOneshot(ctx, client, opt, prowConfigAgent); err != nil {
log.Printf("FAIL: %v", err)
continue
}
log.Printf("Wrote config to %s", opt.output)
}
}
| doOneshot |
test_model.py | from django.test import TestCase
from django.contrib.auth import get_user_model
class ModelTest(TestCase):
def test_create_user_with_email_successfully(self):
email = '[email protected]'
password = '12345'
user = get_user_model().objects.create_user(
email=email,
password=password)
self.assertEqual(user.email, email)
self.assertTrue(user.check_password(password))
def test_new_user_email_normalized(self):
"""Test the email for a new user is normalized"""
email = '[email protected]'
user = get_user_model().objects.create_user(email, '1234')
| with self.assertRaises(ValueError):
get_user_model().objects.create_user(email=None, password='1234')
def test_create_new_superuser(self):
user = get_user_model().objects.create_superuser(email='[email protected]',
password='123')
self.assertTrue(user.is_superuser)
self.assertTrue(user.is_staff) | self.assertEqual(user.email, email.lower())
def test_new_user_invalid_email(self): |
xx_generated.transitions.transitionlistchanged.go | // This file has been automatically generated. Don't edit it. |
/*
TransitionListChanged represents the event body for the "TransitionListChanged" event.
Since v4.0.0.
*/
type TransitionListChanged struct {
EventBasic
Transitions []struct {
// Transition name.
Name string `json:"name"`
} `json:"transitions"`
} |
package events |
menu.rs | use crate::utils::get_fl_name;
use proc_macro::TokenStream;
use quote::*;
use syn::*;
pub fn impl_menu_trait(ast: &DeriveInput) -> TokenStream {
let name = &ast.ident;
let name_str = get_fl_name(name.to_string());
let ptr_name = Ident::new(name_str.as_str(), name.span());
let add = Ident::new(format!("{}_{}", name_str, "add").as_str(), name.span());
let insert = Ident::new(format!("{}_{}", name_str, "insert").as_str(), name.span());
let remove = Ident::new(format!("{}_{}", name_str, "remove").as_str(), name.span());
let get_item = Ident::new(format!("{}_{}", name_str, "get_item").as_str(), name.span());
let set_item = Ident::new(format!("{}_{}", name_str, "set_item").as_str(), name.span());
let find_index = Ident::new(
format!("{}_{}", name_str, "find_index").as_str(),
name.span(),
);
let text_font = Ident::new(
format!("{}_{}", name_str, "text_font").as_str(),
name.span(),
);
let set_text_font = Ident::new(
format!("{}_{}", name_str, "set_text_font").as_str(),
name.span(),
);
let text_color = Ident::new(
format!("{}_{}", name_str, "text_color").as_str(),
name.span(),
);
let set_text_color = Ident::new(
format!("{}_{}", name_str, "set_text_color").as_str(),
name.span(),
);
let text_size = Ident::new(
format!("{}_{}", name_str, "text_size").as_str(),
name.span(),
);
let set_text_size = Ident::new(
format!("{}_{}", name_str, "set_text_size").as_str(),
name.span(),
);
let add_choice = Ident::new(
format!("{}_{}", name_str, "add_choice").as_str(),
name.span(),
);
let get_choice = Ident::new(
format!("{}_{}", name_str, "get_choice").as_str(),
name.span(),
);
let value = Ident::new(format!("{}_{}", name_str, "value").as_str(), name.span());
let set_value = Ident::new(
format!("{}_{}", name_str, "set_value").as_str(),
name.span(),
);
let clear = Ident::new(format!("{}_{}", name_str, "clear").as_str(), name.span());
let clear_submenu = Ident::new(
format!("{}_{}", name_str, "clear_submenu").as_str(),
name.span(),
);
let size = Ident::new(format!("{}_{}", name_str, "size").as_str(), name.span());
let text = Ident::new(format!("{}_{}", name_str, "text").as_str(), name.span());
let at = Ident::new(format!("{}_{}", name_str, "at").as_str(), name.span());
let mode = Ident::new(format!("{}_{}", name_str, "mode").as_str(), name.span());
let set_mode = Ident::new(format!("{}_{}", name_str, "set_mode").as_str(), name.span());
let gen = quote! {
unsafe impl MenuExt for #name {
fn add(&mut self, name: &str, shortcut: Shortcut, flag: MenuFlag, mut cb: Box<dyn FnMut()>) {
// debug_assert!( | // "Handling events requires that the window and widget be active!"
// );
assert!(!self.was_deleted());
let temp = CString::new(name).unwrap();
unsafe {
unsafe extern "C" fn shim(_wid: *mut Fl_Widget, data: *mut raw::c_void) {
let a: *mut Box<dyn FnMut()> = data as *mut Box<dyn FnMut()>;
let f: &mut (dyn FnMut()) = &mut **a;
let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f()));
}
let a: *mut Box<dyn FnMut()> = Box::into_raw(Box::new(cb));
let data: *mut raw::c_void = a as *mut raw::c_void;
let callback: Fl_Callback = Some(shim);
#add(self._inner, temp.as_ptr(), shortcut as i32, callback, data, flag as i32);
}
}
fn insert(&mut self, idx: u32, name: &str, shortcut: Shortcut, flag: MenuFlag, cb: Box<dyn FnMut()>) {
// debug_assert!(
// self.top_window().unwrap().takes_events() && self.takes_events(),
// "Handling events requires that the window and widget be active!"
// );
assert!(!self.was_deleted());
let temp = CString::new(name).unwrap();
unsafe {
unsafe extern "C" fn shim(_wid: *mut Fl_Widget, data: *mut raw::c_void) {
let a: *mut Box<dyn FnMut()> = data as *mut Box<dyn FnMut()>;
let f: &mut (dyn FnMut()) = &mut **a;
let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f()));
}
let a: *mut Box<dyn FnMut()> = Box::into_raw(Box::new(cb));
let data: *mut raw::c_void = a as *mut raw::c_void;
let callback: Fl_Callback = Some(shim);
#insert(self._inner, idx as i32, temp.as_ptr(), shortcut as i32, callback, data, flag as i32);
}
}
fn add_emit<T: 'static + Copy + Send + Sync>(
&mut self,
name: &str,
shortcut: Shortcut,
flag: crate::menu::MenuFlag,
sender: crate::app::Sender<T>,
msg: T,
) {
self.add(name, shortcut, flag, Box::new(move|| sender.send(msg)))
}
fn insert_emit<T: 'static + Copy + Send + Sync>(
&mut self,
idx: u32,
name: &str,
shortcut: Shortcut,
flag: crate::menu::MenuFlag,
sender: crate::app::Sender<T>,
msg: T,
) {
self.insert(idx, name, shortcut, flag, Box::new(move|| sender.send(msg)))
}
fn remove(&mut self, idx: u32) {
assert!(!self.was_deleted());
assert!(idx < self.size());
debug_assert!(idx <= std::i32::MAX as u32, "u32 entries have to be < std::i32::MAX for compatibility!");
unsafe {
#remove(self._inner, idx as i32)
}
}
fn find_item(&self, name: &str) -> Option<MenuItem> {
assert!(!self.was_deleted());
                let name = CString::new(name).unwrap();
unsafe {
let menu_item = #get_item(
self._inner,
name.as_ptr());
if menu_item.is_null() {
None
} else {
Some(MenuItem {
_inner: menu_item,
_parent: self as *const _ as *const MenuBar,
_alloc: false,
})
}
}
}
fn set_item(&mut self, item: &MenuItem) -> bool {
unsafe {
assert!(!self.was_deleted());
#set_item(
self._inner,
item._inner) != 0
}
}
fn find_index(&self, label: &str) -> u32 {
assert!(!self.was_deleted());
                let label = CString::new(label).unwrap();
                unsafe {
                    // Keep the CString alive across the call: taking `.as_ptr()` on a
                    // temporary would hand FLTK a dangling pointer.
                    #find_index(self._inner, label.as_ptr() as *mut raw::c_char) as u32
                }
}
fn text_font(&self) -> Font {
unsafe {
assert!(!self.was_deleted());
mem::transmute(#text_font(self._inner))
}
}
fn set_text_font(&mut self, c: Font) {
unsafe {
assert!(!self.was_deleted());
#set_text_font(self._inner, c as i32)
}
}
fn text_size(&self) -> u32 {
unsafe {
assert!(!self.was_deleted());
#text_size(self._inner) as u32
}
}
fn set_text_size(&mut self, c: u32) {
unsafe {
debug_assert!(c <= std::i32::MAX as u32, "u32 entries have to be < std::i32::MAX for compatibility!");
assert!(!self.was_deleted());
#set_text_size(self._inner, c as i32)
}
}
fn text_color(&self) -> Color {
unsafe {
assert!(!self.was_deleted());
mem::transmute(#text_color(self._inner))
}
}
fn set_text_color(&mut self, c: Color) {
unsafe {
assert!(!self.was_deleted());
#set_text_color(self._inner, c as u32)
}
}
fn add_choice(&mut self, text: &str) {
unsafe {
assert!(!self.was_deleted());
let arg2 = CString::new(text).unwrap();
#add_choice(self._inner, arg2.as_ptr() as *mut raw::c_char)
}
}
fn choice(&self) -> Option<String> {
unsafe {
assert!(!self.was_deleted());
let choice_ptr = #get_choice(self._inner);
if choice_ptr.is_null() {
None
} else {
Some(CStr::from_ptr(choice_ptr as *mut raw::c_char).to_string_lossy().to_string())
}
}
}
fn value(&self) -> i32 {
unsafe {
assert!(!self.was_deleted());
#value(self._inner)
}
}
fn set_value(&mut self,v:i32) -> bool {
unsafe {
assert!(!self.was_deleted());
#set_value(self._inner,v) != 0
}
}
fn clear(&mut self) {
unsafe {
assert!(!self.was_deleted());
#clear(self._inner);
self.redraw();
}
}
unsafe fn unsafe_clear(&mut self) {
assert!(!self.was_deleted());
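                // Replace each item's callback with a no-op before clearing, so the
                // boxed closures FLTK still holds aren't left pointing at freed data.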
let sz = self.size();
if sz > 0 {
for i in 0..sz {
let mut c = self.at(i).unwrap();
c.set_callback(Box::new(move || { /* Do nothing! */ }));
}
}
#clear(self._inner);
self.redraw();
}
fn clear_submenu(&mut self, idx: u32) -> Result<(), FltkError> {
unsafe {
assert!(!self.was_deleted());
debug_assert!(
idx <= std::i32::MAX as u32,
"u32 entries have to be < std::i32::MAX for compatibility!"
);
match #clear_submenu(self._inner, idx as i32) {
0 => Ok(()),
_ => Err(FltkError::Internal(FltkErrorKind::FailedOperation)),
}
}
}
unsafe fn unsafe_clear_submenu(&mut self, idx: u32) -> Result<(), FltkError> {
assert!(!self.was_deleted());
debug_assert!(
idx <= std::i32::MAX as u32,
"u32 entries have to be < std::i32::MAX for compatibility!"
);
let x = self.at(idx);
if x.is_none() {
return Err(FltkError::Internal(FltkErrorKind::FailedOperation));
}
let x = x.unwrap();
if !x.is_submenu() {
return Err(FltkError::Internal(FltkErrorKind::FailedOperation));
}
let mut i = idx;
loop {
let mut item = self.at(i).unwrap();
if item.label().is_none() {
break;
}
item.set_callback(Box::new(move || { /* Do nothing! */ }));
i += 1;
}
match #clear_submenu(self._inner, idx as i32) {
0 => Ok(()),
_ => Err(FltkError::Internal(FltkErrorKind::FailedOperation)),
}
}
fn size(&self) -> u32 {
assert!(!self.was_deleted());
unsafe {
#size(self._inner) as u32
}
}
fn text(&self, idx: u32) -> Option<String> {
assert!(!self.was_deleted());
debug_assert!(idx <= std::i32::MAX as u32, "u32 entries have to be < std::i32::MAX for compatibility!");
unsafe {
let text = #text(self._inner, idx as i32);
if text.is_null() {
None
} else {
Some(CStr::from_ptr(text as *mut raw::c_char).to_string_lossy().to_string())
}
}
}
fn at(&self, idx: u32) -> Option<crate::menu::MenuItem> {
assert!(!self.was_deleted());
debug_assert!(idx <= std::i32::MAX as u32, "u32 entries have to be < std::i32::MAX for compatibility!");
if idx >= self.size() {
return None;
}
unsafe {
let ptr = #at(self._inner, idx as i32) as *mut Fl_Menu_Item;
if ptr.is_null() {
None
} else {
Some(MenuItem {
_inner: ptr,
_parent: self as *const _ as *const MenuBar,
_alloc: false,
})
}
}
}
fn mode(&self, idx: u32) -> crate::menu::MenuFlag {
assert!(!self.was_deleted());
debug_assert!(idx <= std::i32::MAX as u32, "u32 entries have to be < std::i32::MAX for compatibility!");
unsafe {
mem::transmute(#mode(self._inner, idx as i32))
}
}
fn set_mode(&mut self, idx: u32, flag: crate::menu::MenuFlag) {
assert!(!self.was_deleted());
debug_assert!(idx <= std::i32::MAX as u32, "u32 entries have to be < std::i32::MAX for compatibility!");
unsafe {
#set_mode(self._inner, idx as i32, flag as i32)
}
}
}
};
gen.into()
} | // self.top_window().unwrap().takes_events() && self.takes_events(), |
706.design-hash-map.py | #
# @lc app=leetcode id=706 lang=python3
#
# [706] Design HashMap
#
# https://leetcode.com/problems/design-hashmap/description/
#
# algorithms
# Easy (57.83%)
# Likes: 600
# Dislikes: 84
# Total Accepted: 65.7K
# Total Submissions: 112.1K
# Testcase Example: '["MyHashMap","put","put","get","get","put","get", "remove", "get"]\n' +
'[[],[1,1],[2,2],[1],[3],[2,1],[2],[2],[2]]'
#
# Design a HashMap without using any built-in hash table libraries.
#
# To be specific, your design should include these functions:
#
#
# put(key, value) : Insert a (key, value) pair into the HashMap. If the key
# already exists in the HashMap, update the value.
# get(key): Returns the value to which the specified key is mapped, or -1 if
# this map contains no mapping for the key.
# remove(key) : Remove the mapping for the key if this map contains the
# mapping for the key.
#
#
#
# Example:
#
#
# MyHashMap hashMap = new MyHashMap();
# hashMap.put(1, 1);
# hashMap.put(2, 2);
# hashMap.get(1); // returns 1
# hashMap.get(3); // returns -1 (not found)
# hashMap.put(2, 1); // update the existing value
# hashMap.get(2); // returns 1
# hashMap.remove(2); // remove the mapping for 2
# hashMap.get(2); // returns -1 (not found)
#
#
#
# Note:
#
#
# All keys and values will be in the range of [0, 1000000].
# The number of operations will be in the range of [1, 10000].
# Please do not use the built-in HashMap library.
#
#
#
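# Note: this solution leans on Python's built-in dict for brevity, which the
# statement above technically disallows. A compliant bucket-array sketch would
# hash with idx = key % 1000 and keep (key, value) pairs in buckets[idx],
# scanning the bucket on put/get/remove.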
# @lc code=start
class MyHashMap:
def __init__(self):
"""
Initialize your data structure here.
"""
        self.dict = {}
def put(self, key: int, value: int) -> None:
| ""
value will always be non-negative.
"""
        self.dict[key] = value
def get(self, key: int) -> int:
"""
Returns the value to which the specified key is mapped, or -1 if this map contains no mapping for the key
"""
if key in self.dict:
return self.dict[key]
else:
return -1
def remove(self, key: int) -> None:
"""
        Removes the mapping for the specified key if this map contains a mapping for the key
"""
        self.dict.pop(key, None)
# Your MyHashMap object will be instantiated and called as such:
# obj = MyHashMap()
# obj.put(key,value)
# param_2 = obj.get(key)
# obj.remove(key)
# @lc code=end
| " |
threadpool.rs | // Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::boxed::FnBox;
use std::collections::VecDeque;
use std::fmt::Write;
use std::marker::PhantomData;
use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrdering};
use std::sync::{Arc, Condvar, Mutex};
use std::thread::{Builder, JoinHandle};
use std::time::Duration;
use std::usize;
pub const DEFAULT_TASKS_PER_TICK: usize = 10000;
const DEFAULT_QUEUE_CAPACITY: usize = 1000;
const DEFAULT_THREAD_COUNT: usize = 1;
const NAP_SECS: u64 = 1;
const QUEUE_MAX_CAPACITY: usize = 8 * DEFAULT_QUEUE_CAPACITY;
pub trait Context: Send {
fn on_task_started(&mut self) {}
fn on_task_finished(&mut self) {}
fn on_tick(&mut self) {}
}
#[derive(Default)]
pub struct DefaultContext;
impl Context for DefaultContext {}
pub trait ContextFactory<Ctx: Context> {
fn create(&self) -> Ctx;
}
pub struct DefaultContextFactory;
impl<C: Context + Default> ContextFactory<C> for DefaultContextFactory {
fn create(&self) -> C {
C::default()
}
}
pub struct Task<C> {
task: Box<FnBox(&mut C) + Send>,
}
impl<C: Context> Task<C> {
fn new<F>(job: F) -> Task<C>
where
for<'r> F: FnOnce(&'r mut C) + Send + 'static,
{
Task {
task: Box::new(job),
}
}
}
// First in first out queue.
pub struct FifoQueue<C> {
queue: VecDeque<Task<C>>,
}
impl<C: Context> FifoQueue<C> {
fn new() -> FifoQueue<C> {
FifoQueue {
queue: VecDeque::with_capacity(DEFAULT_QUEUE_CAPACITY),
}
}
fn push(&mut self, task: Task<C>) {
self.queue.push_back(task);
}
fn pop(&mut self) -> Option<Task<C>> {
let task = self.queue.pop_front();
if self.queue.is_empty() && self.queue.capacity() > QUEUE_MAX_CAPACITY |
task
}
}
pub struct ThreadPoolBuilder<C, F> {
name: String,
thread_count: usize,
tasks_per_tick: usize,
stack_size: Option<usize>,
factory: F,
_ctx: PhantomData<C>,
}
impl<C: Context + Default + 'static> ThreadPoolBuilder<C, DefaultContextFactory> {
pub fn with_default_factory(name: String) -> ThreadPoolBuilder<C, DefaultContextFactory> {
ThreadPoolBuilder::new(name, DefaultContextFactory)
}
}
impl<C: Context + 'static, F: ContextFactory<C>> ThreadPoolBuilder<C, F> {
pub fn new(name: String, factory: F) -> ThreadPoolBuilder<C, F> {
ThreadPoolBuilder {
name,
thread_count: DEFAULT_THREAD_COUNT,
tasks_per_tick: DEFAULT_TASKS_PER_TICK,
stack_size: None,
factory,
_ctx: PhantomData,
}
}
pub fn thread_count(mut self, count: usize) -> ThreadPoolBuilder<C, F> {
self.thread_count = count;
self
}
pub fn tasks_per_tick(mut self, count: usize) -> ThreadPoolBuilder<C, F> {
self.tasks_per_tick = count;
self
}
pub fn stack_size(mut self, size: usize) -> ThreadPoolBuilder<C, F> {
self.stack_size = Some(size);
self
}
pub fn build(self) -> ThreadPool<C> {
ThreadPool::new(
self.name,
self.thread_count,
self.tasks_per_tick,
self.stack_size,
self.factory,
)
}
}
struct ScheduleState<Ctx> {
queue: FifoQueue<Ctx>,
stopped: bool,
}
/// `ThreadPool` is used to execute tasks in parallel.
/// Each task would be pushed into the pool, and when a thread
/// is ready to process a task, it will get a task from the pool
/// according to the `ScheduleQueue` provided in initialization.
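///
/// A minimal usage sketch with the types defined in this module (`execute`
/// accepts any `FnOnce(&mut Ctx)` closure):
///
/// ```ignore
/// let mut pool = ThreadPoolBuilder::with_default_factory("example".to_owned()).build();
/// pool.execute(|_: &mut DefaultContext| {
///     // runs on one of the pool's worker threads
/// });
/// pool.stop().unwrap();
/// ```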
pub struct ThreadPool<Ctx> {
state: Arc<(Mutex<ScheduleState<Ctx>>, Condvar)>,
threads: Vec<JoinHandle<()>>,
task_count: Arc<AtomicUsize>,
}
impl<Ctx> ThreadPool<Ctx>
where
Ctx: Context + 'static,
{
fn new<C: ContextFactory<Ctx>>(
name: String,
num_threads: usize,
tasks_per_tick: usize,
stack_size: Option<usize>,
f: C,
) -> ThreadPool<Ctx> {
assert!(num_threads >= 1);
let state = ScheduleState {
queue: FifoQueue::new(),
stopped: false,
};
let state = Arc::new((Mutex::new(state), Condvar::new()));
let mut threads = Vec::with_capacity(num_threads);
let task_count = Arc::new(AtomicUsize::new(0));
// Threadpool threads
for _ in 0..num_threads {
let state = Arc::clone(&state);
let task_num = Arc::clone(&task_count);
let ctx = f.create();
let mut tb = Builder::new().name(name.clone());
if let Some(stack_size) = stack_size {
tb = tb.stack_size(stack_size);
}
let thread =
tb.spawn(move || {
let mut worker = Worker::new(state, task_num, tasks_per_tick, ctx);
worker.run();
}).unwrap();
threads.push(thread);
}
ThreadPool {
state,
threads,
task_count,
}
}
pub fn execute<F>(&self, job: F)
where
F: FnOnce(&mut Ctx) + Send + 'static,
Ctx: Context,
{
let task = Task::new(job);
let &(ref lock, ref cvar) = &*self.state;
{
let mut state = lock.lock().unwrap();
if state.stopped {
return;
}
state.queue.push(task);
cvar.notify_one();
}
self.task_count.fetch_add(1, AtomicOrdering::SeqCst);
}
#[inline]
pub fn get_task_count(&self) -> usize {
self.task_count.load(AtomicOrdering::SeqCst)
}
pub fn stop(&mut self) -> Result<(), String> {
let &(ref lock, ref cvar) = &*self.state;
{
let mut state = lock.lock().unwrap();
state.stopped = true;
cvar.notify_all();
}
let mut err_msg = String::new();
for t in self.threads.drain(..) {
if let Err(e) = t.join() {
write!(&mut err_msg, "Failed to join thread with err: {:?};", e).unwrap();
}
}
if !err_msg.is_empty() {
return Err(err_msg);
}
Ok(())
}
}
// Each thread has a worker.
struct Worker<C> {
state: Arc<(Mutex<ScheduleState<C>>, Condvar)>,
task_count: Arc<AtomicUsize>,
tasks_per_tick: usize,
task_counter: usize,
ctx: C,
}
impl<C> Worker<C>
where
C: Context,
{
fn new(
state: Arc<(Mutex<ScheduleState<C>>, Condvar)>,
task_count: Arc<AtomicUsize>,
tasks_per_tick: usize,
ctx: C,
) -> Worker<C> {
Worker {
state,
task_count,
tasks_per_tick,
task_counter: 0,
ctx,
}
}
fn next_task(&mut self) -> Option<Task<C>> {
let &(ref lock, ref cvar) = &*self.state;
let mut state = lock.lock().unwrap();
let mut timeout = Some(Duration::from_secs(NAP_SECS));
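        // Bound only the first wait: if it times out with no work, fire `on_tick`
        // once, then block indefinitely until a task arrives or the pool stops.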
loop {
if state.stopped {
return None;
}
match state.queue.pop() {
Some(t) => {
self.task_counter += 1;
return Some(t);
}
None => {
state = match timeout {
Some(t) => cvar.wait_timeout(state, t).unwrap().0,
None => {
self.task_counter = 0;
self.ctx.on_tick();
cvar.wait(state).unwrap()
}
};
timeout = None;
}
}
}
}
fn run(&mut self) {
loop {
let task = match self.next_task() {
None => return,
Some(t) => t,
};
self.ctx.on_task_started();
(task.task).call_box((&mut self.ctx,));
self.ctx.on_task_finished();
self.task_count.fetch_sub(1, AtomicOrdering::SeqCst);
if self.task_counter == self.tasks_per_tick {
self.task_counter = 0;
self.ctx.on_tick();
}
}
}
}
#[cfg(test)]
mod test {
use super::*;
use std::sync::atomic::{AtomicIsize, Ordering};
use std::sync::mpsc::{channel, Sender};
use std::sync::{Arc, Mutex};
use std::time::Duration;
#[test]
fn test_get_task_count() {
let name = thd_name!("test_get_task_count");
let mut task_pool = ThreadPoolBuilder::with_default_factory(name).build();
let (tx, rx) = channel();
let (ftx, frx) = channel();
let receiver = Arc::new(Mutex::new(rx));
let timeout = Duration::from_secs(2);
let group_num = 4;
let mut task_num = 0;
for gid in 0..group_num {
let rxer = Arc::clone(&receiver);
let ftx = ftx.clone();
task_pool.execute(move |_: &mut DefaultContext| {
let rx = rxer.lock().unwrap();
let id = rx.recv_timeout(timeout).unwrap();
assert_eq!(id, gid);
ftx.send(true).unwrap();
});
task_num += 1;
assert_eq!(task_pool.get_task_count(), task_num);
}
for gid in 0..group_num {
tx.send(gid).unwrap();
frx.recv_timeout(timeout).unwrap();
let left_num = task_pool.get_task_count();
// current task may be still running.
assert!(
left_num == task_num || left_num == task_num - 1,
format!("left_num {},task_num {}", left_num, task_num)
);
task_num -= 1;
}
task_pool.stop().unwrap();
}
#[test]
fn test_task_context() {
struct TestContext {
counter: Arc<AtomicIsize>,
tx: Sender<()>,
}
unsafe impl Send for TestContext {}
impl Context for TestContext {
fn on_task_started(&mut self) {
self.counter.fetch_add(1, Ordering::SeqCst);
}
fn on_task_finished(&mut self) {
self.counter.fetch_add(1, Ordering::SeqCst);
self.tx.send(()).unwrap();
}
fn on_tick(&mut self) {}
}
struct TestContextFactory {
counter: Arc<AtomicIsize>,
tx: Sender<()>,
}
impl ContextFactory<TestContext> for TestContextFactory {
fn create(&self) -> TestContext {
TestContext {
counter: Arc::clone(&self.counter),
tx: self.tx.clone(),
}
}
}
let (tx, rx) = channel();
let f = TestContextFactory {
counter: Arc::new(AtomicIsize::new(0)),
tx,
};
let ctx = f.create();
let name = thd_name!("test_tasks_with_contexts");
let mut task_pool = ThreadPoolBuilder::new(name, f).thread_count(5).build();
for _ in 0..10 {
task_pool.execute(move |_: &mut TestContext| {});
}
for _ in 0..10 {
rx.recv_timeout(Duration::from_millis(20)).unwrap();
}
task_pool.stop().unwrap();
assert_eq!(ctx.counter.load(Ordering::SeqCst), 20);
}
#[test]
fn test_task_tick() {
struct TestContext {
counter: Arc<AtomicIsize>,
tx: Sender<()>,
}
unsafe impl Send for TestContext {}
impl Context for TestContext {
fn on_task_started(&mut self) {}
fn on_task_finished(&mut self) {}
fn on_tick(&mut self) {
self.counter.fetch_add(1, Ordering::SeqCst);
let _ = self.tx.send(());
}
}
struct TestContextFactory {
counter: Arc<AtomicIsize>,
tx: Sender<()>,
}
impl ContextFactory<TestContext> for TestContextFactory {
fn create(&self) -> TestContext {
TestContext {
counter: Arc::clone(&self.counter),
tx: self.tx.clone(),
}
}
}
let (tx, rx) = channel();
let f = TestContextFactory {
counter: Arc::new(AtomicIsize::new(0)),
tx,
};
let ctx = f.create();
let name = thd_name!("test_tasks_tick");
let mut task_pool = ThreadPoolBuilder::new(name, f)
.thread_count(5)
.tasks_per_tick(1)
.build();
for _ in 0..10 {
task_pool.execute(move |_: &mut TestContext| {});
}
for _ in 0..10 {
rx.recv_timeout(Duration::from_millis(100)).unwrap();
}
task_pool.stop().unwrap();
// `on_tick` may be called even if there is no task.
assert!(ctx.counter.load(Ordering::SeqCst) >= 10);
}
}
| {
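            // Shrink an over-grown, now-empty queue back to its default capacity
            // so a one-off burst of queued tasks doesn't pin memory forever.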
self.queue = VecDeque::with_capacity(DEFAULT_QUEUE_CAPACITY);
} |
veggie_pizza.py | from pizza import Pizza |
class VeggiePizza(Pizza):
def __init__(self):
self.name = 'Veggie Pizza'
self.dough = 'Crust'
        self.sauce = 'Marinara sauce'
        self.toppings = []  # start a fresh per-instance list; the appends below need it to exist
self.toppings.append('Shredded mozzarella')
self.toppings.append('Grated parmesan')
self.toppings.append('Diced onion')
self.toppings.append('Sliced mushrooms')
self.toppings.append('Sliced red pepper')
self.toppings.append('Sliced black olives') | |
metadata.go | /*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package metadata
import (
"fmt"
"runtime"
common "github.com/hyperledger/fabric/common/metadata"
)
| var Version string = common.Version
// package-scoped constants
// Program name
const ProgramName = "orderer"
func GetVersionInfo() string {
return fmt.Sprintf(
"%s:\n Version: %s\n Commit SHA: %s\n Go version: %s\n OS/Arch: %s\n",
ProgramName,
Version,
common.CommitSHA,
runtime.Version(),
fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH),
)
} | // package-scoped variables
// Package version |
Macro.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Macros for `uniffi`.
//!
//! Currently this is just for easily generating integration tests, but maybe
//! we'll put some other code-annotation helper macros in here at some point.
use quote::{format_ident, quote};
use std::env;
use std::path::PathBuf;
use syn::{bracketed, punctuated::Punctuated, LitStr, Token};
/// A macro to build testcases for a component's generated bindings.
///
/// This macro provides some plumbing to write automated tests for the generated
/// foreign language bindings of a component. As a component author, you can write
/// script files in the target foreign language(s) that exercise your component's API,
/// and then call this macro to produce a `cargo test` testcase from each one.
/// The generated code will execute your script file with appropriate configuration and
/// environment to let it load the component bindings, and will pass iff the script
/// exits successfully.
///
/// To use it, invoke the macro with the udl file as the first argument, then
/// one or more file paths relative to the crate root directory.
/// It will produce one `#[test]` function per file, in a manner designed to
/// play nicely with `cargo test` and its test filtering options.
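///
/// As an illustration (the `.udl` and script paths here are hypothetical and
/// must exist relative to the crate root):
///
/// ```ignore
/// build_foreign_language_testcases!(
///     "src/my_component.udl",
///     ["tests/bindings/test_my_component.py"]
/// );
/// ```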
#[proc_macro]
pub fn build_foreign_language_testcases(paths: proc_macro::TokenStream) -> proc_macro::TokenStream {
let paths = syn::parse_macro_input!(paths as FilePaths);
// We resolve each path relative to the crate root directory.
let pkg_dir = env::var("CARGO_MANIFEST_DIR")
.expect("Missing $CARGO_MANIFEST_DIR, cannot build tests for generated bindings");
// For each file found, generate a matching testcase.
let udl_file = &paths.udl_file;
let test_functions = paths.test_scripts
.iter()
.map(|file_path| {
let test_file_pathbuf: PathBuf = [&pkg_dir, &file_path].iter().collect();
let test_file_path = test_file_pathbuf.to_string_lossy();
let test_file_name = test_file_pathbuf
.file_name()
.expect("Test file has no name, cannot build tests for generated bindings")
.to_string_lossy();
let test_name = format_ident!(
"uniffi_foreign_language_testcase_{}",
test_file_name.replace(|c: char| !c.is_alphanumeric(), "_")
);
quote! {
#[test]
fn #test_name () -> uniffi::deps::anyhow::Result<()> {
uniffi::testing::run_foreign_language_testcase(#pkg_dir, #udl_file, #test_file_path)
}
}
})
.collect::<Vec<proc_macro2::TokenStream>>();
let test_module = quote! {
#(#test_functions)*
};
proc_macro::TokenStream::from(test_module)
}
/// Newtype to simplify parsing a list of file paths from macro input.
#[derive(Debug)]
struct FilePaths {
udl_file: String,
test_scripts: Vec<String>,
}
impl syn::parse::Parse for FilePaths {
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
let udl_file: LitStr = input.parse()?;
let _comma: Token![,] = input.parse()?;
let array_contents;
bracketed!(array_contents in input);
let test_scripts = Punctuated::<LitStr, Token![,]>::parse_terminated(&array_contents)?
.iter()
.map(|s| s.value())
.collect();
Ok(FilePaths {
udl_file: udl_file.value(),
test_scripts,
})
}
}
/// A helper macro to include generated component scaffolding.
///
/// This is a simple convenience macro to include the UniFFI component
/// scaffolding as built by `uniffi_build::generate_scaffolding`.
/// Use it like so:
///
/// ```rs
/// uniffi_macros::include_scaffolding!("my_component_name");
/// ```
///
/// This will expand to the appropriate `include!` invocation to include
/// the generated `my_component_name.uniffi.rs` (which it assumes has
/// been successfully built by your crate's `build.rs` script).
//
#[proc_macro]
pub fn include_scaffolding(component_name: proc_macro::TokenStream) -> proc_macro::TokenStream {
let name = syn::parse_macro_input!(component_name as syn::LitStr);
if std::env::var("OUT_DIR").is_err() | else {
quote! {
include!(concat!(env!("OUT_DIR"), "/", #name, ".uniffi.rs"));
}
}.into()
}
| {
quote! {
compile_error!("This macro assumes the crate has a build.rs script, but $OUT_DIR is not present");
}
} |
overview-newtag.js | import {html, PolymerElement} from '@polymer/polymer/polymer-element.js';
import '@vaadin/vaadin-ordered-layout/src/vaadin-vertical-layout.js';
class OverviewNewtag extends PolymerElement {
static get template() {
return html`
<style include="shared-styles">
:host {
display: block;
height: 100%;
}
</style> | <div id="div"></div>
</vaadin-vertical-layout>
`;
}
static get is() {
return 'overview-newtag';
}
static get properties() {
return {
// Declare your properties here.
};
}
}
customElements.define(OverviewNewtag.is, OverviewNewtag); | <vaadin-vertical-layout style="width: 100%; height: 100%;"> |
test_ntm.py | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import os
import numpy as np
import pytest
from sagemaker import NTM, NTMModel, Predictor
from sagemaker.amazon.common import read_records
from sagemaker.serverless import ServerlessInferenceConfig
from sagemaker.utils import unique_name_from_base
from tests.integ import DATA_DIR, TRAINING_DEFAULT_TIMEOUT_MINUTES
from tests.integ.timeout import timeout, timeout_and_delete_endpoint_by_name
from tests.integ.record_set import prepare_record_set_from_local_files
@pytest.mark.release
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def | (sagemaker_session, cpu_instance_type):
job_name = unique_name_from_base("ntm")
with timeout(minutes=TRAINING_DEFAULT_TIMEOUT_MINUTES):
data_path = os.path.join(DATA_DIR, "ntm")
data_filename = "nips-train_1.pbr"
with open(os.path.join(data_path, data_filename), "rb") as f:
all_records = read_records(f)
# all records must be same
feature_num = int(all_records[0].features["values"].float32_tensor.shape[0])
ntm = NTM(
role="SageMakerRole",
instance_count=1,
instance_type=cpu_instance_type,
num_topics=10,
sagemaker_session=sagemaker_session,
)
record_set = prepare_record_set_from_local_files(
data_path, ntm.data_location, len(all_records), feature_num, sagemaker_session
)
ntm.fit(records=record_set, job_name=job_name)
with timeout_and_delete_endpoint_by_name(job_name, sagemaker_session):
model = NTMModel(ntm.model_data, role="SageMakerRole", sagemaker_session=sagemaker_session)
predictor = model.deploy(1, cpu_instance_type, endpoint_name=job_name)
predict_input = np.random.rand(1, feature_num)
result = predictor.predict(predict_input)
assert len(result) == 1
for record in result:
assert record.label["topic_weights"] is not None
def test_ntm_serverless_inference(sagemaker_session, cpu_instance_type):
job_name = unique_name_from_base("ntm-serverless")
with timeout(minutes=TRAINING_DEFAULT_TIMEOUT_MINUTES):
data_path = os.path.join(DATA_DIR, "ntm")
data_filename = "nips-train_1.pbr"
with open(os.path.join(data_path, data_filename), "rb") as f:
all_records = read_records(f)
# all records must be same
feature_num = int(all_records[0].features["values"].float32_tensor.shape[0])
ntm = NTM(
role="SageMakerRole",
instance_count=1,
instance_type=cpu_instance_type,
num_topics=10,
sagemaker_session=sagemaker_session,
)
record_set = prepare_record_set_from_local_files(
data_path, ntm.data_location, len(all_records), feature_num, sagemaker_session
)
ntm.fit(records=record_set, job_name=job_name)
with timeout_and_delete_endpoint_by_name(job_name, sagemaker_session):
model = NTMModel(ntm.model_data, role="SageMakerRole", sagemaker_session=sagemaker_session)
predictor = model.deploy(
serverless_inference_config=ServerlessInferenceConfig(), endpoint_name=job_name
)
assert isinstance(predictor, Predictor)
| test_ntm |
Recibo.js | import React, { useState, useEffect } from 'react';
import './Recibo.scss';
import { Print } from './Print.js'
export const Recibo = () => {
const [from,setFrom] = useState('Johan Vargas');
const [to,setTo] = useState('Johan Vargas');
const [amount,setAmount] = useState(0);
const [date,setDate] = useState(new Date());
const [observations, setObservations] = useState('Pagado');
  useEffect(() => {
    const timer = setInterval(() => setDate(new Date()), 1000);
    return function cleanup() {
      clearInterval(timer);
    };
  }, []); // run once on mount; cleanup clears the interval on unmount
| <label>
De:
<input type="text" value={from} onChange={e => setFrom(e.target.value)} />
</label>
</div>
<div className="recibo__formgroup">
<label>
Para:
<input type="text" value={to} onChange={e => setTo(e.target.value)} />
</label>
</div>
<div className="recibo__formgroup">
<label>
Monto:
<input type="text" value={amount} onChange={e => setAmount(e.target.value)} />
</label>
</div>
<div className="recibo__formgroup">
<label>
Fecha:
<input type="text" value={date} onChange={e => date(e.target.value)} />
</label>
</div>
<div className="recibo__formgroup">
<label>
Observaciones:
<textarea type="text" value={observations} onChange={e => setObservations(e.target.value)} />
</label>
</div>
</div>
<Print from={from} to={to} amount={amount} date={date } observations={observations}/>
</div>
);
} | return (
<div className="recibo">
<div className="recibo__datos">
<div className="recibo__formgroup"> |
reader_at.go | package filer
import (
"context"
"fmt"
"github.com/chrislusf/seaweedfs/weed/glog"
"github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
"github.com/chrislusf/seaweedfs/weed/util/chunk_cache"
"github.com/chrislusf/seaweedfs/weed/wdclient"
"github.com/golang/groupcache/singleflight"
"io"
"math/rand"
"sync"
"time"
)
var (
ReadWaitTime = 6 * time.Second
)
type ChunkReadAt struct {
masterClient *wdclient.MasterClient
chunkViews []*ChunkView
lookupFileId LookupFileIdFunctionType
readerLock sync.Mutex
fileSize int64
fetchGroup singleflight.Group
lastChunkFileId string
lastChunkData []byte
chunkCache chunk_cache.ChunkCache
}
// var _ = io.ReaderAt(&ChunkReadAt{})
type LookupFileIdFunctionType func(fileId string) (targetUrls []string, err error)
func LookupFn(filerClient filer_pb.FilerClient) LookupFileIdFunctionType {
vidCache := make(map[string]*filer_pb.Locations)
return func(fileId string) (targetUrls []string, err error) {
vid := VolumeId(fileId)
locations, found := vidCache[vid]
waitTime := time.Second
for !found && waitTime < ReadWaitTime {
// println("looking up volume", vid)
err = filerClient.WithFilerClient(func(client filer_pb.SeaweedFilerClient) error {
resp, err := client.LookupVolume(context.Background(), &filer_pb.LookupVolumeRequest{
VolumeIds: []string{vid},
})
if err != nil {
return err
}
locations = resp.LocationsMap[vid]
if locations == nil || len(locations.Locations) == 0 {
glog.V(0).Infof("failed to locate %s", fileId)
return fmt.Errorf("failed to locate %s", fileId)
}
vidCache[vid] = locations
return nil
})
if err == nil {
break
}
glog.V(1).Infof("wait for volume %s", vid)
time.Sleep(waitTime)
waitTime += waitTime / 2
}
if err != nil {
return nil, err
}
for _, loc := range locations.Locations {
volumeServerAddress := filerClient.AdjustedUrl(loc)
targetUrl := fmt.Sprintf("http://%s/%s", volumeServerAddress, fileId)
targetUrls = append(targetUrls, targetUrl)
}
for i := len(targetUrls) - 1; i > 0; i-- {
j := rand.Intn(i + 1)
targetUrls[i], targetUrls[j] = targetUrls[j], targetUrls[i]
}
return
}
}
func | (filerClient filer_pb.FilerClient, chunkViews []*ChunkView, chunkCache chunk_cache.ChunkCache, fileSize int64) *ChunkReadAt {
return &ChunkReadAt{
chunkViews: chunkViews,
lookupFileId: LookupFn(filerClient),
chunkCache: chunkCache,
fileSize: fileSize,
}
}
func (c *ChunkReadAt) ReadAt(p []byte, offset int64) (n int, err error) {
c.readerLock.Lock()
defer c.readerLock.Unlock()
glog.V(4).Infof("ReadAt [%d,%d) of total file size %d bytes %d chunk views", offset, offset+int64(len(p)), c.fileSize, len(c.chunkViews))
return c.doReadAt(p[n:], offset+int64(n))
}
func (c *ChunkReadAt) doReadAt(p []byte, offset int64) (n int, err error) {
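	// Walk the chunk views in order: zero-fill any gap before the next chunk's
	// logical offset, then copy the overlapping slice of each chunk into p.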
var buffer []byte
startOffset, remaining := offset, int64(len(p))
var nextChunk *ChunkView
for i, chunk := range c.chunkViews {
if remaining <= 0 {
break
}
if i+1 < len(c.chunkViews) {
nextChunk = c.chunkViews[i+1]
} else {
nextChunk = nil
}
if startOffset < chunk.LogicOffset {
gap := int(chunk.LogicOffset - startOffset)
glog.V(4).Infof("zero [%d,%d)", startOffset, startOffset+int64(gap))
n += int(min(int64(gap), remaining))
startOffset, remaining = chunk.LogicOffset, remaining-int64(gap)
if remaining <= 0 {
break
}
}
// fmt.Printf(">>> doReadAt [%d,%d), chunk[%d,%d)\n", offset, offset+int64(len(p)), chunk.LogicOffset, chunk.LogicOffset+int64(chunk.Size))
chunkStart, chunkStop := max(chunk.LogicOffset, startOffset), min(chunk.LogicOffset+int64(chunk.Size), startOffset+remaining)
if chunkStart >= chunkStop {
continue
}
glog.V(4).Infof("read [%d,%d), %d/%d chunk %s [%d,%d)", chunkStart, chunkStop, i, len(c.chunkViews), chunk.FileId, chunk.LogicOffset-chunk.Offset, chunk.LogicOffset-chunk.Offset+int64(chunk.Size))
buffer, err = c.readFromWholeChunkData(chunk, nextChunk)
if err != nil {
glog.Errorf("fetching chunk %+v: %v\n", chunk, err)
return
}
bufferOffset := chunkStart - chunk.LogicOffset + chunk.Offset
copied := copy(p[startOffset-offset:chunkStop-chunkStart+startOffset-offset], buffer[bufferOffset:bufferOffset+chunkStop-chunkStart])
n += copied
startOffset, remaining = startOffset+int64(copied), remaining-int64(copied)
}
glog.V(4).Infof("doReadAt [%d,%d), n:%v, err:%v", offset, offset+int64(len(p)), n, err)
if err == nil && remaining > 0 && c.fileSize > startOffset {
delta := int(min(remaining, c.fileSize-startOffset))
glog.V(4).Infof("zero2 [%d,%d) of file size %d bytes", startOffset, startOffset+int64(delta), c.fileSize)
n += delta
}
if err == nil && offset+int64(len(p)) >= c.fileSize {
err = io.EOF
}
// fmt.Printf("~~~ filled %d, err: %v\n\n", n, err)
return
}
func (c *ChunkReadAt) readFromWholeChunkData(chunkView *ChunkView, nextChunkViews ...*ChunkView) (chunkData []byte, err error) {
if c.lastChunkFileId == chunkView.FileId {
return c.lastChunkData, nil
}
v, doErr := c.readOneWholeChunk(chunkView)
if doErr != nil {
return nil, doErr
}
chunkData = v.([]byte)
c.lastChunkData = chunkData
c.lastChunkFileId = chunkView.FileId
for _, nextChunkView := range nextChunkViews {
if c.chunkCache != nil && nextChunkView != nil {
go c.readOneWholeChunk(nextChunkView)
}
}
return
}
func (c *ChunkReadAt) readOneWholeChunk(chunkView *ChunkView) (interface{}, error) {
var err error
return c.fetchGroup.Do(chunkView.FileId, func() (interface{}, error) {
glog.V(4).Infof("readFromWholeChunkData %s offset %d [%d,%d) size at least %d", chunkView.FileId, chunkView.Offset, chunkView.LogicOffset, chunkView.LogicOffset+int64(chunkView.Size), chunkView.ChunkSize)
data := c.chunkCache.GetChunk(chunkView.FileId, chunkView.ChunkSize)
if data != nil {
glog.V(4).Infof("cache hit %s [%d,%d)", chunkView.FileId, chunkView.LogicOffset-chunkView.Offset, chunkView.LogicOffset-chunkView.Offset+int64(len(data)))
} else {
var err error
data, err = c.doFetchFullChunkData(chunkView)
if err != nil {
return data, err
}
c.chunkCache.SetChunk(chunkView.FileId, data)
}
return data, err
})
}
func (c *ChunkReadAt) doFetchFullChunkData(chunkView *ChunkView) ([]byte, error) {
glog.V(4).Infof("+ doFetchFullChunkData %s", chunkView.FileId)
data, err := fetchChunk(c.lookupFileId, chunkView.FileId, chunkView.CipherKey, chunkView.IsGzipped)
glog.V(4).Infof("- doFetchFullChunkData %s", chunkView.FileId)
return data, err
}
| NewChunkReaderAtFromClient |
_configuration.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class WebSiteManagementClientConfiguration(Configuration):
"""Configuration for WebSiteManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Your Azure subscription ID. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000).
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
|
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(WebSiteManagementClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2015-08-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-web/{}'.format(VERSION))
self._configure(**kwargs) |
__init__.py | # -*- coding: utf-8 -*-
"""
tests.unit.cloud
~~~~~~~~~~~~~~~~
"""
from __future__ import absolute_import, print_function, unicode_literals
import salt.cloud
from tests.support.unit import TestCase
class CloudTest(TestCase):
| def test_vm_config_merger(self):
"""
Validate the vm's config is generated correctly.
https://github.com/saltstack/salt/issues/49226
"""
main = {
"minion": {"master": "172.31.39.213"},
"log_file": "var/log/salt/cloud.log",
"pool_size": 10,
}
provider = {
"private_key": "dwoz.pem",
"grains": {"foo1": "bar", "foo2": "bang"},
"availability_zone": "us-west-2b",
"driver": "ec2",
"ssh_interface": "private_ips",
"ssh_username": "admin",
"location": "us-west-2",
}
profile = {
"profile": "default",
"grains": {"meh2": "bar", "meh1": "foo"},
"provider": "ec2-default:ec2",
"ssh_username": "admin",
"image": "ami-0a1fbca0e5b419fd1",
"size": "t2.micro",
}
vm = salt.cloud.Cloud.vm_config("test_vm", main, provider, profile, {})
self.assertEqual(
{
"minion": {"master": "172.31.39.213"},
"log_file": "var/log/salt/cloud.log",
"pool_size": 10,
"private_key": "dwoz.pem",
"grains": {
"foo1": "bar",
"foo2": "bang",
"meh2": "bar",
"meh1": "foo",
},
"availability_zone": "us-west-2b",
"driver": "ec2",
"ssh_interface": "private_ips",
"ssh_username": "admin",
"location": "us-west-2",
"profile": "default",
"provider": "ec2-default:ec2",
"image": "ami-0a1fbca0e5b419fd1",
"size": "t2.micro",
"name": "test_vm",
},
vm,
) |
|
ADNI_V1_AV1451.py | __author__ = 'sulantha'
from Utils.DbUtils import DbUtils
import Config.PipelineConfig as pc
from Pipelines.ADNI_T1.ADNI_T1_Helper import ADNI_T1_Helper
from Utils.PipelineLogger import PipelineLogger
import distutils.dir_util
import distutils.file_util
import shutil
import subprocess
from Manager.QSubJob import QSubJob
from Manager.QSubJobHanlder import QSubJobHandler
import socket,os
import ast
from Pipelines.Helpers.PETHelper import PETHelper
class ProcessingItemObj:
def __init__(self, processingItem):
self.processing_rid = processingItem[0]
self.study = processingItem[1]
self.subject_rid = processingItem[2]
self.modality = processingItem[3]
self.scan_date = processingItem[4].strftime("%Y-%m-%d")
self.scan_time = str(processingItem[5])
self.s_identifier = processingItem[6]
self.i_identifier = processingItem[7]
self.root_folder = processingItem[8]
self.converted_folder = processingItem[9]
self.version = processingItem[10]
self.table_id = processingItem[17]
self.parameters = processingItem[19]
self.manual_xfm = processingItem[20]
self.qc = processingItem[21]
class ADNI_V1_AV1451:
def __init__(self):
self.DBClient = DbUtils()
self.MatchDBClient = DbUtils(database=pc.ADNI_dataMatchDBName)
self.PETHelper = PETHelper()
def process(self, processingItem): | if not matching_t1:
PipelineLogger.log('root', 'error', 'PET cannot be processed no matching T1 found. - {0} - {1} - {2}.'.format(processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date))
return 0
processed = ADNI_T1_Helper().checkProcessed(matching_t1)
if not processed:
PipelineLogger.log('root', 'error', 'PET cannot be processed due to matching T1 not being processed - {0}'.format(matching_t1))
return 0
else:
PipelineLogger.log('root', 'INFO', '+++++++++ PET ready to be processed. Will check for xfm. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
if processingItemObj.manual_xfm == '':
manualXFM = self.PETHelper.getManualXFM(processingItemObj, matching_t1)
processingItemObj.manual_xfm = manualXFM
elif processingItemObj.manual_xfm == 'Req_man_reg':
coregDone = self.PETHelper.checkIfAlreadyDone(processingItemObj, matching_t1)
if coregDone:
manualXFM = coregDone
setPPTableSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE RECORD_ID = {3}".format(processingItemObj.study, processingItemObj.modality, manualXFM, processingItemObj.table_id)
self.DBClient.executeNoResult(setPPTableSQL)
else:
self.PETHelper.requestCoreg(processingItemObj, matching_t1)
                    PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have been added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
return 0
else:
manualXFM = processingItemObj.manual_xfm
if manualXFM:
self.processPET(processingItemObj, processed)
else:
                PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have been added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
return 0
def getScanType(self, processingItemObj):
r = self.DBClient.executeAllResults("SELECT SCAN_TYPE FROM Conversion WHERE STUDY = '{0}' AND RID = '{1}' "
"AND SCAN_DATE = '{2}' AND S_IDENTIFIER = '{3}' "
"AND I_IDENTIFIER = '{4}'".format(processingItemObj.study,
processingItemObj.subject_rid,
processingItemObj.scan_date,
processingItemObj.s_identifier,
processingItemObj.i_identifier))
return r[0][0]
def processPET(self, processingItemObj, matchT1Path):
petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study,
processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
processingItemObj.s_identifier, processingItemObj.i_identifier,
self.getScanType(processingItemObj))
processedFolder = '{0}/processed'.format(processingItemObj.root_folder)
logDir = '{0}/logs'.format(processingItemObj.root_folder)
PipelineLogger.log('manager', 'info', 'PET processing starting for {0}'.format(petFileName))
try:
distutils.dir_util.mkpath(logDir)
except Exception as e:
PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e))
return 0
id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
paramStrd = ast.literal_eval(processingItemObj.parameters)
paramStrt = ' '.join(['[\"{0}\"]=\"{1}\"'.format(k, v) for k,v in paramStrd.items()])
paramStr = '({0})'.format(paramStrt)
petCMD = "source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/ADNI_AV1451/ADNI_V1_AV1451_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(id, petFileName, processedFolder, matchT1Path, processingItemObj.manual_xfm, logDir, paramStr,socket.gethostname(), 50500)
try:
processedFolder_del = '{0}/processed_del'.format(processingItemObj.root_folder)
os.rename(processedFolder, processedFolder_del)
shutil.rmtree(processedFolder_del)
except Exception as e:
PipelineLogger.log('manager', 'error', 'Error in deleting old processing folder. \n {0}'.format(e))
try:
distutils.dir_util.mkpath(processedFolder)
except Exception as e:
PipelineLogger.log('manager', 'error', 'Error in creating processing folder. \n {0}'.format(e))
return 0
        ### This section is new for ADNI pre-processing - per-scanner-type blurring. Only required if
        ### the images are acquired from different scanners and need to be brought to the same PSF.
blur_x, blur_y, blur_z = self.PETHelper.getBlurringParams(processingItemObj)
### End pre processing.
PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD))
p = subprocess.Popen(petCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash')
out, err = p.communicate()
PipelineLogger.log('manager', 'debug', 'Process Log Output : \n{0}'.format(out))
PipelineLogger.log('manager', 'debug', 'Process Log Err : \n{0}'.format(err))
QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'av1451')
return 1 | processingItemObj = ProcessingItemObj(processingItem)
matching_t1 = ADNI_T1_Helper().getMatchingT1(processingItemObj) |
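The blurring comment in processPET above refers to PSF matching: scans acquired on different scanners are smoothed to a common effective resolution before analysis (note that in this excerpt blur_x/blur_y/blur_z are fetched but never appended to petCMD). A hedged sketch of how such per-axis blur values are typically derived, assuming Gaussian PSFs (whose FWHMs add in quadrature) and an illustrative 8 mm target chosen here as an assumption:

import math

def blur_to_target_fwhm(native_fwhm_mm, target_fwhm_mm=8.0):
    # Additional Gaussian FWHM needed so that
    # native^2 + blur^2 == target^2.
    if native_fwhm_mm >= target_fwhm_mm:
        return 0.0  # already at (or beyond) the target resolution
    return math.sqrt(target_fwhm_mm ** 2 - native_fwhm_mm ** 2)

# e.g. a scanner with a 5 mm native PSF needs ~6.24 mm of extra smoothing
blur_x = blur_y = blur_z = blur_to_target_fwhm(5.0)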
go1_17_crypto_ecdsa.go | // Code generated by 'yaegi extract crypto/ecdsa'. DO NOT EDIT.
//go:build go1.17
// +build go1.17
package stdlib
import (
"crypto/ecdsa"
"reflect"
)
func | () {
Symbols["crypto/ecdsa/ecdsa"] = map[string]reflect.Value{
// function, constant and variable definitions
"GenerateKey": reflect.ValueOf(ecdsa.GenerateKey),
"Sign": reflect.ValueOf(ecdsa.Sign),
"SignASN1": reflect.ValueOf(ecdsa.SignASN1),
"Verify": reflect.ValueOf(ecdsa.Verify),
"VerifyASN1": reflect.ValueOf(ecdsa.VerifyASN1),
// type definitions
"PrivateKey": reflect.ValueOf((*ecdsa.PrivateKey)(nil)),
"PublicKey": reflect.ValueOf((*ecdsa.PublicKey)(nil)),
}
}
| init |
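The generated init above registers crypto/ecdsa's exported functions and types as reflect.Values so the yaegi interpreter can resolve them by name at run time. As a language-neutral illustration of the same idea (a hypothetical Python analogue, unrelated to yaegi's own extract tooling), a symbol table is just a generated mapping from exported names to live objects:

import inspect
import hashlib

def extract_symbols(module):
    # Map a module's public callables and classes by name,
    # analogous to the reflect.ValueOf entries above.
    return {name: obj for name, obj in vars(module).items()
            if not name.startswith('_')
            and (inspect.isroutine(obj) or inspect.isclass(obj))}

Symbols = {'hashlib/hashlib': extract_symbols(hashlib)}
assert 'sha256' in Symbols['hashlib/hashlib']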
process.rs | use crate::ds::key_node::KeyNode;
use crate::ds::mismatch::Mismatch;
use serde_json::Map;
use serde_json::Value;
use std::collections::HashMap;
use std::collections::HashSet;
pub fn match_json(value1: &Value, value2: &Value) -> Mismatch {
match (value1, value2) {
(Value::Object(a), Value::Object(b)) => {
let (left_only_keys, right_only_keys, intersection_keys) = intersect_maps(&a, &b);
let mut unequal_keys = KeyNode::Nil;
let mut left_only_keys = get_map_of_keys(left_only_keys);
let mut right_only_keys = get_map_of_keys(right_only_keys);
if let Some(intersection_keys) = intersection_keys {
for key in intersection_keys {
let Mismatch {
left_only_keys: l,
right_only_keys: r,
keys_in_both: u,
} = match_json(&a.get(&key).unwrap(), &b.get(&key).unwrap());
left_only_keys = insert_child_key_map(left_only_keys, l, &key);
right_only_keys = insert_child_key_map(right_only_keys, r, &key);
unequal_keys = insert_child_key_map(unequal_keys, u, &key);
}
}
Mismatch::new(left_only_keys, right_only_keys, unequal_keys)
}
(a, b) => {
if a == b {
Mismatch::new(KeyNode::Nil, KeyNode::Nil, KeyNode::Nil)
} else {
Mismatch::new(
KeyNode::Nil,
KeyNode::Nil,
KeyNode::Value(a.clone(), b.clone()),
)
}
}
}
}
fn | (set: Option<HashSet<String>>) -> KeyNode {
if let Some(set) = set {
KeyNode::Node(
set.iter()
.map(|key| (String::from(key), KeyNode::Nil))
.collect(),
)
} else {
KeyNode::Nil
}
}
fn insert_child_key_map(parent: KeyNode, child: KeyNode, key: &String) -> KeyNode {
if child == KeyNode::Nil {
return parent;
}
if let KeyNode::Node(mut map) = parent {
map.insert(String::from(key), child);
KeyNode::Node(map) // This is weird! I just wanted to return back `parent` here
} else if let KeyNode::Nil = parent {
let mut map = HashMap::new();
map.insert(String::from(key), child);
KeyNode::Node(map)
} else {
parent // TODO Trying to insert child node in a Value variant : Should not happen => Throw an error instead.
}
}
fn intersect_maps(
a: &Map<String, Value>,
b: &Map<String, Value>,
) -> (
Option<HashSet<String>>,
Option<HashSet<String>>,
Option<HashSet<String>>,
) {
let mut intersection = HashSet::new();
let mut left = HashSet::new();
let mut right = HashSet::new();
for a_key in a.keys() {
if b.contains_key(a_key) {
intersection.insert(String::from(a_key));
} else {
left.insert(String::from(a_key));
}
}
for b_key in b.keys() {
if !a.contains_key(b_key) {
right.insert(String::from(b_key));
}
}
let left = if left.len() == 0 { None } else { Some(left) };
let right = if right.len() == 0 { None } else { Some(right) };
let intersection = if intersection.len() == 0 {
None
} else {
Some(intersection)
};
(left, right, intersection)
}
| get_map_of_keys |
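match_json above recursively splits two JSON documents into keys only on the left, keys only on the right, and shared keys whose values differ, with intersect_maps doing the per-level three-way partition. A compact Python sketch of the same algorithm (assuming plain dicts/values as produced by json.loads; unlike the Rust KeyNode version, it keeps the differing leaf values rather than just key paths):

def match_json(a, b):
    # Returns (left_only, right_only, unequal) for two parsed JSON values.
    if isinstance(a, dict) and isinstance(b, dict):
        left_only = {k: a[k] for k in a.keys() - b.keys()}
        right_only = {k: b[k] for k in b.keys() - a.keys()}
        unequal = {}
        for k in a.keys() & b.keys():
            l, r, u = match_json(a[k], b[k])
            if l: left_only[k] = l
            if r: right_only[k] = r
            if u: unequal[k] = u
        return left_only, right_only, unequal
    # Leaves: equal means no entry; different means record the pair,
    # like KeyNode::Value(a, b) in the Rust version.
    return ({}, {}, {}) if a == b else ({}, {}, (a, b))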
test_multilevel.py | # -*- coding: utf-8 -*-
# pylint: disable-msg=W0612,E1101,W0141
import datetime
import itertools
import nose
from numpy.random import randn
import numpy as np
from pandas.core.index import Index, MultiIndex
from pandas import Panel, DataFrame, Series, notnull, isnull, Timestamp
from pandas.util.testing import (assert_almost_equal,
assert_series_equal,
assert_frame_equal,
assertRaisesRegexp)
import pandas.core.common as com
import pandas.util.testing as tm
from pandas.compat import (range, lrange, StringIO, lzip, u,
product as cart_product, zip)
import pandas as pd
import pandas.index as _index
class TestMultiLevel(tm.TestCase):
_multiprocess_can_split_ = True
def setUp(self):
import warnings
warnings.filterwarnings(action='ignore', category=FutureWarning)
index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
['one', 'two', 'three']],
labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
[0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
names=['first', 'second'])
self.frame = DataFrame(np.random.randn(10, 3), index=index,
columns=Index(['A', 'B', 'C'], name='exp'))
self.single_level = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux']],
labels=[[0, 1, 2, 3]],
names=['first'])
# create test series object
arrays = [['bar', 'bar', 'baz', 'baz', 'qux', 'qux', 'foo', 'foo'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
tuples = lzip(*arrays)
index = MultiIndex.from_tuples(tuples)
s = Series(randn(8), index=index)
s[3] = np.NaN
self.series = s
tm.N = 100
self.tdf = tm.makeTimeDataFrame()
self.ymd = self.tdf.groupby([lambda x: x.year, lambda x: x.month,
lambda x: x.day]).sum()
# use Int64Index, to make sure things work
self.ymd.index.set_levels([lev.astype('i8')
for lev in self.ymd.index.levels],
inplace=True)
self.ymd.index.set_names(['year', 'month', 'day'],
inplace=True)
def test_append(self):
a, b = self.frame[:5], self.frame[5:]
result = a.append(b)
tm.assert_frame_equal(result, self.frame)
result = a['A'].append(b['A'])
tm.assert_series_equal(result, self.frame['A'])
def test_append_index(self):
tm._skip_if_no_pytz()
idx1 = Index([1.1, 1.2, 1.3])
idx2 = pd.date_range('2011-01-01', freq='D', periods=3, tz='Asia/Tokyo')
idx3 = Index(['A', 'B', 'C'])
midx_lv2 = MultiIndex.from_arrays([idx1, idx2])
midx_lv3 = MultiIndex.from_arrays([idx1, idx2, idx3])
result = idx1.append(midx_lv2)
# GH 7112
import pytz
tz = pytz.timezone('Asia/Tokyo')
expected_tuples = [(1.1, datetime.datetime(2011, 1, 1, tzinfo=tz)),
(1.2, datetime.datetime(2011, 1, 2, tzinfo=tz)),
(1.3, datetime.datetime(2011, 1, 3, tzinfo=tz))]
expected = Index([1.1, 1.2, 1.3] + expected_tuples)
self.assert_(result.equals(expected))
result = midx_lv2.append(idx1)
expected = Index(expected_tuples + [1.1, 1.2, 1.3])
self.assert_(result.equals(expected))
result = midx_lv2.append(midx_lv2)
expected = MultiIndex.from_arrays([idx1.append(idx1), idx2.append(idx2)])
self.assert_(result.equals(expected))
result = midx_lv2.append(midx_lv3)
self.assert_(result.equals(expected))
result = midx_lv3.append(midx_lv2)
expected = Index._simple_new(
np.array([(1.1, datetime.datetime(2011, 1, 1, tzinfo=tz), 'A'),
(1.2, datetime.datetime(2011, 1, 2, tzinfo=tz), 'B'),
(1.3, datetime.datetime(2011, 1, 3, tzinfo=tz), 'C')]
+ expected_tuples), None)
self.assert_(result.equals(expected))
def test_dataframe_constructor(self):
multi = DataFrame(np.random.randn(4, 4),
index=[np.array(['a', 'a', 'b', 'b']),
np.array(['x', 'y', 'x', 'y'])])
tm.assert_isinstance(multi.index, MultiIndex)
self.assertNotIsInstance(multi.columns, MultiIndex)
multi = DataFrame(np.random.randn(4, 4),
columns=[['a', 'a', 'b', 'b'],
['x', 'y', 'x', 'y']])
tm.assert_isinstance(multi.columns, MultiIndex)
def test_series_constructor(self):
multi = Series(1., index=[np.array(['a', 'a', 'b', 'b']),
np.array(['x', 'y', 'x', 'y'])])
tm.assert_isinstance(multi.index, MultiIndex)
multi = Series(1., index=[['a', 'a', 'b', 'b'],
['x', 'y', 'x', 'y']])
tm.assert_isinstance(multi.index, MultiIndex)
multi = Series(lrange(4), index=[['a', 'a', 'b', 'b'],
['x', 'y', 'x', 'y']])
tm.assert_isinstance(multi.index, MultiIndex)
def test_reindex_level(self):
# axis=0
month_sums = self.ymd.sum(level='month')
result = month_sums.reindex(self.ymd.index, level=1)
expected = self.ymd.groupby(level='month').transform(np.sum)
assert_frame_equal(result, expected)
# Series
result = month_sums['A'].reindex(self.ymd.index, level=1)
expected = self.ymd['A'].groupby(level='month').transform(np.sum)
assert_series_equal(result, expected)
# axis=1
month_sums = self.ymd.T.sum(axis=1, level='month')
result = month_sums.reindex(columns=self.ymd.index, level=1)
expected = self.ymd.groupby(level='month').transform(np.sum).T
assert_frame_equal(result, expected)
def test_binops_level(self):
def _check_op(opname):
op = getattr(DataFrame, opname)
month_sums = self.ymd.sum(level='month')
result = op(self.ymd, month_sums, level='month')
broadcasted = self.ymd.groupby(level='month').transform(np.sum)
expected = op(self.ymd, broadcasted)
assert_frame_equal(result, expected)
# Series
op = getattr(Series, opname)
result = op(self.ymd['A'], month_sums['A'], level='month')
broadcasted = self.ymd['A'].groupby(
level='month').transform(np.sum)
expected = op(self.ymd['A'], broadcasted)
assert_series_equal(result, expected)
_check_op('sub')
_check_op('add')
_check_op('mul')
_check_op('div')
def test_pickle(self):
def _test_roundtrip(frame):
unpickled = self.round_trip_pickle(frame)
assert_frame_equal(frame, unpickled)
_test_roundtrip(self.frame)
_test_roundtrip(self.frame.T)
_test_roundtrip(self.ymd)
_test_roundtrip(self.ymd.T)
def test_reindex(self):
reindexed = self.frame.ix[[('foo', 'one'), ('bar', 'one')]]
expected = self.frame.ix[[0, 3]]
assert_frame_equal(reindexed, expected)
def test_reindex_preserve_levels(self):
new_index = self.ymd.index[::10]
chunk = self.ymd.reindex(new_index)
self.assertIs(chunk.index, new_index)
chunk = self.ymd.ix[new_index]
self.assertIs(chunk.index, new_index)
ymdT = self.ymd.T
chunk = ymdT.reindex(columns=new_index)
self.assertIs(chunk.columns, new_index)
chunk = ymdT.ix[:, new_index]
self.assertIs(chunk.columns, new_index)
def test_sort_index_preserve_levels(self):
result = self.frame.sort_index()
self.assertEqual(result.index.names, self.frame.index.names)
def test_sorting_repr_8017(self):
np.random.seed(0)
data = np.random.randn(3,4)
for gen, extra in [([1.,3.,2.,5.],4.),
([1,3,2,5],4),
([Timestamp('20130101'),Timestamp('20130103'),Timestamp('20130102'),Timestamp('20130105')],Timestamp('20130104')),
(['1one','3one','2one','5one'],'4one')]:
columns = MultiIndex.from_tuples([('red', i) for i in gen])
df = DataFrame(data, index=list('def'), columns=columns)
df2 = pd.concat([df,DataFrame('world',
index=list('def'),
columns=MultiIndex.from_tuples([('red', extra)]))],axis=1)
# check that the repr is good
# make sure that we have a correct sparsified repr
            # e.g. only 1 header of 'red'
self.assertEqual(str(df2).splitlines()[0].split(),['red'])
# GH 8017
# sorting fails after columns added
# construct single-dtype then sort
result = df.copy().sort_index(axis=1)
expected = df.iloc[:,[0,2,1,3]]
assert_frame_equal(result, expected)
result = df2.sort_index(axis=1)
expected = df2.iloc[:,[0,2,1,4,3]]
assert_frame_equal(result, expected)
# setitem then sort
result = df.copy()
result[('red',extra)] = 'world'
result = result.sort_index(axis=1)
assert_frame_equal(result, expected)
def test_repr_to_string(self):
repr(self.frame)
repr(self.ymd)
repr(self.frame.T)
repr(self.ymd.T)
buf = StringIO()
self.frame.to_string(buf=buf)
self.ymd.to_string(buf=buf)
self.frame.T.to_string(buf=buf)
self.ymd.T.to_string(buf=buf)
def test_repr_name_coincide(self):
index = MultiIndex.from_tuples([('a', 0, 'foo'), ('b', 1, 'bar')],
names=['a', 'b', 'c'])
df = DataFrame({'value': [0, 1]}, index=index)
lines = repr(df).split('\n')
self.assertTrue(lines[2].startswith('a 0 foo'))
def test_getitem_simple(self):
df = self.frame.T
col = df['foo', 'one']
assert_almost_equal(col.values, df.values[:, 0])
self.assertRaises(KeyError, df.__getitem__, ('foo', 'four'))
self.assertRaises(KeyError, df.__getitem__, 'foobar')
def test_series_getitem(self):
s = self.ymd['A']
result = s[2000, 3]
result2 = s.ix[2000, 3]
expected = s.reindex(s.index[42:65])
expected.index = expected.index.droplevel(0).droplevel(0)
assert_series_equal(result, expected)
result = s[2000, 3, 10]
expected = s[49]
self.assertEqual(result, expected)
# fancy
result = s.ix[[(2000, 3, 10), (2000, 3, 13)]]
expected = s.reindex(s.index[49:51])
assert_series_equal(result, expected)
# key error
self.assertRaises(KeyError, s.__getitem__, (2000, 3, 4))
def test_series_getitem_corner(self):
s = self.ymd['A']
# don't segfault, GH #495
# out of bounds access
self.assertRaises(IndexError, s.__getitem__, len(self.ymd))
# generator
result = s[(x > 0 for x in s)]
expected = s[s > 0]
assert_series_equal(result, expected)
def test_series_setitem(self):
s = self.ymd['A']
s[2000, 3] = np.nan
self.assertTrue(isnull(s.values[42:65]).all())
self.assertTrue(notnull(s.values[:42]).all())
self.assertTrue(notnull(s.values[65:]).all())
s[2000, 3, 10] = np.nan
self.assertTrue(isnull(s[49]))
def test_series_slice_partial(self):
pass
def test_frame_getitem_setitem_boolean(self):
df = self.frame.T.copy()
values = df.values
result = df[df > 0]
expected = df.where(df > 0)
assert_frame_equal(result, expected)
df[df > 0] = 5
values[values > 0] = 5
assert_almost_equal(df.values, values)
df[df == 5] = 0
values[values == 5] = 0
assert_almost_equal(df.values, values)
# a df that needs alignment first
df[df[:-1] < 0] = 2
np.putmask(values[:-1], values[:-1] < 0, 2)
assert_almost_equal(df.values, values)
with assertRaisesRegexp(TypeError, 'boolean values only'):
df[df * 0] = 2
def test_frame_getitem_setitem_slice(self):
# getitem
result = self.frame.ix[:4]
expected = self.frame[:4]
assert_frame_equal(result, expected)
# setitem
cp = self.frame.copy()
cp.ix[:4] = 0
self.assertTrue((cp.values[:4] == 0).all())
self.assertTrue((cp.values[4:] != 0).all())
def test_frame_getitem_setitem_multislice(self):
levels = [['t1', 't2'], ['a', 'b', 'c']]
labels = [[0, 0, 0, 1, 1], [0, 1, 2, 0, 1]]
midx = MultiIndex(labels=labels, levels=levels, names=[None, 'id'])
df = DataFrame({'value': [1, 2, 3, 7, 8]}, index=midx)
result = df.ix[:, 'value']
assert_series_equal(df['value'], result)
result = df.ix[1:3, 'value']
assert_series_equal(df['value'][1:3], result)
result = df.ix[:, :]
assert_frame_equal(df, result)
result = df
df.ix[:, 'value'] = 10
result['value'] = 10
assert_frame_equal(df, result)
df.ix[:, :] = 10
assert_frame_equal(df, result)
def test_frame_getitem_multicolumn_empty_level(self):
f = DataFrame({'a': ['1', '2', '3'],
'b': ['2', '3', '4']})
f.columns = [['level1 item1', 'level1 item2'],
['', 'level2 item2'],
['level3 item1', 'level3 item2']]
result = f['level1 item1']
expected = DataFrame([['1'], ['2'], ['3']], index=f.index,
columns=['level3 item1'])
assert_frame_equal(result, expected)
def test_frame_setitem_multi_column(self):
df = DataFrame(randn(10, 4), columns=[['a', 'a', 'b', 'b'],
[0, 1, 0, 1]])
cp = df.copy()
cp['a'] = cp['b']
assert_frame_equal(cp['a'], cp['b'])
# set with ndarray
cp = df.copy()
cp['a'] = cp['b'].values
assert_frame_equal(cp['a'], cp['b'])
#----------------------------------------
# #1803
columns = MultiIndex.from_tuples([('A', '1'), ('A', '2'), ('B', '1')])
df = DataFrame(index=[1, 3, 5], columns=columns)
# Works, but adds a column instead of updating the two existing ones
df['A'] = 0.0 # Doesn't work
self.assertTrue((df['A'].values == 0).all())
# it broadcasts
df['B', '1'] = [1, 2, 3]
df['A'] = df['B', '1']
assert_series_equal(df['A', '1'], df['B', '1'])
assert_series_equal(df['A', '2'], df['B', '1'])
def test_getitem_tuple_plus_slice(self):
# GH #671
df = DataFrame({'a': lrange(10),
'b': lrange(10),
'c': np.random.randn(10),
'd': np.random.randn(10)})
idf = df.set_index(['a', 'b'])
result = idf.ix[(0, 0), :]
expected = idf.ix[0, 0]
expected2 = idf.xs((0, 0))
assert_series_equal(result, expected)
assert_series_equal(result, expected2)
def test_getitem_setitem_tuple_plus_columns(self):
# GH #1013
df = self.ymd[:5]
result = df.ix[(2000, 1, 6), ['A', 'B', 'C']]
expected = df.ix[2000, 1, 6][['A', 'B', 'C']]
assert_series_equal(result, expected)
def test_getitem_multilevel_index_tuple_unsorted(self):
index_columns = list("abc")
df = DataFrame([[0, 1, 0, "x"], [0, 0, 1, "y"]],
columns=index_columns + ["data"])
df = df.set_index(index_columns)
query_index = df.index[:1]
rs = df.ix[query_index, "data"]
xp = Series(['x'], index=MultiIndex.from_tuples([(0, 1, 0)]))
assert_series_equal(rs, xp)
def test_xs(self):
xs = self.frame.xs(('bar', 'two'))
xs2 = self.frame.ix[('bar', 'two')]
assert_series_equal(xs, xs2)
assert_almost_equal(xs.values, self.frame.values[4])
# GH 6574
        # missing values in returned index should be preserved
acc = [
('a','abcde',1),
('b','bbcde',2),
('y','yzcde',25),
('z','xbcde',24),
('z',None,26),
('z','zbcde',25),
('z','ybcde',26),
]
df = DataFrame(acc, columns=['a1','a2','cnt']).set_index(['a1','a2'])
expected = DataFrame({ 'cnt' : [24,26,25,26] }, index=Index(['xbcde',np.nan,'zbcde','ybcde'],name='a2'))
result = df.xs('z',level='a1')
assert_frame_equal(result, expected)
def test_xs_partial(self):
result = self.frame.xs('foo')
result2 = self.frame.ix['foo']
expected = self.frame.T['foo'].T
assert_frame_equal(result, expected)
assert_frame_equal(result, result2)
result = self.ymd.xs((2000, 4))
expected = self.ymd.ix[2000, 4]
assert_frame_equal(result, expected)
# ex from #1796
index = MultiIndex(levels=[['foo', 'bar'], ['one', 'two'], [-1, 1]],
labels=[[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 1, 1, 0, 0, 1, 1],
[0, 1, 0, 1, 0, 1, 0, 1]])
df = DataFrame(np.random.randn(8, 4), index=index,
columns=list('abcd'))
result = df.xs(['foo', 'one'])
expected = df.ix['foo', 'one']
assert_frame_equal(result, expected)
def test_xs_level(self):
result = self.frame.xs('two', level='second')
expected = self.frame[self.frame.index.get_level_values(1) == 'two']
expected.index = expected.index.droplevel(1)
assert_frame_equal(result, expected)
index = MultiIndex.from_tuples([('x', 'y', 'z'), ('a', 'b', 'c'),
('p', 'q', 'r')])
df = DataFrame(np.random.randn(3, 5), index=index)
result = df.xs('c', level=2)
expected = df[1:2]
expected.index = expected.index.droplevel(2)
assert_frame_equal(result, expected)
# this is a copy in 0.14
result = self.frame.xs('two', level='second')
# setting this will give a SettingWithCopyError
# as we are trying to write a view
def f(x):
x[:] = 10
self.assertRaises(com.SettingWithCopyError, f, result)
def test_xs_level_multiple(self):
from pandas import read_table
text = """ A B C D E
one two three four
a b 10.0032 5 -0.5109 -2.3358 -0.4645 0.05076 0.3640
a q 20 4 0.4473 1.4152 0.2834 1.00661 0.1744
x q 30 3 -0.6662 -0.5243 -0.3580 0.89145 2.5838"""
        df = read_table(StringIO(text), sep=r'\s+', engine='python')
result = df.xs(('a', 4), level=['one', 'four'])
expected = df.xs('a').xs(4, level='four')
assert_frame_equal(result, expected)
# this is a copy in 0.14
result = df.xs(('a', 4), level=['one', 'four'])
# setting this will give a SettingWithCopyError
# as we are trying to write a view
def f(x):
x[:] = 10
self.assertRaises(com.SettingWithCopyError, f, result)
# GH2107
dates = lrange(20111201, 20111205)
ids = 'abcde'
idx = MultiIndex.from_tuples([x for x in cart_product(dates, ids)])
idx.names = ['date', 'secid']
df = DataFrame(np.random.randn(len(idx), 3), idx, ['X', 'Y', 'Z'])
rs = df.xs(20111201, level='date')
xp = df.ix[20111201, :]
assert_frame_equal(rs, xp)
def test_xs_level0(self):
from pandas import read_table
text = """ A B C D E
one two three four
a b 10.0032 5 -0.5109 -2.3358 -0.4645 0.05076 0.3640
a q 20 4 0.4473 1.4152 0.2834 1.00661 0.1744
x q 30 3 -0.6662 -0.5243 -0.3580 0.89145 2.5838"""
        df = read_table(StringIO(text), sep=r'\s+', engine='python')
result = df.xs('a', level=0)
expected = df.xs('a')
self.assertEqual(len(result), 2)
assert_frame_equal(result, expected)
def test_xs_level_series(self):
s = self.frame['A']
result = s[:, 'two']
expected = self.frame.xs('two', level=1)['A']
assert_series_equal(result, expected)
s = self.ymd['A']
result = s[2000, 5]
expected = self.ymd.ix[2000, 5]['A']
assert_series_equal(result, expected)
# not implementing this for now
self.assertRaises(TypeError, s.__getitem__, (2000, slice(3, 4)))
# result = s[2000, 3:4]
# lv =s.index.get_level_values(1)
# expected = s[(lv == 3) | (lv == 4)]
# expected.index = expected.index.droplevel(0)
# assert_series_equal(result, expected)
# can do this though
def test_get_loc_single_level(self):
s = Series(np.random.randn(len(self.single_level)),
index=self.single_level)
for k in self.single_level.values:
s[k]
def test_getitem_toplevel(self):
df = self.frame.T
result = df['foo']
expected = df.reindex(columns=df.columns[:3])
expected.columns = expected.columns.droplevel(0)
assert_frame_equal(result, expected)
result = df['bar']
result2 = df.ix[:, 'bar']
expected = df.reindex(columns=df.columns[3:5])
expected.columns = expected.columns.droplevel(0)
assert_frame_equal(result, expected)
assert_frame_equal(result, result2)
def test_getitem_setitem_slice_integers(self):
index = MultiIndex(levels=[[0, 1, 2], [0, 2]],
labels=[[0, 0, 1, 1, 2, 2],
[0, 1, 0, 1, 0, 1]])
frame = DataFrame(np.random.randn(len(index), 4), index=index,
columns=['a', 'b', 'c', 'd'])
res = frame.ix[1:2]
exp = frame.reindex(frame.index[2:])
assert_frame_equal(res, exp)
frame.ix[1:2] = 7
self.assertTrue((frame.ix[1:2] == 7).values.all())
series = Series(np.random.randn(len(index)), index=index)
res = series.ix[1:2]
exp = series.reindex(series.index[2:])
assert_series_equal(res, exp)
series.ix[1:2] = 7
self.assertTrue((series.ix[1:2] == 7).values.all())
def test_getitem_int(self):
levels = [[0, 1], [0, 1, 2]]
labels = [[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]]
index = MultiIndex(levels=levels, labels=labels)
frame = DataFrame(np.random.randn(6, 2), index=index)
result = frame.ix[1]
expected = frame[-3:]
expected.index = expected.index.droplevel(0)
assert_frame_equal(result, expected)
# raises exception
self.assertRaises(KeyError, frame.ix.__getitem__, 3)
# however this will work
result = self.frame.ix[2]
expected = self.frame.xs(self.frame.index[2])
assert_series_equal(result, expected)
def test_getitem_partial(self):
ymd = self.ymd.T
result = ymd[2000, 2]
expected = ymd.reindex(columns=ymd.columns[ymd.columns.labels[1] == 1])
expected.columns = expected.columns.droplevel(0).droplevel(0)
assert_frame_equal(result, expected)
def test_getitem_slice_not_sorted(self):
df = self.frame.sortlevel(1).T
# buglet with int typechecking
result = df.ix[:, :np.int32(3)]
expected = df.reindex(columns=df.columns[:3])
assert_frame_equal(result, expected)
def test_setitem_change_dtype(self):
dft = self.frame.T
s = dft['foo', 'two']
dft['foo', 'two'] = s > s.median()
assert_series_equal(dft['foo', 'two'], s > s.median())
# tm.assert_isinstance(dft._data.blocks[1].items, MultiIndex)
reindexed = dft.reindex(columns=[('foo', 'two')])
assert_series_equal(reindexed['foo', 'two'], s > s.median())
def test_frame_setitem_ix(self):
self.frame.ix[('bar', 'two'), 'B'] = 5
self.assertEqual(self.frame.ix[('bar', 'two'), 'B'], 5)
# with integer labels
df = self.frame.copy()
df.columns = lrange(3)
df.ix[('bar', 'two'), 1] = 7
self.assertEqual(df.ix[('bar', 'two'), 1], 7)
def test_fancy_slice_partial(self):
|
def test_getitem_partial_column_select(self):
idx = MultiIndex(labels=[[0, 0, 0], [0, 1, 1], [1, 0, 1]],
levels=[['a', 'b'], ['x', 'y'], ['p', 'q']])
df = DataFrame(np.random.rand(3, 2), index=idx)
result = df.ix[('a', 'y'), :]
expected = df.ix[('a', 'y')]
assert_frame_equal(result, expected)
result = df.ix[('a', 'y'), [1, 0]]
expected = df.ix[('a', 'y')][[1, 0]]
assert_frame_equal(result, expected)
self.assertRaises(KeyError, df.ix.__getitem__,
(('a', 'foo'), slice(None, None)))
def test_sortlevel(self):
df = self.frame.copy()
df.index = np.arange(len(df))
assertRaisesRegexp(TypeError, 'hierarchical index', df.sortlevel, 0)
# axis=1
# series
a_sorted = self.frame['A'].sortlevel(0)
with assertRaisesRegexp(TypeError, 'hierarchical index'):
self.frame.reset_index()['A'].sortlevel()
# preserve names
self.assertEqual(a_sorted.index.names, self.frame.index.names)
# inplace
rs = self.frame.copy()
rs.sortlevel(0, inplace=True)
assert_frame_equal(rs, self.frame.sortlevel(0))
def test_sortlevel_large_cardinality(self):
# #2684 (int64)
index = MultiIndex.from_arrays([np.arange(4000)]*3)
df = DataFrame(np.random.randn(4000), index=index, dtype = np.int64)
# it works!
result = df.sortlevel(0)
self.assertTrue(result.index.lexsort_depth == 3)
# #2684 (int32)
index = MultiIndex.from_arrays([np.arange(4000)]*3)
df = DataFrame(np.random.randn(4000), index=index, dtype = np.int32)
# it works!
result = df.sortlevel(0)
        self.assertTrue((result.dtypes.values == df.dtypes.values).all())
self.assertTrue(result.index.lexsort_depth == 3)
def test_delevel_infer_dtype(self):
tuples = [tuple for tuple in cart_product(['foo', 'bar'],
[10, 20], [1.0, 1.1])]
index = MultiIndex.from_tuples(tuples,
names=['prm0', 'prm1', 'prm2'])
df = DataFrame(np.random.randn(8, 3), columns=['A', 'B', 'C'],
index=index)
deleveled = df.reset_index()
self.assertTrue(com.is_integer_dtype(deleveled['prm1']))
self.assertTrue(com.is_float_dtype(deleveled['prm2']))
def test_reset_index_with_drop(self):
deleveled = self.ymd.reset_index(drop=True)
self.assertEqual(len(deleveled.columns), len(self.ymd.columns))
deleveled = self.series.reset_index()
tm.assert_isinstance(deleveled, DataFrame)
self.assertEqual(len(deleveled.columns),
len(self.series.index.levels) + 1)
deleveled = self.series.reset_index(drop=True)
tm.assert_isinstance(deleveled, Series)
def test_sortlevel_by_name(self):
self.frame.index.names = ['first', 'second']
result = self.frame.sortlevel(level='second')
expected = self.frame.sortlevel(level=1)
assert_frame_equal(result, expected)
def test_sortlevel_mixed(self):
sorted_before = self.frame.sortlevel(1)
df = self.frame.copy()
df['foo'] = 'bar'
sorted_after = df.sortlevel(1)
assert_frame_equal(sorted_before, sorted_after.drop(['foo'], axis=1))
dft = self.frame.T
sorted_before = dft.sortlevel(1, axis=1)
dft['foo', 'three'] = 'bar'
sorted_after = dft.sortlevel(1, axis=1)
assert_frame_equal(sorted_before.drop([('foo', 'three')], axis=1),
sorted_after.drop([('foo', 'three')], axis=1))
def test_count_level(self):
def _check_counts(frame, axis=0):
index = frame._get_axis(axis)
for i in range(index.nlevels):
result = frame.count(axis=axis, level=i)
expected = frame.groupby(axis=axis, level=i).count(axis=axis)
expected = expected.reindex_like(result).astype('i8')
assert_frame_equal(result, expected)
self.frame.ix[1, [1, 2]] = np.nan
self.frame.ix[7, [0, 1]] = np.nan
self.ymd.ix[1, [1, 2]] = np.nan
self.ymd.ix[7, [0, 1]] = np.nan
_check_counts(self.frame)
_check_counts(self.ymd)
_check_counts(self.frame.T, axis=1)
_check_counts(self.ymd.T, axis=1)
# can't call with level on regular DataFrame
df = tm.makeTimeDataFrame()
assertRaisesRegexp(TypeError, 'hierarchical', df.count, level=0)
self.frame['D'] = 'foo'
result = self.frame.count(level=0, numeric_only=True)
assert_almost_equal(result.columns, ['A', 'B', 'C'])
def test_count_level_series(self):
index = MultiIndex(levels=[['foo', 'bar', 'baz'],
['one', 'two', 'three', 'four']],
labels=[[0, 0, 0, 2, 2],
[2, 0, 1, 1, 2]])
s = Series(np.random.randn(len(index)), index=index)
result = s.count(level=0)
expected = s.groupby(level=0).count()
assert_series_equal(result.astype('f8'),
expected.reindex(result.index).fillna(0))
result = s.count(level=1)
expected = s.groupby(level=1).count()
assert_series_equal(result.astype('f8'),
expected.reindex(result.index).fillna(0))
def test_count_level_corner(self):
s = self.frame['A'][:0]
result = s.count(level=0)
expected = Series(0, index=s.index.levels[0])
assert_series_equal(result, expected)
df = self.frame[:0]
result = df.count(level=0)
expected = DataFrame({}, index=s.index.levels[0],
columns=df.columns).fillna(0).astype(np.int64)
assert_frame_equal(result, expected)
def test_get_level_number_out_of_bounds(self):
with assertRaisesRegexp(IndexError, "Too many levels"):
self.frame.index._get_level_number(2)
with assertRaisesRegexp(IndexError, "not a valid level number"):
self.frame.index._get_level_number(-3)
def test_unstack(self):
# just check that it works for now
unstacked = self.ymd.unstack()
unstacked2 = unstacked.unstack()
# test that ints work
unstacked = self.ymd.astype(int).unstack()
# test that int32 work
unstacked = self.ymd.astype(np.int32).unstack()
def test_unstack_multiple_no_empty_columns(self):
index = MultiIndex.from_tuples([(0, 'foo', 0), (0, 'bar', 0),
(1, 'baz', 1), (1, 'qux', 1)])
s = Series(np.random.randn(4), index=index)
unstacked = s.unstack([1, 2])
expected = unstacked.dropna(axis=1, how='all')
assert_frame_equal(unstacked, expected)
def test_stack(self):
# regular roundtrip
unstacked = self.ymd.unstack()
restacked = unstacked.stack()
assert_frame_equal(restacked, self.ymd)
unlexsorted = self.ymd.sortlevel(2)
unstacked = unlexsorted.unstack(2)
restacked = unstacked.stack()
assert_frame_equal(restacked.sortlevel(0), self.ymd)
unlexsorted = unlexsorted[::-1]
unstacked = unlexsorted.unstack(1)
restacked = unstacked.stack().swaplevel(1, 2)
assert_frame_equal(restacked.sortlevel(0), self.ymd)
unlexsorted = unlexsorted.swaplevel(0, 1)
unstacked = unlexsorted.unstack(0).swaplevel(0, 1, axis=1)
restacked = unstacked.stack(0).swaplevel(1, 2)
assert_frame_equal(restacked.sortlevel(0), self.ymd)
# columns unsorted
unstacked = self.ymd.unstack()
unstacked = unstacked.sort(axis=1, ascending=False)
restacked = unstacked.stack()
assert_frame_equal(restacked, self.ymd)
# more than 2 levels in the columns
unstacked = self.ymd.unstack(1).unstack(1)
result = unstacked.stack(1)
expected = self.ymd.unstack()
assert_frame_equal(result, expected)
result = unstacked.stack(2)
expected = self.ymd.unstack(1)
assert_frame_equal(result, expected)
result = unstacked.stack(0)
expected = self.ymd.stack().unstack(1).unstack(1)
assert_frame_equal(result, expected)
# not all levels present in each echelon
unstacked = self.ymd.unstack(2).ix[:, ::3]
stacked = unstacked.stack().stack()
ymd_stacked = self.ymd.stack()
assert_series_equal(stacked, ymd_stacked.reindex(stacked.index))
# stack with negative number
result = self.ymd.unstack(0).stack(-2)
        expected = self.ymd.unstack(0).stack(0)
        assert_frame_equal(result, expected)
def test_unstack_odd_failure(self):
data = """day,time,smoker,sum,len
Fri,Dinner,No,8.25,3.
Fri,Dinner,Yes,27.03,9
Fri,Lunch,No,3.0,1
Fri,Lunch,Yes,13.68,6
Sat,Dinner,No,139.63,45
Sat,Dinner,Yes,120.77,42
Sun,Dinner,No,180.57,57
Sun,Dinner,Yes,66.82,19
Thur,Dinner,No,3.0,1
Thur,Lunch,No,117.32,44
Thur,Lunch,Yes,51.51,17"""
df = pd.read_csv(StringIO(data)).set_index(['day', 'time', 'smoker'])
# it works, #2100
result = df.unstack(2)
recons = result.stack()
assert_frame_equal(recons, df)
def test_stack_mixed_dtype(self):
df = self.frame.T
df['foo', 'four'] = 'foo'
df = df.sortlevel(1, axis=1)
stacked = df.stack()
assert_series_equal(stacked['foo'], df['foo'].stack())
self.assertEqual(stacked['bar'].dtype, np.float_)
def test_unstack_bug(self):
df = DataFrame({'state': ['naive', 'naive', 'naive',
'activ', 'activ', 'activ'],
'exp': ['a', 'b', 'b', 'b', 'a', 'a'],
'barcode': [1, 2, 3, 4, 1, 3],
'v': ['hi', 'hi', 'bye', 'bye', 'bye', 'peace'],
'extra': np.arange(6.)})
result = df.groupby(['state', 'exp', 'barcode', 'v']).apply(len)
unstacked = result.unstack()
restacked = unstacked.stack()
assert_series_equal(restacked,
result.reindex(restacked.index).astype(float))
def test_stack_unstack_preserve_names(self):
unstacked = self.frame.unstack()
self.assertEqual(unstacked.index.name, 'first')
self.assertEqual(unstacked.columns.names, ['exp', 'second'])
restacked = unstacked.stack()
self.assertEqual(restacked.index.names, self.frame.index.names)
def test_unstack_level_name(self):
result = self.frame.unstack('second')
expected = self.frame.unstack(level=1)
assert_frame_equal(result, expected)
def test_stack_level_name(self):
unstacked = self.frame.unstack('second')
result = unstacked.stack('exp')
expected = self.frame.unstack().stack(0)
assert_frame_equal(result, expected)
result = self.frame.stack('exp')
expected = self.frame.stack()
assert_series_equal(result, expected)
def test_stack_unstack_multiple(self):
unstacked = self.ymd.unstack(['year', 'month'])
expected = self.ymd.unstack('year').unstack('month')
assert_frame_equal(unstacked, expected)
self.assertEqual(unstacked.columns.names,
expected.columns.names)
# series
s = self.ymd['A']
s_unstacked = s.unstack(['year', 'month'])
assert_frame_equal(s_unstacked, expected['A'])
restacked = unstacked.stack(['year', 'month'])
restacked = restacked.swaplevel(0, 1).swaplevel(1, 2)
restacked = restacked.sortlevel(0)
assert_frame_equal(restacked, self.ymd)
self.assertEqual(restacked.index.names, self.ymd.index.names)
# GH #451
unstacked = self.ymd.unstack([1, 2])
expected = self.ymd.unstack(1).unstack(1).dropna(axis=1, how='all')
assert_frame_equal(unstacked, expected)
unstacked = self.ymd.unstack([2, 1])
expected = self.ymd.unstack(2).unstack(1).dropna(axis=1, how='all')
assert_frame_equal(unstacked, expected.ix[:, unstacked.columns])
def test_stack_names_and_numbers(self):
unstacked = self.ymd.unstack(['year', 'month'])
# Can't use mixture of names and numbers to stack
with assertRaisesRegexp(ValueError, "level should contain"):
unstacked.stack([0, 'month'])
def test_stack_multiple_out_of_bounds(self):
# nlevels == 3
unstacked = self.ymd.unstack(['year', 'month'])
with assertRaisesRegexp(IndexError, "Too many levels"):
unstacked.stack([2, 3])
with assertRaisesRegexp(IndexError, "not a valid level number"):
unstacked.stack([-4, -3])
def test_unstack_period_series(self):
# GH 4342
idx1 = pd.PeriodIndex(['2013-01', '2013-01', '2013-02', '2013-02',
'2013-03', '2013-03'], freq='M', name='period')
idx2 = Index(['A', 'B'] * 3, name='str')
value = [1, 2, 3, 4, 5, 6]
idx = MultiIndex.from_arrays([idx1, idx2])
s = Series(value, index=idx)
result1 = s.unstack()
result2 = s.unstack(level=1)
result3 = s.unstack(level=0)
e_idx = pd.PeriodIndex(['2013-01', '2013-02', '2013-03'], freq='M', name='period')
expected = DataFrame({'A': [1, 3, 5], 'B': [2, 4, 6]}, index=e_idx,
columns=['A', 'B'])
expected.columns.name = 'str'
assert_frame_equal(result1, expected)
assert_frame_equal(result2, expected)
assert_frame_equal(result3, expected.T)
idx1 = pd.PeriodIndex(['2013-01', '2013-01', '2013-02', '2013-02',
'2013-03', '2013-03'], freq='M', name='period1')
idx2 = pd.PeriodIndex(['2013-12', '2013-11', '2013-10', '2013-09',
'2013-08', '2013-07'], freq='M', name='period2')
idx = pd.MultiIndex.from_arrays([idx1, idx2])
s = Series(value, index=idx)
result1 = s.unstack()
result2 = s.unstack(level=1)
result3 = s.unstack(level=0)
e_idx = pd.PeriodIndex(['2013-01', '2013-02', '2013-03'], freq='M', name='period1')
e_cols = pd.PeriodIndex(['2013-07', '2013-08', '2013-09', '2013-10',
'2013-11', '2013-12'], freq='M', name='period2')
expected = DataFrame([[np.nan, np.nan, np.nan, np.nan, 2, 1],
[np.nan, np.nan, 4, 3, np.nan, np.nan],
[6, 5, np.nan, np.nan, np.nan, np.nan]],
index=e_idx, columns=e_cols)
assert_frame_equal(result1, expected)
assert_frame_equal(result2, expected)
assert_frame_equal(result3, expected.T)
def test_unstack_period_frame(self):
# GH 4342
idx1 = pd.PeriodIndex(['2014-01', '2014-02', '2014-02', '2014-02', '2014-01', '2014-01'],
freq='M', name='period1')
idx2 = pd.PeriodIndex(['2013-12', '2013-12', '2014-02', '2013-10', '2013-10', '2014-02'],
freq='M', name='period2')
value = {'A': [1, 2, 3, 4, 5, 6], 'B': [6, 5, 4, 3, 2, 1]}
idx = pd.MultiIndex.from_arrays([idx1, idx2])
df = pd.DataFrame(value, index=idx)
result1 = df.unstack()
result2 = df.unstack(level=1)
result3 = df.unstack(level=0)
e_1 = pd.PeriodIndex(['2014-01', '2014-02'], freq='M', name='period1')
e_2 = pd.PeriodIndex(['2013-10', '2013-12', '2014-02', '2013-10',
'2013-12', '2014-02'], freq='M', name='period2')
e_cols = pd.MultiIndex.from_arrays(['A A A B B B'.split(), e_2])
expected = DataFrame([[5, 1, 6, 2, 6, 1], [4, 2, 3, 3, 5, 4]],
index=e_1, columns=e_cols)
assert_frame_equal(result1, expected)
assert_frame_equal(result2, expected)
e_1 = pd.PeriodIndex(['2014-01', '2014-02', '2014-01',
'2014-02'], freq='M', name='period1')
e_2 = pd.PeriodIndex(['2013-10', '2013-12', '2014-02'], freq='M', name='period2')
e_cols = pd.MultiIndex.from_arrays(['A A B B'.split(), e_1])
expected = DataFrame([[5, 4, 2, 3], [1, 2, 6, 5], [6, 3, 1, 4]],
index=e_2, columns=e_cols)
assert_frame_equal(result3, expected)
def test_stack_multiple_bug(self):
""" bug when some uniques are not present in the data #3170"""
id_col = ([1] * 3) + ([2] * 3)
name = (['a'] * 3) + (['b'] * 3)
date = pd.to_datetime(['2013-01-03', '2013-01-04', '2013-01-05'] * 2)
var1 = np.random.randint(0, 100, 6)
df = DataFrame(dict(ID=id_col, NAME=name, DATE=date, VAR1=var1))
multi = df.set_index(['DATE', 'ID'])
multi.columns.name = 'Params'
unst = multi.unstack('ID')
down = unst.resample('W-THU')
rs = down.stack('ID')
xp = unst.ix[:, ['VAR1']].resample('W-THU').stack('ID')
xp.columns.name = 'Params'
assert_frame_equal(rs, xp)
def test_stack_dropna(self):
# GH #3997
df = pd.DataFrame({'A': ['a1', 'a2'],
'B': ['b1', 'b2'],
'C': [1, 1]})
df = df.set_index(['A', 'B'])
stacked = df.unstack().stack(dropna=False)
self.assertTrue(len(stacked) > len(stacked.dropna()))
stacked = df.unstack().stack(dropna=True)
assert_frame_equal(stacked, stacked.dropna())
def test_unstack_multiple_hierarchical(self):
df = DataFrame(index=[[0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 1, 1, 0, 0, 1, 1],
[0, 1, 0, 1, 0, 1, 0, 1]],
columns=[[0, 0, 1, 1], [0, 1, 0, 1]])
df.index.names = ['a', 'b', 'c']
df.columns.names = ['d', 'e']
# it works!
df.unstack(['b', 'c'])
def test_groupby_transform(self):
s = self.frame['A']
grouper = s.index.get_level_values(0)
grouped = s.groupby(grouper)
applied = grouped.apply(lambda x: x * 2)
expected = grouped.transform(lambda x: x * 2)
assert_series_equal(applied.reindex(expected.index), expected)
def test_unstack_sparse_keyspace(self):
# memory problems with naive impl #2278
# Generate Long File & Test Pivot
NUM_ROWS = 1000
df = DataFrame({'A': np.random.randint(100, size=NUM_ROWS),
'B': np.random.randint(300, size=NUM_ROWS),
'C': np.random.randint(-7, 7, size=NUM_ROWS),
'D': np.random.randint(-19, 19, size=NUM_ROWS),
'E': np.random.randint(3000, size=NUM_ROWS),
'F': np.random.randn(NUM_ROWS)})
idf = df.set_index(['A', 'B', 'C', 'D', 'E'])
# it works! is sufficient
idf.unstack('E')
def test_unstack_unobserved_keys(self):
# related to #2278 refactoring
levels = [[0, 1], [0, 1, 2, 3]]
labels = [[0, 0, 1, 1], [0, 2, 0, 2]]
index = MultiIndex(levels, labels)
df = DataFrame(np.random.randn(4, 2), index=index)
result = df.unstack()
self.assertEqual(len(result.columns), 4)
recons = result.stack()
assert_frame_equal(recons, df)
def test_groupby_corner(self):
midx = MultiIndex(levels=[['foo'], ['bar'], ['baz']],
labels=[[0], [0], [0]], names=['one', 'two', 'three'])
df = DataFrame([np.random.rand(4)], columns=['a', 'b', 'c', 'd'],
index=midx)
# should work
df.groupby(level='three')
def test_groupby_level_no_obs(self):
# #1697
midx = MultiIndex.from_tuples([('f1', 's1'), ('f1', 's2'),
('f2', 's1'), ('f2', 's2'),
('f3', 's1'), ('f3', 's2')])
df = DataFrame(
[[1, 2, 3, 4, 5, 6], [7, 8, 9, 10, 11, 12]], columns=midx)
df1 = df.select(lambda u: u[0] in ['f2', 'f3'], axis=1)
grouped = df1.groupby(axis=1, level=0)
result = grouped.sum()
self.assertTrue((result.columns == ['f2', 'f3']).all())
def test_join(self):
a = self.frame.ix[:5, ['A']]
b = self.frame.ix[2:, ['B', 'C']]
joined = a.join(b, how='outer').reindex(self.frame.index)
expected = self.frame.copy()
expected.values[np.isnan(joined.values)] = np.nan
self.assertFalse(np.isnan(joined.values).all())
assert_frame_equal(joined, expected, check_names=False) # TODO what should join do with names ?
def test_swaplevel(self):
swapped = self.frame['A'].swaplevel(0, 1)
swapped2 = self.frame['A'].swaplevel('first', 'second')
self.assertFalse(swapped.index.equals(self.frame.index))
assert_series_equal(swapped, swapped2)
back = swapped.swaplevel(0, 1)
back2 = swapped.swaplevel('second', 'first')
self.assertTrue(back.index.equals(self.frame.index))
assert_series_equal(back, back2)
ft = self.frame.T
swapped = ft.swaplevel('first', 'second', axis=1)
exp = self.frame.swaplevel('first', 'second').T
assert_frame_equal(swapped, exp)
def test_swaplevel_panel(self):
panel = Panel({'ItemA': self.frame,
'ItemB': self.frame * 2})
result = panel.swaplevel(0, 1, axis='major')
expected = panel.copy()
expected.major_axis = expected.major_axis.swaplevel(0, 1)
tm.assert_panel_equal(result, expected)
def test_reorder_levels(self):
result = self.ymd.reorder_levels(['month', 'day', 'year'])
expected = self.ymd.swaplevel(0, 1).swaplevel(1, 2)
assert_frame_equal(result, expected)
result = self.ymd['A'].reorder_levels(['month', 'day', 'year'])
expected = self.ymd['A'].swaplevel(0, 1).swaplevel(1, 2)
assert_series_equal(result, expected)
result = self.ymd.T.reorder_levels(['month', 'day', 'year'], axis=1)
expected = self.ymd.T.swaplevel(0, 1, axis=1).swaplevel(1, 2, axis=1)
assert_frame_equal(result, expected)
with assertRaisesRegexp(TypeError, 'hierarchical axis'):
self.ymd.reorder_levels([1, 2], axis=1)
with assertRaisesRegexp(IndexError, 'Too many levels'):
self.ymd.index.reorder_levels([1, 2, 3])
def test_insert_index(self):
df = self.ymd[:5].T
df[2000, 1, 10] = df[2000, 1, 7]
tm.assert_isinstance(df.columns, MultiIndex)
self.assertTrue((df[2000, 1, 10] == df[2000, 1, 7]).all())
def test_alignment(self):
x = Series(data=[1, 2, 3],
index=MultiIndex.from_tuples([("A", 1), ("A", 2), ("B", 3)]))
y = Series(data=[4, 5, 6],
index=MultiIndex.from_tuples([("Z", 1), ("Z", 2), ("B", 3)]))
res = x - y
exp_index = x.index.union(y.index)
exp = x.reindex(exp_index) - y.reindex(exp_index)
assert_series_equal(res, exp)
# hit non-monotonic code path
res = x[::-1] - y[::-1]
exp_index = x.index.union(y.index)
exp = x.reindex(exp_index) - y.reindex(exp_index)
assert_series_equal(res, exp)
def test_is_lexsorted(self):
levels = [[0, 1], [0, 1, 2]]
index = MultiIndex(levels=levels,
labels=[[0, 0, 0, 1, 1, 1],
[0, 1, 2, 0, 1, 2]])
self.assertTrue(index.is_lexsorted())
index = MultiIndex(levels=levels,
labels=[[0, 0, 0, 1, 1, 1],
[0, 1, 2, 0, 2, 1]])
self.assertFalse(index.is_lexsorted())
index = MultiIndex(levels=levels,
labels=[[0, 0, 1, 0, 1, 1],
[0, 1, 0, 2, 2, 1]])
self.assertFalse(index.is_lexsorted())
self.assertEqual(index.lexsort_depth, 0)
def test_frame_getitem_view(self):
df = self.frame.T.copy()
# this works because we are modifying the underlying array
# really a no-no
df['foo'].values[:] = 0
self.assertTrue((df['foo'].values == 0).all())
# but not if it's mixed-type
df['foo', 'four'] = 'foo'
df = df.sortlevel(0, axis=1)
        # this will work, but will raise/warn as it's a chained assignment
def f():
df['foo']['one'] = 2
return df
self.assertRaises(com.SettingWithCopyError, f)
try:
df = f()
except:
pass
self.assertTrue((df['foo', 'one'] == 0).all())
def test_frame_getitem_not_sorted(self):
df = self.frame.T
df['foo', 'four'] = 'foo'
arrays = [np.array(x) for x in zip(*df.columns._tuple_index)]
result = df['foo']
result2 = df.ix[:, 'foo']
expected = df.reindex(columns=df.columns[arrays[0] == 'foo'])
expected.columns = expected.columns.droplevel(0)
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
df = df.T
result = df.xs('foo')
result2 = df.ix['foo']
expected = df.reindex(df.index[arrays[0] == 'foo'])
expected.index = expected.index.droplevel(0)
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
def test_series_getitem_not_sorted(self):
arrays = [['bar', 'bar', 'baz', 'baz', 'qux', 'qux', 'foo', 'foo'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
tuples = lzip(*arrays)
index = MultiIndex.from_tuples(tuples)
s = Series(randn(8), index=index)
arrays = [np.array(x) for x in zip(*index._tuple_index)]
result = s['qux']
result2 = s.ix['qux']
expected = s[arrays[0] == 'qux']
expected.index = expected.index.droplevel(0)
assert_series_equal(result, expected)
assert_series_equal(result2, expected)
def test_count(self):
frame = self.frame.copy()
frame.index.names = ['a', 'b']
result = frame.count(level='b')
expect = self.frame.count(level=1)
assert_frame_equal(result, expect, check_names=False)
result = frame.count(level='a')
expect = self.frame.count(level=0)
assert_frame_equal(result, expect, check_names=False)
series = self.series.copy()
series.index.names = ['a', 'b']
result = series.count(level='b')
expect = self.series.count(level=1)
assert_series_equal(result, expect)
result = series.count(level='a')
expect = self.series.count(level=0)
assert_series_equal(result, expect)
self.assertRaises(KeyError, series.count, 'x')
self.assertRaises(KeyError, frame.count, level='x')
AGG_FUNCTIONS = ['sum', 'prod', 'min', 'max', 'median', 'mean', 'skew',
'mad', 'std', 'var', 'sem']
def test_series_group_min_max(self):
for op, level, skipna in cart_product(self.AGG_FUNCTIONS,
lrange(2),
[False, True]):
grouped = self.series.groupby(level=level)
aggf = lambda x: getattr(x, op)(skipna=skipna)
# skipna=True
leftside = grouped.agg(aggf)
rightside = getattr(self.series, op)(level=level, skipna=skipna)
assert_series_equal(leftside, rightside)
def test_frame_group_ops(self):
self.frame.ix[1, [1, 2]] = np.nan
self.frame.ix[7, [0, 1]] = np.nan
for op, level, axis, skipna in cart_product(self.AGG_FUNCTIONS,
lrange(2), lrange(2),
[False, True]):
if axis == 0:
frame = self.frame
else:
frame = self.frame.T
grouped = frame.groupby(level=level, axis=axis)
pieces = []
def aggf(x):
pieces.append(x)
return getattr(x, op)(skipna=skipna, axis=axis)
leftside = grouped.agg(aggf)
rightside = getattr(frame, op)(level=level, axis=axis,
skipna=skipna)
# for good measure, groupby detail
level_index = frame._get_axis(axis).levels[level]
self.assertTrue(leftside._get_axis(axis).equals(level_index))
self.assertTrue(rightside._get_axis(axis).equals(level_index))
assert_frame_equal(leftside, rightside)
def test_stat_op_corner(self):
obj = Series([10.0], index=MultiIndex.from_tuples([(2, 3)]))
result = obj.sum(level=0)
expected = Series([10.0], index=[2])
assert_series_equal(result, expected)
def test_frame_any_all_group(self):
df = DataFrame(
{'data': [False, False, True, False, True, False, True]},
index=[
['one', 'one', 'two', 'one', 'two', 'two', 'two'],
[0, 1, 0, 2, 1, 2, 3]])
result = df.any(level=0)
ex = DataFrame({'data': [False, True]}, index=['one', 'two'])
assert_frame_equal(result, ex)
result = df.all(level=0)
ex = DataFrame({'data': [False, False]}, index=['one', 'two'])
assert_frame_equal(result, ex)
def test_std_var_pass_ddof(self):
index = MultiIndex.from_arrays([np.arange(5).repeat(10),
np.tile(np.arange(10), 5)])
df = DataFrame(np.random.randn(len(index), 5), index=index)
for meth in ['var', 'std']:
ddof = 4
alt = lambda x: getattr(x, meth)(ddof=ddof)
result = getattr(df[0], meth)(level=0, ddof=ddof)
expected = df[0].groupby(level=0).agg(alt)
assert_series_equal(result, expected)
result = getattr(df, meth)(level=0, ddof=ddof)
expected = df.groupby(level=0).agg(alt)
assert_frame_equal(result, expected)
def test_frame_series_agg_multiple_levels(self):
result = self.ymd.sum(level=['year', 'month'])
expected = self.ymd.groupby(level=['year', 'month']).sum()
assert_frame_equal(result, expected)
result = self.ymd['A'].sum(level=['year', 'month'])
expected = self.ymd['A'].groupby(level=['year', 'month']).sum()
assert_series_equal(result, expected)
def test_groupby_multilevel(self):
result = self.ymd.groupby(level=[0, 1]).mean()
k1 = self.ymd.index.get_level_values(0)
k2 = self.ymd.index.get_level_values(1)
expected = self.ymd.groupby([k1, k2]).mean()
assert_frame_equal(result, expected, check_names=False) # TODO groupby with level_values drops names
self.assertEqual(result.index.names, self.ymd.index.names[:2])
result2 = self.ymd.groupby(level=self.ymd.index.names[:2]).mean()
assert_frame_equal(result, result2)
def test_groupby_multilevel_with_transform(self):
pass
def test_multilevel_consolidate(self):
index = MultiIndex.from_tuples([('foo', 'one'), ('foo', 'two'),
('bar', 'one'), ('bar', 'two')])
df = DataFrame(np.random.randn(4, 4), index=index, columns=index)
df['Totals', ''] = df.sum(1)
df = df.consolidate()
def test_ix_preserve_names(self):
result = self.ymd.ix[2000]
result2 = self.ymd['A'].ix[2000]
self.assertEqual(result.index.names, self.ymd.index.names[1:])
self.assertEqual(result2.index.names, self.ymd.index.names[1:])
result = self.ymd.ix[2000, 2]
result2 = self.ymd['A'].ix[2000, 2]
self.assertEqual(result.index.name, self.ymd.index.names[2])
self.assertEqual(result2.index.name, self.ymd.index.names[2])
def test_partial_set(self):
# GH #397
df = self.ymd.copy()
exp = self.ymd.copy()
df.ix[2000, 4] = 0
exp.ix[2000, 4].values[:] = 0
assert_frame_equal(df, exp)
df['A'].ix[2000, 4] = 1
exp['A'].ix[2000, 4].values[:] = 1
assert_frame_equal(df, exp)
df.ix[2000] = 5
exp.ix[2000].values[:] = 5
assert_frame_equal(df, exp)
# this works...for now
df['A'].ix[14] = 5
self.assertEqual(df['A'][14], 5)
def test_unstack_preserve_types(self):
# GH #403
self.ymd['E'] = 'foo'
self.ymd['F'] = 2
unstacked = self.ymd.unstack('month')
self.assertEqual(unstacked['A', 1].dtype, np.float64)
self.assertEqual(unstacked['E', 1].dtype, np.object_)
self.assertEqual(unstacked['F', 1].dtype, np.float64)
def test_unstack_group_index_overflow(self):
labels = np.tile(np.arange(500), 2)
level = np.arange(500)
index = MultiIndex(levels=[level] * 8 + [[0, 1]],
labels=[labels] * 8 + [np.arange(2).repeat(500)])
s = Series(np.arange(1000), index=index)
result = s.unstack()
self.assertEqual(result.shape, (500, 2))
# test roundtrip
stacked = result.stack()
assert_series_equal(s,
stacked.reindex(s.index))
# put it at beginning
index = MultiIndex(levels=[[0, 1]] + [level] * 8,
labels=[np.arange(2).repeat(500)] + [labels] * 8)
s = Series(np.arange(1000), index=index)
result = s.unstack(0)
self.assertEqual(result.shape, (500, 2))
# put it in middle
index = MultiIndex(levels=[level] * 4 + [[0, 1]] + [level] * 4,
labels=([labels] * 4 + [np.arange(2).repeat(500)]
+ [labels] * 4))
s = Series(np.arange(1000), index=index)
result = s.unstack(4)
self.assertEqual(result.shape, (500, 2))
def test_getitem_lowerdim_corner(self):
self.assertRaises(KeyError, self.frame.ix.__getitem__,
(('bar', 'three'), 'B'))
# in theory should be inserting in a sorted space????
self.frame.ix[('bar','three'),'B'] = 0
self.assertEqual(self.frame.sortlevel().ix[('bar','three'),'B'], 0)
#----------------------------------------------------------------------
# AMBIGUOUS CASES!
def test_partial_ix_missing(self):
raise nose.SkipTest("skipping for now")
result = self.ymd.ix[2000, 0]
expected = self.ymd.ix[2000]['A']
assert_series_equal(result, expected)
# need to put in some work here
# self.ymd.ix[2000, 0] = 0
# self.assertTrue((self.ymd.ix[2000]['A'] == 0).all())
# Pretty sure the second (and maybe even the first) is already wrong.
self.assertRaises(Exception, self.ymd.ix.__getitem__, (2000, 6))
self.assertRaises(Exception, self.ymd.ix.__getitem__, (2000, 6), 0)
#----------------------------------------------------------------------
def test_to_html(self):
self.ymd.columns.name = 'foo'
self.ymd.to_html()
self.ymd.T.to_html()
def test_level_with_tuples(self):
index = MultiIndex(levels=[[('foo', 'bar', 0), ('foo', 'baz', 0),
('foo', 'qux', 0)],
[0, 1]],
labels=[[0, 0, 1, 1, 2, 2], [0, 1, 0, 1, 0, 1]])
series = Series(np.random.randn(6), index=index)
frame = DataFrame(np.random.randn(6, 4), index=index)
result = series[('foo', 'bar', 0)]
result2 = series.ix[('foo', 'bar', 0)]
expected = series[:2]
expected.index = expected.index.droplevel(0)
assert_series_equal(result, expected)
assert_series_equal(result2, expected)
self.assertRaises(KeyError, series.__getitem__, (('foo', 'bar', 0), 2))
result = frame.ix[('foo', 'bar', 0)]
result2 = frame.xs(('foo', 'bar', 0))
expected = frame[:2]
expected.index = expected.index.droplevel(0)
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
index = MultiIndex(levels=[[('foo', 'bar'), ('foo', 'baz'),
('foo', 'qux')],
[0, 1]],
labels=[[0, 0, 1, 1, 2, 2], [0, 1, 0, 1, 0, 1]])
series = Series(np.random.randn(6), index=index)
frame = DataFrame(np.random.randn(6, 4), index=index)
result = series[('foo', 'bar')]
result2 = series.ix[('foo', 'bar')]
expected = series[:2]
expected.index = expected.index.droplevel(0)
assert_series_equal(result, expected)
assert_series_equal(result2, expected)
result = frame.ix[('foo', 'bar')]
result2 = frame.xs(('foo', 'bar'))
expected = frame[:2]
expected.index = expected.index.droplevel(0)
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
def test_int_series_slicing(self):
s = self.ymd['A']
result = s[5:]
expected = s.reindex(s.index[5:])
assert_series_equal(result, expected)
exp = self.ymd['A'].copy()
s[5:] = 0
exp.values[5:] = 0
self.assert_numpy_array_equal(s.values, exp.values)
result = self.ymd[5:]
expected = self.ymd.reindex(s.index[5:])
assert_frame_equal(result, expected)
def test_mixed_depth_get(self):
arrays = [['a', 'top', 'top', 'routine1', 'routine1', 'routine2'],
['', 'OD', 'OD', 'result1', 'result2', 'result1'],
['', 'wx', 'wy', '', '', '']]
tuples = sorted(zip(*arrays))
index = MultiIndex.from_tuples(tuples)
df = DataFrame(randn(4, 6), columns=index)
result = df['a']
expected = df['a', '', '']
assert_series_equal(result, expected)
self.assertEqual(result.name, 'a')
result = df['routine1', 'result1']
expected = df['routine1', 'result1', '']
assert_series_equal(result, expected)
self.assertEqual(result.name, ('routine1', 'result1'))
def test_mixed_depth_insert(self):
arrays = [['a', 'top', 'top', 'routine1', 'routine1', 'routine2'],
['', 'OD', 'OD', 'result1', 'result2', 'result1'],
['', 'wx', 'wy', '', '', '']]
tuples = sorted(zip(*arrays))
index = MultiIndex.from_tuples(tuples)
df = DataFrame(randn(4, 6), columns=index)
result = df.copy()
expected = df.copy()
result['b'] = [1, 2, 3, 4]
expected['b', '', ''] = [1, 2, 3, 4]
assert_frame_equal(result, expected)
def test_mixed_depth_drop(self):
arrays = [['a', 'top', 'top', 'routine1', 'routine1', 'routine2'],
['', 'OD', 'OD', 'result1', 'result2', 'result1'],
['', 'wx', 'wy', '', '', '']]
tuples = sorted(zip(*arrays))
index = MultiIndex.from_tuples(tuples)
df = DataFrame(randn(4, 6), columns=index)
result = df.drop('a', axis=1)
expected = df.drop([('a', '', '')], axis=1)
assert_frame_equal(expected, result)
result = df.drop(['top'], axis=1)
expected = df.drop([('top', 'OD', 'wx')], axis=1)
expected = expected.drop([('top', 'OD', 'wy')], axis=1)
assert_frame_equal(expected, result)
result = df.drop(('top', 'OD', 'wx'), axis=1)
expected = df.drop([('top', 'OD', 'wx')], axis=1)
assert_frame_equal(expected, result)
# NB: the next two assignments are dead code; `expected` is rebuilt before
# the following assert
expected = df.drop([('top', 'OD', 'wy')], axis=1)
expected = df.drop('top', axis=1)
result = df.drop('result1', level=1, axis=1)
expected = df.drop([('routine1', 'result1', ''),
('routine2', 'result1', '')], axis=1)
assert_frame_equal(expected, result)
def test_drop_nonunique(self):
df = DataFrame([["x-a", "x", "a", 1.5], ["x-a", "x", "a", 1.2],
["z-c", "z", "c", 3.1], ["x-a", "x", "a", 4.1],
["x-b", "x", "b", 5.1], ["x-b", "x", "b", 4.1],
["x-b", "x", "b", 2.2],
["y-a", "y", "a", 1.2], ["z-b", "z", "b", 2.1]],
columns=["var1", "var2", "var3", "var4"])
grp_size = df.groupby("var1").size()
drop_idx = grp_size.ix[grp_size == 1]
idf = df.set_index(["var1", "var2", "var3"])
# it works! #2101
result = idf.drop(drop_idx.index, level=0).reset_index()
expected = df[-df.var1.isin(drop_idx.index)]
result.index = expected.index
assert_frame_equal(result, expected)
def test_mixed_depth_pop(self):
arrays = [['a', 'top', 'top', 'routine1', 'routine1', 'routine2'],
['', 'OD', 'OD', 'result1', 'result2', 'result1'],
['', 'wx', 'wy', '', '', '']]
tuples = sorted(zip(*arrays))
index = MultiIndex.from_tuples(tuples)
df = DataFrame(randn(4, 6), columns=index)
df1 = df.copy()
df2 = df.copy()
result = df1.pop('a')
expected = df2.pop(('a', '', ''))
assert_series_equal(expected, result)
assert_frame_equal(df1, df2)
self.assertEqual(result.name, 'a')
expected = df1['top']
df1 = df1.drop(['top'], axis=1)
result = df2.pop('top')
assert_frame_equal(expected, result)
assert_frame_equal(df1, df2)
def test_reindex_level_partial_selection(self):
result = self.frame.reindex(['foo', 'qux'], level=0)
expected = self.frame.ix[[0, 1, 2, 7, 8, 9]]
assert_frame_equal(result, expected)
result = self.frame.T.reindex_axis(['foo', 'qux'], axis=1, level=0)
assert_frame_equal(result, expected.T)
result = self.frame.ix[['foo', 'qux']]
assert_frame_equal(result, expected)
result = self.frame['A'].ix[['foo', 'qux']]
assert_series_equal(result, expected['A'])
result = self.frame.T.ix[:, ['foo', 'qux']]
assert_frame_equal(result, expected.T)
def test_setitem_multiple_partial(self):
expected = self.frame.copy()
result = self.frame.copy()
result.ix[['foo', 'bar']] = 0
expected.ix['foo'] = 0
expected.ix['bar'] = 0
assert_frame_equal(result, expected)
expected = self.frame.copy()
result = self.frame.copy()
result.ix['foo':'bar'] = 0
expected.ix['foo'] = 0
expected.ix['bar'] = 0
assert_frame_equal(result, expected)
expected = self.frame['A'].copy()
result = self.frame['A'].copy()
result.ix[['foo', 'bar']] = 0
expected.ix['foo'] = 0
expected.ix['bar'] = 0
assert_series_equal(result, expected)
expected = self.frame['A'].copy()
result = self.frame['A'].copy()
result.ix['foo':'bar'] = 0
expected.ix['foo'] = 0
expected.ix['bar'] = 0
assert_series_equal(result, expected)
def test_drop_level(self):
result = self.frame.drop(['bar', 'qux'], level='first')
expected = self.frame.ix[[0, 1, 2, 5, 6]]
assert_frame_equal(result, expected)
result = self.frame.drop(['two'], level='second')
expected = self.frame.ix[[0, 2, 3, 6, 7, 9]]
assert_frame_equal(result, expected)
result = self.frame.T.drop(['bar', 'qux'], axis=1, level='first')
expected = self.frame.ix[[0, 1, 2, 5, 6]].T
assert_frame_equal(result, expected)
result = self.frame.T.drop(['two'], axis=1, level='second')
expected = self.frame.ix[[0, 2, 3, 6, 7, 9]].T
assert_frame_equal(result, expected)
def test_drop_preserve_names(self):
index = MultiIndex.from_arrays([[0, 0, 0, 1, 1, 1],
[1, 2, 3, 1, 2, 3]],
names=['one', 'two'])
df = DataFrame(np.random.randn(6, 3), index=index)
result = df.drop([(0, 2)])
self.assertEqual(result.index.names, ('one', 'two'))
def test_unicode_repr_issues(self):
levels = [Index([u('a/\u03c3'), u('b/\u03c3'), u('c/\u03c3')]),
Index([0, 1])]
labels = [np.arange(3).repeat(2), np.tile(np.arange(2), 3)]
index = MultiIndex(levels=levels, labels=labels)
repr(index.levels)
# NumPy bug
# repr(index.get_level_values(1))
def test_unicode_repr_level_names(self):
index = MultiIndex.from_tuples([(0, 0), (1, 1)],
names=[u('\u0394'), 'i1'])
s = Series(lrange(2), index=index)
df = DataFrame(np.random.randn(2, 4), index=index)
repr(s)
repr(df)
def test_dataframe_insert_column_all_na(self):
# GH #1534
mix = MultiIndex.from_tuples(
[('1a', '2a'), ('1a', '2b'), ('1a', '2c')])
df = DataFrame([[1, 2], [3, 4], [5, 6]], index=mix)
s = Series({(1, 1): 1, (1, 2): 2})
df['new'] = s
self.assertTrue(df['new'].isnull().all())
def test_join_segfault(self):
# 1532
df1 = DataFrame({'a': [1, 1], 'b': [1, 2], 'x': [1, 2]})
df2 = DataFrame({'a': [2, 2], 'b': [1, 2], 'y': [1, 2]})
df1 = df1.set_index(['a', 'b'])
df2 = df2.set_index(['a', 'b'])
# it works!
for how in ['left', 'right', 'outer']:
df1.join(df2, how=how)
def test_set_column_scalar_with_ix(self):
subset = self.frame.index[[1, 4, 5]]
self.frame.ix[subset] = 99
self.assertTrue((self.frame.ix[subset].values == 99).all())
col = self.frame['B']
col[subset] = 97
self.assertTrue((self.frame.ix[subset, 'B'] == 97).all())
def test_frame_dict_constructor_empty_series(self):
s1 = Series([1, 2, 3, 4], index=MultiIndex.from_tuples([(1, 2), (1, 3),
(2, 2), (2, 4)]))
s2 = Series([1, 2, 3, 4],
index=MultiIndex.from_tuples([(1, 2), (1, 3), (3, 2), (3, 4)]))
s3 = Series()
# it works!
df = DataFrame({'foo': s1, 'bar': s2, 'baz': s3})
df = DataFrame.from_dict({'foo': s1, 'baz': s3, 'bar': s2})
def test_indexing_ambiguity_bug_1678(self):
columns = MultiIndex.from_tuples([('Ohio', 'Green'), ('Ohio', 'Red'),
('Colorado', 'Green')])
index = MultiIndex.from_tuples(
[('a', 1), ('a', 2), ('b', 1), ('b', 2)])
frame = DataFrame(np.arange(12).reshape((4, 3)), index=index,
columns=columns)
result = frame.ix[:, 1]
exp = frame.icol(1)
tm.assert_isinstance(result, Series)
assert_series_equal(result, exp)
def test_nonunique_assignment_1750(self):
df = DataFrame([[1, 1, "x", "X"], [1, 1, "y", "Y"], [1, 2, "z", "Z"]],
columns=list("ABCD"))
df = df.set_index(['A', 'B'])
ix = MultiIndex.from_tuples([(1, 1)])
df.ix[ix, "C"] = '_'
self.assertTrue((df.xs((1, 1))['C'] == '_').all())
def test_indexing_over_hashtable_size_cutoff(self):
n = 10000
old_cutoff = _index._SIZE_CUTOFF
_index._SIZE_CUTOFF = 20000
s = Series(np.arange(n),
MultiIndex.from_arrays((["a"] * n, np.arange(n))))
# hai it works!
self.assertEqual(s[("a", 5)], 5)
self.assertEqual(s[("a", 6)], 6)
self.assertEqual(s[("a", 7)], 7)
_index._SIZE_CUTOFF = old_cutoff
def test_multiindex_na_repr(self):
# only an issue with long columns
from numpy import nan
df3 = DataFrame({
'A' * 30: {('A', 'A0006000', 'nuit'): 'A0006000'},
'B' * 30: {('A', 'A0006000', 'nuit'): nan},
'C' * 30: {('A', 'A0006000', 'nuit'): nan},
'D' * 30: {('A', 'A0006000', 'nuit'): nan},
'E' * 30: {('A', 'A0006000', 'nuit'): 'A'},
'F' * 30: {('A', 'A0006000', 'nuit'): nan},
})
idf = df3.set_index(['A' * 30, 'C' * 30])
repr(idf)
def test_assign_index_sequences(self):
# #2200
df = DataFrame({"a": [1, 2, 3],
"b": [4, 5, 6],
"c": [7, 8, 9]}).set_index(["a", "b"])
l = list(df.index)
l[0] = ("faz", "boo")
df.index = l
repr(df)
# this travels an improper code path
l[0] = ["faz", "boo"]
df.index = l
repr(df)
def test_tuples_have_na(self):
index = MultiIndex(levels=[[1, 0], [0, 1, 2, 3]],
labels=[[1, 1, 1, 1, -1, 0, 0, 0],
[0, 1, 2, 3, 0, 1, 2, 3]])
self.assertTrue(isnull(index[4][0]))
self.assertTrue(isnull(index.values[4][0]))
def test_duplicate_groupby_issues(self):
idx_tp = [('600809', '20061231'), ('600809', '20070331'),
('600809', '20070630'), ('600809', '20070331')]
dt = ['demo','demo','demo','demo']
idx = MultiIndex.from_tuples(idx_tp,names = ['STK_ID','RPT_Date'])
s = Series(dt, index=idx)
result = s.groupby(s.index).first()
self.assertEqual(len(result), 3)
def test_duplicate_mi(self):
# GH 4516
df = DataFrame([['foo','bar',1.0,1],['foo','bar',2.0,2],['bah','bam',3.0,3],
['bah','bam',4.0,4],['foo','bar',5.0,5],['bah','bam',6.0,6]],
columns=list('ABCD'))
df = df.set_index(['A','B'])
df = df.sortlevel(0)
expected = DataFrame([['foo','bar',1.0,1],['foo','bar',2.0,2],['foo','bar',5.0,5]],
columns=list('ABCD')).set_index(['A','B'])
result = df.loc[('foo','bar')]
assert_frame_equal(result,expected)
def test_duplicated_drop_duplicates(self):
# GH 4060
idx = MultiIndex.from_arrays(([1, 2, 3, 1, 2 ,3], [1, 1, 1, 1, 2, 2]))
expected = np.array([False, False, False, True, False, False], dtype=bool)
duplicated = idx.duplicated()
tm.assert_numpy_array_equal(duplicated, expected)
self.assertTrue(duplicated.dtype == bool)
expected = MultiIndex.from_arrays(([1, 2, 3, 2 ,3], [1, 1, 1, 2, 2]))
tm.assert_index_equal(idx.drop_duplicates(), expected)
expected = np.array([True, False, False, False, False, False])
duplicated = idx.duplicated(take_last=True)
tm.assert_numpy_array_equal(duplicated, expected)
self.assertTrue(duplicated.dtype == bool)
expected = MultiIndex.from_arrays(([2, 3, 1, 2 ,3], [1, 1, 1, 2, 2]))
tm.assert_index_equal(idx.drop_duplicates(take_last=True), expected)
def test_multiindex_set_index(self):
# segfault in #3308
d = {'t1': [2, 2.5, 3], 't2': [4, 5, 6]}
df = DataFrame(d)
tuples = [(0, 1), (0, 2), (1, 2)]
df['tuples'] = tuples
index = MultiIndex.from_tuples(df['tuples'])
# it works!
df.set_index(index)
def test_datetimeindex(self):
idx1 = pd.DatetimeIndex(['2013-04-01 9:00', '2013-04-02 9:00', '2013-04-03 9:00'] * 2, tz='Asia/Tokyo')
idx2 = pd.date_range('2010/01/01', periods=6, freq='M', tz='US/Eastern')
idx = MultiIndex.from_arrays([idx1, idx2])
expected1 = pd.DatetimeIndex(['2013-04-01 9:00', '2013-04-02 9:00', '2013-04-03 9:00'], tz='Asia/Tokyo')
self.assertTrue(idx.levels[0].equals(expected1))
self.assertTrue(idx.levels[1].equals(idx2))
# from datetime combos
# GH 7888
date1 = datetime.date.today()
date2 = datetime.datetime.today()
date3 = Timestamp.today()
for d1, d2 in itertools.product([date1,date2,date3],[date1,date2,date3]):
index = pd.MultiIndex.from_product([[d1],[d2]])
self.assertIsInstance(index.levels[0],pd.DatetimeIndex)
self.assertIsInstance(index.levels[1],pd.DatetimeIndex)
def test_set_index_datetime(self):
# GH 3950
df = pd.DataFrame({'label':['a', 'a', 'a', 'b', 'b', 'b'],
'datetime':['2011-07-19 07:00:00', '2011-07-19 08:00:00',
'2011-07-19 09:00:00', '2011-07-19 07:00:00',
'2011-07-19 08:00:00', '2011-07-19 09:00:00'],
'value':range(6)})
df.index = pd.to_datetime(df.pop('datetime'), utc=True)
df.index = df.index.tz_localize('UTC').tz_convert('US/Pacific')
expected = pd.DatetimeIndex(['2011-07-19 07:00:00', '2011-07-19 08:00:00', '2011-07-19 09:00:00'])
expected = expected.tz_localize('UTC').tz_convert('US/Pacific')
df = df.set_index('label', append=True)
self.assertTrue(df.index.levels[0].equals(expected))
self.assertTrue(df.index.levels[1].equals(pd.Index(['a', 'b'])))
df = df.swaplevel(0, 1)
self.assertTrue(df.index.levels[0].equals(pd.Index(['a', 'b'])))
self.assertTrue(df.index.levels[1].equals(expected))
df = DataFrame(np.random.random(6))
idx1 = pd.DatetimeIndex(['2011-07-19 07:00:00', '2011-07-19 08:00:00',
'2011-07-19 09:00:00', '2011-07-19 07:00:00',
'2011-07-19 08:00:00', '2011-07-19 09:00:00'], tz='US/Eastern')
idx2 = pd.DatetimeIndex(['2012-04-01 09:00', '2012-04-01 09:00', '2012-04-01 09:00',
'2012-04-02 09:00', '2012-04-02 09:00', '2012-04-02 09:00'],
tz='US/Eastern')
idx3 = pd.date_range('2011-01-01 09:00', periods=6, tz='Asia/Tokyo')
df = df.set_index(idx1)
df = df.set_index(idx2, append=True)
df = df.set_index(idx3, append=True)
expected1 = pd.DatetimeIndex(['2011-07-19 07:00:00', '2011-07-19 08:00:00',
'2011-07-19 09:00:00'], tz='US/Eastern')
expected2 = pd.DatetimeIndex(['2012-04-01 09:00', '2012-04-02 09:00'], tz='US/Eastern')
self.assertTrue(df.index.levels[0].equals(expected1))
self.assertTrue(df.index.levels[1].equals(expected2))
self.assertTrue(df.index.levels[2].equals(idx3))
# GH 7092
self.assertTrue(df.index.get_level_values(0).equals(idx1))
self.assertTrue(df.index.get_level_values(1).equals(idx2))
self.assertTrue(df.index.get_level_values(2).equals(idx3))
def test_reset_index_datetime(self):
# GH 3950
for tz in ['UTC', 'Asia/Tokyo', 'US/Eastern']:
idx1 = pd.date_range('1/1/2011', periods=5, freq='D', tz=tz, name='idx1')
idx2 = pd.Index(range(5), name='idx2',dtype='int64')
idx = pd.MultiIndex.from_arrays([idx1, idx2])
df = pd.DataFrame({'a': np.arange(5,dtype='int64'), 'b': ['A', 'B', 'C', 'D', 'E']}, index=idx)
expected = pd.DataFrame({'idx1': [datetime.datetime(2011, 1, 1),
datetime.datetime(2011, 1, 2),
datetime.datetime(2011, 1, 3),
datetime.datetime(2011, 1, 4),
datetime.datetime(2011, 1, 5)],
'idx2': np.arange(5,dtype='int64'),
'a': np.arange(5,dtype='int64'), 'b': ['A', 'B', 'C', 'D', 'E']},
columns=['idx1', 'idx2', 'a', 'b'])
expected['idx1'] = expected['idx1'].apply(lambda d: pd.Timestamp(d, tz=tz))
assert_frame_equal(df.reset_index(), expected)
idx3 = pd.date_range('1/1/2012', periods=5, freq='MS', tz='Europe/Paris', name='idx3')
idx = pd.MultiIndex.from_arrays([idx1, idx2, idx3])
df = pd.DataFrame({'a': np.arange(5,dtype='int64'), 'b': ['A', 'B', 'C', 'D', 'E']}, index=idx)
expected = pd.DataFrame({'idx1': [datetime.datetime(2011, 1, 1),
datetime.datetime(2011, 1, 2),
datetime.datetime(2011, 1, 3),
datetime.datetime(2011, 1, 4),
datetime.datetime(2011, 1, 5)],
'idx2': np.arange(5,dtype='int64'),
'idx3': [datetime.datetime(2012, 1, 1),
datetime.datetime(2012, 2, 1),
datetime.datetime(2012, 3, 1),
datetime.datetime(2012, 4, 1),
datetime.datetime(2012, 5, 1)],
'a': np.arange(5,dtype='int64'), 'b': ['A', 'B', 'C', 'D', 'E']},
columns=['idx1', 'idx2', 'idx3', 'a', 'b'])
expected['idx1'] = expected['idx1'].apply(lambda d: pd.Timestamp(d, tz=tz))
expected['idx3'] = expected['idx3'].apply(lambda d: pd.Timestamp(d, tz='Europe/Paris'))
assert_frame_equal(df.reset_index(), expected)
# GH 7793
idx = pd.MultiIndex.from_product([['a','b'], pd.date_range('20130101', periods=3, tz=tz)])
df = pd.DataFrame(np.arange(6,dtype='int64').reshape(6,1), columns=['a'], index=idx)
expected = pd.DataFrame({'level_0': 'a a a b b b'.split(),
'level_1': [datetime.datetime(2013, 1, 1),
datetime.datetime(2013, 1, 2),
datetime.datetime(2013, 1, 3)] * 2,
'a': np.arange(6, dtype='int64')},
columns=['level_0', 'level_1', 'a'])
expected['level_1'] = expected['level_1'].apply(lambda d: pd.Timestamp(d, offset='D', tz=tz))
assert_frame_equal(df.reset_index(), expected)
def test_reset_index_period(self):
# GH 7746
idx = pd.MultiIndex.from_product([pd.period_range('20130101', periods=3, freq='M'),
['a','b','c']], names=['month', 'feature'])
df = pd.DataFrame(np.arange(9,dtype='int64').reshape(-1,1), index=idx, columns=['a'])
expected = pd.DataFrame({'month': [pd.Period('2013-01', freq='M')] * 3 +
[pd.Period('2013-02', freq='M')] * 3 +
[pd.Period('2013-03', freq='M')] * 3,
'feature': ['a', 'b', 'c'] * 3,
'a': np.arange(9, dtype='int64')},
columns=['month', 'feature', 'a'])
assert_frame_equal(df.reset_index(), expected)
def test_set_index_period(self):
# GH 6631
df = DataFrame(np.random.random(6))
idx1 = pd.period_range('2011-01-01', periods=3, freq='M')
idx1 = idx1.append(idx1)
idx2 = pd.period_range('2013-01-01 09:00', periods=2, freq='H')
idx2 = idx2.append(idx2).append(idx2)
idx3 = pd.period_range('2005', periods=6, freq='Y')
df = df.set_index(idx1)
df = df.set_index(idx2, append=True)
df = df.set_index(idx3, append=True)
expected1 = pd.period_range('2011-01-01', periods=3, freq='M')
expected2 = pd.period_range('2013-01-01 09:00', periods=2, freq='H')
self.assertTrue(df.index.levels[0].equals(expected1))
self.assertTrue(df.index.levels[1].equals(expected2))
self.assertTrue(df.index.levels[2].equals(idx3))
self.assertTrue(df.index.get_level_values(0).equals(idx1))
self.assertTrue(df.index.get_level_values(1).equals(idx2))
self.assertTrue(df.index.get_level_values(2).equals(idx3))
if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)
| result = self.frame.ix['bar':'baz']
expected = self.frame[3:7]
assert_frame_equal(result, expected)
result = self.ymd.ix[(2000, 2):(2000, 4)]
lev = self.ymd.index.labels[1]
expected = self.ymd[(lev >= 1) & (lev <= 3)]
assert_frame_equal(result, expected) |
benchmark.rs | // Copyright 2018 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
// OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
use criterion::{criterion_group, criterion_main, Criterion};
use std::u64;
use uvint::{decode, encode};
fn | (c: &mut Criterion) {
let mut buf = [0; 10];
let len = encode::u64(u64::MAX, &mut buf).len();
c.bench_function("decode", move |b| b.iter(|| {
assert_eq!(u64::MAX, decode::u64(&buf[.. len]).unwrap().0)
}));
}
fn bench_encode(c: &mut Criterion) {
let mut buf = [0; 10];
let encoded = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 1];
c.bench_function("encode", move |b| b.iter(|| {
assert_eq!(&encoded, encode::u64(u64::MAX, &mut buf));
}));
}
#[cfg(feature = "codec")]
fn bench_codec(c: &mut Criterion) {
use bytes::{Bytes, BytesMut};
use tokio_util::codec::{Decoder, Encoder};
use uvint::codec::UviBytes;
let data = Bytes::from(vec![1; 8192]);
let mut bytes = BytesMut::with_capacity(9000);
let mut uvi_bytes = UviBytes::default();
c.bench_function("codec", move |b| b.iter(|| {
uvi_bytes.encode(data.clone(), &mut bytes).unwrap();
assert_eq!(data, uvi_bytes.decode(&mut bytes.split_to(bytes.len())).unwrap().unwrap())
}));
}
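// Note: the codec benchmark above is only compiled when the `codec` feature
// is enabled, e.g. (illustrative): cargo bench --features codec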
#[cfg(feature = "codec")]
criterion_group!(benches, bench_encode, bench_decode, bench_codec);
#[cfg(not(feature = "codec"))]
criterion_group!(benches, bench_encode, bench_decode);
criterion_main!(benches);
| bench_decode |
template.py | #Import libraries for doing image analysis
from skimage.io import imread
from skimage.transform import resize
from sklearn.ensemble import RandomForestClassifier as RF
import glob
import os
from sklearn import cross_validation
from sklearn.cross_validation import StratifiedKFold as KFold
from sklearn.metrics import classification_report
from matplotlib import pyplot as plt
from matplotlib import colors
from pylab import cm
from skimage import segmentation
from skimage.morphology import watershed
from skimage import measure
from skimage import morphology
import numpy as np
import pandas as pd
from scipy import ndimage
from skimage.feature import peak_local_max
# make graphics inline
#%matplotlib inline
import warnings
warnings.filterwarnings("ignore")
def getLargestRegion(props, labelmap, imagethres):
regionmaxprop = None
for regionprop in props:
# check to see if the region is at least 50% nonzero
if sum(imagethres[labelmap == regionprop.label])*1.0/regionprop.area < 0.50:
continue
if regionmaxprop is None:
regionmaxprop = regionprop
if regionmaxprop.filled_area < regionprop.filled_area:
regionmaxprop = regionprop
return regionmaxprop
def getMinorMajorRatio(image):
image = image.copy()
# Create the thresholded image to eliminate some of the background
imagethr = np.where(image > np.mean(image),0.,1.0)
#Dilate the image
imdilated = morphology.dilation(imagethr, np.ones((4,4)))
# Create the label list
label_list = measure.label(imdilated)
label_list = imagethr*label_list
label_list = label_list.astype(int)
region_list = measure.regionprops(label_list)
maxregion = getLargestRegion(region_list, label_list, imagethr)
# guard against cases where the segmentation fails by providing zeros
ratio = 0.0
if maxregion is not None and maxregion.major_axis_length != 0.0:
    ratio = maxregion.minor_axis_length * 1.0 / maxregion.major_axis_length
return ratio
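# Quick sanity check (illustrative only, synthetic input): an elongated dark
# bar on a light background should give a ratio well below 1.0, since the
# thresholding above marks below-mean pixels as foreground.
#   bar = np.ones((25, 25)); bar[10:15, 2:23] = 0.0
#   getMinorMajorRatio(bar)  # expected to be well under 1.0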
def multiclass_log_loss(y_true, y_pred, eps=1e-15):
|
# get the classnames from the directory structure
competition_data = 'H:\\Kaggle\\Predict_ocean_health\\'
directory_names = list(set(glob.glob(os.path.join(competition_data,"train", "*"))\
).difference(set(glob.glob(os.path.join(competition_data,"train","*.*")))))
# Rescale the images and create the combined metrics and training labels
#get the total training images
numberofImages = 0
for folder in directory_names:
for fileNameDir in os.walk(folder):
for fileName in fileNameDir[2]:
# Only read in the images
if fileName[-4:] != ".jpg":
continue
numberofImages += 1
# We'll rescale the images to be 25x25
maxPixel = 25
imageSize = maxPixel * maxPixel
num_rows = numberofImages # one row for each image in the training dataset
num_add_features = 1 # Number of additional features
num_features = imageSize + num_add_features
# X is the feature vector with one row of features per image
# consisting of the pixel values and our metric
X = np.zeros((num_rows, num_features), dtype=float)
# y is the numeric class label
y = np.zeros((num_rows))
files = []
# Generate training data
i = 0
label = 0
# List of string of class names
namesClasses = list()
print "Reading images"
# Navigate through the list of directories
for folder in directory_names:
# Append the string class name for each class
currentClass = folder.split(os.sep)[-1]  # os.sep (not os.pathsep) to get the directory name
namesClasses.append(currentClass)
for fileNameDir in os.walk(folder):
for fileName in fileNameDir[2]:
# Only read in the images
if fileName[-4:] != ".jpg":
continue
# Read in the images and create the features
nameFileImage = "{0}{1}{2}".format(fileNameDir[0], os.sep, fileName)
image = imread(nameFileImage, as_grey=True)
files.append(nameFileImage)
axisratio = getMinorMajorRatio(image)
image = resize(image, (maxPixel, maxPixel))
# Store the rescaled image pixels and the axis ratio
X[i, 0:imageSize] = np.reshape(image, (1, imageSize))
X[i, imageSize] = axisratio
# Store the classlabel
y[i] = label
i += 1
# report progress for each 5% done
report = [int((j+1)*num_rows/20.) for j in range(20)]
if i in report: print np.ceil(i *100.0 / num_rows), "% done"
label += 1
# Loop through the classes two at a time and compare their distributions of the Width/Length Ratio
#Create a DataFrame object to make subsetting the data on the class easier
df = pd.DataFrame({"class": y[:], "ratio": X[:, num_features-1]})
f = plt.figure(figsize=(30, 20))
#we suppress zeros and choose a few large classes to better highlight the distributions.
df = df.loc[df["ratio"] > 0]
minimumSize = 20
counts = df["class"].value_counts()
largeclasses = [int(x) for x in list(counts.loc[counts > minimumSize].index)]
# Loop through 40 of the classes
for j in range(0,40,2):
subfig = plt.subplot(4, 5, j/2 +1)
# Plot the normalized histograms for two classes
classind1 = largeclasses[j]
classind2 = largeclasses[j+1]
n, bins,p = plt.hist(df.loc[df["class"] == classind1]["ratio"].values,\
alpha=0.5, bins=[x*0.01 for x in range(100)], \
label=namesClasses[classind1].split(os.sep)[-1], normed=1)
n2, bins,p = plt.hist(df.loc[df["class"] == (classind2)]["ratio"].values,\
alpha=0.5, bins=bins, label=namesClasses[classind2].split(os.sep)[-1],normed=1)
subfig.set_ylim([0.,10.])
plt.legend(loc='upper right')
plt.xlabel("Width/Length Ratio")
print "Training"
# n_estimators is the number of decision trees
# max_features also known as m_try is set to the default value of the square root of the number of features
clf = RF(n_estimators=100, n_jobs=3)
scores = cross_validation.cross_val_score(clf, X, y, cv=5, n_jobs=1)
print "Accuracy of all classes"
print np.mean(scores)
kf = KFold(y, n_folds=5)
y_pred = y * 0
for train, test in kf:
X_train, X_test, y_train, y_test = X[train,:], X[test,:], y[train], y[test]
clf = RF(n_estimators=100, n_jobs=3)
clf.fit(X_train, y_train)
y_pred[test] = clf.predict(X_test)
print classification_report(y, y_pred, target_names=namesClasses)
# Get the probability predictions for computing the log-loss function
kf = KFold(y, n_folds=5)
# prediction probabilities number of samples, by number of classes
y_pred = np.zeros((len(y),len(set(y))))
for train, test in kf:
X_train, X_test, y_train, y_test = X[train,:], X[test,:], y[train], y[test]
clf = RF(n_estimators=100, n_jobs=3)
clf.fit(X_train, y_train)
y_pred[test] = clf.predict_proba(X_test)
loss = multiclass_log_loss(y, y_pred)
print loss | """Multi class version of Logarithmic Loss metric.
https://www.kaggle.com/wiki/MultiClassLogLoss
Parameters
----------
y_true : array, shape = [n_samples]
true class, integers in [0, n_classes)
y_pred : array, shape = [n_samples, n_classes]
Returns
-------
loss : float
"""
predictions = np.clip(y_pred, eps, 1 - eps)
# normalize row sums to 1
predictions /= predictions.sum(axis=1)[:, np.newaxis]
actual = np.zeros(y_pred.shape)
n_samples = actual.shape[0]
actual[np.arange(n_samples), y_true.astype(int)] = 1
vectsum = np.sum(actual * np.log(predictions))
loss = -1.0 / n_samples * vectsum
return loss |
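# Worked micro-example (illustrative): two samples, two classes, with the
# true class given probability 0.8 each time:
#   multiclass_log_loss(np.array([0, 1]),
#                       np.array([[0.8, 0.2], [0.2, 0.8]]))
# returns -mean(log(0.8)) ~= 0.223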
signer_test.go | package binance
import (
"testing"
)
func TestSuccess(t *testing.T) | {
signer := &HmacSigner{
Key: []byte("NhqPtmdSJYdKjVHjA7PZj4Mge3R5YNiP1e3UZjInClVN65XAbvqqM6A7H5fATj0j"),
}
queryString := []byte("symbol=LTCBTC&side=BUY&type=LIMIT&timeInForce=GTC&quantity=1&price=0.1&recvWindow=5000&timestamp=1499827319559")
s := signer.Sign(queryString)
if s != "c8db56825ae71d6d79447849e617115f4a920fa2acdcab2b053c4b2838bd6b71" {
t.Errorf("signer returned invalid signature: %s", s)
}
} |
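// The digest above matches the worked HMAC-SHA256 example from the Binance
// REST API docs; the same value can be reproduced outside Go (illustrative):
//
//	echo -n "symbol=LTCBTC&side=BUY&type=LIMIT&timeInForce=GTC&quantity=1&price=0.1&recvWindow=5000&timestamp=1499827319559" | \
//	  openssl dgst -sha256 -hmac "NhqPtmdSJYdKjVHjA7PZj4Mge3R5YNiP1e3UZjInClVN65XAbvqqM6A7H5fATj0j"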
|
store.go | package tracekv
import (
"encoding/base64"
"encoding/json"
"io"
"github.com/cosmos/cosmos-sdk/store/types"
"github.com/cosmos/cosmos-sdk/types/errors"
)
const (
writeOp operation = "write"
readOp operation = "read"
deleteOp operation = "delete"
iterKeyOp operation = "iterKey"
iterValueOp operation = "iterValue"
)
type (
// Store implements the KVStore interface with tracing enabled.
// Operations are traced on each core KVStore call and written to the
// underlying io.Writer.
//
// TODO: Should we use a buffered writer and implement Commit on
// Store?
Store struct {
parent types.KVStore
writer io.Writer
context types.TraceContext
}
// operation represents an IO operation
operation string
// traceOperation implements a traced KVStore operation
traceOperation struct {
Operation operation `json:"operation"`
Key string `json:"key"`
Value string `json:"value"`
Metadata map[string]interface{} `json:"metadata"`
}
)
// NewStore returns a reference to a new traceKVStore given a parent
// KVStore implementation and an io.Writer for the trace output.
func NewStore(parent types.KVStore, writer io.Writer, tc types.TraceContext) *Store {
return &Store{parent: parent, writer: writer, context: tc}
}
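// Illustrative usage sketch (the parent store and writer are placeholders,
// not part of this package): wrap any KVStore so each operation is traced.
//
//	tc := types.TraceContext{"blockHeight": 64}
//	tkv := NewStore(parentStore, os.Stderr, tc)
//	tkv.Set([]byte("key"), []byte("value")) // emits one JSON trace line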
// Get implements the KVStore interface. It traces a read operation and
// delegates a Get call to the parent KVStore.
func (tkv *Store) Get(key []byte) []byte {
value := tkv.parent.Get(key)
writeOperation(tkv.writer, readOp, tkv.context, key, value)
return value
}
// Set implements the KVStore interface. It traces a write operation and
// delegates the Set call to the parent KVStore.
func (tkv *Store) Set(key []byte, value []byte) {
types.AssertValidKey(key)
writeOperation(tkv.writer, writeOp, tkv.context, key, value)
tkv.parent.Set(key, value)
}
// Delete implements the KVStore interface. It traces a write operation and
// delegates the Delete call to the parent KVStore.
func (tkv *Store) Delete(key []byte) {
writeOperation(tkv.writer, deleteOp, tkv.context, key, nil)
tkv.parent.Delete(key)
}
// Has implements the KVStore interface. It delegates the Has call to the
// parent KVStore.
func (tkv *Store) Has(key []byte) bool {
return tkv.parent.Has(key)
}
// Iterator implements the KVStore interface. It delegates the Iterator call
// to the parent KVStore.
func (tkv *Store) Iterator(start, end []byte) types.Iterator {
return tkv.iterator(start, end, true)
}
// ReverseIterator implements the KVStore interface. It delegates the
// ReverseIterator call the to the parent KVStore.
func (tkv *Store) ReverseIterator(start, end []byte) types.Iterator {
return tkv.iterator(start, end, false)
}
// iterator facilitates iteration over a KVStore. It delegates the necessary
// calls to its parent KVStore.
func (tkv *Store) iterator(start, end []byte, ascending bool) types.Iterator {
var parent types.Iterator
if ascending {
parent = tkv.parent.Iterator(start, end)
} else {
parent = tkv.parent.ReverseIterator(start, end)
}
return newTraceIterator(tkv.writer, parent, tkv.context)
}
type traceIterator struct {
parent types.Iterator
writer io.Writer
context types.TraceContext
}
func newTraceIterator(w io.Writer, parent types.Iterator, tc types.TraceContext) types.Iterator {
return &traceIterator{writer: w, parent: parent, context: tc}
}
// Domain implements the Iterator interface.
func (ti *traceIterator) Domain() (start []byte, end []byte) {
return ti.parent.Domain()
}
// Valid implements the Iterator interface.
func (ti *traceIterator) Valid() bool {
return ti.parent.Valid()
}
// Next implements the Iterator interface.
func (ti *traceIterator) Next() {
ti.parent.Next()
}
// Key implements the Iterator interface.
func (ti *traceIterator) Key() []byte {
key := ti.parent.Key()
writeOperation(ti.writer, iterKeyOp, ti.context, key, nil)
return key
}
// Value implements the Iterator interface.
func (ti *traceIterator) Value() []byte {
value := ti.parent.Value()
writeOperation(ti.writer, iterValueOp, ti.context, nil, value)
return value
}
// Close implements the Iterator interface.
func (ti *traceIterator) Close() error {
ti.parent.Close()
return nil
}
// Error delegates the Error call to the parent iterator.
func (ti *traceIterator) Error() error {
return ti.parent.Error()
}
// GetStoreType implements the KVStore interface. It returns the underlying
// KVStore type.
func (tkv *Store) GetStoreType() types.StoreType {
return tkv.parent.GetStoreType()
}
// CacheWrap implements the KVStore interface. It panics as a Store
// cannot be cache wrapped.
func (tkv *Store) CacheWrap() types.CacheWrap {
panic("cannot CacheWrap a Store")
}
// CacheWrapWithTrace implements the KVStore interface. It panics as a
// Store cannot be cache wrapped.
func (tkv *Store) CacheWrapWithTrace(_ io.Writer, _ types.TraceContext) types.CacheWrap {
panic("cannot CacheWrapWithTrace a Store")
}
// writeOperation writes a KVStore operation to the underlying io.Writer as
// JSON-encoded data where the key/value pair is base64 encoded.
func writeOperation(w io.Writer, op operation, tc types.TraceContext, key, value []byte) | {
traceOp := traceOperation{
Operation: op,
Key: base64.StdEncoding.EncodeToString(key),
Value: base64.StdEncoding.EncodeToString(value),
}
if tc != nil {
traceOp.Metadata = tc
}
raw, err := json.Marshal(traceOp)
if err != nil {
panic(errors.Wrap(err, "failed to serialize trace operation"))
}
if _, err := w.Write(raw); err != nil {
panic(errors.Wrap(err, "failed to write trace operation"))
}
io.WriteString(w, "\n")
} |
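// For example (illustrative), a Set of key "key" and value "value" under the
// trace context {"blockHeight": 64} emits a line like:
//
//	{"operation":"write","key":"a2V5","value":"dmFsdWU=","metadata":{"blockHeight":64}}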
|
_symbol.py | import _plotly_utils.basevalidators
class SymbolValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(self, plotly_name="symbol", parent_name="scatter.marker", **kwargs):
super(SymbolValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "style"),
values=kwargs.pop(
"values",
[
0,
"0",
"circle",
100,
"100",
"circle-open",
200,
"200",
"circle-dot",
300,
"300",
"circle-open-dot",
1,
"1",
"square",
101,
"101",
"square-open",
201,
"201",
"square-dot",
301,
"301",
"square-open-dot",
2,
"2",
"diamond",
102,
"102",
"diamond-open",
202,
"202",
"diamond-dot",
302,
"302",
"diamond-open-dot",
3,
"3",
"cross",
103,
"103",
"cross-open",
203,
"203",
"cross-dot",
303,
"303",
"cross-open-dot",
4,
"4",
"x",
104,
"104",
"x-open",
204,
"204",
"x-dot",
304,
"304",
"x-open-dot",
5,
"5",
"triangle-up",
105,
"105",
"triangle-up-open",
205,
"205",
"triangle-up-dot",
305,
"305",
"triangle-up-open-dot",
6,
"6",
"triangle-down",
106,
"106",
"triangle-down-open",
206,
"206",
"triangle-down-dot",
306,
"306",
"triangle-down-open-dot",
7,
"7",
"triangle-left",
107,
"107",
"triangle-left-open",
207,
"207",
"triangle-left-dot",
307,
"307",
"triangle-left-open-dot",
8,
"8",
"triangle-right",
108,
"108",
"triangle-right-open",
208,
"208",
"triangle-right-dot",
308,
"308",
"triangle-right-open-dot",
9,
"9",
"triangle-ne",
109,
"109",
"triangle-ne-open",
209,
"209",
"triangle-ne-dot",
309,
"309",
"triangle-ne-open-dot",
10,
"10",
"triangle-se",
110,
"110",
"triangle-se-open",
210,
"210",
"triangle-se-dot",
310,
"310",
"triangle-se-open-dot",
11,
"11",
"triangle-sw",
111,
"111",
"triangle-sw-open",
211,
"211",
"triangle-sw-dot",
311,
"311",
"triangle-sw-open-dot",
12,
"12",
"triangle-nw",
112,
"112",
"triangle-nw-open",
212,
"212",
"triangle-nw-dot",
312,
"312",
"triangle-nw-open-dot",
13,
"13",
"pentagon",
113,
"113",
"pentagon-open",
213,
"213",
"pentagon-dot",
313,
"313",
"pentagon-open-dot",
14,
"14",
"hexagon",
114,
"114",
"hexagon-open",
214,
"214",
"hexagon-dot",
314,
"314",
"hexagon-open-dot",
15,
"15",
"hexagon2",
115,
"115",
"hexagon2-open",
215,
"215",
"hexagon2-dot",
315,
"315",
"hexagon2-open-dot",
16,
"16",
"octagon",
116,
"116",
"octagon-open",
216,
"216",
"octagon-dot",
316,
"316",
"octagon-open-dot",
17,
"17",
"star",
117,
"117",
"star-open",
217,
"217",
"star-dot",
317,
"317",
"star-open-dot",
18,
"18",
"hexagram",
118,
"118",
"hexagram-open",
218,
"218",
"hexagram-dot",
318,
"318",
"hexagram-open-dot",
19,
"19",
"star-triangle-up",
119,
"119",
"star-triangle-up-open",
219,
"219",
"star-triangle-up-dot",
319,
"319",
"star-triangle-up-open-dot",
20,
"20",
"star-triangle-down",
120,
"120",
"star-triangle-down-open",
220,
"220",
"star-triangle-down-dot",
320,
"320",
"star-triangle-down-open-dot",
21,
"21",
"star-square",
121,
"121",
"star-square-open",
221,
"221",
"star-square-dot",
321,
"321",
"star-square-open-dot",
22,
"22",
"star-diamond",
122,
"122",
"star-diamond-open",
222,
"222",
"star-diamond-dot",
322,
"322",
"star-diamond-open-dot",
23,
"23",
"diamond-tall",
123,
"123",
"diamond-tall-open",
223,
"223",
"diamond-tall-dot",
323,
"323",
"diamond-tall-open-dot",
24,
"24",
"diamond-wide",
124,
"124",
"diamond-wide-open",
224,
"224",
"diamond-wide-dot",
324,
"324",
"diamond-wide-open-dot",
25,
"25",
"hourglass",
125,
"125",
"hourglass-open",
26,
"26",
"bowtie",
126,
"126",
"bowtie-open",
27,
"27",
"circle-cross",
127,
"127",
"circle-cross-open",
28,
"28",
"circle-x",
128,
"128",
"circle-x-open",
29,
"29",
"square-cross",
129,
"129",
"square-cross-open",
30,
"30",
"square-x",
130,
"130",
"square-x-open",
31,
"31",
"diamond-cross",
131,
"131",
"diamond-cross-open",
32,
"32",
"diamond-x",
132,
"132",
"diamond-x-open",
33,
"33",
"cross-thin",
133, | "133",
"cross-thin-open",
34,
"34",
"x-thin",
134,
"134",
"x-thin-open",
35,
"35",
"asterisk",
135,
"135",
"asterisk-open",
36,
"36",
"hash",
136,
"136",
"hash-open",
236,
"236",
"hash-dot",
336,
"336",
"hash-open-dot",
37,
"37",
"y-up",
137,
"137",
"y-up-open",
38,
"38",
"y-down",
138,
"138",
"y-down-open",
39,
"39",
"y-left",
139,
"139",
"y-left-open",
40,
"40",
"y-right",
140,
"140",
"y-right-open",
41,
"41",
"line-ew",
141,
"141",
"line-ew-open",
42,
"42",
"line-ns",
142,
"142",
"line-ns-open",
43,
"43",
"line-ne",
143,
"143",
"line-ne-open",
44,
"44",
"line-nw",
144,
"144",
"line-nw-open",
45,
"45",
"arrow-up",
145,
"145",
"arrow-up-open",
46,
"46",
"arrow-down",
146,
"146",
"arrow-down-open",
47,
"47",
"arrow-left",
147,
"147",
"arrow-left-open",
48,
"48",
"arrow-right",
148,
"148",
"arrow-right-open",
49,
"49",
"arrow-bar-up",
149,
"149",
"arrow-bar-up-open",
50,
"50",
"arrow-bar-down",
150,
"150",
"arrow-bar-down-open",
51,
"51",
"arrow-bar-left",
151,
"151",
"arrow-bar-left-open",
52,
"52",
"arrow-bar-right",
152,
"152",
"arrow-bar-right-open",
],
),
**kwargs
) | |
tcp.rs | use std::{io, sync::Arc};
use async_trait::async_trait;
use log::*;
use crate::{
proxy::{OutboundConnect, OutboundHandler, ProxyStream, TcpOutboundHandler},
session::Session,
};
pub struct Handler {
pub actors: Vec<Arc<dyn OutboundHandler>>, | pub attempts: usize,
}
#[async_trait]
impl TcpOutboundHandler for Handler {
fn tcp_connect_addr(&self) -> Option<OutboundConnect> {
None
}
async fn handle_tcp<'a>(
&'a self,
sess: &'a Session,
_stream: Option<Box<dyn ProxyStream>>,
) -> io::Result<Box<dyn ProxyStream>> {
for _ in 0..self.attempts {
for a in self.actors.iter() {
debug!("retry handles tcp [{}] to [{}]", sess.destination, a.tag());
match a.handle_tcp(sess, None).await {
Ok(s) => return Ok(s),
Err(_) => continue,
}
}
}
Err(io::Error::new(io::ErrorKind::Other, "all attempts failed"))
}
} | |
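// Illustrative construction (the actor values are placeholders): a handler
// built as `Handler { actors: vec![a, b], attempts: 2 }` makes handle_tcp
// try each actor in order for up to two full passes, returning the first
// stream that connects and erroring only after every attempt fails.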
pystruct.rs | /*
* Python struct module.
*
* Docs: https://docs.python.org/3/library/struct.html
*
 * Renamed to pystruct since "struct" is a Rust keyword.
*
* Use this rust module to do byte packing:
* https://docs.rs/byteorder/1.2.6/byteorder/
*/
use std::io::{Cursor, Read, Write};
use std::iter::Peekable;
use byteorder::{ReadBytesExt, WriteBytesExt};
use crate::function::PyFuncArgs;
use crate::obj::{objbytes, objstr, objtype};
use crate::pyobject::{PyObjectRef, PyResult, TryFromObject};
use crate::VirtualMachine;
#[derive(Debug)]
struct FormatSpec {
endianness: Endianness,
codes: Vec<FormatCode>,
}
#[derive(Debug)]
enum Endianness {
Native,
Little,
Big,
Network,
}
#[derive(Debug)]
struct FormatCode {
code: char,
}
fn parse_format_string(fmt: String) -> Result<FormatSpec, String> {
let mut chars = fmt.chars().peekable();
// First determine "<", ">","!" or "="
let endianness = parse_endiannes(&mut chars);
// Now, analyze struct string furter:
let codes = parse_format_codes(&mut chars)?;
Ok(FormatSpec { endianness, codes })
}
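// Illustrative example (mirrors Python's struct format strings): parsing
// "<2H" yields Endianness::Little plus two 'H' codes, i.e. the same layout
// Python's struct.pack('<2H', 1, 2) would produce.
//
//	let spec = parse_format_string("<2H".to_string()).unwrap();
//	assert_eq!(spec.codes.len(), 2);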
/// Parse endianness
/// See also: https://docs.python.org/3/library/struct.html?highlight=struct#byte-order-size-and-alignment
fn parse_endianness<I>(chars: &mut Peekable<I>) -> Endianness
where
I: Sized + Iterator<Item = char>,
{
match chars.peek() {
Some('@') => {
chars.next().unwrap();
Endianness::Native
}
Some('=') => {
chars.next().unwrap();
Endianness::Native
}
Some('<') => {
chars.next().unwrap();
Endianness::Little
}
Some('>') => {
chars.next().unwrap();
Endianness::Big
}
Some('!') => {
chars.next().unwrap();
Endianness::Network
}
_ => Endianness::Native,
}
}
fn parse_format_codes<I>(chars: &mut Peekable<I>) -> Result<Vec<FormatCode>, String>
where
I: Sized + Iterator<Item = char>,
{
let mut codes = vec![];
while chars.peek().is_some() {
// determine repeat operator:
let repeat = match chars.peek() {
Some('0'..='9') => {
let mut repeat = 0;
while let Some('0'..='9') = chars.peek() {
if let Some(c) = chars.next() {
let current_digit = c.to_digit(10).unwrap();
repeat = repeat * 10 + current_digit;
}
}
Some(repeat)
}
_ => None,
};
// determine format char:
let c = chars.next();
match c {
Some(c) if is_supported_format_character(c) => {
if let Some(repeat) = repeat {
for _ in 0..repeat {
codes.push(FormatCode { code: c })
}
} else {
codes.push(FormatCode { code: c })
}
}
_ => return Err(format!("Illegal format code {:?}", c)),
}
}
Ok(codes)
}
fn is_supported_format_character(c: char) -> bool {
match c { | _ => false,
}
}
macro_rules! make_pack_no_endianess {
($T:ty) => {
paste::item! {
fn [<pack_ $T>](vm: &VirtualMachine, arg: &PyObjectRef, data: &mut dyn Write) -> PyResult<()> {
let v = $T::try_from_object(vm, arg.clone())?;
data.[<write_$T>](v).unwrap();
Ok(())
}
}
};
}
macro_rules! make_pack_with_endianess {
($T:ty) => {
paste::item! {
fn [<pack_ $T>]<Endianness>(vm: &VirtualMachine, arg: &PyObjectRef, data: &mut dyn Write) -> PyResult<()>
where
Endianness: byteorder::ByteOrder,
{
let v = $T::try_from_object(vm, arg.clone())?;
data.[<write_$T>]::<Endianness>(v).unwrap();
Ok(())
}
}
};
}
make_pack_no_endianess!(i8);
make_pack_no_endianess!(u8);
make_pack_with_endianess!(i16);
make_pack_with_endianess!(u16);
make_pack_with_endianess!(i32);
make_pack_with_endianess!(u32);
make_pack_with_endianess!(i64);
make_pack_with_endianess!(u64);
make_pack_with_endianess!(f32);
make_pack_with_endianess!(f64);
fn pack_bool(vm: &VirtualMachine, arg: &PyObjectRef, data: &mut dyn Write) -> PyResult<()> {
let v = if bool::try_from_object(vm, arg.clone())? {
1
} else {
0
};
data.write_u8(v).unwrap();
Ok(())
}
fn pack_item<Endianness>(
vm: &VirtualMachine,
code: &FormatCode,
arg: &PyObjectRef,
data: &mut dyn Write,
) -> PyResult<()>
where
Endianness: byteorder::ByteOrder,
{
match code.code {
'b' => pack_i8(vm, arg, data)?,
'B' => pack_u8(vm, arg, data)?,
'?' => pack_bool(vm, arg, data)?,
'h' => pack_i16::<Endianness>(vm, arg, data)?,
'H' => pack_u16::<Endianness>(vm, arg, data)?,
'i' | 'l' => pack_i32::<Endianness>(vm, arg, data)?,
'I' | 'L' => pack_u32::<Endianness>(vm, arg, data)?,
'q' => pack_i64::<Endianness>(vm, arg, data)?,
'Q' => pack_u64::<Endianness>(vm, arg, data)?,
'f' => pack_f32::<Endianness>(vm, arg, data)?,
'd' => pack_f64::<Endianness>(vm, arg, data)?,
c => {
panic!("Unsupported format code {:?}", c);
}
}
Ok(())
}
fn struct_pack(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
if args.args.is_empty() {
Err(vm.new_type_error(format!(
"Expected at least 1 argument (got: {})",
args.args.len()
)))
} else {
let fmt_arg = args.args[0].clone();
if objtype::isinstance(&fmt_arg, &vm.ctx.str_type()) {
let fmt_str = objstr::get_value(&fmt_arg);
let format_spec = parse_format_string(fmt_str).map_err(|e| vm.new_value_error(e))?;
if format_spec.codes.len() + 1 == args.args.len() {
// Create data vector:
let mut data = Vec::<u8>::new();
// Loop over all opcodes:
for (code, arg) in format_spec.codes.iter().zip(args.args.iter().skip(1)) {
debug!("code: {:?}", code);
match format_spec.endianness {
Endianness::Little => {
pack_item::<byteorder::LittleEndian>(vm, code, arg, &mut data)?
}
Endianness::Big => {
pack_item::<byteorder::BigEndian>(vm, code, arg, &mut data)?
}
Endianness::Network => {
pack_item::<byteorder::NetworkEndian>(vm, code, arg, &mut data)?
}
Endianness::Native => {
pack_item::<byteorder::NativeEndian>(vm, code, arg, &mut data)?
}
}
}
Ok(vm.ctx.new_bytes(data))
} else {
Err(vm.new_type_error(format!(
"Expected {} arguments (got: {})",
format_spec.codes.len() + 1,
args.args.len()
)))
}
} else {
Err(vm.new_type_error("First argument must be of str type".to_string()))
}
}
}
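// Round-trip sketch as seen from Python code running on this VM
// (illustrative):
//
//	import struct
//	data = struct.pack('<HH', 1, 2)   # b'\x01\x00\x02\x00'
//	struct.unpack('<HH', data)        # (1, 2)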
fn unpack_i8(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult {
match rdr.read_i8() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_int(v)),
}
}
fn unpack_u8(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult {
match rdr.read_u8() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_int(v)),
}
}
fn unpack_bool(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult {
match rdr.read_u8() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_bool(v > 0)),
}
}
fn unpack_i16<Endianness>(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match rdr.read_i16::<Endianness>() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_int(v)),
}
}
fn unpack_u16<Endianness>(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match rdr.read_u16::<Endianness>() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_int(v)),
}
}
fn unpack_i32<Endianness>(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match rdr.read_i32::<Endianness>() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_int(v)),
}
}
fn unpack_u32<Endianness>(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match rdr.read_u32::<Endianness>() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_int(v)),
}
}
fn unpack_i64<Endianness>(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match rdr.read_i64::<Endianness>() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_int(v)),
}
}
fn unpack_u64<Endianness>(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match rdr.read_u64::<Endianness>() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_int(v)),
}
}
fn unpack_f32<Endianness>(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match rdr.read_f32::<Endianness>() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_float(f64::from(v))),
}
}
fn unpack_f64<Endianness>(vm: &VirtualMachine, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match rdr.read_f64::<Endianness>() {
Err(err) => panic!("Error in reading {:?}", err),
Ok(v) => Ok(vm.ctx.new_float(v)),
}
}
fn struct_unpack(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
arg_check!(
vm,
args,
required = [
(fmt, Some(vm.ctx.str_type())),
(buffer, Some(vm.ctx.bytes_type()))
]
);
let fmt_str = objstr::get_value(&fmt);
let format_spec = parse_format_string(fmt_str).map_err(|e| vm.new_value_error(e))?;
let data = objbytes::get_value(buffer).to_vec();
let mut rdr = Cursor::new(data);
let mut items = vec![];
for code in format_spec.codes {
debug!("unpack code: {:?}", code);
let item = match format_spec.endianness {
Endianness::Little => unpack_code::<byteorder::LittleEndian>(vm, &code, &mut rdr)?,
Endianness::Big => unpack_code::<byteorder::BigEndian>(vm, &code, &mut rdr)?,
Endianness::Network => unpack_code::<byteorder::NetworkEndian>(vm, &code, &mut rdr)?,
Endianness::Native => unpack_code::<byteorder::NativeEndian>(vm, &code, &mut rdr)?,
};
items.push(item);
}
Ok(vm.ctx.new_tuple(items))
}
fn unpack_code<Endianness>(vm: &VirtualMachine, code: &FormatCode, rdr: &mut dyn Read) -> PyResult
where
Endianness: byteorder::ByteOrder,
{
match code.code {
'b' => unpack_i8(vm, rdr),
'B' => unpack_u8(vm, rdr),
'?' => unpack_bool(vm, rdr),
'h' => unpack_i16::<Endianness>(vm, rdr),
'H' => unpack_u16::<Endianness>(vm, rdr),
'i' | 'l' => unpack_i32::<Endianness>(vm, rdr),
'I' | 'L' => unpack_u32::<Endianness>(vm, rdr),
'q' => unpack_i64::<Endianness>(vm, rdr),
'Q' => unpack_u64::<Endianness>(vm, rdr),
'f' => unpack_f32::<Endianness>(vm, rdr),
'd' => unpack_f64::<Endianness>(vm, rdr),
c => {
panic!("Unsupported format code {:?}", c);
}
}
}
pub fn make_module(vm: &VirtualMachine) -> PyObjectRef {
let ctx = &vm.ctx;
let struct_error = ctx.new_class("struct.error", ctx.object());
py_module!(vm, "struct", {
"pack" => ctx.new_rustfunc(struct_pack),
"unpack" => ctx.new_rustfunc(struct_unpack),
"error" => struct_error,
})
} | 'b' | 'B' | 'h' | 'H' | 'i' | 'I' | 'l' | 'L' | 'q' | 'Q' | 'f' | 'd' => true, |
main.py | # source : https://github.com/squeaky-pl/japronto/blob/master/tutorial/1_hello.md
from japronto import Application
def hello(request):
|
app = Application()
app.router.add_route('/', hello)
app.run(debug=True)
| return request.Response(text='Hello world!') |
collator.rs | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/// Contains the API configuration as prescribed by ECMA 402.
///
/// The meaning of the options is the same as in the similarly named
/// options in the JS version.
///
/// See [Options] for the contents of the options. See the [Collator::try_new]
/// for the use of the options.
pub mod options {
/// Set the intended usage for this collation.
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum Usage {
/// Use collation for sorting. Default.
Sort,
/// Use collation for searching.
Search,
}
/// Set the sensitivity of the collation.
///
/// Which differences in the strings should lead to non-zero result values.
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum Sensitivity {
/// Only strings that differ in base letters compare as unequal.
/// Examples: a ≠ b, a = á, a = A.
Base,
/// Only strings that differ in base letters or accents and other diacritic marks compare
/// as unequal. Examples: a ≠ b, a ≠ á, a = A.
Accent,
/// Only strings that differ in base letters or case compare as unequal.
/// Examples: a ≠ b, a = á, a ≠ A.
Case,
/// Strings that differ in base letters, accents and other diacritic marks, or case compare
/// as unequal. Other differences may also be taken into consideration.
/// Examples: a ≠ b, a ≠ á, a ≠ A.
Variant,
}
/// Whether punctuation should be ignored.
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum Punctuation {
/// Ignore punctuation.
Ignore,
/// Honor punctuation.
Honor,
}
/// Whether numeric collation should be used, such that "1" < "2" < "10".
///
/// This option can be set through an options property or through a Unicode extension key; if
/// both are provided, the options property takes precedence. Implementations are not required
/// to support this property.
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum Numeric {
/// Use numeric comparison.
Use,
/// Do not use numeric comparison.
Ignore,
}
/// Whether upper case or lower case should sort first.
///
/// This option can be set through an options property or through a Unicode extension key; if
/// both are provided, the options property takes precedence. Implementations are not required
/// to support this property.
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum CaseFirst {
/// Sort upper case first.
Upper,
/// Sort lower case first.
Lower,
/// Use locale default for case sorting.
False,
}
}
/// The options set by the user at construction time. See discussion at the top level
/// about the name choice. Provided as a "bag of options" since we don't expect any
/// implementations to be attached to this struct.
///
/// The default values of all the options are prescribed by the [TC39 report][tc39].
///
/// [tc39]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Intl/Collator/Collator
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct Options {
pub usage: options::Usage,
pub sensitivity: options::Sensitivity,
pub punctuation: options::Punctuation,
pub numeric: options::Numeric,
pub case_first: options::CaseFirst,
}
impl Default for Options {
/// Gets the default values of [Options] if omitted at setup. The
/// default values are prescribed by the TC39. See [Collator][tc39c].
///
/// [tc39c]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Intl/Collator/Collator
fn default() -> Self {
| Options {
usage: options::Usage::Sort,
sensitivity: options::Sensitivity::Variant,
punctuation: options::Punctuation::Honor,
numeric: options::Numeric::Ignore,
case_first: options::CaseFirst::False,
}
}
}
pub trait Collator {
/// The type of error reported, if any.
type Error: std::error::Error;
/// Creates a new [Collator].
///
/// Creation may fail, for example, if the locale-specific data is not loaded, or if
/// the supplied options are inconsistent.
fn try_new<L>(l: L, opts: Options) -> Result<Self, Self::Error>
where
L: crate::Locale,
Self: Sized;
/// Compares two strings according to the sort order of this [Collator].
///
/// Returns 0 if `first` and `second` are equal. Returns a negative value
/// if `first` is less, and a positive value if `second` is less.
fn compare<P, Q>(&self, first: P, second: Q) -> i8
where
P: AsRef<str>,
Q: AsRef<str>;
}
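// Illustrative use of the trait (the implementing type `MyCollator` and the
// locale value are placeholders, not provided by this module):
//
//	let c = MyCollator::try_new(my_locale, Options::default())?;
//	assert!(c.compare("a", "b") < 0);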
| |
delete_task_parameters.go | package tasks
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"github.com/go-swagger/go-swagger/client"
"github.com/go-swagger/go-swagger/errors"
"github.com/go-swagger/go-swagger/strfmt"
"github.com/go-swagger/go-swagger/swag"
)
// NewDeleteTaskParams creates a new DeleteTaskParams object
// with the default values initialized.
func | () *DeleteTaskParams {
var ()
return &DeleteTaskParams{}
}
/*DeleteTaskParams contains all the parameters to send to the API endpoint
for the delete task operation. Typically these are written to a http.Request.
*/
type DeleteTaskParams struct {
/*ID
The id of the item
*/
ID int64
}
// WithID adds the id to the delete task params
func (o *DeleteTaskParams) WithID(id int64) *DeleteTaskParams {
o.ID = id
return o
}
// WriteToRequest writes these params to a swagger request
func (o *DeleteTaskParams) WriteToRequest(r client.Request, reg strfmt.Registry) error {
var res []error
// path param id
if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil {
return err
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
| NewDeleteTaskParams |
plan_filter.rs | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_datavalues2::DataSchemaRef;
use common_datavalues2::DataValue;
use crate::Expression;
use crate::PlanNode;
#[derive(serde::Serialize, serde::Deserialize, Clone, PartialEq)]
pub struct FilterPlan {
/// The predicate expression, which must have Boolean type.
pub predicate: Expression,
/// The incoming logical plan
pub input: Arc<PlanNode>,
/// output schema
pub schema: DataSchemaRef,
}
impl FilterPlan {
pub fn schema(&self) -> DataSchemaRef {
self.schema.clone()
}
pub fn set_input(&mut self, node: &PlanNode) {
self.input = Arc::new(node.clone());
}
pub fn is_literal_false(&self) -> bool {
if let Expression::Literal { value, .. } = &self.predicate |
false
}
}
| {
return *value == DataValue::Boolean(false);
} |
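// Illustrative: a predicate built as Expression::Literal over
// DataValue::Boolean(false) makes is_literal_false() return true, which can
// let a planner prune the filtered subtree, since no row can satisfy it.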
root.go | // Package cli contains the supported lazo commands.
package cli
import (
"fmt"
"github.com/spf13/cobra"
"os"
)
var rootCmd = &cobra.Command{
Use: "lazo",
Short: "Lazo is a tool for managing Lazo source code",
Long: `Lazo is a tool for managing Lazo source code on the Bazo Blockchain`,
Run: func(cmd *cobra.Command, _ []string) {
_ = cmd.Help()
},
}
// Execute executes the lazo command
func Execute() | {
if err := rootCmd.Execute(); err != nil {
fmt.Println(err)
os.Exit(1)
}
} |
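// Typically wired up from a main package (illustrative sketch):
//
//	func main() {
//		cli.Execute()
//	}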
|
f24.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(unreachable_code)]
pub fn expr_while_24() {
let mut x = 24;
let mut y = 24;
let mut z = 24;
loop {
if x == 0 |
x -= 1;
loop {
if y == 0 { break; "unreachable"; }
y -= 1;
loop {
if z == 0 { break; "unreachable"; }
z -= 1;
}
if x > 10 {
return;
"unreachable";
}
}
}
}
| { break; "unreachable"; } |
test_utils.py | from dateutil.parser import parse as dateutil_parse
from datahub.omis.payment.constants import PaymentMethod | from datahub.omis.payment.utils import trasform_govuk_payment_to_omis_payment_data
class TestGetOmisPaymentDataFromGovukPayment:
"""
Tests for the `trasform_govuk_payment_to_omis_payment_data` function.
"""
def test_with_non_success_response_returns_none(self):
"""
Test that if the status of the GOV.UK payment is not `success`,
the method returns None
"""
govuk_payment = {
'state': {
'status': 'created',
},
}
assert not trasform_govuk_payment_to_omis_payment_data(govuk_payment)
def test_data(self):
"""Test the transformed data from a GOV.UK payment."""
govuk_payment = {
'amount': 1234,
'state': {'status': 'success'},
'email': '[email protected]',
'created_date': '2018-02-13T14:56:56.734Z',
'reference': '12345',
'card_details': {
'last_digits_card_number': '1111',
'cardholder_name': 'John Doe',
'expiry_date': '01/20',
'billing_address': {
'line1': 'line 1 address',
'line2': 'line 2 address',
'postcode': 'SW1A 1AA',
'city': 'London',
'country': 'GB',
},
'card_brand': 'Visa',
},
}
payment_data = trasform_govuk_payment_to_omis_payment_data(govuk_payment)
assert payment_data == {
'amount': 1234,
'method': PaymentMethod.CARD,
'received_on': dateutil_parse('2018-02-13').date(),
'transaction_reference': '12345',
'cardholder_name': 'John Doe',
'card_brand': 'Visa',
'billing_email': '[email protected]',
'billing_address_1': 'line 1 address',
'billing_address_2': 'line 2 address',
'billing_address_town': 'London',
'billing_address_postcode': 'SW1A 1AA',
'billing_address_country': 'GB',
} | |
file_unix_test.go | // +build linux darwin
package rotated
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"os/signal"
"path"
"path/filepath"
"strings"
"syscall"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestRotateBySignal(t *testing.T) |
func TestFileLogrotate(t *testing.T) {
dir, err := ioutil.TempDir("", "tlog_rotate_")
if err != nil {
t.Fatalf("create tmp dir: %v", err)
}
defer func() {
if !t.Failed() {
os.RemoveAll(dir)
return
}
t.Logf("dir: %v", dir)
}()
fname := fmt.Sprintf("file.%d.log", os.Getpid())
f := CreateLogrotate(filepath.Join(dir, fname))
defer f.Close()
for i := 0; i < 3; i++ {
_, err = fmt.Fprintf(f, "some info %v\n", i)
assert.NoError(t, err)
err = os.Rename(
filepath.Join(dir, fname),
filepath.Join(dir, fmt.Sprintf("file_moved_%d.%d.log", i, os.Getpid())),
)
require.NoError(t, err)
_, err = fmt.Fprintf(f, "after move %v\n", i)
assert.NoError(t, err)
err = f.Rotate()
assert.NoError(t, err)
}
fs, err := ioutil.ReadDir(dir)
if err != nil {
t.Fatalf("list dir: %v", err)
}
assert.Len(t, fs, 4)
for _, f := range fs {
b, err := ioutil.ReadFile(path.Join(dir, f.Name()))
if err != nil {
t.Fatalf("read file: %v", err)
}
switch {
case strings.HasPrefix(f.Name(), "file."):
assert.Equal(t, "", string(b))
case strings.HasPrefix(f.Name(), "file_moved_"):
var n int
fmt.Sscanf(f.Name(), "file_moved_%d", &n)
assert.Equal(t, fmt.Sprintf("some info %v\nafter move %v\n", n, n), string(b))
}
}
}
| {
n := 0
var buf [3]bytes.Buffer
c := make(chan struct{}, 3)
f := CreateLogrotate("name")
f.Fopen = func(name string, mode os.FileMode) (io.Writer, error) { n++; c <- struct{}{}; return &buf[n-1], nil }
q := make(chan os.Signal, 1)
signal.Notify(q, syscall.SIGUSR1)
_, _ = f.Write([]byte("before"))
err := syscall.Kill(os.Getpid(), syscall.SIGUSR1)
require.NoError(t, err)
<-q
loop:
for {
select {
case <-c:
case <-time.After(100 * time.Millisecond):
break loop
}
}
_, _ = f.Write([]byte("after"))
// t.Logf("n: %v", n)
assert.True(t, n >= 2)
assert.Equal(t, "before", buf[0].String())
assert.Equal(t, "after", buf[1].String())
// assert.Equal(t, "beforeafter", buf[0].String()+buf[1].String()+buf[2].String())
} |
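
`TestRotateBySignal` above exercises the usual logrotate handshake: the process subscribes to SIGUSR1 and reopens its output file when the signal lands. A minimal sketch of that signal plumbing on its own, independent of the `rotated` package:

package main

import (
	"fmt"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	c := make(chan os.Signal, 1)
	signal.Notify(c, syscall.SIGUSR1)

	// Send ourselves the signal, as the test does.
	if err := syscall.Kill(os.Getpid(), syscall.SIGUSR1); err != nil {
		panic(err)
	}

	<-c // block until the signal is delivered
	fmt.Println("SIGUSR1 received: reopen the log file here")
}
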
enums_fieldtype.go | // Code generated by "enumer -type=fieldType -trimprefix=fieldType -transform=kebab -text -output enums_fieldtype.go"; DO NOT EDIT.
package imagine
import (
"fmt"
)
const _fieldTypeName = "undefbsisetmutextime"
var _fieldTypeIndex = [...]uint8{0, 5, 8, 11, 16, 20}
func (i fieldType) String() string {
if i < 0 || i >= fieldType(len(_fieldTypeIndex)-1) {
return fmt.Sprintf("fieldType(%d)", i)
}
return _fieldTypeName[_fieldTypeIndex[i]:_fieldTypeIndex[i+1]]
}
var _fieldTypeValues = []fieldType{0, 1, 2, 3, 4}
var _fieldTypeNameToValueMap = map[string]fieldType{
_fieldTypeName[0:5]: 0,
_fieldTypeName[5:8]: 1,
_fieldTypeName[8:11]: 2,
_fieldTypeName[11:16]: 3,
_fieldTypeName[16:20]: 4,
}
// fieldTypeString retrieves an enum value from the enum constants string name.
// Throws an error if the param is not part of the enum.
func fieldTypeString(s string) (fieldType, error) |
// fieldTypeValues returns all values of the enum
func fieldTypeValues() []fieldType {
return _fieldTypeValues
}
// IsAfieldType returns "true" if the value is listed in the enum definition. "false" otherwise
func (i fieldType) IsAfieldType() bool {
for _, v := range _fieldTypeValues {
if i == v {
return true
}
}
return false
}
// MarshalText implements the encoding.TextMarshaler interface for fieldType
func (i fieldType) MarshalText() ([]byte, error) {
return []byte(i.String()), nil
}
// UnmarshalText implements the encoding.TextUnmarshaler interface for fieldType
func (i *fieldType) UnmarshalText(text []byte) error {
var err error
*i, err = fieldTypeString(string(text))
return err
}
| {
if val, ok := _fieldTypeNameToValueMap[s]; ok {
return val, nil
}
return 0, fmt.Errorf("%s does not belong to fieldType values", s)
} |
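
The generated `String` method above avoids allocating one string per enum value by packing all names into a single backing string and slicing it through a small index table. The technique in isolation, reusing the same name and index data:

package main

import "fmt"

// Sketch of the enumer layout: names concatenated into one string,
// with index[i]..index[i+1] delimiting the i-th name.
const fieldTypeNames = "undefbsisetmutextime"

var fieldTypeIndex = [...]uint8{0, 5, 8, 11, 16, 20}

func main() {
	for i := 0; i < len(fieldTypeIndex)-1; i++ {
		fmt.Println(fieldTypeNames[fieldTypeIndex[i]:fieldTypeIndex[i+1]])
	}
	// Prints: undef, bsi, set, mutex, time (one per line)
}
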
setup.py | from setuptools import setup
from mxtheme import __version__
setup(
name = 'mxtheme',
version = __version__,
author = 'Mu Li', | packages = ['mxtheme'],
include_package_data=True,
license= 'MIT License',
entry_points = {
'sphinx.html_themes': [
'mxtheme = mxtheme',
]
},
) | author_email= '',
url="https://github.com/mli/mx-theme",
description='A Sphinx theme based on Material Design, adapted from sphinx_materialdesign_theme', |
tasks_tools.ts | import { existsSync, lstatSync, readFileSync, readdirSync } from 'fs';
import * as runSequence from 'run-sequence';
import * as gulp from 'gulp';
import * as util from 'gulp-util';
import * as isstream from 'isstream';
import { join } from 'path';
import * as tildify from 'tildify';
import { changeFileManager } from './code_change_tools';
import { Task } from '../../tasks/task';
/**
* Loads the tasks within the given path.
* @param {string} path - The path to load the tasks from.
*/
export function loadTasks(path: string): void {
util.log('Loading tasks folder', util.colors.yellow(path));
readDir(path, taskname => registerTask(taskname, path));
}
function | (tasks: any) {
return Object.keys(tasks)
.map((taskName: string) => {
if (!tasks[taskName] ||
!Array.isArray(tasks[taskName]) ||
tasks[taskName].some((t: any) => typeof t !== 'string')) {
return taskName;
}
return null;
}).filter((taskName: string) => !!taskName);
}
function registerTasks(tasks: any) {
Object.keys(tasks)
.forEach((t: string) => {
gulp.task(t, (done: any) => runSequence.apply(null, [...tasks[t], done]));
});
}
function getInvalidTaskErrorMessage(invalid: string[], file: string) {
let error = `Invalid configuration in "${file}". `;
if (invalid.length === 1) {
error += 'Task';
} else {
error += 'Tasks';
}
const verb = invalid.length === 1 ? 'does' : 'do';
error += ` ${invalid.map((t: any) => `"${t}"`).join(', ')} ${verb} not have proper format.`;
return error;
}
/**
* Defines complex, composite tasks. The composite tasks
* are simply a composition of another tasks.
* Each composite tasks has the following format:
*
* "composite_task": ["task1", "task2"]
*
* This means that the format should be flat, with no nesting.
*
* The existing composite tasks are defined in
* "tools/config/seed.tasks.json" and can be overriden by
* editing the composite tasks project configuration.
*
* By default it is located in: "tools/config/project.tasks.json".
*
* Override existing tasks by simply providing a task
* name and a list of tasks that this task should execute.
*
* For instance:
* ```
* {
* "test": [
* "build.test",
* "mocha.run"
* ]
* }
* ```
*
* Note that the tasks do not support nested objects.
*/
export function loadCompositeTasks(seedTasksFile: string, projectTasksFile: string): void {
let seedTasks: any;
let projectTasks: any;
try {
seedTasks = JSON.parse(readFileSync(seedTasksFile).toString());
projectTasks = JSON.parse(readFileSync(projectTasksFile).toString());
} catch (e) {
util.log('Cannot load the task configuration files: ' + e.toString());
return;
}
[[seedTasks, seedTasksFile], [projectTasks, projectTasksFile]]
.forEach(([tasks, file]: [string, string]) => {
const invalid = validateTasks(tasks);
if (invalid.length) {
const errorMessage = getInvalidTaskErrorMessage(invalid, file);
util.log(util.colors.red(errorMessage));
process.exit(1);
}
});
const mergedTasks = Object.assign({}, seedTasks, projectTasks);
registerTasks(mergedTasks);
}
function normalizeTask(task: any, taskName: string) {
if (task instanceof Task) {
return task;
}
if (task.prototype && task.prototype instanceof Task) {
return new task();
}
if (typeof task === 'function') {
return new class AnonTask extends Task {
run(done: any) {
if (task.length > 0) {
return task(done);
}
const taskReturnedValue = task();
if (isstream(taskReturnedValue)) {
return taskReturnedValue;
}
done();
}
};
}
throw new Error(taskName + ' should be instance of the class ' +
'Task, a function or a class which extends Task.');
}
/**
* Registers the task by the given taskname and path.
* @param {string} taskname - The name of the task.
* @param {string} path - The path of the task.
*/
function registerTask(taskname: string, path: string): void {
const TASK = join(path, taskname);
util.log('Registering task', util.colors.yellow(tildify(TASK)));
gulp.task(taskname, (done: any) => {
const task = normalizeTask(require(TASK), TASK);
if (changeFileManager.pristine || task.shallRun(changeFileManager.lastChangedFiles)) {
const result = task.run(done);
if (result && typeof result.catch === 'function') {
result.catch((e: any) => {
util.log(`Error while running "${TASK}"`, e);
});
}
return result;
} else {
done();
}
});
}
/**
* Reads the files in the given root directory and executes the given callback per found file.
* @param {string} root - The root directory to read.
* @param {function} cb - The callback to execute per found file.
*/
function readDir(root: string, cb: (taskname: string) => void) {
if (!existsSync(root)) {
return;
}
walk(root);
function walk(path: string) {
let files = readdirSync(path);
for (let i = 0; i < files.length; i += 1) {
let file = files[i];
let curPath = join(path, file);
if (lstatSync(curPath).isFile() && /\.ts$/.test(file)) {
let taskname = file.replace(/\.ts$/, '');
cb(taskname);
}
}
}
}
| validateTasks |
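
`validateTasks` above accepts only a flat map from task name to a list of plain task-name strings. For illustration only, the same rule rendered as a Go sketch (the seed's actual tooling is the TypeScript shown above):

package main

import "fmt"

// invalidTasks reports the names whose value is not a list of strings,
// mirroring the validateTasks logic above.
func invalidTasks(tasks map[string]interface{}) []string {
	var invalid []string
	for name, v := range tasks {
		list, ok := v.([]interface{})
		if !ok {
			invalid = append(invalid, name)
			continue
		}
		for _, item := range list {
			if _, ok := item.(string); !ok {
				invalid = append(invalid, name)
				break
			}
		}
	}
	return invalid
}

func main() {
	tasks := map[string]interface{}{
		"test": []interface{}{"build.test", "mocha.run"},
		"bad":  "not-a-list",
	}
	fmt.Println(invalidTasks(tasks)) // [bad]
}
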
glonass.go | package rtcm3
import (
"encoding/binary"
"github.com/bamiaux/iobit"
"github.com/go-restruct/restruct"
"time"
)
// L1-Only GLONASS RTK Observables
type Message1009 struct {
AbstractMessage
ReferenceStationId uint16 `struct:"uint16:12"`
Epoch uint32 `struct:"uint32:27"`
SynchronousGnss bool `struct:"uint8:1,variantbool"`
SignalCount uint8 `struct:"uint8:5,sizeof=SignalData"`
SmoothingIndicator bool `struct:"uint8:1,variantbool"`
SmoothingInterval uint8 `struct:"uint8:3"`
SignalData []struct {
SatelliteId uint8 `struct:"uint8:6"`
L1CodeIndicator bool `struct:"uint8:1,variantbool"`
FrequencyChannel uint8 `struct:"uint8:5"`
L1Pseudorange uint32 `struct:"uint32:25"`
L1PhaseRange int32 `struct:"int32:20"`
L1LockTimeIndicator uint8 `struct:"uint8:7"`
}
}
func DeserializeMessage1009(data []byte) (msg Message1009) {
restruct.Unpack(data, binary.BigEndian, &msg)
return msg
}
func (msg Message1009) Serialize() []byte {
data, _ := restruct.Pack(binary.BigEndian, &msg)
return data
}
func (msg Message1009) Time() time.Time {
return DF034(msg.Epoch, time.Now().UTC())
}
func (msg Message1009) SatelliteCount() int {
return len(msg.SignalData)
}
// Extended L1-Only GLONASS RTK Observables
type Message1010 struct {
AbstractMessage
ReferenceStationId uint16 `struct:"uint16:12"`
Epoch uint32 `struct:"uint32:27"`
SynchronousGnss bool `struct:"uint8:1,variantbool"`
SignalCount uint8 `struct:"uint8:5,sizeof=SignalData"`
SmoothingIndicator bool `struct:"uint8:1,variantbool"`
SmoothingInterval uint8 `struct:"uint8:3"`
SignalData []struct {
SatelliteId uint8 `struct:"uint8:6"`
L1CodeIndicator bool `struct:"uint8:1,variantbool"`
FrequencyChannel uint8 `struct:"uint8:5"`
L1Pseudorange uint32 `struct:"uint32:25"`
L1PhaseRange int32 `struct:"int32:20"`
L1LockTimeIndicator uint8 `struct:"uint8:7"`
L1PseudorangeAmbiguity uint8 `struct:"uint8:7"`
L1Cnr uint8 `struct:"uint8"`
}
}
func | (data []byte) (msg Message1010) {
restruct.Unpack(data, binary.BigEndian, &msg)
return msg
}
func (msg Message1010) Serialize() []byte {
data, _ := restruct.Pack(binary.BigEndian, &msg)
return data
}
func (msg Message1010) Time() time.Time {
return DF034(msg.Epoch, time.Now().UTC())
}
func (msg Message1010) SatelliteCount() int {
return len(msg.SignalData)
}
// L1&L2 GLONASS RTK Observables
type Message1011 struct {
AbstractMessage
ReferenceStationId uint16 `struct:"uint16:12"`
Epoch uint32 `struct:"uint32:27"`
SynchronousGnss bool `struct:"uint8:1,variantbool"`
SignalCount uint8 `struct:"uint8:5,sizeof=SignalData"`
SmoothingIndicator bool `struct:"uint8:1,variantbool"`
SmoothingInterval uint8 `struct:"uint8:3"`
SignalData []struct {
SatelliteId uint8 `struct:"uint8:6"`
L1CodeIndicator bool `struct:"uint8:1,variantbool"`
FrequencyChannel uint8 `struct:"uint8:5"`
L1Pseudorange uint32 `struct:"uint32:25"`
L1PhaseRange int32 `struct:"int32:20"`
L1LockTimeIndicator uint8 `struct:"uint8:7"`
L2CodeIndicator uint8 `struct:"uint8:2"`
L2Pseudorange uint16 `struct:"uint16:14"`
L2PhaseRange int32 `struct:"int32:20"`
L2LockTimeIndicator uint8 `struct:"uint8:7"`
}
}
func DeserializeMessage1011(data []byte) (msg Message1011) {
restruct.Unpack(data, binary.BigEndian, &msg)
return msg
}
func (msg Message1011) Serialize() []byte {
data, _ := restruct.Pack(binary.BigEndian, &msg)
return data
}
func (msg Message1011) Time() time.Time {
return DF034(msg.Epoch, time.Now().UTC())
}
func (msg Message1011) SatelliteCount() int {
return len(msg.SignalData)
}
// Extended L1&L2 GLONASS RTK Observables
type Message1012 struct {
AbstractMessage
ReferenceStationId uint16 `struct:"uint16:12"`
Epoch uint32 `struct:"uint32:27"`
SynchronousGnss bool `struct:"uint8:1,variantbool"`
SignalCount uint8 `struct:"uint8:5,sizeof=SignalData"`
SmoothingIndicator bool `struct:"uint8:1,variantbool"`
SmoothingInterval uint8 `struct:"uint8:3"`
SignalData []struct {
SatelliteId uint8 `struct:"uint8:6"`
L1CodeIndicator bool `struct:"uint8:1,variantbool"`
FrequencyChannel uint8 `struct:"uint8:5"`
L1Pseudorange uint32 `struct:"uint32:25"`
L1PhaseRange int32 `struct:"int32:20"`
L1LockTimeIndicator uint8 `struct:"uint8:7"`
L1PseudorangeAmbiguity uint8 `struct:"uint8:7"`
L1Cnr uint8 `struct:"uint8"`
L2CodeIndicator uint8 `struct:"uint8:2"`
L2Pseudorange uint16 `struct:"uint16:14"`
L2PhaseRange int32 `struct:"int32:20"`
L2LockTimeIndicator uint8 `struct:"uint8:7"`
L2Cnr uint8 `struct:"uint8"`
}
}
func DeserializeMessage1012(data []byte) (msg Message1012) {
restruct.Unpack(data, binary.BigEndian, &msg)
return msg
}
func (msg Message1012) Serialize() []byte {
data, _ := restruct.Pack(binary.BigEndian, &msg)
return data
}
func (msg Message1012) Time() time.Time {
return DF034(msg.Epoch, time.Now().UTC())
}
func (msg Message1012) SatelliteCount() int {
return len(msg.SignalData)
}
// GLONASS Ionospheric Correction Differences
type Message1037 struct {
AbstractMessage
NetworkID uint8 `struct:"uint8"`
SubnetworkID uint8 `struct:"uint8:4"`
Epoch uint32 `struct:"uint32:20"`
MultipleMessageIndicator bool `struct:"uint8:1,variantbool"`
MasterReferenceStationID uint16 `struct:"uint16:12"`
AuxiliaryReferenceStationID uint16 `struct:"uint16:12"`
DataEntriesCount uint8 `struct:"uint8:4,sizeof=IonosphericCorrectionDifference"`
IonosphericCorrectionDifference []struct {
SatelliteID uint8 `struct:"uint8:6"`
AmbiguityStatusFlag uint8 `struct:"uint8:2"`
NonSyncCount uint8 `struct:"uint8:3"`
IonosphericCarrierPhaseCorrectionDifference int32 `struct:"int32:17"`
}
}
func DeserializeMessage1037(data []byte) (msg Message1037) {
restruct.Unpack(data, binary.BigEndian, &msg)
return msg
}
func (msg Message1037) Serialize() []byte {
data, _ := restruct.Pack(binary.BigEndian, &msg)
return data
}
// GLONASS Geometric Correction Differences
type Message1038 struct {
AbstractMessage
NetworkID uint8 `struct:"uint8"`
SubnetworkID uint8 `struct:"uint8:4"`
Epoch uint32 `struct:"uint32:20"`
MultipleMessageIndicator bool `struct:"uint8:1,variantbool"`
MasterReferenceStationID uint16 `struct:"uint16:12"`
AuxiliaryReferenceStationID uint16 `struct:"uint16:12"`
DataEntriesCount uint8 `struct:"uint8:4,sizeof=IonosphericCorrectionDifference"`
IonosphericCorrectionDifference []struct {
SatelliteID uint8 `struct:"uint8:6"`
AmbiguityStatusFlag uint8 `struct:"uint8:2"`
NonSyncCount uint8 `struct:"uint8:3"`
GeometricCarrierPhaseCorrectionDifference int32 `struct:"int32:17"`
IOD uint8 `struct:"uint8"`
}
}
func DeserializeMessage1038(data []byte) (msg Message1038) {
restruct.Unpack(data, binary.BigEndian, &msg)
return msg
}
func (msg Message1038) Serialize() []byte {
data, _ := restruct.Pack(binary.BigEndian, &msg)
return data
}
// GLONASS Combined Geometric and Ionospheric Correction Differences
type Message1039 struct {
AbstractMessage
NetworkID uint8 `struct:"uint8"`
SubnetworkID uint8 `struct:"uint8:4"`
Epoch uint32 `struct:"uint32:20"`
MultipleMessageIndicator bool `struct:"uint8:1,variantbool"`
MasterReferenceStationID uint16 `struct:"uint16:12"`
AuxiliaryReferenceStationID uint16 `struct:"uint16:12"`
DataEntriesCount uint8 `struct:"uint8:4,sizeof=IonosphericCorrectionDifference"`
IonosphericCorrectionDifference []struct {
SatelliteID uint8 `struct:"uint8:6"`
AmbiguityStatusFlag uint8 `struct:"uint8:2"`
NonSyncCount uint8 `struct:"uint8:3"`
GeometricCarrierPhaseCorrectionDifference int32 `struct:"int32:17"`
IOD uint8 `struct:"uint8"`
IonosphericCarrierPhaseCorrectionDifference int32 `struct:"int32:17"`
}
}
func DeserializeMessage1039(data []byte) (msg Message1039) {
restruct.Unpack(data, binary.BigEndian, &msg)
return msg
}
func (msg Message1039) Serialize() []byte {
data, _ := restruct.Pack(binary.BigEndian, &msg)
return data
}
// GLONASS L1 and L2 Code-Phase Biases
type Message1230 struct {
AbstractMessage
ReferenceStationId uint16
CodePhaseBias bool
Reserved uint8
SignalsMask uint8
L1CACodePhaseBias int16
L1PCodePhaseBias int16
L2CACodePhaseBias int16
L2PCodePhaseBias int16
}
func DeserializeMessage1230(data []byte) (msg Message1230) {
r := iobit.NewReader(data)
msg = Message1230{
AbstractMessage: AbstractMessage{
r.Uint16(12),
},
ReferenceStationId: r.Uint16(12),
CodePhaseBias: r.Bit(),
Reserved: r.Uint8(3),
SignalsMask: r.Uint8(4),
}
if (msg.SignalsMask & 8) == 8 {
msg.L1CACodePhaseBias = r.Int16(16)
}
if (msg.SignalsMask & 4) == 4 {
msg.L1PCodePhaseBias = r.Int16(16)
}
if (msg.SignalsMask & 2) == 2 {
msg.L2CACodePhaseBias = r.Int16(16)
}
if (msg.SignalsMask & 1) == 1 {
msg.L2PCodePhaseBias = r.Int16(16)
}
return msg
}
func (msg Message1230) Serialize() []byte {
data := make([]byte, 4)
w := iobit.NewWriter(data)
w.PutUint16(12, msg.AbstractMessage.MessageNumber)
w.PutUint16(12, msg.ReferenceStationId)
w.PutBit(msg.CodePhaseBias)
w.PutUint8(3, msg.Reserved)
w.PutUint8(4, msg.SignalsMask)
w.Flush()
if (msg.SignalsMask & 8) == 8 {
data = append(data, uint8(msg.L1CACodePhaseBias>>8), uint8(msg.L1CACodePhaseBias&0xff))
}
if (msg.SignalsMask & 4) == 4 {
data = append(data, uint8(msg.L1PCodePhaseBias>>8), uint8(msg.L1PCodePhaseBias&0xff))
}
if (msg.SignalsMask & 2) == 2 {
data = append(data, uint8(msg.L2CACodePhaseBias>>8), uint8(msg.L2CACodePhaseBias&0xff))
}
if (msg.SignalsMask & 1) == 1 {
data = append(data, uint8(msg.L2PCodePhaseBias>>8), uint8(msg.L2PCodePhaseBias&0xff))
}
return data
}
| DeserializeMessage1010 |
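
`Message1230`'s variable-length tail is gated by `SignalsMask`: reading from the most significant of its four bits, each bit enables one optional bias field (L1 C/A, L1 P, L2 C/A, L2 P), which is why both the deserializer and serializer above test the mask before touching each field. A small sketch of that gating logic on its own:

package main

import "fmt"

func main() {
	mask := uint8(0xA) // binary 1010: L1 C/A and L2 C/A biases present
	fmt.Println("L1 C/A bias present:", mask&8 == 8) // true
	fmt.Println("L1 P bias present:  ", mask&4 == 4) // false
	fmt.Println("L2 C/A bias present:", mask&2 == 2) // true
	fmt.Println("L2 P bias present:  ", mask&1 == 1) // false
}
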
test_io.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
import mxnet as mx
from mxnet.test_utils import *
import numpy as np
import os, gzip
import pickle as pickle
import time
try:
import h5py
except ImportError:
h5py = None
import sys
from common import get_data
import unittest
def test_MNISTIter():
# prepare data
get_data.GetMNIST_ubyte()
batch_size = 100
train_dataiter = mx.io.MNISTIter(
image="data/train-images-idx3-ubyte",
label="data/train-labels-idx1-ubyte",
data_shape=(784,),
batch_size=batch_size, shuffle=1, flat=1, silent=0, seed=10)
# test_loop
nbatch = 60000 / batch_size
batch_count = 0
for batch in train_dataiter:
batch_count += 1
assert(nbatch == batch_count)
# test_reset
train_dataiter.reset()
train_dataiter.iter_next()
label_0 = train_dataiter.getlabel().asnumpy().flatten()
train_dataiter.iter_next()
train_dataiter.iter_next()
train_dataiter.iter_next()
train_dataiter.iter_next()
train_dataiter.reset()
train_dataiter.iter_next()
label_1 = train_dataiter.getlabel().asnumpy().flatten()
assert(sum(label_0 - label_1) == 0)
def test_Cifar10Rec():
get_data.GetCifar10()
dataiter = mx.io.ImageRecordIter(
path_imgrec="data/cifar/train.rec",
mean_img="data/cifar/cifar10_mean.bin",
rand_crop=False,
rand_mirror=False,
shuffle=False,
data_shape=(3,28,28),
batch_size=100,
preprocess_threads=4,
prefetch_buffer=1)
labelcount = [0 for i in range(10)]
batchcount = 0
for batch in dataiter:
npdata = batch.data[0].asnumpy().flatten().sum()
sys.stdout.flush()
batchcount += 1
nplabel = batch.label[0].asnumpy()
for i in range(nplabel.shape[0]):
labelcount[int(nplabel[i])] += 1
for i in range(10):
assert(labelcount[i] == 5000)
def test_NDArrayIter():
data = np.ones([1000, 2, 2])
label = np.ones([1000, 1])
for i in range(1000):
data[i] = i / 100
label[i] = i / 100
dataiter = mx.io.NDArrayIter(data, label, 128, True, last_batch_handle='pad')
batchidx = 0
for batch in dataiter:
batchidx += 1
assert(batchidx == 8)
dataiter = mx.io.NDArrayIter(data, label, 128, False, last_batch_handle='pad')
batchidx = 0
labelcount = [0 for i in range(10)]
for batch in dataiter:
label = batch.label[0].asnumpy().flatten()
assert((batch.data[0].asnumpy()[:,0,0] == label).all())
for i in range(label.shape[0]):
labelcount[int(label[i])] += 1
for i in range(10):
if i == 0:
assert(labelcount[i] == 124)
else:
assert(labelcount[i] == 100)
def test_NDArrayIter_h5py():
if not h5py:
return
data = np.ones([1000, 2, 2])
label = np.ones([1000, 1])
for i in range(1000):
data[i] = i / 100
label[i] = i / 100
try:
os.remove("ndarraytest.h5")
except OSError:
pass
with h5py.File("ndarraytest.h5") as f:
f.create_dataset("data", data=data)
f.create_dataset("label", data=label)
dataiter = mx.io.NDArrayIter(f["data"], f["label"], 128, True, last_batch_handle='pad')
batchidx = 0
for batch in dataiter:
batchidx += 1
assert(batchidx == 8)
dataiter = mx.io.NDArrayIter(f["data"], f["label"], 128, False, last_batch_handle='pad')
labelcount = [0 for i in range(10)]
for batch in dataiter:
label = batch.label[0].asnumpy().flatten()
assert((batch.data[0].asnumpy()[:,0,0] == label).all())
for i in range(label.shape[0]):
labelcount[int(label[i])] += 1
try:
os.remove("ndarraytest.h5")
except OSError:
pass
for i in range(10):
if i == 0:
assert(labelcount[i] == 124)
else:
assert(labelcount[i] == 100)
def test_NDArrayIter_csr():
# creating toy data
|
def test_LibSVMIter():
def check_libSVMIter_synthetic():
cwd = os.getcwd()
data_path = os.path.join(cwd, 'data.t')
label_path = os.path.join(cwd, 'label.t')
with open(data_path, 'w') as fout:
fout.write('1.0 0:0.5 2:1.2\n')
fout.write('-2.0\n')
fout.write('-3.0 0:0.6 1:2.4 2:1.2\n')
fout.write('4 2:-1.2\n')
with open(label_path, 'w') as fout:
fout.write('1.0\n')
fout.write('-2.0 0:0.125\n')
fout.write('-3.0 2:1.2\n')
fout.write('4 1:1.0 2:-1.2\n')
data_dir = os.path.join(cwd, 'data')
data_train = mx.io.LibSVMIter(data_libsvm=data_path, label_libsvm=label_path,
data_shape=(3, ), label_shape=(3, ), batch_size=3)
first = mx.nd.array([[ 0.5, 0., 1.2], [ 0., 0., 0.], [ 0.6, 2.4, 1.2]])
second = mx.nd.array([[ 0., 0., -1.2], [ 0.5, 0., 1.2], [ 0., 0., 0.]])
i = 0
for batch in iter(data_train):
expected = first.asnumpy() if i == 0 else second.asnumpy()
assert_almost_equal(data_train.getdata().asnumpy(), expected)
i += 1
def check_libSVMIter_news_data():
news_metadata = {
'name': 'news20.t',
'origin_name': 'news20.t.bz2',
'url': "http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multiclass/news20.t.bz2",
'feature_dim': 62060,
'num_classes': 20,
'num_examples': 3993,
}
batch_size = 33
num_examples = news_metadata['num_examples']
data_dir = os.path.join(os.getcwd(), 'data')
get_bz2_data(data_dir, news_metadata['name'], news_metadata['url'],
news_metadata['origin_name'])
path = os.path.join(data_dir, news_metadata['name'])
data_train = mx.io.LibSVMIter(data_libsvm=path, data_shape=(news_metadata['feature_dim'],),
batch_size=batch_size)
for epoch in range(2):
num_batches = 0
for batch in data_train:
# check the range of labels
assert(np.sum(batch.label[0].asnumpy() > 20) == 0)
assert(np.sum(batch.label[0].asnumpy() <= 0) == 0)
num_batches += 1
expected_num_batches = num_examples / batch_size
assert(num_batches == int(expected_num_batches)), num_batches
data_train.reset()
check_libSVMIter_synthetic()
check_libSVMIter_news_data()
@unittest.skip("test fails intermittently. temporarily disabled till it gets fixed. tracked at https://github.com/apache/incubator-mxnet/issues/7826")
def test_CSVIter():
def check_CSVIter_synthetic():
cwd = os.getcwd()
data_path = os.path.join(cwd, 'data.t')
label_path = os.path.join(cwd, 'label.t')
with open(data_path, 'w') as fout:
for i in range(1000):
fout.write(','.join(['1' for _ in range(8*8)]) + '\n')
with open(label_path, 'w') as fout:
for i in range(1000):
fout.write('0\n')
data_train = mx.io.CSVIter(data_csv=data_path, data_shape=(8,8),
label_csv=label_path, batch_size=100)
expected = mx.nd.ones((100, 8, 8))
for batch in iter(data_train):
assert_almost_equal(data_train.getdata().asnumpy(), expected.asnumpy())
check_CSVIter_synthetic()
if __name__ == "__main__":
test_NDArrayIter()
if h5py:
test_NDArrayIter_h5py()
test_MNISTIter()
test_Cifar10Rec()
test_LibSVMIter()
test_NDArrayIter_csr()
test_CSVIter()
| num_rows = rnd.randint(5, 15)
num_cols = rnd.randint(1, 20)
batch_size = rnd.randint(1, num_rows)
shape = (num_rows, num_cols)
csr, _ = rand_sparse_ndarray(shape, 'csr')
dns = csr.asnumpy()
#test CSRNDArray with shuffle=True will throw NotImplementedError
try:
csr_iter = mx.io.NDArrayIter({'data': csr}, dns, batch_size, shuffle=True,
last_batch_handle='discard')
assert(False)
except NotImplementedError:
pass
# make iterators
csr_iter = iter(mx.io.NDArrayIter(csr, csr, batch_size, last_batch_handle='discard'))
begin = 0
for batch in csr_iter:
expected = np.zeros((batch_size, num_cols))
end = begin + batch_size
expected[:num_rows - begin] = dns[begin:end]
if end > num_rows:
expected[num_rows - begin:] = dns[0:end - num_rows]
assert_almost_equal(batch.data[0].asnumpy(), expected)
begin += batch_size |
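
The expected-value loop above encodes the `pad` rule: when the last batch runs past the end of the data, the remainder wraps around to the first rows. The same arithmetic as a Go sketch (illustrative only; the real behavior lives in `mx.io.NDArrayIter`):

package main

import "fmt"

// batches pads the final short batch by wrapping around to the start,
// mirroring last_batch_handle='pad' in the test above.
func batches(n, batchSize int) [][]int {
	data := make([]int, n)
	for i := range data {
		data[i] = i
	}
	var out [][]int
	for begin := 0; begin < n; begin += batchSize {
		batch := make([]int, 0, batchSize)
		for j := 0; j < batchSize; j++ {
			batch = append(batch, data[(begin+j)%n])
		}
		out = append(out, batch)
	}
	return out
}

func main() {
	for _, b := range batches(10, 4) {
		fmt.Println(b) // last batch wraps: [8 9 0 1]
	}
}
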
borrow.js | var table;
$(function () {
table = $('#data_list').DataTable({
"ajax": {
"url": "/libsystem/admin/borrow/list.php",
"type": "POST",
"cache": false,
"data": function (d) {
return {
"sno": $("#borrow_sno").val(),
};
}
},
"columns": [
{"data": "bno"},
{"data": "bname"},
{"data": "author"},
{"data": "price"},
{"data": "sno"},
{"data": "sname"},
{"data": "timeout"},
{"data": null},
{"data": null},
{"data": null}
],
columnDefs: [
{
targets: 6,
render: function (a, b, c, d) {
return c.bdate;
}
},
{
targets: 7,
render: function (a, b, c, d) {
return c.rdate;
}
},
{
targets: 8,
render: function (a, b, c, d) {
return c.timeout;
}
},
{
targets: 9,
render: function (a, b, c, d) {
return "<button type='button' class='btn btn-xs btn-warning' id='btn_rent' onclick='showDel(\"" + c.id + "\")'>短信提醒</button> ";
}
}
]
});
});
function borrowBook() {
if (!validBorrow()) {
return;
}
jQuery.ajax({
type: 'POST',
url: '/libsystem/admin/borrow/save.php',
cache: false,
data: {
bno: $.trim($("#borrow_bno").val()),
sno: $.trim($("#borrow_sno").val())
},
success: function (data) {
if (data == 1) {
showInfo("Operation successful");
table.ajax.reload();
} else if (data == 0) {
showInfo("Operation failed, please try again");
} else if (data == -1) {
showInfo("This student ID does not exist");
} else if (data == -2) {
showInfo("This book ID does not exist");
} else if (data == -3) {
showInfo("Not enough copies of this book in the library");
} else if (data == -4) {
showInfo("You have already borrowed this book and cannot borrow it again");
} else if (data == -5) {
showInfo("You have reached your borrowing limit");
} else {
showInfo("Operation failed, please try again");
}
},
error: function (jqXHR, textStatus, errorThrown) {
showInfo("操作失败,请重试");
}
});
}
function validBorrow() {
var flag = true;
var borrow_sno = $.trim($("#borrow_sno").val());
if (borrow_sno == "") {
$("#borrow_sno").parent().paren | s("has-error");
$("#borrow_sno").next().text("请输入学号");
$("#borrow_sno").next().show();
flag = false;
} else if (borrow_sno.length > 20) {
$("#borrow_sno").parent().parent().addClass("has-error");
$("#borrow_sno").next().text("学号长度不能大于20");
$("#borrow_sno").next().show();
flag = false;
} else {
$("#borrow_sno").parent().parent().removeClass("has-error");
$("#borrow_sno").next().text("");
$("#borrow_sno").next().hide();
}
var borrow_bno = $.trim($("#borrow_bno").val());
if (borrow_bno == "") {
$("#borrow_bno").parent().parent().addClass("has-error");
$("#borrow_bno").next().text("请输入图书编号");
$("#borrow_bno").next().show();
flag = false;
} else if (borrow_bno.length > 20) {
$("#borrow_bno").parent().parent().addClass("has-error");
$("#borrow_bno").next().text("图书编号长度不能大于20");
$("#borrow_bno").next().show();
flag = false;
} else {
$("#borrow_bno").parent().parent().removeClass("has-error");
$("#borrow_bno").next().text("");
$("#borrow_bno").next().hide();
}
return flag;
}
function sendSMS() {
jQuery.ajax({
type: 'POST',
url: '/libsystem/SMS/sendSMS.php',
cache: false,
data: {
id: $('#delete_id').val()
},
success: function (data) {
if (data) {
showInfo("发送成功。");
} else {
showInfo("操作失败,请重试");
}
},
error: function (jqXHR, textStatus, errorThrown) {
showInfo("操作失败,请重试");
}
});
}
function showDel(id) {
$('#modal_delete').modal('show');
$('#delete_id').val(id);
}
function showInfo(msg) {
$("#div_info").text(msg);
$("#modal_info").modal('show');
}
function query() {
table.ajax.reload();
} | t().addClas |
lt-ref-self-async.rs | // edition:2018
#![feature(async_await)]
#![feature(arbitrary_self_types)]
#![allow(non_snake_case)]
use std::pin::Pin;
struct Struct<'a> { data: &'a u32 }
impl<'a> Struct<'a> {
// Test using `&self` sugar:
async fn ref_self(&self, f: &u32) -> &u32 {
f //~^ ERROR lifetime mismatch
}
// Test using `&Self` explicitly:
async fn ref_Self(self: &Self, f: &u32) -> &u32 {
f //~^ ERROR lifetime mismatch
}
async fn box_ref_Self(self: Box<&Self>, f: &u32) -> &u32 {
f //~^ ERROR lifetime mismatch
}
async fn pin_ref_Self(self: Pin<&Self>, f: &u32) -> &u32 {
f //~^ ERROR lifetime mismatch
}
async fn box_box_ref_Self(self: Box<Box<&Self>>, f: &u32) -> &u32 {
f //~^ ERROR lifetime mismatch
}
async fn | (self: Box<Pin<&Self>>, f: &u32) -> &u32 {
f //~^ ERROR lifetime mismatch
}
}
fn main() { }
| box_pin_Self |
logging.go | package jiraworklog
import (
"io"
"os"
"github.com/sirupsen/logrus"
)
type LoggerOptions struct {
Application string
LogFile string
Level string
}
func NewLogger(options LoggerOptions) *logrus.Entry {
if options.Level == "" {
options.Level = "warn"
}
level, err := logrus.ParseLevel(options.Level)
if err != nil {
panic(err)
}
log := logrus.New()
log.Level = level
log.Formatter = &logrus.TextFormatter{
//ForceColors: true,
TimestampFormat: "2006-01-02 15:04:05",
FullTimestamp: true,
//DisableColors: true,
}
// &logrus.JSONFormatter{}
if options.LogFile != "" {
log.Out = os.Stdout
file, err := os.OpenFile(options.LogFile, os.O_CREATE|os.O_WRONLY, 0666)
if err == nil {
log.Out = io.MultiWriter(file, os.Stdout)
} else {
log.Info("Failed to log to file, using default stderr")
| }
logger := log.WithFields(logrus.Fields{"app": options.Application})
return logger
} | }
|
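
`NewLogger` above returns a `*logrus.Entry` pre-tagged with an `app` field, so every line carries the application name. A minimal reproduction of that core idea (field values here are placeholders, not from the original package):

package main

import "github.com/sirupsen/logrus"

func main() {
	log := logrus.New()
	log.SetLevel(logrus.DebugLevel)
	log.Formatter = &logrus.TextFormatter{
		TimestampFormat: "2006-01-02 15:04:05",
		FullTimestamp:   true,
	}
	// Every message logged through this entry carries app=example.
	logger := log.WithFields(logrus.Fields{"app": "example"})
	logger.Info("logger ready")
}
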
conformancesoftware.input.js | const DateTimeScalar = require('../scalars/datetime.scalar');
const {
GraphQLInputObjectType,
GraphQLNonNull,
GraphQLString,
} = require('graphql');
// Util for extending gql objects
const { extendSchema } = require('@asymmetrik/fhir-gql-schema-utils');
/**
* @name exports
* @summary Conformance.software Input Schema
*/
module.exports = new GraphQLInputObjectType({
name: 'ConformanceSoftware_Input',
description:
'Software that is covered by this conformance statement. It is used when the conformance statement describes the capabilities of a particular software version, independent of an installation.',
fields: () =>
extendSchema(require('./backboneelement.input'), {
name: {
type: new GraphQLNonNull(GraphQLString),
description: 'Name software is known by.',
},
_name: {
type: require('./element.input'),
description: 'Name software is known by.',
}, | },
_version: {
type: require('./element.input'),
description:
'The version identifier for the software covered by this statement.',
},
releaseDate: {
type: DateTimeScalar,
description: 'Date this version of the software released.',
},
_releaseDate: {
type: require('./element.input'),
description: 'Date this version of the software released.',
},
}),
}); | version: {
type: GraphQLString,
description:
'The version identifier for the software covered by this statement.', |
conversation-graph-validation.js | /** @format */
/** | import { reducer, initialState } from 'lib/conversation/reducers/conversation-graph-validation';
const ConversationGraphValidationStore = createReducerStore( reducer, initialState );
ConversationGraphValidationStore.getErrors = () =>
ConversationGraphValidationStore.get().getIn( [ 'errors', 'message' ] );
export default ConversationGraphValidationStore; | * Internal dependencies
*/
import { createReducerStore } from 'lib/store'; |