prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>validation.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package validation
import (
"github.com/robfig/cron"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/unversioned"
unversionedvalidation "k8s.io/kubernetes/pkg/api/unversioned/validation"
apivalidation "k8s.io/kubernetes/pkg/api/validation"
"k8s.io/kubernetes/pkg/apis/batch"
"k8s.io/kubernetes/pkg/labels"
"k8s.io/kubernetes/pkg/util/validation/field"
)
// TODO: generalize for other controller objects that will follow the same pattern, such as ReplicaSet and DaemonSet, and
// move to new location. Replace batch.Job with an interface.
//
// ValidateGeneratedSelector validates that the generated selector on a controller object match the controller object
// metadata, and the labels on the pod template are as generated.
func ValidateGeneratedSelector(obj *batch.Job) field.ErrorList {
allErrs := field.ErrorList{}
if obj.Spec.ManualSelector != nil && *obj.Spec.ManualSelector {
return allErrs
}
if obj.Spec.Selector == nil {
return allErrs // This case should already have been checked in caller. No need for more errors.
}
// If somehow uid was unset then we would get "controller-uid=" as the selector
// which is bad.
if obj.ObjectMeta.UID == "" {
allErrs = append(allErrs, field.Required(field.NewPath("metadata").Child("uid"), ""))
}
// If somehow uid was unset then we would get "controller-uid=" as the selector
// which is bad.
if obj.ObjectMeta.UID == "" {
allErrs = append(allErrs, field.Required(field.NewPath("metadata").Child("uid"), ""))
}
// If selector generation was requested, then expected labels must be
// present on pod template, and much match job's uid and name. The
// generated (not-manual) selectors/labels ensure no overlap with other
// controllers. The manual mode allows orphaning, adoption,
// backward-compatibility, and experimentation with new
// labeling/selection schemes. Automatic selector generation should
// have placed certain labels on the pod, but this could have failed if
// the user added coflicting labels. Validate that the expected
// generated ones are there.
allErrs = append(allErrs, apivalidation.ValidateHasLabel(obj.Spec.Template.ObjectMeta, field.NewPath("spec").Child("template").Child("metadata"), "controller-uid", string(obj.UID))...)
allErrs = append(allErrs, apivalidation.ValidateHasLabel(obj.Spec.Template.ObjectMeta, field.NewPath("spec").Child("template").Child("metadata"), "job-name", string(obj.Name))...)
expectedLabels := make(map[string]string)
expectedLabels["controller-uid"] = string(obj.UID)
expectedLabels["job-name"] = string(obj.Name)<|fim▁hole|> if selector, err := unversioned.LabelSelectorAsSelector(obj.Spec.Selector); err == nil {
if !selector.Matches(labels.Set(expectedLabels)) {
allErrs = append(allErrs, field.Invalid(field.NewPath("spec").Child("selector"), obj.Spec.Selector, "`selector` not auto-generated"))
}
}
return allErrs
}
func ValidateJob(job *batch.Job) field.ErrorList {
// Jobs and rcs have the same name validation
allErrs := apivalidation.ValidateObjectMeta(&job.ObjectMeta, true, apivalidation.ValidateReplicationControllerName, field.NewPath("metadata"))
allErrs = append(allErrs, ValidateGeneratedSelector(job)...)
allErrs = append(allErrs, ValidateJobSpec(&job.Spec, field.NewPath("spec"))...)
return allErrs
}
func ValidateJobSpec(spec *batch.JobSpec, fldPath *field.Path) field.ErrorList {
allErrs := field.ErrorList{}
if spec.Parallelism != nil {
allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(*spec.Parallelism), fldPath.Child("parallelism"))...)
}
if spec.Completions != nil {
allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(*spec.Completions), fldPath.Child("completions"))...)
}
if spec.ActiveDeadlineSeconds != nil {
allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(*spec.ActiveDeadlineSeconds), fldPath.Child("activeDeadlineSeconds"))...)
}
if spec.Selector == nil {
allErrs = append(allErrs, field.Required(fldPath.Child("selector"), ""))
} else {
allErrs = append(allErrs, unversionedvalidation.ValidateLabelSelector(spec.Selector, fldPath.Child("selector"))...)
}
// Whether manually or automatically generated, the selector of the job must match the pods it will produce.
if selector, err := unversioned.LabelSelectorAsSelector(spec.Selector); err == nil {
labels := labels.Set(spec.Template.Labels)
if !selector.Matches(labels) {
allErrs = append(allErrs, field.Invalid(fldPath.Child("template", "metadata", "labels"), spec.Template.Labels, "`selector` does not match template `labels`"))
}
}
allErrs = append(allErrs, apivalidation.ValidatePodTemplateSpec(&spec.Template, fldPath.Child("template"))...)
if spec.Template.Spec.RestartPolicy != api.RestartPolicyOnFailure &&
spec.Template.Spec.RestartPolicy != api.RestartPolicyNever {
allErrs = append(allErrs, field.NotSupported(fldPath.Child("template", "spec", "restartPolicy"),
spec.Template.Spec.RestartPolicy, []string{string(api.RestartPolicyOnFailure), string(api.RestartPolicyNever)}))
}
return allErrs
}
func ValidateJobStatus(status *batch.JobStatus, fldPath *field.Path) field.ErrorList {
allErrs := field.ErrorList{}
allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(status.Active), fldPath.Child("active"))...)
allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(status.Succeeded), fldPath.Child("succeeded"))...)
allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(status.Failed), fldPath.Child("failed"))...)
return allErrs
}
func ValidateJobUpdate(job, oldJob *batch.Job) field.ErrorList {
allErrs := apivalidation.ValidateObjectMetaUpdate(&oldJob.ObjectMeta, &job.ObjectMeta, field.NewPath("metadata"))
allErrs = append(allErrs, ValidateJobSpecUpdate(job.Spec, oldJob.Spec, field.NewPath("spec"))...)
return allErrs
}
func ValidateJobUpdateStatus(job, oldJob *batch.Job) field.ErrorList {
allErrs := apivalidation.ValidateObjectMetaUpdate(&oldJob.ObjectMeta, &job.ObjectMeta, field.NewPath("metadata"))
allErrs = append(allErrs, ValidateJobStatusUpdate(job.Status, oldJob.Status)...)
return allErrs
}
func ValidateJobSpecUpdate(spec, oldSpec batch.JobSpec, fldPath *field.Path) field.ErrorList {
allErrs := field.ErrorList{}
allErrs = append(allErrs, ValidateJobSpec(&spec, fldPath)...)
allErrs = append(allErrs, apivalidation.ValidateImmutableField(spec.Completions, oldSpec.Completions, fldPath.Child("completions"))...)
allErrs = append(allErrs, apivalidation.ValidateImmutableField(spec.Selector, oldSpec.Selector, fldPath.Child("selector"))...)
allErrs = append(allErrs, apivalidation.ValidateImmutableField(spec.Template, oldSpec.Template, fldPath.Child("template"))...)
return allErrs
}
func ValidateJobStatusUpdate(status, oldStatus batch.JobStatus) field.ErrorList {
allErrs := field.ErrorList{}
allErrs = append(allErrs, ValidateJobStatus(&status, field.NewPath("status"))...)
return allErrs
}
func ValidateScheduledJob(scheduledJob *batch.ScheduledJob) field.ErrorList {
// ScheduledJobs and rcs have the same name validation
allErrs := apivalidation.ValidateObjectMeta(&scheduledJob.ObjectMeta, true, apivalidation.ValidateReplicationControllerName, field.NewPath("metadata"))
allErrs = append(allErrs, ValidateScheduledJobSpec(&scheduledJob.Spec, field.NewPath("spec"))...)
return allErrs
}
func ValidateScheduledJobSpec(spec *batch.ScheduledJobSpec, fldPath *field.Path) field.ErrorList {
allErrs := field.ErrorList{}
if len(spec.Schedule) == 0 {
allErrs = append(allErrs, field.Required(fldPath.Child("schedule"), ""))
} else {
allErrs = append(allErrs, validateScheduleFormat(spec.Schedule, fldPath.Child("schedule"))...)
}
if spec.StartingDeadlineSeconds != nil {
allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(*spec.StartingDeadlineSeconds), fldPath.Child("startingDeadlineSeconds"))...)
}
allErrs = append(allErrs, validateConcurrencyPolicy(&spec.ConcurrencyPolicy, fldPath.Child("concurrencyPolicy"))...)
allErrs = append(allErrs, ValidateJobTemplateSpec(&spec.JobTemplate, fldPath.Child("jobTemplate"))...)
return allErrs
}
func validateConcurrencyPolicy(concurrencyPolicy *batch.ConcurrencyPolicy, fldPath *field.Path) field.ErrorList {
allErrs := field.ErrorList{}
switch *concurrencyPolicy {
case batch.AllowConcurrent, batch.ForbidConcurrent, batch.ReplaceConcurrent:
break
case "":
allErrs = append(allErrs, field.Required(fldPath, ""))
default:
validValues := []string{string(batch.AllowConcurrent), string(batch.ForbidConcurrent), string(batch.ReplaceConcurrent)}
allErrs = append(allErrs, field.NotSupported(fldPath, *concurrencyPolicy, validValues))
}
return allErrs
}
func validateScheduleFormat(schedule string, fldPath *field.Path) field.ErrorList {
allErrs := field.ErrorList{}
_, err := cron.Parse(schedule)
if err != nil {
allErrs = append(allErrs, field.Invalid(fldPath, schedule, err.Error()))
}
return allErrs
}
func ValidateJobTemplate(job *batch.JobTemplate) field.ErrorList {
// this method should be identical to ValidateJob
allErrs := apivalidation.ValidateObjectMeta(&job.ObjectMeta, true, apivalidation.ValidateReplicationControllerName, field.NewPath("metadata"))
allErrs = append(allErrs, ValidateJobTemplateSpec(&job.Template, field.NewPath("template"))...)
return allErrs
}
func ValidateJobTemplateSpec(spec *batch.JobTemplateSpec, fldPath *field.Path) field.ErrorList {
// this method should be identical to ValidateJob
allErrs := ValidateJobSpec(&spec.Spec, fldPath.Child("spec"))
return allErrs
}<|fim▁end|> | // Whether manually or automatically generated, the selector of the job must match the pods it will produce. |
<|file_name|>clipComb.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# create pipeline
#
pl3d = vtk.vtkMultiBlockPLOT3DReader()
pl3d.SetXYZFileName(VTK_DATA_ROOT + "/Data/combxyz.bin")
pl3d.SetQFileName(VTK_DATA_ROOT + "/Data/combq.bin")
pl3d.SetScalarFunctionNumber(100)
pl3d.SetVectorFunctionNumber(202)
pl3d.Update()
output = pl3d.GetOutput().GetBlock(0)
# create a crazy implicit function
center = output.GetCenter()
sphere = vtk.vtkSphere()
sphere.SetCenter(center)
sphere.SetRadius(2.0)
sphere2 = vtk.vtkSphere()
sphere2.SetCenter(center[0] + 4.0, center[1], center[2])
sphere2.SetRadius(4.0)
<|fim▁hole|>boolOp = vtk.vtkImplicitBoolean()
boolOp.SetOperationTypeToUnion()
boolOp.AddFunction(sphere)
boolOp.AddFunction(sphere2)
# clip the structured grid to produce a tetrahedral mesh
clip = vtk.vtkClipDataSet()
clip.SetInputData(output)
clip.SetClipFunction(boolOp)
clip.InsideOutOn()
gf = vtk.vtkGeometryFilter()
gf.SetInputConnection(clip.GetOutputPort())
clipMapper = vtk.vtkPolyDataMapper()
clipMapper.SetInputConnection(gf.GetOutputPort())
clipActor = vtk.vtkActor()
clipActor.SetMapper(clipMapper)
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputData(output)
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddActor(clipActor)
ren1.AddActor(outlineActor)
ren1.SetBackground(1, 1, 1)
renWin.SetSize(250, 250)
ren1.SetBackground(0.1, 0.2, 0.4)
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(3.95297, 50)
cam1.SetFocalPoint(8.88908, 0.595038, 29.3342)
cam1.SetPosition(-12.3332, 31.7479, 41.2387)
cam1.SetViewUp(0.060772, -0.319905, 0.945498)
iren.Initialize()
# render the image
#
renWin.Render()
#iren.Start()<|fim▁end|> | |
<|file_name|>common-chunks-bundle.js<|end_file_name|><|fim▁begin|>/******/ (function(modules) { // webpackBootstrap
/******/ // install a JSONP callback for chunk loading
/******/ var parentJsonpFunction = window["webpackJsonp"];
/******/ window["webpackJsonp"] = function webpackJsonpCallback(chunkIds, moreModules, executeModules) {
/******/ // add "moreModules" to the modules object,
/******/ // then flag all "chunkIds" as loaded and fire callback
/******/ var moduleId, chunkId, i = 0, resolves = [], result;
/******/ for(;i < chunkIds.length; i++) {
/******/ chunkId = chunkIds[i];
/******/ if(installedChunks[chunkId]) {
/******/ resolves.push(installedChunks[chunkId][0]);
/******/ }
/******/ installedChunks[chunkId] = 0;
/******/ }
/******/ for(moduleId in moreModules) {
/******/ if(Object.prototype.hasOwnProperty.call(moreModules, moduleId)) {
/******/ modules[moduleId] = moreModules[moduleId];
/******/ }
/******/ }
/******/ if(parentJsonpFunction) parentJsonpFunction(chunkIds, moreModules, executeModules);
/******/ while(resolves.length) {
/******/ resolves.shift()();
/******/ }
/******/ if(executeModules) {
/******/ for(i=0; i < executeModules.length; i++) {
/******/ result = __webpack_require__(__webpack_require__.s = executeModules[i]);
/******/ }
/******/ }
/******/ return result;
/******/ };
/******/
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // objects to store loaded and loading chunks
/******/ var installedChunks = {
/******/ 1: 0
/******/ };
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/ // This file contains only the entry chunk.
/******/ // The chunk loading function for additional chunks
/******/ __webpack_require__.e = function requireEnsure(chunkId) {
/******/ if(installedChunks[chunkId] === 0) {
/******/ return Promise.resolve();
/******/ }
/******/
/******/ // a Promise means "currently loading".
/******/ if(installedChunks[chunkId]) {
/******/ return installedChunks[chunkId][2];
/******/ }
/******/
/******/ // setup Promise in chunk cache
/******/ var promise = new Promise(function(resolve, reject) {
/******/ installedChunks[chunkId] = [resolve, reject];
/******/ });
/******/ installedChunks[chunkId][2] = promise;
/******/
/******/ // start chunk loading
/******/ var head = document.getElementsByTagName('head')[0];
/******/ var script = document.createElement('script');
/******/ script.type = 'text/javascript';
/******/ script.charset = 'utf-8';
/******/ script.async = true;
/******/ script.timeout = 120000;
/******/
/******/ if (__webpack_require__.nc) {
/******/ script.setAttribute("nonce", __webpack_require__.nc);
/******/ }
/******/ script.src = __webpack_require__.p + "" + chunkId + "-bundle.js";
/******/ var timeout = setTimeout(onScriptComplete, 120000);
/******/ script.onerror = script.onload = onScriptComplete;
/******/ function onScriptComplete() {
/******/ // avoid mem leaks in IE.
/******/ script.onerror = script.onload = null;
/******/ clearTimeout(timeout);
/******/ var chunk = installedChunks[chunkId];
/******/ if(chunk !== 0) {
/******/ if(chunk) {
/******/ chunk[1](new Error('Loading chunk ' + chunkId + ' failed.'));
/******/ }
/******/ installedChunks[chunkId] = undefined;
/******/ }
/******/ };
/******/ head.appendChild(script);
/******/
/******/ return promise;
/******/ };
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // identity function for calling harmony imports with the correct context
/******/ __webpack_require__.i = function(value) { return value; };
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, {
/******/ configurable: false,<|fim▁hole|>/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // on error function for async loading
/******/ __webpack_require__.oe = function(err) { console.error(err); throw err; };
/******/ })
/************************************************************************/
/******/ ([]);<|fim▁end|> | /******/ enumerable: true,
/******/ get: getter
/******/ });
/******/ } |
<|file_name|>rv64f_instr.py<|end_file_name|><|fim▁begin|>"""
Copyright 2020 Google LLC
Copyright 2020 PerfectVIPs Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>from pygen_src.riscv_instr_pkg import (riscv_instr_name_t, riscv_instr_format_t,
riscv_instr_category_t, riscv_instr_group_t)
DEFINE_FP_INSTR(riscv_instr_name_t.FCVT_L_S, riscv_instr_format_t.I_FORMAT,
riscv_instr_category_t.ARITHMETIC, riscv_instr_group_t.RV64F, g=globals())
DEFINE_FP_INSTR(riscv_instr_name_t.FCVT_LU_S, riscv_instr_format_t.I_FORMAT,
riscv_instr_category_t.ARITHMETIC, riscv_instr_group_t.RV64F, g=globals())
DEFINE_FP_INSTR(riscv_instr_name_t.FCVT_S_L, riscv_instr_format_t.I_FORMAT,
riscv_instr_category_t.ARITHMETIC, riscv_instr_group_t.RV64F, g=globals())
DEFINE_FP_INSTR(riscv_instr_name_t.FCVT_S_LU, riscv_instr_format_t.I_FORMAT,
riscv_instr_category_t.ARITHMETIC, riscv_instr_group_t.RV64F, g=globals())<|fim▁end|> | """
from pygen_src.riscv_defines import DEFINE_FP_INSTR |
<|file_name|>rss-feed.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import { IconBaseProps } from 'react-icon-base';
export default class MdRssFeed extends React.Component<IconBaseProps, any> { }<|fim▁end|> | import * as React from 'react'; |
<|file_name|>_brain.py<|end_file_name|><|fim▁begin|># Authors: Alexandre Gramfort <[email protected]>
# Eric Larson <[email protected]>
# Oleh Kozynets <[email protected]>
# Guillaume Favelier <[email protected]>
# jona-sassenhagen <[email protected]>
# Joan Massich <[email protected]>
#
# License: Simplified BSD
import contextlib
from functools import partial
from io import BytesIO
import os
import os.path as op
import sys
import time
import copy
import traceback
import warnings
import numpy as np
from collections import OrderedDict
from .colormap import calculate_lut
from .surface import _Surface
from .view import views_dicts, _lh_views_dict
from .callback import (ShowView, TimeCallBack, SmartCallBack,
UpdateLUT, UpdateColorbarScale)
from ..utils import (_show_help_fig, _get_color_list, concatenate_images,
_generate_default_filename, _save_ndarray_img)
from .._3d import (_process_clim, _handle_time, _check_views,
_handle_sensor_types, _plot_sensors)
from ...defaults import _handle_default, DEFAULTS
from ...externals.decorator import decorator
from ...fixes import _point_data, _cell_data
from ..._freesurfer import (vertex_to_mni, read_talxfm, read_freesurfer_lut,
_get_head_surface, _get_skull_surface)
from ...io.pick import pick_types
from ...io.meas_info import Info
from ...surface import (mesh_edges, _mesh_borders, _marching_cubes,
get_meg_helmet_surf)
from ...source_space import SourceSpaces
from ...transforms import (apply_trans, invert_transform, _get_trans,
_get_transforms_to_coord_frame)
from ...utils import (_check_option, logger, verbose, fill_doc, _validate_type,
use_log_level, Bunch, _ReuseCycle, warn,
get_subjects_dir, _check_fname, _to_rgb)
_ARROW_MOVE = 10 # degrees per press
@decorator
def safe_event(fun, *args, **kwargs):
"""Protect against PyQt5 exiting on event-handling errors."""
try:
return fun(*args, **kwargs)
except Exception:
traceback.print_exc(file=sys.stderr)
class _Overlay(object):
def __init__(self, scalars, colormap, rng, opacity, name):
self._scalars = scalars
self._colormap = colormap
assert rng is not None
self._rng = rng
self._opacity = opacity
self._name = name
def to_colors(self):
from .._3d import _get_cmap
from matplotlib.colors import Colormap, ListedColormap
if isinstance(self._colormap, str):
cmap = _get_cmap(self._colormap)
elif isinstance(self._colormap, Colormap):
cmap = self._colormap
else:
cmap = ListedColormap(
self._colormap / 255., name=str(type(self._colormap)))
logger.debug(
f'Color mapping {repr(self._name)} with {cmap.name} '
f'colormap and range {self._rng}')
rng = self._rng
assert rng is not None
scalars = _norm(self._scalars, rng)
colors = cmap(scalars)
if self._opacity is not None:
colors[:, 3] *= self._opacity
return colors
def _norm(x, rng):
if rng[0] == rng[1]:
factor = 1 if rng[0] == 0 else 1e-6 * rng[0]
else:
factor = rng[1] - rng[0]
return (x - rng[0]) / factor
class _LayeredMesh(object):
def __init__(self, renderer, vertices, triangles, normals):
self._renderer = renderer
self._vertices = vertices
self._triangles = triangles
self._normals = normals
self._polydata = None
self._actor = None
self._is_mapped = False
self._current_colors = None
self._cached_colors = None
self._overlays = OrderedDict()
self._default_scalars = np.ones(vertices.shape)
self._default_scalars_name = 'Data'
def map(self):
kwargs = {
"color": None,
"pickable": True,
"rgba": True,
}
mesh_data = self._renderer.mesh(
x=self._vertices[:, 0],
y=self._vertices[:, 1],
z=self._vertices[:, 2],
triangles=self._triangles,
normals=self._normals,
scalars=self._default_scalars,
**kwargs
)
self._actor, self._polydata = mesh_data
self._is_mapped = True
def _compute_over(self, B, A):
assert A.ndim == B.ndim == 2
assert A.shape[1] == B.shape[1] == 4
A_w = A[:, 3:] # * 1
B_w = B[:, 3:] * (1 - A_w)
C = A.copy()
C[:, :3] *= A_w
C[:, :3] += B[:, :3] * B_w
C[:, 3:] += B_w
C[:, :3] /= C[:, 3:]
return np.clip(C, 0, 1, out=C)
def _compose_overlays(self):
B = cache = None
for overlay in self._overlays.values():
A = overlay.to_colors()
if B is None:
B = A
else:
cache = B
B = self._compute_over(cache, A)
return B, cache
def add_overlay(self, scalars, colormap, rng, opacity, name):
overlay = _Overlay(
scalars=scalars,
colormap=colormap,
rng=rng,
opacity=opacity,
name=name,
)
self._overlays[name] = overlay
colors = overlay.to_colors()
if self._current_colors is None:
self._current_colors = colors
else:
# save previous colors to cache
self._cached_colors = self._current_colors
self._current_colors = self._compute_over(
self._cached_colors, colors)
# apply the texture
self._apply()
def remove_overlay(self, names):
to_update = False
if not isinstance(names, list):
names = [names]
for name in names:
if name in self._overlays:
del self._overlays[name]
to_update = True
if to_update:
self.update()
def _apply(self):
if self._current_colors is None or self._renderer is None:
return
self._renderer._set_mesh_scalars(
mesh=self._polydata,
scalars=self._current_colors,
name=self._default_scalars_name,
)
def update(self, colors=None):
if colors is not None and self._cached_colors is not None:
self._current_colors = self._compute_over(
self._cached_colors, colors)
else:
self._current_colors, self._cached_colors = \
self._compose_overlays()
self._apply()
def _clean(self):
mapper = self._actor.GetMapper()
mapper.SetLookupTable(None)
self._actor.SetMapper(None)
self._actor = None
self._polydata = None
self._renderer = None
def update_overlay(self, name, scalars=None, colormap=None,
opacity=None, rng=None):
overlay = self._overlays.get(name, None)
if overlay is None:
return
if scalars is not None:
overlay._scalars = scalars
if colormap is not None:
overlay._colormap = colormap
if opacity is not None:
overlay._opacity = opacity
if rng is not None:
overlay._rng = rng
# partial update: use cache if possible
if name == list(self._overlays.keys())[-1]:
self.update(colors=overlay.to_colors())
else: # full update
self.update()
@fill_doc
class Brain(object):
"""Class for visualizing a brain.
.. warning::
The API for this class is not currently complete. We suggest using
:meth:`mne.viz.plot_source_estimates` with the PyVista backend
enabled to obtain a ``Brain`` instance.
Parameters
----------
subject_id : str
Subject name in Freesurfer subjects dir.
hemi : str
Hemisphere id (ie 'lh', 'rh', 'both', or 'split'). In the case
of 'both', both hemispheres are shown in the same window.
In the case of 'split' hemispheres are displayed side-by-side
in different viewing panes.
surf : str
FreeSurfer surface mesh name (ie 'white', 'inflated', etc.).
title : str
Title for the window.
cortex : str, list, dict
Specifies how the cortical surface is rendered. Options:
1. The name of one of the preset cortex styles:
``'classic'`` (default), ``'high_contrast'``,
``'low_contrast'``, or ``'bone'``.
2. A single color-like argument to render the cortex as a single
color, e.g. ``'red'`` or ``(0.1, 0.4, 1.)``.
3. A list of two color-like used to render binarized curvature
values for gyral (first) and sulcal (second). regions, e.g.,
``['red', 'blue']`` or ``[(1, 0, 0), (0, 0, 1)]``.
4. A dict containing keys ``'vmin', 'vmax', 'colormap'`` with
values used to render the binarized curvature (where 0 is gyral,
1 is sulcal).
.. versionchanged:: 0.24
Add support for non-string arguments.
alpha : float in [0, 1]
Alpha level to control opacity of the cortical surface.
size : int | array-like, shape (2,)
The size of the window, in pixels. can be one number to specify
a square window, or a length-2 sequence to specify (width, height).
background : tuple(int, int, int)
The color definition of the background: (red, green, blue).
foreground : matplotlib color
Color of the foreground (will be used for colorbars and text).
None (default) will use black or white depending on the value
of ``background``.
figure : list of Figure | None
If None (default), a new window will be created with the appropriate
views.
subjects_dir : str | None
If not None, this directory will be used as the subjects directory
instead of the value set using the SUBJECTS_DIR environment
variable.
views : list | str
The views to use.
offset : bool | str
If True, shifts the right- or left-most x coordinate of the left and
right surfaces, respectively, to be at zero. This is useful for viewing
inflated surface where hemispheres typically overlap. Can be "auto"
(default) use True with inflated surfaces and False otherwise
(Default: 'auto'). Only used when ``hemi='both'``.
.. versionchanged:: 0.23
Default changed to "auto".
show_toolbar : bool
If True, toolbars will be shown for each view.
offscreen : bool
If True, rendering will be done offscreen (not shown). Useful
mostly for generating images or screenshots, but can be buggy.
Use at your own risk.
interaction : str
Can be "trackball" (default) or "terrain", i.e. a turntable-style
camera.
units : str
Can be 'm' or 'mm' (default).
%(view_layout)s
silhouette : dict | bool
As a dict, it contains the ``color``, ``linewidth``, ``alpha`` opacity
and ``decimate`` (level of decimation between 0 and 1 or None) of the
brain's silhouette to display. If True, the default values are used
and if False, no silhouette will be displayed. Defaults to False.
theme : str | path-like
Can be "auto" (default), "light", or "dark" or a path-like to a
custom stylesheet. For Dark-Mode and automatic Dark-Mode-Detection,
:mod:`qdarkstyle` respectively and `darkdetect
<https://github.com/albertosottile/darkdetect>`__ is required.
show : bool
Display the window as soon as it is ready. Defaults to True.
Attributes
----------
geo : dict
A dictionary of PyVista surface objects for each hemisphere.
overlays : dict
The overlays.
Notes
-----
This table shows the capabilities of each Brain backend ("✓" for full
support, and "-" for partial support):
.. table::
:widths: auto
+---------------------------+--------------+---------------+
| 3D function: | surfer.Brain | mne.viz.Brain |
+===========================+==============+===============+
| add_annotation | ✓ | ✓ |
+---------------------------+--------------+---------------+
| add_data | ✓ | ✓ |
+---------------------------+--------------+---------------+
| add_foci | ✓ | ✓ |
+---------------------------+--------------+---------------+
| add_head | | ✓ |
+---------------------------+--------------+---------------+
| add_label | ✓ | ✓ |
+---------------------------+--------------+---------------+
| add_sensors | | ✓ |
+---------------------------+--------------+---------------+
| add_skull | | ✓ |
+---------------------------+--------------+---------------+
| add_text | ✓ | ✓ |
+---------------------------+--------------+---------------+
| add_volume_labels | | ✓ |
+---------------------------+--------------+---------------+
| close | ✓ | ✓ |
+---------------------------+--------------+---------------+
| data | ✓ | ✓ |
+---------------------------+--------------+---------------+
| foci | ✓ | |
+---------------------------+--------------+---------------+
| labels | ✓ | ✓ |
+---------------------------+--------------+---------------+
| remove_data | | ✓ |
+---------------------------+--------------+---------------+
| remove_foci | ✓ | |
+---------------------------+--------------+---------------+
| remove_head | | ✓ |
+---------------------------+--------------+---------------+
| remove_labels | ✓ | ✓ |
+---------------------------+--------------+---------------+
| remove_annotations | - | ✓ |
+---------------------------+--------------+---------------+
| remove_sensors | | ✓ |
+---------------------------+--------------+---------------+
| remove_skull | | ✓ |
+---------------------------+--------------+---------------+
| remove_text | | ✓ |
+---------------------------+--------------+---------------+
| remove_volume_labels | | ✓ |
+---------------------------+--------------+---------------+
| scale_data_colormap | ✓ | |
+---------------------------+--------------+---------------+
| save_image | ✓ | ✓ |
+---------------------------+--------------+---------------+
| save_movie | ✓ | ✓ |
+---------------------------+--------------+---------------+
| screenshot | ✓ | ✓ |
+---------------------------+--------------+---------------+
| show_view | ✓ | ✓ |
+---------------------------+--------------+---------------+
| TimeViewer | ✓ | ✓ |
+---------------------------+--------------+---------------+
| enable_depth_peeling | | ✓ |
+---------------------------+--------------+---------------+
| get_picked_points | | ✓ |
+---------------------------+--------------+---------------+
| add_data(volume) | | ✓ |
+---------------------------+--------------+---------------+
| view_layout | | ✓ |
+---------------------------+--------------+---------------+
| flatmaps | | ✓ |
+---------------------------+--------------+---------------+
| vertex picking | | ✓ |
+---------------------------+--------------+---------------+
| label picking | | ✓ |
+---------------------------+--------------+---------------+
"""
    def __init__(self, subject_id, hemi='both', surf='pial', title=None,
                 cortex="classic", alpha=1.0, size=800, background="black",
                 foreground=None, figure=None, subjects_dir=None,
                 views='auto', offset='auto', show_toolbar=False,
                 offscreen=False, interaction='trackball', units='mm',
                 view_layout='vertical', silhouette=False, theme='auto',
                 show=True):
        # See the class docstring for parameter documentation.
        from ..backends.renderer import backend, _get_renderer
        # hemi=None is treated as a volume-only rendering
        if hemi is None:
            hemi = 'vol'
        hemi = self._check_hemi(hemi, extras=('both', 'split', 'vol'))
        # 'both' and 'split' render the two cortical hemispheres;
        # otherwise only the requested one ('vol' has no surface hemis)
        if hemi in ('both', 'split'):
            self._hemis = ('lh', 'rh')
        else:
            assert hemi in ('lh', 'rh', 'vol')
            self._hemis = (hemi, )
        self._view_layout = _check_option('view_layout', view_layout,
                                          ('vertical', 'horizontal'))
        if figure is not None and not isinstance(figure, int):
            backend._check_3d_figure(figure)
        # the window title defaults to the subject name
        if title is None:
            self._title = subject_id
        else:
            self._title = title
        # start from trackball; the real ``interaction`` value is applied
        # at the end of __init__ via the ``interaction`` property setter
        self._interaction = 'trackball'
        self._bg_color = _to_rgb(background, name='background')
        # pick a foreground color that contrasts with the background
        if foreground is None:
            foreground = 'w' if sum(self._bg_color) < 2 else 'k'
        self._fg_color = _to_rgb(foreground, name='foreground')
        del background, foreground
        views = _check_views(surf, views, hemi)
        # subplot grid: one column per hemi ('split' needs two),
        # one row per view (transposed for 'horizontal' layout)
        col_dict = dict(lh=1, rh=1, both=1, split=2, vol=1)
        shape = (len(views), col_dict[hemi])
        if self._view_layout == 'horizontal':
            shape = shape[::-1]
        self._subplot_shape = shape
        # normalize ``size`` to a 2-tuple of ints (width, height)
        size = tuple(np.atleast_1d(size).round(0).astype(int).flat)
        if len(size) not in (1, 2):
            raise ValueError('"size" parameter must be an int or length-2 '
                             'sequence of ints.')
        size = size if len(size) == 2 else size * 2  # 1-tuple to 2-tuple
        subjects_dir = get_subjects_dir(subjects_dir)
        self.theme = theme
        # time-viewer widgets are set up lazily by setup_time_viewer()
        self.time_viewer = False
        self._hemi = hemi
        self._units = units
        self._alpha = float(alpha)
        self._subject_id = subject_id
        self._subjects_dir = subjects_dir
        self._views = views
        self._times = None
        # per-hemi lookup tables used for picking labels and annotations
        self._vertex_to_label_id = dict()
        self._annotation_labels = dict()
        self._labels = {'lh': list(), 'rh': list()}
        self._unnamed_label_id = 0  # can only grow
        self._annots = {'lh': list(), 'rh': list()}
        self._layered_meshes = dict()
        self._actors = dict()
        self._elevation_rng = [15, 165]  # range of motion of camera on theta
        self._lut_locked = None
        self._cleaned = False
        # default values for silhouette
        self._silhouette = {
            'color': self._bg_color,
            'line_width': 2,
            'alpha': alpha,
            'decimate': 0.9,
        }
        # silhouette may be a bool (on/off) or a dict of overrides
        _validate_type(silhouette, (dict, bool), 'silhouette')
        if isinstance(silhouette, dict):
            self._silhouette.update(silhouette)
            self.silhouette = True
        else:
            self.silhouette = silhouette
        self._scalar_bar = None
        # for now only one time label can be added
        # since it is the same for all figures
        self._time_label_added = False
        # array of data used by TimeViewer
        self._data = {}
        self.geo = {}
        self.set_time_interpolation('nearest')
        geo_kwargs = self._cortex_colormap(cortex)
        # evaluate at the midpoint of the used colormap
        val = -geo_kwargs['vmin'] / (geo_kwargs['vmax'] - geo_kwargs['vmin'])
        self._brain_color = geo_kwargs['colormap'](val)
        # load geometry for one or both hemispheres as necessary
        # offset='auto' shifts hemis apart only for surfaces where they
        # would otherwise overlap (inflated/flat), and only for 'both'
        _validate_type(offset, (str, bool), 'offset')
        if isinstance(offset, str):
            _check_option('offset', offset, ('auto',), extra='when str')
            offset = (surf in ('inflated', 'flat'))
        offset = None if (not offset or hemi != 'both') else 0.0
        logger.debug(f'Hemi offset: {offset}')
        self._renderer = _get_renderer(name=self._title, size=size,
                                       bgcolor=self._bg_color,
                                       shape=shape,
                                       fig=figure)
        self._renderer._window_close_connect(self._clean)
        self._renderer._window_set_theme(theme)
        self.plotter = self._renderer.plotter
        self._setup_canonical_rotation()
        # plot hemis
        for h in ('lh', 'rh'):
            if h not in self._hemis:
                continue  # don't make surface if not chosen
            # Initialize a Surface object as the geometry
            geo = _Surface(self._subject_id, h, surf, self._subjects_dir,
                           offset, units=self._units, x_dir=self._rigid[0, :3])
            # Load in the geometry and curvature
            geo.load_geometry()
            geo.load_curvature()
            self.geo[h] = geo
            for _, _, v in self._iter_views(h):
                # the mesh is created once per hemi and shared (as an
                # actor) across all subplots showing that hemi
                if self._layered_meshes.get(h) is None:
                    mesh = _LayeredMesh(
                        renderer=self._renderer,
                        vertices=self.geo[h].coords,
                        triangles=self.geo[h].faces,
                        normals=self.geo[h].nn,
                    )
                    mesh.map()  # send to GPU
                    # base layer: binary curvature shading
                    mesh.add_overlay(
                        scalars=self.geo[h].bin_curv,
                        colormap=geo_kwargs["colormap"],
                        rng=[geo_kwargs["vmin"], geo_kwargs["vmax"]],
                        opacity=alpha,
                        name='curv',
                    )
                    self._layered_meshes[h] = mesh
                    # add metadata to the mesh for picking
                    mesh._polydata._hemi = h
                else:
                    actor = self._layered_meshes[h]._actor
                    self._renderer.plotter.add_actor(actor, render=False)
                if self.silhouette:
                    mesh = self._layered_meshes[h]
                    self._renderer._silhouette(
                        mesh=mesh._polydata,
                        color=self._silhouette["color"],
                        line_width=self._silhouette["line_width"],
                        alpha=self._silhouette["alpha"],
                        decimate=self._silhouette["decimate"],
                    )
                self._renderer.set_camera(update=False, reset_camera=False,
                                          **views_dicts[h][v])
        # applies the user-requested interaction mode (property setter)
        self.interaction = interaction
        self._closed = False
        if show:
            self.show()
        # update the views once the geometry is all set
        for h in self._hemis:
            for ri, ci, v in self._iter_views(h):
                self.show_view(v, row=ri, col=ci, hemi=h)
        if surf == 'flat':
            self._renderer.set_interaction("rubber_band_2d")
    def _setup_canonical_rotation(self):
        # Compute a rigid (rotation + translation) transform stored in
        # ``self._rigid`` that brings this subject's anatomy into a
        # canonical (Talairach-aligned) orientation, so camera views look
        # consistent across subjects. Falls back to identity on failure.
        from ...coreg import fit_matched_points, _trans_from_params
        self._rigid = np.eye(4)
        try:
            xfm = read_talxfm(self._subject_id, self._subjects_dir)
        except Exception:
            # no Talairach transform available -> keep the identity
            return
        # XYZ+origin + halfway
        pts_tal = np.concatenate([np.eye(4)[:, :3], np.eye(3) * 0.5])
        pts_subj = apply_trans(invert_transform(xfm), pts_tal)
        # we fit with scaling enabled, but then discard it (we just need
        # the rigid-body components)
        params = fit_matched_points(pts_subj, pts_tal, scale=3, out='params')
        self._rigid[:] = _trans_from_params((True, True, False), params[:6])
    def setup_time_viewer(self, time_viewer=True, show_traces=True):
        """Configure the time viewer parameters.
        Parameters
        ----------
        time_viewer : bool
            If True, enable widgets interaction. Defaults to True.
        show_traces : bool
            If True, enable visualization of time traces. Defaults to True.
        Notes
        -----
        The keyboard shortcuts are the following:
        '?': Display help window
        'i': Toggle interface
        's': Apply auto-scaling
        'r': Restore original clim
        'c': Clear all traces
        'n': Shift the time forward by the playback speed
        'b': Shift the time backward by the playback speed
        'Space': Start/Pause playback
        'Up': Decrease camera elevation angle
        'Down': Increase camera elevation angle
        'Left': Decrease camera azimuth angle
        'Right': Increase camera azimuth angle
        """
        # idempotent: a second call is a no-op
        if self.time_viewer:
            return
        if not self._data:
            raise ValueError("No data to visualize. See ``add_data``.")
        self.time_viewer = time_viewer
        self.orientation = list(_lh_views_dict.keys())
        self.default_smoothing_range = [-1, 15]
        # Default configuration
        self.playback = False
        self.visibility = False
        # timer period for playback, targeting ~60 fps
        self.refresh_rate_ms = max(int(round(1000. / 60.)), 1)
        self.default_scaling_range = [0.2, 2.0]
        self.default_playback_speed_range = [0.01, 1]
        self.default_playback_speed_value = 0.01
        self.default_status_bar_msg = "Press ? for help"
        # allowed label time-course extraction modes, depending on
        # whether a source space ('src') is available
        self.default_label_extract_modes = {
            "stc": ["mean", "max"],
            "src": ["mean_flip", "pca_flip", "auto"],
        }
        self.default_trace_modes = ('vertex', 'label')
        self.annot = None
        self.label_extract_mode = None
        all_keys = ('lh', 'rh', 'vol')
        self.act_data_smooth = {key: (None, None) for key in all_keys}
        self.color_list = _get_color_list()
        # remove grey for better contrast on the brain
        self.color_list.remove("#7f7f7f")
        self.color_cycle = _ReuseCycle(self.color_list)
        self.mpl_canvas = None
        self.help_canvas = None
        self.rms = None
        # picking state: patches/points picked per hemi, plus the sphere
        # glyphs drawn at picked vertices
        self.picked_patches = {key: list() for key in all_keys}
        self.picked_points = {key: list() for key in all_keys}
        self.pick_table = dict()
        self._spheres = list()
        self._mouse_no_mvt = -1
        self.callbacks = dict()
        self.widgets = dict()
        self.keys = ('fmin', 'fmid', 'fmax')
        # Derived parameters:
        self.playback_speed = self.default_playback_speed_value
        _validate_type(show_traces, (bool, str, 'numeric'), 'show_traces')
        self.interactor_fraction = 0.25
        # show_traces may be: bool, 'separate'/'label'/'vertex', or a
        # float in (0, 1) giving the fraction of the window for traces
        if isinstance(show_traces, str):
            self.show_traces = True
            self.separate_canvas = False
            self.traces_mode = 'vertex'
            if show_traces == 'separate':
                self.separate_canvas = True
            elif show_traces == 'label':
                self.traces_mode = 'label'
            else:
                assert show_traces == 'vertex'  # guaranteed above
        else:
            if isinstance(show_traces, bool):
                self.show_traces = show_traces
            else:
                show_traces = float(show_traces)
                if not 0 < show_traces < 1:
                    raise ValueError(
                        'show traces, if numeric, must be between 0 and 1, '
                        f'got {show_traces}')
                self.show_traces = True
                self.interactor_fraction = show_traces
            self.traces_mode = 'vertex'
            self.separate_canvas = False
        del show_traces
        # build up the UI; order matters (e.g. the dock reads widgets
        # created by earlier steps, playback needs the "play" button)
        self._configure_time_label()
        self._configure_scalar_bar()
        self._configure_shortcuts()
        self._configure_picking()
        self._configure_tool_bar()
        self._configure_dock()
        self._configure_menu()
        self._configure_status_bar()
        self._configure_playback()
        self._configure_help()
        # show everything at the end
        self.toggle_interface()
        self._renderer.show()
        # sizes could change, update views
        for hemi in ('lh', 'rh'):
            for ri, ci, v in self._iter_views(hemi):
                self.show_view(view=v, row=ri, col=ci)
        self._renderer._process_events()
        self._renderer._update()
        # finally, show the MplCanvas
        if self.show_traces:
            self.mpl_canvas.show()
    @safe_event
    def _clean(self):
        # Tear down the viewer on window close, breaking reference cycles
        # between this object, the plotter, and VTK so everything can be
        # garbage collected.
        # resolve the reference cycle
        self.clear_glyphs()
        self.remove_annotations()
        # clear init actors
        for hemi in self._hemis:
            self._layered_meshes[hemi]._clean()
        self._clear_callbacks()
        self._clear_widgets()
        if getattr(self, 'mpl_canvas', None) is not None:
            self.mpl_canvas.clear()
        if getattr(self, 'act_data_smooth', None) is not None:
            for key in list(self.act_data_smooth.keys()):
                self.act_data_smooth[key] = None
        # XXX this should be done in PyVista
        for renderer in self._renderer._all_renderers:
            renderer.RemoveAllLights()
        # app_window cannot be set to None because it is used in __del__
        for key in ('lighting', 'interactor', '_RenderWindow'):
            setattr(self.plotter, key, None)
        # Qt LeaveEvent requires _Iren so we use _FakeIren instead of None
        # to resolve the ref to vtkGenericRenderWindowInteractor
        self.plotter._Iren = _FakeIren()
        if getattr(self.plotter, 'picker', None) is not None:
            self.plotter.picker = None
        # XXX end PyVista
        # null out remaining heavyweight attributes
        for key in ('plotter', 'window', 'dock', 'tool_bar', 'menu_bar',
                    'interactor', 'mpl_canvas', 'time_actor',
                    'picked_renderer', 'act_data_smooth', '_scalar_bar',
                    'actions', 'widgets', 'geo', '_data'):
            setattr(self, key, None)
        self._cleaned = True
def toggle_interface(self, value=None):
"""Toggle the interface.
Parameters
----------
value : bool | None
If True, the widgets are shown and if False, they
are hidden. If None, the state of the widgets is
toggled. Defaults to None.
"""
if value is None:
self.visibility = not self.visibility
else:
self.visibility = value
# update tool bar and dock
with self._renderer._window_ensure_minimum_sizes():
if self.visibility:
self._renderer._dock_show()
self._renderer._tool_bar_update_button_icon(
name="visibility", icon_name="visibility_on")
else:
self._renderer._dock_hide()
self._renderer._tool_bar_update_button_icon(
name="visibility", icon_name="visibility_off")
self._renderer._update()
    def apply_auto_scaling(self):
        """Detect automatically fitting scaling parameters."""
        # delegates to the shared LUT-updating helper
        self._update_auto_scaling()
    def restore_user_scaling(self):
        """Restore original scaling parameters."""
        # same helper as apply_auto_scaling, but reverting to user values
        self._update_auto_scaling(restore=True)
def toggle_playback(self, value=None):
"""Toggle time playback.
Parameters
----------
value : bool | None
If True, automatic time playback is enabled and if False,
it's disabled. If None, the state of time playback is toggled.
Defaults to None.
"""
if value is None:
self.playback = not self.playback
else:
self.playback = value
# update tool bar icon
if self.playback:
self._renderer._tool_bar_update_button_icon(
name="play", icon_name="pause")
else:
self._renderer._tool_bar_update_button_icon(
name="play", icon_name="play")
if self.playback:
time_data = self._data['time']
max_time = np.max(time_data)
if self._current_time == max_time: # start over
self.set_time_point(0) # first index
self._last_tick = time.time()
def reset(self):
"""Reset view and time step."""
self.reset_view()
max_time = len(self._data['time']) - 1
if max_time > 0:
self.callbacks["time"](
self._data["initial_time_idx"],
update_widget=True,
)
self._renderer._update()
    def set_playback_speed(self, speed):
        """Set the time playback speed.
        Parameters
        ----------
        speed : float
            The speed of the playback.
        """
        # read by _advance() to scale elapsed wall-clock time
        self.playback_speed = speed
    @safe_event
    def _play(self):
        # Timer callback driving playback; advances one step per tick.
        if self.playback:
            try:
                self._advance()
            except Exception:
                # stop playback so the error does not repeat every tick,
                # then let the exception propagate
                self.toggle_playback(value=False)
                raise
def _advance(self):
this_time = time.time()
delta = this_time - self._last_tick
self._last_tick = time.time()
time_data = self._data['time']
times = np.arange(self._n_times)
time_shift = delta * self.playback_speed
max_time = np.max(time_data)
time_point = min(self._current_time + time_shift, max_time)
# always use linear here -- this does not determine the data
# interpolation mode, it just finds where we are (in time) in
# terms of the time indices
idx = np.interp(time_point, time_data, times)
self.callbacks["time"](idx, update_widget=True)
if time_point == max_time:
self.toggle_playback(value=False)
    def _configure_time_label(self):
        # Style the text actor showing the current time (if one was
        # created by add_data): centered near the bottom of the view.
        self.time_actor = self._data.get('time_actor')
        if self.time_actor is not None:
            self.time_actor.SetPosition(0.5, 0.03)
            self.time_actor.GetTextProperty().SetJustificationToCentered()
            self.time_actor.GetTextProperty().BoldOn()
    def _configure_scalar_bar(self):
        # Place the colorbar vertically along the left edge of the view.
        if self._scalar_bar is not None:
            self._scalar_bar.SetOrientationToVertical()
            self._scalar_bar.SetHeight(0.6)
            self._scalar_bar.SetWidth(0.05)
            self._scalar_bar.SetPosition(0.02, 0.2)
    def _configure_dock_time_widget(self, layout=None):
        # Add the "min / current / max" time labels under the time slider.
        # Skipped entirely when there is a single time point.
        len_time = len(self._data['time']) - 1
        if len_time < 1:
            return
        layout = self._renderer.dock_layout if layout is None else layout
        hlayout = self._renderer._dock_add_layout(vertical=False)
        # placeholders; real values are filled in below
        self.widgets["min_time"] = self._renderer._dock_add_label(
            value="-", layout=hlayout)
        self._renderer._dock_add_stretch(hlayout)
        self.widgets["current_time"] = self._renderer._dock_add_label(
            value="x", layout=hlayout)
        self._renderer._dock_add_stretch(hlayout)
        self.widgets["max_time"] = self._renderer._dock_add_label(
            value="+", layout=hlayout)
        self._renderer._layout_add_widget(layout, hlayout)
        min_time = float(self._data['time'][0])
        max_time = float(self._data['time'][-1])
        self.widgets["min_time"].set_value(f"{min_time: .3f}")
        self.widgets["max_time"].set_value(f"{max_time: .3f}")
        self.widgets["current_time"].set_value(f"{self._current_time: .3f}")
    def _configure_dock_playback_widget(self, name):
        # Build the "Playback" group box: time slider, time labels and
        # playback speed. Widgets are created only when there is more
        # than one time point; otherwise they are recorded as None.
        layout = self._renderer._dock_add_group_box(name)
        len_time = len(self._data['time']) - 1
        # Time widget
        if len_time < 1:
            self.callbacks["time"] = None
            self.widgets["time"] = None
        else:
            self.callbacks["time"] = TimeCallBack(
                brain=self,
                callback=self.plot_time_line,
            )
            self.widgets["time"] = self._renderer._dock_add_slider(
                name="Time (s)",
                value=self._data['time_idx'],
                rng=[0, len_time],
                double=True,
                callback=self.callbacks["time"],
                compact=False,
                layout=layout,
            )
            # let the callback update its own widget
            self.callbacks["time"].widget = self.widgets["time"]
        # Time labels
        if len_time < 1:
            self.widgets["min_time"] = None
            self.widgets["max_time"] = None
            self.widgets["current_time"] = None
        else:
            self._configure_dock_time_widget(layout)
            self.callbacks["time"].label = self.widgets["current_time"]
        # Playback speed widget
        if len_time < 1:
            self.callbacks["playback_speed"] = None
            self.widgets["playback_speed"] = None
        else:
            self.callbacks["playback_speed"] = SmartCallBack(
                callback=self.set_playback_speed,
            )
            self.widgets["playback_speed"] = self._renderer._dock_add_spin_box(
                name="Speed",
                value=self.default_playback_speed_value,
                rng=self.default_playback_speed_range,
                callback=self.callbacks["playback_speed"],
                layout=layout,
            )
            self.callbacks["playback_speed"].widget = \
                self.widgets["playback_speed"]
        # Time label
        current_time = self._current_time
        assert current_time is not None  # should never be the case, float
        time_label = self._data['time_label']
        # time_label may be a formatting callable or a fixed string
        if callable(time_label):
            current_time = time_label(current_time)
        else:
            current_time = time_label
        if self.time_actor is not None:
            self.time_actor.SetInput(current_time)
        del current_time
    def _configure_dock_orientation_widget(self, name):
        # Build the "Orientation" group box: an optional renderer
        # (subplot) selector plus a combo box of camera views.
        layout = self._renderer._dock_add_group_box(name)
        # Renderer widget
        rends = [str(i) for i in range(len(self._renderer._all_renderers))]
        if len(rends) > 1:
            def select_renderer(idx):
                # make the chosen subplot the active one
                idx = int(idx)
                loc = self._renderer._index_to_loc(idx)
                self.plotter.subplot(*loc)
            self.callbacks["renderer"] = SmartCallBack(
                callback=select_renderer,
            )
            self.widgets["renderer"] = self._renderer._dock_add_combo_box(
                name="Renderer",
                value="0",
                rng=rends,
                callback=self.callbacks["renderer"],
                layout=layout,
            )
            self.callbacks["renderer"].widget = \
                self.widgets["renderer"]
        # Use 'lh' as a reference for orientation for 'both'
        if self._hemi == 'both':
            hemis_ref = ['lh']
        else:
            hemis_ref = self._hemis
        # map each renderer index to its default view/hemi (flat maps
        # have no orientation, hence None)
        orientation_data = [None] * len(rends)
        for hemi in hemis_ref:
            for ri, ci, v in self._iter_views(hemi):
                idx = self._renderer._loc_to_index((ri, ci))
                if v == 'flat':
                    _data = None
                else:
                    _data = dict(default=v, hemi=hemi, row=ri, col=ci)
                orientation_data[idx] = _data
        self.callbacks["orientation"] = ShowView(
            brain=self,
            data=orientation_data,
        )
        self.widgets["orientation"] = self._renderer._dock_add_combo_box(
            name=None,
            value=self.orientation[0],
            rng=self.orientation,
            callback=self.callbacks["orientation"],
            layout=layout,
        )
    def _configure_dock_colormap_widget(self, name):
        # Build the "Color Limits" group box: one slider + spin box per
        # colormap key (fmin/fmid/fmax) and rescale buttons.
        layout = self._renderer._dock_add_group_box(name)
        self._renderer._dock_add_label(
            value="min / mid / max",
            align=True,
            layout=layout,
        )
        up = UpdateLUT(brain=self)
        for key in self.keys:
            hlayout = self._renderer._dock_add_layout(vertical=False)
            rng = _get_range(self)
            # bind ``key`` at definition time (late-binding closure fix)
            self.callbacks[key] = lambda value, key=key: up(**{key: value})
            self.widgets[key] = self._renderer._dock_add_slider(
                name=None,
                value=self._data[key],
                rng=rng,
                callback=self.callbacks[key],
                double=True,
                layout=hlayout,
            )
            # matching spin box so exact values can be typed in
            self.widgets[f"entry_{key}"] = self._renderer._dock_add_spin_box(
                name=None,
                value=self._data[key],
                callback=self.callbacks[key],
                rng=rng,
                layout=hlayout,
            )
            up.widgets[key] = [self.widgets[key], self.widgets[f"entry_{key}"]]
            self._renderer._layout_add_widget(layout, hlayout)
        # reset / minus / plus
        hlayout = self._renderer._dock_add_layout(vertical=False)
        self._renderer._dock_add_label(
            value="Rescale",
            align=True,
            layout=hlayout,
        )
        self.widgets["reset"] = self._renderer._dock_add_button(
            name="↺",
            callback=self.restore_user_scaling,
            layout=hlayout,
        )
        # scale the color limits down/up by a fixed factor per click
        for key, char, val in (("fminus", "➖", 1.2 ** -0.25),
                               ("fplus", "➕", 1.2 ** 0.25)):
            self.callbacks[key] = UpdateColorbarScale(
                brain=self,
                factor=val,
            )
            self.widgets[key] = self._renderer._dock_add_button(
                name=char,
                callback=self.callbacks[key],
                layout=hlayout,
            )
        self._renderer._layout_add_widget(layout, hlayout)
        # register colorbar slider representations
        widgets = {key: self.widgets[key] for key in self.keys}
        for name in ("fmin", "fmid", "fmax", "fminus", "fplus"):
            self.callbacks[name].widgets = widgets
    def _configure_dock_trace_widget(self, name):
        # Build the "Trace" group box: annotation selection and label
        # extraction mode for time-course traces.
        if not self.show_traces:
            return
        # do not show trace mode for volumes
        if (self._data.get('src', None) is not None and
                self._data['src'].kind == 'volume'):
            self._configure_vertex_time_course()
            return
        layout = self._renderer._dock_add_group_box(name)
        # setup candidate annots
        def _set_annot(annot):
            # switch between vertex traces ('None') and label traces
            self.clear_glyphs()
            self.remove_labels()
            self.remove_annotations()
            self.annot = annot
            if annot == 'None':
                self.traces_mode = 'vertex'
                self._configure_vertex_time_course()
            else:
                self.traces_mode = 'label'
                self._configure_label_time_course()
            self._renderer._update()
        # setup label extraction parameters
        def _set_label_mode(mode):
            # re-extract and re-plot all currently picked labels with the
            # new extraction mode
            if self.traces_mode != 'label':
                return
            glyphs = copy.deepcopy(self.picked_patches)
            self.label_extract_mode = mode
            self.clear_glyphs()
            for hemi in self._hemis:
                for label_id in glyphs[hemi]:
                    label = self._annotation_labels[hemi][label_id]
                    vertex_id = label.vertices[0]
                    self._add_label_glyph(hemi, None, vertex_id)
            self.mpl_canvas.axes.relim()
            self.mpl_canvas.axes.autoscale_view()
            self.mpl_canvas.update_plot()
            self._renderer._update()
        from ...source_estimate import _get_allowed_label_modes
        from ...label import _read_annot_cands
        dir_name = op.join(self._subjects_dir, self._subject_id, 'label')
        cands = _read_annot_cands(dir_name, raise_error=False)
        # 'None' disables label mode and falls back to vertex traces
        cands = cands + ['None']
        self.annot = cands[0]
        stc = self._data["stc"]
        modes = _get_allowed_label_modes(stc)
        if self._data["src"] is None:
            # without a source space, source-based modes are unavailable
            modes = [m for m in modes if m not in
                     self.default_label_extract_modes["src"]]
        self.label_extract_mode = modes[-1]
        if self.traces_mode == 'vertex':
            _set_annot('None')
        else:
            _set_annot(self.annot)
        self.widgets["annotation"] = self._renderer._dock_add_combo_box(
            name="Annotation",
            value=self.annot,
            rng=cands,
            callback=_set_annot,
            layout=layout,
        )
        self.widgets["extract_mode"] = self._renderer._dock_add_combo_box(
            name="Extract mode",
            value=self.label_extract_mode,
            rng=modes,
            callback=_set_label_mode,
            layout=layout,
        )
    def _configure_dock(self):
        # Assemble the whole dock panel from its group-box sections,
        # then add the standalone smoothing spin box.
        self._renderer._dock_initialize()
        self._configure_dock_playback_widget(name="Playback")
        self._configure_dock_orientation_widget(name="Orientation")
        self._configure_dock_colormap_widget(name="Color Limits")
        self._configure_dock_trace_widget(name="Trace")
        # Smoothing widget
        self.callbacks["smoothing"] = SmartCallBack(
            callback=self.set_data_smoothing,
        )
        self.widgets["smoothing"] = self._renderer._dock_add_spin_box(
            name="Smoothing",
            value=self._data['smoothing_steps'],
            rng=self.default_smoothing_range,
            callback=self.callbacks["smoothing"],
            double=False
        )
        self.callbacks["smoothing"].widget = \
            self.widgets["smoothing"]
        self._renderer._dock_finalize()
    def _configure_playback(self):
        # Hook _play() into the renderer's timer; requires the "time" and
        # "play" widgets created by the dock/tool-bar configuration.
        self._renderer._playback_initialize(
            func=self._play,
            timeout=self.refresh_rate_ms,
            value=self._data['time_idx'],
            rng=[0, len(self._data['time']) - 1],
            time_widget=self.widgets["time"],
            play_widget=self.widgets["play"],
        )
    def _configure_mplcanvas(self):
        # Create the matplotlib canvas used for time-course traces and
        # match its colors to the brain window.
        # Get the fractional components for the brain and mpl
        self.mpl_canvas = self._renderer._window_get_mplcanvas(
            brain=self,
            interactor_fraction=self.interactor_fraction,
            show_traces=self.show_traces,
            separate_canvas=self.separate_canvas
        )
        xlim = [np.min(self._data['time']),
                np.max(self._data['time'])]
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=UserWarning)
            self.mpl_canvas.axes.set(xlim=xlim)
        if not self.separate_canvas:
            # embed the canvas below the 3D view in the same window
            self._renderer._window_adjust_mplcanvas_layout()
        self.mpl_canvas.set_color(
            bg_color=self._bg_color,
            fg_color=self._fg_color,
        )
    def _configure_vertex_time_course(self):
        # Set up vertex-mode traces: plot the RMS across sources, the
        # time line, and an initial picked point per hemi at the peak.
        if not self.show_traces:
            return
        if self.mpl_canvas is None:
            self._configure_mplcanvas()
        else:
            self.clear_glyphs()
        # plot RMS of the activation
        y = np.concatenate(list(v[0] for v in self.act_data_smooth.values()
                                if v[0] is not None))
        rms = np.linalg.norm(y, axis=0) / np.sqrt(len(y))
        del y
        self.rms, = self.mpl_canvas.axes.plot(
            self._data['time'], rms,
            lw=3, label='RMS', zorder=3, color=self._fg_color,
            alpha=0.5, ls=':')
        # now plot the time line
        self.plot_time_line(update=False)
        # then the picked points
        for idx, hemi in enumerate(['lh', 'rh', 'vol']):
            act_data = self.act_data_smooth.get(hemi, [None])[0]
            if act_data is None:
                continue
            hemi_data = self._data[hemi]
            vertices = hemi_data['vertices']
            # simulate a picked renderer
            if self._hemi in ('both', 'rh') or hemi == 'vol':
                idx = 0
            self.picked_renderer = self._renderer._all_renderers[idx]
            # initialize the default point
            if self._data['initial_time'] is not None:
                # pick at that time
                use_data = act_data[
                    :, [np.round(self._data['time_idx']).astype(int)]]
            else:
                use_data = act_data
            # pick the vertex with the largest absolute activation
            ind = np.unravel_index(np.argmax(np.abs(use_data), axis=None),
                                   use_data.shape)
            if hemi == 'vol':
                mesh = hemi_data['grid']
            else:
                mesh = self._layered_meshes[hemi]._polydata
            vertex_id = vertices[ind[0]]
            self._add_vertex_glyph(hemi, mesh, vertex_id, update=False)
    def _configure_picking(self):
        # Cache (activation data, smoothing matrix) per hemi and register
        # the mouse callbacks used for picking.
        # get data for each hemi
        from scipy import sparse
        for idx, hemi in enumerate(['vol', 'lh', 'rh']):
            hemi_data = self._data.get(hemi)
            if hemi_data is not None:
                act_data = hemi_data['array']
                if act_data.ndim == 3:
                    # vector data: collapse the 3 components to magnitude
                    act_data = np.linalg.norm(act_data, axis=1)
                smooth_mat = hemi_data.get('smooth_mat')
                vertices = hemi_data['vertices']
                if hemi == 'vol':
                    assert smooth_mat is None
                    # identity-like scatter matrix mapping sources to
                    # their grid vertices
                    smooth_mat = sparse.csr_matrix(
                        (np.ones(len(vertices)),
                         (vertices, np.arange(len(vertices)))))
                self.act_data_smooth[hemi] = (act_data, smooth_mat)
        self._renderer._update_picking_callback(
            self._on_mouse_move,
            self._on_button_press,
            self._on_button_release,
            self._on_pick
        )
    def _configure_tool_bar(self):
        # Build the tool bar: screenshot/movie export, visibility toggle,
        # play/pause, reset, auto-scale, clear traces, and help.
        self._renderer._tool_bar_load_icons()
        self._renderer._tool_bar_set_theme(self.theme)
        self._renderer._tool_bar_initialize(name="Toolbar")
        self._renderer._tool_bar_add_file_button(
            name="screenshot",
            desc="Take a screenshot",
            func=self.save_image,
        )
        self._renderer._tool_bar_add_file_button(
            name="movie",
            desc="Save movie...",
            func=lambda filename: self.save_movie(
                filename=filename,
                time_dilation=(1. / self.playback_speed)),
            shortcut="ctrl+shift+s",
        )
        self._renderer._tool_bar_add_button(
            name="visibility",
            desc="Toggle Controls",
            func=self.toggle_interface,
            icon_name="visibility_on"
        )
        # kept in self.widgets so _configure_playback can find it
        self.widgets["play"] = self._renderer._tool_bar_add_play_button(
            name="play",
            desc="Play/Pause",
            func=self.toggle_playback,
            shortcut=" ",
        )
        self._renderer._tool_bar_add_button(
            name="reset",
            desc="Reset",
            func=self.reset,
        )
        self._renderer._tool_bar_add_button(
            name="scale",
            desc="Auto-Scale",
            func=self.apply_auto_scaling,
        )
        self._renderer._tool_bar_add_button(
            name="clear",
            desc="Clear traces",
            func=self.clear_glyphs,
        )
        self._renderer._tool_bar_add_spacer()
        self._renderer._tool_bar_add_button(
            name="help",
            desc="Help",
            func=self.help,
            shortcut="?",
        )
def _shift_time(self, op):
self.callbacks["time"](
value=(op(self._current_time, self.playback_speed)),
time_as_index=False,
update_widget=True,
)
def _rotate_azimuth(self, value):
azimuth = (self._renderer.figure._azimuth + value) % 360
self._renderer.set_camera(azimuth=azimuth, reset_camera=False)
def _rotate_elevation(self, value):
elevation = np.clip(
self._renderer.figure._elevation + value,
self._elevation_rng[0],
self._elevation_rng[1],
)
self._renderer.set_camera(elevation=elevation, reset_camera=False)
    def _configure_shortcuts(self):
        # Register the keyboard shortcuts listed in the
        # setup_time_viewer docstring.
        # First, we remove the default bindings:
        self._clear_callbacks()
        # Then, we add our own:
        self.plotter.add_key_event("i", self.toggle_interface)
        self.plotter.add_key_event("s", self.apply_auto_scaling)
        self.plotter.add_key_event("r", self.restore_user_scaling)
        self.plotter.add_key_event("c", self.clear_glyphs)
        self.plotter.add_key_event("n", partial(self._shift_time,
                                                op=lambda x, y: x + y))
        self.plotter.add_key_event("b", partial(self._shift_time,
                                                op=lambda x, y: x - y))
        # arrow keys rotate the camera by a fixed angular step
        for key, func, sign in (("Left", self._rotate_azimuth, 1),
                                ("Right", self._rotate_azimuth, -1),
                                ("Up", self._rotate_elevation, 1),
                                ("Down", self._rotate_elevation, -1)):
            self.plotter.add_key_event(key, partial(func, sign * _ARROW_MOVE))
    def _configure_menu(self):
        # Create the menu bar with a single "Help" entry.
        self._renderer._menu_initialize()
        self._renderer._menu_add_submenu(
            name="help",
            desc="Help",
        )
        self._renderer._menu_add_button(
            menu_name="help",
            name="help",
            desc="Show MNE key bindings\t?",
            func=self.help,
        )
    def _configure_status_bar(self):
        # Create the status bar: a message label plus a (hidden until
        # needed) progress bar.
        self._renderer._status_bar_initialize()
        self.status_msg = self._renderer._status_bar_add_label(
            self.default_status_bar_msg, stretch=1)
        self.status_progress = self._renderer._status_bar_add_progress_bar()
        if self.status_progress is not None:
            self.status_progress.hide()
    def _on_mouse_move(self, vtk_picker, event):
        # Count down the small "no movement" budget set on button press;
        # used in _on_button_release to distinguish a click from a drag.
        if self._mouse_no_mvt:
            self._mouse_no_mvt -= 1
    def _on_button_press(self, vtk_picker, event):
        # Allow up to 2 mouse-move events before the release no longer
        # counts as a click (see _on_mouse_move / _on_button_release).
        self._mouse_no_mvt = 2
    def _on_button_release(self, vtk_picker, event):
        # If the press/release pair qualified as a click (little or no
        # mouse movement in between), trigger a pick at that position.
        if self._mouse_no_mvt > 0:
            x, y = vtk_picker.GetEventPosition()
            # programmatically detect the picked renderer
            try:
                # pyvista<0.30.0
                self.picked_renderer = \
                    self.plotter.iren.FindPokedRenderer(x, y)
            except AttributeError:
                # pyvista>=0.30.0
                self.picked_renderer = \
                    self.plotter.iren.interactor.FindPokedRenderer(x, y)
            # trigger the pick
            self.plotter.picker.Pick(x, y, 0, self.picked_renderer)
        self._mouse_no_mvt = 0
    def _on_pick(self, vtk_picker, event):
        # Handle a completed pick: toggle an existing sphere glyph, or
        # add a new vertex/label glyph on the picked surface or volume.
        if not self.show_traces:
            return
        # vtk_picker is a vtkCellPicker
        cell_id = vtk_picker.GetCellId()
        mesh = vtk_picker.GetDataSet()
        if mesh is None or cell_id == -1 or not self._mouse_no_mvt:
            return  # don't pick
        # 1) Check to see if there are any spheres along the ray
        if len(self._spheres):
            collection = vtk_picker.GetProp3Ds()
            found_sphere = None
            for ii in range(collection.GetNumberOfItems()):
                actor = collection.GetItemAsObject(ii)
                for sphere in self._spheres:
                    if any(a is actor for a in sphere._actors):
                        found_sphere = sphere
                        break
                if found_sphere is not None:
                    break
            if found_sphere is not None:
                assert found_sphere._is_glyph
                mesh = found_sphere
        # 2) Remove sphere if it's what we have
        if hasattr(mesh, "_is_glyph"):
            self._remove_vertex_glyph(mesh)
            return
        # 3) Otherwise, pick the objects in the scene
        try:
            hemi = mesh._hemi
        except AttributeError:  # volume
            hemi = 'vol'
        else:
            assert hemi in ('lh', 'rh')
        if self.act_data_smooth[hemi][0] is None:  # no data to add for hemi
            return
        pos = np.array(vtk_picker.GetPickPosition())
        if hemi == 'vol':
            # VTK will give us the point closest to the viewer in the vol.
            # We want to pick the point with the maximum value along the
            # camera-to-click array, which fortunately we can get "just"
            # by inspecting the points that are sufficiently close to the
            # ray.
            grid = mesh = self._data[hemi]['grid']
            vertices = self._data[hemi]['vertices']
            coords = self._data[hemi]['grid_coords'][vertices]
            scalars = _cell_data(grid)['values'][vertices]
            spacing = np.array(grid.GetSpacing())
            max_dist = np.linalg.norm(spacing) / 2.
            origin = vtk_picker.GetRenderer().GetActiveCamera().GetPosition()
            ori = pos - origin
            ori /= np.linalg.norm(ori)
            # the magic formula: distance from a ray to a given point
            dists = np.linalg.norm(np.cross(ori, coords - pos), axis=1)
            assert dists.shape == (len(coords),)
            mask = dists <= max_dist
            idx = np.where(mask)[0]
            if len(idx) == 0:
                return  # weird point on edge of volume?
            # useful for debugging the ray by mapping it into the volume:
            # dists = dists - dists.min()
            # dists = (1. - dists / dists.max()) * self._cmap_range[1]
            # _cell_data(grid)['values'][vertices] = dists * mask
            idx = idx[np.argmax(np.abs(scalars[idx]))]
            vertex_id = vertices[idx]
            # Naive way: convert pos directly to idx; i.e., apply mri_src_t
            # shape = self._data[hemi]['grid_shape']
            # taking into account the cell vs point difference (spacing/2)
            # shift = np.array(grid.GetOrigin()) + spacing / 2.
            # ijk = np.round((pos - shift) / spacing).astype(int)
            # vertex_id = np.ravel_multi_index(ijk, shape, order='F')
        else:
            # surface: take the cell's vertex closest to the pick position
            vtk_cell = mesh.GetCell(cell_id)
            cell = [vtk_cell.GetPointId(point_id) for point_id
                    in range(vtk_cell.GetNumberOfPoints())]
            vertices = mesh.points[cell]
            idx = np.argmin(abs(vertices - pos), axis=0)
            vertex_id = cell[idx[0]]
        if self.traces_mode == 'label':
            self._add_label_glyph(hemi, mesh, vertex_id)
        else:
            self._add_vertex_glyph(hemi, mesh, vertex_id)
def _add_label_glyph(self, hemi, mesh, vertex_id):
if hemi == 'vol':
return
label_id = self._vertex_to_label_id[hemi][vertex_id]
label = self._annotation_labels[hemi][label_id]
# remove the patch if already picked
if label_id in self.picked_patches[hemi]:
self._remove_label_glyph(hemi, label_id)
return
if hemi == label.hemi:
self.add_label(label, borders=True, reset_camera=False)
self.picked_patches[hemi].append(label_id)
    def _remove_label_glyph(self, hemi, label_id):
        # Un-pick a label: drop its trace line, return its color to the
        # cycle, remove its mesh overlay, and forget the pick.
        label = self._annotation_labels[hemi][label_id]
        label._line.remove()
        self.color_cycle.restore(label._color)
        self.mpl_canvas.update_plot()
        self._layered_meshes[hemi].remove_overlay(label.name)
        self.picked_patches[hemi].remove(label_id)
    def _add_vertex_glyph(self, hemi, mesh, vertex_id, update=True):
        """Add a picked-vertex sphere glyph and its time-course trace.

        Returns the last sphere created (one per view), or None when the
        vertex is already picked or the hemi has no data.
        """
        if vertex_id in self.picked_points[hemi]:
            return
        # skip if the wrong hemi is selected
        if self.act_data_smooth[hemi][0] is None:
            return
        color = next(self.color_cycle)
        line = self.plot_time_course(hemi, vertex_id, color, update=update)
        if hemi == 'vol':
            # volume: vertex_id indexes cells of the uniform grid
            # (Fortran order), so recover (i, j, k) and average the voxel's
            # corner points to get its center
            ijk = np.unravel_index(
                vertex_id, np.array(mesh.GetDimensions()) - 1, order='F')
            # should just be GetCentroid(center), but apparently it's VTK9+:
            # center = np.empty(3)
            # voxel.GetCentroid(center)
            voxel = mesh.GetCell(*ijk)
            pts = voxel.GetPoints()
            n_pts = pts.GetNumberOfPoints()
            center = np.empty((n_pts, 3))
            for ii in range(pts.GetNumberOfPoints()):
                pts.GetPoint(ii, center[ii])
            center = np.mean(center, axis=0)
        else:
            # surface: the vertex position is the glyph center
            center = mesh.GetPoints().GetPoint(vertex_id)
        del mesh
        # from the picked renderer to the subplot coords
        try:
            lst = self._renderer._all_renderers._renderers
        except AttributeError:
            lst = self._renderer._all_renderers
        rindex = lst.index(self.picked_renderer)
        row, col = self._renderer._index_to_loc(rindex)
        actors = list()
        spheres = list()
        for _ in self._iter_views(hemi):
            # Using _sphere() instead of renderer.sphere() for 2 reasons:
            # 1) renderer.sphere() fails on Windows in a scenario where a lot
            # of picking requests are done in a short span of time (could be
            # mitigated with synchronization/delay?)
            # 2) the glyph filter is used in renderer.sphere() but only one
            # sphere is required in this function.
            actor, sphere = self._renderer._sphere(
                center=np.array(center),
                color=color,
                radius=4.0,
            )
            actors.append(actor)
            spheres.append(sphere)
        # add metadata for picking
        for sphere in spheres:
            sphere._is_glyph = True
            sphere._hemi = hemi
            sphere._line = line
            sphere._actors = actors
            sphere._color = color
            sphere._vertex_id = vertex_id
        # register the pick so it can be toggled/cleared later
        self.picked_points[hemi].append(vertex_id)
        self._spheres.extend(spheres)
        self.pick_table[vertex_id] = spheres
        return sphere
    def _remove_vertex_glyph(self, mesh, render=True):
        """Remove a picked-vertex glyph and its time-course trace.

        ``mesh`` is one of the sphere glyphs created by
        ``_add_vertex_glyph`` (it carries ``_vertex_id``/``_hemi``/
        ``_color`` metadata); all sibling spheres for the same vertex are
        removed together.
        """
        vertex_id = mesh._vertex_id
        if vertex_id not in self.pick_table:
            return
        hemi = mesh._hemi
        color = mesh._color
        spheres = self.pick_table[vertex_id]
        # the matplotlib line is shared by all spheres; remove it once
        spheres[0]._line.remove()
        self.mpl_canvas.update_plot()
        self.picked_points[hemi].remove(vertex_id)
        with warnings.catch_warnings(record=True):
            # We intentionally ignore these in case we have traversed the
            # entire color cycle
            warnings.simplefilter('ignore')
            self.color_cycle.restore(color)
        for sphere in spheres:
            # remove all actors
            self.plotter.remove_actor(sphere._actors, render=render)
            sphere._actors = None
            self._spheres.pop(self._spheres.index(sphere))
        self.pick_table.pop(vertex_id)
def clear_glyphs(self):
"""Clear the picking glyphs."""
if not self.time_viewer:
return
for sphere in list(self._spheres): # will remove itself, so copy
self._remove_vertex_glyph(sphere, render=False)
assert sum(len(v) for v in self.picked_points.values()) == 0
assert len(self.pick_table) == 0
assert len(self._spheres) == 0
for hemi in self._hemis:
for label_id in list(self.picked_patches[hemi]):
self._remove_label_glyph(hemi, label_id)
assert sum(len(v) for v in self.picked_patches.values()) == 0
if self.rms is not None:
self.rms.remove()
self.rms = None
self._renderer._update()
    def plot_time_course(self, hemi, vertex_id, color, update=True):
        """Plot the vertex time course.

        Parameters
        ----------
        hemi : str
            The hemisphere id of the vertex.
        vertex_id : int
            The vertex identifier in the mesh.
        color : matplotlib color
            The color of the time course.
        update : bool
            Force an update of the plot. Defaults to True.

        Returns
        -------
        line : matplotlib object
            The time line object.
        """
        if self.mpl_canvas is None:
            return
        time = self._data['time'].copy()  # avoid circular ref
        mni = None
        if hemi == 'vol':
            hemi_str = 'V'
            # map the grid index to MNI via Talairach transform
            xfm = read_talxfm(
                self._subject_id, self._subjects_dir)
            if self._units == 'mm':
                xfm['trans'][:3, 3] *= 1000.
            ijk = np.unravel_index(
                vertex_id, self._data[hemi]['grid_shape'], order='F')
            src_mri_t = self._data[hemi]['grid_src_mri_t']
            mni = apply_trans(np.dot(xfm['trans'], src_mri_t), ijk)
        else:
            hemi_str = 'L' if hemi == 'lh' else 'R'
            try:
                mni = vertex_to_mni(
                    vertices=vertex_id,
                    hemis=0 if hemi == 'lh' else 1,
                    subject=self._subject_id,
                    subjects_dir=self._subjects_dir
                )
            except Exception:
                # best effort: MNI coordinates are informational only
                mni = None
        if mni is not None:
            mni = ' MNI: ' + ', '.join('%5.1f' % m for m in mni)
        else:
            mni = ''
        label = "{}:{}{}".format(hemi_str, str(vertex_id).ljust(6), mni)
        act_data, smooth = self.act_data_smooth[hemi]
        if smooth is not None:
            # project through the smoothing matrix to the dense surface
            act_data = smooth[vertex_id].dot(act_data)[0]
        else:
            act_data = act_data[vertex_id].copy()
        line = self.mpl_canvas.plot(
            time,
            act_data,
            label=label,
            lw=1.,
            color=color,
            zorder=4,
            update=update,
        )
        return line
def plot_time_line(self, update=True):
"""Add the time line to the MPL widget.
Parameters
----------
update : bool
Force an update of the plot. Defaults to True.
"""
if self.mpl_canvas is None:
return
if isinstance(self.show_traces, bool) and self.show_traces:
# add time information
current_time = self._current_time
if not hasattr(self, "time_line"):
self.time_line = self.mpl_canvas.plot_time_line(
x=current_time,
label='time',
color=self._fg_color,
lw=1,
update=update,
)
self.time_line.set_xdata(current_time)
if update:
self.mpl_canvas.update_plot()
def _configure_help(self):
pairs = [
('?', 'Display help window'),
('i', 'Toggle interface'),
('s', 'Apply auto-scaling'),
('r', 'Restore original clim'),
('c', 'Clear all traces'),
('n', 'Shift the time forward by the playback speed'),
('b', 'Shift the time backward by the playback speed'),
('Space', 'Start/Pause playback'),
('Up', 'Decrease camera elevation angle'),
('Down', 'Increase camera elevation angle'),
('Left', 'Decrease camera azimuth angle'),
('Right', 'Increase camera azimuth angle'),
]
text1, text2 = zip(*pairs)
text1 = '\n'.join(text1)
text2 = '\n'.join(text2)
self.help_canvas = self._renderer._window_get_simple_canvas(
width=5, height=2, dpi=80)
_show_help_fig(
col1=text1,
col2=text2,
fig_help=self.help_canvas.fig,
ax=self.help_canvas.axes,
show=False,
)
    def help(self):
        """Display the help window listing keyboard shortcuts."""
        self.help_canvas.show()
def _clear_callbacks(self):
if not hasattr(self, 'callbacks'):
return
for callback in self.callbacks.values():
if callback is not None:
for key in ('plotter', 'brain', 'callback',
'widget', 'widgets'):
setattr(callback, key, None)
self.callbacks.clear()
# Remove the default key binding
if getattr(self, "iren", None) is not None:
self.plotter.iren.clear_key_event_callbacks()
def _clear_widgets(self):
if not hasattr(self, 'widgets'):
return
for widget in self.widgets.values():
if widget is not None:
for key in ('triggered', 'valueChanged'):
setattr(widget, key, None)
self.widgets.clear()
    @property
    def interaction(self):
        """The interaction style (str)."""
        return self._interaction
    @interaction.setter
    def interaction(self, interaction):
        """Set the interaction style ('trackball' or 'terrain')."""
        _validate_type(interaction, str, 'interaction')
        _check_option('interaction', interaction, ('trackball', 'terrain'))
        # apply to every subplot; iterating 'vol' visits all views
        for _ in self._iter_views('vol'):  # will traverse all
            self._renderer.set_interaction(interaction)
def _cortex_colormap(self, cortex):
"""Return the colormap corresponding to the cortex."""
from .._3d import _get_cmap
from matplotlib.colors import ListedColormap
colormap_map = dict(classic=dict(colormap="Greys",
vmin=-1, vmax=2),
high_contrast=dict(colormap="Greys",
vmin=-.1, vmax=1.3),
low_contrast=dict(colormap="Greys",
vmin=-5, vmax=5),
bone=dict(colormap="bone_r",
vmin=-.2, vmax=2),
)
_validate_type(cortex, (str, dict, list, tuple), 'cortex')
if isinstance(cortex, str):
if cortex in colormap_map:
cortex = colormap_map[cortex]
else:
cortex = [cortex] * 2
if isinstance(cortex, (list, tuple)):
_check_option('len(cortex)', len(cortex), (2, 3),
extra='when cortex is a list or tuple')
if len(cortex) == 3:
cortex = [cortex] * 2
cortex = list(cortex)
for ci, c in enumerate(cortex):
cortex[ci] = _to_rgb(c, name='cortex')
cortex = dict(
colormap=ListedColormap(cortex, name='custom binary'),
vmin=0, vmax=1)
cortex = dict(
vmin=float(cortex['vmin']),
vmax=float(cortex['vmax']),
colormap=_get_cmap(cortex['colormap']),
)
return cortex
def _remove(self, item, render=False):
"""Remove actors from the rendered scene."""
if item in self._actors:
logger.debug(
f'Removing {len(self._actors[item])} {item} actor(s)')
for actor in self._actors[item]:
self._renderer.plotter.remove_actor(actor)
self._actors.pop(item) # remove actor list
if render:
self._renderer._update()
def _add_actor(self, item, actor):
"""Add an actor to the internal register."""
if item in self._actors: # allows adding more than one
self._actors[item].append(actor)
else:
self._actors[item] = [actor]
    @verbose
    def add_data(self, array, fmin=None, fmid=None, fmax=None,
                 thresh=None, center=None, transparent=False, colormap="auto",
                 alpha=1, vertices=None, smoothing_steps=None, time=None,
                 time_label="auto", colorbar=True,
                 hemi=None, remove_existing=None, time_label_size=None,
                 initial_time=None, scale_factor=None, vector_alpha=None,
                 clim=None, src=None, volume_options=0.4, colorbar_kwargs=None,
                 verbose=None):
        """Display data from a numpy array on the surface or volume.

        This provides a similar interface to
        :meth:`surfer.Brain.add_overlay`, but it displays
        it with a single colormap. It offers more flexibility over the
        colormap, and provides a way to display four-dimensional data
        (i.e., a timecourse) or five-dimensional data (i.e., a
        vector-valued timecourse).

        .. note:: ``fmin`` sets the low end of the colormap, and is separate
                  from thresh (this is a different convention from
                  :meth:`surfer.Brain.add_overlay`).

        Parameters
        ----------
        array : numpy array, shape (n_vertices[, 3][, n_times])
            Data array. For the data to be understood as vector-valued
            (3 values per vertex corresponding to X/Y/Z surface RAS),
            then ``array`` must be have all 3 dimensions.
            If vectors with no time dimension are desired, consider using a
            singleton (e.g., ``np.newaxis``) to create a "time" dimension
            and pass ``time_label=None`` (vector values are not supported).
        %(fmin_fmid_fmax)s
        %(thresh)s
        %(center)s
        %(transparent)s
        colormap : str, list of color, or array
            Name of matplotlib colormap to use, a list of matplotlib colors,
            or a custom look up table (an n x 4 array coded with RBGA values
            between 0 and 255), the default "auto" chooses a default divergent
            colormap, if "center" is given (currently "icefire"), otherwise a
            default sequential colormap (currently "rocket").
        alpha : float in [0, 1]
            Alpha level to control opacity of the overlay.
        vertices : numpy array
            Vertices for which the data is defined (needed if
            ``len(data) < nvtx``).
        smoothing_steps : int or None
            Number of smoothing steps (smoothing is used if len(data) < nvtx)
            The value 'nearest' can be used too. None (default) will use as
            many as necessary to fill the surface.
        time : numpy array
            Time points in the data array (if data is 2D or 3D).
        %(time_label)s
        colorbar : bool
            Whether to add a colorbar to the figure. Can also be a tuple
            to give the (row, col) index of where to put the colorbar.
        hemi : str | None
            If None, it is assumed to belong to the hemisphere being
            shown. If two hemispheres are being shown, an error will
            be thrown.
        remove_existing : bool
            Not supported yet.
            Remove surface added by previous "add_data" call. Useful for
            conserving memory when displaying different data in a loop.
        time_label_size : int
            Font size of the time label (default 14).
        initial_time : float | None
            Time initially shown in the plot. ``None`` to use the first time
            sample (default).
        scale_factor : float | None (default)
            The scale factor to use when displaying glyphs for vector-valued
            data.
        vector_alpha : float | None
            Alpha level to control opacity of the arrows. Only used for
            vector-valued data. If None (default), ``alpha`` is used.
        clim : dict
            Original clim arguments.
        %(src_volume_options)s
        colorbar_kwargs : dict | None
            Options to pass to :meth:`pyvista.Plotter.add_scalar_bar`
            (e.g., ``dict(title_font_size=10)``).
        %(verbose)s

        Notes
        -----
        If the data is defined for a subset of vertices (specified
        by the "vertices" parameter), a smoothing method is used to interpolate
        the data onto the high resolution surface. If the data is defined for
        subsampled version of the surface, smoothing_steps can be set to None,
        in which case only as many smoothing steps are applied until the whole
        surface is filled with non-zeros.

        Due to a VTK alpha rendering bug, ``vector_alpha`` is
        clamped to be strictly < 1.
        """
        _validate_type(transparent, bool, 'transparent')
        _validate_type(vector_alpha, ('numeric', None), 'vector_alpha')
        _validate_type(scale_factor, ('numeric', None), 'scale_factor')
        # those parameters are not supported yet, only None is allowed
        _check_option('thresh', thresh, [None])
        _check_option('remove_existing', remove_existing, [None])
        _validate_type(time_label_size, (None, 'numeric'), 'time_label_size')
        if time_label_size is not None:
            time_label_size = float(time_label_size)
            if time_label_size < 0:
                raise ValueError('time_label_size must be positive, got '
                                 f'{time_label_size}')
        hemi = self._check_hemi(hemi, extras=['vol'])
        stc, array, vertices = self._check_stc(hemi, array, vertices)
        array = np.asarray(array)
        vector_alpha = alpha if vector_alpha is None else vector_alpha
        self._data['vector_alpha'] = vector_alpha
        self._data['scale_factor'] = scale_factor
        # Create time array and add label if > 1D
        if array.ndim <= 1:
            time_idx = 0
        else:
            # check time array
            if time is None:
                time = np.arange(array.shape[-1])
            else:
                time = np.asarray(time)
                if time.shape != (array.shape[-1],):
                    raise ValueError('time has shape %s, but need shape %s '
                                     '(array.shape[-1])' %
                                     (time.shape, (array.shape[-1],)))
            self._data["time"] = time
            # the time axis must agree with any previously added data
            if self._n_times is None:
                self._times = time
            elif len(time) != self._n_times:
                raise ValueError("New n_times is different from previous "
                                 "n_times")
            elif not np.array_equal(time, self._times):
                raise ValueError("Not all time values are consistent with "
                                 "previously set times.")
            # initial time
            if initial_time is None:
                time_idx = 0
            else:
                time_idx = self._to_time_index(initial_time)
        # time label
        time_label, _ = _handle_time(time_label, 's', time)
        y_txt = 0.05 + 0.1 * bool(colorbar)
        if array.ndim == 3:
            # vector-valued data: second axis must hold X/Y/Z components
            if array.shape[1] != 3:
                raise ValueError('If array has 3 dimensions, array.shape[1] '
                                 'must equal 3, got %s' % (array.shape[1],))
        fmin, fmid, fmax = _update_limits(
            fmin, fmid, fmax, center, array
        )
        if colormap == 'auto':
            colormap = 'mne' if center is not None else 'hot'
        # normalize smoothing_steps: None -> 7, 'nearest' -> -1 sentinel
        if smoothing_steps is None:
            smoothing_steps = 7
        elif smoothing_steps == 'nearest':
            smoothing_steps = -1
        elif isinstance(smoothing_steps, int):
            if smoothing_steps < 0:
                raise ValueError('Expected value of `smoothing_steps` is'
                                 ' positive but {} was given.'.format(
                                     smoothing_steps))
        else:
            raise TypeError('Expected type of `smoothing_steps` is int or'
                            ' NoneType but {} was given.'.format(
                                type(smoothing_steps)))
        self._data['stc'] = stc
        self._data['src'] = src
        self._data['smoothing_steps'] = smoothing_steps
        self._data['clim'] = clim
        self._data['time'] = time
        self._data['initial_time'] = initial_time
        self._data['time_label'] = time_label
        self._data['initial_time_idx'] = time_idx
        self._data['time_idx'] = time_idx
        self._data['transparent'] = transparent
        # data specific for a hemi
        self._data[hemi] = dict()
        self._data[hemi]['glyph_dataset'] = None
        self._data[hemi]['glyph_mapper'] = None
        self._data[hemi]['glyph_actor'] = None
        self._data[hemi]['array'] = array
        self._data[hemi]['vertices'] = vertices
        self._data['alpha'] = alpha
        self._data['colormap'] = colormap
        self._data['center'] = center
        self._data['fmin'] = fmin
        self._data['fmid'] = fmid
        self._data['fmax'] = fmax
        self.update_lut()
        # 1) add the surfaces first
        actor = None
        for _ in self._iter_views(hemi):
            if hemi in ('lh', 'rh'):
                actor = self._layered_meshes[hemi]._actor
            else:
                src_vol = src[2:] if src.kind == 'mixed' else src
                actor, _ = self._add_volume_data(hemi, src_vol, volume_options)
        assert actor is not None  # should have added one
        self._add_actor('data', actor)
        # 2) update time and smoothing properties
        # set_data_smoothing calls "set_time_point" for us, which will set
        # _current_time
        self.set_time_interpolation(self.time_interpolation)
        self.set_data_smoothing(self._data['smoothing_steps'])
        # 3) add the other actors
        if colorbar is True:
            # bottom left by default
            colorbar = (self._subplot_shape[0] - 1, 0)
        for ri, ci, v in self._iter_views(hemi):
            # Add the time label to the bottommost view
            do = (ri, ci) == colorbar
            if not self._time_label_added and time_label is not None and do:
                time_actor = self._renderer.text2d(
                    x_window=0.95, y_window=y_txt,
                    color=self._fg_color,
                    size=time_label_size,
                    text=time_label(self._current_time),
                    justification='right'
                )
                self._data['time_actor'] = time_actor
                self._time_label_added = True
            if colorbar and self._scalar_bar is None and do:
                kwargs = dict(source=actor, n_labels=8, color=self._fg_color,
                              bgcolor=self._brain_color[:3])
                kwargs.update(colorbar_kwargs or {})
                self._scalar_bar = self._renderer.scalarbar(**kwargs)
            self._renderer.set_camera(
                update=False, reset_camera=False, **views_dicts[hemi][v])
        # 4) update the scalar bar and opacity
        self.update_lut(alpha=alpha)
    def remove_data(self):
        """Remove rendered data from the mesh.

        Removes the actors registered under ``'data'`` and re-renders.
        """
        self._remove('data', render=True)
def _iter_views(self, hemi):
"""Iterate over rows and columns that need to be added to."""
hemi_dict = dict(lh=[0], rh=[0], vol=[0])
if self._hemi == 'split':
hemi_dict.update(rh=[1], vol=[0, 1])
for vi, view in enumerate(self._views):
view_dict = dict(lh=[vi], rh=[vi], vol=[vi])
if self._hemi == 'split':
view_dict.update(vol=[vi, vi])
if self._view_layout == 'vertical':
rows, cols = view_dict, hemi_dict # views are rows, hemis cols
else:
rows, cols = hemi_dict, view_dict # hemis are rows, views cols
for ri, ci in zip(rows[hemi], cols[hemi]):
self._renderer.subplot(ri, ci)
yield ri, ci, view
def remove_labels(self):
"""Remove all the ROI labels from the image."""
for hemi in self._hemis:
mesh = self._layered_meshes[hemi]
for label in self._labels[hemi]:
mesh.remove_overlay(label.name)
self._labels[hemi].clear()
self._renderer._update()
def remove_annotations(self):
"""Remove all annotations from the image."""
for hemi in self._hemis:
mesh = self._layered_meshes[hemi]
mesh.remove_overlay(self._annots[hemi])
self._annots[hemi].clear()
self._renderer._update()
    def _add_volume_data(self, hemi, src, volume_options):
        """Create (or reuse) the volume rendering for a volume source space.

        Builds the uniform grid, positive/negative volume mappers, outer
        mesh and optional silhouette on first call, caching them in
        ``self._data['vol']``; later calls only re-add the actors.
        Returns ``(actor_pos, actor_neg)``.
        """
        from ..backends._pyvista import _hide_testing_actor
        _validate_type(src, SourceSpaces, 'src')
        _check_option('src.kind', src.kind, ('volume',))
        _validate_type(
            volume_options, (dict, 'numeric', None), 'volume_options')
        assert hemi == 'vol'
        # a bare number is shorthand for dict(resolution=...)
        if not isinstance(volume_options, dict):
            volume_options = dict(
                resolution=float(volume_options) if volume_options is not None
                else None)
        volume_options = _handle_default('volume_options', volume_options)
        allowed_types = (
            ['resolution', (None, 'numeric')],
            ['blending', (str,)],
            ['alpha', ('numeric', None)],
            ['surface_alpha', (None, 'numeric')],
            ['silhouette_alpha', (None, 'numeric')],
            ['silhouette_linewidth', ('numeric',)],
        )
        for key, types in allowed_types:
            _validate_type(volume_options[key], types,
                           f'volume_options[{repr(key)}]')
        extra_keys = set(volume_options) - set(a[0] for a in allowed_types)
        if len(extra_keys):
            raise ValueError(
                f'volume_options got unknown keys {sorted(extra_keys)}')
        blending = _check_option('volume_options["blending"]',
                                 volume_options['blending'],
                                 ('composite', 'mip'))
        alpha = volume_options['alpha']
        if alpha is None:
            alpha = 0.4 if self._data[hemi]['array'].ndim == 3 else 1.
        alpha = np.clip(float(alpha), 0., 1.)
        resolution = volume_options['resolution']
        surface_alpha = volume_options['surface_alpha']
        if surface_alpha is None:
            surface_alpha = min(alpha / 2., 0.1)
        silhouette_alpha = volume_options['silhouette_alpha']
        if silhouette_alpha is None:
            silhouette_alpha = surface_alpha / 4.
        silhouette_linewidth = volume_options['silhouette_linewidth']
        del volume_options
        volume_pos = self._data[hemi].get('grid_volume_pos')
        volume_neg = self._data[hemi].get('grid_volume_neg')
        center = self._data['center']
        if volume_pos is None:
            # first call: build the grid and cache everything
            xyz = np.meshgrid(
                *[np.arange(s) for s in src[0]['shape']], indexing='ij')
            dimensions = np.array(src[0]['shape'], int)
            mult = 1000 if self._units == 'mm' else 1
            src_mri_t = src[0]['src_mri_t']['trans'].copy()
            src_mri_t[:3] *= mult
            if resolution is not None:
                resolution = resolution * mult / 1000.  # to mm
            del src, mult
            coords = np.array([c.ravel(order='F') for c in xyz]).T
            coords = apply_trans(src_mri_t, coords)
            self.geo[hemi] = Bunch(coords=coords)
            vertices = self._data[hemi]['vertices']
            assert self._data[hemi]['array'].shape[0] == len(vertices)
            # MNE constructs the source space on a uniform grid in MRI space,
            # but mne coreg can change it to be non-uniform, so we need to
            # use all three elements here
            assert np.allclose(
                src_mri_t[:3, :3], np.diag(np.diag(src_mri_t)[:3]))
            spacing = np.diag(src_mri_t)[:3]
            origin = src_mri_t[:3, 3] - spacing / 2.
            scalars = np.zeros(np.prod(dimensions))
            scalars[vertices] = 1.  # for the outer mesh
            grid, grid_mesh, volume_pos, volume_neg = \
                self._renderer._volume(dimensions, origin, spacing, scalars,
                                       surface_alpha, resolution, blending,
                                       center)
            self._data[hemi]['alpha'] = alpha  # incorrectly set earlier
            self._data[hemi]['grid'] = grid
            self._data[hemi]['grid_mesh'] = grid_mesh
            self._data[hemi]['grid_coords'] = coords
            self._data[hemi]['grid_src_mri_t'] = src_mri_t
            self._data[hemi]['grid_shape'] = dimensions
            self._data[hemi]['grid_volume_pos'] = volume_pos
            self._data[hemi]['grid_volume_neg'] = volume_neg
        actor_pos, _ = self._renderer.plotter.add_actor(
            volume_pos, reset_camera=False, name=None, culling=False,
            render=False)
        actor_neg = actor_mesh = None
        if volume_neg is not None:
            actor_neg, _ = self._renderer.plotter.add_actor(
                volume_neg, reset_camera=False, name=None, culling=False,
                render=False)
        grid_mesh = self._data[hemi]['grid_mesh']
        if grid_mesh is not None:
            # semi-transparent outer surface of the source space
            actor_mesh, prop = self._renderer.plotter.add_actor(
                grid_mesh, reset_camera=False, name=None, culling=False,
                pickable=False, render=False)
            prop.SetColor(*self._brain_color[:3])
            prop.SetOpacity(surface_alpha)
            if silhouette_alpha > 0 and silhouette_linewidth > 0:
                for _ in self._iter_views('vol'):
                    self._renderer._silhouette(
                        mesh=grid_mesh.GetInput(),
                        color=self._brain_color[:3],
                        line_width=silhouette_linewidth,
                        alpha=silhouette_alpha,
                    )
        for actor in (actor_pos, actor_neg, actor_mesh):
            if actor is not None:
                _hide_testing_actor(actor)
        return actor_pos, actor_neg
    def add_label(self, label, color=None, alpha=1, scalar_thresh=None,
                  borders=False, hemi=None, subdir=None,
                  reset_camera=True):
        """Add an ROI label to the image.

        Parameters
        ----------
        label : str | instance of Label
            Label filepath or name. Can also be an instance of
            an object with attributes "hemi", "vertices", "name", and
            optionally "color" and "values" (if scalar_thresh is not None).
        color : matplotlib-style color | None
            Anything matplotlib accepts: string, RGB, hex, etc. (default
            "crimson").
        alpha : float in [0, 1]
            Alpha level to control opacity.
        scalar_thresh : None | float
            Threshold the label ids using this value in the label
            file's scalar field (i.e. label only vertices with
            scalar >= thresh).
        borders : bool | int
            Show only label borders. If int, specify the number of steps
            (away from the true border) along the cortical mesh to include
            as part of the border definition.
        hemi : str | None
            If None, it is assumed to belong to the hemisphere being
            shown.
        subdir : None | str
            If a label is specified as name, subdir can be used to indicate
            that the label file is in a sub-directory of the subject's
            label directory rather than in the label directory itself (e.g.
            for ``$SUBJECTS_DIR/$SUBJECT/label/aparc/lh.cuneus.label``
            ``brain.add_label('cuneus', subdir='aparc')``).
        reset_camera : bool
            If True, reset the camera view after adding the label. Defaults
            to True.

        Notes
        -----
        To remove previously added labels, run Brain.remove_labels().
        """
        from ...label import read_label
        if isinstance(label, str):
            if color is None:
                color = "crimson"
            if os.path.isfile(label):
                # a filepath: hemi and name come from the file
                filepath = label
                label = read_label(filepath)
                hemi = label.hemi
                label_name = os.path.basename(filepath).split('.')[1]
            else:
                # a bare name: locate the file under the subject's label dir
                hemi = self._check_hemi(hemi)
                label_name = label
                label_fname = ".".join([hemi, label_name, 'label'])
                if subdir is None:
                    filepath = op.join(self._subjects_dir, self._subject_id,
                                       'label', label_fname)
                else:
                    filepath = op.join(self._subjects_dir, self._subject_id,
                                       'label', subdir, label_fname)
                if not os.path.exists(filepath):
                    raise ValueError('Label file %s does not exist'
                                     % filepath)
                label = read_label(filepath)
            ids = label.vertices
            scalars = label.values
        else:
            # try to extract parameters from label instance
            try:
                hemi = label.hemi
                ids = label.vertices
                if label.name is None:
                    label.name = 'unnamed' + str(self._unnamed_label_id)
                    self._unnamed_label_id += 1
                label_name = str(label.name)
                if color is None:
                    if hasattr(label, 'color') and label.color is not None:
                        color = label.color
                    else:
                        color = "crimson"
                if scalar_thresh is not None:
                    scalars = label.values
            except Exception:
                raise ValueError('Label was not a filename (str), and could '
                                 'not be understood as a class. The class '
                                 'must have attributes "hemi", "vertices", '
                                 '"name", and (if scalar_thresh is not None)'
                                 '"values"')
            hemi = self._check_hemi(hemi)
        if scalar_thresh is not None:
            ids = ids[scalars >= scalar_thresh]
        if self.time_viewer and self.show_traces \
                and self.traces_mode == 'label':
            # label-traces mode: also plot the label's time course
            stc = self._data["stc"]
            src = self._data["src"]
            tc = stc.extract_label_time_course(label, src=src,
                                               mode=self.label_extract_mode)
            tc = tc[0] if tc.ndim == 2 else tc[0, 0, :]
            color = next(self.color_cycle)
            line = self.mpl_canvas.plot(
                self._data['time'], tc, label=label_name,
                color=color)
        else:
            line = None
        orig_color = color
        color = _to_rgb(color, alpha, alpha=True)
        # two-entry table: transparent background + the label color
        cmap = np.array([(0, 0, 0, 0,), color])
        ctable = np.round(cmap * 255).astype(np.uint8)
        scalars = np.zeros(self.geo[hemi].coords.shape[0])
        scalars[ids] = 1
        if borders:
            keep_idx = _mesh_borders(self.geo[hemi].faces, scalars)
            show = np.zeros(scalars.size, dtype=np.int64)
            if isinstance(borders, int):
                # widen the border by `borders` steps along the mesh
                for _ in range(borders):
                    keep_idx = np.in1d(
                        self.geo[hemi].faces.ravel(), keep_idx)
                    keep_idx.shape = self.geo[hemi].faces.shape
                    keep_idx = self.geo[hemi].faces[np.any(
                        keep_idx, axis=1)]
                    keep_idx = np.unique(keep_idx)
            show[keep_idx] = 1
            scalars *= show
        for _, _, v in self._iter_views(hemi):
            mesh = self._layered_meshes[hemi]
            mesh.add_overlay(
                scalars=scalars,
                colormap=ctable,
                rng=[np.min(scalars), np.max(scalars)],
                opacity=alpha,
                name=label_name,
            )
            if reset_camera:
                self._renderer.set_camera(update=False, **views_dicts[hemi][v])
        if self.time_viewer and self.show_traces \
                and self.traces_mode == 'label':
            label._color = orig_color
            label._line = line
        self._labels[hemi].append(label)
        self._renderer._update()
@fill_doc
def add_head(self, dense=True, color='gray', alpha=0.5):
"""Add a mesh to render the outer head surface.
Parameters
----------
dense : bool
Whether to plot the dense head (``seghead``) or the less dense head
(``head``).
color : color
A list of anything matplotlib accepts: string, RGB, hex, etc.
alpha : float in [0, 1]
Alpha level to control opacity.
Notes
-----
.. versionadded:: 0.24
"""
# load head
surf = _get_head_surface('seghead' if dense else 'head',
self._subject_id, self._subjects_dir)
verts, triangles = surf['rr'], surf['tris']
verts *= 1e3 if self._units == 'mm' else 1
color = _to_rgb(color, alpha, alpha=True)
for _ in self._iter_views('vol'):
actor, _ = self._renderer.mesh(
*verts.T, triangles=triangles, color=color,
opacity=alpha, reset_camera=False, render=False)
self._add_actor('head', actor)
self._renderer._update()
    def remove_head(self):
        """Remove head objects from the rendered scene."""
        self._remove('head', render=True)
@fill_doc
def add_skull(self, outer=True, color='gray', alpha=0.5):
"""Add a mesh to render the skull surface.
Parameters
----------
outer : bool
Adds the outer skull if ``True``, otherwise adds the inner skull.
color : color
A list of anything matplotlib accepts: string, RGB, hex, etc.
alpha : float in [0, 1]
Alpha level to control opacity.
Notes
-----
.. versionadded:: 0.24
"""
surf = _get_skull_surface('outer' if outer else 'inner',
self._subject_id, self._subjects_dir)
verts, triangles = surf['rr'], surf['tris']
verts *= 1e3 if self._units == 'mm' else 1
color = _to_rgb(color, alpha, alpha=True)
for _ in self._iter_views('vol'):
actor, _ = self._renderer.mesh(
*verts.T, triangles=triangles, color=color,
opacity=alpha, reset_camera=False, render=False)
self._add_actor('skull', actor)
self._renderer._update()
    def remove_skull(self):
        """Remove skull objects from the rendered scene."""
        self._remove('skull', render=True)
@fill_doc
def add_volume_labels(self, aseg='aparc+aseg', labels=None, colors=None,
alpha=0.5, smooth=0.9, fill_hole_size=None,
legend=None):
"""Add labels to the rendering from an anatomical segmentation.
Parameters
----------
%(aseg)s
labels : list
Labeled regions of interest to plot. See
:func:`mne.get_montage_volume_labels`
for one way to determine regions of interest. Regions can also be
chosen from the :term:`FreeSurfer LUT`.
colors : list | matplotlib-style color | None
A list of anything matplotlib accepts: string, RGB, hex, etc.
(default :term:`FreeSurfer LUT` colors).
alpha : float in [0, 1]
Alpha level to control opacity.
%(smooth)s
fill_hole_size : int | None
The size of holes to remove in the mesh in voxels. Default is None,
no holes are removed. Warning, this dilates the boundaries of the
surface by ``fill_hole_size`` number of voxels so use the minimal
size.
legend : bool | None | dict
Add a legend displaying the names of the ``labels``. Default (None)
is ``True`` if the number of ``labels`` is 10 or fewer.
Can also be a dict of ``kwargs`` to pass to
:meth:`pyvista.Plotter.add_legend`.
Notes
-----
.. versionadded:: 0.24
"""
import nibabel as nib
# load anatomical segmentation image
if not aseg.endswith('aseg'):
raise RuntimeError(
f'`aseg` file path must end with "aseg", got {aseg}')
aseg = _check_fname(op.join(self._subjects_dir, self._subject_id,
'mri', aseg + '.mgz'),
overwrite='read', must_exist=True)
aseg_fname = aseg
aseg = nib.load(aseg_fname)
aseg_data = np.asarray(aseg.dataobj)
vox_mri_t = aseg.header.get_vox2ras_tkr()
mult = 1e-3 if self._units == 'm' else 1
vox_mri_t[:3] *= mult
del aseg
# read freesurfer lookup table
lut, fs_colors = read_freesurfer_lut()
if labels is None: # assign default ROI labels based on indices
lut_r = {v: k for k, v in lut.items()}
labels = [lut_r[idx] for idx in DEFAULTS['volume_label_indices']]
_validate_type(fill_hole_size, (int, None), 'fill_hole_size')
_validate_type(legend, (bool, None), 'legend')
if legend is None:
legend = len(labels) < 11
if colors is None:
colors = [fs_colors[label] / 255 for label in labels]
elif not isinstance(colors, (list, tuple)):
colors = [colors] * len(labels) # make into list
colors = [_to_rgb(color, alpha, name=f'colors[{ci}]', alpha=True)
for ci, color in enumerate(colors)]
surfs = _marching_cubes(
aseg_data, [lut[label] for label in labels], smooth=smooth,
fill_hole_size=fill_hole_size)
for label, color, (verts, triangles) in zip(labels, colors, surfs):
if len(verts) == 0: # not in aseg vals
warn(f'Value {lut[label]} not found for label '
f'{repr(label)} in: {aseg_fname}')
continue
verts = apply_trans(vox_mri_t, verts)
for _ in self._iter_views('vol'):
actor, _ = self._renderer.mesh(
*verts.T, triangles=triangles, color=color,
opacity=alpha, reset_camera=False, render=False)
self._add_actor('volume_labels', actor)
if legend or isinstance(legend, dict):
# use empty kwargs for legend = True
legend = legend if isinstance(legend, dict) else dict()
self._renderer.plotter.add_legend(
list(zip(labels, colors)), **legend)
self._renderer._update()
    def remove_volume_labels(self):
        """Remove the volume labels from the rendered scene.

        Also removes the legend added by :meth:`add_volume_labels`.
        """
        self._remove('volume_labels', render=True)
        self._renderer.plotter.remove_legend()
    def add_foci(self, coords, coords_as_verts=False, map_surface=None,
                 scale_factor=1, color="white", alpha=1, name=None,
                 hemi=None, resolution=50):
        """Add spherical foci, possibly mapping to displayed surf.

        The foci spheres can be displayed at the coordinates given, or
        mapped through a surface geometry. In other words, coordinates
        from a volume-based analysis in MNI space can be displayed on an
        inflated average surface by finding the closest vertex on the
        white surface and mapping to that vertex on the inflated mesh.

        Parameters
        ----------
        coords : ndarray, shape (n_coords, 3)
            Coordinates in stereotaxic space (default) or array of
            vertex ids (with ``coord_as_verts=True``).
        coords_as_verts : bool
            Whether the coords parameter should be interpreted as vertex ids.
        map_surface : None
            Surface to map coordinates through, or None to use raw coords.
        scale_factor : float
            Controls the size of the foci spheres (relative to 1cm).
        color : matplotlib color code
            HTML name, RBG tuple, or hex code.
        alpha : float in [0, 1]
            Opacity of focus glyphs.
        name : str
            Internal name to use.
        hemi : str | None
            If None, it is assumed to belong to the hemisphere being
            shown. If two hemispheres are being shown, an error will
            be thrown.
        resolution : int
            The resolution of the spheres.
        """
        hemi = self._check_hemi(hemi, extras=['vol'])
        # those parameters are not supported yet, only None is allowed
        _check_option('map_surface', map_surface, [None])
        # Figure out how to interpret the first parameter
        if coords_as_verts:
            coords = self.geo[hemi].coords[coords]
        # Convert the color code
        color = _to_rgb(color)
        if self._units == 'm':
            # scale factor below is expressed in mm
            scale_factor = scale_factor / 1000.
        for _, _, v in self._iter_views(hemi):
            self._renderer.sphere(center=coords, color=color,
                                  scale=(10. * scale_factor),
                                  opacity=alpha, resolution=resolution)
            self._renderer.set_camera(**views_dicts[hemi][v])
    @verbose
    def add_sensors(self, info, trans, meg=None, eeg='original', fnirs=True,
                    ecog=True, seeg=True, dbs=True, verbose=None):
        """Add mesh objects to represent sensor positions.
        Parameters
        ----------
        %(info_not_none)s
        %(trans_not_none)s
        %(meg)s
        %(eeg)s
        %(fnirs)s
        %(ecog)s
        %(seeg)s
        %(dbs)s
        %(verbose)s
        Notes
        -----
        .. versionadded:: 0.24
        """
        _validate_type(info, Info, 'info')
        meg, eeg, fnirs, warn_meg = _handle_sensor_types(meg, eeg, fnirs)
        picks = pick_types(info, meg=('sensors' in meg),
                           ref_meg=('ref' in meg), eeg=(len(eeg) > 0),
                           ecog=ecog, seeg=seeg, dbs=dbs,
                           fnirs=(len(fnirs) > 0))
        head_mri_t = _get_trans(trans, 'head', 'mri', allow_none=False)[0]
        del trans
        # get transforms to the "mri" coordinate frame
        to_cf_t = _get_transforms_to_coord_frame(
            info, head_mri_t, coord_frame='mri')
        # projected EEG needs the head surface to project onto
        if pick_types(info, eeg=True, exclude=()).size > 0 and \
                'projected' in eeg:
            head_surf = _get_head_surface(
                'seghead', self._subject_id, self._subjects_dir)
        else:
            head_surf = None
        # Do the main plotting
        for _ in self._iter_views('vol'):
            if picks.size > 0:
                sensors_actors = _plot_sensors(
                    self._renderer, info, to_cf_t, picks, meg, eeg,
                    fnirs, warn_meg, head_surf, self._units)
                # track every created actor so remove_sensors() can find it
                for item, actors in sensors_actors.items():
                    for actor in actors:
                        self._add_actor(item, actor)
            if 'helmet' in meg and pick_types(info, meg=True).size > 0:
                surf = get_meg_helmet_surf(info, head_mri_t)
                # helmet vertices are in meters; convert to mm when needed
                verts = surf['rr'] * (1 if self._units == 'm' else 1e3)
                actor, _ = self._renderer.mesh(
                    *verts.T, surf['tris'],
                    color=DEFAULTS['coreg']['helmet_color'],
                    opacity=0.25, reset_camera=False, render=False)
                self._add_actor('helmet', actor)
        self._renderer._update()
def remove_sensors(self, kind=None):
"""Remove sensors from the rendered scene.
Parameters
----------
kind : str | list | None
If None, removes all sensor-related data including the helmet.
Can be "meg", "eeg", "fnirs", "ecog", "seeg", "dbs" or "helmet"
to remove that item.
"""
all_kinds = ('meg', 'eeg', 'fnirs', 'ecog', 'seeg', 'dbs', 'helmet')
if kind is None:
for item in all_kinds:
self._remove(item, render=False)
else:
if isinstance(kind, str):
kind = [kind]
for this_kind in kind:
_check_option('kind', this_kind, all_kinds)
self._remove(this_kind, render=False)
self._renderer._update()
    def add_text(self, x, y, text, name=None, color=None, opacity=1.0,
                 row=0, col=0, font_size=None, justification=None):
        """Add a text to the visualization.
        Parameters
        ----------
        x : float
            X coordinate.
        y : float
            Y coordinate.
        text : str
            Text to add.
        name : str
            Name of the text (text label can be updated using update_text()).
        color : tuple
            Color of the text. Default is the foreground color set during
            initialization (default is black or white depending on the
            background color).
        opacity : float
            Opacity of the text (default 1.0).
        row : int | None
            Row index of which brain to use. Default is the top row.
        col : int | None
            Column index of which brain to use. Default is the left-most
            column.
        font_size : float | None
            The font size to use.
        justification : str | None
            The text justification.
        """
        _validate_type(name, (str, None), 'name')
        # the text itself doubles as the lookup key when no name is given
        name = text if name is None else name
        # disallow duplicates so update/remove by name stays unambiguous
        if 'text' in self._actors and name in self._actors['text']:
            raise ValueError(f'Text with the name {name} already exists')
        for ri, ci, _ in self._iter_views('vol'):
            # None for row/col matches any row/column
            # NOTE(review): with multiple matching views only the last
            # created actor is stored under this name — confirm intended
            if (row is None or row == ri) and (col is None or col == ci):
                actor = self._renderer.text2d(
                    x_window=x, y_window=y, text=text, color=color,
                    size=font_size, justification=justification)
                if 'text' not in self._actors:
                    self._actors['text'] = dict()
                self._actors['text'][name] = actor
    def remove_text(self, name=None):
        """Remove text from the rendered scene.
        Parameters
        ----------
        name : str | None
            Remove specific text by name. If None, all text will be removed.
        """
        _validate_type(name, (str, None), 'name')
        if name is None:
            # remove every text actor and forget the bookkeeping dict
            for actor in self._actors['text'].values():
                self._renderer.plotter.remove_actor(actor)
            self._actors.pop('text')
        else:
            # build the set of valid names for error reporting
            names = [None]
            if 'text' in self._actors:
                names += list(self._actors['text'].keys())
            _check_option('name', name, names)
            self._renderer.plotter.remove_actor(
                self._actors['text'][name])
            self._actors['text'].pop(name)
        self._renderer._update()
    def _configure_label_time_course(self):
        """Set up label-mode time-course tracing on the mpl canvas."""
        from ...label import read_labels_from_annot
        if not self.show_traces:
            return
        if self.mpl_canvas is None:
            self._configure_mplcanvas()
        else:
            self.clear_glyphs()
        self.traces_mode = 'label'
        self.add_annotation(self.annot, color="w", alpha=0.75)
        # now plot the time line
        self.plot_time_line(update=False)
        self.mpl_canvas.update_plot()
        for hemi in self._hemis:
            labels = read_labels_from_annot(
                subject=self._subject_id,
                parc=self.annot,
                hemi=hemi,
                subjects_dir=self._subjects_dir
            )
            # map every mesh vertex to the index of the label containing it
            # (-1 for vertices that belong to no label)
            self._vertex_to_label_id[hemi] = np.full(
                self.geo[hemi].coords.shape[0], -1)
            self._annotation_labels[hemi] = labels
            for idx, label in enumerate(labels):
                self._vertex_to_label_id[hemi][label.vertices] = idx
def add_annotation(self, annot, borders=True, alpha=1, hemi=None,
remove_existing=True, color=None):
"""Add an annotation file.
Parameters
----------
annot : str | tuple
Either path to annotation file or annotation name. Alternatively,
the annotation can be specified as a ``(labels, ctab)`` tuple per
hemisphere, i.e. ``annot=(labels, ctab)`` for a single hemisphere
or ``annot=((lh_labels, lh_ctab), (rh_labels, rh_ctab))`` for both
hemispheres. ``labels`` and ``ctab`` should be arrays as returned
by :func:`nibabel.freesurfer.io.read_annot`.
borders : bool | int
Show only label borders. If int, specify the number of steps
(away from the true border) along the cortical mesh to include
as part of the border definition.
alpha : float
Opacity of the head surface. Must be between 0 and 1 (inclusive).
Default is 0.5.
hemi : str | None
If None, it is assumed to belong to the hemipshere being
shown. If two hemispheres are being shown, data must exist
for both hemispheres.
remove_existing : bool
If True (default), remove old annotations.
color : matplotlib-style color code
If used, show all annotations in the same (specified) color.
Probably useful only when showing annotation borders.
"""
from ...label import _read_annot
hemis = self._check_hemis(hemi)
# Figure out where the data is coming from
if isinstance(annot, str):
if os.path.isfile(annot):
filepath = annot
path = os.path.split(filepath)[0]
file_hemi, annot = os.path.basename(filepath).split('.')[:2]
if len(hemis) > 1:
if annot[:2] == 'lh.':
filepaths = [filepath, op.join(path, 'rh' + annot[2:])]
elif annot[:2] == 'rh.':
filepaths = [op.join(path, 'lh' + annot[2:], filepath)]
else:
raise RuntimeError('To add both hemispheres '
'simultaneously, filename must '
'begin with "lh." or "rh."')
else:
filepaths = [filepath]
else:
filepaths = []
for hemi in hemis:
filepath = op.join(self._subjects_dir,
self._subject_id,
'label',
".".join([hemi, annot, 'annot']))
if not os.path.exists(filepath):
raise ValueError('Annotation file %s does not exist'
% filepath)
filepaths += [filepath]
annots = []
for hemi, filepath in zip(hemis, filepaths):
# Read in the data
labels, cmap, _ = _read_annot(filepath)
annots.append((labels, cmap))
else:
annots = [annot] if len(hemis) == 1 else annot
annot = 'annotation'
for hemi, (labels, cmap) in zip(hemis, annots):
# Maybe zero-out the non-border vertices
self._to_borders(labels, hemi, borders)
# Handle null labels properly
cmap[:, 3] = 255
bgcolor = np.round(np.array(self._brain_color) * 255).astype(int)
bgcolor[-1] = 0
cmap[cmap[:, 4] < 0, 4] += 2 ** 24 # wrap to positive
cmap[cmap[:, 4] <= 0, :4] = bgcolor
if np.any(labels == 0) and not np.any(cmap[:, -1] <= 0):
cmap = np.vstack((cmap, np.concatenate([bgcolor, [0]])))
# Set label ids sensibly
order = np.argsort(cmap[:, -1])
cmap = cmap[order]
ids = np.searchsorted(cmap[:, -1], labels)
cmap = cmap[:, :4]
# Set the alpha level
alpha_vec = cmap[:, 3]
alpha_vec[alpha_vec > 0] = alpha * 255
# Override the cmap when a single color is used
if color is not None:
rgb = np.round(np.multiply(_to_rgb(color), 255))
cmap[:, :3] = rgb.astype(cmap.dtype)
ctable = cmap.astype(np.float64)
for _ in self._iter_views(hemi):
mesh = self._layered_meshes[hemi]
mesh.add_overlay(
scalars=ids,
colormap=ctable,
rng=[np.min(ids), np.max(ids)],
opacity=alpha,
name=annot,
)
self._annots[hemi].append(annot)
if not self.time_viewer or self.traces_mode == 'vertex':
self._renderer._set_colormap_range(
mesh._actor, cmap.astype(np.uint8), None)
self._renderer._update()
    def close(self):
        """Close all figures and cleanup data structure."""
        # mark this instance closed before tearing down the renderer
        self._closed = True
        self._renderer.close()
    def show(self):
        """Display the window."""
        # delegate to the backend renderer
        self._renderer.show()
    @fill_doc
    def show_view(self, view=None, roll=None, distance=None, *,
                  row=None, col=None, hemi=None, align=True,
                  azimuth=None, elevation=None, focalpoint=None):
        """Orient camera to display view.
        Parameters
        ----------
        %(view)s
        %(roll)s
        %(distance)s
        row : int | None
            The row to set. Default all rows.
        col : int | None
            The column to set. Default all columns.
        hemi : str | None
            Which hemi to use for view lookup (when in "both" mode).
        align : bool
            If True, consider view arguments relative to canonical MRI
            directions (closest to MNI for the subject) rather than native MRI
            space. This helps when MRIs are not in standard orientation (e.g.,
            have large rotations).
        %(azimuth)s
        %(elevation)s
        %(focalpoint)s
        """
        _validate_type(row, ('int-like', None), 'row')
        _validate_type(col, ('int-like', None), 'col')
        hemi = self._hemi if hemi is None else hemi
        if hemi == 'split':
            # in split layout, the second row/column shows the right hemi
            if (self._view_layout == 'vertical' and col == 1 or
                    self._view_layout == 'horizontal' and row == 1):
                hemi = 'rh'
            else:
                hemi = 'lh'
        _validate_type(view, (str, None), 'view')
        view_params = dict(azimuth=azimuth, elevation=elevation, roll=roll,
                           distance=distance, focalpoint=focalpoint)
        if view is not None:  # view_params take precedence
            view_params = {param: val for param, val in view_params.items()
                           if val is not None}  # no overwriting with None
            view_params = dict(views_dicts[hemi].get(view), **view_params)
        xfm = self._rigid if align else None
        for h in self._hemis:
            for ri, ci, _ in self._iter_views(h):
                # None for row/col means "apply to every row/column"
                if (row is None or row == ri) and (col is None or col == ci):
                    self._renderer.set_camera(
                        **view_params, reset_camera=False, rigid=xfm)
        self._renderer._update()
    def reset_view(self):
        """Reset the camera."""
        # restore the default view for every hemisphere/subplot
        for h in self._hemis:
            for _, _, v in self._iter_views(h):
                self._renderer.set_camera(**views_dicts[h][v],
                                          reset_camera=False)
    def save_image(self, filename=None, mode='rgb'):
        """Save view from all panels to disk.
        Parameters
        ----------
        filename : str
            Path to new image file. If None, a default ``.png`` file name
            is generated.
        mode : str
            Either 'rgb' or 'rgba' for values to return.
        """
        if filename is None:
            filename = _generate_default_filename(".png")
        _save_ndarray_img(
            filename, self.screenshot(mode=mode, time_viewer=True))
    @fill_doc
    def screenshot(self, mode='rgb', time_viewer=False):
        """Generate a screenshot of current view.
        Parameters
        ----------
        mode : str
            Either 'rgb' or 'rgba' for values to return.
        %(brain_screenshot_time_viewer)s
        Returns
        -------
        screenshot : array
            Image pixel values.
        """
        # 'rgb' -> 3 channels, anything else ('rgba') -> 4
        n_channels = 3 if mode == 'rgb' else 4
        img = self._renderer.screenshot(mode)
        logger.debug(f'Got screenshot of size {img.shape}')
        if time_viewer and self.time_viewer and \
                self.show_traces and \
                not self.separate_canvas:
            from matplotlib.image import imread
            canvas = self.mpl_canvas.fig.canvas
            canvas.draw_idle()
            fig = self.mpl_canvas.fig
            with BytesIO() as output:
                # Need to pass dpi here so it uses the physical (HiDPI) DPI
                # rather than logical DPI when saving in most cases.
                # But when matplotlib uses HiDPI and VTK doesn't
                # (e.g., macOS w/Qt 5.14+ and VTK9) then things won't work,
                # so let's just calculate the DPI we need to get
                # the correct size output based on the widths being equal
                size_in = fig.get_size_inches()
                dpi = fig.get_dpi()
                want_size = tuple(x * dpi for x in size_in)
                n_pix = want_size[0] * want_size[1]
                logger.debug(
                    f'Saving figure of size {size_in} @ {dpi} DPI '
                    f'({want_size} = {n_pix} pixels)')
                # Sometimes there can be off-by-one errors here (e.g.,
                # if in mpl int() rather than int(round()) is used to
                # compute the number of pixels) so rather than use "raw"
                # format and try to reshape ourselves, just write to PNG
                # and read it, which has the dimensions encoded for us.
                fig.savefig(output, dpi=dpi, format='png',
                            facecolor=self._bg_color, edgecolor='none')
                output.seek(0)
                trace_img = imread(output, format='png')[:, :, :n_channels]
                trace_img = np.clip(
                    np.round(trace_img * 255), 0, 255).astype(np.uint8)
            # combine the 3D scene with the rendered traces canvas
            bgcolor = np.array(self._brain_color[:n_channels]) / 255
            img = concatenate_images([img, trace_img], bgcolor=bgcolor,
                                     n_channels=n_channels)
        return img
    @contextlib.contextmanager
    def _no_lut_update(self, why):
        """Temporarily lock LUT updates (``update_lut`` returns early while
        ``self._lut_locked`` is set); restored on exit, even on error."""
        orig = self._lut_locked
        self._lut_locked = why
        try:
            yield
        finally:
            self._lut_locked = orig
    @fill_doc
    def update_lut(self, fmin=None, fmid=None, fmax=None, alpha=None):
        """Update color map.
        Parameters
        ----------
        %(fmin_fmid_fmax)s
        alpha : float | None
            Alpha to use in the update.
        """
        args = f'{fmin}, {fmid}, {fmax}, {alpha}'
        # re-entrancy guard, see _no_lut_update()
        if self._lut_locked is not None:
            logger.debug(f'LUT update postponed with {args}')
            return
        logger.debug(f'Updating LUT with {args}')
        center = self._data['center']
        colormap = self._data['colormap']
        transparent = self._data['transparent']
        lims = {key: self._data[key] for key in ('fmin', 'fmid', 'fmax')}
        # keep fmin <= fmid <= fmax by bumping the other limits as needed
        _update_monotonic(lims, fmin=fmin, fmid=fmid, fmax=fmax)
        assert all(val is not None for val in lims.values())
        self._data.update(lims)
        # recompute the 8-bit color table from the colormap and new limits
        self._data['ctable'] = np.round(
            calculate_lut(colormap, alpha=1., center=center,
                          transparent=transparent, **lims) *
            255).astype(np.uint8)
        # update our values
        rng = self._cmap_range
        ctable = self._data['ctable']
        for hemi in ['lh', 'rh', 'vol']:
            hemi_data = self._data.get(hemi)
            if hemi_data is not None:
                # surface overlay
                if hemi in self._layered_meshes:
                    mesh = self._layered_meshes[hemi]
                    mesh.update_overlay(name='data',
                                        colormap=self._data['ctable'],
                                        opacity=alpha,
                                        rng=rng)
                    self._renderer._set_colormap_range(
                        mesh._actor, ctable, self._scalar_bar, rng,
                        self._brain_color)
                # volume renderings (positive/negative parts)
                grid_volume_pos = hemi_data.get('grid_volume_pos')
                grid_volume_neg = hemi_data.get('grid_volume_neg')
                for grid_volume in (grid_volume_pos, grid_volume_neg):
                    if grid_volume is not None:
                        self._renderer._set_volume_range(
                            grid_volume, ctable, hemi_data['alpha'],
                            self._scalar_bar, rng)
                # vector glyphs
                glyph_actor = hemi_data.get('glyph_actor')
                if glyph_actor is not None:
                    for glyph_actor_ in glyph_actor:
                        self._renderer._set_colormap_range(
                            glyph_actor_, ctable, self._scalar_bar, rng)
        if self.time_viewer:
            # sync the GUI widgets without re-triggering this update
            with self._no_lut_update(f'update_lut {args}'):
                for key in ('fmin', 'fmid', 'fmax'):
                    self.callbacks[key](lims[key])
        self._renderer._update()
def set_data_smoothing(self, n_steps):
"""Set the number of smoothing steps.
Parameters
----------
n_steps : int
Number of smoothing steps.
"""
from ...morph import _hemi_morph
for hemi in ['lh', 'rh']:
hemi_data = self._data.get(hemi)
if hemi_data is not None:
if len(hemi_data['array']) >= self.geo[hemi].x.shape[0]:
continue
vertices = hemi_data['vertices']
if vertices is None:
raise ValueError(
'len(data) < nvtx (%s < %s): the vertices '
'parameter must not be None'
% (len(hemi_data), self.geo[hemi].x.shape[0]))
morph_n_steps = 'nearest' if n_steps == -1 else n_steps
with use_log_level(False):
smooth_mat = _hemi_morph(
self.geo[hemi].orig_faces,
np.arange(len(self.geo[hemi].coords)),
vertices, morph_n_steps, maps=None, warn=False)
self._data[hemi]['smooth_mat'] = smooth_mat
self.set_time_point(self._data['time_idx'])
self._data['smoothing_steps'] = n_steps
@property
def _n_times(self):
return len(self._times) if self._times is not None else None
    @property
    def time_interpolation(self):
        """The interpolation mode."""
        return self._time_interpolation
    @fill_doc
    def set_time_interpolation(self, interpolation):
        """Set the interpolation mode.
        Parameters
        ----------
        %(brain_time_interpolation)s
        """
        self._time_interpolation = _check_option(
            'interpolation',
            interpolation,
            ('linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic')
        )
        # rebuild the per-hemisphere time interpolators with the new mode
        self._time_interp_funcs = dict()
        self._time_interp_inv = None
        if self._times is not None:
            idx = np.arange(self._n_times)
            for hemi in ['lh', 'rh', 'vol']:
                hemi_data = self._data.get(hemi)
                if hemi_data is not None:
                    array = hemi_data['array']
                    self._time_interp_funcs[hemi] = _safe_interp1d(
                        idx, array, self._time_interpolation, axis=-1,
                        assume_sorted=True)
            # maps a (possibly fractional) sample index back to seconds
            self._time_interp_inv = _safe_interp1d(idx, self._times)
def set_time_point(self, time_idx):
"""Set the time point shown (can be a float to interpolate).
Parameters
----------
time_idx : int | float
The time index to use. Can be a float to use interpolation
between indices.
"""
self._current_act_data = dict()
time_actor = self._data.get('time_actor', None)
time_label = self._data.get('time_label', None)
for hemi in ['lh', 'rh', 'vol']:
hemi_data = self._data.get(hemi)
if hemi_data is not None:
array = hemi_data['array']
# interpolate in time
vectors = None
if array.ndim == 1:
act_data = array
self._current_time = 0
else:
act_data = self._time_interp_funcs[hemi](time_idx)
self._current_time = self._time_interp_inv(time_idx)
if array.ndim == 3:
vectors = act_data
act_data = np.linalg.norm(act_data, axis=1)
self._current_time = self._time_interp_inv(time_idx)
self._current_act_data[hemi] = act_data
if time_actor is not None and time_label is not None:
time_actor.SetInput(time_label(self._current_time))
# update the volume interpolation
grid = hemi_data.get('grid')
if grid is not None:
vertices = self._data['vol']['vertices']
values = self._current_act_data['vol']
rng = self._cmap_range
fill = 0 if self._data['center'] is not None else rng[0]
_cell_data(grid)['values'].fill(fill)
# XXX for sided data, we probably actually need two
# volumes as composite/MIP needs to look at two
# extremes... for now just use abs. Eventually we can add
# two volumes if we want.
_cell_data(grid)['values'][vertices] = values
# interpolate in space
smooth_mat = hemi_data.get('smooth_mat')
if smooth_mat is not None:
act_data = smooth_mat.dot(act_data)
# update the mesh scalar values
if hemi in self._layered_meshes:
mesh = self._layered_meshes[hemi]
if 'data' in mesh._overlays:
mesh.update_overlay(name='data', scalars=act_data)
else:
mesh.add_overlay(
scalars=act_data,
colormap=self._data['ctable'],
rng=self._cmap_range,
opacity=None,
name='data',
)
# update the glyphs
if vectors is not None:
self._update_glyphs(hemi, vectors)
self._data['time_idx'] = time_idx
self._renderer._update()
def set_time(self, time):
"""Set the time to display (in seconds).
Parameters
----------
time : float
The time to show, in seconds.
"""
if self._times is None:
raise ValueError(
'Cannot set time when brain has no defined times.')
elif min(self._times) <= time <= max(self._times):
self.set_time_point(np.interp(float(time), self._times,
np.arange(self._n_times)))
else:
raise ValueError(
f'Requested time ({time} s) is outside the range of '
f'available times ({min(self._times)}-{max(self._times)} s).')
    def _update_glyphs(self, hemi, vectors):
        """Create or refresh the vector (quiver) glyphs for one hemisphere."""
        hemi_data = self._data.get(hemi)
        assert hemi_data is not None
        vertices = hemi_data['vertices']
        vector_alpha = self._data['vector_alpha']
        scale_factor = self._data['scale_factor']
        vertices = slice(None) if vertices is None else vertices
        x, y, z = np.array(self.geo[hemi].coords)[vertices].T
        # first call creates the actors; later calls only update the data
        if hemi_data['glyph_actor'] is None:
            add = True
            hemi_data['glyph_actor'] = list()
        else:
            add = False
        count = 0
        for _ in self._iter_views(hemi):
            if hemi_data['glyph_dataset'] is None:
                glyph_mapper, glyph_dataset = self._renderer.quiver3d(
                    x, y, z,
                    vectors[:, 0], vectors[:, 1], vectors[:, 2],
                    color=None,
                    mode='2darrow',
                    scale_mode='vector',
                    scale=scale_factor,
                    opacity=vector_alpha,
                    name=str(hemi) + "_glyph"
                )
                hemi_data['glyph_dataset'] = glyph_dataset
                hemi_data['glyph_mapper'] = glyph_mapper
            else:
                # dataset already exists: just swap in the new vectors
                glyph_dataset = hemi_data['glyph_dataset']
                _point_data(glyph_dataset)['vec'] = vectors
                glyph_mapper = hemi_data['glyph_mapper']
            if add:
                glyph_actor = self._renderer._actor(glyph_mapper)
                prop = glyph_actor.GetProperty()
                prop.SetLineWidth(2.)
                prop.SetOpacity(vector_alpha)
                self._renderer.plotter.add_actor(glyph_actor, render=False)
                hemi_data['glyph_actor'].append(glyph_actor)
            else:
                # reuse the actor created for this view on a previous call
                glyph_actor = hemi_data['glyph_actor'][count]
            count += 1
            self._renderer._set_colormap_range(
                actor=glyph_actor,
                ctable=self._data['ctable'],
                scalar_bar=None,
                rng=self._cmap_range,
            )
@property
def _cmap_range(self):
dt_max = self._data['fmax']
if self._data['center'] is None:
dt_min = self._data['fmin']
else:
dt_min = -1 * dt_max
rng = [dt_min, dt_max]
return rng
def _update_fscale(self, fscale):
"""Scale the colorbar points."""
fmin = self._data['fmin'] * fscale
fmid = self._data['fmid'] * fscale
fmax = self._data['fmax'] * fscale
self.update_lut(fmin=fmin, fmid=fmid, fmax=fmax)
    def _update_auto_scaling(self, restore=False):
        """Recompute colormap limits from the currently shown data."""
        user_clim = self._data['clim']
        # only allow diverging (pos_lims) when the user did not force 'lims'
        if user_clim is not None and 'lims' in user_clim:
            allow_pos_lims = False
        else:
            allow_pos_lims = True
        # restore=True re-applies the user's clim; otherwise autoscale
        if user_clim is not None and restore:
            clim = user_clim
        else:
            clim = 'auto'
        colormap = self._data['colormap']
        transparent = self._data['transparent']
        mapdata = _process_clim(
            clim, colormap, transparent,
            np.concatenate(list(self._current_act_data.values())),
            allow_pos_lims)
        diverging = 'pos_lims' in mapdata['clim']
        colormap = mapdata['colormap']
        scale_pts = mapdata['clim']['pos_lims' if diverging else 'lims']
        transparent = mapdata['transparent']
        del mapdata
        fmin, fmid, fmax = scale_pts
        # diverging maps are centered on zero
        center = 0. if diverging else None
        self._data['center'] = center
        self._data['colormap'] = colormap
        self._data['transparent'] = transparent
        self.update_lut(fmin=fmin, fmid=fmid, fmax=fmax)
def _to_time_index(self, value):
"""Return the interpolated time index of the given time value."""
time = self._data['time']
value = np.interp(value, time, np.arange(len(time)))
return value
    @property
    def data(self):
        """Data used by time viewer and color bar widgets."""
        return self._data
    @property
    def labels(self):
        """Labels that have been added to the visualization."""
        return self._labels
    @property
    def views(self):
        """The view names used by this visualization."""
        return self._views
    @property
    def hemis(self):
        """The hemispheres being displayed."""
        return self._hemis
    def _save_movie(self, filename, time_dilation=4., tmin=None, tmax=None,
                    framerate=24, interpolation=None, codec=None,
                    bitrate=None, callback=None, time_viewer=False, **kwargs):
        """Render the frames and write the movie file via imageio."""
        import imageio
        # block user interaction while frames are being captured
        with self._renderer._disabled_interaction():
            images = self._make_movie_frames(
                time_dilation, tmin, tmax, framerate, interpolation, callback,
                time_viewer)
        # find imageio FFMPEG parameters
        if 'fps' not in kwargs:
            kwargs['fps'] = framerate
        if codec is not None:
            kwargs['codec'] = codec
        if bitrate is not None:
            kwargs['bitrate'] = bitrate
        imageio.mimwrite(filename, images, **kwargs)
    def _save_movie_tv(self, filename, time_dilation=4., tmin=None, tmax=None,
                       framerate=24, interpolation=None, codec=None,
                       bitrate=None, callback=None, time_viewer=False,
                       **kwargs):
        """Save a movie while reporting progress in the time-viewer GUI."""
        def frame_callback(frame, n_frames):
            # update the status-bar widgets as frames are rendered
            if frame == n_frames:
                # On the ImageIO step
                self.status_msg.set_value(
                    "Saving with ImageIO: %s"
                    % filename
                )
                self.status_msg.show()
                self.status_progress.hide()
                self._renderer._status_bar_update()
            else:
                self.status_msg.set_value(
                    "Rendering images (frame %d / %d) ..."
                    % (frame + 1, n_frames)
                )
                self.status_msg.show()
                self.status_progress.show()
                self.status_progress.set_range([0, n_frames - 1])
                self.status_progress.set_value(frame)
                self.status_progress.update()
                self.status_msg.update()
                self._renderer._status_bar_update()
        # set cursor to busy
        default_cursor = self._renderer._window_get_cursor()
        self._renderer._window_set_cursor(
            self._renderer._window_new_cursor("WaitCursor"))
        try:
            self._save_movie(filename, time_dilation, tmin, tmax,
                             framerate, interpolation, codec,
                             bitrate, frame_callback, time_viewer, **kwargs)
        except (Exception, KeyboardInterrupt):
            # warn instead of raising so the cursor is always restored
            warn('Movie saving aborted:\n' + traceback.format_exc())
        finally:
            self._renderer._window_set_cursor(default_cursor)
    @fill_doc
    def save_movie(self, filename=None, time_dilation=4., tmin=None, tmax=None,
                   framerate=24, interpolation=None, codec=None,
                   bitrate=None, callback=None, time_viewer=False, **kwargs):
        """Save a movie (for data with a time axis).
        The movie is created through the :mod:`imageio` module. The format is
        determined by the extension, and additional options can be specified
        through keyword arguments that depend on the format, see
        :doc:`imageio's format page <imageio:formats/index>`.
        .. Warning::
            This method assumes that time is specified in seconds when adding
            data. If time is specified in milliseconds this will result in
            movies 1000 times longer than expected.
        Parameters
        ----------
        filename : str
            Path at which to save the movie. The extension determines the
            format (e.g., ``'*.mov'``, ``'*.gif'``, ...; see the :mod:`imageio`
            documentation for available formats).
        time_dilation : float
            Factor by which to stretch time (default 4). For example, an epoch
            from -100 to 600 ms lasts 700 ms. With ``time_dilation=4`` this
            would result in a 2.8 s long movie.
        tmin : float
            First time point to include (default: all data).
        tmax : float
            Last time point to include (default: all data).
        framerate : float
            Framerate of the movie (frames per second, default 24).
        %(brain_time_interpolation)s
            If None, it uses the current ``brain.interpolation``,
            which defaults to ``'nearest'``. Defaults to None.
        codec : str | None
            The codec to use.
        bitrate : float | None
            The bitrate to use.
        callback : callable | None
            A function to call on each iteration. Useful for status message
            updates. It will be passed keyword arguments ``frame`` and
            ``n_frames``.
        %(brain_screenshot_time_viewer)s
        **kwargs : dict
            Specify additional options for :mod:`imageio`.
        """
        if filename is None:
            filename = _generate_default_filename(".mp4")
        # route through the GUI-aware variant when the time viewer is active
        func = self._save_movie_tv if self.time_viewer else self._save_movie
        func(filename, time_dilation, tmin, tmax,
             framerate, interpolation, codec,
             bitrate, callback, time_viewer, **kwargs)
    def _make_movie_frames(self, time_dilation, tmin, tmax, framerate,
                           interpolation, callback, time_viewer):
        """Render one screenshot per movie frame over ``[tmin, tmax]``."""
        from math import floor
        # find tmin
        if tmin is None:
            tmin = self._times[0]
        elif tmin < self._times[0]:
            raise ValueError("tmin=%r is smaller than the first time point "
                             "(%r)" % (tmin, self._times[0]))
        # find indexes at which to create frames
        if tmax is None:
            tmax = self._times[-1]
        elif tmax > self._times[-1]:
            raise ValueError("tmax=%r is greater than the latest time point "
                             "(%r)" % (tmax, self._times[-1]))
        n_frames = floor((tmax - tmin) * time_dilation * framerate)
        times = np.arange(n_frames, dtype=float)
        times /= framerate * time_dilation
        times += tmin
        # map frame times to (possibly fractional) sample indices
        time_idx = np.interp(times, self._times, np.arange(self._n_times))
        n_times = len(time_idx)
        if n_times == 0:
            raise ValueError("No time points selected")
        logger.debug("Save movie for time points/samples\n%s\n%s"
                     % (times, time_idx))
        # Sometimes the first screenshot is rendered with a different
        # resolution on OS X
        self.screenshot(time_viewer=time_viewer)
        old_mode = self.time_interpolation
        if interpolation is not None:
            self.set_time_interpolation(interpolation)
        try:
            images = [
                self.screenshot(time_viewer=time_viewer)
                for _ in self._iter_time(time_idx, callback)]
        finally:
            # always restore the user's interpolation mode
            self.set_time_interpolation(old_mode)
        if callback is not None:
            callback(frame=len(time_idx), n_frames=len(time_idx))
        return images
    def _iter_time(self, time_idx, callback):
        """Iterate through time points, then reset to current time.
        Parameters
        ----------
        time_idx : array_like
            Time point indexes through which to iterate.
        callback : callable | None
            Callback to call before yielding each frame.
        Yields
        ------
        idx : int | float
            Current index.
        Notes
        -----
        Used by movie and image sequence saving functions.
        """
        # route through the GUI callback when the time viewer is active so
        # its widgets stay in sync
        if self.time_viewer:
            func = partial(self.callbacks["time"],
                           update_widget=True)
        else:
            func = self.set_time_point
        current_time_idx = self._data["time_idx"]
        for ii, idx in enumerate(time_idx):
            func(idx)
            if callback is not None:
                callback(frame=ii, n_frames=len(time_idx))
            yield idx
        # Restore original time index
        func(current_time_idx)
    def _check_stc(self, hemi, array, vertices):
        """Unpack a SourceEstimate into ``(stc, data array, vertices)``."""
        from ...source_estimate import (
            _BaseSourceEstimate, _BaseSurfaceSourceEstimate,
            _BaseMixedSourceEstimate, _BaseVolSourceEstimate
        )
        if isinstance(array, _BaseSourceEstimate):
            stc = array
            stc_surf = stc_vol = None
            if isinstance(stc, _BaseSurfaceSourceEstimate):
                stc_surf = stc
            elif isinstance(stc, _BaseMixedSourceEstimate):
                # mixed estimate: select the surface or the volume part
                stc_surf = stc.surface() if hemi != 'vol' else None
                stc_vol = stc.volume() if hemi == 'vol' else None
            elif isinstance(stc, _BaseVolSourceEstimate):
                stc_vol = stc if hemi == 'vol' else None
            else:
                raise TypeError("stc not supported")
            if stc_surf is None and stc_vol is None:
                raise ValueError("No data to be added")
            if stc_surf is not None:
                array = getattr(stc_surf, hemi + '_data')
                vertices = stc_surf.vertices[0 if hemi == 'lh' else 1]
            if stc_vol is not None:
                array = stc_vol.data
                vertices = np.concatenate(stc_vol.vertices)
        else:
            # plain array input: pass array/vertices through unchanged
            stc = None
        return stc, array, vertices
def _check_hemi(self, hemi, extras=()):
"""Check for safe single-hemi input, returns str."""
_validate_type(hemi, (None, str), 'hemi')
if hemi is None:
if self._hemi not in ['lh', 'rh']:
raise ValueError('hemi must not be None when both '
'hemispheres are displayed')
hemi = self._hemi
_check_option('hemi', hemi, ('lh', 'rh') + tuple(extras))
return hemi
def _check_hemis(self, hemi):
"""Check for safe dual or single-hemi input, returns list."""
if hemi is None:
if self._hemi not in ['lh', 'rh']:
hemi = ['lh', 'rh']
else:
hemi = [self._hemi]
elif hemi not in ['lh', 'rh']:
extra = ' or None' if self._hemi in ['lh', 'rh'] else ''
raise ValueError('hemi must be either "lh" or "rh"' + extra)
else:
hemi = [hemi]
return hemi
    def _to_borders(self, label, hemi, borders, restrict_idx=None):
        """Convert a label/parc to borders."""
        if not isinstance(borders, (bool, int)) or borders < 0:
            raise ValueError('borders must be a bool or positive integer')
        if borders:
            n_vertices = label.size
            edges = mesh_edges(self.geo[hemi].orig_faces)
            edges = edges.tocoo()
            # a border edge joins two vertices with different label values
            border_edges = label[edges.row] != label[edges.col]
            show = np.zeros(n_vertices, dtype=np.int64)
            keep_idx = np.unique(edges.row[border_edges])
            if isinstance(borders, int):
                # dilate the border by ``borders`` rings of face neighbors
                for _ in range(borders):
                    keep_idx = np.in1d(
                        self.geo[hemi].orig_faces.ravel(), keep_idx)
                    keep_idx.shape = self.geo[hemi].orig_faces.shape
                    keep_idx = self.geo[hemi].orig_faces[
                        np.any(keep_idx, axis=1)]
                    keep_idx = np.unique(keep_idx)
                if restrict_idx is not None:
                    keep_idx = keep_idx[np.in1d(keep_idx, restrict_idx)]
            # zero out the non-border vertices in place
            show[keep_idx] = 1
            label *= show
    def enable_depth_peeling(self):
        """Enable depth peeling."""
        # delegate to the backend renderer
        self._renderer.enable_depth_peeling()
def get_picked_points(self):
"""Return the vertices of the picked points.
Returns
-------
points : list of int | None
The vertices picked by the time viewer.
"""
if hasattr(self, "time_viewer"):
return self.picked_points
def __hash__(self):
"""Hash the object."""
raise NotImplementedError
def _safe_interp1d(x, y, kind='linear', axis=-1, assume_sorted=False):
"""Work around interp1d not liking singleton dimensions."""
from scipy.interpolate import interp1d
if y.shape[axis] == 1:
def func(x):
return np.take(y, np.zeros(np.asarray(x).shape, int), axis=axis)
return func
else:
return interp1d(x, y, kind, axis=axis, assume_sorted=assume_sorted)
def _update_limits(fmin, fmid, fmax, center, array):
if center is None:
if fmin is None:
fmin = array.min() if array.size > 0 else 0
if fmax is None:
fmax = array.max() if array.size > 0 else 1
else:
if fmin is None:
fmin = 0
if fmax is None:
fmax = np.abs(center - array).max() if array.size > 0 else 1
if fmid is None:
fmid = (fmin + fmax) / 2.
if fmin >= fmid:
raise RuntimeError('min must be < mid, got %0.4g >= %0.4g'
% (fmin, fmid))
if fmid >= fmax:
raise RuntimeError('mid must be < max, got %0.4g >= %0.4g'
% (fmid, fmax))
return fmin, fmid, fmax
def _update_monotonic(lims, fmin, fmid, fmax):
    """Write fmin/fmid/fmax into ``lims`` in place, keeping them monotonic.

    Each non-None argument is stored and the other two entries are bumped
    as needed so ``lims['fmin'] <= lims['fmid'] <= lims['fmax']`` holds
    after every step.  Updates are applied in the order fmin, fmid, fmax,
    so a later argument overrides an earlier bump.
    """
    if fmin is not None:
        lims['fmin'] = fmin
        # Raise the other limits if the new minimum exceeds them.
        if lims['fmax'] < fmin:
            logger.debug(f' Bumping fmax = {lims["fmax"]} to {fmin}')
            lims['fmax'] = fmin
        if lims['fmid'] < fmin:
            logger.debug(f' Bumping fmid = {lims["fmid"]} to {fmin}')
            lims['fmid'] = fmin
    assert lims['fmin'] <= lims['fmid'] <= lims['fmax']
    if fmid is not None:
        lims['fmid'] = fmid
        # Pull the outer limits toward the new midpoint where needed.
        if lims['fmin'] > fmid:
            logger.debug(f' Bumping fmin = {lims["fmin"]} to {fmid}')
            lims['fmin'] = fmid
        if lims['fmax'] < fmid:
            logger.debug(f' Bumping fmax = {lims["fmax"]} to {fmid}')
            lims['fmax'] = fmid
    assert lims['fmin'] <= lims['fmid'] <= lims['fmax']
    if fmax is not None:
        lims['fmax'] = fmax
        # Lower the other limits if the new maximum undercuts them.
        if lims['fmin'] > fmax:
            logger.debug(f' Bumping fmin = {lims["fmin"]} to {fmax}')
            lims['fmin'] = fmax
        if lims['fmid'] > fmax:
            logger.debug(f' Bumping fmid = {lims["fmid"]} to {fmax}')
            lims['fmid'] = fmax
    assert lims['fmin'] <= lims['fmid'] <= lims['fmax']
def _get_range(brain):
val = np.abs(np.concatenate(list(brain._current_act_data.values())))
return [np.min(val), np.max(val)]
class _FakeIren():
    """No-op stand-in for a render-window interactor.

    The method names match VTK interactor event hooks, so callers can
    invoke them unconditionally; every method silently does nothing.
    """

    def EnterEvent(self):
        pass

    def MouseMoveEvent(self):
        pass

    def LeaveEvent(self):
        pass

    def SetEventInformation(self, *args, **kwargs):
        pass

    def CharEvent(self):
        pass

    def KeyPressEvent(self, *args, **kwargs):
        pass

    def KeyReleaseEvent(self, *args, **kwargs):
        pass
<|file_name|>parserEDNA.py<|end_file_name|><|fim▁begin|>"This program will parse matrix files to convert them into objects usable by JS files"
# libraries' imports for create the parser
## librairy to create the json object
import json
import os
## library for using regular expressions
import re
# opening of all files, each containing an EDNA matrix
EDNAFULL=open("EDNA/EDNAFULL.txt")
EDNAMAT=open("EDNA/EDNAMAT.txt")
EDNASIMPLE=open("EDNA/EDNASIMPLE.txt")
#opening of the file which will be writed
mat=open ("matrixEDNA.json","w")
#creation of the beginning of the file
mat.write("matrixEDNA=")
mat.write("{")
test=os.listdir("./EDNA")
print test
def parserEDNA(matrix1,name):
#reading of the matrix file, line by line
matrix=[]
content= matrix1.read()
lines= content.split("\n")<|fim▁hole|> #for each line, delete spaces, write the matrix name and, after, scores into the matrix
for i in lines:
j=i.split(" ")
for k in range(len(j)):
if j[0]=="#":
pass
if j[0]!="#":
if re.match(r"[-][0-9]",j[k]) or re.match(r"[0-9]",j[k]):
matrix.append(float(j[k]))
#convert the Python list in JSON object
matrix2=json.dumps(matrix)
#writing in the JSON document of the matrix
mat.write(name)
mat.write(":")
mat.write(matrix2)
mat.write("\n")
#execution of the parser for all matrices
liste=[EDNAFULL,EDNAMAT,EDNASIMPLE]
for i in range(len(test)):
test1=test[i].split(".")
parserEDNA(liste[i],test1[0])
# closing of all matrix files, writing the end of the JSON file et closing of this one
EDNAFULL.close()
EDNAMAT.close()
EDNASIMPLE.close()
mat.write("}")
mat.close()<|fim▁end|> | |
<|file_name|>db.py<|end_file_name|><|fim▁begin|>import logging
from datetime import datetime
from core import app
from sqlalchemy import inspect
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy(app)
class Show(db.Model):
show_id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50))
link = db.Column(db.String(255))
country = db.Column(db.String(10))
started = db.Column(db.String(15))
total_seasons = db.Column(db.Integer)
status = db.Column(db.String(32))
classification = db.Column(db.String(20))
episodes_saved_at = db.Column(db.DateTime)
# genres = db.Column(db.String(20))
episodes = db.relationship("Episode", order_by="Episode.episode_id", backref="show")
def __init__(self, show_id, name, link, country, started, total_seasons, status, classification):<|fim▁hole|> self.show_id = show_id
self.name = name
self.link = link
self.country = country
self.started = started
self.total_seasons = total_seasons
self.status = status
self.classification = classification
class Episode(db.Model):
    """A single episode of a show, linked to :class:`Show` via show_id."""
    episode_id = db.Column(db.Integer, primary_key=True)
    show_id = db.Column(db.Integer, db.ForeignKey('show.show_id'))
    episode_number = db.Column(db.Integer)
    season_number = db.Column(db.Integer)
    season_episode_number = db.Column(db.Integer)
    air_date = db.Column(db.Date)
    title = db.Column(db.String(255))
    link = db.Column(db.String(255))
    watched = db.Column(db.String(1))
    # show = db.relationship("Show", backref=db.backref("episodes", order_by=episode_id))

    # An episode is uniquely identified by its show, season and number
    # within the season.
    __table_args__ = (
        db.UniqueConstraint('show_id', 'season_number', 'season_episode_number', name='_show_session_epi_uc'),)

    def __init__(self, episode_id, show_id, episode_number, season_number, season_episode_number, air_date, title,
                 link, watched):
        # BUGFIX: this method was previously misspelled ``__int__`` and so
        # never ran as the constructor; renamed to ``__init__`` to match the
        # positional-construction pattern used by the Show model above.
        self.episode_id = episode_id
        self.show_id = show_id
        self.episode_number = episode_number
        self.season_number = season_number
        self.season_episode_number = season_episode_number
        self.air_date = air_date
        self.title = title
        self.link = link
        self.watched = watched

    def is_older(self, dt=None):
        """Return True if this episode aired strictly before ``dt``.

        ``dt`` defaults to *today at call time*.  (The previous default of
        ``datetime.now().date()`` was evaluated once at import time and
        went stale in long-running processes.)
        """
        if dt is None:
            dt = datetime.now().date()
        return self.air_date < dt
def insert_show(show):
db.session.add(show)
db.session.commit()
def insert_entity(entity):
db.session.add(entity)
db.session.commit()
def show_exists(show_id):
    """Return True when a Show row with this id is present in the DB."""
    match = Show.query.filter_by(show_id=show_id).first()
    return match is not None
def delete_show(show_id):
    """Delete the show with ``show_id`` and all of its episodes.

    Episodes are removed explicitly because no delete cascade is declared
    on the Show.episodes relationship; everything is committed in a single
    transaction at the end.

    NOTE(review): assumes the show exists — if ``first()`` returns None,
    ``db.session.delete(None)`` will raise; confirm callers check first.
    """
    show = Show.query.filter_by(show_id=show_id).first()
    db.session.delete(show)
    for e in Episode.query.filter_by(show_id=show_id).all():
        db.session.delete(e)
    db.session.commit()
def create_tables():
logging.info('Creating tables')
db.create_all()
def drop_tables():
logging.info('Deleting tables')
db.drop_all()
def check_table_exists():
table_names = inspect(db.engine).get_table_names()
return 'episode' in table_names and 'show' in table_names
if __name__ == '__main__':
import sys
app.config['SQLALCHEMY_ECHO'] = True
if sys.argv[1] == 'init':
create_tables()<|fim▁end|> | |
<|file_name|>activate.go<|end_file_name|><|fim▁begin|>package commands
import (<|fim▁hole|> "github.com/codegangsta/cli"
"github.com/ryo33/zenv/environment"
"github.com/ryo33/zenv/util"
)
// activate describes the "activate" sub-command of the CLI: it activates a
// named environment (see doActivate for the expected arguments).
var activate = cli.Command{
	Name: "activate",
	Usage: "activate the environment",
	// TODO(review): the long-form description is still empty.
	Description: `
	`,
	Action: doActivate,
}
func doActivate(c *cli.Context) {
args := c.Args()
if len(args) == 2 {
environment.GetGlobalEnv(args[1]).Activate(args[0])
} else {
util.PrintArgumentError(2)
}
}<|fim▁end|> | |
<|file_name|>draw_PES.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import matplotlib as mpl
from matplotlib import pyplot as plt
import argparse
def add_adiabatic_map_to_axis(axis, style, energies, color):
    """Draw one set of stationary-point energies on ``axis``.

    Each energy becomes a horizontal deck of width style['WIDTH'],
    spaced style['SPACING'] apart, and consecutive decks are joined
    by straight connector lines.
    """
    start = style['START']
    width = style['WIDTH']
    spacing = style['SPACING']
    # Horizontal energy decks.
    for i, energy in enumerate(energies):
        left = start + i * spacing
        axis.plot([left, left + width], [energy, energy],
                  '-%s' % color, linewidth=2)
    # Connectors between consecutive decks.
    for i in range(1, len(energies)):
        left = start + (i - 1) * spacing
        axis.plot([left + width, left + spacing],
                  [energies[i - 1], energies[i]], '-%s' % color)
def getargs():
parser = argparse.ArgumentParser(description="""
Make plot from user provided energies.
Can read multiple sets of energies.""",
formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument('-o', '--output',
default='PES.svg',
help='File name of output figure')
parser.add_argument('--dpi',
default=300, type=int,
help='Resolution for bitmaps')
parser.add_argument('-e', '--energies',
nargs='+', type=float, action='append',
help='Energies for any number of stationary points')
parser.add_argument('-l', '--labels', nargs='+',
help='Name of stationary points')<|fim▁hole|>
# less colors than PES ? add 'k'
if args.colors:
missing_colors = len(args.energies) - len(args.colors)
missing_colors = (missing_colors > 0) * missing_colors
args.colors += 'k' * missing_colors
return args
def makelabels(N):
    """Make automatic labels for N stationary points: R, TS1, INT1, TS2, ..., P.

    Alternates TS/INT labels after the reactant 'R' and appends 'P' for an
    odd-length profile of at least 3 points.
    """
    labels = ['R']
    # BUGFIX: use floor division so these stay ints under Python 3 as well
    # (true division returned floats, making range() below raise).
    n_ts = N // 2
    n_i = (N - 2) // 2
    n_i = n_i * (n_i > 0)  # becomes zero if negative
    for i in range(n_ts + n_i):
        if i % 2:
            labels.append('INT%d' % (i // 2 + 1))
        else:
            labels.append('TS%d' % (i // 2 + 1))
    if N % 2 and N >= 3:
        labels.append('P')
    return labels
def configure_axis_limits(axis, style, energies):
    """Set y/x limits and tick positions from all energy sets.

    Returns the length of the longest energy set (number of decks).
    """
    lo = float('+inf')
    hi = float('-inf')
    maxlen = 0
    for energy_set in energies:
        lo = min(lo, min(energy_set))
        hi = max(hi, max(energy_set))
        maxlen = max(maxlen, len(energy_set))
    # Pad the y range by 10% on each side.
    span = hi - lo
    axis.set_ylim(lo - 0.1 * span, hi + 0.1 * span)
    xmax = 2 * style['START'] + style['WIDTH'] + (maxlen - 1) * style['SPACING']
    axis.set_xlim(0, xmax)
    # One tick at the center of each deck.
    ticks = [style['START'] + i * style['SPACING'] + style['WIDTH'] / 2.0
             for i in range(maxlen)]
    axis.set_xticks(ticks)
    return maxlen
def main():
    """Parse CLI arguments and render the potential-energy-surface figure."""
    # get user input
    args = getargs()
    # important style features
    style = {
        'WIDTH' : 4, # width of horizontal bars
        'SPACING' : 10, # spacing between center of horizontal bars
        'START' : 3 # x-offset from y-axis
    }
    # Configure Figure
    fig = plt.gcf()
    fig.set_size_inches(3.3, 2.5)
    mpl.rcParams.update({'font.size': 7, 'axes.linewidth':0.5})
    plt.subplots_adjust(bottom=.15)
    plt.subplots_adjust(left=.15)
    plt.ylabel('Energy (kcal/mol)')
    plt.xlabel('Reaction coordinate')
    ax = fig.gca()
    ax.grid(True)
    maxlen = configure_axis_limits(ax, style, args.energies)
    # Fall back to automatic R/TS/INT/P labels when none were supplied.
    if not args.labels:
        args.labels = makelabels(maxlen)
    ax.set_xticklabels(args.labels)
    # plot stuff
    color = 'k'  # default when no per-set colors were given
    for j,energies in enumerate(args.energies):
        if args.colors:
            color = args.colors[j]
        add_adiabatic_map_to_axis(ax, style, energies, color)
    plt.savefig(args.output, dpi=args.dpi)
main()<|fim▁end|> | parser.add_argument('-c', '--colors', nargs='+',
help='Color codes')
args = parser.parse_args() |
<|file_name|>redirect-rename.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name = "foo"]
mod hidden {
// @has foo/hidden/struct.Foo.html
// @has - '//p/a' '../../foo/struct.FooBar.html'
pub struct Foo {}
// @has foo/hidden/bar/index.html
// @has - '//p/a' '../../foo/baz/index.html'
pub mod bar {
// @has foo/hidden/bar/struct.Thing.html<|fim▁hole|> }
}
// @has foo/struct.FooBar.html
pub use hidden::Foo as FooBar;
// @has foo/baz/index.html
// @has foo/baz/struct.Thing.html
pub use hidden::bar as baz;<|fim▁end|> | // @has - '//p/a' '../../foo/baz/struct.Thing.html'
pub struct Thing {} |
<|file_name|>rm_dirs_contains_only_if_xml.py<|end_file_name|><|fim▁begin|>'''
This script will remove the directories if that contains only xml files.
'''
<|fim▁hole|>srcpath = raw_input("Enter the source path : ")
for root, sub, files in os.walk(os.path.abspath(srcpath)):
if files:
files = [f for f in files if not f.endswith('.xml')]
if not files:
fpath = os.path.join(root)
os.system('rm -rf %s' % fpath)
print "removed", fpath<|fim▁end|> | import os
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from django.db import migrations, models
import multiselectfield.db.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Book',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('categories', multiselectfield.db.fields.MultiSelectField(choices=[(1, 'Handbooks and manuals by discipline'), (2, 'Business books'), (3, 'Books of literary criticism'), (4, 'Books about literary theory'), (5, 'Books about literature')], default=1, max_length=9)),
('tags', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('sex', 'Sex'), ('work', 'Work'), ('happy', 'Happy'), ('food', 'Food'), ('field', 'Field'), ('boring', 'Boring'), ('interesting', 'Interesting'), ('huge', 'Huge'), ('nice', 'Nice')], max_length=54, null=True)),
('published_in', multiselectfield.db.fields.MultiSelectField(choices=[('Canada - Provinces', (('AB', 'Alberta'), ('BC', 'British Columbia'))), ('USA - States', (('AK', 'Alaska'), ('AL', 'Alabama'), ('AZ', 'Arizona')))], max_length=2, verbose_name='Province or State')),
('chapters', multiselectfield.db.fields.MultiSelectField(choices=[(1, 'Chapter I'), (2, 'Chapter II')], default=1, max_length=3)),
],
),
]<|fim▁end|> | # Generated by Django 2.2.6 on 2019-10-31 08:31 |
<|file_name|>badge_maker.py<|end_file_name|><|fim▁begin|>"""
Drone.io badge generator.
Currently set up to work on Mac.<|fim▁hole|>"""
import os
from PIL import Image, ImageDraw, ImageFont
SIZE = (95, 18)
def hex_colour(hex):
    """Convert a hex colour string ('#RRGGBB' or 'RRGGBB') to an (r, g, b) tuple."""
    if hex[0] == '#':
        hex = hex[1:]
    # Parse each two-digit channel as a base-16 int.
    return tuple(int(hex[i:i + 2], 16) for i in (0, 2, 4))
BACKGROUND = hex_colour('#4A4A4A')
SUCCESS = hex_colour('#94B944')
WARNING = hex_colour('#E4A83C')
ERROR = hex_colour('#B10610')
SUCCESS_CUTOFF = 85
WARNING_CUTOFF = 45
FONT = ImageFont.truetype(size=10, filename="/Library/Fonts/Arial.ttf")
FONT_SHADOW = hex_colour('#525252')
PADDING_TOP = 3
def build_image(percentage, colour):
    """Render a 95x18 coverage badge as a PIL Image.

    The left portion shows the word 'coverage' on the dark background and
    the right portion (x >= 55) is a rectangle filled with ``colour``
    showing ``percentage``.  Each text is drawn twice, offset by one pixel,
    to fake a drop shadow.
    """
    image = Image.new('RGB', SIZE, color=BACKGROUND)
    drawing = ImageDraw.Draw(image)
    # Coloured status area on the right side of the badge.
    drawing.rectangle([(55, 0), SIZE], colour, colour)
    # Shadow first (offset +1,+1), then the foreground text on top.
    drawing.text((8, PADDING_TOP+1), 'coverage', font=FONT, fill=FONT_SHADOW)
    drawing.text((7, PADDING_TOP), 'coverage', font=FONT)
    drawing.text((63, PADDING_TOP+1), '%s%%' % percentage, font=FONT, fill=FONT_SHADOW)
    drawing.text((62, PADDING_TOP), '%s%%' % percentage, font=FONT)
    return image
os.chdir('_build')
for i in range(101):
filename = '%i.png' % i
file = open(filename, 'wb')
if i < WARNING_CUTOFF:
build_image(i, ERROR).save(file)
elif i < SUCCESS_CUTOFF:
build_image(i, WARNING).save(file)
else:
build_image(i, SUCCESS).save(file)<|fim▁end|> |
Requires Pillow. |
<|file_name|>regions-addr-of-self.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct dog {
cats_chased: uint,
}<|fim▁hole|> *p += 1u;
}
pub fn chase_cat_2(&mut self) {
let p: &mut uint = &mut self.cats_chased;
*p += 1u;
}
}
fn dog() -> dog {
dog {
cats_chased: 0u
}
}
fn main() {
let mut d = dog();
d.chase_cat();
info!("cats_chased: {}", d.cats_chased);
}<|fim▁end|> |
impl dog {
pub fn chase_cat(&mut self) {
let p: &'static mut uint = &mut self.cats_chased; //~ ERROR cannot infer an appropriate lifetime |
<|file_name|>media.js<|end_file_name|><|fim▁begin|>jQuery(document).ready(function(jQuery){
jQuery('.smile-upload-media').click(function(e) {
_wpPluploadSettings['defaults']['multipart_params']['admin_page']= 'customizer';
var button = jQuery(this);
var id = 'smile_'+button.attr('id');
var uid = button.data('uid');
var rmv_btn = 'remove_'+button.attr('id');
var img_container = button.attr('id')+'_container';
//Extend wp.media object
var uploader = wp.media.frames.file_frame = wp.media({
title: 'Select or Upload Image',
button: {
text: 'Choose Image'
},
library: {
type: 'image'<|fim▁hole|> uploader.on('select', function(props, attachment){
attachment = uploader.state().get('selection').first().toJSON();
var data = attachment.id+"|"+attachment.url;
var sz = jQuery(".cp-media-"+uid).val();
var alt = attachment.alt;//console.log(id);
var val = attachment.id+"|"+sz+"|"+alt;
var a = jQuery("#"+id);
var name = jQuery("#"+id).attr('name');
a.val(val);
a.attr('value',val);
jQuery(".cp-media-"+uid).attr('data-id',attachment.id);
jQuery(".cp-media-"+uid).attr('data-alt',attachment.alt);
jQuery(".cp-media-"+uid).parents(".cp-media-sizes").removeClass("hide-for-default");
jQuery("."+img_container).html('<img src="'+attachment.url+'"/>');
jQuery("#"+rmv_btn).show();
button.text('Change Image');
// Partial Refresh
// - Apply background, background-color, color etc.
var css_preview = a.attr('data-css-preview') || '';
var selector = a.attr('data-css-selector') || '';
var property = a.attr('data-css-property') || '';
var unit = a.attr('data-unit') || 'px';
var url = attachment.url;
partial_refresh_image( css_preview, selector, property, unit, url );
jQuery(document).trigger('cp-image-change', [name,url, val] );
jQuery("#"+id).trigger('change');
});
uploader.open(button);
return false;
});
jQuery('.smile-remove-media').on('click', function(e){
e.preventDefault();
var button = jQuery(this);
var id = button.attr('id').replace("remove_","smile_");
var upload = button.attr('id').replace("remove_","");
var img_container = button.attr('id').replace("remove_","")+'_container';
jQuery("#"+id).attr('value','');
// Partial Refresh
// - Apply background, background-color, color etc.
var a = jQuery("#"+id);
var css_preview = a.attr('data-css-preview') || '';
var selector = a.attr('data-css-selector') || '';
var property = a.attr('data-css-property') || '';
var unit = a.attr('data-unit') || 'px';
var value = ''; // Empty the background image
var name = jQuery("#"+id).attr('name');
partial_refresh_image( css_preview, selector, property, unit, value );
var html = '<p class="description">No Image Selected</p>';
jQuery("."+img_container).html(html);
button.hide();
jQuery("#"+upload).text('Select Image');
jQuery(document).trigger('cp-image-remove', [name,value] );
jQuery("#"+id).trigger('change');
});
jQuery('.smile-default-media').on('click', function(e){
e.preventDefault();
var button = jQuery(this);
var id = button.attr('id').replace("default_","smile_");
var upload = button.attr('id').replace("default_","");
var img_container = button.attr('id').replace("default_","")+'_container';
var container = jQuery(this).parents('.content');
var default_img = jQuery(this).data('default');
jQuery("#"+id).attr('value',default_img);
// Partial Refresh
// - Apply background, background-color, color etc.
var a = jQuery("#"+id);
var css_preview = a.attr('data-css-preview') || '';
var selector = a.attr('data-css-selector') || '';
var property = a.attr('data-css-property') || '';
var unit = a.attr('data-unit') || 'px';
var value = default_img; // Empty the background image
var name = jQuery("#"+id).attr('name');
partial_refresh_image( css_preview, selector, property, unit, value );
var html = '<p class="description">No Image Selected</p>';
jQuery("."+img_container).html('<img src="'+default_img+'"/>');
jQuery(document).trigger('cp-image-default', [name,value] );
jQuery("#"+id).trigger('change');
container.find(".cp-media-sizes").hide().addClass('hide-for-default');
});
jQuery(".cp-media-size").on("change", function(e){
var img_id = jQuery(this).attr('data-id');
var alt = jQuery(this).attr('data-alt');
var input = 'smile_'+jQuery(this).parents('.cp-media-sizes').data('name');
var val = "";
if( img_id !== '' ) {
val = img_id+"|"+jQuery(this).val();
}
if( alt !== '' ) {
val = val+"|"+alt;
}
jQuery("#"+input).val(val);
jQuery("#"+input).attr('value',val);
});
/**
 * Apply an image value to the live preview.
 *
 * When css_preview is anything other than exactly 1 (including null or
 * the string 'undefined'), the value is applied inline to the preview
 * iframe: the 'src' property sets an <img> src attribute, any other
 * property is set as a CSS url(...) value.  In all cases an 'updated'
 * event is triggered so listeners can regenerate stylesheets.
 *
 * @param {*}      css_preview  1 to skip the inline update, anything else applies it
 * @param {string} selector     jQuery selector inside the preview iframe
 * @param {string} property     'src' or a CSS property name
 * @param {string} unit         unit string forwarded to the 'updated' event
 * @param {string} value        image URL (may be empty to clear)
 */
function partial_refresh_image( css_preview, selector, property, unit, value ) {
    // apply css by - inline
    if( css_preview != 1 || null == css_preview || 'undefined' == css_preview ) {
        var frame = jQuery("#smile_design_iframe").contents();
        switch( property ) {
            case 'src': frame.find( selector ).attr( 'src' , value );
                break;
            default:
                // Non-src properties are applied as CSS url() values.
                frame.find( selector ).css( property , 'url(' + value + ')' );
                break;
        }
    }
    // apply css by - after css generation
    jQuery(document).trigger('updated', [css_preview, selector, property, value, unit]);
}
});<|fim▁end|> | },
multiple: false,
}); |
<|file_name|>sub.py<|end_file_name|><|fim▁begin|>from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import numpy as np
fig = plt.figure()
ax1 = fig.add_subplot(1, 2, 1, projection="3d")<|fim▁hole|>z = np.cos(x)
ax1.plot(x, y, z)
ax2 = fig.add_subplot(1, 2, 2, projection="3d")
X = np.arange(-2, 2, 0.1)
Y = np.arange(-2, 2, 0.1)
X, Y = np.meshgrid(X, Y)
Z = np.sqrt(X ** 2 + Y ** 2)
ax2.plot_surface(X, Y, Z, cmap=plt.cm.winter)
plt.show()<|fim▁end|> |
x = np.linspace(-6 * np.pi, 6 * np.pi, 1000)
y = np.sin(x) |
<|file_name|>FRZZ.cpp<|end_file_name|><|fim▁begin|>#include "simulation/Elements.h"
//#TPT-Directive ElementClass Element_FRZZ PT_FRZZ 100
// Element_FRZZ: "freeze powder" element definition.  The constructor fills
// in the static property table (appearance, physics, phase transitions)
// read by the simulation core.
Element_FRZZ::Element_FRZZ()
{
	Identifier = "DEFAULT_PT_FRZZ";
	Name = "FRZZ";
	Colour = PIXPACK(0xC0E0FF);
	// Menu placement: visible in the Powders section.
	MenuVisible = 1;
	MenuSection = SC_POWDERS;
	Enabled = 1;
	// Movement/physics coefficients.
	Advection = 0.7f;
	AirDrag = 0.01f * CFDS;
	AirLoss = 0.96f;
	Loss = 0.90f;
	Collision = -0.1f;
	Gravity = 0.05f;
	Diffusion = 0.01f;
	// Negative HotAir: cools/contracts the surrounding air.
	HotAir = -0.00005f* CFDS;
	Falldown = 1;
	// Interaction flags.
	Flammable = 0;
	Explosive = 0;
	Meltable = 0;
	Hardness = 20;
	Weight = 50;
	// Starts below freezing: 253.15 K == -20 C.
	Temperature = 253.15f;
	HeatConduct = 46;
	Description = "Freeze powder. When melted, forms ice that always cools. Spreads with regular water.";
	State = ST_SOLID;
	Properties = TYPE_PART;
	// Phase transitions: SNOW above 1.8 pressure, ICEI below 50 K,
	// WATR above 273.15 K (0 C).
	LowPressure = IPL;
	LowPressureTransition = NT;
	HighPressure = 1.8f;
	HighPressureTransition = PT_SNOW;
	LowTemperature = 50.0f;
	LowTemperatureTransition = PT_ICEI;
	HighTemperature = 273.15;
	HighTemperatureTransition = PT_WATR;
	Update = &Element_FRZZ::update;
}
//#TPT-Directive ElementHeader Element_FRZZ static int update(UPDATE_FUNC_ARGS)
int Element_FRZZ::update(UPDATE_FUNC_ARGS)
{
int r, rx, ry;
for (rx=-1; rx<2; rx++)<|fim▁hole|> r = pmap[y+ry][x+rx];
if (!r)
continue;
if ((r&0xFF)==PT_WATR&&5>rand()%100)
{
sim->part_change_type(r>>8,x+rx,y+ry,PT_FRZW);
parts[r>>8].life = 100;
parts[i].type = PT_NONE;
}
}
if (parts[i].type==PT_NONE) {
sim->kill_part(i);
return 1;
}
return 0;
}
Element_FRZZ::~Element_FRZZ() {}<|fim▁end|> | for (ry=-1; ry<2; ry++)
if (x+rx>=0 && y+ry>0 && x+rx<XRES && y+ry<YRES && (rx || ry))
{ |
<|file_name|>react-proper-lifecycle-methods.test.js<|end_file_name|><|fim▁begin|>// @ts-check
const { ESLintUtils } = require('@typescript-eslint/experimental-utils')
const RuleTester = ESLintUtils.RuleTester
const rule = require('../react-proper-lifecycle-methods')
// ------------------------------------------------------------------------------
// Tests
// ------------------------------------------------------------------------------
const ruleTester = new RuleTester({
parser: '@typescript-eslint/parser',
parserOptions: {
ecmaVersion: 2015,
sourceType: 'module',
},
})
ruleTester.run('react-proper-lifecycle-methods', rule, {
valid: [
// component without lifecycle methods passes without errors
{
filename: 'app/src/component-no-state.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps> {
public render() {
return null
}
}
`,
},
{
filename: 'app/src/component-lifecycle-events-no-state.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps> {
public componentWillMount() {}
public componentDidMount() {}
public componentWillUnmount() {}
public componentWillUpdate(nextProps: ICloningRepositoryProps) {}
public componentDidUpdate(prevProps: ICloningRepositoryProps) {}
public render() {
return null
}
}
`,
},
{
filename: 'app/src/component-lifecycle-events-with-state.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
interface ICloningRepositoryState {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps,ICloningRepositoryState> {
public componentWillMount() {}
public componentDidMount() {}
public componentWillUnmount() {}
public componentWillUpdate(nextProps: ICloningRepositoryProps, nextState: ICloningRepositoryState) {}
public componentDidUpdate(prevProps: ICloningRepositoryProps, prevState: ICloningRepositoryState) {}
public render() {
return null
}
}
`,
},
{
filename: 'app/src/pure-component-lifecycle-events-with-state.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
interface ICloningRepositoryState {}
export class CloningRepositoryView extends React.PureComponent<ICloningRepositoryProps,ICloningRepositoryState> {
public componentWillMount() {}
public componentDidMount() {}
public componentWillUnmount() {}
public componentWillUpdate(nextProps: ICloningRepositoryProps, nextState: ICloningRepositoryState) {}
public componentDidUpdate(prevProps: ICloningRepositoryProps, prevState: ICloningRepositoryState) {}
public render() {
return null
}
}
`,
},
{
filename: 'app/src/component-lifecycle-events-with-type-literal.tsx',
code: `
import * as React from 'react'
interface IWindowControlState {}
export class WindowControls extends React.Component<{}, IWindowControlState> {
public shouldComponentUpdate(nextProps: {}, nextState: IWindowControlState) {
return nextState.windowState !== this.state.windowState
}
public componentWillUpdate(nextProps: {}, nextState: IWindowControlState) { }
public render() {
return null
}
}
`,
},
{
filename: 'app/src/component-lifecycle-events-with-type-literal.tsx',
code: `
import * as React from 'react'
interface IWindowControlState {}
export class WindowControls extends React.Component {
public shouldComponentUpdate(nextProps: {}) {
return nextState.windowState !== this.state.windowState
}
public componentWillUpdate(nextProps: {}) { }
public render() {
return null
}
}
`,
},
// a regular class with the same method name is ignored
{
filename: 'app/src/ui/other.tsx',
code: `
class Something {
public componentWillUpdate(foo: string) {
}
}
`,
},
],
invalid: [
//
// shouldComponentUpdate expects the first parameter to be nextProps and match the component's prop type
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps> {
public shouldComponentUpdate(foo: string) { return true }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'nameMismatch',
data: {
methodName: 'shouldComponentUpdate',
parameterName: 'foo',
expectedName: 'nextProps',
},
},
{
messageId: 'typeMismatch',
data: {
methodName: 'shouldComponentUpdate',
parameterName: 'foo',
expectedType: 'ICloningRepositoryProps',
},
},
],
},
//
// shouldComponentUpdate expects the second parameter to be nextState and match the component's state type
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
interface ICloningRepositoryState {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps,ICloningRepositoryState> {
public shouldComponentUpdate(nextProps: ICloningRepositoryProps, foo: string) { return true }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'nameMismatch',
data: {
methodName: 'shouldComponentUpdate',
parameterName: 'foo',
expectedName: 'nextState',
},
},
{
messageId: 'typeMismatch',
data: {
methodName: 'shouldComponentUpdate',
parameterName: 'foo',
expectedType: 'ICloningRepositoryState',
},
},
],
},
//
// shouldComponentUpdate is not permitted to have any additional parameters
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
interface ICloningRepositoryState {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps,ICloningRepositoryState> {
public shouldComponentUpdate(nextProps: ICloningRepositoryProps, nextState: ICloningRepositoryState, additionalParam: void) { return true }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'unknownParameter',
data: {
methodName: 'shouldComponentUpdate',
parameterName: 'additionalParam',
},
},
],
},
//
// componentWillUpdate expects the first parameter to be nextProps and match the component's prop type
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps> {
public componentWillUpdate(bar: string) { }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'nameMismatch',
data: {
methodName: 'componentWillUpdate',
parameterName: 'bar',
expectedName: 'nextProps',
},
},
{
messageId: 'typeMismatch',
data: {
methodName: 'componentWillUpdate',
parameterName: 'bar',
expectedType: 'ICloningRepositoryProps',
},
},
],
},
//
// componentWillUpdate expects the first parameter to be nextProps and match the component's type literal
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface IWindowControlState {}<|fim▁hole|> public render() {
return null
}
}
`,
errors: [
{
messageId: 'nameMismatch',
data: {
methodName: 'componentWillUpdate',
parameterName: 'bar',
expectedName: 'nextProps',
},
},
{
messageId: 'typeMismatch',
data: {
methodName: 'componentWillUpdate',
parameterName: 'bar',
expectedType: '{}',
},
},
],
},
//
// componentWillUpdate expects the second parameter to be nextState and match the component's state type
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
interface ICloningRepositoryState {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps,ICloningRepositoryState> {
public componentWillUpdate(nextProps: ICloningRepositoryProps, foo: string) { }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'nameMismatch',
data: {
methodName: 'componentWillUpdate',
parameterName: 'foo',
expectedName: 'nextState',
},
},
{
messageId: 'typeMismatch',
data: {
methodName: 'componentWillUpdate',
parameterName: 'foo',
expectedType: 'ICloningRepositoryState',
},
},
],
},
//
// componentWillUpdate expects the first parameter to be nextProps and match the component's type
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface IWindowControlProps {}
interface IWindowControlState {}
export class CloningRepositoryView extends React.Component<IWindowControlProps, IWindowControlState> {
public componentDidUpdate(bar: string) { }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'nameMismatch',
data: {
methodName: 'componentDidUpdate',
parameterName: 'bar',
expectedName: 'prevProps',
},
},
{
messageId: 'typeMismatch',
data: {
methodName: 'componentDidUpdate',
parameterName: 'bar',
expectedType: 'IWindowControlProps',
},
},
],
},
//
// componentDidUpdate expects the second parameter to be nextState and match the component's state type
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
interface ICloningRepositoryState {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps,ICloningRepositoryState> {
public componentDidUpdate(prevProps: ICloningRepositoryProps, foo: string) { }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'nameMismatch',
data: {
methodName: 'componentDidUpdate',
parameterName: 'foo',
expectedName: 'prevState',
},
},
{
messageId: 'typeMismatch',
data: {
methodName: 'componentDidUpdate',
parameterName: 'foo',
expectedType: 'ICloningRepositoryState',
},
},
],
},
//
// Methods inside a component following the `component*` or `shouldComponent*` pattern should be rejected
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
interface ICloningRepositoryProps {}
interface ICloningRepositoryState {}
export class CloningRepositoryView extends React.Component<ICloningRepositoryProps,ICloningRepositoryState> {
public componentWillDoSomething() { }
public shouldComponentFoo() { return false }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'reservedMethodName',
data: {
methodName: 'componentWillDoSomething',
},
},
{
messageId: 'reservedMethodName',
data: {
methodName: 'shouldComponentFoo',
},
},
],
},
//
// Methods inside a non-generic component following the `component*` or `shouldComponent*` pattern should be rejected
//
{
filename: 'app/src/component.tsx',
code: `
import * as React from 'react'
export class CloningRepositoryView extends React.Component {
public componentWillDoSomething() { }
public shouldComponentFoo() { return false }
public render() {
return null
}
}
`,
errors: [
{
messageId: 'reservedMethodName',
data: {
methodName: 'componentWillDoSomething',
},
},
{
messageId: 'reservedMethodName',
data: {
methodName: 'shouldComponentFoo',
},
},
],
},
],
})<|fim▁end|> |
export class CloningRepositoryView extends React.Component<{}, IWindowControlState> {
public componentWillUpdate(bar: string) { }
|
<|file_name|>update_asset.js<|end_file_name|><|fim▁begin|><|fim▁hole|> * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
$(function() {
    // Asset-update page bootstrap: wires the update form, date pickers and
    // the add/remove handlers for unbounded (repeatable) table rows.
    // NOTE(review): obtainFormMeta is defined but never used in this
    // handler -- presumably kept for other scripts; verify before removing.
    var obtainFormMeta = function(formId) {
        return $(formId).data();
    };
    // Submit the update form via AJAX, blocking the action buttons while
    // the request is in flight and unblocking on either outcome.
    $('#form-asset-update').ajaxForm({
        beforeSubmit:function(){
            PublisherUtils.blockButtons({
                container:'updateButtons',
                msg:'Updating '+PublisherUtils.resolveCurrentPageAssetType()+' instance'
            });
        },
        success: function() {
            alert('Updated the '+PublisherUtils.resolveCurrentPageAssetType()+ ' successfully');
            PublisherUtils.unblockButtons({
                container:'updateButtons'
            });
        },
        error: function() {
            alert('Unable to update the '+PublisherUtils.resolveCurrentPageAssetType());
            PublisherUtils.unblockButtons({
                container:'updateButtons'
            });
        }
    });
    // Attach a DatePicker to a text input, but only when the field is
    // marked with data-render-options="date-time".  Invoked with the input
    // element as `this` (used via $.each below).
    var initDatePicker = function(){
        console.info('init date picker');
        if($(this).attr('data-render-options') == "date-time"){
            var dateField = this;
            $(this).DatePicker({
                mode: 'single',
                position: 'right',
                onBeforeShow: function(el){
                    // Seed the picker from the field's current (non-blank) value.
                    if($(dateField).val().replace(/^\s+|\s+$/g,"")){
                        $(dateField).DatePickerSetDate($(dateField).val(), true);
                    }
                },
                onChange: function(date, el) {
                    // Write back as M/D/YYYY; optionally close on selection.
                    $(el).val((date.getMonth()+1)+'/'+date.getDate()+'/'+date.getFullYear());
                    if($('#closeOnSelect input').attr('checked')) {
                        $(el).DatePickerHide();
                    }
                }
            });
        }
    };
    $('#form-asset-update input[type="text"]').each(initDatePicker);
    // Remove a repeatable row; hide the table when only the header row
    // (plus the row being removed) is left.
    var removeUnboundRow = function(link){
        var table = link.closest('table');
        if($('tr',table).length == 2){
            table.hide();
        }
        link.closest('tr').remove();
    };
    // Clone the hidden reference row into the matching unbounded table and
    // re-run date-picker initialisation on the new inputs.
    $('.js-add-unbounded-row').click(function(){
        var tableName = $(this).attr('data-name');
        var table = $('#table_'+tableName);
        var referenceRow = $('#table_reference_'+tableName);
        var newRow = referenceRow.clone().removeAttr('id');
        table.show().append(newRow);
        $('input[type="text"]',newRow).each(initDatePicker);
    });
    $('.js-unbounded-table').on('click','a',function(event){
        removeUnboundRow($(event.target));
    });
    // NOTE(review): after detach() the element's id is changed to
    // 'refernceTableForUnbounded', so the second selector below matches
    // nothing and the remove() is a no-op -- confirm whether the intent
    // was to remove the renamed element instead.
    $('#tmp_refernceTableForUnbounded').detach().attr('id','refernceTableForUnbounded').appendTo('body');
    $('#tmp_refernceTableForUnbounded').remove();
});
<|file_name|>cache.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate named_type_derive;
#[macro_use]
extern crate derivative;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
extern crate byteorder;
extern crate indyrs as indy;
extern crate indyrs as api;
extern crate ursa;
extern crate uuid;
extern crate named_type;
extern crate rmp_serde;
extern crate rust_base58;
extern crate time;
extern crate serde;
#[macro_use]
mod utils;
use utils::cache::*;
use utils::Setup;
use self::indy::ErrorCode;
pub const FORBIDDEN_TYPE: &'static str = "Indy::Test";
mod high_cases {
use super::*;
mod schema_cache {
use super::*;
use utils::domain::anoncreds::schema::{SchemaV1, SchemaId};
use utils::constants::*;
use std::thread::sleep;
#[test]
fn indy_get_schema_empty_options() {
let setup = Setup::wallet_and_pool();
let (schema_id, _, _) = utils::ledger::post_entities();
let options_json = json!({}).to_string();
let schema_json = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json).unwrap();
let _schema: SchemaV1 = serde_json::from_str(&schema_json).unwrap();
}
#[test]
fn indy_get_schema_empty_options_for_unknown_id() {
let setup = Setup::wallet_and_pool();
let options_json = json!({}).to_string();
let res = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
&SchemaId::new(DID, "other_schema", "1.0").0,
&options_json);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_get_schema_only_cache_no_cached_data() {
let setup = Setup::wallet_and_pool();
let (schema_id, _, _) = utils::ledger::post_entities();
let options_json = json!({"noUpdate": true}).to_string();
let res = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_get_schema_cache_works() {
let setup = Setup::wallet_and_pool();
let (schema_id, _, _) = utils::ledger::post_entities();
let options_json = json!({}).to_string();
let schema_json1 = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json
).unwrap();
let _schema: SchemaV1 = serde_json::from_str(&schema_json1).unwrap();
// now retrieve it from cache
let options_json = json!({"noUpdate": true}).to_string();
let schema_json2 = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json
).unwrap();
let _schema: SchemaV1 = serde_json::from_str(&schema_json2).unwrap();
assert_eq!(schema_json1, schema_json2);
}
#[test]
fn indy_get_schema_no_store_works() {
let setup = Setup::wallet_and_pool();
let (schema_id, _, _) = utils::ledger::post_entities();
let options_json = json!({"noStore": true}).to_string();
let schema_json1 = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json
).unwrap();
let _schema: SchemaV1 = serde_json::from_str(&schema_json1).unwrap();
// it should not be present inside of cache, because of noStore option in previous request.
let options_json = json!({"noUpdate": true}).to_string();
let res = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json
);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_get_schema_no_cache_works() {
let setup = Setup::wallet_and_pool();
let (schema_id, _, _) = utils::ledger::post_entities();
let options_json = json!({}).to_string();
let schema_json1 = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json
).unwrap();
let _schema: SchemaV1 = serde_json::from_str(&schema_json1).unwrap();
// it should not be present inside of cache, because of noStore option in previous request.
let options_json = json!({"noUpdate": true, "noCache": true}).to_string();
let res = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json
);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_get_schema_min_fresh_works() {
let setup = Setup::wallet_and_pool();
let (schema_id, _, _) = utils::ledger::post_entities();
let options_json = json!({}).to_string();
let schema_json1 = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json
).unwrap();
let _schema: SchemaV1 = serde_json::from_str(&schema_json1).unwrap();
sleep(std::time::Duration::from_secs(2));
// it should not be present inside of cache, because of noStore option in previous request.
let options_json = json!({"noUpdate": true, "minFresh": 1}).to_string();
let res = get_schema_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
schema_id,
&options_json
);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_purge_schema_cache_no_options() {
let setup = Setup::wallet();
purge_schema_cache(setup.wallet_handle, "{}").unwrap();
}
#[test]
fn indy_purge_schema_cache_all_data() {
let setup = Setup::wallet();
purge_schema_cache(setup.wallet_handle, &json!({"minFresh": -1}).to_string()).unwrap();
}
#[test]
fn indy_purge_schema_cache_older_than_1000_seconds() {
let setup = Setup::wallet();
purge_schema_cache(setup.wallet_handle, &json!({"minFresh": 1000}).to_string()).unwrap();
}
}
mod cred_def_cache {
use super::*;
use utils::domain::anoncreds::credential_definition::{CredentialDefinition};
use utils::constants::*;
use std::thread::sleep;
#[test]
fn indy_get_cred_def_empty_options() {
let setup = Setup::wallet_and_pool();
let (_, cred_def_id, _) = utils::ledger::post_entities();
let options_json = json!({}).to_string();
let cred_def_json = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json).unwrap();
let _cred_def: CredentialDefinition = serde_json::from_str(&cred_def_json).unwrap();
}
#[test]
fn indy_get_cred_def_only_cache_no_cached_data() {
let setup = Setup::wallet_and_pool();
let (_, cred_def_id, _) = utils::ledger::post_entities();
let options_json = json!({"noUpdate": true}).to_string();
let res = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_get_cred_def_cache_works() {
let setup = Setup::wallet_and_pool();
let (_, cred_def_id, _) = utils::ledger::post_entities();
let options_json = json!({}).to_string();
let cred_def_json1 = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,<|fim▁hole|> // now retrieve it from cache
let options_json = json!({"noUpdate": true}).to_string();
let cred_def_json2 = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json
).unwrap();
let _cred_def: CredentialDefinition = serde_json::from_str(&cred_def_json2).unwrap();
assert_eq!(cred_def_json1, cred_def_json2);
}
#[test]
fn indy_get_cred_def_no_store_works() {
let setup = Setup::wallet_and_pool();
let (_, cred_def_id, _) = utils::ledger::post_entities();
let options_json = json!({"noStore": true}).to_string();
let cred_def_json1 = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json
).unwrap();
let _cred_def: CredentialDefinition = serde_json::from_str(&cred_def_json1).unwrap();
// it should not be present inside of cache, because of noStore option in previous request.
let options_json = json!({"noUpdate": true}).to_string();
let res = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json
);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_get_cred_def_no_cache_works() {
let setup = Setup::wallet_and_pool();
let (_, cred_def_id, _) = utils::ledger::post_entities();
let options_json = json!({}).to_string();
let cred_def_json1 = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json
).unwrap();
let _cred_def: CredentialDefinition = serde_json::from_str(&cred_def_json1).unwrap();
// it should not be present inside of cache, because of noStore option in previous request.
let options_json = json!({"noUpdate": true, "noCache": true}).to_string();
let res = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json
);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_get_cred_def_min_fresh_works() {
let setup = Setup::wallet_and_pool();
let (_, cred_def_id, _) = utils::ledger::post_entities();
let options_json = json!({}).to_string();
let cred_def_json1 = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json
).unwrap();
let _cred_def: CredentialDefinition = serde_json::from_str(&cred_def_json1).unwrap();
sleep(std::time::Duration::from_secs(2));
// it should not be present inside of cache, because of noStore option in previous request.
let options_json = json!({"noUpdate": true, "minFresh": 1}).to_string();
let res = get_cred_def_cache(
setup.pool_handle,
setup.wallet_handle,
DID_MY1,
cred_def_id,
&options_json
);
assert_code!(ErrorCode::LedgerNotFound, res);
}
#[test]
fn indy_purge_cred_def_cache_no_options() {
let setup = Setup::wallet();
purge_cred_def_cache(setup.wallet_handle, "{}").unwrap();
}
#[test]
fn indy_purge_cred_def_cache_all_data() {
let setup = Setup::wallet();
purge_cred_def_cache(setup.wallet_handle, &json!({"minFresh": -1}).to_string()).unwrap();
}
#[test]
fn indy_purge_cred_def_cache_older_than_1000_seconds() {
let setup = Setup::wallet();
purge_cred_def_cache(setup.wallet_handle, &json!({"minFresh": 1000}).to_string()).unwrap();
}
}
}<|fim▁end|> | &options_json
).unwrap();
let _cred_def: CredentialDefinition = serde_json::from_str(&cred_def_json1).unwrap();
|
<|file_name|>tcc_utils.py<|end_file_name|><|fim▁begin|>from lxml import etree
from anansi.xml import XMLMessage,gen_element,XMLError
class TCCError(Exception):
    """Raised when the TCC server reports a failure response."""

    def __init__(self, msg):
        # Hand the message straight to the base Exception.
        Exception.__init__(self, msg)
class TCCMessage(XMLMessage):
def __init__(self,user,comment=""):
super(TCCMessage,self).__init__(gen_element('tcc_request'))
self.user_info(user,comment)
def server_command(self,command):
elem = gen_element("server_command")
elem.append(gen_element("command",text=command))
self.root.append(elem)
def user_info(self,username,comment):
elem = gen_element("user_info")
elem.append(gen_element("name",text=username))
elem.append(gen_element("comment",text=comment))
self.root.append(elem)
def tcc_command(self,command):
elem = gen_element("tcc_command")
elem.append(gen_element("command",text=command))
self.root.append(elem)
def tcc_pointing(self,x,y,
ns_east_state="auto",ns_west_state="auto",
md_east_state="auto",md_west_state="auto",
ns_east_offset=0.0,ns_west_offset=0.0,
md_east_offset=0.0,md_west_offset=0.0,
offset_units="degrees",**attributes):
elem = gen_element("tcc_command")
elem.append(gen_element("command",text="point"))
pointing = gen_element("pointing",attributes=attributes)
pointing.append(gen_element("xcoord",text=str(x)))
pointing.append(gen_element("ycoord",text=str(y)))
ns = gen_element("ns")
ns_east = gen_element("east")
ns_east.append(gen_element("state",text=ns_east_state))
ns_east.append(gen_element("offset",text=str(ns_east_offset),attributes={'units':offset_units}))
ns_west = gen_element("west")
ns_west.append(gen_element("state",text=ns_west_state))
ns_west.append(gen_element("offset",text=str(ns_west_offset),attributes={'units':offset_units}))
ns.append(ns_east)
ns.append(ns_west)
md = gen_element("md")
md_east = gen_element("east")<|fim▁hole|> md_east.append(gen_element("offset",text=str(md_east_offset),attributes={'units':offset_units}))
md_west = gen_element("west")
md_west.append(gen_element("state",text=md_west_state))
md_west.append(gen_element("offset",text=str(md_west_offset),attributes={'units':offset_units}))
md.append(md_east)
md.append(md_west)
elem.append(pointing)
elem.append(ns)
elem.append(md)
self.root.append(elem)
class TCCResponseHandler(XMLMessage):
    """Parse an XML response received from the TCC server.

    On a ``<success>`` child of the response root, ``passed`` is set True
    and ``message`` holds its text; on an ``<error>`` child, ``passed`` is
    set False, ``message`` holds its text and :class:`TCCError` is raised.

    Raises:
        XMLError: if the raw message cannot be parsed as XML.
        TCCError: if the server reported an error response.
    """
    def __init__(self, msg):
        try:
            # BUG FIX: the original called super(MPSRDefaultResponse, self),
            # a name that is not defined in this module and raised NameError
            # on every construction; the enclosing class is TCCResponseHandler.
            super(TCCResponseHandler, self).__init__(etree.fromstring(msg))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed.
            # NOTE(review): `logger` is assumed to be a module-level logger
            # defined elsewhere in this file -- TODO confirm.
            logger.error("Unknown TCC message: %s" % msg)
            raise XMLError(msg)
        self._parse()

    def _parse(self):
        """Populate ``passed``/``message`` from the parsed response root."""
        if self.root.find('success') is not None:
            self.passed = True
            self.message = self.root.find('success').text
        elif self.root.find('error') is not None:
            self.passed = False
            self.message = self.root.find('error').text
            # Surface server-side failures to the caller immediately.
            raise TCCError(self.message)
class TCCControls(object):
    """Client helper for issuing commands to the TCC server.

    Each public method builds a TCCMessage on behalf of ``user`` and sends
    it to the configured server over a fresh TCP connection.
    """
    def __init__(self,user="anansi"):
        # NOTE(review): `config` is not imported in the visible header of
        # this module -- presumably provided elsewhere in the file; verify.
        conf = config.tcc_server
        self.ip = conf.ip
        self.port = conf.port
        self.user = user
    def _send(self,msg):
        # One connection per command; the parsed reply is returned and
        # TCCResponseHandler raises TCCError if the server reports <error>.
        # NOTE(review): `TCPClient` is likewise not imported here -- verify.
        client = TCPClient(self.ip,self.port,timeout=10.0)
        client.send(msg)
        return TCCResponseHandler(client.receive())
    def track(self,x,y,system,units,**kwargs):
        # Point at (x, y) expressed in the given coordinate system/units;
        # extra keyword arguments are forwarded as pointing attributes.
        msg = TCCMessage(self.user)
        msg.tcc_pointing(x,y,system=system,units=units,**kwargs)
        return self._send(str(msg))
    def stop(self):
        # Halt any drive motion currently in progress.
        msg = TCCMessage(self.user)
        msg.tcc_command("stop")
        return self._send(str(msg))
    def maintenance_stow(self):
        # Drive to the maintenance stow position.
        msg = TCCMessage(self.user)
        msg.tcc_command("maintenance_stow")
        return self._send(str(msg))
    def wind_stow(self):
        # Stow against high wind.  NOTE(review): the wire token is "wind",
        # not "wind_stow" -- presumably the server's expected command name;
        # confirm against the TCC server's command table.
        msg = TCCMessage(self.user)
        msg.tcc_command("wind")
        return self._send(str(msg))
<|file_name|>test_exception_interception.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
test
~~~~
Sanic-CORS is a simple extension to Sanic allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2020 by Ashley Sommer (based on flask-cors by Cory Dolphin).
:license: MIT, see LICENSE for more details.
"""
from ..base_test import SanicCorsTestCase
from sanic import Sanic
from sanic_cors import *
from sanic_cors.core import *
from sanic.exceptions import NotFound, ServerError
from sanic.response import text
def add_routes(app):
    """Register routes on ``app`` that deliberately raise exceptions.

    Each handler exists to exercise CORS header injection on error paths:
    the ``/test_acl_*`` URLs match the CORS resource pattern used by the
    test cases, while the ``/test_no_acl_*`` URLs do not.  The same handler
    is registered under both prefixes where applicable.
    """
    # Registered imperatively (not via decorator) so one function can be
    # bound to two URLs.
    #@app.route('/test_no_acl_abort_404')
    #@app.route('/test_acl_abort_404')
    def test_acl_abort_404(request):
        raise NotFound("")
    app.route('/test_no_acl_abort_404')(test_acl_abort_404)
    app.route('/test_acl_abort_404')(test_acl_abort_404)
    # Async variant of the 404-raising handler.
    #@app.route('/test_no_acl_async_abort_404')
    #@app.route('/test_acl_async_abort_404')
    async def test_acl_async_abort_404(request):
        raise NotFound("")
    app.route('/test_no_acl_async_abort_404')(test_acl_async_abort_404)
    app.route('/test_acl_async_abort_404')(test_acl_async_abort_404)
    # Handler producing a deliberate 500 via Sanic's ServerError.
    #@app.route('/test_no_acl_abort_500')
    #@app.route('/test_acl_abort_500')
    def test_acl_abort_500(request):
        raise ServerError("")
    app.route('/test_no_acl_abort_500')(test_acl_abort_500)
    app.route('/test_acl_abort_500')(test_acl_abort_500)
    # Uncaught (non-Sanic) exceptions: these hit Sanic's internal exception
    # handler rather than the framework abort path.
    @app.route('/test_acl_uncaught_exception_500')
    def test_acl_uncaught_exception_500(request):
        raise Exception("This could've been any exception")
    @app.route('/test_no_acl_uncaught_exception_500')
    def test_no_acl_uncaught_exception_500(request):
        raise Exception("This could've been any exception")
class ExceptionInterceptionDefaultTestCase(SanicCorsTestCase):
def setUp(self):
self.app = Sanic(__name__.replace(".","-"))
CORS(self.app, resources={
r'/test_acl*': {},
})
add_routes(self.app)
def test_acl_abort_404(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path matches.
'''
resp = self.get('/test_acl_abort_404', origin='www.example.com')
self.assertEqual(resp.status, 404)
self.assertTrue(ACL_ORIGIN in resp.headers)
def test_acl_async_abort_404(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path matches.
'''
resp = self.get('/test_acl_async_abort_404', origin='www.example.com')
self.assertEqual(resp.status, 404)
self.assertTrue(ACL_ORIGIN in resp.headers)
def test_no_acl_abort_404(self):
'''
HTTP Responses generated by calling abort are handled identically<|fim▁hole|> path matches. This path does not match.
'''
resp = self.get('/test_no_acl_abort_404', origin='www.example.com')
self.assertEqual(resp.status, 404)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_no_acl_async_abort_404(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path does not match.
'''
resp = self.get('/test_no_acl_async_abort_404', origin='www.example.com')
self.assertEqual(resp.status, 404)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_acl_abort_500(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path matches
'''
resp = self.get('/test_acl_abort_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertTrue(ACL_ORIGIN in resp.headers)
def test_no_acl_abort_500(self):
'''
HTTP Responses generated by calling abort are handled identically
to normal responses, and should be wrapped by CORS headers if the
path matches. This path matches
'''
resp = self.get('/test_no_acl_abort_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_acl_uncaught_exception_500(self):
'''
Uncaught exceptions will trigger Sanic's internal exception
handler, and should have ACL headers only if intercept_exceptions
is set to True and if the request URL matches the resources pattern
This url matches.
'''
resp = self.get('/test_acl_uncaught_exception_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertTrue(ACL_ORIGIN in resp.headers)
def test_no_acl_uncaught_exception_500(self):
'''
Uncaught exceptions will trigger Sanic's internal exception
handler, and should have ACL headers only if intercept_exceptions
is set to True and if the request URL matches the resources pattern.
This url does not match.
'''
resp = self.get('/test_no_acl_uncaught_exception_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_acl_exception_with_error_handler(self):
'''
If a 500 handler is setup by the user, responses should have
CORS matching rules applied, regardless of whether or not
intercept_exceptions is enabled.
'''
return_string = "Simple error handler"
@self.app.exception(NotFound, ServerError, Exception)
def catch_all_handler(request, exception):
'''
This error handler catches 404s and 500s and returns
status 200 no matter what. It is not a good handler.
'''
return text(return_string)
acl_paths = [
'/test_acl_abort_404',
'/test_acl_abort_500',
'/test_acl_uncaught_exception_500'
]
no_acl_paths = [
'/test_no_acl_abort_404',
'/test_no_acl_abort_500',
'/test_no_acl_uncaught_exception_500'
]
def get_with_origins(path):
response = self.get(path, origin='www.example.com')
return response
for resp in map(get_with_origins, acl_paths):
self.assertEqual(resp.status, 200)
self.assertTrue(ACL_ORIGIN in resp.headers)
for resp in map(get_with_origins, no_acl_paths):
self.assertEqual(resp.status, 200)
self.assertFalse(ACL_ORIGIN in resp.headers)
class NoExceptionInterceptionTestCase(ExceptionInterceptionDefaultTestCase):
def setUp(self):
self.app = Sanic(__name__.replace(".","-"))
CORS(self.app,
intercept_exceptions=False,
resources={
r'/test_acl*': {},
})
add_routes(self.app)
def test_acl_exception_with_error_handler(self):
'''
If a 500 handler is setup by the user, responses should have
CORS matching rules applied, regardless of whether or not
intercept_exceptions is enbaled.
'''
return_string = "Simple error handler"
@self.app.exception(ServerError, NotFound, Exception)
# Note, async error handlers don't work in Sanic yet.
# async def catch_all_handler(request, exception):
def catch_all_handler(request, exception):
'''
This error handler catches 404s and 500s and returns
status 200 no matter what. It is not a good handler.
'''
return text(return_string, 200)
acl_paths = [
'/test_acl_abort_404',
'/test_acl_abort_500'
]
no_acl_paths = [
'/test_no_acl_abort_404',
'/test_no_acl_abort_500',
'/test_no_acl_uncaught_exception_500',
'/test_acl_uncaught_exception_500'
]
def get_with_origins(path):
return self.get(path, origin='www.example.com')
for resp in map(get_with_origins, acl_paths):
self.assertEqual(resp.status, 200)
self.assertTrue(ACL_ORIGIN in resp.headers)
for resp in map(get_with_origins, no_acl_paths):
self.assertEqual(resp.status, 200)
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_acl_uncaught_exception_500(self):
'''
Uncaught exceptions will trigger Sanic's internal exception
handler, and should have ACL headers only if intercept_exceptions
is set to True. In this case it is not.
'''
resp = self.get('/test_acl_uncaught_exception_500', origin='www.example.com')
self.assertEqual(resp.status, 500)
self.assertFalse(ACL_ORIGIN in resp.headers)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | to normal responses, and should be wrapped by CORS headers if the |
<|file_name|>swscale.go<|end_file_name|><|fim▁begin|>// Use of this source code is governed by a MIT license that can be found in the LICENSE file.
// Giorgis ([email protected])
//Package swscale performs highly optimized image scaling and colorspace and pixel format conversion operations.
//Rescaling: is the process of changing the video size. Several rescaling options and algorithms are available.
//Pixel format conversion: is the process of converting the image format and colorspace of the image.
package swscale
//#cgo pkg-config: libswscale libavutil
//#include <stdio.h>
//#include <stdlib.h>
//#include <inttypes.h>
//#include <string.h>
//#include <stdint.h>
//#include <libswscale/swscale.h>
import "C"
import (
"unsafe"
)
type (
Context C.struct_SwsContext
Filter C.struct_SwsFilter
Vector C.struct_SwsVector
Class C.struct_AVClass
PixelFormat C.enum_AVPixelFormat
)
// SwscaleVersion returns the LIBSWSCALE_VERSION_INT constant of the linked
// libswscale library.
func SwscaleVersion() uint {
	return uint(C.swscale_version())
}

// SwscaleConfiguration returns the libswscale build-time configuration string.
func SwscaleConfiguration() string {
	return C.GoString(C.swscale_configuration())
}

// SwscaleLicense returns the license string of the linked libswscale.
func SwscaleLicense() string {
	return C.GoString(C.swscale_license())
}
//Return a pointer to yuv<->rgb coefficients for the given colorspace suitable for sws_setColorspaceDetails().
func SwsGetcoefficients(c int) *int {
return (*int)(unsafe.Pointer(C.sws_getCoefficients(C.int(c))))
}
//Return a positive value if pix_fmt is a supported input format, 0 otherwise.
func SwsIssupportedinput(p PixelFormat) int {
return int(C.sws_isSupportedInput((C.enum_AVPixelFormat)(p)))
}
//Return a positive value if pix_fmt is a supported output format, 0 otherwise.
func SwsIssupportedoutput(p PixelFormat) int {
return int(C.sws_isSupportedOutput((C.enum_AVPixelFormat)(p)))
}
func SwsIssupportedendiannessconversion(p PixelFormat) int {
return int(C.sws_isSupportedEndiannessConversion((C.enum_AVPixelFormat)(p)))
}
// SwsScale scales the image slice described by src/str and writes the result
// into d with destination stride ds; y and h select the slice of source rows
// to process.  The return value is sws_scale's result (the height of the
// output slice, or a negative error code).
//
// NOTE(review): the underlying C sws_scale expects arrays of per-plane data
// pointers and strides, but this wrapper passes the address of a single
// pointer and a single stride, so it can only describe one plane (packed
// pixel formats) -- confirm against callers before using planar formats.
func SwsScale(ctxt *Context, src *uint8, str int, y, h int, d *uint8, ds int) int {
	cctxt := (*C.struct_SwsContext)(unsafe.Pointer(ctxt))
	csrc := (*C.uint8_t)(unsafe.Pointer(src))
	cstr := (*C.int)(unsafe.Pointer(&str))
	cd := (*C.uint8_t)(unsafe.Pointer(d))
	cds := (*C.int)(unsafe.Pointer(&ds))
	return int(C.sws_scale(cctxt, &csrc, cstr, C.int(y), C.int(h), &cd, cds))
}
func SwsSetcolorspacedetails(ctxt *Context, it *int, sr int, t *int, dr, b, c, s int) int {
cit := (*C.int)(unsafe.Pointer(it))
ct := (*C.int)(unsafe.Pointer(t))
return int(C.sws_setColorspaceDetails((*C.struct_SwsContext)(ctxt), cit, C.int(sr), ct, C.int(dr), C.int(b), C.int(c), C.int(s)))
}
func SwsGetcolorspacedetails(ctxt *Context, it, sr, t, dr, b, c, s *int) int {
cit := (**C.int)(unsafe.Pointer(it))
csr := (*C.int)(unsafe.Pointer(sr))
ct := (**C.int)(unsafe.Pointer(t))<|fim▁hole|> cc := (*C.int)(unsafe.Pointer(c))
cs := (*C.int)(unsafe.Pointer(s))
return int(C.sws_getColorspaceDetails((*C.struct_SwsContext)(ctxt), cit, csr, ct, cdr, cb, cc, cs))
}
func SwsGetdefaultfilter(lb, cb, ls, cs, chs, cvs float32, v int) *Filter {
return (*Filter)(unsafe.Pointer(C.sws_getDefaultFilter(C.float(lb), C.float(cb), C.float(ls), C.float(cs), C.float(chs), C.float(cvs), C.int(v))))
}
func SwsFreefilter(f *Filter) {
C.sws_freeFilter((*C.struct_SwsFilter)(f))
}
//Convert an 8-bit paletted frame into a frame with a color depth of 32 bits.
func SwsConvertpalette8topacked32(s, d *uint8, px int, p *uint8) {
C.sws_convertPalette8ToPacked32((*C.uint8_t)(s), (*C.uint8_t)(d), C.int(px), (*C.uint8_t)(p))
}
//Convert an 8-bit paletted frame into a frame with a color depth of 24 bits.
func SwsConvertpalette8topacked24(s, d *uint8, px int, p *uint8) {
C.sws_convertPalette8ToPacked24((*C.uint8_t)(s), (*C.uint8_t)(d), C.int(px), (*C.uint8_t)(p))
}
//Get the Class for swsContext.
func SwsGetClass() *Class {
return (*Class)(C.sws_get_class())
}<|fim▁end|> | cdr := (*C.int)(unsafe.Pointer(dr))
cb := (*C.int)(unsafe.Pointer(b)) |
<|file_name|>aixc++.py<|end_file_name|><|fim▁begin|>"""SCons.Tool.aixc++
Tool-specific initialization for IBM xlC / Visual Age C++ compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/aixc++.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
import os.path
import SCons.Platform.aix
cplusplus = __import__('c++', globals(), locals(), [])
packages = ['vacpp.cmp.core', 'vacpp.cmp.batch', 'vacpp.cmp.C', 'ibmcxx.cmp']
def get_xlc(env):
    """Return (path, name, version) for the xlC compiler selected by *env*.

    Honors an explicit $CXX setting, defaulting to 'xlC', and defers the
    actual filesystem/package lookup to the shared AIX platform helper.
    """
    compiler_name = env.get('CXX', 'xlC')
    return SCons.Platform.aix.get_xlc(env, compiler_name, packages)
def generate(env):
    """Add Builders and construction variables for xlC / Visual Age
    suite to an Environment."""
    path, _cxx, version = get_xlc(env)
    if path and _cxx:
        # Prefer the fully qualified compiler path when one was located.
        _cxx = os.path.join(path, _cxx)
    if 'CXX' not in env:
        # Only set $CXX when the user has not chosen a compiler explicitly.
        env['CXX'] = _cxx
    # Delegate the generic C++ builder/variable setup to the c++ tool.
    cplusplus.generate(env)
    if version:
        env['CXXVERSION'] = version
def exists(env):<|fim▁hole|> return xlc
return None
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|> | path, _cxx, version = get_xlc(env)
if path and _cxx:
xlc = os.path.join(path, _cxx)
if os.path.exists(xlc): |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|><|fim▁hole|>if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "corponovo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)<|fim▁end|> | #!/usr/bin/env python
import os
import sys
|
<|file_name|>x86_64.rs<|end_file_name|><|fim▁begin|>#![allow(unused_imports)]
use core::intrinsics;
// NOTE These functions are implemented using assembly because they using a custom
// calling convention which can't be implemented using a normal Rust function
// NOTE These functions are never mangled as they are not tested against compiler-rt
// and mangling ___chkstk would break the `jmp ___chkstk` instruction in __alloca
// ___chkstk_ms: GCC-style stack-probe routine for Windows x86_64.
// On entry %rax holds the requested stack-frame size.  The loop walks
// downward one page (0x1000 bytes) at a time, touching each page with
// `test %rcx,(%rcx)` so the OS guard-page mechanism commits the stack
// incrementally; frames smaller than a page skip straight to the final
// probe.  %rax and %rcx are saved/restored, so no registers are
// clobbered and the stack pointer itself is NOT adjusted here.
#[cfg(all(windows, target_env = "gnu", not(feature = "mangled-names")))]
#[naked]
#[no_mangle]
pub unsafe fn ___chkstk_ms() {
    asm!("
    push %rcx
    push %rax
    cmp $$0x1000,%rax
    lea 24(%rsp),%rcx
    jb 1f
    2:
    sub $$0x1000,%rcx
    test %rcx,(%rcx)
    sub $$0x1000,%rax
    cmp $$0x1000,%rax
    ja 2b
    1:
    sub %rax,%rcx
    test %rcx,(%rcx)
    pop %rax
    pop %rcx
    ret" ::: "memory" : "volatile");
    intrinsics::unreachable();
}
#[cfg(all(windows, target_env = "gnu", not(feature = "mangled-names")))]
#[naked]
#[no_mangle]
pub unsafe fn __alloca() {
asm!("mov %rcx,%rax // x64 _alloca is a normal function with parameter in rcx
jmp ___chkstk // Jump to ___chkstk since fallthrough may be unreliable"
::: "memory" : "volatile");
intrinsics::unreachable();
}<|fim▁hole|>#[naked]
// ___chkstk: stack probe that, unlike ___chkstk_ms, also MOVES the stack
// pointer down by %rax after probing (this is the variant `__alloca`
// jumps to).  After the page-by-page probe loop it rebuilds the stack:
// loads the return-address slot into %rax, installs the new top of
// stack from %rcx, restores %rcx, re-pushes the return address, and
// computes the original %rax back before returning.
#[no_mangle]
pub unsafe fn ___chkstk() {
    asm!(
        "
    push %rcx
    cmp $$0x1000,%rax
    lea 16(%rsp),%rcx // rsp before calling this routine -> rcx
    jb 1f
    2:
    sub $$0x1000,%rcx
    test %rcx,(%rcx)
    sub $$0x1000,%rax
    cmp $$0x1000,%rax
    ja 2b
    1:
    sub %rax,%rcx
    test %rcx,(%rcx)
    lea 8(%rsp),%rax // load pointer to the return address into rax
    mov %rcx,%rsp // install the new top of stack pointer into rsp
    mov -8(%rax),%rcx // restore rcx
    push (%rax) // push return address onto the stack
    sub %rsp,%rax // restore the original value in rax
    ret"
        ::: "memory" : "volatile"
    );
    intrinsics::unreachable();
}
// support unless we emit the _fltused
#[no_mangle]
#[used]
#[cfg(target_os = "uefi")]
static _fltused: i32 = 0;<|fim▁end|> |
#[cfg(all(windows, target_env = "gnu", not(feature = "mangled-names")))] |
<|file_name|>configmap.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package configmap
import (
"context"
corev1 "k8s.io/client-go/informers/core/v1"
"github.com/knative/pkg/controller"
"github.com/knative/pkg/injection"
"github.com/knative/pkg/injection/informers/kubeinformers/factory"
"github.com/knative/pkg/logging"
)
// init registers the ConfigMap informer constructor with the default
// injection framework so the informer is attached to every
// injection-based context at startup.
func init() {
	injection.Default.RegisterInformer(withInformer)
}
// Key is used as the key for associating information
// with a context.Context.
type Key struct{}
// withInformer builds the v1 ConfigMap informer from the shared informer
// factory carried on the context, stores the typed informer on the
// context under Key, and returns the underlying cache informer so the
// controller machinery can start it.
func withInformer(ctx context.Context) (context.Context, controller.Informer) {
	inf := factory.Get(ctx).Core().V1().ConfigMaps()
	return context.WithValue(ctx, Key{}, inf), inf.Informer()
}
// Get extracts the Kubernetes ConfigMap informer from the context.
func Get(ctx context.Context) corev1.ConfigMapInformer {<|fim▁hole|> "Unable to fetch %T from context.", (corev1.ConfigMapInformer)(nil))
}
return untyped.(corev1.ConfigMapInformer)
}<|fim▁end|> | untyped := ctx.Value(Key{})
if untyped == nil {
logging.FromContext(ctx).Panicf( |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import re
import time
from django.template import RequestContext as RC
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response
from django.db.models import Q
from django_websocket import accept_websocket
from . import models as mdl
from . import utils
from . import decorators
# persistent variables
message_list = []
websocket_list = []
@accept_websocket
def websocket(request):
    """Stream each new line of the monitored log file to every connected
    websocket client (fan-out broadcast).
    """
    #: TODO multiple log
    # Follow the single configured log file (Source pk=1).
    # NOTE(review): the file handle is never closed and the websocket is
    # never removed from the module-level websocket_list on disconnect --
    # confirm whether utils.follow / django_websocket clean these up.
    logfile = open(mdl.Source.objects.get(pk=1).path)
    loglines = utils.follow(logfile)
    # Register this client, then push every new line to all clients.
    websocket_list.append(request.websocket)
    for line in loglines:
        for ws in websocket_list:
            ws.send(line)
@accept_websocket
def tail(request):
""" tail buffer """
logfile = open(mdl.Source.objects.get(pk=1).path)
loglines = utils.follow(logfile)
for msg in request.websocket:
ptn = re.compile(utils.modify_message(msg))<|fim▁hole|>
@accept_websocket
def fetch(request):
    """ search storage """
    def cs(l):
        # Format one Systemevents row dict as a syslog-style line:
        # "<received-at> <fromhost> <syslogtag> <message>".
        return u"{r} {f} {s} {m}".format(
            r=l.get("receivedat").strftime("%b %d %X"),
            f=l.get("fromhost"),
            s=l.get("syslogtag"),
            m=l.get("message")
        )
    # Each incoming websocket message is treated as a search term.
    for msg in request.websocket:
        m = utils.modify_message(msg)
        # Substring match against tag, host and message columns.
        data = mdl.Systemevents.objects.filter(
            # Q(receivedat__contains=m) |
            Q(syslogtag__contains=m) |
            Q(fromhost__contains=m) |
            Q(message__contains=m)
        ).values("receivedat", "fromhost", "syslogtag", "message")
        #[0:20]
        # Send results newest-first, pausing briefly every 10 rows so the
        # client is not flooded.
        for i, s in enumerate(map(cs, data[::-1])):
            if (i % 10) == 0:
                time.sleep(0.1)
            request.websocket.send(s)
def required(request):
    """Register the log file to monitor.

    The POSTed ``path`` must be readable, be a regular file, and be one
    of the whitelisted syslog locations.  On success a Source row is
    created (or reused) and the user is redirected to the index view;
    otherwise the registration form is rendered again.
    """
    path = request.POST.get("path")
    # NOTE(review): .decode("utf-8") implies Python 2 byte strings here.
    if (path and os.access(path.decode("utf-8"), os.R_OK)
            and os.path.isfile(path.decode("utf-8"))
            and path.decode("utf-8") in ["/var/log/rsyslog", "/var/log/syslog", "/var/log/message"]):
        #: TODO multiple log
        obj, created = mdl.Source.objects.get_or_create(path=path)
        return HttpResponseRedirect(reverse("rsyslogmonitor_index"))
    return render_to_response('rsyslogmonitor/required.html', RC(request, {
        "path": path
    }))
# The guard lambda previously read "True if 0 < count() else False",
# which is a redundant conditional around an expression that is already
# a bool; simplified to the direct comparison (identical result).
@decorators.syslog_required(lambda request, *args, **kwargs: mdl.Source.objects.count() > 0)
def index(request):
    """Render the live-log websocket page.

    Guarded by syslog_required: only served once at least one Source
    (a log file to monitor) has been registered.
    """
    return render_to_response('rsyslogmonitor/websocket.html', RC(request, {}))
# vim: set et fenc=utf-8 ft=python ff=unix sts=4 sw=4 ts=4 :<|fim▁end|> | for line in loglines:
if ptn.search(line):
request.websocket.send(line)
|
<|file_name|>task_executor.py<|end_file_name|><|fim▁begin|># (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.connection_info import ConnectionInformation
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.plugins import lookup_loader, connection_loader, action_loader
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.debug import debug
__all__ = ['TaskExecutor']
import json
import time
class TaskExecutor:
'''
This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner()
class.
'''
    def __init__(self, host, task, job_vars, connection_info, loader, module_loader):
        """Capture the per-task execution context; no work happens until
        run() is called.

        host            -- target host object
        task            -- the Task to execute
        job_vars        -- variables available to the task
        connection_info -- ConnectionInformation (transport, auth, ...)
        loader          -- data loader used to resolve files and paths
        module_loader   -- plugin loader used to locate modules
        """
        self._host = host
        self._task = task
        self._job_vars = job_vars
        self._connection_info = connection_info
        self._loader = loader
        self._module_loader = module_loader
    def run(self):
        '''
        The main executor entrypoint, where we determine if the specified
        task requires looping and either runs the task once or once per
        loop item.  Returns a JSON-serialized result dict on success, or
        a plain failure dict if an AnsibleError is raised.
        '''
        debug("in run()")
        try:
            # lookup plugins need to know if this task is executing from
            # a role, so that it can properly find files/templates/etc.
            roledir = None
            if self._task._role:
                roledir = self._task._role._role_path
            self._job_vars['roledir'] = roledir
            # None means "no loop at all"; an empty list means the loop
            # produced nothing and the task is reported as skipped.
            items = self._get_loop_items()
            if items is not None:
                if len(items) > 0:
                    item_results = self._run_loop(items)
                    res = dict(results=item_results)
                else:
                    res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
            else:
                debug("calling self._execute()")
                res = self._execute()
                debug("_execute() done")
            # make sure changed is set in the result, if it's not present
            if 'changed' not in res:
                res['changed'] = False
            debug("dumping result to json")
            result = json.dumps(res)
            debug("done dumping result, returning")
            return result
        except AnsibleError, e:
            # NOTE: Python 2 except syntax; failure is reported as a
            # plain dict rather than a JSON string.
            return dict(failed=True, msg=str(e))
    def _get_loop_items(self):
        '''
        Loads a lookup plugin to handle the with_* portion of a task (if specified),
        and returns the items result.  Returns None when the task has no
        loop (callers distinguish None from an empty list).
        '''
        items = None
        if self._task.loop and self._task.loop in lookup_loader:
            # Normalise the raw loop arguments, then run the named lookup
            # plugin to produce the concrete item list.
            loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, variables=self._job_vars, loader=self._loader)
            items = lookup_loader.get(self._task.loop, loader=self._loader).run(terms=loop_terms, variables=self._job_vars)
        return items
    def _run_loop(self, items):
        '''
        Runs the task with the loop items specified and collates the result
        into an array named 'results' which is inserted into the final result
        along with the item for which the loop ran.
        '''
        results = []
        # make copies of the job vars and task so we can add the item to
        # the variables and re-validate the task with the item variable
        task_vars = self._job_vars.copy()
        # Package-manager style tasks may be collapsed to a single
        # comma-joined item before iterating.
        items = self._squash_items(items, task_vars)
        for item in items:
            task_vars['item'] = item
            try:
                tmp_task = self._task.copy()
            except AnsibleParserError, e:
                # A bad copy fails only this item, not the whole loop.
                results.append(dict(failed=True, msg=str(e)))
                continue
            # now we swap the internal task with the copy, execute,
            # and swap them back so we can do the next iteration cleanly
            (self._task, tmp_task) = (tmp_task, self._task)
            res = self._execute(variables=task_vars)
            (self._task, tmp_task) = (tmp_task, self._task)
            # now update the result with the item info, and append the result
            # to the list of results
            res['item'] = item
            results.append(res)
            # FIXME: we should be sending back a callback result for each item in the loop here
            print(res)
        return results
    def _squash_items(self, items, variables):
        '''
        Squash items down to a comma-separated list for certain modules which support it
        (typically package management modules).  Items whose per-item
        conditional evaluates false are dropped before joining; for all
        other actions the list is returned unchanged.
        '''
        if len(items) > 0 and self._task.action in ('apt', 'yum', 'pkgng', 'zypper'):
            final_items = []
            for item in items:
                # NOTE(review): this mutates the caller's dict and leaves
                # 'item' set to the last value after the loop -- confirm
                # callers always overwrite it (as _run_loop does).
                variables['item'] = item
                if self._task.evaluate_conditional(variables):
                    final_items.append(item)
            # A single joined item, e.g. ["pkg1,pkg2,pkg3"].
            return [",".join(final_items)]
        else:
            return items
    def _execute(self, variables=None):
        '''
        The primary workhorse of the executor system, this runs the task
        on the specified host (which may be the delegated_to host) and handles
        the retry/until and block rescue/always execution
        '''
        if variables is None:
            variables = self._job_vars
        # fields set from the play/task may be based on variables, so we have to
        # do the same kind of post validation step on it here before we use it
        self._connection_info.post_validate(variables=variables, loader=self._loader)
        # get the connection and the handler for this execution
        self._connection = self._get_connection(variables)
        self._handler = self._get_action_handler(connection=self._connection)
        # Evaluate the conditional (if any) for this task, which we do before running
        # the final task post-validation. We do this before the post validation due to
        # the fact that the conditional may specify that the task be skipped due to a
        # variable not being present which would otherwise cause validation to fail
        if not self._task.evaluate_conditional(variables):
            debug("when evaulation failed, skipping this task")
            return dict(changed=False, skipped=True, skip_reason='Conditional check failed')
        # Now we do final validation on the task, which sets all fields to their final values
        self._task.post_validate(variables)
        # if this task is a TaskInclude, we just return now with a success code so the
        # main thread can expand the task list for the given host
        if self._task.action == 'include':
            include_variables = self._task.args.copy()
            include_file = include_variables.get('_raw_params')
            del include_variables['_raw_params']
            return dict(changed=True, include=include_file, include_variables=include_variables)
        # And filter out any fields which were set to default(omit), and got the omit token value
        omit_token = variables.get('omit')
        if omit_token is not None:
            self._task.args = dict(filter(lambda x: x[1] != omit_token, self._task.args.iteritems()))
        # Read some values from the task, so that we can modify them if need be
        retries = self._task.retries
        if retries <= 0:
            retries = 1
        delay = self._task.delay
        if delay < 0:
            delay = 1
        # make a copy of the job vars here, in case we need to update them
        # with the registered variable value later on when testing conditions
        vars_copy = variables.copy()
        debug("starting attempt loop")
        result = None
        for attempt in range(retries):
            if attempt > 0:
                # FIXME: this should use the callback/message passing mechanism
                print("FAILED - RETRYING: %s (%d retries left)" % (self._task, retries-attempt))
                result['attempts'] = attempt + 1
            debug("running the handler")
            result = self._handler.run(task_vars=variables)
            debug("handler run complete")
            if self._task.async > 0:
                # the async_wrapper module returns dumped JSON via its stdout
                # response, so we parse it here and replace the result
                try:
                    result = json.loads(result.get('stdout'))
                except ValueError, e:
                    return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))
                if self._task.poll > 0:
                    result = self._poll_async_result(result=result)
            # update the local copy of vars with the registered value, if specified
            if self._task.register:
                vars_copy[self._task.register] = result
            # create a conditional object to evaluate task conditions
            cond = Conditional(loader=self._loader)
            # FIXME: make sure until is mutually exclusive with changed_when/failed_when
            if self._task.until:
                cond.when = self._task.until
                if cond.evaluate_conditional(vars_copy):
                    break
            elif (self._task.changed_when or self._task.failed_when) and 'skipped' not in result:
                if self._task.changed_when:
                    cond.when = [ self._task.changed_when ]
                    result['changed'] = cond.evaluate_conditional(vars_copy)
                if self._task.failed_when:
                    cond.when = [ self._task.failed_when ]
                    failed_when_result = cond.evaluate_conditional(vars_copy)
                    result['failed_when_result'] = result['failed'] = failed_when_result
                    if failed_when_result:
                        break
            elif 'failed' not in result and result.get('rc', 0) == 0:
                # if the result is not failed, stop trying
                break
            # Sleep between attempts, but not after the final one.
            if attempt < retries - 1:
                time.sleep(delay)
        debug("attempt loop complete, returning result")
        return result
def _poll_async_result(self, result):
'''
Polls for the specified JID to be complete
'''
async_jid = result.get('ansible_job_id')
if async_jid is None:
return dict(failed=True, msg="No job id was returned by the async task")
# Create a new psuedo-task to run the async_status module, and run
# that (with a sleep for "poll" seconds between each retry) until the
# async time limit is exceeded.
async_task = Task().load(dict(action='async_status jid=%s' % async_jid))
# Because this is an async task, the action handler is async. However,
# we need the 'normal' action handler for the status check, so get it
# now via the action_loader
normal_handler = action_loader.get(
'normal',<|fim▁hole|> module_loader=self._module_loader,
)
time_left = self._task.async
while time_left > 0:
time.sleep(self._task.poll)
async_result = normal_handler.run()
if int(async_result.get('finished', 0)) == 1 or 'failed' in async_result or 'skipped' in async_result:
break
time_left -= self._task.poll
if int(async_result.get('finished', 0)) != 1:
return dict(failed=True, msg="async task did not complete within the requested time")
else:
return async_result
    def _get_connection(self, variables):
        '''
        Reads the connection property for the host, and returns the
        correct connection object from the list of connection plugins
        '''
        # FIXME: delegate_to calculation should be done here
        # FIXME: calculation of connection params/auth stuff should be done here
        self._connection_info.remote_addr = self._host.ipv4_address
        # Delegation rewrites addr/port/auth on connection_info in place.
        if self._task.delegate_to is not None:
            self._compute_delegate(variables)
        # FIXME: add all port/connection type munging here (accelerated mode,
        # fixing up options for ssh, etc.)? and 'smart' conversion
        conn_type = self._connection_info.connection
        if conn_type == 'smart':
            conn_type = 'ssh'
        connection = connection_loader.get(conn_type, self._connection_info)
        if not connection:
            raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
        # The connection is opened eagerly before being handed back.
        connection.connect()
        return connection
    def _get_action_handler(self, connection):
        '''
        Returns the correct action plugin to handle the requested task action.
        A dedicated action plugin wins; otherwise 'normal' for synchronous
        tasks and 'async' for tasks with a nonzero async value.
        '''
        if self._task.action in action_loader:
            if self._task.async != 0:
                # NOTE(review): `module_name` is undefined here, so this
                # raise would itself fail with a NameError -- looks like it
                # should be self._task.action.
                raise AnsibleError("async mode is not supported with the %s module" % module_name)
            handler_name = self._task.action
        elif self._task.async == 0:
            handler_name = 'normal'
        else:
            handler_name = 'async'
        handler = action_loader.get(
            handler_name,
            task=self._task,
            connection=connection,
            connection_info=self._connection_info,
            loader=self._loader,
            module_loader=self._module_loader,
        )
        if not handler:
            raise AnsibleError("the handler '%s' was not found" % handler_name)
        return handler
def _compute_delegate(self, variables):
# get the vars for the delegate by its name
try:
this_info = variables['hostvars'][self._task.delegate_to]
except:
# make sure the inject is empty for non-inventory hosts
this_info = {}
# get the real ssh_address for the delegate and allow ansible_ssh_host to be templated
#self._connection_info.remote_user = self._compute_delegate_user(self.delegate_to, delegate['inject'])
self._connection_info.remote_addr = this_info.get('ansible_ssh_host', self._task.delegate_to)
self._connection_info.port = this_info.get('ansible_ssh_port', self._connection_info.port)
self._connection_info.password = this_info.get('ansible_ssh_pass', self._connection_info.password)
self._connection_info.private_key_file = this_info.get('ansible_ssh_private_key_file', self._connection_info.private_key_file)
self._connection_info.connection = this_info.get('ansible_connection', self._connection_info.connection)
self._connection_info.sudo_pass = this_info.get('ansible_sudo_pass', self._connection_info.sudo_pass)
if self._connection_info.remote_addr in ('127.0.0.1', 'localhost'):
self._connection_info.connection = 'local'
# Last chance to get private_key_file from global variables.
# this is useful if delegated host is not defined in the inventory
#if delegate['private_key_file'] is None:
# delegate['private_key_file'] = remote_inject.get('ansible_ssh_private_key_file', None)
#if delegate['private_key_file'] is not None:
# delegate['private_key_file'] = os.path.expanduser(delegate['private_key_file'])
for i in this_info:
if i.startswith("ansible_") and i.endswith("_interpreter"):
variables[i] = this_info[i]<|fim▁end|> | task=async_task,
connection=self._connection,
connection_info=self._connection_info,
loader=self._loader, |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
admin.autodiscover()
import views
urlpatterns = patterns('',<|fim▁hole|> url(r'^words', views.words, { 'titles': False }),
url(r'^projects', views.projects),
url(r'^posters', views.posters),
url(r'^posterpresenters', views.posterpresenters),
url(r'^pigraph', views.pigraph),
url(r'^institutions', views.institutions),
url(r'^institution/(?P<institutionid>\d+)', views.institution),
url(r'^profile/$', views.profile),
url(r'^schedule/(?P<email>\S+)', views.schedule),
url(r'^ratemeeting/(?P<rmid>\d+)/(?P<email>\S+)', views.ratemeeting),
url(r'^submitrating/(?P<rmid>\d+)/(?P<email>\S+)', views.submitrating),
url(r'^feedback/(?P<email>\S+)', views.after),
url(r'^breakouts', views.breakouts),
url(r'^breakout/(?P<bid>\d+)', views.breakout),
url(r'^about', views.about),
url(r'^buginfo', views.buginfo),
url(r'^allrms', views.allrms),
url(r'^allratings', views.allratings),
url(r'^login', views.login),
url(r'^logout', views.logout),
url(r'^edit_home_page', views.edit_home_page),
url(r'^pi/(?P<userid>\d+)', views.pi), # , name = 'pi'),
url(r'^pi/(?P<email>\S+)', views.piEmail), # , name = 'pi'),
url(r'^project/(?P<abstractid>\S+)', views.project, name = 'project'),
url(r'^scope=(?P<scope>\w+)/(?P<url>.+)$', views.set_scope),
url(r'^active=(?P<active>\d)/(?P<url>.+)$', views.set_active),
url(r'^admin/', include(admin.site.urls)),
(r'', include('django_browserid.urls')),
url(r'^$', views.index, name = 'index'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)<|fim▁end|> | url(r'^pis', views.pis), |
<|file_name|>es.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function, unicode_literals
import time
import logging
import ujson as json
from elasticsearch import Elasticsearch
from elasticsearch.client import IndicesClient
from elasticsearch.exceptions import ConnectionTimeout
from .config import config
from .es_mappings import ES_MAPPINGS, ES_SIMILARITIES
class ElasticsearchBulkIndexer(object):
""" Bulk indexer for Elasticsearch """
servers = {
"docs": [config["ELASTICSEARCHDOCS"]],
"text": [config["ELASTICSEARCHTEXT"]]
}
    def __init__(self, index_name, batch_size=500):
        """Create a lazy bulk indexer for the named index ("docs" or
        "text", keyed into the class-level `servers` map)."""
        self.index_name = index_name
        self.buffer = []            # pending bulk-action lines, flushed in batches
        self.batch_size = batch_size
        self.total_size = 0         # total docs queued since creation (for logging)
        self.connected = False      # connection is established lazily
        self.client = None
    def connect(self):
        """ Establish the ES connection if not already done """
        if self.connected:
            return
        # Flip the flag first so repeated calls are no-ops.
        self.connected = True
        self.client = Elasticsearch(self.servers[self.index_name], timeout=60)
    def index(self, _id, hit):
        """ Queue one document for indexing.  Auto-flushes once the
        buffer reaches batch_size. """
        if not self.connected:
            self.connect()
        # https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
        # Each entry is an action line plus a source line, newline-terminated,
        # as required by the bulk API.
        self.buffer.append('{"index":{"_id":"%s"}}\n%s\n' % (
            _id,
            json.dumps(hit) # pylint: disable=no-member
        ))
        if len(self.buffer) >= self.batch_size:
            self.flush()
    def empty(self):
        """ Empty the ES index. Dangerous operation! """
        # Hard guard: destructive, so only allowed in local/CI envs.
        if config["ENV"] not in ("local", "ci"):
            raise Exception("empty() not allowed in env %s" % config["ENV"])
        if self.indices().exists(index=self.index_name):
            self.indices().delete(index=self.index_name)
    def refresh(self):
        """ Sends a "refresh" to the ES index, forcing the actual indexing of what was sent up until now """
        # Never connected means nothing was sent: nothing to refresh.
        if not self.connected:
            return
        # Test-only operation, same guard as empty().
        if config["ENV"] not in ("local", "ci"):
            raise Exception("refresh() not allowed in env %s" % config["ENV"])
        self.indices().refresh(index=self.index_name)
    def flush(self, retries=10):
        """ Sends the current indexing batch to ES.  On ConnectionTimeout,
        retries up to `retries` times with a 60s pause; the buffer is only
        cleared after a successful send. """
        if len(self.buffer) == 0:
            return
        if not self.connected:
            self.connect()
        self.total_size += len(self.buffer)
        logging.debug(
            "ES: Flushing %s docs to index=%s (total: %s)",
            len(self.buffer), self.index_name, self.total_size
        )
        try:
            self.bulk_index()
        except ConnectionTimeout, e:
            if retries == 0:
                raise e
            time.sleep(60)
            # Recursive retry; the buffer is kept intact for the re-send.
            return self.flush(retries=retries - 1)
        self.buffer = []
    def bulk_index(self):
        """ Indexes the current buffer to Elasticsearch, bypassing the bulk() helper for performance """
        connection = self.client.transport.get_connection()
        # POST the raw newline-delimited payload straight to the _bulk
        # endpoint for the "page" type of this index.
        bulk_url = "/%s/page/_bulk" % self.index_name
        body = "".join(self.buffer)
        # TODO retries
        # status, headers, data
        status, _, _ = connection.perform_request("POST", bulk_url, body=body)
        if status != 200:
            raise Exception("Elasticsearch returned status=%s" % status)
        # TODO: look for errors there?
        # parsed = json.loads(data)
    def indices(self):
        """ Returns an elasticsearch.client.IndicesClient instance,
        connecting first if needed. """
        if not self.connected:
            self.connect()
        return IndicesClient(self.client)
def create(self, empty=False):
""" Creates the ES index """
if empty:
self.empty()
mappings = ES_MAPPINGS[self.index_name]
self.indices().create(index=self.index_name, body={
"settings": {<|fim▁hole|> # TODO: this configuration should be set somewhere else! (cosr-ops?)
"number_of_shards": 5,
"number_of_replicas": 0,
# In prod we don't refresh manually so this is the only setting
# that will make ES periodically refresh to avoid storing only in temporary files
# as we index
"refresh_interval": "60s",
"similarity": ES_SIMILARITIES
},
"mappings": mappings
})<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#
# ICRAR - International Centre for Radio Astronomy Research
# (c) UWA - The University of Western Australia, 2015
# Copyright by UWA (in the framework of the ICRAR)
# All rights reserved
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
"""<|fim▁hole|><|fim▁end|> | This package contains all python modules implementing the DROP
Manager concepts, including their external interface, a web UI and a client
""" |
<|file_name|>test_MetaData.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
本测试模块用于测试与 :class:`sqlite4dummy.schema.MetaData` 有关的方法
class, method, func, exception
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from sqlite4dummy import *
from sqlite4dummy.tests.basetest import *
from datetime import datetime, date
import unittest
class MetaDataUnittest(unittest.TestCase):
"""Unittest of :class:`sqlite4dummy.schema.MetaData`.
MetaData的方法的单元测试。
"""
def setUp(self):
self.engine = Sqlite3Engine(":memory:", autocommit=False)
self.metadata = MetaData()
self.int_ = 1
self.float_ = 3.14
self.str_ = r"""\/!@#$%^&*()_+-=~`|[]{}><,.'"?"""
self.bytes_ = "abc".encode("utf-8")
self.date_ = date(2000, 1, 1)
self.datetime_ = datetime(2015, 10, 1, 18, 30, 0, 123)
self.pickle_ = [1, 2, 3]
self.test = Table("test", self.metadata,
Column("_id", dtype.INTEGER, primary_key=True, nullable=False),
Column("_int_with_default", dtype.INTEGER, default=self.int_),
Column("_float_with_default", dtype.REAL, default=self.float_),
Column("_str_with_default", dtype.TEXT, default=self.str_),
Column("_bytes_with_default", dtype.BLOB, default=self.bytes_),
Column("_date_with_default", dtype.DATE, default=self.date_),
Column("_datetime_with_default", dtype.DATETIME, default=self.datetime_),
Column("_pickle_with_default", dtype.PICKLETYPE, default=self.pickle_),<|fim▁hole|> Column("_bytes", dtype.BLOB),
Column("_date", dtype.DATE),
Column("_datetime", dtype.DATETIME),
Column("_pickle", dtype.PICKLETYPE),
)
self.metadata.create_all(self.engine)
self.index = Index("test_index", self.metadata,
[self.test.c._int,
self.test.c._float.desc(),
self.test.c._date,
desc(self.test.c._datetime)],
table_name=self.test,
unique=True,
skip_validate=False,
)
self.index.create(self.engine)
self.assertEqual(
len(self.engine.execute("PRAGMA table_info(test);").fetchall()),
15,
)
self.assertEqual(
len(self.engine.execute(
"SELECT * FROM sqlite_master "
"WHERE type = 'index' AND sql NOT NULL;").fetchall()),
1,
)
    def tearDown(self):
        # Release the in-memory SQLite engine after each test.
        self.engine.close()
    def test_drop_all(self):
        """Verify that drop_all() drops every table in the database.
        (Translated from the original Chinese docstring.)
        """
        # Exactly one table exists before the drop (created in setUp).
        self.assertEqual(
            len(self.engine.execute(
                "SELECT * FROM sqlite_master WHERE type = 'table';").fetchall()),
            1,
        )
        self.metadata.drop_all(self.engine)
        # SQLite's catalog no longer lists any table...
        self.assertEqual(
            len(self.engine.execute(
                "SELECT * FROM sqlite_master WHERE type = 'table';").fetchall()),
            0,
        )
        self.assertEqual(len(self.metadata.t), 0) # ...and the MetaData has none either
    def test_str_repr(self):
        # Placeholder smoke test for __str__/__repr__; output checks are
        # currently disabled.
        # print(self.metadata)
        # print(repr(self.metadata))
        pass
    def test_get_table(self):
        """Verify MetaData.get_table(name) returns the Table, and raises
        KeyError for unknown names.  (Translated from Chinese.)
        """
        self.assertEqual(self.metadata.get_table("test"), self.test)
        self.assertRaises(KeyError,
            self.metadata.get_table, "not_existing_table")
    def test_get_index(self):
        """Verify MetaData.get_index(name) returns the Index, and raises
        KeyError for unknown names.  (Translated from Chinese.)
        """
        self.assertEqual(self.metadata.get_index("test_index"), self.index)
        self.assertRaises(KeyError,
            self.metadata.get_index, "not_existing_index")
    def test_reflect(self):
        """Verify that MetaData.reflect(engine) correctly recovers Table,
        Column and Index metadata, including Column default values.
        (Translated from the original Chinese docstring.)
        """
        second_metadata = MetaData()
        # PICKLETYPE columns cannot be inferred from SQLite and must be
        # named explicitly during reflection.
        second_metadata.reflect(self.engine,
            pickletype_columns=[
                "test._pickle_with_default",
                "test._pickle",
            ])
        # Every reflected default must round-trip to the original value.
        self.assertEqual(second_metadata.get_table("test").\
            c._int_with_default.default, self.int_)
        self.assertEqual(second_metadata.get_table("test").\
            c._float_with_default.default, self.float_)
        self.assertEqual(second_metadata.get_table("test").\
            c._str_with_default.default, self.str_)
        self.assertEqual(second_metadata.get_table("test").\
            c._bytes_with_default.default, self.bytes_)
        self.assertEqual(second_metadata.get_table("test").\
            c._date_with_default.default, self.date_)
        self.assertEqual(second_metadata.get_table("test").\
            c._datetime_with_default.default, self.datetime_)
        self.assertEqual(second_metadata.get_table("test").\
            c._pickle_with_default.default, self.pickle_)
        # The index metadata must also survive reflection intact.
        self.assertEqual(second_metadata.get_index("test_index").\
            index_name, "test_index")
        self.assertEqual(second_metadata.get_index("test_index").\
            table_name, "test")
        self.assertEqual(second_metadata.get_index("test_index").\
            unique, True)
        self.assertEqual(second_metadata.get_index("test_index").\
            params, self.index.params)
if __name__ == "__main__":
unittest.main()<|fim▁end|> |
Column("_int", dtype.INTEGER),
Column("_float", dtype.REAL),
Column("_str", dtype.TEXT), |
<|file_name|>move-type-alias.cpp<|end_file_name|><|fim▁begin|>// RUN: mkdir -p %T/move-type-alias
// RUN: cp %S/Inputs/type_alias.h %T/move-type-alias/type_alias.h
// RUN: echo '#include "type_alias.h"' > %T/move-type-alias/type_alias.cpp
// RUN: cd %T/move-type-alias
//
// -----------------------------------------------------------------------------
// Test moving typedef declarations.
// -----------------------------------------------------------------------------
// RUN: clang-move -names="Int1" -new_cc=%T/move-type-alias/new_test.cpp -new_header=%T/move-type-alias/new_test.h -old_cc=%T/move-type-alias/type_alias.cpp -old_header=%T/move-type-alias/type_alias.h %T/move-type-alias/type_alias.cpp -- -std=c++11
// RUN: FileCheck -input-file=%T/move-type-alias/new_test.h -check-prefix=CHECK-NEW-TEST-H-CASE1 %s
// RUN: FileCheck -input-file=%T/move-type-alias/type_alias.h -check-prefix=CHECK-OLD-TEST-H-CASE1 %s
// CHECK-NEW-TEST-H-CASE1: typedef int Int1;
// CHECK-OLD-TEST-H-CASE1-NOT: typedef int Int1;
// -----------------------------------------------------------------------------
// Test moving type alias declarations.
// -----------------------------------------------------------------------------
// RUN: cp %S/Inputs/type_alias.h %T/move-type-alias/type_alias.h
// RUN: echo '#include "type_alias.h"' > %T/move-type-alias/type_alias.cpp
// RUN: clang-move -names="Int2" -new_cc=%T/move-type-alias/new_test.cpp -new_header=%T/move-type-alias/new_test.h -old_cc=%T/move-type-alias/type_alias.cpp -old_header=%T/move-type-alias/type_alias.h %T/move-type-alias/type_alias.cpp -- -std=c++11
// RUN: FileCheck -input-file=%T/move-type-alias/new_test.h -check-prefix=CHECK-NEW-TEST-H-CASE2 %s
// RUN: FileCheck -input-file=%T/move-type-alias/type_alias.h -check-prefix=CHECK-OLD-TEST-H-CASE2 %s
// CHECK-NEW-TEST-H-CASE2: using Int2 = int;
// CHECK-OLD-TEST-H-CASE2-NOT: using Int2 = int;
// -----------------------------------------------------------------------------
// Test moving template type alias declarations.
// -----------------------------------------------------------------------------
// RUN: cp %S/Inputs/type_alias.h %T/move-type-alias/type_alias.h
// RUN: echo '#include "type_alias.h"' > %T/move-type-alias/type_alias.cpp
// RUN: clang-move -names="B" -new_cc=%T/move-type-alias/new_test.cpp -new_header=%T/move-type-alias/new_test.h -old_cc=%T/move-type-alias/type_alias.cpp -old_header=%T/move-type-alias/type_alias.h %T/move-type-alias/type_alias.cpp -- -std=c++11
// RUN: FileCheck -input-file=%T/move-type-alias/new_test.h -check-prefix=CHECK-OLD-TEST-H-CASE3 %s
// CHECK-NEW-TEST-H-CASE3: template<class T> using B = A<T>;
// CHECK-OLD-TEST-H-CASE3-NOT: template<class T> using B = A<T>;<|fim▁hole|>
// -----------------------------------------------------------------------------
// Test not moving class-insided typedef declarations.
// -----------------------------------------------------------------------------
// RUN: cp %S/Inputs/type_alias.h %T/move-type-alias/type_alias.h
// RUN: echo '#include "type_alias.h"' > %T/move-type-alias/type_alias.cpp
// RUN: clang-move -names="C::Int3" -new_cc=%T/move-type-alias/new_test.cpp -new_header=%T/move-type-alias/new_test.h -old_cc=%T/move-type-alias/type_alias.cpp -old_header=%T/move-type-alias/type_alias.h %T/move-type-alias/type_alias.cpp -- -std=c++11
// RUN: FileCheck -input-file=%T/move-type-alias/new_test.h -allow-empty -check-prefix=CHECK-EMPTY %s
// CHECK-EMPTY: {{^}}{{$}}<|fim▁end|> | |
<|file_name|>ItemViewHtmlBody.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import * as Kurve from 'kurvejs';
import * as ScopedStyles from './ScopedStylePolyfill';
interface ItemViewHtmlBodyProps extends React.Props<ItemViewHtmlBody> {
style: React.CSSProperties
body?: string;
attachments?: Kurve.AttachmentDataModel[];
}
export default class ItemViewHtmlBody extends React.Component<ItemViewHtmlBodyProps, any> {
render() {
var body = this.props.body || "<img src = '/public/loading.gif' width = '25'' height = '25'/>";
return <div style={this.props.style} dangerouslySetInnerHTML={ this.parseMessageBody(body, this.props.attachments) } />
}
private parseMessageBody(html: string, attachments?: Kurve.AttachmentDataModel[]) {
var doc = document.implementation.createHTMLDocument("example");
doc.documentElement.innerHTML = html;
// Create a new <div/> in the body and move all existing body content to that the new div.
var resultElement = doc.createElement("div");
var node: Node;
while (node = doc.body.firstChild) {
doc.body.removeChild(node);
resultElement.appendChild(node);
}
doc.body.appendChild(resultElement);
// Move all styles in <head/> into the new <div/>
var headList = doc.getElementsByTagName("head");
if (headList.length == 1) {
var head = headList.item(0);
var styles = head.getElementsByTagName("style");
var styleIndex = styles.length;
while (styleIndex--) {
var styleNode = styles.item(styleIndex);
if (styleNode.parentNode === head) {
head.removeChild(styleNode);
resultElement.appendChild(styleNode);
}
}
}
// Inline attachments
var inlineImages = doc.body.querySelectorAll("img[src^='cid']");
attachments = attachments || [];
[].forEach.call(inlineImages, image => {
var contentId = image.src.replace('cid:', '');
var attachmentsFound = attachments.filter(attachment => attachment.contentId == contentId);
var attachment = attachmentsFound && attachmentsFound[0];
if (attachment) {
image.src = 'data:' + attachment.contentType + ';base64,' + attachment.contentBytes;
} else {
image.src = '/public/loading.gif';
image.width = 25;
image.height = 25;
}<|fim▁hole|> // Make sure all styles are scoped
var styles = doc.getElementsByTagName("style");
var styleIndex = styles.length;
while (styleIndex--) {
styles.item(styleIndex).setAttribute("scoped", "");
}
ScopedStyles.ScopeStyles(doc.documentElement); // polyfill scoping if necessary
return { __html: doc.body.innerHTML }
}
}<|fim▁end|> | });
|
<|file_name|>DemoHLS.java<|end_file_name|><|fim▁begin|>/*
* JBoss, Home of Professional Open Source
* Copyright 2006, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a full listing
* of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* (C) 2005-2006,
* @author JBoss Inc.
*/
/*
* Copyright (C) 2002,
*
* Arjuna Technologies Limited,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id: DemoHLS.java,v 1.2 2005/05/19 12:13:19 nmcl Exp $
*/
package com.arjuna.wsas.tests;
import com.arjuna.mw.wsas.context.Context;
import com.arjuna.mw.wsas.UserActivityFactory;
import com.arjuna.mw.wsas.common.GlobalId;
import com.arjuna.mw.wsas.activity.Outcome;
import com.arjuna.mw.wsas.activity.HLS;
import com.arjuna.mw.wsas.completionstatus.CompletionStatus;
import com.arjuna.mw.wsas.exceptions.*;
import java.util.*;
/**
* @author Mark Little ([email protected])
* @version $Id: DemoHLS.java,v 1.2 2005/05/19 12:13:19 nmcl Exp $
* @since 1.0.
*/
public class DemoHLS implements HLS
{
private Stack<GlobalId> _id;
public DemoHLS()
{
_id = new Stack<GlobalId>();
}
/**
* An activity has begun and is active on the current thread.
*/
public void begun () throws SystemException
{
try
{
GlobalId activityId = UserActivityFactory.userActivity().activityId();
_id.push(activityId);
System.out.println("DemoHLS.begun "+activityId);
}
catch (Exception ex)
{
ex.printStackTrace();
}
}
/**
* The current activity is completing with the specified completion status.
*
* @return The result of terminating the relationship of this HLS and
* the current activity.
*/
public Outcome complete (CompletionStatus cs) throws SystemException
{
try
{
System.out.println("DemoHLS.complete ( "+cs+" ) " + UserActivityFactory.userActivity().activityId());
}
catch (Exception ex)
{
ex.printStackTrace();
}
return null;
}
/**
* The activity has been suspended. How does the HLS know which activity
* has been suspended? It must remember what its notion of current is.
*/
public void suspended () throws SystemException
{
System.out.println("DemoHLS.suspended");
}
/**
* The activity has been resumed on the current thread.
*/
public void resumed () throws SystemException
{
System.out.println("DemoHLS.resumed");
}
/**
* The activity has completed and is no longer active on the current
* thread.
*/
public void completed () throws SystemException
{
try {
System.out.println("DemoHLS.completed "+ UserActivityFactory.userActivity().activityId());
} catch (Exception ex) {
ex.printStackTrace();
}<|fim▁hole|> }
}
/**
* The HLS name.
*/
public String identity () throws SystemException
{
return "DemoHLS";
}
/**
* The activity service maintains a priority ordered list of HLS
* implementations. If an HLS wishes to be ordered based on priority
* then it can return a non-negative value: the higher the value,
* the higher the priority and hence the earlier in the list of HLSes
* it will appear (and be used in).
*
* @return a positive value for the priority for this HLS, or zero/negative
* if the order is not important.
*/
public int priority () throws SystemException
{
return 0;
}
/**
* Return the context augmentation for this HLS, if any on the current
* activity.
*
* @return a context object or null if no augmentation is necessary.
*/
public Context context () throws SystemException
{
if (_id.isEmpty()) {
throw new SystemException("request for context when inactive");
}
try {
System.out.println("DemoHLS.context "+ UserActivityFactory.userActivity().activityId());
} catch (Exception ex) {
ex.printStackTrace();
}
return new DemoSOAPContextImple(identity() + "_" + _id.size());
}
}<|fim▁end|> | if (!_id.isEmpty()) {
_id.pop(); |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># Licensed to Hortonworks, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Hortonworks, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from djangomako.shortcuts import render_to_response
from django.shortcuts import redirect
from django.http import HttpResponse, Http404
from models import UserLocation
import settings
import os
import time
import string
from urlparse import urlparse
def tutorials_last_url(tutorial_view):
def save_user_location(request, *args):
if request.user.is_authenticated() \
and request.user.username != "AnonymousUser":
user_location = UserLocation.objects.get_or_create(user=request.user)[0]
user_location.step_location = request.build_absolute_uri()
user_location.save()
return tutorial_view(request, *args)
return save_user_location
def index(request):
location = settings.CONTENT_FRAME_URL
step_location = "/lesson/"
if request.user.is_authenticated() \
and request.user.username != "AnonymousUser":
try:
ustep = UserLocation.objects.get(user=request.user)<|fim▁hole|> step_location = ustep.step_location
if step_location == None:
step_location = "/lesson/"
if urlparse(hue_location).netloc==urlparse(location).netloc:
location = hue_location
except UserLocation.DoesNotExist:
pass
return render_to_response("lessons.html",
{'content' : location,
'step_location': step_location})
def content(request, page):
if page == '':
return redirect('/')
return render_to_response("content.html", {})
def sync_location(request):
if request.method == 'GET':
if not request.user.is_authenticated() \
or request.user.username == 'AnonymousUser':
return HttpResponse('')
hue_location = None
if 'loc' in request.GET:
hue_location = request.GET['loc']
ustep = UserLocation.objects.get_or_create(user=request.user)[0]
ustep.hue_location = hue_location
ustep.save()
return HttpResponse('')
else:
raise Http404
def get_file(request, path):
import mimetypes
from django.core.servers.basehttp import FileWrapper
git_files = os.path.join(settings.PROJECT_PATH, 'run/git_files')
rfile = os.path.join(git_files, path)
response = HttpResponse(FileWrapper(file(rfile, 'rb')),
mimetype=mimetypes.guess_type(rfile)[0])
return response
def network_info(request):
import subprocess
commands = [
"route -n",
"getent ahosts",
"ip addr",
"cat /etc/resolv.conf",
"cat /etc/hosts",
"ps aux | grep java",
"netstat -lnp",
]
netinfo = {cmd: subprocess.check_output(cmd, shell=True)
for cmd in commands}
return render_to_response("netinfo.html", {'info': netinfo})<|fim▁end|> | hue_location = ustep.hue_location |
<|file_name|>PluggableProjectWriter.java<|end_file_name|><|fim▁begin|>// Copyright (c) 1996-2002 The Regents of the University of California. All
// Rights Reserved. Permission to use, copy, modify, and distribute this
// software and its documentation without fee, and without a written
// agreement is hereby granted, provided that the above copyright notice
// and this paragraph appear in all copies. This software program and
// documentation are copyrighted by The Regents of the University of
<|fim▁hole|>// IS", without any accompanying services from The Regents. The Regents
// does not warrant that the operation of the program will be
// uninterrupted or error-free. The end-user understands that the program
// was developed for research purposes and is advised not to rely
// exclusively on the program for any reason. IN NO EVENT SHALL THE
// UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
// SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS,
// ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF
// THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF
// SUCH DAMAGE. THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE
// PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE UNIVERSITY OF
// CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT,
// UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
package org.argouml.application.api;
/** An plugin interface which identifies an ArgoUML data loader.
* <br>
* TODO: identify methods
*
* @author Thierry Lach
* @since ARGO0.11.3
*/
public interface PluggableProjectWriter extends Pluggable {
} /* End interface PluggableProjectWriter */<|fim▁end|> | // California. The software program and documentation are supplied "AS
|
<|file_name|>SimilarityExampleSet.java<|end_file_name|><|fim▁begin|>/*
* RapidMiner
*
* Copyright (C) 2001-2008 by Rapid-I and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapid-i.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.operator.similarity;
import java.util.Iterator;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.Attributes;
import com.rapidminer.example.Example;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.example.SimpleAttributes;
import com.rapidminer.example.set.AbstractExampleReader;
import com.rapidminer.example.set.AbstractExampleSet;
import com.rapidminer.example.set.MappedExampleSet;
import com.rapidminer.example.table.AttributeFactory;
import com.rapidminer.example.table.DoubleArrayDataRow;
import com.rapidminer.example.table.ExampleTable;
import com.rapidminer.example.table.NominalMapping;
import com.rapidminer.tools.Ontology;
import com.rapidminer.tools.math.similarity.DistanceMeasure;
/**
* This similarity based example set is used for the operator
* {@link ExampleSet2SimilarityExampleSet}.
*
* @author Ingo Mierswa
* @version $Id: SimilarityExampleSet.java,v 1.1 2008/09/08 18:53:49 ingomierswa Exp $
*/
public class SimilarityExampleSet extends AbstractExampleSet {
private static final long serialVersionUID = 4757975818441794105L;
private static class IndexExampleReader extends AbstractExampleReader {
private int index = 0;
private ExampleSet exampleSet;
public IndexExampleReader(ExampleSet exampleSet) {
this.exampleSet = exampleSet;
}
public boolean hasNext() {
return index < exampleSet.size() - 1;
}
public Example next() {
Example example = exampleSet.getExample(index);
index++;
return example;
}
}
private ExampleSet parent;
private Attribute parentIdAttribute;
private Attributes attributes;
private DistanceMeasure measure;
public SimilarityExampleSet(ExampleSet parent, DistanceMeasure measure) {
this.parent = parent;
this.parentIdAttribute = parent.getAttributes().getId();
this.attributes = new SimpleAttributes();
Attribute firstIdAttribute = null;
Attribute secondIdAttribute = null;
if (parentIdAttribute.isNominal()) {
firstIdAttribute = AttributeFactory.createAttribute("FIRST_ID", Ontology.NOMINAL);
secondIdAttribute = AttributeFactory.createAttribute("SECOND_ID", Ontology.NOMINAL);
} else {
firstIdAttribute = AttributeFactory.createAttribute("FIRST_ID", Ontology.NUMERICAL);
secondIdAttribute = AttributeFactory.createAttribute("SECOND_ID", Ontology.NUMERICAL);
}
this.attributes.addRegular(firstIdAttribute);
<|fim▁hole|>
// copying mapping of original id attribute
if (parentIdAttribute.isNominal()) {
NominalMapping mapping = parentIdAttribute.getMapping();
firstIdAttribute.setMapping(mapping);
secondIdAttribute.setMapping(mapping);
}
String name = "SIMILARITY";
if (measure.isDistance()) {
name = "DISTANCE";
}
Attribute similarityAttribute = AttributeFactory.createAttribute(name, Ontology.REAL);
this.attributes.addRegular(similarityAttribute);
similarityAttribute.setTableIndex(2);
this.measure = measure;
}
public boolean equals(Object o) {
if (!super.equals(o))
return false;
if (!(o instanceof MappedExampleSet))
return false;
SimilarityExampleSet other = (SimilarityExampleSet)o;
if (!this.measure.getClass().equals(other.measure.getClass()))
return false;
return true;
}
public int hashCode() {
return super.hashCode() ^ this.measure.getClass().hashCode();
}
public Attributes getAttributes() {
return this.attributes;
}
public Example getExample(int index) {
int firstIndex = index / this.parent.size();
int secondIndex = index % this.parent.size();
Example firstExample = this.parent.getExample(firstIndex);
Example secondExample = this.parent.getExample(secondIndex);
double[] data = new double[3];
data[0] = firstExample.getValue(parentIdAttribute);
data[1] = secondExample.getValue(parentIdAttribute);
if (measure.isDistance())
data[2] = measure.calculateDistance(firstExample, secondExample);
else
data[2] = measure.calculateSimilarity(firstExample, secondExample);
return new Example(new DoubleArrayDataRow(data), this);
}
public Iterator<Example> iterator() {
return new IndexExampleReader(this);
}
public ExampleTable getExampleTable() {
return null;//this.parent.getExampleTable();
}
public int size() {
return this.parent.size() * this.parent.size();
}
}<|fim▁end|> | this.attributes.addRegular(secondIdAttribute);
firstIdAttribute.setTableIndex(0);
secondIdAttribute.setTableIndex(1);
|
<|file_name|>attention.py<|end_file_name|><|fim▁begin|>import theano
import numpy
import scipy
from theano import tensor
from blocks.bricks import Initializable, Linear
from blocks.bricks.parallel import Parallel
from blocks.bricks.base import lazy, application
from blocks.bricks.attention import (
GenericSequenceAttention, SequenceContentAttention,
ShallowEnergyComputer)
from blocks.utils import (put_hook, ipdb_breakpoint, shared_floatx,
shared_floatx_nans)
from lvsr.expressions import conv1d
floatX = theano.config.floatX
import logging
logger = logging.getLogger(__name__)
class Conv1D(Initializable):
def __init__(self, num_filters, filter_length, **kwargs):
self.num_filters = num_filters
self.filter_length = filter_length
super(Conv1D, self).__init__(**kwargs)
def _allocate(self):
self.parameters = [shared_floatx_nans((self.num_filters, self.filter_length),
name="filters")]
def _initialize(self):
self.weights_init.initialize(self.parameters[0], self.rng)
def apply(self, input_):
return conv1d(input_, self.parameters[0], border_mode="full")
class SequenceContentAndConvAttention(GenericSequenceAttention, Initializable):
@lazy()
def __init__(self, match_dim, conv_n, conv_num_filters=1,
state_transformer=None,
attended_transformer=None, energy_computer=None,
prior=None, energy_normalizer=None, **kwargs):
super(SequenceContentAndConvAttention, self).__init__(**kwargs)
if not state_transformer:
state_transformer = Linear(use_bias=False)
self.match_dim = match_dim
self.state_transformer = state_transformer
self.state_transformers = Parallel(input_names=self.state_names,
prototype=state_transformer,
name="state_trans")
if not attended_transformer:
# Only this contributor to the match vector
# is allowed to have biases
attended_transformer = Linear(name="preprocess")
if not energy_normalizer:
energy_normalizer = 'softmax'
self.energy_normalizer = energy_normalizer
if not energy_computer:
energy_computer = ShallowEnergyComputer(
name="energy_comp",
use_bias=self.energy_normalizer != 'softmax')
self.filter_handler = Linear(name="handler", use_bias=False)
self.attended_transformer = attended_transformer
self.energy_computer = energy_computer
if not prior:
prior = dict(type='expanding', initial_begin=0, initial_end=10000,
min_speed=0, max_speed=0)
self.prior = prior
self.conv_n = conv_n
self.conv_num_filters = conv_num_filters
self.conv = Conv1D(conv_num_filters, 2 * conv_n + 1)
self.children = [self.state_transformers, self.attended_transformer,
self.energy_computer, self.filter_handler, self.conv]
def _push_allocation_config(self):
self.state_transformers.input_dims = self.state_dims
self.state_transformers.output_dims = [self.match_dim
for name in self.state_names]
self.attended_transformer.input_dim = self.attended_dim
self.attended_transformer.output_dim = self.match_dim
self.energy_computer.input_dim = self.match_dim
self.energy_computer.output_dim = 1
self.filter_handler.input_dim = self.conv_num_filters
self.filter_handler.output_dim = self.match_dim
@application
def compute_energies(self, attended, preprocessed_attended,
previous_weights, states):
if not preprocessed_attended:
preprocessed_attended = self.preprocess(attended)
transformed_states = self.state_transformers.apply(as_dict=True,
**states)
# Broadcasting of transformed states should be done automatically
match_vectors = sum(transformed_states.values(),
preprocessed_attended)
conv_result = self.conv.apply(previous_weights)
match_vectors += self.filter_handler.apply(
conv_result[:, :, self.conv_n:-self.conv_n]
.dimshuffle(0, 2, 1)).dimshuffle(1, 0, 2)
energies = self.energy_computer.apply(match_vectors).reshape(
match_vectors.shape[:-1], ndim=match_vectors.ndim - 1)
return energies
@staticmethod
def mask_row(offset, length, empty_row):
return tensor.set_subtensor(empty_row[offset:offset+length], 1)
@application(outputs=['weighted_averages', 'weights', 'energies', 'step'])
def take_glimpses(self, attended, preprocessed_attended=None,
attended_mask=None, weights=None, step=None, **states):
# Cut the considered window.
p = self.prior
length = attended.shape[0]
prior_type = p.get('type', 'expanding')
if prior_type=='expanding':
begin = p['initial_begin'] + step[0] * p['min_speed']
end = p['initial_end'] + step[0] * p['max_speed']
begin = tensor.maximum(0, tensor.minimum(length - 1, begin))
end = tensor.maximum(0, tensor.minimum(length, end))
additional_mask = None
elif prior_type.startswith('window_around'):
#check whether we want the mean or median!
if prior_type == 'window_around_mean':
position_in_attended = tensor.arange(length, dtype=floatX)[None, :]
expected_last_source_pos = (weights * position_in_attended).sum(axis=1)
elif prior_type == 'window_around_median':
ali_to_05 = tensor.extra_ops.cumsum(weights, axis=1) - 0.5
ali_to_05 = (ali_to_05>=0)
ali_median_pos = ali_to_05[:,1:] - ali_to_05[:,:-1]
expected_last_source_pos = tensor.argmax(ali_median_pos, axis=1)
expected_last_source_pos = theano.gradient.disconnected_grad(
expected_last_source_pos)
else:
raise ValueError
#the window taken around each element
begins = tensor.floor(expected_last_source_pos - p['before'])
ends = tensor.ceil(expected_last_source_pos + p['after'])
#the global window to optimize computations
begin = tensor.maximum(0, begins.min()).astype('int64')
end = tensor.minimum(length, ends.max()).astype('int64')
#the new mask, already cut to begin:end
position_in_attended_cut = tensor.arange(
begin * 1., end * 1., 1., dtype=floatX)[None, :]
additional_mask = ((position_in_attended_cut > begins[:,None]) *
(position_in_attended_cut < ends[:,None]))
else:
raise Exception("Unknown prior type: %s", prior_type)
begin = tensor.floor(begin).astype('int64')
end = tensor.ceil(end).astype('int64')
attended_cut = attended[begin:end]
preprocessed_attended_cut = (preprocessed_attended[begin:end]
if preprocessed_attended else None)
attended_mask_cut = (
(attended_mask[begin:end] if attended_mask else None)
* (additional_mask.T if additional_mask else 1))
weights_cut = weights[:, begin:end]
# Call
energies_cut = self.compute_energies(attended_cut, preprocessed_attended_cut,
weights_cut, states)
weights_cut = self.compute_weights(energies_cut, attended_mask_cut)
weighted_averages = self.compute_weighted_averages(weights_cut, attended_cut)
# Paste
new_weights = new_energies = tensor.zeros_like(weights.T)
new_weights = tensor.set_subtensor(new_weights[begin:end],
weights_cut)
new_energies = tensor.set_subtensor(new_energies[begin:end],
energies_cut)
return weighted_averages, new_weights.T, new_energies.T, step + 1
@take_glimpses.property('inputs')
def take_glimpses_inputs(self):
return (['attended', 'preprocessed_attended',
'attended_mask', 'weights', 'step'] +
self.state_names)
@application
def compute_weights(self, energies, attended_mask):
if self.energy_normalizer == 'softmax':
logger.debug("Using softmax attention weights normalization")
energies = energies - energies.max(axis=0)
unnormalized_weights = tensor.exp(energies)
elif self.energy_normalizer == 'logistic':
logger.debug("Using smoothfocus (logistic sigm) "
"attention weights normalization")
unnormalized_weights = tensor.nnet.sigmoid(energies)
elif self.energy_normalizer == 'relu':
logger.debug("Using ReLU attention weights normalization")
unnormalized_weights = tensor.maximum(energies/1000., 0.0)
else:
raise Exception("Unknown energey_normalizer: {}"
.format(self.energy_computer))
if attended_mask:
unnormalized_weights *= attended_mask
# If mask consists of all zeros use 1 as the normalization coefficient
normalization = (unnormalized_weights.sum(axis=0) +
tensor.all(1 - attended_mask, axis=0))
return unnormalized_weights / normalization
@application
def initial_glimpses(self, batch_size, attended):
return ([tensor.zeros((batch_size, self.attended_dim))]
+ 2 * [tensor.concatenate([
tensor.ones((batch_size, 1)),
tensor.zeros((batch_size, attended.shape[0] - 1))],<|fim▁hole|> + [tensor.zeros((batch_size,), dtype='int64')])
@initial_glimpses.property('outputs')
def initial_glimpses_outputs(self):
return ['weight_averages', 'weights', 'energies', 'step']
@application(inputs=['attended'], outputs=['preprocessed_attended'])
def preprocess(self, attended):
return self.attended_transformer.apply(attended)
def get_dim(self, name):
if name in ['weighted_averages']:
return self.attended_dim
if name in ['weights', 'energies', 'step']:
return 0
return super(SequenceContentAndConvAttention, self).get_dim(name)<|fim▁end|> | axis=1)] |
<|file_name|>signals.py<|end_file_name|><|fim▁begin|>"""
mock_django.signals
~~~~~~~~~~~~~~~~
:copyright: (c) 2012 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
import contextlib
import mock
@contextlib.contextmanager
def mock_signal_receiver(signal, wraps=None, **kwargs):
"""<|fim▁hole|> Temporarily attaches a receiver to the provided ``signal`` within the scope
of the context manager.
The mocked receiver is returned as the ``as`` target of the ``with``
statement.
To have the mocked receiver wrap a callable, pass the callable as the
``wraps`` keyword argument. All other keyword arguments provided are passed
through to the signal's ``connect`` method.
>>> with mock_signal_receiver(post_save, sender=Model) as receiver:
>>> Model.objects.create()
>>> assert receiver.call_count = 1
"""
if wraps is None:
def wraps(*args, **kwrags):
return None
receiver = mock.Mock(wraps=wraps)
signal.connect(receiver, **kwargs)
yield receiver
signal.disconnect(receiver)<|fim▁end|> | |
<|file_name|>test_volume_type_access.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import webob
from cinder.api.contrib import volume_type_access as type_access
from cinder.api.v2 import types as types_api_v2
from cinder import context
from cinder import db
from cinder import exception
from cinder import test
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_constants as fake
def generate_type(type_id, is_public):
return {
'id': type_id,
'name': u'test',
'deleted': False,
'created_at': datetime.datetime(2012, 1, 1, 1, 1, 1, 1),
'updated_at': None,
'deleted_at': None,
'is_public': bool(is_public)
}
VOLUME_TYPES = {
fake.VOLUME_TYPE_ID: generate_type(fake.VOLUME_TYPE_ID, True),
fake.VOLUME_TYPE2_ID: generate_type(fake.VOLUME_TYPE2_ID, True),
fake.VOLUME_TYPE3_ID: generate_type(fake.VOLUME_TYPE3_ID, False),
fake.VOLUME_TYPE4_ID: generate_type(fake.VOLUME_TYPE4_ID, False)}
PROJ1_UUID = fake.PROJECT_ID
PROJ2_UUID = fake.PROJECT2_ID
PROJ3_UUID = fake.PROJECT3_ID
ACCESS_LIST = [{'volume_type_id': fake.VOLUME_TYPE3_ID,
'project_id': PROJ2_UUID},
{'volume_type_id': fake.VOLUME_TYPE3_ID,
'project_id': PROJ3_UUID},
{'volume_type_id': fake.VOLUME_TYPE4_ID,
'project_id': PROJ3_UUID}]
def fake_volume_type_get(context, id, inactive=False, expected_fields=None):
vol = VOLUME_TYPES[id]
if expected_fields and 'projects' in expected_fields:
vol['projects'] = [a['project_id']
for a in ACCESS_LIST if a['volume_type_id'] == id]
return vol
def _has_type_access(type_id, project_id):
for access in ACCESS_LIST:
if access['volume_type_id'] == type_id and \
access['project_id'] == project_id:
return True
return False
def fake_volume_type_get_all(context, inactive=False, filters=None,
marker=None, limit=None, sort_keys=None,
sort_dirs=None, offset=None, list_result=False):
if filters is None or filters['is_public'] is None:
if list_result:
return list(VOLUME_TYPES.values())
return VOLUME_TYPES
res = {}
for k, v in VOLUME_TYPES.items():
if filters['is_public'] and _has_type_access(k, context.project_id):
res.update({k: v})
continue
if v['is_public'] == filters['is_public']:
res.update({k: v})
if list_result:
return list(res.values())
return res
class FakeResponse(object):
obj = {'volume_type': {'id': fake.VOLUME_TYPE_ID},
'volume_types': [
{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE3_ID}]}
def attach(self, **kwargs):
pass
class FakeRequest(object):
environ = {"cinder.context": context.get_admin_context()}
def cached_resource_by_id(self, resource_id, name=None):
return VOLUME_TYPES[resource_id]
class VolumeTypeAccessTest(test.TestCase):
def setUp(self):
super(VolumeTypeAccessTest, self).setUp()
self.type_controller_v2 = types_api_v2.VolumeTypesController()
self.type_access_controller = type_access.VolumeTypeAccessController()
self.type_action_controller = type_access.VolumeTypeActionController()
self.req = FakeRequest()
self.context = self.req.environ['cinder.context']
self.stubs.Set(db, 'volume_type_get',
fake_volume_type_get)
self.stubs.Set(db, 'volume_type_get_all',
fake_volume_type_get_all)
def assertVolumeTypeListEqual(self, expected, observed):
self.assertEqual(len(expected), len(observed))
expected = sorted(expected, key=lambda item: item['id'])
observed = sorted(observed, key=lambda item: item['id'])
for d1, d2 in zip(expected, observed):
self.assertEqual(d1['id'], d2['id'])
def test_list_type_access_public(self):
"""Querying os-volume-type-access on public type should return 404."""
req = fakes.HTTPRequest.blank('/v2/%s/types/os-volume-type-access' %
fake.PROJECT_ID,
use_admin_context=True)
self.assertRaises(webob.exc.HTTPNotFound,
self.type_access_controller.index,
req, fake.VOLUME_TYPE2_ID)
def test_list_type_access_private(self):
expected = {'volume_type_access': [
{'volume_type_id': fake.VOLUME_TYPE3_ID,
'project_id': PROJ2_UUID},
{'volume_type_id': fake.VOLUME_TYPE3_ID,
'project_id': PROJ3_UUID}]}
result = self.type_access_controller.index(self.req,
fake.VOLUME_TYPE3_ID)
self.assertEqual(expected, result)
def test_list_with_no_context(self):
req = fakes.HTTPRequest.blank('/v2/flavors/%s/flavors' %
fake.PROJECT_ID)
def fake_authorize(context, target=None, action=None):
raise exception.PolicyNotAuthorized(action='index')
self.stubs.Set(type_access, 'authorize', fake_authorize)
self.assertRaises(exception.PolicyNotAuthorized,
self.type_access_controller.index,
req, fake.PROJECT_ID)
def test_list_type_with_admin_default_proj1(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE2_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types' % fake.PROJECT_ID,
use_admin_context=True)
req.environ['cinder.context'].project_id = PROJ1_UUID
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_admin_default_proj2(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE2_ID},
{'id': fake.VOLUME_TYPE3_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types' % PROJ2_UUID,
use_admin_context=True)
req.environ['cinder.context'].project_id = PROJ2_UUID
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_admin_ispublic_true(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE2_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types?is_public=true' %
fake.PROJECT_ID,
use_admin_context=True)
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_admin_ispublic_false(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE3_ID},
{'id': fake.VOLUME_TYPE4_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types?is_public=false' %
fake.PROJECT_ID,
use_admin_context=True)
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_admin_ispublic_false_proj2(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE3_ID},
{'id': fake.VOLUME_TYPE4_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types?is_public=false' %
fake.PROJECT_ID,
use_admin_context=True)
req.environ['cinder.context'].project_id = PROJ2_UUID
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_admin_ispublic_none(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE2_ID},
{'id': fake.VOLUME_TYPE3_ID},
{'id': fake.VOLUME_TYPE4_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types?is_public=none' %
fake.PROJECT_ID,
use_admin_context=True)
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_no_admin_default(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE2_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types' % fake.PROJECT_ID,
use_admin_context=False)
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_no_admin_ispublic_true(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE2_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types?is_public=true' %
fake.PROJECT_ID,
use_admin_context=False)
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_no_admin_ispublic_false(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE2_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types?is_public=false' %
fake.PROJECT_ID,
use_admin_context=False)
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_list_type_with_no_admin_ispublic_none(self):
expected = {'volume_types': [{'id': fake.VOLUME_TYPE_ID},
{'id': fake.VOLUME_TYPE2_ID}]}
req = fakes.HTTPRequest.blank('/v2/%s/types?is_public=none' %
fake.PROJECT_ID,
use_admin_context=False)
result = self.type_controller_v2.index(req)
self.assertVolumeTypeListEqual(expected['volume_types'],
result['volume_types'])
def test_show(self):
resp = FakeResponse()
self.type_action_controller.show(self.req, resp, fake.VOLUME_TYPE_ID)
self.assertEqual({'id': fake.VOLUME_TYPE_ID,
'os-volume-type-access:is_public': True},
resp.obj['volume_type'])
def test_detail(self):
resp = FakeResponse()
self.type_action_controller.detail(self.req, resp)
self.assertEqual(
[{'id': fake.VOLUME_TYPE_ID,
'os-volume-type-access:is_public': True},
{'id': fake.VOLUME_TYPE3_ID,
'os-volume-type-access:is_public': False}],
resp.obj['volume_types'])
def test_create(self):
resp = FakeResponse()
self.type_action_controller.create(self.req, {}, resp)
self.assertEqual({'id': fake.VOLUME_TYPE_ID,
'os-volume-type-access:is_public': True},
resp.obj['volume_type'])
def test_add_project_access(self):
def stub_add_volume_type_access(context, type_id, project_id):
self.assertEqual(fake.VOLUME_TYPE4_ID, type_id, "type_id")
self.assertEqual(PROJ2_UUID, project_id, "project_id")
self.stubs.Set(db, 'volume_type_access_add',
stub_add_volume_type_access)
body = {'addProjectAccess': {'project': PROJ2_UUID}}
req = fakes.HTTPRequest.blank('/v2/%s/types/%s/action' % (
fake.PROJECT_ID, fake.VOLUME_TYPE3_ID),
use_admin_context=True)
result = self.type_action_controller._addProjectAccess(
req, fake.VOLUME_TYPE4_ID, body)
self.assertEqual(202, result.status_code)
def test_add_project_access_with_no_admin_user(self):
req = fakes.HTTPRequest.blank('/v2/%s/types/%s/action' % (
fake.PROJECT_ID, fake.VOLUME_TYPE3_ID),
use_admin_context=False)
body = {'addProjectAccess': {'project': PROJ2_UUID}}
self.assertRaises(exception.PolicyNotAuthorized,
self.type_action_controller._addProjectAccess,
req, fake.VOLUME_TYPE3_ID, body)
def test_add_project_access_with_already_added_access(self):
def stub_add_volume_type_access(context, type_id, project_id):
raise exception.VolumeTypeAccessExists(volume_type_id=type_id,
project_id=project_id)
self.stubs.Set(db, 'volume_type_access_add',
stub_add_volume_type_access)
body = {'addProjectAccess': {'project': PROJ2_UUID}}
req = fakes.HTTPRequest.blank('/v2/%s/types/%s/action' % (
fake.PROJECT_ID, fake.VOLUME_TYPE3_ID), use_admin_context=True)
self.assertRaises(webob.exc.HTTPConflict,
self.type_action_controller._addProjectAccess,
req, fake.VOLUME_TYPE3_ID, body)
def test_remove_project_access_with_bad_access(self):
def stub_remove_volume_type_access(context, type_id, project_id):<|fim▁hole|> stub_remove_volume_type_access)
body = {'removeProjectAccess': {'project': PROJ2_UUID}}
req = fakes.HTTPRequest.blank('/v2/%s/types/%s/action' % (
fake.PROJECT_ID, fake.VOLUME_TYPE3_ID), use_admin_context=True)
self.assertRaises(webob.exc.HTTPNotFound,
self.type_action_controller._removeProjectAccess,
req, fake.VOLUME_TYPE4_ID, body)
def test_remove_project_access_with_no_admin_user(self):
req = fakes.HTTPRequest.blank('/v2/%s/types/%s/action' % (
fake.PROJECT_ID, fake.VOLUME_TYPE3_ID), use_admin_context=False)
body = {'removeProjectAccess': {'project': PROJ2_UUID}}
self.assertRaises(exception.PolicyNotAuthorized,
self.type_action_controller._removeProjectAccess,
req, fake.VOLUME_TYPE3_ID, body)<|fim▁end|> | raise exception.VolumeTypeAccessNotFound(volume_type_id=type_id,
project_id=project_id)
self.stubs.Set(db, 'volume_type_access_remove', |
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>$(window).load(function(){FusionCharts.ready(function () {
var revenueChart = new FusionCharts({
type: 'column2d',
renderAt: 'chart-container',
width: '500',
height: '300',
dataFormat: 'json',
dataSource: {
"chart": {
"caption": "Monthly Revenue",
"subCaption": "Last year",
"xAxisName": "Month",
"yAxisName": "Amount (In USD)",
"numberPrefix": "$",
"theme": "fint",
//Configure x-axis labels to display in staggered mode
"labelDisplay": "stagger",
"staggerLines": "3"
},
"data": [{<|fim▁hole|> "label": "February",
"value": "810000"
}, {
"label": "March",
"value": "720000"
}, {
"label": "April",
"value": "550000"
}, {
"label": "May",
"value": "910000"
}, {
"label": "June",
"value": "510000"
}, {
"label": "July",
"value": "680000"
}, {
"label": "August",
"value": "620000"
}, {
"label": "September",
"value": "610000"
}, {
"label": "October",
"value": "490000"
}, {
"label": "November",
"value": "900000"
}, {
"label": "December",
"value": "730000"
}]
}
}).render();
});});<|fim▁end|> | "label": "January",
"value": "420000"
}, { |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>from openslides.core.config import config
from openslides.motions.exceptions import WorkflowError
from openslides.motions.models import Motion, State, Workflow
from openslides.users.models import User
from openslides.utils.test import TestCase
class ModelTest(TestCase):
def setUp(self):
self.motion = Motion.objects.create(title='v1')
self.test_user = User.objects.create(username='blub')
# Use the simple workflow
self.workflow = Workflow.objects.get(pk=1)
def test_create_new_version(self):
motion = self.motion
self.assertEqual(motion.versions.count(), 1)
# new data, but no new version
motion.title = 'new title'
motion.save()
self.assertEqual(motion.versions.count(), 1)
# new data and new version
motion.text = 'new text'
motion.save(use_version=motion.get_new_version())
self.assertEqual(motion.versions.count(), 2)
self.assertEqual(motion.title, 'new title')
self.assertEqual(motion.text, 'new text')
def test_version_data(self):
motion = Motion()
self.assertEqual(motion.title, '')
with self.assertRaises(AttributeError):
self._title
motion.title = 'title'
self.assertEqual(motion._title, 'title')
motion.text = 'text'<|fim▁hole|>
motion.reason = 'reason'
self.assertEqual(motion._reason, 'reason')
def test_version(self):
motion = self.motion
motion.title = 'v2'
motion.save(use_version=motion.get_new_version())
motion.title = 'v3'
motion.save(use_version=motion.get_new_version())
with self.assertRaises(AttributeError):
self._title
self.assertEqual(motion.title, 'v3')
def test_supporter(self):
self.assertFalse(self.motion.is_supporter(self.test_user))
self.motion.supporters.add(self.test_user)
self.assertTrue(self.motion.is_supporter(self.test_user))
self.motion.supporters.remove(self.test_user)
self.assertFalse(self.motion.is_supporter(self.test_user))
def test_state(self):
self.motion.reset_state()
self.assertEqual(self.motion.state.name, 'submitted')
self.motion.state = State.objects.get(pk=5)
self.assertEqual(self.motion.state.name, 'published')
with self.assertRaises(WorkflowError):
self.motion.create_poll()
self.motion.state = State.objects.get(pk=6)
self.assertEqual(self.motion.state.name, 'permitted')
self.assertEqual(self.motion.state.get_action_word(), 'Permit')
self.assertFalse(self.motion.get_allowed_actions(self.test_user)['support'])
self.assertFalse(self.motion.get_allowed_actions(self.test_user)['unsupport'])
def test_new_states_or_workflows(self):
workflow_1 = Workflow.objects.create(name='W1')
state_1 = State.objects.create(name='S1', workflow=workflow_1)
workflow_1.first_state = state_1
workflow_1.save()
workflow_2 = Workflow.objects.create(name='W2')
state_2 = State.objects.create(name='S2', workflow=workflow_2)
workflow_2.first_state = state_2
workflow_2.save()
state_3 = State.objects.create(name='S3', workflow=workflow_1)
with self.assertRaises(WorkflowError):
workflow_2.first_state = state_3
workflow_2.save()
with self.assertRaises(WorkflowError):
state_1.next_states.add(state_2)
state_1.save()
def test_two_empty_identifiers(self):
Motion.objects.create(title='foo', text='bar', identifier='')
Motion.objects.create(title='foo2', text='bar2', identifier='')
def test_do_not_create_new_version_when_permit_old_version(self):
motion = Motion()
motion.title = 'foo'
motion.text = 'bar'
motion.save()
first_version = motion.get_last_version()
motion = Motion.objects.get(pk=motion.pk)
motion.title = 'New Title'
motion.save(use_version=motion.get_new_version())
new_version = motion.get_last_version()
self.assertEqual(motion.versions.count(), 2)
motion.active_version = new_version
motion.save()
self.assertEqual(motion.versions.count(), 2)
motion.active_version = first_version
motion.save(use_version=False)
self.assertEqual(motion.versions.count(), 2)
def test_unicode_with_no_active_version(self):
motion = Motion.objects.create(
title='test_title_Koowoh1ISheemeey1air',
text='test_text_zieFohph0doChi1Uiyoh',
identifier='test_identifier_VohT1hu9uhiSh6ooVBFS')
motion.active_version = None
motion.save(update_fields=['active_version'])
# motion.__unicode__() raised an AttributeError
self.assertEqual(str(motion), 'test_title_Koowoh1ISheemeey1air')
def test_is_amendment(self):
config['motions_amendments_enabled'] = True
amendment = Motion.objects.create(title='amendment', parent=self.motion)
self.assertTrue(amendment.is_amendment())
self.assertFalse(self.motion.is_amendment())
def test_set_identifier_allready_set(self):
"""
If the motion already has a identifier, the method does nothing.
"""
motion = Motion(identifier='My test identifier')
motion.set_identifier()
self.assertEqual(motion.identifier, 'My test identifier')
def test_set_identifier_manually(self):
"""
If the config is set to manually, the method does nothing.
"""
config['motions_identifier'] = 'manually'
motion = Motion()
motion.set_identifier()
# If the identifier should be set manually, the method does nothing
self.assertIsNone(motion.identifier)
def test_set_identifier_amendment(self):
"""
If the motion is an amendment, the identifier is the identifier from the
parent + a suffix.
"""
config['motions_amendments_enabled'] = True
self.motion.identifier = 'Parent identifier'
self.motion.save()
motion = Motion(parent=self.motion)
motion.set_identifier()
self.assertEqual(motion.identifier, 'Parent identifier A 1')
def test_set_identifier_second_amendment(self):
"""
If a motion has already an amendment, the second motion gets another
identifier.
"""
config['motions_amendments_enabled'] = True
self.motion.identifier = 'Parent identifier'
self.motion.save()
Motion.objects.create(title='Amendment1', parent=self.motion)
motion = Motion(parent=self.motion)
motion.set_identifier()
self.assertEqual(motion.identifier, 'Parent identifier A 2')
class ConfigTest(TestCase):
def test_stop_submitting(self):
self.assertFalse(config['motions_stop_submitting'])<|fim▁end|> | self.assertEqual(motion._text, 'text') |
<|file_name|>introspection.py<|end_file_name|><|fim▁begin|>from django.db.backends import BaseDatabaseIntrospection
class DatabaseIntrospection(BaseDatabaseIntrospection):<|fim▁hole|>
def get_table_list(self, cursor):
"Returns a list of table names in the current database."
cursor.execute("SHOW TABLES")
return [row[0] for row in cursor.fetchall()]<|fim▁end|> | |
<|file_name|>ports.rs<|end_file_name|><|fim▁begin|>use core_foundation::base::OSStatus;
use coremidi_sys::{MIDIPortConnectSource, MIDIPortDisconnectSource, MIDIPortDispose, MIDISend};
<|fim▁hole|>use crate::endpoints::destinations::Destination;
use crate::endpoints::sources::Source;
use crate::object::Object;
use crate::packets::PacketList;
/// A MIDI connection port owned by a client.
/// See [MIDIPortRef](https://developer.apple.com/reference/coremidi/midiportref).
///
/// Ports can't be instantiated directly, but through a client.
///
#[derive(Debug)]
pub struct Port {
pub(crate) object: Object,
}
impl Deref for Port {
type Target = Object;
fn deref(&self) -> &Object {
&self.object
}
}
impl Drop for Port {
fn drop(&mut self) {
unsafe { MIDIPortDispose(self.object.0) };
}
}
/// An output [MIDI port](https://developer.apple.com/reference/coremidi/midiportref) owned by a client.
///
/// A simple example to create an output port and send a MIDI event:
///
/// ```rust,no_run
/// let client = coremidi::Client::new("example-client").unwrap();
/// let output_port = client.output_port("example-port").unwrap();
/// let destination = coremidi::Destination::from_index(0).unwrap();
/// let packets = coremidi::PacketBuffer::new(0, &[0x90, 0x40, 0x7f]);
/// output_port.send(&destination, &packets).unwrap();
/// ```
#[derive(Debug)]
pub struct OutputPort {
pub(crate) port: Port,
}
impl OutputPort {
/// Send a list of packets to a destination.
/// See [MIDISend](https://developer.apple.com/reference/coremidi/1495289-midisend).
///
pub fn send(
&self,
destination: &Destination,
packet_list: &PacketList,
) -> Result<(), OSStatus> {
let status = unsafe {
MIDISend(
self.port.object.0,
destination.endpoint.object.0,
packet_list.as_ptr(),
)
};
if status == 0 {
Ok(())
} else {
Err(status)
}
}
}
impl Deref for OutputPort {
type Target = Port;
fn deref(&self) -> &Port {
&self.port
}
}
/// An input [MIDI port](https://developer.apple.com/reference/coremidi/midiportref) owned by a client.
///
/// A simple example to create an input port:
///
/// ```rust,no_run
/// let client = coremidi::Client::new("example-client").unwrap();
/// let input_port = client.input_port("example-port", |packet_list| println!("{}", packet_list)).unwrap();
/// let source = coremidi::Source::from_index(0).unwrap();
/// input_port.connect_source(&source);
/// ```
#[derive(Debug)]
pub struct InputPort {
// Note: the order is important here, port needs to be dropped first
pub(crate) port: Port,
pub(crate) callback: BoxedCallback<PacketList>,
}
impl InputPort {
pub fn connect_source(&self, source: &Source) -> Result<(), OSStatus> {
let status =
unsafe { MIDIPortConnectSource(self.object.0, source.object.0, ptr::null_mut()) };
if status == 0 {
Ok(())
} else {
Err(status)
}
}
pub fn disconnect_source(&self, source: &Source) -> Result<(), OSStatus> {
let status = unsafe { MIDIPortDisconnectSource(self.object.0, source.object.0) };
if status == 0 {
Ok(())
} else {
Err(status)
}
}
}
impl Deref for InputPort {
type Target = Port;
fn deref(&self) -> &Port {
&self.port
}
}<|fim▁end|> | use std::ops::Deref;
use std::ptr;
use crate::callback::BoxedCallback; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>"""Tests for the fido component."""<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>extern crate regex;
pub struct Prefix {
pub nick: String,
pub user: String,
pub host: String
}
impl Prefix {<|fim▁hole|> pub fn parse(prefix: &str) -> Option<Prefix> {
let re = match regex::Regex::new(r"^:(.+)!(.+)@(.+)$") {
Ok(re) => re,
Err(err) => panic!("{}", err),
};
match re.captures(prefix) {
Some(caps) => Some(Prefix{
nick: caps.at(1).to_string(),
user: caps.at(2).to_string(),
host: caps.at(3).to_string()
}),
None => None
}
}
}
pub struct Message {
pub prefix: Option<Prefix>,
pub command: String,
pub trailing: String
}
impl Message {
// Returns a single token from a line
pub fn parse_token(line: &str) -> (&str, &str) {
let tokens: Vec<&str> = line.splitn(1, ' ').collect();
return (tokens[0], tokens[1]);
}
/// Parses a line and returns a Message instance
pub fn parse(line: &str) -> Message {
let (prefix, line) = Message::parse_token(line);
let (command, line) = Message::parse_token(line);
Message::new(prefix, command, line)
}
pub fn new(prefix: &str, command: &str, trailing: &str) -> Message {
Message{
prefix: Prefix::parse(prefix),
command: command.to_string(),
trailing: trailing.to_string()
}
}
}<|fim▁end|> | /// Attempts to parse a prefix, e.g. ":[email protected]" |
<|file_name|>BulkIndexingStressTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.stresstest.indexing;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import java.util.concurrent.ThreadLocalRandom;
/**
*/
public class BulkIndexingStressTest {
public static void main(String[] args) {
final int NUMBER_OF_NODES = 4;
final int NUMBER_OF_INDICES = 600;
final int BATCH = 300;
final Settings nodeSettings = ImmutableSettings.settingsBuilder().put("index.number_of_shards", 2).build();
// ESLogger logger = Loggers.getLogger("org.elasticsearch");
// logger.setLevel("DEBUG");
Node[] nodes = new Node[NUMBER_OF_NODES];
for (int i = 0; i < nodes.length; i++) {
nodes[i] = NodeBuilder.nodeBuilder().settings(nodeSettings).node();
}
Client client = nodes.length == 1 ? nodes[0].client() : nodes[1].client();
while (true) {
BulkRequestBuilder bulkRequest = client.prepareBulk();
for (int i = 0; i < BATCH; i++) {
bulkRequest.add(Requests.indexRequest("test" + ThreadLocalRandom.current().nextInt(NUMBER_OF_INDICES)).type("type").source("field", "value"));
}
BulkResponse bulkResponse = bulkRequest.execute().actionGet();
if (bulkResponse.hasFailures()) {
for (BulkItemResponse item : bulkResponse) {
if (item.isFailed()) {
System.out.println("failed response:" + item.getFailureMessage());
}
}
throw new RuntimeException("Failed responses");<|fim▁hole|> ;
}
}
}<|fim▁end|> | } |
<|file_name|>climate.py<|end_file_name|><|fim▁begin|>"""
Support for KNX/IP climate devices.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.knx/
"""
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.climate import (
PLATFORM_SCHEMA, SUPPORT_ON_OFF, SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE, STATE_HEAT,
STATE_IDLE, STATE_MANUAL, STATE_DRY,
STATE_FAN_ONLY, STATE_ECO, ClimateDevice)
from homeassistant.const import (
ATTR_TEMPERATURE, CONF_NAME, TEMP_CELSIUS)
from homeassistant.core import callback
from homeassistant.components.knx import DATA_KNX, ATTR_DISCOVER_DEVICES
CONF_SETPOINT_SHIFT_ADDRESS = 'setpoint_shift_address'
CONF_SETPOINT_SHIFT_STATE_ADDRESS = 'setpoint_shift_state_address'
CONF_SETPOINT_SHIFT_STEP = 'setpoint_shift_step'
CONF_SETPOINT_SHIFT_MAX = 'setpoint_shift_max'
CONF_SETPOINT_SHIFT_MIN = 'setpoint_shift_min'
CONF_TEMPERATURE_ADDRESS = 'temperature_address'
CONF_TARGET_TEMPERATURE_ADDRESS = 'target_temperature_address'
CONF_OPERATION_MODE_ADDRESS = 'operation_mode_address'
CONF_OPERATION_MODE_STATE_ADDRESS = 'operation_mode_state_address'
CONF_CONTROLLER_STATUS_ADDRESS = 'controller_status_address'
CONF_CONTROLLER_STATUS_STATE_ADDRESS = 'controller_status_state_address'
CONF_CONTROLLER_MODE_ADDRESS = 'controller_mode_address'
CONF_CONTROLLER_MODE_STATE_ADDRESS = 'controller_mode_state_address'
CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS = \
'operation_mode_frost_protection_address'
CONF_OPERATION_MODE_NIGHT_ADDRESS = 'operation_mode_night_address'
CONF_OPERATION_MODE_COMFORT_ADDRESS = 'operation_mode_comfort_address'
CONF_OPERATION_MODES = 'operation_modes'
CONF_ON_OFF_ADDRESS = 'on_off_address'
CONF_ON_OFF_STATE_ADDRESS = 'on_off_state_address'
CONF_MIN_TEMP = 'min_temp'
CONF_MAX_TEMP = 'max_temp'
DEFAULT_NAME = 'KNX Climate'
DEFAULT_SETPOINT_SHIFT_STEP = 0.5
DEFAULT_SETPOINT_SHIFT_MAX = 6
DEFAULT_SETPOINT_SHIFT_MIN = -6
DEPENDENCIES = ['knx']
# Map KNX operation modes to HA modes. This list might not be full.
OPERATION_MODES = {
# Map DPT 201.100 HVAC operating modes
"Frost Protection": STATE_MANUAL,
"Night": STATE_IDLE,
"Standby": STATE_ECO,
"Comfort": STATE_HEAT,
# Map DPT 201.104 HVAC control modes
"Fan only": STATE_FAN_ONLY,
"Dehumidification": STATE_DRY
}
OPERATION_MODES_INV = dict((
reversed(item) for item in OPERATION_MODES.items()))
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_TEMPERATURE_ADDRESS): cv.string,
vol.Required(CONF_TARGET_TEMPERATURE_ADDRESS): cv.string,
vol.Optional(CONF_SETPOINT_SHIFT_ADDRESS): cv.string,
vol.Optional(CONF_SETPOINT_SHIFT_STATE_ADDRESS): cv.string,
vol.Optional(CONF_SETPOINT_SHIFT_STEP,
default=DEFAULT_SETPOINT_SHIFT_STEP): vol.All(
float, vol.Range(min=0, max=2)),
vol.Optional(CONF_SETPOINT_SHIFT_MAX, default=DEFAULT_SETPOINT_SHIFT_MAX):
vol.All(int, vol.Range(min=0, max=32)),
vol.Optional(CONF_SETPOINT_SHIFT_MIN, default=DEFAULT_SETPOINT_SHIFT_MIN):
vol.All(int, vol.Range(min=-32, max=0)),
vol.Optional(CONF_OPERATION_MODE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_STATE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_STATUS_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_STATUS_STATE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_MODE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_MODE_STATE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_NIGHT_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_COMFORT_ADDRESS): cv.string,
vol.Optional(CONF_ON_OFF_ADDRESS): cv.string,
vol.Optional(CONF_ON_OFF_STATE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODES): vol.All(cv.ensure_list,
[vol.In(OPERATION_MODES)]),
vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
})
async def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Set up climate(s) for KNX platform."""
if discovery_info is not None:
async_add_entities_discovery(hass, discovery_info, async_add_entities)
else:
async_add_entities_config(hass, config, async_add_entities)
@callback
def async_add_entities_discovery(hass, discovery_info, async_add_entities):
"""Set up climates for KNX platform configured within platform."""
entities = []
for device_name in discovery_info[ATTR_DISCOVER_DEVICES]:
device = hass.data[DATA_KNX].xknx.devices[device_name]
entities.append(KNXClimate(device))
async_add_entities(entities)
@callback
def async_add_entities_config(hass, config, async_add_entities):
"""Set up climate for KNX platform configured within platform."""
import xknx
climate_mode = xknx.devices.ClimateMode(
hass.data[DATA_KNX].xknx,
name=config.get(CONF_NAME) + " Mode",
group_address_operation_mode=config.get(CONF_OPERATION_MODE_ADDRESS),
group_address_operation_mode_state=config.get(
CONF_OPERATION_MODE_STATE_ADDRESS),
group_address_controller_status=config.get(
CONF_CONTROLLER_STATUS_ADDRESS),
group_address_controller_status_state=config.get(
CONF_CONTROLLER_STATUS_STATE_ADDRESS),
group_address_controller_mode=config.get(
CONF_CONTROLLER_MODE_ADDRESS),
group_address_controller_mode_state=config.get(
CONF_CONTROLLER_MODE_STATE_ADDRESS),
group_address_operation_mode_protection=config.get(
CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS),
group_address_operation_mode_night=config.get(
CONF_OPERATION_MODE_NIGHT_ADDRESS),
group_address_operation_mode_comfort=config.get(
CONF_OPERATION_MODE_COMFORT_ADDRESS),
operation_modes=config.get(
CONF_OPERATION_MODES))
hass.data[DATA_KNX].xknx.devices.add(climate_mode)
climate = xknx.devices.Climate(
hass.data[DATA_KNX].xknx,
name=config.get(CONF_NAME),
group_address_temperature=config.get(CONF_TEMPERATURE_ADDRESS),
group_address_target_temperature=config.get(
CONF_TARGET_TEMPERATURE_ADDRESS),
group_address_setpoint_shift=config.get(CONF_SETPOINT_SHIFT_ADDRESS),
group_address_setpoint_shift_state=config.get(
CONF_SETPOINT_SHIFT_STATE_ADDRESS),
setpoint_shift_step=config.get(CONF_SETPOINT_SHIFT_STEP),
setpoint_shift_max=config.get(CONF_SETPOINT_SHIFT_MAX),
setpoint_shift_min=config.get(CONF_SETPOINT_SHIFT_MIN),
group_address_on_off=config.get(
CONF_ON_OFF_ADDRESS),
group_address_on_off_state=config.get(
CONF_ON_OFF_STATE_ADDRESS),
min_temp=config.get(CONF_MIN_TEMP),
max_temp=config.get(CONF_MAX_TEMP),
mode=climate_mode)
hass.data[DATA_KNX].xknx.devices.add(climate)
async_add_entities([KNXClimate(climate)])
class KNXClimate(ClimateDevice):
"""Representation of a KNX climate device."""
def __init__(self, device):
"""Initialize of a KNX climate device."""
self.device = device
self._unit_of_measurement = TEMP_CELSIUS
@property
def supported_features(self):
"""Return the list of supported features."""
support = SUPPORT_TARGET_TEMPERATURE
if self.device.mode.supports_operation_mode:
support |= SUPPORT_OPERATION_MODE
if self.device.supports_on_off:
support |= SUPPORT_ON_OFF
return support
async def async_added_to_hass(self):
"""Register callbacks to update hass after device was changed."""
async def after_update_callback(device):
"""Call after device was updated."""
await self.async_update_ha_state()
self.device.register_device_updated_cb(after_update_callback)
@property
def name(self):
"""Return the name of the KNX device."""
return self.device.name
@property
def available(self):
"""Return True if entity is available."""
return self.hass.data[DATA_KNX].connected
@property
def should_poll(self):
"""No polling needed within KNX."""
return False
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def current_temperature(self):
"""Return the current temperature."""
return self.device.temperature.value
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return self.device.setpoint_shift_step
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self.device.target_temperature.value
@property
def min_temp(self):
"""Return the minimum temperature."""
return self.device.target_temperature_min
@property
def max_temp(self):
"""Return the maximum temperature."""
return self.device.target_temperature_max
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
await self.device.set_target_temperature(temperature)
await self.async_update_ha_state()
@property
def current_operation(self):
"""Return current operation ie. heat, cool, idle."""
if self.device.mode.supports_operation_mode:
return OPERATION_MODES.get(self.device.mode.operation_mode.value)
return None
@property
def operation_list(self):
"""Return the list of available operation modes."""
return [OPERATION_MODES.get(operation_mode.value) for
operation_mode in
self.device.mode.operation_modes]
    async def async_set_operation_mode(self, operation_mode):
        """Set operation mode.

        Maps the given mode name back to a KNX ``HVACOperationMode`` via
        ``OPERATION_MODES_INV`` and applies it; does nothing when the
        device does not support operation modes.
        """
        if self.device.mode.supports_operation_mode:
            # Imported lazily so xknx is only loaded when actually needed.
            from xknx.knx import HVACOperationMode
            knx_operation_mode = HVACOperationMode(
                OPERATION_MODES_INV.get(operation_mode))
            await self.device.mode.set_operation_mode(knx_operation_mode)
            # Push the updated state to Home Assistant right away.
            await self.async_update_ha_state()
@property
def is_on(self):
"""Return true if the device is on."""
if self.device.supports_on_off:<|fim▁hole|> """Turn on."""
await self.device.turn_on()
async def async_turn_off(self):
"""Turn off."""
await self.device.turn_off()<|fim▁end|> | return self.device.is_on
return None
async def async_turn_on(self): |
<|file_name|>invalid_char.rs<|end_file_name|><|fim▁begin|>fn main() {
assert!(std::char::from_u32(-1_i32 as u32).is_none());<|fim▁hole|> 'a' => {true},
'b' => {false},
_ => {true},
};
}<|fim▁end|> | let _val = match unsafe { std::mem::transmute::<i32, char>(-1) } { //~ ERROR encountered 4294967295, but expected something less or equal to 1114111 |
<|file_name|>fib.py<|end_file_name|><|fim▁begin|>import sys
def genfib():
first, second = 0, 1
while True:
yield first
first, second = second, first + second
def fib(number):
fibs = genfib()
for i in xrange(number + 1):
retval = fibs.next()
return retval
<|fim▁hole|> with open(inputfile, 'r') as f:
for line in f:
if line:
print '{}'.format(fib(int(line.strip())))<|fim▁end|> |
if __name__ == '__main__':
inputfile = sys.argv[1] |
<|file_name|>control-bar.tsx<|end_file_name|><|fim▁begin|>import { Component, Prop, PropDidChange, Element } from '@stencil/core';
@Component({
tag: 'control-bar',<|fim▁hole|> styleUrl: 'control-bar.scss'
})
export class ControlBar {
@Prop() visible = true;
@Element() element: HTMLElement;
componentDidLoad() {
this.element.style.opacity = '1';
}
@PropDidChange('visible')
visibilityHandler(isVisible) {
this.element.style.opacity = isVisible ? '1' : '0';
}
render() {
return '';
}
}<|fim▁end|> | |
<|file_name|>sslocal.rs<|end_file_name|><|fim▁begin|>//! This is a binary running in the local environment
//!
//! You have to provide all needed configuration attributes via command line parameters,
//! or you could specify a configuration file. The format of configuration file is defined
//! in mod `config`.
<|fim▁hole|>
fn main() {
let mut app = Command::new("shadowsocks")
.version(shadowsocks_rust::VERSION)
.about("A fast tunnel proxy that helps you bypass firewalls. (https://shadowsocks.org)");
app = local::define_command_line_options(app);
let matches = app.get_matches();
local::main(&matches);
}<|fim▁end|> | use clap::Command;
use shadowsocks_rust::service::local; |
<|file_name|>gfile_z_time_test.go<|end_file_name|><|fim▁begin|>// Copyright GoFrame Author(https://goframe.org). All Rights Reserved.
//
// This Source Code Form is subject to the terms of the MIT License.
// If a copy of the MIT was not distributed with this file,
// You can obtain one at https://github.com/gogf/gf.
package gfile_test
import (
"os"
"testing"
"time"
"github.com/gogf/gf/v2/os/gfile"
"github.com/gogf/gf/v2/test/gtest"
)
func Test_MTime(t *testing.T) {
	gtest.C(t, func(t *gtest.T) {
		// Create an empty temp file and verify MTime agrees with os.Stat.
		var (
			name = "/testfile_t1.txt"
			info os.FileInfo
			err  error
		)
		createTestFile(name, "")
		defer delTestFiles(name)

		info, err = os.Stat(testpath() + name)
		t.Assert(err, nil)
		t.Assert(gfile.MTime(testpath()+name), info.ModTime())

		// An empty path yields the zero value.
		t.Assert(gfile.MTime(""), "")
	})
}
func Test_MTimeMillisecond(t *testing.T) {
gtest.C(t, func(t *gtest.T) {
var (
file1 = "/testfile_t1.txt"
err error<|fim▁hole|> )
createTestFile(file1, "")
defer delTestFiles(file1)
fileobj, err = os.Stat(testpath() + file1)
t.Assert(err, nil)
time.Sleep(time.Millisecond * 100)
t.AssertGE(
gfile.MTimestampMilli(testpath()+file1),
fileobj.ModTime().UnixNano()/1000000,
)
t.Assert(gfile.MTimestampMilli(""), -1)
})
}<|fim▁end|> | fileobj os.FileInfo |
<|file_name|>py3.py<|end_file_name|><|fim▁begin|>import sys
class outPip(object):
def __init__(self, fileDir):
self.fileDir = fileDir
self.console = sys.stdout
def write(self, s):
self.console.write(s)
with open(self.fileDir, 'a') as f: f.write(s)
def flush(self):
self.console.flush()
new_input = input
def inPip(fileDir):
def _input(hint):
s = new_input(hint)
with open(fileDir, 'a') as f: f.write(s)
return s
return _input
sys.stdout = outPip('out.log')
input = inPip('out.log')
<|fim▁hole|>input('yo')<|fim▁end|> |
print('This will appear on your console and your file.')
print('So is this line.')
|
<|file_name|>5288513.cpp<|end_file_name|><|fim▁begin|>//Language: GNU C++
/** Be name Khoda **/
#include <iostream>
#include <iomanip>
#include <fstream>
#include <sstream>
#include <map>
#include <vector>
#include <list>
#include <set>
#include <queue>
#include <deque>
#include <algorithm>
#include <bitset>
#include <cstring>
#include <cstdio>
#include <cstdlib>
#include <cctype>
#include <cmath>
#include <climits>
using namespace std;
#define ll long long
#define un unsigned
#define pii pair<ll, ll>
#define pb push_back
#define mp make_pair
#define VAL(x) #x << " = " << x << " "
#define SQR(a) ((a) * (a))
#define SZ(x) ((int) x.size())
#define ALL(x) x.begin(), x.end()
#define CLR(x, a) memset(x, a, sizeof x)
#define FOREACH(i, x) for(__typeof((x).begin()) i = (x).begin(); i != (x).end(); i ++)
#define X first<|fim▁hole|>
//#define cout fout
//#define cin fin
//ifstream fin("problem.in");
//ofstream fout("problem.out");
const int MAXN = 100 * 1000 + 10, INF = INT_MAX, MOD = 1e9 + 7;
ll a[MAXN];
int main ()
{
ios::sync_with_stdio(false);
ll n, m;
cin >> n >> m;
for (int i = 0, t; i < m; i ++)
cin >> t >> a[i];
sort(a, a + m, greater<int>());
ll ans = 0;
for (ll i = 0; i < m; i ++)
{
if (!(i % 2) && n - 1 < i * (i + 1) / 2) break;
if ((i % 2) && n < SQR(i + 1) / 2) break;
ans += a[i];
}
cout << ans << endl;
return 0;
}<|fim▁end|> | #define Y second
#define PI (3.141592654) |
<|file_name|>preferences-assistant.js<|end_file_name|><|fim▁begin|>function PreferencesAssistant() {
/* this is the creator function for your scene assistant object. It will be passed all the
additional parameters (after the scene name) that were passed to pushScene. The reference
to the scene controller (this.controller) has not be established yet, so any initialization
that needs the scene controller should be done in the setup function below. */
this.cookie = new Mojo.Model.Cookie('prefs');
this.model = this.cookie.get();
if (!this.model) {
this.model = {
useOldInterface: false
};
this.cookie.put(this.model);
}
}
PreferencesAssistant.prototype.setup = function() {
/* this function is for setup tasks that have to happen when the scene is first created */
/* use Mojo.View.render to render view templates and add them to the scene, if needed */
/* setup widgets here */
this.controller.setupWidget(
'oldInterfaceToggle',
{
modelProperty: 'useOldInterface',<|fim▁hole|> },
this.model
);
/* add event handlers to listen to events from widgets */
Mojo.Event.listen(
this.controller.get('oldInterfaceToggle'),
Mojo.Event.propertyChange,
this.handlePrefsChange.bind(this)
);
};
PreferencesAssistant.prototype.handlePrefsChange = function(event) {
	// Persist the in-memory preference model whenever a widget changes it.
	this.cookie.put(this.model);
};
PreferencesAssistant.prototype.activate = function(event) {
/* put in event handlers here that should only be in effect when this scene is active. For
example, key handlers that are observing the document */
};
PreferencesAssistant.prototype.deactivate = function(event) {
/* remove any event handlers you added in activate and do any other cleanup that should happen before
this scene is popped or another scene is pushed on top */
};
PreferencesAssistant.prototype.cleanup = function(event) {
	/* this function should do any cleanup needed before the scene is destroyed as
	   a result of being popped off the scene stack */
	// NOTE(review): .bind(this) returns a NEW function object on every call,
	// so the handler passed to stopListening here is not the same reference
	// that listen() registered in setup(); this call therefore never detaches
	// the listener. Fix: create the bound handler once (e.g. in the
	// constructor) and pass that same reference to both listen() and
	// stopListening().
	Mojo.Event.stopListening(
		this.controller.get('oldInterfaceToggle'),
		Mojo.Event.propertyChange,
		this.handlePrefsChange.bind(this)
	);
};<|fim▁end|> | disabledProperty: 'oldInterfaceToggleDisabled' |
<|file_name|>test_reactor.rs<|end_file_name|><|fim▁begin|>use tokio::io::Ready;
use tokio::reactor::{self, Config, Reactor, Task, Tick};
use std::io;
use std::sync::mpsc::{self, Sender};
#[test]
fn test_internal_source_state_is_cleaned_up() {
    use mio::{Evented, EventSet, Poll, PollOpt, Token};
    // Minimal no-op event source: every registration call just succeeds.
    struct Foo;
    impl Evented for Foo {
        fn register(&self, _: &Poll, _: Token, _: EventSet, _: PollOpt) -> io::Result<()> {
            Ok(())
        }
        fn reregister(&self, _: &Poll, _: Token, _: EventSet, _: PollOpt) -> io::Result<()> {
            Ok(())
        }
        fn deregister(&self, _: &Poll) -> io::Result<()> {
            Ok(())
        }
    }
    let config = Config::new()
        .max_sources(1);
    // Create a reactor that will only accept a single source
    let reactor = Reactor::new(config).unwrap();
    reactor.handle().oneshot(|| {
        let foo = Foo;
        // Run this a few times, because even if we request a slab of size 1,
        // there could be greater capacity (usually 2 given rounding to the
        // nearest power of 2)
        for _ in 0..10 {
            // Each `source` handle drops at the end of the iteration; the
            // registration succeeding every time (despite max_sources(1))
            // shows the reactor reclaims the internal slot on drop.
            let source = reactor::register_source(&foo, Ready::readable());
            assert!(source.is_ok());
        }
        reactor::shutdown();
    });
    assert!(reactor.run().is_ok());
}
#[test]
fn test_returning_error_from_task_terminates() {
struct MyTask {
tx: Sender<()>,
}
impl Task for MyTask {
fn tick(&mut self) -> io::Result<Tick> {
Err(io::Error::new(io::ErrorKind::Other, "boom"))
}
}
impl Drop for MyTask {
fn drop(&mut self) {
let _ = self.tx.send(());
}
}
let reactor = Reactor::default().unwrap();
let handle = reactor.handle();
reactor.spawn();<|fim▁hole|>
let (tx, rx) = mpsc::channel();
handle.schedule(MyTask { tx: tx });
// Receive the cleanup notice
rx.recv().unwrap();
handle.shutdown();
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>ES_INDEX_NAME = "brainiak"<|fim▁hole|>ES_TYPE_NAME = "query"<|fim▁end|> | |
<|file_name|>f1-tests.ts<|end_file_name|><|fim▁begin|>import F1 = require("f1");
var ui = F1();
ui.states({
out: {
itemToAnimate1: {
variableToAnimate: 0
},
itemToAnimate2: {
variableToAnimate: 0
}
},
idle: {
itemToAnimate1: {
variableToAnimate: 1
},
itemToAnimate2: {
variableToAnimate: 2
}
}
});<|fim▁hole|>
ui.transitions( [
{ from: 'idle', to: 'rollOver', animation: { duration: 0.25 } },
{ from: 'rollOver', to: 'idle', animation: { duration: 0.1 } }
]);<|fim▁end|> | |
<|file_name|>model_get_universe_regions_region_id_ok.go<|end_file_name|><|fim▁begin|><|fim▁hole|>type GetUniverseRegionsRegionIdOk struct {
/*
constellations array */
Constellations []int32 `json:"constellations,omitempty"`
/*
description string */
Description string `json:"description,omitempty"`
/*
name string */
Name string `json:"name,omitempty"`
/*
region_id integer */
RegionId int32 `json:"region_id,omitempty"`
}<|fim▁end|> | package esilegacy
/*
200 ok object */ |
<|file_name|>ext-settings_menu.js<|end_file_name|><|fim▁begin|>ace.define("ace/ext/menu_tools/overlay_page",[], function(require, exports, module) {
'use strict';
var dom = require("../../lib/dom");
var cssText = "#ace_settingsmenu, #kbshortcutmenu {\
background-color: #F7F7F7;\
color: black;\
box-shadow: -5px 4px 5px rgba(126, 126, 126, 0.55);\
padding: 1em 0.5em 2em 1em;\
overflow: auto;\
position: absolute;\
margin: 0;\
bottom: 0;\
right: 0;\
top: 0;\
z-index: 9991;\
cursor: default;\
}\
.ace_dark #ace_settingsmenu, .ace_dark #kbshortcutmenu {\
box-shadow: -20px 10px 25px rgba(126, 126, 126, 0.25);\
background-color: rgba(255, 255, 255, 0.6);\
color: black;\
}\
.ace_optionsMenuEntry:hover {\
background-color: rgba(100, 100, 100, 0.1);\
transition: all 0.3s\
}\
.ace_closeButton {\
background: rgba(245, 146, 146, 0.5);\
border: 1px solid #F48A8A;\
border-radius: 50%;\
padding: 7px;\
position: absolute;\
right: -8px;\
top: -8px;\
z-index: 100000;\
}\
.ace_closeButton{\
background: rgba(245, 146, 146, 0.9);\
}\
.ace_optionsMenuKey {\
color: darkslateblue;\
font-weight: bold;\
}\
.ace_optionsMenuCommand {\
color: darkcyan;\
font-weight: normal;\
}\
.ace_optionsMenuEntry input, .ace_optionsMenuEntry button {\
vertical-align: middle;\
}\
.ace_optionsMenuEntry button[ace_selected_button=true] {\
background: #e7e7e7;\
box-shadow: 1px 0px 2px 0px #adadad inset;\
border-color: #adadad;\
}\
.ace_optionsMenuEntry button {\
background: white;\
border: 1px solid lightgray;\
margin: 0px;\
}\
.ace_optionsMenuEntry button:hover{\
background: #f0f0f0;\
}";
dom.importCssString(cssText, "settings_menu.css", false);
module.exports.overlayPage = function overlayPage(editor, contentElement, callback) {
    // Shows contentElement inside a full-screen modal overlay. Returns an
    // object exposing close() and setIgnoreFocusOut(); the optional
    // `callback` runs once the overlay has been closed.
    var closer = document.createElement('div');
    var ignoreFocusOut = false;
    // Close the overlay when Escape is pressed anywhere in the document.
    function documentEscListener(e) {
        if (e.keyCode === 27) {
            close();
        }
    }
    function close() {
        if (!closer) return; // already closed
        document.removeEventListener('keydown', documentEscListener);
        closer.parentNode.removeChild(closer);
        if (editor) {
            editor.focus();
        }
        closer = null; // mark closed so close() is idempotent
        callback && callback();
    }
    // When `ignore` is set, pointer events skip the backdrop and only the
    // content receives clicks, so clicking outside no longer dismisses.
    function setIgnoreFocusOut(ignore) {
        ignoreFocusOut = ignore;
        if (ignore) {
            closer.style.pointerEvents = "none";
            contentElement.style.pointerEvents = "auto";
        }
    }
    // Fixed full-viewport backdrop; dimmed only when tied to an editor.
    closer.style.cssText = 'margin: 0; padding: 0; ' +
        'position: fixed; top:0; bottom:0; left:0; right:0;' +
        'z-index: 9990; ' +
        (editor ? 'background-color: rgba(0, 0, 0, 0.3);' : '');
    // Clicking the backdrop dismisses the overlay (unless suppressed above).
    closer.addEventListener('click', function(e) {
        if (!ignoreFocusOut) {
            close();
        }
    });
    document.addEventListener('keydown', documentEscListener);
    // Stop clicks inside the content from bubbling to the backdrop handler.
    contentElement.addEventListener('click', function (e) {
        e.stopPropagation();
    });
    closer.appendChild(contentElement);
    document.body.appendChild(closer);
    if (editor) {
        editor.blur();
    }
    return {
        close: close,
        setIgnoreFocusOut: setIgnoreFocusOut
    };
};
});
ace.define("ace/ext/modelist",[], function(require, exports, module) {
"use strict";
var modes = [];
function getModeForPath(path) {
var mode = modesByName.text;
var fileName = path.split(/[\/\\]/).pop();
for (var i = 0; i < modes.length; i++) {
if (modes[i].supportsFile(fileName)) {
mode = modes[i];
break;
}
}
return mode;
}
var Mode = function(name, caption, extensions) {
this.name = name;
this.caption = caption;
this.mode = "ace/mode/" + name;
this.extensions = extensions;
var re;
if (/\^/.test(extensions)) {
re = extensions.replace(/\|(\^)?/g, function(a, b){
return "$|" + (b ? "^" : "^.*\\.");
}) + "$";
} else {
re = "^.*\\.(" + extensions + ")$";
}
this.extRe = new RegExp(re, "gi");
};
Mode.prototype.supportsFile = function(filename) {
return filename.match(this.extRe);
};
var supportedModes = {
ABAP: ["abap"],
ABC: ["abc"],
ActionScript:["as"],
ADA: ["ada|adb"],
Alda: ["alda"],
Apache_Conf: ["^htaccess|^htgroups|^htpasswd|^conf|htaccess|htgroups|htpasswd"],
Apex: ["apex|cls|trigger|tgr"],
AQL: ["aql"],
AsciiDoc: ["asciidoc|adoc"],
ASL: ["dsl|asl|asl.json"],
Assembly_x86:["asm|a"],
AutoHotKey: ["ahk"],
BatchFile: ["bat|cmd"],
C_Cpp: ["cpp|c|cc|cxx|h|hh|hpp|ino"],
C9Search: ["c9search_results"],
Cirru: ["cirru|cr"],
Clojure: ["clj|cljs"],
Cobol: ["CBL|COB"],
coffee: ["coffee|cf|cson|^Cakefile"],
ColdFusion: ["cfm"],
Crystal: ["cr"],
CSharp: ["cs"],
Csound_Document: ["csd"],
Csound_Orchestra: ["orc"],
Csound_Score: ["sco"],
CSS: ["css"],
Curly: ["curly"],
D: ["d|di"],
Dart: ["dart"],
Diff: ["diff|patch"],
Dockerfile: ["^Dockerfile"],
Dot: ["dot"],
Drools: ["drl"],
Edifact: ["edi"],
Eiffel: ["e|ge"],
EJS: ["ejs"],
Elixir: ["ex|exs"],
Elm: ["elm"],
Erlang: ["erl|hrl"],
Forth: ["frt|fs|ldr|fth|4th"],
Fortran: ["f|f90"],
FSharp: ["fsi|fs|ml|mli|fsx|fsscript"],
FSL: ["fsl"],
FTL: ["ftl"],
Gcode: ["gcode"],
Gherkin: ["feature"],
Gitignore: ["^.gitignore"],
Glsl: ["glsl|frag|vert"],
Gobstones: ["gbs"],
golang: ["go"],
GraphQLSchema: ["gql"],
Groovy: ["groovy"],
HAML: ["haml"],
Handlebars: ["hbs|handlebars|tpl|mustache"],
Haskell: ["hs"],
Haskell_Cabal: ["cabal"],
haXe: ["hx"],
Hjson: ["hjson"],
HTML: ["html|htm|xhtml|vue|we|wpy"],
HTML_Elixir: ["eex|html.eex"],
HTML_Ruby: ["erb|rhtml|html.erb"],
INI: ["ini|conf|cfg|prefs"],
Io: ["io"],
Jack: ["jack"],
Jade: ["jade|pug"],
Java: ["java"],
JavaScript: ["js|jsm|jsx"],
JSON: ["json"],
JSON5: ["json5"],
JSONiq: ["jq"],
JSP: ["jsp"],
JSSM: ["jssm|jssm_state"],
JSX: ["jsx"],
Julia: ["jl"],
Kotlin: ["kt|kts"],
LaTeX: ["tex|latex|ltx|bib"],
Latte: ["latte"],
LESS: ["less"],
Liquid: ["liquid"],
Lisp: ["lisp"],
LiveScript: ["ls"],
LogiQL: ["logic|lql"],
LSL: ["lsl"],
Lua: ["lua"],
LuaPage: ["lp"],
Lucene: ["lucene"],
Makefile: ["^Makefile|^GNUmakefile|^makefile|^OCamlMakefile|make"],
Markdown: ["md|markdown"],
Mask: ["mask"],
MATLAB: ["matlab"],
Maze: ["mz"],
MediaWiki: ["wiki|mediawiki"],
MEL: ["mel"],
MIPS: ["s|asm"],
MIXAL: ["mixal"],
MUSHCode: ["mc|mush"],
MySQL: ["mysql"],
Nginx: ["nginx|conf"],
Nim: ["nim"],
Nix: ["nix"],
NSIS: ["nsi|nsh"],
Nunjucks: ["nunjucks|nunjs|nj|njk"],
ObjectiveC: ["m|mm"],
OCaml: ["ml|mli"],
Pascal: ["pas|p"],
Perl: ["pl|pm"],
pgSQL: ["pgsql"],
PHP: ["php|inc|phtml|shtml|php3|php4|php5|phps|phpt|aw|ctp|module"],
PHP_Laravel_blade: ["blade.php"],
Pig: ["pig"],
Powershell: ["ps1"],
Praat: ["praat|praatscript|psc|proc"],
Prisma: ["prisma"],
Prolog: ["plg|prolog"],
Properties: ["properties"],
Protobuf: ["proto"],
Puppet: ["epp|pp"],
Python: ["py"],
QML: ["qml"],
R: ["r"],
Raku: ["raku|rakumod|rakutest|p6|pl6|pm6"],
Razor: ["cshtml|asp"],
RDoc: ["Rd"],
Red: ["red|reds"],
RHTML: ["Rhtml"],
RST: ["rst"],
Ruby: ["rb|ru|gemspec|rake|^Guardfile|^Rakefile|^Gemfile"],
Rust: ["rs"],
SASS: ["sass"],
SCAD: ["scad"],
Scala: ["scala|sbt"],
Scheme: ["scm|sm|rkt|oak|scheme"],
Scrypt: ["scrypt"],
SCSS: ["scss"],
SH: ["sh|bash|^.bashrc"],
SJS: ["sjs"],
Slim: ["slim|skim"],
Smarty: ["smarty|tpl"],
Smithy: ["smithy"],
snippets: ["snippets"],
Soy_Template:["soy"],
Space: ["space"],
SQL: ["sql"],
SQLServer: ["sqlserver"],
Stylus: ["styl|stylus"],
SVG: ["svg"],
Swift: ["swift"],
Tcl: ["tcl"],
Terraform: ["tf", "tfvars", "terragrunt"],
Tex: ["tex"],
Text: ["txt"],
Textile: ["textile"],
Toml: ["toml"],
TSX: ["tsx"],
Twig: ["twig|swig"],
Typescript: ["ts|typescript|str"],
Vala: ["vala"],
VBScript: ["vbs|vb"],
Velocity: ["vm"],
Verilog: ["v|vh|sv|svh"],
VHDL: ["vhd|vhdl"],
Visualforce: ["vfp|component|page"],
Wollok: ["wlk|wpgm|wtest"],
XML: ["xml|rdf|rss|wsdl|xslt|atom|mathml|mml|xul|xbl|xaml"],
XQuery: ["xq"],
YAML: ["yaml|yml"],
Zeek: ["zeek|bro"],
Django: ["html"]
};
var nameOverrides = {
ObjectiveC: "Objective-C",
CSharp: "C#",
golang: "Go",
C_Cpp: "C and C++",
Csound_Document: "Csound Document",
Csound_Orchestra: "Csound",
Csound_Score: "Csound Score",
coffee: "CoffeeScript",
HTML_Ruby: "HTML (Ruby)",
HTML_Elixir: "HTML (Elixir)",
FTL: "FreeMarker",
PHP_Laravel_blade: "PHP (Blade Template)",
Perl6: "Perl 6",
AutoHotKey: "AutoHotkey / AutoIt"
};
var modesByName = {};
for (var name in supportedModes) {
var data = supportedModes[name];
var displayName = (nameOverrides[name] || name).replace(/_/g, " ");
var filename = name.toLowerCase();
var mode = new Mode(filename, displayName, data[0]);
modesByName[filename] = mode;
modes.push(mode);
}
module.exports = {
getModeForPath: getModeForPath,
modes: modes,
modesByName: modesByName
};
});
ace.define("ace/ext/themelist",[], function(require, exports, module) {
"use strict";
var themeData = [
["Chrome" ],
["Clouds" ],
["Crimson Editor" ],
["Dawn" ],
["Dreamweaver" ],
["Eclipse" ],
["GitHub" ],
["IPlastic" ],
["Solarized Light"],
["TextMate" ],
["Tomorrow" ],
["Xcode" ],
["Kuroir"],
["KatzenMilch"],
["SQL Server" ,"sqlserver" , "light"],
["Ambiance" ,"ambiance" , "dark"],
["Chaos" ,"chaos" , "dark"],
["Clouds Midnight" ,"clouds_midnight" , "dark"],
["Dracula" ,"" , "dark"],
["Cobalt" ,"cobalt" , "dark"],
["Gruvbox" ,"gruvbox" , "dark"],
["Green on Black" ,"gob" , "dark"],
["idle Fingers" ,"idle_fingers" , "dark"],
["krTheme" ,"kr_theme" , "dark"],
["Merbivore" ,"merbivore" , "dark"],
["Merbivore Soft" ,"merbivore_soft" , "dark"],
["Mono Industrial" ,"mono_industrial" , "dark"],
["Monokai" ,"monokai" , "dark"],
["Nord Dark" ,"nord_dark" , "dark"],
["One Dark" ,"one_dark" , "dark"],
["Pastel on dark" ,"pastel_on_dark" , "dark"],
["Solarized Dark" ,"solarized_dark" , "dark"],
["Terminal" ,"terminal" , "dark"],
["Tomorrow Night" ,"tomorrow_night" , "dark"],
["Tomorrow Night Blue" ,"tomorrow_night_blue" , "dark"],
["Tomorrow Night Bright","tomorrow_night_bright" , "dark"],
["Tomorrow Night 80s" ,"tomorrow_night_eighties" , "dark"],
["Twilight" ,"twilight" , "dark"],
["Vibrant Ink" ,"vibrant_ink" , "dark"]
];
exports.themesByName = {};
exports.themes = themeData.map(function(data) {
var name = data[1] || data[0].replace(/ /g, "_").toLowerCase();
var theme = {
caption: data[0],
theme: "ace/theme/" + name,
isDark: data[2] == "dark",
name: name
};
exports.themesByName[name] = theme;
return theme;
});
});
ace.define("ace/ext/options",[], function(require, exports, module) {
"use strict";
require("./menu_tools/overlay_page");
var dom = require("../lib/dom");
var oop = require("../lib/oop");
var config = require("../config");
var EventEmitter = require("../lib/event_emitter").EventEmitter;
var buildDom = dom.buildDom;
var modelist = require("./modelist");
var themelist = require("./themelist");
var themes = { Bright: [], Dark: [] };
themelist.themes.forEach(function(x) {
themes[x.isDark ? "Dark" : "Bright"].push({ caption: x.caption, value: x.theme });
});
var modes = modelist.modes.map(function(x){
return { caption: x.caption, value: x.mode };
});
var optionGroups = {
Main: {
Mode: {
path: "mode",
type: "select",
items: modes
},
Theme: {
path: "theme",
type: "select",
items: themes
},
"Keybinding": {
type: "buttonBar",
path: "keyboardHandler",
items: [
{ caption : "Ace", value : null },
{ caption : "Vim", value : "ace/keyboard/vim" },
{ caption : "Emacs", value : "ace/keyboard/emacs" },
{ caption : "Sublime", value : "ace/keyboard/sublime" },
{ caption : "VSCode", value : "ace/keyboard/vscode" }
]
},
"Font Size": {
path: "fontSize",
type: "number",
defaultValue: 12,
defaults: [
{caption: "12px", value: 12},
{caption: "24px", value: 24}
]
},
"Soft Wrap": {
type: "buttonBar",
path: "wrap",
items: [
{ caption : "Off", value : "off" },
{ caption : "View", value : "free" },
{ caption : "margin", value : "printMargin" },
{ caption : "40", value : "40" }
]
},
"Cursor Style": {
path: "cursorStyle",
items: [
{ caption : "Ace", value : "ace" },
{ caption : "Slim", value : "slim" },
{ caption : "Smooth", value : "smooth" },
{ caption : "Smooth And Slim", value : "smooth slim" },
{ caption : "Wide", value : "wide" }
]
},
"Folding": {
path: "foldStyle",
items: [
{ caption : "Manual", value : "manual" },
{ caption : "Mark begin", value : "markbegin" },
{ caption : "Mark begin and end", value : "markbeginend" }
]
},
"Soft Tabs": [{
path: "useSoftTabs"
}, {
ariaLabel: "Tab Size",
path: "tabSize",
type: "number",
values: [2, 3, 4, 8, 16]
}],
"Overscroll": {
type: "buttonBar",
path: "scrollPastEnd",
items: [
{ caption : "None", value : 0 },
{ caption : "Half", value : 0.5 },
{ caption : "Full", value : 1 }
]
}
},
More: {
"Atomic soft tabs": {
path: "navigateWithinSoftTabs"
},
"Enable Behaviours": {
path: "behavioursEnabled"
},
"Wrap with quotes": {
path: "wrapBehavioursEnabled"
},
"Enable Auto Indent": {
path: "enableAutoIndent"
},
"Full Line Selection": {
type: "checkbox",
values: "text|line",
path: "selectionStyle"
},
"Highlight Active Line": {
path: "highlightActiveLine"
},
"Show Invisibles": {
path: "showInvisibles"
},
"Show Indent Guides": {
path: "displayIndentGuides"
},
"Persistent HScrollbar": {
path: "hScrollBarAlwaysVisible"
},
"Persistent VScrollbar": {
path: "vScrollBarAlwaysVisible"
},
"Animate scrolling": {
path: "animatedScroll"
},
"Show Gutter": {
path: "showGutter"
},
"Show Line Numbers": {
path: "showLineNumbers"
},
"Relative Line Numbers": {
path: "relativeLineNumbers"
},
"Fixed Gutter Width": {
path: "fixedWidthGutter"
},
"Show Print Margin": [{
path: "showPrintMargin"
}, {
ariaLabel: "Print Margin",
type: "number",
path: "printMarginColumn"
}],
"Indented Soft Wrap": {
path: "indentedSoftWrap"
},
"Highlight selected word": {
path: "highlightSelectedWord"
},
"Fade Fold Widgets": {
path: "fadeFoldWidgets"
},
"Use textarea for IME": {
path: "useTextareaForIME"
},
"Merge Undo Deltas": {
path: "mergeUndoDeltas",
items: [
{ caption : "Always", value : "always" },
{ caption : "Never", value : "false" },
{ caption : "Timed", value : "true" }
]
},
"Elastic Tabstops": {
path: "useElasticTabstops"
},
"Incremental Search": {
path: "useIncrementalSearch"
},
"Read-only": {
path: "readOnly"
},
"Copy without selection": {
path: "copyWithEmptySelection"
},
"Live Autocompletion": {
path: "enableLiveAutocompletion"
}
}
};
var OptionPanel = function(editor, element) {
this.editor = editor;
this.container = element || document.createElement("div");
this.groups = [];
this.options = {};
};
(function() {
oop.implement(this, EventEmitter);
this.add = function(config) {
if (config.Main)
oop.mixin(optionGroups.Main, config.Main);
if (config.More)
oop.mixin(optionGroups.More, config.More);
};
this.render = function() {
this.container.innerHTML = "";
buildDom(["table", {role: "presentation", id: "controls"},
this.renderOptionGroup(optionGroups.Main),
["tr", null, ["td", {colspan: 2},
["table", {role: "presentation", id: "more-controls"},
this.renderOptionGroup(optionGroups.More)
]
]],
["tr", null, ["td", {colspan: 2}, "version " + config.version]]
], this.container);
};
this.renderOptionGroup = function(group) {
return Object.keys(group).map(function(key, i) {
var item = group[key];
if (!item.position)
item.position = i / 10000;
if (!item.label)
item.label = key;
return item;
}).sort(function(a, b) {
return a.position - b.position;
}).map(function(item) {
return this.renderOption(item.label, item);
}, this);
};
this.renderOptionControl = function(key, option) {
var self = this;
if (Array.isArray(option)) {
return option.map(function(x) {
return self.renderOptionControl(key, x);
});
}
var control;
var value = self.getOption(option);
if (option.values && option.type != "checkbox") {
if (typeof option.values == "string")
option.values = option.values.split("|");
option.items = option.values.map(function(v) {
return { value: v, name: v };
});
}
if (option.type == "buttonBar") {
control = ["div", {role: "group", "aria-labelledby": option.path + "-label"}, option.items.map(function(item) {
return ["button", {
value: item.value,
ace_selected_button: value == item.value,
'aria-pressed': value == item.value,
onclick: function() {
self.setOption(option, item.value);
var nodes = this.parentNode.querySelectorAll("[ace_selected_button]");
for (var i = 0; i < nodes.length; i++) {
nodes[i].removeAttribute("ace_selected_button");
nodes[i].setAttribute("aria-pressed", false);
}
this.setAttribute("ace_selected_button", true);
this.setAttribute("aria-pressed", true);
}
}, item.desc || item.caption || item.name];
})];
} else if (option.type == "number") {
control = ["input", {type: "number", value: value || option.defaultValue, style:"width:3em", oninput: function() {
self.setOption(option, parseInt(this.value));
}}];
if (option.ariaLabel) {
control[1]["aria-label"] = option.ariaLabel;
} else {
control[1].id = key;
}
if (option.defaults) {
control = [control, option.defaults.map(function(item) {
return ["button", {onclick: function() {
var input = this.parentNode.firstChild;
input.value = item.value;
input.oninput();
}}, item.caption];
})];
}
} else if (option.items) {
var buildItems = function(items) {
return items.map(function(item) {
return ["option", { value: item.value || item.name }, item.desc || item.caption || item.name];
});
};
var items = Array.isArray(option.items)
? buildItems(option.items)
: Object.keys(option.items).map(function(key) {
return ["optgroup", {"label": key}, buildItems(option.items[key])];
});
control = ["select", { id: key, value: value, onchange: function() {
self.setOption(option, this.value);
} }, items];
} else {
if (typeof option.values == "string")
option.values = option.values.split("|");
if (option.values) value = value == option.values[1];
control = ["input", { type: "checkbox", id: key, checked: value || null, onchange: function() {
var value = this.checked;
if (option.values) value = option.values[value ? 1 : 0];
self.setOption(option, value);
}}];
if (option.type == "checkedNumber") {
control = [control, []];
}
}<|fim▁hole|> };
this.renderOption = function(key, option) {
if (option.path && !option.onchange && !this.editor.$options[option.path])
return;
var path = Array.isArray(option) ? option[0].path : option.path;
this.options[path] = option;
var safeKey = "-" + path;
var safeId = path + "-label";
var control = this.renderOptionControl(safeKey, option);
return ["tr", {class: "ace_optionsMenuEntry"}, ["td",
["label", {for: safeKey, id: safeId}, key]
], ["td", control]];
};
this.setOption = function(option, value) {
if (typeof option == "string")
option = this.options[option];
if (value == "false") value = false;
if (value == "true") value = true;
if (value == "null") value = null;
if (value == "undefined") value = undefined;
if (typeof value == "string" && parseFloat(value).toString() == value)
value = parseFloat(value);
if (option.onchange)
option.onchange(value);
else if (option.path)
this.editor.setOption(option.path, value);
this._signal("setOption", {name: option.path, value: value});
};
this.getOption = function(option) {
if (option.getValue)
return option.getValue();
return this.editor.getOption(option.path);
};
}).call(OptionPanel.prototype);
exports.OptionPanel = OptionPanel;
});
ace.define("ace/ext/settings_menu",[], function(require, exports, module) {
"use strict";
var OptionPanel = require("./options").OptionPanel;
var overlayPage = require('./menu_tools/overlay_page').overlayPage;
function showSettingsMenu(editor) {
if (!document.getElementById('ace_settingsmenu')) {
var options = new OptionPanel(editor);
options.render();
options.container.id = "ace_settingsmenu";
overlayPage(editor, options.container);
options.container.querySelector("select,input,button,checkbox").focus();
}
}
module.exports.init = function() {
var Editor = require("../editor").Editor;
Editor.prototype.showSettingsMenu = function() {
showSettingsMenu(this);
};
};
}); (function() {
ace.require(["ace/ext/settings_menu"], function(m) {
if (typeof module == "object" && typeof exports == "object" && module) {
module.exports = m;
}
});
})();<|fim▁end|> | return control; |
<|file_name|>index.py<|end_file_name|><|fim▁begin|>__author__ = 'oier'
import json
from flask import Flask, make_response
app = Flask(__name__)
import seaborn as sns
import numpy as np
import pandas as pd
import os
from datetime import datetime
import matplotlib.pyplot as plt
import sys
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from io import StringIO
from sklearn import linear_model
from models import InputForm, ValueSelector
from flask import Flask, render_template, request
from compute import compute, load_data, line_plot
@app.route('/')
def index():
return 'Hello World!'
<|fim▁hole|> data = load_data()
form = ValueSelector(request)
form.value.choices = [(k,i) for k,i in enumerate(data.columns)]
return(form)
@app.route('/blood', methods=['GET', 'POST'])
def blood():
    """Blood-values page: on POST, render a line plot of the selected column."""
    form = form_values(request.form)
    if request.method == 'POST':# and form.validate():
        # NOTE(review): validation is commented out above, so any POSTed
        # value reaches line_plot unchecked — confirm this is intentional.
        result = line_plot(form.value.data)
    else:
        print("False")
        result = None
    return render_template('plot.html',
                           form=form, result=result)
@app.route('/vib1', methods=['GET', 'POST'])
def vib1():
    """Vibration demo page: compute a plot from validated form input on POST."""
    #form = InputForm(request.form)
    form = form_values(request.form)
    # NOTE(review): form_values builds a ValueSelector (see its body above),
    # yet compute() reads A/b/w/T fields as an InputForm would provide —
    # verify the intended form class is used here.
    if request.method == 'POST' and form.validate():
        result = compute(form.A.data, form.b.data,
                         form.w.data, form.T.data)
    else:
        result = None
    return render_template('view_plain.html',
                           form=form, result=result)
if __name__ == '__main__':
app.run()<|fim▁end|> | def form_values(request): |
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|><|fim▁hole|> app : './app/app.js'
},
output: {
filename: '[name]_bundle.js',
path: './dist'
}
};<|fim▁end|> | module.exports = {
entry: { |
<|file_name|>requests.rs<|end_file_name|><|fim▁begin|>use std::slice::Iter;
use super::{ Request };
pub struct Requests<'a> {
request: Option<&'a Request>,
requests: Iter<'a, Request>,
}
impl<'a> Requests<'a> {
    /// Creates an iterator over `requests` that additionally yields the
    /// stored `request` once the slice iterator is exhausted (see `next`).
    pub fn new(request: &'a Request, requests: &'a Vec<Request>) -> Requests<'a> {
        Requests {
            request: Some(request),
            requests: requests.iter(),
        }
    }
}
impl<'a> Iterator for Requests<'a> {
type Item = &'a Request;
fn next(&mut self) -> Option<&'a Request> {
match (self.requests.next(), self.request) {
(None, None) => None,
(None, request) => {<|fim▁hole|> },
(request, _) => request,
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
match (self.request, self.requests.size_hint()) {
(None, size_hint) => size_hint,
(Some(_), (min, max)) => (min + 1, max.map(|max| max + 1)),
}
}
}<|fim▁end|> | self.request = None;
request |
<|file_name|>environprovider.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package azure
import (
"github.com/juju/loggo"
"github.com/juju/core/environs"
"github.com/juju/core/environs/config"
)
// Register the Azure provider with Juju.
func init() {
environs.RegisterProvider("azure", azureEnvironProvider{})
}
// Logger for the Azure provider.
var logger = loggo.GetLogger("juju.provider.azure")
type azureEnvironProvider struct{}
// azureEnvironProvider implements EnvironProvider.
var _ environs.EnvironProvider = (*azureEnvironProvider)(nil)
// Open is specified in the EnvironProvider interface.
func (prov azureEnvironProvider) Open(cfg *config.Config) (environs.Environ, error) {<|fim▁hole|> // when err is not nil, we end up with a non-nil returned environ and
// this breaks the loop in cmd/jujud/upgrade.go:run() (see
// http://golang.org/doc/faq#nil_error for the gory details).
environ, err := NewEnviron(cfg)
if err != nil {
return nil, err
}
return environ, nil
}
// Prepare is specified in the EnvironProvider interface.
func (prov azureEnvironProvider) Prepare(ctx environs.BootstrapContext, cfg *config.Config) (environs.Environ, error) {
// Set availability-sets-enabled to true
// by default, unless the user set a value.
if _, ok := cfg.AllAttrs()["availability-sets-enabled"]; !ok {
var err error
cfg, err = cfg.Apply(map[string]interface{}{"availability-sets-enabled": true})
if err != nil {
return nil, err
}
}
return prov.Open(cfg)
}<|fim▁end|> | logger.Debugf("opening environment %q.", cfg.Name())
// We can't return NewEnviron(cfg) directly here because otherwise, |
<|file_name|>handshake.js<|end_file_name|><|fim▁begin|>"use strict"
var util = require('util')
var MessageEvent = require('./messageevent')
var Config = require('./config')
/**
* Coordinate the connection of a new chat client to the server.
* Different chat clients send the information differently, so far this chat server supports: TinTin++, mudjs, MudMaster, MudMaster 2k6, ZChat
*/
class Handshake {
/**
* Handshake constructor
* Constructs a new handshake object to process new connections to the chatserver
* @param {Socket} socket nodejs net socket object
* @param {Function} callback the callback that will be processed once the handshake has completed
* @todo Get the chatserver's name from some sort of preferences file
* @todo Get the chatserver's version from some sort of preferences file
*/
constructor(socket, callback) {
this.socket = socket
this.cb = callback
socket.on('data', data => {
var str = data.toString()
var nameAndIp = []
// check if our handshake data contains the expected :
if (str.indexOf(':') > -1) {
// set the connection's protocol information
this.setProtocol(str)
// send the chat name of the server to the client
this.setName('chatserver')
// send the version of the chatserver to the client
this.setVersion(`chatserver v${Config.version}`)
// setup the version response listener to get the client's version
this.socket.on('data', data => this.getVersion(data))
}
})
}
/**
* Set the protocol of the handshake
* @param {String} protocolStr colon and new line delimitered string of the handshake data
*/
setProtocol(protocolStr) {
// split the protocol string by the :
var result = protocolStr.split(':', 2)
// check the first part of the result to get the protocol
if (result[0] == 'CHAT') {
// MudMaster protocol
this.protocol = 'mudmaster'
} else if (result[0] == 'ZCHAT') {
// ZChat protocol
this.protocol = 'zchat'
} else {
// Unknown protocol
this.protocol = 'unknown'
}
// get the name and ip from the second part of the result
this.name = result[1].split('\n')[0]
this.ip = this.socket.remoteAddress
this.port = this.socket.remotePort
}
/**
* Send the chat servers name to the client
* @param {String} name the name of the chat server
*/
setName(name) {
this.socket.write(util.format('YES:%s\n', name))
}
/**
* Send the chat servers version to the client
* @param {String} version the version of the chatserver
*/
setVersion(version) {
// create the version as hex
var hexVersion = ""
for (var i = 0; i < version.length; i++) {
hexVersion += ''+version.charCodeAt(i).toString(16)
}
// send the version
MessageEvent.version(version).toSocket(this.socket).send()
}
/**
* Get the chat client's version
* @param {String} data the data received over the socket
*/
getVersion(data) {
if (data[0].toString(16) == MessageEvent.Type.VERSION) {<|fim▁hole|> }
// remove all the listeners for 'data' on the socket as we don't want getVersion called over and over
this.socket.removeAllListeners('data')
// callback with self
this.cb(this)
}
}
module.exports = Handshake<|fim▁end|> | this.version = data.toString().substring(1, data.length - 2) |
<|file_name|>0014_rename_schema_from_forum_to_askbot.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import os
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
app_dir_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
class Migration(SchemaMigration):
def forwards(self, orm):
if app_dir_name == 'forum':
try:
db.rename_table('forum_anonymousanswer', 'askbot_anonymousanswer')
db.rename_table('forum_anonymousquestion', 'askbot_anonymousquestion')
db.rename_table('forum_emailfeedsetting', 'askbot_emailfeedsetting')
db.rename_table('forum_markedtag', 'askbot_markedtag')
db.rename_table('forum_questionview', 'askbot_questionview')
db.rename_table('forum_validationhash', 'askbot_validationhash')
except:
pass
def backwards(self, orm):
if app_dir_name == 'forum':
db.rename_table('askbot_anonymousanswer', 'forum_anonymousanswer')
db.rename_table('askbot_anonymousquestion', 'forum_anonymousquestion')
db.rename_table('askbot_emailfeedsetting', 'forum_emailfeedsetting')
db.rename_table('askbot_markedtag', 'forum_markedtag')
db.rename_table('askbot_questionview', 'forum_questionview')
db.rename_table('askbot_validationhash', 'forum_validationhash')
if app_dir_name == 'forum':
models = {
'forum.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'forum.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['forum.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'forum.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'forum.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['forum.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'forum.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['forum.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'forum.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['forum.Badge']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'forum.badge': {
'Meta': {'unique_together': "(('name', 'type'),)", 'object_name': 'Badge', 'db_table': "u'badge'"},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "'Award'", 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'forum.comment': {
'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'forum.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'forum.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})<|fim▁hole|> 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'flagged_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flaggeditems'", 'to': "orm['auth.User']"})
},
'forum.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['forum.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'forum.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['forum.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'forum.questionrevision': {
'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['forum.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'forum.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['forum.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'forum.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.Question']"}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'forum.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'forum.validationhash': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'ValidationHash'},
'expiration': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 6, 13, 23, 16, 4, 680070)'}),
'hash_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'seed': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'forum.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
else:
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.Badge']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badge': {
'Meta': {'unique_together': "(('name', 'type'),)", 'object_name': 'Badge', 'db_table': "u'badge'"},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "'Award'", 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'askbot.comment': {
'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.flaggeditem': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'FlaggedItem', 'db_table': "u'flagged_item'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'flagged_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flaggeditems'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionrevision': {
'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.validationhash': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'ValidationHash'},
'expiration': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 6, 13, 23, 16, 4, 680070)'}),
'hash_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'seed': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = [app_dir_name]<|fim▁end|> | },
'forum.flaggeditem': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'FlaggedItem', 'db_table': "u'flagged_item'"}, |
<|file_name|>unixtostr.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('eshttp')
.filter('unixtostr', function() {
return function(str){
var dt;
dt = Date.create(str * 1000).format('{yyyy}-{MM}-{dd} {HH}:{mm}:{ss}');
if (dt == "Invalid Date") {
return 'N/A';<|fim▁hole|> };
});<|fim▁end|> | } else {
return dt;
} |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>const core = require('brigadehub-core')
var pkg = require('./package.json')<|fim▁hole|>const bhConfig = {
dotenv: require('./dotenv')(),
info: '[Brigadehub]',
version: pkg.version,
brigade: brigade
}
core(bhConfig)<|fim▁end|> | var brigade = require('./brigade')()[0]
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""A setuptools based setup module.<|fim▁hole|>"""
# Always prefer setuptools over distutils
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f2:
requires = f2.read().strip().splitlines()
setup(
name='tk_nosy',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version = '0.1.6', # METADATA_RESET: version = '<<version>>',
description='Tk_Nosy monitors project and unittest files and runs nosetests when they change.',
long_description=long_description,
# The project's main homepage.
url='http://tk-nosy.readthedocs.org/en/latest/',
download_url='https://github.com/sonofeft/Tk_Nosy',
# Author details
author='Charlie Taylor',
author_email='[email protected]',
# license
license='GPL-3',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
"Operating System :: OS Independent",
# Indicate who your project is intended for
'Intended Audience :: Developers',
"Intended Audience :: End Users/Desktop",
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
platforms='any',
# What does your project relate to?
keywords='tk_nosy setuptools development',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['.hg', '.tox', 'docs', 'tests']),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
#install_requires = ["future","nose","coverage"],
install_requires = requires, # read from requirements.txt
tests_require=['nose'],
test_suite='tk_nosy.tests', # allows "setup.py test" to work
zip_safe= False,
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
#extras_require={
# 'dev': ['check-manifest'],
# 'test': ['coverage'],
#},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
#package_data={
# 'tk_nosy': ['package_data.dat'],
#},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
#data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'tk_nosy=tk_nosy.main_gui:main',
],
},
)<|fim▁end|> |
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/tk_nosy |
<|file_name|>generate_volcano_evac_zone.py<|end_file_name|><|fim▁begin|><|fim▁hole|>H = read_layer('/data_area/InaSAFE/public_data/hazard/Marapi.shp')
print H.get_geometry()
# Generate evacuation circle (as a polygon):
radius = 3000
center = H.get_geometry()[0]
Z = make_circular_polygon(center, radius)
Z.write_to_file('Marapi_evac_zone_%im.shp' % radius)<|fim▁end|> | from safe.engine.interpolation import make_circular_polygon
from safe.storage.core import read_layer
|
<|file_name|>2082002.js<|end_file_name|><|fim▁begin|>engine.eval("load('nashorn:mozilla_compat.js');");
/*
This file is part of the OdinMS Maple Story Server
Copyright (C) 2008 Patrick Huy <[email protected]>
Matthias Butz <[email protected]>
Jan Christian Meyer <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation version 3 as published by
the Free Software Foundation. You may not use, modify or distribute
this program under any other version of the GNU Affero General Public
License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.<|fim▁hole|> You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
-- Odin JavaScript --------------------------------------------------------------------------------
Harry - Before Takeoff <To Orbis>(240000111)
-- By ---------------------------------------------------------------------------------------------
Information
-- Version Info -----------------------------------------------------------------------------------
1.3 - Add missing return statement [Thanks sadiq for the bug, fix by Information]
1.2 - Replace function to support latest [Information]
1.1 - Fix wrong placed statement [Information]
1.0 - First Version by Information
---------------------------------------------------------------------------------------------------
**/
importPackage(Packages.net.sf.odinms.client);
function start() {
status = -1;
cb = cm.getEventManager("Cabin");
action(1, 0, 0);
}
function action(mode, type, selection) {
if(mode == -1) {
cm.dispose();
return;
} else {
status++;
if(mode == 0) {
cm.sendOk("You'll get to your destination in moment. Go ahead and talk to other people, and before you know it, you'll be there already.");
cm.dispose();
return;
}
if(status == 0) {
cm.sendYesNo("Do you want to leave the waiting room? You can, but the ticket is NOT refundable. Are you sure you still want to leave this room?");
} else if(status == 1) {
cm.warp(240000110);
cm.dispose();
}
}
}<|fim▁end|> | |
<|file_name|>level_bar.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use Buildable;
#[cfg(any(feature = "v3_6", feature = "dox"))]
use LevelBarMode;
use Orientable;
use Widget;
use ffi;
use glib;
use glib::object::Downcast;
use glib::object::IsA;
#[cfg(any(feature = "v3_6", feature = "dox"))]
use glib::signal::SignalHandlerId;
#[cfg(any(feature = "v3_6", feature = "dox"))]
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
#[cfg(any(feature = "v3_6", feature = "dox"))]
use libc;
#[cfg(any(feature = "v3_6", feature = "dox"))]
use std::boxed::Box as Box_;
use std::mem;
#[cfg(any(feature = "v3_6", feature = "dox"))]
use std::mem::transmute;
use std::ptr;
glib_wrapper! {
pub struct LevelBar(Object<ffi::GtkLevelBar, ffi::GtkLevelBarClass>): Widget, Buildable, Orientable;
match fn {
get_type => || ffi::gtk_level_bar_get_type(),
}
}
impl LevelBar {
#[cfg(any(feature = "v3_6", feature = "dox"))]
pub fn new() -> LevelBar {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_level_bar_new()).downcast_unchecked()
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
pub fn new_for_interval(min_value: f64, max_value: f64) -> LevelBar {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_level_bar_new_for_interval(min_value, max_value)).downcast_unchecked()
}
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
impl Default for LevelBar {
fn default() -> Self {
Self::new()
}
}
pub trait LevelBarExt {
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn add_offset_value(&self, name: &str, value: f64);
#[cfg(any(feature = "v3_8", feature = "dox"))]
fn get_inverted(&self) -> bool;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_max_value(&self) -> f64;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_min_value(&self) -> f64;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_mode(&self) -> LevelBarMode;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_offset_value<'a, P: Into<Option<&'a str>>>(&self, name: P) -> Option<f64>;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_value(&self) -> f64;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn remove_offset_value<'a, P: Into<Option<&'a str>>>(&self, name: P);
#[cfg(any(feature = "v3_8", feature = "dox"))]
fn set_inverted(&self, inverted: bool);
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn set_max_value(&self, value: f64);
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn set_min_value(&self, value: f64);
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn set_mode(&self, mode: LevelBarMode);
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn set_value(&self, value: f64);
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_offset_changed<F: Fn(&Self, &str) + 'static>(&self, f: F) -> SignalHandlerId;
#[cfg(any(feature = "v3_8", feature = "dox"))]
fn connect_property_inverted_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_property_max_value_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_property_min_value_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_property_mode_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_property_value_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<LevelBar> + IsA<glib::object::Object>> LevelBarExt for O {
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn add_offset_value(&self, name: &str, value: f64) {
unsafe {
ffi::gtk_level_bar_add_offset_value(self.to_glib_none().0, name.to_glib_none().0, value);
}
}
#[cfg(any(feature = "v3_8", feature = "dox"))]
fn get_inverted(&self) -> bool {
unsafe {
from_glib(ffi::gtk_level_bar_get_inverted(self.to_glib_none().0))
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_max_value(&self) -> f64 {
unsafe {
ffi::gtk_level_bar_get_max_value(self.to_glib_none().0)
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_min_value(&self) -> f64 {
unsafe {
ffi::gtk_level_bar_get_min_value(self.to_glib_none().0)
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_mode(&self) -> LevelBarMode {
unsafe {
from_glib(ffi::gtk_level_bar_get_mode(self.to_glib_none().0))
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_offset_value<'a, P: Into<Option<&'a str>>>(&self, name: P) -> Option<f64> {
let name = name.into();
let name = name.to_glib_none();
unsafe {
let mut value = mem::uninitialized();
let ret = from_glib(ffi::gtk_level_bar_get_offset_value(self.to_glib_none().0, name.0, &mut value));
if ret { Some(value) } else { None }
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn get_value(&self) -> f64 {
unsafe {
ffi::gtk_level_bar_get_value(self.to_glib_none().0)<|fim▁hole|> }
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn remove_offset_value<'a, P: Into<Option<&'a str>>>(&self, name: P) {
let name = name.into();
let name = name.to_glib_none();
unsafe {
ffi::gtk_level_bar_remove_offset_value(self.to_glib_none().0, name.0);
}
}
#[cfg(any(feature = "v3_8", feature = "dox"))]
fn set_inverted(&self, inverted: bool) {
unsafe {
ffi::gtk_level_bar_set_inverted(self.to_glib_none().0, inverted.to_glib());
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn set_max_value(&self, value: f64) {
unsafe {
ffi::gtk_level_bar_set_max_value(self.to_glib_none().0, value);
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn set_min_value(&self, value: f64) {
unsafe {
ffi::gtk_level_bar_set_min_value(self.to_glib_none().0, value);
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn set_mode(&self, mode: LevelBarMode) {
unsafe {
ffi::gtk_level_bar_set_mode(self.to_glib_none().0, mode.to_glib());
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn set_value(&self, value: f64) {
unsafe {
ffi::gtk_level_bar_set_value(self.to_glib_none().0, value);
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_offset_changed<F: Fn(&Self, &str) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self, &str) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "offset-changed",
transmute(offset_changed_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(any(feature = "v3_8", feature = "dox"))]
fn connect_property_inverted_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::inverted",
transmute(notify_inverted_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_property_max_value_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::max-value",
transmute(notify_max_value_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_property_min_value_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::min-value",
transmute(notify_min_value_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_property_mode_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::mode",
transmute(notify_mode_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
fn connect_property_value_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::value",
transmute(notify_value_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
}
// Auto-generated signal trampolines. Each one recovers the double-boxed Rust
// closure from the `f` gpointer that was registered via `connect(..)` above
// and invokes it with the emitting `LevelBar`, borrowed from the raw pointer
// and downcast (unchecked) to the concrete widget type `P`.
#[cfg(any(feature = "v3_6", feature = "dox"))]
unsafe extern "C" fn offset_changed_trampoline<P>(this: *mut ffi::GtkLevelBar, name: *mut libc::c_char, f: glib_ffi::gpointer)
where P: IsA<LevelBar> {
    // `name` is the offset marker name carried by the "offset-changed" signal.
    let f: &&(Fn(&P, &str) + 'static) = transmute(f);
    f(&LevelBar::from_glib_borrow(this).downcast_unchecked(), &String::from_glib_none(name))
}
#[cfg(any(feature = "v3_8", feature = "dox"))]
unsafe extern "C" fn notify_inverted_trampoline<P>(this: *mut ffi::GtkLevelBar, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<LevelBar> {
    let f: &&(Fn(&P) + 'static) = transmute(f);
    f(&LevelBar::from_glib_borrow(this).downcast_unchecked())
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
unsafe extern "C" fn notify_max_value_trampoline<P>(this: *mut ffi::GtkLevelBar, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<LevelBar> {
    let f: &&(Fn(&P) + 'static) = transmute(f);
    f(&LevelBar::from_glib_borrow(this).downcast_unchecked())
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
unsafe extern "C" fn notify_min_value_trampoline<P>(this: *mut ffi::GtkLevelBar, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<LevelBar> {
    let f: &&(Fn(&P) + 'static) = transmute(f);
    f(&LevelBar::from_glib_borrow(this).downcast_unchecked())
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
unsafe extern "C" fn notify_mode_trampoline<P>(this: *mut ffi::GtkLevelBar, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<LevelBar> {
    let f: &&(Fn(&P) + 'static) = transmute(f);
    f(&LevelBar::from_glib_borrow(this).downcast_unchecked())
}
#[cfg(any(feature = "v3_6", feature = "dox"))]
unsafe extern "C" fn notify_value_trampoline<P>(this: *mut ffi::GtkLevelBar, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<LevelBar> {
    let f: &&(Fn(&P) + 'static) = transmute(f);
    f(&LevelBar::from_glib_borrow(this).downcast_unchecked())
}
<|file_name|>cluster.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package integration
import (
"crypto/tls"
"fmt"
"io/ioutil"
"math/rand"
"net"
"net/http"
"net/http/httptest"
"os"
"reflect"
"sort"
"strings"
"sync"
"sync/atomic"
"testing"
"time"
"golang.org/x/net/context"
"google.golang.org/grpc"
"github.com/coreos/etcd/client"
"github.com/coreos/etcd/clientv3"
"github.com/coreos/etcd/etcdserver"
"github.com/coreos/etcd/etcdserver/api"
"github.com/coreos/etcd/etcdserver/api/v2http"
"github.com/coreos/etcd/etcdserver/api/v3rpc"
pb "github.com/coreos/etcd/etcdserver/etcdserverpb"
"github.com/coreos/etcd/pkg/testutil"
"github.com/coreos/etcd/pkg/transport"
"github.com/coreos/etcd/pkg/types"
"github.com/coreos/etcd/rafthttp"
"github.com/coreos/pkg/capnslog"
)
const (
tickDuration = 10 * time.Millisecond
clusterName = "etcd"
requestTimeout = 20 * time.Second
basePort = 21000
UrlScheme = "unix"
UrlSchemeTLS = "unixs"
)
var (
electionTicks = 10
// integration test uses unique ports, counting up, to listen for each
// member, ensuring restarted members can listen on the same port again.
localListenCount int64 = 0
testTLSInfo = transport.TLSInfo{
KeyFile: "./fixtures/server.key.insecure",
CertFile: "./fixtures/server.crt",
TrustedCAFile: "./fixtures/ca.crt",
ClientCertAuth: true,
}
plog = capnslog.NewPackageLogger("github.com/coreos/etcd", "integration")
)
// ClusterConfig describes how a test cluster should be built: its size,
// optional peer/client TLS, discovery bootstrap and backend quota.
type ClusterConfig struct {
	Size int
	PeerTLS *transport.TLSInfo
	ClientTLS *transport.TLSInfo
	DiscoveryURL string
	UseGRPC bool
	QuotaBackendBytes int64
}
// cluster is a set of test members sharing one ClusterConfig.
type cluster struct {
	cfg *ClusterConfig
	Members []*member
}
func init() {
	// manually enable v3 capability since we know the cluster members all support v3.
	api.EnableCapability(api.V3rpcCapability)
}
// schemeFromTLSInfo picks the URL scheme matching the TLS configuration:
// the TLS unix scheme when tls is set, the plain unix scheme otherwise.
func schemeFromTLSInfo(tls *transport.TLSInfo) string {
	if tls != nil {
		return UrlSchemeTLS
	}
	return UrlScheme
}
// fillClusterForMembers computes the static "name=scheme://addr" initial
// cluster string from every member's peer listeners and assigns the parsed
// URL map to each member. It is a no-op when discovery bootstrap is used.
func (c *cluster) fillClusterForMembers() error {
	if c.cfg.DiscoveryURL != "" {
		// cluster will be discovered
		return nil
	}
	var entries []string
	for _, m := range c.Members {
		scheme := schemeFromTLSInfo(m.PeerTLSInfo)
		for _, l := range m.PeerListeners {
			entries = append(entries, fmt.Sprintf("%s=%s://%s", m.Name, scheme, l.Addr().String()))
		}
	}
	initialCluster := strings.Join(entries, ",")
	var err error
	for _, m := range c.Members {
		m.InitialPeerURLsMap, err = types.NewURLsMap(initialCluster)
		if err != nil {
			return err
		}
	}
	return nil
}
// newCluster builds cfg.Size unlaunched members and wires up their shared
// initial-cluster peer URL maps.
func newCluster(t *testing.T, cfg *ClusterConfig) *cluster {
	c := &cluster{cfg: cfg}
	ms := make([]*member, cfg.Size)
	for i := 0; i < cfg.Size; i++ {
		ms[i] = c.mustNewMember(t)
	}
	c.Members = ms
	if err := c.fillClusterForMembers(); err != nil {
		t.Fatal(err)
	}
	return c
}
// NewCluster returns an unlaunched cluster of the given size which has been
// set to use static bootstrap.
func NewCluster(t *testing.T, size int) *cluster {
	return newCluster(t, &ClusterConfig{Size: size})
}
// NewClusterByConfig returns an unlaunched cluster defined by a cluster configuration
func NewClusterByConfig(t *testing.T, cfg *ClusterConfig) *cluster {
	return newCluster(t, cfg)
}
// Launch boots every member concurrently and blocks until the membership is
// visible from every node and the cluster version has been set.
func (c *cluster) Launch(t *testing.T) {
	errc := make(chan error)
	for _, m := range c.Members {
		// Members are launched in separate goroutines because if they boot
		// using discovery url, they have to wait for others to register to continue.
		go func(m *member) {
			errc <- m.Launch()
		}(m)
	}
	// Collect exactly one result per member.
	for range c.Members {
		if err := <-errc; err != nil {
			t.Fatalf("error setting up member: %v", err)
		}
	}
	// wait cluster to be stable to receive future client requests
	c.waitMembersMatch(t, c.HTTPMembers())
	c.waitVersion()
}
// URL returns the first client URL of member i.
func (c *cluster) URL(i int) string {
	return c.Members[i].ClientURLs[0].String()
}
// URLs returns a list of all active client URLs in the cluster
func (c *cluster) URLs() []string {
	urls := make([]string, 0)
	for _, m := range c.Members {
		// Skip members whose server has already stopped.
		select {
		case <-m.s.StopNotify():
			continue
		default:
		}
		for _, u := range m.ClientURLs {
			urls = append(urls, u.String())
		}
	}
	return urls
}
// HTTPMembers converts every member of the cluster into a client.Member,
// deriving peer and client URL schemes from each member's TLS settings.
func (c *cluster) HTTPMembers() []client.Member {
	ms := []client.Member{}
	for _, m := range c.Members {
		peerScheme := schemeFromTLSInfo(m.PeerTLSInfo)
		clientScheme := schemeFromTLSInfo(m.ClientTLSInfo)
		cm := client.Member{Name: m.Name}
		for _, ln := range m.PeerListeners {
			cm.PeerURLs = append(cm.PeerURLs, peerScheme+"://"+ln.Addr().String())
		}
		for _, ln := range m.ClientListeners {
			cm.ClientURLs = append(cm.ClientURLs, clientScheme+"://"+ln.Addr().String())
		}
		ms = append(ms, cm)
	}
	return ms
}
// mustNewMember builds a member from the cluster config and, when gRPC is
// requested, starts its gRPC listener; any failure aborts the test.
func (c *cluster) mustNewMember(t *testing.T) *member {
	m := mustNewMember(t,
		memberConfig{
			name: c.name(rand.Int()),
			peerTLS: c.cfg.PeerTLS,
			clientTLS: c.cfg.ClientTLS,
			quotaBackendBytes: c.cfg.QuotaBackendBytes,
		})
	m.DiscoveryURL = c.cfg.DiscoveryURL
	if c.cfg.UseGRPC {
		if err := m.listenGRPC(); err != nil {
			t.Fatal(err)
		}
	}
	return m
}
// addMember creates a new member, registers it with the running cluster via
// any reachable node, launches it and waits for membership to converge.
func (c *cluster) addMember(t *testing.T) {
	m := c.mustNewMember(t)
	scheme := schemeFromTLSInfo(c.cfg.PeerTLS)
	// send add request to the cluster; try each member until one accepts it
	var err error
	for i := 0; i < len(c.Members); i++ {
		clientURL := c.URL(i)
		peerURL := scheme + "://" + m.PeerListeners[0].Addr().String()
		if err = c.addMemberByURL(t, clientURL, peerURL); err == nil {
			break
		}
	}
	if err != nil {
		t.Fatalf("add member failed on all members error: %v", err)
	}
	// The joining member needs the full peer URL map (including itself) so
	// it joins the existing cluster instead of bootstrapping a new one.
	m.InitialPeerURLsMap = types.URLsMap{}
	for _, mm := range c.Members {
		m.InitialPeerURLsMap[mm.Name] = mm.PeerURLs
	}
	m.InitialPeerURLsMap[m.Name] = m.PeerURLs
	m.NewCluster = false
	if err := m.Launch(); err != nil {
		t.Fatal(err)
	}
	c.Members = append(c.Members, m)
	// wait cluster to be stable to receive future client requests
	c.waitMembersMatch(t, c.HTTPMembers())
}
// addMemberByURL asks the member serving clientURL to add a new member with
// the given peerURL, then waits until every node observes the addition.
func (c *cluster) addMemberByURL(t *testing.T, clientURL, peerURL string) error {
	cc := MustNewHTTPClient(t, []string{clientURL}, c.cfg.ClientTLS)
	ma := client.NewMembersAPI(cc)
	ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
	// defer releases the context on the error path as well; previously
	// cancel was only called after a successful Add, leaking the context
	// whenever Add failed.
	defer cancel()
	if _, err := ma.Add(ctx, peerURL); err != nil {
		return err
	}
	// wait for the add node entry applied in the cluster
	members := append(c.HTTPMembers(), client.Member{PeerURLs: []string{peerURL}, ClientURLs: []string{}})
	c.waitMembersMatch(t, members)
	return nil
}
// AddMember grows the cluster by one freshly created member.
func (c *cluster) AddMember(t *testing.T) {
	c.addMember(t)
}
// RemoveMember removes the member with the given raft ID, failing the test
// on error.
func (c *cluster) RemoveMember(t *testing.T, id uint64) {
	if err := c.removeMember(t, id); err != nil {
		t.Fatal(err)
	}
}
// removeMember asks the cluster to drop member id, waits for that member's
// server to stop, terminates it, prunes it from c.Members, and waits for the
// remaining membership to converge.
func (c *cluster) removeMember(t *testing.T, id uint64) error {
	// send remove request to the cluster
	cc := MustNewHTTPClient(t, c.URLs(), c.cfg.ClientTLS)
	ma := client.NewMembersAPI(cc)
	ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
	// defer releases the context on the error path too; previously cancel
	// was skipped when Remove returned an error, leaking the context.
	defer cancel()
	if err := ma.Remove(ctx, types.ID(id).String()); err != nil {
		return err
	}
	newMembers := make([]*member, 0)
	for _, m := range c.Members {
		if uint64(m.s.ID()) != id {
			newMembers = append(newMembers, m)
		} else {
			// Wait for the removed member's server to stop itself, then
			// clean up its data dir.
			select {
			case <-m.s.StopNotify():
				m.Terminate(t)
			// 1s stop delay + election timeout + 1s disk and network delay + connection write timeout
			// TODO: remove connection write timeout by selecting on http response closeNotifier
			// blocking on https://github.com/golang/go/issues/9524
			case <-time.After(time.Second + time.Duration(electionTicks)*tickDuration + time.Second + rafthttp.ConnWriteTimeout):
				t.Fatalf("failed to remove member %s in time", m.s.ID())
			}
		}
	}
	c.Members = newMembers
	c.waitMembersMatch(t, c.HTTPMembers())
	return nil
}
// Terminate stops every member and removes their data directories.
func (c *cluster) Terminate(t *testing.T) {
	for _, m := range c.Members {
		m.Terminate(t)
	}
}
// waitMembersMatch polls the members API through every active client URL
// until each node reports a member list equal (ignoring IDs) to membs.
func (c *cluster) waitMembersMatch(t *testing.T, membs []client.Member) {
	for _, u := range c.URLs() {
		cc := MustNewHTTPClient(t, []string{u}, c.cfg.ClientTLS)
		ma := client.NewMembersAPI(cc)
		for {
			ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
			ms, err := ma.List(ctx)
			cancel()
			if err == nil && isMembersEqual(ms, membs) {
				break
			}
			time.Sleep(tickDuration)
		}
	}
	return
}
// WaitLeader waits for the whole cluster to elect one leader and returns its index.
func (c *cluster) WaitLeader(t *testing.T) int { return c.waitLeader(t, c.Members) }
// waitLeader waits until given members agree on the same leader.
func (c *cluster) waitLeader(t *testing.T, membs []*member) int {
	possibleLead := make(map[uint64]bool)
	var lead uint64
	for _, m := range membs {
		possibleLead[uint64(m.s.ID())] = true
	}
	// Poll until every live member reports the same leader and that leader
	// is one of the given members.
	for lead == 0 || !possibleLead[lead] {
		lead = 0
		for _, m := range membs {
			// Skip stopped members.
			select {
			case <-m.s.StopNotify():
				continue
			default:
			}
			if lead != 0 && lead != m.s.Lead() {
				// Disagreement between members: reset and back off.
				lead = 0
				time.Sleep(10 * tickDuration)
				break
			}
			lead = m.s.Lead()
		}
	}
	for i, m := range membs {
		if uint64(m.s.ID()) == lead {
			return i
		}
	}
	// Unreachable in practice: the loop only exits once lead is one of membs.
	return -1
}
// WaitNoLeader waits until every member reports no leader.
func (c *cluster) WaitNoLeader(t *testing.T) { c.waitNoLeader(t, c.Members) }
// waitNoLeader waits until given members lose leader.
func (c *cluster) waitNoLeader(t *testing.T, membs []*member) {
	noLeader := false
	for !noLeader {
		noLeader = true
		for _, m := range membs {
			// Skip stopped members.
			select {
			case <-m.s.StopNotify():
				continue
			default:
			}
			if m.s.Lead() != 0 {
				noLeader = false
				time.Sleep(10 * tickDuration)
				break
			}
		}
	}
}
// waitVersion blocks until every member has published a non-nil cluster version.
func (c *cluster) waitVersion() {
	for _, m := range c.Members {
		for m.s.ClusterVersion() == nil {
			time.Sleep(tickDuration)
		}
	}
}
// name builds a deterministic member name for index i, e.g. "node42".
func (c *cluster) name(i int) string {
	return fmt.Sprintf("node%d", i)
}
// isMembersEqual checks whether two members equal except ID field.
// The given wmembs should always set ID field to empty string.
// NOTE: both slices are sorted in place and the IDs in membs are cleared,
// so callers must not rely on the inputs afterwards.
func isMembersEqual(membs []client.Member, wmembs []client.Member) bool {
	sort.Sort(SortableMemberSliceByPeerURLs(membs))
	sort.Sort(SortableMemberSliceByPeerURLs(wmembs))
	for i := range membs {
		membs[i].ID = ""
	}
	return reflect.DeepEqual(membs, wmembs)
}
// newLocalListener creates a unix-domain listener at a unique address, using
// a monotonically increasing counter so restarted members can listen on the
// same address again.
func newLocalListener(t *testing.T) net.Listener {
	c := atomic.AddInt64(&localListenCount, 1)
	// The pid keeps concurrent test processes from colliding on a socket path.
	addr := fmt.Sprintf("127.0.0.1:%d.%d.sock", c+basePort, os.Getpid())
	return NewListenerWithAddr(t, addr)
}
// NewListenerWithAddr returns a unix listener bound to addr, failing the
// test on error.
func NewListenerWithAddr(t *testing.T, addr string) net.Listener {
	l, err := transport.NewUnixListener(addr)
	if err != nil {
		t.Fatal(err)
	}
	return l
}
// member wraps a single embedded etcd server together with the listeners,
// TLS settings, test HTTP servers and optional gRPC plumbing that expose it.
type member struct {
	etcdserver.ServerConfig
	PeerListeners, ClientListeners []net.Listener
	grpcListener net.Listener
	// PeerTLSInfo enables peer TLS when set
	PeerTLSInfo *transport.TLSInfo
	// ClientTLSInfo enables client TLS when set
	ClientTLSInfo *transport.TLSInfo
	raftHandler *testutil.PauseableHandler
	s *etcdserver.EtcdServer
	hss []*httptest.Server
	grpcServer *grpc.Server
	grpcAddr string
	grpcBridge *bridge
}
// GRPCAddr returns the address clients should dial for the member's gRPC
// endpoint (the bridge address once listenGRPC has run).
func (m *member) GRPCAddr() string { return m.grpcAddr }
// memberConfig carries the per-member knobs consumed by mustNewMember.
type memberConfig struct {
	name string
	peerTLS *transport.TLSInfo
	clientTLS *transport.TLSInfo
	quotaBackendBytes int64
}
// mustNewMember return an inited member with the given name. If peerTLS is
// set, it will use https scheme to communicate between peers.
func mustNewMember(t *testing.T, mcfg memberConfig) *member {
	var err error
	m := &member{}
	peerScheme := schemeFromTLSInfo(mcfg.peerTLS)
	clientScheme := schemeFromTLSInfo(mcfg.clientTLS)
	// One peer listener/URL per member.
	pln := newLocalListener(t)
	m.PeerListeners = []net.Listener{pln}
	m.PeerURLs, err = types.NewURLs([]string{peerScheme + "://" + pln.Addr().String()})
	if err != nil {
		t.Fatal(err)
	}
	m.PeerTLSInfo = mcfg.peerTLS
	// One client listener/URL per member.
	cln := newLocalListener(t)
	m.ClientListeners = []net.Listener{cln}
	m.ClientURLs, err = types.NewURLs([]string{clientScheme + "://" + cln.Addr().String()})
	if err != nil {
		t.Fatal(err)
	}
	m.ClientTLSInfo = mcfg.clientTLS
	m.Name = mcfg.name
	m.DataDir, err = ioutil.TempDir(os.TempDir(), "etcd")
	if err != nil {
		t.Fatal(err)
	}
	// Bootstrap as a single-node cluster; addMember rewrites this map when
	// the member later joins an existing cluster.
	clusterStr := fmt.Sprintf("%s=%s://%s", mcfg.name, peerScheme, pln.Addr().String())
	m.InitialPeerURLsMap, err = types.NewURLsMap(clusterStr)
	if err != nil {
		t.Fatal(err)
	}
	m.InitialClusterToken = clusterName
	m.NewCluster = true
	m.BootstrapTimeout = 10 * time.Millisecond
	if m.PeerTLSInfo != nil {
		m.ServerConfig.PeerTLSInfo = *m.PeerTLSInfo
	}
	m.ElectionTicks = electionTicks
	m.TickMs = uint(tickDuration / time.Millisecond)
	m.QuotaBackendBytes = mcfg.quotaBackendBytes
	return m
}
// listenGRPC starts a grpc server over a unix domain socket on the member
func (m *member) listenGRPC() error {
	// prefix with localhost so cert has right domain
	m.grpcAddr = "localhost:" + m.Name + ".sock"
	l, err := transport.NewUnixListener(m.grpcAddr)
	if err != nil {
		return fmt.Errorf("listen failed on grpc socket %s (%v)", m.grpcAddr, err)
	}
	// Clients dial through a bridge so tests can drop/reset connections.
	m.grpcBridge, err = newBridge(m.grpcAddr)
	if err != nil {
		l.Close()
		return err
	}
	m.grpcAddr = m.grpcBridge.URL()
	m.grpcListener = l
	return nil
}
// electionTimeout converts the server's election tick count to a duration.
func (m *member) electionTimeout() time.Duration {
	return time.Duration(m.s.Cfg.ElectionTicks) * time.Millisecond
}
// DropConnections severs all client connections currently passing through
// the member's gRPC bridge.
func (m *member) DropConnections() { m.grpcBridge.Reset() }
// NewClientV3 creates a new grpc client connection to the member
func NewClientV3(m *member) (*clientv3.Client, error) {
	if m.grpcAddr == "" {
		return nil, fmt.Errorf("member not configured for grpc")
	}
	cfg := clientv3.Config{
		Endpoints: []string{m.grpcAddr},
		DialTimeout: 5 * time.Second,
	}
	// Mirror the member's client TLS settings on the gRPC client.
	if m.ClientTLSInfo != nil {
		tls, err := m.ClientTLSInfo.ClientConfig()
		if err != nil {
			return nil, err
		}
		cfg.TLS = tls
	}
	return newClientV3(cfg)
}
// Clone returns a member with the same server configuration. The returned
// member will not set PeerListeners and ClientListeners.
func (m *member) Clone(t *testing.T) *member {
	mm := &member{}
	mm.ServerConfig = m.ServerConfig
	var err error
	// URL collections are rebuilt from their string forms so the clone does
	// not alias the original's slices.
	clientURLStrs := m.ClientURLs.StringSlice()
	mm.ClientURLs, err = types.NewURLs(clientURLStrs)
	if err != nil {
		// this should never fail
		panic(err)
	}
	peerURLStrs := m.PeerURLs.StringSlice()
	mm.PeerURLs, err = types.NewURLs(peerURLStrs)
	if err != nil {
		// this should never fail
		panic(err)
	}
	clusterStr := m.InitialPeerURLsMap.String()
	mm.InitialPeerURLsMap, err = types.NewURLsMap(clusterStr)
	if err != nil {
		// this should never fail
		panic(err)
	}
	mm.InitialClusterToken = m.InitialClusterToken
	mm.ElectionTicks = m.ElectionTicks
	mm.PeerTLSInfo = m.PeerTLSInfo
	mm.ClientTLSInfo = m.ClientTLSInfo
	return mm
}
// Launch starts a member based on ServerConfig, PeerListeners
// and ClientListeners.
func (m *member) Launch() error {
	plog.Printf("launching %s (%s)", m.Name, m.grpcAddr)
	var err error
	if m.s, err = etcdserver.NewServer(&m.ServerConfig); err != nil {
		return fmt.Errorf("failed to initialize the etcd server: %v", err)
	}
	m.s.SyncTicker = time.Tick(500 * time.Millisecond)
	m.s.Start()
	// Peer (raft) traffic passes through a pauseable handler so tests can
	// freeze a member's network.
	m.raftHandler = &testutil.PauseableHandler{Next: v2http.NewPeerHandler(m.s)}
	for _, ln := range m.PeerListeners {
		hs := &httptest.Server{
			Listener: ln,
			Config: &http.Server{Handler: m.raftHandler},
		}
		if m.PeerTLSInfo == nil {
			hs.Start()
		} else {
			hs.TLS, err = m.PeerTLSInfo.ServerConfig()
			if err != nil {
				return err
			}
			hs.StartTLS()
		}
		m.hss = append(m.hss, hs)
	}
	for _, ln := range m.ClientListeners {
		hs := &httptest.Server{
			Listener: ln,
			Config: &http.Server{Handler: v2http.NewClientHandler(m.s, m.ServerConfig.ReqTimeout())},
		}
		if m.ClientTLSInfo == nil {
			hs.Start()
		} else {
			hs.TLS, err = m.ClientTLSInfo.ServerConfig()
			if err != nil {
				return err
			}
			hs.StartTLS()
		}
		m.hss = append(m.hss, hs)
	}
	// Serve v3 gRPC traffic when a gRPC listener was configured.
	if m.grpcListener != nil {
		var (
			tlscfg *tls.Config
		)
		if m.ClientTLSInfo != nil && !m.ClientTLSInfo.Empty() {
			tlscfg, err = m.ClientTLSInfo.ServerConfig()
			if err != nil {
				return err
			}
		}
		m.grpcServer = v3rpc.Server(m.s, tlscfg)
		go m.grpcServer.Serve(m.grpcListener)
	}
	plog.Printf("launched %s (%s)", m.Name, m.grpcAddr)
	return nil
}
// WaitOK blocks until the member answers client requests and has observed a
// leader.
func (m *member) WaitOK(t *testing.T) {
	cc := MustNewHTTPClient(t, []string{m.URL()}, m.ClientTLSInfo)
	kapi := client.NewKeysAPI(cc)
	for {
		ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
		_, err := kapi.Get(ctx, "/", nil)
		// Always release the context; previously cancel was skipped on the
		// retry path, leaking one context per failed poll.
		cancel()
		if err != nil {
			time.Sleep(tickDuration)
			continue
		}
		break
	}
	for m.s.Leader() == 0 {
		time.Sleep(tickDuration)
	}
}
// URL returns the member's first client URL.
func (m *member) URL() string { return m.ClientURLs[0].String() }
// Pause stops serving peer (raft) HTTP traffic and pauses outbound sends.
func (m *member) Pause() {
	m.raftHandler.Pause()
	m.s.PauseSending()
}
// Resume reverses Pause.
func (m *member) Resume() {
	m.raftHandler.Resume()
	m.s.ResumeSending()
}
// Close stops the member's etcdserver and closes its connections
func (m *member) Close() {
	// Tear down gRPC plumbing first so no new client traffic arrives.
	if m.grpcBridge != nil {
		m.grpcBridge.Close()
		m.grpcBridge = nil
	}
	if m.grpcServer != nil {
		m.grpcServer.Stop()
		m.grpcServer = nil
	}
	m.s.HardStop()
	for _, hs := range m.hss {
		hs.CloseClientConnections()
		hs.Close()
	}
}
// Stop stops the member, but the data dir of the member is preserved.
func (m *member) Stop(t *testing.T) {
	plog.Printf("stopping %s (%s)", m.Name, m.grpcAddr)
	m.Close()
	m.hss = nil
	plog.Printf("stopped %s (%s)", m.Name, m.grpcAddr)
}
// checkLeaderTransition waits for leader transition, returning the new leader ID.
func checkLeaderTransition(t *testing.T, m *member, oldLead uint64) uint64 {
	interval := time.Duration(m.s.Cfg.TickMs) * time.Millisecond
	// Poll until a leader exists and it differs from the previous one.
	for m.s.Lead() == 0 || (m.s.Lead() == oldLead) {
		time.Sleep(interval)
	}
	return m.s.Lead()
}
// StopNotify unblocks when a member stop completes
func (m *member) StopNotify() <-chan struct{} {
	return m.s.StopNotify()
}
// Restart starts the member using the preserved data dir.
func (m *member) Restart(t *testing.T) error {
	plog.Printf("restarting %s (%s)", m.Name, m.grpcAddr)
	// Re-bind listeners on the same addresses the member used before it was
	// stopped.
	newPeerListeners := make([]net.Listener, 0)
	for _, ln := range m.PeerListeners {
		newPeerListeners = append(newPeerListeners, NewListenerWithAddr(t, ln.Addr().String()))
	}
	m.PeerListeners = newPeerListeners
	newClientListeners := make([]net.Listener, 0)
	for _, ln := range m.ClientListeners {
		newClientListeners = append(newClientListeners, NewListenerWithAddr(t, ln.Addr().String()))
	}
	m.ClientListeners = newClientListeners
	if m.grpcListener != nil {
		if err := m.listenGRPC(); err != nil {
			t.Fatal(err)
		}
	}
	err := m.Launch()
	plog.Printf("restarted %s (%s)", m.Name, m.grpcAddr)
	return err
}
// Terminate stops the member and removes the data dir.
func (m *member) Terminate(t *testing.T) {
	plog.Printf("terminating %s (%s)", m.Name, m.grpcAddr)
	m.Close()
	if err := os.RemoveAll(m.ServerConfig.DataDir); err != nil {
		t.Fatal(err)
	}
	plog.Printf("terminated %s (%s)", m.Name, m.grpcAddr)
}
// Metric gets the metric value for a member
func (m *member) Metric(metricName string) (string, error) {
cfgtls := transport.TLSInfo{}
tr, err := transport.NewTimeoutTransport(cfgtls, time.Second, time.Second, time.Second)
if err != nil {
return "", err
}
cli := &http.Client{Transport: tr}
resp, err := cli.Get(m.ClientURLs[0].String() + "/metrics")
if err != nil {
return "", err<|fim▁hole|> }
defer resp.Body.Close()
b, rerr := ioutil.ReadAll(resp.Body)
if rerr != nil {
return "", rerr
}
lines := strings.Split(string(b), "\n")
for _, l := range lines {
if strings.HasPrefix(l, metricName) {
return strings.Split(l, " ")[1], nil
}
}
return "", nil
}
// InjectPartition drops connections from m to others, vice versa.
func (m *member) InjectPartition(t *testing.T, others []*member) {
	for _, other := range others {
		m.s.CutPeer(other.s.ID())
		other.s.CutPeer(m.s.ID())
	}
}
// RecoverPartition recovers connections from m to others, vice versa.
func (m *member) RecoverPartition(t *testing.T, others []*member) {
	for _, other := range others {
		m.s.MendPeer(other.s.ID())
		other.s.MendPeer(m.s.ID())
	}
}
// MustNewHTTPClient builds a v2 HTTP client for the given endpoints, failing
// the test on error.
func MustNewHTTPClient(t *testing.T, eps []string, tls *transport.TLSInfo) client.Client {
	cfgtls := transport.TLSInfo{}
	if tls != nil {
		cfgtls = *tls
	}
	cfg := client.Config{Transport: mustNewTransport(t, cfgtls), Endpoints: eps}
	c, err := client.New(cfg)
	if err != nil {
		t.Fatal(err)
	}
	return c
}
// mustNewTransport returns an HTTP transport with timeouts suited to the
// short tick used by the integration tests.
func mustNewTransport(t *testing.T, tlsInfo transport.TLSInfo) *http.Transport {
	// tick in integration test is short, so 1s dial timeout could play well.
	tr, err := transport.NewTimeoutTransport(tlsInfo, time.Second, rafthttp.ConnReadTimeout, rafthttp.ConnWriteTimeout)
	if err != nil {
		t.Fatal(err)
	}
	return tr
}
// SortableMemberSliceByPeerURLs orders client.Members by their first peer
// URL so member lists can be compared irrespective of ordering.
type SortableMemberSliceByPeerURLs []client.Member
func (p SortableMemberSliceByPeerURLs) Len() int { return len(p) }
func (p SortableMemberSliceByPeerURLs) Less(i, j int) bool {
	return p[i].PeerURLs[0] < p[j].PeerURLs[0]
}
func (p SortableMemberSliceByPeerURLs) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
// ClusterV3 is a cluster whose members also carry v3 gRPC client connections.
type ClusterV3 struct {
	*cluster
	// mu guards clients.
	mu sync.Mutex
	clients []*clientv3.Client
}
// NewClusterV3 returns a launched cluster with a grpc client connection
// for each cluster member.
func NewClusterV3(t *testing.T, cfg *ClusterConfig) *ClusterV3 {
	cfg.UseGRPC = true
	clus := &ClusterV3{
		cluster: NewClusterByConfig(t, cfg),
	}
	clus.Launch(t)
	for _, m := range clus.Members {
		client, err := NewClientV3(m)
		if err != nil {
			t.Fatalf("cannot create client: %v", err)
		}
		clus.clients = append(clus.clients, client)
	}
	return clus
}
// TakeClient transfers ownership of client idx to the caller: Terminate
// will no longer close it.
func (c *ClusterV3) TakeClient(idx int) {
	c.mu.Lock()
	c.clients[idx] = nil
	c.mu.Unlock()
}
// Terminate closes every client connection still owned by the cluster and
// then terminates all members.
func (c *ClusterV3) Terminate(t *testing.T) {
	c.mu.Lock()
	for _, client := range c.clients {
		if client == nil {
			// Taken via TakeClient; not ours to close.
			continue
		}
		if err := client.Close(); err != nil {
			t.Error(err)
		}
	}
	c.mu.Unlock()
	c.cluster.Terminate(t)
}
// RandClient returns a client connection to a randomly chosen member.
func (c *ClusterV3) RandClient() *clientv3.Client {
	return c.clients[rand.Intn(len(c.clients))]
}
// Client returns the client connection to member i.
func (c *ClusterV3) Client(i int) *clientv3.Client {
	return c.clients[i]
}
// grpcAPI bundles all gRPC service clients derived from one connection.
type grpcAPI struct {
	// Cluster is the cluster API for the client's connection.
	Cluster pb.ClusterClient
	// KV is the keyvalue API for the client's connection.
	KV pb.KVClient
	// Lease is the lease API for the client's connection.
	Lease pb.LeaseClient
	// Watch is the watch API for the client's connection.
	Watch pb.WatchClient
	// Maintenance is the maintenance API for the client's connection.
	Maintenance pb.MaintenanceClient
}
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>extern crate discotech;
#[macro_use]
extern crate log;
extern crate log4rs;
use discotech::{Serverset, DiscoConfig, read_config};
use std::env;
/// Configures log4rs so Debug-and-above records from the root logger are
/// written to a console appender registered under the name "stderr".
fn initialize_logging() {
    let root = log4rs::config::Root::builder(log::LogLevelFilter::Debug)
        .appender("stderr".to_string());
    let console = Box::new(log4rs::appender::ConsoleAppender::builder().build());
    let config = log4rs::config::Config::builder(root.build())
        .appender(log4rs::config::Appender::builder("stderr".to_string(), console).build());
    log4rs::init_config(config.build().unwrap()).unwrap();
}
fn initialize(config: DiscoConfig) {
initialize_logging();
let serverset = *Box::new(Serverset::new(config));
debug!("THINGS");
serverset.update_members();
for member in serverset.members.read().unwrap().iter() {
debug!("Member: {:?}", member);
}
}<|fim▁hole|>
/// Integration entry point: reads the config file named by the `DISCO_CONF`
/// environment variable and drives `initialize` with it; panics (failing the
/// test) when the variable is unset or the config cannot be read.
#[test]
fn integration() {
    let config_file_loc = match env::var("DISCO_CONF") {
        Err(_) => panic!("Please set the DISCO_CONF environment variable"),
        Ok(location) => location,
    };
    match read_config(config_file_loc) {
        Err(reason) => panic!("Unable to read configuration; bailing: {}", reason),
        Ok(config) => initialize(config),
    }
}
<|file_name|>ec2_vpc_subnet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = '''
---
module: ec2_vpc_subnet
short_description: Manage subnets in AWS virtual private clouds
description:
- Manage subnets in AWS virtual private clouds
version_added: "2.0"
author: Robert Estelle (@erydo)
options:
az:
description:
- "The availability zone for the subnet. Only required when state=present."
required: false
default: null
cidr:
description:
- "The CIDR block for the subnet. E.g. 192.0.2.0/24. Only required when state=present."
required: false
default: null
tags:
description:
- "A dict of tags to apply to the subnet. Any tags currently applied to the subnet and not present here will be removed."
required: false
default: null
aliases: [ 'resource_tags' ]
state:
description:
- "Create or remove the subnet"
required: false
default: present
choices: [ 'present', 'absent' ]
vpc_id:
description:
- "VPC ID of the VPC in which to create the subnet."
required: false
default: null
extends_documentation_fragment:
- aws
- ec2<|fim▁hole|>'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
- name: Create subnet for database servers
ec2_vpc_subnet:
state: present
vpc_id: vpc-123456
cidr: 10.0.1.16/28
resource_tags:
Name: Database Subnet
register: database_subnet
- name: Remove subnet for database servers
ec2_vpc_subnet:
state: absent
vpc_id: vpc-123456
cidr: 10.0.1.16/28
'''
import time
try:
import boto.ec2
import boto.vpc
from boto.exception import EC2ResponseError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
if __name__ != '__main__':
raise
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info
class AnsibleVPCSubnetException(Exception):
    """Base error for all failures raised by this module."""
    pass
class AnsibleVPCSubnetCreationException(AnsibleVPCSubnetException):
    """Raised when a subnet cannot be created."""
    pass
class AnsibleVPCSubnetDeletionException(AnsibleVPCSubnetException):
    """Raised when a subnet cannot be deleted."""
    pass
class AnsibleTagCreationException(AnsibleVPCSubnetException):
    """Raised when subnet tags cannot be created or updated."""
    pass
def get_subnet_info(subnet):
    """Convert a boto Subnet object into a plain dict of its attributes.

    The boto camelCase attribute names (defaultForAz, mapPublicIpOnLaunch)
    are exposed under snake_case keys for the module's return payload.
    """
    return {
        'id': subnet.id,
        'availability_zone': subnet.availability_zone,
        'available_ip_address_count': subnet.available_ip_address_count,
        'cidr_block': subnet.cidr_block,
        'default_for_az': subnet.defaultForAz,
        'map_public_ip_on_launch': subnet.mapPublicIpOnLaunch,
        'state': subnet.state,
        'tags': subnet.tags,
        'vpc_id': subnet.vpc_id,
    }
def subnet_exists(vpc_conn, subnet_id):
filters = {'subnet-id': subnet_id}
subnet = vpc_conn.get_all_subnets(filters=filters)
if subnet and subnet[0].state == "available":
return subnet[0]
else:
return False
def create_subnet(vpc_conn, vpc_id, cidr, az, check_mode):
try:
new_subnet = vpc_conn.create_subnet(vpc_id, cidr, az, dry_run=check_mode)
# Sometimes AWS takes its time to create a subnet and so using
# new subnets's id to do things like create tags results in
# exception. boto doesn't seem to refresh 'state' of the newly
# created subnet, i.e.: it's always 'pending'.
subnet = False
while subnet is False:
subnet = subnet_exists(vpc_conn, new_subnet.id)
time.sleep(0.1)
except EC2ResponseError as e:
if e.error_code == "DryRunOperation":
subnet = None
elif e.error_code == "InvalidSubnet.Conflict":
raise AnsibleVPCSubnetCreationException("%s: the CIDR %s conflicts with another subnet with the VPC ID %s." % (e.error_code, cidr, vpc_id))
else:
raise AnsibleVPCSubnetCreationException(
'Unable to create subnet {0}, error: {1}'.format(cidr, e))
return subnet
def get_resource_tags(vpc_conn, resource_id):
return dict((t.name, t.value) for t in
vpc_conn.get_all_tags(filters={'resource-id': resource_id}))
def ensure_tags(vpc_conn, resource_id, tags, add_only, check_mode):
try:
cur_tags = get_resource_tags(vpc_conn, resource_id)
if cur_tags == tags:
return {'changed': False, 'tags': cur_tags}
to_delete = dict((k, cur_tags[k]) for k in cur_tags if k not in tags)
if to_delete and not add_only:
vpc_conn.delete_tags(resource_id, to_delete, dry_run=check_mode)
to_add = dict((k, tags[k]) for k in tags if k not in cur_tags or cur_tags[k] != tags[k])
if to_add:
vpc_conn.create_tags(resource_id, to_add, dry_run=check_mode)
latest_tags = get_resource_tags(vpc_conn, resource_id)
return {'changed': True, 'tags': latest_tags}
except EC2ResponseError as e:
raise AnsibleTagCreationException(
'Unable to update tags for {0}, error: {1}'.format(resource_id, e))
def get_matching_subnet(vpc_conn, vpc_id, cidr):
subnets = vpc_conn.get_all_subnets(filters={'vpc_id': vpc_id})
return next((s for s in subnets if s.cidr_block == cidr), None)
def ensure_subnet_present(vpc_conn, vpc_id, cidr, az, tags, check_mode):
subnet = get_matching_subnet(vpc_conn, vpc_id, cidr)
changed = False
if subnet is None:
subnet = create_subnet(vpc_conn, vpc_id, cidr, az, check_mode)
changed = True
# Subnet will be None when check_mode is true
if subnet is None:
return {
'changed': changed,
'subnet': {}
}
if tags != subnet.tags:
ensure_tags(vpc_conn, subnet.id, tags, False, check_mode)
subnet.tags = tags
changed = True
subnet_info = get_subnet_info(subnet)
return {
'changed': changed,
'subnet': subnet_info
}
def ensure_subnet_absent(vpc_conn, vpc_id, cidr, check_mode):
subnet = get_matching_subnet(vpc_conn, vpc_id, cidr)
if subnet is None:
return {'changed': False}
try:
vpc_conn.delete_subnet(subnet.id, dry_run=check_mode)
return {'changed': True}
except EC2ResponseError as e:
raise AnsibleVPCSubnetDeletionException(
'Unable to delete subnet {0}, error: {1}'
.format(subnet.cidr_block, e))
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
az=dict(default=None, required=False),
cidr=dict(default=None, required=True),
state=dict(default='present', choices=['present', 'absent']),
tags=dict(default={}, required=False, type='dict', aliases=['resource_tags']),
vpc_id=dict(default=None, required=True)
)
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
if not HAS_BOTO:
module.fail_json(msg='boto is required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if region:
try:
connection = connect_to_aws(boto.vpc, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
else:
module.fail_json(msg="region must be specified")
vpc_id = module.params.get('vpc_id')
tags = module.params.get('tags')
cidr = module.params.get('cidr')
az = module.params.get('az')
state = module.params.get('state')
try:
if state == 'present':
result = ensure_subnet_present(connection, vpc_id, cidr, az, tags,
check_mode=module.check_mode)
elif state == 'absent':
result = ensure_subnet_absent(connection, vpc_id, cidr,
check_mode=module.check_mode)
except AnsibleVPCSubnetException as e:
module.fail_json(msg=str(e))
module.exit_json(**result)
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>filter.ts<|end_file_name|><|fim▁begin|>import * as m from './model'
import * as f from './factories'
export function filterModules(moduleNames: string[]|((moduleName: string) => boolean), modules: m.KeyValue<f.ModuleFactory>): m.KeyValue<f.ModuleFactory> {
// TODO: Temporary hack to get a demo ready
return modules
// let filteredModules: m.KeyValue<f.ModuleFactory> = {}
//
// function processTypeContainer(container: f.ContainerFactory) {
// Object.keys(container.classConstructors).forEach(function(clsName) {
// processClass(container.classConstructors[clsName])
// })
//
// Object.keys(container.interfaceConstructors).forEach(function(intName) {
// processInterface(container.interfaceConstructors[intName])
// })
//
// Object.keys(container.enums).forEach(function(name) {
// processEnum(container.enums[name])
// })
//
// Object.keys(container.typeAliasConstructors).forEach(function(name) {
// processTypeAliasConstructor(container.typeAliasConstructors[name])
// })
//
// Object.keys(container.namespaces).forEach(function(name) {
// processTypeContainer(container.namespaces[name])
// })
//
// Object.keys(container.values).forEach(function(name) {
// processValue(container.values[name])
// })
// }
//
// function processDecorator(decorator: f.DecoratorFactory<any>) {
// copy(decorator.decoratorType)
// if (decorator.parameters) {
// decorator.parameters.forEach(function(parameter) {
// processExpression(parameter)
// })
// }
// }
//
// function processValue(value: f.ValueFactory<any>) {
//
// }
//
// function processEnum(e: f.EnumFactory) {
// processType(e)
// // TODO Process initializers<|fim▁hole|> // processType(typeAliasConstructor.type)
// }
//
// function processExpression(expr: f.ExpressionFactory<any>) {
// switch (expr.expressionKind) {
// case m.ExpressionKind.CLASS_REFERENCE:
// copy((<f.ClassReferenceExpressionFactory>expr).classReference)
// break
// case m.ExpressionKind.OBJECT:
// let obj = <m.RawObjectExpression>expr
// Object.keys(obj.properties).forEach(function(name) {
// processExpression(obj.properties[name])
// })
// break
// case m.ExpressionKind.ARRAY:
// let arr = <m.RawArrayExpression>expr
// arr.elements.forEach(function(expr) {
// processExpression(expr)
// })
// break
// }
// }
//
// function processClass(cls: f.ClassConstructorFactory) {
// processType(cls.instanceType)
// processType(cls.staticType)
//
// if (cls.extends) {
// processType(cls.extends)
// }
// if (cls.implements) {
// cls.implements.forEach(function(type) {
// processType(type)
// })
// }
// if (cls.typeParameters) {
// cls.typeParameters.forEach(function(typeParameter) {
// if (typeParameter.extends) {
// processType(typeParameter.extends)
// }
// })
// }
//
// if (cls.decorators) {
// cls.decorators.forEach(function(decorator) {
// processDecorator(decorator)
// })
// }
// }
//
// function processInterface(int: f.InterfaceConstructorFactory) {
// processType(int.instanceType)
//
// if (int.extends) {
// int.extends.forEach(function(type) {
// processType(type)
// })
// }
// if (int.typeParameters) {
// int.typeParameters.forEach(function(typeParameter) {
// if (typeParameter.extends) {
// processType(typeParameter.extends)
// }
// })
// }
// }
//
// function processType(ref: f.TypeFactory<any>) {
// if ((<m.TypeTemplate>ref).typeKind) {
// let type = <m.TypeTemplate>ref
// switch (type.typeKind) {
// case m.TypeKind.TYPE_QUERY:
// if ((<m.RawTypeQuery>ref).type) {
// copyReference((<m.RawTypeQuery>ref).type)
// }
// break
// case m.TypeKind.FUNCTION:
// let f = <m.RawFunctionType>ref
// f.parameters.forEach(function(parameter) {
// if ((<m.RawDecoratedParameter>parameter).decorators) {
// (<m.RawDecoratedParameter>parameter).decorators.forEach(processDecorator)
// }
// processType(parameter.type)
// })
// if (f.typeParameters) {
// f.typeParameters.forEach(function(typeParameter) {
// if (typeParameter.extends) {
// processType(typeParameter.extends)
// }
// })
// }
// if (f.type) {
// processType(f.type)
// }
// break
// case m.TypeKind.TUPLE:
// (<m.RawTupleType>ref).elements.forEach(function(type) {
// processType(type)
// })
// break
// case m.TypeKind.UNION:
// (<m.RawUnionType>ref).types.forEach(function(type) {
// processType(type)
// })
// break
// case m.TypeKind.COMPOSITE:
// let composite = <m.RawCompositeType>ref
// Object.keys(composite.members).forEach(function(name) {
// let member = composite.members[name]
// processType(member.type)
// if ((<m.RawDecoratedMember>member).decorators) {
// (<m.RawDecoratedMember>member).decorators.forEach(processDecorator)
// }
// })
// if (composite.index) {
// processType(composite.index.valueType)
// }
// if (composite.calls) {
// composite.calls.forEach(processType)
// }
// break
// }
// } else if ((<m.RefinedReference>ref).reference) {
// let rr = <m.RefinedReference>ref
// copy(rr.reference)
// rr.typeArguments.forEach(function(typeArg) {
// processType(typeArg)
// })
// } else {
// copy(ref)
// }
// }
//
// function copy(factory:f.Factory<any>) {
// switch (factory.modelKind) {
// case m.ModelKind.CLASS_CONSTRUCTOR:
// case m.ModelKind.INTERFACE_CONSTRUCTOR:
// case m.ModelKind.TYPE_ALIAS_CONSTRUCTOR:
// case m.ModelKind.VALUE:
// return copyContained(<f.ContainedFactory<any>>factory)
// case m.ModelKind.TYPE:
// if ((<f.TypeFactory<any>>factory).typeKind === m.TypeKind.ENUM) {
// return copyContained(<f.ContainedFactory<any>>factory)
// }
// }
// }
//
// function copyContained(ref: f.ContainedFactory<any>) {
// let parent = ref.parent
// let parents = [parent]
// while (parent.containerKind === m.ContainerKind.NAMESPACE) {
// parent = ref.parent
// parents.splice(0, 0, parent)
// }
// let filteredContainer:f.ContainerFactory = filteredModules[parent[0].name]
// if (!filteredContainer) {
// filteredContainer = new f.ModuleFactory(parent[0].name)
// filteredModules[parent[0].name] = <f.ModuleFactory>filteredContainer
// }
// for (let i = 1; i < parents.length; i++) {
// filteredContainer = filteredContainer.addNamespace(parents[i].name)
// }
// switch (ref.modelKind) {
// case m.ModelKind.CLASS_CONSTRUCTOR:
// let filteredClass = filteredContainer.addClassConstructor(ref.name)
// let cc = <f.ClassConstructorFactory>ref
// filteredClass.implements = cc.implements
// filteredClass.extends = cc.extends
// cc.instanceType
// case m.ModelKind.INTERFACE_CONSTRUCTOR:
// case m.ModelKind.TYPE_ALIAS_CONSTRUCTOR:
// case m.ModelKind.TYPE:
// case m.ModelKind.VALUE:
// }
// }
//
// function copyComposite(orig:f.DecoratedCompositeTypeFactory<any>, filtered:f.DecoratedCompositeTypeFactory<any>) {
// Object.keys(orig.members).forEach(function(name){
// let member = orig.members[name]
// let fMember = filtered.addMember(name)
// fMember.type = member.type
// })
// }
//
// // function copyReference(ref: m.Reference) {
// // let split = ref.module.split(':')
// // let namespaces: string[]
// // let moduleName = split[0]
// //
// // if (split.length > 1) {
// // split.shift()
// // namespaces = split
// // }
// //
// // if (ref.module !== '@') {
// // let refMod = modules[moduleName]
// // if (!refMod) {
// // refMod = modules[''].namespaces[moduleName]
// // }
// // let mod = refMod
// // let _mod = filteredModules[moduleName]
// // if (!_mod) {
// // _mod = createRawTypeContainer()
// // filteredModules[moduleName] = <m.RawTypeContainer>_mod
// // }
// // if (namespaces) {
// // for (let i = 0; i < namespaces.length; i++) {
// // mod = mod.namespaces[namespaces[i]]
// // _mod = _mod.namespaces[namespaces[i]]
// // if (!_mod) {
// // let __mod = _mod
// // _mod = createRawTypeContainer()
// // __mod.namespaces[namespaces[i]] = <m.RawTypeContainer>_mod
// // }
// // }
// // }
// // if (mod.classConstructors[ref.name]) {
// // if (!_mod.classConstructors[ref.name]) {
// // _mod.classConstructors[ref.name] = mod.classConstructors[ref.name]
// // processClass(mod.classConstructors[ref.name])
// // }
// // } else if (mod.interfaceConstructors[ref.name]) {
// // if (!_mod.interfaceConstructors[ref.name]) {
// // _mod.interfaceConstructors[ref.name] = mod.interfaceConstructors[ref.name]
// // processInterface(mod.interfaceConstructors[ref.name])
// // }
// // } else if (mod.types[ref.name]) {
// // if (!_mod.types[ref.name]) {
// // _mod.types[ref.name] = mod.types[ref.name]
// // processTypeAlias(mod.types[ref.name])
// // }
// // } else if (mod.statics[ref.name]) {
// // if (!_mod.statics[ref.name]) {
// // _mod.statics[ref.name] = mod.statics[ref.name]
// // processType(mod.statics[ref.name].type)
// // }
// // } else if (mod.reexports[ref.name]) {
// // if (!_mod.reexports[ref.name]) {
// // _mod.reexports[ref.name] = mod.reexports[ref.name]
// // copyReference(mod.reexports[ref.name])
// // }
// // } else {
// // throw new Error('Cannot find entity ' + ref.module + ':' + ref.name)
// // }
// // }
// // }
//
// if ((<((moduleName: string) => boolean)>moduleNames).bind) {
// let f = <((moduleName: string) => boolean)>moduleNames
// Object.keys(modules).forEach(function(moduleName) {
// if (f(moduleName)) {
// filteredModules[moduleName] = modules[moduleName]
// processTypeContainer(modules[moduleName])
// }
// })
// } else {
// (<string[]>moduleNames).forEach(function(moduleName) {
// filteredModules[moduleName] = modules[moduleName]
// processTypeContainer(modules[moduleName])
// })
// }
//
// return filteredModules
}<|fim▁end|> | // }
//
// function processTypeAliasConstructor(typeAliasConstructor: f.TypeAliasConstructorFactory<any>) { |
<|file_name|>MaritalStatus.java<|end_file_name|><|fim▁begin|>/**
* DynamicReports - Free Java reporting library for creating reports dynamically
*
* Copyright (C) 2010 - 2012 Ricardo Mariaca
* http://dynamicreports.sourceforge.net
*
* This file is part of DynamicReports.
 *
 * DynamicReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* DynamicReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with DynamicReports. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.dynamicreports.examples.complex.applicationform;
/**
* @author Ricardo Mariaca ([email protected])
*/
public enum MaritalStatus {
SINGLE,
MARRIED,
DIVORCED
}<|fim▁end|> | |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-25 16:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),<|fim▁hole|> ('country_code', models.CharField(max_length=2, null=True)),
('cc_id', models.CharField(max_length=36)),
('line1', models.CharField(max_length=100, null=True)),
('line2', models.CharField(max_length=100, null=True)),
('line3', models.CharField(max_length=100, null=True)),
('postal_code', models.CharField(max_length=10, null=True)),
('state', models.CharField(max_length=20, null=True)),
('state_code', models.CharField(max_length=2, null=True)),
('sub_postal_code', models.CharField(max_length=20, null=True)),
],
),
migrations.CreateModel(
name='ConstantContactList',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cc_id', models.IntegerField()),
('status', models.CharField(choices=[('AC', 'Active'), ('HI', 'Hidden')], max_length=2)),
('name', models.CharField(max_length=48)),
('created_date', models.DateTimeField()),
('modified_date', models.DateTimeField()),
],
),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('confirmed', models.NullBooleanField()),
('company_name', models.CharField(max_length=100, null=True)),
('created_date', models.DateTimeField()),
('first_name', models.CharField(max_length=50, null=True)),
('middle_name', models.CharField(max_length=50, null=True)),
('last_name', models.CharField(max_length=50, null=True)),
('cc_id', models.IntegerField()),
('cc_modified_date', models.DateTimeField()),
('prefix_name', models.CharField(max_length=10, null=True)),
('job_title', models.CharField(max_length=50, null=True)),
('source', models.CharField(max_length=50, null=True)),
('status', models.CharField(choices=[('UN', 'Unconfirmed'), ('AC', 'Active'), ('OP', 'Optout'), ('RE', 'Removed'), ('NO', 'Non Subscriber')], max_length=2)),
('addresses', models.ManyToManyField(to='datacombine.Address')),
],
),
migrations.CreateModel(
name='EmailAddress',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('confirm_status', models.CharField(choices=[('CO', 'Confirmed'), ('NC', 'No Confirmation Required')], max_length=3)),
('cc_id', models.CharField(max_length=36)),
('status', models.CharField(choices=[('UN', 'Unconfirmed'), ('AC', 'Active'), ('OP', 'Optout'), ('RE', 'Removed'), ('NO', 'Non Subscriber')], max_length=2)),
('opt_in_date', models.DateTimeField(null=True)),
('opt_out_date', models.DateTimeField(null=True)),
('email_address', models.EmailField(max_length=254)),
('opt_in_source', models.CharField(choices=[('AO', 'Action by Owner'), ('AV', 'Action by Visitor')], max_length=2)),
],
),
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_date', models.DateTimeField()),
('cc_id', models.CharField(max_length=36)),
('modified_date', models.DateTimeField()),
('note', models.TextField()),
],
),
migrations.CreateModel(
name='Phone',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('area_code', models.CharField(max_length=3, null=True)),
('number', models.CharField(max_length=7)),
('extension', models.CharField(max_length=7, null=True)),
],
),
migrations.CreateModel(
name='UserStatusOnCCList',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.CharField(choices=[('AC', 'Active'), ('HI', 'Hidden')], max_length=2)),
('cclist', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacombine.ConstantContactList')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='datacombine.Contact')),
],
),
migrations.AddField(
model_name='contact',
name='cc_lists',
field=models.ManyToManyField(through='datacombine.UserStatusOnCCList', to='datacombine.ConstantContactList'),
),
migrations.AddField(
model_name='contact',
name='cell_phone',
field=models.ManyToManyField(related_name='_contact_cell_phone_+', to='datacombine.Phone'),
),
migrations.AddField(
model_name='contact',
name='email_addresses',
field=models.ManyToManyField(to='datacombine.EmailAddress'),
),
migrations.AddField(
model_name='contact',
name='fax',
field=models.ManyToManyField(related_name='_contact_fax_+', to='datacombine.Phone'),
),
migrations.AddField(
model_name='contact',
name='home_phone',
field=models.ManyToManyField(related_name='_contact_home_phone_+', to='datacombine.Phone'),
),
migrations.AddField(
model_name='contact',
name='notes',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='datacombine.Note'),
),
migrations.AddField(
model_name='contact',
name='work_phone',
field=models.ManyToManyField(related_name='_contact_work_phone_+', to='datacombine.Phone'),
),
]<|fim▁end|> | ('address_type', models.CharField(choices=[('BU', 'Business'), ('PE', 'Personal')], max_length=2)),
('city', models.CharField(max_length=32, null=True)), |
<|file_name|>Entry.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that the Entry() global function and environment method work
correctly, and that the former does not try to expand construction
variables.
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
env = Environment(FOO = 'fff', BAR = 'bbb')
print Entry('ddd')
print Entry('$FOO')
print Entry('${BAR}_$BAR')
print env.Entry('eee')
print env.Entry('$FOO')
print env.Entry('${BAR}_$BAR')
""")
test.run(stdout = test.wrap_stdout(read_str = """\
ddd
$FOO
${BAR}_$BAR
eee
fff
bbb_bbb
""", build_str = """\
scons: `.' is up to date.
"""))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: |
<|file_name|>conversion_generated.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1
import (
"reflect"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/resource"
"github.com/GoogleCloudPlatform/kubernetes/pkg/conversion"
)
// AUTO-GENERATED FUNCTIONS START HERE
func convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in *api.AWSElasticBlockStoreVolumeSource, out *AWSElasticBlockStoreVolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.AWSElasticBlockStoreVolumeSource))(in)
}
out.VolumeID = in.VolumeID
out.FSType = in.FSType
out.Partition = in.Partition
out.ReadOnly = in.ReadOnly
return nil
}
func convert_api_Binding_To_v1_Binding(in *api.Binding, out *Binding, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.Binding))(in)
}
if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Target, &out.Target, s); err != nil {
return err
}
return nil
}
func convert_api_Capabilities_To_v1_Capabilities(in *api.Capabilities, out *Capabilities, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.Capabilities))(in)
}
if in.Add != nil {
out.Add = make([]Capability, len(in.Add))
for i := range in.Add {
out.Add[i] = Capability(in.Add[i])
}
} else {
out.Add = nil
}
if in.Drop != nil {
out.Drop = make([]Capability, len(in.Drop))
for i := range in.Drop {
out.Drop[i] = Capability(in.Drop[i])
}
} else {
out.Drop = nil
}
return nil
}
func convert_api_ComponentCondition_To_v1_ComponentCondition(in *api.ComponentCondition, out *ComponentCondition, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ComponentCondition))(in)
}
out.Type = ComponentConditionType(in.Type)
out.Status = ConditionStatus(in.Status)
out.Message = in.Message
out.Error = in.Error
return nil
}
func convert_api_ComponentStatus_To_v1_ComponentStatus(in *api.ComponentStatus, out *ComponentStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ComponentStatus))(in)
}
if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if in.Conditions != nil {
out.Conditions = make([]ComponentCondition, len(in.Conditions))
for i := range in.Conditions {
if err := convert_api_ComponentCondition_To_v1_ComponentCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
return nil
}
func convert_api_ComponentStatusList_To_v1_ComponentStatusList(in *api.ComponentStatusList, out *ComponentStatusList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ComponentStatusList))(in)
}
if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ComponentStatus, len(in.Items))
for i := range in.Items {
if err := convert_api_ComponentStatus_To_v1_ComponentStatus(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func convert_api_Container_To_v1_Container(in *api.Container, out *Container, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.Container))(in)
}
out.Name = in.Name
out.Image = in.Image
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
if in.Args != nil {
out.Args = make([]string, len(in.Args))
for i := range in.Args {
out.Args[i] = in.Args[i]
}
} else {
out.Args = nil
}
out.WorkingDir = in.WorkingDir
if in.Ports != nil {
out.Ports = make([]ContainerPort, len(in.Ports))
for i := range in.Ports {
if err := convert_api_ContainerPort_To_v1_ContainerPort(&in.Ports[i], &out.Ports[i], s); err != nil {
return err
}
}
} else {
out.Ports = nil
}
if in.Env != nil {
out.Env = make([]EnvVar, len(in.Env))
for i := range in.Env {
if err := convert_api_EnvVar_To_v1_EnvVar(&in.Env[i], &out.Env[i], s); err != nil {
return err
}
}
} else {
out.Env = nil
}
if err := convert_api_ResourceRequirements_To_v1_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil {
return err
}
if in.VolumeMounts != nil {
out.VolumeMounts = make([]VolumeMount, len(in.VolumeMounts))
for i := range in.VolumeMounts {
if err := convert_api_VolumeMount_To_v1_VolumeMount(&in.VolumeMounts[i], &out.VolumeMounts[i], s); err != nil {
return err
}
}
} else {
out.VolumeMounts = nil
}
if in.LivenessProbe != nil {
out.LivenessProbe = new(Probe)
if err := convert_api_Probe_To_v1_Probe(in.LivenessProbe, out.LivenessProbe, s); err != nil {
return err
}
} else {
out.LivenessProbe = nil
}
if in.ReadinessProbe != nil {
out.ReadinessProbe = new(Probe)
if err := convert_api_Probe_To_v1_Probe(in.ReadinessProbe, out.ReadinessProbe, s); err != nil {
return err
}
} else {
out.ReadinessProbe = nil
}
if in.Lifecycle != nil {
out.Lifecycle = new(Lifecycle)
if err := convert_api_Lifecycle_To_v1_Lifecycle(in.Lifecycle, out.Lifecycle, s); err != nil {
return err
}
} else {
out.Lifecycle = nil
}
out.TerminationMessagePath = in.TerminationMessagePath
out.ImagePullPolicy = PullPolicy(in.ImagePullPolicy)
if in.SecurityContext != nil {
out.SecurityContext = new(SecurityContext)
if err := convert_api_SecurityContext_To_v1_SecurityContext(in.SecurityContext, out.SecurityContext, s); err != nil {
return err
}
} else {
out.SecurityContext = nil
}
return nil
}
func convert_api_ContainerPort_To_v1_ContainerPort(in *api.ContainerPort, out *ContainerPort, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ContainerPort))(in)
}
out.Name = in.Name
out.HostPort = in.HostPort
out.ContainerPort = in.ContainerPort
out.Protocol = Protocol(in.Protocol)
out.HostIP = in.HostIP
return nil
}
func convert_api_ContainerState_To_v1_ContainerState(in *api.ContainerState, out *ContainerState, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ContainerState))(in)
}
if in.Waiting != nil {
out.Waiting = new(ContainerStateWaiting)
if err := convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting(in.Waiting, out.Waiting, s); err != nil {
return err
}
} else {
out.Waiting = nil
}
if in.Running != nil {
out.Running = new(ContainerStateRunning)
if err := convert_api_ContainerStateRunning_To_v1_ContainerStateRunning(in.Running, out.Running, s); err != nil {
return err
}
} else {
out.Running = nil
}
if in.Terminated != nil {
out.Terminated = new(ContainerStateTerminated)
if err := convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated(in.Terminated, out.Terminated, s); err != nil {
return err
}
} else {
out.Terminated = nil
}
return nil
}
func convert_api_ContainerStateRunning_To_v1_ContainerStateRunning(in *api.ContainerStateRunning, out *ContainerStateRunning, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ContainerStateRunning))(in)
}
if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil {
return err
}
return nil
}
func convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated(in *api.ContainerStateTerminated, out *ContainerStateTerminated, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ContainerStateTerminated))(in)
}
out.ExitCode = in.ExitCode
out.Signal = in.Signal
out.Reason = in.Reason
out.Message = in.Message
if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil {
return err
}
if err := s.Convert(&in.FinishedAt, &out.FinishedAt, 0); err != nil {
return err
}
out.ContainerID = in.ContainerID
return nil
}
func convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting(in *api.ContainerStateWaiting, out *ContainerStateWaiting, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ContainerStateWaiting))(in)
}
out.Reason = in.Reason
return nil
}
// convert_api_ContainerStatus_To_v1_ContainerStatus converts an api.ContainerStatus
// into its v1 representation, recursively converting the current and last-termination
// container states.
func convert_api_ContainerStatus_To_v1_ContainerStatus(in *api.ContainerStatus, out *ContainerStatus, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ContainerStatus))(in)
	}
	out.Name = in.Name
	if err := convert_api_ContainerState_To_v1_ContainerState(&in.State, &out.State, s); err != nil {
		return err
	}
	if err := convert_api_ContainerState_To_v1_ContainerState(&in.LastTerminationState, &out.LastTerminationState, s); err != nil {
		return err
	}
	out.Ready = in.Ready
	out.RestartCount = in.RestartCount
	out.Image = in.Image
	out.ImageID = in.ImageID
	out.ContainerID = in.ContainerID
	return nil
}

// convert_api_DeleteOptions_To_v1_DeleteOptions converts api.DeleteOptions into its
// v1 representation. GracePeriodSeconds is an optional pointer and is deep-copied
// when present so the output does not alias the input.
func convert_api_DeleteOptions_To_v1_DeleteOptions(in *api.DeleteOptions, out *DeleteOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.DeleteOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if in.GracePeriodSeconds != nil {
		out.GracePeriodSeconds = new(int64)
		*out.GracePeriodSeconds = *in.GracePeriodSeconds
	} else {
		out.GracePeriodSeconds = nil
	}
	return nil
}
// convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource converts an
// api.EmptyDirVolumeSource into its v1 representation via a string-type cast
// of the storage medium.
func convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource(in *api.EmptyDirVolumeSource, out *EmptyDirVolumeSource, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EmptyDirVolumeSource))(in)
	}
	out.Medium = StorageMedium(in.Medium)
	return nil
}

// convert_api_EndpointAddress_To_v1_EndpointAddress converts an api.EndpointAddress
// into its v1 representation, allocating and converting the optional TargetRef.
func convert_api_EndpointAddress_To_v1_EndpointAddress(in *api.EndpointAddress, out *EndpointAddress, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EndpointAddress))(in)
	}
	out.IP = in.IP
	if in.TargetRef != nil {
		out.TargetRef = new(ObjectReference)
		if err := convert_api_ObjectReference_To_v1_ObjectReference(in.TargetRef, out.TargetRef, s); err != nil {
			return err
		}
	} else {
		out.TargetRef = nil
	}
	return nil
}

// convert_api_EndpointPort_To_v1_EndpointPort converts an api.EndpointPort into its
// v1 representation; Protocol is a simple string-type cast.
func convert_api_EndpointPort_To_v1_EndpointPort(in *api.EndpointPort, out *EndpointPort, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EndpointPort))(in)
	}
	out.Name = in.Name
	out.Port = in.Port
	out.Protocol = Protocol(in.Protocol)
	return nil
}
// convert_api_EndpointSubset_To_v1_EndpointSubset converts an api.EndpointSubset
// into its v1 representation, element-wise converting the Addresses and Ports
// slices. Nil input slices stay nil on the output (they are not normalized to
// empty slices).
func convert_api_EndpointSubset_To_v1_EndpointSubset(in *api.EndpointSubset, out *EndpointSubset, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EndpointSubset))(in)
	}
	if in.Addresses != nil {
		out.Addresses = make([]EndpointAddress, len(in.Addresses))
		for i := range in.Addresses {
			if err := convert_api_EndpointAddress_To_v1_EndpointAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Addresses = nil
	}
	if in.Ports != nil {
		out.Ports = make([]EndpointPort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_api_EndpointPort_To_v1_EndpointPort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	return nil
}

// convert_api_Endpoints_To_v1_Endpoints converts an api.Endpoints object into its
// v1 representation: type/object metadata plus the element-wise Subsets slice.
func convert_api_Endpoints_To_v1_Endpoints(in *api.Endpoints, out *Endpoints, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Endpoints))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Subsets != nil {
		out.Subsets = make([]EndpointSubset, len(in.Subsets))
		for i := range in.Subsets {
			if err := convert_api_EndpointSubset_To_v1_EndpointSubset(&in.Subsets[i], &out.Subsets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Subsets = nil
	}
	return nil
}

// convert_api_EndpointsList_To_v1_EndpointsList converts an api.EndpointsList into
// its v1 representation, converting list metadata and each Endpoints item.
func convert_api_EndpointsList_To_v1_EndpointsList(in *api.EndpointsList, out *EndpointsList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EndpointsList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Endpoints, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Endpoints_To_v1_Endpoints(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_EnvVar_To_v1_EnvVar converts an api.EnvVar into its v1
// representation, deep-copying the optional ValueFrom source when present.
func convert_api_EnvVar_To_v1_EnvVar(in *api.EnvVar, out *EnvVar, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EnvVar))(in)
	}
	out.Name = in.Name
	out.Value = in.Value
	if in.ValueFrom != nil {
		out.ValueFrom = new(EnvVarSource)
		if err := convert_api_EnvVarSource_To_v1_EnvVarSource(in.ValueFrom, out.ValueFrom, s); err != nil {
			return err
		}
	} else {
		out.ValueFrom = nil
	}
	return nil
}

// convert_api_EnvVarSource_To_v1_EnvVarSource converts an api.EnvVarSource into its
// v1 representation. Only the FieldRef variant is handled by this generated
// converter.
func convert_api_EnvVarSource_To_v1_EnvVarSource(in *api.EnvVarSource, out *EnvVarSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EnvVarSource))(in)
	}
	if in.FieldRef != nil {
		out.FieldRef = new(ObjectFieldSelector)
		if err := convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector(in.FieldRef, out.FieldRef, s); err != nil {
			return err
		}
	} else {
		out.FieldRef = nil
	}
	return nil
}
// convert_api_Event_To_v1_Event converts an api.Event into its v1 representation:
// metadata, the involved object reference, the event source, and the first/last
// timestamps (routed through the generic scope converter).
func convert_api_Event_To_v1_Event(in *api.Event, out *Event, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Event))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.InvolvedObject, &out.InvolvedObject, s); err != nil {
		return err
	}
	out.Reason = in.Reason
	out.Message = in.Message
	if err := convert_api_EventSource_To_v1_EventSource(&in.Source, &out.Source, s); err != nil {
		return err
	}
	if err := s.Convert(&in.FirstTimestamp, &out.FirstTimestamp, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.LastTimestamp, &out.LastTimestamp, 0); err != nil {
		return err
	}
	out.Count = in.Count
	return nil
}

// convert_api_EventList_To_v1_EventList converts an api.EventList into its v1
// representation, converting list metadata and each Event item.
func convert_api_EventList_To_v1_EventList(in *api.EventList, out *EventList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EventList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Event, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Event_To_v1_Event(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// convert_api_EventSource_To_v1_EventSource converts an api.EventSource into its
// v1 representation by copying the Component and Host fields.
func convert_api_EventSource_To_v1_EventSource(in *api.EventSource, out *EventSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.EventSource))(in)
	}
	out.Component = in.Component
	out.Host = in.Host
	return nil
}
// convert_api_ExecAction_To_v1_ExecAction converts an api.ExecAction into its v1
// representation, copying the Command slice element by element (nil stays nil).
func convert_api_ExecAction_To_v1_ExecAction(in *api.ExecAction, out *ExecAction, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ExecAction))(in)
	}
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	return nil
}

// convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource
// converts an api.GCEPersistentDiskVolumeSource into its v1 representation by
// copying all scalar fields.
func convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in *api.GCEPersistentDiskVolumeSource, out *GCEPersistentDiskVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.GCEPersistentDiskVolumeSource))(in)
	}
	out.PDName = in.PDName
	out.FSType = in.FSType
	out.Partition = in.Partition
	out.ReadOnly = in.ReadOnly
	return nil
}

// convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource converts an
// api.GitRepoVolumeSource into its v1 representation.
func convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource(in *api.GitRepoVolumeSource, out *GitRepoVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.GitRepoVolumeSource))(in)
	}
	out.Repository = in.Repository
	out.Revision = in.Revision
	return nil
}

// convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource converts an
// api.GlusterfsVolumeSource into its v1 representation.
func convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in *api.GlusterfsVolumeSource, out *GlusterfsVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.GlusterfsVolumeSource))(in)
	}
	out.EndpointsName = in.EndpointsName
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_HTTPGetAction_To_v1_HTTPGetAction converts an api.HTTPGetAction into
// its v1 representation. Port is an int-or-string union type and is routed through
// the generic scope converter; Scheme is a string-type cast.
func convert_api_HTTPGetAction_To_v1_HTTPGetAction(in *api.HTTPGetAction, out *HTTPGetAction, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.HTTPGetAction))(in)
	}
	out.Path = in.Path
	if err := s.Convert(&in.Port, &out.Port, 0); err != nil {
		return err
	}
	out.Host = in.Host
	out.Scheme = URIScheme(in.Scheme)
	return nil
}

// convert_api_Handler_To_v1_Handler converts an api.Handler into its v1
// representation, deep-copying whichever of the three optional action variants
// (Exec, HTTPGet, TCPSocket) are set.
func convert_api_Handler_To_v1_Handler(in *api.Handler, out *Handler, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Handler))(in)
	}
	if in.Exec != nil {
		out.Exec = new(ExecAction)
		if err := convert_api_ExecAction_To_v1_ExecAction(in.Exec, out.Exec, s); err != nil {
			return err
		}
	} else {
		out.Exec = nil
	}
	if in.HTTPGet != nil {
		out.HTTPGet = new(HTTPGetAction)
		if err := convert_api_HTTPGetAction_To_v1_HTTPGetAction(in.HTTPGet, out.HTTPGet, s); err != nil {
			return err
		}
	} else {
		out.HTTPGet = nil
	}
	if in.TCPSocket != nil {
		out.TCPSocket = new(TCPSocketAction)
		if err := convert_api_TCPSocketAction_To_v1_TCPSocketAction(in.TCPSocket, out.TCPSocket, s); err != nil {
			return err
		}
	} else {
		out.TCPSocket = nil
	}
	return nil
}
// convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource converts an
// api.HostPathVolumeSource into its v1 representation.
func convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in *api.HostPathVolumeSource, out *HostPathVolumeSource, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.HostPathVolumeSource))(in)
	}
	out.Path = in.Path
	return nil
}

// convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource converts an
// api.ISCSIVolumeSource into its v1 representation by copying all scalar fields.
func convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in *api.ISCSIVolumeSource, out *ISCSIVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ISCSIVolumeSource))(in)
	}
	out.TargetPortal = in.TargetPortal
	out.IQN = in.IQN
	out.Lun = in.Lun
	out.FSType = in.FSType
	out.ReadOnly = in.ReadOnly
	return nil
}

// convert_api_Lifecycle_To_v1_Lifecycle converts an api.Lifecycle into its v1
// representation, deep-copying the optional PostStart and PreStop handlers.
func convert_api_Lifecycle_To_v1_Lifecycle(in *api.Lifecycle, out *Lifecycle, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Lifecycle))(in)
	}
	if in.PostStart != nil {
		out.PostStart = new(Handler)
		if err := convert_api_Handler_To_v1_Handler(in.PostStart, out.PostStart, s); err != nil {
			return err
		}
	} else {
		out.PostStart = nil
	}
	if in.PreStop != nil {
		out.PreStop = new(Handler)
		if err := convert_api_Handler_To_v1_Handler(in.PreStop, out.PreStop, s); err != nil {
			return err
		}
	} else {
		out.PreStop = nil
	}
	return nil
}
// convert_api_LimitRange_To_v1_LimitRange converts an api.LimitRange into its v1
// representation: type metadata, object metadata, and the spec.
func convert_api_LimitRange_To_v1_LimitRange(in *api.LimitRange, out *LimitRange, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.LimitRange))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_LimitRangeSpec_To_v1_LimitRangeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return nil
}

// convert_api_LimitRangeItem_To_v1_LimitRangeItem converts an api.LimitRangeItem
// into its v1 representation, rebuilding the Max, Min, and Default resource maps.
// Each resource.Quantity value goes through the generic scope converter; taking
// the address of the range variable `val` is safe here because Convert only reads
// from it before the next iteration overwrites it.
func convert_api_LimitRangeItem_To_v1_LimitRangeItem(in *api.LimitRangeItem, out *LimitRangeItem, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.LimitRangeItem))(in)
	}
	out.Type = LimitType(in.Type)
	if in.Max != nil {
		out.Max = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Max {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Max[ResourceName(key)] = newVal
		}
	} else {
		out.Max = nil
	}
	if in.Min != nil {
		out.Min = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Min {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Min[ResourceName(key)] = newVal
		}
	} else {
		out.Min = nil
	}
	if in.Default != nil {
		out.Default = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Default {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Default[ResourceName(key)] = newVal
		}
	} else {
		out.Default = nil
	}
	return nil
}
// convert_api_LimitRangeList_To_v1_LimitRangeList converts an api.LimitRangeList
// into its v1 representation, converting list metadata and each LimitRange item.
func convert_api_LimitRangeList_To_v1_LimitRangeList(in *api.LimitRangeList, out *LimitRangeList, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.LimitRangeList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]LimitRange, len(in.Items))
		for i := range in.Items {
			if err := convert_api_LimitRange_To_v1_LimitRange(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// convert_api_LimitRangeSpec_To_v1_LimitRangeSpec converts an api.LimitRangeSpec
// into its v1 representation, element-wise converting the Limits slice.
func convert_api_LimitRangeSpec_To_v1_LimitRangeSpec(in *api.LimitRangeSpec, out *LimitRangeSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.LimitRangeSpec))(in)
	}
	if in.Limits != nil {
		out.Limits = make([]LimitRangeItem, len(in.Limits))
		for i := range in.Limits {
			if err := convert_api_LimitRangeItem_To_v1_LimitRangeItem(&in.Limits[i], &out.Limits[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Limits = nil
	}
	return nil
}

// convert_api_List_To_v1_List converts an api.List into its v1 representation.
// Items holds heterogeneous runtime objects, so it is delegated wholesale to the
// generic scope converter rather than converted element by element here.
func convert_api_List_To_v1_List(in *api.List, out *List, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.List))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.Items, &out.Items, 0); err != nil {
		return err
	}
	return nil
}
// convert_api_ListMeta_To_v1_ListMeta converts api.ListMeta into its v1
// representation by copying SelfLink and ResourceVersion.
func convert_api_ListMeta_To_v1_ListMeta(in *api.ListMeta, out *ListMeta, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ListMeta))(in)
	}
	out.SelfLink = in.SelfLink
	out.ResourceVersion = in.ResourceVersion
	return nil
}

// convert_api_ListOptions_To_v1_ListOptions converts api.ListOptions into its v1
// representation. The label and field selectors are opaque selector types and are
// routed through the generic scope converter.
func convert_api_ListOptions_To_v1_ListOptions(in *api.ListOptions, out *ListOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ListOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.LabelSelector, &out.LabelSelector, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.FieldSelector, &out.FieldSelector, 0); err != nil {
		return err
	}
	out.Watch = in.Watch
	out.ResourceVersion = in.ResourceVersion
	return nil
}

// convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress converts an
// api.LoadBalancerIngress into its v1 representation.
func convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress(in *api.LoadBalancerIngress, out *LoadBalancerIngress, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.LoadBalancerIngress))(in)
	}
	out.IP = in.IP
	out.Hostname = in.Hostname
	return nil
}
// convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus converts an
// api.LoadBalancerStatus into its v1 representation, element-wise converting the
// Ingress slice (nil stays nil).
func convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus(in *api.LoadBalancerStatus, out *LoadBalancerStatus, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.LoadBalancerStatus))(in)
	}
	if in.Ingress != nil {
		out.Ingress = make([]LoadBalancerIngress, len(in.Ingress))
		for i := range in.Ingress {
			if err := convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress(&in.Ingress[i], &out.Ingress[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ingress = nil
	}
	return nil
}

// convert_api_LocalObjectReference_To_v1_LocalObjectReference converts an
// api.LocalObjectReference into its v1 representation.
func convert_api_LocalObjectReference_To_v1_LocalObjectReference(in *api.LocalObjectReference, out *LocalObjectReference, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.LocalObjectReference))(in)
	}
	out.Name = in.Name
	return nil
}

// convert_api_NFSVolumeSource_To_v1_NFSVolumeSource converts an
// api.NFSVolumeSource into its v1 representation.
func convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in *api.NFSVolumeSource, out *NFSVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NFSVolumeSource))(in)
	}
	out.Server = in.Server
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_Namespace_To_v1_Namespace converts an api.Namespace into its v1
// representation: type metadata, object metadata, spec, and status.
func convert_api_Namespace_To_v1_Namespace(in *api.Namespace, out *Namespace, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Namespace))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_NamespaceSpec_To_v1_NamespaceSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_NamespaceStatus_To_v1_NamespaceStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}

// convert_api_NamespaceList_To_v1_NamespaceList converts an api.NamespaceList into
// its v1 representation, converting list metadata and each Namespace item.
func convert_api_NamespaceList_To_v1_NamespaceList(in *api.NamespaceList, out *NamespaceList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NamespaceList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Namespace, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Namespace_To_v1_Namespace(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// convert_api_NamespaceSpec_To_v1_NamespaceSpec converts an api.NamespaceSpec into
// its v1 representation, casting each finalizer name to the v1 FinalizerName type.
func convert_api_NamespaceSpec_To_v1_NamespaceSpec(in *api.NamespaceSpec, out *NamespaceSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NamespaceSpec))(in)
	}
	if in.Finalizers != nil {
		out.Finalizers = make([]FinalizerName, len(in.Finalizers))
		for i := range in.Finalizers {
			out.Finalizers[i] = FinalizerName(in.Finalizers[i])
		}
	} else {
		out.Finalizers = nil
	}
	return nil
}

// convert_api_NamespaceStatus_To_v1_NamespaceStatus converts an
// api.NamespaceStatus into its v1 representation via a phase-type cast.
func convert_api_NamespaceStatus_To_v1_NamespaceStatus(in *api.NamespaceStatus, out *NamespaceStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NamespaceStatus))(in)
	}
	out.Phase = NamespacePhase(in.Phase)
	return nil
}
// convert_api_Node_To_v1_Node converts an api.Node into its v1 representation:
// type metadata, object metadata, spec, and status.
func convert_api_Node_To_v1_Node(in *api.Node, out *Node, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Node))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_NodeSpec_To_v1_NodeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_NodeStatus_To_v1_NodeStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}

// convert_api_NodeAddress_To_v1_NodeAddress converts an api.NodeAddress into its
// v1 representation; Type is a string-type cast.
func convert_api_NodeAddress_To_v1_NodeAddress(in *api.NodeAddress, out *NodeAddress, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NodeAddress))(in)
	}
	out.Type = NodeAddressType(in.Type)
	out.Address = in.Address
	return nil
}

// convert_api_NodeCondition_To_v1_NodeCondition converts an api.NodeCondition into
// its v1 representation; the two timestamps are routed through the scope converter.
func convert_api_NodeCondition_To_v1_NodeCondition(in *api.NodeCondition, out *NodeCondition, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NodeCondition))(in)
	}
	out.Type = NodeConditionType(in.Type)
	out.Status = ConditionStatus(in.Status)
	if err := s.Convert(&in.LastHeartbeatTime, &out.LastHeartbeatTime, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.LastTransitionTime, &out.LastTransitionTime, 0); err != nil {
		return err
	}
	out.Reason = in.Reason
	out.Message = in.Message
	return nil
}
// convert_api_NodeList_To_v1_NodeList converts an api.NodeList into its v1
// representation, converting list metadata and each Node item.
func convert_api_NodeList_To_v1_NodeList(in *api.NodeList, out *NodeList, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NodeList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Node, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Node_To_v1_Node(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// convert_api_NodeSpec_To_v1_NodeSpec converts an api.NodeSpec into its v1
// representation by copying all scalar fields.
func convert_api_NodeSpec_To_v1_NodeSpec(in *api.NodeSpec, out *NodeSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NodeSpec))(in)
	}
	out.PodCIDR = in.PodCIDR
	out.ExternalID = in.ExternalID
	out.ProviderID = in.ProviderID
	out.Unschedulable = in.Unschedulable
	return nil
}

// convert_api_NodeStatus_To_v1_NodeStatus converts an api.NodeStatus into its v1
// representation: the Capacity resource map (each Quantity via the scope
// converter), the phase cast, the Conditions and Addresses slices, and NodeInfo.
func convert_api_NodeStatus_To_v1_NodeStatus(in *api.NodeStatus, out *NodeStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NodeStatus))(in)
	}
	if in.Capacity != nil {
		out.Capacity = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Capacity {
			// val is a per-iteration copy; Convert only reads from it.
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Capacity[ResourceName(key)] = newVal
		}
	} else {
		out.Capacity = nil
	}
	out.Phase = NodePhase(in.Phase)
	if in.Conditions != nil {
		out.Conditions = make([]NodeCondition, len(in.Conditions))
		for i := range in.Conditions {
			if err := convert_api_NodeCondition_To_v1_NodeCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Conditions = nil
	}
	if in.Addresses != nil {
		out.Addresses = make([]NodeAddress, len(in.Addresses))
		for i := range in.Addresses {
			if err := convert_api_NodeAddress_To_v1_NodeAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Addresses = nil
	}
	if err := convert_api_NodeSystemInfo_To_v1_NodeSystemInfo(&in.NodeInfo, &out.NodeInfo, s); err != nil {
		return err
	}
	return nil
}
// convert_api_NodeSystemInfo_To_v1_NodeSystemInfo converts an api.NodeSystemInfo
// into its v1 representation by copying all scalar fields.
func convert_api_NodeSystemInfo_To_v1_NodeSystemInfo(in *api.NodeSystemInfo, out *NodeSystemInfo, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.NodeSystemInfo))(in)
	}
	out.MachineID = in.MachineID
	out.SystemUUID = in.SystemUUID
	out.BootID = in.BootID
	out.KernelVersion = in.KernelVersion
	out.OsImage = in.OsImage
	out.ContainerRuntimeVersion = in.ContainerRuntimeVersion
	out.KubeletVersion = in.KubeletVersion
	out.KubeProxyVersion = in.KubeProxyVersion
	return nil
}

// convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector converts an
// api.ObjectFieldSelector into its v1 representation.
func convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector(in *api.ObjectFieldSelector, out *ObjectFieldSelector, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ObjectFieldSelector))(in)
	}
	out.APIVersion = in.APIVersion
	out.FieldPath = in.FieldPath
	return nil
}

// convert_api_ObjectMeta_To_v1_ObjectMeta converts api.ObjectMeta into its v1
// representation: scalar identity fields, the creation/deletion timestamps via the
// scope converter, and deep copies of the Labels and Annotations maps.
func convert_api_ObjectMeta_To_v1_ObjectMeta(in *api.ObjectMeta, out *ObjectMeta, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ObjectMeta))(in)
	}
	out.Name = in.Name
	out.GenerateName = in.GenerateName
	out.Namespace = in.Namespace
	out.SelfLink = in.SelfLink
	out.UID = in.UID
	out.ResourceVersion = in.ResourceVersion
	out.Generation = in.Generation
	if err := s.Convert(&in.CreationTimestamp, &out.CreationTimestamp, 0); err != nil {
		return err
	}
	if in.DeletionTimestamp != nil {
		// DeletionTimestamp is a pointer field, so a pointer-to-pointer is handed
		// to the scope converter (unlike CreationTimestamp above).
		if err := s.Convert(&in.DeletionTimestamp, &out.DeletionTimestamp, 0); err != nil {
			return err
		}
	} else {
		out.DeletionTimestamp = nil
	}
	if in.Labels != nil {
		out.Labels = make(map[string]string)
		for key, val := range in.Labels {
			out.Labels[key] = val
		}
	} else {
		out.Labels = nil
	}
	if in.Annotations != nil {
		out.Annotations = make(map[string]string)
		for key, val := range in.Annotations {
			out.Annotations[key] = val
		}
	} else {
		out.Annotations = nil
	}
	return nil
}
// convert_api_ObjectReference_To_v1_ObjectReference converts an
// api.ObjectReference into its v1 representation by copying all scalar fields.
func convert_api_ObjectReference_To_v1_ObjectReference(in *api.ObjectReference, out *ObjectReference, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ObjectReference))(in)
	}
	out.Kind = in.Kind
	out.Namespace = in.Namespace
	out.Name = in.Name
	out.UID = in.UID
	out.APIVersion = in.APIVersion
	out.ResourceVersion = in.ResourceVersion
	out.FieldPath = in.FieldPath
	return nil
}

// convert_api_PersistentVolume_To_v1_PersistentVolume converts an
// api.PersistentVolume into its v1 representation: type metadata, object
// metadata, spec, and status.
func convert_api_PersistentVolume_To_v1_PersistentVolume(in *api.PersistentVolume, out *PersistentVolume, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolume))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}

// convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim converts an
// api.PersistentVolumeClaim into its v1 representation: type metadata, object
// metadata, spec, and status.
func convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim(in *api.PersistentVolumeClaim, out *PersistentVolumeClaim, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaim))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_PersistentVolumeClaimList_To_v1_PersistentVolumeClaimList converts an
// api.PersistentVolumeClaimList into its v1 representation, converting list
// metadata and each claim item.
func convert_api_PersistentVolumeClaimList_To_v1_PersistentVolumeClaimList(in *api.PersistentVolumeClaimList, out *PersistentVolumeClaimList, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaimList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]PersistentVolumeClaim, len(in.Items))
		for i := range in.Items {
			if err := convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec converts an
// api.PersistentVolumeClaimSpec into its v1 representation: access-mode casts, the
// resource requirements, and the bound volume name.
func convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec(in *api.PersistentVolumeClaimSpec, out *PersistentVolumeClaimSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaimSpec))(in)
	}
	if in.AccessModes != nil {
		out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if err := convert_api_ResourceRequirements_To_v1_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil {
		return err
	}
	out.VolumeName = in.VolumeName
	return nil
}

// convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus
// converts an api.PersistentVolumeClaimStatus into its v1 representation: the
// phase cast, access-mode casts, and the Capacity resource map (each Quantity via
// the scope converter).
func convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus(in *api.PersistentVolumeClaimStatus, out *PersistentVolumeClaimStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaimStatus))(in)
	}
	out.Phase = PersistentVolumeClaimPhase(in.Phase)
	if in.AccessModes != nil {
		out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if in.Capacity != nil {
		out.Capacity = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Capacity {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Capacity[ResourceName(key)] = newVal
		}
	} else {
		out.Capacity = nil
	}
	return nil
}
// convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource
// converts an api.PersistentVolumeClaimVolumeSource into its v1 representation.
func convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource(in *api.PersistentVolumeClaimVolumeSource, out *PersistentVolumeClaimVolumeSource, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaimVolumeSource))(in)
	}
	out.ClaimName = in.ClaimName
	out.ReadOnly = in.ReadOnly
	return nil
}

// convert_api_PersistentVolumeList_To_v1_PersistentVolumeList converts an
// api.PersistentVolumeList into its v1 representation, converting list metadata
// and each PersistentVolume item.
func convert_api_PersistentVolumeList_To_v1_PersistentVolumeList(in *api.PersistentVolumeList, out *PersistentVolumeList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]PersistentVolume, len(in.Items))
		for i := range in.Items {
			if err := convert_api_PersistentVolume_To_v1_PersistentVolume(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource converts an
// api.PersistentVolumeSource into its v1 representation. The source is a union of
// optional pointers; each variant that is set is allocated on the output and
// deep-converted, and each unset variant is explicitly cleared to nil.
func convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource(in *api.PersistentVolumeSource, out *PersistentVolumeSource, s conversion.Scope) error {
	// Apply any defaulting function registered for the input type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeSource))(in)
	}
	if in.GCEPersistentDisk != nil {
		out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource)
		if err := convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil {
			return err
		}
	} else {
		out.GCEPersistentDisk = nil
	}
	if in.AWSElasticBlockStore != nil {
		out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource)
		if err := convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil {
			return err
		}
	} else {
		out.AWSElasticBlockStore = nil
	}
	if in.HostPath != nil {
		out.HostPath = new(HostPathVolumeSource)
		if err := convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil {
			return err
		}
	} else {
		out.HostPath = nil
	}
	if in.Glusterfs != nil {
		out.Glusterfs = new(GlusterfsVolumeSource)
		if err := convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil {
			return err
		}
	} else {
		out.Glusterfs = nil
	}
	if in.NFS != nil {
		out.NFS = new(NFSVolumeSource)
		if err := convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in.NFS, out.NFS, s); err != nil {
			return err
		}
	} else {
		out.NFS = nil
	}
	if in.RBD != nil {
		out.RBD = new(RBDVolumeSource)
		if err := convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in.RBD, out.RBD, s); err != nil {
			return err
		}
	} else {
		out.RBD = nil
	}
	if in.ISCSI != nil {
		out.ISCSI = new(ISCSIVolumeSource)
		if err := convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil {
			return err
		}
	} else {
		out.ISCSI = nil
	}
	return nil
}
// convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec copies api.PersistentVolumeSpec into the versioned v1 type, running any registered defaulter on the input first; Capacity quantities go through the generic scope converter, the volume source and ClaimRef through typed converters.
func convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec(in *api.PersistentVolumeSpec, out *PersistentVolumeSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeSpec))(in)
	}
	if in.Capacity != nil {
		out.Capacity = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Capacity {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Capacity[ResourceName(key)] = newVal
		}
	} else {
		out.Capacity = nil
	}
	if err := convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource(&in.PersistentVolumeSource, &out.PersistentVolumeSource, s); err != nil {
		return err
	}
	if in.AccessModes != nil {
		out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if in.ClaimRef != nil {
		out.ClaimRef = new(ObjectReference)
		if err := convert_api_ObjectReference_To_v1_ObjectReference(in.ClaimRef, out.ClaimRef, s); err != nil {
			return err
		}
	} else {
		out.ClaimRef = nil
	}
	out.PersistentVolumeReclaimPolicy = PersistentVolumeReclaimPolicy(in.PersistentVolumeReclaimPolicy)
	return nil
}
// convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus copies api.PersistentVolumeStatus into the versioned v1 type (Phase is a direct string-type cast), running any registered defaulter on the input first.
func convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus(in *api.PersistentVolumeStatus, out *PersistentVolumeStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeStatus))(in)
	}
	out.Phase = PersistentVolumePhase(in.Phase)
	out.Message = in.Message
	out.Reason = in.Reason
	return nil
}
// convert_api_Pod_To_v1_Pod copies api.Pod into the versioned v1 Pod by delegating to the TypeMeta, ObjectMeta, PodSpec and PodStatus converters, running any registered defaulter on the input first.
func convert_api_Pod_To_v1_Pod(in *api.Pod, out *Pod, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Pod))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PodSpec_To_v1_PodSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_PodStatus_To_v1_PodStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_PodCondition_To_v1_PodCondition copies api.PodCondition into the versioned v1 type (Type and Status are direct string-type casts), running any registered defaulter on the input first.
func convert_api_PodCondition_To_v1_PodCondition(in *api.PodCondition, out *PodCondition, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodCondition))(in)
	}
	out.Type = PodConditionType(in.Type)
	out.Status = ConditionStatus(in.Status)
	return nil
}
// convert_api_PodExecOptions_To_v1_PodExecOptions copies api.PodExecOptions into the versioned v1 type, running any registered defaulter on the input first; the Command slice is deep-copied element by element.
func convert_api_PodExecOptions_To_v1_PodExecOptions(in *api.PodExecOptions, out *PodExecOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodExecOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Stdin = in.Stdin
	out.Stdout = in.Stdout
	out.Stderr = in.Stderr
	out.TTY = in.TTY
	out.Container = in.Container
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	return nil
}
// convert_api_PodList_To_v1_PodList copies api.PodList into the versioned v1 list, running any registered defaulter on the input first; each item is converted with the Pod converter.
func convert_api_PodList_To_v1_PodList(in *api.PodList, out *PodList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Pod, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Pod_To_v1_Pod(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_PodLogOptions_To_v1_PodLogOptions copies api.PodLogOptions into the versioned v1 type, running any registered defaulter on the input first.
func convert_api_PodLogOptions_To_v1_PodLogOptions(in *api.PodLogOptions, out *PodLogOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodLogOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Container = in.Container
	out.Follow = in.Follow
	out.Previous = in.Previous
	return nil
}
// convert_api_PodProxyOptions_To_v1_PodProxyOptions copies api.PodProxyOptions into the versioned v1 type, running any registered defaulter on the input first.
func convert_api_PodProxyOptions_To_v1_PodProxyOptions(in *api.PodProxyOptions, out *PodProxyOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodProxyOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Path = in.Path
	return nil
}
// convert_api_PodStatus_To_v1_PodStatus copies api.PodStatus into the versioned v1 type, running any registered defaulter on the input first; Conditions and ContainerStatuses are deep-converted, StartTime goes through the generic scope converter only when set.
func convert_api_PodStatus_To_v1_PodStatus(in *api.PodStatus, out *PodStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodStatus))(in)
	}
	out.Phase = PodPhase(in.Phase)
	if in.Conditions != nil {
		out.Conditions = make([]PodCondition, len(in.Conditions))
		for i := range in.Conditions {
			if err := convert_api_PodCondition_To_v1_PodCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Conditions = nil
	}
	out.Message = in.Message
	out.Reason = in.Reason
	out.HostIP = in.HostIP
	out.PodIP = in.PodIP
	if in.StartTime != nil {
		if err := s.Convert(&in.StartTime, &out.StartTime, 0); err != nil {
			return err
		}
	} else {
		out.StartTime = nil
	}
	if in.ContainerStatuses != nil {
		out.ContainerStatuses = make([]ContainerStatus, len(in.ContainerStatuses))
		for i := range in.ContainerStatuses {
			if err := convert_api_ContainerStatus_To_v1_ContainerStatus(&in.ContainerStatuses[i], &out.ContainerStatuses[i], s); err != nil {
				return err
			}
		}
	} else {
		out.ContainerStatuses = nil
	}
	return nil
}
// convert_api_PodStatusResult_To_v1_PodStatusResult copies api.PodStatusResult into the versioned v1 type by delegating to the TypeMeta, ObjectMeta and PodStatus converters, running any registered defaulter on the input first.
func convert_api_PodStatusResult_To_v1_PodStatusResult(in *api.PodStatusResult, out *PodStatusResult, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodStatusResult))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PodStatus_To_v1_PodStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_PodTemplate_To_v1_PodTemplate copies api.PodTemplate into the versioned v1 type by delegating to the TypeMeta, ObjectMeta and PodTemplateSpec converters, running any registered defaulter on the input first.
func convert_api_PodTemplate_To_v1_PodTemplate(in *api.PodTemplate, out *PodTemplate, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodTemplate))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(&in.Template, &out.Template, s); err != nil {
		return err
	}
	return nil
}
// convert_api_PodTemplateList_To_v1_PodTemplateList copies api.PodTemplateList into the versioned v1 list, running any registered defaulter on the input first; each item is converted with the PodTemplate converter.
func convert_api_PodTemplateList_To_v1_PodTemplateList(in *api.PodTemplateList, out *PodTemplateList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodTemplateList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]PodTemplate, len(in.Items))
		for i := range in.Items {
			if err := convert_api_PodTemplate_To_v1_PodTemplate(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_PodTemplateSpec_To_v1_PodTemplateSpec copies api.PodTemplateSpec into the versioned v1 type by delegating to the ObjectMeta and PodSpec converters, running any registered defaulter on the input first.
func convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(in *api.PodTemplateSpec, out *PodTemplateSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodTemplateSpec))(in)
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PodSpec_To_v1_PodSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return nil
}
// convert_api_Probe_To_v1_Probe copies api.Probe into the versioned v1 type, running any registered defaulter on the input first; the embedded Handler is converted with its own converter.
func convert_api_Probe_To_v1_Probe(in *api.Probe, out *Probe, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Probe))(in)
	}
	if err := convert_api_Handler_To_v1_Handler(&in.Handler, &out.Handler, s); err != nil {
		return err
	}
	out.InitialDelaySeconds = in.InitialDelaySeconds
	out.TimeoutSeconds = in.TimeoutSeconds
	return nil
}
// convert_api_RBDVolumeSource_To_v1_RBDVolumeSource copies api.RBDVolumeSource into the versioned v1 type, running any registered defaulter on the input first; CephMonitors is deep-copied and SecretRef is converted only when set.
func convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in *api.RBDVolumeSource, out *RBDVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.RBDVolumeSource))(in)
	}
	if in.CephMonitors != nil {
		out.CephMonitors = make([]string, len(in.CephMonitors))
		for i := range in.CephMonitors {
			out.CephMonitors[i] = in.CephMonitors[i]
		}
	} else {
		out.CephMonitors = nil
	}
	out.RBDImage = in.RBDImage
	out.FSType = in.FSType
	out.RBDPool = in.RBDPool
	out.RadosUser = in.RadosUser
	out.Keyring = in.Keyring
	if in.SecretRef != nil {
		out.SecretRef = new(LocalObjectReference)
		if err := convert_api_LocalObjectReference_To_v1_LocalObjectReference(in.SecretRef, out.SecretRef, s); err != nil {
			return err
		}
	} else {
		out.SecretRef = nil
	}
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_RangeAllocation_To_v1_RangeAllocation copies api.RangeAllocation into the versioned v1 type, running any registered defaulter on the input first; the Data payload goes through the generic scope converter.
func convert_api_RangeAllocation_To_v1_RangeAllocation(in *api.RangeAllocation, out *RangeAllocation, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.RangeAllocation))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	out.Range = in.Range
	if err := s.Convert(&in.Data, &out.Data, 0); err != nil {
		return err
	}
	return nil
}
// convert_api_ReplicationController_To_v1_ReplicationController copies api.ReplicationController into the versioned v1 type by delegating to the TypeMeta, ObjectMeta, spec and status converters, running any registered defaulter on the input first.
func convert_api_ReplicationController_To_v1_ReplicationController(in *api.ReplicationController, out *ReplicationController, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ReplicationController))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_ReplicationControllerSpec_To_v1_ReplicationControllerSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_ReplicationControllerList_To_v1_ReplicationControllerList copies api.ReplicationControllerList into the versioned v1 list, running any registered defaulter on the input first; each item is converted with the ReplicationController converter.
func convert_api_ReplicationControllerList_To_v1_ReplicationControllerList(in *api.ReplicationControllerList, out *ReplicationControllerList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ReplicationControllerList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ReplicationController, len(in.Items))
		for i := range in.Items {
			if err := convert_api_ReplicationController_To_v1_ReplicationController(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus copies api.ReplicationControllerStatus into the versioned v1 type, running any registered defaulter on the input first.
func convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus(in *api.ReplicationControllerStatus, out *ReplicationControllerStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ReplicationControllerStatus))(in)
	}
	out.Replicas = in.Replicas
	out.ObservedGeneration = in.ObservedGeneration
	return nil
}
// convert_api_ResourceQuota_To_v1_ResourceQuota copies api.ResourceQuota into the versioned v1 type by delegating to the TypeMeta, ObjectMeta, spec and status converters, running any registered defaulter on the input first.
func convert_api_ResourceQuota_To_v1_ResourceQuota(in *api.ResourceQuota, out *ResourceQuota, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceQuota))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_ResourceQuotaList_To_v1_ResourceQuotaList copies api.ResourceQuotaList into the versioned v1 list, running any registered defaulter on the input first; each item is converted with the ResourceQuota converter.
func convert_api_ResourceQuotaList_To_v1_ResourceQuotaList(in *api.ResourceQuotaList, out *ResourceQuotaList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceQuotaList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ResourceQuota, len(in.Items))
		for i := range in.Items {
			if err := convert_api_ResourceQuota_To_v1_ResourceQuota(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec copies api.ResourceQuotaSpec into the versioned v1 type, running any registered defaulter on the input first; each Hard quantity goes through the generic scope converter.
func convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec(in *api.ResourceQuotaSpec, out *ResourceQuotaSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceQuotaSpec))(in)
	}
	if in.Hard != nil {
		out.Hard = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Hard {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Hard[ResourceName(key)] = newVal
		}
	} else {
		out.Hard = nil
	}
	return nil
}
// convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus copies api.ResourceQuotaStatus into the versioned v1 type, running any registered defaulter on the input first; each Hard and Used quantity goes through the generic scope converter.
func convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus(in *api.ResourceQuotaStatus, out *ResourceQuotaStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceQuotaStatus))(in)
	}
	if in.Hard != nil {
		out.Hard = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Hard {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Hard[ResourceName(key)] = newVal
		}
	} else {
		out.Hard = nil
	}
	if in.Used != nil {
		out.Used = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Used {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Used[ResourceName(key)] = newVal
		}
	} else {
		out.Used = nil
	}
	return nil
}
// convert_api_ResourceRequirements_To_v1_ResourceRequirements copies api.ResourceRequirements into the versioned v1 type, running any registered defaulter on the input first; each Limits and Requests quantity goes through the generic scope converter.
func convert_api_ResourceRequirements_To_v1_ResourceRequirements(in *api.ResourceRequirements, out *ResourceRequirements, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceRequirements))(in)
	}
	if in.Limits != nil {
		out.Limits = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Limits {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Limits[ResourceName(key)] = newVal
		}
	} else {
		out.Limits = nil
	}
	if in.Requests != nil {
		out.Requests = make(map[ResourceName]resource.Quantity)
		for key, val := range in.Requests {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Requests[ResourceName(key)] = newVal
		}
	} else {
		out.Requests = nil
	}
	return nil
}
// convert_api_SELinuxOptions_To_v1_SELinuxOptions copies api.SELinuxOptions into the versioned v1 type field-by-field, running any registered defaulter on the input first.
func convert_api_SELinuxOptions_To_v1_SELinuxOptions(in *api.SELinuxOptions, out *SELinuxOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SELinuxOptions))(in)
	}
	out.User = in.User
	out.Role = in.Role
	out.Type = in.Type
	out.Level = in.Level
	return nil
}
// convert_api_Secret_To_v1_Secret copies api.Secret into the versioned v1 type, running any registered defaulter on the input first; each Data value ([]uint8) goes through the generic scope converter.
func convert_api_Secret_To_v1_Secret(in *api.Secret, out *Secret, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Secret))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Data != nil {
		out.Data = make(map[string][]uint8)
		for key, val := range in.Data {
			newVal := []uint8{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Data[key] = newVal
		}
	} else {
		out.Data = nil
	}
	out.Type = SecretType(in.Type)
	return nil
}
// convert_api_SecretList_To_v1_SecretList copies api.SecretList into the versioned v1 list, running any registered defaulter on the input first; each item is converted with the Secret converter.
func convert_api_SecretList_To_v1_SecretList(in *api.SecretList, out *SecretList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SecretList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Secret, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Secret_To_v1_Secret(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_SecretVolumeSource_To_v1_SecretVolumeSource copies api.SecretVolumeSource into the versioned v1 type, running any registered defaulter on the input first.
func convert_api_SecretVolumeSource_To_v1_SecretVolumeSource(in *api.SecretVolumeSource, out *SecretVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SecretVolumeSource))(in)
	}
	out.SecretName = in.SecretName
	return nil
}
// convert_api_SecurityContext_To_v1_SecurityContext copies api.SecurityContext into the versioned v1 type, running any registered defaulter on the input first; pointer fields (Capabilities, Privileged, SELinuxOptions, RunAsUser) are deep-copied only when set.
func convert_api_SecurityContext_To_v1_SecurityContext(in *api.SecurityContext, out *SecurityContext, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SecurityContext))(in)
	}
	if in.Capabilities != nil {
		out.Capabilities = new(Capabilities)
		if err := convert_api_Capabilities_To_v1_Capabilities(in.Capabilities, out.Capabilities, s); err != nil {
			return err
		}
	} else {
		out.Capabilities = nil
	}
	if in.Privileged != nil {
		out.Privileged = new(bool)
		*out.Privileged = *in.Privileged
	} else {
		out.Privileged = nil
	}
	if in.SELinuxOptions != nil {
		out.SELinuxOptions = new(SELinuxOptions)
		if err := convert_api_SELinuxOptions_To_v1_SELinuxOptions(in.SELinuxOptions, out.SELinuxOptions, s); err != nil {
			return err
		}
	} else {
		out.SELinuxOptions = nil
	}
	if in.RunAsUser != nil {
		out.RunAsUser = new(int64)
		*out.RunAsUser = *in.RunAsUser
	} else {
		out.RunAsUser = nil
	}
	return nil
}
// convert_api_SerializedReference_To_v1_SerializedReference copies api.SerializedReference into the versioned v1 type by delegating to the TypeMeta and ObjectReference converters, running any registered defaulter on the input first.
func convert_api_SerializedReference_To_v1_SerializedReference(in *api.SerializedReference, out *SerializedReference, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SerializedReference))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Reference, &out.Reference, s); err != nil {
		return err
	}
	return nil
}
// convert_api_Service_To_v1_Service copies api.Service into the versioned v1 type by delegating to the TypeMeta, ObjectMeta, spec and status converters, running any registered defaulter on the input first.
func convert_api_Service_To_v1_Service(in *api.Service, out *Service, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Service))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_ServiceSpec_To_v1_ServiceSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_ServiceStatus_To_v1_ServiceStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
func convert_api_ServiceAccount_To_v1_ServiceAccount(in *api.ServiceAccount, out *ServiceAccount, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ServiceAccount))(in)
}
if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if in.Secrets != nil {
out.Secrets = make([]ObjectReference, len(in.Secrets))
for i := range in.Secrets {
if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Secrets[i], &out.Secrets[i], s); err != nil {
return err
}
}
} else {
out.Secrets = nil
}
if in.ImagePullSecrets != nil {
out.ImagePullSecrets = make([]LocalObjectReference, len(in.ImagePullSecrets))
for i := range in.ImagePullSecrets {
if err := convert_api_LocalObjectReference_To_v1_LocalObjectReference(&in.ImagePullSecrets[i], &out.ImagePullSecrets[i], s); err != nil {
return err
}<|fim▁hole|> }
} else {
out.ImagePullSecrets = nil
}
return nil
}
// convert_api_ServiceAccountList_To_v1_ServiceAccountList copies api.ServiceAccountList into the versioned v1 list, running any registered defaulter on the input first; each item is converted with the ServiceAccount converter.
func convert_api_ServiceAccountList_To_v1_ServiceAccountList(in *api.ServiceAccountList, out *ServiceAccountList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceAccountList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ServiceAccount, len(in.Items))
		for i := range in.Items {
			if err := convert_api_ServiceAccount_To_v1_ServiceAccount(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_ServiceList_To_v1_ServiceList copies api.ServiceList into the versioned v1 list, running any registered defaulter on the input first; each item is converted with the Service converter.
func convert_api_ServiceList_To_v1_ServiceList(in *api.ServiceList, out *ServiceList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Service, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Service_To_v1_Service(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_ServicePort_To_v1_ServicePort copies api.ServicePort into the versioned v1 type, running any registered defaulter on the input first; TargetPort (an int-or-string value) goes through the generic scope converter.
func convert_api_ServicePort_To_v1_ServicePort(in *api.ServicePort, out *ServicePort, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServicePort))(in)
	}
	out.Name = in.Name
	out.Protocol = Protocol(in.Protocol)
	out.Port = in.Port
	if err := s.Convert(&in.TargetPort, &out.TargetPort, 0); err != nil {
		return err
	}
	out.NodePort = in.NodePort
	return nil
}
// convert_api_ServiceSpec_To_v1_ServiceSpec copies api.ServiceSpec into the versioned v1 type, running any registered defaulter on the input first; Ports are deep-converted, the Selector map and DeprecatedPublicIPs slice are deep-copied.
func convert_api_ServiceSpec_To_v1_ServiceSpec(in *api.ServiceSpec, out *ServiceSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceSpec))(in)
	}
	if in.Ports != nil {
		out.Ports = make([]ServicePort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_api_ServicePort_To_v1_ServicePort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	if in.Selector != nil {
		out.Selector = make(map[string]string)
		for key, val := range in.Selector {
			out.Selector[key] = val
		}
	} else {
		out.Selector = nil
	}
	out.ClusterIP = in.ClusterIP
	out.Type = ServiceType(in.Type)
	if in.DeprecatedPublicIPs != nil {
		out.DeprecatedPublicIPs = make([]string, len(in.DeprecatedPublicIPs))
		for i := range in.DeprecatedPublicIPs {
			out.DeprecatedPublicIPs[i] = in.DeprecatedPublicIPs[i]
		}
	} else {
		out.DeprecatedPublicIPs = nil
	}
	out.SessionAffinity = ServiceAffinity(in.SessionAffinity)
	return nil
}
// convert_api_ServiceStatus_To_v1_ServiceStatus copies api.ServiceStatus into the versioned v1 type by delegating to the LoadBalancerStatus converter, running any registered defaulter on the input first.
func convert_api_ServiceStatus_To_v1_ServiceStatus(in *api.ServiceStatus, out *ServiceStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceStatus))(in)
	}
	if err := convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus(&in.LoadBalancer, &out.LoadBalancer, s); err != nil {
		return err
	}
	return nil
}
// convert_api_Status_To_v1_Status copies api.Status into the versioned v1 type, running any registered defaulter on the input first; the optional Details pointer is deep-converted only when set.
func convert_api_Status_To_v1_Status(in *api.Status, out *Status, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Status))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Status = in.Status
	out.Message = in.Message
	out.Reason = StatusReason(in.Reason)
	if in.Details != nil {
		out.Details = new(StatusDetails)
		if err := convert_api_StatusDetails_To_v1_StatusDetails(in.Details, out.Details, s); err != nil {
			return err
		}
	} else {
		out.Details = nil
	}
	out.Code = in.Code
	return nil
}
// convert_api_StatusCause_To_v1_StatusCause copies api.StatusCause into the versioned v1 type, running any registered defaulter on the input first.
func convert_api_StatusCause_To_v1_StatusCause(in *api.StatusCause, out *StatusCause, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.StatusCause))(in)
	}
	out.Type = CauseType(in.Type)
	out.Message = in.Message
	out.Field = in.Field
	return nil
}
// convert_api_StatusDetails_To_v1_StatusDetails copies api.StatusDetails into the versioned v1 type, running any registered defaulter on the input first; each Cause is converted with the StatusCause converter.
func convert_api_StatusDetails_To_v1_StatusDetails(in *api.StatusDetails, out *StatusDetails, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.StatusDetails))(in)
	}
	out.Name = in.Name
	out.Kind = in.Kind
	if in.Causes != nil {
		out.Causes = make([]StatusCause, len(in.Causes))
		for i := range in.Causes {
			if err := convert_api_StatusCause_To_v1_StatusCause(&in.Causes[i], &out.Causes[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Causes = nil
	}
	out.RetryAfterSeconds = in.RetryAfterSeconds
	return nil
}
// convert_api_TCPSocketAction_To_v1_TCPSocketAction copies api.TCPSocketAction into the versioned v1 type, running any registered defaulter on the input first; Port (an int-or-string value) goes through the generic scope converter.
func convert_api_TCPSocketAction_To_v1_TCPSocketAction(in *api.TCPSocketAction, out *TCPSocketAction, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.TCPSocketAction))(in)
	}
	if err := s.Convert(&in.Port, &out.Port, 0); err != nil {
		return err
	}
	return nil
}
// convert_api_TypeMeta_To_v1_TypeMeta copies api.TypeMeta (Kind, APIVersion) into the versioned v1 type, running any registered defaulter on the input first.
func convert_api_TypeMeta_To_v1_TypeMeta(in *api.TypeMeta, out *TypeMeta, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.TypeMeta))(in)
	}
	out.Kind = in.Kind
	out.APIVersion = in.APIVersion
	return nil
}
// convert_api_Volume_To_v1_Volume copies api.Volume into the versioned v1 type by delegating to the VolumeSource converter, running any registered defaulter on the input first.
func convert_api_Volume_To_v1_Volume(in *api.Volume, out *Volume, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Volume))(in)
	}
	out.Name = in.Name
	if err := convert_api_VolumeSource_To_v1_VolumeSource(&in.VolumeSource, &out.VolumeSource, s); err != nil {
		return err
	}
	return nil
}
// convert_api_VolumeMount_To_v1_VolumeMount copies api.VolumeMount into the versioned v1 type field-by-field, running any registered defaulter on the input first.
func convert_api_VolumeMount_To_v1_VolumeMount(in *api.VolumeMount, out *VolumeMount, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.VolumeMount))(in)
	}
	out.Name = in.Name
	out.ReadOnly = in.ReadOnly
	out.MountPath = in.MountPath
	return nil
}
// convert_api_VolumeSource_To_v1_VolumeSource copies api.VolumeSource into the versioned v1 type, running any registered defaulter on the input first; each of the (at most one set) source pointers is deep-converted when non-nil and nilled otherwise.
func convert_api_VolumeSource_To_v1_VolumeSource(in *api.VolumeSource, out *VolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.VolumeSource))(in)
	}
	if in.HostPath != nil {
		out.HostPath = new(HostPathVolumeSource)
		if err := convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil {
			return err
		}
	} else {
		out.HostPath = nil
	}
	if in.EmptyDir != nil {
		out.EmptyDir = new(EmptyDirVolumeSource)
		if err := convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource(in.EmptyDir, out.EmptyDir, s); err != nil {
			return err
		}
	} else {
		out.EmptyDir = nil
	}
	if in.GCEPersistentDisk != nil {
		out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource)
		if err := convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil {
			return err
		}
	} else {
		out.GCEPersistentDisk = nil
	}
	if in.AWSElasticBlockStore != nil {
		out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource)
		if err := convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil {
			return err
		}
	} else {
		out.AWSElasticBlockStore = nil
	}
	if in.GitRepo != nil {
		out.GitRepo = new(GitRepoVolumeSource)
		if err := convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource(in.GitRepo, out.GitRepo, s); err != nil {
			return err
		}
	} else {
		out.GitRepo = nil
	}
	if in.Secret != nil {
		out.Secret = new(SecretVolumeSource)
		if err := convert_api_SecretVolumeSource_To_v1_SecretVolumeSource(in.Secret, out.Secret, s); err != nil {
			return err
		}
	} else {
		out.Secret = nil
	}
	if in.NFS != nil {
		out.NFS = new(NFSVolumeSource)
		if err := convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in.NFS, out.NFS, s); err != nil {
			return err
		}
	} else {
		out.NFS = nil
	}
	if in.ISCSI != nil {
		out.ISCSI = new(ISCSIVolumeSource)
		if err := convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil {
			return err
		}
	} else {
		out.ISCSI = nil
	}
	if in.Glusterfs != nil {
		out.Glusterfs = new(GlusterfsVolumeSource)
		if err := convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil {
			return err
		}
	} else {
		out.Glusterfs = nil
	}
	if in.PersistentVolumeClaim != nil {
		out.PersistentVolumeClaim = new(PersistentVolumeClaimVolumeSource)
		if err := convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource(in.PersistentVolumeClaim, out.PersistentVolumeClaim, s); err != nil {
			return err
		}
	} else {
		out.PersistentVolumeClaim = nil
	}
	if in.RBD != nil {
		out.RBD = new(RBDVolumeSource)
		if err := convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in.RBD, out.RBD, s); err != nil {
			return err
		}
	} else {
		out.RBD = nil
	}
	return nil
}
// convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource copies the versioned v1 type back into api.AWSElasticBlockStoreVolumeSource field-by-field, running any registered defaulter on the input first.
func convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in *AWSElasticBlockStoreVolumeSource, out *api.AWSElasticBlockStoreVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*AWSElasticBlockStoreVolumeSource))(in)
	}
	out.VolumeID = in.VolumeID
	out.FSType = in.FSType
	out.Partition = in.Partition
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_Binding_To_api_Binding copies the versioned v1 Binding back into api.Binding by delegating to the TypeMeta, ObjectMeta and ObjectReference converters, running any registered defaulter on the input first.
func convert_v1_Binding_To_api_Binding(in *Binding, out *api.Binding, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Binding))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Target, &out.Target, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_Capabilities_To_api_Capabilities copies the versioned v1 Capabilities back into api.Capabilities, running any registered defaulter on the input first; Add and Drop slices are deep-copied with an element-wise type cast.
func convert_v1_Capabilities_To_api_Capabilities(in *Capabilities, out *api.Capabilities, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Capabilities))(in)
	}
	if in.Add != nil {
		out.Add = make([]api.Capability, len(in.Add))
		for i := range in.Add {
			out.Add[i] = api.Capability(in.Add[i])
		}
	} else {
		out.Add = nil
	}
	if in.Drop != nil {
		out.Drop = make([]api.Capability, len(in.Drop))
		for i := range in.Drop {
			out.Drop[i] = api.Capability(in.Drop[i])
		}
	} else {
		out.Drop = nil
	}
	return nil
}
// convert_v1_ComponentCondition_To_api_ComponentCondition copies the versioned v1 type back into api.ComponentCondition, running any registered defaulter on the input first.
func convert_v1_ComponentCondition_To_api_ComponentCondition(in *ComponentCondition, out *api.ComponentCondition, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ComponentCondition))(in)
	}
	out.Type = api.ComponentConditionType(in.Type)
	out.Status = api.ConditionStatus(in.Status)
	out.Message = in.Message
	out.Error = in.Error
	return nil
}
// convert_v1_ComponentStatus_To_api_ComponentStatus converts a v1 ComponentStatus to the internal
// api type: metadata via sub-converters, and Conditions deep-copied element by element
// (a nil Conditions slice stays nil).
func convert_v1_ComponentStatus_To_api_ComponentStatus(in *ComponentStatus, out *api.ComponentStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ComponentStatus))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Conditions != nil {
		out.Conditions = make([]api.ComponentCondition, len(in.Conditions))
		for i := range in.Conditions {
			if err := convert_v1_ComponentCondition_To_api_ComponentCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Conditions = nil
	}
	return nil
}
// convert_v1_ComponentStatusList_To_api_ComponentStatusList converts a v1 ComponentStatusList to
// the internal api type: list metadata via sub-converters and Items deep-copied per element.
func convert_v1_ComponentStatusList_To_api_ComponentStatusList(in *ComponentStatusList, out *api.ComponentStatusList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ComponentStatusList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.ComponentStatus, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_ComponentStatus_To_api_ComponentStatus(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_Container_To_api_Container converts a v1 Container to the internal api type.
// Scalar fields are copied directly; slice fields (Command, Args, Ports, Env, VolumeMounts)
// are deep-copied into new slices (nil stays nil); optional pointer fields (LivenessProbe,
// ReadinessProbe, Lifecycle, SecurityContext) are newly allocated and converted only when set.
// A registered defaulting function, if any, is applied to in first (mutating in).
func convert_v1_Container_To_api_Container(in *Container, out *api.Container, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Container))(in)
	}
	out.Name = in.Name
	out.Image = in.Image
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	if in.Args != nil {
		out.Args = make([]string, len(in.Args))
		for i := range in.Args {
			out.Args[i] = in.Args[i]
		}
	} else {
		out.Args = nil
	}
	out.WorkingDir = in.WorkingDir
	if in.Ports != nil {
		out.Ports = make([]api.ContainerPort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_v1_ContainerPort_To_api_ContainerPort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	if in.Env != nil {
		out.Env = make([]api.EnvVar, len(in.Env))
		for i := range in.Env {
			if err := convert_v1_EnvVar_To_api_EnvVar(&in.Env[i], &out.Env[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Env = nil
	}
	if err := convert_v1_ResourceRequirements_To_api_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil {
		return err
	}
	if in.VolumeMounts != nil {
		out.VolumeMounts = make([]api.VolumeMount, len(in.VolumeMounts))
		for i := range in.VolumeMounts {
			if err := convert_v1_VolumeMount_To_api_VolumeMount(&in.VolumeMounts[i], &out.VolumeMounts[i], s); err != nil {
				return err
			}
		}
	} else {
		out.VolumeMounts = nil
	}
	if in.LivenessProbe != nil {
		out.LivenessProbe = new(api.Probe)
		if err := convert_v1_Probe_To_api_Probe(in.LivenessProbe, out.LivenessProbe, s); err != nil {
			return err
		}
	} else {
		out.LivenessProbe = nil
	}
	if in.ReadinessProbe != nil {
		out.ReadinessProbe = new(api.Probe)
		if err := convert_v1_Probe_To_api_Probe(in.ReadinessProbe, out.ReadinessProbe, s); err != nil {
			return err
		}
	} else {
		out.ReadinessProbe = nil
	}
	if in.Lifecycle != nil {
		out.Lifecycle = new(api.Lifecycle)
		if err := convert_v1_Lifecycle_To_api_Lifecycle(in.Lifecycle, out.Lifecycle, s); err != nil {
			return err
		}
	} else {
		out.Lifecycle = nil
	}
	out.TerminationMessagePath = in.TerminationMessagePath
	out.ImagePullPolicy = api.PullPolicy(in.ImagePullPolicy)
	if in.SecurityContext != nil {
		out.SecurityContext = new(api.SecurityContext)
		if err := convert_v1_SecurityContext_To_api_SecurityContext(in.SecurityContext, out.SecurityContext, s); err != nil {
			return err
		}
	} else {
		out.SecurityContext = nil
	}
	return nil
}
// convert_v1_ContainerPort_To_api_ContainerPort converts a v1 ContainerPort to the internal api
// type by field copy, casting the Protocol string alias.
func convert_v1_ContainerPort_To_api_ContainerPort(in *ContainerPort, out *api.ContainerPort, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerPort))(in)
	}
	out.Name = in.Name
	out.HostPort = in.HostPort
	out.ContainerPort = in.ContainerPort
	out.Protocol = api.Protocol(in.Protocol)
	out.HostIP = in.HostIP
	return nil
}
// convert_v1_ContainerState_To_api_ContainerState converts a v1 ContainerState to the internal api
// type. Exactly the set pointer members (Waiting, Running, Terminated) are newly allocated and
// converted; unset ones are explicitly nilled on out.
func convert_v1_ContainerState_To_api_ContainerState(in *ContainerState, out *api.ContainerState, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerState))(in)
	}
	if in.Waiting != nil {
		out.Waiting = new(api.ContainerStateWaiting)
		if err := convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting(in.Waiting, out.Waiting, s); err != nil {
			return err
		}
	} else {
		out.Waiting = nil
	}
	if in.Running != nil {
		out.Running = new(api.ContainerStateRunning)
		if err := convert_v1_ContainerStateRunning_To_api_ContainerStateRunning(in.Running, out.Running, s); err != nil {
			return err
		}
	} else {
		out.Running = nil
	}
	if in.Terminated != nil {
		out.Terminated = new(api.ContainerStateTerminated)
		if err := convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated(in.Terminated, out.Terminated, s); err != nil {
			return err
		}
	} else {
		out.Terminated = nil
	}
	return nil
}
// convert_v1_ContainerStateRunning_To_api_ContainerStateRunning converts a v1 ContainerStateRunning
// to the internal api type; StartedAt goes through the scope's generic Convert (time types have no
// generated per-field converter here).
func convert_v1_ContainerStateRunning_To_api_ContainerStateRunning(in *ContainerStateRunning, out *api.ContainerStateRunning, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerStateRunning))(in)
	}
	if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil {
		return err
	}
	return nil
}
// convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated converts a v1
// ContainerStateTerminated to the internal api type; the two timestamps go through the
// scope's generic Convert, everything else is a direct field copy.
func convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated(in *ContainerStateTerminated, out *api.ContainerStateTerminated, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerStateTerminated))(in)
	}
	out.ExitCode = in.ExitCode
	out.Signal = in.Signal
	out.Reason = in.Reason
	out.Message = in.Message
	if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.FinishedAt, &out.FinishedAt, 0); err != nil {
		return err
	}
	out.ContainerID = in.ContainerID
	return nil
}
// convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting converts a v1 ContainerStateWaiting
// to the internal api type; only the Reason field is carried over.
func convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting(in *ContainerStateWaiting, out *api.ContainerStateWaiting, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerStateWaiting))(in)
	}
	out.Reason = in.Reason
	return nil
}
// convert_v1_ContainerStatus_To_api_ContainerStatus converts a v1 ContainerStatus to the internal
// api type; State and LastTerminationState are converted via the ContainerState converter,
// remaining fields are copied directly.
func convert_v1_ContainerStatus_To_api_ContainerStatus(in *ContainerStatus, out *api.ContainerStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerStatus))(in)
	}
	out.Name = in.Name
	if err := convert_v1_ContainerState_To_api_ContainerState(&in.State, &out.State, s); err != nil {
		return err
	}
	if err := convert_v1_ContainerState_To_api_ContainerState(&in.LastTerminationState, &out.LastTerminationState, s); err != nil {
		return err
	}
	out.Ready = in.Ready
	out.RestartCount = in.RestartCount
	out.Image = in.Image
	out.ImageID = in.ImageID
	out.ContainerID = in.ContainerID
	return nil
}
// convert_v1_DeleteOptions_To_api_DeleteOptions converts v1 DeleteOptions to the internal api type.
// GracePeriodSeconds is deep-copied through a freshly allocated int64 so out does not alias in.
func convert_v1_DeleteOptions_To_api_DeleteOptions(in *DeleteOptions, out *api.DeleteOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*DeleteOptions))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if in.GracePeriodSeconds != nil {
		out.GracePeriodSeconds = new(int64)
		*out.GracePeriodSeconds = *in.GracePeriodSeconds
	} else {
		out.GracePeriodSeconds = nil
	}
	return nil
}
// convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource converts a v1 EmptyDirVolumeSource
// to the internal api type; only Medium is carried over (as an api.StorageMedium cast).
func convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource(in *EmptyDirVolumeSource, out *api.EmptyDirVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EmptyDirVolumeSource))(in)
	}
	out.Medium = api.StorageMedium(in.Medium)
	return nil
}
// convert_v1_EndpointAddress_To_api_EndpointAddress converts a v1 EndpointAddress to the internal
// api type; the optional TargetRef is newly allocated and converted only when set.
func convert_v1_EndpointAddress_To_api_EndpointAddress(in *EndpointAddress, out *api.EndpointAddress, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EndpointAddress))(in)
	}
	out.IP = in.IP
	if in.TargetRef != nil {
		out.TargetRef = new(api.ObjectReference)
		if err := convert_v1_ObjectReference_To_api_ObjectReference(in.TargetRef, out.TargetRef, s); err != nil {
			return err
		}
	} else {
		out.TargetRef = nil
	}
	return nil
}
// convert_v1_EndpointPort_To_api_EndpointPort converts a v1 EndpointPort to the internal api type
// by field copy, casting the Protocol string alias.
func convert_v1_EndpointPort_To_api_EndpointPort(in *EndpointPort, out *api.EndpointPort, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EndpointPort))(in)
	}
	out.Name = in.Name
	out.Port = in.Port
	out.Protocol = api.Protocol(in.Protocol)
	return nil
}
// convert_v1_EndpointSubset_To_api_EndpointSubset converts a v1 EndpointSubset to the internal api
// type; Addresses and Ports are deep-copied element by element (nil slices stay nil).
func convert_v1_EndpointSubset_To_api_EndpointSubset(in *EndpointSubset, out *api.EndpointSubset, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EndpointSubset))(in)
	}
	if in.Addresses != nil {
		out.Addresses = make([]api.EndpointAddress, len(in.Addresses))
		for i := range in.Addresses {
			if err := convert_v1_EndpointAddress_To_api_EndpointAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Addresses = nil
	}
	if in.Ports != nil {
		out.Ports = make([]api.EndpointPort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_v1_EndpointPort_To_api_EndpointPort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	return nil
}
// convert_v1_Endpoints_To_api_Endpoints converts a v1 Endpoints object to the internal api type:
// metadata via sub-converters, Subsets deep-copied per element (nil stays nil).
func convert_v1_Endpoints_To_api_Endpoints(in *Endpoints, out *api.Endpoints, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Endpoints))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Subsets != nil {
		out.Subsets = make([]api.EndpointSubset, len(in.Subsets))
		for i := range in.Subsets {
			if err := convert_v1_EndpointSubset_To_api_EndpointSubset(&in.Subsets[i], &out.Subsets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Subsets = nil
	}
	return nil
}
// convert_v1_EndpointsList_To_api_EndpointsList converts a v1 EndpointsList to the internal api
// type: list metadata via sub-converters, Items deep-copied per element.
func convert_v1_EndpointsList_To_api_EndpointsList(in *EndpointsList, out *api.EndpointsList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EndpointsList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Endpoints, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Endpoints_To_api_Endpoints(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_EnvVar_To_api_EnvVar converts a v1 EnvVar to the internal api type; the optional
// ValueFrom source is newly allocated and converted only when set.
func convert_v1_EnvVar_To_api_EnvVar(in *EnvVar, out *api.EnvVar, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EnvVar))(in)
	}
	out.Name = in.Name
	out.Value = in.Value
	if in.ValueFrom != nil {
		out.ValueFrom = new(api.EnvVarSource)
		if err := convert_v1_EnvVarSource_To_api_EnvVarSource(in.ValueFrom, out.ValueFrom, s); err != nil {
			return err
		}
	} else {
		out.ValueFrom = nil
	}
	return nil
}
// convert_v1_EnvVarSource_To_api_EnvVarSource converts a v1 EnvVarSource to the internal api type;
// the optional FieldRef selector is newly allocated and converted only when set.
func convert_v1_EnvVarSource_To_api_EnvVarSource(in *EnvVarSource, out *api.EnvVarSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EnvVarSource))(in)
	}
	if in.FieldRef != nil {
		out.FieldRef = new(api.ObjectFieldSelector)
		if err := convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector(in.FieldRef, out.FieldRef, s); err != nil {
			return err
		}
	} else {
		out.FieldRef = nil
	}
	return nil
}
// convert_v1_Event_To_api_Event converts a v1 Event to the internal api type: metadata,
// InvolvedObject, and Source via sub-converters; the two timestamps via the scope's generic
// Convert; remaining fields copied directly.
func convert_v1_Event_To_api_Event(in *Event, out *api.Event, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Event))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.InvolvedObject, &out.InvolvedObject, s); err != nil {
		return err
	}
	out.Reason = in.Reason
	out.Message = in.Message
	if err := convert_v1_EventSource_To_api_EventSource(&in.Source, &out.Source, s); err != nil {
		return err
	}
	if err := s.Convert(&in.FirstTimestamp, &out.FirstTimestamp, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.LastTimestamp, &out.LastTimestamp, 0); err != nil {
		return err
	}
	out.Count = in.Count
	return nil
}
// convert_v1_EventList_To_api_EventList converts a v1 EventList to the internal api type:
// list metadata via sub-converters, Items deep-copied per element.
func convert_v1_EventList_To_api_EventList(in *EventList, out *api.EventList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EventList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Event, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Event_To_api_Event(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_EventSource_To_api_EventSource converts a v1 EventSource to the internal api type by
// direct field copy.
func convert_v1_EventSource_To_api_EventSource(in *EventSource, out *api.EventSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EventSource))(in)
	}
	out.Component = in.Component
	out.Host = in.Host
	return nil
}
// convert_v1_ExecAction_To_api_ExecAction converts a v1 ExecAction to the internal api type;
// Command is deep-copied into a new slice (nil stays nil).
func convert_v1_ExecAction_To_api_ExecAction(in *ExecAction, out *api.ExecAction, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ExecAction))(in)
	}
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	return nil
}
// convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource converts a v1
// GCEPersistentDiskVolumeSource to the internal api type by direct field copy.
func convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in *GCEPersistentDiskVolumeSource, out *api.GCEPersistentDiskVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*GCEPersistentDiskVolumeSource))(in)
	}
	out.PDName = in.PDName
	out.FSType = in.FSType
	out.Partition = in.Partition
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource converts a v1 GitRepoVolumeSource to
// the internal api type by direct field copy.
func convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource(in *GitRepoVolumeSource, out *api.GitRepoVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*GitRepoVolumeSource))(in)
	}
	out.Repository = in.Repository
	out.Revision = in.Revision
	return nil
}
// convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource converts a v1 GlusterfsVolumeSource
// to the internal api type by direct field copy.
func convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in *GlusterfsVolumeSource, out *api.GlusterfsVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*GlusterfsVolumeSource))(in)
	}
	out.EndpointsName = in.EndpointsName
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_HTTPGetAction_To_api_HTTPGetAction converts a v1 HTTPGetAction to the internal api
// type; Port (an int-or-string union) goes through the scope's generic Convert, and Scheme is a
// string-alias cast.
func convert_v1_HTTPGetAction_To_api_HTTPGetAction(in *HTTPGetAction, out *api.HTTPGetAction, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*HTTPGetAction))(in)
	}
	out.Path = in.Path
	if err := s.Convert(&in.Port, &out.Port, 0); err != nil {
		return err
	}
	out.Host = in.Host
	out.Scheme = api.URIScheme(in.Scheme)
	return nil
}
// convert_v1_Handler_To_api_Handler converts a v1 Handler to the internal api type. Each of the
// optional action members (Exec, HTTPGet, TCPSocket) is newly allocated and converted only when
// set; unset ones are explicitly nilled on out.
func convert_v1_Handler_To_api_Handler(in *Handler, out *api.Handler, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Handler))(in)
	}
	if in.Exec != nil {
		out.Exec = new(api.ExecAction)
		if err := convert_v1_ExecAction_To_api_ExecAction(in.Exec, out.Exec, s); err != nil {
			return err
		}
	} else {
		out.Exec = nil
	}
	if in.HTTPGet != nil {
		out.HTTPGet = new(api.HTTPGetAction)
		if err := convert_v1_HTTPGetAction_To_api_HTTPGetAction(in.HTTPGet, out.HTTPGet, s); err != nil {
			return err
		}
	} else {
		out.HTTPGet = nil
	}
	if in.TCPSocket != nil {
		out.TCPSocket = new(api.TCPSocketAction)
		if err := convert_v1_TCPSocketAction_To_api_TCPSocketAction(in.TCPSocket, out.TCPSocket, s); err != nil {
			return err
		}
	} else {
		out.TCPSocket = nil
	}
	return nil
}
// convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource converts a v1 HostPathVolumeSource
// to the internal api type; only Path is carried over.
func convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in *HostPathVolumeSource, out *api.HostPathVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*HostPathVolumeSource))(in)
	}
	out.Path = in.Path
	return nil
}
// convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource converts a v1 ISCSIVolumeSource to the
// internal api type by direct field copy.
func convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in *ISCSIVolumeSource, out *api.ISCSIVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ISCSIVolumeSource))(in)
	}
	out.TargetPortal = in.TargetPortal
	out.IQN = in.IQN
	out.Lun = in.Lun
	out.FSType = in.FSType
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_Lifecycle_To_api_Lifecycle converts a v1 Lifecycle to the internal api type; the
// optional PostStart and PreStop handlers are newly allocated and converted only when set.
func convert_v1_Lifecycle_To_api_Lifecycle(in *Lifecycle, out *api.Lifecycle, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Lifecycle))(in)
	}
	if in.PostStart != nil {
		out.PostStart = new(api.Handler)
		if err := convert_v1_Handler_To_api_Handler(in.PostStart, out.PostStart, s); err != nil {
			return err
		}
	} else {
		out.PostStart = nil
	}
	if in.PreStop != nil {
		out.PreStop = new(api.Handler)
		if err := convert_v1_Handler_To_api_Handler(in.PreStop, out.PreStop, s); err != nil {
			return err
		}
	} else {
		out.PreStop = nil
	}
	return nil
}
// convert_v1_LimitRange_To_api_LimitRange converts a v1 LimitRange to the internal api type,
// delegating TypeMeta, ObjectMeta, and Spec to their per-type converters.
func convert_v1_LimitRange_To_api_LimitRange(in *LimitRange, out *api.LimitRange, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LimitRange))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_LimitRangeSpec_To_api_LimitRangeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_LimitRangeItem_To_api_LimitRangeItem converts a v1 LimitRangeItem to the internal api
// type. Each of the Max/Min/Default resource maps is rebuilt into a fresh map keyed by
// api.ResourceName, with every Quantity value converted through the scope's generic Convert into a
// new value (so out never aliases in); nil input maps stay nil.
func convert_v1_LimitRangeItem_To_api_LimitRangeItem(in *LimitRangeItem, out *api.LimitRangeItem, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LimitRangeItem))(in)
	}
	out.Type = api.LimitType(in.Type)
	if in.Max != nil {
		out.Max = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Max {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Max[api.ResourceName(key)] = newVal
		}
	} else {
		out.Max = nil
	}
	if in.Min != nil {
		out.Min = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Min {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Min[api.ResourceName(key)] = newVal
		}
	} else {
		out.Min = nil
	}
	if in.Default != nil {
		out.Default = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Default {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Default[api.ResourceName(key)] = newVal
		}
	} else {
		out.Default = nil
	}
	return nil
}
// convert_v1_LimitRangeList_To_api_LimitRangeList converts a v1 LimitRangeList to the internal api
// type: list metadata via sub-converters, Items deep-copied per element.
func convert_v1_LimitRangeList_To_api_LimitRangeList(in *LimitRangeList, out *api.LimitRangeList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LimitRangeList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.LimitRange, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_LimitRange_To_api_LimitRange(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_LimitRangeSpec_To_api_LimitRangeSpec converts a v1 LimitRangeSpec to the internal api
// type; Limits is deep-copied element by element (nil stays nil).
func convert_v1_LimitRangeSpec_To_api_LimitRangeSpec(in *LimitRangeSpec, out *api.LimitRangeSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LimitRangeSpec))(in)
	}
	if in.Limits != nil {
		out.Limits = make([]api.LimitRangeItem, len(in.Limits))
		for i := range in.Limits {
			if err := convert_v1_LimitRangeItem_To_api_LimitRangeItem(&in.Limits[i], &out.Limits[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Limits = nil
	}
	return nil
}
// convert_v1_List_To_api_List converts a v1 List to the internal api type. Items holds
// heterogeneous runtime objects, so it is handed to the scope's generic Convert rather than a
// generated per-element converter.
func convert_v1_List_To_api_List(in *List, out *api.List, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*List))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.Items, &out.Items, 0); err != nil {
		return err
	}
	return nil
}
// convert_v1_ListMeta_To_api_ListMeta converts a v1 ListMeta to the internal api type by direct
// field copy.
func convert_v1_ListMeta_To_api_ListMeta(in *ListMeta, out *api.ListMeta, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ListMeta))(in)
	}
	out.SelfLink = in.SelfLink
	out.ResourceVersion = in.ResourceVersion
	return nil
}
// convert_v1_ListOptions_To_api_ListOptions converts v1 ListOptions to the internal api type; the
// label and field selectors go through the scope's generic Convert (their internal representations
// are not plain strings), remaining fields are copied directly.
func convert_v1_ListOptions_To_api_ListOptions(in *ListOptions, out *api.ListOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ListOptions))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.LabelSelector, &out.LabelSelector, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.FieldSelector, &out.FieldSelector, 0); err != nil {
		return err
	}
	out.Watch = in.Watch
	out.ResourceVersion = in.ResourceVersion
	return nil
}
// convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress converts a v1 LoadBalancerIngress to
// the internal api type by direct field copy.
func convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress(in *LoadBalancerIngress, out *api.LoadBalancerIngress, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LoadBalancerIngress))(in)
	}
	out.IP = in.IP
	out.Hostname = in.Hostname
	return nil
}
// convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus converts a v1 LoadBalancerStatus to the
// internal api type; Ingress is deep-copied element by element (nil stays nil).
func convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus(in *LoadBalancerStatus, out *api.LoadBalancerStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LoadBalancerStatus))(in)
	}
	if in.Ingress != nil {
		out.Ingress = make([]api.LoadBalancerIngress, len(in.Ingress))
		for i := range in.Ingress {
			if err := convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress(&in.Ingress[i], &out.Ingress[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ingress = nil
	}
	return nil
}
// convert_v1_LocalObjectReference_To_api_LocalObjectReference converts a v1 LocalObjectReference
// to the internal api type; only Name is carried over.
func convert_v1_LocalObjectReference_To_api_LocalObjectReference(in *LocalObjectReference, out *api.LocalObjectReference, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LocalObjectReference))(in)
	}
	out.Name = in.Name
	return nil
}
// convert_v1_NFSVolumeSource_To_api_NFSVolumeSource converts a v1 NFSVolumeSource to the internal
// api type by direct field copy.
func convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in *NFSVolumeSource, out *api.NFSVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NFSVolumeSource))(in)
	}
	out.Server = in.Server
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_Namespace_To_api_Namespace converts a v1 Namespace to the internal api type,
// delegating TypeMeta, ObjectMeta, Spec, and Status to their per-type converters.
func convert_v1_Namespace_To_api_Namespace(in *Namespace, out *api.Namespace, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Namespace))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_NamespaceSpec_To_api_NamespaceSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_NamespaceStatus_To_api_NamespaceStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_NamespaceList_To_api_NamespaceList converts a v1 NamespaceList to the internal api
// type: list metadata via sub-converters, Items deep-copied per element.
func convert_v1_NamespaceList_To_api_NamespaceList(in *NamespaceList, out *api.NamespaceList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NamespaceList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Namespace, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Namespace_To_api_Namespace(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_NamespaceSpec_To_api_NamespaceSpec converts a v1 NamespaceSpec to the internal api
// type; Finalizers is deep-copied into a new slice with each element cast to api.FinalizerName
// (nil stays nil).
func convert_v1_NamespaceSpec_To_api_NamespaceSpec(in *NamespaceSpec, out *api.NamespaceSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NamespaceSpec))(in)
	}
	if in.Finalizers != nil {
		out.Finalizers = make([]api.FinalizerName, len(in.Finalizers))
		for i := range in.Finalizers {
			out.Finalizers[i] = api.FinalizerName(in.Finalizers[i])
		}
	} else {
		out.Finalizers = nil
	}
	return nil
}
// convert_v1_NamespaceStatus_To_api_NamespaceStatus converts a v1 NamespaceStatus to the internal
// api type; only Phase is carried over (as an api.NamespacePhase cast).
func convert_v1_NamespaceStatus_To_api_NamespaceStatus(in *NamespaceStatus, out *api.NamespaceStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NamespaceStatus))(in)
	}
	out.Phase = api.NamespacePhase(in.Phase)
	return nil
}
// convert_v1_Node_To_api_Node converts a v1 Node to the internal api type, delegating TypeMeta,
// ObjectMeta, Spec, and Status to their per-type converters.
func convert_v1_Node_To_api_Node(in *Node, out *api.Node, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Node))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_NodeSpec_To_api_NodeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_NodeStatus_To_api_NodeStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_NodeAddress_To_api_NodeAddress converts a v1 NodeAddress to the internal api type by
// field copy, casting the Type string alias.
func convert_v1_NodeAddress_To_api_NodeAddress(in *NodeAddress, out *api.NodeAddress, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NodeAddress))(in)
	}
	out.Type = api.NodeAddressType(in.Type)
	out.Address = in.Address
	return nil
}
// convert_v1_NodeCondition_To_api_NodeCondition converts a v1 NodeCondition to the internal api
// type; the two transition timestamps go through the scope's generic Convert, other fields are
// copied directly (with string-alias casts for Type and Status).
func convert_v1_NodeCondition_To_api_NodeCondition(in *NodeCondition, out *api.NodeCondition, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NodeCondition))(in)
	}
	out.Type = api.NodeConditionType(in.Type)
	out.Status = api.ConditionStatus(in.Status)
	if err := s.Convert(&in.LastHeartbeatTime, &out.LastHeartbeatTime, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.LastTransitionTime, &out.LastTransitionTime, 0); err != nil {
		return err
	}
	out.Reason = in.Reason
	out.Message = in.Message
	return nil
}
// convert_v1_NodeList_To_api_NodeList converts a v1 NodeList to the internal api type: list
// metadata via sub-converters, Items deep-copied per element.
func convert_v1_NodeList_To_api_NodeList(in *NodeList, out *api.NodeList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NodeList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Node, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Node_To_api_Node(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_NodeSpec_To_api_NodeSpec converts a v1 NodeSpec to the internal api type by direct
// field copy.
func convert_v1_NodeSpec_To_api_NodeSpec(in *NodeSpec, out *api.NodeSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NodeSpec))(in)
	}
	out.PodCIDR = in.PodCIDR
	out.ExternalID = in.ExternalID
	out.ProviderID = in.ProviderID
	out.Unschedulable = in.Unschedulable
	return nil
}
// convert_v1_NodeStatus_To_api_NodeStatus converts a v1 NodeStatus to the internal api type.
// Capacity is rebuilt into a fresh map with each Quantity converted through the scope's generic
// Convert; Conditions and Addresses are deep-copied per element; NodeInfo is delegated to its
// converter. Nil maps/slices stay nil.
func convert_v1_NodeStatus_To_api_NodeStatus(in *NodeStatus, out *api.NodeStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NodeStatus))(in)
	}
	if in.Capacity != nil {
		out.Capacity = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Capacity {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Capacity[api.ResourceName(key)] = newVal
		}
	} else {
		out.Capacity = nil
	}
	out.Phase = api.NodePhase(in.Phase)
	if in.Conditions != nil {
		out.Conditions = make([]api.NodeCondition, len(in.Conditions))
		for i := range in.Conditions {
			if err := convert_v1_NodeCondition_To_api_NodeCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Conditions = nil
	}
	if in.Addresses != nil {
		out.Addresses = make([]api.NodeAddress, len(in.Addresses))
		for i := range in.Addresses {
			if err := convert_v1_NodeAddress_To_api_NodeAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Addresses = nil
	}
	if err := convert_v1_NodeSystemInfo_To_api_NodeSystemInfo(&in.NodeInfo, &out.NodeInfo, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_NodeSystemInfo_To_api_NodeSystemInfo converts a v1 NodeSystemInfo to the internal api
// type by direct field copy.
func convert_v1_NodeSystemInfo_To_api_NodeSystemInfo(in *NodeSystemInfo, out *api.NodeSystemInfo, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*NodeSystemInfo))(in)
	}
	out.MachineID = in.MachineID
	out.SystemUUID = in.SystemUUID
	out.BootID = in.BootID
	out.KernelVersion = in.KernelVersion
	out.OsImage = in.OsImage
	out.ContainerRuntimeVersion = in.ContainerRuntimeVersion
	out.KubeletVersion = in.KubeletVersion
	out.KubeProxyVersion = in.KubeProxyVersion
	return nil
}
// convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector converts a v1 ObjectFieldSelector to
// the internal api type by direct field copy.
func convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector(in *ObjectFieldSelector, out *api.ObjectFieldSelector, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ObjectFieldSelector))(in)
	}
	out.APIVersion = in.APIVersion
	out.FieldPath = in.FieldPath
	return nil
}
// convert_v1_ObjectMeta_To_api_ObjectMeta converts a v1 ObjectMeta to the internal api type.
// Scalar fields are copied directly; CreationTimestamp and the optional DeletionTimestamp go
// through the scope's generic Convert; Labels and Annotations are deep-copied into fresh maps
// (nil maps stay nil).
func convert_v1_ObjectMeta_To_api_ObjectMeta(in *ObjectMeta, out *api.ObjectMeta, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ObjectMeta))(in)
	}
	out.Name = in.Name
	out.GenerateName = in.GenerateName
	out.Namespace = in.Namespace
	out.SelfLink = in.SelfLink
	out.UID = in.UID
	out.ResourceVersion = in.ResourceVersion
	out.Generation = in.Generation
	if err := s.Convert(&in.CreationTimestamp, &out.CreationTimestamp, 0); err != nil {
		return err
	}
	if in.DeletionTimestamp != nil {
		if err := s.Convert(&in.DeletionTimestamp, &out.DeletionTimestamp, 0); err != nil {
			return err
		}
	} else {
		out.DeletionTimestamp = nil
	}
	if in.Labels != nil {
		out.Labels = make(map[string]string)
		for key, val := range in.Labels {
			out.Labels[key] = val
		}
	} else {
		out.Labels = nil
	}
	if in.Annotations != nil {
		out.Annotations = make(map[string]string)
		for key, val := range in.Annotations {
			out.Annotations[key] = val
		}
	} else {
		out.Annotations = nil
	}
	return nil
}
// convert_v1_ObjectReference_To_api_ObjectReference converts a v1 ObjectReference to the internal
// api type by direct field copy.
func convert_v1_ObjectReference_To_api_ObjectReference(in *ObjectReference, out *api.ObjectReference, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ObjectReference))(in)
	}
	out.Kind = in.Kind
	out.Namespace = in.Namespace
	out.Name = in.Name
	out.UID = in.UID
	out.APIVersion = in.APIVersion
	out.ResourceVersion = in.ResourceVersion
	out.FieldPath = in.FieldPath
	return nil
}
// convert_v1_PersistentVolume_To_api_PersistentVolume converts a versioned (v1)
// PersistentVolume into its internal (api) representation by delegating each of its
// four sections (TypeMeta, ObjectMeta, Spec, Status) to the per-type converters.
// Generated code: do not edit by hand.
func convert_v1_PersistentVolume_To_api_PersistentVolume(in *PersistentVolume, out *api.PersistentVolume, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolume))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim converts a versioned (v1)
// PersistentVolumeClaim into its internal (api) representation by delegating TypeMeta,
// ObjectMeta, Spec, and Status to the per-type converters. Generated code: do not edit by hand.
func convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim(in *PersistentVolumeClaim, out *api.PersistentVolumeClaim, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeClaim))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_PersistentVolumeClaimList_To_api_PersistentVolumeClaimList converts a
// versioned (v1) PersistentVolumeClaimList into its internal (api) representation,
// converting each list item individually; a nil Items slice stays nil.
// Generated code: do not edit by hand.
func convert_v1_PersistentVolumeClaimList_To_api_PersistentVolumeClaimList(in *PersistentVolumeClaimList, out *api.PersistentVolumeClaimList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeClaimList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.PersistentVolumeClaim, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec converts a
// versioned (v1) PersistentVolumeClaimSpec into its internal (api) representation.
// AccessModes are copied element-wise with a type cast; Resources are delegated to the
// ResourceRequirements converter. Generated code: do not edit by hand.
func convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec(in *PersistentVolumeClaimSpec, out *api.PersistentVolumeClaimSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeClaimSpec))(in)
	}
	if in.AccessModes != nil {
		out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if err := convert_v1_ResourceRequirements_To_api_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil {
		return err
	}
	out.VolumeName = in.VolumeName
	return nil
}
// convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus converts a
// versioned (v1) PersistentVolumeClaimStatus into its internal (api) representation.
// The Capacity map is rebuilt entry by entry, converting each Quantity through the
// conversion scope. Generated code: do not edit by hand.
func convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus(in *PersistentVolumeClaimStatus, out *api.PersistentVolumeClaimStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeClaimStatus))(in)
	}
	out.Phase = api.PersistentVolumeClaimPhase(in.Phase)
	if in.AccessModes != nil {
		out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if in.Capacity != nil {
		out.Capacity = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Capacity {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Capacity[api.ResourceName(key)] = newVal
		}
	} else {
		out.Capacity = nil
	}
	return nil
}
// convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource
// converts a versioned (v1) PersistentVolumeClaimVolumeSource into its internal (api)
// representation; both fields are plain value copies. Generated code: do not edit by hand.
func convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource(in *PersistentVolumeClaimVolumeSource, out *api.PersistentVolumeClaimVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeClaimVolumeSource))(in)
	}
	out.ClaimName = in.ClaimName
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_PersistentVolumeList_To_api_PersistentVolumeList converts a versioned (v1)
// PersistentVolumeList into its internal (api) representation, converting each list item
// individually; a nil Items slice stays nil. Generated code: do not edit by hand.
func convert_v1_PersistentVolumeList_To_api_PersistentVolumeList(in *PersistentVolumeList, out *api.PersistentVolumeList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.PersistentVolume, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_PersistentVolume_To_api_PersistentVolume(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource converts a versioned
// (v1) PersistentVolumeSource union into its internal (api) representation. Each optional
// volume-source pointer is either allocated and converted (when set) or explicitly
// nilled-out (when unset) so the output never retains stale members.
// Generated code: do not edit by hand.
func convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource(in *PersistentVolumeSource, out *api.PersistentVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeSource))(in)
	}
	if in.GCEPersistentDisk != nil {
		out.GCEPersistentDisk = new(api.GCEPersistentDiskVolumeSource)
		if err := convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil {
			return err
		}
	} else {
		out.GCEPersistentDisk = nil
	}
	if in.AWSElasticBlockStore != nil {
		out.AWSElasticBlockStore = new(api.AWSElasticBlockStoreVolumeSource)
		if err := convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil {
			return err
		}
	} else {
		out.AWSElasticBlockStore = nil
	}
	if in.HostPath != nil {
		out.HostPath = new(api.HostPathVolumeSource)
		if err := convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil {
			return err
		}
	} else {
		out.HostPath = nil
	}
	if in.Glusterfs != nil {
		out.Glusterfs = new(api.GlusterfsVolumeSource)
		if err := convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil {
			return err
		}
	} else {
		out.Glusterfs = nil
	}
	if in.NFS != nil {
		out.NFS = new(api.NFSVolumeSource)
		if err := convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in.NFS, out.NFS, s); err != nil {
			return err
		}
	} else {
		out.NFS = nil
	}
	if in.RBD != nil {
		out.RBD = new(api.RBDVolumeSource)
		if err := convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in.RBD, out.RBD, s); err != nil {
			return err
		}
	} else {
		out.RBD = nil
	}
	if in.ISCSI != nil {
		out.ISCSI = new(api.ISCSIVolumeSource)
		if err := convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil {
			return err
		}
	} else {
		out.ISCSI = nil
	}
	return nil
}
// convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec converts a versioned (v1)
// PersistentVolumeSpec into its internal (api) representation: the Capacity map is rebuilt
// with scope-converted Quantities, the embedded PersistentVolumeSource is delegated, the
// AccessModes slice is copied with a cast, and the optional ClaimRef pointer is allocated
// only when set. Generated code: do not edit by hand.
func convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec(in *PersistentVolumeSpec, out *api.PersistentVolumeSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeSpec))(in)
	}
	if in.Capacity != nil {
		out.Capacity = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Capacity {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Capacity[api.ResourceName(key)] = newVal
		}
	} else {
		out.Capacity = nil
	}
	if err := convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource(&in.PersistentVolumeSource, &out.PersistentVolumeSource, s); err != nil {
		return err
	}
	if in.AccessModes != nil {
		out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if in.ClaimRef != nil {
		out.ClaimRef = new(api.ObjectReference)
		if err := convert_v1_ObjectReference_To_api_ObjectReference(in.ClaimRef, out.ClaimRef, s); err != nil {
			return err
		}
	} else {
		out.ClaimRef = nil
	}
	out.PersistentVolumeReclaimPolicy = api.PersistentVolumeReclaimPolicy(in.PersistentVolumeReclaimPolicy)
	return nil
}
// convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus converts a versioned
// (v1) PersistentVolumeStatus into its internal (api) representation; Phase is cast and
// the string fields copied. Generated code: do not edit by hand.
func convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus(in *PersistentVolumeStatus, out *api.PersistentVolumeStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PersistentVolumeStatus))(in)
	}
	out.Phase = api.PersistentVolumePhase(in.Phase)
	out.Message = in.Message
	out.Reason = in.Reason
	return nil
}
// convert_v1_Pod_To_api_Pod converts a versioned (v1) Pod into its internal (api)
// representation by delegating TypeMeta, ObjectMeta, Spec, and Status to the per-type
// converters. Generated code: do not edit by hand.
func convert_v1_Pod_To_api_Pod(in *Pod, out *api.Pod, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Pod))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_PodSpec_To_api_PodSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_PodStatus_To_api_PodStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_PodCondition_To_api_PodCondition converts a versioned (v1) PodCondition into
// its internal (api) representation via simple type casts.
// NOTE(review): only Type and Status are carried over — confirm the v1 PodCondition type
// has no additional fields at this API version.
// Generated code: do not edit by hand.
func convert_v1_PodCondition_To_api_PodCondition(in *PodCondition, out *api.PodCondition, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodCondition))(in)
	}
	out.Type = api.PodConditionType(in.Type)
	out.Status = api.ConditionStatus(in.Status)
	return nil
}
// convert_v1_PodExecOptions_To_api_PodExecOptions converts versioned (v1) PodExecOptions
// into their internal (api) representation; the Command slice is deep-copied so the two
// objects do not share backing storage. Generated code: do not edit by hand.
func convert_v1_PodExecOptions_To_api_PodExecOptions(in *PodExecOptions, out *api.PodExecOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodExecOptions))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Stdin = in.Stdin
	out.Stdout = in.Stdout
	out.Stderr = in.Stderr
	out.TTY = in.TTY
	out.Container = in.Container
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	return nil
}
// convert_v1_PodList_To_api_PodList converts a versioned (v1) PodList into its internal
// (api) representation, converting each Pod individually; a nil Items slice stays nil.
// Generated code: do not edit by hand.
func convert_v1_PodList_To_api_PodList(in *PodList, out *api.PodList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Pod, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Pod_To_api_Pod(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_PodLogOptions_To_api_PodLogOptions converts versioned (v1) PodLogOptions
// into their internal (api) representation.
// NOTE(review): only Container, Follow, and Previous are carried over — confirm the v1
// type has no additional fields at this API version.
// Generated code: do not edit by hand.
func convert_v1_PodLogOptions_To_api_PodLogOptions(in *PodLogOptions, out *api.PodLogOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodLogOptions))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Container = in.Container
	out.Follow = in.Follow
	out.Previous = in.Previous
	return nil
}
// convert_v1_PodProxyOptions_To_api_PodProxyOptions converts versioned (v1)
// PodProxyOptions into their internal (api) representation.
// Generated code: do not edit by hand.
func convert_v1_PodProxyOptions_To_api_PodProxyOptions(in *PodProxyOptions, out *api.PodProxyOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodProxyOptions))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Path = in.Path
	return nil
}
// convert_v1_PodStatus_To_api_PodStatus converts a versioned (v1) PodStatus into its
// internal (api) representation: Conditions and ContainerStatuses are converted
// element-wise, the optional StartTime pointer goes through the conversion scope, and
// nil slices/pointers stay nil. Generated code: do not edit by hand.
func convert_v1_PodStatus_To_api_PodStatus(in *PodStatus, out *api.PodStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodStatus))(in)
	}
	out.Phase = api.PodPhase(in.Phase)
	if in.Conditions != nil {
		out.Conditions = make([]api.PodCondition, len(in.Conditions))
		for i := range in.Conditions {
			if err := convert_v1_PodCondition_To_api_PodCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Conditions = nil
	}
	out.Message = in.Message
	out.Reason = in.Reason
	out.HostIP = in.HostIP
	out.PodIP = in.PodIP
	if in.StartTime != nil {
		if err := s.Convert(&in.StartTime, &out.StartTime, 0); err != nil {
			return err
		}
	} else {
		out.StartTime = nil
	}
	if in.ContainerStatuses != nil {
		out.ContainerStatuses = make([]api.ContainerStatus, len(in.ContainerStatuses))
		for i := range in.ContainerStatuses {
			if err := convert_v1_ContainerStatus_To_api_ContainerStatus(&in.ContainerStatuses[i], &out.ContainerStatuses[i], s); err != nil {
				return err
			}
		}
	} else {
		out.ContainerStatuses = nil
	}
	return nil
}
// convert_v1_PodStatusResult_To_api_PodStatusResult converts a versioned (v1)
// PodStatusResult into its internal (api) representation by delegating TypeMeta,
// ObjectMeta, and Status to the per-type converters. Generated code: do not edit by hand.
func convert_v1_PodStatusResult_To_api_PodStatusResult(in *PodStatusResult, out *api.PodStatusResult, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodStatusResult))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_PodStatus_To_api_PodStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_PodTemplate_To_api_PodTemplate converts a versioned (v1) PodTemplate into
// its internal (api) representation by delegating TypeMeta, ObjectMeta, and Template to
// the per-type converters. Generated code: do not edit by hand.
func convert_v1_PodTemplate_To_api_PodTemplate(in *PodTemplate, out *api.PodTemplate, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodTemplate))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(&in.Template, &out.Template, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_PodTemplateList_To_api_PodTemplateList converts a versioned (v1)
// PodTemplateList into its internal (api) representation, converting each item
// individually; a nil Items slice stays nil. Generated code: do not edit by hand.
func convert_v1_PodTemplateList_To_api_PodTemplateList(in *PodTemplateList, out *api.PodTemplateList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodTemplateList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.PodTemplate, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_PodTemplate_To_api_PodTemplate(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_PodTemplateSpec_To_api_PodTemplateSpec converts a versioned (v1)
// PodTemplateSpec into its internal (api) representation by delegating ObjectMeta and
// Spec to the per-type converters. Generated code: do not edit by hand.
func convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(in *PodTemplateSpec, out *api.PodTemplateSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*PodTemplateSpec))(in)
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_PodSpec_To_api_PodSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_Probe_To_api_Probe converts a versioned (v1) Probe into its internal (api)
// representation: the embedded Handler is delegated, the delay/timeout fields copied.
// Generated code: do not edit by hand.
func convert_v1_Probe_To_api_Probe(in *Probe, out *api.Probe, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Probe))(in)
	}
	if err := convert_v1_Handler_To_api_Handler(&in.Handler, &out.Handler, s); err != nil {
		return err
	}
	out.InitialDelaySeconds = in.InitialDelaySeconds
	out.TimeoutSeconds = in.TimeoutSeconds
	return nil
}
// convert_v1_RBDVolumeSource_To_api_RBDVolumeSource converts a versioned (v1)
// RBDVolumeSource into its internal (api) representation: the CephMonitors slice is
// deep-copied, the optional SecretRef pointer is allocated only when set, and all other
// fields are plain copies. Generated code: do not edit by hand.
func convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in *RBDVolumeSource, out *api.RBDVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*RBDVolumeSource))(in)
	}
	if in.CephMonitors != nil {
		out.CephMonitors = make([]string, len(in.CephMonitors))
		for i := range in.CephMonitors {
			out.CephMonitors[i] = in.CephMonitors[i]
		}
	} else {
		out.CephMonitors = nil
	}
	out.RBDImage = in.RBDImage
	out.FSType = in.FSType
	out.RBDPool = in.RBDPool
	out.RadosUser = in.RadosUser
	out.Keyring = in.Keyring
	if in.SecretRef != nil {
		out.SecretRef = new(api.LocalObjectReference)
		if err := convert_v1_LocalObjectReference_To_api_LocalObjectReference(in.SecretRef, out.SecretRef, s); err != nil {
			return err
		}
	} else {
		out.SecretRef = nil
	}
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_RangeAllocation_To_api_RangeAllocation converts a versioned (v1)
// RangeAllocation into its internal (api) representation; the Data field is converted
// through the conversion scope. Generated code: do not edit by hand.
func convert_v1_RangeAllocation_To_api_RangeAllocation(in *RangeAllocation, out *api.RangeAllocation, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*RangeAllocation))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	out.Range = in.Range
	if err := s.Convert(&in.Data, &out.Data, 0); err != nil {
		return err
	}
	return nil
}
// convert_v1_ReplicationController_To_api_ReplicationController converts a versioned (v1)
// ReplicationController into its internal (api) representation by delegating TypeMeta,
// ObjectMeta, Spec, and Status to the per-type converters.
// Generated code: do not edit by hand.
func convert_v1_ReplicationController_To_api_ReplicationController(in *ReplicationController, out *api.ReplicationController, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ReplicationController))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ReplicationControllerSpec_To_api_ReplicationControllerSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_ReplicationControllerList_To_api_ReplicationControllerList converts a
// versioned (v1) ReplicationControllerList into its internal (api) representation,
// converting each item individually; a nil Items slice stays nil.
// Generated code: do not edit by hand.
func convert_v1_ReplicationControllerList_To_api_ReplicationControllerList(in *ReplicationControllerList, out *api.ReplicationControllerList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ReplicationControllerList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.ReplicationController, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_ReplicationController_To_api_ReplicationController(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus converts a
// versioned (v1) ReplicationControllerStatus into its internal (api) representation;
// both fields are plain value copies. Generated code: do not edit by hand.
func convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus(in *ReplicationControllerStatus, out *api.ReplicationControllerStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ReplicationControllerStatus))(in)
	}
	out.Replicas = in.Replicas
	out.ObservedGeneration = in.ObservedGeneration
	return nil
}
// convert_v1_ResourceQuota_To_api_ResourceQuota converts a versioned (v1) ResourceQuota
// into its internal (api) representation by delegating TypeMeta, ObjectMeta, Spec, and
// Status to the per-type converters. Generated code: do not edit by hand.
func convert_v1_ResourceQuota_To_api_ResourceQuota(in *ResourceQuota, out *api.ResourceQuota, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ResourceQuota))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_ResourceQuotaList_To_api_ResourceQuotaList converts a versioned (v1)
// ResourceQuotaList into its internal (api) representation, converting each item
// individually; a nil Items slice stays nil. Generated code: do not edit by hand.
func convert_v1_ResourceQuotaList_To_api_ResourceQuotaList(in *ResourceQuotaList, out *api.ResourceQuotaList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ResourceQuotaList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.ResourceQuota, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_ResourceQuota_To_api_ResourceQuota(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec converts a versioned (v1)
// ResourceQuotaSpec into its internal (api) representation; the Hard map is rebuilt
// entry by entry with scope-converted Quantities. Generated code: do not edit by hand.
func convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec(in *ResourceQuotaSpec, out *api.ResourceQuotaSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ResourceQuotaSpec))(in)
	}
	if in.Hard != nil {
		out.Hard = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Hard {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Hard[api.ResourceName(key)] = newVal
		}
	} else {
		out.Hard = nil
	}
	return nil
}
// convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus converts a versioned (v1)
// ResourceQuotaStatus into its internal (api) representation; the Hard and Used maps are
// each rebuilt entry by entry with scope-converted Quantities.
// Generated code: do not edit by hand.
func convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus(in *ResourceQuotaStatus, out *api.ResourceQuotaStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ResourceQuotaStatus))(in)
	}
	if in.Hard != nil {
		out.Hard = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Hard {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Hard[api.ResourceName(key)] = newVal
		}
	} else {
		out.Hard = nil
	}
	if in.Used != nil {
		out.Used = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Used {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Used[api.ResourceName(key)] = newVal
		}
	} else {
		out.Used = nil
	}
	return nil
}
// convert_v1_ResourceRequirements_To_api_ResourceRequirements converts versioned (v1)
// ResourceRequirements into their internal (api) representation; the Limits and Requests
// maps are each rebuilt entry by entry with scope-converted Quantities.
// Generated code: do not edit by hand.
func convert_v1_ResourceRequirements_To_api_ResourceRequirements(in *ResourceRequirements, out *api.ResourceRequirements, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ResourceRequirements))(in)
	}
	if in.Limits != nil {
		out.Limits = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Limits {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Limits[api.ResourceName(key)] = newVal
		}
	} else {
		out.Limits = nil
	}
	if in.Requests != nil {
		out.Requests = make(map[api.ResourceName]resource.Quantity)
		for key, val := range in.Requests {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Requests[api.ResourceName(key)] = newVal
		}
	} else {
		out.Requests = nil
	}
	return nil
}
// convert_v1_SELinuxOptions_To_api_SELinuxOptions converts versioned (v1) SELinuxOptions
// into their internal (api) representation; all four fields are plain string copies.
// Generated code: do not edit by hand.
func convert_v1_SELinuxOptions_To_api_SELinuxOptions(in *SELinuxOptions, out *api.SELinuxOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SELinuxOptions))(in)
	}
	out.User = in.User
	out.Role = in.Role
	out.Type = in.Type
	out.Level = in.Level
	return nil
}
// convert_v1_Secret_To_api_Secret converts a versioned (v1) Secret into its internal
// (api) representation; the Data map of byte slices is rebuilt entry by entry through
// the conversion scope so the two objects do not share backing storage.
// Generated code: do not edit by hand.
func convert_v1_Secret_To_api_Secret(in *Secret, out *api.Secret, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Secret))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Data != nil {
		out.Data = make(map[string][]uint8)
		for key, val := range in.Data {
			newVal := []uint8{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Data[key] = newVal
		}
	} else {
		out.Data = nil
	}
	out.Type = api.SecretType(in.Type)
	return nil
}
// convert_v1_SecretList_To_api_SecretList converts a versioned (v1) SecretList into its
// internal (api) representation, converting each Secret individually; a nil Items slice
// stays nil. Generated code: do not edit by hand.
func convert_v1_SecretList_To_api_SecretList(in *SecretList, out *api.SecretList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SecretList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Secret, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Secret_To_api_Secret(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_SecretVolumeSource_To_api_SecretVolumeSource converts a versioned (v1)
// SecretVolumeSource into its internal (api) representation.
// Generated code: do not edit by hand.
func convert_v1_SecretVolumeSource_To_api_SecretVolumeSource(in *SecretVolumeSource, out *api.SecretVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SecretVolumeSource))(in)
	}
	out.SecretName = in.SecretName
	return nil
}
// convert_v1_SecurityContext_To_api_SecurityContext converts a versioned (v1)
// SecurityContext into its internal (api) representation. Each optional pointer field
// (Capabilities, Privileged, SELinuxOptions, RunAsUser) is freshly allocated when set —
// scalar pointers are deep-copied by dereference — or nilled-out when unset.
// Generated code: do not edit by hand.
func convert_v1_SecurityContext_To_api_SecurityContext(in *SecurityContext, out *api.SecurityContext, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SecurityContext))(in)
	}
	if in.Capabilities != nil {
		out.Capabilities = new(api.Capabilities)
		if err := convert_v1_Capabilities_To_api_Capabilities(in.Capabilities, out.Capabilities, s); err != nil {
			return err
		}
	} else {
		out.Capabilities = nil
	}
	if in.Privileged != nil {
		out.Privileged = new(bool)
		*out.Privileged = *in.Privileged
	} else {
		out.Privileged = nil
	}
	if in.SELinuxOptions != nil {
		out.SELinuxOptions = new(api.SELinuxOptions)
		if err := convert_v1_SELinuxOptions_To_api_SELinuxOptions(in.SELinuxOptions, out.SELinuxOptions, s); err != nil {
			return err
		}
	} else {
		out.SELinuxOptions = nil
	}
	if in.RunAsUser != nil {
		out.RunAsUser = new(int64)
		*out.RunAsUser = *in.RunAsUser
	} else {
		out.RunAsUser = nil
	}
	return nil
}
// convert_v1_SerializedReference_To_api_SerializedReference converts a versioned (v1)
// SerializedReference into its internal (api) representation by delegating TypeMeta and
// the embedded Reference to the per-type converters. Generated code: do not edit by hand.
func convert_v1_SerializedReference_To_api_SerializedReference(in *SerializedReference, out *api.SerializedReference, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SerializedReference))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Reference, &out.Reference, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_Service_To_api_Service converts a versioned (v1) Service into its internal
// (api) representation by delegating TypeMeta, ObjectMeta, Spec, and Status to the
// per-type converters. Generated code: do not edit by hand.
func convert_v1_Service_To_api_Service(in *Service, out *api.Service, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Service))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ServiceSpec_To_api_ServiceSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_ServiceStatus_To_api_ServiceStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_ServiceAccount_To_api_ServiceAccount converts a versioned (v1)
// ServiceAccount into its internal (api) representation; the Secrets and
// ImagePullSecrets slices are converted element-wise and nil slices stay nil.
// Generated code: do not edit by hand.
func convert_v1_ServiceAccount_To_api_ServiceAccount(in *ServiceAccount, out *api.ServiceAccount, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceAccount))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Secrets != nil {
		out.Secrets = make([]api.ObjectReference, len(in.Secrets))
		for i := range in.Secrets {
			if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Secrets[i], &out.Secrets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Secrets = nil
	}
	if in.ImagePullSecrets != nil {
		out.ImagePullSecrets = make([]api.LocalObjectReference, len(in.ImagePullSecrets))
		for i := range in.ImagePullSecrets {
			if err := convert_v1_LocalObjectReference_To_api_LocalObjectReference(&in.ImagePullSecrets[i], &out.ImagePullSecrets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.ImagePullSecrets = nil
	}
	return nil
}
// convert_v1_ServiceAccountList_To_api_ServiceAccountList converts a versioned (v1)
// ServiceAccountList into its internal (api) representation, converting each item
// individually; a nil Items slice stays nil. Generated code: do not edit by hand.
func convert_v1_ServiceAccountList_To_api_ServiceAccountList(in *ServiceAccountList, out *api.ServiceAccountList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceAccountList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.ServiceAccount, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_ServiceAccount_To_api_ServiceAccount(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_ServiceList_To_api_ServiceList converts a versioned (v1) ServiceList into
// its internal (api) representation, converting each Service individually; a nil Items
// slice stays nil. Generated code: do not edit by hand.
func convert_v1_ServiceList_To_api_ServiceList(in *ServiceList, out *api.ServiceList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Service, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Service_To_api_Service(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_ServicePort_To_api_ServicePort converts a versioned (v1) ServicePort into
// its internal (api) representation; TargetPort (an IntOrString-style union) is converted
// through the conversion scope. Generated code: do not edit by hand.
func convert_v1_ServicePort_To_api_ServicePort(in *ServicePort, out *api.ServicePort, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServicePort))(in)
	}
	out.Name = in.Name
	out.Protocol = api.Protocol(in.Protocol)
	out.Port = in.Port
	if err := s.Convert(&in.TargetPort, &out.TargetPort, 0); err != nil {
		return err
	}
	out.NodePort = in.NodePort
	return nil
}
// convert_v1_ServiceSpec_To_api_ServiceSpec converts a versioned (v1) ServiceSpec into
// its internal (api) representation: Ports are converted element-wise, the Selector map
// and DeprecatedPublicIPs slice are deep-copied, and the enum-like string fields are
// cast to their internal types. Generated code: do not edit by hand.
func convert_v1_ServiceSpec_To_api_ServiceSpec(in *ServiceSpec, out *api.ServiceSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceSpec))(in)
	}
	if in.Ports != nil {
		out.Ports = make([]api.ServicePort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_v1_ServicePort_To_api_ServicePort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	if in.Selector != nil {
		out.Selector = make(map[string]string)
		for key, val := range in.Selector {
			out.Selector[key] = val
		}
	} else {
		out.Selector = nil
	}
	out.ClusterIP = in.ClusterIP
	out.Type = api.ServiceType(in.Type)
	if in.DeprecatedPublicIPs != nil {
		out.DeprecatedPublicIPs = make([]string, len(in.DeprecatedPublicIPs))
		for i := range in.DeprecatedPublicIPs {
			out.DeprecatedPublicIPs[i] = in.DeprecatedPublicIPs[i]
		}
	} else {
		out.DeprecatedPublicIPs = nil
	}
	out.SessionAffinity = api.ServiceAffinity(in.SessionAffinity)
	return nil
}
// convert_v1_ServiceStatus_To_api_ServiceStatus converts a versioned (v1) ServiceStatus
// into its internal (api) representation by delegating the LoadBalancer field.
// Generated code: do not edit by hand.
func convert_v1_ServiceStatus_To_api_ServiceStatus(in *ServiceStatus, out *api.ServiceStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceStatus))(in)
	}
	if err := convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus(&in.LoadBalancer, &out.LoadBalancer, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_Status_To_api_Status converts a versioned (v1) Status into its internal
// (api) representation; the optional Details pointer is allocated and converted only
// when set. Generated code: do not edit by hand.
func convert_v1_Status_To_api_Status(in *Status, out *api.Status, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Status))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Status = in.Status
	out.Message = in.Message
	out.Reason = api.StatusReason(in.Reason)
	if in.Details != nil {
		out.Details = new(api.StatusDetails)
		if err := convert_v1_StatusDetails_To_api_StatusDetails(in.Details, out.Details, s); err != nil {
			return err
		}
	} else {
		out.Details = nil
	}
	out.Code = in.Code
	return nil
}
func convert_v1_StatusCause_To_api_StatusCause(in *StatusCause, out *api.StatusCause, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*StatusCause))(in)
}
out.Type = api.CauseType(in.Type)
out.Message = in.Message
out.Field = in.Field
return nil
}
func convert_v1_StatusDetails_To_api_StatusDetails(in *StatusDetails, out *api.StatusDetails, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*StatusDetails))(in)
}
out.Name = in.Name
out.Kind = in.Kind
if in.Causes != nil {
out.Causes = make([]api.StatusCause, len(in.Causes))
for i := range in.Causes {
if err := convert_v1_StatusCause_To_api_StatusCause(&in.Causes[i], &out.Causes[i], s); err != nil {
return err
}
}
} else {
out.Causes = nil
}
out.RetryAfterSeconds = in.RetryAfterSeconds
return nil
}
func convert_v1_TCPSocketAction_To_api_TCPSocketAction(in *TCPSocketAction, out *api.TCPSocketAction, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*TCPSocketAction))(in)
}
if err := s.Convert(&in.Port, &out.Port, 0); err != nil {
return err
}
return nil
}
func convert_v1_TypeMeta_To_api_TypeMeta(in *TypeMeta, out *api.TypeMeta, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*TypeMeta))(in)
}
out.Kind = in.Kind
out.APIVersion = in.APIVersion
return nil
}
func convert_v1_Volume_To_api_Volume(in *Volume, out *api.Volume, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*Volume))(in)
}
out.Name = in.Name
if err := convert_v1_VolumeSource_To_api_VolumeSource(&in.VolumeSource, &out.VolumeSource, s); err != nil {
return err
}
return nil
}
func convert_v1_VolumeMount_To_api_VolumeMount(in *VolumeMount, out *api.VolumeMount, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*VolumeMount))(in)
}
out.Name = in.Name
out.ReadOnly = in.ReadOnly
out.MountPath = in.MountPath
return nil
}
func convert_v1_VolumeSource_To_api_VolumeSource(in *VolumeSource, out *api.VolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*VolumeSource))(in)
}
if in.HostPath != nil {
out.HostPath = new(api.HostPathVolumeSource)
if err := convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil {
return err
}
} else {
out.HostPath = nil
}
if in.EmptyDir != nil {
out.EmptyDir = new(api.EmptyDirVolumeSource)
if err := convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource(in.EmptyDir, out.EmptyDir, s); err != nil {
return err
}
} else {
out.EmptyDir = nil
}
if in.GCEPersistentDisk != nil {
out.GCEPersistentDisk = new(api.GCEPersistentDiskVolumeSource)
if err := convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil {
return err
}
} else {
out.GCEPersistentDisk = nil
}
if in.AWSElasticBlockStore != nil {
out.AWSElasticBlockStore = new(api.AWSElasticBlockStoreVolumeSource)
if err := convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil {
return err
}
} else {
out.AWSElasticBlockStore = nil
}
if in.GitRepo != nil {
out.GitRepo = new(api.GitRepoVolumeSource)
if err := convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource(in.GitRepo, out.GitRepo, s); err != nil {
return err
}
} else {
out.GitRepo = nil
}
if in.Secret != nil {
out.Secret = new(api.SecretVolumeSource)
if err := convert_v1_SecretVolumeSource_To_api_SecretVolumeSource(in.Secret, out.Secret, s); err != nil {
return err
}
} else {
out.Secret = nil
}
if in.NFS != nil {
out.NFS = new(api.NFSVolumeSource)
if err := convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in.NFS, out.NFS, s); err != nil {
return err
}
} else {
out.NFS = nil
}
if in.ISCSI != nil {
out.ISCSI = new(api.ISCSIVolumeSource)
if err := convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil {
return err
}
} else {
out.ISCSI = nil
}
if in.Glusterfs != nil {
out.Glusterfs = new(api.GlusterfsVolumeSource)
if err := convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil {
return err
}
} else {
out.Glusterfs = nil
}
if in.PersistentVolumeClaim != nil {
out.PersistentVolumeClaim = new(api.PersistentVolumeClaimVolumeSource)
if err := convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource(in.PersistentVolumeClaim, out.PersistentVolumeClaim, s); err != nil {
return err
}
} else {
out.PersistentVolumeClaim = nil
}
if in.RBD != nil {
out.RBD = new(api.RBDVolumeSource)
if err := convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in.RBD, out.RBD, s); err != nil {
return err
}
} else {
out.RBD = nil
}
return nil
}
func init() {
err := api.Scheme.AddGeneratedConversionFuncs(
convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource,
convert_api_Binding_To_v1_Binding,
convert_api_Capabilities_To_v1_Capabilities,
convert_api_ComponentCondition_To_v1_ComponentCondition,
convert_api_ComponentStatusList_To_v1_ComponentStatusList,
convert_api_ComponentStatus_To_v1_ComponentStatus,
convert_api_ContainerPort_To_v1_ContainerPort,
convert_api_ContainerStateRunning_To_v1_ContainerStateRunning,
convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated,
convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting,
convert_api_ContainerState_To_v1_ContainerState,
convert_api_ContainerStatus_To_v1_ContainerStatus,
convert_api_Container_To_v1_Container,
convert_api_DeleteOptions_To_v1_DeleteOptions,
convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource,
convert_api_EndpointAddress_To_v1_EndpointAddress,
convert_api_EndpointPort_To_v1_EndpointPort,
convert_api_EndpointSubset_To_v1_EndpointSubset,
convert_api_EndpointsList_To_v1_EndpointsList,
convert_api_Endpoints_To_v1_Endpoints,
convert_api_EnvVarSource_To_v1_EnvVarSource,
convert_api_EnvVar_To_v1_EnvVar,
convert_api_EventList_To_v1_EventList,
convert_api_EventSource_To_v1_EventSource,
convert_api_Event_To_v1_Event,
convert_api_ExecAction_To_v1_ExecAction,
convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource,
convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource,
convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource,
convert_api_HTTPGetAction_To_v1_HTTPGetAction,
convert_api_Handler_To_v1_Handler,
convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource,
convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource,
convert_api_Lifecycle_To_v1_Lifecycle,
convert_api_LimitRangeItem_To_v1_LimitRangeItem,
convert_api_LimitRangeList_To_v1_LimitRangeList,
convert_api_LimitRangeSpec_To_v1_LimitRangeSpec,
convert_api_LimitRange_To_v1_LimitRange,
convert_api_ListMeta_To_v1_ListMeta,
convert_api_ListOptions_To_v1_ListOptions,
convert_api_List_To_v1_List,
convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress,
convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus,
convert_api_LocalObjectReference_To_v1_LocalObjectReference,
convert_api_NFSVolumeSource_To_v1_NFSVolumeSource,
convert_api_NamespaceList_To_v1_NamespaceList,
convert_api_NamespaceSpec_To_v1_NamespaceSpec,
convert_api_NamespaceStatus_To_v1_NamespaceStatus,
convert_api_Namespace_To_v1_Namespace,
convert_api_NodeAddress_To_v1_NodeAddress,
convert_api_NodeCondition_To_v1_NodeCondition,
convert_api_NodeList_To_v1_NodeList,
convert_api_NodeSpec_To_v1_NodeSpec,
convert_api_NodeStatus_To_v1_NodeStatus,
convert_api_NodeSystemInfo_To_v1_NodeSystemInfo,
convert_api_Node_To_v1_Node,
convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector,
convert_api_ObjectMeta_To_v1_ObjectMeta,
convert_api_ObjectReference_To_v1_ObjectReference,
convert_api_PersistentVolumeClaimList_To_v1_PersistentVolumeClaimList,
convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec,
convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus,
convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource,
convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim,
convert_api_PersistentVolumeList_To_v1_PersistentVolumeList,
convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource,
convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec,
convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus,
convert_api_PersistentVolume_To_v1_PersistentVolume,
convert_api_PodCondition_To_v1_PodCondition,
convert_api_PodExecOptions_To_v1_PodExecOptions,
convert_api_PodList_To_v1_PodList,
convert_api_PodLogOptions_To_v1_PodLogOptions,
convert_api_PodProxyOptions_To_v1_PodProxyOptions,
convert_api_PodStatusResult_To_v1_PodStatusResult,
convert_api_PodStatus_To_v1_PodStatus,
convert_api_PodTemplateList_To_v1_PodTemplateList,
convert_api_PodTemplateSpec_To_v1_PodTemplateSpec,
convert_api_PodTemplate_To_v1_PodTemplate,
convert_api_Pod_To_v1_Pod,
convert_api_Probe_To_v1_Probe,
convert_api_RBDVolumeSource_To_v1_RBDVolumeSource,
convert_api_RangeAllocation_To_v1_RangeAllocation,
convert_api_ReplicationControllerList_To_v1_ReplicationControllerList,
convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus,
convert_api_ReplicationController_To_v1_ReplicationController,
convert_api_ResourceQuotaList_To_v1_ResourceQuotaList,
convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec,
convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus,
convert_api_ResourceQuota_To_v1_ResourceQuota,
convert_api_ResourceRequirements_To_v1_ResourceRequirements,
convert_api_SELinuxOptions_To_v1_SELinuxOptions,
convert_api_SecretList_To_v1_SecretList,
convert_api_SecretVolumeSource_To_v1_SecretVolumeSource,
convert_api_Secret_To_v1_Secret,
convert_api_SecurityContext_To_v1_SecurityContext,
convert_api_SerializedReference_To_v1_SerializedReference,
convert_api_ServiceAccountList_To_v1_ServiceAccountList,
convert_api_ServiceAccount_To_v1_ServiceAccount,
convert_api_ServiceList_To_v1_ServiceList,
convert_api_ServicePort_To_v1_ServicePort,
convert_api_ServiceSpec_To_v1_ServiceSpec,
convert_api_ServiceStatus_To_v1_ServiceStatus,
convert_api_Service_To_v1_Service,
convert_api_StatusCause_To_v1_StatusCause,
convert_api_StatusDetails_To_v1_StatusDetails,
convert_api_Status_To_v1_Status,
convert_api_TCPSocketAction_To_v1_TCPSocketAction,
convert_api_TypeMeta_To_v1_TypeMeta,
convert_api_VolumeMount_To_v1_VolumeMount,
convert_api_VolumeSource_To_v1_VolumeSource,
convert_api_Volume_To_v1_Volume,
convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource,
convert_v1_Binding_To_api_Binding,
convert_v1_Capabilities_To_api_Capabilities,
convert_v1_ComponentCondition_To_api_ComponentCondition,
convert_v1_ComponentStatusList_To_api_ComponentStatusList,
convert_v1_ComponentStatus_To_api_ComponentStatus,
convert_v1_ContainerPort_To_api_ContainerPort,
convert_v1_ContainerStateRunning_To_api_ContainerStateRunning,
convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated,
convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting,
convert_v1_ContainerState_To_api_ContainerState,
convert_v1_ContainerStatus_To_api_ContainerStatus,
convert_v1_Container_To_api_Container,
convert_v1_DeleteOptions_To_api_DeleteOptions,
convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource,
convert_v1_EndpointAddress_To_api_EndpointAddress,
convert_v1_EndpointPort_To_api_EndpointPort,
convert_v1_EndpointSubset_To_api_EndpointSubset,
convert_v1_EndpointsList_To_api_EndpointsList,
convert_v1_Endpoints_To_api_Endpoints,
convert_v1_EnvVarSource_To_api_EnvVarSource,
convert_v1_EnvVar_To_api_EnvVar,
convert_v1_EventList_To_api_EventList,
convert_v1_EventSource_To_api_EventSource,
convert_v1_Event_To_api_Event,
convert_v1_ExecAction_To_api_ExecAction,
convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource,
convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource,
convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource,
convert_v1_HTTPGetAction_To_api_HTTPGetAction,
convert_v1_Handler_To_api_Handler,
convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource,
convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource,
convert_v1_Lifecycle_To_api_Lifecycle,
convert_v1_LimitRangeItem_To_api_LimitRangeItem,
convert_v1_LimitRangeList_To_api_LimitRangeList,
convert_v1_LimitRangeSpec_To_api_LimitRangeSpec,
convert_v1_LimitRange_To_api_LimitRange,
convert_v1_ListMeta_To_api_ListMeta,
convert_v1_ListOptions_To_api_ListOptions,
convert_v1_List_To_api_List,
convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress,
convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus,
convert_v1_LocalObjectReference_To_api_LocalObjectReference,
convert_v1_NFSVolumeSource_To_api_NFSVolumeSource,
convert_v1_NamespaceList_To_api_NamespaceList,
convert_v1_NamespaceSpec_To_api_NamespaceSpec,
convert_v1_NamespaceStatus_To_api_NamespaceStatus,
convert_v1_Namespace_To_api_Namespace,
convert_v1_NodeAddress_To_api_NodeAddress,
convert_v1_NodeCondition_To_api_NodeCondition,
convert_v1_NodeList_To_api_NodeList,
convert_v1_NodeSpec_To_api_NodeSpec,
convert_v1_NodeStatus_To_api_NodeStatus,
convert_v1_NodeSystemInfo_To_api_NodeSystemInfo,
convert_v1_Node_To_api_Node,
convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector,
convert_v1_ObjectMeta_To_api_ObjectMeta,
convert_v1_ObjectReference_To_api_ObjectReference,
convert_v1_PersistentVolumeClaimList_To_api_PersistentVolumeClaimList,
convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec,
convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus,
convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource,
convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim,
convert_v1_PersistentVolumeList_To_api_PersistentVolumeList,
convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource,
convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec,
convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus,
convert_v1_PersistentVolume_To_api_PersistentVolume,
convert_v1_PodCondition_To_api_PodCondition,
convert_v1_PodExecOptions_To_api_PodExecOptions,
convert_v1_PodList_To_api_PodList,
convert_v1_PodLogOptions_To_api_PodLogOptions,
convert_v1_PodProxyOptions_To_api_PodProxyOptions,
convert_v1_PodStatusResult_To_api_PodStatusResult,
convert_v1_PodStatus_To_api_PodStatus,
convert_v1_PodTemplateList_To_api_PodTemplateList,
convert_v1_PodTemplateSpec_To_api_PodTemplateSpec,
convert_v1_PodTemplate_To_api_PodTemplate,
convert_v1_Pod_To_api_Pod,
convert_v1_Probe_To_api_Probe,
convert_v1_RBDVolumeSource_To_api_RBDVolumeSource,
convert_v1_RangeAllocation_To_api_RangeAllocation,
convert_v1_ReplicationControllerList_To_api_ReplicationControllerList,
convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus,
convert_v1_ReplicationController_To_api_ReplicationController,
convert_v1_ResourceQuotaList_To_api_ResourceQuotaList,
convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec,
convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus,
convert_v1_ResourceQuota_To_api_ResourceQuota,
convert_v1_ResourceRequirements_To_api_ResourceRequirements,
convert_v1_SELinuxOptions_To_api_SELinuxOptions,
convert_v1_SecretList_To_api_SecretList,
convert_v1_SecretVolumeSource_To_api_SecretVolumeSource,
convert_v1_Secret_To_api_Secret,
convert_v1_SecurityContext_To_api_SecurityContext,
convert_v1_SerializedReference_To_api_SerializedReference,
convert_v1_ServiceAccountList_To_api_ServiceAccountList,
convert_v1_ServiceAccount_To_api_ServiceAccount,
convert_v1_ServiceList_To_api_ServiceList,
convert_v1_ServicePort_To_api_ServicePort,
convert_v1_ServiceSpec_To_api_ServiceSpec,
convert_v1_ServiceStatus_To_api_ServiceStatus,
convert_v1_Service_To_api_Service,
convert_v1_StatusCause_To_api_StatusCause,
convert_v1_StatusDetails_To_api_StatusDetails,
convert_v1_Status_To_api_Status,
convert_v1_TCPSocketAction_To_api_TCPSocketAction,
convert_v1_TypeMeta_To_api_TypeMeta,
convert_v1_VolumeMount_To_api_VolumeMount,
convert_v1_VolumeSource_To_api_VolumeSource,
convert_v1_Volume_To_api_Volume,
)
if err != nil {
// If one of the conversion functions is malformed, detect it immediately.
panic(err)
}
}
// AUTO-GENERATED FUNCTIONS END HERE<|fim▁end|> | |
<|file_name|>segment.py<|end_file_name|><|fim▁begin|>from . import strip
<|fim▁hole|>
def __init__(self, strip, length, offset=0):
if offset < 0 or length < 0:
raise ValueError('Segment indices are non-negative.')
if offset + length > len(strip):
raise ValueError('Segment too long.')
self.strip = strip
self.offset = offset
self.length = length
def __getitem__(self, index):
return self.strip[self._fix_index(index)]
def __setitem__(self, index, value):
self.strip[self._fix_index(index)] = value
def __len__(self):
return self.length
def next(self, length):
"""Return a new segment starting right after self in the same buffer."""
return Segment(self.strip, length, self.offset + self.length)
def _fix_index(self, index):
if isinstance(index, slice):
raise ValueError('Slicing segments not implemented.')
if index < 0:
index += self.length
if index >= 0 and index < self.length:
return self.offset + index
raise IndexError('Index out of range')
def make_segments(strip, length):
"""Return a list of Segments that evenly split the strip."""
if len(strip) % length:
raise ValueError('The length of strip must be a multiple of length')
s = []
try:
while True:
s.append(s[-1].next(length) if s else Segment(strip, length))
except ValueError:
return s<|fim▁end|> | class Segment(strip.Strip):
"""Represents an offset, length segment within a strip.""" |
<|file_name|>value.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, ParserInput};<|fim▁hole|>use style::stylesheets::{CssRuleType, Origin};
use style::values::specified::Number;
use style_traits::PARSING_MODE_ALLOW_ALL_NUMERIC_VALUES;
#[test]
fn test_parsing_allo_all_numeric_values() {
// In SVG length mode, non-zero lengths are assumed to be px.
let url = ::servo_url::ServoUrl::parse("http://localhost").unwrap();
let context = ParserContext::new(Origin::Author, &url,
Some(CssRuleType::Style), PARSING_MODE_ALLOW_ALL_NUMERIC_VALUES,
QuirksMode::NoQuirks);
let mut input = ParserInput::new("-1");
let mut parser = Parser::new(&mut input);
let result = Number::parse_non_negative(&context, &mut parser);
assert!(result.is_ok());
assert_eq!(result.unwrap(), Number::new(-1.));
}<|fim▁end|> | use style::context::QuirksMode;
use style::parser::ParserContext; |
<|file_name|>SkyXManager.cpp<|end_file_name|><|fim▁begin|>/*
This source file is part of Rigs of Rods
Copyright 2005-2012 Pierre-Michel Ricordel
Copyright 2007-2012 Thomas Fischer
Copyright 2013-2014 Petr Ohlidal
For more information, see http://www.rigsofrods.com/
Rigs of Rods is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License version 3, as
published by the Free Software Foundation.
Rigs of Rods is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Rigs of Rods. If not, see <http://www.gnu.org/licenses/>.
*/
#include "SkyXManager.h"
#include "Application.h"
#include "HydraxWater.h"
#include "OgreSubsystem.h"
#include "TerrainManager.h"
#include "TerrainGeometryManager.h"
using namespace Ogre;
using namespace RoR;
SkyXManager::SkyXManager(Ogre::String configFile)
{
InitLight();
//Ogre::ResourceGroupManager::getSingleton().addResourceLocation("..\\resource\\SkyX\\","FileSystem", "SkyX",true); //Temp
mBasicController = new SkyX::BasicController();
mSkyX = new SkyX::SkyX(gEnv->sceneManager, mBasicController);
mCfgFileManager = new SkyX::CfgFileManager(mSkyX, mBasicController, gEnv->mainCamera);
mCfgFileManager->load(configFile);
mSkyX->create();
<|fim▁hole|>
SkyXManager::~SkyXManager()
{
RoR::App::GetOgreSubsystem()->GetRenderWindow()->removeListener(mSkyX);
mSkyX->remove();
mSkyX = nullptr;
delete mBasicController;
mBasicController = nullptr;
}
Vector3 SkyXManager::getMainLightDirection()
{
if (mBasicController != nullptr)
return mBasicController->getSunDirection();
return Ogre::Vector3(0.0,0.0,0.0);
}
Light *SkyXManager::getMainLight()
{
return mLight1;
}
bool SkyXManager::update(float dt)
{
UpdateSkyLight();
mSkyX->update(dt);
return true;
}
bool SkyXManager::UpdateSkyLight()
{
Ogre::Vector3 lightDir = -getMainLightDirection();
Ogre::Vector3 sunPos = gEnv->mainCamera->getDerivedPosition() - lightDir*mSkyX->getMeshManager()->getSkydomeRadius(gEnv->mainCamera);
// Calculate current color gradients point
float point = (-lightDir.y + 1.0f) / 2.0f;
if (App::GetSimTerrain ()->getHydraxManager ())
{
App::GetSimTerrain ()->getHydraxManager ()->GetHydrax ()->setWaterColor (mWaterGradient.getColor (point));
App::GetSimTerrain ()->getHydraxManager ()->GetHydrax ()->setSunPosition (sunPos*0.1);
}
mLight0 = gEnv->sceneManager->getLight("Light0");
mLight1 = gEnv->sceneManager->getLight("Light1");
mLight0->setPosition(sunPos*0.02);
mLight1->setDirection(lightDir);
if (App::GetSimTerrain()->getWater())
{
App::GetSimTerrain()->getWater()->WaterSetSunPosition(sunPos*0.1);
}
//setFadeColour was removed with https://github.com/RigsOfRods/rigs-of-rods/pull/1459
/* Ogre::Vector3 sunCol = mSunGradient.getColor(point);
mLight0->setSpecularColour(sunCol.x, sunCol.y, sunCol.z);
if (App::GetSimTerrain()->getWater()) App::GetSimTerrain()->getWater()->setFadeColour(Ogre::ColourValue(sunCol.x, sunCol.y, sunCol.z));
*/
Ogre::Vector3 ambientCol = mAmbientGradient.getColor(point);
mLight1->setDiffuseColour(ambientCol.x, ambientCol.y, ambientCol.z);
mLight1->setPosition(100,100,100);
if (mBasicController->getTime().x > 12)
{
if (mBasicController->getTime().x > mBasicController->getTime().z)
mLight0->setVisible(false);
else
mLight0->setVisible(true);
}
else
{
if (mBasicController->getTime ().x < mBasicController->getTime ().z)
mLight0->setVisible (false);
else
mLight0->setVisible (true);
}
if (round (mBasicController->getTime ().x) != mLastHour)
{
TerrainGeometryManager* gm = App::GetSimTerrain ()->getGeometryManager ();
if (gm)
gm->updateLightMap ();
mLastHour = round (mBasicController->getTime ().x);
}
return true;
}
bool SkyXManager::InitLight()
{
// Water
mWaterGradient = SkyX::ColorGradient();
mWaterGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.058209,0.535822,0.779105)*0.4, 1));
mWaterGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.058209,0.535822,0.729105)*0.3, 0.8));
mWaterGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.058209,0.535822,0.679105)*0.25, 0.6));
mWaterGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.058209,0.535822,0.679105)*0.2, 0.5));
mWaterGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.058209,0.535822,0.679105)*0.1, 0.45));
mWaterGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.058209,0.535822,0.679105)*0.025, 0));
// Sun
mSunGradient = SkyX::ColorGradient();
mSunGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.8,0.75,0.55)*1.5, 1.0f));
mSunGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.8,0.75,0.55)*1.4, 0.75f));
mSunGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.8,0.75,0.55)*1.3, 0.5625f));
mSunGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.6,0.5,0.2)*1.5, 0.5f));
mSunGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.5,0.5,0.5)*0.25, 0.45f));
mSunGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(0.5,0.5,0.5)*0.01, 0.0f));
// Ambient
mAmbientGradient = SkyX::ColorGradient();
mAmbientGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(1,1,1)*1, 1.0f));
mAmbientGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(1,1,1)*1, 0.6f));
mAmbientGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(1,1,1)*0.6, 0.5f));
mAmbientGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(1,1,1)*0.3, 0.45f));
mAmbientGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(1,1,1)*0.1, 0.35f));
mAmbientGradient.addCFrame(SkyX::ColorGradient::ColorFrame(Ogre::Vector3(1,1,1)*0.05, 0.0f));
gEnv->sceneManager->setAmbientLight(ColourValue(0.35,0.35,0.35)); //Not needed because terrn2 has ambientlight settings
// Light
mLight0 = gEnv->sceneManager->createLight("Light0");
mLight0->setDiffuseColour(1, 1, 1);
mLight0->setCastShadows(false);
mLight1 = gEnv->sceneManager->createLight("Light1");
mLight1->setType(Ogre::Light::LT_DIRECTIONAL);
return true;
}
size_t SkyXManager::getMemoryUsage()
{
//TODO
return 0;
}
void SkyXManager::freeResources()
{
//TODO
}<|fim▁end|> | RoR::App::GetOgreSubsystem()->GetOgreRoot()->addFrameListener(mSkyX);
RoR::App::GetOgreSubsystem()->GetRenderWindow()->addListener(mSkyX);
} |
<|file_name|>l2t_csv.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains functions for outputting as l2t_csv.
Author description at: http://code.google.com/p/log2timeline/wiki/l2t_csv
"""
import logging
import re
from plaso.lib import errors
from plaso.lib import eventdata
from plaso.lib import output
from plaso.lib import timelib
from plaso.lib import utils
from plaso.output import helper
class L2tcsv(output.FileLogOutputFormatter):
"""CSV format used by log2timeline, with 17 fixed fields."""
FORMAT_ATTRIBUTE_RE = re.compile('{([^}]+)}')
def Start(self):
"""Returns a header for the output."""
# Build a hostname and username dict objects.
self._hostnames = {}
if self.store:
self._hostnames = helper.BuildHostDict(self.store)
self._preprocesses = {}
for info in self.store.GetStorageInformation():
if hasattr(info, 'store_range'):
for store_number in range(
info.store_range[0], info.store_range[1] + 1):
self._preprocesses[store_number] = info
self.filehandle.WriteLine(
u'date,time,timezone,MACB,source,sourcetype,type,user,host,short,desc,'
u'version,filename,inode,notes,format,extra\n')
def WriteEvent(self, event_object):
"""Write a single event."""
try:
self.EventBody(event_object)
except errors.NoFormatterFound:
logging.error(u'Unable to output line, no formatter found.')
logging.error(event_object)
def EventBody(self, event_object):
"""Formats data as l2t_csv and writes to the filehandle from OutputFormater.
Args:
event_object: The event object (EventObject).
Raises:
errors.NoFormatterFound: If no formatter for that event is found.
"""
if not hasattr(event_object, 'timestamp'):
return
event_formatter = eventdata.EventFormatterManager.GetFormatter(event_object)
if not event_formatter:
raise errors.NoFormatterFound(
u'Unable to find event formatter for: {0:s}.'.format(
event_object.DATA_TYPE))
msg, msg_short = event_formatter.GetMessages(event_object)
source_short, source_long = event_formatter.GetSources(event_object)
date_use = timelib.Timestamp.CopyToDatetime(
event_object.timestamp, self.zone)
extras = []
format_variables = self.FORMAT_ATTRIBUTE_RE.findall(
event_formatter.format_string)
for key in event_object.GetAttributes():
if key in utils.RESERVED_VARIABLES or key in format_variables:
continue
# Force a string conversion since some of the extra attributes
# can be numbers or bools.
value = getattr(event_object, key)
extras.append(u'{0:s}: {1!s} '.format(key, value))
extra = ' '.join(extras)
inode = getattr(event_object, 'inode', '-')
if inode == '-':
if hasattr(event_object, 'pathspec') and hasattr(
event_object.pathspec, 'image_inode'):
inode = event_object.pathspec.image_inode
hostname = getattr(event_object, 'hostname', u'')
# TODO: move this into a base output class.
username = getattr(event_object, 'username', u'-')
if self.store:
if not hostname:
hostname = self._hostnames.get(event_object.store_number, u'-')
pre_obj = self._preprocesses.get(event_object.store_number)
if pre_obj:
check_user = pre_obj.GetUsernameById(username)
if check_user != '-':
username = check_user
row = ('{0:02d}/{1:02d}/{2:04d}'.format(
date_use.month, date_use.day, date_use.year),
'{0:02d}:{1:02d}:{2:02d}'.format(
date_use.hour, date_use.minute, date_use.second),
self.zone,
helper.GetLegacy(event_object),
source_short,
source_long,
getattr(event_object, 'timestamp_desc', u'-'),
username,
hostname,
msg_short,<|fim▁hole|> msg,
'2',
getattr(event_object, 'display_name', u'-'),
inode,
getattr(event_object, 'notes', u'-'), # Notes field placeholder.
getattr(event_object, 'parser', u'-'),
extra.replace('\n', u'-').replace('\r', u''))
out_write = u'{0:s}\n'.format(
u','.join(unicode(x).replace(',', u' ') for x in row))
self.filehandle.WriteLine(out_write)<|fim▁end|> | |
<|file_name|>carmen.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright 2013 Sumana Harihareswara
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
A silly game in Python. See the README.
"""
import sys
import random
import textwrap
import mock
def anykey():
x = raw_input("Press Return to continue. ") + "a"
class City(object):
"""Each City has a name, a set of destinations one step away, and a clue."""
def __init__(self, n, c):
self.dests = []
self.name = n
self.clue = c
class Villain(object):
def __init__(self):
self.name = random.choice(["Carmen", "Waldo", "Edward Snowden", "Lyra"])
self.location = random.choice([cbl, chmr, ftl])
class Hero(object):
def __init__(self):
self.location = ind
self.name = raw_input("Detective at keyboard, identify yourself: ")
class Game():
def __init__(self):
self.player = Hero()
self.nemesis = Villain()
def __repr__(self):
return """A session of the game, with a hero and villain set up with names and locations."""
def wincondition(self):
"""The player wins when s/he is in the same City as the nemesis."""<|fim▁hole|> self.where2go()
print "You ask around about %s and learn that %s" % (self.nemesis.name, self.nemesis.location.clue)
choice = raw_input('OK, now which way will you go? Choose a number. ')
self.choose(choice)
self.wincondition()
def wingame(self):
print "You found her in %s so you win!" % currentsession.nemesis.location.name
playagain=raw_input('Would you like to play again? Y/N: ')
if (playagain == "N") or (playagain == "n"):
sys.exit()
else:
self.player.location = ind
self.nemesis = Villain()
print "Get ready for a new game!"
anykey()
def where2go(self):
for i,x in enumerate(self.player.location.dests):
print "%d. %s" % (i+1, x.name)
    def choose(self, path):
        """Resolve the player's menu choice (a 1-based string index).

        On a valid choice: move the player, check the win condition, then
        move the nemesis one random step.
        """
        try:
            path = int(path)
        except ValueError:
            print "That doesn't make sense, %s, because it's not the number for one of your possible destinations." % self.player.name
            print "So you stay in %s." % self.player.location.name
            return
        if path < 1 or path > (len(self.player.location.dests)):
            # NOTE(review): this branch *returns* its message while the
            # ValueError branch *prints* -- if the caller discards the return
            # value, the player never sees this text.  Confirm against playturn.
            return "That doesn't make sense, %s, so you stay in %s." % (self.player.name, self.player.location.name)
        else:
            self.player.location = self.player.location.dests[path-1]
            if self.wincondition(): self.wingame()
            # The nemesis only moves when the player made a valid move.
            self.nemesis.location = random.choice(self.nemesis.location.dests)
            return "You follow %s to %s." % (self.nemesis.name, self.player.location.name)
ind = City("Independence", "she thought she'd stock up for a journey -- bullets, yokes of oxen, and whatnot.")
sjo = City("Saint Joseph", "she had a headache and needed to find some baby aspirin.")
cbl = City("Council Bluffs", "she knew that you can't beat City Hall, but thought another municipal body might back down more easily.")
fkn = City("Fort Kearney", "she wanted to visit the easternmost point of the Platte River Valley's natural roadway.")
chmr = City("Chimney Rock", "the tow-headed woman was tired of spelunking and wanted to try climbing.")
ftl = City("Fort Laramie", "she had a lot of questions about the American Fur Company.")
vc = City("Virginia City", "she wanted to see the birthplace of Calamity Jane.")
sp = City("South Pass", "she said she was fixin' to cross the Continental Divide!")
slc = City("Salt Lake City", "she said she was planning on having coffee with the Prophet... they didn't have the heart to tell her.")
fh = City("Fort Hall", "she asked about the Snake River country.")
pdx = City("Portland", "she said she longed to see the future home of Open Source Bridge, the yearly conference by the Stumptown Syndicate.")
# Clue wit by Leonard. Thank you @leonardr.
ind.dests = [fkn]
sjo.dests = [fkn]
cbl.dests = [fkn]
fkn.dests = [cbl, ind, ftl, sjo, vc, chmr]
chmr.dests = [fkn]
ftl.dests = [vc, sp, fkn]
vc.dests = [ftl, fkn]
sp.dests = [fh, ftl, slc]
slc.dests = [sp, fh]
fh.dests = [sp, pdx, slc]
pdx.dests = [fh]
def test_bidirectionalpaths():
    """Check every route is two-way: if A lists B, B must list A.

    A missing reverse edge is reported and then repaired in place rather
    than aborting, so the map is guaranteed symmetric afterwards.
    """
    for city in [ind, sjo, cbl, fkn, chmr, ftl, vc, sp, slc, fh, pdx]:
        for dest in city.dests:
            try:
                assert city in dest.dests
            except AssertionError:
                print "bidirectional fail! City" , city.name , "was not in" , dest.name , "destinations."
                # Self-heal: append the missing back-link.
                dest.dests.append(city)
                print "fixed it!" , city.name , "now in destinations for" , dest.name , "in this list:", map(lambda x: x.name,dest.dests)
test_bidirectionalpaths()
def test_pathfinding():
# try to get ind-fkn-ftl-sp-slc-fh-pdx
# FIXME: does not work yet due to syntax error
try:
map(lambda x,y:assert y in x.dests,[[ind,fkn], [fkn,ftl], [ftl,sp], [sp,slc], [slc,fh], [fh,pdx]])
except AssertionError:
print "whoops!",y,"not in the destination list for",x
# would be good to do pathfinding
gpl = """You are now playing:
Where On The Oregon Trail is Carmen Sandiego?
Copyright (C) 2013 Sumana Harihareswara and licensed under the GNU Public License.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it under certain conditions; see https://www.gnu.org/licenses/gpl.txt for details."""
if __name__=="__main__":
print gpl
currentsession = Game()
currentrank = "Okay, %s, your current rank is: Carpenter. Welcome to %s." % (currentsession.player.name, currentsession.player.location.name)
print textwrap.fill(currentrank,70,replace_whitespace=False)
assert currentsession.nemesis != currentsession.player
anykey()
print "%s has stolen a wagon tongue and Interpol has assigned you to catch her! Get ready for a chase!" % currentsession.nemesis.name
while True:
currentsession.playturn()<|fim▁end|> | return self.player.location == self.nemesis.location
def playturn(self):
print "%s, you are now in %s and you can head to:" % (self.player.name, self.player.location.name) |
<|file_name|>0004_waiter_days_worked.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [<|fim▁hole|> migrations.AddField(
model_name='waiter',
name='days_worked',
field=models.IntegerField(default=None, null=True, blank=True),
),
]<|fim▁end|> | ('demo_models', '0003_auto_20150419_2110'),
]
operations = [ |
<|file_name|>df_interpreter.py<|end_file_name|><|fim▁begin|># coding: utf-8
#
# drums-backend a simple interactive audio sampler that plays vorbis samples
# Copyright (C) 2009 C.D. Immanuel Albrecht
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import code
import string
from df_global import *
class DfInterpreter(code.InteractiveInterpreter):
"""class that will act as input-stream for python code sent by the user interface"""
vars = DfGlobal()
cached = ""
<|fim▁hole|> self.oldshowtraceback = self.showtraceback
def new_traceback(*args):
self.tracebacked = True
return self.oldshowtraceback(*args)
self.showtraceback = new_traceback
    def write(self,data):
        # Forward interpreter output to the UI stream, tagging the first line
        # with "PYTHON:" and every continuation line with "PYTHON':".
        # NOTE(review): the apostrophe in the continuation prefix looks like a
        # deliberate continuation marker -- confirm against the UI's parser.
        send = "PYTHON:" + data.replace("\n","\nPYTHON':")+"\n"
        self.vars["ui_out"].write(send)
        self.vars["ui_out"].flush()
def hasTracebacked(self):
if self.tracebacked:
self.tracebacked = False
return True
else:
return False<|fim▁end|> | def __init__(self,globalvars,locals=None):
vars = globalvars
code.InteractiveInterpreter.__init__(self, locals)
self.tracebacked = False |
<|file_name|>angular-app.js<|end_file_name|><|fim▁begin|>"use strict";
(function() {
// "todos-angular" is just a hard-code id for storage
var LOCAL_STORAGE_KEY = 'todos-angular';
var ENTER_KEY = 13;
var ESC_KEY = 27;
var internalFilters = {
active: function(toDoItem) {
return !toDoItem.completed;
},
completed: function(toDoItem) {
return toDoItem.completed;
}
};
angular.module('ToDoAngular', ['ngRoute'])
    .service('storage', function($q) {
      // Persistence service: serializes the to-do collection to JSON in
      // localStorage under LOCAL_STORAGE_KEY; load() returns [] when empty.
      // NOTE(review): $q is injected but never used -- drop it, or confirm
      // a promise-based API was intended here.
      return {
        save: function(toDoCollection) {
          localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(toDoCollection));
        },
        load: function() {
          var itemCollectionString = localStorage.getItem(LOCAL_STORAGE_KEY);
          return itemCollectionString && JSON.parse(itemCollectionString) || [];
        }
      }
    })
    .directive('escHandler', function() {
      // Attribute directive: evaluates the `esc-handler` expression when the
      // Escape key is pressed inside the element.
      return {
        restrict: 'A',
        link: function(scope, iElement, iAttrs) {
          function keyEventHandler(event) {
            if (event.keyCode === ESC_KEY) {
              // Run inside $apply so model changes trigger a digest.
              scope.$apply(iAttrs.escHandler);
            }
          }
          iElement.on('keydown', keyEventHandler);
          // Unbind on scope teardown to avoid leaking the DOM listener.
          scope.$on('$destroy', function() {
            iElement.off('keydown', keyEventHandler);
          });
        }
      };
    })
    .directive('enterHandler', function() {
      // Attribute directive: evaluates the `enter-handler` expression when
      // the Enter key is pressed inside the element (mirror of escHandler).
      return {
        restrict: 'A',
        link: function (scope, iElement, iAttrs) {
          function keyEventHandler(event) {
            if (event.keyCode === ENTER_KEY) {
              // Run inside $apply so model changes trigger a digest.
              scope.$apply(iAttrs.enterHandler);
            }
          }
          iElement.on('keydown', keyEventHandler);
          // Unbind on scope teardown to avoid leaking the DOM listener.
          scope.$on('$destroy', function () {
            iElement.off('keydown', keyEventHandler);
          });
        }
      };
    })
.directive('selectAndFocus', function($timeout) {
// Define directive for focus
return {
restrict: 'A',
link: function(scope, iElement, iAttrs) {
var focusPromise;
scope.$watch(iAttrs.selectAndFocus, function(newValue) {
if (newValue && !focusPromise) {
focusPromise = $timeout(function focus() {
focusPromise = null;
iElement[0].focus();
}, 0, false);
}
});
scope.$on('$destroy', function() {
if (focusPromise) {
$timeout.cancel(focusPromise);
focusPromise = null;
}
});
}
};
})
.directive('toDoItem', function() {
// Define directive for to-do item
return {
restrict: 'A',
templateUrl: 'angular-item-template.html',
scope: {
itemViewModel: '=toDoItem'
},
link: function (scope, iElement, iAttrs) {
scope.editing = false;
scope.originalTitle = '';
scope.$watch('itemViewModel.toDoItem.completed', function(newCompleted) {
iElement.toggleClass('completed', newCompleted);
});
scope.$watch('editing', function(newEditing) {
iElement.toggleClass('editing', newEditing);
});
scope.$watch('itemViewModel.isHidden', function(newHidden) {
iElement.toggleClass('hidden', newHidden);
});
scope.$watchGroup([
'itemViewModel.toDoItem.title',
'itemViewModel.toDoItem.completed'], function() {
scope.$emit('item-updated');
});
scope.destroy = function() {
scope.$emit('remove-item', scope.itemViewModel);
};
scope.edit = function() {
scope.originalTitle = scope.itemViewModel.toDoItem.title;
scope.editing = true;
};
scope.update = function() {
var title = scope.itemViewModel.toDoItem.title || '';
var trimmedTitle = title.trim();
if (scope.editing) {
if (title !== trimmedTitle) {
scope.itemViewModel.toDoItem.title = trimmedTitle;
}
if (!trimmedTitle) {
scope.destroy();
}
scope.editing = false;
}
};
scope.revert = function() {
scope.editing = false;
scope.itemViewModel.toDoItem.title = scope.originalTitle;
};
}
};
})
.controller('AppController', function AppController(
$scope, $routeParams, storedToDoCollection, storage) {
// Define app controller
$scope.toDoCollection = storedToDoCollection.map(function(storedToDo) {
return {
toDoItem: storedToDo,
isHidden: $scope.filter ? !$scope.filter(storedToDo): false
};
});
$scope.currentTitle = '';
$scope.$on('$routeChangeSuccess', function() {
var filterString = $routeParams.filter;
if (filterString && (filterString in internalFilters)) {
$scope.filterString = filterString;
$scope.filter = internalFilters[filterString];
} else {
$scope.filterString = '';
$scope.filter = null;
}
});
function save() {
storage.save($scope.toDoCollection.map(function(toDoViewModel) {
return toDoViewModel.toDoItem;
}));
}
$scope.$watch('filter', function(newFilter) {
$scope.toDoCollection.forEach(function(toDoViewModel) {
toDoViewModel.isHidden = newFilter ? !newFilter(toDoViewModel.toDoItem) : false;
});
});
$scope.$watch(function() {
return $scope.toDoCollection.filter(function(toDoViewModel){
return !toDoViewModel.toDoItem.completed;
}).length;<|fim▁hole|> $scope.remainingLabel = newValue === 1 ?
(newValue + ' item left') :
(newValue + ' items left');
}
});
$scope.$watchCollection('toDoCollection', function() {
save();
});
$scope.$on('item-updated', function() {
save();
});
$scope.$on('remove-item', function(scope, toDoViewModel) {
for(var index = 0; index < $scope.toDoCollection.length; index++) {
if ($scope.toDoCollection[index] === toDoViewModel) {
$scope.toDoCollection.splice(index, 1);
return;
}
}
});
$scope.create = function() {
var currentTitle = $scope.currentTitle.trim();
if (currentTitle) {
var toDoItem = {
title: currentTitle,
completed: false
};
var toDoItemViewModel = {
toDoItem: toDoItem,
isHidden: $scope.filter ? !$scope.filter(toDoItem): false
};
$scope.toDoCollection.push(toDoItemViewModel);
$scope.currentTitle = '';
}
};
})
.config(function($routeProvider) {
// Define routing
var routeConfig = {
controller: 'AppController',
templateUrl: 'angular-app-template.html',
resolve: {
storedToDoCollection: function(storage) {
return storage.load();
}
}
};
$routeProvider
.when('/', routeConfig)
.when('/:filter', routeConfig)
.otherwise({
redirectTo: '/'
});
});
})();<|fim▁end|> | }, function(newValue) {
if (newValue == null) {
$scope.remainingLabel = '';
} else { |
<|file_name|>pin.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.
// Copyright 2014 Lionel Flandrin <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Pin configuration
//! Allows GPIO configuration
//! Pin muxing not implemented yet.
use hal::pin::{Gpio, GpioDirection, In, Out, GpioLevel, High, Low};
use hal::tiva_c::sysctl;
use util::support::get_reg_ref;
/// The pins are accessed through ports. Each port has 8 pins and are identified
/// by a letter (PortA, PortB, etc...).
#[allow(missing_docs)]
#[derive(Clone, Copy)]
pub enum PortId {
PortA,
PortB,
PortC,
PortD,
PortE,
PortF,
}
/// Structure describing a single HW pin
#[derive(Clone, Copy)]
pub struct Pin {
/// Timer register interface
regs: &'static reg::Port,
/// Pin index in the port
index: usize,
}
impl Pin {
/// Create and configure a Pin
pub fn new(pid: PortId,
pin_index: u8,
dir: GpioDirection,
function: u8) -> Pin {
// Retrieve GPIO port peripheral to enable it
let (periph, regs) = match pid {
PortId::PortA => (sysctl::periph::gpio::PORT_A, reg::PORT_A),
PortId::PortB => (sysctl::periph::gpio::PORT_B, reg::PORT_B),
PortId::PortC => (sysctl::periph::gpio::PORT_C, reg::PORT_C),
PortId::PortD => (sysctl::periph::gpio::PORT_D, reg::PORT_D),
PortId::PortE => (sysctl::periph::gpio::PORT_E, reg::PORT_E),<|fim▁hole|> PortId::PortF => (sysctl::periph::gpio::PORT_F, reg::PORT_F),
};
periph.ensure_enabled();
let pin = Pin { regs: get_reg_ref(regs), index: pin_index as usize };
pin.configure(dir, function);
pin
}
/// Configure GPIO pin
fn configure(&self, dir: GpioDirection, function: u8) {
// Disable the GPIO during reconfig
self.regs.den.set_den(self.index, false);
self.set_direction(dir);
// Configure the "alternate function". AFSEL 0 means GPIO, 1 means the port
// is driven by another peripheral. When AFSEL is 1 the actual function
// config goes into the CTL register.
match function {
0 => {
self.regs.afsel.set_afsel(self.index,
reg::Port_afsel_afsel::GPIO);
},
f => {
self.regs.afsel.set_afsel(self.index,
reg::Port_afsel_afsel::PERIPHERAL);
self.regs.pctl.set_pctl(self.index, f as u32);
}
}
// We can chose to drive each GPIO at either 2, 4 or 8mA. Default to 2mA for
// now.
// TODO(simias): make that configurable
self.regs.dr2r.set_dr2r(self.index, true);
self.regs.dr4r.set_dr4r(self.index, false);
self.regs.dr8r.set_dr8r(self.index, false);
// TODO(simias): configure open drain/pull up/pull down/slew rate if necessary
self.regs.odr.set_odr(self.index, false);
self.regs.pur.set_pur(self.index, false);
self.regs.pdr.set_pdr(self.index, false);
// Enable GPIO
self.regs.den.set_den(self.index, true);
}
  /// Write this pin's bit in the DATA register; `true` drives the line high.
  fn set_level(&self, level: bool) {
    self.regs.data.set_data(self.index, level);
  }
}
impl Gpio for Pin {
/// Sets output GPIO value to high.
fn set_high(&self) {
self.set_level(true);
}
/// Sets output GPIO value to low.
fn set_low(&self) {
self.set_level(false);
}
/// Returns input GPIO level.
fn level(&self) -> GpioLevel {
match self.regs.data.data(self.index) {
true => High,
false => Low,
}
}
/// Sets output GPIO direction.
fn set_direction(&self, dir: GpioDirection) {
self.regs.dir.set_dir(self.index,
match dir {
In => reg::Port_dir_dir::INPUT,
Out => reg::Port_dir_dir::OUTPUT,
});
}
}
pub mod reg {
//! Pin registers definition
use volatile_cell::VolatileCell;
use core::ops::Drop;
ioregs!(Port = {
0x3FC => reg32 data {
//! Pin value
0..7 => data[8]
}
0x400 => reg32 dir {
//! Pin direction
0..7 => dir[8] {
0 => INPUT,
1 => OUTPUT,
}
}
0x420 => reg32 afsel {
//! Pin alternate function
0..7 => afsel[8] {
0 => GPIO,
1 => PERIPHERAL,
}
}
0x500 => reg32 dr2r {
//! Select 2mA drive strength
0..7 => dr2r[8]
}
0x504 => reg32 dr4r {
//! Select 4mA drive strength
0..7 => dr4r[8]
}
0x508 => reg32 dr8r {
//! Select 8mA drive strength
0..7 => dr8r[8]
}
0x50C => reg32 odr {
//! Configure pin as open drain
0..7 => odr[8]
}
0x510 => reg32 pur {
//! Enable pin pull-up
0..7 => pur[8]
}
0x514 => reg32 pdr {
//! Enable pin pull-down
0..7 => pdr[8]
}
0x518 => reg32 slr {
//! Slew rate control enable (only available for 8mA drive strength)
0..7 => slr[8]
}
0x51C => reg32 den {
//! Enable pin
0..7 => den[8]
}
0x52C => reg32 pctl {
//! Pin function selection when afsel is set for the pin.
0..31 => pctl[8]
}
});
#[allow(missing_docs)]
mod instances {
use super::*;
pub const PORT_A: *const Port = 0x40004000 as *const Port;
pub const PORT_B: *const Port = 0x40005000 as *const Port;
pub const PORT_C: *const Port = 0x40006000 as *const Port;
pub const PORT_D: *const Port = 0x40007000 as *const Port;
pub const PORT_E: *const Port = 0x40024000 as *const Port;
pub const PORT_F: *const Port = 0x40025000 as *const Port;
}
pub use self::instances::*;
}<|fim▁end|> | |
<|file_name|>reddit_db_scraper.py<|end_file_name|><|fim▁begin|>from pymongo import MongoClient
import json
import requests
import time
from datetime import datetime
def subredditInfo(sr, limit=100, sorting="top", period="day",
user_agent="ChicagoSchool's scraper", **kwargs):
    """retrieves X (max 100) amount of stories in a subreddit
    'sorting' is whether or not the sorting of the reddit should be customized or not,
    if it is: Allowed passing params/queries such as t=hour, week, month, year or all"""
    #query to send
    # NOTE(review): `parameters` (including any **kwargs merged into it) is
    # built but never passed to requests.get -- the query string below is
    # formatted by hand, so extra kwargs are silently ignored.  Confirm intent.
    parameters = {"limit": limit,}
    parameters.update(kwargs)
    url = "http://www.reddit.com/r/%s/%s.json?limit=%d&t=%s" % (sr, sorting, limit, period)
    r = requests.get(url, headers={"user-agent": user_agent})
    j = json.loads(r.text)
    #return list of stories
    stories = []
    for story in j["data"]["children"]:
        stories.append(story)
    return stories
def extractContent(link, sub, limit=100, sorting="top",
user_agent="ChicagoSchool's scraper", **kwargs):
    """Fetch one thread's JSON by permalink and wrap it for MongoDB.

    Returns a dict with the thread's creation datetime, permalink,
    subreddit name, and the full JSON payload.
    """
    # NOTE(review): **kwargs are accepted here but never used.
    url = "http://www.reddit.com/%s.json?sort=%s&limit=%d" % (link, sorting, limit)
    r = requests.get(url, headers={"user-agent": user_agent})
    j = json.loads(r.text)
    # "created" is a unix timestamp on the first child of the first listing.
    date = datetime.fromtimestamp(j[0]["data"]["children"][0]["data"]["created"])
    db_data = {"date": date, "link": link, "subreddit": sub, "content": j}
    return db_data
def dbScraper(db_n, col_n, sub_l):
"""scrapes all the threads for a subreddit and stores them in a
mongodb db"""
m_ind = 0
t_f = datetime.now()
sub_ln = len(sub_l)
client = MongoClient()
db = client[db_n]
col = db[col_n]
while True:
t_1 = datetime.now()
for i, s in enumerate(sub_l):
try:<|fim▁hole|> sub_info = subredditInfo(s)
except Exception as e:
print e
time.sleep(300)
sub_info = subredditInfo(s)
time.sleep(2)
sub_info_ln = len(sub_info)
for j, l in enumerate(sub_info):
link = l["data"]["permalink"]
try:
content = extractContent(link, s)
col.insert(content)
except Exception as e:
print e
time.sleep(60)
try:
content = extractContent(link, s)
col.insert(content)
except Exception as e:
print e
time.sleep(300)
print i * 100. / sub_ln, j * 100. / sub_info_ln, m_ind, i, j, datetime.now() - t_1, datetime.now() - t_f
time.sleep(2)
# now we wait until a full day has passed since we started our search
t_diff = datetime.now() - t_1
while t_diff.days < 1:
time.sleep(60)
t_diff = datetime.now() - t_1<|fim▁end|> | |
<|file_name|>test_america.py<|end_file_name|><|fim▁begin|>from datetime import date
from . import GenericCalendarTest
from ..america import (
Argentina, Barbados, Chile, Colombia, Mexico, Panama, Paraguay
)
class ArgentinaTest(GenericCalendarTest):
cal_class = Argentina
def test_holidays_2018(self):
holidays = self.cal.holidays_set(2018)
# 1. Año Nuevo
self.assertIn(date(2018, 1, 1), holidays)
# 2. Carnaval
self.assertIn(date(2018, 2, 12), holidays)
# 3. Carnaval
self.assertIn(date(2018, 2, 13), holidays)
# 4. Día de la Memoria
self.assertIn(date(2018, 3, 24), holidays)
# 5. Día del Veterano y de los Caídos en la Guerra de Malvinas
self.assertIn(date(2018, 4, 2), holidays)
# 6. Viernes Santo
self.assertIn(date(2018, 3, 30), holidays)
# 7. Día del Trabajador
self.assertIn(date(2018, 5, 1), holidays)
# 8. Día de la Revolución de Mayo
self.assertIn(date(2018, 5, 25), holidays)
# 9. Día Paso a la Inmortalidad del General Manuel Belgrano
self.assertIn(date(2018, 6, 20), holidays)
# 10. Día de la Independencia
self.assertIn(date(2018, 7, 9), holidays)
# 11. Inmaculada Concepción de María
self.assertIn(date(2018, 12, 8), holidays)
# 12. Navidad
self.assertIn(date(2018, 12, 25), holidays)
# variable days
# 13. Día Paso a la Inmortalidad del General Martín Miguel de Güemes
self.assertIn(date(2018, 6, 17), holidays)
# 14. Paso a la Inmortalidad del General José de San Martín
self.assertIn(date(2018, 8, 20), holidays)
# 15. Día del Respeto a la Diversidad Cultural
self.assertIn(date(2018, 10, 15), holidays)
# 16. Día de la Soberanía Nacional
self.assertIn(date(2018, 11, 19), holidays)
def test_holidays_2019(self):
holidays = self.cal.holidays_set(2019)
self.assertIn(date(2019, 1, 1), holidays)
self.assertIn(date(2019, 3, 4), holidays)
self.assertIn(date(2019, 3, 5), holidays)
self.assertIn(date(2019, 3, 24), holidays)
self.assertIn(date(2019, 4, 2), holidays)
self.assertIn(date(2019, 4, 19), holidays)
self.assertIn(date(2019, 5, 1), holidays)
self.assertIn(date(2019, 5, 25), holidays)
self.assertIn(date(2019, 6, 20), holidays)
self.assertIn(date(2019, 7, 9), holidays)
self.assertIn(date(2019, 12, 8), holidays)
self.assertIn(date(2019, 12, 25), holidays)
# variable days
self.assertIn(date(2019, 6, 17), holidays)
self.assertIn(date(2019, 8, 19), holidays)
self.assertIn(date(2019, 10, 14), holidays)
self.assertIn(date(2019, 11, 18), holidays)
def test_holidays_2020(self):
holidays = self.cal.holidays_set(2020)
self.assertIn(date(2020, 1, 1), holidays)
self.assertIn(date(2020, 2, 24), holidays)
self.assertIn(date(2020, 2, 25), holidays)
self.assertIn(date(2020, 3, 24), holidays)
# Special case: Argentina has shifted this holiday due to
# Coronavirus lockdown in 2020.
self.assertNotIn(date(2020, 4, 2), holidays)
self.assertIn(date(2020, 3, 31), holidays)
# Back to normal, I hope...
self.assertIn(date(2020, 4, 10), holidays)
self.assertIn(date(2020, 5, 1), holidays)
self.assertIn(date(2020, 5, 25), holidays)
self.assertIn(date(2020, 6, 20), holidays)
self.assertIn(date(2020, 7, 9), holidays)
self.assertIn(date(2020, 12, 8), holidays)
self.assertIn(date(2020, 12, 25), holidays)
# variable days
self.assertIn(date(2020, 6, 15), holidays)
self.assertIn(date(2020, 8, 17), holidays)
self.assertIn(date(2020, 10, 12), holidays)
self.assertIn(date(2020, 11, 23), holidays)
def test_holidays_2021(self):
# Testing it because June 17th happens on THU (general_guemes_day).
holidays = self.cal.holidays_set(2021)
# Not happening on June 17
self.assertNotIn(date(2021, 6, 17), holidays)
# Happens on the 1st MON after this date.
self.assertIn(date(2021, 6, 20), holidays)
# Also, Día del Respeto a la Diversidad Cultural is shifted
self.assertNotIn(date(2021, 10, 12), holidays)
# The day before
self.assertIn(date(2021, 10, 11), holidays)<|fim▁hole|> self.assertEqual(
label,
"Día del Veterano y de los Caídos en la Guerra de Malvinas"
)
def test_dia_memoria_label(self):
holidays = self.cal.holidays(2020)
holidays = dict(holidays)
label_memoria = holidays[date(2020, 3, 24)]
self.assertEqual(
label_memoria,
"Día Nacional de la Memoria por la Verdad y la Justicia"
)
def test_carnival_label(self):
holidays = self.cal.holidays(2020)
holidays = dict(holidays)
label_carnival = holidays[date(2020, 2, 25)]
self.assertEqual(label_carnival, "Carnival")
def test_labour_day_label(self):
holidays = self.cal.holidays(2020)
holidays = dict(holidays)
label = holidays[date(2020, 5, 1)]
self.assertEqual(label, "Día del Trabajador")
def test_immaculate_conception_label(self):
holidays = self.cal.holidays(2020)
holidays = dict(holidays)
label = holidays[date(2020, 12, 8)]
self.assertEqual(label, "Día de la Inmaculada Concepción de María")
class ChileTest(GenericCalendarTest):
    """Holiday computations for the Chile calendar."""
    cal_class = Chile
    def test_holidays_2013(self):
        # Spot-check the complete expected holiday set for a sample year.
        holidays = self.cal.holidays_set(2013)
        self.assertIn(date(2013, 1, 1), holidays)
        self.assertIn(date(2013, 3, 29), holidays)
        self.assertIn(date(2013, 3, 30), holidays)
        self.assertIn(date(2013, 5, 1), holidays)
        self.assertIn(date(2013, 5, 21), holidays)
        self.assertIn(date(2013, 6, 29), holidays)
        self.assertIn(date(2013, 7, 16), holidays)
        self.assertIn(date(2013, 8, 15), holidays)
        self.assertIn(date(2013, 9, 18), holidays)
        self.assertIn(date(2013, 9, 19), holidays)
        self.assertIn(date(2013, 9, 20), holidays)
        self.assertIn(date(2013, 10, 12), holidays)
        self.assertIn(date(2013, 10, 31), holidays)
        self.assertIn(date(2013, 11, 1), holidays)
        self.assertIn(date(2013, 12, 8), holidays)
        self.assertIn(date(2013, 12, 25), holidays)
        self.assertIn(date(2013, 12, 31), holidays)
    def test_reformation_day(self):
        # In these years the Oct 31 observance is shifted to a nearby date:
        # Oct 31 itself must be absent, the shifted date present.
        holidays = self.cal.holidays_set(2012)
        self.assertNotIn(date(2012, 10, 31), holidays)
        self.assertIn(date(2012, 11, 2), holidays)
        #
        holidays = self.cal.holidays_set(2017)
        self.assertNotIn(date(2017, 10, 31), holidays)
        self.assertIn(date(2017, 10, 27), holidays)
class ColombiaTest(GenericCalendarTest):
cal_class = Colombia
def test_holidays_2015(self):
holidays = self.cal.holidays_set(2015)
self.assertIn(date(2015, 1, 1), holidays) # New year
self.assertIn(date(2015, 1, 12), holidays) # Epiphany (shifted)
self.assertIn(date(2015, 3, 23), holidays) # Saint Joseph
self.assertIn(date(2015, 3, 29), holidays) # Palm Sunday
self.assertIn(date(2015, 4, 2), holidays) # Holy Thursday
self.assertIn(date(2015, 4, 3), holidays) # Good Friday
self.assertIn(date(2015, 4, 5), holidays) # Easter (SUN)
self.assertIn(date(2015, 5, 1), holidays) # Labour Day
self.assertIn(date(2015, 5, 18), holidays) # Ascension (shifted)
self.assertIn(date(2015, 6, 8), holidays) # Corpus Christi
self.assertIn(date(2015, 6, 15), holidays) # Sacred Heart
self.assertIn(date(2015, 6, 29), holidays) # St Peter & St Paul
self.assertIn(date(2015, 7, 20), holidays) # Independance Day
self.assertIn(date(2015, 8, 7), holidays) # Boyacá battle
self.assertIn(date(2015, 8, 17), holidays) # Assumption (shifted)
self.assertIn(date(2015, 10, 12), holidays) # Day of the Races
self.assertIn(date(2015, 11, 2), holidays) # All Saints (shifted)
self.assertIn(date(2015, 11, 16), holidays) # Cartagena independence
self.assertIn(date(2015, 12, 8), holidays) # Immaculate Conception
self.assertIn(date(2015, 12, 25), holidays) # XMas
self.assertEqual(len(holidays), 20)
def test_holidays_2020(self):
holidays = self.cal.holidays_set(2020)
self.assertIn(date(2020, 1, 1), holidays) # New year
self.assertIn(date(2020, 1, 6), holidays) # Epiphany
self.assertIn(date(2020, 3, 23), holidays) # Saint Joseph
self.assertIn(date(2020, 4, 5), holidays) # Palm Sunday
self.assertIn(date(2020, 4, 9), holidays) # Holy Thursday
self.assertIn(date(2020, 4, 10), holidays) # Good Friday
self.assertIn(date(2020, 4, 12), holidays) # Easter (SUN)
self.assertIn(date(2020, 5, 1), holidays) # Labour Day
self.assertIn(date(2020, 5, 25), holidays) # Ascension (shifted)
self.assertIn(date(2020, 6, 15), holidays) # Corpus Christi
self.assertIn(date(2020, 6, 22), holidays) # Sacred Heart
self.assertIn(date(2020, 6, 29), holidays) # St Peter & St Paul
self.assertIn(date(2020, 7, 20), holidays) # Independance Day
self.assertIn(date(2020, 8, 7), holidays) # Boyacá battle
self.assertIn(date(2020, 8, 17), holidays) # Assumption (shifted)
self.assertIn(date(2020, 10, 12), holidays) # Day of the Races
self.assertIn(date(2020, 11, 2), holidays) # All Saints (shifted)
self.assertIn(date(2020, 11, 16), holidays) # Cartagena independence
self.assertIn(date(2020, 12, 8), holidays) # Immaculate Conception
self.assertIn(date(2020, 12, 25), holidays) # XMas
self.assertEqual(len(holidays), 20)
def test_epiphany_monday(self):
# In 2020, Epiphany falls on MON
epiphany_2020 = self.cal.get_epiphany(2020)
self.assertEqual(epiphany_2020, date(2020, 1, 6))
# In 2021, it does not, so it's shifted to the next MON
epiphany_2021 = self.cal.get_epiphany(2021)
self.assertEqual(epiphany_2021, date(2021, 1, 11))
def test_saint_peter_and_saint_paul_monday(self):
# In 2020, Saint Peter and Saint Paul falls on MON
st_peter_paul_2020 = self.cal.get_saint_peter_and_saint_paul(2020)
self.assertEqual(st_peter_paul_2020, date(2020, 6, 29))
# In 2021, it does not, so it's shifted to the next MON
st_peter_paul_2021 = self.cal.get_saint_peter_and_saint_paul(2021)
self.assertEqual(st_peter_paul_2021, date(2021, 7, 5))
def test_assumption_monday(self):
# In 2021, Assumption falls on SUN, so it's shifted to MON
assumption_2021 = self.cal.get_assumption(2021)
self.assertEqual(assumption_2021, date(2021, 8, 16))
# In 2022, Assumption falls on MON
assumption_2022 = self.cal.get_assumption(2022)
self.assertEqual(assumption_2022, date(2022, 8, 15))
def test_day_of_the_races_monday(self):
# In 2020, Day of the races and hispanity falls on MON
day_races_2020 = self.cal.get_day_of_the_races(2020)
self.assertEqual(day_races_2020, date(2020, 10, 12))
# In 2021, It does not, so it's shifted to the next MON
day_races_2021 = self.cal.get_day_of_the_races(2021)
self.assertEqual(day_races_2021, date(2021, 10, 18))
def test_all_saints_monday(self):
# In 2021, The All Saints falls on MON
all_saints_2021 = self.cal.get_all_saints(2021)
self.assertEqual(all_saints_2021, date(2021, 11, 1))
# In 2022, It does not, so it's shifted to the next MON
all_saints_2022 = self.cal.get_all_saints(2022)
self.assertEqual(all_saints_2022, date(2022, 11, 7))
def test_cartagena_independence_monday(self):
# In 2019, The Cartagena Independance falls on MON
cartagena_2019 = self.cal.get_cartagena_independence(2019)
self.assertEqual(cartagena_2019, date(2019, 11, 11))
# In 2020, It does not, so it's shifted to the next MON
cartagena_2020 = self.cal.get_cartagena_independence(2020)
self.assertEqual(cartagena_2020, date(2020, 11, 16))
class MexicoTest(GenericCalendarTest):
    """Holiday computations for the Mexico calendar, including the
    observed-date shifts for holidays that fall on weekends."""
    cal_class = Mexico
    def test_holidays_2013(self):
        holidays = self.cal.holidays_set(2013)
        self.assertIn(date(2013, 1, 1), holidays)
        self.assertIn(date(2013, 2, 4), holidays)  # Constitution day
        self.assertIn(date(2013, 3, 18), holidays)  # Benito Juárez's birthday
        self.assertIn(date(2013, 5, 1), holidays)  # Labour day
        self.assertIn(date(2013, 9, 16), holidays)  # Independence day
        self.assertIn(date(2013, 11, 18), holidays)  # Revolution day
        self.assertIn(date(2013, 12, 25), holidays)  # XMas
    def test_shift_to_monday(self):
        # Holidays landing on Sunday are observed on the following Monday.
        observed = set(map(self.cal.get_observed_date, self.cal.holidays_set(2017)))
        # New year on Sunday -> shift
        assert date(2017, 1, 2) in observed
        observed = set(map(self.cal.get_observed_date, self.cal.holidays_set(2016)))
        # XMas on sunday -> shift to monday
        assert date(2016, 12, 26) in observed
        # Same for Labour day
        assert date(2016, 5, 2) in observed
    def test_shift_to_friday(self):
        # Holidays landing on Saturday are observed on the preceding Friday.
        holidays = self.cal.holidays_set(2021) | self.cal.holidays_set(2022)
        observed = set(map(self.cal.get_observed_date, holidays))
        # January 1st 2022 is a saturday, so we shift to friday
        assert date(2021, 12, 31) in observed
        # Same for Labour day
        assert date(2021, 4, 30) in observed
        holidays = self.cal.holidays_set(2021)
        observed = set(map(self.cal.get_observed_date, holidays))
        # December 25th, 2021 is a saturday, so we shift to friday
        # (the old comment said 2022, but the assertion checks the 2021 set)
        assert date(2021, 12, 24) in observed
class PanamaTest(GenericCalendarTest):
    """Holiday computations for the Panama calendar."""
    cal_class = Panama
    def test_holidays_2013(self):
        # Spot-check the complete expected holiday set for a sample year.
        holidays = self.cal.holidays_set(2013)
        self.assertIn(date(2013, 1, 1), holidays)
        self.assertIn(date(2013, 1, 9), holidays)  # Martyrs day
        self.assertIn(date(2013, 2, 12), holidays)  # carnival tuesday
        self.assertIn(date(2013, 3, 29), holidays)  # good friday
        self.assertIn(date(2013, 3, 30), holidays)  # easter saturday
        self.assertIn(date(2013, 3, 31), holidays)  # easter sunday
        self.assertIn(date(2013, 5, 1), holidays)  # labour day
        self.assertIn(date(2013, 11, 3), holidays)  # independence day
        self.assertIn(date(2013, 11, 5), holidays)  # colon day
        # Shout in Villa de los Santos
        self.assertIn(date(2013, 11, 10), holidays)
        self.assertIn(date(2013, 11, 28), holidays)  # Independence from spain
        self.assertIn(date(2013, 12, 8), holidays)  # mother day
        self.assertIn(date(2013, 12, 25), holidays)  # XMas
class ParaguayTest(GenericCalendarTest):
    """Checks the Paraguay calendar, including year-specific holiday moves."""

    cal_class = Paraguay

    def test_holidays_2019(self):
        holidays = self.cal.holidays_set(2019)
        self.assertIn(date(2019, 1, 1), holidays)  # New Year's Day
        self.assertIn(date(2019, 3, 1), holidays)  # Heroes' Day
        self.assertIn(date(2019, 4, 18), holidays)  # Maundy Thursday
        self.assertIn(date(2019, 4, 19), holidays)  # Good Friday
        self.assertIn(date(2019, 5, 1), holidays)  # Labour Day
        self.assertIn(date(2019, 5, 14), holidays)  # Independence Day
        self.assertIn(date(2019, 6, 12), holidays)  # Chaco Armistice Day
        self.assertIn(date(2019, 8, 15), holidays)  # Founding of Asunción
        self.assertIn(date(2019, 9, 29), holidays)  # Boquerón Battle Victory
        self.assertIn(date(2019, 12, 8), holidays)  # Virgin of Caacupé
        self.assertIn(date(2019, 12, 25), holidays)  # Christmas Day

    def test_holidays_2017(self):
        holidays = self.cal.holidays_set(2017)
        # In 2017, Heroes' Day was moved to February 27th.
        self.assertNotIn(date(2017, 3, 1), holidays)
        self.assertIn(date(2017, 2, 27), holidays)
        # Founding of Asunción Day: moved to August 14th for 2017.
        self.assertNotIn(date(2017, 8, 15), holidays)
        self.assertIn(date(2017, 8, 14), holidays)
        # Boquerón Battle Victory Day: moved to October 2nd for 2017.
        self.assertNotIn(date(2017, 9, 29), holidays)
        self.assertIn(date(2017, 10, 2), holidays)

    def test_immaculate_conception_label(self):
        # The December 8th holiday must carry its Paraguay-specific label.
        holidays = self.cal.holidays(2020)
        holidays = dict(holidays)
        label = holidays[date(2020, 12, 8)]
        self.assertEqual(label, "Virgin of Caacupé Day")
class BarbadosTest(GenericCalendarTest):
    """Checks the Barbados calendar, including Sunday-shift behaviour."""

    cal_class = Barbados

    def test_holidays_2009(self):
        holidays = self.cal.holidays_set(2009)
        self.assertIn(date(2009, 1, 1), holidays)  # New Year's Day
        self.assertIn(date(2009, 1, 21), holidays)  # Errol Barrow Day
        self.assertIn(date(2009, 4, 10), holidays)  # Good Friday
        self.assertIn(date(2009, 4, 12), holidays)  # Easter Sunday
        self.assertIn(date(2009, 4, 13), holidays)  # Easter Monday
        self.assertIn(date(2009, 4, 28), holidays)  # National Heroes Day
        self.assertIn(date(2009, 5, 1), holidays)  # Labour Day
        self.assertIn(date(2009, 6, 1), holidays)  # Whit Monday
        self.assertIn(date(2009, 8, 1), holidays)  # Emancipation Day
        self.assertIn(date(2009, 8, 3), holidays)  # Kadooment Day
        self.assertIn(date(2009, 11, 30), holidays)  # Independence Day
        self.assertIn(date(2009, 12, 25), holidays)  # Christmas Day
        self.assertIn(date(2009, 12, 26), holidays)  # Boxing Day

    def test_holidays_2018(self):
        holidays = self.cal.holidays_set(2018)
        self.assertIn(date(2018, 1, 1), holidays)  # New Year's Day
        self.assertIn(date(2018, 1, 21), holidays)  # Errol Barrow Day (a Sunday)
        self.assertIn(date(2018, 1, 22), holidays)  # Errol Barrow Day (shifted)
        self.assertIn(date(2018, 3, 30), holidays)  # Good Friday
        self.assertIn(date(2018, 4, 1), holidays)  # Easter Sunday
        self.assertIn(date(2018, 4, 2), holidays)  # Easter Monday
        self.assertIn(date(2018, 4, 28), holidays)  # National Heroes Day
        self.assertIn(date(2018, 5, 1), holidays)  # Labour Day
        self.assertIn(date(2018, 5, 21), holidays)  # Whit Monday
        self.assertIn(date(2018, 8, 1), holidays)  # Emancipation Day
        self.assertIn(date(2018, 8, 6), holidays)  # Kadooment Day
        self.assertIn(date(2018, 11, 30), holidays)  # Independence Day
        self.assertIn(date(2018, 12, 25), holidays)  # Christmas Day
        self.assertIn(date(2018, 12, 26), holidays)  # Boxing Day
def test_holidays_2019(self):
holidays = self.cal.holidays_set(2019)
self.assertIn(date(2019, 1, 1), holidays)
self.assertIn(date(2019, 1, 21), holidays) # Errol Barrow Day
self.assertIn(date(2019, 4, 19), holidays) # Good Friday
self.assertIn(date(2019, 4, 21), holidays) # Easter Sunday
self.assertIn(date(2019, 4, 22), holidays) # Easter Monday
# National Heroes Day & shift
self.assertIn(date(2019, 4, 28), holidays)
self.assertIn(date(2019, 4, 29), holidays) # shft'd
self.assertIn(date(2019, 5, 1), holidays) # Labour Day
self.assertIn(date(2019, 6, 10), holidays) # Whit Monday
self.assertIn(date(2019, 8, 1), holidays) # Emancipation Day
self.assertIn(date(2019, 8, 5), holidays) # Kabooment Day
self.assertIn(date(2019, 11, 30), holidays) # Independant Day
self.assertIn(date(2019, 12, 25), holidays) # Christmas Day
self.assertIn(date(2019, 12, 26), holidays) # Boxing Day<|fim▁end|> |
def test_dia_malvinas_label(self):
_, label = self.cal.get_malvinas_day(2020) |
<|file_name|>AuthController.js<|end_file_name|><|fim▁begin|>/**
* Created by Roman on 16.12.13.
*/
var moment = require('moment');
function AuthController($scope, $api) {
var ctrl = this;
$scope.client_id = store.get('client_id');
$scope.api_key = store.get('api_key');
$scope.api_secret = store.get('api_secret');
<|fim▁hole|> // catch html pieces
var $modal = $('#modal-auth').modal({backdrop: true, show: false});
$scope.authenticate_clicked = function(){
NProgress.start();
$scope.err = null;
// validate keys and update scopes
$api.bitstamp.get_transactions($scope.api_key, $scope.api_secret, $scope.client_id)
.success(function(transactions){
$modal.modal('hide');
// save data
store.set('client_id', $scope.client_id);
store.set('api_key', $scope.api_key);
store.set('api_secret', $scope.api_secret);
store.set('transactions', transactions);
store.set('transactions_updated', moment().unix());
// notify parent that we authenticated
$scope.$parent.authenticated();
NProgress.done();
})
.error(function(){
$scope.err = "Failed to verify credentials. Are they correct?";
NProgress.done();
});
};
// init code
$('#btn-start').click(function(){
$modal.modal('show');
});
}
module.exports = AuthController;<|fim▁end|> | $scope.err = null;
|
<|file_name|>Start.py<|end_file_name|><|fim▁begin|>from vt_manager_kvm.communication.sfa.util.xrn import urn_to_hrn
from vt_manager_kvm.communication.sfa.trust.credential import Credential
from vt_manager_kvm.communication.sfa.trust.auth import Auth
<|fim▁hole|>
def __init__(self, xrn, creds, **kwargs):
hrn, type = urn_to_hrn(xrn)
valid_creds = Auth().checkCredentials(creds, 'startslice', hrn)
origin_hrn = Credential(string=valid_creds[0]).get_gid_caller().get_hrn()
return<|fim▁end|> | class Start: |
<|file_name|>vector.rs<|end_file_name|><|fim▁begin|>use ndarray::*;
use ndarray_linalg::*;
#[test]<|fim▁hole|>
#[test]
fn vector_norm_l1() {
    // L1 norm = sum of absolute values, for both 1-D and 2-D arrays.
    let vec = arr1(&[1.0, -1.0]);
    assert_rclose!(vec.norm_l1(), 2.0, 1e-7);
    let mat = arr2(&[[0.0, -1.0], [1.0, 0.0]]);
    assert_rclose!(mat.norm_l1(), 2.0, 1e-7);
}
#[test]
fn vector_norm_max() {
    // Max norm = largest absolute value, for both 1-D and 2-D arrays.
    let vec = arr1(&[1.0, 1.0, -3.0]);
    assert_rclose!(vec.norm_max(), 3.0, 1e-7);
    let mat = arr2(&[[1.0, 3.0], [1.0, -4.0]]);
    assert_rclose!(mat.norm_max(), 4.0, 1e-7);
}
#[test]
fn vector_norm_l1_rc() {
    // Same L1-norm checks, but on reference-counted (shared-ownership) arrays.
    let vec = rcarr1(&[1.0, -1.0]);
    assert_rclose!(vec.norm_l1(), 2.0, 1e-7);
    let mat = rcarr2(&[[0.0, -1.0], [1.0, 0.0]]);
    assert_rclose!(mat.norm_l1(), 2.0, 1e-7);
}
#[test]
fn vector_norm_max_rc() {
    // Same max-norm checks, but on reference-counted (shared-ownership) arrays.
    let vec = rcarr1(&[1.0, 1.0, -3.0]);
    assert_rclose!(vec.norm_max(), 3.0, 1e-7);
    let mat = rcarr2(&[[1.0, 3.0], [1.0, -4.0]]);
    assert_rclose!(mat.norm_max(), 4.0, 1e-7);
}
let a = Array::range(1., 10., 1.);
assert_rclose!(a.norm(), 285.0.sqrt(), 1e-7);
} |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Utilities for handling shell surfaces with the `wlr_layer_shell` protocol
//!
//! This interface should be suitable for the implementation of many desktop shell components,
//! and a broad number of other applications that interact with the desktop.
//!
//! ### Initialization
//!
//! To initialize this handler, simple use the [`wlr_layer_shell_init`] function provided in this module.
//! You need to provide a closure that will be invoked whenever some action is required from you,
//! are represented by the [`LayerShellRequest`] enum.
//!
//! ```no_run
//! # extern crate wayland_server;
//! #
//! use smithay::wayland::shell::wlr_layer::{wlr_layer_shell_init, LayerShellRequest};
//!
//! # let mut display = wayland_server::Display::new();
//! let (shell_state, _) = wlr_layer_shell_init(
//! &mut display,
//! // your implementation
//! |event: LayerShellRequest, dispatch_data| { /* handle the shell requests here */ },
//! None // put a logger if you want
//! );
//!
//! // You're now ready to go!
//! ```
use std::{
cell::RefCell,
rc::Rc,
sync::{Arc, Mutex},
};
use wayland_protocols::wlr::unstable::layer_shell::v1::server::{zwlr_layer_shell_v1, zwlr_layer_surface_v1};
use wayland_server::{
protocol::{wl_output::WlOutput, wl_surface},
DispatchData, Display, Filter, Global, Main,
};
use crate::{
utils::{DeadResource, Logical, Size},
wayland::{
compositor::{self, Cacheable},
Serial, SERIAL_COUNTER,
},
};
mod handlers;
mod types;
pub use types::{Anchor, ExclusiveZone, KeyboardInteractivity, Layer, Margins};
/// The role of a wlr_layer_shell_surface
pub const LAYER_SURFACE_ROLE: &str = "zwlr_layer_surface_v1";

/// Attributes for layer surface
#[derive(Debug)]
pub struct LayerSurfaceAttributes {
    /// Protocol handle of the layer surface; used to send events
    /// (configure, closed) back to the owning client.
    surface: zwlr_layer_surface_v1::ZwlrLayerSurfaceV1,
    /// Defines if the surface has received at least one
    /// layer_surface.ack_configure from the client
    pub configured: bool,
    /// The serial of the last acked configure
    pub configure_serial: Option<Serial>,
    /// Holds the state if the surface has sent the initial
    /// configure event to the client. It is expected that
    /// during the first commit a initial
    /// configure event is sent to the client
    pub initial_configure_sent: bool,
    /// Holds the configures the server has sent out
    /// to the client waiting to be acknowledged by
    /// the client. All pending configures that are older
    /// than the acknowledged one will be discarded during
    /// processing layer_surface.ack_configure.
    pending_configures: Vec<LayerSurfaceConfigure>,
    /// Holds the pending state as set by the server.
    pub server_pending: Option<LayerSurfaceState>,
    /// Holds the last server_pending state that has been acknowledged
    /// by the client. This state should be cloned to the current
    /// during a commit.
    pub last_acked: Option<LayerSurfaceState>,
    /// Holds the current state of the layer after a successful
    /// commit.
    pub current: LayerSurfaceState,
}
impl LayerSurfaceAttributes {
    /// Creates a fresh, unconfigured attribute set for a new layer surface.
    fn new(surface: zwlr_layer_surface_v1::ZwlrLayerSurfaceV1) -> Self {
        Self {
            surface,
            configured: false,
            configure_serial: None,
            initial_configure_sent: false,
            pending_configures: Vec::new(),
            server_pending: None,
            last_acked: None,
            current: Default::default(),
        }
    }

    /// Records that the client acknowledged the configure with `serial`.
    ///
    /// Returns the acknowledged configure, or `None` when the serial does
    /// not match any pending configure. All pending configures up to and
    /// including the acknowledged serial are dropped.
    fn ack_configure(&mut self, serial: Serial) -> Option<LayerSurfaceConfigure> {
        let idx = self
            .pending_configures
            .iter()
            .position(|configure| configure.serial == serial)?;
        let acked = self.pending_configures[idx].clone();
        self.last_acked = Some(acked.state.clone());
        self.configured = true;
        self.configure_serial = Some(serial);
        // Every configure older than (or equal to) the acked one is obsolete.
        self.pending_configures.retain(|c| c.serial > serial);
        Some(acked)
    }
}
/// State of a layer surface
#[derive(Debug, Default, Clone, PartialEq)]
pub struct LayerSurfaceState {
    /// The suggested size of the surface; `None` means no size has been
    /// suggested yet (sent to the client as 0x0 in the configure event).
    pub size: Option<Size<i32, Logical>>,
}
/// Represents the client pending state
#[derive(Debug, Default, Clone, Copy)]
pub struct LayerSurfaceCachedState {
    /// The size requested by the client
    pub size: Size<i32, Logical>,
    /// Anchor bitflags, describing how the layers surface should be positioned and sized
    pub anchor: Anchor,
    /// Description of the exclusive zone
    pub exclusive_zone: ExclusiveZone,
    /// Describes distance from the anchor point of the output
    pub margin: Margins,
    /// Describes how keyboard events are delivered to this surface
    pub keyboard_interactivity: KeyboardInteractivity,
    /// The layer that the surface is rendered on
    pub layer: Layer,
}
impl Cacheable for LayerSurfaceCachedState {
    fn commit(&mut self) -> Self {
        // The state is `Copy`, so committing is a plain bitwise copy.
        *self
    }
    fn merge_into(self, into: &mut Self) {
        // Newer (pending) state fully replaces the older one.
        *into = self;
    }
}
/// Shell global state
///
/// This state allows you to retrieve a list of surfaces
/// currently known to the shell global.
#[derive(Debug)]
pub struct LayerShellState {
    // Every layer surface created through the global, live or not;
    // exposed read-only via `layer_surfaces()`.
    known_layers: Vec<LayerSurface>,
}
impl LayerShellState {
    /// Access all the shell surfaces known by this handler
    pub fn layer_surfaces(&self) -> &[LayerSurface] {
        self.known_layers.as_slice()
    }
}
// Per-global user data: carries the user's request callback and the shared
// shell state into each client's `zwlr_layer_shell_v1` binding.
#[derive(Clone)]
struct ShellUserData {
    _log: ::slog::Logger,
    // User-supplied handler invoked for every `LayerShellRequest`.
    user_impl: Rc<RefCell<dyn FnMut(LayerShellRequest, DispatchData<'_>)>>,
    shell_state: Arc<Mutex<LayerShellState>>,
}
/// Create a new `wlr_layer_shell` globals
///
/// Returns the shared [`LayerShellState`] (used to enumerate known layer
/// surfaces) together with the handle of the created
/// `zwlr_layer_shell_v1` global.
pub fn wlr_layer_shell_init<L, Impl>(
    display: &mut Display,
    implementation: Impl,
    logger: L,
) -> (
    Arc<Mutex<LayerShellState>>,
    Global<zwlr_layer_shell_v1::ZwlrLayerShellV1>,
)
where
    L: Into<Option<::slog::Logger>>,
    Impl: FnMut(LayerShellRequest, DispatchData<'_>) + 'static,
{
    let log = crate::slog_or_fallback(logger);
    // Shared list of every layer surface created through this global.
    let shell_state = Arc::new(Mutex::new(LayerShellState {
        known_layers: Vec::new(),
    }));
    let shell_data = ShellUserData {
        _log: log.new(slog::o!("smithay_module" => "layer_shell_handler")),
        user_impl: Rc::new(RefCell::new(implementation)),
        shell_state: shell_state.clone(),
    };
    let layer_shell_global = display.create_global(
        // Advertised version of the zwlr_layer_shell_v1 global.
        4,
        Filter::new(
            move |(shell, _version): (Main<zwlr_layer_shell_v1::ZwlrLayerShellV1>, _), _, _ddata| {
                shell.quick_assign(self::handlers::layer_shell_implementation);
                // Attach a clone of the handler data to this client's binding.
                shell.as_ref().user_data().set({
                    let shell_data = shell_data.clone();
                    move || shell_data
                });
            },
        ),
    );
    (shell_state, layer_shell_global)
}
/// A handle to a layer surface
#[derive(Debug, Clone)]
pub struct LayerSurface {
    /// The `wl_surface` backing this layer surface.
    wl_surface: wl_surface::WlSurface,
    /// The `zwlr_layer_surface_v1` protocol object driving it.
    shell_surface: zwlr_layer_surface_v1::ZwlrLayerSurfaceV1,
}
impl std::cmp::PartialEq for LayerSurface {
    fn eq(&self, other: &Self) -> bool {
        // Handles compare equal only when both are still alive and refer
        // to the same underlying `wl_surface`.
        if !self.alive() || !other.alive() {
            return false;
        }
        self.wl_surface == other.wl_surface
    }
}
impl LayerSurface {
    /// Is the layer surface referred by this handle still alive?
    pub fn alive(&self) -> bool {
        self.shell_surface.as_ref().is_alive() && self.wl_surface.as_ref().is_alive()
    }

    /// Gets the current pending state for a configure
    ///
    /// Returns `Some` if either no initial configure has been sent or
    /// the `server_pending` is `Some` and different from the last pending
    /// configure or `last_acked` if there is no pending
    ///
    /// Returns `None` if either no `server_pending` or the pending
    /// has already been sent to the client or the pending is equal
    /// to the `last_acked`
    fn get_pending_state(&self, attributes: &mut LayerSurfaceAttributes) -> Option<LayerSurfaceState> {
        // Before the first configure, always send one (with the default
        // state if the server never set anything).
        if !attributes.initial_configure_sent {
            return Some(attributes.server_pending.take().unwrap_or_default());
        }
        let server_pending = match attributes.server_pending.take() {
            Some(state) => state,
            None => {
                return None;
            }
        };
        // Compare against the newest in-flight configure, falling back to
        // the last state the client acknowledged.
        let last_state = attributes
            .pending_configures
            .last()
            .map(|c| &c.state)
            .or(attributes.last_acked.as_ref());
        if let Some(state) = last_state {
            if state == &server_pending {
                // No change worth a new configure round-trip.
                return None;
            }
        }
        Some(server_pending)
    }

    /// Send a configure event to this layer surface to suggest it a new configuration
    ///
    /// The serial of this configure will be tracked waiting for the client to ACK it.
    ///
    /// You can manipulate the state that will be sent to the client with the [`with_pending_state`](#method.with_pending_state)
    /// method.
    pub fn send_configure(&self) {
        if let Some(surface) = self.get_surface() {
            let configure = compositor::with_states(surface, |states| {
                let mut attributes = states
                    .data_map
                    .get::<Mutex<LayerSurfaceAttributes>>()
                    .unwrap()
                    .lock()
                    .unwrap();
                if let Some(pending) = self.get_pending_state(&mut *attributes) {
                    let configure = LayerSurfaceConfigure {
                        serial: SERIAL_COUNTER.next_serial(),
                        state: pending,
                    };
                    // Track the configure until the client ACKs its serial.
                    attributes.pending_configures.push(configure.clone());
                    attributes.initial_configure_sent = true;
                    Some(configure)
                } else {
                    None
                }
            })
            .unwrap_or(None);
            // send surface configure
            if let Some(configure) = configure {
                // A size of `None` becomes 0x0, which the protocol treats
                // as "client chooses its own size".
                let (width, height) = configure.state.size.unwrap_or_default().into();
                let serial = configure.serial;
                self.shell_surface
                    .configure(serial.into(), width as u32, height as u32);
            }
        }
    }

    /// Make sure this surface was configured
    ///
    /// Returns `true` if it was, if not, returns `false` and raise
    /// a protocol error to the associated layer surface. Also returns `false`
    /// if the surface is already destroyed.
    pub fn ensure_configured(&self) -> bool {
        if !self.alive() {
            return false;
        }
        let configured = compositor::with_states(&self.wl_surface, |states| {
            states
                .data_map
                .get::<Mutex<LayerSurfaceAttributes>>()
                .unwrap()
                .lock()
                .unwrap()
                .configured
        })
        .unwrap();
        if !configured {
            // NOTE(review): the error code reused here is `AlreadyConstructed`
            // while the message describes an unconfigured surface — confirm
            // this is the intended protocol error variant.
            self.shell_surface.as_ref().post_error(
                zwlr_layer_shell_v1::Error::AlreadyConstructed as u32,
                "layer_surface has never been configured".into(),
            );
        }
        configured
    }

    /// Send a "close" event to the client
    pub fn send_close(&self) {
        self.shell_surface.closed()
    }

    /// Access the underlying `wl_surface` of this layer surface
    ///
    /// Returns `None` if the layer surface actually no longer exists.
    pub fn get_surface(&self) -> Option<&wl_surface::WlSurface> {
        if self.alive() {
            Some(&self.wl_surface)
        } else {
            None
        }
    }

    /// Allows the pending state of this layer to
    /// be manipulated.
    ///
    /// This should be used to inform the client about size and state changes,
    /// for example after a resize request from the client.
    ///
    /// The state will be sent to the client when calling [`send_configure`](#method.send_configure).
    pub fn with_pending_state<F, T>(&self, f: F) -> Result<T, DeadResource>
    where
        F: FnOnce(&mut LayerSurfaceState) -> T,
    {
        if !self.alive() {
            return Err(DeadResource);
        }
        Ok(compositor::with_states(&self.wl_surface, |states| {
            let mut attributes = states
                .data_map
                .get::<Mutex<LayerSurfaceAttributes>>()
                .unwrap()
                .lock()
                .unwrap();
            // Lazily seed the pending state from the current state so the
            // closure always edits a fully-populated value.
            if attributes.server_pending.is_none() {
                attributes.server_pending = Some(attributes.current.clone());
            }
            let server_pending = attributes.server_pending.as_mut().unwrap();
            f(server_pending)
        })
        .unwrap())
    }

    /// Gets a copy of the current state of this layer
    ///
    /// Returns `None` if the underlying surface has been
    /// destroyed
    pub fn current_state(&self) -> Option<LayerSurfaceState> {
        if !self.alive() {
            return None;
        }
        Some(
            compositor::with_states(&self.wl_surface, |states| {
                let attributes = states
                    .data_map
                    .get::<Mutex<LayerSurfaceAttributes>>()
                    .unwrap()
                    .lock()
                    .unwrap();
                attributes.current.clone()
            })
            .unwrap(),
        )
    }
}
/// A configure message for layer surfaces
#[derive(Debug, Clone)]
pub struct LayerSurfaceConfigure {
    /// The state associated with this configure
    pub state: LayerSurfaceState,
    /// A serial number to track ACK from the client
    ///
    /// This should be an ever increasing number, as the ACK-ing
    /// from a client for a serial will validate all pending lower
    /// serials.
    pub serial: Serial,
}
/// Events generated by layer shell surfaces
///
/// Depending on what you want to do, you might ignore some of them
#[derive(Debug)]
pub enum LayerShellRequest {
/// A new layer surface was created
///
/// You likely need to send a [`LayerSurfaceConfigure`] to the surface, to hint the
/// client as to how its layer surface should be sized.<|fim▁hole|> surface: LayerSurface,
/// The output that the layer will be displayed on
///
/// None means that the compositor should decide which output to use,
/// Generally this will be the one that the user most recently interacted with
output: Option<WlOutput>,
/// This values indicate on which layer a surface should be rendered on
layer: Layer,
/// namespace that defines the purpose of the layer surface
namespace: String,
},
/// A surface has acknowledged a configure serial.
AckConfigure {
/// The surface.
surface: wl_surface::WlSurface,
/// The configure serial.
configure: LayerSurfaceConfigure,
},
}<|fim▁end|> | NewLayerSurface {
/// the surface |
<|file_name|>Hr.test.tsx<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as React from 'react';
import Hr from './Hr';
import { create } from 'react-test-renderer';
describe('Hr', () => {
  it('renders with the right styles', () => {
    // Snapshot the rendered output of an Hr with no fields.
    const renderedTree = create(<Hr fields={[]} />);
    expect(renderedTree).toMatchSnapshot();
  });
});
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.