prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>test_plugin.py<|end_file_name|><|fim▁begin|>import unittest
from mock import Mock
from nosealert.plugin import AlertPlugin
from nosealert.notifications import Notification
class TestAlertPlugin(unittest.TestCase):
    """Unit tests for nosealert's AlertPlugin notification building."""

    def setUp(self):
        # Fresh plugin instance for every test case.
        self.plugin = AlertPlugin()
def test_get_notification_success(self):
result = Mock(
failures=[],
errors=[],
testsRun=3,
)
self.assertEqual(self.plugin.get_notification(result), Notification(
total=3,
))<|fim▁hole|> result = Mock(
failures=[1, 2],
errors=[3],
testsRun=5,
)
self.assertEqual(self.plugin.get_notification(result), Notification(
fails=2,
errors=1,
total=5,
))
def test_finalize_sends_notification(self):
notification = Mock()
result = Mock()
self.plugin.get_notification = Mock(return_value=notification)
self.plugin.finalize(result)
notification.send.assert_called_once_with()<|fim▁end|> |
def test_get_notification_with_fails(self): |
<|file_name|>method-on-generic-struct.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10381)
<|fim▁hole|>// === GDB TESTS ===================================================================================
// gdb-command:rbreak zzz
// gdb-command:run
// STACK BY REF
// gdb-command:finish
// gdb-command:print *self
// gdb-check:$1 = {x = {8888, -8888}}
// gdb-command:print arg1
// gdb-check:$2 = -1
// gdb-command:print arg2
// gdb-check:$3 = -2
// gdb-command:continue
// STACK BY VAL
// gdb-command:finish
// gdb-command:print self
// gdb-check:$4 = {x = {8888, -8888}}
// gdb-command:print arg1
// gdb-check:$5 = -3
// gdb-command:print arg2
// gdb-check:$6 = -4
// gdb-command:continue
// OWNED BY REF
// gdb-command:finish
// gdb-command:print *self
// gdb-check:$7 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$8 = -5
// gdb-command:print arg2
// gdb-check:$9 = -6
// gdb-command:continue
// OWNED BY VAL
// gdb-command:finish
// gdb-command:print self
// gdb-check:$10 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$11 = -7
// gdb-command:print arg2
// gdb-check:$12 = -8
// gdb-command:continue
// OWNED MOVED
// gdb-command:finish
// gdb-command:print *self
// gdb-check:$13 = {x = 1234.5}
// gdb-command:print arg1
// gdb-check:$14 = -9
// gdb-command:print arg2
// gdb-check:$15 = -10
// gdb-command:continue
// === LLDB TESTS ==================================================================================
// lldb-command:run
// STACK BY REF
// lldb-command:print *self
// lldb-check:[...]$0 = Struct<(u32, i32)> { x: (8888, -8888) }
// lldb-command:print arg1
// lldb-check:[...]$1 = -1
// lldb-command:print arg2
// lldb-check:[...]$2 = -2
// lldb-command:continue
// STACK BY VAL
// lldb-command:print self
// lldb-check:[...]$3 = Struct<(u32, i32)> { x: (8888, -8888) }
// lldb-command:print arg1
// lldb-check:[...]$4 = -3
// lldb-command:print arg2
// lldb-check:[...]$5 = -4
// lldb-command:continue
// OWNED BY REF
// lldb-command:print *self
// lldb-check:[...]$6 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$7 = -5
// lldb-command:print arg2
// lldb-check:[...]$8 = -6
// lldb-command:continue
// OWNED BY VAL
// lldb-command:print self
// lldb-check:[...]$9 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$10 = -7
// lldb-command:print arg2
// lldb-check:[...]$11 = -8
// lldb-command:continue
// OWNED MOVED
// lldb-command:print *self
// lldb-check:[...]$12 = Struct<f64> { x: 1234.5 }
// lldb-command:print arg1
// lldb-check:[...]$13 = -9
// lldb-command:print arg2
// lldb-check:[...]$14 = -10
// lldb-command:continue
// Generic struct used to exercise debuginfo for methods on generic types;
// the gdb/lldb checks above print instances of this type.
struct Struct<T> {
    x: T
}
impl<T> Struct<T> {
    // Takes `self` by reference; the debugger stops in zzz() ("#break")
    // and the test harness inspects `*self`, `arg1` and `arg2`.
    fn self_by_ref(&self, arg1: int, arg2: int) -> int {
        zzz(); // #break
        arg1 + arg2
    }

    // Takes `self` by value (a copy is inspected as `self`).
    fn self_by_val(self, arg1: int, arg2: int) -> int {
        zzz(); // #break
        arg1 + arg2
    }

    // Takes an owned boxed `self`; consumed by the final call in main().
    fn self_owned(self: Box<Struct<T>>, arg1: int, arg2: int) -> int {
        zzz(); // #break
        arg1 + arg2
    }
}
fn main() {
    // Stack-allocated instance; values match the gdb/lldb "8888, -8888"
    // expectations above.
    let stack = Struct { x: (8888_u32, -8888_i32) };
    let _ = stack.self_by_ref(-1, -2);
    let _ = stack.self_by_val(-3, -4);

    // Heap-allocated (pre-1.0 `box` syntax) instance with x = 1234.5.
    let owned = box Struct { x: 1234.5f64 };
    let _ = owned.self_by_ref(-5, -6);
    let _ = owned.self_by_val(-7, -8);
    // Consumes the box (OWNED MOVED case in the debugger script).
    let _ = owned.self_owned(-9, -10);
}
fn zzz() {()}<|fim▁end|> | // compile-flags:-g
|
<|file_name|>delete_target_pool.go<|end_file_name|><|fim▁begin|>package gce
import (
"context"
"github.com/supergiant/control/pkg/clouds/gcesdk"
"io"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/supergiant/control/pkg/workflows/steps"
"google.golang.org/api/compute/v1"
)
const DeleteTargetPoolStepName = "gce_delete_target_pool"
// DeleteTargetPoolStep deletes the GCE target pool that fronts the
// cluster's master nodes. The compute service is obtained through an
// injected factory so tests can substitute a fake client.
type DeleteTargetPoolStep struct {
	getComputeSvc func(context.Context, steps.GCEConfig) (*computeService, error)
}
// NewDeleteTargetPoolStep constructs a DeleteTargetPoolStep wired to the
// real GCE compute API. An authenticated client is created lazily, on
// each invocation of the factory.
func NewDeleteTargetPoolStep() *DeleteTargetPoolStep {
	factory := func(ctx context.Context, cfg steps.GCEConfig) (*computeService, error) {
		client, err := gcesdk.GetClient(ctx, cfg)
		if err != nil {
			return nil, err
		}
		// Only the delete-target-pool call is needed by this step.
		svc := &computeService{
			deleteTargetPool: func(_ context.Context, c steps.GCEConfig, poolName string) (*compute.Operation, error) {
				return client.TargetPools.Delete(c.ServiceAccount.ProjectID, c.Region, poolName).Do()
			},
		}
		return svc, nil
	}
	return &DeleteTargetPoolStep{getComputeSvc: factory}
}
// Run deletes the target pool named in config.GCEConfig.TargetPoolName.
// Failure to obtain the compute service is fatal; failure of the delete
// call itself is only logged — the step is best-effort cleanup and
// always returns nil in that case (presumably so teardown continues;
// NOTE(review): confirm callers rely on this).
func (s *DeleteTargetPoolStep) Run(ctx context.Context, output io.Writer,
	config *steps.Config) error {
	logrus.Debugf("Step %s", DeleteTargetPoolStepName)

	svc, err := s.getComputeSvc(ctx, config.GCEConfig)

	if err != nil {
		logrus.Errorf("Error getting service %v", err)
		return errors.Wrapf(err, "%s getting service caused", DeleteTargetPoolStepName)
	}

	// Best-effort delete: the returned operation is ignored and the
	// error is logged but not propagated.
	_, err = svc.deleteTargetPool(ctx, config.GCEConfig, config.GCEConfig.TargetPoolName)

	if err != nil {
		logrus.Errorf("Error deleting target pool %v", err)
	}

	return nil
}
// Name returns the unique identifier of this workflow step.
func (s *DeleteTargetPoolStep) Name() string {
	return DeleteTargetPoolStepName
}
<|fim▁hole|>}
// Description returns a human-readable summary of what the step does.
func (s *DeleteTargetPoolStep) Description() string {
	return "Delete target pool master nodes"
}
func (s *DeleteTargetPoolStep) Rollback(context.Context, io.Writer, *steps.Config) error {
return nil
}<|fim▁end|> | func (s *DeleteTargetPoolStep) Depends() []string {
return nil |
<|file_name|>form.js<|end_file_name|><|fim▁begin|>module.exports = {
label: {
display: 'block',
width: '90%',
marginBottom: 15
},
labelText: {
color: '#777',
fontSize: '15px',
display: 'block'
},
input: {
display: 'block',
fontSize: '20px',
padding: 10,
width: '100%',
border: '1px solid #ddd'
},
textarea: {
display: 'block',
fontSize: '20px',
border: '1px solid #ddd',
width: 'auto',
height: 100,
padding: 10,
width: '100%'
}<|fim▁hole|>};<|fim▁end|> | |
<|file_name|>filters.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
# Copyright (C) Zing contributors.
#
# This file is a part of the Zing project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.db.models import Q
from pootle_statistics.models import SubmissionTypes
from pootle_store.constants import FUZZY, TRANSLATED, UNTRANSLATED
from pootle_store.util import SuggestionStates
class FilterNotFound(Exception):
    """Raised when a filter name has no matching ``filter_<name>`` method."""
    pass
class BaseUnitFilter(object):
    """Base class for queryset filters over Unit objects.

    Subclasses implement ``filter_<name>`` methods; :meth:`filter`
    dispatches a dash- or underscore-separated filter name to the
    matching method.
    """

    def __init__(self, qs, *args_, **kwargs_):
        # Extra positional/keyword arguments are accepted (and ignored
        # here) so that all filter subclasses share one call signature.
        self.qs = qs

    def filter(self, unit_filter):
        """Run ``filter_<unit_filter>`` or raise :class:`FilterNotFound`.

        The handler is looked up *before* being called, so an
        AttributeError raised inside a filter method now propagates
        instead of being silently converted into FilterNotFound (the
        previous try/except around the call masked such bugs).
        """
        method = getattr(
            self, "filter_%s" % unit_filter.replace("-", "_"), None)
        if method is None:
            raise FilterNotFound()
        return method()
class UnitChecksFilter(BaseUnitFilter):
    """Filter a Unit queryset by failing quality checks.

    Precedence: explicit check names win over a category; with neither,
    any unit that has at least one non-false-positive check matches.
    """

    def __init__(self, qs, *args, **kwargs):
        super(UnitChecksFilter, self).__init__(qs, *args, **kwargs)
        # Optional list of check names and/or a single check category.
        self.checks = kwargs.get("checks")
        self.category = kwargs.get("category")

    def filter_checks(self):
        # distinct() is needed because a unit may have several matching
        # qualitycheck rows.
        if self.checks:
            return self.qs.filter(
                qualitycheck__false_positive=False,
                qualitycheck__name__in=self.checks).distinct()
        if self.category:
            return self.qs.filter(
                qualitycheck__false_positive=False,
                qualitycheck__category=self.category).distinct()
        return self.qs.filter(
            qualitycheck__false_positive=False,
        ).distinct()
class UnitStateFilter(BaseUnitFilter):
    """Filter a Unit qs based on unit state"""

    def filter_all(self):
        # Every unit, unfiltered.
        return self.qs.all()

    def filter_translated(self):
        return self.qs.filter(state=TRANSLATED)

    def filter_untranslated(self):
        return self.qs.filter(state=UNTRANSLATED)

    def filter_fuzzy(self):
        return self.qs.filter(state=FUZZY)

    def filter_incomplete(self):
        # "Incomplete" = anything not fully translated: untranslated OR fuzzy.
        return self.qs.filter(
            Q(state=UNTRANSLATED) | Q(state=FUZZY))
class UnitContributionFilter(BaseUnitFilter):
    """Filter a Unit qs based on user contributions"""

    def __init__(self, qs, *args, **kwargs):
        super(UnitContributionFilter, self).__init__(qs, *args, **kwargs)
        # User whose contributions are filtered; may be None, in which
        # case every user-specific filter returns an empty queryset.
        self.user = kwargs.get("user")

    def filter_suggestions(self):
        # Units with at least one pending suggestion, from any user.
        # distinct() collapses duplicates from multiple suggestion rows.
        return self.qs.filter(
            suggestion__state=SuggestionStates.PENDING).distinct()

    def filter_user_suggestions(self):
        if not self.user:
            return self.qs.none()
        return self.qs.filter(
            suggestion__user=self.user,
            suggestion__state=SuggestionStates.PENDING).distinct()

    def filter_my_suggestions(self):
        # Alias for the "my-suggestions" filter name.
        return self.filter_user_suggestions()

    def filter_user_suggestions_accepted(self):
        if not self.user:
            return self.qs.none()
        return self.qs.filter(
            suggestion__user=self.user,
            suggestion__state=SuggestionStates.ACCEPTED).distinct()

    def filter_user_suggestions_rejected(self):
        if not self.user:
            return self.qs.none()
        return self.qs.filter(
            suggestion__user=self.user,
            suggestion__state=SuggestionStates.REJECTED).distinct()

    def filter_user_submissions(self):
        # Units the user edited directly (EDIT_TYPES submissions).
        if not self.user:
            return self.qs.none()
        return self.qs.filter(
            submitted_by=self.user,
            submission__type__in=SubmissionTypes.EDIT_TYPES).distinct()

    def filter_my_submissions(self):
        # Alias for the "my-submissions" filter name.
        return self.filter_user_submissions()
def filter_user_submissions_overwritten(self):<|fim▁hole|> submission__type__in=SubmissionTypes.EDIT_TYPES)
return qs.exclude(submitted_by=self.user).distinct()
def filter_my_submissions_overwritten(self):
return self.filter_user_submissions_overwritten()
class UnitSearchFilter(object):
    """Dispatch a unit-filter name across the known filter classes."""

    filters = (UnitChecksFilter, UnitStateFilter, UnitContributionFilter)

    def filter(self, qs, unit_filter, *args, **kwargs):
        """Try each filter class until one handles `unit_filter`.

        Falls back to an empty queryset when no class implements a
        matching ``filter_<name>`` method.
        """
        for filter_class in self.filters:
            try:
                return filter_class(qs, *args, **kwargs).filter(unit_filter)
            except FilterNotFound:
                continue
        return qs.none()
class UnitTextSearch(object):
    """Search Unit's fields for text strings
    """

    search_fields = (
        "source_f", "target_f", "locations",
        "translator_comment", "developer_comment")
    search_mappings = {
        "notes": ["translator_comment", "developer_comment"],
        "source": ["source_f"],
        "target": ["target_f"]}

    def __init__(self, qs):
        self.qs = qs

    def get_search_fields(self, sfields):
        """Resolve user-facing field names to model field names.

        Names in ``search_mappings`` expand to their mapped columns;
        names already in ``search_fields`` pass through; anything else
        is silently dropped.
        """
        resolved = set()
        for name in sfields:
            mapped = self.search_mappings.get(name)
            if mapped is not None:
                resolved.update(mapped)
            elif name in self.search_fields:
                resolved.add(name)
        return resolved

    def get_words(self, text, exact):
        """Return the terms to match: the raw text when *exact*,
        otherwise the non-empty space-separated tokens."""
        if exact:
            return [text]
        words = []
        for token in text.split(" "):
            token = token.strip()
            if token:
                words.append(token)
        return words

    def search(self, text, sfields, exact=False):
        """OR together per-field sub-searches over the resolved fields."""
        words = self.get_words(text, exact)
        combined = self.qs.none()
        for field in self.get_search_fields(sfields):
            combined = combined | self.search_field(field, words)
        return combined
def search_field(self, k, words):
subresult = self.qs
for word in words:
subresult = subresult.filter(
**{("%s__icontains" % k): word})
return subresult<|fim▁end|> | if not self.user:
return self.qs.none()
qs = self.qs.filter(
submitted_by=self.user, |
<|file_name|>MainWindow.cc<|end_file_name|><|fim▁begin|>/*
* Copyright 2012 Open Source Robotics Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include "gazebo_config.h"
#include "gazebo/gui/TopicSelector.hh"
#include "gazebo/gui/DataLogger.hh"
#include "gazebo/gui/viewers/ViewFactory.hh"
#include "gazebo/gui/viewers/TopicView.hh"
#include "gazebo/gui/viewers/ImageView.hh"
#include "gazebo/gazebo.hh"
#include "gazebo/common/Console.hh"
#include "gazebo/common/Exception.hh"
#include "gazebo/common/Events.hh"
#include "gazebo/transport/Node.hh"
#include "gazebo/transport/Transport.hh"
#include "gazebo/rendering/UserCamera.hh"
#include "gazebo/rendering/RenderEvents.hh"
#include "gazebo/gui/Actions.hh"
#include "gazebo/gui/Gui.hh"
#include "gazebo/gui/InsertModelWidget.hh"
#include "gazebo/gui/ModelListWidget.hh"
#include "gazebo/gui/RenderWidget.hh"
#include "gazebo/gui/ToolsWidget.hh"
#include "gazebo/gui/GLWidget.hh"
#include "gazebo/gui/MainWindow.hh"
#include "gazebo/gui/GuiEvents.hh"
#include "gazebo/gui/building/BuildingEditorPalette.hh"
#include "gazebo/gui/building/EditorEvents.hh"
#include "sdf/sdf.hh"
#ifdef HAVE_QWT
#include "gazebo/gui/Diagnostics.hh"
#endif
using namespace gazebo;
using namespace gui;
extern bool g_fullscreen;
/////////////////////////////////////////////////
MainWindow::MainWindow()
: renderWidget(0)
{
this->menuBar = NULL;
this->setObjectName("mainWindow");
this->requestMsg = NULL;
this->node = transport::NodePtr(new transport::Node());
this->node->Init();
gui::set_world(this->node->GetTopicNamespace());
(void) new QShortcut(Qt::CTRL + Qt::Key_Q, this, SLOT(close()));
this->CreateActions();
this->CreateMenus();
QWidget *mainWidget = new QWidget;
QVBoxLayout *mainLayout = new QVBoxLayout;
mainWidget->show();
this->setCentralWidget(mainWidget);
this->setDockOptions(QMainWindow::AnimatedDocks);
this->modelListWidget = new ModelListWidget(this);
InsertModelWidget *insertModel = new InsertModelWidget(this);
int minimumTabWidth = 250;
this->tabWidget = new QTabWidget();
this->tabWidget->setObjectName("mainTab");
this->tabWidget->addTab(this->modelListWidget, "World");
this->tabWidget->addTab(insertModel, "Insert");
this->tabWidget->setSizePolicy(QSizePolicy::Expanding,
QSizePolicy::Expanding);
this->tabWidget->setMinimumWidth(minimumTabWidth);
this->buildingEditorPalette = new BuildingEditorPalette(this);
this->buildingEditorTabWidget = new QTabWidget();
this->buildingEditorTabWidget->setObjectName("buildingEditorTab");
this->buildingEditorTabWidget->addTab(
this->buildingEditorPalette, "Building Editor");
this->buildingEditorTabWidget->setSizePolicy(QSizePolicy::Expanding,
QSizePolicy::Expanding);
this->buildingEditorTabWidget->setMinimumWidth(minimumTabWidth);
this->buildingEditorTabWidget->hide();
this->toolsWidget = new ToolsWidget();
this->renderWidget = new RenderWidget(mainWidget);
QHBoxLayout *centerLayout = new QHBoxLayout;
QSplitter *splitter = new QSplitter(this);
splitter->addWidget(this->tabWidget);
splitter->addWidget(this->buildingEditorTabWidget);
splitter->addWidget(this->renderWidget);
splitter->addWidget(this->toolsWidget);
QList<int> sizes;
sizes.push_back(250);
sizes.push_back(250);
sizes.push_back(this->width() - 250);
sizes.push_back(0);
splitter->setSizes(sizes);
splitter->setStretchFactor(0, 0);<|fim▁hole|> splitter->setStretchFactor(1, 0);
splitter->setStretchFactor(2, 2);
splitter->setStretchFactor(3, 0);
splitter->setCollapsible(2, false);
splitter->setHandleWidth(10);
centerLayout->addWidget(splitter);
centerLayout->setContentsMargins(0, 0, 0, 0);
centerLayout->setSpacing(0);
mainLayout->setSpacing(0);
mainLayout->addLayout(centerLayout, 1);
mainLayout->addWidget(new QSizeGrip(mainWidget), 0,
Qt::AlignBottom | Qt::AlignRight);
mainWidget->setLayout(mainLayout);
this->setWindowIcon(QIcon(":/images/gazebo.svg"));
std::string title = "Gazebo : ";
title += gui::get_world();
this->setWindowIconText(tr(title.c_str()));
this->setWindowTitle(tr(title.c_str()));
this->connections.push_back(
gui::Events::ConnectFullScreen(
boost::bind(&MainWindow::OnFullScreen, this, _1)));
this->connections.push_back(
gui::Events::ConnectMoveMode(
boost::bind(&MainWindow::OnMoveMode, this, _1)));
this->connections.push_back(
gui::Events::ConnectManipMode(
boost::bind(&MainWindow::OnManipMode, this, _1)));
this->connections.push_back(
event::Events::ConnectSetSelectedEntity(
boost::bind(&MainWindow::OnSetSelectedEntity, this, _1, _2)));
this->connections.push_back(
gui::editor::Events::ConnectFinishBuildingModel(
boost::bind(&MainWindow::OnFinishBuilding, this)));
gui::ViewFactory::RegisterAll();
}
/////////////////////////////////////////////////
MainWindow::~MainWindow()
{
}
/////////////////////////////////////////////////
void MainWindow::Load()
{
this->guiSub = this->node->Subscribe("~/gui", &MainWindow::OnGUI, this, true);
}
/////////////////////////////////////////////////
void MainWindow::Init()
{
this->renderWidget->show();
// Set the initial size of the window to 0.75 the desktop size,
// with a minimum value of 1024x768.
QSize winSize = QApplication::desktop()->size() * 0.75;
winSize.setWidth(std::max(1024, winSize.width()));
winSize.setHeight(std::max(768, winSize.height()));
this->resize(winSize);
this->worldControlPub =
this->node->Advertise<msgs::WorldControl>("~/world_control");
this->serverControlPub =
this->node->Advertise<msgs::ServerControl>("/gazebo/server/control");
this->selectionPub =
this->node->Advertise<msgs::Selection>("~/selection");
this->scenePub =
this->node->Advertise<msgs::Scene>("~/scene");
this->newEntitySub = this->node->Subscribe("~/model/info",
&MainWindow::OnModel, this, true);
this->statsSub =
this->node->Subscribe("~/world_stats", &MainWindow::OnStats, this);
this->requestPub = this->node->Advertise<msgs::Request>("~/request");
this->responseSub = this->node->Subscribe("~/response",
&MainWindow::OnResponse, this);
this->worldModSub = this->node->Subscribe("/gazebo/world/modify",
&MainWindow::OnWorldModify, this);
this->requestMsg = msgs::CreateRequest("entity_list");
this->requestPub->Publish(*this->requestMsg);
}
/////////////////////////////////////////////////
void MainWindow::closeEvent(QCloseEvent * /*_event*/)
{
gazebo::stop();
this->renderWidget->hide();
this->tabWidget->hide();
this->toolsWidget->hide();
this->connections.clear();
delete this->renderWidget;
}
/////////////////////////////////////////////////
void MainWindow::New()
{
msgs::ServerControl msg;
msg.set_new_world(true);
this->serverControlPub->Publish(msg);
}
/////////////////////////////////////////////////
void MainWindow::Diagnostics()
{
#ifdef HAVE_QWT
gui::Diagnostics *diag = new gui::Diagnostics(this);
diag->show();
#endif
}
/////////////////////////////////////////////////
// Prompt the user with a modal TopicSelector dialog and, if a topic was
// chosen, open the matching viewer window for its message type.
void MainWindow::SelectTopic()
{
  TopicSelector *selector = new TopicSelector(this);
  selector->exec();
  std::string topic = selector->GetTopic();
  std::string msgType = selector->GetMsgType();
  delete selector;

  if (!topic.empty())
  {
    // The view is parented to this window, so Qt owns its lifetime.
    TopicView *view = ViewFactory::NewView(msgType, topic, this);
    if (view)
      view->show();
    else
      gzerr << "Unable to create viewer for message type[" << msgType << "]\n";
  }
}
/////////////////////////////////////////////////
// Show a file dialog and ask the server to load the selected world.
// The actual load happens server-side via the server-control topic.
void MainWindow::Open()
{
  std::string filename = QFileDialog::getOpenFileName(this,
      tr("Open World"), "",
      tr("SDF Files (*.xml *.sdf *.world)")).toStdString();

  if (!filename.empty())
  {
    msgs::ServerControl msg;
    msg.set_open_filename(filename);
    this->serverControlPub->Publish(msg);
  }
}
/////////////////////////////////////////////////
// Import a Collada mesh into the scene via the createEntity GUI event.
// NOTE(review): the dialog filter label reads "SDF Files" but matches
// *.dae/*.zip — looks like a copy-paste from Open(); confirm intent.
// Only .dae files are actually handled; anything else is rejected.
void MainWindow::Import()
{
  std::string filename = QFileDialog::getOpenFileName(this,
      tr("Import Collada Mesh"), "",
      tr("SDF Files (*.dae *.zip)")).toStdString();

  if (!filename.empty())
  {
    if (filename.find(".dae") != std::string::npos)
    {
      gui::Events::createEntity("mesh", filename);
    }
    else
      gzerr << "Unable to import mesh[" << filename << "]\n";
  }
}
/////////////////////////////////////////////////
void MainWindow::SaveAs()
{
std::string filename = QFileDialog::getSaveFileName(this,
tr("Save World"), QString(),
tr("SDF Files (*.xml *.sdf *.world)")).toStdString();
// Return if the user has canceled.
if (filename.empty())
return;
g_saveAct->setEnabled(true);
this->saveFilename = filename;
this->Save();
}
/////////////////////////////////////////////////
void MainWindow::Save()
{
// Get the latest world in SDF.
boost::shared_ptr<msgs::Response> response =
transport::request(get_world(), "world_sdf");
msgs::GzString msg;
std::string msgData;
// Make sure the response is correct
if (response->response() != "error" && response->type() == msg.GetTypeName())
{
// Parse the response message
msg.ParseFromString(response->serialized_data());
// Parse the string into sdf, so that we can insert user camera settings.
sdf::SDF sdf_parsed;
sdf_parsed.SetFromString(msg.data());
// Check that sdf contains world
if (sdf_parsed.root->HasElement("world"))
{
sdf::ElementPtr world = sdf_parsed.root->GetElement("world");
sdf::ElementPtr guiElem = world->GetElement("gui");
if (guiElem->HasAttribute("fullscreen"))
guiElem->GetAttribute("fullscreen")->Set(g_fullscreen);
sdf::ElementPtr cameraElem = guiElem->GetElement("camera");
rendering::UserCameraPtr cam = gui::get_active_camera();
cameraElem->GetElement("pose")->Set(cam->GetWorldPose());
cameraElem->GetElement("view_controller")->Set(
cam->GetViewControllerTypeString());
// TODO: export track_visual properties as well.
msgData = sdf_parsed.root->ToString("");
}
else
{
msgData = msg.data();
gzerr << "Unable to parse world file to add user camera settings.\n";
}
// Open the file
std::ofstream out(this->saveFilename.c_str(), std::ios::out);
if (!out)
{
QMessageBox msgBox;
std::string str = "Unable to open file: " + this->saveFilename + "\n";
str += "Check file permissions.";
msgBox.setText(str.c_str());
msgBox.exec();
}
else
out << msgData;
out.close();
}
else
{
QMessageBox msgBox;
msgBox.setText("Unable to save world.\n"
"Unable to retrieve SDF world description from server.");
msgBox.exec();
}
}
/////////////////////////////////////////////////
void MainWindow::About()
{
std::string helpTxt;
helpTxt = "<table>"
"<tr><td style='padding-right:20px'>"
"<img src=':images/gazebo_neg_60x71.png'/></td>"
"<td>";
helpTxt += GAZEBO_VERSION_HEADER;
helpTxt += "</td></tr></table>";
helpTxt += "<div style='margin-left: 10px'>"
"<div>"
"<table>"
"<tr>"
"<td style='padding-right: 10px;'>Tutorials:</td>"
"<td><a href='http://gazebosim.org/wiki/tutorials' "
"style='text-decoration: none; color: #f58113'>"
"http://gazebosim.org/wiki/tutorials</a></td>"
"</tr>"
"<tr>"
"<td style='padding-right: 10px;'>User Guide:</td>"
"<td><a href='http://gazebosim.org/user_guide' "
"style='text-decoration: none; color: #f58113'>"
"http://gazebosim.org/user_guide</a></td>"
"</tr>"
"<tr>"
"<td style='padding-right: 10px;'>API:</td>"
"<td><a href='http://gazebosim.org/api' "
"style='text-decoration: none; color: #f58113'>"
"http://gazebosim.org/api</a></td>"
"</tr>"
"<tr>"
"<td style='padding-right: 10px;'>SDF:</td>"
"<td><a href='http://gazebosim.org/sdf' "
"style='text-decoration: none; color: #f58113'>"
"http://gazebosim.org/sdf</a></td>"
"</tr>"
"<tr>"
"<td style='padding-right: 10px;'>Messages:</td>"
"<td><a href='http://gazebosim.org/msgs' "
"style='text-decoration: none; color: #f58113'>"
"http://gazebosim.org/msgs</a></td>"
"</tr>"
"</table>"
"</div>";
QPixmap icon(":images/gazebo_neg_60x71.png");
QMessageBox aboutBox(this);
aboutBox.setWindowTitle("About Gazebo");
aboutBox.setTextFormat(Qt::RichText);
aboutBox.setText(QString::fromStdString(helpTxt));
aboutBox.exec();
}
/////////////////////////////////////////////////
// Resume simulation: uncheck the pause action in the UI, then publish
// an unpause request on the world-control topic.
void MainWindow::Play()
{
  msgs::WorldControl msg;
  msg.set_pause(false);

  g_pauseAct->setChecked(false);
  this->worldControlPub->Publish(msg);
}
/////////////////////////////////////////////////
// Pause simulation: uncheck the play action in the UI, then publish
// a pause request on the world-control topic.
void MainWindow::Pause()
{
  msgs::WorldControl msg;
  msg.set_pause(true);

  g_playAct->setChecked(false);
  this->worldControlPub->Publish(msg);
}
/////////////////////////////////////////////////
// Advance the (paused) simulation by a single step.
void MainWindow::Step()
{
  msgs::WorldControl msg;
  msg.set_step(true);

  this->worldControlPub->Publish(msg);
}
/////////////////////////////////////////////////
void MainWindow::NewModel()
{
/*ModelBuilderWidget *modelBuilder = new ModelBuilderWidget();
modelBuilder->Init();
modelBuilder->show();
modelBuilder->resize(800, 600);
*/
}
/////////////////////////////////////////////////
void MainWindow::OnResetModelOnly()
{
msgs::WorldControl msg;
msg.mutable_reset()->set_all(false);
msg.mutable_reset()->set_time_only(false);
msg.mutable_reset()->set_model_only(true);
this->worldControlPub->Publish(msg);
}
/////////////////////////////////////////////////
void MainWindow::OnResetWorld()
{
msgs::WorldControl msg;
msg.mutable_reset()->set_all(true);
this->worldControlPub->Publish(msg);
}
/////////////////////////////////////////////////
void MainWindow::OnEditBuilding()
{
bool isChecked = g_editBuildingAct->isChecked();
if (isChecked)
{
this->Pause();
this->renderWidget->ShowEditor(true);
this->tabWidget->hide();
this->buildingEditorTabWidget->show();
this->AttachEditorMenuBar();
}
else
{
this->renderWidget->ShowEditor(false);
this->tabWidget->show();
this->buildingEditorTabWidget->hide();
this->AttachMainMenuBar();
this->Play();
}
}
/////////////////////////////////////////////////
void MainWindow::Arrow()
{
gui::Events::manipMode("select");
}
/////////////////////////////////////////////////
void MainWindow::Translate()
{
gui::Events::manipMode("translate");
}
/////////////////////////////////////////////////
void MainWindow::Rotate()
{
gui::Events::manipMode("rotate");
}
/////////////////////////////////////////////////
void MainWindow::CreateBox()
{
g_arrowAct->setChecked(true);
gui::Events::createEntity("box", "");
}
/////////////////////////////////////////////////
void MainWindow::CreateSphere()
{
g_arrowAct->setChecked(true);
gui::Events::createEntity("sphere", "");
}
/////////////////////////////////////////////////
void MainWindow::CreateCylinder()
{
g_arrowAct->setChecked(true);
gui::Events::createEntity("cylinder", "");
}
/////////////////////////////////////////////////
void MainWindow::CreateMesh()
{
g_arrowAct->setChecked(true);
gui::Events::createEntity("mesh", "mesh");
}
/////////////////////////////////////////////////
void MainWindow::CreatePointLight()
{
g_arrowAct->setChecked(true);
gui::Events::createEntity("pointlight", "");
}
/////////////////////////////////////////////////
void MainWindow::CreateSpotLight()
{
g_arrowAct->setChecked(true);
gui::Events::createEntity("spotlight", "");
}
/////////////////////////////////////////////////
void MainWindow::CreateDirectionalLight()
{
g_arrowAct->setChecked(true);
gui::Events::createEntity("directionallight", "");
}
/////////////////////////////////////////////////
void MainWindow::CaptureScreenshot()
{
rendering::UserCameraPtr cam = gui::get_active_camera();
cam->SetCaptureDataOnce();
this->renderWidget->DisplayOverlayMsg(
"Screenshot saved in: " + cam->GetScreenshotPath(), 2000);
}
/////////////////////////////////////////////////
void MainWindow::InsertModel()
{
}
/////////////////////////////////////////////////
void MainWindow::OnFullScreen(bool _value)
{
if (_value)
{
this->showFullScreen();
this->renderWidget->showFullScreen();
this->tabWidget->hide();
this->toolsWidget->hide();
this->menuBar->hide();
}
else
{
this->showNormal();
this->renderWidget->showNormal();
if (!g_editBuildingAct->isChecked())
this->tabWidget->show();
this->toolsWidget->show();
this->menuBar->show();
}
}
/////////////////////////////////////////////////
// Reset the user camera to its default pose: positioned at (5, -5, 2)
// and oriented to look at the world origin.
void MainWindow::Reset()
{
  rendering::UserCameraPtr cam = gui::get_active_camera();

  math::Vector3 camPos(5, -5, 2);
  math::Vector3 lookAt(0, 0, 0);
  math::Vector3 delta = camPos - lookAt;

  // NOTE(review): yaw uses atan2(x, y) rather than the usual
  // atan2(y, x) — presumably to match the camera's yaw convention;
  // confirm against rendering::UserCamera before changing.
  double yaw = atan2(delta.x, delta.y);
  double pitch = atan2(delta.z, sqrt(delta.x*delta.x + delta.y*delta.y));
  cam->SetWorldPose(math::Pose(camPos, math::Vector3(0, pitch, yaw)));
}
/////////////////////////////////////////////////
void MainWindow::ShowCollisions()
{
if (g_showCollisionsAct->isChecked())
transport::requestNoReply(this->node->GetTopicNamespace(),
"show_collision", "all");
else
transport::requestNoReply(this->node->GetTopicNamespace(),
"hide_collision", "all");
}
/////////////////////////////////////////////////
void MainWindow::ShowGrid()
{
msgs::Scene msg;
msg.set_name("default");
msg.set_grid(g_showGridAct->isChecked());
this->scenePub->Publish(msg);
}
/////////////////////////////////////////////////
void MainWindow::ShowJoints()
{
if (g_showJointsAct->isChecked())
transport::requestNoReply(this->node->GetTopicNamespace(),
"show_joints", "all");
else
transport::requestNoReply(this->node->GetTopicNamespace(),
"hide_joints", "all");
}
/////////////////////////////////////////////////
void MainWindow::SetTransparent()
{
if (g_transparentAct->isChecked())
transport::requestNoReply(this->node->GetTopicNamespace(),
"set_transparent", "all");
else
transport::requestNoReply(this->node->GetTopicNamespace(),
"set_opaque", "all");
}
/////////////////////////////////////////////////
void MainWindow::SetWireframe()
{
if (g_viewWireframeAct->isChecked())
transport::requestNoReply(this->node->GetTopicNamespace(),
"set_wireframe", "all");
else
transport::requestNoReply(this->node->GetTopicNamespace(),
"set_solid", "all");
}
/////////////////////////////////////////////////
void MainWindow::ShowCOM()
{
if (g_showCOMAct->isChecked())
transport::requestNoReply(this->node->GetTopicNamespace(),
"show_com", "all");
else
transport::requestNoReply(this->node->GetTopicNamespace(),
"hide_com", "all");
}
/////////////////////////////////////////////////
void MainWindow::ShowContacts()
{
if (g_showContactsAct->isChecked())
transport::requestNoReply(this->node->GetTopicNamespace(),
"show_contact", "all");
else
transport::requestNoReply(this->node->GetTopicNamespace(),
"hide_contact", "all");
}
/////////////////////////////////////////////////
// Toggle the global fullscreen flag and broadcast the change; the
// actual widget show/hide happens in OnFullScreen().
void MainWindow::FullScreen()
{
  g_fullscreen = !g_fullscreen;
  gui::Events::fullScreen(g_fullscreen);
}
/////////////////////////////////////////////////
void MainWindow::FPS()
{
gui::Events::fps();
}
/////////////////////////////////////////////////
void MainWindow::Orbit()
{
gui::Events::orbit();
}
/////////////////////////////////////////////////
void MainWindow::DataLogger()
{
gui::DataLogger *dataLogger = new gui::DataLogger(this);
dataLogger->show();
}
////////////////////////////////////////////////
void MainWindow::BuildingEditorSave()
{
gui::editor::Events::saveBuildingEditor();
}
/////////////////////////////////////////////////
void MainWindow::BuildingEditorDiscard()
{
gui::editor::Events::discardBuildingEditor();
}
/////////////////////////////////////////////////
void MainWindow::BuildingEditorDone()
{
gui::editor::Events::doneBuildingEditor();
}
/////////////////////////////////////////////////
void MainWindow::BuildingEditorExit()
{
gui::editor::Events::exitBuildingEditor();
}
/////////////////////////////////////////////////
void MainWindow::CreateActions()
{
  // Construct every QAction used by the menus and toolbars. The actions
  // live in file-scope globals (g_*Act) so the menu bars, which are
  // rebuilt when switching between the main window and the building
  // editor, can share them.

  /*g_newAct = new QAction(tr("&New World"), this);
  g_newAct->setShortcut(tr("Ctrl+N"));
  g_newAct->setStatusTip(tr("Create a new world"));
  connect(g_newAct, SIGNAL(triggered()), this, SLOT(New()));
  */

  g_topicVisAct = new QAction(tr("Topic Visualization"), this);
  g_topicVisAct->setShortcut(tr("Ctrl+T"));
  g_topicVisAct->setStatusTip(tr("Select a topic to visualize"));
  connect(g_topicVisAct, SIGNAL(triggered()), this, SLOT(SelectTopic()));

#ifdef HAVE_QWT
  /*g_diagnosticsAct = new QAction(tr("Diagnostic Plot"), this);
  g_diagnosticsAct->setShortcut(tr("Ctrl+U"));
  g_diagnosticsAct->setStatusTip(tr("Plot diagnostic information"));
  connect(g_diagnosticsAct, SIGNAL(triggered()), this, SLOT(Diagnostics()));
  */
#endif

  g_openAct = new QAction(tr("&Open World"), this);
  g_openAct->setShortcut(tr("Ctrl+O"));
  // BUGFIX: status tip previously read "Open an world file".
  g_openAct->setStatusTip(tr("Open a world file"));
  connect(g_openAct, SIGNAL(triggered()), this, SLOT(Open()));

  /*g_importAct = new QAction(tr("&Import Mesh"), this);
  g_importAct->setShortcut(tr("Ctrl+I"));
  g_importAct->setStatusTip(tr("Import a Collada mesh"));
  connect(g_importAct, SIGNAL(triggered()), this, SLOT(Import()));
  */

  g_saveAct = new QAction(tr("&Save World"), this);
  g_saveAct->setShortcut(tr("Ctrl+S"));
  g_saveAct->setStatusTip(tr("Save world"));
  // Starts disabled; enabled elsewhere once saving becomes possible.
  g_saveAct->setEnabled(false);
  connect(g_saveAct, SIGNAL(triggered()), this, SLOT(Save()));

  g_saveAsAct = new QAction(tr("Save World &As"), this);
  g_saveAsAct->setShortcut(tr("Ctrl+Shift+S"));
  g_saveAsAct->setStatusTip(tr("Save world to new file"));
  connect(g_saveAsAct, SIGNAL(triggered()), this, SLOT(SaveAs()));

  g_aboutAct = new QAction(tr("&About"), this);
  g_aboutAct->setStatusTip(tr("Show the about info"));
  connect(g_aboutAct, SIGNAL(triggered()), this, SLOT(About()));

  g_quitAct = new QAction(tr("&Quit"), this);
  g_quitAct->setStatusTip(tr("Quit"));
  connect(g_quitAct, SIGNAL(triggered()), this, SLOT(close()));

  g_newModelAct = new QAction(tr("New &Model"), this);
  g_newModelAct->setShortcut(tr("Ctrl+M"));
  g_newModelAct->setStatusTip(tr("Create a new model"));
  connect(g_newModelAct, SIGNAL(triggered()), this, SLOT(NewModel()));

  g_resetModelsAct = new QAction(tr("&Reset Model Poses"), this);
  g_resetModelsAct->setShortcut(tr("Ctrl+Shift+R"));
  g_resetModelsAct->setStatusTip(tr("Reset model poses"));
  connect(g_resetModelsAct, SIGNAL(triggered()), this,
      SLOT(OnResetModelOnly()));

  g_resetWorldAct = new QAction(tr("&Reset World"), this);
  g_resetWorldAct->setShortcut(tr("Ctrl+R"));
  g_resetWorldAct->setStatusTip(tr("Reset the world"));
  connect(g_resetWorldAct, SIGNAL(triggered()), this, SLOT(OnResetWorld()));

  // Checkable: reflects whether the building editor is active.
  g_editBuildingAct = new QAction(tr("&Building Editor"), this);
  g_editBuildingAct->setShortcut(tr("Ctrl+B"));
  g_editBuildingAct->setStatusTip(tr("Enter Building Editor Mode"));
  g_editBuildingAct->setCheckable(true);
  g_editBuildingAct->setChecked(false);
  connect(g_editBuildingAct, SIGNAL(triggered()), this, SLOT(OnEditBuilding()));

  // Play/pause are mutually exclusive checkable buttons kept in sync by
  // OnStats(); the world starts in the "playing" state.
  g_playAct = new QAction(QIcon(":/images/play.png"), tr("Play"), this);
  g_playAct->setStatusTip(tr("Run the world"));
  g_playAct->setCheckable(true);
  g_playAct->setChecked(true);
  connect(g_playAct, SIGNAL(triggered()), this, SLOT(Play()));

  g_pauseAct = new QAction(QIcon(":/images/pause.png"), tr("Pause"), this);
  g_pauseAct->setStatusTip(tr("Pause the world"));
  g_pauseAct->setCheckable(true);
  g_pauseAct->setChecked(false);
  connect(g_pauseAct, SIGNAL(triggered()), this, SLOT(Pause()));

  g_stepAct = new QAction(QIcon(":/images/end.png"), tr("Step"), this);
  g_stepAct->setStatusTip(tr("Step the world"));
  connect(g_stepAct, SIGNAL(triggered()), this, SLOT(Step()));

  // Manipulation modes: selection, translate, rotate.
  g_arrowAct = new QAction(QIcon(":/images/arrow.png"),
      tr("Selection Mode"), this);
  g_arrowAct->setStatusTip(tr("Move camera"));
  g_arrowAct->setCheckable(true);
  g_arrowAct->setChecked(true);
  connect(g_arrowAct, SIGNAL(triggered()), this, SLOT(Arrow()));

  g_translateAct = new QAction(QIcon(":/images/translate.png"),
      tr("Translation Mode"), this);
  g_translateAct->setStatusTip(tr("Translate an object"));
  g_translateAct->setCheckable(true);
  g_translateAct->setChecked(false);
  connect(g_translateAct, SIGNAL(triggered()), this, SLOT(Translate()));

  g_rotateAct = new QAction(QIcon(":/images/rotate.png"),
      tr("Rotation Mode"), this);
  g_rotateAct->setStatusTip(tr("Rotate an object"));
  g_rotateAct->setCheckable(true);
  g_rotateAct->setChecked(false);
  connect(g_rotateAct, SIGNAL(triggered()), this, SLOT(Rotate()));

  // Entity-creation actions; OnMoveMode() unchecks them all when the
  // user returns to selection mode.
  g_boxCreateAct = new QAction(QIcon(":/images/box.png"), tr("Box"), this);
  g_boxCreateAct->setStatusTip(tr("Create a box"));
  g_boxCreateAct->setCheckable(true);
  connect(g_boxCreateAct, SIGNAL(triggered()), this, SLOT(CreateBox()));

  g_sphereCreateAct = new QAction(QIcon(":/images/sphere.png"),
      tr("Sphere"), this);
  g_sphereCreateAct->setStatusTip(tr("Create a sphere"));
  g_sphereCreateAct->setCheckable(true);
  connect(g_sphereCreateAct, SIGNAL(triggered()), this,
      SLOT(CreateSphere()));

  g_cylinderCreateAct = new QAction(QIcon(":/images/cylinder.png"),
      tr("Cylinder"), this);
  // BUGFIX: status tip previously read "Create a sphere" (copy/paste).
  g_cylinderCreateAct->setStatusTip(tr("Create a cylinder"));
  g_cylinderCreateAct->setCheckable(true);
  connect(g_cylinderCreateAct, SIGNAL(triggered()), this,
      SLOT(CreateCylinder()));

  // NOTE(review): reuses the cylinder icon -- presumably no dedicated
  // mesh icon exists; confirm before changing the resource path.
  g_meshCreateAct = new QAction(QIcon(":/images/cylinder.png"),
      tr("Mesh"), this);
  g_meshCreateAct->setStatusTip(tr("Create a mesh"));
  g_meshCreateAct->setCheckable(true);
  connect(g_meshCreateAct, SIGNAL(triggered()), this,
      SLOT(CreateMesh()));

  g_pointLghtCreateAct = new QAction(QIcon(":/images/pointlight.png"),
      tr("Point Light"), this);
  g_pointLghtCreateAct->setStatusTip(tr("Create a point light"));
  g_pointLghtCreateAct->setCheckable(true);
  connect(g_pointLghtCreateAct, SIGNAL(triggered()), this,
      SLOT(CreatePointLight()));

  g_spotLghtCreateAct = new QAction(QIcon(":/images/spotlight.png"),
      tr("Spot Light"), this);
  g_spotLghtCreateAct->setStatusTip(tr("Create a spot light"));
  g_spotLghtCreateAct->setCheckable(true);
  connect(g_spotLghtCreateAct, SIGNAL(triggered()), this,
      SLOT(CreateSpotLight()));

  g_dirLghtCreateAct = new QAction(QIcon(":/images/directionallight.png"),
      tr("Directional Light"), this);
  g_dirLghtCreateAct->setStatusTip(tr("Create a directional light"));
  g_dirLghtCreateAct->setCheckable(true);
  connect(g_dirLghtCreateAct, SIGNAL(triggered()), this,
      SLOT(CreateDirectionalLight()));

  g_resetAct = new QAction(tr("Reset Camera"), this);
  g_resetAct->setStatusTip(tr("Move camera to pose"));
  connect(g_resetAct, SIGNAL(triggered()), this,
      SLOT(Reset()));

  // View-menu toggles for the various debug visualizations.
  g_showCollisionsAct = new QAction(tr("Collisions"), this);
  g_showCollisionsAct->setStatusTip(tr("Show Collisions"));
  g_showCollisionsAct->setCheckable(true);
  g_showCollisionsAct->setChecked(false);
  connect(g_showCollisionsAct, SIGNAL(triggered()), this,
      SLOT(ShowCollisions()));

  g_showGridAct = new QAction(tr("Grid"), this);
  g_showGridAct->setStatusTip(tr("Show Grid"));
  g_showGridAct->setCheckable(true);
  g_showGridAct->setChecked(true);
  connect(g_showGridAct, SIGNAL(triggered()), this,
      SLOT(ShowGrid()));

  g_transparentAct = new QAction(tr("Transparent"), this);
  g_transparentAct->setStatusTip(tr("Transparent"));
  g_transparentAct->setCheckable(true);
  g_transparentAct->setChecked(false);
  connect(g_transparentAct, SIGNAL(triggered()), this,
      SLOT(SetTransparent()));

  g_viewWireframeAct = new QAction(tr("Wireframe"), this);
  g_viewWireframeAct->setStatusTip(tr("Wireframe"));
  g_viewWireframeAct->setCheckable(true);
  g_viewWireframeAct->setChecked(false);
  connect(g_viewWireframeAct, SIGNAL(triggered()), this,
      SLOT(SetWireframe()));

  g_showCOMAct = new QAction(tr("Center of Mass"), this);
  g_showCOMAct->setStatusTip(tr("Show COM"));
  g_showCOMAct->setCheckable(true);
  g_showCOMAct->setChecked(false);
  connect(g_showCOMAct, SIGNAL(triggered()), this,
      SLOT(ShowCOM()));

  g_showContactsAct = new QAction(tr("Contacts"), this);
  g_showContactsAct->setStatusTip(tr("Show Contacts"));
  g_showContactsAct->setCheckable(true);
  g_showContactsAct->setChecked(false);
  connect(g_showContactsAct, SIGNAL(triggered()), this,
      SLOT(ShowContacts()));

  g_showJointsAct = new QAction(tr("Joints"), this);
  g_showJointsAct->setStatusTip(tr("Show Joints"));
  g_showJointsAct->setCheckable(true);
  g_showJointsAct->setChecked(false);
  connect(g_showJointsAct, SIGNAL(triggered()), this,
      SLOT(ShowJoints()));

  g_fullScreenAct = new QAction(tr("Full Screen"), this);
  g_fullScreenAct->setStatusTip(tr("Full Screen(F-11 to exit)"));
  connect(g_fullScreenAct, SIGNAL(triggered()), this,
      SLOT(FullScreen()));

  // g_fpsAct = new QAction(tr("FPS View Control"), this);
  // g_fpsAct->setStatusTip(tr("First Person Shooter View Style"));
  // connect(g_fpsAct, SIGNAL(triggered()), this, SLOT(FPS()));

  g_orbitAct = new QAction(tr("Orbit View Control"), this);
  g_orbitAct->setStatusTip(tr("Orbit View Style"));
  connect(g_orbitAct, SIGNAL(triggered()), this, SLOT(Orbit()));

  g_dataLoggerAct = new QAction(tr("&Log Data"), this);
  g_dataLoggerAct->setShortcut(tr("Ctrl+D"));
  g_dataLoggerAct->setStatusTip(tr("Data Logging Utility"));
  connect(g_dataLoggerAct, SIGNAL(triggered()), this, SLOT(DataLogger()));

  // Building-editor menu actions (attached by AttachEditorMenuBar).
  g_buildingEditorSaveAct = new QAction(tr("&Save (As)"), this);
  g_buildingEditorSaveAct->setStatusTip(tr("Save (As)"));
  g_buildingEditorSaveAct->setShortcut(tr("Ctrl+S"));
  g_buildingEditorSaveAct->setCheckable(false);
  connect(g_buildingEditorSaveAct, SIGNAL(triggered()), this,
      SLOT(BuildingEditorSave()));

  g_buildingEditorDiscardAct = new QAction(tr("&Discard"), this);
  g_buildingEditorDiscardAct->setStatusTip(tr("Discard"));
  g_buildingEditorDiscardAct->setShortcut(tr("Ctrl+D"));
  g_buildingEditorDiscardAct->setCheckable(false);
  connect(g_buildingEditorDiscardAct, SIGNAL(triggered()), this,
      SLOT(BuildingEditorDiscard()));

  g_buildingEditorDoneAct = new QAction(tr("Don&e"), this);
  g_buildingEditorDoneAct->setShortcut(tr("Ctrl+E"));
  g_buildingEditorDoneAct->setStatusTip(tr("Done"));
  g_buildingEditorDoneAct->setCheckable(false);
  connect(g_buildingEditorDoneAct, SIGNAL(triggered()), this,
      SLOT(BuildingEditorDone()));

  g_buildingEditorExitAct = new QAction(tr("E&xit Building Editor"), this);
  g_buildingEditorExitAct->setStatusTip(tr("Exit Building Editor"));
  g_buildingEditorExitAct->setShortcut(tr("Ctrl+X"));
  g_buildingEditorExitAct->setCheckable(false);
  connect(g_buildingEditorExitAct, SIGNAL(triggered()), this,
      SLOT(BuildingEditorExit()));

  g_screenshotAct = new QAction(QIcon(":/images/screenshot.png"),
      tr("Screenshot"), this);
  g_screenshotAct->setStatusTip(tr("Take a screenshot"));
  connect(g_screenshotAct, SIGNAL(triggered()), this,
      SLOT(CaptureScreenshot()));
}
/////////////////////////////////////////////////
void MainWindow::AttachEditorMenuBar()
{
  // Replace the current menu bar with the building-editor menu
  // (Save/Discard/Done/Exit). The old bar is removed and deleted first.
  if (this->menuBar)
  {
    this->menuLayout->removeWidget(this->menuBar);
    delete this->menuBar;
  }

  this->menuBar = new QMenuBar;
  this->menuBar->setSizePolicy(QSizePolicy::Fixed,
      QSizePolicy::Fixed);

  QMenu *buildingEditorFileMenu = this->menuBar->addMenu(
      tr("&File"));
  buildingEditorFileMenu->addAction(g_buildingEditorSaveAct);
  buildingEditorFileMenu->addAction(g_buildingEditorDiscardAct);
  buildingEditorFileMenu->addAction(g_buildingEditorDoneAct);
  buildingEditorFileMenu->addAction(g_buildingEditorExitAct);

  this->menuLayout->setMenuBar(this->menuBar);
}
/////////////////////////////////////////////////
void MainWindow::AttachMainMenuBar()
{
  // Build the normal (non-editor) menu bar, replacing whatever bar is
  // currently installed. Actions themselves come from CreateActions().
  if (this->menuBar)
  {
    this->menuLayout->removeWidget(this->menuBar);
    delete this->menuBar;
  }

  this->menuBar = new QMenuBar;
  this->menuBar->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);

  // File menu. Open/import/new are currently disabled (commented out).
  QMenu *fileMenu = this->menuBar->addMenu(tr("&File"));
  // fileMenu->addAction(g_openAct);
  // fileMenu->addAction(g_importAct);
  // fileMenu->addAction(g_newAct);
  fileMenu->addAction(g_saveAct);
  fileMenu->addAction(g_saveAsAct);
  fileMenu->addSeparator();
  fileMenu->addAction(g_quitAct);

  QMenu *editMenu = this->menuBar->addMenu(tr("&Edit"));
  editMenu->addAction(g_resetModelsAct);
  editMenu->addAction(g_resetWorldAct);
  editMenu->addAction(g_editBuildingAct);

  // View menu: render toggles grouped by separators.
  QMenu *viewMenu = this->menuBar->addMenu(tr("&View"));
  viewMenu->addAction(g_showGridAct);
  viewMenu->addSeparator();

  viewMenu->addAction(g_transparentAct);
  viewMenu->addAction(g_viewWireframeAct);
  viewMenu->addSeparator();
  viewMenu->addAction(g_showCollisionsAct);
  viewMenu->addAction(g_showJointsAct);
  viewMenu->addAction(g_showCOMAct);
  viewMenu->addAction(g_showContactsAct);
  viewMenu->addSeparator();

  viewMenu->addAction(g_resetAct);
  viewMenu->addAction(g_fullScreenAct);
  viewMenu->addSeparator();
  // viewMenu->addAction(g_fpsAct);
  viewMenu->addAction(g_orbitAct);

  QMenu *windowMenu = this->menuBar->addMenu(tr("&Window"));
  windowMenu->addAction(g_topicVisAct);
  windowMenu->addSeparator();
  windowMenu->addAction(g_dataLoggerAct);

#ifdef HAVE_QWT
  // windowMenu->addAction(g_diagnosticsAct);
#endif

  this->menuBar->addSeparator();

  QMenu *helpMenu = this->menuBar->addMenu(tr("&Help"));
  helpMenu->addAction(g_aboutAct);

  this->menuLayout->setMenuBar(this->menuBar);
}
/////////////////////////////////////////////////
void MainWindow::CreateMenus()
{
  // Host the menu bar in a frame with a horizontal layout so the bar
  // can be swapped later (AttachMainMenuBar / AttachEditorMenuBar).
  this->menuLayout = new QHBoxLayout;

  QFrame *frame = new QFrame;

  this->AttachMainMenuBar();

  this->menuLayout->addStretch(5);
  this->menuLayout->setContentsMargins(0, 0, 0, 0);

  frame->setLayout(this->menuLayout);
  frame->setSizePolicy(QSizePolicy::Minimum, QSizePolicy::Fixed);

  this->setMenuWidget(frame);
}
/////////////////////////////////////////////////
void MainWindow::CreateToolbars()
{
  // Single toolbar holding the simulation play/pause/step controls.
  this->playToolbar = this->addToolBar(tr("Play"));
  this->playToolbar->addAction(g_playAct);
  this->playToolbar->addAction(g_pauseAct);
  this->playToolbar->addAction(g_stepAct);
}
/////////////////////////////////////////////////
void MainWindow::OnMoveMode(bool _mode)
{
  // Entering move/selection mode cancels any pending entity-creation
  // action so the toolbar buttons reflect the current mode.
  if (_mode)
  {
    g_boxCreateAct->setChecked(false);
    g_sphereCreateAct->setChecked(false);
    g_cylinderCreateAct->setChecked(false);
    g_meshCreateAct->setChecked(false);
    g_pointLghtCreateAct->setChecked(false);
    g_spotLghtCreateAct->setChecked(false);
    g_dirLghtCreateAct->setChecked(false);
  }
}
/////////////////////////////////////////////////
void MainWindow::OnGUI(ConstGUIPtr &_msg)
{
  // Apply GUI configuration received from the server: fullscreen flag
  // and initial user-camera setup (pose, view controller, tracking).
  if (_msg->has_fullscreen() && _msg->fullscreen())
  {
    this->FullScreen();
  }

  if (_msg->has_camera())
  {
    // NOTE(review): assumes an active user camera already exists when
    // this message arrives -- cam is never null-checked. Confirm the
    // message-ordering guarantee.
    rendering::UserCameraPtr cam = gui::get_active_camera();

    if (_msg->camera().has_pose())
    {
      const msgs::Pose &msg_pose = _msg->camera().pose();

      math::Vector3 cam_pose_pos = math::Vector3(
        msg_pose.position().x(),
        msg_pose.position().y(),
        msg_pose.position().z());

      math::Quaternion cam_pose_rot = math::Quaternion(
        msg_pose.orientation().w(),
        msg_pose.orientation().x(),
        msg_pose.orientation().y(),
        msg_pose.orientation().z());

      math::Pose cam_pose(cam_pose_pos, cam_pose_rot);

      cam->SetWorldPose(cam_pose);
    }

    if (_msg->camera().has_view_controller())
    {
      cam->SetViewController(_msg->camera().view_controller());
    }

    if (_msg->camera().has_track())
    {
      // Attach the camera to a named visual; min/max distance bounds
      // default to 0 when not provided.
      std::string name = _msg->camera().track().name();

      double minDist = 0.0;
      double maxDist = 0.0;

      if (_msg->camera().track().has_min_dist())
        minDist = _msg->camera().track().min_dist();
      if (_msg->camera().track().has_max_dist())
        maxDist = _msg->camera().track().max_dist();

      cam->AttachToVisual(name, false, minDist, maxDist);
    }
  }
}
/////////////////////////////////////////////////
void MainWindow::OnModel(ConstModelPtr &_msg)
{
  // Cache the numeric id of the model, each of its links, and each
  // link collision, keyed by name, then notify GUI listeners.
  this->entities[_msg->name()] = _msg->id();
  for (int i = 0; i < _msg->link_size(); i++)
  {
    this->entities[_msg->link(i).name()] = _msg->link(i).id();

    for (int j = 0; j < _msg->link(i).collision_size(); j++)
    {
      this->entities[_msg->link(i).collision(j).name()] =
        _msg->link(i).collision(j).id();
    }
  }

  gui::Events::modelUpdate(*_msg);
}
/////////////////////////////////////////////////
void MainWindow::OnResponse(ConstResponsePtr &_msg)
{
  // Ignore responses that do not answer our outstanding request.
  if (!this->requestMsg || _msg->id() != this->requestMsg->id())
    return;

  msgs::Model_V modelVMsg;

  if (_msg->has_type() && _msg->type() == modelVMsg.GetTypeName())
  {
    // The response carries the entity list: cache every model, link
    // and collision id by name and notify listeners per model.
    modelVMsg.ParseFromString(_msg->serialized_data());

    for (int i = 0; i < modelVMsg.models_size(); i++)
    {
      this->entities[modelVMsg.models(i).name()] = modelVMsg.models(i).id();

      for (int j = 0; j < modelVMsg.models(i).link_size(); j++)
      {
        this->entities[modelVMsg.models(i).link(j).name()] =
          modelVMsg.models(i).link(j).id();

        for (int k = 0; k < modelVMsg.models(i).link(j).collision_size(); k++)
        {
          this->entities[modelVMsg.models(i).link(j).collision(k).name()] =
            modelVMsg.models(i).link(j).collision(k).id();
        }
      }
      gui::Events::modelUpdate(modelVMsg.models(i));
    }
  }

  // The request has been answered; release it.
  delete this->requestMsg;
  this->requestMsg = NULL;
}
/////////////////////////////////////////////////
unsigned int MainWindow::GetEntityId(const std::string &_name)
{
unsigned int result = 0;
std::string name = _name;
boost::replace_first(name, gui::get_world()+"::", "");
std::map<std::string, unsigned int>::iterator iter;
iter = this->entities.find(name);
if (iter != this->entities.end())
result = iter->second;
return result;
}
/////////////////////////////////////////////////
bool MainWindow::HasEntityName(const std::string &_name)
{
bool result = false;
std::string name = _name;
boost::replace_first(name, gui::get_world()+"::", "");
std::map<std::string, unsigned int>::iterator iter;
iter = this->entities.find(name);
if (iter != this->entities.end())
result = true;
return result;
}
/////////////////////////////////////////////////
void MainWindow::OnWorldModify(ConstWorldModifyPtr &_msg)
{
  // React to world creation/removal: build or tear down the render
  // scene and, on creation, request the entity list from the server.
  if (_msg->has_create() && _msg->create())
  {
    this->renderWidget->CreateScene(_msg->world_name());
    this->requestMsg = msgs::CreateRequest("entity_list");
    this->requestPub->Publish(*this->requestMsg);
  }
  else if (_msg->has_remove() && _msg->remove())
    this->renderWidget->RemoveScene(_msg->world_name());
}
/////////////////////////////////////////////////
void MainWindow::OnManipMode(const std::string &_mode)
{
  // Check the arrow toolbar button whenever the manipulation mode
  // returns to selection or entity creation.
  if (_mode == "select" || _mode == "make_entity")
    g_arrowAct->setChecked(true);
}
/////////////////////////////////////////////////
void MainWindow::OnSetSelectedEntity(const std::string &_name,
                                     const std::string &/*_mode*/)
{
  // When an entity is selected, switch the side panel to tab 0.
  if (!_name.empty())
  {
    this->tabWidget->setCurrentIndex(0);
  }
}
/////////////////////////////////////////////////
void MainWindow::OnStats(ConstWorldStatisticsPtr &_msg)
{
  // Keep the play/pause toolbar buttons in sync with the paused state
  // reported by the server's world statistics.
  if (_msg->paused() && g_playAct->isChecked())
  {
    g_playAct->setChecked(false);
    g_pauseAct->setChecked(true);
  }
  else if (!_msg->paused() && !g_playAct->isChecked())
  {
    g_playAct->setChecked(true);
    g_pauseAct->setChecked(false);
  }
}
/////////////////////////////////////////////////
void MainWindow::OnFinishBuilding()
{
  // Flip the editor menu action and reuse the normal OnEditBuilding()
  // toggle path to leave (or re-enter) building-editor mode.
  g_editBuildingAct->setChecked(!g_editBuildingAct->isChecked());
  this->OnEditBuilding();
}
/////////////////////////////////////////////////
void MainWindow::ItemSelected(QTreeWidgetItem *_item, int)
{
  // Clicking a tree item toggles its expanded state.
  _item->setExpanded(!_item->isExpanded());
}
/////////////////////////////////////////////////
// Remember the view so paint() can query its style and enabled state.
TreeViewDelegate::TreeViewDelegate(QTreeView *_view, QWidget *_parent)
  : QItemDelegate(_parent), view(_view)
{
}
/////////////////////////////////////////////////
void TreeViewDelegate::paint(QPainter *painter,
    const QStyleOptionViewItem &option,
    const QModelIndex &index) const
{
  const QAbstractItemModel *model = index.model();
  Q_ASSERT(model);

  if (!model->parent(index).isValid())
  {
    // Top-level rows get custom painting: a small rounded "expander"
    // marker followed by elided text, colored by hover/open state.
    QRect r = option.rect;
    QColor orange(245, 129, 19);
    QColor blue(71, 99, 183);
    QColor grey(100, 100, 100);

    // Blue while open or hovered, grey otherwise.
    if (option.state & QStyle::State_Open ||
        option.state & QStyle::State_MouseOver)
    {
      painter->setPen(blue);
      painter->setBrush(QBrush(blue));
    }
    else
    {
      painter->setPen(grey);
      painter->setBrush(QBrush(grey));
    }

    // Draw a vertical connector line below the marker while open.
    if (option.state & QStyle::State_Open)
      painter->drawLine(r.left()+8, r.top() + (r.height()*0.5 - 5),
          r.left()+8, r.top() + r.height()-1);

    painter->save();
    painter->setRenderHints(QPainter::Antialiasing |
        QPainter::TextAntialiasing);
    painter->drawRoundedRect(r.left()+4, r.top() + (r.height()*0.5 - 5),
        10, 10, 20.0, 10.0, Qt::RelativeSize);

    // draw text, elided in the middle to fit the available width
    QRect textrect = QRect(r.left() + 20, r.top(),
                           r.width() - 40,
                           r.height());
    QString text = elidedText(
        option.fontMetrics,
        textrect.width(),
        Qt::ElideMiddle,
        model->data(index, Qt::DisplayRole).toString());

    // Text highlights orange on hover.
    if (option.state & QStyle::State_MouseOver)
      painter->setPen(QPen(orange, 1));
    else
      painter->setPen(QPen(grey, 1));

    this->view->style()->drawItemText(painter, textrect, Qt::AlignLeft,
        option.palette, this->view->isEnabled(), text);
    painter->restore();
  }
  else
  {
    // Child rows use the stock delegate rendering.
    QItemDelegate::paint(painter, option, index);
  }
}
/////////////////////////////////////////////////
QSize TreeViewDelegate::sizeHint(const QStyleOptionViewItem &_opt,
                                 const QModelIndex &_index) const
{
  // Pad the default item size slightly so the custom-painted rows
  // (rounded marker + text) have breathing room. The original made an
  // unused local copy of _opt; removed.
  return QItemDelegate::sizeHint(_opt, _index) + QSize(2, 2);
}
<|file_name|>credentials.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Commented-out OpenStack client imports, kept for reference; this
# module only builds the keyword-argument dicts the clients consume.
# import keystoneclient.v2_0.client as ksclient
# import glanceclient.v2.client as glclient
# import novaclient.client as nvclient
# import neutronclient.v2_0.client as ntclient
# import cinderclient.v2.client as cdclient
# import swiftclient.client as sftclient

__author__ = 'Yuvv'

# OpenStack connection settings shared by every credential helper below.
# NOTE(review): credentials are hard-coded here -- consider sourcing the
# standard OS_* environment variables instead of committing secrets.
OS_PROJECT_DOMAIN_ID = 'default'
OS_USER_DOMAIN_ID = 'default'
OS_PROJECT_NAME = 'admin'
OS_TENANT_NAME = 'admin'
OS_USERNAME = 'admin'
OS_PASSWORD = 'yuvv'
# OS_AUTH_URL = 'http://controller:35357/v3'
OS_AUTH_URL = 'http://controller:5000/v2.0/'
OS_IDENTITY_API_VERSION = 3
OS_IMAGE_API_VERSION = 2
OS_AUTH_VERSION = 3
'''
keystone = ksclient.Client(auth_url=OS_AUTH_URL,
username=OS_USERNAME,
password=OS_PASSWORD,
tenant_name=OS_TENANT_NAME)
glance = glclient.Client(endpoint=keystone.service_catalog.url_for(service_type='image'),
token=keystone.auth_token)
neutron = ntclient.Client(endpoint_url=keystone.service_catalog.url_for(service_type='network'),
token=keystone.auth_token)
cinder = cdclient.Client(auth_url=OS_AUTH_URL,
username=OS_USERNAME,
api_key=OS_PASSWORD,
project_id=OS_TENANT_NAME)
nova = nvclient.Client('2.1', auth_url=OS_AUTH_URL,
username=OS_USERNAME,
api_key=OS_PASSWORD,
project_id=OS_TENANT_NAME)
'''
def get_keystone_credits():
    """Return the kwargs dict for building a keystone client."""
    return {
        'username': OS_USERNAME,
        'password': OS_PASSWORD,
        'auth_url': OS_AUTH_URL,
        'tenant_name': OS_TENANT_NAME,
    }
def get_neutron_credits():
    """Return the kwargs dict for building a neutron client."""
    return {
        'username': OS_USERNAME,
        'password': OS_PASSWORD,
        'auth_url': OS_AUTH_URL,
        'tenant_name': OS_TENANT_NAME,
    }
<|fim▁hole|>
def get_cinder_credits():
    """Return the kwargs dict for building a cinder client."""
    return {
        'username': OS_USERNAME,
        'api_key': OS_PASSWORD,
        'auth_url': OS_AUTH_URL,
        'project_id': OS_TENANT_NAME,
    }
def get_nova_credits():
    """Return the kwargs dict for building a nova client."""
    return {
        'username': OS_USERNAME,
        'api_key': OS_PASSWORD,
        'auth_url': OS_AUTH_URL,
        'project_id': OS_TENANT_NAME,
    }
def get_swift_credits():
    """Return the kwargs dict for building a swift connection."""
    return {
        'user': OS_USERNAME,
        'key': OS_PASSWORD,
        'authurl': OS_AUTH_URL,
    }
'''
+----------------------------------+----------+--------------+
| ID | Name | Type |
+----------------------------------+----------+--------------+
| 02e5b5c270784e76bf5c144f0fa54030 | cinder | volume |
| 3a7ecbf5069d42d784fdf3ebe9deb745 | swift | object-store |
| 8e185002e3fe4028bda5c6cd910d31f6 | nova | compute |
| aaf1a49b4a1e463990880ddf9c8fb658 | glance | image |
| b3600985814247558a289c332ad62f09 | keystone | identity |
| bc4d28242d3a466ebce7663b28465a99 | neutron | network |
| cb799b0f7447401fb15821cffb103e74 | cinderv2 | volumev2 |
+----------------------------------+----------+--------------+
'''<|fim▁end|> | |
<|file_name|>TestLdapComplex.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.testing.longtest;
import com.evolveum.midpoint.common.LoggingConfigurationManager;
import com.evolveum.midpoint.common.ProfilingConfigurationManager;
import com.evolveum.midpoint.model.impl.sync.ReconciliationTaskHandler;
import com.evolveum.midpoint.model.test.AbstractModelIntegrationTest;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.schema.ResultHandler;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ObjectQueryUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.test.util.MidPointTestConstants;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentPolicyEnforcementType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectTemplateType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.RoleType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemConfigurationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemObjectsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.mutable.MutableInt;
import org.opends.server.types.Entry;<|fim▁hole|>import org.opends.server.util.LDIFReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import javax.xml.namespace.QName;
import java.io.File;
import java.io.IOException;
import static com.evolveum.midpoint.test.IntegrationTestTools.display;
import static org.testng.AssertJUnit.assertEquals;
/**
* Mix of various tests for issues that are difficult to replicate using dummy resources.
*
* @author Radovan Semancik
*
*/
@ContextConfiguration(locations = {"classpath:ctx-longtest-test-main.xml"})
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
public class TestLdapComplex extends AbstractModelIntegrationTest {
    // Directory holding this test's resource files.
    public static final File TEST_DIR = new File(MidPointTestConstants.TEST_RESOURCES_DIR, "ldap-complex");

    // System configuration object loaded during initSystem().
    public static final File SYSTEM_CONFIGURATION_FILE = new File(COMMON_DIR, "system-configuration.xml");
    public static final String SYSTEM_CONFIGURATION_OID = SystemObjectsType.SYSTEM_CONFIGURATION.value();

    public static final File USER_TEMPLATE_FILE = new File(TEST_DIR, "user-template.xml");

    // Administrator user and superuser role used to log in for the tests.
    protected static final File USER_ADMINISTRATOR_FILE = new File(COMMON_DIR, "user-administrator.xml");
    protected static final String USER_ADMINISTRATOR_OID = "00000000-0000-0000-0000-000000000002";
    protected static final String USER_ADMINISTRATOR_USERNAME = "administrator";

    protected static final File ROLE_SUPERUSER_FILE = new File(COMMON_DIR, "role-superuser.xml");
    protected static final String ROLE_SUPERUSER_OID = "00000000-0000-0000-0000-000000000004";

    // Business roles imported in initSystem().
    protected static final File ROLE_CAPTAIN_FILE = new File(TEST_DIR, "role-captain.xml");
    protected static final File ROLE_JUDGE_FILE = new File(TEST_DIR, "role-judge.xml");
    protected static final File ROLE_PIRATE_FILE = new File(TEST_DIR, "role-pirate.xml");
    protected static final File ROLE_SAILOR_FILE = new File(TEST_DIR, "role-sailor.xml");
    protected static final String ROLE_PIRATE_OID = "12345678-d34d-b33f-f00d-555555556603";

    // Initial LDAP content (groups) loaded into the embedded OpenDJ.
    protected static final File ROLES_LDIF_FILE = new File(TEST_DIR, "roles.ldif");

    protected static final File RESOURCE_OPENDJ_FILE = new File(COMMON_DIR, "resource-opendj-complex.xml");
    protected static final String RESOURCE_OPENDJ_NAME = "Localhost OpenDJ";
    protected static final String RESOURCE_OPENDJ_OID = "10000000-0000-0000-0000-000000000003";
    protected static final String RESOURCE_OPENDJ_NAMESPACE = MidPointConstants.NS_RI;

    // Make it at least 1501 so it will go over the 3000 entries size limit
    // NOTE(review): currently set to 1000, which contradicts the comment
    // above -- confirm the intended entry count.
    private static final int NUM_LDAP_ENTRIES = 1000;

    private static final String LDAP_GROUP_PIRATES_DN = "cn=Pirates,ou=groups,dc=example,dc=com";

    protected ResourceType resourceOpenDjType;
    protected PrismObject<ResourceType> resourceOpenDj;

    @Autowired
    private ReconciliationTaskHandler reconciliationTaskHandler;
    @Override
    protected void startResources() throws Exception {
        // Boot a clean embedded OpenDJ instance before the tests run.
        openDJController.startCleanServer();
    }
    @AfterClass
    public static void stopResources() throws Exception {
        // Shut down the embedded OpenDJ instance after the class finishes.
        openDJController.stop();
    }
    @Override
    public void initSystem(Task initTask, OperationResult initResult) throws Exception {
        super.initSystem(initTask, initResult);
        modelService.postInit(initResult);

        // System Configuration: must not already exist in the repository.
        PrismObject<SystemConfigurationType> config;
        try {
            config = repoAddObjectFromFile(SYSTEM_CONFIGURATION_FILE, SystemConfigurationType.class, initResult);
        } catch (ObjectAlreadyExistsException e) {
            throw new ObjectAlreadyExistsException("System configuration already exists in repository;" +
                    "looks like the previous test haven't cleaned it up", e);
        }
        // Apply logging/profiling settings from the stored configuration.
        LoggingConfigurationManager.configure(
                ProfilingConfigurationManager.checkSystemProfilingConfiguration(config),
                config.asObjectable().getVersion(), initResult);

        // administrator: add the superuser role and log in as admin.
        PrismObject<UserType> userAdministrator = repoAddObjectFromFile(USER_ADMINISTRATOR_FILE, UserType.class, initResult);
        repoAddObjectFromFile(ROLE_SUPERUSER_FILE, RoleType.class, initResult);
        login(userAdministrator);

        // Roles used by the user template / tests.
        repoAddObjectFromFile(ROLE_CAPTAIN_FILE, RoleType.class, initResult);
        repoAddObjectFromFile(ROLE_JUDGE_FILE, RoleType.class, initResult);
        repoAddObjectFromFile(ROLE_PIRATE_FILE, RoleType.class, initResult);
        repoAddObjectFromFile(ROLE_SAILOR_FILE, RoleType.class, initResult);

        // templates
        repoAddObjectFromFile(USER_TEMPLATE_FILE, ObjectTemplateType.class, initResult);

        // Resources: import the OpenDJ resource and wire it to the controller.
        resourceOpenDj = importAndGetObjectFromFile(ResourceType.class, RESOURCE_OPENDJ_FILE, RESOURCE_OPENDJ_OID, initTask, initResult);
        resourceOpenDjType = resourceOpenDj.asObjectable();
        openDJController.setResource(resourceOpenDj);

        assumeAssignmentPolicy(AssignmentPolicyEnforcementType.RELATIVE);

        // Seed the directory with the group entries.
        openDJController.addEntriesFromLdifFile(ROLES_LDIF_FILE.getPath());

        display("initial LDAP content", openDJController.dumpEntries());
    }
    @Test
    public void test100BigImport() throws Exception {
        final String TEST_NAME = "test100BigImport";
        TestUtil.displayTestTile(this, TEST_NAME);

        // GIVEN: NUM_LDAP_ENTRIES accounts loaded into OpenDJ.
        loadEntries("u");

        Task task = taskManager.createTaskInstance(TestLdapComplex.class.getName() + "." + TEST_NAME);
        task.setOwner(getUser(USER_ADMINISTRATOR_OID));
        OperationResult result = task.getResult();

        // WHEN: import all accounts from the OpenDJ resource.
        TestUtil.displayWhen(TEST_NAME);
        //task.setExtensionPropertyValue(SchemaConstants.MODEL_EXTENSION_WORKER_THREADS, 2);
        modelService.importFromResource(RESOURCE_OPENDJ_OID,
                new QName(RESOURCE_OPENDJ_NAMESPACE, "AccountObjectClass"), task, result);

        // THEN: the import task is asynchronous; wait for it to finish.
        TestUtil.displayThen(TEST_NAME);
        OperationResult subresult = result.getLastSubresult();
        TestUtil.assertInProgress("importAccountsFromResource result", subresult);

        waitForTaskFinish(task, true, 20000 + NUM_LDAP_ENTRIES*2000);

        // THEN: every LDAP entry (plus 4 pre-existing users) became a user.
        TestUtil.displayThen(TEST_NAME);

        int userCount = modelService.countObjects(UserType.class, null, null, task, result);
        display("Users", userCount);
        assertEquals("Unexpected number of users", NUM_LDAP_ENTRIES+4, userCount);

        assertUser("u1", task, result);
    }
private void assertUser(String name, Task task, OperationResult result) throws com.evolveum.midpoint.util.exception.ObjectNotFoundException, com.evolveum.midpoint.util.exception.SchemaException, com.evolveum.midpoint.util.exception.SecurityViolationException, com.evolveum.midpoint.util.exception.CommunicationException, com.evolveum.midpoint.util.exception.ConfigurationException {
UserType user = findUserByUsername("u1").asObjectable();
display("user " + name, user.asPrismObject());
assertEquals("Wrong number of assignments", 4, user.getAssignment().size());
}
    // Disabled: reconciliation variant of the big import scenario.
    @Test(enabled = false)
    public void test120BigReconciliation() throws Exception {
        final String TEST_NAME = "test120BigReconciliation";
        TestUtil.displayTestTile(this, TEST_NAME);

        // GIVEN
        Task task = taskManager.createTaskInstance(TestLdapComplex.class.getName() + "." + TEST_NAME);
        task.setOwner(getUser(USER_ADMINISTRATOR_OID));
        OperationResult result = task.getResult();

        // WHEN: launch reconciliation of the account object class.
        TestUtil.displayWhen(TEST_NAME);
        //task.setExtensionPropertyValue(SchemaConstants.MODEL_EXTENSION_WORKER_THREADS, 2);
        ResourceType resource = modelService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, task, result).asObjectable();
        reconciliationTaskHandler.launch(resource,
                new QName(RESOURCE_OPENDJ_NAMESPACE, "AccountObjectClass"), task, result);

        // THEN: wait for the asynchronous reconciliation task to finish.
        TestUtil.displayThen(TEST_NAME);
        // TODO
//        OperationResult subresult = result.getLastSubresult();
//        TestUtil.assertInProgress("reconciliation launch result", subresult);

        waitForTaskFinish(task, true, 20000 + NUM_LDAP_ENTRIES*2000);

        // THEN: user count is unchanged and u1 still looks correct.
        TestUtil.displayThen(TEST_NAME);

        int userCount = modelService.countObjects(UserType.class, null, null, task, result);
        display("Users", userCount);
        assertEquals("Unexpected number of users", NUM_LDAP_ENTRIES+4, userCount);

        assertUser("u1", task, result);
    }
/**
 * Populates OpenDJ with NUM_LDAP_ENTRIES inetOrgPerson entries whose uids are
 * prefix+index and whose cn/sn are "user"+index, logging the elapsed time.
 */
private void loadEntries(String prefix) throws LDIFException, IOException {
    final long startMillis = System.currentTimeMillis();
    for (int index = 0; index < NUM_LDAP_ENTRIES; index++) {
        openDJController.addEntry(createEntry(prefix + index, "user" + index));
    }
    final long elapsedMillis = System.currentTimeMillis() - startMillis;
    display("Loaded " + NUM_LDAP_ENTRIES + " LDAP entries in " + (elapsedMillis / 1000) + " seconds");
}
/**
 * Builds an inetOrgPerson LDIF entry for the given uid under the people
 * suffix and parses it into an OpenDJ Entry.
 *
 * @param uid  value for the uid attribute (also used in the DN)
 * @param name value for the cn and sn attributes
 */
private Entry createEntry(String uid, String name) throws IOException, LDIFException {
    StringBuilder sb = new StringBuilder();
    String dn = "uid="+uid+","+openDJController.getSuffixPeople();
    sb.append("dn: ").append(dn).append("\n");
    sb.append("objectClass: inetOrgPerson\n");
    sb.append("uid: ").append(uid).append("\n");
    sb.append("cn: ").append(name).append("\n");
    sb.append("sn: ").append(name).append("\n");
    LDIFImportConfig importConfig = new LDIFImportConfig(IOUtils.toInputStream(sb.toString(), "utf-8"));
    LDIFReader ldifReader = new LDIFReader(importConfig);
    try {
        return ldifReader.readEntry();
    } finally {
        // FIX: the reader (and the underlying input stream) was previously
        // never closed, leaking a stream for every created entry.
        ldifReader.close();
    }
}
/**
 * Returns the DN of the account entry for the given username under the
 * people suffix.
 */
private String toDn(String username) {
    return String.format("uid=%s,%s", username, OPENDJ_PEOPLE_SUFFIX);
}
}<|fim▁end|> | import org.opends.server.types.LDIFImportConfig;
import org.opends.server.util.LDIFException; |
<|file_name|>slide_channel.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>
class Channel(models.Model):
    # Extends slide.channel with a certification counter.
    _inherit = 'slide.channel'

    # Number of certification slides in this channel; filled in by the
    # compute method named below (presumably defined on the inherited
    # model - confirm) and stored so it can be searched/grouped on.
    nbr_certification = fields.Integer("Number of Certifications", compute='_compute_slides_statistics', store=True)
class Category(models.Model):
    # Extends slide.category with a certification counter.
    _inherit = 'slide.category'

    # Number of certification slides in this category.
    # NOTE(review): the compute method name ('_count_presentations') differs
    # from the one used on Channel - confirm it exists on slide.category.
    nbr_certification = fields.Integer("Number of Certifications", compute='_count_presentations', store=True)
from odoo import fields, models |
<|file_name|>export_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
<|fim▁hole|> BundleCharms = (*Handler).bundleCharms
ParseSearchParams = parseSearchParams
DefaultIcon = defaultIcon
ArchiveCacheVersionedMaxAge = &archiveCacheVersionedMaxAge
ArchiveCacheNonVersionedMaxAge = &archiveCacheNonVersionedMaxAge
ParamsLogLevels = paramsLogLevels
ParamsLogTypes = paramsLogTypes
ProcessIcon = processIcon
UsernameAttr = usernameAttr
GroupsAttr = groupsAttr
GetPromulgatedURL = (*Handler).getPromulgatedURL
)<|fim▁end|> | package v4
var ( |
<|file_name|>table_manager.go<|end_file_name|><|fim▁begin|>package uploads
import (
"context"
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"sync"
"time"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"github.com/grafana/loki/pkg/storage/chunk"
"github.com/grafana/loki/pkg/storage/chunk/local"
chunk_util "github.com/grafana/loki/pkg/storage/chunk/util"
"github.com/grafana/loki/pkg/storage/stores/shipper/util"
util_log "github.com/grafana/loki/pkg/util/log"
)
// Config holds the settings for the uploads TableManager.
type Config struct {
	// Uploader is the identity under which index files are uploaded;
	// forwarded to NewTable/LoadTable.
	Uploader string
	// IndexDir is the local directory holding one sub-directory per table.
	IndexDir string
	// UploadInterval is how often the background loop uploads the tables.
	UploadInterval time.Duration
	// DBRetainPeriod is passed to Table.Cleanup to drop already-uploaded
	// dbs after this retention period.
	DBRetainPeriod time.Duration
	// MakePerTenantBuckets is forwarded to NewTable/LoadTable and controls
	// per-tenant bucketing of the boltdb index entries.
	MakePerTenantBuckets bool
}
// TableManager owns the set of local index tables: it serves index reads and
// writes against them and periodically uploads them via the storage client.
type TableManager struct {
	cfg             Config
	boltIndexClient BoltDBIndexClient
	storageClient   StorageClient
	metrics         *metrics

	// tables maps table name -> Table; guarded by tablesMtx.
	tables    map[string]*Table
	tablesMtx sync.RWMutex

	// ctx/cancel stop the background upload loop; wg is waited on in Stop.
	ctx    context.Context
	cancel context.CancelFunc
	wg     sync.WaitGroup
}
// NewTableManager loads any tables already present in cfg.IndexDir and starts
// a background goroutine that periodically uploads them.
func NewTableManager(cfg Config, boltIndexClient BoltDBIndexClient, storageClient StorageClient, registerer prometheus.Registerer) (*TableManager, error) {
	ctx, cancel := context.WithCancel(context.Background())
	tm := TableManager{
		cfg:             cfg,
		boltIndexClient: boltIndexClient,
		storageClient:   storageClient,
		metrics:         newMetrics(registerer),
		ctx:             ctx,
		cancel:          cancel,
	}

	tables, err := tm.loadTables()
	if err != nil {
		// FIX: release the context on the error path so it is not leaked
		// (go vet: lostcancel).
		cancel()
		return nil, err
	}

	tm.tables = tables
	// FIX: register with the WaitGroup *before* starting the goroutine.
	// Previously loop() called wg.Add(1) itself, so a Stop() racing with
	// startup could observe a zero counter, return from wg.Wait() early and
	// run the final forced upload concurrently with the loop.
	tm.wg.Add(1)
	go tm.loop()
	return &tm, nil
}

// loop uploads the tables once at startup and then on every UploadInterval
// tick until the manager's context is cancelled by Stop.
func (tm *TableManager) loop() {
	defer tm.wg.Done()

	tm.uploadTables(context.Background(), false)

	syncTicker := time.NewTicker(tm.cfg.UploadInterval)
	defer syncTicker.Stop()

	for {
		select {
		case <-syncTicker.C:
			tm.uploadTables(context.Background(), false)
		case <-tm.ctx.Done():
			return
		}
	}
}
// Stop cancels the background upload loop, waits for it to exit and then
// performs one final, forced upload so pending local writes are not lost.
func (tm *TableManager) Stop() {
	level.Info(util_log.Logger).Log("msg", "stopping table manager")

	tm.cancel()
	tm.wg.Wait()

	tm.uploadTables(context.Background(), true)
}
// QueryPages groups the queries by table and runs each group against its
// local table, invoking callback for every page of results. It stops at the
// first error.
func (tm *TableManager) QueryPages(ctx context.Context, queries []chunk.IndexQuery, callback chunk.QueryPagesCallback) error {
	for tableName, tableQueries := range util.QueriesByTable(queries) {
		if err := tm.query(ctx, tableName, tableQueries, callback); err != nil {
			return err
		}
	}
	return nil
}
// query runs the given queries against a single local table. An unknown
// table is not an error - there is simply no local data for it yet. Note the
// read lock is deliberately held for the whole query (not just the map
// lookup) so the table cannot be mutated out from under DoParallelQueries.
func (tm *TableManager) query(ctx context.Context, tableName string, queries []chunk.IndexQuery, callback chunk.QueryPagesCallback) error {
	tm.tablesMtx.RLock()
	defer tm.tablesMtx.RUnlock()

	table, ok := tm.tables[tableName]
	if !ok {
		return nil
	}

	return util.DoParallelQueries(ctx, table, queries, callback)
}
func (tm *TableManager) BatchWrite(ctx context.Context, batch chunk.WriteBatch) error {
boltWriteBatch, ok := batch.(*local.BoltWriteBatch)
if !ok {
return errors.New("invalid write batch")
}
for tableName, tableWrites := range boltWriteBatch.Writes {
table, err := tm.getOrCreateTable(tableName)
if err != nil {
return err
}
err = table.Write(ctx, tableWrites)
if err != nil {
return err
}
}
<|fim▁hole|>
// getOrCreateTable returns the Table for tableName, lazily creating and
// caching it on first use. It uses double-checked locking: the common hit
// path takes only the read lock; creation re-checks under the write lock.
func (tm *TableManager) getOrCreateTable(tableName string) (*Table, error) {
	tm.tablesMtx.RLock()
	table, ok := tm.tables[tableName]
	tm.tablesMtx.RUnlock()

	if !ok {
		tm.tablesMtx.Lock()
		defer tm.tablesMtx.Unlock()

		// Re-check: another goroutine may have created the table between
		// releasing the read lock and acquiring the write lock.
		table, ok = tm.tables[tableName]
		if !ok {
			var err error
			table, err = NewTable(filepath.Join(tm.cfg.IndexDir, tableName), tm.cfg.Uploader, tm.storageClient,
				tm.boltIndexClient, tm.cfg.MakePerTenantBuckets)
			if err != nil {
				return nil, err
			}

			tm.tables[tableName] = table
		}
	}

	return table, nil
}
// uploadTables snapshots every table for reads, uploads it (force is
// forwarded to Table.Upload; Stop passes true for the final upload) and then
// cleans up dbs past the retention period. Per-table failures are logged and
// recorded in the metrics but never abort the remaining tables.
func (tm *TableManager) uploadTables(ctx context.Context, force bool) {
	tm.tablesMtx.RLock()
	defer tm.tablesMtx.RUnlock()

	level.Info(util_log.Logger).Log("msg", "uploading tables")

	status := statusSuccess
	for _, table := range tm.tables {
		err := table.Snapshot()
		if err != nil {
			// we do not want to stop uploading of dbs due to failures in snapshotting them so logging just the error here.
			level.Error(util_log.Logger).Log("msg", "failed to snapshot table for reads", "table", table.name, "err", err)
		}

		err = table.Upload(ctx, force)
		if err != nil {
			// continue uploading other tables while skipping cleanup for a failed one.
			status = statusFailure
			level.Error(util_log.Logger).Log("msg", "failed to upload dbs", "table", table.name, "err", err)
			continue
		}

		// cleanup unwanted dbs from the table
		err = table.Cleanup(tm.cfg.DBRetainPeriod)
		if err != nil {
			// we do not want to stop uploading of dbs due to failures in cleaning them up so logging just the error here.
			level.Error(util_log.Logger).Log("msg", "failed to cleanup uploaded dbs past their retention period", "table", table.name, "err", err)
		}
	}

	tm.metrics.tablesUploadOperationTotal.WithLabelValues(status).Inc()
}
// loadTables scans cfg.IndexDir for entries matching the table-name pattern,
// migrates legacy single-file tables into same-named directories, loads each
// table and snapshots it immediately so it is ready to serve queries. Empty
// table folders are removed. Returns the loaded tables keyed by name.
func (tm *TableManager) loadTables() (map[string]*Table, error) {
	localTables := make(map[string]*Table)
	filesInfo, err := ioutil.ReadDir(tm.cfg.IndexDir)
	if err != nil {
		return nil, err
	}

	// regex matching table name patters, i.e prefix+period_number
	re, err := regexp.Compile(`.+[0-9]+$`)
	if err != nil {
		return nil, err
	}

	for _, fileInfo := range filesInfo {
		if !re.MatchString(fileInfo.Name()) {
			continue
		}

		// since we are moving to keeping files for same table in a folder, if current element is a file we need to move it inside a directory with the same name
		// i.e file index_123 would be moved to path index_123/index_123.
		if !fileInfo.IsDir() {
			level.Info(util_log.Logger).Log("msg", fmt.Sprintf("found a legacy file %s, moving it to folder with same name", fileInfo.Name()))
			filePath := filepath.Join(tm.cfg.IndexDir, fileInfo.Name())

			// create a folder with .temp suffix since we can't create a directory with same name as file.
			tempDirPath := filePath + ".temp"
			if err := chunk_util.EnsureDirectory(tempDirPath); err != nil {
				return nil, err
			}

			// move the file to temp dir.
			if err := os.Rename(filePath, filepath.Join(tempDirPath, fileInfo.Name())); err != nil {
				return nil, err
			}

			// rename the directory to name of the file
			if err := os.Rename(tempDirPath, filePath); err != nil {
				return nil, err
			}
		}

		level.Info(util_log.Logger).Log("msg", fmt.Sprintf("loading table %s", fileInfo.Name()))
		table, err := LoadTable(filepath.Join(tm.cfg.IndexDir, fileInfo.Name()), tm.cfg.Uploader, tm.storageClient,
			tm.boltIndexClient, tm.cfg.MakePerTenantBuckets, tm.metrics)
		if err != nil {
			return nil, err
		}

		if table == nil {
			// if table is nil it means it has no files in it so remove the folder for that table.
			err := os.Remove(filepath.Join(tm.cfg.IndexDir, fileInfo.Name()))
			if err != nil {
				level.Error(util_log.Logger).Log("msg", "failed to remove empty table folder", "table", fileInfo.Name(), "err", err)
			}
			continue
		}

		// Queries are only done against table snapshots so it's important we snapshot as soon as the table is loaded.
		err = table.Snapshot()
		if err != nil {
			return nil, err
		}

		localTables[fileInfo.Name()] = table
	}

	return localTables, nil
}
} |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""
dwm package setup
"""
from __future__ import print_function
from setuptools import setup, find_packages
__version__ = '1.1.0'
def readme():
    """Return the contents of README.md for use as long_description.

    Returns an empty string when the file is missing or unreadable, so
    packaging still works without a README.
    """
    content = ''
    try:
        with open('README.md') as readme_file:
            content = readme_file.read()
    except IOError:
        pass
    return content
setup(
name='dwm',
version=__version__,
url='https://github.com/rh-marketingops/dwm',
license='GNU General Public License',
author='Jeremiah Coleman',
tests_require=['nose', 'mongomock>=3.5.0'],
install_requires=['pymongo>=3.2.2', 'tqdm>=4.8.4'],
author_email='[email protected]',
description='Best practices for marketing data quality management',
long_description=readme(),
packages=find_packages(),
include_package_data=True,
platforms='any',
test_suite='nose.collector',
classifiers=[
'Programming Language :: Python',
'Development Status :: 4 - Beta',
'Natural Language :: English',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',<|fim▁hole|> 'Topic :: Software Development :: Libraries :: Application Frameworks'
],
keywords='marketing automation data quality cleanse washing cleaning'
)<|fim▁end|> | 'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules', |
<|file_name|>bbox.py<|end_file_name|><|fim▁begin|>import scipy.ndimage as ndi
class BBox(object):
    """Axis-aligned bounding box.

    (x1, y1) is the upper-left corner and (x2, y2) the lower-right corner,
    with (0, 0) at the upper left. Corners supplied in the wrong order are
    normalised on construction.
    """

    def __init__(self, x1, y1, x2, y2):
        # Normalise so (x1, y1) <= (x2, y2) component-wise.
        self.x1, self.x2 = min(x1, x2), max(x1, x2)
        self.y1, self.y2 = min(y1, y2), max(y1, y2)
        self.area = (self.x2 - self.x1) * (self.y2 - self.y1)

    def taxicab_diagonal(self):
        """Return the taxicab (L1) distance from (x1, y1) to (x2, y2)."""
        return (self.x2 - self.x1) + (self.y2 - self.y1)

    def overlaps(self, other):
        """Return True iff self and other overlap (touching edges count)."""
        return (self.x1 <= other.x2 and self.x2 >= other.x1
                and self.y1 <= other.y2 and self.y2 >= other.y1)

    def __eq__(self, other):
        """Boxes are equal when all four corner coordinates match."""
        return ((self.x1, self.y1, self.x2, self.y2)
                == (other.x1, other.y1, other.x2, other.y2))
def find_paws(data, smooth_radius = 5, threshold = 0.0001):
# http://stackoverflow.com/questions/4087919/how-can-i-improve-my-paw-detection
"""Detects and isolates contiguous regions in the input array"""
# Blur the input data a bit so the paws have a continous footprint
data = ndi.uniform_filter(data, smooth_radius)<|fim▁hole|> thresh = data < threshold
# Fill any interior holes in the paws to get cleaner regions...
filled = ndi.morphology.binary_fill_holes(thresh)
# Label each contiguous paw
coded_paws, num_paws = ndi.label(filled)
# Isolate the extent of each paw
# find_objects returns a list of 2-tuples: (slice(...), slice(...))
# which represents a rectangular box around the object
data_slices = ndi.find_objects(coded_paws)
return data_slices
def slice_to_bbox(slices):
    """Yield a BBox for each (row_slice, col_slice) pair from find_objects.

    The +1 on the stop values reproduces the original behaviour of extending
    the box one pixel past the slice's (exclusive) end.
    """
    for region in slices:
        row_slice, col_slice = region[:2]
        yield BBox(col_slice.start, row_slice.start,
                   col_slice.stop + 1, row_slice.stop + 1)
<|file_name|>persist_data.py<|end_file_name|><|fim▁begin|>"""BitBake Persistent Data Store
Used to store data in a central location such that other threads/tasks can
access them at some future date. Acts as a convenience wrapper around sqlite,
currently, providing a key/value store accessed by 'domain'.
"""
# Copyright (C) 2007 Richard Purdie
# Copyright (C) 2010 Chris Larson <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import collections
import logging
import os.path
import sys
import warnings
from bb.compat import total_ordering
from collections import Mapping
try:
import sqlite3
except ImportError:
from pysqlite2 import dbapi2 as sqlite3
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
raise Exception("sqlite3 version 3.3.0 or later is required.")
logger = logging.getLogger("BitBake.PersistData")
if hasattr(sqlite3, 'enable_shared_cache'):
try:
sqlite3.enable_shared_cache(True)
except sqlite3.OperationalError:
pass
@total_ordering
class SQLTable(collections.MutableMapping):
    """Object representing a table/domain in the database.

    Exposes a dict-like (MutableMapping) interface over a two-column
    (key TEXT, value TEXT) sqlite table, creating the table on first use.
    """
    def __init__(self, cachefile, table):
        # Path to the sqlite database file and the table name within it.
        # Note: "cursor" actually holds a sqlite3 Connection (see connect()).
        self.cachefile = cachefile
        self.table = table
        self.cursor = connect(self.cachefile)
        self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
                      % table)

    def _execute(self, *query):
        """Execute a query, waiting to acquire a lock if necessary."""
        count = 0
        while True:
            try:
                return self.cursor.execute(*query)
            except sqlite3.OperationalError as exc:
                # Another process holds the database lock: reconnect and
                # retry up to 500 times before re-raising.
                if 'database is locked' in str(exc) and count < 500:
                    count = count + 1
                    self.cursor.close()
                    self.cursor = connect(self.cachefile)
                    continue
                raise
def __enter__(self):
    # Delegate context management to the underlying sqlite connection
    # (stored in self.cursor).
    self.cursor.__enter__()
    return self

def __exit__(self, *excinfo):
    self.cursor.__exit__(*excinfo)

def __getitem__(self, key):
    """Return the value stored for key, raising KeyError when absent."""
    data = self._execute("SELECT * from %s where key=?;" %
                         self.table, [key])
    for row in data:
        return row[1]
    raise KeyError(key)

def __delitem__(self, key):
    """Delete the row for key, raising KeyError when absent."""
    if key not in self:
        raise KeyError(key)
    self._execute("DELETE from %s where key=?;" % self.table, [key])
def __setitem__(self, key, value):
if not isinstance(key, basestring):
raise TypeError('Only string keys are supported')
elif not isinstance(value, basestring):<|fim▁hole|> exists = len(list(data))
if exists:
self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
[value, key])
else:
self._execute("INSERT into %s(key, value) values (?, ?);" %
self.table, [key, value])
def __contains__(self, key):
    # Materialises every key into a set first: O(n) per membership test,
    # but keeps the SQL simple.
    return key in set(self)

def __len__(self):
    """Return the number of rows in the table."""
    data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
    for row in data:
        return row[0]

def __iter__(self):
    """Iterate over all keys (generator reading from the live cursor)."""
    data = self._execute("SELECT key FROM %s;" % self.table)
    return (row[0] for row in data)
def __lt__(self, other):
    """Order tables by number of stored rows.

    FIX: the original did `raise NotImplemented`, which itself raises a
    TypeError because NotImplemented is not an exception class. Per the
    Python data model, rich comparison methods should *return*
    NotImplemented so the interpreter can try the reflected operation.
    """
    if not isinstance(other, Mapping):
        return NotImplemented
    return len(self) < len(other)
def values(self):
    """Return all values as a list."""
    return list(self.itervalues())

def itervalues(self):
    """Iterate over all values (generator reading from the live cursor)."""
    data = self._execute("SELECT value FROM %s;" % self.table)
    return (row[0] for row in data)

def items(self):
    """Return all (key, value) pairs as a list."""
    return list(self.iteritems())

def iteritems(self):
    """Iterate over (key, value) rows straight from the cursor."""
    return self._execute("SELECT * FROM %s;" % self.table)

def clear(self):
    """Delete every row in the table."""
    self._execute("DELETE FROM %s;" % self.table)

def has_key(self, key):
    # Python 2 style alias for `key in self`.
    return key in self
class PersistData(object):
    """Deprecated representation of the bitbake persistent data store.

    Kept only for backwards compatibility; emits a DeprecationWarning and
    delegates everything to the table returned by persist(). New code should
    call persist(domain, d) directly.
    """
    def __init__(self, d):
        warnings.warn("Use of PersistData is deprecated. Please use "
                      "persist(domain, d) instead.",
                      category=DeprecationWarning,
                      stacklevel=2)
        # NOTE(review): persist() is defined as persist(domain, d) but is
        # called here with a single argument, and SQLTable exposes
        # 'cachefile' rather than 'filename' - this deprecated path looks
        # broken as written; confirm before relying on it.
        self.data = persist(d)
        logger.debug(1, "Using '%s' as the persistent data cache",
                     self.data.filename)

    def addDomain(self, domain):
        """
        Add a domain (pending deprecation)
        """
        return self.data[domain]

    def delDomain(self, domain):
        """
        Removes a domain and all the data it contains
        """
        del self.data[domain]

    def getKeyValues(self, domain):
        """
        Return a list of key + value pairs for a domain
        """
        return self.data[domain].items()

    def getValue(self, domain, key):
        """
        Return the value of a key for a domain
        """
        return self.data[domain][key]

    def setValue(self, domain, key, value):
        """
        Sets the value of a key for a domain
        """
        self.data[domain][key] = value

    def delValue(self, domain, key):
        """
        Deletes a key/value pair
        """
        del self.data[domain][key]
def connect(database):
    """Open a sqlite connection to `database`.

    Uses a 5 second busy timeout and isolation_level=None (autocommit), so
    callers manage transactions explicitly if they need them.
    """
    return sqlite3.connect(
        database,
        timeout=5,
        isolation_level=None,
    )
def persist(domain, d):
    """Convenience factory for SQLTable objects based upon metadata.

    Resolves the cache directory from the PERSISTENT_DIR or CACHE metadata
    variables (exiting bitbake when neither is set), ensures it exists, and
    returns an SQLTable bound to `domain` within the shared
    bb_persist_data.sqlite3 file.
    """
    import bb.utils
    cachedir = (d.getVar("PERSISTENT_DIR", True) or
                d.getVar("CACHE", True))
    if not cachedir:
        logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
        sys.exit(1)
    bb.utils.mkdirhier(cachedir)
    cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
    return SQLTable(cachefile, domain)
data = self._execute("SELECT * from %s where key=?;" %
self.table, [key]) |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
home = TemplateView.as_view(template_name='home.html')
urlpatterns = patterns(
'',<|fim▁hole|>)<|fim▁end|> | url(r'^filter/', include('demoproject.filter.urls')),
# An informative homepage.
url(r'', home, name='home') |
<|file_name|>project.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2022 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
import io
import sys
import tempfile
import time
import os
import traceback
from pathlib import Path
import gitlab
import requests
from .functions import (setLocalPath, showCommitDialog, logInPavlovia,
noGitWarning)
from psychopy.localization import _translate
from psychopy.projects import pavlovia
from psychopy import logging
from psychopy.app.pavlovia_ui import sync, functions
import wx
from wx.lib import scrolledpanel as scrlpanel
from .. import utils
from ...projects.pavlovia import PavloviaProject
try:
import wx.lib.agw.hyperlink as wxhl # 4.0+
except ImportError:
import wx.lib.hyperlink as wxhl # <3.0.2
_starred = u"\u2605"
_unstarred = u"\u2606"
class ProjectEditor(wx.Dialog):
def __init__(self, parent=None, id=wx.ID_ANY, project=None, localRoot="",
*args, **kwargs):
wx.Dialog.__init__(self, parent, id,
*args, **kwargs)
panel = wx.Panel(self, wx.ID_ANY, style=wx.TAB_TRAVERSAL)
# when a project is successfully created these will be populated
if hasattr(parent, 'filename'):
self.filename = parent.filename
else:
self.filename = None
self.project = project # type: pavlovia.PavloviaProject
self.projInfo = None
self.parent = parent
if project:
# edit existing project
self.isNew = False
if project.localRoot and not localRoot:
localRoot = project.localRoot
else:
self.isNew = True
# create the controls
nameLabel = wx.StaticText(panel, -1, _translate("Name:"))
self.nameBox = wx.TextCtrl(panel, -1, size=(400, -1))
# Path can contain only letters, digits, '_', '-' and '.'.
# Cannot start with '-', end in '.git' or end in '.atom']
pavSession = pavlovia.getCurrentSession()
try:
username = pavSession.user.username
except AttributeError as e:
raise pavlovia.NoUserError("{}: Tried to create project with no user logged in.".format(e))
gpChoices = [username]
gpChoices.extend(pavSession.listUserGroups())
groupLabel = wx.StaticText(panel, -1, _translate("Group/owner:"))
self.groupBox = wx.Choice(panel, -1, size=(400, -1),
choices=gpChoices)
descrLabel = wx.StaticText(panel, -1, _translate("Description:"))
self.descrBox = wx.TextCtrl(panel, -1, size=(400, 200),
style=wx.TE_MULTILINE | wx.SUNKEN_BORDER)
localLabel = wx.StaticText(panel, -1, _translate("Local folder:"))
self.localBox = wx.TextCtrl(panel, -1, size=(400, -1),
value=localRoot)
self.btnLocalBrowse = wx.Button(panel, wx.ID_ANY, _translate("Browse..."))
self.btnLocalBrowse.Bind(wx.EVT_BUTTON, self.onBrowseLocal)
localPathSizer = wx.BoxSizer(wx.HORIZONTAL)
localPathSizer.Add(self.localBox)
localPathSizer.Add(self.btnLocalBrowse)
tagsLabel = wx.StaticText(panel, -1,
_translate("Tags (comma separated):"))
self.tagsBox = wx.TextCtrl(panel, -1, size=(400, 100),
value="PsychoPy, Builder, Coder",
style=wx.TE_MULTILINE | wx.SUNKEN_BORDER)
publicLabel = wx.StaticText(panel, -1, _translate("Public:"))
self.publicBox = wx.CheckBox(panel, -1)
# buttons
if self.isNew:
buttonMsg = _translate("Create project on Pavlovia")
else:
buttonMsg = _translate("Submit changes to Pavlovia")
updateBtn = wx.Button(panel, -1, buttonMsg)
updateBtn.Bind(wx.EVT_BUTTON, self.submitChanges)
cancelBtn = wx.Button(panel, -1, _translate("Cancel"))
cancelBtn.Bind(wx.EVT_BUTTON, self.onCancel)
btnSizer = wx.BoxSizer(wx.HORIZONTAL)
if sys.platform == "win32":
btns = [updateBtn, cancelBtn]
else:
btns = [cancelBtn, updateBtn]
btnSizer.AddMany(btns)
# do layout
fieldsSizer = wx.FlexGridSizer(cols=2, rows=6, vgap=5, hgap=5)
fieldsSizer.AddMany([(nameLabel, 0, wx.ALIGN_RIGHT), self.nameBox,
(groupLabel, 0, wx.ALIGN_RIGHT), self.groupBox,
(localLabel, 0, wx.ALIGN_RIGHT), localPathSizer,<|fim▁hole|>
border = wx.BoxSizer(wx.VERTICAL)
border.Add(fieldsSizer, 0, wx.ALL, 5)
border.Add(btnSizer, 0, wx.ALIGN_RIGHT | wx.ALL, 5)
panel.SetSizerAndFit(border)
self.Fit()
def onCancel(self, evt=None):
    # Close the dialog without creating/updating anything on Pavlovia.
    self.EndModal(wx.ID_CANCEL)
def submitChanges(self, evt=None):
    """Create the project on Pavlovia (when new) or push metadata changes
    to the existing project, then close the dialog with wx.ID_OK.
    """
    session = pavlovia.getCurrentSession()
    # Make sure someone is logged in; prompt if not, and bail out if the
    # login attempt still left no user on the session.
    if not session.user:
        user = logInPavlovia(parent=self.parent)  # NOTE(review): return value unused
    if not session.user:
        return
    # get current values
    name = self.nameBox.GetValue()
    namespace = self.groupBox.GetStringSelection()
    descr = self.descrBox.GetValue()
    visibility = self.publicBox.GetValue()
    # tags need splitting and then stripping of surrounding whitespace
    tagsList = self.tagsBox.GetValue().split(',')
    tags = [thisTag.strip() for thisTag in tagsList]
    localRoot = self.localBox.GetValue()
    if not localRoot:
        # No folder chosen yet: ask the user to pick one now.
        localRoot = setLocalPath(self.parent, project=None, path="")
    # then create/update
    if self.isNew:
        project = session.createProject(name=name,
                                        description=descr,
                                        tags=tags,
                                        visibility=visibility,
                                        localRoot=localRoot,
                                        namespace=namespace)
        self.project = project
        self.project._newRemote = True
    else:  # we're changing metadata of an existing project. Don't sync
        self.project.pavlovia.name = name
        self.project.pavlovia.description = descr
        self.project.tags = tags
        self.project.visibility = visibility
        self.project.localRoot = localRoot
        self.project.save()  # pushes changed metadata to gitlab
        self.project._newRemote = False
    self.EndModal(wx.ID_OK)
    # Persist the known-projects list and refresh the local git repo handle.
    pavlovia.knownProjects.save()
    self.project.getRepo(forceRefresh=True)
    self.parent.project = self.project
def onBrowseLocal(self, evt=None):
    """Let the user pick a new local folder and apply it to the project."""
    newPath = setLocalPath(self, path=self.filename)
    if newPath:
        # NOTE(review): SetLabel on a wx.TextCtrl does not change the text
        # contents (SetValue does) - confirm this updates the box as intended.
        self.localBox.SetLabel(newPath)
        self.Layout()
        if self.project:
            self.project.localRoot = newPath
    self.Raise()
class DetailsPanel(wx.Panel):
class StarBtn(wx.Button):
    """Toggle button reflecting the project's starred state.

    Shows the "starred" icon when value is True and the "unstarred" icon
    when False; always shows the unstarred icon while disabled.
    """
    def __init__(self, parent, iconCache, value=False):
        wx.Button.__init__(self, parent, label=_translate("Star"))
        # Setup icons
        self.icons = {
            True: iconCache.getBitmap(name="starred", size=16),
            False: iconCache.getBitmap(name="unstarred", size=16),
        }
        self.SetBitmapDisabled(self.icons[False])  # Always appear empty when disabled
        # Set start value
        self.value = value

    @property
    def value(self):
        # Current starred state (stored as a bool by the setter).
        return self._value

    @value.setter
    def value(self, value):
        # Store value
        self._value = bool(value)
        # Change icon for the normal, hover and focus states to match.
        self.SetBitmap(self.icons[self._value])
        self.SetBitmapCurrent(self.icons[self._value])
        self.SetBitmapFocus(self.icons[self._value])

    def toggle(self):
        """Flip the starred state."""
        self.value = (not self.value)
def __init__(self, parent, project=None,
size=(650, 550),
style=wx.NO_BORDER):
wx.Panel.__init__(self, parent, -1,
size=size,
style=style)
self.SetBackgroundColour("white")
iconCache = parent.app.iconCache
# Setup sizer
self.contentBox = wx.BoxSizer()
self.SetSizer(self.contentBox)
self.sizer = wx.BoxSizer(wx.VERTICAL)
self.contentBox.Add(self.sizer, proportion=1, border=12, flag=wx.ALL | wx.EXPAND)
# Head sizer
self.headSizer = wx.BoxSizer(wx.HORIZONTAL)
self.sizer.Add(self.headSizer, border=0, flag=wx.EXPAND)
# Icon
self.icon = utils.ImageCtrl(self, bitmap=wx.Bitmap(), size=(128, 128))
self.icon.SetBackgroundColour("#f2f2f2")
self.icon.Bind(wx.EVT_FILEPICKER_CHANGED, self.updateProject)
self.headSizer.Add(self.icon, border=6, flag=wx.ALL)
self.icon.SetToolTip(_translate(
"An image to represent this project, this helps it stand out when browsing on Pavlovia."
))
# Title sizer
self.titleSizer = wx.BoxSizer(wx.VERTICAL)
self.headSizer.Add(self.titleSizer, proportion=1, flag=wx.EXPAND)
# Title
self.title = wx.TextCtrl(self,
size=(-1, 30 if sys.platform == 'darwin' else -1),
value="")
self.title.Bind(wx.EVT_KILL_FOCUS, self.updateProject)
self.title.SetFont(
wx.Font(24, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD)
)
self.titleSizer.Add(self.title, border=6, flag=wx.ALL | wx.EXPAND)
self.title.SetToolTip(_translate(
"Title of the project. Unlike the project name, this isn't used as a filename anywhere; so you can "
"add spaces, apostrophes and emojis to your heart's content! 🦕✨"
))
# Author
self.author = wx.StaticText(self, size=(-1, -1), label="by ---")
self.titleSizer.Add(self.author, border=6, flag=wx.LEFT | wx.RIGHT)
# Pavlovia link
self.link = wxhl.HyperLinkCtrl(self, -1,
label="https://pavlovia.org/",
URL="https://pavlovia.org/",
)
self.link.SetBackgroundColour("white")
self.titleSizer.Add(self.link, border=6, flag=wx.EXPAND | wx.LEFT | wx.RIGHT | wx.BOTTOM)
self.link.SetToolTip(_translate(
"Click to view the project in Pavlovia."
))
# Button sizer
self.btnSizer = wx.BoxSizer(wx.HORIZONTAL)
self.titleSizer.Add(self.btnSizer, flag=wx.EXPAND)
# Star button
self.starLbl = wx.StaticText(self, label="-")
self.btnSizer.Add(self.starLbl, border=6, flag=wx.LEFT | wx.TOP | wx.BOTTOM | wx.ALIGN_CENTER_VERTICAL)
self.starBtn = self.StarBtn(self, iconCache=iconCache)
self.starBtn.Bind(wx.EVT_BUTTON, self.star)
self.btnSizer.Add(self.starBtn, border=6, flag=wx.ALL | wx.EXPAND)
self.starBtn.SetToolTip(_translate(
"'Star' this project to get back to it easily. Projects you've starred will appear first in your searches "
"and projects with more stars in total will appear higher in everyone's searches."
))
# Fork button
self.forkLbl = wx.StaticText(self, label="-")
self.btnSizer.Add(self.forkLbl, border=6, flag=wx.LEFT | wx.TOP | wx.BOTTOM | wx.ALIGN_CENTER_VERTICAL)
self.forkBtn = wx.Button(self, label=_translate("Fork"))
self.forkBtn.SetBitmap(iconCache.getBitmap(name="fork", size=16))
self.forkBtn.Bind(wx.EVT_BUTTON, self.fork)
self.btnSizer.Add(self.forkBtn, border=6, flag=wx.ALL | wx.EXPAND)
self.forkBtn.SetToolTip(_translate(
"Create a copy of this project on your own Pavlovia account so that you can make changes without affecting "
"the original project."
))
# Create button
self.createBtn = wx.Button(self, label=_translate("Create"))
self.createBtn.SetBitmap(iconCache.getBitmap(name="plus", size=16))
self.createBtn.Bind(wx.EVT_BUTTON, self.create)
self.btnSizer.Add(self.createBtn, border=6, flag=wx.RIGHT | wx.TOP | wx.BOTTOM | wx.ALIGN_CENTER_VERTICAL)
self.createBtn.SetToolTip(_translate(
"Create a Pavlovia project for the current experiment."
))
# Sync button
self.syncBtn = wx.Button(self, label=_translate("Sync"))
self.syncBtn.SetBitmap(iconCache.getBitmap(name="view-refresh", size=16))
self.syncBtn.Bind(wx.EVT_BUTTON, self.sync)
self.btnSizer.Add(self.syncBtn, border=6, flag=wx.ALL | wx.EXPAND)
self.syncBtn.SetToolTip(_translate(
"Synchronise this project's local files with their online counterparts. This will 'pull' changes from "
"Pavlovia and 'push' changes from your local files."
))
# Get button
self.downloadBtn = wx.Button(self, label=_translate("Download"))
self.downloadBtn.SetBitmap(iconCache.getBitmap(name="download", size=16))
self.downloadBtn.Bind(wx.EVT_BUTTON, self.sync)
self.btnSizer.Add(self.downloadBtn, border=6, flag=wx.ALL | wx.EXPAND)
self.downloadBtn.SetToolTip(_translate(
"'Clone' this project, creating local copies of all its files and tracking any changes you make so that "
"they can be applied when you next 'sync' the project."
))
# Sync label
self.syncLbl = wx.StaticText(self, size=(-1, -1), label="---")
self.btnSizer.Add(self.syncLbl, border=6, flag=wx.RIGHT | wx.TOP | wx.BOTTOM | wx.ALIGN_CENTER_VERTICAL)
self.syncLbl.SetToolTip(_translate(
"Last synced at..."
))
self.btnSizer.AddStretchSpacer(1)
# Sep
self.sizer.Add(wx.StaticLine(self, -1), border=6, flag=wx.EXPAND | wx.ALL)
# Local root
self.rootSizer = wx.BoxSizer(wx.HORIZONTAL)
self.sizer.Add(self.rootSizer, flag=wx.EXPAND)
self.localRootLabel = wx.StaticText(self, label="Local root:")
self.rootSizer.Add(self.localRootLabel, border=6, flag=wx.ALIGN_CENTER_VERTICAL | wx.ALL)
self.localRoot = utils.FileCtrl(self, dlgtype="dir")
self.localRoot.Bind(wx.EVT_FILEPICKER_CHANGED, self.updateProject)
self.rootSizer.Add(self.localRoot, proportion=1, border=6, flag=wx.EXPAND | wx.LEFT | wx.RIGHT | wx.BOTTOM)
self.localRoot.SetToolTip(_translate(
"Folder in which local files are stored for this project. Changes to files in this folder will be tracked "
"and applied to the project when you 'sync', so make sure the only files in this folder are relevant!"
))
# Sep
self.sizer.Add(wx.StaticLine(self, -1), border=6, flag=wx.EXPAND | wx.ALL)
# Description
self.description = wx.TextCtrl(self, size=(-1, -1), value="", style=wx.TE_MULTILINE)
self.description.Bind(wx.EVT_KILL_FOCUS, self.updateProject)
self.sizer.Add(self.description, proportion=1, border=6, flag=wx.ALL | wx.EXPAND)
self.description.SetToolTip(_translate(
"Description of the project to be shown on Pavlovia. Note: This is different than a README file!"
))
# Sep
self.sizer.Add(wx.StaticLine(self, -1), border=6, flag=wx.EXPAND | wx.ALL)
# Visibility
self.visSizer = wx.BoxSizer(wx.HORIZONTAL)
self.sizer.Add(self.visSizer, flag=wx.EXPAND)
self.visLbl = wx.StaticText(self, label=_translate("Visibility:"))
self.visSizer.Add(self.visLbl, border=6, flag=wx.ALIGN_CENTER_VERTICAL | wx.ALL)
self.visibility = wx.Choice(self, choices=["Private", "Public"])
self.visibility.Bind(wx.EVT_CHOICE, self.updateProject)
self.visSizer.Add(self.visibility, proportion=1, border=6, flag=wx.EXPAND | wx.ALL)
self.visibility.SetToolTip(_translate(
"Visibility of the current project; whether its visible only to its creator (Private) or to any user "
"(Public)."
))
# Status
self.statusSizer = wx.BoxSizer(wx.HORIZONTAL)
self.sizer.Add(self.statusSizer, flag=wx.EXPAND)
self.statusLbl = wx.StaticText(self, label=_translate("Status:"))
self.statusSizer.Add(self.statusLbl, border=6, flag=wx.ALIGN_CENTER_VERTICAL | wx.ALL)
self.status = wx.Choice(self, choices=["Running", "Piloting", "Inactive"])
self.status.Bind(wx.EVT_CHOICE, self.updateProject)
self.statusSizer.Add(self.status, proportion=1, border=6, flag=wx.EXPAND | wx.ALL)
self.status.SetToolTip(_translate(
"Project status; whether it can be run to collect data (Running), run by its creator without saving "
"data (Piloting) or cannot be run (Inactive)."
))
# Tags
self.tagSizer = wx.BoxSizer(wx.HORIZONTAL)
self.sizer.Add(self.tagSizer, flag=wx.EXPAND)
self.tagLbl = wx.StaticText(self, label=_translate("Keywords:"))
self.tagSizer.Add(self.tagLbl, border=6, flag=wx.ALIGN_CENTER_VERTICAL | wx.ALL)
self.tags = utils.ButtonArray(self, orient=wx.HORIZONTAL, items=[], itemAlias=_translate("tag"))
self.tags.Bind(wx.EVT_LIST_INSERT_ITEM, self.updateProject)
self.tags.Bind(wx.EVT_LIST_DELETE_ITEM, self.updateProject)
self.tagSizer.Add(self.tags, proportion=1, border=6, flag=wx.EXPAND | wx.ALL)
self.tags.SetToolTip(_translate(
"Keywords associated with this project, helping others to find it. For example, if your experiment is "
"useful to psychophysicists, you may want to add the keyword 'psychophysics'."
))
# Populate
if project is not None:
project.refresh()
self.project = project
@property
def project(self):
    """The project object this panel is currently bound to (None when the
    panel is empty/disabled).
    """
    return self._project
@project.setter
def project(self, project):
    """Bind this panel to `project` and repopulate every widget.

    When `project` is None, all fields are blanked and disabled; otherwise
    fields are filled from the project's info and enabled according to
    `project.editable`.
    """
    self._project = project
    # Populate fields
    if project is None:
        # Icon
        self.icon.SetBitmap(wx.Bitmap())
        self.icon.SetBackgroundColour("#f2f2f2")
        self.icon.Disable()
        # Title
        self.title.SetValue("")
        self.title.Disable()
        # Author
        self.author.SetLabel("by --- on ---")
        self.author.Disable()
        # Link
        self.link.SetLabel("---/---")
        self.link.SetURL("https://pavlovia.org/")
        self.link.Disable()
        # Star button
        self.starBtn.Disable()
        self.starBtn.value = False
        # Star label
        self.starLbl.SetLabel("-")
        self.starLbl.Disable()
        # Fork button
        self.forkBtn.Disable()
        # Fork label
        self.forkLbl.SetLabel("-")
        self.forkLbl.Disable()
        # Create button (only useful when logged in)
        self.createBtn.Show()
        self.createBtn.Enable(bool(self.session.user))
        # Sync button
        self.syncBtn.Hide()
        # Get button
        self.downloadBtn.Hide()
        # Sync label
        self.syncLbl.SetLabel("---")
        self.syncLbl.Disable()
        # Local root
        self.localRootLabel.Disable()
        wx.TextCtrl.SetValue(self.localRoot, "")  # use base method to avoid callback
        self.localRoot.Disable()
        # Description
        self.description.SetValue("")
        self.description.Disable()
        # Visibility
        self.visibility.SetSelection(wx.NOT_FOUND)
        self.visibility.Disable()
        # Status
        self.status.SetSelection(wx.NOT_FOUND)
        self.status.Disable()
        # Tags
        self.tags.clear()
        self.tags.Disable()
    else:
        # Refresh project to make sure it has info
        if not hasattr(project, "_info"):
            project.refresh()
        # Icon
        # NOTE(review): tests info['avatarUrl'] but fetches project['avatar_url'];
        # presumably both resolve to the same URL — confirm.
        if 'avatarUrl' in project.info:
            try:
                content = requests.get(project['avatar_url']).content
                icon = wx.Bitmap(wx.Image(io.BytesIO(content)))
            except requests.exceptions.MissingSchema:
                # Relative/malformed avatar URL: fall back to a blank bitmap
                icon = wx.Bitmap()
        else:
            icon = wx.Bitmap()
        self.icon.SetBitmap(icon)
        self.icon.SetBackgroundColour("#f2f2f2")
        self.icon.Enable(project.editable)
        # Title
        self.title.SetValue(project['name'])
        self.title.Enable(project.editable)
        # Author
        self.author.SetLabel(f"by {project['path_with_namespace'].split('/')[0]} on {project['created_at']:%d %B %Y}")
        self.author.Enable()
        # Link
        self.link.SetLabel(project['path_with_namespace'])
        self.link.SetURL("https://pavlovia.org/" + project['path_with_namespace'])
        self.link.Enable()
        # Star button (needs a logged-in user)
        self.starBtn.value = project.starred
        self.starBtn.Enable(bool(project.session.user))
        # Star label
        self.starLbl.SetLabel(str(project['star_count']))
        self.starLbl.Enable()
        # Fork button (logged in, and not your own project)
        self.forkBtn.Enable(bool(project.session.user) and not project.owned)
        # Fork label
        self.forkLbl.SetLabel(str(project['forks_count']))
        self.forkLbl.Enable()
        # Create button
        self.createBtn.Hide()
        # Sync button
        self.syncBtn.Show(bool(project.localRoot) or (not project.editable))
        self.syncBtn.Enable(project.editable)
        # Get button
        self.downloadBtn.Show(not bool(project.localRoot) and project.editable)
        self.downloadBtn.Enable(project.editable)
        # Sync label
        self.syncLbl.SetLabel(f"{project['last_activity_at']:%d %B %Y, %I:%M%p}")
        self.syncLbl.Show(bool(project.localRoot) or (not project.editable))
        self.syncLbl.Enable(project.editable)
        # Local root
        wx.TextCtrl.SetValue(self.localRoot, project.localRoot or "")  # use base method to avoid callback
        self.localRootLabel.Enable(project.editable)
        self.localRoot.Enable(project.editable)
        # Description
        self.description.SetValue(project['description'])
        self.description.Enable(project.editable)
        # Visibility
        self.visibility.SetStringSelection(project['visibility'])
        self.visibility.Enable(project.editable)
        # Status
        self.status.SetStringSelection(str(project['status2']).title())
        self.status.Enable(project.editable)
        # Tags
        self.tags.items = project['keywords']
        self.tags.Enable(project.editable)
    # Layout
    self.Layout()
@property
def session(self):
    """The current Pavlovia session, created lazily on first access and
    cached on the instance for subsequent reads.
    """
    try:
        # Fast path: session already cached on this instance
        return self._session
    except AttributeError:
        # First access: create and memoise the session
        self._session = pavlovia.getCurrentSession()
        return self._session
def create(self, evt=None):
    """Show the project-creation dialog and adopt whatever project it
    produces (may be None if the user cancelled).
    """
    createDlg = sync.CreateDlg(self, user=self.session.user)
    createDlg.ShowModal()
    # Rebinding .project repopulates the whole panel via the setter
    self.project = createDlg.project
def sync(self, evt=None):
    """Sync this panel's project with Pavlovia.

    Prompts for a local root folder if none is set (aborting silently if
    the user cancels), commits local changes, then performs the sync and
    refreshes the panel's sync state.
    """
    # If not synced locally, choose a folder
    if not self.localRoot.GetValue():
        self.localRoot.browse()
        # If cancelled, return
        if not self.localRoot.GetValue():
            return
        self.project.localRoot = self.localRoot.GetValue()
    # Enable ctrl now that there is a local root
    self.localRoot.Enable()
    self.localRootLabel.Enable()
    # Commit local changes BEFORE syncing (same order as syncProject below;
    # previously the commit dialog was shown after dlg.sync(), so the sync
    # ran against an uncommitted working tree)
    dlg = sync.SyncDialog(self, self.project)
    functions.showCommitDialog(self, self.project, initMsg="", infoStream=dlg.status)
    # Show sync dlg (does sync)
    dlg.sync()
    # Update project
    self.project.refresh()
    # Update last sync date & show
    self.syncLbl.SetLabel(f"{self.project['last_activity_at']:%d %B %Y, %I:%M%p}")
    self.syncLbl.Show()
    self.syncLbl.Enable()
    # Switch buttons to show Sync rather than Download/Create
    self.createBtn.Hide()
    self.downloadBtn.Hide()
    self.syncBtn.Show()
    self.syncBtn.Enable()
def fork(self, evt=None):
    """Fork the current project into the logged-in user's namespace,
    switch the panel to the fork, and offer to sync it locally.

    If the fork already exists on GitLab, offers to open the existing
    fork instead.
    """
    # Do fork
    try:
        proj = self.project.fork()
    except gitlab.GitlabCreateError as e:
        # If project already exists, ask user if they want to view it rather than create again
        dlg = wx.MessageDialog(self, f"{e.error_message}\n\nOpen forked project?", style=wx.YES_NO)
        if dlg.ShowModal() == wx.ID_YES:
            # If yes, show forked project
            projData = requests.get(
                f"https://pavlovia.org/api/v2/experiments/{self.project.session.user['username']}/{self.project.info['pathWithNamespace'].split('/')[1]}"
            ).json()
            self.project = PavloviaProject(projData['experiment']['gitlabId'])
            return
        else:
            # If no, return
            return
    # Switch to new project
    self.project = proj
    # Sync
    # NOTE(review): this message is not passed through _translate, unlike
    # the other user-facing strings in this panel — confirm intent.
    dlg = wx.MessageDialog(self, "Fork created! Sync it to a local folder?", style=wx.YES_NO)
    if dlg.ShowModal() == wx.ID_YES:
        self.sync()
def star(self, evt=None):
    """Toggle the star on this project for the current user, then push
    the change to Pavlovia via `updateProject`.
    """
    # Toggle button
    self.starBtn.toggle()
    # Star/unstar project
    self.updateProject(evt)
    # todo: Refresh stars count
def updateProject(self, evt=None):
    """Write a single edited field back to the project, dispatching on
    whichever widget fired `evt`.

    No-op when the panel has no project or was called without an event.
    Most branches require `self.project.editable`; status and keywords go
    through the Pavlovia REST API rather than GitLab.
    """
    # Skip if no project
    if self.project is None or evt is None:
        return
    # Get object
    obj = evt.GetEventObject()
    # Update project attribute according to supplying object
    if obj == self.title and self.project.editable:
        self.project['name'] = self.title.Value
        self.project.save()
    if obj == self.icon:
        # Create temporary image file
        _, temp = tempfile.mkstemp(suffix=".png")
        self.icon.BitmapFull.SaveFile(temp, wx.BITMAP_TYPE_PNG)
        # Load and upload from temp file
        self.project['avatar'] = open(temp, "rb")
        self.project.save()
        # Delete temp file
        # NOTE(review): the temp file is never removed (the removal below is
        # commented out) — presumably because the open handle above is still
        # needed during save; confirm and clean up if possible.
        #os.remove(temp)
    if obj == self.starBtn:
        self.project.starred = self.starBtn.value
        self.starLbl.SetLabel(str(self.project.info['nbStars']))
    if obj == self.localRoot:
        if Path(self.localRoot.Value).is_dir():
            self.project.localRoot = self.localRoot.Value
        else:
            # Folder doesn't exist: warn, then clear both the ctrl and the
            # project's local root
            dlg = wx.MessageDialog(self,
                                   message=_translate(
                                       "Could not find folder {directory}, please select a different "
                                       "local root.".format(directory=self.localRoot.Value)
                                   ),
                                   caption="Directory not found",
                                   style=wx.ICON_ERROR)
            self.localRoot.SetValue("")
            self.project.localRoot = ""
            dlg.ShowModal()
        # Set project again to trigger a refresh
        self.project = self.project
    if obj == self.description and self.project.editable:
        self.project['description'] = self.description.Value
        self.project.save()
    if obj == self.visibility and self.project.editable:
        self.project['visibility'] = self.visibility.GetStringSelection().lower()
        self.project.save()
    if obj == self.status and self.project.editable:
        requests.put(f"https://pavlovia.org/api/v2/experiments/{self.project.id}",
                     data={"status2": self.status.GetStringSelection()},
                     headers={'OauthToken': self.session.getToken()})
    if obj == self.tags and self.project.editable:
        requests.put(f"https://pavlovia.org/api/v2/experiments/{self.project.id}",
                     data={"keywords": self.tags.GetValue()},
                     headers={'OauthToken': self.session.getToken()})
class ProjectFrame(wx.Dialog):
    """A resizable dialog window that wraps a DetailsPanel for one
    Pavlovia project (or for no project, showing a blank info panel).
    """
    def __init__(self, app, parent=None, style=None,
                 pos=wx.DefaultPosition, project=None):
        """
        Parameters
        ----------
        app : the running PsychoPy app object
        parent : wx parent window (dialog is centred on it when given)
        style : wx style flags; defaults to a resizable dialog style
        pos : initial window position
        project : project to display, or None for an empty info dialog
        """
        if style is None:
            style = (wx.DEFAULT_DIALOG_STYLE | wx.CENTER |
                     wx.TAB_TRAVERSAL | wx.RESIZE_BORDER)
        # Title falls back to a generic label when no project is given
        if project:
            title = project['name']
        else:
            title = _translate("Project info")
        self.frameType = 'ProjectInfo'
        wx.Dialog.__init__(self, parent, -1, title=title, style=style,
                           size=(700, 500), pos=pos)
        self.app = app
        self.project = project
        self.parent = parent
        self.detailsPanel = DetailsPanel(parent=self, project=self.project)
        self.mainSizer = wx.BoxSizer(wx.VERTICAL)
        self.mainSizer.Add(self.detailsPanel, proportion=1, border=12, flag=wx.EXPAND | wx.ALL)
        self.SetSizerAndFit(self.mainSizer)
        if self.parent:
            self.CenterOnParent()
        self.Layout()
def syncProject(parent, project, file="", closeFrameWhenDone=False):
    """Sync the given project with Pavlovia, creating the project and/or
    choosing a local root folder first when necessary.

    Returns None if the user cancels at any point (and implicitly None
    after a completed sync).

    NOTE(review): an earlier docstring promised 1/0/-1 return codes, but
    every early exit is a bare `return` — callers should not rely on the
    return value. `closeFrameWhenDone` is currently unused here.
    """
    # If not in a project, make one
    if project is None:
        msgDlg = wx.MessageDialog(parent,
                                  message=_translate("This file doesn't belong to any existing project."),
                                  style=wx.OK | wx.CANCEL | wx.CENTER)
        msgDlg.SetOKLabel(_translate("Create a project"))
        if msgDlg.ShowModal() == wx.ID_OK:
            # Get start path and name from builder/coder if possible
            if file:
                file = Path(file)
                name = file.stem
                path = file.parent
            else:
                name = path = ""
            # Open dlg to create new project
            createDlg = sync.CreateDlg(parent,
                                       user=pavlovia.getCurrentSession().user,
                                       name=name,
                                       path=path)
            if createDlg.ShowModal() == wx.ID_OK and createDlg.project is not None:
                project = createDlg.project
            else:
                return
        else:
            return
    # If no local root, prompt to make one
    if not project.localRoot:
        defaultRoot = Path(file).parent
        # Ask user if they want to
        dlg = wx.MessageDialog(parent, message=_translate("Project root folder is not yet specified, specify one now?"), style=wx.YES_NO)
        # Open folder picker
        if dlg.ShowModal() == wx.ID_YES:
            dlg = wx.DirDialog(parent, message=_translate("Specify folder..."), defaultPath=str(defaultRoot))
            if dlg.ShowModal() == wx.ID_OK:
                localRoot = Path(dlg.GetPath())
                project.localRoot = str(localRoot)
            else:
                # If cancelled, cancel sync
                return
        else:
            # If they don't want to specify, cancel sync
            return
    # Assign project to parent frame
    parent.project = project
    # If there is (now) a project, do sync: commit first, then sync
    if project is not None:
        dlg = sync.SyncDialog(parent, project)
        functions.showCommitDialog(parent, project, initMsg="", infoStream=dlg.status)
        dlg.sync()
class ForkDlg(wx.Dialog):
    """Simple dialog to help choose the location/name of a forked project"""
    # this dialog is working fine, but the API call to fork to a specific
    # namespace doesn't appear to work
    def __init__(self, project, *args, **kwargs):
        """Build the form: a group (namespace) chooser populated with the
        user's own name plus their groups, and a project-name field
        prefilled with the source project's name.
        """
        wx.Dialog.__init__(self, *args, **kwargs)
        # NOTE(review): existingName is assigned but never used below
        existingName = project.name
        session = pavlovia.getCurrentSession()
        groups = [session.user['username']]
        groups.extend(session.listUserGroups())
        msg = wx.StaticText(self, label="Where shall we fork to?")
        groupLbl = wx.StaticText(self, label="Group:")
        self.groupField = wx.Choice(self, choices=groups)
        nameLbl = wx.StaticText(self, label="Project name:")
        self.nameField = wx.TextCtrl(self, value=project.name)
        fieldsSizer = wx.FlexGridSizer(cols=2, rows=2, vgap=5, hgap=5)
        fieldsSizer.AddMany([groupLbl, self.groupField,
                             nameLbl, self.nameField])
        buttonSizer = wx.BoxSizer(wx.HORIZONTAL)
        buttonSizer.Add(wx.Button(self, id=wx.ID_OK, label="OK"))
        buttonSizer.Add(wx.Button(self, id=wx.ID_CANCEL, label="Cancel"))
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        mainSizer.Add(msg, 1, wx.ALL, 5)
        mainSizer.Add(fieldsSizer, 1, wx.ALL, 5)
        mainSizer.Add(buttonSizer, 1, wx.ALL | wx.ALIGN_RIGHT, 5)
        self.SetSizerAndFit(mainSizer)
        self.Layout()
class ProjectRecreator(wx.Dialog):
"""Use this Dlg to handle the case of a missing (deleted?) remote project
"""
def __init__(self, project, parent, *args, **kwargs):
wx.Dialog.__init__(self, parent, *args, **kwargs)
self.parent = parent
self.project = project
existingName = project.name
msgText = _translate("points to a remote that doesn't exist (deleted?).")
msgText += (" "+_translate("What shall we do?"))
msg = wx.StaticText(self, label="{} {}".format(existingName, msgText))
choices = [_translate("(Re)create a project"),
"{} ({})".format(_translate("Point to an different location"),
_translate("not yet supported")),
_translate("Forget the local git repository (deletes history keeps files)")]
self.radioCtrl = wx.RadioBox(self, label='RadioBox', choices=choices,
majorDimension=1)
self.radioCtrl.EnableItem(1, False)
self.radioCtrl.EnableItem(2, False)
mainSizer = wx.BoxSizer(wx.VERTICAL)
buttonSizer = wx.BoxSizer(wx.HORIZONTAL)
buttonSizer.Add(wx.Button(self, id=wx.ID_OK, label=_translate("OK")),
1, wx.ALL, 5)
buttonSizer.Add(wx.Button(self, id=wx.ID_CANCEL, label=_translate("Cancel")),
1, wx.ALL, 5)
mainSizer.Add(msg, 1, wx.ALL, 5)
mainSizer.Add(self.radioCtrl, 1, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 5)
mainSizer.Add(buttonSizer, 1, wx.ALL | wx.ALIGN_RIGHT, 1)
self.SetSizer(mainSizer)
self.Layout()
def ShowModal(self):
if wx.Dialog.ShowModal(self) == wx.ID_OK:
choice = self.radioCtrl.GetSelection()
if choice == 0:
editor = ProjectEditor(parent=self.parent,
localRoot=self.project.localRoot)
if editor.ShowModal() == wx.ID_OK:
self.project = editor.project
return 1 # success!
else:
return -1 # user cancelled
elif choice == 1:
raise NotImplementedError("We don't yet support redirecting "
"your project to a new location.")
elif choice == 2:
raise NotImplementedError("Deleting the local git repo is not "
"yet implemented")
else:
return -1<|fim▁end|> | (descrLabel, 0, wx.ALIGN_RIGHT), self.descrBox,
(tagsLabel, 0, wx.ALIGN_RIGHT), self.tagsBox,
(publicLabel, 0, wx.ALIGN_RIGHT), self.publicBox]) |
<|file_name|>HashingOutputStream.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*<|fim▁hole|> * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.hash;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Output stream decorator that hashes data written to the stream.
* Inspired by the Google Guava project.
*/
public final class HashingOutputStream extends FilterOutputStream {
    /** Accumulates a hash of every byte written through this stream. */
    private final Hasher hasher;

    /**
     * Creates a stream that forwards all writes to {@code out} while
     * feeding the same bytes into a hasher obtained from
     * {@code hashFunction}. Neither argument may be null.
     */
    public HashingOutputStream(HashFunction hashFunction, OutputStream out) {
        super(checkNotNull(out));
        this.hasher = checkNotNull(hashFunction.newHasher());
    }
    @Override
    public void write(int b) throws IOException {
        // Hash first, then forward to the underlying stream.
        hasher.putByte((byte) b);
        out.write(b);
    }
    @Override
    public void write(byte[] bytes, int off, int len) throws IOException {
        // Bulk variant: hash the range, then forward it in one call.
        hasher.putBytes(bytes, off, len);
        out.write(bytes, off, len);
    }
    /** Returns the hash of all bytes written so far. */
    public HashCode hash() {
        return hasher.hash();
    }
    @Override
    public void close() throws IOException {
        // Closes the delegate directly, bypassing FilterOutputStream.close()
        // — NOTE(review): so no flush() happens here; presumably intentional,
        // confirm against callers.
        out.close();
    }
}
* |
<|file_name|>warrantyscope.js<|end_file_name|><|fim▁begin|>/* Thing > Intangible > Enumeration > WarrantyScope - A range of of services that will be provided to a customer free of charge in case of a defect or malfunction of a product.
Commonly used values:
http://purl.org/goodrelations/v1#Labor-BringIn
http://purl.org/goodrelations/v1#PartsAndLabor-BringIn
http://purl.org/goodrelations/v1#PartsAndLabor-PickUp. Generated automatically by the reactGenerator. */ var WarrantyScope= React.createClass({
getDefaultProps: function(){
return {
}
},
render: function(){
var props = this.props.props;
var potentialAction;
if( props.potentialAction ){
if( props.potentialAction instanceof Array ){
potentialAction = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
potentialAction = potentialAction.concat( props.potentialAction.map( function(result, index){
return ( <Action {...result} key={index} /> )
}) );
potentialAction.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
potentialAction = ( <Action props={ props.potentialAction } /> ); }
}
var description;
if( props.description ){
if( props.description instanceof Array ){
description = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
description = description.concat( props.description.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. description is a Text.'></div> )
}) );
description.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
description = ( <div data-advice='Put your HTML here. description is a Text.'></div> );
}
}
var sameAs;
if( props.sameAs ){
if( props.sameAs instanceof Array ){
sameAs = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
sameAs = sameAs.concat( props.sameAs.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. sameAs is a URL.'></div> )
}) );
sameAs.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
sameAs = ( <div data-advice='Put your HTML here. sameAs is a URL.'></div> );
}
}
var image;
if( props.image ){
if( props.image instanceof Array ){
image = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
image = image.concat( props.image.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. image is a URL or
ImageObject.'></div> )
}) );<|fim▁hole|> image = ( <div data-advice='Put your HTML here. image is a URL or
ImageObject.'></div> );
}
}
var url;
if( props.url ){
if( props.url instanceof Array ){
url = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
url = url.concat( props.url.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. url is a URL.'></div> )
}) );
url.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
url = ( <div data-advice='Put your HTML here. url is a URL.'></div> );
}
}
var supersededBy;
if( props.supersededBy ){
if( props.supersededBy instanceof Array ){
supersededBy = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
supersededBy = supersededBy.concat( props.supersededBy.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. supersededBy is a Class or
Property or
Enumeration.'></div> )
}) );
supersededBy.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
supersededBy = ( <div data-advice='Put your HTML here. supersededBy is a Class or
Property or
Enumeration.'></div> );
}
}
var mainEntityOfPage;
if( props.mainEntityOfPage ){
if( props.mainEntityOfPage instanceof Array ){
mainEntityOfPage = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
mainEntityOfPage = mainEntityOfPage.concat( props.mainEntityOfPage.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. mainEntityOfPage is a CreativeWork or
URL.'></div> )
}) );
mainEntityOfPage.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
mainEntityOfPage = ( <div data-advice='Put your HTML here. mainEntityOfPage is a CreativeWork or
URL.'></div> );
}
}
var additionalType;
if( props.additionalType ){
if( props.additionalType instanceof Array ){
additionalType = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
additionalType = additionalType.concat( props.additionalType.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. additionalType is a URL.'></div> )
}) );
additionalType.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
additionalType = ( <div data-advice='Put your HTML here. additionalType is a URL.'></div> );
}
}
var alternateName;
if( props.alternateName ){
if( props.alternateName instanceof Array ){
alternateName = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
alternateName = alternateName.concat( props.alternateName.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. alternateName is a Text.'></div> )
}) );
alternateName.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
alternateName = ( <div data-advice='Put your HTML here. alternateName is a Text.'></div> );
}
}
var name;
if( props.name ){
if( props.name instanceof Array ){
name = [ (<div key='header' data-advice='HTML for the *head* of the section'></div>) ]
name = name.concat( props.name.map( function(result, index){
return ( <div key={index} data-advice='Put your HTML here. name is a Text.'></div> )
}) );
name.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else {
name = ( <div data-advice='Put your HTML here. name is a Text.'></div> );
}
}
return (<div title='WarrantyScope' className='WarrantyScope entity'>
{ potentialAction }
{ description }
{ sameAs }
{ image }
{ url }
{ supersededBy }
{ mainEntityOfPage }
{ additionalType }
{ alternateName }
{ name }
</div>);
}
});<|fim▁end|> | image.push( ( <div key='footer' data-advice='HTML for the *footer* of the section'></div> ) );
} else { |
<|file_name|>problem_029.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#-*-coding:utf-8-*-<|fim▁hole|>
21 22 23 24 25
20 7 8 9 10
19 6 1 2 11
18 5 4 3 12
17 16 15 14 13
It can be verified that the sum of the numbers on the diagonals is 101.
What is the sum of the numbers on the diagonals
in a 1001 by 1001 spiral formed in the same way?
'''
import timeit
def calc(a, b):
    """Count the distinct values of i**j for 2 <= i <= a and 2 <= j <= b.

    Project Euler #29: for a = b = 100 the answer is 9183; for
    a = b = 5 there are 15 distinct terms.
    """
    # A set comprehension deduplicates directly, without first building
    # an intermediate list as set([...]) did.
    return len({i ** j for i in range(2, a + 1) for j in range(2, b + 1)})
if __name__ == '__main__':
print calc(100, 100)
print timeit.Timer('problem_029.calc(100, 100)', 'import problem_029').timeit(10)<|fim▁end|> |
'''
Starting with the number 1 and moving to the right
in a clockwise direction a 5 by 5 spiral is formed as follows: |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"os"
"github.com/mkideal/cli"
)<|fim▁hole|> Username string `cli:"u,username" usage:"github account" prompt:"type github account"`
Password string `pw:"p,password" usage:"password of github account" prompt:"type the password"`
}
func main() {
os.Exit(cli.Run(new(argT), func(ctx *cli.Context) error {
argv := ctx.Argv().(*argT)
ctx.String("username=%s, password=%s\n", argv.Username, argv.Password)
return nil
}))
}<|fim▁end|> |
type argT struct {
cli.Helper |
<|file_name|>headerRenderer.js<|end_file_name|><|fim▁begin|>/**
* ag-grid - Advanced Data Grid / Data Table supporting Javascript / React / AngularJS / Web Components
* @version v12.0.1
* @link http://www.ag-grid.com/
* @license MIT
*/
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
var gridOptionsWrapper_1 = require("../gridOptionsWrapper");
var columnController_1 = require("../columnController/columnController");
var gridPanel_1 = require("../gridPanel/gridPanel");
var column_1 = require("../entities/column");
var context_1 = require("../context/context");
var headerContainer_1 = require("./headerContainer");
var eventService_1 = require("../eventService");
var events_1 = require("../events");
var scrollVisibleService_1 = require("../gridPanel/scrollVisibleService");
var HeaderRenderer = (function () {
function HeaderRenderer() {
}
HeaderRenderer.prototype.init = function () {
var _this = this;
this.eHeaderViewport = this.gridPanel.getHeaderViewport();
this.eRoot = this.gridPanel.getRoot();
this.eHeaderOverlay = this.gridPanel.getHeaderOverlay();
this.centerContainer = new headerContainer_1.HeaderContainer(this.gridPanel.getHeaderContainer(), this.gridPanel.getHeaderViewport(), this.eRoot, null);
this.childContainers = [this.centerContainer];
if (!this.gridOptionsWrapper.isForPrint()) {
this.pinnedLeftContainer = new headerContainer_1.HeaderContainer(this.gridPanel.getPinnedLeftHeader(), null, this.eRoot, column_1.Column.PINNED_LEFT);
this.pinnedRightContainer = new headerContainer_1.HeaderContainer(this.gridPanel.getPinnedRightHeader(), null, this.eRoot, column_1.Column.PINNED_RIGHT);
this.childContainers.push(this.pinnedLeftContainer);
this.childContainers.push(this.pinnedRightContainer);
}
this.childContainers.forEach(function (container) { return _this.context.wireBean(container); });
// when grid columns change, it means the number of rows in the header has changed and it's all new columns
this.eventService.addEventListener(events_1.Events.EVENT_GRID_COLUMNS_CHANGED, this.onGridColumnsChanged.bind(this));
// shotgun way to get labels to change, eg from sum(amount) to avg(amount)
this.eventService.addEventListener(events_1.Events.EVENT_COLUMN_VALUE_CHANGED, this.refreshHeader.bind(this));
// for resized, the individual cells take care of this, so don't need to refresh everything
this.eventService.addEventListener(events_1.Events.EVENT_COLUMN_RESIZED, this.setPinnedColContainerWidth.bind(this));
this.eventService.addEventListener(events_1.Events.EVENT_DISPLAYED_COLUMNS_CHANGED, this.setPinnedColContainerWidth.bind(this));
this.eventService.addEventListener(events_1.Events.EVENT_SCROLL_VISIBILITY_CHANGED, this.onScrollVisibilityChanged.bind(this));
if (this.columnController.isReady()) {
this.refreshHeader();
}
};
HeaderRenderer.prototype.onScrollVisibilityChanged = function () {
this.setPinnedColContainerWidth();
};
HeaderRenderer.prototype.forEachHeaderElement = function (callback) {
this.childContainers.forEach(function (childContainer) { return childContainer.forEachHeaderElement(callback); });
};
HeaderRenderer.prototype.destroy = function () {
this.childContainers.forEach(function (container) { return container.destroy(); });
};
HeaderRenderer.prototype.onGridColumnsChanged = function () {
this.setHeight();
};
HeaderRenderer.prototype.refreshHeader = function () {
this.setHeight();
this.childContainers.forEach(function (container) { return container.refresh(); });
this.setPinnedColContainerWidth();
};
// Sizes the header overlay to one header-row height and positions it
// beneath the last header row ((rowCount - 1) * rowHeight from the top).
HeaderRenderer.prototype.setHeight = function () {
    // if forPrint, overlay is missing
    if (this.eHeaderOverlay) {
        var rowHeight = this.gridOptionsWrapper.getHeaderHeight();
        // we can probably get rid of this when we no longer need the overlay
        var dept = this.columnController.getHeaderRowCount();
        this.eHeaderOverlay.style.height = rowHeight + 'px';
        this.eHeaderOverlay.style.top = ((dept - 1) * rowHeight) + 'px';
    }
};
// Offsets the centre header viewport by the pinned-left/right widths
// (scrollbar allowance included) so it lines up with the grid body.
HeaderRenderer.prototype.setPinnedColContainerWidth = function () {
    // pinned col doesn't exist when doing forPrint
    if (this.gridOptionsWrapper.isForPrint()) {
        return;
    }
    var pinnedLeftWidthWithScroll = this.scrollVisibleService.getPinnedLeftWithScrollWidth();
    var pinnedRightWidthWithScroll = this.scrollVisibleService.getPinnedRightWithScrollWidth();
    this.eHeaderViewport.style.marginLeft = pinnedLeftWidthWithScroll + 'px';
    this.eHeaderViewport.style.marginRight = pinnedRightWidthWithScroll + 'px';
};
__decorate([
context_1.Autowired('gridOptionsWrapper'),
__metadata("design:type", gridOptionsWrapper_1.GridOptionsWrapper)
], HeaderRenderer.prototype, "gridOptionsWrapper", void 0);
__decorate([
context_1.Autowired('columnController'),
__metadata("design:type", columnController_1.ColumnController)
], HeaderRenderer.prototype, "columnController", void 0);
__decorate([
context_1.Autowired('gridPanel'),
__metadata("design:type", gridPanel_1.GridPanel)
], HeaderRenderer.prototype, "gridPanel", void 0);<|fim▁hole|> ], HeaderRenderer.prototype, "context", void 0);
__decorate([
context_1.Autowired('eventService'),
__metadata("design:type", eventService_1.EventService)
], HeaderRenderer.prototype, "eventService", void 0);
__decorate([
context_1.Autowired('scrollVisibleService'),
__metadata("design:type", scrollVisibleService_1.ScrollVisibleService)
], HeaderRenderer.prototype, "scrollVisibleService", void 0);
__decorate([
context_1.PostConstruct,
__metadata("design:type", Function),
__metadata("design:paramtypes", []),
__metadata("design:returntype", void 0)
], HeaderRenderer.prototype, "init", null);
__decorate([
context_1.PreDestroy,
__metadata("design:type", Function),
__metadata("design:paramtypes", []),
__metadata("design:returntype", void 0)
], HeaderRenderer.prototype, "destroy", null);
HeaderRenderer = __decorate([
context_1.Bean('headerRenderer')
], HeaderRenderer);
return HeaderRenderer;
}());
exports.HeaderRenderer = HeaderRenderer;<|fim▁end|> | __decorate([
context_1.Autowired('context'),
__metadata("design:type", context_1.Context) |
<|file_name|>runstatus_page.go<|end_file_name|><|fim▁begin|>package egoscale
import (
"context"
"encoding/json"
"fmt"
"log"
"time"
)
// RunstatusPage represents a runstatus status page as exchanged with the
// runstatus API.
type RunstatusPage struct {
	Created          *time.Time             `json:"created,omitempty"`
	DarkTheme        bool                   `json:"dark_theme,omitempty"`
	Domain           string                 `json:"domain,omitempty"`
	GradientEnd      string                 `json:"gradient_end,omitempty"`
	GradientStart    string                 `json:"gradient_start,omitempty"`
	HeaderBackground string                 `json:"header_background,omitempty"`
	ID               int                    `json:"id,omitempty"`
	Incidents        []RunstatusIncident    `json:"incidents,omitempty"`
	IncidentsURL     string                 `json:"incidents_url,omitempty"`
	Logo             string                 `json:"logo,omitempty"`
	Maintenances     []RunstatusMaintenance `json:"maintenances,omitempty"`
	MaintenancesURL  string                 `json:"maintenances_url,omitempty"`
	Name             string                 `json:"name"` // fake field (used to post a new runstatus page)
	OkText           string                 `json:"ok_text,omitempty"`
	Plan             string                 `json:"plan,omitempty"`
	PublicURL        string                 `json:"public_url,omitempty"`
	Services         []RunstatusService     `json:"services,omitempty"`
	ServicesURL      string                 `json:"services_url,omitempty"`
	State            string                 `json:"state,omitempty"`
	Subdomain        string                 `json:"subdomain"`
	SupportEmail     string                 `json:"support_email,omitempty"`
	TimeZone         string                 `json:"time_zone,omitempty"`
	Title            string                 `json:"title,omitempty"`
	TitleColor       string                 `json:"title_color,omitempty"`
	TwitterUsername  string                 `json:"twitter_username,omitempty"`
	URL              string                 `json:"url,omitempty"`
}
// Match reports whether other identifies the same page as page, matching
// on subdomain (when other.Subdomain is set) or on ID (when other.ID > 0).
func (page RunstatusPage) Match(other RunstatusPage) bool {
	sameSubdomain := other.Subdomain != "" && page.Subdomain == other.Subdomain
	sameID := other.ID > 0 && page.ID == other.ID
	return sameSubdomain || sameID
}
// RunstatusPageList is one result set of runstatus pages; Next and
// Previous hold the URLs of the neighbouring result sets (empty when
// there is none).
type RunstatusPageList struct {
	Next     string          `json:"next"`
	Previous string          `json:"previous"`
	Pages    []RunstatusPage `json:"results"`
}
// CreateRunstatusPage registers a new runstatus page and returns the page
// as echoed back by the API.
func (client *Client) CreateRunstatusPage(ctx context.Context, page RunstatusPage) (*RunstatusPage, error) {
	body, err := client.runstatusRequest(ctx, client.Endpoint+runstatusPagesURL, page, "POST")
	if err != nil {
		return nil, err
	}
	var created *RunstatusPage
	if err := json.Unmarshal(body, &created); err != nil {
		return nil, err
	}
	return created, nil
}
// DeleteRunstatusPage deletes the runstatus page identified by page.URL.
// It fails fast with an error when the URL is empty rather than issuing
// a malformed request.
func (client *Client) DeleteRunstatusPage(ctx context.Context, page RunstatusPage) error {
	if page.URL == "" {
		return fmt.Errorf("empty URL for %#v", page)
	}
	_, err := client.runstatusRequest(ctx, page.URL, nil, "DELETE")
	return err
}
// GetRunstatusPage fetches the runstatus page: directly by page.URL when
// it is set, otherwise by scanning the account's pages for one matching
// on subdomain or ID (see RunstatusPage.Match).
func (client *Client) GetRunstatusPage(ctx context.Context, page RunstatusPage) (*RunstatusPage, error) {
	if page.URL != "" {
		return client.getRunstatusPage(ctx, page.URL)
	}
	pages, err := client.ListRunstatusPages(ctx)
	if err != nil {
		return nil, err
	}
	for _, candidate := range pages {
		if candidate.Match(page) {
			return client.getRunstatusPage(ctx, candidate.URL)
		}
	}
	return nil, fmt.Errorf("%#v not found", page)
}
// getRunstatusPage fetches a single runstatus page from its URL and
// backfills the IDs missing from the API payload for its sub-resources.
func (client *Client) getRunstatusPage(ctx context.Context, pageURL string) (*RunstatusPage, error) {
	resp, err := client.runstatusRequest(ctx, pageURL, nil, "GET")
	if err != nil {
		return nil, err
	}
	p := new(RunstatusPage)
	if err := json.Unmarshal(resp, p); err != nil {
		return nil, err
	}
	// NOTE: fix the missing IDs. Failures are only logged: a page with a
	// few unidentified sub-resources is still usable.
	for i := range p.Maintenances {
		if err := p.Maintenances[i].FakeID(); err != nil {
			log.Printf("bad fake ID for %#v, %s", p.Maintenances[i], err)
		}
	}
	for i := range p.Services {
		if err := p.Services[i].FakeID(); err != nil {
			log.Printf("bad fake ID for %#v, %s", p.Services[i], err)
		}
	}
	return p, nil
}
// ListRunstatusPages list all the runstatus pages
//
// NOTE(review): only the first page of API results is decoded here; the
// "next" link is not followed. Use PaginateRunstatusPages to walk every
// result page.
func (client *Client) ListRunstatusPages(ctx context.Context) ([]RunstatusPage, error) {
	resp, err := client.runstatusRequest(ctx, client.Endpoint+runstatusPagesURL, nil, "GET")
	if err != nil {
		return nil, err
	}
	var p *RunstatusPageList
	if err := json.Unmarshal(resp, &p); err != nil {
		return nil, err
	}
	return p.Pages, nil
}
//PaginateRunstatusPages paginate on runstatus pages
func (client *Client) PaginateRunstatusPages(ctx context.Context, callback func(pages []RunstatusPage, e error) bool) {
pageURL := client.Endpoint + runstatusPagesURL
for pageURL != "" {
resp, err := client.runstatusRequest(ctx, pageURL, nil, "GET")
if err != nil {
callback(nil, err)
return
}
var p *RunstatusPageList
if err := json.Unmarshal(resp, &p); err != nil {
callback(nil, err)
return
}
if ok := callback(p.Pages, nil); ok {
return
}<|fim▁hole|>}<|fim▁end|> |
pageURL = p.Next
} |
<|file_name|>inhibit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2011 Christoph Reiter <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
if os.name == "nt" or sys.platform == "darwin":
from quodlibet.plugins import PluginNotSupportedError
raise PluginNotSupportedError
import dbus
from quodlibet import _
from quodlibet import app
from quodlibet.qltk import Icons
from quodlibet.plugins.events import EventPlugin
def get_toplevel_xid():
    """Return the X11 window id of the main window, or 0 if unavailable."""
    window = app.window.get_window()
    if window:
        try:
            return window.get_xid()
        except AttributeError:
            # Not running under X11; the GDK window has no get_xid() there.
            pass
    return 0
class InhibitFlags(object):
    """Bit flags accepted by the org.gnome.SessionManager Inhibit() call."""
    LOGOUT = 0x1
    USERSWITCH = 0x2
    SUSPEND = 0x4
    IDLE = 0x8
class SessionInhibit(EventPlugin):
PLUGIN_ID = "screensaver_inhibit"
PLUGIN_NAME = _("Inhibit Screensaver")
PLUGIN_DESC = _("Prevents the GNOME screensaver from activating while"
" a song is playing.")<|fim▁hole|> DBUS_NAME = "org.gnome.SessionManager"
DBUS_INTERFACE = "org.gnome.SessionManager"
DBUS_PATH = "/org/gnome/SessionManager"
APPLICATION_ID = "quodlibet"
INHIBIT_REASON = _("Music is playing")
__cookie = None
def enabled(self):
if not app.player.paused:
self.plugin_on_unpaused()
def disabled(self):
if not app.player.paused:
self.plugin_on_paused()
def plugin_on_unpaused(self):
xid = dbus.UInt32(get_toplevel_xid())
flags = dbus.UInt32(InhibitFlags.IDLE)
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
self.__cookie = iface.Inhibit(
self.APPLICATION_ID, xid, self.INHIBIT_REASON, flags)
except dbus.DBusException:
pass
def plugin_on_paused(self):
if self.__cookie is None:
return
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.DBUS_NAME, self.DBUS_PATH)
iface = dbus.Interface(obj, self.DBUS_INTERFACE)
iface.Uninhibit(self.__cookie)
self.__cookie = None
except dbus.DBusException:
pass<|fim▁end|> | PLUGIN_ICON = Icons.PREFERENCES_DESKTOP_SCREENSAVER
|
<|file_name|>Component.meta.tsx<|end_file_name|><|fim▁begin|>import { {%ComponentName%}, {%ComponentName%}Props } from '.';<|fim▁hole|>import Registry from '@ui-autotools/registry';
// Scaffolding template: {%ComponentName%} is a placeholder substituted by the
// component generator. Registers ui-autotools metadata for the component.
const {%ComponentName%}Metadata = Registry.getComponentMetadata({%ComponentName%});
// Opt the component out of React.StrictMode during automated simulations.
{%ComponentName%}Metadata.nonReactStrictModeCompliant = true;
// Minimal render simulation exercising the component with required props.
{%ComponentName%}Metadata.addSim({
  title: 'render',
  props: {
    buttonText: 'Click me!'
  } as {%ComponentName%}Props,
});
<|file_name|>noop_method_call.rs<|end_file_name|><|fim▁begin|>use crate::context::LintContext;
use crate::rustc_middle::ty::TypeFoldable;
use crate::LateContext;
use crate::LateLintPass;
use rustc_hir::def::DefKind;
use rustc_hir::{Expr, ExprKind};
use rustc_middle::ty;
use rustc_span::symbol::sym;
declare_lint! {
/// The `noop_method_call` lint detects specific calls to noop methods
/// such as a calling `<&T as Clone>::clone` where `T: !Clone`.
///
/// ### Example
///
/// ```rust
/// # #![allow(unused)]
/// #![warn(noop_method_call)]
/// struct Foo;
/// let foo = &Foo;
/// let clone: &Foo = foo.clone();
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Some method calls are noops meaning that they do nothing. Usually such methods
/// are the result of blanket implementations that happen to create some method invocations
/// that end up not doing anything. For instance, `Clone` is implemented on all `&T`, but
/// calling `clone` on a `&T` where `T` does not implement clone, actually doesn't do anything
/// as references are copy. This lint detects these calls and warns the user about them.
pub NOOP_METHOD_CALL,
Allow,
"detects the use of well-known noop methods"
}
declare_lint_pass!(NoopMethodCall => [NOOP_METHOD_CALL]);
impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// We only care about method calls.
let (call, elements) = match expr.kind {
ExprKind::MethodCall(call, _, elements, _) => (call, elements),
_ => return,
};
// We only care about method calls corresponding to the `Clone`, `Deref` and `Borrow`
// traits and ignore any other method call.
let (trait_id, did) = match cx.typeck_results().type_dependent_def(expr.hir_id) {
// Verify we are dealing with a method/associated function.
Some((DefKind::AssocFn, did)) => match cx.tcx.trait_of_item(did) {
// Check that we're dealing with a trait method for one of the traits we care about.
Some(trait_id)
if matches!(
cx.tcx.get_diagnostic_name(trait_id),
Some(sym::Borrow | sym::Clone | sym::Deref)
) =>
{
(trait_id, did)
}
_ => return,
},
_ => return,
};
let substs = cx.typeck_results().node_substs(expr.hir_id);
if substs.definitely_needs_subst(cx.tcx) {
// We can't resolve on types that require monomorphization, so we don't handle them if
// we need to perfom substitution.
return;
}
let param_env = cx.tcx.param_env(trait_id);
// Resolve the trait method instance.
let i = match ty::Instance::resolve(cx.tcx, param_env, did, substs) {
Ok(Some(i)) => i,
_ => return,
};
// (Re)check that it implements the noop diagnostic.
for s in [sym::noop_method_clone, sym::noop_method_deref, sym::noop_method_borrow].iter() {
if cx.tcx.is_diagnostic_item(*s, i.def_id()) {
let method = &call.ident.name;
let receiver = &elements[0];
let receiver_ty = cx.typeck_results().expr_ty(receiver);
let expr_ty = cx.typeck_results().expr_ty_adjusted(expr);
if receiver_ty != expr_ty {
// This lint will only trigger if the receiver type and resulting expression \
// type are the same, implying that the method call is unnecessary.
return;
}<|fim▁hole|> let note = format!(
"the type `{:?}` which `{}` is being called on is the same as \
the type returned from `{}`, so the method call does not do \
anything and can be removed",
receiver_ty, method, method,
);
let span = expr_span.with_lo(receiver.span.hi());
cx.struct_span_lint(NOOP_METHOD_CALL, span, |lint| {
let method = &call.ident.name;
let message = format!(
"call to `.{}()` on a reference in this situation does nothing",
&method,
);
lint.build(&message)
.span_label(span, "unnecessary method call")
.note(¬e)
.emit()
});
}
}
}
}<|fim▁end|> | let expr_span = expr.span; |
<|file_name|>testAll.js<|end_file_name|><|fim▁begin|>const SafeHarborModules = require('./SafeHarborModules');
const SafeHarborInverters = require('./SafeHarborInverters');
module.exports = () => {
SafeHarborModules();<|fim▁hole|><|fim▁end|> | SafeHarborInverters();
} |
<|file_name|>event.py<|end_file_name|><|fim▁begin|>"""!event [num]: Displays the next upcoming H@B event."""
<|fim▁hole|><|fim▁end|> | __match__ = r"!event( .*)" |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>declare namespace binVersion {
interface Options {
/**
The arguments to pass to `binary` so that it will print its version.
@default ['--version']
*/
args?: string[];
}<|fim▁hole|>
/**
Get the version of a binary in [semver](https://github.com/npm/node-semver) format.
@param binary - The name of or path to the binary to get the version from.
@returns The version of the `binary`.
@example
```
import binVersion = require('bin-version');
(async () => {
// $ curl --version
// curl 7.30.0 (x86_64-apple-darwin13.0)
console.log(await binVersion('curl'));
//=> '7.30.0'
// $ openssl version
// OpenSSL 1.0.2d 9 Jul 2015
console.log(await binVersion('openssl', {args: ['version']}));
//=> '1.0.2'
})();
```
*/
declare function binVersion(
binary: string,
options?: binVersion.Options
): Promise<string>;
export = binVersion;<|fim▁end|> | } |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* External dependencies
*/
import styled from 'styled-components';
import PropTypes from 'prop-types';
import { Popup } from '@googleforcreators/design-system';
import { __ } from '@googleforcreators/i18n';
/**
* Internal dependencies
*/
import { useCanvas, useConfig } from '../../../app';
import useElementsWithLinks from '../../../utils/useElementsWithLinks';
import { OUTLINK_THEME } from '../../../constants';
import DefaultIcon from './icons/defaultIcon.svg';
import ArrowIcon from './icons/arrowBar.svg';
const Wrapper = styled.div`
position: absolute;
display: flex;
align-items: center;
justify-content: flex-end;
flex-direction: column;
bottom: 0;
height: 20%;
width: 100%;
color: ${({ theme }) => theme.colors.standard.white};
z-index: 3;
`;
const Guideline = styled.div`
mix-blend-mode: difference;
position: absolute;
height: 1px;
bottom: 20%;
width: 100%;
background-image: ${({ theme }) =>
`linear-gradient(to right, ${theme.colors.standard.black} 50%, ${theme.colors.standard.white} 0%)`};
background-position: top;
background-size: 16px 0.5px;
background-repeat: repeat-x;
z-index: 3;
`;
// The CSS here is based on how it's displayed in the front-end, including static
// font-size, line-height, etc. independent of the viewport size -- it's not responsive.
const ArrowBar = styled(ArrowIcon)`
display: block;
cursor: pointer;
margin-bottom: 10px;
filter: drop-shadow(0px 2px 6px rgba(0, 0, 0, 0.3));
width: 20px;
height: 8px;
`;
const OutlinkChip = styled.div`
height: 36px;
display: flex;
position: relative;
padding: 10px 6px;
margin: 0 0 20px;
max-width: calc(100% - 64px);
border-radius: 30px;
place-items: center;
box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.15);
background: ${({ bgColor }) => bgColor};
`;
const TextWrapper = styled.span`
font-family: Roboto, sans-serif;
font-size: 16px;
line-height: 18px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
position: relative;
padding-inline-start: 6px;
padding-inline-end: 8px;
height: 16px;
letter-spacing: 0.3px;
font-weight: 700;
max-width: 210px;
color: ${({ fgColor }) => fgColor};
`;
const Tooltip = styled.div`
background-color: ${({ theme }) => theme.colors.standard.black};
color: ${({ theme }) => theme.colors.standard.white};
width: 200px;
padding: 8px;
font-size: 14px;
border-radius: 4px;
text-align: center;
`;
const LinkImage = styled.div`
height: 24px;
width: 24px;
vertical-align: middle;
background-size: cover;
background-repeat: no-repeat;
background-position: 50%;
border-radius: 50%;
background-image: url('${({ icon }) => icon}') !important;
`;
const spacing = { x: 8 };
<|fim▁hole|>const LIGHT_COLOR = '#FFFFFF';
const DARK_COLOR = '#000000';
function PageAttachment({ pageAttachment = {} }) {
const {
displayLinkGuidelines,
pageAttachmentContainer,
setPageAttachmentContainer,
} = useCanvas((state) => ({
displayLinkGuidelines: state.state.displayLinkGuidelines,
pageAttachmentContainer: state.state.pageAttachmentContainer,
setPageAttachmentContainer: state.actions.setPageAttachmentContainer,
}));
const { hasInvalidLinkSelected } = useElementsWithLinks();
const {
ctaText = __('Learn more', 'web-stories'),
url,
icon,
theme,
} = pageAttachment;
const { isRTL, styleConstants: { topOffset } = {} } = useConfig();
const bgColor = theme === OUTLINK_THEME.DARK ? DARK_COLOR : LIGHT_COLOR;
const fgColor = theme === OUTLINK_THEME.DARK ? LIGHT_COLOR : DARK_COLOR;
return (
<>
{(displayLinkGuidelines || hasInvalidLinkSelected) && <Guideline />}
<Wrapper role="presentation" ref={setPageAttachmentContainer}>
{url?.length > 0 && (
<>
<ArrowBar fill={bgColor} />
<OutlinkChip bgColor={bgColor}>
{icon ? (
<LinkImage icon={icon} />
) : (
<DefaultIcon fill={fgColor} width={24} height={24} />
)}
<TextWrapper fgColor={fgColor}>{ctaText}</TextWrapper>
</OutlinkChip>
{pageAttachmentContainer && hasInvalidLinkSelected && (
<Popup
isRTL={isRTL}
anchor={{ current: pageAttachmentContainer }}
isOpen
placement={'left'}
spacing={spacing}
topOffset={topOffset}
>
<Tooltip>
{__(
'Links can not reside below the dashed line when a page attachment is present. Your viewers will not be able to click on the link.',
'web-stories'
)}
</Tooltip>
</Popup>
)}
</>
)}
</Wrapper>
</>
);
}
PageAttachment.propTypes = {
pageAttachment: PropTypes.shape({
url: PropTypes.string,
ctaText: PropTypes.string,
}),
};
export default PageAttachment;<|fim▁end|> | |
<|file_name|>0076_articlepage_video_document.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
<|fim▁hole|> ('articles', '0075_auto_20151015_2022'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='video_document',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', null=True),
),
]<|fim▁end|> | dependencies = [
('wagtaildocs', '0003_add_verbose_names'), |
<|file_name|>LinePathAnnotation.tsx<|end_file_name|><|fim▁begin|>import React from 'react';
import cx from 'classnames';
import { Point } from '@visx/point';
import { Group } from '@visx/group';
import { LinePath } from '@visx/shape';
interface SimplePoint {
x: number;
y: number;
}
export type LinePathAnnotationProps = {
/**
* A top pixel offset applied to the entire bar group.
*/
top?: number;
/**
* A left pixel offset applied to the entire bar group.
*/
left?: number;
/**
* An array of points describing the line path.
*/
points?: (Point | SimplePoint)[];
/**
* The color of the line.
*/
stroke?: string;
/**
* The pixel width of the line.
*/
strokeWidth?: number;
/**
* Add a class name to the line path.
*/
className?: string;
/**
* The text for your label.
*/
label?: string;
/**
* The label's textAnchor.
*/
labelAnchor?: 'start' | 'middle' | 'end';
/**
* The x-coordinate shift to the label.
*/
labelDx?: number;
/**
* The y-coordinate shift to the label
*/
labelDy?: number;
/**
* The color of label. Defaults to *props*.**stroke**.
*/
labelFill?: string;
/**
* The font size of the label text.
*/
labelFontSize?: number;
/**
* The color of the label.
*/
labelStroke?: string;
/**
* The stroke width of the label text.
*/
labelStrokeWidth?: number;
/**
* The label's SVG [paint-order](https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/paint-order).
*/
labelPaintOrder?: string;
};
export default function LinePathAnnotation({
top = 0,
left = 0,
points = [],
stroke = 'black',
strokeWidth = 1,
className,
label,
labelAnchor = 'middle',
labelDx = 0,
labelDy = 0,
labelFill,
labelFontSize = 10,
labelStroke = 'white',
labelStrokeWidth = 3,
labelPaintOrder = 'stroke',
}: LinePathAnnotationProps) {
const endPoint = points[points.length - 1];<|fim▁hole|> data={points}
x={p => p.x}
y={p => p.y}
stroke={stroke}
strokeWidth={strokeWidth}
/>
{label && endPoint && (
<text
x={endPoint.x}
y={endPoint.y}
dx={labelDx}
dy={labelDy}
fontSize={labelFontSize}
fill={labelFill || stroke}
stroke={labelStroke}
strokeWidth={labelStrokeWidth}
textAnchor={labelAnchor}
paintOrder={labelPaintOrder}
>
{label}
</text>
)}
</Group>
);
}<|fim▁end|> | return (
<Group className="visx-line-path-annotation-group" top={top} left={left}>
<LinePath<Point | SimplePoint>
className={cx('visx-line-path-annotation', className)} |
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|>import datetime
import commonware.log
import django_tables as tables
import jinja2
from django.conf import settings
from django.template import Context, loader
from django.utils.datastructures import SortedDict
from jingo import register
from tower import ugettext as _, ugettext_lazy as _lazy, ungettext as ngettext
import amo
from access import acl
from addons.helpers import new_context
from addons.models import Addon
from amo.helpers import absolutify, breadcrumbs, page_title
from amo.urlresolvers import reverse
from amo.utils import send_mail as amo_send_mail
from editors.models import (ReviewerScore, ViewFastTrackQueue,
ViewFullReviewQueue, ViewPendingQueue,
ViewPreliminaryQueue)
from editors.sql_table import SQLTable
@register.function
def file_compare(file_obj, version):
    # Pick the file from `version` best matching `file_obj`'s platform:
    # same platform first, then the all-platforms file, then any file.
    # Compare this file to the one in the version with same platform
    file_obj = version.files.filter(platform=file_obj.platform)
    # If not there, just compare to all.
    if not file_obj:
        file_obj = version.files.filter(platform=amo.PLATFORM_ALL.id)
    # At this point we've got no idea what Platform file to
    # compare with, so just chose the first.
    if not file_obj:
        file_obj = version.files.all()
    # NOTE(review): raises IndexError when the version has no files at all
    # -- verify callers only pass versions with at least one file.
    return file_obj[0]
@register.function
def file_review_status(addon, file):
    """Return a localized, human-readable review-status label for `file`."""
    # Files that are neither disabled nor public are still in a queue; which
    # queue label applies depends on the add-on's overall status.
    if file.status not in [amo.STATUS_DISABLED, amo.STATUS_PUBLIC]:
        if addon.status in [amo.STATUS_UNREVIEWED, amo.STATUS_LITE]:
            return _(u'Pending Preliminary Review')
        elif addon.status in [amo.STATUS_NOMINATED,
                              amo.STATUS_LITE_AND_NOMINATED,
                              amo.STATUS_PUBLIC]:
            return _(u'Pending Full Review')
    if file.status in [amo.STATUS_DISABLED, amo.STATUS_REJECTED]:
        # A reviewed timestamp means an editor actively rejected the file.
        if file.reviewed is not None:
            return _(u'Rejected')
        # Can't assume that if the reviewed date is missing its
        # unreviewed. Especially for versions.
        else:
            return _(u'Rejected or Unreviewed')
    # Fall back to the generic status label.
    return amo.STATUS_CHOICES[file.status]
@register.function
def version_status(addon, version):
    """Join each status of `version` into a comma-separated display string."""
    return ','.join(unicode(s) for s in version.status)
@register.function
@jinja2.contextfunction
def editor_page_title(context, title=None, addon=None):
    """Wrapper for editor page titles. Eerily similar to dev_page_title."""
    if addon:
        # "<title> :: <add-on name>" when a specific add-on is in context.
        title = u'%s :: %s' % (title, addon.name)
    else:
        # Otherwise suffix with the section name, or use the bare section
        # when no title was given.
        section = _lazy('Editor Tools')
        title = u'%s :: %s' % (title, section) if title else section
    return page_title(context, title)
@register.function
@jinja2.contextfunction
def editors_breadcrumbs(context, queue=None, addon_queue=None, items=None,
themes=False):
"""
Wrapper function for ``breadcrumbs``. Prepends 'Editor Tools'
breadcrumbs.
**items**
list of [(url, label)] to be inserted after Add-on.
**addon_queue**
Addon object. This sets the queue by addon type or addon status.
**queue**
Explicit queue type to set.
"""
crumbs = [(reverse('editors.home'), _('Editor Tools'))]
if themes:
crumbs.append((reverse('editors.themes.home'), _('Themes')))
if addon_queue:
queue_id = addon_queue.status
queue_ids = {amo.STATUS_UNREVIEWED: 'prelim',
amo.STATUS_NOMINATED: 'nominated',
amo.STATUS_PUBLIC: 'pending',
amo.STATUS_LITE: 'prelim',
amo.STATUS_LITE_AND_NOMINATED: 'nominated',
amo.STATUS_PENDING: 'pending'}
queue = queue_ids.get(queue_id, 'queue')<|fim▁hole|> if queue:
queues = {
'queue': _('Queue'),
'pending': _('Pending Updates'),
'nominated': _('Full Reviews'),
'prelim': _('Preliminary Reviews'),
'moderated': _('Moderated Reviews'),
'fast_track': _('Fast Track'),
'pending_themes': _('Pending Themes'),
'flagged_themes': _('Flagged Themes'),
'rereview_themes': _('Update Themes'),
}
if items and not queue == 'queue':
url = reverse('editors.queue_%s' % queue)
else:
# The Addon is the end of the trail.
url = None
crumbs.append((url, queues[queue]))
if items:
crumbs.extend(items)
return breadcrumbs(context, crumbs, add_default=False)
@register.function
@jinja2.contextfunction
def queue_tabnav(context):
"""Returns tuple of tab navigation for the queue pages.
Each tuple contains three elements: (tab_code, page_url, tab_text)
"""
from .views import queue_counts
counts = queue_counts()
tabnav = [('fast_track', 'queue_fast_track',
(ngettext('Fast Track ({0})', 'Fast Track ({0})',
counts['fast_track'])
.format(counts['fast_track']))),
('nominated', 'queue_nominated',
(ngettext('Full Review ({0})', 'Full Reviews ({0})',
counts['nominated'])
.format(counts['nominated']))),
('pending', 'queue_pending',
(ngettext('Pending Update ({0})', 'Pending Updates ({0})',
counts['pending'])
.format(counts['pending']))),
('prelim', 'queue_prelim',
(ngettext('Preliminary Review ({0})',
'Preliminary Reviews ({0})',
counts['prelim'])
.format(counts['prelim']))),
('moderated', 'queue_moderated',
(ngettext('Moderated Review ({0})', 'Moderated Reviews ({0})',
counts['moderated'])
.format(counts['moderated'])))]
return tabnav
@register.inclusion_tag('editors/includes/reviewers_score_bar.html')
@jinja2.contextfunction
def reviewers_score_bar(context, types=None, addon_type=None):
user = context.get('amo_user')
return new_context(dict(
request=context.get('request'),
amo=amo, settings=settings,
points=ReviewerScore.get_recent(user, addon_type=addon_type),
total=ReviewerScore.get_total(user),
**ReviewerScore.get_leaderboards(user, types=types,
addon_type=addon_type)))
class ItemStateTable(object):
def increment_item(self):
self.item_number += 1
def set_page(self, page):
self.item_number = page.start_index()
class EditorQueueTable(SQLTable, ItemStateTable):
addon_name = tables.Column(verbose_name=_lazy(u'Addon'))
addon_type_id = tables.Column(verbose_name=_lazy(u'Type'))
waiting_time_min = tables.Column(verbose_name=_lazy(u'Waiting Time'))
flags = tables.Column(verbose_name=_lazy(u'Flags'), sortable=False)
applications = tables.Column(verbose_name=_lazy(u'Applications'),
sortable=False)
platforms = tables.Column(verbose_name=_lazy(u'Platforms'),
sortable=False)
additional_info = tables.Column(
verbose_name=_lazy(u'Additional'), sortable=False)
def render_addon_name(self, row):
url = '%s?num=%s' % (reverse('editors.review',
args=[row.addon_slug]),
self.item_number)
self.increment_item()
return u'<a href="%s">%s <em>%s</em></a>' % (
url, jinja2.escape(row.addon_name),
jinja2.escape(row.latest_version))
def render_addon_type_id(self, row):
return amo.ADDON_TYPE[row.addon_type_id]
def render_additional_info(self, row):
info = []
if row.is_site_specific:
info.append(_lazy(u'Site Specific'))
if row.external_software:
info.append(_lazy(u'Requires External Software'))
if row.binary or row.binary_components:
info.append(_lazy(u'Binary Components'))
return u', '.join([jinja2.escape(i) for i in info])
def render_applications(self, row):
# TODO(Kumar) show supported version ranges on hover (if still needed)
icon = u'<div class="app-icon ed-sprite-%s" title="%s"></div>'
return u''.join([icon % (amo.APPS_ALL[i].short, amo.APPS_ALL[i].pretty)
for i in row.application_ids])
def render_platforms(self, row):
icons = []
html = u'<div class="platform-icon plat-sprite-%s" title="%s"></div>'
for platform in row.file_platform_ids:
icons.append(html % (amo.PLATFORMS[int(platform)].shortname,
amo.PLATFORMS[int(platform)].name))
return u''.join(icons)
def render_flags(self, row):
return ''.join(u'<div class="app-icon ed-sprite-%s" '
u'title="%s"></div>' % flag
for flag in row.flags)
def render_waiting_time_min(self, row):
if row.waiting_time_min == 0:
r = _lazy('moments ago')
elif row.waiting_time_hours == 0:
# L10n: first argument is number of minutes
r = ngettext(u'{0} minute', u'{0} minutes',
row.waiting_time_min).format(row.waiting_time_min)
elif row.waiting_time_days == 0:
# L10n: first argument is number of hours
r = ngettext(u'{0} hour', u'{0} hours',
row.waiting_time_hours).format(row.waiting_time_hours)
else:
# L10n: first argument is number of days
r = ngettext(u'{0} day', u'{0} days',
row.waiting_time_days).format(row.waiting_time_days)
return jinja2.escape(r)
@classmethod
def translate_sort_cols(cls, colname):
legacy_sorts = {
'name': 'addon_name',
'age': 'waiting_time_min',
'type': 'addon_type_id',
}
return legacy_sorts.get(colname, colname)
@classmethod
def default_order_by(cls):
return '-waiting_time_min'
@classmethod
def review_url(cls, row):
return reverse('editors.review', args=[row.addon_slug])
class Meta:
sortable = True
columns = ['addon_name', 'addon_type_id', 'waiting_time_min',
'flags', 'applications', 'additional_info']
class ViewPendingQueueTable(EditorQueueTable):
class Meta(EditorQueueTable.Meta):
model = ViewPendingQueue
class ViewFullReviewQueueTable(EditorQueueTable):
class Meta(EditorQueueTable.Meta):
model = ViewFullReviewQueue
class ViewPreliminaryQueueTable(EditorQueueTable):
class Meta(EditorQueueTable.Meta):
model = ViewPreliminaryQueue
class ViewFastTrackQueueTable(EditorQueueTable):
class Meta(EditorQueueTable.Meta):
model = ViewFastTrackQueue
log = commonware.log.getLogger('z.mailer')
NOMINATED_STATUSES = (amo.STATUS_NOMINATED, amo.STATUS_LITE_AND_NOMINATED)
PRELIMINARY_STATUSES = (amo.STATUS_UNREVIEWED, amo.STATUS_LITE)
PENDING_STATUSES = (amo.STATUS_BETA, amo.STATUS_DISABLED, amo.STATUS_NULL,
amo.STATUS_PENDING, amo.STATUS_PUBLIC)
def send_mail(template, subject, emails, context, perm_setting=None):
template = loader.get_template(template)
amo_send_mail(subject, template.render(Context(context, autoescape=False)),
recipient_list=emails, from_email=settings.EDITORS_EMAIL,
use_blacklist=False, perm_setting=perm_setting)
@register.function
def get_position(addon):
if addon.is_persona() and addon.is_pending():
qs = (Addon.objects.filter(status=amo.STATUS_PENDING,
type=amo.ADDON_PERSONA)
.no_transforms().order_by('created')
.values_list('id', flat=True))
id_ = addon.id
position = 0
for idx, addon_id in enumerate(qs, start=1):
if addon_id == id_:
position = idx
break
total = qs.count()
return {'pos': position, 'total': total}
else:
version = addon.latest_version
if not version:
return False
q = version.current_queue
if not q:
return False
mins_query = q.objects.filter(id=addon.id)
if mins_query.count() > 0:
mins = mins_query[0].waiting_time_min
pos = q.objects.having('waiting_time_min >=', mins).count()
total = q.objects.count()
return dict(mins=mins, pos=pos, total=total)
return False
class ReviewHelper:
"""
A class that builds enough to render the form back to the user and
process off to the correct handler.
"""
def __init__(self, request=None, addon=None, version=None):
self.handler = None
self.required = {}
self.addon = addon
self.all_files = version.files.all() if version else []
self.get_review_type(request, addon, version)
self.actions = self.get_actions()
def set_data(self, data):
self.handler.set_data(data)
def get_review_type(self, request, addon, version):
if self.addon.status in NOMINATED_STATUSES:
self.review_type = 'nominated'
self.handler = ReviewAddon(request, addon, version, 'nominated')
elif self.addon.status == amo.STATUS_UNREVIEWED:
self.review_type = 'preliminary'
self.handler = ReviewAddon(request, addon, version, 'preliminary')
elif self.addon.status == amo.STATUS_LITE:
self.review_type = 'preliminary'
self.handler = ReviewFiles(request, addon, version, 'preliminary')
else:
self.review_type = 'pending'
self.handler = ReviewFiles(request, addon, version, 'pending')
def get_actions(self):
labels, details = self._review_actions()
actions = SortedDict()
if self.review_type != 'preliminary':
actions['public'] = {'method': self.handler.process_public,
'minimal': False,
'label': _lazy('Push to public')}
actions['prelim'] = {'method': self.handler.process_preliminary,
'label': labels['prelim'],
'minimal': False}
actions['reject'] = {'method': self.handler.process_sandbox,
'label': _lazy('Reject'),
'minimal': False}
actions['info'] = {'method': self.handler.request_information,
'label': _lazy('Request more information'),
'minimal': True}
actions['super'] = {'method': self.handler.process_super_review,
'label': _lazy('Request super-review'),
'minimal': True}
actions['comment'] = {'method': self.handler.process_comment,
'label': _lazy('Comment'),
'minimal': True}
for k, v in actions.items():
v['details'] = details.get(k)
return actions
def _review_actions(self):
labels = {'prelim': _lazy('Grant preliminary review')}
details = {'prelim': _lazy('This will mark the files as '
'preliminarily reviewed.'),
'info': _lazy('Use this form to request more information '
'from the author. They will receive an email '
'and be able to answer here. You will be '
'notified by email when they reply.'),
'super': _lazy('If you have concerns about this add-on\'s '
'security, copyright issues, or other '
'concerns that an administrator should look '
'into, enter your comments in the area '
'below. They will be sent to '
'administrators, not the author.'),
'reject': _lazy('This will reject the add-on and remove '
'it from the review queue.'),
'comment': _lazy('Make a comment on this version. The '
'author won\'t be able to see this.')}
if self.addon.status == amo.STATUS_LITE:
details['reject'] = _lazy('This will reject the files and remove '
'them from the review queue.')
if self.addon.status in (amo.STATUS_UNREVIEWED, amo.STATUS_NOMINATED):
details['prelim'] = _lazy('This will mark the add-on as '
'preliminarily reviewed. Future '
'versions will undergo '
'preliminary review.')
elif self.addon.status == amo.STATUS_LITE:
details['prelim'] = _lazy('This will mark the files as '
'preliminarily reviewed. Future '
'versions will undergo '
'preliminary review.')
elif self.addon.status == amo.STATUS_LITE_AND_NOMINATED:
labels['prelim'] = _lazy('Retain preliminary review')
details['prelim'] = _lazy('This will retain the add-on as '
'preliminarily reviewed. Future '
'versions will undergo preliminary '
'review.')
if self.review_type == 'pending':
details['public'] = _lazy('This will approve a sandboxed version '
'of a public add-on to appear on the '
'public side.')
details['reject'] = _lazy('This will reject a version of a public '
'add-on and remove it from the queue.')
else:
details['public'] = _lazy('This will mark the add-on and its most '
'recent version and files as public. '
'Future versions will go into the '
'sandbox until they are reviewed by an '
'editor.')
return labels, details
def process(self):
action = self.handler.data.get('action', '')
if not action:
raise NotImplementedError
return self.actions[action]['method']()
class ReviewBase(object):
    """Shared helpers for processing an editor review of an add-on version.

    Subclasses decide whether the review acts on the whole add-on
    (ReviewAddon) or on a reviewer-selected set of files (ReviewFiles).

    NOTE(review): self.data is expected to be populated via the subclass
    set_data() before any process_* / notify method runs -- confirm with
    callers.
    """

    def __init__(self, request, addon, version, review_type):
        self.request = request
        self.user = self.request.user
        self.addon = addon
        self.version = version
        self.review_type = review_type
        # Filled in later by subclasses' set_data().
        self.files = None

    def set_addon(self, **kw):
        """Alters addon and sets reviewed timestamp on version."""
        self.addon.update(**kw)
        self.version.update(reviewed=datetime.datetime.now())

    def set_files(self, status, files, copy_to_mirror=False,
                  hide_disabled_file=False):
        """Change the files to be the new status
        and copy, remove from the mirror as appropriate."""
        for file in files:
            file.datestatuschanged = datetime.datetime.now()
            file.reviewed = datetime.datetime.now()
            if copy_to_mirror:
                file.copy_to_mirror()
            if hide_disabled_file:
                file.hide_disabled_file()
            file.status = status
            file.save()

    def log_action(self, action):
        # Record the review action in the activity log, including the
        # affected files/version when known.
        details = {'comments': self.data['comments'],
                   'reviewtype': self.review_type}
        if self.files:
            details['files'] = [f.id for f in self.files]
        if self.version:
            details['version'] = self.version.version
        amo.log(action, self.addon, self.version, user=self.user,
                created=datetime.datetime.now(), details=details)

    def notify_email(self, template, subject):
        """Notify the authors that their addon has been reviewed."""
        emails = [a.email for a in self.addon.authors.all()]
        data = self.data.copy()
        data.update(self.get_context_data())
        data['tested'] = ''
        # Build a human-readable "Tested on/with" line from whichever of
        # the two optional fields the reviewer filled in.
        os, app = data.get('operating_systems'), data.get('applications')
        if os and app:
            data['tested'] = 'Tested on %s with %s' % (os, app)
        elif os and not app:
            data['tested'] = 'Tested on %s' % os
        elif not os and app:
            data['tested'] = 'Tested with %s' % app
        data['addon_type'] = (_lazy('add-on'))
        send_mail('editors/emails/%s.ltxt' % template,
                  subject % (self.addon.name, self.version.version),
                  emails, Context(data), perm_setting='editor_reviewed')

    def get_context_data(self):
        # Template context shared by all review notification emails.
        return {'name': self.addon.name,
                'number': self.version.version,
                'reviewer': (self.request.user.display_name),
                'addon_url': absolutify(
                    self.addon.get_url_path(add_prefix=False)),
                'review_url': absolutify(reverse('editors.review',
                                                 args=[self.addon.pk],
                                                 add_prefix=False)),
                'comments': self.data['comments'],
                'SITE_URL': settings.SITE_URL}

    def request_information(self):
        """Send a request for information to the authors."""
        emails = [a.email for a in self.addon.authors.all()]
        self.log_action(amo.LOG.REQUEST_INFORMATION)
        # Flag the version so the queue shows an outstanding info request.
        self.version.update(has_info_request=True)
        log.info(u'Sending request for information for %s to %s' %
                 (self.addon, emails))
        send_mail('editors/emails/info.ltxt',
                  u'Mozilla Add-ons: %s %s' %
                  (self.addon.name, self.version.version),
                  emails, Context(self.get_context_data()),
                  perm_setting='individual_contact')

    def send_super_mail(self):
        # Escalate the add-on to the senior editors for a "super review".
        self.log_action(amo.LOG.REQUEST_SUPER_REVIEW)
        log.info(u'Super review requested for %s' % (self.addon))
        send_mail('editors/emails/super_review.ltxt',
                  u'Super review requested: %s' % (self.addon.name),
                  [settings.SENIOR_EDITORS_EMAIL],
                  Context(self.get_context_data()))

    def process_comment(self):
        # A plain editor comment: flag the version and log it; no email.
        self.version.update(has_editor_comment=True)
        self.log_action(amo.LOG.COMMENT_VERSION)
class ReviewAddon(ReviewBase):
    """Review actions that operate on an add-on and its current version."""

    def __init__(self, *args, **kwargs):
        super(ReviewAddon, self).__init__(*args, **kwargs)
        # A LITE_AND_NOMINATED add-on going through the 'nominated' queue
        # is upgrading from preliminary to full review.
        self.is_upgrade = (self.addon.status == amo.STATUS_LITE_AND_NOMINATED
                           and self.review_type == 'nominated')

    def set_data(self, data):
        # Acts on all files of the version under review.
        self.data = data
        self.files = self.version.files.all()

    def process_public(self):
        """Set an addon to public."""
        if self.review_type == 'preliminary':
            raise AssertionError('Preliminary addons cannot be made public.')
        # Hold onto the status before we change it.
        status = self.addon.status
        # Save files first, because set_addon checks to make sure there
        # is at least one public file or it won't make the addon public.
        self.set_files(amo.STATUS_PUBLIC, self.version.files.all(),
                       copy_to_mirror=True)
        self.set_addon(highest_status=amo.STATUS_PUBLIC,
                       status=amo.STATUS_PUBLIC)
        self.log_action(amo.LOG.APPROVE_VERSION)
        self.notify_email('%s_to_public' % self.review_type,
                          u'Mozilla Add-ons: %s %s Fully Reviewed')
        log.info(u'Making %s public' % (self.addon))
        log.info(u'Sending email for %s' % (self.addon))
        # Assign reviewer incentive scores.
        ReviewerScore.award_points(self.request.amo_user, self.addon, status)

    def process_sandbox(self):
        """Set an addon back to sandbox."""
        # Hold onto the status before we change it.
        status = self.addon.status
        # Drop to NULL unless some other version still has reviewed files,
        # in which case fall back to preliminary (LITE).
        if (not self.is_upgrade or
            not self.addon.versions.exclude(id=self.version.id)
                    .filter(files__status__in=amo.REVIEWED_STATUSES)):
            self.set_addon(status=amo.STATUS_NULL)
        else:
            self.set_addon(status=amo.STATUS_LITE)
        self.set_files(amo.STATUS_DISABLED, self.version.files.all(),
                       hide_disabled_file=True)
        self.log_action(amo.LOG.REJECT_VERSION)
        self.notify_email('%s_to_sandbox' % self.review_type,
                          u'Mozilla Add-ons: %s %s Rejected')
        log.info(u'Making %s disabled' % (self.addon))
        log.info(u'Sending email for %s' % (self.addon))
        # Assign reviewer incentive scores.
        ReviewerScore.award_points(self.request.amo_user, self.addon, status)

    def process_preliminary(self):
        """Set an addon to preliminary."""
        # Hold onto the status before we change it.
        status = self.addon.status
        changes = {'status': amo.STATUS_LITE}
        if (self.addon.status in (amo.STATUS_PUBLIC,
                                  amo.STATUS_LITE_AND_NOMINATED)):
            changes['highest_status'] = amo.STATUS_LITE
        # NOTE(review): statement nesting reconstructed from mangled
        # indentation; the template assignments are treated as
        # unconditional/top-level -- confirm against upstream.
        template = '%s_to_preliminary' % self.review_type
        if (self.review_type == 'preliminary' and
                self.addon.status == amo.STATUS_LITE_AND_NOMINATED):
            template = 'nominated_to_nominated'
        self.set_addon(**changes)
        self.set_files(amo.STATUS_LITE, self.version.files.all(),
                       copy_to_mirror=True)
        self.log_action(amo.LOG.PRELIMINARY_VERSION)
        self.notify_email(template,
                          u'Mozilla Add-ons: %s %s Preliminary Reviewed')
        log.info(u'Making %s preliminary' % (self.addon))
        log.info(u'Sending email for %s' % (self.addon))
        # Assign reviewer incentive scores.
        ReviewerScore.award_points(self.request.amo_user, self.addon, status)

    def process_super_review(self):
        """Give an addon super review."""
        self.addon.update(admin_review=True)
        self.notify_email('author_super_review',
                          u'Mozilla Add-ons: %s %s flagged for Admin Review')
        self.send_super_mail()
class ReviewFiles(ReviewBase):
    """Review actions that operate on a reviewer-selected subset of files.

    The files acted on come from the submitted form data ('addon_files')
    rather than from the version's full file set.
    """

    def set_data(self, data):
        self.data = data
        self.files = data.get('addon_files', None)

    def process_public(self):
        """Set an addons files to public."""
        if self.review_type == 'preliminary':
            raise AssertionError('Preliminary addons cannot be made public.')
        # Hold onto the status before we change it.
        status = self.addon.status
        self.set_files(amo.STATUS_PUBLIC, self.data['addon_files'],
                       copy_to_mirror=True)
        self.log_action(amo.LOG.APPROVE_VERSION)
        self.notify_email('%s_to_public' % self.review_type,
                          u'Mozilla Add-ons: %s %s Fully Reviewed')
        log.info(u'Making %s files %s public' %
                 (self.addon,
                  ', '.join([f.filename for f in self.data['addon_files']])))
        log.info(u'Sending email for %s' % (self.addon))
        # Assign reviewer incentive scores.
        ReviewerScore.award_points(self.request.amo_user, self.addon, status)

    def process_sandbox(self):
        """Set an addons files to sandbox."""
        # Hold onto the status before we change it.
        status = self.addon.status
        self.set_files(amo.STATUS_DISABLED, self.data['addon_files'],
                       hide_disabled_file=True)
        self.log_action(amo.LOG.REJECT_VERSION)
        self.notify_email('%s_to_sandbox' % self.review_type,
                          u'Mozilla Add-ons: %s %s Rejected')
        log.info(u'Making %s files %s disabled' %
                 (self.addon,
                  ', '.join([f.filename for f in self.data['addon_files']])))
        log.info(u'Sending email for %s' % (self.addon))
        # Assign reviewer incentive scores.
        ReviewerScore.award_points(self.request.amo_user, self.addon, status)

    def process_preliminary(self):
        """Set an addons files to preliminary."""
        # Hold onto the status before we change it.
        status = self.addon.status
        self.set_files(amo.STATUS_LITE, self.data['addon_files'],
                       copy_to_mirror=True)
        self.log_action(amo.LOG.PRELIMINARY_VERSION)
        self.notify_email('%s_to_preliminary' % self.review_type,
                          u'Mozilla Add-ons: %s %s Preliminary Reviewed')
        log.info(u'Making %s files %s preliminary' %
                 (self.addon,
                  ', '.join([f.filename for f in self.data['addon_files']])))
        log.info(u'Sending email for %s' % (self.addon))
        # Assign reviewer incentive scores.
        ReviewerScore.award_points(self.request.amo_user, self.addon, status)

    def process_super_review(self):
        """Give an addon super review when preliminary."""
        self.addon.update(admin_review=True)
        self.notify_email('author_super_review',
                          u'Mozilla Add-ons: %s %s flagged for Admin Review')
        self.send_super_mail()
@register.function
@jinja2.contextfunction
def logs_tabnav_themes(context):
    """Return tab navigation entries for the theme log pages.

    Each entry is a (named url, tab_code, tab_text) tuple.
    """
    tabs = [('editors.themes.logs', 'themes', _('Reviews'))]
    request = context['request']
    if acl.action_allowed(request, 'SeniorPersonasTools', 'View'):
        tabs.append(('editors.themes.deleted', 'deleted', _('Deleted')))
    return tabs
@register.function
@jinja2.contextfunction
def queue_tabnav_themes(context):
    """Similar to queue_tabnav, but for themes."""
    # Each entry is a (named url, tab_code, tab_text) tuple.
    tabs = []
    if acl.action_allowed(context['request'], 'Personas', 'Review'):
        tabs.append((
            'editors.themes.list', 'pending_themes', _('Pending'),
        ))
    if acl.action_allowed(context['request'], 'SeniorPersonasTools', 'View'):
        tabs.append((
            'editors.themes.list_flagged', 'flagged_themes', _('Flagged'),
        ))
        # NOTE(review): source indentation was mangled; the rereview tab is
        # grouped under the senior-tools check here following the textual
        # order -- confirm against upstream before relying on it.
        tabs.append((
            'editors.themes.list_rereview', 'rereview_themes',
            _('Updates'),
        ))
    return tabs
@register.function
@jinja2.contextfunction
def queue_tabnav_themes_interactive(context):
    """Tabnav for the interactive shiny theme queues."""
    # Each entry is a (named url, tab_code, tab_text) tuple.
    tabs = []
    if acl.action_allowed(context['request'], 'Personas', 'Review'):
        tabs.append((
            'editors.themes.queue_themes', 'pending', _('Pending'),
        ))
    if acl.action_allowed(context['request'], 'SeniorPersonasTools', 'View'):
        tabs.append((
            'editors.themes.queue_flagged', 'flagged', _('Flagged'),
        ))
        # NOTE(review): source indentation was mangled; the rereview tab is
        # grouped under the senior-tools check here following the textual
        # order -- confirm against upstream before relying on it.
        tabs.append((
            'editors.themes.queue_rereview', 'rereview', _('Updates'),
        ))
    return tabs
@register.function
@jinja2.contextfunction
def is_expired_lock(context, lock):
    """Return True when the given review lock's expiry time has passed."""
    now = datetime.datetime.now()
    return now > lock.expiry
<|file_name|>ColorPickerDialog.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.appyvet.rangebarsample.colorpicker;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ProgressBar;
import com.appyvet.rangebarsample.Component;
import com.appyvet.rangebarsample.R;
/**
* A dialog which takes in as input an array of colors and creates a palette allowing the user to
* select a specific color swatch, which invokes a listener.
*/
public class ColorPickerDialog extends DialogFragment implements ColorPickerSwatch.OnSwatchColorSelectedListener {
/**
* Interface for a callback when a color square is selected.
*/
public interface OnColorSelectedListener {
/**
* Called when a specific color square has been selected.
*/
public void onColorSelected(int color, Component component);
}
public static final int SIZE_LARGE = 1;
public static final int SIZE_SMALL = 2;
protected AlertDialog mAlertDialog;
protected static final String KEY_TITLE_ID = "title_id";
protected static final String KEY_COLORS = "colors";
protected static final String KEY_SELECTED_COLOR = "selected_color";
protected static final String KEY_COLUMNS = "columns";
protected static final String KEY_SIZE = "size";
protected int mTitleResId = R.string.color_picker_default_title;
protected int[] mColors = null;
protected int mSelectedColor;
protected int mColumns;
protected int mSize;
private Component mComponent;
private ColorPickerPalette mPalette;
private ProgressBar mProgress;
protected OnColorSelectedListener mListener;
public ColorPickerDialog() {
// Empty constructor required for dialog fragments.
}
public static ColorPickerDialog newInstance(int titleResId, int[] colors, int selectedColor,
int columns, int size, Component component) {
ColorPickerDialog ret = new ColorPickerDialog();
ret.initialize(titleResId, colors, selectedColor, columns, size, component);
return ret;
}
public void initialize(int titleResId, int[] colors, int selectedColor, int columns, int size, Component component) {
setArguments(titleResId, columns, size);
setColors(colors, selectedColor);
mComponent = component;
}
public void setArguments(int titleResId, int columns, int size) {
Bundle bundle = new Bundle();
bundle.putInt(KEY_TITLE_ID, titleResId);
bundle.putInt(KEY_COLUMNS, columns);
bundle.putInt(KEY_SIZE, size);
setArguments(bundle);
}
public void setOnColorSelectedListener(OnColorSelectedListener listener) {
mListener = listener;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mTitleResId = getArguments().getInt(KEY_TITLE_ID);
mColumns = getArguments().getInt(KEY_COLUMNS);
mSize = getArguments().getInt(KEY_SIZE);
}
if (savedInstanceState != null) {
mColors = savedInstanceState.getIntArray(KEY_COLORS);
mSelectedColor = (Integer) savedInstanceState.getSerializable(KEY_SELECTED_COLOR);
}
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
View view = LayoutInflater.from(getActivity()).inflate(R.layout.color_picker_dialog, null);
mProgress = (ProgressBar) view.findViewById(android.R.id.progress);
mPalette = (ColorPickerPalette) view.findViewById(R.id.color_picker);
mPalette.init(mSize, mColumns, this);
if (mColors != null) {
showPaletteView();
}
mAlertDialog = new AlertDialog.Builder(activity)
.setTitle(mTitleResId)
.setView(view)
.create();
return mAlertDialog;
}
@Override
public void onSwatchColorSelected(int color) {
if (mListener != null) {
mListener.onColorSelected(color, mComponent);
}
if (getTargetFragment() instanceof ColorPickerSwatch.OnSwatchColorSelectedListener) {
final OnColorSelectedListener listener =
(OnColorSelectedListener) getTargetFragment();
listener.onColorSelected(color, mComponent);
}
if (color != mSelectedColor) {
mSelectedColor = color;
// Redraw palette to show checkmark on newly selected color before dismissing.
mPalette.drawPalette(mColors, mSelectedColor);
}
dismiss();
}
public void showPaletteView() {
if (mProgress != null && mPalette != null) {
mProgress.setVisibility(View.GONE);
refreshPalette();
mPalette.setVisibility(View.VISIBLE);
}
}
public void showProgressBarView() {
if (mProgress != null && mPalette != null) {
mProgress.setVisibility(View.VISIBLE);
mPalette.setVisibility(View.GONE);
}
}
public void setColors(int[] colors, int selectedColor) {
if (mColors != colors || mSelectedColor != selectedColor) {
mColors = colors;
mSelectedColor = selectedColor;
refreshPalette();
}
}
public void setColors(int[] colors) {
if (mColors != colors) {
mColors = colors;<|fim▁hole|> public void setSelectedColor(int color) {
if (mSelectedColor != color) {
mSelectedColor = color;
refreshPalette();
}
}
private void refreshPalette() {
if (mPalette != null && mColors != null) {
mPalette.drawPalette(mColors, mSelectedColor);
}
}
public int[] getColors() {
return mColors;
}
public int getSelectedColor() {
return mSelectedColor;
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putIntArray(KEY_COLORS, mColors);
outState.putSerializable(KEY_SELECTED_COLOR, mSelectedColor);
}
}<|fim▁end|> | refreshPalette();
}
}
|
<|file_name|>DictServiceImpl.java<|end_file_name|><|fim▁begin|>package com.cnpc.framework.base.service.impl;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.springframework.stereotype.Service;
import com.alibaba.fastjson.JSON;
<|fim▁hole|>import com.cnpc.framework.base.entity.Dict;
import com.cnpc.framework.base.pojo.TreeNode;
import com.cnpc.framework.base.service.DictService;
import com.cnpc.framework.constant.RedisConstant;
import com.cnpc.framework.utils.StrUtil;
import com.cnpc.framework.utils.TreeUtil;
@Service("dictService")
public class DictServiceImpl extends BaseServiceImpl implements DictService {
@Override
public List<TreeNode> getTreeData() {
// 获取数据
String key = RedisConstant.DICT_PRE+"tree";
List<TreeNode> tnlist = null;
String tnStr = redisDao.get(key);
if(!StrUtil.isEmpty(key)) {
tnlist = JSON.parseArray(tnStr,TreeNode.class);
}
if (tnlist != null) {
return tnlist;
} else {
String hql = "from Dict order by levelCode asc";
List<Dict> dicts = this.find(hql);
Map<String, TreeNode> nodelist = new LinkedHashMap<String, TreeNode>();
for (Dict dict : dicts) {
TreeNode node = new TreeNode();
node.setText(dict.getName());
node.setId(dict.getId());
node.setParentId(dict.getParentId());
node.setLevelCode(dict.getLevelCode());
nodelist.put(node.getId(), node);
}
// 构造树形结构
tnlist = TreeUtil.getNodeList(nodelist);
redisDao.save(key, tnlist);
return tnlist;
}
}
public List<Dict> getDictsByCode(String code) {
String key = RedisConstant.DICT_PRE+ code;
List dicts = redisDao.get(key, List.class);
if (dicts == null) {
String hql = "from Dict where code='" + code + "'";
Dict dict = this.get(hql);
dicts = this.find("from Dict where parentId='" + dict.getId() + "' order by levelCode");
redisDao.add(key, dicts);
return dicts;
} else {
return dicts;
}
}
@Override
public List<TreeNode> getTreeDataByCode(String code) {
// 获取数据
String key = RedisConstant.DICT_PRE + code + "s";
List<TreeNode> tnlist = null;
String tnStr = redisDao.get(key);
if(!StrUtil.isEmpty(key)) {
tnlist = JSON.parseArray(tnStr,TreeNode.class);
}
if (tnlist != null) {
return tnlist;
} else {
String hql = "from Dict where code='" + code + "' order by levelCode asc";
List<Dict> dicts = this.find(hql);
hql = "from Dict where code='" + code + "' or parent_id = '" +dicts.get(0).getId()+ "' order by levelCode asc";
dicts = this.find(hql);
Map<String, TreeNode> nodelist = new LinkedHashMap<String, TreeNode>();
for (Dict dict : dicts) {
TreeNode node = new TreeNode();
node.setText(dict.getName());
node.setId(dict.getId());
node.setParentId(dict.getParentId());
node.setLevelCode(dict.getLevelCode());
nodelist.put(node.getId(), node);
}
// 构造树形结构
tnlist = TreeUtil.getNodeList(nodelist);
redisDao.save(key, tnlist);
return tnlist;
}
}
@Override
public List<TreeNode> getMeasureTreeData() {
// 获取数据
String hql = "from Dict WHERE (levelCode LIKE '000026%' OR levelCode LIKE '000027%') order by levelCode asc";
List<Dict> funcs = this.find(hql);
Map<String, TreeNode> nodelist = new LinkedHashMap<String, TreeNode>();
for (Dict dict : funcs) {
TreeNode node = new TreeNode();
node.setText(dict.getName());
node.setId(dict.getId());
node.setParentId(dict.getParentId());
node.setLevelCode(dict.getLevelCode());
nodelist.put(node.getId(), node);
}
// 构造树形结构
return TreeUtil.getNodeList(nodelist);
}
}<|fim▁end|> | |
<|file_name|>TDEMPlanewave.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import division
import numpy as np
from scipy.constants import mu_0, pi, epsilon_0
import numpy as np
from SimPEG import Utils
def E_field_from_SheetCurruent(XYZ, srcLoc, sig, t, E0=1., orientation='X', kappa=0., epsr=1.):
"""
Computing Analytic Electric fields from Plane wave in a Wholespace
TODO:
Add description of parameters
"""
XYZ = Utils.asArray_N_x_Dim(XYZ, 3)
# Check
if XYZ.shape[0] > 1 & t.shape[0] > 1:
raise Exception("I/O type error: For multiple field locations only a single frequency can be specified.")
mu = mu_0*(1+kappa)
if orientation == "X":<|fim▁hole|> Ex = bunja / bunmo
Ey = np.zeros_like(z)
Ez = np.zeros_like(z)
return Ex, Ey, Ez
else:
raise NotImplementedError()
def H_field_from_SheetCurruent(XYZ, srcLoc, sig, t, E0=1., orientation='X', kappa=0., epsr=1.):
    """Analytic H field of a plane wave propagating downward (negative z,
    depth) from a sheet current source in a homogeneous wholespace.

    Parameters
    ----------
    XYZ : array_like
        Observation locations, reshaped to (N, 3); only z (column 2) is used.
    srcLoc : object
        Unused here; kept for API symmetry with the E-field function.
    sig : float
        Conductivity (S/m).
    t : numpy.ndarray
        Times (s) at which to evaluate the field.
    E0 : float
        Source amplitude.
    orientation : str
        Only 'X' is implemented; anything else raises NotImplementedError.
    kappa : float
        Magnetic susceptibility; mu = mu_0 * (1 + kappa).
    epsr : float
        Relative permittivity (unused in this diffusive solution).

    Returns
    -------
    (Hx, Hy, Hz) : tuple of numpy.ndarray
        Hx and Hz are zero; Hy carries the 1D diffusion response.
    """
    XYZ = Utils.asArray_N_x_Dim(XYZ, 3)
    # BUGFIX: the original condition was "XYZ.shape[0] > 1 & t.shape[0] > 1".
    # '&' binds tighter than '>', so that parsed as the chained comparison
    # XYZ.shape[0] > (1 & t.shape[0]) > 1 -- not the intended conjunction.
    if XYZ.shape[0] > 1 and t.shape[0] > 1:
        # Also fixed: this is a time-domain routine, so the message should
        # refer to a single time, not a single frequency.
        raise Exception(
            "I/O type error: For multiple field locations only a single "
            "time can be specified.")
    mu = mu_0*(1+kappa)
    if orientation == "X":
        z = XYZ[:, 2]
        Hx = np.zeros_like(z)
        Hy = E0 * np.sqrt(sig / (np.pi*mu*t))*np.exp(-(mu*sig*z**2) / (4*t))
        Hz = np.zeros_like(z)
        return Hx, Hy, Hz
    else:
        raise NotImplementedError()
if __name__ == '__main__':
pass<|fim▁end|> | z = XYZ[:, 2]
bunja = -E0*(mu*sig)**0.5 * z * np.exp(-(mu*sig*z**2) / (4*t))
bunmo = 2 * np.pi**0.5 * t**1.5 |
<|file_name|>YieldFrom.py<|end_file_name|><|fim▁begin|>"""
YieldFrom astroid node
This node represents the Python "yield from" statement, which functions
similarly to the "yield" statement except that the generator can delegate
some generating work to another generator.
Attributes:
- value (GeneratorExp)
- The generator that this YieldFrom is delegating work to.
Example:
- value -> Call(range, Name('g', Load()))
"""<|fim▁hole|>
def fun(g):
yield from range(g)<|fim▁end|> | |
<|file_name|>StringField.cpp<|end_file_name|><|fim▁begin|>/**
* Pennyworth - A new smarthome protocol.
* Copyright (C) 2012 Dream-Crusher Labs LLC
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
/*
* StringField.cpp
*
* Created on: Jun 18, 2012
* Author: jmonk
*/
#include "StringField.h"
namespace dvs {
// Constructs a STRING-typed field, forwarding the metadata (name, id,
// writability, volatility, owning device) to the Field base class.
StringField::StringField(string name, unsigned char id, bool writable, bool vol, Device* device) :
		Field(STRING, name, id, writable, vol, device) {
}

StringField::~StringField() {
}

// Stores a copy of the given NUL-terminated byte buffer as the value.
void StringField::setString(unsigned char* value) {
	string n((const char *) value);
	this->value = n;
}

// Returns the raw bytes of the current value.
// NOTE(review): the returned pointer aliases the internal std::string
// buffer and is only valid until the value next changes -- confirm callers
// copy it if they hold on to it.
unsigned char* StringField::getString() {
	return (unsigned char *) value.c_str();
}

// Wire length of the value: the string's bytes plus the trailing NUL.
unsigned int StringField::getLength() {
	return value.length() + 1;
}

// Updates the value locally and pushes it out via sendPacket().
void StringField::setRealString(string val) {
	this->value = val;
	this->sendPacket();
}
string StringField::getValue() {<|fim▁hole|><|fim▁end|> | return this->value;
}
} /* namespace dvs */ |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Xyrosource Team.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This build.rs script is inspired from
// https://github.com/simias/pockystation/blob/master/build.rs
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
fn main() {
let out_dir = env::var("OUT_DIR").unwrap();
let version_file = Path::new(&out_dir).join("version.rs");
let mut fil = File::create(&version_file).unwrap();<|fim▁hole|> let cargo_version = env!("CARGO_PKG_VERSION").to_owned();
writeln!(fil, "pub const VERSION: &'static str = \
\"{}\";", cargo_version).unwrap();
}<|fim▁end|> | |
<|file_name|>test_energy_profiler.py<|end_file_name|><|fim▁begin|>"""Test energy_profiler module."""
import unittest
from physalia.energy_profiler import AndroidUseCase
# pylint: disable=missing-docstring
class TestEnergyProfiler(unittest.TestCase):
    """Smoke tests for physalia.energy_profiler.AndroidUseCase."""

    def test_empty_android_use_case(self):
        """An AndroidUseCase built with no callbacks can still be run().

        (Restores the final use_case.run() call that stray
        fill-in-the-middle tokens had severed from the test body.)
        """
        # pylint: disable=no-self-use
        use_case = AndroidUseCase(
            name="Test",
            app_apk="no/path",
            app_pkg="no.package",
            app_version="0.0.0",
            run=None,
            prepare=None,
            cleanup=None
        )
        use_case.run()
<|file_name|>response.spec.js<|end_file_name|><|fim▁begin|>const { decode, parse } = require('./response')
// Decodes a recorded v1 ListOffsets response fixture and checks the parsed
// structure. (Reassembles the expectation object whose `partitions` lines
// had been displaced past the closing braces by stray fill-in-the-middle
// tokens in the source dump.)
describe('Protocol > Requests > ListOffsets > v1', () => {
  test('response', async () => {
    const data = await decode(Buffer.from(require('../fixtures/v1_response.json')))
    expect(data).toEqual({
      responses: [
        {
          topic: 'test-topic-16e956902e39874d06f5-91705-2958a472-e582-47a4-86f0-b258630fb3e6',
          partitions: [{ partition: 0, errorCode: 0, timestamp: '1543343103774', offset: '0' }],
        },
      ],
    })

    await expect(parse(data)).resolves.toBeTruthy()
  })
})
<|file_name|>alarm.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.common import exception
from heat.common.i18n import _
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
from heat.engine import watchrule
COMMON_PROPERTIES = (
ALARM_ACTIONS, OK_ACTIONS, REPEAT_ACTIONS, INSUFFICIENT_DATA_ACTIONS,
DESCRIPTION, ENABLED,
) = (
'alarm_actions', 'ok_actions', 'repeat_actions',
'insufficient_data_actions', 'description', 'enabled',
)
common_properties_schema = {
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description for the alarm.'),
update_allowed=True
),
ENABLED: properties.Schema(
properties.Schema.BOOLEAN,
_('True if alarm evaluation/actioning is enabled.'),
default='true',
update_allowed=True
),
ALARM_ACTIONS: properties.Schema(
properties.Schema.LIST,
_('A list of URLs (webhooks) to invoke when state transitions to '
'alarm.'),
update_allowed=True
),
OK_ACTIONS: properties.Schema(
properties.Schema.LIST,
_('A list of URLs (webhooks) to invoke when state transitions to '
'ok.'),
update_allowed=True
),
INSUFFICIENT_DATA_ACTIONS: properties.Schema(
properties.Schema.LIST,
_('A list of URLs (webhooks) to invoke when state transitions to '
'insufficient-data.'),
update_allowed=True
),
REPEAT_ACTIONS: properties.Schema(
properties.Schema.BOOLEAN,
_("False to trigger actions when the threshold is reached AND "
"the alarm's state has changed. By default, actions are called "
"each time the threshold is reached."),
default='true',
update_allowed=True
)
}
NOVA_METERS = ['instance', 'memory', 'memory.usage',
'cpu', 'cpu_util', 'vcpus',
'disk.read.requests', 'disk.read.requests.rate',
'disk.write.requests', 'disk.write.requests.rate',
'disk.read.bytes', 'disk.read.bytes.rate',
'disk.write.bytes', 'disk.write.bytes.rate',
'disk.device.read.requests', 'disk.device.read.requests.rate',
'disk.device.write.requests', 'disk.device.write.requests.rate',
'disk.device.read.bytes', 'disk.device.read.bytes.rate',
'disk.device.write.bytes', 'disk.device.write.bytes.rate',
'disk.root.size', 'disk.ephemeral.size',
'network.incoming.bytes', 'network.incoming.bytes.rate',
'network.outgoing.bytes', 'network.outgoing.bytes.rate',
'network.incoming.packets', 'network.incoming.packets.rate',
'network.outgoing.packets', 'network.outgoing.packets.rate']
def actions_to_urls(stack, properties):
    """Resolve action entries that name stack resources into alarm URLs.

    For each of the alarm/ok/insufficient-data action lists, an entry that
    is the name of a resource in ``stack`` is replaced by that resource's
    ``AlarmUrl`` attribute; other truthy entries are kept verbatim and
    falsy entries are dropped.  All remaining properties pass through
    unchanged.

    :param stack: the stack, supporting ``in`` and ``[]`` by resource name.
    :param properties: mapping of alarm properties.
    :returns: a new dict with action names resolved to URLs.
    """
    kwargs = {}
    # Idiom fix: .items() needs no iter() wrapper.
    for k, v in properties.items():
        if k in (ALARM_ACTIONS, OK_ACTIONS,
                 INSUFFICIENT_DATA_ACTIONS) and v is not None:
            urls = []
            for act in v:
                # if the action is a resource name
                # we ask the destination resource for an alarm url.
                # the template writer should really do this in the
                # template if possible with:
                # {Fn::GetAtt: ['MyAction', 'AlarmUrl']}
                if act in stack:
                    urls.append(stack[act].FnGetAtt('AlarmUrl'))
                elif act:
                    urls.append(act)
            kwargs[k] = urls
        else:
            kwargs[k] = v
    return kwargs
class CeilometerAlarm(resource.Resource):
    """A Heat resource wrapping a Ceilometer threshold alarm.

    Translates the CFN-style property set (matching_metadata, query, etc.)
    into a ceilometer 'threshold' alarm and keeps a legacy WatchRule row
    for backwards compatibility with CloudWatch-style stat posting.
    """

    # Property name constants; the second tuple assigns the string values.
    PROPERTIES = (
        COMPARISON_OPERATOR, EVALUATION_PERIODS, METER_NAME, PERIOD,
        STATISTIC, THRESHOLD, MATCHING_METADATA, QUERY,
    ) = (
        'comparison_operator', 'evaluation_periods', 'meter_name', 'period',
        'statistic', 'threshold', 'matching_metadata', 'query',
    )

    # Keys of a single query factor in the QUERY list property.
    QUERY_FACTOR_FIELDS = (
        QF_FIELD, QF_OP, QF_VALUE,
    ) = (
        'field', 'op', 'value',
    )

    QF_OP_VALS = constraints.AllowedValues(['le', 'ge', 'eq',
                                            'lt', 'gt', 'ne'])

    properties_schema = {
        COMPARISON_OPERATOR: properties.Schema(
            properties.Schema.STRING,
            _('Operator used to compare specified statistic with threshold.'),
            constraints=[
                constraints.AllowedValues(['ge', 'gt', 'eq', 'ne', 'lt',
                                           'le']),
            ],
            update_allowed=True
        ),
        EVALUATION_PERIODS: properties.Schema(
            properties.Schema.INTEGER,
            _('Number of periods to evaluate over.'),
            update_allowed=True
        ),
        METER_NAME: properties.Schema(
            properties.Schema.STRING,
            _('Meter name watched by the alarm.'),
            required=True
        ),
        PERIOD: properties.Schema(
            properties.Schema.INTEGER,
            _('Period (seconds) to evaluate over.'),
            update_allowed=True
        ),
        STATISTIC: properties.Schema(
            properties.Schema.STRING,
            _('Meter statistic to evaluate.'),
            constraints=[
                constraints.AllowedValues(['count', 'avg', 'sum', 'min',
                                           'max']),
            ],
            update_allowed=True
        ),
        THRESHOLD: properties.Schema(
            properties.Schema.NUMBER,
            _('Threshold to evaluate against.'),
            required=True,
            update_allowed=True
        ),
        MATCHING_METADATA: properties.Schema(
            properties.Schema.MAP,
            _('Meter should match this resource metadata (key=value) '
              'additionally to the meter_name.'),
            default={},
            update_allowed=True
        ),
        QUERY: properties.Schema(
            properties.Schema.LIST,
            _('A list of query factors, each comparing '
              'a Sample attribute with a value. '
              'Implicitly combined with matching_metadata, if any.'),
            update_allowed=True,
            support_status=support.SupportStatus(version='2015.1'),
            schema=properties.Schema(
                properties.Schema.MAP,
                schema={
                    QF_FIELD: properties.Schema(
                        properties.Schema.STRING,
                        _('Name of attribute to compare. '
                          'Names of the form metadata.user_metadata.X '
                          'or metadata.metering.X are equivalent to what '
                          'you can address through matching_metadata; '
                          'the former for Nova meters, '
                          'the latter for all others. '
                          'To see the attributes of your Samples, '
                          'use `ceilometer --debug sample-list`.')
                    ),
                    QF_OP: properties.Schema(
                        properties.Schema.STRING,
                        _('Comparison operator'),
                        constraints=[QF_OP_VALS]
                    ),
                    QF_VALUE: properties.Schema(
                        properties.Schema.STRING,
                        _('String value with which to compare')
                    )
                }
            )
        )
    }
    # Shared description/enabled/action properties defined at module level.
    properties_schema.update(common_properties_schema)

    default_client_name = 'ceilometer'

    def cfn_to_ceilometer(self, stack, properties):
        """Apply all relevant compatibility xforms."""
        kwargs = actions_to_urls(stack, properties)
        kwargs['type'] = 'threshold'
        # Nova meters keep user metadata under a different prefix than all
        # other meters.
        if kwargs.get(self.METER_NAME) in NOVA_METERS:
            prefix = 'user_metadata.'
        else:
            prefix = 'metering.'

        # Move the threshold-rule fields out of the top-level kwargs into
        # the nested threshold_rule dict that ceilometer expects.
        rule = {}
        for field in ['period', 'evaluation_periods', 'threshold',
                      'statistic', 'comparison_operator', 'meter_name']:
            if field in kwargs:
                rule[field] = kwargs[field]
                del kwargs[field]
        mmd = properties.get(self.MATCHING_METADATA) or {}
        query = properties.get(self.QUERY) or []

        # make sure the matching_metadata appears in the query like this:
        # {field: metadata.$prefix.x, ...}
        for m_k, m_v in six.iteritems(mmd):
            if m_k.startswith('metadata.%s' % prefix):
                key = m_k
            elif m_k.startswith(prefix):
                key = 'metadata.%s' % m_k
            else:
                key = 'metadata.%s%s' % (prefix, m_k)
            # NOTE(prazumovsky): type of query value must be a string, but
            # matching_metadata value type can not be a string, so we
            # must convert value to a string type.
            query.append(dict(field=key, op='eq', value=six.text_type(m_v)))
        if self.MATCHING_METADATA in kwargs:
            del kwargs[self.MATCHING_METADATA]
        if self.QUERY in kwargs:
            del kwargs[self.QUERY]
        if query:
            rule['query'] = query
        kwargs['threshold_rule'] = rule
        return kwargs

    def handle_create(self):
        props = self.cfn_to_ceilometer(self.stack,
                                       self.properties)
        props['name'] = self.physical_resource_name()
        alarm = self.ceilometer().alarms.create(**props)
        self.resource_id_set(alarm.alarm_id)

        # the watchrule below is for backwards compatibility.
        # 1) so we don't create watch tasks unneccessarly
        # 2) to support CW stats post, we will redirect the request
        #    to ceilometer.
        wr = watchrule.WatchRule(context=self.context,
                                 watch_name=self.physical_resource_name(),
                                 rule=self.parsed_template('Properties'),
                                 stack_id=self.stack.id)
        wr.state = wr.CEILOMETER_CONTROLLED
        wr.store()

    def handle_update(self, json_snippet, tmpl_diff, prop_diff):
        # Only talk to ceilometer when a property actually changed.
        if prop_diff:
            kwargs = {'alarm_id': self.resource_id}
            kwargs.update(self.properties)
            kwargs.update(prop_diff)
            alarms_client = self.ceilometer().alarms
            alarms_client.update(**self.cfn_to_ceilometer(self.stack, kwargs))

    def handle_suspend(self):
        # Suspending a stack disables alarm evaluation instead of deleting.
        if self.resource_id is not None:
            self.ceilometer().alarms.update(alarm_id=self.resource_id,
                                            enabled=False)

    def handle_resume(self):
        if self.resource_id is not None:
            self.ceilometer().alarms.update(alarm_id=self.resource_id,
                                            enabled=True)

    def handle_delete(self):
        # Remove the compatibility watch rule first; a missing rule is fine.
        try:
            wr = watchrule.WatchRule.load(
                self.context, watch_name=self.physical_resource_name())
            wr.destroy()
        except exception.WatchRuleNotFound:
            pass
        if self.resource_id is not None:
            try:
                self.ceilometer().alarms.delete(self.resource_id)
            except Exception as ex:
                # An already-deleted alarm is not an error on delete.
                self.client_plugin().ignore_not_found(ex)

    def handle_check(self):
        # Health check: both the watch rule and the ceilometer alarm must
        # still be retrievable.
        watch_name = self.physical_resource_name()
        watchrule.WatchRule.load(self.context, watch_name=watch_name)
        self.ceilometer().alarms.get(self.resource_id)
class BaseCeilometerAlarm(resource.Resource):
default_client_name = 'ceilometer'
def handle_create(self):
properties = actions_to_urls(self.stack,
self.properties)
properties['name'] = self.physical_resource_name()
properties['type'] = self.ceilometer_alarm_type
alarm = self.ceilometer().alarms.create(
**self._reformat_properties(properties))
self.resource_id_set(alarm.alarm_id)
<|fim▁hole|> def _reformat_properties(self, properties):
rule = {}
for name in self.PROPERTIES:
value = properties.pop(name, None)
if value:
rule[name] = value
if rule:
properties['%s_rule' % self.ceilometer_alarm_type] = rule
return properties
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
if prop_diff:
kwargs = {'alarm_id': self.resource_id}
kwargs.update(prop_diff)
alarms_client = self.ceilometer().alarms
alarms_client.update(**self._reformat_properties(
actions_to_urls(self.stack, kwargs)))
def handle_suspend(self):
self.ceilometer().alarms.update(
alarm_id=self.resource_id, enabled=False)
def handle_resume(self):
self.ceilometer().alarms.update(
alarm_id=self.resource_id, enabled=True)
    def handle_delete(self):
        """Delete the alarm, tolerating the case where it is already gone."""
        try:
            self.ceilometer().alarms.delete(self.resource_id)
        except Exception as ex:
            # Swallow only not-found errors; anything else is re-raised.
            self.client_plugin().ignore_not_found(ex)
    def handle_check(self):
        # Raises if the alarm no longer exists, marking the resource unhealthy.
        self.ceilometer().alarms.get(self.resource_id)
class CombinationAlarm(BaseCeilometerAlarm):
    """Alarm whose state is derived from combining other alarms' states."""
    support_status = support.SupportStatus(version='2014.1')
    PROPERTIES = (
        ALARM_IDS, OPERATOR,
    ) = (
        'alarm_ids', 'operator',
    )
    properties_schema = {
        ALARM_IDS: properties.Schema(
            properties.Schema.LIST,
            _('List of alarm identifiers to combine.'),
            required=True,
            constraints=[constraints.Length(min=1)],
            update_allowed=True),
        OPERATOR: properties.Schema(
            properties.Schema.STRING,
            _('Operator used to combine the alarms.'),
            constraints=[constraints.AllowedValues(['and', 'or'])],
            update_allowed=True)
    }
    # Shared alarm properties (description, enabled, actions, ...) apply too.
    properties_schema.update(common_properties_schema)
    ceilometer_alarm_type = 'combination'
def resource_mapping():
    """Map Heat resource type names to their implementing classes."""
    mapping = {
        'OS::Ceilometer::Alarm': CeilometerAlarm,
        'OS::Ceilometer::CombinationAlarm': CombinationAlarm,
    }
    return mapping
<|file_name|>toolbar.d.ts<|end_file_name|><|fim▁begin|>import { ElementRef, Renderer } from '@angular/core';
import { Config } from '../../config/config';
import { Ion } from '../ion';
import { ToolbarTitle } from './toolbar-title';
import { ViewController } from '../../navigation/view-controller';
/**
* @name Header
* @description
* Header is a parent component that holds the navbar and toolbar component.
* It's important to note that `ion-header` needs to be the one of the three root elements of a page
*
* @usage
*
* ```html
* <ion-header>
* <ion-navbar>
* <ion-title>Page1</ion-title>
* </ion-navbar>
*
* <ion-toolbar>
* <ion-title>Subheader</ion-title>
* </ion-toolbar>
* </ion-header>
*
* <ion-content></ion-content>
* ```
*
*/
export declare class Header extends Ion {
    // NOTE(review): viewCtrl presumably ties this header to its owning view
    // for transitions -- confirm against the compiled implementation.
    constructor(config: Config, elementRef: ElementRef, renderer: Renderer, viewCtrl: ViewController);
}
/**
* @name Footer
* @description
* Footer is a root component of a page that sits at the bottom of the page.
* Footer can be a wrapper for `ion-toolbar` to make sure the content area is sized correctly.
*
* @usage
*
* ```html
* <ion-content></ion-content>
*
* <ion-footer>
* <ion-toolbar>
* <ion-title>Footer</ion-title>
* </ion-toolbar>
* </ion-footer>
* ```
*
*/
export declare class Footer extends Ion {
    // NOTE(review): viewCtrl presumably ties this footer to its owning view
    // for transitions -- confirm against the compiled implementation.
    constructor(config: Config, elementRef: ElementRef, renderer: Renderer, viewCtrl: ViewController);
}
/**
* @private
*/
export declare class ToolbarBase extends Ion {
    // Child ToolbarTitle component, when one has been registered via _setTitle.
    private _title;
    constructor(config: Config, elementRef: ElementRef, renderer: Renderer);
    /**
     * @private
     * Registers the child title component so getTitleText can query it.
     */
    _setTitle(titleCmp: ToolbarTitle): void;
    /**
     * @private
     * Returns the toolbar title text if it exists or an empty string
     */
    getTitleText(): any;
}
/**
* @name Toolbar
* @description
* A Toolbar is a generic bar that is positioned above or below content.
* Unlike a [Navbar](../../navbar/Navbar), a toolbar can be used as a subheader.
* When toolbars are placed within an `<ion-header>` or `<ion-footer>`,
* the toolbars stay fixed in their respective location. When placed within
* `<ion-content>`, toolbars will scroll with the content.
*
*
* ### Buttons in a Toolbar
* Buttons placed in a toolbar should be placed inside of the `<ion-buttons>`
* element. An exception to this is a [menuToggle](../../menu/MenuToggle) button.
* It should not be placed inside of the `<ion-buttons>` element. Both the
* `<ion-buttons>` element and the `menuToggle` can be positioned inside of the
* toolbar using different properties. The below chart has a description of each
* property.
*
* | Property | Description |
* |-------------|-----------------------------------------------------------------------------------------------------------------------|
* | `start` | Positions element to the left of the content in `ios` mode, and directly to the right in `md` and `wp` mode. |
* | `end` | Positions element to the right of the content in `ios` mode, and to the far right in `md` and `wp` mode. |
* | `left` | Positions element to the left of all other elements. |
* | `right` | Positions element to the right of all other elements. |
*
*
* ### Header / Footer Box Shadow and Border
* In `md` mode, the `<ion-header>` will receive a box-shadow on the bottom, and the
* `<ion-footer>` will receive a box-shadow on the top. In `ios` mode, the `<ion-header>`
* will receive a border on the bottom, and the `<ion-footer>` will receive a border on the
* top. Both the `md` box-shadow and the `ios` border can be removed by adding the `no-border`
* attribute to the element.
*
* ```html
* <ion-header no-border>
* <ion-toolbar>
* <ion-title>Header</ion-title>
* </ion-toolbar>
* </ion-header>
*
* <ion-content>
* </ion-content>
*
* <ion-footer no-border>
* <ion-toolbar>
* <ion-title>Footer</ion-title>
* </ion-toolbar><|fim▁hole|> * ```
*
* @usage
*
* ```html
*
* <ion-header no-border>
*
* <ion-toolbar>
* <ion-title>My Toolbar Title</ion-title>
* </ion-toolbar>
*
* <ion-toolbar>
* <ion-title>I'm a subheader</ion-title>
* </ion-toolbar>
*
 * </ion-header>
*
*
* <ion-content>
*
* <ion-toolbar>
* <ion-title>Scrolls with the content</ion-title>
* </ion-toolbar>
*
* </ion-content>
*
*
* <ion-footer no-border>
*
* <ion-toolbar>
* <ion-title>I'm a footer</ion-title>
* </ion-toolbar>
*
* </ion-footer>
* ```
*
* @demo /docs/demos/src/toolbar/
* @see {@link ../../navbar/Navbar/ Navbar API Docs}
*/
export declare class Toolbar extends ToolbarBase {
    /** @private NOTE(review): looks like a status-bar padding flag -- confirm. */
    _sbPadding: boolean;
    /**
     * @input {string} The predefined color to use. For example: `"primary"`, `"secondary"`, `"danger"`.
     */
    color: string;
    /**
     * @input {string} The mode to apply to this component.
     */
    mode: string;
    constructor(viewCtrl: ViewController, config: Config, elementRef: ElementRef, renderer: Renderer);
}
<|file_name|>rendering.py<|end_file_name|><|fim▁begin|>import maya.cmds;mc = maya.cmds
import pymel.core;pm = pymel.core
from pytaya.core.general import listForNone
from pytd.util.logutils import logMsg
from pytd.util.sysutils import grouper
def fileNodesFromObjects(oObjList):
    """Return the file texture nodes driving the given objects' materials."""
    oShaders = shadersFromObjects(oObjList)
    return fileNodesFromShaders(oShaders)
def fileNodesFromShaders(oMatList):
    """Collect the unique 'file' nodes found in each shader's history."""
    found = set()
    for shader in oMatList:
        found.update(shader.listHistory(type="file"))
    return list(found)
def shadersFromObjects(objList, connectedTo=""):
    """Return the shader nodes assigned to the given objects.

    :param objList: objects (or components) whose materials are wanted
    :param connectedTo: optional shadingEngine attribute name; when given,
        connections are looked up on that attribute instead of the group node
    :return: list of shader PyNodes (may contain duplicates)
    """
    sAttrName = connectedTo
    if not objList:
        return []
    oMatSgList = shadingGroupsFromObjects(objList)
    oMatList = []
    for oMatSg in oMatSgList:
        # Query either the shading group itself or one of its attributes.
        sName = oMatSg.attr(sAttrName).name() if connectedTo else oMatSg.name()
        # listConnections can return None; listForNone normalizes that to [].
        oMatList.extend(pm.ls(listForNone(mc.listConnections(sName, source=True,
                                                             destination=False)),
                              type=mc.listNodeTypes('shader', ex="texture")))
    return oMatList
def shadingGroupsFromObjects(objList):
    """Return the unique shading groups assigned to the given objects."""
    groups = set()
    for each in objList:
        node = each if isinstance(each, pm.PyNode) else pm.PyNode(each)
        groups.update(shadingGroupsForObject(node))
    return list(groups)
def shadingGroupsForObject(oObj, warn=True):
    """Return the shading groups (shadingEngine sets) used by one object.

    Handles mesh faces, nurbs faces, transforms and shapes; other node
    types are ignored (with an optional warning).
    """
    oShdGrpList = []
    oShape = None
    if isinstance(oObj, pm.general.MeshFace):
        # For faces, keep only the engines whose member faces overlap ours.
        indiceList = oObj.indices()
        for oShdEng in oObj.listHistory(type="shadingEngine"):
            if set(indiceList).intersection(set(oShdEng.members()[0].indices())):
                oShdGrpList.append(oShdEng)
    elif isinstance(oObj, pm.general.NurbsSurfaceFace):
        oShape = oObj.node()
    elif isinstance(oObj, pm.nt.Transform):
        oShape = oObj.getShape()
    elif isinstance(oObj, (pm.nt.Mesh, pm.nt.NurbsSurface)):
        oShape = oObj
    elif warn:
        logMsg("Can't get shading groups from {}".format(repr(oObj)) , warning=True)
    if not oShdGrpList:
        if oShape:
            oShdGrpList = oShape.shadingGroups()
            # Fall back to raw connections when the API query finds nothing.
            if not oShdGrpList:
                oShdGrpList = oShape.connections(type="shadingEngine")
    return oShdGrpList
def conformShadingNetworkToNamespace(oMeshList, sNamespaceToMatch , **kwargs):
    """Duplicate materials that live outside `sNamespaceToMatch` into that
    namespace and re-assign them to the given meshes.

    :param oMeshList: shapes whose shading assignments should be conformed
    :param sNamespaceToMatch: target namespace ('' means the root namespace)
    :param kwargs: 'force' (bool) skips the confirmation dialog
    :return: the (possibly updated) force flag
    """
    bForce = kwargs.get("force", False)
    oShadingGroupMembersDct = {}
    oMatNotConformList = []
    # Pass 1: find non-conform materials and remember, per shading group,
    # which members belong to the shapes we were asked to conform.
    for oShape in oMeshList:
        # print "\nfor shape: ", oShape
        oMatSGList = shadingGroupsForObject(oShape)
        for oMatSG in oMatSGList:
            # print "for shadingGroup: ", oMatSG
            oMatList = pm.ls(oMatSG.inputs(), type=mc.listNodeTypes('shader', ex="texture"))
            oMat = oMatList[0]
            ##ignore shadingGroups where materials are defaultNode
            if oMat.isDefaultNode():
                continue
            ##ignore shadingGroups where materials are already in namespace to match
            sMatNamespace = oMat.namespace()
            # print "sMatNamespace", sMatNamespace
            # print "sNamespaceToMatch", sNamespaceToMatch
            if sMatNamespace == sNamespaceToMatch:
                continue
            else:
                oMatNotConformList.append(oMat)
            oMembers = oMatSG.members()
            for oMember in oMembers:
                # print "member :", oMember
                if oMember.node() == oShape:
                    oShadingGroupMembersDct.setdefault(oMatSG, []).append(oMember)
    # for k, v in oShadingGroupMembersDct.iteritems():
    # print "for shadingGroup: ", k, ", specific members are: ", v
    # Ask the user before touching the scene, unless force was requested.
    if oMatNotConformList:
        if bForce:
            pass
        else:
            result = pm.confirmDialog(title='Materials not conform to Namespace...'
                                      , message="Found materials not conform to Namespace,\nCopy Shading Network, Conform to Namespace & Assign ?"
                                      , button=["OK", 'Cancel']
                                      , defaultButton='Cancel'
                                      , cancelButton='Cancel'
                                      , dismissString='Cancel')
            if result == "Cancel":
                pm.warning("Materials Namespace conformation cancelled.")
                return bForce
            else:
                bForce = True
    else:
        if sNamespaceToMatch:
            logMsg('Materials already conformed to Namespace: "{0}"'.format(sNamespaceToMatch) , warning=True)
        return bForce
    ##Force current namespace to the one to match to duplicate in this namespace
    mc.namespace(set=":")
    mc.namespace(set=sNamespaceToMatch if sNamespaceToMatch else ":")
    oMatNotConformList = []
    oShapeAssignedList = []
    # Pass 2: duplicate each shading network inside the target namespace and
    # assign it, first per-shape then per recorded member (faces, etc.).
    for oMatSG, oMembers in oShadingGroupMembersDct.iteritems():
        oNewMatSGs = pm.duplicate(oMatSG, rr=True, un=True)
        oNewMatSG = oNewMatSGs[0]
        # print "old shadingGroup: ", oMatSG
        # print "new shadingGroup: ", oNewMatSGs[0]
        # print "oMembers", oMembers
        # print oMembers[0]
        for oMember in oMembers:
            oShape = oMember.node()
            if oShape not in oShapeAssignedList:
                oShapeAssignedList.append(oShape)
                try:
                    pm.sets(oNewMatSG, e=True, forceElement=oShape)
                    logMsg('Material "{0}" assigned first to: "{1}"'.format(oNewMatSG, oShape) , warning=True)
                except:
                    logMsg('Could not assign material "{0}" first to: "{1}"'.format(oNewMatSG, oShape) , warning=True)
        try:
            pm.sets(oNewMatSG, e=True, forceElement=oMembers)
            logMsg('Material "{0}" assigned to: "{1}"'.format(oNewMatSG, oMembers) , warning=True)
        except:
            logMsg('Could not assign material "{0}" to: "{1}"'.format(oNewMatSG, oMembers) , warning=True)
    # Restore the root namespace before returning.
    mc.namespace(set=":")
    return bForce
def transferUvAndShaders(oSrcGrp, oDestGrp):
    """Copy UVs and shading assignments from a source group onto a
    destination group, matching transforms by (namespace-stripped) name.

    :param oSrcGrp: source group node (its namespace prefixes the lookups)
    :param oDestGrp: destination group whose meshes receive UVs/shaders
    :return: tuple (notFoundList, notCompatibleShapeList)
    """
    notCompatibleShapeList = []
    sSourceNameSpace = oSrcGrp.namespace()
    notFoundList = []
    transferList = []
    oTargetList = pm.ls(oDestGrp, dag=True, tr=True)
    #searchCount = len(oTargetList)
    # Pass 1: pair every destination mesh transform with the source node of
    # the same name inside the source namespace.
    for oTargetXfm in oTargetList:
        oShape = oTargetXfm.getShape(ni=True)
        if isinstance(oShape, pm.nt.Mesh):
            sXfmName = oTargetXfm.nodeName()
            sSourceName = sSourceNameSpace + sXfmName
            oSourceXfm = pm.PyNode(sSourceName)
            if oSourceXfm:
                transferList.append((oSourceXfm, oTargetXfm))
                # print oSourceXfm, oTargetXfm
            else:
                notFoundList.append(oTargetXfm)
                print 'No match found for "{0}"'.format(sXfmName)
        print "Searching... {0}".format(oTargetXfm.nodeName())
    # oSet = fncTools.checkSet("noMatchFound")
    # if notFoundList:
    # pm.sets(oSet, addElement=notFoundList)
    result = pm.confirmDialog(title='Transfer Uvs',
                              message='Found {0}/{1} mismatches :'.format(len(notFoundList), len(transferList)),
                              button=['Ok', 'Cancel'],
                              defaultButton='Cancel',
                              cancelButton='Cancel',
                              dismissString='Cancel')
    if result == 'Cancel':
        return
    else :
        # Pass 2: perform the actual per-pair transfer.
        for oSourceXfm, oTargetXfm in transferList:
            oSourceShape = oSourceXfm.getShape(ni=True)
            oHistList = oTargetXfm.listHistory()
            oShapeList = pm.ls(oHistList, type="mesh")
            oTargetShape = None
            bShapeOrig = False
            oTargetCurrentShape = oTargetXfm.getShape(ni=True)
            # Prefer the original (intermediate) shape when deformers exist,
            # temporarily un-flagging it so attributes can be transferred.
            if len(oShapeList) > 1:
                for oShape in oShapeList:
                    if oShape.getAttr("intermediateObject") and oShape.attr("worldMesh").outputs():
                        bShapeOrig = True
                        oShape.setAttr("intermediateObject", False)
                        oTargetShape = oShape
                        break
            else:
                oTargetShape = oTargetCurrentShape
            if oTargetShape:
                try:
                    print ('transferring uvs and shaders from "{0}" to "{1}"'
                           .format(oSourceShape, oTargetShape))
                    # Vertex-count mismatches are reported but not fatal.
                    if oTargetCurrentShape.numVertices() != oSourceShape.numVertices():
                        notCompatibleShapeList.extend([oSourceShape, oTargetCurrentShape])
                    pm.transferAttributes(oSourceShape, oTargetShape, transferPositions=0,
                                          transferNormals=0, transferUVs=2, transferColors=2,
                                          sampleSpace=5, sourceUvSpace="map1", targetUvSpace="map1",
                                          searchMethod=3, flipUVs=0, colorBorders=1)
                    pm.transferShadingSets(oSourceShape, oTargetShape, sampleSpace=0, searchMethod=3)
                    pm.delete(oTargetShape, ch=True)
                finally:
                    # Always restore the intermediateObject flag.
                    if bShapeOrig:
                        oTargetShape.setAttr("intermediateObject", True)
    # Final shading-set transfer acts on the last processed pair's shapes.
    pm.select(clear=True)
    pm.select(oSourceShape, r=True)
    pm.select(oTargetCurrentShape, tgl=True)
    pm.transferShadingSets(sampleSpace=1, searchMethod=3)
    # oSet = fncTools.checkSet("Shapes_Without_Same_Topology")
    # if notCompatibleShapeList:
    # pm.sets(oSet, addElement=notCompatibleShapeList)
    # pm.select(notCompatibleShapeList)
    # pm.warning("The selected node's may have potentially problems on transferring uvs and materials.")
    return notFoundList, notCompatibleShapeList
def averageVertexColorsToMaterial(oMatList="NoEntry"):
if oMatList == "NoEntry":
oMatList = pm.selected()
if not oMatList:
logMsg("Nothing is selected. Select meshes to apply vertex color." , warning=True)
return
for oMat in oMatList:
logMsg("Processing {0}".format(repr(oMat)))
try:
colorAttr = oMat.attr("color")
except pm.MayaAttributeError:
logMsg("\tNo color attribute found.")
continue
try:
oSG = oMat.shadingGroups()[0]
except IndexError:
print "\tNo ShadingGroup found."<|fim▁hole|>
oMemberList = oSG.members()
if not oMemberList:
logMsg("\tShadingGroup is empty.")
continue
pm.select(oMemberList, r=True)
pm.mel.ConvertSelectionToVertices()
sSelectedVerts = mc.ls(sl=True)
pm.refresh()
try:
vtxColorList = tuple(grouper(3, mc.polyColorPerVertex(sSelectedVerts, q=True, rgb=True)))
except:
logMsg("\tNo vertex colors found.")
continue
numVtx = len(vtxColorList)
rSum = 0.0
gSum = 0.0
bSum = 0.0
for r, g, b in vtxColorList:
rSum += r
gSum += g
bSum += b
if rSum + gSum + bSum > 0.0:
avrVtxColor = (rSum / numVtx, gSum / numVtx, bSum / numVtx)
try:
colorAttr.disconnect()
colorAttr.set(avrVtxColor)
except Exception, e:
logMsg("\t{0}".format(e))
def duplicateShadersPerObject(oMatList):
    """Give every object sharing a material its own duplicate of that
    material, so each geometry ends up with a unique shading network.

    :param oMatList: shader nodes to examine
    :return: list of the newly created shader duplicates
    """
    oNewMatList = []
    for oMat in oMatList:
        oShadEngList = oMat.outputs(type="shadingEngine")
        if not oShadEngList:
            continue
        oShadEng = oShadEngList[0]
        oShadEngMemberList = oShadEng.members()
        # Group shading-group members by the geometry they belong to.
        oMemberByGeoObjDct = {}
        for member in oShadEngMemberList:
            oMesh = member.node() if isinstance(member, pm.MeshFace) else member
            oMemberByGeoObjDct.setdefault(oMesh, []).append(member)
        count = len(oMemberByGeoObjDct)
        if count <= 1:
            continue
        # One (arbitrary) object keeps the original material; the rest get copies.
        oMemberByGeoObjDct.popitem()
        for oShadingMembers in oMemberByGeoObjDct.itervalues():
            oNewMat = pm.duplicate(oMat, inputConnections=True)[0]
            # pm.select(oShadingMembers, replace=True)
            # pm.hyperShade(assign=oNewMat)
            # Build a fresh shading group for the duplicate and assign it.
            oSG = pm.sets(renderable=True, noSurfaceShader=True, empty=True, name=oNewMat.nodeName() + "SG")
            oNewMat.attr("outColor") >> oSG.attr("surfaceShader")
            pm.sets(oSG, forceElement=oShadingMembers)
            oNewMatList.append(oNewMat)
    return oNewMatList
<|file_name|>review.js<|end_file_name|><|fim▁begin|>// Release 1: User Stories
// As a user, I want to be able to create a new grocery list. After that, I need to be able to add an item with a quantity to the list, remove an item, and update the quantities if they change. I need a way to print out the list in a format that is very readable.
// Release 2: Pseudocode
// input: string of items separated by spaces
// output: object
// create a new object as new variable
// convert string to array (split)
// take each item in array and add to object as a property with a default quantity/value of 1
//
// Release 3: Initial Solution
// function to create list
// var foodList = ("salmon iceCream macAndCheese")
// var groceryList = {};
// var createList = function(foodList) {
// var foodArray = foodList.split(" ");
// for (var i = 0; i < foodArray.length; i++){
// groceryList[(foodArray[i])] = 1;
// }
// console.log(groceryList);
// }
// createList(foodList)
// // function to add item to list
// var addItem = function(newItem) {
// groceryList[newItem] = 1;
// console.log(groceryList);
// }
// addItem("peas")
// // function to remove item from list
// var removeItem = function(itemToLose) {
// delete groceryList[itemToLose];
// console.log(groceryList);
// }
// removeItem("peas")
// // function to update quantity
// var updateList = function(updateItem, newQuantity) {
// groceryList[updateItem] = newQuantity;
// console.log(groceryList);
// }
// updateList("macAndCheese", 5)
// // function to display list
// var displayList = function(groceryList) {
// for (food in groceryList) {
// console.log(food + ": " + groceryList[food]);
// }
// }
// displayList(groceryList)
// Release 4: Refactor
// function to create list
var groceryList = {};
var displayList = function(groceryList) {
  // Render every item with its quantity in a readable format.
  console.log("Your Grocery List:")
  Object.keys(groceryList).forEach(function(food) {
    console.log(food + ": " + groceryList[food]);
  });
  console.log("----------")
}
var createList = function(foodList) {
  // Seed the shared groceryList from a space-separated string of items;
  // every item starts with a quantity of 1.
  foodList.split(" ").forEach(function(item) {
    groceryList[item] = 1;
  });
  displayList(groceryList);
}
var addItem = function(newItem) {
  // New items always start with a quantity of 1.
  groceryList[newItem] = 1;
  displayList(groceryList);
}
var removeItem = function(itemToLose) {
  // Deleting the property removes the item from the list entirely.
  delete groceryList[itemToLose];
  displayList(groceryList);
}
var updateList = function(updateItem, newQuantity) {
  // Overwrites the item's quantity (adds the item if it was missing).
  groceryList[updateItem] = newQuantity;
  displayList(groceryList);
}
var foodList = ("funfettiMix bananas chocolateCoveredAlmonds")
createList(foodList)
addItem("peaches")
updateList("peaches", 20)
removeItem("bananas")<|fim▁hole|>// What was the most difficult part of this challenge?
// I forgot I needed to convert the string to an array, but once I did that with the .split(" ") method, all of the strings were easily accessible to add to the new object.
// Did an array or object make more sense to use and why?
// This was weirdly WAY easier with JavaScript than it was initially with Ruby (probably because we were on our second week of Ruby at this point!). It was so easy to add each string from an array into the object as a property and set it's default. Accessing these properties to update or delete was made easier by using bracket notation. Instead of complicated hash methods and having to convert strings to arrays to hashes, all I had to do was split the string and add each string to the object with a default value.<|fim▁end|> |
// Release 5: Reflect
// What concepts did you solidify in working on this challenge? (reviewing the passing of information, objects, constructors, etc.)
// I solidified accessing different properties in an object. I was able to add strings from an array into an empty object and set their default value. To change those values, I knew I needed to access each property using bracket notation and change the value assigned to it.
<|file_name|>LineNumbersTest.java<|end_file_name|><|fim▁begin|>import java.util.*;
public class LineNumbersTest extends LinkedList<Object>{
    public LineNumbersTest(int x) {
        super((x & 0) == 1 ? // (x & 0) is always 0, so the HashSet branch below is always taken
              new LinkedList<Object>((x & 1) == x++ ? new ArrayList<Object>() : new HashSet<Object>())
              :new HashSet<Object>());
        super.add(x = getLineNo()); // records this source line's number; do not move this line
        this.add(x = getLineNo()); // records this source line's number; do not move this line
    }
    static int getLineNo() { // returns the CALLER's line number (stack frame [1]); inserting lines above callers changes output
        return new Throwable().fillInStackTrace().getStackTrace()[1].getLineNumber();
    }
public static void main(String[] args) {
System.out.println(getLineNo());<|fim▁hole|>
System.out.println(new LineNumbersTest(2));
List<Object> foo = new ArrayList<>();
System.out.println(foo.addAll(foo));
}
}<|fim▁end|> | System.out.println(new Throwable().fillInStackTrace().getStackTrace()[0].getFileName());
System.out.println(getLineNo()); |
<|file_name|>test_preprocess.py<|end_file_name|><|fim▁begin|>#
# This is Seisflows
#
# See LICENCE file
#
###############################################################################
# Import system modules
import sys
import traceback
from glob import glob
from os.path import abspath, basename, dirname, exists
# Import Numpy
import numpy as np
# Local imports
from seisflows.config import ParameterError
from seisflows.workflow.base import base
PAR = sys.modules['seisflows_parameters']
PATH = sys.modules['seisflows_paths']
preprocess = sys.modules['seisflows_preprocess']
class test_preprocess(base):
""" Signal processing integration test
"""
def check(self):
""" Checks parameters and paths
"""
# data file format
if 'FORMAT' not in PAR:
raise ParameterError(PAR, 'FORMAT')
# data normalization option
if 'NORMALIZE' not in PAR:
setattr(PAR, 'NORMALIZE', None)
# data muting option
if 'MUTE' not in PAR:
setattr(PAR, 'MUTE', None)
# data filtering option
if 'FILTER' not in PAR:
setattr(PAR, 'FILTER', None)
if 'DATA' not in PATH:
raise Exception
if not exists(PATH.DATA):
raise Exception
if 'SYNTHETICS' not in PATH:
setattr(PATH, 'SYNTHETICS', '')
if PATH.SYNTHETICS:
assert exists(PATH.SYNTHETICS)
if 'WORKDIR' not in PATH:<|fim▁hole|> """ Tests data processing methods
"""
print 'testing reader...'
data = self.test_reader()
print 'testing writer...'
self.test_writer(data)
if PAR.NORMALIZE:
print 'testing normalizing...'
self.test_normalize(data)
if PAR.FILTER:
print 'testing filtering...'
self.test_filter(data)
if PAR.MUTE:
print 'testing muting...'
self.test_mute(data)
if PAR.MISFIT and \
PATH.DATA and \
PATH.SYNTHETICS:
dat = preprocess.reader(dirname(PATH.DATA),
basename(PATH.DATA))
syn = preprocess.reader(dirname(PATH.SYNTHETICS),
basename(PATH.SYNTHETICS))
print 'testing misfit...'
self.test_misfit(dat, syn)
print 'testing adjoint...'
self.test_adjoint(dat, syn)
print 'SUCCESS\n'
def test_reader(self):
try:
preprocess.setup()
except Exception, e:
print 'setup FAILED\n'
sys.exit(-1)
try:
data = preprocess.reader(dirname(PATH.DATA),
basename(PATH.DATA))
except Exception, e:
print 'reader FAILED'
sys.exit(-1)
else:
print ''
return data
def test_writer(self, data):
try:
if PAR.FORMAT in ['SU', 'su']:
extension = '.su'
else:
extension = ''
preprocess.writer(data, PATH.WORKDIR, 'output_data'+extension)
except Exception, e:
print 'writer FAILED\n'
print e.message
print e.__class__.__name__
traceback.print_exc(e)
sys.exit(-1)
else:
print ''
def test_normalize(self, dat):
try:
out = preprocess.apply_normalize(dat)
except Exception, e:
print 'normalization FAILED\n'
print e.message
print e.__class__.__name__
traceback.print_exc(e)
sys.exit(-1)
else:
self.save(out, 'output_data_normalized')
print ''
def test_filter(self, dat):
try:
out = preprocess.apply_filter(dat)
except Exception, e:
print 'filtering FAILED\n'
print e.message
print e.__class__.__name__
traceback.print_exc(e)
sys.exit(-1)
else:
self.save(out, 'output_data_filtered')
print ''
def test_mute(self, dat):
try:
out = preprocess.apply_mute(dat)
except Exception, e:
print 'muting FAILED\n'
print e.message
print e.__class__.__name__
traceback.print_exc(e)
sys.exit(-1)
else:
self.save(out, 'output_data_muted')
print ''
def test_misfit(self, dat, syn):
nt, dt, _ = preprocess.get_time_scheme(syn)
nn, _ = preprocess.get_network_size(syn)
rsd = []
for ii in range(nn):
rsd.append(preprocess.misfit(syn[ii].data, dat[ii].data, nt, dt))
filename = PATH.WORKDIR+'/'+'output_misfit'
np.savetxt(filename, rsd)
print ''
def test_adjoint(self, dat, syn):
nt, dt, _ = preprocess.get_time_scheme(syn)
nn, _ = preprocess.get_network_size(syn)
adj = syn
for ii in range(nn):
adj[ii].data = preprocess.adjoint(syn[ii].data, dat[ii].data, nt,
dt)
self.save(adj, 'output_adjoint')
print ''
def save(self, data, filename):
if PAR.FORMAT in ['SU', 'su']:
extension = '.su'
else:
extension = ''
preprocess.writer(data, PATH.WORKDIR, filename+extension)<|fim▁end|> | setattr(PATH, 'WORKDIR', abspath('.'))
def main(self): |
<|file_name|>arrayCopy.js<|end_file_name|><|fim▁begin|>/**
* Copies the values of `source` to `array`.
*
* @private
* @param {Array} source The array to copy values from.
* @param {Array} [array=[]] The array to copy values to.
* @returns {Array} Returns `array`.<|fim▁hole|> */
/**
 * Copies the values of `source` to `array`, allocating a new array of the
 * same length when none is supplied.
 */
function arrayCopy(source, array) {
  var index = -1,
      length = source.length;

  array || (array = Array(length));
  // Bug fix: the loop previously incremented an undeclared `i`, which threw
  // a ReferenceError and never copied anything; use `index` as declared.
  while (++index < length) {
    array[index] = source[index];
  }
  return array;
}
module.exports = arrayCopy;<|fim▁end|> | |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
from argparse import ArgumentParser
# Command-line boilerplate: no options are defined yet, but parse_args()
# still provides --help handling for free.
parser = ArgumentParser()
args = parser.parse_args()
from sys import exit
from subtlenet.models import cluster as train
import numpy as np
from subtlenet.utils import mpl, plt
from mpl_toolkits.mplot3d import Axes3D
# Configure and train a small radial-clustering model with a 2-D encoding.
train.NEPOCH = 10
train.encoded_size=2
dims = train.instantiate('RadialClustering')
gen = train.setup_data(batch_size=100)
clusterer, encoder = train.build_model(dims, w_ae=0.1)
train.train(clusterer, 'cluster', gen['train'], gen['validate'])
# Pull one large labelled batch for plotting/inspection.
plotgen = train.gen(batch_size=1000, label=True)()
i, o, _ = next(plotgen)
i = i[0]
p = clusterer.predict(i)[1]  # used with argmax below as cluster assignment
d = clusterer.predict(i)[0]  # plotted later as the "autoencoded" output
e = encoder.predict(i)       # plotted later as the 2-D "encoded" coordinates
print
# Spot-check a few samples from each end of the batch.
for b in xrange(1,4):
    print i[b], d[b], p[b], np.argmax(p[b]), o[-1][b], e[b]
    print i[-b], d[-b], p[-b], np.argmax(p[-b]), o[-1][-b], e[-b]
print
w = clusterer.get_weights()[-1][0]  # final-layer weights (plotted as centers)
print w
# make 2d plots
plt.clf()
cls = np.argmax(p, axis=-1)
mask = cls == 0
plt.scatter(e[:,0][mask], e[:,1][mask], c='b', alpha=0.5)
mask = cls == 1
plt.scatter(e[:,0][mask], e[:,1][mask], c='r', alpha=0.5)
plt.scatter(w[0], w[1], c='k')
plt.savefig('/home/snarayan/public_html/figs/clustering/encoded.png',bbox_inches='tight',dpi=300)
plt.savefig('/home/snarayan/public_html/figs/clustering/encoded.pdf')
plt.clf()
cls = np.argmax(p, axis=-1)
mask = o[-1] < 0.75
plt.scatter(e[:,0][mask], e[:,1][mask], c='k', alpha=0.5)
mask = o[-1] > 0.75
plt.scatter(e[:,0][mask], e[:,1][mask], c='m', alpha=0.5)
plt.savefig('/home/snarayan/public_html/figs/clustering/encoded_truth.png',bbox_inches='tight',dpi=300)
plt.savefig('/home/snarayan/public_html/figs/clustering/encoded_truth.pdf')
plt.clf()
fig = plt.figure()
ax = Axes3D(fig)
mask = cls == 0
ax.scatter(i[mask,0], i[mask,1], i[mask,2], c='b', alpha=0.5)
mask = cls == 1
ax.scatter(i[mask,0], i[mask,1], i[mask,2], c='r', alpha=0.5)
plt.savefig('/home/snarayan/public_html/figs/clustering/original_clust.png',bbox_inches='tight',dpi=300)
plt.savefig('/home/snarayan/public_html/figs/clustering/original_clust.pdf')
plt.clf()
fig = plt.figure()
ax = Axes3D(fig)
mask = o[-1] < 0.75
ax.scatter(i[mask,0], i[mask,1], i[mask,2], c='k', alpha=0.5)
mask = o[-1] > 0.75
ax.scatter(i[mask,0], i[mask,1], i[mask,2], c='m', alpha=0.5)
plt.savefig('/home/snarayan/public_html/figs/clustering/original.png',bbox_inches='tight',dpi=300)
plt.savefig('/home/snarayan/public_html/figs/clustering/original.pdf')
plt.clf()
fig = plt.figure()<|fim▁hole|>ax.scatter(d[mask,0], d[mask,1], d[mask,2], c='b', alpha=0.5)
mask = cls == 1
ax.scatter(d[mask,0], d[mask,1], d[mask,2], c='r', alpha=0.5)
plt.savefig('/home/snarayan/public_html/figs/clustering/autoencoded.png',bbox_inches='tight',dpi=300)
plt.savefig('/home/snarayan/public_html/figs/clustering/autoencoded.pdf')<|fim▁end|> | ax = Axes3D(fig)
mask = cls == 0 |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.base
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Contains BaseCache class which can be used as in-memory cache backend or
extended to support persistence.
"""
from datetime import datetime
import hashlib
from copy import copy
from io import BytesIO
import requests
from ..compat import is_py2, urlencode, urlparse, urlunparse, parse_qsl
_DEFAULT_HEADERS = requests.utils.default_headers()
<|fim▁hole|> :attr:`keys_map` and :attr:`responses` or override public methods.
"""
    def __init__(self, *args, **kwargs):
        #: `key` -> `key_in_responses` mapping
        self.keys_map = {}
        #: `key_in_cache` -> `response` mapping
        self.responses = {}
        # When True, GET request headers participate in cache-key creation.
        self._include_get_headers = kwargs.get("include_get_headers", False)
        # Parameters excluded when building cache keys -- presumably consumed
        # by create_key; confirm there.
        self._ignored_parameters = set(kwargs.get("ignored_parameters") or [])
""" Save response to cache
:param key: key for this response
:param response: response to save
.. note:: Response is reduced before saving (with :meth:`reduce_response`)
to make it picklable
"""
self.responses[key] = self.reduce_response(response), datetime.utcnow()
def add_key_mapping(self, new_key, key_to_response):
"""
Adds mapping of `new_key` to `key_to_response` to make it possible to
associate many keys with single response
:param new_key: new key (e.g. url from redirect)
:param key_to_response: key which can be found in :attr:`responses`
:return:
"""
self.keys_map[new_key] = key_to_response
def get_response_and_time(self, key, default=(None, None)):
""" Retrieves response and timestamp for `key` if it's stored in cache,
otherwise returns `default`
:param key: key of resource
:param default: return this if `key` not found in cache
:returns: tuple (response, datetime)
.. note:: Response is restored after unpickling with :meth:`restore_response`
"""
try:
if key not in self.responses:
key = self.keys_map[key]
response, timestamp = self.responses[key]
except KeyError:
return default
return self.restore_response(response), timestamp
def delete(self, key):
    """ Delete `key` from cache. Also deletes all responses from response history.

    ``key`` may be a primary key into :attr:`responses` (the stored response
    is removed) or an alias from :attr:`keys_map` (only the alias mapping is
    removed).  In both cases, the alias entries created for the response's
    redirect history are dropped as well.  Missing keys are silently ignored.
    """
    try:
        if key in self.responses:
            response, _ = self.responses[key]
            del self.responses[key]
        else:
            response, _ = self.responses[self.keys_map[key]]
            del self.keys_map[key]
        # Remove alias entries for every redirect in the stored history;
        # their keys are recomputed from the original requests.
        for r in response.history:
            del self.keys_map[self.create_key(r.request)]
    except KeyError:
        pass
def delete_url(self, url):
    """ Delete response associated with `url` from cache.
    Also deletes all responses from response history. Works only for GET requests
    (the key is derived from a plain GET for ``url``).
    """
    self.delete(self._url_to_key(url))
def clear(self):
    """Remove every cached response and every key alias."""
    self.keys_map.clear()
    self.responses.clear()
def has_key(self, key):
    """Return ``True`` when ``key`` is a stored response key or an alias."""
    return any(key in store for store in (self.responses, self.keys_map))
def has_url(self, url):
    """ Returns `True` if cache has `url`, `False` otherwise.
    Works only for GET request urls (the lookup key is derived from a
    plain GET for ``url``).
    """
    return self.has_key(self._url_to_key(url))

def _url_to_key(self, url):
    # Build the cache key a plain GET for ``url`` would produce, by
    # preparing a throwaway request the same way requests itself would
    # (so URL normalisation matches real requests).
    session = requests.Session()
    return self.create_key(session.prepare_request(requests.Request('GET', url)))
# Attributes copied between a real ``requests.Response`` and its reduced
# snapshot (see ``reduce_response`` / ``restore_response``).
_response_attrs = ['_content', 'url', 'status_code', 'cookies',
                   'headers', 'encoding', 'request', 'reason', 'raw']

# Attributes copied from the urllib3 raw response onto ``_RawStore``
# (see ``_picklable_field``).
_raw_response_attrs = ['_original_response', 'decode_content', 'headers',
                       'reason', 'status', 'strict', 'version']
def reduce_response(self, response, seen=None):
    """ Reduce response object to make it compatible with ``pickle``.

    Copies the attributes listed in :attr:`_response_attrs` onto a plain
    :class:`_Store` object and recursively reduces the redirect history.

    :param response: ``requests.Response`` to reduce
    :param seen: memo dict (``id(response)`` -> reduced object) used to
        avoid reducing the same response twice within a history chain
    """
    if seen is None:
        seen = {}
    try:
        return seen[id(response)]
    except KeyError:
        pass
    result = _Store()

    # prefetch: force the body to be read so ``_content`` is populated
    # before we snapshot it
    response.content
    for field in self._response_attrs:
        setattr(result, field, self._picklable_field(response, field))
    # Register before recursing so history entries can reuse this snapshot.
    seen[id(response)] = result
    result.history = tuple(self.reduce_response(r, seen) for r in response.history)
    return result
def _picklable_field(self, response, name):
    """Return a pickle-safe copy of ``response.<name>``.

    ``request`` objects get their (unpicklable) hooks stripped on a copy;
    ``raw`` urllib3 responses are flattened onto a :class:`_RawStore`.
    Other attributes are passed through unchanged.
    """
    value = getattr(response, name)
    if name == 'request':
        # Copy first so the live request keeps its hooks.
        value = copy(value)
        value.hooks = []
    elif name == 'raw':
        result = _RawStore()
        for field in self._raw_response_attrs:
            setattr(result, field, getattr(value, field, None))
        if result._original_response is not None:
            setattr(result._original_response, "fp", None)  # _io.BufferedReader is not picklable
        value = result
    return value
def restore_response(self, response, seen=None):
    """ Restore response object after unpickling.

    Rebuilds a real ``requests.Response`` from a reduced snapshot produced
    by :meth:`reduce_response`, recursively restoring the redirect history.

    :param response: reduced response (a :class:`_Store`)
    :param seen: memo dict (``id(response)`` -> restored object) used to
        avoid restoring the same snapshot twice within a history chain
    """
    if seen is None:
        seen = {}
    try:
        return seen[id(response)]
    except KeyError:
        pass
    result = requests.Response()
    for field in self._response_attrs:
        setattr(result, field, getattr(response, field, None))
    # Stash the body on the raw store so ``_RawStore.read`` can stream it.
    result.raw._cached_content_ = result.content
    seen[id(response)] = result
    result.history = tuple(self.restore_response(r, seen) for r in response.history)
    return result
def _remove_ignored_parameters(self, request):
def filter_ignored_parameters(data):
return [(k, v) for k, v in data if k not in self._ignored_parameters]
url = urlparse(request.url)
query = parse_qsl(url.query)
query = filter_ignored_parameters(query)
query = urlencode(query)
url = urlunparse((url.scheme, url.netloc, url.path, url.params, query, url.fragment))
body = request.body
content_type = request.headers.get('content-type')
if body and content_type:
if content_type == 'application/x-www-form-urlencoded':
body = parse_qsl(body)
body = filter_ignored_parameters(body)
body = urlencode(body)
elif content_type == 'application/json':
import json
body = json.loads(body)
body = filter_ignored_parameters(sorted(body.items()))
body = json.dumps(body)
return url, body
def create_key(self, request):
    """Build a SHA-256 cache key for ``request``.

    The key hashes the upper-cased HTTP method and the URL, plus the body
    when present.  For body-less requests, headers are mixed in as well
    when ``include_get_headers`` is enabled and they differ from the
    requests default headers.
    """
    if self._ignored_parameters:
        url, body = self._remove_ignored_parameters(request)
    else:
        url, body = request.url, request.body
    key = hashlib.sha256()
    key.update(_to_bytes(request.method.upper()))
    key.update(_to_bytes(url))
    # NOTE(review): the guard tests the *unfiltered* request.body but hashes
    # the filtered ``body`` -- if filtering empties the body this still takes
    # the body branch; confirm that asymmetry is intended.
    if request.body:
        key.update(_to_bytes(body))
    else:
        if self._include_get_headers and request.headers != _DEFAULT_HEADERS:
            # Sorted so header order never changes the key.
            for name, value in sorted(request.headers.items()):
                key.update(_to_bytes(name))
                key.update(_to_bytes(value))
    return key.hexdigest()
def __str__(self):
return 'keys: %s\nresponses: %s' % (self.keys_map, self.responses)
# used for saving response attributes
class _Store(object):
    """Plain attribute bag: picklable snapshot of a ``requests.Response``."""
    pass
class _RawStore(object):
    """Minimal stand-in for urllib3's raw response on restored responses."""

    # noop for cached response -- there is no live connection to release
    def release_conn(self):
        pass

    # for streaming requests support
    def read(self, chunk_size=1):
        # ``_cached_content_`` is attached by ``BaseCache.restore_response``;
        # lazily wrap it in a BytesIO so repeated reads keep their position.
        if not hasattr(self, "_io_with_content_"):
            self._io_with_content_ = BytesIO(self._cached_content_)
        return self._io_with_content_.read(chunk_size)
def _to_bytes(s, encoding='utf-8'):
    """Return ``s`` as bytes, encoding text with ``encoding`` when needed.

    On Python 2 (or when ``s`` is already ``bytes``) the value is returned
    unchanged; on Python 3 text is encoded.  (Fixes a stray end-of-chunk
    marker that had been fused onto the final line.)
    """
    if is_py2 or isinstance(s, bytes):
        return s
    return bytes(s, encoding)
""" Base class for cache implementations, can be used as in-memory cache.
To extend it you can provide dictionary-like objects for |
<|file_name|>column.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("Column", inherited=False) %>
${helpers.predefined_type("column-width",
"length::LengthOrAuto",
"Either::Second(Auto)",
initial_specified_value="Either::Second(Auto)",
parse_method="parse_non_negative_length",
extra_prefixes="moz",
animation_value_type="ComputedValue",
experimental=True,
spec="https://drafts.csswg.org/css-multicol/#propdef-column-width")}
${helpers.predefined_type("column-count",
"IntegerOrAuto",
"Either::Second(Auto)",
parse_method="parse_positive",
initial_specified_value="Either::Second(Auto)",
experimental="True",
animation_value_type="ComputedValue",<|fim▁hole|> "length::LengthOrNormal",
"Either::Second(Normal)",
parse_method='parse_non_negative_length',
extra_prefixes="moz",
experimental=True,
animation_value_type="ComputedValue",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-gap")}
${helpers.single_keyword("column-fill", "balance auto", extra_prefixes="moz",
products="gecko", animation_value_type="none",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-fill")}
${helpers.predefined_type("column-rule-width", "BorderWidth", "Au::from_px(3)",
initial_specified_value="specified::BorderWidth::Medium",
products="gecko", computed_type="::app_units::Au",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-width",
animation_value_type="ComputedValue", extra_prefixes="moz")}
// https://drafts.csswg.org/css-multicol-1/#crc
${helpers.predefined_type("column-rule-color", "CSSColor",
"::cssparser::Color::CurrentColor",
initial_specified_value="specified::CSSColor::currentcolor()",
products="gecko", animation_value_type="IntermediateColor", extra_prefixes="moz",
complex_color=True, need_clone=True,
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-color")}
${helpers.single_keyword("column-span", "none all",
products="gecko", animation_value_type="none",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-span")}
${helpers.single_keyword("column-rule-style",
"none hidden dotted dashed solid double groove ridge inset outset",
products="gecko", extra_prefixes="moz",
gecko_constant_prefix="NS_STYLE_BORDER_STYLE",
animation_value_type="none",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-style")}<|fim▁end|> | extra_prefixes="moz",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-count")}
${helpers.predefined_type("column-gap", |
<|file_name|>body_config.py<|end_file_name|><|fim▁begin|>from collections import namedtuple<|fim▁hole|># Size and Weight Constants
TOTAL_MASS = 20 # Made up units
TOTAL_HEIGHT = 350 # Pygame pixels
STARTING_SPEED = 0, 0 # pixels/sec?
BASE_STRENGTH = 1500000
# Mass Fractions #
mass_fractions = {
"head": 0.0826,
"torso": 0.551,
"upper_arm": 0.0325,
"forearm": 0.0187 + 0.0065, # Including hand
"thigh": 0.105,
"calf": 0.0475,
"foot": 0.0143
}
# Segment Masses
masses = {}
for segment in mass_fractions:
masses[segment] = mass_fractions[segment] * TOTAL_MASS
# Height Fractions #
height_fractions = {
"head": 0.2, # Larger for cartoon, anatomically correct is 0.1075
"torso": 0.3,
"upper_arm": 0.172,
"forearm": 0.157 + 0.057, # Including hand
"thigh": 0.25, # standard is .232
"calf": 0.23, # standard is .247
"foot": 0.1 # Counts foot length, not height
}
# Segment Lengths
lengths = {}
for segment in height_fractions:
lengths[segment] = height_fractions[segment] * TOTAL_HEIGHT
# Joint Constraints #
joint_ranges = {
"neck": (3 * pi / 4, 5 * pi / 4),
"elbow": (0, 3 * pi / 4),
"shoulder": (-pi / 2, pi),
"hip": (-pi / 8, pi / 2),
"knee": (-3 * pi / 4, 0),
"ankle": (0, 2 * pi / 3)
}
joint_strengths = {
"neck": .15 * BASE_STRENGTH,
"elbow": .3 * BASE_STRENGTH,
"shoulder": .5 * BASE_STRENGTH,
"hip": .8 * BASE_STRENGTH,
"knee": .8 * BASE_STRENGTH,
"ankle": .4 * BASE_STRENGTH
}
# Collision Types #
collision_types = {
"upper": 1,
"lower": 2,
"ground": 3
}
body_collision_types = {
"torso": collision_types["upper"],
"head": collision_types["upper"],
"upper_arm": collision_types["upper"],
"forearm": collision_types["upper"],
"thigh": collision_types["upper"],
"calf": collision_types["lower"],
"foot": collision_types["lower"]
}
# Images
images = {
"torso": pygame.image.load("images/torso.bmp"),
"head": pygame.image.load("images/head.bmp"),
"upper_arm": pygame.image.load("images/upper_arm.bmp"),
"forearm": pygame.image.load("images/forearm.bmp"),
"thigh": pygame.image.load("images/thigh.bmp"),
"calf": pygame.image.load("images/leg.bmp"),
"foot": pygame.image.load("images/foot.bmp")
}
SegmentInfo = namedtuple('SegmentInfo', 'mass length start_speed collision_type image')
segments = {}
# todo I don't like this loop, it assumes that all other dictionaries have the same keys
for key in mass_fractions:
segments[key] = SegmentInfo(masses[key], lengths[key], STARTING_SPEED, body_collision_types[key], images[key])
JointInfo = namedtuple('JointInfo', 'range max_torque')
joints = {}
for key in joint_ranges:
joints[key] = JointInfo(joint_ranges[key], joint_strengths[key])<|fim▁end|> | from math import pi
import pygame
|
<|file_name|>earthmine_qgis.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import copy
import json
import math
from functools import partial
from PyQt4.QtCore import QSettings, QTranslator, qVersion, QCoreApplication, pyqtSignal, QObject, pyqtSlot, Qt, QUrl, \
QRectF, SIGNAL, QPointF, QLineF
from PyQt4.QtGui import QAction, QIcon, QPainter, QPen, QBrush, QColor, QPixmap, QCursor, QPolygon
from PyQt4.QtSvg import QSvgRenderer
# Initialize Qt resources from file resources.py
from qgis._core import QgsMapLayerRegistry
import resources_rc
# Import the code for the dialog
from viewer import Viewer
from settingsdialog import SettingsDialog
import os.path
import contextlib
from qgis.core import QgsMessageLog, QgsCoordinateTransform, QgsCoordinateReferenceSystem, QgsPoint, QgsRectangle, \
QgsMapLayerRegistry, QGis, QgsGeometry, QgsFeatureRequest, QgsFeature, QgsDistanceArea, QgsRenderContext, QgsMapLayer
from qgis.gui import QgsMapCanvasItem, QgsMapToolEmitPoint, QgsMessageBar, QgsAttributeDialog, QgsRubberBand
class EarthmineSettingsError(Exception):
    """Raised when a required Earthmine plugin setting is missing or unset."""
    pass
@contextlib.contextmanager
def settinggroup(settings, name):
    """Context manager scoping QSettings access to the group ``name``.

    Guarantees ``endGroup`` is called even when the with-body raises, so the
    settings object is never left stuck inside a stale group.

    :param settings: a ``QSettings``-like object with begin/endGroup
    :param name: group name to enter
    """
    settings.beginGroup(name)
    try:
        yield settings
    finally:
        settings.endGroup()
def maplayers():
    """All layers currently registered with the QGIS map layer registry."""
    return QgsMapLayerRegistry.instance().mapLayers().values()
def layer_by_name(name):
    """Return the first registered map layer called ``name``.

    Raises ``IndexError`` when no layer with that name exists.
    """
    return QgsMapLayerRegistry.instance().mapLayersByName(name)[0]

def layer_by_id(layerid):
    """Return the registered map layer with id ``layerid``."""
    return QgsMapLayerRegistry.instance().mapLayer(layerid)
def feature_by_id(layer, featureid):
    """Fetch a single feature from ``layer`` by (string or int) id.

    Raises ``StopIteration`` if no feature with that id exists.
    """
    rq = QgsFeatureRequest(int(featureid))
    feature = layer.getFeatures(rq).next()
    return feature
def get_color(render, feature):
    """Return the feature's symbol colour as a packed 0xRRGGBB integer.

    Falls back to green (``0x00ff00``) when the renderer has no symbol for
    the feature.  (The previous fallback returned the *string* "0x00ff00",
    inconsistent with the integer returned for styled features.)

    :param render: layer renderer providing ``symbolForFeature``
    :param feature: feature whose symbol colour is wanted
    """
    symbol = render.symbolForFeature(feature)
    if not symbol:
        return 0x00ff00
    # name() returns the colour as "#rrggbb"; parse the hex digits.
    name = symbol.color().name()
    return int(name[1:], 16)
def search_area(units, distancearea, point):
    """Return a rectangular search window around ``point``.

    The window is the bounding box of a circular buffer whose radius is
    100 metres converted into the map's ``units``.

    :param units: QGis distance-unit enum of the current map
    :param distancearea: configured ``QgsDistanceArea`` used for conversion
    :param point: ``QgsPoint`` centre of the search
    """
    # NOTE(review): return value unused -- possibly a leftover call.
    distancearea.sourceCrs()
    distance = 100
    # convertMeasurement returns (value, units) -- we use only the value.
    distance = distancearea.convertMeasurement(distance, QGis.Meters, units, False)
    QgsMessageLog.logMessage(str(distance), "Earthmine")
    QgsMessageLog.logMessage(str(units), "Earthmine")
    geom = QgsGeometry.fromPoint(point)
    rect = geom.buffer(distance[0], 10).boundingBox()
    return rect
class EarthminePoint():
    """A point from the Earthmine viewer paired with its QGIS geometry.

    All keys of ``pointdata`` (e.g. ``lat``, ``lng``, ``alt``) become
    attributes of the instance.
    """

    def __init__(self, qgispoint, pointdata):
        # Copy viewer-supplied fields (lat/lng/alt/...) onto the instance.
        for k, v in pointdata.items():
            setattr(self, k, v)
        self.qgispoint = QgsGeometry.fromPoint(qgispoint)

    def distance(self, point):
        """Planar distance between the two underlying geometries (CRS units)."""
        return self.qgispoint.distance(point.qgispoint)
def height_diff(p1, p2):
    """Vertical difference ``p1.alt - p2.alt``; 0 when either altitude is falsy."""
    if not (p1.alt and p2.alt):
        return 0
    try:
        return p1.alt - p2.alt
    except IndexError:
        return 0
def safe_disconnect(signal, method):
    """Best-effort disconnect of ``method`` from a Qt ``signal``.

    PyQt raises ``TypeError`` when the slot was never connected; this helper
    swallows that case so teardown code can disconnect unconditionally.
    """
    try:
        signal.disconnect(method)
    except TypeError:
        # Slot was not connected -- nothing to undo.
        pass
class EarthmineLine():
    """A measured polyline from the Earthmine viewer plus measurement stats.

    ``stats`` is the dict produced by the viewer with keys ``2D``, ``3D``,
    ``2D-Total``, ``3D-Total`` and ``height`` (presumably metres -- TODO
    confirm against the viewer JavaScript).
    """

    def __init__(self, points, stats):
        self.points = points
        self._stats = stats
        self.dist = QgsDistanceArea()
        # Bound method shortcut used by convert_to.
        self.convert = self.dist.convertMeasurement

    @property
    def slope(self):
        """Slope of the whole line as a percentage (rise over 2D run * 100)."""
        run = self.total_length_unadjusted
        QgsMessageLog.logMessage(str(run), "Earthmine")
        height = self._stats['height']
        QgsMessageLog.logMessage(str(height), "Earthmine")
        if not height:
            return 0
        try:
            return height / run * 100
        except ZeroDivisionError:
            return 0

    @property
    def total_length(self):
        """Total slope-adjusted (3D) length reported by the viewer."""
        return self._stats['3D-Total']

    @property
    def total_length_unadjusted(self):
        """Total planar (2D) length reported by the viewer."""
        return self._stats['2D-Total']

    @property
    def total_height(self):
        """Absolute total rise/fall; 0 when the viewer reported no height."""
        height = self._stats['height']
        if not height:
            return 0
        return abs(height)

    @property
    def slope_display(self):
        """Slope formatted for display, e.g. ``"12.5%"``."""
        return str(self.slope) + "%"

    def stats(self, units, mode):
        """Return (total length, segment length, slope) display strings."""
        return self.total_length_display(units, mode), \
               self.segment_length_display(units, mode), \
               self.slope_display

    def total_length_display(self, units, mode):
        """Total length as display text for the given measurement ``mode``."""
        if mode == "3D":
            return self.convert_to(self.total_length, units)
        elif mode == "Horizontal":
            return self.convert_to(self.total_length_unadjusted, units)
        elif mode == "Vertical":
            return self.convert_to(abs(self.total_height), units)
        else:
            return ""

    def segment_length_display(self, units, mode):
        """Last-segment length as display text for the given ``mode``."""
        if mode == "3D":
            return self.convert_to(self._stats['3D'], units)
        if mode == "Horizontal":
            return self.convert_to(self._stats['2D'], units)
        elif mode == "Vertical":
            return self.convert_to(abs(self.total_height), units)
        else:
            return ""

    def segments(self):
        """Yield per-segment stats for each consecutive pair of points."""
        it = zip(self.points, self.points[1:])
        for start, end in it:
            yield self.segment(start, end)

    def segment(self, start, end):
        """Stats for one segment: planar length, 3D length, height difference."""
        startlength = start.distance(end)
        height = height_diff(end, start)
        # 3D length from the planar length and the altitude difference.
        length = math.sqrt(startlength ** 2 + height ** 2)
        return dict(length=startlength, adjusted=length, height=height)

    def convert_to(self, length, units):
        """Convert ``length`` into ``units`` and format it as display text."""
        # Second argument 0 is the source unit enum -- presumably
        # QGis.Meters in the QGIS 2 API; TODO confirm.
        length, _ = self.convert(length, 0, units, False)
        length = QgsDistanceArea.textUnit(length, 3, units, False, True)
        return length
def to_feature_data(layerid, feature, renderer, transform):
    """
    Transform the feature into the data for the viewer to use.

    :param layerid: id of the layer the feature belongs to
    :param feature: QgsFeature
    :param renderer: layer renderer used to pick the feature colour
    :param transform: WGS84 <-> layer transform (applied in reverse, so
        coordinates are emitted as WGS84 lat/lng)
    :return: list of dicts -- one per part, so multipart lines yield several
    """
    def polylinenodes(polyline):
        # Convert each vertex back to WGS84 lat/lng for the viewer.
        nodes = []
        for point in polyline:
            point = transform.transform(point, QgsCoordinateTransform.ReverseTransform)
            location = dict(lat=point.y(), lng=point.x())
            nodes.append(location)
        return nodes

    geom = feature.geometry()
    geomtype = geom.type()
    featuredata = []
    data = dict(id=feature.id(),
                layerid=layerid,
                color=get_color(renderer, feature),
                geomtype=QGis.vectorGeometryType(geomtype))
    if geomtype == QGis.Point:
        geom = geom.asPoint()
        point = transform.transform(geom, QgsCoordinateTransform.ReverseTransform)
        try:
            # Carry a "Z" attribute through when the layer has one;
            # missing or falsy values default to 0.
            z = feature['Z']
            if not z:
                z = 0
        except KeyError:
            z = 0
        location = dict(lat=point.y(), lng=point.x(), z=z)
        data['nodes'] = [location]
        featuredata.append(data)
    elif geomtype == QGis.Line:
        if geom.isMultipart():
            # Copy the data for each polyline
            for polyline in geom.asMultiPolyline():
                newdata = copy.copy(data)
                newdata['nodes'] = polylinenodes(polyline)
                featuredata.append(newdata)
        else:
            data['nodes'] = polylinenodes(geom.asPolyline())
            featuredata.append(data)
    return featuredata
def get_features_in_area(layer, area, transform, mapsettings):
    """
    Return all the features for the given layer in the search area.

    :param layer: Search layer
    :param area: Search area (rectangle in the layer's CRS)
    :param transform: WGS84 <-> layer transform passed to ``to_feature_data``
    :param mapsettings: current map settings, needed to build a render context
    :return: yields a dict for each feature part found in the area
    """
    renderer = layer.rendererV2()
    layerid = layer.id()
    # start/stopRender must bracket the symbolForFeature calls made
    # indirectly through get_color.
    context = QgsRenderContext.fromMapSettings(mapsettings)
    renderer.startRender(context, layer.pendingFields())
    for feature in layer.getFeatures(QgsFeatureRequest(area)):
        featuredata = to_feature_data(layerid, feature, renderer, transform)
        for data in featuredata:
            yield data
    # NOTE(review): stopRender is skipped if the caller abandons this
    # generator before exhaustion.
    renderer.stopRender(context)
def get_feature_form(layer, feature, isadd=False):
    """Create a QGIS attribute dialog for ``feature`` on ``layer``.

    :param isadd: when True the dialog behaves as an "add feature" form
    """
    dlg = QgsAttributeDialog(layer, feature, False, None)
    dlg.setIsAddDialog(isadd)
    return dlg
class EarthMineQGIS(QObject):
"""QGIS Plugin Implementation."""
def __init__(self, iface):
"""Constructor.
:param iface: An interface instance that will be passed to this class
which provides the hook by which you can manipulate the QGIS
application at run time.
:type iface: QgsInterface
"""
# Save reference to the QGIS interface
super(EarthMineQGIS, self).__init__()
self.movingfeature = None
self.iface = iface
self.viewer = None
self.canvas = self.iface.mapCanvas()
self.settings = QSettings()
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QSettings().value('locale/userLocale')[0:2]
locale_path = os.path.join(
self.plugin_dir,
'i18n',
'EarthMineQGIS_{}.qm'.format(locale))
if os.path.exists(locale_path):
self.translator = QTranslator()
self.translator.load(locale_path)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
self.pointtool = QgsMapToolEmitPoint(self.canvas)
self.pointtool.canvasClicked.connect(self.set_viewer_location)
self.settingsdialog = SettingsDialog(self.iface.mainWindow())
self.actions = []
self.menu = self.tr(u'&Earthmine')
self.toolbar = self.iface.addToolBar(u'EarthMineQGIS')
self.toolbar.setObjectName(u'EarthMineQGIS')
self.legend = self.iface.legendInterface()
emcolor = QColor(1, 150, 51)
self.tempband = QgsRubberBand(self.canvas, QGis.Line)
self.tempband.setWidth(5)
self.tempband.setColor(emcolor)
self.tempbandpoints = QgsRubberBand(self.canvas, QGis.Point)
self.tempbandpoints.setWidth(7)
self.tempbandpoints.setColor(emcolor)
self.movingband = QgsRubberBand(self.canvas, QGis.Point)
self.movingband.setWidth(5)
self.movingband.setColor(emcolor)
self.layersignals = []
self.marker = None
def initGui(self):
"""Create the menu entries and toolbar icons inside the QGIS GUI."""
icon_path = ':/icons/settings'
self.add_action(
icon_path,
text=self.tr(u'Show Settings'),
callback=self.show_settings,
parent=self.iface.mainWindow())
icon_path = ':/icons/viewer'
self.add_action(
icon_path,
text=self.tr(u'Earthmine Viewer'),
callback=self.open_viewer,
parent=self.iface.mainWindow())
self.marker = PostionMarker(self.canvas)
self.marker.hide()
self.viewer = Viewer(callbackobject=self)
self.viewer.trackingChanged.connect(self.marker.setTracking)
self.viewer.setLocationTriggered.connect(partial(self.canvas.setMapTool, self.pointtool))
self.viewer.updateFeatures.connect(self.update_earthmine_features)
self.viewer.layerChanged.connect(self.iface.setActiveLayer)
self.viewer.clearLine.connect(self.clear_bands)
self.viewer.closed.connect(self.remove_items)
self.iface.currentLayerChanged.connect(self.viewer.update_current_layer)
cursor = QCursor(QPixmap(":/icons/location"))
self.pointtool.setCursor(cursor)
self.pointtool.setAction(self.viewer.setlocationaction)
def remove_items(self):
    """Tear down map-canvas state when the viewer dock is closed."""
    self.marker.setTracking(False)
    self.disconnect_projectsignals()
    # Restore the default pan tool so the location tool is deactivated.
    self.iface.actionPan().trigger()
def unload(self):
"""Removes the plugin menu item and icon from QGIS GUI."""
self.canvas.scene().removeItem(self.marker)
del self.marker
self.disconnect_projectsignals()
for action in self.actions:
self.iface.removePluginMenu(
self.tr(u'&Earthmine'),
action)
self.iface.removeToolBarIcon(action)
del self.toolbar
self.iface.removeDockWidget(self.viewer)
self.viewer.deleteLater()
def disconnect_projectsignals(self):
safe_disconnect(QgsMapLayerRegistry.instance().layerWasAdded, self.connect_layer_signals)
safe_disconnect(QgsMapLayerRegistry.instance().layersRemoved, self.layers_removed)
safe_disconnect(self.canvas.layersChanged, self.layers_changed)
safe_disconnect(self.iface.projectRead, self.connect_signals)
safe_disconnect(self.canvas.selectionChanged, self.selection_changed)
safe_disconnect(self.canvas.selectionChanged, self.viewer.selection_changed)
def clear_bands(self):
    """Reset the temporary line/point rubber bands drawn while measuring."""
    self.tempband.reset(QGis.Line)
    self.tempbandpoints.reset(QGis.Point)
def visible_layers(self):
    """
    Return the visible layers shown in the map canvas.

    :return: generator of layers whose legend checkbox is enabled
    """
    for layer, visible in self.layers_with_states():
        if visible:
            yield layer
def layers_with_states(self):
for layer in maplayers():
if not layer.type() == QgsMapLayer.VectorLayer:
continue
if not layer.geometryType() in [QGis.Point, QGis.Line]:
continue
yield layer, self.legend.isLayerVisible(layer)
def _layer_feature_added(self, featureid):
layer = self.sender()
if not layer:
return
self.layer_feature_added(layer, featureid)
def layer_feature_added(self, layer, featureid):
if not self.viewer:
return
feature = layer.getFeatures(QgsFeatureRequest(featureid)).next()
renderer = layer.rendererV2()
transform = self.coordinatetransform(layer)
featuredata = to_feature_data(layer.id(), feature, renderer, transform)
geomtype = layer.geometryType()
layerdata = dict(id=layer.id(),
geomtype=QGis.vectorGeometryType(geomtype))
self.viewer.load_features(layerdata, featuredata)
def _layer_feature_delete(self, featureid):
layer = self.sender()
if not layer:
return
self.layer_feature_delete(layer, featureid)
def layer_feature_delete(self, layer, featureid):
if not self.viewer:
return
self.viewer.remove_feature(layer.id(), featureid)
def _layer_geometry_changed(self, featureid, geometry):
layer = self.sender()
if not layer:
return
self.layer_geometry_changed(layer, featureid, geometry)
def layer_geometry_changed(self, layer, featureid, geometry):
if not self.viewer:
return
geomtype = layer.geometryType()
if geomtype == QGis.Point:
geom = geometry.asPoint()
transform = self.coordinatetransform(layer)
point = transform.transform(geom, QgsCoordinateTransform.ReverseTransform)
location = dict(lat=point.y(), lng=point.x())
self.viewer.edit_feature(layer.id(), featureid, [location])
elif geomtype == QGis.Line:
self.layer_feature_delete(layer, featureid)
self.layer_feature_added(layer, featureid)<|fim▁hole|> return
layer.featureAdded.connect(self._layer_feature_added)
layer.featureDeleted.connect(self._layer_feature_delete)
layer.editingStarted.connect(self.layer_editstate_changed)
layer.editingStopped.connect(self.layer_editstate_changed)
# HACK The new style doesn't work here
# http://hub.qgis.org/issues/6573
signal = SIGNAL("geometryChanged(QgsFeatureId, QgsGeometry&)")
self.connect(layer, signal, self._layer_geometry_changed)
self.load_layer_features(layers=[layer])
def layer_editstate_changed(self):
    """Relay edit-mode toggles of the active layer to the viewer."""
    # sender() is the layer whose editingStarted/Stopped signal fired.
    layer = self.sender()
    if layer == self.iface.activeLayer():
        self.viewer.layer_changed(layer)
def disconnect_signals(self):
self.disconnect_projectsignals()
for layer in maplayers():
if not layer.type() == QgsMapLayer.VectorLayer:
return
safe_disconnect(layer.featureAdded, self._layer_feature_added)
safe_disconnect(layer.featureDeleted, self._layer_feature_delete)
safe_disconnect(layer.editingStarted, self.layer_editstate_changed)
safe_disconnect(layer.editingStopped, self.layer_editstate_changed)
# HACK The new style doesn't work here
# http://hub.qgis.org/issues/6573
signal = SIGNAL("geometryChanged(QgsFeatureId, QgsGeometry&)")
self.disconnect(layer, signal, self._layer_geometry_changed)
def connect_signals(self):
for layer in maplayers():
self.connect_layer_signals(layer)
self.center_on_canvas()
def set_viewer_location(self, point, mousebutton):
    """Map-tool callback: recentre the Earthmine viewer on a clicked point.

    :param point: clicked map point in the project CRS
    :param mousebutton: mouse button used (unused)
    """
    transform = self.coordinatetransform()
    # Project CRS -> WGS84 (reverse of the transform's forward direction).
    point = transform.transform(point, QgsCoordinateTransform.ReverseTransform)
    self.viewer.set_location(point)
def distancearea(self):
    """Return a ``QgsDistanceArea`` set up for the project CRS, plus its map units."""
    area = QgsDistanceArea()
    dest = self.canvas.mapRenderer().destinationCrs()
    area.setSourceCrs(dest)
    return area, dest.mapUnits()
def coordinatetransform(self, layer=None):
    """
    Return the transform for WGS84 -> QGIS projection.

    :param layer: when given, transform into the layer's CRS instead of
        the map canvas destination CRS.
    """
    # Source CRS is geographic WGS84, spelled out as WKT so no CRS
    # database lookup is needed.
    source = QgsCoordinateReferenceSystem()
    source.createFromWkt(
        'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]')
    if not layer:
        dest = self.canvas.mapRenderer().destinationCrs()
    else:
        dest = layer.crs()
    transform = QgsCoordinateTransform(source, dest)
    return transform
def earthmine_settings(self):
    """Read the Earthmine service settings from QSettings.

    :returns: dict with serviceUrl, baseDataUrl, apiKey, secretKey, viewerUrl
    :raises EarthmineSettingsError: when a required key is absent or empty.
    """
    settings = {}
    with settinggroup(self.settings, "plugins/Earthmine"):
        for key in ['serviceUrl', 'baseDataUrl', "apiKey", 'secretKey', 'viewerUrl']:
            if not self.settings.contains(key):
                raise EarthmineSettingsError("{} not set".format(key))
            value = self.settings.value(key, type=str)
            # QSettings returns an empty string (not None) for unset string
            # values, so reject any falsy value rather than only None.
            if not value:
                raise EarthmineSettingsError("{} not set".format(key))
            settings[key] = value
    return settings
@pyqtSlot()
def ready(self):
"""
Called when the viewer is ready to be started. At this point the viewer hasn't been loaded
so no other methods apart from startViewer will be handled.
"""
settings = self.earthmine_settings()
self.viewer.startViewer(settings)
@pyqtSlot()
def viewerReady(self):
"""
Called once the viewer is loaded and ready to get location events.
"""
self.disconnect_signals()
self.connect_signals()
self.iface.projectRead.connect(self.connect_signals)
self.canvas.layersChanged.connect(self.layers_changed)
self.canvas.selectionChanged.connect(self.selection_changed)
self.canvas.selectionChanged.connect(self.viewer.selection_changed)
QgsMapLayerRegistry.instance().layersRemoved.connect(self.layers_removed)
QgsMapLayerRegistry.instance().layerWasAdded.connect(self.connect_layer_signals)
self.center_on_canvas()
self.viewer.activelayercombo.setLayer(self.iface.activeLayer())
def center_on_canvas(self):
point = self.canvas.extent().center()
transform = self.coordinatetransform()
point = transform.transform(point, QgsCoordinateTransform.ReverseTransform)
self.viewer.set_location(point)
self.viewer.infoaction.toggle()
def selection_changed(self, layer):
    """Mirror QGIS selection changes for ``layer`` into the Earthmine viewer."""
    selected_ids = [f.id() for f in layer.selectedFeatures()]
    if selected_ids:
        self.viewer.set_selection(layer.id(), selected_ids)
    else:
        self.viewer.clear_selection(layer.id())
def layers_changed(self):
layerstates = self.layers_with_states()
for layer, visible in layerstates:
layerid = layer.id()
viewerloaded = self.viewer.layer_loaded(layerid)
QgsMessageLog.instance().logMessage(layerid, "Earthmine")
QgsMessageLog.instance().logMessage("Viewer State:" + str(viewerloaded), "Earthmine")
QgsMessageLog.instance().logMessage("QGIS State:" + str(visible), "Earthmine")
if (viewerloaded and visible) or (not viewerloaded and not visible):
QgsMessageLog.instance().logMessage("Ignoring as states match", "Earthmine")
continue
if viewerloaded and not visible:
QgsMessageLog.instance().logMessage("Clearing layer because viewer loaded and disabled in QGIS",
"Earthmine")
self.viewer.clear_layer_features(layerid)
continue
if not viewerloaded and visible:
QgsMessageLog.instance().logMessage("Loading layer", "Earthmine")
self.load_layer_features(layers=[layer])
continue
def layers_removed(self, layers):
    """Drop features of removed layers from the viewer.

    :param layers: iterable of layer ids removed from the registry
    """
    for layerid in layers:
        self.viewer.clear_layer_features(layerid)
@pyqtSlot(str, float, float)
def viewChanged(self, event, yaw, angle):
self.marker.setAngle(angle)
self.marker.setYaw(yaw)
@pyqtSlot(str, str)
def getInfo(self, layerid, featureid):
featureid = int(featureid)
activelayer = self.iface.activeLayer()
if not activelayer:
return
activetool = self.viewer.active_tool()
if not activetool in ["Info", "Select"]:
return
# Only show information for the active layer
if not layerid == activelayer.id():
return
layer = layer_by_id(layerid)
if activetool == "Select":
layer.setSelectedFeatures([featureid])
elif activetool == "Info":
rq = QgsFeatureRequest(featureid)
feature = layer.getFeatures(rq).next()
dlg = get_feature_form(layer, feature)
if dlg.dialog().exec_():
self.canvas.refresh()
@pyqtSlot(str, str, float, float, bool)
def featureMoved(self, layerid, featureid, lat, lng, end):
layer = layer_by_id(layerid)
transform = self.coordinatetransform(layer)
point = transform.transform(lng, lat)
if not end:
self.movingband.show()
self.movingband.setToGeometry(QgsGeometry.fromPoint(point), layer)
self.movingband.updatePosition()
self.movingband.update()
else:
self.movingband.hide()
feature = feature_by_id(layer, featureid)
startpoint = feature.geometry().asPoint()
dx = point.x() - startpoint.x()
dy = point.y() - startpoint.y()
layer.beginEditCommand("Feature Moved")
# Block signals for this move as the geometry changed signal will re add the geometry on use.
layer.blockSignals(True)
layer.translateFeature(feature.id(), dx, dy)
layer.blockSignals(False)
self.canvas.refresh()
layer.endEditCommand()
@pyqtSlot(str, str)
def onError(self, message, stacktrace=None):
self.iface.messageBar().pushMessage("Earthmine", message, QgsMessageBar.WARNING)
QgsMessageLog.logMessage(stacktrace, "Earthmine")
@pyqtSlot(float, float, float)
def addPoint(self, lat, lng, z):
layer = self.viewer.active_layer
if not layer.isEditable():
self.iface.messageBar().pushMessage("Earthmine",
"Selected layer isn't editable. Please enable edit mode to add features",
duration=3, level=QgsMessageBar.WARNING)
return
transform = self.coordinatetransform(layer)
point = transform.transform(lng, lat)
geom = QgsGeometry.fromPoint(point)
self.add_feature(layer, geom, z)
def add_feature(self, layer, geom, z=None):
    """Create a new feature with ``geom`` on ``layer`` via the attribute form.

    :param layer: target (editable) vector layer
    :param geom: geometry for the new feature
    :param z: optional altitude written to the layer's "Z" field when the
        viewer's copy-Z option is enabled
    """
    feature = QgsFeature(layer.pendingFields())
    if z and self.viewer.copyZvalue:
        try:
            feature['Z'] = z
        except KeyError:
            # Fixed: QgsMessageLog has no ``log`` method -- logMessage is
            # the API used everywhere else in this module.
            QgsMessageLog.logMessage("No Z found on layer {}".format(layer.name()), "Earthmine")
    feature.setGeometry(geom)
    dlg = get_feature_form(layer, feature, isadd=True)
    if dlg.dialog().exec_():
        self.canvas.refresh()
@pyqtSlot(str, bool, str)
def drawLine(self, points, end, stats):
points = json.loads(points)
stats = json.loads(stats)
QgsMessageLog.logMessage(str(stats), "Earthmine")
self.tempband.reset(QGis.Line)
self.tempbandpoints.reset(QGis.Point)
color = QColor(self.viewer.current_action_color)
self.tempband.setColor(color)
self.tempbandpoints.setColor(color)
layer = self.viewer.active_layer
transform = self.coordinatetransform(layer)
earthminepoints = []
for point in points:
newpoint = transform.transform(point['lng'], point['lat'])
self.tempband.addPoint(newpoint)
self.tempbandpoints.addPoint(newpoint)
empoint = EarthminePoint(newpoint, point)
earthminepoints.append(empoint)
if end and not self.viewer.mode == "Vertical":
geom = self.tempband.asGeometry()
self.add_feature(layer, geom)
self.clear_bands()
self.viewer.geom = EarthmineLine(earthminepoints, stats)
self.tempband.show()
self.tempbandpoints.show()
@pyqtSlot(str, str, str, float)
def locationChanged(self, lat, lng, yaw, angle):
transform = self.coordinatetransform()
point = transform.transform(float(lng), float(lat))
self.marker.setCenter(point)
yaw = float(yaw)
self.marker.setAngle(angle)
self.marker.setYaw(yaw)
self.marker.setTracking(self.viewer.tracking)
if self.marker.tracking:
rect = QgsRectangle(point, point)
extentlimt = QgsRectangle(self.canvas.extent())
extentlimt.scale(0.95)
if not extentlimt.contains(point):
self.canvas.setExtent(rect)
self.canvas.refresh()
# Clear old features
self.viewer.clear_features()
self.load_layer_features(point)
def update_earthmine_features(self, viewfeatures):
self.viewer.clear_features()
if viewfeatures:
self.load_layer_features()
def load_layer_features(self, point=None, layers=None):
# TODO Move this logic into the viewer and let it track it's position
if point is None and self.marker.map_pos is None:
return
if point is None:
point = self.marker.map_pos
area, units = self.distancearea()
rect = search_area(units, area, point)
if layers is None:
layers = self.visible_layers()
for layer in layers:
transform = self.coordinatetransform(layer)
# Transform the rect
source = self.canvas.mapRenderer().destinationCrs()
dest = layer.crs()
recttransform = QgsCoordinateTransform(source, dest)
rect = recttransform.transformBoundingBox(rect)
features = list(get_features_in_area(layer, rect, transform, self.canvas.mapSettings()))
geomtype = layer.geometryType()
layerdata = dict(id=layer.id(),
geomtype=QGis.vectorGeometryType(geomtype))
self.viewer.load_features(layerdata, features)
# noinspection PyMethodMayBeStatic
def tr(self, message):
    """Translate ``message`` with the Qt translation API.

    Implemented here rather than inherited because this class is not a
    QObject subclass.

    :param message: String for translation.
    :type message: str, QString

    :returns: Translated version of message.
    :rtype: QString
    """
    # noinspection PyTypeChecker,PyArgumentList,PyCallByClass
    return QCoreApplication.translate('EarthMineQGIS', message)
def add_action(
        self,
        icon_path,
        text,
        callback,
        enabled_flag=True,
        add_to_menu=True,
        add_to_toolbar=True,
        status_tip=None,
        whats_this=None,
        parent=None):
    """Add a toolbar icon to the plugin toolbar.

    :param icon_path: Path to the icon for this action. Can be a resource
        path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
    :type icon_path: str

    :param text: Text that should be shown in menu items for this action.
    :type text: str

    :param callback: Function to be called when the action is triggered.
    :type callback: function

    :param enabled_flag: A flag indicating if the action should be enabled
        by default. Defaults to True.
    :type enabled_flag: bool

    :param add_to_menu: Flag indicating whether the action should also
        be added to the menu. Defaults to True.
    :type add_to_menu: bool

    :param add_to_toolbar: Flag indicating whether the action should also
        be added to the toolbar. Defaults to True.
    :type add_to_toolbar: bool

    :param status_tip: Optional text to show in a popup when the mouse
        pointer hovers over the action.
    :type status_tip: str

    :param whats_this: Optional "What's This?" help text for the action.
    :type whats_this: str

    :param parent: Parent widget for the new action. Defaults None.
    :type parent: QWidget

    :returns: The action that was created. Note that the action is also
        added to self.actions list.
    :rtype: QAction
    """
    icon = QIcon(icon_path)
    action = QAction(icon, text, parent)
    action.triggered.connect(callback)
    action.setEnabled(enabled_flag)

    if status_tip is not None:
        action.setStatusTip(status_tip)

    if whats_this is not None:
        action.setWhatsThis(whats_this)

    if add_to_toolbar:
        self.toolbar.addAction(action)

    if add_to_menu:
        self.iface.addPluginToMenu(
            self.menu,
            action)

    self.actions.append(action)

    return action
def open_viewer(self):
    """Open the Earthmine viewer dock, loading the configured viewer URL."""
    try:
        settings = self.earthmine_settings()
    except EarthmineSettingsError as ex:
        # Missing/invalid settings: report and send the user to the dialog.
        self.onError(ex.message)
        self.show_settings()
        return

    raw_url = settings["viewerUrl"]
    if raw_url.startswith("http"):
        url = QUrl(raw_url)
    else:
        # Anything else is a local file path; normalise doubled backslashes.
        url = QUrl.fromLocalFile(raw_url.replace("\\\\", "\\"))

    if not self.viewer.isVisible():
        self.iface.addDockWidget(Qt.RightDockWidgetArea, self.viewer)

    self.viewer.loadviewer(url)
def show_settings(self):
    """Display the plugin settings dialog."""
    self.settingsdialog.show()
class PostionMarker(QgsMapCanvasItem):
    """
    Position marker for the current location in the viewer.

    Draws a filled dot with a translucent view cone indicating the viewer's
    yaw and field of view. (The class-name typo is preserved for API
    compatibility.)
    """

    def __init__(self, canvas):
        self._yaw = 0
        self._angle = 0
        self.size = 8
        self.halfsize = self.size / 2.0
        super(PostionMarker, self).__init__(canvas)
        self.canvas = canvas

        base_colour = "#019633"
        cone_colour = QColor(base_colour)
        cone_colour.setAlpha(50)
        self.conebrush = QBrush(cone_colour)
        outline_colour = QColor(base_colour)
        self.pointpen = QPen(outline_colour, 1)
        self.solidbrush = QBrush(outline_colour)

        self.map_pos = QgsPoint()
        self.tracking = False

    def setAngle(self, angle):
        """Set the view cone's field-of-view angle (degrees) and repaint."""
        self._angle = angle
        self.update()

    def setSize(self, size):
        """Set the marker radius (canvas units) and repaint."""
        self.size = size
        self.halfsize = self.size / 2.0
        self.update()

    def setYaw(self, yaw):
        """Set the heading (degrees) and repaint."""
        self._yaw = yaw
        self.update()

    def paint(self, painter, xxx, xxx2):
        # Draw nothing unless we are tracking the viewer position.
        if not self.tracking:
            return

        half_angle = self._angle / 2
        painter.save()
        painter.setRenderHint(QPainter.Antialiasing)
        painter.setBrush(self.solidbrush)
        painter.setPen(self.pointpen)
        # Rotate so a yaw of 0 points straight up on the canvas.
        painter.rotate(-90 + self._yaw)
        painter.drawEllipse(QPointF(0, 0), self.size, self.size)
        painter.setBrush(self.conebrush)
        # Qt pie angles are expressed in 1/16ths of a degree.
        painter.drawPie(self.boundingRect(), half_angle * 16, -self._angle * 16)
        painter.restore()

    def distancearea(self):
        """Return a QgsDistanceArea for the canvas CRS plus that CRS's units."""
        measurer = QgsDistanceArea()
        crs = self.canvas.mapRenderer().destinationCrs()
        measurer.setSourceCrs(crs)
        return measurer, crs.mapUnits()

    def boundingRect(self):
        # The cone radius is 15 metres, converted into map units and then
        # into canvas pixels.
        metres = 15
        measurer, units = self.distancearea()
        converted = measurer.convertMeasurement(metres, QGis.Meters, units, False)
        origin = self.toCanvasCoordinates(QgsPoint(0, 0))
        offset = self.toCanvasCoordinates(QgsPoint(0, converted[0]))
        length = origin.y() - offset.y()
        half = length / 2
        return QRectF(-half * 2.0, -half * 2.0, 2.0 * length, 2.0 * length)

    def setCenter(self, map_pos):
        """Remember the map-space position and move the canvas item to it."""
        self.map_pos = map_pos
        self.setPos(self.toCanvasCoordinates(map_pos))

    def updatePosition(self):
        # Called by the canvas on extent changes; re-project our position.
        self.setCenter(self.map_pos)
        self.setVisible(self.tracking)

    def setTracking(self, tracking):
        self.tracking = tracking
        self.setVisible(tracking)
def connect_layer_signals(self, layer):
if not layer.type() == QgsMapLayer.VectorLayer: |
<|file_name|>log.js<|end_file_name|><|fim▁begin|>'use strict';
var util = require('util');
var events = require('events');
var _ = require('lodash');
var table = require('text-table');
var chalk = require('chalk');
// padding step
var step = ' ';
var padding = ' ';
// color -> status mappings
var colors = {
skip: 'yellow',
force: 'yellow',
create: 'green',
invoke: 'bold',
conflict: 'red',
identical: 'cyan',
info: 'gray'
};
// Right-align `status` to the width of the longest status name
// ('identical'), padding with spaces on the left.
function pad(status) {
  var width = 'identical'.length;
  var missing = width - status.length;
  return missing ? new Array(missing + 1).join(' ') + status : status;
}
// borrowed from https://github.com/mikeal/logref/blob/master/main.js#L6-15
// Replaces each "%key" token in `msg` with ctx[key]. A token runs from the
// "%" to the next space (or to the end of the string); the space is kept.
function formatter(msg, ctx) {
  for (var start = msg.indexOf('%'); start !== -1; start = msg.indexOf('%')) {
    var end = msg.indexOf(' ', start);
    if (end === -1) {
      end = msg.length;
    }
    msg = msg.slice(0, start) + ctx[msg.slice(start + 1, end)] + msg.slice(end);
  }
  return msg;
}
module.exports = function logger() {
// `this.log` is a [logref](https://github.com/mikeal/logref)
// compatible logger, with an enhanced API.
//
// It also has EventEmitter like capabilities, so you can call on / emit
// on it, namely used to increase or decrease the padding.
//
// All logs are done against STDERR, letting you stdout for meaningfull
// value and redirection, should you need to generate output this way.
//
// Log functions take two arguments, a message and a context. For any
// other kind of paramters, `console.error` is used, so all of the
// console format string goodies you're used to work fine.
//
// - msg - The message to show up
// - context - The optional context to escape the message against<|fim▁hole|> msg = msg || '';
if (!ctx) {
ctx = {};
}
if (typeof ctx === 'object' && !Array.isArray(ctx)) {
console.error(formatter(msg, ctx));
} else {
console.error.apply(console, arguments);
}
return log;
}
_.extend(log, events.EventEmitter.prototype);
// A simple write method, with formatted message. If `msg` is
// ommitted, then a single `\n` is written.
//
// Returns the logger
log.write = function (msg) {
if (!msg) {
return this.write('\n');
}
process.stderr.write(util.format.apply(util, arguments));
return this;
};
// Same as `log.write()` but automatically appends a `\n` at the end
// of the message.
log.writeln = function () {
return this.write.apply(this, arguments).write();
};
// Convenience helper to write sucess status, this simply prepends the
// message with a gren `✔`.
log.ok = function () {
this.write(chalk.green('✔ ') + util.format.apply(util, arguments) + '\n');
return this;
};
log.error = function () {
this.write(chalk.red('✗ ') + util.format.apply(util, arguments) + '\n');
return this;
};
log.on('up', function () {
padding = padding + step;
});
log.on('down', function () {
padding = padding.replace(step, '');
});
Object.keys(colors).forEach(function (status) {
// Each predefined status has its logging method utility, handling
// status color and padding before the usual `.write()`
//
// Example
//
// this.log
// .write()
// .info('Doing something')
// .force('Forcing filepath %s, 'some path')
// .conflict('on %s' 'model.js')
// .write()
// .ok('This is ok');
//
// The list of status and mapping colors
//
// skip yellow
// force yellow
// create green
// invoke bold
// conflict red
// identical cyan
// info grey
//
// Returns the logger
log[status] = function () {
var color = colors[status];
this.write(chalk[color](pad(status))).write(padding);
this.write(util.format.apply(util, arguments) + '\n');
return this;
};
});
// A basic wrapper around `cli-table` package, resetting any single
// char to empty strings, this is used for aligning options and
// arguments without too much Math on our side.
//
// - opts - A list of rows or an Hash of options to pass through cli
// table.
//
// Returns the table reprensetation
log.table = function (opts) {
var tableData = [];
opts = Array.isArray(opts) ? { rows: opts }: opts;
opts.rows = opts.rows || [];
opts.rows.forEach(function (row) {
tableData.push(row);
});
return table(tableData);
};
return log;
};<|fim▁end|> | //
// Retunrns the logger
function log(msg, ctx) { |
<|file_name|>resource_app_engine_application_url_dispatch_rules.go<|end_file_name|><|fim▁begin|>// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
//
// ----------------------------------------------------------------------------
//
// This file is automatically generated by Magic Modules and manual
// changes will be clobbered when the file is regenerated.
//
// Please read more about how to change this file in
// .github/CONTRIBUTING.md.
//
// ----------------------------------------------------------------------------
package google
import (
"fmt"
"log"
"reflect"
"time"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
// resourceAppEngineApplicationUrlDispatchRules defines the schema for the
// google_app_engine_application_url_dispatch_rules resource, a singleton
// per-project field on the App Engine application.
func resourceAppEngineApplicationUrlDispatchRules() *schema.Resource {
	return &schema.Resource{
		Create: resourceAppEngineApplicationUrlDispatchRulesCreate,
		Read:   resourceAppEngineApplicationUrlDispatchRulesRead,
		Update: resourceAppEngineApplicationUrlDispatchRulesUpdate,
		Delete: resourceAppEngineApplicationUrlDispatchRulesDelete,

		Importer: &schema.ResourceImporter{
			State: resourceAppEngineApplicationUrlDispatchRulesImport,
		},

		Timeouts: &schema.ResourceTimeout{
			Create: schema.DefaultTimeout(4 * time.Minute),
			Update: schema.DefaultTimeout(4 * time.Minute),
			Delete: schema.DefaultTimeout(4 * time.Minute),
		},

		Schema: map[string]*schema.Schema{
			"dispatch_rules": {
				Type:        schema.TypeList,
				Required:    true,
				Description: `Rules to match an HTTP request and dispatch that request to a service.`,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"path": {
							Type:     schema.TypeString,
							Required: true,
							Description: `Pathname within the host. Must start with a "/". A single "*" can be included at the end of the path.
The sum of the lengths of the domain and path may not exceed 100 characters.`,
						},
						"service": {
							Type:     schema.TypeString,
							Required: true,
							// Fixed: this previously duplicated the "path" description.
							Description: `Resource ID of a service in this application that should serve the matched request.`,
						},
						"domain": {
							Type:     schema.TypeString,
							Optional: true,
							Description: `Domain name to match against. The wildcard "*" is supported if specified before a period: "*.".
Defaults to matching all domains: "*".`,
							Default: "*",
						},
					},
				},
			},
			"project": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
				ForceNew: true,
			},
		},
	}
}
// resourceAppEngineApplicationUrlDispatchRulesCreate sets the dispatch rules.
// Because dispatch rules are a field on the App Engine application, "create"
// is implemented as a PATCH of the application restricted to the
// dispatch_rules update mask.
func resourceAppEngineApplicationUrlDispatchRulesCreate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	obj := make(map[string]interface{})
	dispatchRulesProp, err := expandAppEngineApplicationUrlDispatchRulesDispatchRules(d.Get("dispatch_rules"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("dispatch_rules"); !isEmptyValue(reflect.ValueOf(dispatchRulesProp)) && (ok || !reflect.DeepEqual(v, dispatchRulesProp)) {
		obj["dispatchRules"] = dispatchRulesProp
	}

	// Serialize all mutations of the App Engine application.
	lockName, err := replaceVars(d, config, "apps/{{project}}")
	if err != nil {
		return err
	}
	mutexKV.Lock(lockName)
	defer mutexKV.Unlock(lockName)

	url, err := replaceVars(d, config, "{{AppEngineBasePath}}apps/{{project}}?updateMask=dispatch_rules")
	if err != nil {
		return err
	}

	log.Printf("[DEBUG] Creating new ApplicationUrlDispatchRules: %#v", obj)
	project, err := getProject(d, config)
	if err != nil {
		return err
	}
	res, err := sendRequestWithTimeout(config, "PATCH", project, url, obj, d.Timeout(schema.TimeoutCreate), isAppEngineRetryableError)
	if err != nil {
		return fmt.Errorf("Error creating ApplicationUrlDispatchRules: %s", err)
	}

	// Store the ID now
	id, err := replaceVars(d, config, "{{project}}")
	if err != nil {
		return fmt.Errorf("Error constructing id: %s", err)
	}
	d.SetId(id)

	// Block until the long-running operation completes.
	err = appEngineOperationWaitTime(
		config, res, project, "Creating ApplicationUrlDispatchRules",
		d.Timeout(schema.TimeoutCreate))

	if err != nil {
		// The resource didn't actually create
		d.SetId("")
		return fmt.Errorf("Error waiting to create ApplicationUrlDispatchRules: %s", err)
	}

	log.Printf("[DEBUG] Finished creating ApplicationUrlDispatchRules %q: %#v", d.Id(), res)

	return resourceAppEngineApplicationUrlDispatchRulesRead(d, meta)
}
func resourceAppEngineApplicationUrlDispatchRulesRead(d *schema.ResourceData, meta interface{}) error {
config := meta.(*Config)
url, err := replaceVars(d, config, "{{AppEngineBasePath}}apps/{{project}}/{{name}}")
if err != nil {
return err
}
project, err := getProject(d, config)
if err != nil {
return err
}
res, err := sendRequest(config, "GET", project, url, nil, isAppEngineRetryableError)
if err != nil {<|fim▁hole|>
if err := d.Set("project", project); err != nil {
return fmt.Errorf("Error reading ApplicationUrlDispatchRules: %s", err)
}
if err := d.Set("dispatch_rules", flattenAppEngineApplicationUrlDispatchRulesDispatchRules(res["dispatchRules"], d, config)); err != nil {
return fmt.Errorf("Error reading ApplicationUrlDispatchRules: %s", err)
}
return nil
}
// resourceAppEngineApplicationUrlDispatchRulesUpdate PATCHes the
// application's dispatch_rules field and waits for the operation to finish.
func resourceAppEngineApplicationUrlDispatchRulesUpdate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	project, err := getProject(d, config)
	if err != nil {
		return err
	}

	obj := make(map[string]interface{})
	dispatchRulesProp, err := expandAppEngineApplicationUrlDispatchRulesDispatchRules(d.Get("dispatch_rules"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("dispatch_rules"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, dispatchRulesProp)) {
		obj["dispatchRules"] = dispatchRulesProp
	}

	// Serialize all mutations of the App Engine application.
	lockName, err := replaceVars(d, config, "apps/{{project}}")
	if err != nil {
		return err
	}
	mutexKV.Lock(lockName)
	defer mutexKV.Unlock(lockName)

	url, err := replaceVars(d, config, "{{AppEngineBasePath}}apps/{{project}}?updateMask=dispatch_rules")
	if err != nil {
		return err
	}

	log.Printf("[DEBUG] Updating ApplicationUrlDispatchRules %q: %#v", d.Id(), obj)
	res, err := sendRequestWithTimeout(config, "PATCH", project, url, obj, d.Timeout(schema.TimeoutUpdate), isAppEngineRetryableError)

	if err != nil {
		return fmt.Errorf("Error updating ApplicationUrlDispatchRules %q: %s", d.Id(), err)
	} else {
		log.Printf("[DEBUG] Finished updating ApplicationUrlDispatchRules %q: %#v", d.Id(), res)
	}

	// Block until the long-running operation completes.
	err = appEngineOperationWaitTime(
		config, res, project, "Updating ApplicationUrlDispatchRules",
		d.Timeout(schema.TimeoutUpdate))

	if err != nil {
		return err
	}

	return resourceAppEngineApplicationUrlDispatchRulesRead(d, meta)
}
// resourceAppEngineApplicationUrlDispatchRulesDelete clears the application's
// dispatch rules. Since the rules are a field on the application (not a
// standalone resource), deletion is a PATCH with an empty body under the
// dispatch_rules update mask.
func resourceAppEngineApplicationUrlDispatchRulesDelete(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	project, err := getProject(d, config)
	if err != nil {
		return err
	}

	// Serialize all mutations of the App Engine application.
	lockName, err := replaceVars(d, config, "apps/{{project}}")
	if err != nil {
		return err
	}
	mutexKV.Lock(lockName)
	defer mutexKV.Unlock(lockName)

	url, err := replaceVars(d, config, "{{AppEngineBasePath}}apps/{{project}}?updateMask=dispatch_rules")
	if err != nil {
		return err
	}

	// obj is intentionally the nil map: PATCHing no rules under the
	// dispatch_rules mask removes all existing rules.
	var obj map[string]interface{}
	log.Printf("[DEBUG] Deleting ApplicationUrlDispatchRules %q", d.Id())

	res, err := sendRequestWithTimeout(config, "PATCH", project, url, obj, d.Timeout(schema.TimeoutDelete), isAppEngineRetryableError)
	if err != nil {
		return handleNotFoundError(err, d, "ApplicationUrlDispatchRules")
	}

	// Block until the long-running operation completes.
	err = appEngineOperationWaitTime(
		config, res, project, "Deleting ApplicationUrlDispatchRules",
		d.Timeout(schema.TimeoutDelete))

	if err != nil {
		return err
	}

	log.Printf("[DEBUG] Finished deleting ApplicationUrlDispatchRules %q: %#v", d.Id(), res)
	return nil
}
// resourceAppEngineApplicationUrlDispatchRulesImport imports the resource;
// the import ID is just the project ID.
func resourceAppEngineApplicationUrlDispatchRulesImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
	config := meta.(*Config)
	if err := parseImportId([]string{
		"(?P<project>[^/]+)",
	}, d, config); err != nil {
		return nil, err
	}

	// Replace import id for the resource id
	id, err := replaceVars(d, config, "{{project}}")
	if err != nil {
		return nil, fmt.Errorf("Error constructing id: %s", err)
	}
	d.SetId(id)

	return []*schema.ResourceData{d}, nil
}
// flattenAppEngineApplicationUrlDispatchRulesDispatchRules converts the API's
// dispatchRules list into the schema's dispatch_rules list form.
func flattenAppEngineApplicationUrlDispatchRulesDispatchRules(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v == nil {
		return v
	}
	l := v.([]interface{})
	transformed := make([]interface{}, 0, len(l))
	for _, raw := range l {
		original := raw.(map[string]interface{})
		if len(original) < 1 {
			// Do not include empty json objects coming back from the api
			continue
		}
		transformed = append(transformed, map[string]interface{}{
			"domain":  flattenAppEngineApplicationUrlDispatchRulesDispatchRulesDomain(original["domain"], d, config),
			"path":    flattenAppEngineApplicationUrlDispatchRulesDispatchRulesPath(original["path"], d, config),
			"service": flattenAppEngineApplicationUrlDispatchRulesDispatchRulesService(original["service"], d, config),
		})
	}
	return transformed
}
// flattenAppEngineApplicationUrlDispatchRulesDispatchRulesDomain passes the
// API value through unchanged.
func flattenAppEngineApplicationUrlDispatchRulesDispatchRulesDomain(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenAppEngineApplicationUrlDispatchRulesDispatchRulesPath passes the
// API value through unchanged.
func flattenAppEngineApplicationUrlDispatchRulesDispatchRulesPath(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// flattenAppEngineApplicationUrlDispatchRulesDispatchRulesService passes the
// API value through unchanged.
func flattenAppEngineApplicationUrlDispatchRulesDispatchRulesService(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
// expandAppEngineApplicationUrlDispatchRulesDispatchRules converts the
// schema's dispatch_rules list into the API's dispatchRules request form,
// dropping empty values.
func expandAppEngineApplicationUrlDispatchRulesDispatchRules(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	l := v.([]interface{})
	req := make([]interface{}, 0, len(l))
	for _, raw := range l {
		if raw == nil {
			continue
		}
		original := raw.(map[string]interface{})
		transformed := make(map[string]interface{})

		transformedDomain, err := expandAppEngineApplicationUrlDispatchRulesDispatchRulesDomain(original["domain"], d, config)
		if err != nil {
			return nil, err
		} else if val := reflect.ValueOf(transformedDomain); val.IsValid() && !isEmptyValue(val) {
			transformed["domain"] = transformedDomain
		}

		transformedPath, err := expandAppEngineApplicationUrlDispatchRulesDispatchRulesPath(original["path"], d, config)
		if err != nil {
			return nil, err
		} else if val := reflect.ValueOf(transformedPath); val.IsValid() && !isEmptyValue(val) {
			transformed["path"] = transformedPath
		}

		transformedService, err := expandAppEngineApplicationUrlDispatchRulesDispatchRulesService(original["service"], d, config)
		if err != nil {
			return nil, err
		} else if val := reflect.ValueOf(transformedService); val.IsValid() && !isEmptyValue(val) {
			transformed["service"] = transformedService
		}

		req = append(req, transformed)
	}
	return req, nil
}
// expandAppEngineApplicationUrlDispatchRulesDispatchRulesDomain passes the
// configured value through unchanged.
func expandAppEngineApplicationUrlDispatchRulesDispatchRulesDomain(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandAppEngineApplicationUrlDispatchRulesDispatchRulesPath passes the
// configured value through unchanged.
func expandAppEngineApplicationUrlDispatchRulesDispatchRulesPath(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
// expandAppEngineApplicationUrlDispatchRulesDispatchRulesService passes the
// configured value through unchanged.
func expandAppEngineApplicationUrlDispatchRulesDispatchRulesService(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
} |
<|file_name|>reader.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import collections
import io
import json
import time
try:
import fastavro
except ImportError: # pragma: NO COVER
fastavro = None
import google.api_core.exceptions
import google.rpc.error_details_pb2
try:
import pandas
except ImportError: # pragma: NO COVER
pandas = None
try:
import pyarrow
except ImportError: # pragma: NO COVER
pyarrow = None
try:
import pyarrow
except ImportError: # pragma: NO COVER
pyarrow = None
# Exceptions that indicate the server closed the stream for a transient
# reason; the reader reconnects silently at the last delivered offset.
_STREAM_RESUMPTION_EXCEPTIONS = (
    google.api_core.exceptions.ServiceUnavailable,
    # Caused by transport-level error. No status code was received.
    # https://github.com/googleapis/python-bigquery-storage/issues/262
    google.api_core.exceptions.Unknown,
)

# The Google API endpoint can unexpectedly close long-running HTTP/2 streams.
# Unfortunately, this condition is surfaced to the caller as an internal error
# by gRPC. We don't want to resume on all internal errors, so instead we look
# for error message that we know are caused by problems that are safe to
# reconnect.
_STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES = (
    # See: https://github.com/googleapis/google-cloud-python/pull/9994
    "RST_STREAM",
)

# Human-readable messages raised when an optional dependency is missing.
_FASTAVRO_REQUIRED = (
    "fastavro is required to parse ReadRowResponse messages with Avro bytes."
)
_PANDAS_REQUIRED = "pandas is required to create a DataFrame"
_PYARROW_REQUIRED = (
    "pyarrow is required to parse ReadRowResponse messages with Arrow bytes."
)
class ReadRowsStream(object):
"""A stream of results from a read rows request.
This stream is an iterable of
:class:`~google.cloud.bigquery_storage_v1.types.ReadRowsResponse`.
Iterate over it to fetch all row messages.
If the fastavro library is installed, use the
:func:`~google.cloud.bigquery_storage_v1.reader.ReadRowsStream.rows()`
method to parse all messages into a stream of row dictionaries.
If the pandas and fastavro libraries are installed, use the
:func:`~google.cloud.bigquery_storage_v1.reader.ReadRowsStream.to_dataframe()`
method to parse all messages into a :class:`pandas.DataFrame`.
This object should not be created directly, but is returned by
other methods in this library.
"""
def __init__(
    self, client, name, offset, read_rows_kwargs, retry_delay_callback=None
):
    """Construct a ReadRowsStream.

    Args:
        client ( \
            ~google.cloud.bigquery_storage_v1.services. \
                big_query_read.BigQueryReadClient \
        ):
            A GAPIC client used to reconnect to a ReadRows stream. This
            must be the GAPIC client to avoid a circular dependency on
            this class.
        name (str):
            Required. Stream ID from which rows are being read.
        offset (int):
            Required. Position in the stream to start
            reading from. The offset requested must be less than the last
            row read from ReadRows. Requesting a larger offset is
            undefined.
        read_rows_kwargs (dict):
            Keyword arguments to use when reconnecting to a ReadRows
            stream.
        retry_delay_callback (Optional[Callable[[float], None]]):
            If the client receives a retryable error that asks the client to
            delay its next attempt and retry_delay_callback is not None,
            ReadRowsStream will call retry_delay_callback with the delay
            duration (in seconds) before it starts sleeping until the next
            attempt.

    Returns:
        Iterable[ \
            ~google.cloud.bigquery_storage.types.ReadRowsResponse \
        ]:
            A sequence of row messages.
    """
    # Make a copy of the read position so that we can update it without
    # mutating the original input.
    self._client = client
    self._name = name
    # Advanced as messages arrive so a reconnect resumes where we left off.
    self._offset = offset
    self._read_rows_kwargs = read_rows_kwargs
    self._retry_delay_callback = retry_delay_callback
    # The underlying gRPC stream; created lazily on first iteration.
    self._wrapped = None
def __iter__(self):
    """An iterable of messages.

    Returns:
        Iterable[ \
            ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \
        ]:
            A sequence of row messages.
    """
    # Infinite loop to reconnect on reconnectable errors while processing
    # the row stream.
    if self._wrapped is None:
        self._reconnect()
    while True:
        try:
            for message in self._wrapped:
                rowcount = message.row_count
                # Track delivered rows so a reconnect resumes after the
                # last row the caller actually received.
                self._offset += rowcount
                yield message

            return  # Made it through the whole stream.
        except google.api_core.exceptions.InternalServerError as exc:
            # Internal errors are only resumed for known-transient
            # messages (e.g. RST_STREAM); anything else re-raises.
            resumable_error = any(
                resumable_message in exc.message
                for resumable_message in _STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES
            )
            if not resumable_error:
                raise
        except _STREAM_RESUMPTION_EXCEPTIONS:
            # Transient error, so reconnect to the stream.
            pass
        except Exception as exc:
            # ResourceExhausted is retried only when the server supplied a
            # RetryInfo delay (the helper also sleeps for that delay).
            if not self._resource_exhausted_exception_is_retryable(exc):
                raise

        self._reconnect()
def _reconnect(self):
    """Reconnect to the ReadRows stream using the most recent offset."""
    while True:
        try:
            self._wrapped = self._client.read_rows(
                read_stream=self._name,
                offset=self._offset,
                **self._read_rows_kwargs
            )
            break
        except Exception as exc:
            # Retry only server-throttling (ResourceExhausted + RetryInfo)
            # errors; everything else propagates to the caller.
            if not self._resource_exhausted_exception_is_retryable(exc):
                raise
def _resource_exhausted_exception_is_retryable(self, exc):
    """Return True (after sleeping the server-requested delay) when ``exc``
    is a ResourceExhausted error carrying a RetryInfo detail; False for
    every other exception."""
    if isinstance(exc, google.api_core.exceptions.ResourceExhausted):
        # ResourceExhausted errors are only retried if a valid
        # RetryInfo is provided with the error.
        #
        # TODO: Remove hasattr logic when we require google-api-core >= 2.2.0.
        # ResourceExhausted added details/_details in google-api-core 2.2.0.
        details = None
        if hasattr(exc, "details"):
            details = exc.details
        elif hasattr(exc, "_details"):
            details = exc._details
        if details is not None:
            for detail in details:
                if isinstance(detail, google.rpc.error_details_pb2.RetryInfo):
                    retry_delay = detail.retry_delay
                    if retry_delay is not None:
                        # Combine seconds + nanos, clamped at zero.
                        delay = max(
                            0,
                            float(retry_delay.seconds)
                            + (float(retry_delay.nanos) / 1e9),
                        )
                        # Let the caller observe the delay before sleeping.
                        if self._retry_delay_callback:
                            self._retry_delay_callback(delay)
                        time.sleep(delay)
                        return True
    return False
def rows(self, read_session=None):
    """Iterate over all rows in the stream.

    This method requires the fastavro library in order to parse row
    messages in avro format.  For arrow format messages, the pyarrow
    library is required.

    .. warning::
        DATETIME columns are not supported. They are currently parsed as
        strings in the fastavro library.

    Args:
        read_session ( \
            Optional[~google.cloud.bigquery_storage_v1.types.ReadSession] \
        ):
            DEPRECATED.

            This argument was used to specify the schema of the rows in the
            stream, but now the first message in a read stream contains
            this information.

    Returns:
        Iterable[Mapping]:
            A sequence of rows, represented as dictionaries.
    """
    return ReadRowsIterable(self, read_session=read_session)
def to_arrow(self, read_session=None):
    """Create a :class:`pyarrow.Table` of all rows in the stream.

    This method requires the pyarrow library and a stream using the Arrow
    format.

    Args:
        read_session ( \
            ~google.cloud.bigquery_storage_v1.types.ReadSession \
        ):
            DEPRECATED.

            This argument was used to specify the schema of the rows in the
            stream, but now the first message in a read stream contains
            this information.

    Returns:
        pyarrow.Table:
            A table of all rows in the stream.
    """
    # Restored: the `def` line and docstring opener were missing from this
    # block, orphaning the method body.
    return self.rows(read_session=read_session).to_arrow()
def to_dataframe(self, read_session=None, dtypes=None):
    """Create a :class:`pandas.DataFrame` of all rows in the stream.

    This method requires the pandas libary to create a data frame and the
    fastavro library to parse row messages.

    .. warning::
        DATETIME columns are not supported. They are currently parsed as
        strings.

    Args:
        read_session ( \
            ~google.cloud.bigquery_storage_v1.types.ReadSession \
        ):
            DEPRECATED.

            This argument was used to specify the schema of the rows in the
            stream, but now the first message in a read stream contains
            this information.
        dtypes ( \
            Map[str, Union[str, pandas.Series.dtype]] \
        ):
            Optional. A dictionary of column names pandas ``dtype``s. The
            provided ``dtype`` is used when constructing the series for
            the column specified. Otherwise, the default pandas behavior
            is used.

    Returns:
        pandas.DataFrame:
            A data frame of all rows in the stream.
    """
    # pandas is an optional dependency; fail fast with a clear message.
    if pandas is None:
        raise ImportError(_PANDAS_REQUIRED)

    return self.rows(read_session=read_session).to_dataframe(dtypes=dtypes)
class ReadRowsIterable(object):
    """An iterable of rows from a read session.

    Args:
        reader (google.cloud.bigquery_storage_v1.reader.ReadRowsStream):
            A read rows stream.
        read_session ( \
            Optional[~google.cloud.bigquery_storage_v1.types.ReadSession] \
        ):
            DEPRECATED.

            This argument was used to specify the schema of the rows in the
            stream, but now the first message in a read stream contains
            this information.
    """

    # This class is modelled after the google.cloud.bigquery.table.RowIterator
    # and aims to be API compatible where possible.

    def __init__(self, reader, read_session=None):
        self._reader = reader
        # Without a session, the stream parser is created lazily from the
        # first ReadRowsResponse message (see the `pages` property).
        if read_session is not None:
            self._stream_parser = _StreamParser.from_read_session(read_session)
        else:
            self._stream_parser = None
@property
def pages(self):
    """A generator of all pages in the stream.

    Returns:
        types.GeneratorType[google.cloud.bigquery_storage_v1.ReadRowsPage]:
            A generator of pages.
    """
    # Each page is an iterator of rows. But also has num_items, remaining,
    # and to_dataframe.
    for message in self._reader:
        # Only the first message contains the schema, which is needed to
        # decode the messages.
        if not self._stream_parser:
            self._stream_parser = _StreamParser.from_read_rows_response(message)
        yield ReadRowsPage(self._stream_parser, message)
def __iter__(self):
    """Iterator for each row in all pages."""
    # Flatten page-of-rows into a single row stream.
    for current_page in self.pages:
        yield from current_page
def to_arrow(self):
    """Create a :class:`pyarrow.Table` of all rows in the stream.

    This method requires the pyarrow library and a stream using the Arrow
    format.

    Returns:
        pyarrow.Table:
            A table of all rows in the stream.
    """
    record_batches = []
    for page in self.pages:
        record_batches.append(page.to_arrow())

    if record_batches:
        return pyarrow.Table.from_batches(record_batches)

    # No data, return an empty Table.
    self._stream_parser._parse_arrow_schema()
    return pyarrow.Table.from_batches([], schema=self._stream_parser._schema)
def to_dataframe(self, dtypes=None):
    """Create a :class:`pandas.DataFrame` of all rows in the stream.

    This method requires the pandas libary to create a data frame and the
    fastavro library to parse row messages.

    .. warning::
        DATETIME columns are not supported. They are currently parsed as
        strings in the fastavro library.

    Args:
        dtypes ( \
            Map[str, Union[str, pandas.Series.dtype]] \
        ):
            Optional. A dictionary of column names pandas ``dtype``s. The
            provided ``dtype`` is used when constructing the series for
            the column specified. Otherwise, the default pandas behavior
            is used.

    Returns:
        pandas.DataFrame:
            A data frame of all rows in the stream.
    """
    if pandas is None:
        raise ImportError(_PANDAS_REQUIRED)

    if dtypes is None:
        dtypes = {}

    # If it's an Arrow stream, calling to_arrow, then converting to a
    # pandas dataframe is about 2x faster. This is because pandas.concat is
    # rarely no-copy, whereas pyarrow.Table.from_batches + to_pandas is
    # usually no-copy.
    try:
        record_batch = self.to_arrow()
    except NotImplementedError:
        # Not an Arrow stream; fall through to the per-page (Avro) path.
        pass
    else:
        df = record_batch.to_pandas()
        for column in dtypes:
            df[column] = pandas.Series(df[column], dtype=dtypes[column])
        return df

    frames = [page.to_dataframe(dtypes=dtypes) for page in self.pages]

    if frames:
        return pandas.concat(frames)

    # No data, construct an empty dataframe with columns matching the schema.
    # The result should be consistent with what an empty ARROW stream would produce.
    self._stream_parser._parse_avro_schema()
    schema = self._stream_parser._avro_schema_json

    column_dtypes = self._dtypes_from_avro(schema["fields"])
    column_dtypes.update(dtypes)

    df = pandas.DataFrame(columns=column_dtypes.keys())
    for column in df:
        df[column] = pandas.Series([], dtype=column_dtypes[column])

    return df
def _dtypes_from_avro(self, avro_fields):
"""Determine Pandas dtypes for columns in Avro schema.
Args:
avro_fields (Iterable[Mapping[str, Any]]):
Avro fields' metadata.
Returns:
colelctions.OrderedDict[str, str]:
Column names with their corresponding Pandas dtypes.
"""
result = collections.OrderedDict()
type_map = {"long": "int64", "double": "float64", "boolean": "bool"}
for field_info in avro_fields:
# If a type is an union of multiple types, pick the first type
# that is not "null".
if isinstance(field_info["type"], list):
type_info = next(item for item in field_info["type"] if item != "null")
if isinstance(type_info, str):
field_dtype = type_map.get(type_info, "object")
else:
logical_type = type_info.get("logicalType")
if logical_type == "timestamp-micros":
field_dtype = "datetime64[ns, UTC]"
else:
field_dtype = "object"
result[field_info["name"]] = field_dtype
return result
class ReadRowsPage(object):
    """Iterator over the rows contained in a single read-stream message.

    Args:
        stream_parser (google.cloud.bigquery_storage_v1.reader._StreamParser):
            A helper for parsing messages into rows.
        message (google.cloud.bigquery_storage_v1.types.ReadRowsResponse):
            A message of data from a read rows stream.
    """

    # Modeled after google.api_core.page_iterator.Page to stay
    # API-compatible where possible.

    def __init__(self, stream_parser, message):
        self._stream_parser = stream_parser
        self._message = message
        self._iter_rows = None
        self._num_items = message.row_count
        self._remaining = message.row_count

    def _parse_rows(self):
        """Lazily parse the message into a row iterator (at most once)."""
        if self._iter_rows is None:
            self._iter_rows = iter(self._stream_parser.to_rows(self._message))

    @property
    def num_items(self):
        """int: Total items in the page."""
        return self._num_items

    @property
    def remaining(self):
        """int: Remaining items in the page."""
        return self._remaining

    def __iter__(self):
        """A ``ReadRowsPage`` is its own iterator."""
        return self

    def next(self):
        """Get the next row in the page.

        Raises:
            StopIteration: once the underlying row iterator is exhausted.
        """
        self._parse_rows()
        if self._remaining > 0:
            self._remaining -= 1
        return next(self._iter_rows)

    # Alias needed for Python 2/3 support.
    __next__ = next

    def to_arrow(self):
        """Create an :class:`pyarrow.RecordBatch` of rows in the page.

        Returns:
            pyarrow.RecordBatch:
                Rows from the message, as an Arrow record batch.
        """
        return self._stream_parser.to_arrow(self._message)

    def to_dataframe(self, dtypes=None):
        """Create a :class:`pandas.DataFrame` of rows in the page.

        Args:
            dtypes (Map[str, Union[str, pandas.Series.dtype]]):
                Optional. Per-column pandas ``dtype`` overrides.

        Returns:
            pandas.DataFrame: A data frame of the rows in this page.

        Raises:
            ImportError: If pandas is not installed.
        """
        if pandas is None:
            raise ImportError(_PANDAS_REQUIRED)
        return self._stream_parser.to_dataframe(self._message, dtypes=dtypes)
class _StreamParser(object):
def to_arrow(self, message):
raise NotImplementedError("Not implemented.")
def to_dataframe(self, message, dtypes=None):
raise NotImplementedError("Not implemented.")
def to_rows(self, message):
raise NotImplementedError("Not implemented.")
def _parse_avro_schema(self):
raise NotImplementedError("Not implemented.")
def _parse_arrow_schema(self):
raise NotImplementedError("Not implemented.")
@staticmethod
def from_read_session(read_session):
schema_type = read_session._pb.WhichOneof("schema")
if schema_type == "avro_schema":
return _AvroStreamParser(read_session)
elif schema_type == "arrow_schema":
return _ArrowStreamParser(read_session)
else:
raise TypeError(
"Unsupported schema type in read_session: {0}".format(schema_type)
)
@staticmethod
def from_read_rows_response(message):
schema_type = message._pb.WhichOneof("schema")
if schema_type == "avro_schema":
return _AvroStreamParser(message)
elif schema_type == "arrow_schema":
return _ArrowStreamParser(message)
else:
raise TypeError(
"Unsupported schema type in message: {0}".format(schema_type)
)
class _AvroStreamParser(_StreamParser):
    """Helper to parse Avro messages into useful representations."""

    def __init__(self, message):
        """Construct an _AvroStreamParser.

        Args:
            message (Union[
                google.cloud.bigquery_storage_v1.types.ReadSession, \
                google.cloud.bigquery_storage_v1.types.ReadRowsResponse, \
            ]):
                Either the first message of data from a read rows stream or a
                read session. Both types contain a oneof "schema" field, which
                can be used to determine how to deserialize rows.

        Raises:
            ImportError: If fastavro is not installed.
        """
        if fastavro is None:
            raise ImportError(_FASTAVRO_REQUIRED)
        # Kept only until the schema has been extracted, then dropped
        # (see _parse_avro_schema) so the message can be garbage collected.
        self._first_message = message
        # Lazily-populated caches; filled in by _parse_avro_schema /
        # _parse_fastavro.
        self._avro_schema_json = None
        self._fastavro_schema = None
        self._column_names = None

    def to_arrow(self, message):
        """Create an :class:`pyarrow.RecordBatch` of rows in the page.

        Args:
            message (google.cloud.bigquery_storage_v1.types.ReadRowsResponse):
                Protocol buffer from the read rows stream, to convert into an
                Arrow record batch.

        Raises:
            NotImplementedError: Always; Avro streams cannot be converted
                directly to Arrow record batches.
        """
        raise NotImplementedError("to_arrow not implemented for Avro streams.")

    def to_dataframe(self, message, dtypes=None):
        """Create a :class:`pandas.DataFrame` of rows in the page.

        This method requires the pandas libary to create a data frame and the
        fastavro library to parse row messages.

        .. warning::
            DATETIME columns are not supported. They are currently parsed as
            strings in the fastavro library.

        Args:
            message ( \
                ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \
            ):
                A message containing Avro bytes to parse into a pandas DataFrame.
            dtypes ( \
                Map[str, Union[str, pandas.Series.dtype]] \
            ):
                Optional. A dictionary of column names pandas ``dtype``s. The
                provided ``dtype`` is used when constructing the series for
                the column specified. Otherwise, the default pandas behavior
                is used.

        Returns:
            pandas.DataFrame:
                A data frame of all rows in the stream.
        """
        self._parse_avro_schema()

        if dtypes is None:
            dtypes = {}

        # Pivot row dicts into per-column value lists.
        columns = collections.defaultdict(list)
        for row in self.to_rows(message):
            for column in row:
                columns[column].append(row[column])
        # Replace the raw lists with typed Series for caller-specified columns.
        for column in dtypes:
            columns[column] = pandas.Series(columns[column], dtype=dtypes[column])
        # columns= preserves the schema's column ordering.
        return pandas.DataFrame(columns, columns=self._column_names)

    def _parse_avro_schema(self):
        """Extract and parse Avro schema from a read session.

        Idempotent: subsequent calls are no-ops once the schema is cached.
        """
        if self._avro_schema_json:
            return

        self._avro_schema_json = json.loads(self._first_message.avro_schema.schema)
        self._column_names = tuple(
            (field["name"] for field in self._avro_schema_json["fields"])
        )
        # Drop the reference so the (potentially large) message can be freed.
        self._first_message = None

    def _parse_fastavro(self):
        """Convert parsed Avro schema to fastavro format."""
        self._parse_avro_schema()
        self._fastavro_schema = fastavro.parse_schema(self._avro_schema_json)

    def to_rows(self, message):
        """Parse all rows in a stream message.

        Args:
            message ( \
                ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \
            ):
                A message containing Avro bytes to parse into rows.

        Returns:
            Iterable[Mapping]:
                A sequence of rows, represented as dictionaries.
        """
        self._parse_fastavro()
        messageio = io.BytesIO(message.avro_rows.serialized_binary_rows)
        while True:
            # Loop in a while loop because schemaless_reader can only read
            # a single record.
            try:
                # TODO: Parse DATETIME into datetime.datetime (no timezone),
                # instead of as a string.
                yield fastavro.schemaless_reader(messageio, self._fastavro_schema)
            except StopIteration:
                break  # Finished with message
class _ArrowStreamParser(_StreamParser):
    """Parse Arrow-format read-stream messages into batches, rows and frames."""

    def __init__(self, message):
        """Construct an _ArrowStreamParser.

        Args:
            message (Union[
                google.cloud.bigquery_storage_v1.types.ReadSession, \
                google.cloud.bigquery_storage_v1.types.ReadRowsResponse, \
            ]):
                Either the first message of data from a read rows stream or a
                read session. Both types contain a oneof "schema" field, which
                can be used to determine how to deserialize rows.

        Raises:
            ImportError: If pyarrow is not installed.
        """
        if pyarrow is None:
            raise ImportError(_PYARROW_REQUIRED)

        self._first_message = message
        self._schema = None

    def to_arrow(self, message):
        """Convert a single stream message into a :class:`pyarrow.RecordBatch`."""
        return self._parse_arrow_message(message)

    def to_rows(self, message):
        """Yield each row of the message as a dict keyed by column name."""
        batch = self._parse_arrow_message(message)
        names = self._column_names
        # zip(*columns) walks all columns in lock step, producing one
        # tuple of per-column values for each row.
        for values in zip(*batch.columns):
            yield dict(zip(names, values))

    def to_dataframe(self, message, dtypes=None):
        """Convert a single stream message into a :class:`pandas.DataFrame`."""
        frame = self._parse_arrow_message(message).to_pandas()
        for name, dtype in (dtypes or {}).items():
            frame[name] = pandas.Series(frame[name], dtype=dtype)
        return frame

    def _parse_arrow_message(self, message):
        """Deserialize one message's record batch against the cached schema."""
        self._parse_arrow_schema()
        serialized = message.arrow_record_batch.serialized_record_batch
        return pyarrow.ipc.read_record_batch(
            pyarrow.py_buffer(serialized), self._schema
        )

    def _parse_arrow_schema(self):
        """Read and cache the Arrow schema from the first message (once)."""
        if self._schema:
            return

        self._schema = pyarrow.ipc.read_schema(
            pyarrow.py_buffer(self._first_message.arrow_schema.serialized_schema)
        )
        self._column_names = [field.name for field in self._schema]
        # Drop the reference so the message can be garbage collected.
        self._first_message = None
"""Create a :class:`pyarrow.Table` of all rows in the stream.
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/* eslint-env mocha */
import path from 'path';
import fs from 'fs';
import assert from 'assert';
import {transformFileSync} from 'babel-core';
function trim(str) {
return str.replace(/^\s+|\s+$/, '');
}
// Fixture-driven suite: each sub-directory of ./fixtures contains an
// actual.js (input source) and an expected.js (expected transpiler output).
// NOTE: a plain function expression (not an arrow) keeps mocha's `this`
// binding available to the suite body.
describe('Transpile ES7 async/await to vanilla ES6 Promise chains -', function () {
  const fixturesDir = path.join(__dirname, 'fixtures');
  fs.readdirSync(fixturesDir).forEach(caseName => {
    const fixtureDir = path.join(fixturesDir, caseName);
    const actualPath = path.join(fixtureDir, 'actual.js');
    // Skip stray non-directory entries (e.g. editor droppings like .DS_Store).
    if (!fs.statSync(fixtureDir).isDirectory()) {
      return;
    }
    // "some-case-name" -> "some case name" for a readable test title.
    it(caseName.split('-').join(' '), () => {
      const actual = transformFileSync(actualPath).code;
      const expected = fs.readFileSync(
        path.join(fixtureDir, 'expected.js')
      ).toString();
      // Compare trimmed output so incidental surrounding whitespace
      // does not fail the case.
      assert.equal(trim(actual), trim(expected));
    });
  });
});
this.timeout(5000); |
<|file_name|>convRolledInF4OutF8.rs<|end_file_name|><|fim▁begin|>#pragma version(1)
#pragma rs_fp_relaxed
#pragma rs java_package_name(layers)
// Input feature map, read as packed float4 groups of channels.
rs_allocation In_Blob;
// Convolution weights, packed as float4 groups along the channel axis.
rs_allocation Kernel_Blob;
// Per-output-channel biases, read as float4 pairs (8 channels per work item).
rs_allocation Bias_Blob;
// Second half of each work item's output (sum2); sum1 is returned via *out.
rs_allocation Out_Alloc;
// Input dimensions: channels, height, width.
int c_i;
int h_i;
int w_i;
// Kernel dimensions: number of kernels, channels, height, width.
int n_k;
int c_k;
int h_k;
int w_k;
// Output spatial dimensions.
int h_o;
int w_o;
// Zero-padding applied on each spatial axis.
int pad_x;
int pad_y;
// Convolution strides per spatial axis.
int stride_x;
int stride_y;
// Number of convolution groups (grouped convolution support).
int group;
// Convolution kernel: each work item x computes 8 output channels for one
// output pixel, accumulated into two float4s (sum1 = channels 0..3 of the
// even-numbered group, sum2 = the odd-numbered group; see bias unpacking).
// BUGFIX: the bias-add sequence was missing `sum2.w += bias2.w;`, leaving
// the eighth output channel without its bias.
void root(float4* out, uint32_t x)
{
	float4 sum1, sum2;
	sum1.x = sum1.y = sum1.z = sum1.w = 0;
	sum2.x = sum2.y = sum2.z = sum2.w = 0;
	// Decompose the linear work-item id into (kernel block, output row, output col).
	int kernel_num = x % (n_k / 8);
	int g = (kernel_num * 8) / (n_k / group);
	int channel_offset = g * c_k / 4;
	int h_num = (x * 8) / (w_o * n_k);
	int w_num = (x % (w_o * n_k / 8)) / (n_k / 8);
	int c_k_new = c_k / 4;   // kernel channels counted in float4 units
	for (int h = 0 ; h < h_k ; h++){
		for (int w = 0 ; w < w_k ; w++){
			for (int i = 0 ; i < c_k_new ; i++)
			{
				int cur_x = h_num * stride_x + h; //should take care of the strides(Be careful)
				int cur_y = w_num * stride_y + w; //should take care of the strides(Be careful)
				// Skip taps that fall inside the zero-padding border.
				if (cur_x < pad_x || cur_x >= (pad_x + h_i))
					continue;
				else if (cur_y < pad_y || cur_y >= (pad_y + w_i))
					continue;
				else
				{
					int frame_index = (cur_x - pad_x) * w_i * c_i / 4 + (cur_y - pad_y) * c_i / 4 + (i + channel_offset);
					float4 frame_value = rsGetElementAt_float4(In_Blob,frame_index);
					float4 kernel_value1, kernel_value2;
					int kernel_size = h_k * w_k * c_k_new;
					int kernel_offset = 2 * kernel_size;
					int kernel_index = kernel_num * 8 * kernel_size + h * w_k * c_k_new + w * c_k_new + i;
					// Each iteration below handles one lane (x/y/z/w) of the
					// two accumulators, i.e. two of the 8 output channels.
					kernel_value1 = rsGetElementAt_float4(Kernel_Blob, kernel_index);
					kernel_value2 = rsGetElementAt_float4(Kernel_Blob, kernel_index + kernel_size);
					sum1.x += dot(frame_value, kernel_value1);
					sum2.x += dot(frame_value, kernel_value2);
					kernel_index += kernel_offset;
					kernel_value1 = rsGetElementAt_float4(Kernel_Blob, kernel_index);
					kernel_value2 = rsGetElementAt_float4(Kernel_Blob, kernel_index + kernel_size);
					sum1.y += dot(frame_value, kernel_value1);
					sum2.y += dot(frame_value, kernel_value2);
					kernel_index += kernel_offset;
					kernel_value1 = rsGetElementAt_float4(Kernel_Blob, kernel_index);
					kernel_value2 = rsGetElementAt_float4(Kernel_Blob, kernel_index + kernel_size);
					sum1.z += dot(frame_value, kernel_value1);
					sum2.z += dot(frame_value, kernel_value2);
					kernel_index += kernel_offset;
					kernel_value1 = rsGetElementAt_float4(Kernel_Blob, kernel_index);
					kernel_value2 = rsGetElementAt_float4(Kernel_Blob, kernel_index + kernel_size);
					sum1.w += dot(frame_value, kernel_value1);
					sum2.w += dot(frame_value, kernel_value2);
				}
			}
		}
	}
	// Two float4 bias loads cover the 8 output channels; lanes interleave
	// between sum1 and sum2 following the kernel packing above.
	float4 bias1 = rsGetElementAt_float4(Bias_Blob,kernel_num * 2);
	float4 bias2 = rsGetElementAt_float4(Bias_Blob,kernel_num * 2 + 1);
	sum1.x += bias1.x;
	sum2.x += bias1.y;
	sum1.y += bias1.z;
	sum2.y += bias1.w;
	sum1.z += bias2.x;
	sum2.z += bias2.y;
	sum1.w += bias2.z;
	sum2.w += bias2.w;
	rsSetElementAt_float4(Out_Alloc, sum2, x);
	(*out) = sum1;
}
|
<|file_name|>RTree.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the
* NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package com.ricemap.spateDB.core;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.Stack;
import java.util.Vector;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.IndexedSortable;
import org.apache.hadoop.util.IndexedSorter;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.PriorityQueue;
import org.apache.hadoop.util.QuickSort;
import com.ricemap.spateDB.io.MemoryInputStream;
import com.ricemap.spateDB.io.Text2;
import com.ricemap.spateDB.shape.Point3d;
import com.ricemap.spateDB.shape.Prism;
import com.ricemap.spateDB.shape.Shape;
/**
* An RTree loaded in bulk and never changed after that. It cannot by
* dynamically manipulated by either insertion or deletion. It only works with
* 2-dimensional objects (keys).
*
* @author tonyren, eldawy
*
*/
public class RTree<T extends Shape> implements Writable, Iterable<T> {
/** Field types supported for columnar record storage. */
public static enum FIELD_TYPE{NULL, Integer, Long, Double};

/** Logger */
private static final Log LOG = LogFactory.getLog(RTree.class);

/** Size of tree header on disk: height (4) + degree (4) + number of records (4) + isColumnar flag (4). */
public static final int TreeHeaderSize = 4 + 4 + 4 + 4;

/** Size of a node on disk: offset of first child (4) + six doubles for the 3D MBR (t1, x1, y1, t2, x2, y2). */
public static final int NodeSize = 4 + 8 * 6;

/** Size of one columnar index entry: three doubles (t, x, y). */
public static final int IndexUnitSize = 8 * 3;

/** An instance of T that can be used to deserialize objects from disk */
T stockObject;

/** Whether element data is stored column-wise rather than as text rows. */
public boolean columnar;

/** Height of the tree (number of levels) */
private int height;

/** Degree of internal nodes in the tree */
private int degree;

/** Total number of nodes in the tree */
private int nodeCount;

/** Number of leaf nodes */
private int leafNodeCount;

/** Number of non-leaf nodes */
private int nonLeafNodeCount;

/** Number of elements in the tree */
private int elementCount;

/** An input stream that is used to read node structure (i.e., nodes) */
private FSDataInputStream structure;

/** Input stream to tree data */
private FSDataInputStream data;

/** The start offset of the tree in the data stream */
private long treeStartOffset;

/**
 * Total tree size (header + structure + data) used to read the data in the
 * last leaf node correctly
 */
private int treeSize;

/** No-arg constructor for deserialization; state is populated by readFields. */
public RTree() {
}
/**
* Builds the RTree given a serialized list of elements. It uses the given
* stockObject to deserialize these elements and build the tree. Also writes
* the created tree to the disk directly.
*
* @param elements
* - serialization of elements to be written
* @param offset
* - index of the first element to use in the elements array
* @param len
* - number of bytes to user from the elements array
* @param bytesAvailable
* - size available (in bytes) to store the tree structures
* @param dataOut
* - an output to use for writing the tree to
* @param fast_sort
* - setting this to <code>true</code> allows the method to run
* faster by materializing the offset of each element in the list
* which speeds up the comparison. However, this requires an
* additional 16 bytes per element. So, for each 1M elements, the
* method will require an additional 16 M bytes (approximately).
*/
public void bulkLoadWrite(final byte[] element_bytes, final int offset,
final int len, final int degree, DataOutput dataOut,
final boolean fast_sort, final boolean columnarStorage) {
try {
columnar = columnarStorage;
//TODO: the order of fields should be stable under Oracle JVM, but not guaranteed
Field[] fields = stockObject.getClass().getDeclaredFields();
// Count number of elements in the given text
int i_start = offset;
final Text line = new Text();
while (i_start < offset + len) {
int i_end = skipToEOL(element_bytes, i_start);
// Extract the line without end of line character
line.set(element_bytes, i_start, i_end - i_start - 1);
stockObject.fromText(line);
elementCount++;
i_start = i_end;<|fim▁hole|> // whole
// tree is loaded to memory when processed. However, as current
// algorithms
// process the tree while it's on disk, a higher degree should be
// selected
// such that a node fits one file block (assumed to be 4K).
// final int degree = findBestDegree(bytesAvailable, elementCount);
LOG.info("Writing an RTree with degree " + degree);
int height = Math.max(1,
(int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
int leafNodeCount = (int) Math.pow(degree, height - 1);
if (elementCount < 2 * leafNodeCount && height > 1) {
height--;
leafNodeCount = (int) Math.pow(degree, height - 1);
}
int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
int nonLeafNodeCount = nodeCount - leafNodeCount;
// Keep track of the offset of each element in the text
final int[] offsets = new int[elementCount];
final int[] ids = new int[elementCount];
final double[] ts = fast_sort ? new double[elementCount] : null;
final double[] xs = fast_sort ? new double[elementCount] : null;
final double[] ys = fast_sort ? new double[elementCount] : null;
//initialize columnar data output
ByteArrayOutputStream index_bos = new ByteArrayOutputStream();
DataOutputStream index_dos = new DataOutputStream(index_bos);
ByteArrayOutputStream[] bos = new ByteArrayOutputStream[fields.length];
DataOutputStream[] dos = new DataOutputStream[fields.length];
for (int i = 0; i < bos.length; i++){
bos[i] = new ByteArrayOutputStream();
dos[i] = new DataOutputStream(bos[i]);
}
i_start = offset;
line.clear();
for (int i = 0; i < elementCount; i++) {
offsets[i] = i_start;
ids[i] = i;
int i_end = skipToEOL(element_bytes, i_start);
if (xs != null) {
// Extract the line with end of line character
line.set(element_bytes, i_start, i_end - i_start - 1);
stockObject.fromText(line);
// Sample center of the shape
ts[i] = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2;
xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2;
ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2;
//build columnar storage
if (stockObject instanceof Point3d){
index_dos.writeDouble(ts[i]);
index_dos.writeDouble(xs[i]);
index_dos.writeDouble(ys[i]);
}
else{
throw new RuntimeException("Indexing non-point shape with RTREE is not supported yet");
}
for (int j = 0 ; j < fields.length; j++){
if (fields[j].getType().equals(Integer.TYPE)){
dos[j].writeInt(fields[j].getInt(stockObject));
}
else if (fields[j].getType().equals(Double.TYPE)){
dos[j].writeDouble(fields[j].getDouble(stockObject));
}
else if (fields[j].getType().equals(Long.TYPE)){
dos[j].writeLong(fields[j].getLong(stockObject));
}
else{
continue;
//throw new RuntimeException("Field type is not supported yet");
}
}
}
i_start = i_end;
}
index_dos.close();
for (int i = 0; i < dos.length; i++){
dos[i].close();
}
/** A struct to store information about a split */
class SplitStruct extends Prism {
/** Start and end index for this split */
int index1, index2;
/** Direction of this split */
byte direction;
/** Index of first element on disk */
int offsetOfFirstElement;
static final byte DIRECTION_T = 0;
static final byte DIRECTION_X = 1;
static final byte DIRECTION_Y = 2;
SplitStruct(int index1, int index2, byte direction) {
this.index1 = index1;
this.index2 = index2;
this.direction = direction;
}
@Override
public void write(DataOutput out) throws IOException {
//
if (columnarStorage)
out.writeInt(index1);
else
out.writeInt(offsetOfFirstElement);
super.write(out);
}
void partition(Queue<SplitStruct> toBePartitioned) {
IndexedSortable sortableT;
IndexedSortable sortableX;
IndexedSortable sortableY;
if (fast_sort) {
// Use materialized xs[] and ys[] to do the comparisons
sortableT = new IndexedSortable() {
@Override
public void swap(int i, int j) {
// Swap ts
double tempt = ts[i];
ts[i] = ts[j];
ts[j] = tempt;
// Swap xs
double tempx = xs[i];
xs[i] = xs[j];
xs[j] = tempx;
// Swap ys
double tempY = ys[i];
ys[i] = ys[j];
ys[j] = tempY;
// Swap id
int tempid = offsets[i];
offsets[i] = offsets[j];
offsets[j] = tempid;
tempid = ids[i];
ids[i] = ids[j];
ids[j] = tempid;
}
@Override
public int compare(int i, int j) {
if (ts[i] < ts[j])
return -1;
if (ts[i] > ts[j])
return 1;
return 0;
}
};
sortableX = new IndexedSortable() {
@Override
public void swap(int i, int j) {
// Swap ts
double tempt = ts[i];
ts[i] = ts[j];
ts[j] = tempt;
// Swap xs
double tempx = xs[i];
xs[i] = xs[j];
xs[j] = tempx;
// Swap ys
double tempY = ys[i];
ys[i] = ys[j];
ys[j] = tempY;
// Swap id
int tempid = offsets[i];
offsets[i] = offsets[j];
offsets[j] = tempid;
tempid = ids[i];
ids[i] = ids[j];
ids[j] = tempid;
}
@Override
public int compare(int i, int j) {
if (ts[i] < ts[j])
return -1;
if (xs[i] < xs[j])
return -1;
if (xs[i] > xs[j])
return 1;
return 0;
}
};
sortableY = new IndexedSortable() {
@Override
public void swap(int i, int j) {
// Swap ts
double tempt = ts[i];
ts[i] = ts[j];
ts[j] = tempt;
// Swap xs
double tempx = xs[i];
xs[i] = xs[j];
xs[j] = tempx;
// Swap ys
double tempY = ys[i];
ys[i] = ys[j];
ys[j] = tempY;
// Swap id
int tempid = offsets[i];
offsets[i] = offsets[j];
offsets[j] = tempid;
tempid = ids[i];
ids[i] = ids[j];
ids[j] = tempid;
}
@Override
public int compare(int i, int j) {
if (ys[i] < ys[j])
return -1;
if (ys[i] > ys[j])
return 1;
return 0;
}
};
} else {
// No materialized xs and ys. Always deserialize objects
// to compare
sortableT = new IndexedSortable() {
@Override
public void swap(int i, int j) {
// Swap id
int tempid = offsets[i];
offsets[i] = offsets[j];
offsets[j] = tempid;
tempid = ids[i];
ids[i] = ids[j];
ids[j] = tempid;
}
@Override
public int compare(int i, int j) {
// Get end of line
int eol = skipToEOL(element_bytes, offsets[i]);
line.set(element_bytes, offsets[i], eol
- offsets[i] - 1);
stockObject.fromText(line);
double ti = (stockObject.getMBR().t1 + stockObject
.getMBR().t2) / 2;
eol = skipToEOL(element_bytes, offsets[j]);
line.set(element_bytes, offsets[j], eol
- offsets[j] - 1);
stockObject.fromText(line);
double tj = (stockObject.getMBR().t1 + stockObject
.getMBR().t2) / 2;
if (ti < tj)
return -1;
if (ti > tj)
return 1;
return 0;
}
};
sortableX = new IndexedSortable() {
@Override
public void swap(int i, int j) {
// Swap id
int tempid = offsets[i];
offsets[i] = offsets[j];
offsets[j] = tempid;
tempid = ids[i];
ids[i] = ids[j];
ids[j] = tempid;
}
@Override
public int compare(int i, int j) {
// Get end of line
int eol = skipToEOL(element_bytes, offsets[i]);
line.set(element_bytes, offsets[i], eol
- offsets[i] - 1);
stockObject.fromText(line);
double xi = (stockObject.getMBR().x1 + stockObject
.getMBR().x2) / 2;
eol = skipToEOL(element_bytes, offsets[j]);
line.set(element_bytes, offsets[j], eol
- offsets[j] - 1);
stockObject.fromText(line);
double xj = (stockObject.getMBR().x1 + stockObject
.getMBR().x2) / 2;
if (xi < xj)
return -1;
if (xi > xj)
return 1;
return 0;
}
};
sortableY = new IndexedSortable() {
@Override
public void swap(int i, int j) {
// Swap id
int tempid = offsets[i];
offsets[i] = offsets[j];
offsets[j] = tempid;
tempid = ids[i];
ids[i] = ids[j];
ids[j] = tempid;
}
@Override
public int compare(int i, int j) {
int eol = skipToEOL(element_bytes, offsets[i]);
line.set(element_bytes, offsets[i], eol
- offsets[i] - 1);
stockObject.fromText(line);
double yi = (stockObject.getMBR().y1 + stockObject
.getMBR().y2) / 2;
eol = skipToEOL(element_bytes, offsets[j]);
line.set(element_bytes, offsets[j], eol
- offsets[j] - 1);
stockObject.fromText(line);
double yj = (stockObject.getMBR().y1 + stockObject
.getMBR().y2) / 2;
if (yi < yj)
return -1;
if (yi > yj)
return 1;
return 0;
}
};
}
final IndexedSorter sorter = new QuickSort();
final IndexedSortable[] sortables = new IndexedSortable[3];
sortables[SplitStruct.DIRECTION_T] = sortableT;
sortables[SplitStruct.DIRECTION_X] = sortableX;
sortables[SplitStruct.DIRECTION_Y] = sortableY;
sorter.sort(sortables[direction], index1, index2);
// Partition into maxEntries partitions (equally) and
// create a SplitStruct for each partition
int i1 = index1;
for (int iSplit = 0; iSplit < degree; iSplit++) {
int i2 = index1 + (index2 - index1) * (iSplit + 1)
/ degree;
SplitStruct newSplit;
if (direction == 0){
newSplit = new SplitStruct(i1, i2,
(byte) 1);
}
else if (direction == 1){
newSplit = new SplitStruct(i1, i2,
(byte) 2);
}
else{
newSplit = new SplitStruct(i1, i2,
(byte) 0);
}
toBePartitioned.add(newSplit);
i1 = i2;
}
}
}
// All nodes stored in level-order traversal
Vector<SplitStruct> nodes = new Vector<SplitStruct>();
final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>();
toBePartitioned.add(new SplitStruct(0, elementCount,
SplitStruct.DIRECTION_X));
while (!toBePartitioned.isEmpty()) {
SplitStruct split = toBePartitioned.poll();
if (nodes.size() < nonLeafNodeCount) {
// This is a non-leaf
split.partition(toBePartitioned);
}
nodes.add(split);
}
if (nodes.size() != nodeCount) {
throw new RuntimeException("Expected node count: " + nodeCount
+ ". Real node count: " + nodes.size());
}
// Now we have our data sorted in the required order. Start building
// the tree.
// Store the offset of each leaf node in the tree
FSDataOutputStream fakeOut = new FSDataOutputStream(
new java.io.OutputStream() {
// Null output stream
@Override
public void write(int b) throws IOException {
// Do nothing
}
@Override
public void write(byte[] b, int off, int len)
throws IOException {
// Do nothing
}
@Override
public void write(byte[] b) throws IOException {
// Do nothing
}
}, null, TreeHeaderSize + nodes.size() * NodeSize);
for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) {
nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut
.getPos();
if (i != nodes.elementAt(i_leaf).index1)
throw new RuntimeException();
double t1, x1, y1, t2, x2, y2;
// Initialize MBR to first object
int eol = skipToEOL(element_bytes, offsets[i]);
fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
stockObject.fromText(line);
Prism mbr = stockObject.getMBR();
t1 = mbr.t1;
x1 = mbr.x1;
y1 = mbr.y1;
t2 = mbr.t2;
x2 = mbr.x2;
y2 = mbr.y2;
i++;
while (i < nodes.elementAt(i_leaf).index2) {
eol = skipToEOL(element_bytes, offsets[i]);
fakeOut.write(element_bytes, offsets[i], eol - offsets[i]);
line.set(element_bytes, offsets[i], eol - offsets[i] - 1);
stockObject.fromText(line);
mbr = stockObject.getMBR();
if (mbr.t1 < t1)
t1 = mbr.t1;
if (mbr.x1 < x1)
x1 = mbr.x1;
if (mbr.y1 < y1)
y1 = mbr.y1;
if (mbr.t2 > t2)
t2 = mbr.t2;
if (mbr.x2 > x2)
x2 = mbr.x2;
if (mbr.y2 > y2)
y2 = mbr.y2;
i++;
}
nodes.elementAt(i_leaf).set(t1, x1, y1, t2, x2, y2);
}
fakeOut.close();
fakeOut = null;
// Calculate MBR and offsetOfFirstElement for non-leaves
for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) {
int i_first_child = i_node * degree + 1;
nodes.elementAt(i_node).offsetOfFirstElement = nodes
.elementAt(i_first_child).offsetOfFirstElement;
int i_child = 0;
Prism mbr;
mbr = nodes.elementAt(i_first_child + i_child);
double t1 = mbr.t1;
double x1 = mbr.x1;
double y1 = mbr.y1;
double t2 = mbr.t2;
double x2 = mbr.x2;
double y2 = mbr.y2;
i_child++;
while (i_child < degree) {
mbr = nodes.elementAt(i_first_child + i_child);
if (mbr.t1 < t1)
t1 = mbr.t1;
if (mbr.x1 < x1)
x1 = mbr.x1;
if (mbr.y1 < y1)
y1 = mbr.y1;
if (mbr.t2 > t2)
t2 = mbr.t2;
if (mbr.x2 > x2)
x2 = mbr.x2;
if (mbr.y2 > y2)
y2 = mbr.y2;
i_child++;
}
nodes.elementAt(i_node).set(t1, x1, y1, t2, x2, y2);
}
// Start writing the tree
// write tree header (including size)
// Total tree size. (== Total bytes written - 8 bytes for the size
// itself)
dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len);
// Tree height
dataOut.writeInt(height);
// Degree
dataOut.writeInt(degree);
dataOut.writeInt(elementCount);
//isColumnar
dataOut.writeInt(columnarStorage ? 1 : 0);
// write nodes
for (SplitStruct node : nodes) {
node.write(dataOut);
}
// write elements
if (columnarStorage){
byte[] index_bs = index_bos.toByteArray();
byte[][] bss = new byte[bos.length][];
for (int i = 0; i < bss.length; i++){
bss[i] = bos[i].toByteArray();
}
for (int element_i = 0; element_i < elementCount; element_i++) {
//int eol = skipToEOL(element_bytes, offsets[element_i]);
//dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
dataOut.write(index_bs, ids[element_i]*IndexUnitSize, IndexUnitSize);
}
for (int i = 0; i < fields.length; i++){
int fieldSize = 0;
if (fields[i].getType().equals(Integer.TYPE)){
fieldSize = 4;
}
else if (fields[i].getType().equals(Long.TYPE)){
fieldSize = 8;
}
else if (fields[i].getType().equals(Double.TYPE)){
fieldSize = 8;
}
else{
//throw new RuntimeException("Unsupported field type: " + fields[i].getType().getName());
continue;
}
for (int element_i = 0; element_i < elementCount; element_i++) {
//int eol = skipToEOL(element_bytes, offsets[element_i]);
//dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
dataOut.write(bss[i], ids[element_i]*fieldSize, fieldSize);
}
}
}
else{
for (int element_i = 0; element_i < elementCount; element_i++) {
int eol = skipToEOL(element_bytes, offsets[element_i]);
dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]);
}
}
} catch (IOException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@Override
public void write(DataOutput out) throws IOException {
    // Bulk-loaded trees are serialized exclusively through bulkLoadWrite;
    // incremental Writable serialization is intentionally unsupported.
    final String message =
            "write is no longer supported. Please use bulkLoadWrite to write the RTree.";
    throw new RuntimeException(message);
}
/**
 * Deserializes the tree header and internal node structure.
 *
 * Only the header and the node array are loaded into memory (the
 * "structure" stream); records stay behind in the underlying stream
 * ("data") and are read lazily during queries and iteration.
 */
@Override
public void readFields(DataInput in) throws IOException {
  // Tree size (Header + structure + data)
  treeSize = in.readInt();
  if (treeSize == 0) {
    // A zero total size marks an empty (never written) tree
    height = elementCount = 0;
    return;
  }
  // Read only the tree structure in memory while actual records remain on
  // disk and loaded when necessary
  height = in.readInt();
  if (height == 0)
    return;
  degree = in.readInt();
  elementCount = in.readInt();
  // Storage layout flag: 1 = columnar, 0 = row (line-per-record)
  columnar = in.readInt() == 1;
  // Keep only tree structure in memory
  nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1));
  int structureSize = nodeCount * NodeSize;
  byte[] treeStructure = new byte[structureSize];
  in.readFully(treeStructure, 0, structureSize);
  structure = new FSDataInputStream(new MemoryInputStream(treeStructure));
  if (in instanceof FSDataInputStream) {
    // Records can be read straight from the backing file; remember where
    // the tree header started so record offsets can be translated.
    this.treeStartOffset = ((FSDataInputStream) in).getPos()
        - structureSize - TreeHeaderSize;
    this.data = (FSDataInputStream) in;
  } else {
    // Load all tree data in memory
    this.treeStartOffset = 0 - structureSize - TreeHeaderSize;
    int treeDataSize = treeSize - TreeHeaderSize - structureSize;
    byte[] treeData = new byte[treeDataSize];
    in.readFully(treeData, 0, treeDataSize);
    this.data = new FSDataInputStream(new MemoryInputStream(treeData));
  }
  nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
  leafNodeCount = (int) Math.pow(degree, height - 1);
  nonLeafNodeCount = nodeCount - leafNodeCount;
}
/**
 * Reads and skips the header of the tree returning the total number of
 * bytes skipped from the stream. This is used as a preparatory function to
 * read all elements in the tree without the index part.
 *
 * @param in stream positioned at the first byte of the tree header
 * @return - Total number of bytes read and skipped
 * @throws IOException
 */
public static int skipHeader(InputStream in) throws IOException {
  DataInput dataIn = in instanceof DataInput ? (DataInput) in
      : new DataInputStream(in);
  int skippedBytes = 0;
  /* int treeSize = */dataIn.readInt();
  skippedBytes += 4;
  int height = dataIn.readInt();
  skippedBytes += 4;
  if (height == 0) {
    // Empty tree. No results
    return skippedBytes;
  }
  int degree = dataIn.readInt();
  skippedBytes += 4;
  int nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1));
  /* int elementCount = */dataIn.readInt();
  skippedBytes += 4;
  // BUGFIX: also consume the columnar-storage flag. bulkLoadWrite() writes
  // five ints (treeSize, height, degree, elementCount, columnar) before
  // the node array, and readFields() reads all five; without this read the
  // skip below was misaligned by four bytes.
  /* boolean columnar = */dataIn.readInt();
  skippedBytes += 4;
  // Skip all nodes
  dataIn.skipBytes(nodeCount * NodeSize);
  skippedBytes += nodeCount * NodeSize;
  return skippedBytes;
}
/**
 * Returns the total size of the header (including the index) in bytes.
 * Assume that the input is aligned to the start offset of the tree
 * (header). Note that the part of the header is consumed from the given
 * input to be able to determine header size.
 *
 * @param in stream positioned at the first byte of the tree header
 * @return total header size in bytes (fixed header + node array)
 * @throws IOException
 */
public static int getHeaderSize(DataInput in) throws IOException {
  int header_size = 0;
  /* int treeSize = */in.readInt();
  header_size += 4;
  int height = in.readInt();
  header_size += 4;
  if (height == 0) {
    // Empty tree. No results
    return header_size;
  }
  int degree = in.readInt();
  header_size += 4;
  int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
  /* int elementCount = */in.readInt();
  header_size += 4;
  // BUGFIX: account for the columnar-storage flag written by
  // bulkLoadWrite() and read back by readFields(); without it the reported
  // header size was four bytes short of the actual on-disk header.
  /* boolean columnar = */in.readInt();
  header_size += 4;
  // Add the size of all nodes
  header_size += nodeCount * NodeSize;
  return header_size;
}
/**
 * Returns total number of elements (records) stored in the tree, as read
 * from the tree header.
 *
 * @return the element count
 */
public int getElementCount() {
  return elementCount;
}
/**
 * Returns the MBR of the root
 *
 * @return minimal bounding Prism of the whole tree, or null on I/O error
 */
public Prism getMBR() {
  Prism mbr = null;
  try {
    // MBR of the tree is the MBR of the root node
    structure.seek(0);
    mbr = new Prism();
    // Skip the root's leading data-offset int that precedes its MBR
    /* int offset = */structure.readInt();
    mbr.readFields(structure);
  } catch (IOException e) {
    e.printStackTrace();
  }
  return mbr;
}
/**
 * Reads and returns the element with the given index
 *
 * @param i zero-based index of the element in iteration order
 * @return the i-th element
 */
public T readElement(int i) {
  Iterator<T> iter = iterator();
  // Skip the first i elements, then return the next one.
  // NOTE(review): no bounds check — if i >= getElementCount() the final
  // next() runs on an exhausted iterator; confirm callers always pass a
  // valid index.
  while (i-- > 0 && iter.hasNext()) {
    iter.next();
  }
  return iter.next();
}
/**
 * Sets the template record used to deserialize elements read from the
 * tree (the iterator clones it; queries reuse it directly).
 *
 * @param stockObject prototype instance of the record type
 */
public void setStockObject(T stockObject) {
  this.stockObject = stockObject;
}
/**
 * Create Prisms that together pack all points in sample such that each
 * Prism contains roughly the same number of points. In other words it tries
 * to balance number of points in each Prism. Works similar to the logic of
 * bulkLoad but does only one level of Prisms.
 *
 * @param gridInfo
 *            - Used as a hint for number of Prisms per layer/row/column
 * @param sample the sample points; the array is sorted in place
 * @return array of gridInfo.layers * gridInfo.columns * gridInfo.rows
 *         Prisms covering the whole space, balanced by sample counts
 */
public static Prism[] packInPrisms(GridInfo gridInfo, final Point3d[] sample) {
  Prism[] Prisms = new Prism[gridInfo.layers * gridInfo.columns * gridInfo.rows];
  int iPrism = 0;
  // Sort in t direction
  final IndexedSortable sortableT = new IndexedSortable() {
    @Override
    public void swap(int i, int j) {
      Point3d temp = sample[i];
      sample[i] = sample[j];
      sample[j] = temp;
    }

    @Override
    public int compare(int i, int j) {
      if (sample[i].t < sample[j].t)
        return -1;
      if (sample[i].t > sample[j].t)
        return 1;
      return 0;
    }
  };
  // Sort in x direction
  final IndexedSortable sortableX = new IndexedSortable() {
    @Override
    public void swap(int i, int j) {
      Point3d temp = sample[i];
      sample[i] = sample[j];
      sample[j] = temp;
    }

    @Override
    public int compare(int i, int j) {
      if (sample[i].x < sample[j].x)
        return -1;
      if (sample[i].x > sample[j].x)
        return 1;
      return 0;
    }
  };
  // Sort in y direction
  final IndexedSortable sortableY = new IndexedSortable() {
    @Override
    public void swap(int i, int j) {
      Point3d temp = sample[i];
      sample[i] = sample[j];
      sample[j] = temp;
    }

    @Override
    public int compare(int i, int j) {
      if (sample[i].y < sample[j].y)
        return -1;
      if (sample[i].y > sample[j].y)
        return 1;
      return 0;
    }
  };
  final QuickSort quickSort = new QuickSort();
  quickSort.sort(sortableT, 0, sample.length);
  int tindex1 = 0;
  double t1 = gridInfo.t1;
  for (int lay = 0; lay < gridInfo.layers; lay++) {
    int tindex2 = sample.length * (lay + 1) / gridInfo.layers;
    double t2 = lay == gridInfo.layers - 1 ? gridInfo.t2 : sample[tindex2 - 1].t;
    // Partition only this layer's points [tindex1, tindex2) into columns
    quickSort.sort(sortableX, tindex1, tindex2);
    int xindex1 = tindex1;
    double x1 = gridInfo.x1;
    for (int col = 0; col < gridInfo.columns; col++) {
      // BUGFIX: the column boundary must be computed within the current
      // layer's range; the previous formula
      // (sample.length * (col + 1) / gridInfo.columns) ignored the layer
      // bounds and was only correct when gridInfo.layers == 1.
      int xindex2 = tindex1 + (tindex2 - tindex1) * (col + 1)
          / gridInfo.columns;
      // Determine extents for all Prisms in this column
      double x2 = col == gridInfo.columns - 1 ? gridInfo.x2
          : sample[xindex2 - 1].x;
      // Sort all points in this column according to its y-coordinate
      quickSort.sort(sortableY, xindex1, xindex2);
      // Create Prisms in this column
      double y1 = gridInfo.y1;
      for (int row = 0; row < gridInfo.rows; row++) {
        int yindex2 = xindex1 + (xindex2 - xindex1) * (row + 1)
            / gridInfo.rows;
        double y2 = row == gridInfo.rows - 1 ? gridInfo.y2
            : sample[yindex2 - 1].y;
        Prisms[iPrism++] = new Prism(t1, x1, y1, t2, x2, y2);
        y1 = y2;
      }
      xindex1 = xindex2;
      x1 = x2;
    }
    // BUGFIX: advance the layer window; previously tindex1/t1 were never
    // updated, so every layer after the first reused the first layer's
    // start index and t-extent.
    tindex1 = tindex2;
    t1 = t2;
  }
  return Prisms;
}
/**
 * An iterator that goes over all elements in the tree in no particular
 * order
 *
 * Records are streamed straight from the data section (which starts right
 * after the fixed header and the node array) and deserialized one text
 * line at a time into a single reused stock object.
 *
 * @author tonyren, eldawy
 *
 */
class RTreeIterator implements Iterator<T> {
  /** Current offset in the data stream */
  int offset;
  /** Temporary text that holds one line to deserialize objects */
  Text line;
  /** A stock object to read from stream */
  T _stockObject;
  /** A reader to read lines from the tree */
  LineReader reader;

  RTreeIterator() throws IOException {
    // Records begin immediately after the header and node structure
    offset = TreeHeaderSize + NodeSize * RTree.this.nodeCount;
    _stockObject = (T) RTree.this.stockObject.clone();
    line = new Text();
    RTree.this.data.seek(offset + RTree.this.treeStartOffset);
    reader = new LineReader(RTree.this.data);
  }

  @Override
  public boolean hasNext() {
    // treeSize is the offset just past the last record
    return offset < RTree.this.treeSize;
  }

  @Override
  public T next() {
    try {
      offset += reader.readLine(line);
      _stockObject.fromText(line);
    } catch (IOException e) {
      e.printStackTrace();
      return null;
    }
    // The same instance is returned on every call; callers must clone it
    // to retain a value across iterations (see spatialJoinMemory).
    return _stockObject;
  }

  @Override
  public void remove() {
    throw new RuntimeException("Not supported");
  }
}
/**
 * Advances past the current line: scans from startOffset to the first
 * end-of-line byte, then consumes the entire run of consecutive '\n'/'\r'
 * bytes, returning the offset of the first byte after them. When no line
 * terminator exists, returns bytes.length.
 *
 * @param bytes buffer to scan
 * @param startOffset position to start scanning from
 * @return offset just past the line terminator run (or end of buffer)
 */
public static int skipToEOL(byte[] bytes, int startOffset) {
  int pos = startOffset;
  // Scan forward until a line-break byte or the end of the buffer
  while (pos < bytes.length) {
    byte b = bytes[pos];
    if (b == '\n' || b == '\r')
      break;
    pos++;
  }
  // Swallow the whole terminator run ("\n", "\r", "\r\n", "\n\n", ...)
  while (pos < bytes.length) {
    byte b = bytes[pos];
    if (b != '\n' && b != '\r')
      break;
    pos++;
  }
  return pos;
}
/**
 * Returns an iterator over all records of the tree in storage order.
 *
 * @return a new iterator, or null if the underlying stream fails to seek
 */
@Override
public Iterator<T> iterator() {
  try {
    return new RTreeIterator();
  } catch (IOException e) {
    e.printStackTrace();
  }
  return null;
}
/**
 * Given a block size, record size and a required tree degree, this function
 * calculates the maximum number of records that can be stored in this block
 * taking into consideration the overhead needed by node structure.
 *
 * @param blockSize total bytes available for header + nodes + records
 * @param degree fan-out of the tree
 * @param recordSize size of one record in bytes
 * @return estimated maximum number of records that fit in the block
 */
public static int getBlockCapacity(long blockSize, int degree,
    int recordSize) {
  // Amortized per-record node overhead: each record costs roughly
  // NodeSize / (degree - 1) extra bytes of internal-node structure
  double a = (double) NodeSize / (degree - 1);
  double ratio = (blockSize + a) / (recordSize + a);
  // Height at which structure overhead and record storage break even
  double break_even_height = Math.log(ratio) / Math.log(degree);
  double h_min = Math.floor(break_even_height);
  // Estimate 1: records a full tree of height h_min can index
  double capacity1 = Math.floor(Math.pow(degree, h_min));
  double structure_size = 4 + TreeHeaderSize + a
      * (capacity1 * degree - 1);
  // Estimate 2: records that fit once that structure is subtracted
  double capacity2 = Math
      .floor((blockSize - structure_size) / recordSize);
  return Math.max((int) capacity1, (int) capacity2);
}
/**
 * Searches the RTree starting from the given start position. This is either
 * a node number or offset of an element. If it's a node number, it performs
 * the search in the subtree rooted at this node. If it's an offset number,
 * it searches only the object found there. It is assumed that the
 * openQuery() has been called before this function and that endQuery() will
 * be called afterwards.
 *
 * Columnar variant: instead of whole records, only the single column named
 * by {@code field} is projected and reported, read from the column-major
 * data section that follows the per-record t/x/y index entries.
 *
 * @param query_shape the query range
 * @param output collector for the projected values (must be non-null)
 * @param start
 *            - where to start searching (root node is 0)
 * @param end
 *            - where to end searching. Only used when start is an offset of
 *            an object.
 * @param field declared field of the stock object to project; int, long
 *            and double fields are supported
 * @return number of values reported
 * @throws IOException
 */
protected int searchColumnar(Shape query_shape, ResultCollector<Writable> output,
    int start, int end, String field) throws IOException {
  if (output == null) {
    throw new RuntimeException("Output is NULL");
  }
  // Locate the requested column: fieldOffset accumulates the byte length
  // of every column stored before it (each column holds elementCount
  // fixed-size values)
  int fieldOffset = 0;
  int fieldSize = -1;
  FIELD_TYPE fieldType = FIELD_TYPE.NULL;
  // get fields
  Field[] fields = stockObject.getClass().getDeclaredFields();
  for (int i = 0; i < fields.length; i++) {
    if (fields[i].getName().equals(field)) {
      if (fields[i].getType().equals(Integer.TYPE)) {
        fieldSize = 4;
        fieldType = FIELD_TYPE.Integer;
      }
      else if (fields[i].getType().equals(Long.TYPE)) {
        fieldSize = 8;
        fieldType = FIELD_TYPE.Long;
      }
      else if (fields[i].getType().equals(Double.TYPE)) {
        fieldSize = 8;
        fieldType = FIELD_TYPE.Double;
      }
      else {
        // Unsupported column type: fieldType stays NULL and the t/x/y
        // point is reported instead (see the switch below)
        //throw new RuntimeException("Unsupported type: " + fields[i].getType());
      }
      break;
    }
    else {
      if (fields[i].getType().equals(Integer.TYPE)) {
        fieldOffset += elementCount * 4;
      }
      else if (fields[i].getType().equals(Long.TYPE) || fields[i].getType().equals(Double.TYPE)) {
        fieldOffset += elementCount * 8;
      }
      else {
        // Non-numeric fields occupy no space in the column section
        //throw new RuntimeException("Unsupported type: " + fields[i].getType());
      }
    }
  }
  Prism query_mbr = query_shape.getMBR();
  int resultSize = 0;
  // Special case for an empty tree
  if (height == 0)
    return 0;
  Stack<Integer> toBeSearched = new Stack<Integer>();
  // Start from the given node
  toBeSearched.push(start);
  if (start >= nodeCount) {
    toBeSearched.push(end);
  }
  Prism node_mbr = new Prism();
  // Holds one data line from tree data
  Text line = new Text2();
  while (!toBeSearched.isEmpty()) {
    int searchNumber = toBeSearched.pop();
    // The root block stores one MBR; every other node block stores degree
    int mbrsToTest = searchNumber == 0 ? 1 : degree;
    if (searchNumber < nodeCount) {
      long nodeOffset = NodeSize * searchNumber;
      structure.seek(nodeOffset);
      int dataOffset = structure.readInt();
      for (int i = 0; i < mbrsToTest; i++) {
        node_mbr.readFields(structure);
        // NOTE(review): the last node uses elementCount - 1 while the
        // row-layout search() uses treeSize here; confirm the final
        // record is not dropped when lastOffset > dataOffset.
        int lastOffset = (searchNumber + i) == nodeCount - 1 ? elementCount - 1
            : structure.readInt();
        if (query_mbr.contains(node_mbr)) {
          // The node is fully contained in the query range: report the
          // whole run of column values [dataOffset, lastOffset) without
          // testing individual records.
          // Checks if this node is the last node in its level: the next
          // node in level-order would then be the first node of the next
          // level and have a smaller offset than this node.
          if (lastOffset <= dataOffset)
            lastOffset = elementCount;
          data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize + elementCount * IndexUnitSize + fieldOffset + dataOffset * fieldSize);
          for (int j = 0; j < lastOffset - dataOffset; j++) {
            switch (fieldType) {
            case Integer:
              output.collect(new IntWritable(data.readInt()));
              break;
            case Long:
              output.collect(new LongWritable(data.readLong()));
              break;
            case Double:
              output.collect(new DoubleWritable(data.readDouble()));
              break;
            default:
              output.collect(new Point3d(data.readDouble(), data.readDouble(), data.readDouble()));
              break;
            }
            resultSize++;
          }
        } else if (query_mbr.isIntersected(node_mbr)) {
          // Node partially overlaps with query. Go deep under this node
          if (searchNumber < nonLeafNodeCount) {
            // Search child nodes
            toBeSearched.push((searchNumber + i) * degree + 1);
          } else {
            // Leaf node: test every record in it individually.
            // Last-node-in-level check, as above.
            if (lastOffset <= dataOffset)
              lastOffset = elementCount;
            // Pass 1: read the t/x/y index entries for this leaf and
            // mark which records intersect the query
            data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize + dataOffset * IndexUnitSize);
            boolean report[] = new boolean[lastOffset - dataOffset];
            Point3d point = new Point3d();
            for (int j = 0; j < lastOffset - dataOffset; j++) {
              point.t = data.readDouble();
              point.x = data.readDouble();
              point.y = data.readDouble();
              if (point.isIntersected(query_shape)) {
                report[j] = true;
              }
              else
                report[j] = false;
            }
            // Pass 2: read the projected column for the same record
            // range and emit values for the marked records
            data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize + elementCount * IndexUnitSize + fieldOffset + dataOffset * fieldSize);
            for (int j = 0; j < lastOffset - dataOffset; j++) {
              // BUGFIX: the column values are contiguous, so record j's
              // value must be consumed even when the record is not
              // reported. Previously non-matching records were skipped
              // without reading, desynchronizing the stream so later
              // matches emitted values belonging to other records.
              switch (fieldType) {
              case Integer: {
                IntWritable v = new IntWritable(data.readInt());
                if (report[j]) {
                  output.collect(v);
                  resultSize++;
                }
                break;
              }
              case Long: {
                LongWritable v = new LongWritable(data.readLong());
                if (report[j]) {
                  output.collect(v);
                  resultSize++;
                }
                break;
              }
              case Double: {
                DoubleWritable v = new DoubleWritable(data.readDouble());
                if (report[j]) {
                  output.collect(v);
                  resultSize++;
                }
                break;
              }
              default: {
                Point3d p = new Point3d(data.readDouble(), data.readDouble(), data.readDouble());
                if (report[j]) {
                  output.collect(p);
                  resultSize++;
                }
                break;
              }
              }
            }
          }
        }
        dataOffset = lastOffset;
      }
    } else {
      LOG.error("searchNumber > nodeCount, something is wrong");
      int firstOffset, lastOffset;
      // Search for data items (records)
      lastOffset = searchNumber;
      firstOffset = toBeSearched.pop();
      data.seek(firstOffset + treeStartOffset);
      LineReader lineReader = new LineReader(data);
      while (firstOffset < lastOffset) {
        firstOffset += lineReader.readLine(line);
        stockObject.fromText(line);
        if (stockObject.isIntersected(query_shape)) {
          resultSize++;
          if (output != null)
            output.collect(stockObject);
        }
      }
    }
  }
  return resultSize;
}
/**
 * Row-layout range search starting from the given position. If start is a
 * node number, the subtree rooted at that node is searched; element
 * ranges discovered along the way are pushed as (lastOffset, firstOffset)
 * byte-offset pairs and scanned line by line.
 *
 * @param query_shape the query range
 * @param output collector for matching records; may be null to only count
 * @param start - where to start searching (root node is 0)
 * @param end - only used when start is an offset of an object
 * @return number of matching records
 * @throws IOException
 */
protected int search(Shape query_shape, ResultCollector<T> output,
    int start, int end) throws IOException {
  Prism query_mbr = query_shape.getMBR();
  int resultSize = 0;
  // Special case for an empty tree
  if (height == 0)
    return 0;
  Stack<Integer> toBeSearched = new Stack<Integer>();
  // Start from the given node
  toBeSearched.push(start);
  if (start >= nodeCount) {
    toBeSearched.push(end);
  }
  Prism node_mbr = new Prism();
  // Holds one data line from tree data
  Text line = new Text2();
  while (!toBeSearched.isEmpty()) {
    int searchNumber = toBeSearched.pop();
    // The root block stores one MBR; every other node block stores degree
    int mbrsToTest = searchNumber == 0 ? 1 : degree;
    if (searchNumber < nodeCount) {
      long nodeOffset = NodeSize * searchNumber;
      structure.seek(nodeOffset);
      int dataOffset = structure.readInt();
      for (int i = 0; i < mbrsToTest; i++) {
        node_mbr.readFields(structure);
        int lastOffset = (searchNumber + i) == nodeCount - 1 ? treeSize
            : structure.readInt();
        if (query_mbr.contains(node_mbr)) {
          // The node is full contained in the query range.
          // Save the time and do full scan for this node
          toBeSearched.push(dataOffset);
          // Checks if this node is the last node in its level
          // This can be easily detected because the next node in
          // the level
          // order traversal will be the first node in the next
          // level
          // which means it will have an offset less than this
          // node
          if (lastOffset <= dataOffset)
            lastOffset = treeSize;
          toBeSearched.push(lastOffset);
        } else if (query_mbr.isIntersected(node_mbr)) {
          // Node partially overlaps with query. Go deep under
          // this node
          if (searchNumber < nonLeafNodeCount) {
            // Search child nodes
            toBeSearched.push((searchNumber + i) * degree + 1);
          } else {
            // Search all elements in this node
            toBeSearched.push(dataOffset);
            // Checks if this node is the last node in its level
            // This can be easily detected because the next node
            // in the level
            // order traversal will be the first node in the
            // next level
            // which means it will have an offset less than this
            // node
            if (lastOffset <= dataOffset)
              lastOffset = treeSize;
            toBeSearched.push(lastOffset);
          }
        }
        dataOffset = lastOffset;
      }
    } else {
      int firstOffset, lastOffset;
      // Search for data items (records)
      lastOffset = searchNumber;
      firstOffset = toBeSearched.pop();
      data.seek(firstOffset + treeStartOffset);
      LineReader lineReader = new LineReader(data);
      while (firstOffset < lastOffset) {
        firstOffset += lineReader.readLine(line);
        stockObject.fromText(line);
        if (stockObject.isIntersected(query_shape)) {
          resultSize++;
          if (output != null)
            output.collect(stockObject);
        }
      }
    }
  }
  return resultSize;
}
/**
 * Performs a range query over this tree using the given query range,
 * projecting the named column of every matching record.
 *
 * @param query
 *            - The query Prism to use (TODO make it any shape not just
 *            Prism)
 * @param output
 *            - Projected values found are reported to this output
 * @param field name of the column to project
 * @return - Total number of records found (0 on I/O failure)
 */
public int searchColumnar(Shape query, ResultCollector<Writable> output, String field) {
  try {
    // Begin the traversal at the root node (node number 0)
    return searchColumnar(query, output, 0, 0, field);
  } catch (IOException e) {
    e.printStackTrace();
    return 0;
  }
}
/**
 * Performs a range query over this tree using the given query range,
 * reporting full records (row layout).
 *
 * @param query the query shape
 * @param output collector for matching records; may be null to only count
 * @param field unused in this (non-columnar) variant; kept for signature
 *            symmetry with searchColumnar
 * @return total number of records found (0 on I/O failure)
 */
public int search(Shape query, ResultCollector<T> output, String field) {
  int resultCount = 0;
  try {
    resultCount = search(query, output, 0, 0);
  } catch (IOException e) {
    e.printStackTrace();
  }
  return resultCount;
}
/**
 * k nearest neighbor query Note: Current algorithm is approximate just for
 * simplicity. Writing an exact algorithm is on our TODO list
 *
 * @param qt query t-coordinate
 * @param qx query x-coordinate
 * @param qy query y-coordinate
 * @param k number of neighbors requested
 * @param output collector receiving each result with its distance
 * @return number of results reported, i.e. min(k, elements found)
 */
public int knn(final double qt, final double qx, final double qy, int k,
    final ResultCollector2<T, Double> output) {
  // Initial guess: radius of a circle whose area would hold about k
  // points if the sample were uniform over the tree's x/y extent
  double query_area = ((getMBR().x2 - getMBR().x1) * (getMBR().y2 - getMBR().y1))
      * k / getElementCount();
  double query_radius = Math.sqrt(query_area / Math.PI);
  boolean result_correct;
  final Vector<Double> distances = new Vector<Double>();
  final Vector<T> shapes = new Vector<T>();
  // Find results in the range and increase this range if needed to ensure
  // correctness of the answer
  do {
    // Initialize result and query range
    distances.clear();
    shapes.clear();
    Prism queryRange = new Prism();
    // NOTE(review): only the x/y extent is set; t1/t2 keep the Prism's
    // default values — confirm whether the t dimension should also be
    // bounded here.
    queryRange.x1 = qx - query_radius / 2;
    queryRange.y1 = qy - query_radius / 2;
    queryRange.x2 = qx + query_radius / 2;
    queryRange.y2 = qy + query_radius / 2;
    // Retrieve all results in range
    // NOTE(review): field is null, so searchColumnar follows its
    // default (Point3d) projection path — confirm this is intended.
    searchColumnar(queryRange, new ResultCollector<Writable>() {
      @Override
      public void collect(Writable shape) {
        distances.add(((T) shape).distanceTo(qt, qx, qy));
        // Clone: collectors receive a reused instance
        shapes.add((T) ((T) shape).clone());
      }
    }, null);
    if (shapes.size() < k) {
      // Didn't find k elements in range, double the range to get more
      // items
      if (shapes.size() == getElementCount()) {
        // Already returned all possible elements
        result_correct = true;
      } else {
        query_radius *= 2;
        result_correct = false;
      }
    } else {
      // Sort items by distance to get the kth neighbor
      IndexedSortable s = new IndexedSortable() {
        @Override
        public void swap(int i, int j) {
          double temp_distance = distances.elementAt(i);
          distances.set(i, distances.elementAt(j));
          distances.set(j, temp_distance);
          T temp_shape = shapes.elementAt(i);
          shapes.set(i, shapes.elementAt(j));
          shapes.set(j, temp_shape);
        }

        @Override
        public int compare(int i, int j) {
          // Note. Equality is not important to check because
          // items with the
          // same distance can be ordered anyway.
          if (distances.elementAt(i) < distances.elementAt(j))
            return -1;
          return 1;
        }
      };
      IndexedSorter sorter = new QuickSort();
      sorter.sort(s, 0, shapes.size());
      if (distances.elementAt(k - 1) > query_radius) {
        // kth result lies outside the radius whose contents are
        // guaranteed complete; grow the radius and retry.
        // NOTE(review): elementAt(k) throws when exactly k results
        // were found — should this be elementAt(k - 1)? Confirm.
        result_correct = false;
        query_radius = distances.elementAt(k);
      } else {
        result_correct = true;
      }
    }
  } while (!result_correct);
  int result_size = Math.min(k, shapes.size());
  if (output != null) {
    for (int i = 0; i < result_size; i++) {
      output.collect(shapes.elementAt(i), distances.elementAt(i));
    }
  }
  return result_size;
}
/**
 * Naive in-memory spatial join: materializes every record of both trees
 * into arrays and delegates to the plane-sweep join algorithm.
 *
 * @param R first tree
 * @param S second tree
 * @param output collector receiving each overlapping (r, s) pair
 * @return number of overlapping pairs found
 * @throws IOException
 */
protected static <S1 extends Shape, S2 extends Shape> int spatialJoinMemory(
    final RTree<S1> R, final RTree<S2> S,
    final ResultCollector2<S1, S2> output) throws IOException {
  S1[] rs = (S1[]) Array.newInstance(R.stockObject.getClass(),
      R.getElementCount());
  int i = 0;
  // Clone each record: the tree iterator reuses a single stock object
  for (S1 r : R)
    rs[i++] = (S1) r.clone();
  if (i != rs.length)
    throw new RuntimeException(i + "!=" + rs.length);
  S2[] ss = (S2[]) Array.newInstance(S.stockObject.getClass(),
      S.getElementCount());
  i = 0;
  for (S2 s : S)
    ss[i++] = (S2) s.clone();
  if (i != ss.length)
    throw new RuntimeException(i + "!=" + ss.length);
  return SpatialAlgorithms.SpatialJoin_planeSweep(rs, ss, output);
}
// LRU cache used to avoid deserializing the same records again and again
static class LruCache<A, B> extends LinkedHashMap<A, B> {
  private static final long serialVersionUID = 702044567572914544L;

  /** Hard cap on the number of entries kept in the map. */
  private final int maxEntries;
  /** Value of the most recently evicted entry, retained for reuse. */
  private B unusedEntry;

  public LruCache(final int maxEntries) {
    // Access-ordered map, sized so eviction triggers exactly at the cap
    super(maxEntries + 1, 1.0f, true);
    this.maxEntries = maxEntries;
  }

  @Override
  protected boolean removeEldestEntry(final Map.Entry<A, B> eldest) {
    final boolean evict = super.size() > maxEntries;
    if (evict)
      unusedEntry = eldest.getValue();
    return evict;
  }

  /** Hands out the last evicted value (at most once), clearing the slot. */
  public B popUnusedEntry() {
    final B evicted = unusedEntry;
    unusedEntry = null;
    return evicted;
  }
}
/**
 * Performs a spatial join between records in two R-trees
 *
 * Works directly on the on-disk representation: node pairs to compare are
 * kept in a queue of longs (high 32 bits = R node number, low 32 bits = S
 * node number); when both sides reach leaves, their records are
 * deserialized (with LRU caching) and compared pairwise.
 *
 * @param R first tree (read from disk)
 * @param S second tree (read from disk)
 * @param output collector receiving each overlapping (r, s) pair
 * @return number of overlapping pairs found
 * @throws IOException
 */
protected static <S1 extends Shape, S2 extends Shape> int spatialJoinDisk(
    final RTree<S1> R, final RTree<S2> S,
    final ResultCollector2<S1, S2> output) throws IOException {
  // Reserve locations for nodes MBRs and data offset [start, end)
  final Prism[] r_nodes = new Prism[R.degree];
  for (int i = 0; i < r_nodes.length; i++)
    r_nodes[i] = new Prism();
  final int[] r_data_offset = new int[R.degree + 1];
  final Prism[] s_nodes = new Prism[S.degree];
  for (int i = 0; i < s_nodes.length; i++)
    s_nodes[i] = new Prism();
  final int[] s_data_offset = new int[S.degree + 1];
  // Min-queue of packed (r_node, s_node) pairs still to be joined
  PriorityQueue<Long> nodesToJoin = new PriorityQueue<Long>() {
    {
      initialize(R.leafNodeCount + S.leafNodeCount);
    }

    @Override
    protected boolean lessThan(Object a, Object b) {
      return ((Long) a) < ((Long) b);
    }
  };
  // Start with the pair of root nodes
  nodesToJoin.put(0L);
  LruCache<Integer, Shape[]> r_records_cache = new LruCache<Integer, Shape[]>(
      R.degree * 2);
  LruCache<Integer, Shape[]> s_records_cache = new LruCache<Integer, Shape[]>(
      S.degree * R.degree * 4);
  Text line = new Text2();
  int result_count = 0;
  LineReader r_lr = null, s_lr = null;
  // Last offset read from r and s
  int r_last_offset = 0;
  int s_last_offset = 0;
  while (nodesToJoin.size() > 0) {
    // Unpack the node pair: high word = R node, low word = S node
    long nodes_to_join = nodesToJoin.pop();
    int r_node = (int) (nodes_to_join >>> 32);
    int s_node = (int) (nodes_to_join & 0xFFFFFFFF);
    // Read all R nodes
    int r_mbrsToTest = r_node == 0 ? 1 : R.degree;
    boolean r_leaf = r_node * R.degree + 1 >= R.nodeCount;
    long nodeOffset = NodeSize * r_node;
    R.structure.seek(nodeOffset);
    for (int i = 0; i < r_mbrsToTest; i++) {
      r_data_offset[i] = R.structure.readInt();
      r_nodes[i].readFields(R.structure);
    }
    r_data_offset[r_mbrsToTest] = (r_node + r_mbrsToTest) == R.nodeCount ? R.treeSize
        : R.structure.readInt();
    // Read all S nodes
    int s_mbrsToTest = s_node == 0 ? 1 : S.degree;
    boolean s_leaf = s_node * S.degree + 1 >= S.nodeCount;
    if (r_leaf != s_leaf) {
      // This case happens when the two trees are of different heights
      if (r_leaf)
        r_mbrsToTest = 1;
      else
        s_mbrsToTest = 1;
    }
    nodeOffset = NodeSize * s_node;
    S.structure.seek(nodeOffset);
    for (int i = 0; i < s_mbrsToTest; i++) {
      s_data_offset[i] = S.structure.readInt();
      s_nodes[i].readFields(S.structure);
    }
    s_data_offset[s_mbrsToTest] = (s_node + s_mbrsToTest) == S.nodeCount ? S.treeSize
        : S.structure.readInt();
    // Find overlapping nodes by Cartesian product
    for (int i = 0; i < r_mbrsToTest; i++) {
      for (int j = 0; j < s_mbrsToTest; j++) {
        if (r_nodes[i].isIntersected(s_nodes[j])) {
          if (r_leaf && s_leaf) {
            // Reached leaf nodes in both trees. Start comparing
            // records
            int r_start_offset = r_data_offset[i];
            int r_end_offset = r_data_offset[i + 1];
            int s_start_offset = s_data_offset[j];
            int s_end_offset = s_data_offset[j + 1];
            // /////////////////////////////////////////////////////////////////
            // Read or retrieve r_records
            Shape[] r_records = r_records_cache
                .get(r_start_offset);
            if (r_records == null) {
              int cache_key = r_start_offset;
              // Reuse the array the cache just evicted, if any
              r_records = r_records_cache.popUnusedEntry();
              if (r_records == null) {
                r_records = new Shape[R.degree * 2];
              }
              // Need to read it from stream
              if (r_last_offset != r_start_offset) {
                long seekTo = r_start_offset
                    + R.treeStartOffset;
                R.data.seek(seekTo);
                r_lr = new LineReader(R.data);
              }
              int record_i = 0;
              while (r_start_offset < r_end_offset) {
                r_start_offset += r_lr.readLine(line);
                if (r_records[record_i] == null)
                  r_records[record_i] = R.stockObject
                      .clone();
                r_records[record_i].fromText(line);
                record_i++;
              }
              r_last_offset = r_start_offset;
              // Nullify other records
              while (record_i < r_records.length)
                r_records[record_i++] = null;
              r_records_cache.put(cache_key, r_records);
            }
            // Read or retrieve s_records
            Shape[] s_records = s_records_cache
                .get(s_start_offset);
            if (s_records == null) {
              int cache_key = s_start_offset;
              // Need to read it from stream
              if (s_lr == null
                  || s_last_offset != s_start_offset) {
                // Need to reposition s_lr (LineReader of S)
                long seekTo = s_start_offset
                    + S.treeStartOffset;
                S.data.seek(seekTo);
                s_lr = new LineReader(S.data);
              }
              s_records = s_records_cache.popUnusedEntry();
              if (s_records == null) {
                s_records = new Shape[S.degree * 2];
              }
              int record_i = 0;
              while (s_start_offset < s_end_offset) {
                s_start_offset += s_lr.readLine(line);
                if (s_records[record_i] == null)
                  s_records[record_i] = S.stockObject
                      .clone();
                s_records[record_i].fromText(line);
                record_i++;
              }
              // Nullify other records
              while (record_i < s_records.length)
                s_records[record_i++] = null;
              // Put in cache
              s_records_cache.put(cache_key, s_records);
              s_last_offset = s_start_offset;
            }
            // Do Cartesian product between records to find
            // overlapping pairs
            for (int i_r = 0; i_r < r_records.length
                && r_records[i_r] != null; i_r++) {
              for (int i_s = 0; i_s < s_records.length
                  && s_records[i_s] != null; i_s++) {
                if (r_records[i_r]
                    .isIntersected(s_records[i_s])) {
                  result_count++;
                  if (output != null) {
                    output.collect((S1) r_records[i_r],
                        (S2) s_records[i_s]);
                  }
                }
              }
            }
            // /////////////////////////////////////////////////////////////////
          } else {
            // Add a new pair to node pairs to be tested
            // Go down one level if possible
            int new_r_node, new_s_node;
            if (!r_leaf) {
              new_r_node = (r_node + i) * R.degree + 1;
            } else {
              new_r_node = r_node + i;
            }
            if (!s_leaf) {
              new_s_node = (s_node + j) * S.degree + 1;
            } else {
              new_s_node = s_node + j;
            }
            long new_pair = (((long) new_r_node) << 32)
                | new_s_node;
            nodesToJoin.put(new_pair);
          }
        }
      }
    }
  }
  return result_count;
}
/**
 * Performs a spatial join between records in two R-trees, using the
 * disk-based algorithm when both trees are backed by a file and falling
 * back to the in-memory join otherwise.
 *
 * @param R first tree
 * @param S second tree
 * @param output collector receiving each overlapping (r, s) pair
 * @return number of overlapping pairs found
 * @throws IOException
 */
public static <S1 extends Shape, S2 extends Shape> int spatialJoin(
    final RTree<S1> R, final RTree<S2> S,
    final ResultCollector2<S1, S2> output) throws IOException {
  // A non-negative start offset means the tree was read from disk
  // (see readFields, which sets a negative offset for in-memory data)
  if (R.treeStartOffset >= 0 && S.treeStartOffset >= 0) {
    // Both trees are read from disk
    return spatialJoinDisk(R, S, output);
  } else {
    return spatialJoinMemory(R, S, output);
  }
}
/**
 * Calculate the storage overhead required to build an RTree for the given
 * number of nodes.
 *
 * @param elementCount number of records to index
 * @param degree tree fan-out
 * @return - storage overhead in bytes
 */
public static int calculateStorageOverhead(int elementCount, int degree) {
  // Update storage overhead
  int height = Math.max(1,
      (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
  int leafNodeCount = (int) Math.pow(degree, height - 1);
  // Collapse one level when the leaves would be underfull.
  // NOTE(review): calculateTreeStorage uses a strict '<' for this same
  // test; confirm which comparison matches the writer's layout.
  if (elementCount <= 2 * leafNodeCount && height > 1) {
    height--;
    leafNodeCount = (int) Math.pow(degree, height - 1);
  }
  int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
  // 4 leading bytes + fixed header + the full node array
  int storage_overhead = 4 + TreeHeaderSize + nodeCount * NodeSize;
  return storage_overhead;
}
/**
 * Find log to the base 2 quickly
 *
 * Returns floor(log2(x)) of the value's highest set bit, with -1 for
 * x == 0. Matches the original manual bit-scan for every int, including
 * negatives (which, scanned as unsigned, yield 31).
 *
 * @param x value to take the logarithm of
 * @return floor(log2(x)), or -1 when x == 0
 */
public static int log2Floor(int x) {
  // Position of the highest set bit via the JDK intrinsic;
  // numberOfLeadingZeros(0) == 32, giving -1 exactly as before.
  return 31 - Integer.numberOfLeadingZeros(x);
}
/**
 * Integer exponentiation by squaring: computes base^exponent using int
 * wrap-around arithmetic. The exponent is consumed bit-by-bit as an
 * unsigned value (>>>), matching the original implementation.
 *
 * @param base the base
 * @param exponent the exponent (interpreted as unsigned)
 * @return base raised to exponent, modulo 2^32
 */
public static int powInt(int base, int exponent) {
  int result = 1;
  for (int b = base, e = exponent; e != 0; e >>>= 1) {
    // Multiply in the current power of the base for every set bit
    if ((e & 1) == 1)
      result *= b;
    b *= b;
  }
  return result;
}
// Precomputed natural logarithms for small arguments, used by fastLog().
private static final double LogLookupTable[];
static {
  // Cache ln(i) for i in [0, 100). Note LogLookupTable[0] holds
  // Math.log(0) == -Infinity, identical to the uncached result.
  int count = 100;
  LogLookupTable = new double[count];
  for (int i = 0; i < count; i++) {
    LogLookupTable[i] = Math.log(i);
  }
}
/**
 * Natural logarithm with a lookup-table fast path for small non-negative
 * arguments (see LogLookupTable); falls back to Math.log otherwise.
 *
 * @param x the argument
 * @return ln(x)
 */
public static double fastLog(int x) {
  return x < LogLookupTable.length ? LogLookupTable[x] : Math.log(x);
}
/**
 * Fast approximate power function a^b built on a bit-level manipulation of
 * the IEEE-754 double representation (arithmetic on the exponent field via
 * longBitsToDouble). The result is an approximation only — adequate for
 * the heuristic in findBestDegree, not for precise math. Assumes a is
 * positive (Math.sqrt of a negative base would yield NaN).
 *
 * @param a base (assumed positive)
 * @param b exponent
 * @return approximate value of a^b
 */
public static double fastPow(double a, double b) {
  final long tmp = (long) (9076650 * (a - 1)
      / (a + 1 + 4 * (Math.sqrt(a))) * b + 1072632447);
  return Double.longBitsToDouble(tmp << 32);
}
/**
 * Find the best (minimum) degree that can index the given number of records
 * such that the whole tree structure can be stored in the given bytes
 * available.
 *
 * @param bytesAvailable bytes available for the header + node structure
 * @param recordCount number of records to index
 * @return the smallest degree whose node structure fits, or
 *         Integer.MAX_VALUE if no tested height/degree combination fits
 */
public static int findBestDegree(int bytesAvailable, int recordCount) {
  // Maximum number of nodes that can be stored in the bytesAvailable
  int maxNodeCount = (bytesAvailable - TreeHeaderSize) / NodeSize;
  // Calculate maximum possible tree height to store the given record
  // count
  int h_max = log2Floor(recordCount / 2);
  // Minimum height is always 1 (degree = recordCount)
  int h_min = 2;
  // Best degree is the minimum degree
  int d_best = Integer.MAX_VALUE;
  double log_recordcount_e = Math.log(recordCount / 2);
  double log_recordcount_2 = log_recordcount_e / fastLog(2);
  // Find the best height among all possible heights
  for (int h = h_min; h <= h_max; h++) {
    // Find the minimum degree for the given height (h)
    // This approximation is good enough for our case.
    // Not proven but tested with millions of random cases
    int d_min = (int) Math.ceil(fastPow(2.0, log_recordcount_2
        / (h + 1)));
    // Some heights are invalid, recalculate the height to ensure it's
    // valid
    int h_recalculated = (int) Math.floor(log_recordcount_e
        / fastLog(d_min));
    if (h != h_recalculated)
      continue;
    int nodeCount = (int) ((powInt(d_min, h + 1) - 1) / (d_min - 1));
    // Keep the smallest degree whose structure fits the byte budget
    if (nodeCount < maxNodeCount && d_min < d_best)
      d_best = d_min;
  }
  return d_best;
}
/**
 * Returns the total number of bytes needed for the header and node
 * structure of a tree indexing elementCount records with the given degree
 * (excluding the records themselves).
 *
 * NOTE(review): the underfull-leaf test below uses a strict '<' while
 * calculateStorageOverhead uses '<='; confirm which comparison matches
 * the writer's layout.
 *
 * @param elementCount number of records to index
 * @param degree tree fan-out
 * @return header + node structure size in bytes
 */
public static int calculateTreeStorage(int elementCount, int degree) {
  int height = Math.max(1,
      (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
  int leafNodeCount = (int) Math.pow(degree, height - 1);
  if (elementCount < 2 * leafNodeCount && height > 1) {
    height--;
    leafNodeCount = (int) Math.pow(degree, height - 1);
  }
  int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
  return TreeHeaderSize + nodeCount * NodeSize;
}
}<|fim▁end|> | }
LOG.info("Bulk loading an RTree with " + elementCount + " elements");
// It turns out the findBestDegree returns the best degree when the |
#![allow(non_camel_case_types)]
extern crate libc;
use libc::{c_char, uint8_t, uint32_t};
use pico_stack::*;
/*
 * FOREIGN FUNCTION INTERFACE
 */
#[link(name = "picotcp")]
extern "C" {
    // Destroys a VDE device previously created with pico_vde_create.
    pub fn pico_vde_destroy(vde: *mut pico_device);
    // Creates a picoTCP device attached to the VDE socket path `sock` with
    // the given name; `mac` points to the MAC address bytes (length per
    // picoTCP convention — confirm against the picoTCP headers).
    pub fn pico_vde_create(sock: *mut c_char, name: *mut c_char, mac: *mut uint8_t) -> *mut pico_device;
    // Configures simulated packet loss (inbound/outbound percentages).
    pub fn pico_vde_set_packetloss(dev: *mut pico_device, in_pct: uint32_t, out_pct: uint32_t);
}
/*
 * RUST FUNCTION INTERFACE
 */
package server
import (
"io"
"time"
)
// DatabaseConfig contains all the information to start the Kappa server.
type DatabaseConfig struct {

	// NodeName is the name of the node. If it is empty, a name
	// will be generated.
	NodeName string

	// ClusterName prevents two clusters from merging.
	ClusterName string

	// ExistingNodes is an array of nodes already in the cluster.
	ExistingNodes []string

	// Bootstrap determines if the server is bootstrapped into the cluster.
	Bootstrap bool

	// BootstrapExpect is the number of nodes expected at bootstrap.
	BootstrapExpect int

	// Build is the running server revision.
	Build string

	// AdminCertificateFile is the path to the admin user's certificate.
	AdminCertificateFile string

	// CACertificateFile is the path to the CA certificate.
	CACertificateFile string

	// DataPath is the root directory for all data produced by the database.
	DataPath string

	// LogOutput is the writer to which all logs are
	// written to. If nil, it defaults to os.Stdout.
	LogOutput io.Writer

	// SSHBindAddress is the address on which the SSH server listens.
	SSHBindAddress string

	// SSHConnectionDeadline is the deadline for maximum connection attempts.
	SSHConnectionDeadline time.Duration

	// SSHPrivateKeyFile refers to the private key file of the SSH server.
	SSHPrivateKeyFile string

	// GossipBindAddr is the address the gossip layer binds to.
	GossipBindAddr string

	// GossipBindPort is the port the gossip layer binds to.
	GossipBindPort int

	// GossipAdvertiseAddr is the address advertised to other gossip members.
	GossipAdvertiseAddr string

	// GossipAdvertisePort is the port advertised to other gossip members.
	GossipAdvertisePort int
}
<|file_name|>mat4d.d.ts<|end_file_name|><|fim▁begin|>declare module goog {
function require(name: 'goog.vec.mat4d'): typeof goog.vec.mat4d;
}
declare module goog.vec.mat4d {
/** @typedef {goog.vec.Float64} */
type Type = goog.vec.Float64;
/**
* Creates a mat4d with all elements initialized to zero.
*
* @return {!goog.vec.mat4d.Type} The new mat4d.
*/
function create(): goog.vec.mat4d.Type;
    /**
     * Initializes the matrix from the set of values. Note the values supplied
     * are in column major order.
     *
     * @param {goog.vec.mat4d.Type} mat The matrix to receive the
* values.
* @param {number} v00 The values at (0, 0).
* @param {number} v10 The values at (1, 0).
* @param {number} v20 The values at (2, 0).
* @param {number} v30 The values at (3, 0).
* @param {number} v01 The values at (0, 1).
* @param {number} v11 The values at (1, 1).
* @param {number} v21 The values at (2, 1).
* @param {number} v31 The values at (3, 1).
* @param {number} v02 The values at (0, 2).
* @param {number} v12 The values at (1, 2).
* @param {number} v22 The values at (2, 2).
* @param {number} v32 The values at (3, 2).
* @param {number} v03 The values at (0, 3).
* @param {number} v13 The values at (1, 3).
* @param {number} v23 The values at (2, 3).
* @param {number} v33 The values at (3, 3).
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setFromValues(mat: goog.vec.mat4d.Type, v00: number, v10: number, v20: number, v30: number, v01: number, v11: number, v21: number, v31: number, v02: number, v12: number, v22: number, v32: number, v03: number, v13: number, v23: number, v33: number): goog.vec.mat4d.Type;
/**
* Initializes mat4d mat from mat4d src.
*
* @param {goog.vec.mat4d.Type} mat The destination matrix.
* @param {goog.vec.mat4d.Type} src The source matrix.
* @return {!goog.vec.mat4d.Type} Return mat so that operations can be
* chained together.
*/
function setFromMat4d(mat: goog.vec.mat4d.Type, src: goog.vec.mat4d.Type): goog.vec.mat4d.Type;
/**
* Initializes mat4d mat from mat4f src (typed as a Float32Array to
* avoid circular goog.requires).
*
* @param {goog.vec.mat4d.Type} mat The destination matrix.
* @param {Float32Array} src The source matrix.
* @return {!goog.vec.mat4d.Type} Return mat so that operations can be
* chained together.
*/
function setFromMat4f(mat: goog.vec.mat4d.Type, src: Float32Array): goog.vec.mat4d.Type;
/**
* Initializes mat4d mat from Array src.
*
* @param {goog.vec.mat4d.Type} mat The destination matrix.
* @param {Array<number>} src The source matrix.
* @return {!goog.vec.mat4d.Type} Return mat so that operations can be
* chained together.
*/
function setFromArray(mat: goog.vec.mat4d.Type, src: Array<number>): goog.vec.mat4d.Type;
/**
* Retrieves the element at the requested row and column.
*
* @param {goog.vec.mat4d.Type} mat The matrix containing the value to
* retrieve.
* @param {number} row The row index.
* @param {number} column The column index.
* @return {number} The element value at the requested row, column indices.
*/
function getElement(mat: goog.vec.mat4d.Type, row: number, column: number): number;
/**
* Sets the element at the requested row and column.
*
* @param {goog.vec.mat4d.Type} mat The matrix containing the value to
* retrieve.
* @param {number} row The row index.
* @param {number} column The column index.
* @param {number} value The value to set at the requested row, column.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setElement(mat: goog.vec.mat4d.Type, row: number, column: number, value: number): goog.vec.mat4d.Type;
/**
* Sets the diagonal values of the matrix from the given values.
*
* @param {goog.vec.mat4d.Type} mat The matrix to receive the values.
* @param {number} v00 The values for (0, 0).
* @param {number} v11 The values for (1, 1).
* @param {number} v22 The values for (2, 2).
* @param {number} v33 The values for (3, 3).
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setDiagonalValues(mat: goog.vec.mat4d.Type, v00: number, v11: number, v22: number, v33: number): goog.vec.mat4d.Type;
/**
* Sets the diagonal values of the matrix from the given vector.
*
* @param {goog.vec.mat4d.Type} mat The matrix to receive the values.
* @param {goog.vec.vec4d.Type} vec The vector containing the values.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setDiagonal(mat: goog.vec.mat4d.Type, vec: goog.vec.vec4d.Type): goog.vec.mat4d.Type;
/**
* Gets the diagonal values of the matrix into the given vector.
*
* @param {goog.vec.mat4d.Type} mat The matrix containing the values.
* @param {goog.vec.vec4d.Type} vec The vector to receive the values.
* @param {number=} opt_diagonal Which diagonal to get. A value of 0 selects the
* main diagonal, a positive number selects a super diagonal and a negative
* number selects a sub diagonal.
* @return {goog.vec.vec4d.Type} return vec so that operations can be
* chained together.
*/
function getDiagonal(mat: goog.vec.mat4d.Type, vec: goog.vec.vec4d.Type, opt_diagonal?: number): goog.vec.vec4d.Type;
/**
* Sets the specified column with the supplied values.
*
* @param {goog.vec.mat4d.Type} mat The matrix to recieve the values.
* @param {number} column The column index to set the values on.
* @param {number} v0 The value for row 0.
* @param {number} v1 The value for row 1.
* @param {number} v2 The value for row 2.
* @param {number} v3 The value for row 3.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setColumnValues(mat: goog.vec.mat4d.Type, column: number, v0: number, v1: number, v2: number, v3: number): goog.vec.mat4d.Type;
/**
* Sets the specified column with the value from the supplied vector.
*
* @param {goog.vec.mat4d.Type} mat The matrix to receive the values.
* @param {number} column The column index to set the values on.
* @param {goog.vec.vec4d.Type} vec The vector of elements for the column.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setColumn(mat: goog.vec.mat4d.Type, column: number, vec: goog.vec.vec4d.Type): goog.vec.mat4d.Type;
/**
* Retrieves the specified column from the matrix into the given vector.
*
* @param {goog.vec.mat4d.Type} mat The matrix supplying the values.
* @param {number} column The column to get the values from.
* @param {goog.vec.vec4d.Type} vec The vector of elements to
* receive the column.
* @return {!goog.vec.vec4d.Type} return vec so that operations can be
* chained together.
*/
function getColumn(mat: goog.vec.mat4d.Type, column: number, vec: goog.vec.vec4d.Type): goog.vec.vec4d.Type;
/**
* Sets the columns of the matrix from the given vectors.
*
* @param {goog.vec.mat4d.Type} mat The matrix to receive the values.
* @param {goog.vec.vec4d.Type} vec0 The values for column 0.
* @param {goog.vec.vec4d.Type} vec1 The values for column 1.
* @param {goog.vec.vec4d.Type} vec2 The values for column 2.
* @param {goog.vec.vec4d.Type} vec3 The values for column 3.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setColumns(mat: goog.vec.mat4d.Type, vec0: goog.vec.vec4d.Type, vec1: goog.vec.vec4d.Type, vec2: goog.vec.vec4d.Type, vec3: goog.vec.vec4d.Type): goog.vec.mat4d.Type;
/**
* Retrieves the column values from the given matrix into the given vectors.
*
* @param {goog.vec.mat4d.Type} mat The matrix supplying the columns.
* @param {goog.vec.vec4d.Type} vec0 The vector to receive column 0.
* @param {goog.vec.vec4d.Type} vec1 The vector to receive column 1.
* @param {goog.vec.vec4d.Type} vec2 The vector to receive column 2.
* @param {goog.vec.vec4d.Type} vec3 The vector to receive column 3.
*/
function getColumns(mat: goog.vec.mat4d.Type, vec0: goog.vec.vec4d.Type, vec1: goog.vec.vec4d.Type, vec2: goog.vec.vec4d.Type, vec3: goog.vec.vec4d.Type): void;
/**
* Sets the row values from the supplied values.
*
* @param {goog.vec.mat4d.Type} mat The matrix to receive the values.
* @param {number} row The index of the row to receive the values.
* @param {number} v0 The value for column 0.
* @param {number} v1 The value for column 1.
* @param {number} v2 The value for column 2.
* @param {number} v3 The value for column 3.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setRowValues(mat: goog.vec.mat4d.Type, row: number, v0: number, v1: number, v2: number, v3: number): goog.vec.mat4d.Type;
/**
* Sets the row values from the supplied vector.
*
* @param {goog.vec.mat4d.Type} mat The matrix to receive the row values.
* @param {number} row The index of the row.
* @param {goog.vec.vec4d.Type} vec The vector containing the values.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setRow(mat: goog.vec.mat4d.Type, row: number, vec: goog.vec.vec4d.Type): goog.vec.mat4d.Type;
/**
* Retrieves the row values into the given vector.
*
* @param {goog.vec.mat4d.Type} mat The matrix supplying the values.
* @param {number} row The index of the row supplying the values.
* @param {goog.vec.vec4d.Type} vec The vector to receive the row.
* @return {!goog.vec.vec4d.Type} return vec so that operations can be
* chained together.
*/
function getRow(mat: goog.vec.mat4d.Type, row: number, vec: goog.vec.vec4d.Type): goog.vec.vec4d.Type;
/**
* Sets the rows of the matrix from the supplied vectors.
*
* @param {goog.vec.mat4d.Type} mat The matrix to receive the values.
* @param {goog.vec.vec4d.Type} vec0 The values for row 0.
* @param {goog.vec.vec4d.Type} vec1 The values for row 1.
* @param {goog.vec.vec4d.Type} vec2 The values for row 2.
* @param {goog.vec.vec4d.Type} vec3 The values for row 3.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained together.
*/
function setRows(mat: goog.vec.mat4d.Type, vec0: goog.vec.vec4d.Type, vec1: goog.vec.vec4d.Type, vec2: goog.vec.vec4d.Type, vec3: goog.vec.vec4d.Type): goog.vec.mat4d.Type;
/**
* Retrieves the rows of the matrix into the supplied vectors.
*
* @param {goog.vec.mat4d.Type} mat The matrix to supply the values.
* @param {goog.vec.vec4d.Type} vec0 The vector to receive row 0.
* @param {goog.vec.vec4d.Type} vec1 The vector to receive row 1.
* @param {goog.vec.vec4d.Type} vec2 The vector to receive row 2.
* @param {goog.vec.vec4d.Type} vec3 The vector to receive row 3.
*/
function getRows(mat: goog.vec.mat4d.Type, vec0: goog.vec.vec4d.Type, vec1: goog.vec.vec4d.Type, vec2: goog.vec.vec4d.Type, vec3: goog.vec.vec4d.Type): void;
/**
* Makes the given 4x4 matrix the zero matrix.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @return {!goog.vec.mat4d.Type} return mat so operations can be chained.
*/
function makeZero(mat: goog.vec.mat4d.Type): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix the identity matrix.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @return {!goog.vec.mat4d.Type} return mat so operations can be chained.
*/
function makeIdentity(mat: goog.vec.mat4d.Type): goog.vec.mat4d.Type;
/**
* Performs a per-component addition of the matrix mat0 and mat1, storing
* the result into resultMat.
*
* @param {goog.vec.mat4d.Type} mat0 The first addend.
* @param {goog.vec.mat4d.Type} mat1 The second addend.
* @param {goog.vec.mat4d.Type} resultMat The matrix to
* receive the results (may be either mat0 or mat1).
* @return {!goog.vec.mat4d.Type} return resultMat so that operations can be
* chained together.
*/
function addMat(mat0: goog.vec.mat4d.Type, mat1: goog.vec.mat4d.Type, resultMat: goog.vec.mat4d.Type): goog.vec.mat4d.Type;
/**
* Performs a per-component subtraction of the matrix mat0 and mat1,
* storing the result into resultMat.
*
* @param {goog.vec.mat4d.Type} mat0 The minuend.
* @param {goog.vec.mat4d.Type} mat1 The subtrahend.
* @param {goog.vec.mat4d.Type} resultMat The matrix to receive
* the results (may be either mat0 or mat1).
* @return {!goog.vec.mat4d.Type} return resultMat so that operations can be
* chained together.
*/
function subMat(mat0: goog.vec.mat4d.Type, mat1: goog.vec.mat4d.Type, resultMat: goog.vec.mat4d.Type): goog.vec.mat4d.Type;
/**
* Multiplies matrix mat with the given scalar, storing the result
* into resultMat.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} scalar The scalar value to multiply to each element of mat.
* @param {goog.vec.mat4d.Type} resultMat The matrix to receive
* the results (may be mat).
* @return {!goog.vec.mat4d.Type} return resultMat so that operations can be
* chained together.
*/
function multScalar(mat: goog.vec.mat4d.Type, scalar: number, resultMat: goog.vec.mat4d.Type): goog.vec.mat4d.Type;
/**
* Multiplies the two matrices mat0 and mat1 using matrix multiplication,
* storing the result into resultMat.
*
* @param {goog.vec.mat4d.Type} mat0 The first (left hand) matrix.
* @param {goog.vec.mat4d.Type} mat1 The second (right hand) matrix.
* @param {goog.vec.mat4d.Type} resultMat The matrix to receive
* the results (may be either mat0 or mat1).
* @return {!goog.vec.mat4d.Type} return resultMat so that operations can be
* chained together.
*/
function multMat(mat0: goog.vec.mat4d.Type, mat1: goog.vec.mat4d.Type, resultMat: goog.vec.mat4d.Type): goog.vec.mat4d.Type;
/**
* Transposes the given matrix mat storing the result into resultMat.
*
* @param {goog.vec.mat4d.Type} mat The matrix to transpose.
* @param {goog.vec.mat4d.Type} resultMat The matrix to receive
* the results (may be mat).
* @return {!goog.vec.mat4d.Type} return resultMat so that operations can be
* chained together.
*/
function transpose(mat: goog.vec.mat4d.Type, resultMat: goog.vec.mat4d.Type): goog.vec.mat4d.Type;
/**
* Computes the determinant of the matrix.
*
* @param {goog.vec.mat4d.Type} mat The matrix to compute the matrix for.
* @return {number} The determinant of the matrix.
*/
function determinant(mat: goog.vec.mat4d.Type): number;
/**
* Computes the inverse of mat storing the result into resultMat. If the
* inverse is defined, this function returns true, false otherwise.
*
* @param {goog.vec.mat4d.Type} mat The matrix to invert.
* @param {goog.vec.mat4d.Type} resultMat The matrix to receive
* the result (may be mat).
* @return {boolean} True if the inverse is defined. If false is returned,
* resultMat is not modified.
*/
function invert(mat: goog.vec.mat4d.Type, resultMat: goog.vec.mat4d.Type): boolean;
/**
* Returns true if the components of mat0 are equal to the components of mat1.
*
* @param {goog.vec.mat4d.Type} mat0 The first matrix.
* @param {goog.vec.mat4d.Type} mat1 The second matrix.
* @return {boolean} True if the the two matrices are equivalent.
*/
function equals(mat0: goog.vec.mat4d.Type, mat1: goog.vec.mat4d.Type): boolean;
/**
* Transforms the given vector with the given matrix storing the resulting,
* transformed vector into resultVec. The input vector is multiplied against the
* upper 3x4 matrix omitting the projective component.
*
* @param {goog.vec.mat4d.Type} mat The matrix supplying the transformation.
* @param {goog.vec.vec3d.Type} vec The 3 element vector to transform.
* @param {goog.vec.vec3d.Type} resultVec The 3 element vector to
* receive the results (may be vec).
* @return {!goog.vec.vec3d.Type} return resultVec so that operations can be
* chained together.
*/
function multVec3(mat: goog.vec.mat4d.Type, vec: goog.vec.vec3d.Type, resultVec: goog.vec.vec3d.Type): goog.vec.vec3d.Type;
/**
* Transforms the given vector with the given matrix storing the resulting,
* transformed vector into resultVec. The input vector is multiplied against the
* upper 3x3 matrix omitting the projective component and translation
* components.
*
* @param {goog.vec.mat4d.Type} mat The matrix supplying the transformation.
* @param {goog.vec.vec3d.Type} vec The 3 element vector to transform.
* @param {goog.vec.vec3d.Type} resultVec The 3 element vector to
* receive the results (may be vec).
* @return {!goog.vec.vec3d.Type} return resultVec so that operations can be
* chained together.
*/
function multVec3NoTranslate(mat: goog.vec.mat4d.Type, vec: goog.vec.vec3d.Type, resultVec: goog.vec.vec3d.Type): goog.vec.vec3d.Type;
/**
* Transforms the given vector with the given matrix storing the resulting,
* transformed vector into resultVec. The input vector is multiplied against the
* full 4x4 matrix with the homogeneous divide applied to reduce the 4 element
* vector to a 3 element vector.
*
* @param {goog.vec.mat4d.Type} mat The matrix supplying the transformation.
* @param {goog.vec.vec3d.Type} vec The 3 element vector to transform.
* @param {goog.vec.vec3d.Type} resultVec The 3 element vector
* to receive the results (may be vec).
* @return {!goog.vec.vec3d.Type} return resultVec so that operations can be
* chained together.
*/
function multVec3Projective(mat: goog.vec.mat4d.Type, vec: goog.vec.vec3d.Type, resultVec: goog.vec.vec3d.Type): goog.vec.vec3d.Type;
/**
* Transforms the given vector with the given matrix storing the resulting,
* transformed vector into resultVec.
*
* @param {goog.vec.mat4d.Type} mat The matrix supplying the transformation.
* @param {goog.vec.vec4d.Type} vec The vector to transform.
* @param {goog.vec.vec4d.Type} resultVec The vector to
* receive the results (may be vec).
* @return {!goog.vec.vec4d.Type} return resultVec so that operations can be
* chained together.
*/
function multVec4(mat: goog.vec.mat4d.Type, vec: goog.vec.vec4d.Type, resultVec: goog.vec.vec4d.Type): goog.vec.vec4d.Type;
/**
* Makes the given 4x4 matrix a translation matrix with x, y and z
* translation factors.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} x The translation along the x axis.
* @param {number} y The translation along the y axis.
* @param {number} z The translation along the z axis.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeTranslate(mat: goog.vec.mat4d.Type, x: number, y: number, z: number): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix as a scale matrix with x, y and z scale factors.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} x The scale along the x axis.
* @param {number} y The scale along the y axis.
* @param {number} z The scale along the z axis.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeScale(mat: goog.vec.mat4d.Type, x: number, y: number, z: number): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix a rotation matrix with the given rotation
* angle about the axis defined by the vector (ax, ay, az).
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} angle The rotation angle in radians.
* @param {number} ax The x component of the rotation axis.
* @param {number} ay The y component of the rotation axis.
* @param {number} az The z component of the rotation axis.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeRotate(mat: goog.vec.mat4d.Type, angle: number, ax: number, ay: number, az: number): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix a rotation matrix with the given rotation
* angle about the X axis.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} angle The rotation angle in radians.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeRotateX(mat: goog.vec.mat4d.Type, angle: number): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix a rotation matrix with the given rotation
* angle about the Y axis.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} angle The rotation angle in radians.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeRotateY(mat: goog.vec.mat4d.Type, angle: number): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix a rotation matrix with the given rotation
* angle about the Z axis.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} angle The rotation angle in radians.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeRotateZ(mat: goog.vec.mat4d.Type, angle: number): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix a perspective projection matrix.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} left The coordinate of the left clipping plane.
* @param {number} right The coordinate of the right clipping plane.
* @param {number} bottom The coordinate of the bottom clipping plane.
* @param {number} top The coordinate of the top clipping plane.
* @param {number} near The distance to the near clipping plane.
* @param {number} far The distance to the far clipping plane.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeFrustum(mat: goog.vec.mat4d.Type, left: number, right: number, bottom: number, top: number, near: number, far: number): goog.vec.mat4d.Type;
/**
* Makse the given 4x4 matrix perspective projection matrix given a
* field of view and aspect ratio.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} fovy The field of view along the y (vertical) axis in
* radians.
* @param {number} aspect The x (width) to y (height) aspect ratio.
* @param {number} near The distance to the near clipping plane.
* @param {number} far The distance to the far clipping plane.
* @return {goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makePerspective(mat: goog.vec.mat4d.Type, fovy: number, aspect: number, near: number, far: number): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix an orthographic projection matrix.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} left The coordinate of the left clipping plane.
* @param {number} right The coordinate of the right clipping plane.
* @param {number} bottom The coordinate of the bottom clipping plane.
* @param {number} top The coordinate of the top clipping plane.
* @param {number} near The distance to the near clipping plane.
* @param {number} far The distance to the far clipping plane.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeOrtho(mat: goog.vec.mat4d.Type, left: number, right: number, bottom: number, top: number, near: number, far: number): goog.vec.mat4d.Type;
/**
* Makes the given 4x4 matrix a modelview matrix of a camera so that
* the camera is 'looking at' the given center point.
*
* Note that unlike most other goog.vec functions where we inline
* everything, this function does not inline various goog.vec
* functions. This makes the code more readable, but somewhat
* less efficient.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {goog.vec.vec3d.Type} eyePt The position of the eye point
* (camera origin).
* @param {goog.vec.vec3d.Type} centerPt The point to aim the camera at.
* @param {goog.vec.vec3d.Type} worldUpVec The vector that identifies
* the up direction for the camera.
* @return {goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeLookAt(mat: goog.vec.mat4d.Type, eyePt: goog.vec.vec3d.Type, centerPt: goog.vec.vec3d.Type, worldUpVec: goog.vec.vec3d.Type): goog.vec.mat4d.Type;
/**
* Decomposes a matrix into the lookAt vectors eyePt, fwdVec and worldUpVec.
* The matrix represents the modelview matrix of a camera. It is the inverse
* of lookAt except for the output of the fwdVec instead of centerPt.
* The centerPt itself cannot be recovered from a modelview matrix.
*
* Note that unlike most other goog.vec functions where we inline
* everything, this function does not inline various goog.vec
* functions. This makes the code more readable, but somewhat
* less efficient.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {goog.vec.vec3d.Type} eyePt The position of the eye point
* (camera origin).
* @param {goog.vec.vec3d.Type} fwdVec The vector describing where
* the camera points to.
* @param {goog.vec.vec3d.Type} worldUpVec The vector that
* identifies the up direction for the camera.
* @return {boolean} True if the method succeeds, false otherwise.
* The method can only fail if the inverse of viewMatrix is not defined.
*/
function toLookAt(mat: goog.vec.mat4d.Type, eyePt: goog.vec.vec3d.Type, fwdVec: goog.vec.vec3d.Type, worldUpVec: goog.vec.vec3d.Type): boolean;
/**
* Makes the given 4x4 matrix a rotation matrix given Euler angles using
* the ZXZ convention.
* Given the euler angles [theta1, theta2, theta3], the rotation is defined as
* rotation = rotation_z(theta1) * rotation_x(theta2) * rotation_z(theta3),
* with theta1 in [0, 2 * pi], theta2 in [0, pi] and theta3 in [0, 2 * pi].
* rotation_x(theta) means rotation around the X axis of theta radians,
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} theta1 The angle of rotation around the Z axis in radians.
* @param {number} theta2 The angle of rotation around the X axis in radians.
* @param {number} theta3 The angle of rotation around the Z axis in radians.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function makeEulerZXZ(mat: goog.vec.mat4d.Type, theta1: number, theta2: number, theta3: number): goog.vec.mat4d.Type;
/**
* Decomposes a rotation matrix into Euler angles using the ZXZ convention so
* that rotation = rotation_z(theta1) * rotation_x(theta2) * rotation_z(theta3),
* with theta1 in [0, 2 * pi], theta2 in [0, pi] and theta3 in [0, 2 * pi].
* rotation_x(theta) means rotation around the X axis of theta radians.
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {goog.vec.vec3d.Type} euler The ZXZ Euler angles in
* radians as [theta1, theta2, theta3].
* @param {boolean=} opt_theta2IsNegative Whether theta2 is in [-pi, 0] instead
* of the default [0, pi].
* @return {!goog.vec.vec4d.Type} return euler so that operations can be
* chained together.
*/
function toEulerZXZ(mat: goog.vec.mat4d.Type, euler: goog.vec.vec3d.Type, opt_theta2IsNegative?: boolean): goog.vec.vec4d.Type;
/**
* Translates the given matrix by x,y,z. Equvialent to:
* goog.vec.mat4d.multMat(
* mat,
* goog.vec.mat4d.makeTranslate(goog.vec.mat4d.create(), x, y, z),
* mat);
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} x The translation along the x axis.
* @param {number} y The translation along the y axis.
* @param {number} z The translation along the z axis.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function translate(mat: goog.vec.mat4d.Type, x: number, y: number, z: number): goog.vec.mat4d.Type;
/**
* Scales the given matrix by x,y,z. Equivalent to:
* goog.vec.mat4d.multMat(
* mat,
* goog.vec.mat4d.makeScale(goog.vec.mat4d.create(), x, y, z),
* mat);
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} x The x scale factor.
* @param {number} y The y scale factor.
* @param {number} z The z scale factor.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function scale(mat: goog.vec.mat4d.Type, x: number, y: number, z: number): goog.vec.mat4d.Type;
/**
* Rotate the given matrix by angle about the x,y,z axis. Equivalent to:
* goog.vec.mat4d.multMat(
* mat,
* goog.vec.mat4d.makeRotate(goog.vec.mat4d.create(), angle, x, y, z),
* mat);
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} angle The angle in radians.
* @param {number} x The x component of the rotation axis.
* @param {number} y The y component of the rotation axis.
* @param {number} z The z component of the rotation axis.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function rotate(mat: goog.vec.mat4d.Type, angle: number, x: number, y: number, z: number): goog.vec.mat4d.Type;
/**
* Rotate the given matrix by angle about the x axis. Equivalent to:
* goog.vec.mat4d.multMat(
* mat,
* goog.vec.mat4d.makeRotateX(goog.vec.mat4d.create(), angle),
* mat);
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} angle The angle in radians.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function rotateX(mat: goog.vec.mat4d.Type, angle: number): goog.vec.mat4d.Type;
/**
* Rotate the given matrix by angle about the y axis. Equivalent to:
* goog.vec.mat4d.multMat(
* mat,
* goog.vec.mat4d.makeRotateY(goog.vec.mat4d.create(), angle),
* mat);
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} angle The angle in radians.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function rotateY(mat: goog.vec.mat4d.Type, angle: number): goog.vec.mat4d.Type;
/**
* Rotate the given matrix by angle about the z axis. Equivalent to:
* goog.vec.mat4d.multMat(
* mat,
* goog.vec.mat4d.makeRotateZ(goog.vec.mat4d.create(), angle),
* mat);
*
* @param {goog.vec.mat4d.Type} mat The matrix.
* @param {number} angle The angle in radians.
* @return {!goog.vec.mat4d.Type} return mat so that operations can be
* chained.
*/
function rotateZ(mat: goog.vec.mat4d.Type, angle: number): goog.vec.mat4d.Type;
/**
* Retrieves the translation component of the transformation matrix.
*
* @param {goog.vec.mat4d.Type} mat The transformation matrix.
* @param {goog.vec.vec3d.Type} translation The vector for storing the
* result.
* @return {!goog.vec.vec3d.Type} return translation so that operations can be
* chained.
*/
function getTranslation(mat: goog.vec.mat4d.Type, translation: goog.vec.vec3d.Type): goog.vec.vec3d.Type;
}
<|file_name|>routes.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2014 rafa
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
function fNotaRoutes() {
// Path.map("#/nota").to(function () {
// $('#indexContenidoJsp').spinner();
// control('nota').list($('#indexContenido'), param().defaultizeUrlObjectParameters({}), null);
// //notaControl.modalListEventsLoading(notaObject, notaView, $('#indexContenido'), param().defaultizeUrlObjectParameters({}), null);
// $('#indexContenidoJsp').empty();
// return false;
// });
Path.map("#/nota").to(function () {
$('#indexContenidoJsp').spinner();
oNotaControl.list($('#indexContenido'), param().defaultizeUrlObjectParameters({}), null, oNotaModel, oNotaView);
//notaControl.modalListEventsLoading(notaObject, notaView, $('#indexContenido'), param().defaultizeUrlObjectParameters({}), null);
$('#indexContenidoJsp').empty();
//$('#indexContenidoJsp').append(oNotaControl.getClassNameNota());
return false;
});
Path.map("#/nota/list/:url").to(function () {
$('#indexContenidoJsp').spinner();
var paramsObject = param().defaultizeUrlObjectParameters(param().getUrlObjectFromUrlString(this.params['url']));
oNotaControl.list($('#indexContenido'), paramsObject, null, oNotaModel, oNotaView);
$('#indexContenidoJsp').empty();
return false;
});
Path.map("#/nota/view/:id").to(function () {
$('#indexContenidoJsp').spinner();
var paramsObject = param().defaultizeUrlObjectParameters(param().getUrlObjectFromUrlString(this.params['url']));
oNotaControl.view($('#indexContenido'), paramsObject['id'], oNotaModel, oNotaView);
$('#indexContenidoJsp').empty();
return false;
});
Path.map("#/nota/edit/:id").to(function () {
$('#indexContenidoJsp').spinner();
var paramsObject = param().defaultizeUrlObjectParameters(param().getUrlObjectFromUrlString(this.params['url']));
oNotaControl.edit($('#indexContenido'), paramsObject['id'], oNotaModel, oNotaView);
$('#indexContenidoJsp').empty();
});
Path.map("#/nota/new").to(function () {<|fim▁hole|> //var paramsObject = param().defaultizeUrlObjectParameters(param().getUrlObjectFromUrlString(this.params['url']));
oNotaControl.new($('#indexContenido'), null, oNotaModel, oNotaView);
$('#indexContenidoJsp').empty();
return false;
});
Path.map("#/nota/new/:url").to(function () {
$('#indexContenidoJsp').spinner();
var paramsObject = param().defaultizeUrlObjectParameters(param().getUrlObjectFromUrlString(this.params['url']));
oNotaControl.new($('#indexContenido'), paramsObject, oNotaModel, oNotaView);
$('#indexContenidoJsp').empty();
return false;
});
Path.map("#/nota/remove/:id").to(function () {
$('#indexContenidoJsp').spinner();
var paramsObject = param().defaultizeUrlObjectParameters(param().getUrlObjectFromUrlString(this.params['url']));
oNotaControl.remove($('#indexContenido'), paramsObject['id'], oNotaModel, oNotaView);
$('#indexContenidoJsp').empty();
return false;
});
}<|fim▁end|> | $('#indexContenidoJsp').spinner(); |
<|file_name|>choices.py<|end_file_name|><|fim▁begin|># Add your own choices here!
fruit = ["apples", "oranges", "pears", "grapes", "blueberries"]
lunch = ["pho", "timmies", "thai", "burgers", "buffet!", "indian", "montanas"]<|fim▁hole|><|fim▁end|> | situations = {"fruit":fruit, "lunch":lunch} |
<|file_name|>test_signup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.conf import settings
from django.test import TestCase
from zilencer.models import Deployment
from zerver.models import (
get_realm, get_user_profile_by_email,
PreregistrationUser, Realm, ScheduledJob, UserProfile,
)
from zerver.lib.actions import (
create_stream_if_needed,
do_add_subscription,
set_default_streams,
)
from zerver.lib.digest import send_digest_email
from zerver.lib.notifications import enqueue_welcome_emails, one_click_unsubscribe_link
from zerver.lib.test_helpers import AuthedTestCase, find_key_by_email, queries_captured
from zerver.lib.test_runner import slow
from zerver.lib.session_user import get_session_dict_user
import re
import ujson
from urlparse import urlparse
class PublicURLTest(TestCase):
"""
Account creation URLs are accessible even when not logged in. Authenticated
URLs redirect to a page.
"""
def fetch(self, method, urls, expected_status):
for url in urls:
if method == "get":
response = self.client.get(url)
else:
response = self.client.post(url)
self.assertEqual(response.status_code, expected_status,
msg="Expected %d, received %d for %s to %s" % (
expected_status, response.status_code, method, url))
def test_public_urls(self):
"""
Test which views are accessible when not logged in.
"""
# FIXME: We should also test the Tornado URLs -- this codepath
# can't do so because this Django test mechanism doesn't go
# through Tornado.
get_urls = {200: ["/accounts/home/", "/accounts/login/"],
302: ["/"],
401: ["/api/v1/streams/Denmark/members",
"/api/v1/users/me/subscriptions",
"/api/v1/messages",
],
}
post_urls = {200: ["/accounts/login/"],
302: ["/accounts/logout/"],
401: ["/json/get_public_streams",
"/json/get_old_messages",
"/json/update_pointer",
"/json/send_message",
"/json/invite_users",
"/json/settings/change",
"/json/subscriptions/remove",
"/json/subscriptions/exists",
"/json/subscriptions/add",
"/json/subscriptions/property",
"/json/get_subscribers",
"/json/fetch_api_key",
"/api/v1/users/me/subscriptions",
],
400: ["/api/v1/send_message",
"/api/v1/external/github",
"/api/v1/fetch_api_key",
],
}
for status_code, url_set in get_urls.iteritems():
self.fetch("get", url_set, status_code)
for status_code, url_set in post_urls.iteritems():
self.fetch("post", url_set, status_code)
def test_get_gcid_when_not_configured(self):
with self.settings(GOOGLE_CLIENT_ID=None):
resp = self.client.get("/api/v1/fetch_google_client_id")
self.assertEquals(400, resp.status_code,
msg="Expected 400, received %d for GET /api/v1/fetch_google_client_id" % resp.status_code,
)
data = ujson.loads(resp.content)
self.assertEqual('error', data['result'])
def test_get_gcid_when_configured(self):
with self.settings(GOOGLE_CLIENT_ID="ABCD"):
resp = self.client.get("/api/v1/fetch_google_client_id")
self.assertEquals(200, resp.status_code,
msg="Expected 200, received %d for GET /api/v1/fetch_google_client_id" % resp.status_code,
)
data = ujson.loads(resp.content)
self.assertEqual('success', data['result'])
self.assertEqual('ABCD', data['google_client_id'])
class LoginTest(AuthedTestCase):
"""
Logging in, registration, and logging out.
"""
def test_login(self):
self.login("[email protected]")
user_profile = get_user_profile_by_email('[email protected]')
self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
def test_login_bad_password(self):
self.login("[email protected]", "wrongpassword")
self.assertIsNone(get_session_dict_user(self.client.session))
def test_login_nonexist_user(self):
result = self.login("[email protected]", "xxx")
self.assertIn("Please enter a correct email and password", result.content)
def test_register(self):
realm = get_realm("zulip.com")
streams = ["stream_%s" % i for i in xrange(40)]
for stream in streams:
create_stream_if_needed(realm, stream)
set_default_streams(realm, streams)
with queries_captured() as queries:
self.register("test", "test")
# Ensure the number of queries we make is not O(streams)
self.assert_length(queries, 67)
user_profile = get_user_profile_by_email('[email protected]')
self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
def test_register_deactivated(self):
"""
If you try to register for a deactivated realm, you get a clear error
page.
"""
realm = get_realm("zulip.com")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.register("test", "test")
self.assertIn("has been deactivated", result.content.replace("\n", " "))
with self.assertRaises(UserProfile.DoesNotExist):
get_user_profile_by_email('[email protected]')
def test_login_deactivated(self):
"""
If you try to log in to a deactivated realm, you get a clear error page.
"""
realm = get_realm("zulip.com")
realm.deactivated = True
realm.save(update_fields=["deactivated"])
result = self.login("[email protected]")
self.assertIn("has been deactivated", result.content.replace("\n", " "))
def test_logout(self):
self.login("[email protected]")
self.client.post('/accounts/logout/')
self.assertIsNone(get_session_dict_user(self.client.session))
def test_non_ascii_login(self):
"""
You can log in even if your password contain non-ASCII characters.
"""
email = "[email protected]"
password = u"hümbüǵ"
# Registering succeeds.
self.register("test", password)
user_profile = get_user_profile_by_email(email)
self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
self.client.post('/accounts/logout/')
self.assertIsNone(get_session_dict_user(self.client.session))
# Logging in succeeds.
self.client.post('/accounts/logout/')
self.login(email, password)
self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
def test_register_first_user_with_invites(self):
"""
The first user in a realm has a special step in their signup workflow
for inviting coworkers. Do as realistic an end-to-end test as we can
without Tornado running.
"""
username = "user1"
password = "test"
domain = "test.com"
email = "[email protected]"
# Create a new realm to ensure that we're the first user in it.
Realm.objects.create(domain=domain, name="Test Inc.")
# Start the signup process by supplying an email address.
result = self.client.post('/accounts/home/', {'email': email})
# Check the redirect telling you to check your mail for a confirmation
# link.
self.assertEquals(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
"/accounts/send_confirm/%s@%s" % (username, domain)))
result = self.client.get(result["Location"])
self.assertIn("Check your email so we can get started.", result.content)
# Visit the confirmation link.
from django.core.mail import outbox
for message in reversed(outbox):
if email in message.to:
confirmation_link_pattern = re.compile(settings.EXTERNAL_HOST + "(\S+)>")
confirmation_url = confirmation_link_pattern.search(
message.body).groups()[0]
break
else:
raise ValueError("Couldn't find a confirmation email.")
result = self.client.get(confirmation_url)
self.assertEquals(result.status_code, 200)
# Pick a password and agree to the ToS.
result = self.submit_reg_form_for_user(username, password, domain)
self.assertEquals(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/invite/"))
# Invite coworkers to join you.
result = self.client.get(result["Location"])
self.assertIn("You're the first one here!", result.content)
# Reset the outbox for our invites.
outbox.pop()
invitees = ['alice@' + domain, 'bob@' + domain]
params = {
'invitee_emails': ujson.dumps(invitees)
}
result = self.client.post('/json/bulk_invite_users', params)
self.assert_json_success(result)
# We really did email these users, and they have PreregistrationUser
# objects.
email_recipients = [message.recipients()[0] for message in outbox]
self.assertEqual(len(outbox), len(invitees))
self.assertItemsEqual(email_recipients, invitees)
user_profile = get_user_profile_by_email(email)
self.assertEqual(len(invitees), PreregistrationUser.objects.filter(
referred_by=user_profile).count())
# After this we start manipulating browser information, so stop here.
class InviteUserTest(AuthedTestCase):
def invite(self, users, streams):
"""
Invites the specified users to Zulip with the specified streams.
users should be a string containing the users to invite, comma or
newline separated.
streams should be a list of strings.
"""
return self.client.post("/json/invite_users",
{"invitee_emails": users,
"stream": streams})
def check_sent_emails(self, correct_recipients):
from django.core.mail import outbox
self.assertEqual(len(outbox), len(correct_recipients))
email_recipients = [email.recipients()[0] for email in outbox]
self.assertItemsEqual(email_recipients, correct_recipients)
def test_bulk_invite_users(self):
# The bulk_invite_users code path is for the first user in a realm.
self.login('[email protected]')
invitees = ['[email protected]', '[email protected]']
params = {
'invitee_emails': ujson.dumps(invitees)
}
result = self.client.post('/json/bulk_invite_users', params)
self.assert_json_success(result)
self.check_sent_emails(invitees)
def test_successful_invite_user(self):
"""
A call to /json/invite_users with valid parameters causes an invitation
email to be sent.
"""
self.login("[email protected]")
invitee = "[email protected]"
self.assert_json_success(self.invite(invitee, ["Denmark"]))
self.assertTrue(find_key_by_email(invitee))
self.check_sent_emails([invitee])
def test_multi_user_invite(self):
"""
Invites multiple users with a variety of delimiters.<|fim▁hole|> # Intentionally use a weird string.
self.assert_json_success(self.invite(
"""[email protected], [email protected],
[email protected]
[email protected]""", ["Denmark"]))
for user in ("bob", "carol", "dave", "earl"):
self.assertTrue(find_key_by_email("%[email protected]" % user))
self.check_sent_emails(["[email protected]", "[email protected]",
"[email protected]", "[email protected]"])
def test_missing_or_invalid_params(self):
"""
Tests inviting with various missing or invalid parameters.
"""
self.login("[email protected]")
self.assert_json_error(
self.client.post("/json/invite_users", {"invitee_emails": "[email protected]"}),
"You must specify at least one stream for invitees to join.")
for address in ("noatsign.com", "[email protected]"):
self.assert_json_error(
self.invite(address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.")
self.check_sent_emails([])
def test_invalid_stream(self):
"""
Tests inviting to a non-existent stream.
"""
self.login("[email protected]")
self.assert_json_error(self.invite("[email protected]", ["NotARealStream"]),
"Stream does not exist: NotARealStream. No invites were sent.")
self.check_sent_emails([])
def test_invite_existing_user(self):
"""
If you invite an address already using Zulip, no invitation is sent.
"""
self.login("[email protected]")
self.assert_json_error(
self.client.post("/json/invite_users",
{"invitee_emails": "[email protected]",
"stream": ["Denmark"]}),
"We weren't able to invite anyone.")
self.assertRaises(PreregistrationUser.DoesNotExist,
lambda: PreregistrationUser.objects.get(
email="[email protected]"))
self.check_sent_emails([])
def test_invite_some_existing_some_new(self):
"""
If you invite a mix of already existing and new users, invitations are
only sent to the new users.
"""
self.login("[email protected]")
existing = ["[email protected]", "[email protected]"]
new = ["[email protected]", "[email protected]"]
result = self.client.post("/json/invite_users",
{"invitee_emails": "\n".join(existing + new),
"stream": ["Denmark"]})
self.assert_json_error(result,
"Some of those addresses are already using Zulip, \
so we didn't send them an invitation. We did send invitations to everyone else!")
# We only created accounts for the new users.
for email in existing:
self.assertRaises(PreregistrationUser.DoesNotExist,
lambda: PreregistrationUser.objects.get(
email=email))
for email in new:
self.assertTrue(PreregistrationUser.objects.get(email=email))
# We only sent emails to the new users.
self.check_sent_emails(new)
def test_invite_outside_domain_in_closed_realm(self):
"""
In a realm with `restricted_to_domain = True`, you can't invite people
with a different domain from that of the realm or your e-mail address.
"""
zulip_realm = get_realm("zulip.com")
zulip_realm.restricted_to_domain = True
zulip_realm.save()
self.login("[email protected]")
external_address = "[email protected]"
self.assert_json_error(
self.invite(external_address, ["Denmark"]),
"Some emails did not validate, so we didn't send any invitations.")
@slow(0.20, 'inviting is slow')
def test_invite_outside_domain_in_open_realm(self):
"""
In a realm with `restricted_to_domain = False`, you can invite people
with a different domain from that of the realm or your e-mail address.
"""
zulip_realm = get_realm("zulip.com")
zulip_realm.restricted_to_domain = False
zulip_realm.save()
self.login("[email protected]")
external_address = "[email protected]"
self.assert_json_success(self.invite(external_address, ["Denmark"]))
self.check_sent_emails([external_address])
def test_invite_with_non_ascii_streams(self):
"""
Inviting someone to streams with non-ASCII characters succeeds.
"""
self.login("[email protected]")
invitee = "[email protected]"
stream_name = u"hümbüǵ"
realm = get_realm("zulip.com")
stream, _ = create_stream_if_needed(realm, stream_name)
# Make sure we're subscribed before inviting someone.
do_add_subscription(
get_user_profile_by_email("[email protected]"),
stream, no_log=True)
self.assert_json_success(self.invite(invitee, [stream_name]))
class EmailUnsubscribeTests(AuthedTestCase):
def test_missedmessage_unsubscribe(self):
"""
We provide one-click unsubscribe links in missed message
e-mails that you can click even when logged out to update your
email notification settings.
"""
user_profile = get_user_profile_by_email("[email protected]")
user_profile.enable_offline_email_notifications = True
user_profile.save()
unsubscribe_link = one_click_unsubscribe_link(user_profile,
"missed_messages")
result = self.client.get(urlparse(unsubscribe_link).path)
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile = UserProfile.objects.get(email="[email protected]")
self.assertFalse(user_profile.enable_offline_email_notifications)
def test_welcome_unsubscribe(self):
"""
We provide one-click unsubscribe links in welcome e-mails that you can
click even when logged out to stop receiving them.
"""
email = "[email protected]"
user_profile = get_user_profile_by_email("[email protected]")
# Simulate a new user signing up, which enqueues 2 welcome e-mails.
enqueue_welcome_emails(email, "King Hamlet")
self.assertEqual(2, len(ScheduledJob.objects.filter(
type=ScheduledJob.EMAIL, filter_string__iexact=email)))
# Simulate unsubscribing from the welcome e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "welcome")
result = self.client.get(urlparse(unsubscribe_link).path)
# The welcome email jobs are no longer scheduled.
self.assertEqual(result.status_code, 200)
self.assertEqual(0, len(ScheduledJob.objects.filter(
type=ScheduledJob.EMAIL, filter_string__iexact=email)))
def test_digest_unsubscribe(self):
"""
We provide one-click unsubscribe links in digest e-mails that you can
click even when logged out to stop receiving them.
Unsubscribing from these emails also dequeues any digest email jobs that
have been queued.
"""
email = "[email protected]"
user_profile = get_user_profile_by_email("[email protected]")
self.assertTrue(user_profile.enable_digest_emails)
# Enqueue a fake digest email.
send_digest_email(user_profile, "", "")
self.assertEqual(1, len(ScheduledJob.objects.filter(
type=ScheduledJob.EMAIL, filter_string__iexact=email)))
# Simulate unsubscribing from digest e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "digest")
result = self.client.get(urlparse(unsubscribe_link).path)
# The setting is toggled off, and scheduled jobs have been removed.
self.assertEqual(result.status_code, 200)
# Circumvent user_profile caching.
user_profile = UserProfile.objects.get(email="[email protected]")
self.assertFalse(user_profile.enable_digest_emails)
self.assertEqual(0, len(ScheduledJob.objects.filter(
type=ScheduledJob.EMAIL, filter_string__iexact=email)))<|fim▁end|> | """
self.login("[email protected]") |
<|file_name|>local_settings_template.py<|end_file_name|><|fim▁begin|># flake8: noqa
# -*- coding: utf-8 -*-
###############################################
# Geosite local settings
###############################################
import os
# Outside URL
SITEURL = 'http://$DOMAIN'
OGC_SERVER['default']['LOCATION'] = os.path.join(GEOSERVER_URL, 'geoserver/')<|fim▁hole|>
# databases unique to site if not defined in site settings
"""
SITE_DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '../development.db'),
},
}
"""<|fim▁end|> | OGC_SERVER['default']['PUBLIC_LOCATION'] = os.path.join(SITEURL, 'geoserver/') |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"os"
"github.com/kamaln7/karmabot"
"github.com/kamaln7/karmabot/ctlcommands"
"github.com/aybabtme/log"
"github.com/urfave/cli"
)
var (
ll *log.Log
)
func main() {
// logging
ll = log.KV("version", karmabot.Version)
// commands
cc := &ctlcommands.Commands{
Logger: ll,
}
// app
app := cli.NewApp()
app.Name = "karmabotctl"
app.Version = karmabot.Version
app.Usage = "manually manage karmabot"
// general flags
dbpath := cli.StringFlag{
Name: "db",
Value: "./db.sqlite3",
Usage: "path to sqlite database",
}
debug := cli.BoolFlag{
Name: "debug",
Usage: "set debug mode",
}
leaderboardlimit := cli.IntFlag{
Name: "leaderboardlimit",
Value: 10,
Usage: "the default amount of users to list in the leaderboard",
}
// webui
webuiCommands := []cli.Command{
{
Name: "totp",
Usage: "generate a TOTP token",
Flags: []cli.Flag{
cli.StringFlag{
Name: "totp",
Usage: "totp key",
},
},
Action: cc.Mktotp,
},
{
Name: "serve",
Usage: "start a webserver",
Flags: []cli.Flag{
dbpath,
debug,
leaderboardlimit,
cli.StringFlag{
Name: "totp",
Usage: "totp key",
},
cli.StringFlag{
Name: "path",
Usage: "path to web UI files",
},
cli.StringFlag{
Name: "listenaddr",
Usage: "address to listen and serve the web ui on",
},
cli.StringFlag{
Name: "url",
Usage: "url address for accessing the web ui",
},
},
Action: cc.Serve,
},
}
// karma
karmaCommands := []cli.Command{
{
Name: "add",
Usage: "add karma to a user",
Flags: []cli.Flag{
dbpath,
cli.StringFlag{
Name: "from",
},
cli.StringFlag{
Name: "to",
},
cli.StringFlag{
Name: "reason",
},
cli.IntFlag{
Name: "points",
},
},
Action: cc.AddKarma,
},
{
Name: "migrate",
Usage: "move a user's karma to another user",
Flags: []cli.Flag{
dbpath,
cli.StringFlag{
Name: "from",
},
cli.StringFlag{
Name: "to",
},
cli.StringFlag{
Name: "reason",
},
},
Action: cc.MigrateKarma,
},
{
Name: "reset",
Usage: "reset a user's karma",
Flags: []cli.Flag{
dbpath,
cli.StringFlag{
Name: "user",
},
},
Action: cc.ResetKarma,
},
{
Name: "set",
Usage: "set a user's karma to a specific number",
Flags: []cli.Flag{
dbpath,
cli.StringFlag{
Name: "user",
},
cli.IntFlag{
Name: "points",
},
},
Action: cc.SetKarma,
},
{
Name: "throwback",
Usage: "get a karma throwback for a user",
Flags: []cli.Flag{
dbpath,
cli.StringFlag{
Name: "user",
},
},
Action: cc.GetThrowback,
},
}
// main app
app.Commands = []cli.Command{
{
Name: "karma",
Subcommands: karmaCommands,
},
{<|fim▁hole|> }
app.Run(os.Args)
}<|fim▁end|> | Name: "webui",
Subcommands: webuiCommands,
}, |
<|file_name|>StructuralInterface.cpp<|end_file_name|><|fim▁begin|>#include "StructuralInterface.h"
StructuralInterface::StructuralInterface (StructuralEntity *parent)
: StructuralEntity (parent)
{
setCategory (Structural::Interface);
setStructuralType (Structural::NoType);
setResizable (false);
setTop (0);
setLeft (0);
setWidth (ST_DEFAULT_INTERFACE_W);
setHeight (ST_DEFAULT_INTERFACE_H);
if (!ST_OPT_SHOW_INTERFACES)
setHidden (true);
}
void
StructuralInterface::adjust (bool collision, bool recursion)
{
StructuralEntity::adjust (collision, recursion);
// Adjusting position...
StructuralEntity *parent = structuralParent ();
if (parent || !ST_OPT_WITH_BODY)
{
if (!collision)
{
// Tries (10x) to find a position where there is no collision with others
// relatives
for (int i = 0; i < 10; i++)
{
bool colliding = false;
for (StructuralEntity *ent : StructuralUtil::neighbors (this))
{
if (ent != this)
{
int n = 0, max = 1000;
qreal current = 0.0;
ent->setSelectable (false);
while (collidesWithItem (ent, Qt::IntersectsItemBoundingRect))
{
QLineF line
= QLineF (left () + width () / 2, top () + height () / 2,
ent->width () / 2, ent->height () / 2);
line.setAngle (qrand () % 360);
current += (double)(qrand () % 100) / 1000.0;
setTop (top () + line.pointAt (current / 2).y ()
- line.p1 ().y ());
setLeft (left () + line.pointAt (current / 2).x ()
- line.p1 ().x ());
if (++n > max)
break;
}
constrain ();
ent->setSelectable (true);
}
}
for (StructuralEntity *ent : StructuralUtil::neighbors (this))
if (collidesWithItem (ent, Qt::IntersectsItemBoundingRect))
colliding = true;
if (!colliding)
break;
}
}
constrain ();
StructuralUtil::adjustEdges (this);
}
}
void
StructuralInterface::constrain ()
{
StructuralEntity *parent = structuralParent ();
if (parent != nullptr)
{
QPointF tail (parent->width () / 2, parent->height () / 2);
QPointF head (left () + width () / 2, top () + height () / 2);
if (tail == head)
{
head.setX (tail.x ());
head.setY (tail.y () - 10);
}
QPointF p = head;
QLineF line (tail, head);
bool status = true;
qreal current = 1.0;
qreal step = 0.01;
if (!parent->contains (p))
{
step = -0.01;
status = false;
}
do
{
current += step;
p = line.pointAt (current);
} while (parent->contains (p) == status);
if (QLineF (p, head).length () > 7)
{
setTop (p.y () - height () / 2);
setLeft (p.x () - width () / 2);
}
}
}
void
StructuralInterface::draw (QPainter *painter)
{
int x = ST_DEFAULT_ENTITY_PADDING + ST_DEFAULT_INTERFACE_PADDING;
int y = ST_DEFAULT_ENTITY_PADDING + ST_DEFAULT_INTERFACE_PADDING;
int w = width () - 2 * ST_DEFAULT_INTERFACE_PADDING;
int h = height () - 2 * ST_DEFAULT_INTERFACE_PADDING;
painter->drawPixmap (x, y, w, h,
QPixmap (StructuralUtil::icon (structuralType ())));
if (!ST_OPT_WITH_BODY && !ST_OPT_USE_FLOATING_INTERFACES)
{
if (property (ST_ATTR_ENT_AUTOSTART) == ST_VALUE_TRUE)
{<|fim▁hole|> painter->drawRect (x, y, w, h);
}
}
if (!error ().isEmpty () || !warning ().isEmpty ())
{
QString icon;
if (!error ().isEmpty ())
icon = QString (ST_DEFAULT_ALERT_ERROR_ICON);
else
icon = QString (ST_DEFAULT_ALERT_WARNING_ICON);
painter->drawPixmap (x + w / 2 - (ST_DEFAULT_ALERT_ICON_W - 3) / 2,
y + h / 2 - (ST_DEFAULT_ALERT_ICON_H - 3) / 2,
ST_DEFAULT_ALERT_ICON_W - 3,
ST_DEFAULT_ALERT_ICON_H - 3, QPixmap (icon));
}
if (isMoving ())
{
painter->setBrush (QBrush (Qt::NoBrush));
painter->setPen (QPen (QBrush (Qt::black), 0));
int moveX = x + moveLeft () - left ();
int moveY = y + moveTop () - top ();
int moveW = w;
int moveH = h;
painter->drawRect (moveX, moveY, moveW, moveH);
}
}<|fim▁end|> | painter->setPen (QPen (QBrush (QColor (76, 76, 76)), 2)); |
<|file_name|>suite.py<|end_file_name|><|fim▁begin|># uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: suite.py
"""TestSuite"""
import sys
from . import case
from . import util
__unittest = True
def _call_if_exists(parent, attr):
func = getattr(parent, attr, lambda : None)
func()
class BaseTestSuite(object):
"""A simple test suite that doesn't provide class or module shared fixtures.
"""
def __init__(self, tests=()):
self._tests = []
self.addTests(tests)
def __repr__(self):
return '<%s tests=%s>' % (util.strclass(self.__class__), list(self))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return list(self) == list(other)
def __ne__(self, other):
return not self == other
__hash__ = None
def __iter__(self):
return iter(self._tests)
def countTestCases(self):
cases = 0
for test in self:
cases += test.countTestCases()
return cases
def addTest(self, test):
if not hasattr(test, '__call__'):
raise TypeError('{} is not callable'.format(repr(test)))
if isinstance(test, type) and issubclass(test, (
case.TestCase, TestSuite)):
raise TypeError('TestCases and TestSuites must be instantiated before passing them to addTest()')
self._tests.append(test)
def addTests(self, tests):
if isinstance(tests, basestring):
raise TypeError('tests must be an iterable of tests, not a string')
for test in tests:
self.addTest(test)
def run(self, result):
for test in self:
if result.shouldStop:
break
test(result)
return result
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
for test in self:
test.debug()
class TestSuite(BaseTestSuite):
"""A test suite is a composite test consisting of a number of TestCases.
For use, create an instance of TestSuite, then add test case instances.
When all tests have been added, the suite can be passed to a test
runner, such as TextTestRunner. It will run the individual test cases
in the order in which they were added, aggregating the results. When
subclassing, do not forget to call the base class constructor.
"""
def run(self, result, debug=False):
topLevel = False
if getattr(result, '_testRunEntered', False) is False:
result._testRunEntered = topLevel = True
for test in self:
if result.shouldStop:
break
if _isnotsuite(test):
self._tearDownPreviousClass(test, result)
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if getattr(test.__class__, '_classSetupFailed', False) or getattr(result, '_moduleSetUpFailed', False):
continue
if not debug:
test(result)
else:
test.debug()
if topLevel:
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
result._testRunEntered = False
return result
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
debug = _DebugResult()
self.run(debug, True)
def _handleClassSetUp(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
else:
if result._moduleSetUpFailed:
return
if getattr(currentClass, '__unittest_skip__', False):
return
try:
currentClass._classSetupFailed = False
except TypeError:
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
if setUpClass is not None:
_call_if_exists(result, '_setupStdout')
try:
try:
setUpClass()
except Exception as e:
if isinstance(result, _DebugResult):
raise
currentClass._classSetupFailed = True
className = util.strclass(currentClass)
errorName = 'setUpClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
finally:
_call_if_exists(result, '_restoreStdout')
return
def _get_previous_module(self, result):
previousModule = None
previousClass = getattr(result, '_previousTestClass', None)
if previousClass is not None:
previousModule = previousClass.__module__
return previousModule
def _handleModuleFixture(self, test, result):
previousModule = self._get_previous_module(result)
currentModule = test.__class__.__module__
if currentModule == previousModule:
return
else:
self._handleModuleTearDown(result)
result._moduleSetUpFailed = False
try:
module = sys.modules[currentModule]
except KeyError:
return
setUpModule = getattr(module, 'setUpModule', None)
if setUpModule is not None:
_call_if_exists(result, '_setupStdout')
try:
try:
setUpModule()
except Exception as e:
if isinstance(result, _DebugResult):
raise
result._moduleSetUpFailed = True
errorName = 'setUpModule (%s)' % currentModule
self._addClassOrModuleLevelException(result, e, errorName)
finally:
_call_if_exists(result, '_restoreStdout')
return
def _addClassOrModuleLevelException(self, result, exception, errorName):
error = _ErrorHolder(errorName)
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None and isinstance(exception, case.SkipTest):
addSkip(error, str(exception))
else:
result.addError(error, sys.exc_info())
return
def _handleModuleTearDown(self, result):
previousModule = self._get_previous_module(result)
if previousModule is None:
return
else:
if result._moduleSetUpFailed:
return
try:
module = sys.modules[previousModule]
except KeyError:
return
tearDownModule = getattr(module, 'tearDownModule', None)
if tearDownModule is not None:
_call_if_exists(result, '_setupStdout')
try:
try:
tearDownModule()
except Exception as e:
if isinstance(result, _DebugResult):
raise
errorName = 'tearDownModule (%s)' % previousModule
self._addClassOrModuleLevelException(result, e, errorName)
finally:
_call_if_exists(result, '_restoreStdout')
return
def _tearDownPreviousClass(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
else:
if getattr(previousClass, '_classSetupFailed', False):
return
if getattr(result, '_moduleSetUpFailed', False):
return
if getattr(previousClass, '__unittest_skip__', False):
return
tearDownClass = getattr(previousClass, 'tearDownClass', None)
if tearDownClass is not None:
_call_if_exists(result, '_setupStdout')
try:
try:
tearDownClass()
except Exception as e:
if isinstance(result, _DebugResult):
raise
className = util.strclass(previousClass)
errorName = 'tearDownClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
finally:
_call_if_exists(result, '_restoreStdout')
return
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
is concerned, this looks exactly like a unit test. Used to insert
arbitrary errors into a test suite run.
"""
failureException = None
def __init__(self, description):
self.description = description
def id(self):
return self.description
def shortDescription(self):
return None
def __repr__(self):
return '<ErrorHolder description=%r>' % (self.description,)
def __str__(self):
return self.id()
def run(self, result):
pass
def __call__(self, result):
return self.run(result)
def countTestCases(self):
return 0
def _isnotsuite(test):<|fim▁hole|> iter(test)
except TypeError:
return True
return False
class _DebugResult(object):
"""Used by the TestSuite to hold previous class when running in debug."""
_previousTestClass = None
_moduleSetUpFailed = False
shouldStop = False<|fim▁end|> | """A crude way to tell apart testcases and suites with duck-typing"""
try: |
<|file_name|>mat3.ts<|end_file_name|><|fim▁begin|>namespace la.mat3 {
/// NOTE:
/// Row-major order
/// [m11, m12, m21, m22, x0, y0]
export function create(src?: number[]|Float32Array): Float32Array {
var dest = new Float32Array(6);
if (src) {
dest[0] = src[0];
dest[1] = src[1];
dest[2] = src[2];
dest[3] = src[3];
dest[4] = src[4];
dest[5] = src[5];
} else {
dest[0] = dest[1] = dest[2] = dest[3] = dest[4] = dest[5] = 0;
}
return dest;
}
export function copyTo(src: Float32Array, dest: Float32Array): Float32Array {
dest[0] = src[0];
dest[1] = src[1];
dest[2] = src[2];
dest[3] = src[3];
dest[4] = src[4];
dest[5] = src[5];
return dest;
}
export function init(dest: Float32Array, m11: number, m12: number, m21: number, m22: number, x0: number, y0: number): Float32Array {
dest[0] = m11;
dest[1] = m12;
dest[2] = m21;
dest[3] = m22;
dest[4] = x0;
dest[5] = y0;
return dest;
}
export function identity(dest?: Float32Array): Float32Array {
if (!dest) dest = create();
dest[0] = 1;
dest[1] = 0;
dest[2] = 0;
dest[3] = 1;
dest[4] = 0;
dest[5] = 0;
return dest;
}
export function equal(a: Float32Array, b: Float32Array): boolean {
return a === b || (
Math.abs(a[0] - b[0]) < EPSILON &&
Math.abs(a[1] - b[1]) < EPSILON &&
Math.abs(a[2] - b[2]) < EPSILON &&
Math.abs(a[3] - b[3]) < EPSILON &&
Math.abs(a[4] - b[4]) < EPSILON &&
Math.abs(a[5] - b[5]) < EPSILON
);
}
// dest = a * b
export function multiply(a: Float32Array, b: Float32Array, dest?: Float32Array): Float32Array {
if (!dest) dest = a;
var a11 = a[0], a12 = a[1],
a21 = a[2], a22 = a[3],
ax0 = a[4], ay0 = a[5],
b11 = b[0], b12 = b[1],
b21 = b[2], b22 = b[3],
bx0 = b[4], by0 = b[5];
dest[0] = a11 * b11 + a12 * b21;<|fim▁hole|> dest[1] = a11 * b12 + a12 * b22;
dest[2] = a21 * b11 + a22 * b21;
dest[3] = a21 * b12 + a22 * b22;
dest[4] = a11 * bx0 + a12 * by0 + ax0;
dest[5] = a21 * bx0 + a22 * by0 + ay0;
return dest;
}
export function inverse(mat: Float32Array, dest?: Float32Array): Float32Array {
if (Math.abs(mat[1]) < EPSILON && Math.abs(mat[2]) < EPSILON) //Simple scaling/translation matrix
return simple_inverse(mat, dest);
else
return complex_inverse(mat, dest);
}
export function transformVec2(mat: Float32Array, vec: Float32Array, dest?: Float32Array): Float32Array {
if (!dest) dest = vec;
var x = vec[0],
y = vec[1];
dest[0] = (mat[0] * x) + (mat[1] * y) + mat[4];
dest[1] = (mat[2] * x) + (mat[3] * y) + mat[5];
return dest;
}
export function transformVec2s(mat: Float32Array, ...vecs: Float32Array[]) {
for (var i = vecs.length - 1; i >= 0; i--) {
transformVec2(mat, vecs[i]);
}
}
export function createTranslate(x: number, y: number, dest?: Float32Array): Float32Array {
if (!dest) dest = create();
dest[0] = 1;
dest[1] = 0;
dest[2] = 0;
dest[3] = 1;
dest[4] = x;
dest[5] = y;
return dest;
}
export function translate(mat: Float32Array, x: number, y: number): Float32Array {
mat[4] += x;
mat[5] += y;
return mat;
}
export function createScale(sx: number, sy: number, dest?: Float32Array): Float32Array {
if (!dest) dest = create();
dest[0] = sx;
dest[1] = 0;
dest[2] = 0;
dest[3] = sy;
dest[4] = 0;
dest[5] = 0;
return dest;
}
export function scale(mat: Float32Array, sx: number, sy: number): Float32Array {
mat[0] *= sx;
mat[2] *= sx;
mat[4] *= sx;
mat[1] *= sy;
mat[3] *= sy;
mat[5] *= sy;
return mat;
}
export function createRotate(angleRad: number, dest?: Float32Array): Float32Array {
if (!dest) dest = create();
var c = Math.cos(angleRad);
var s = Math.sin(angleRad);
dest[0] = c;
dest[1] = -s;
dest[2] = s;
dest[3] = c;
dest[4] = 0;
dest[5] = 0;
return dest;
}
export function createSkew(angleRadX: number, angleRadY: number, dest?: Float32Array): Float32Array {
if (!dest) dest = create();
dest[0] = 1;
dest[1] = Math.tan(angleRadX);
dest[2] = Math.tan(angleRadY);
dest[3] = 1;
dest[4] = 0;
dest[5] = 0;
return dest;
}
export function preapply(dest: Float32Array, mat: Float32Array): Float32Array {
return multiply(dest, mat, dest);
}
export function apply(dest: Float32Array, mat: Float32Array): Float32Array {
return multiply(mat, dest, dest);
}
function simple_inverse(mat: Float32Array, dest?: Float32Array): Float32Array {
var m11 = mat[0];
if (Math.abs(m11) < EPSILON)
return null;
var m22 = mat[3];
if (Math.abs(m22) < EPSILON)
return null;
if (!dest) {
dest = mat;
} else {
dest[1] = mat[1];
dest[2] = mat[2];
}
var x0 = -mat[4];
var y0 = -mat[5];
if (Math.abs(m11 - 1) > EPSILON) {
m11 = 1 / m11;
x0 *= m11;
}
if (Math.abs(m22 - 1) > EPSILON) {
m22 = 1 / m22;
y0 *= m22;
}
dest[0] = m11;
dest[3] = m22;
dest[4] = x0;
dest[5] = y0;
return dest;
}
function complex_inverse(mat: Float32Array, dest?: Float32Array): Float32Array {
if (!dest) dest = mat;
var m11 = mat[0], m12 = mat[1],
m21 = mat[2], m22 = mat[3];
//inv(A) = 1/det(A) * adj(A)
var det = m11 * m22 - m12 * m21;
if (det === 0 || !isFinite(det))
return null;
var id = 1 / det;
var x0 = mat[4], y0 = mat[5];
dest[0] = m22 * id;
dest[1] = -m12 * id;
dest[2] = -m21 * id;
dest[3] = m11 * id;
dest[4] = (m21 * y0 - m22 * x0) * id;
dest[5] = (m12 * x0 - m11 * y0) * id;
return dest;
}
}<|fim▁end|> | |
<|file_name|>test_compute_api.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for compute API."""
import contextlib
import copy
import datetime
import iso8601
import mock
from mox3 import mox
from oslo_utils import timeutils
from oslo_utils import uuidutils
from nova.compute import api as compute_api
from nova.compute import arch
from nova.compute import cells_api as compute_cells_api
from nova.compute import flavors
from nova.compute import instance_actions
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_mode
from nova.compute import vm_states
from nova import conductor
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.objects import base as obj_base
from nova.objects import quotas as quotas_obj
from nova.openstack.common import policy as common_policy
from nova import policy
from nova import quota
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit.image import fake as fake_image
from nova.tests.unit import matchers
from nova.tests.unit.objects import test_flavor
from nova.tests.unit.objects import test_migration
from nova.tests.unit.objects import test_service
from nova import utils
from nova.volume import cinder
# Sentinel values shared by the tests below.
FAKE_IMAGE_REF = 'fake-image-ref'
NODENAME = 'fakenode1'
# Image ids used to drive the different snapshot-deletion outcomes when
# deleting a shelved instance (deleted / not found / not authorized / error).
SHELVED_IMAGE = 'fake-shelved-image'
SHELVED_IMAGE_NOT_FOUND = 'fake-shelved-image-notfound'
SHELVED_IMAGE_NOT_AUTHORIZED = 'fake-shelved-image-not-authorized'
SHELVED_IMAGE_EXCEPTION = 'fake-shelved-image-exception'
class _ComputeAPIUnitTestMixIn(object):
def setUp(self):
    """Create a fresh RequestContext shared by every test in the mix-in."""
    super(_ComputeAPIUnitTestMixIn, self).setUp()
    self.user_id = 'fake'
    self.project_id = 'fake'
    self.context = context.RequestContext(self.user_id,
                                          self.project_id)
def _get_vm_states(self, exclude_states=None):
    """Return the set of all known vm_states minus ``exclude_states``."""
    all_states = {
        vm_states.ACTIVE, vm_states.BUILDING, vm_states.PAUSED,
        vm_states.SUSPENDED, vm_states.RESCUED, vm_states.STOPPED,
        vm_states.RESIZED, vm_states.SOFT_DELETED, vm_states.DELETED,
        vm_states.ERROR, vm_states.SHELVED, vm_states.SHELVED_OFFLOADED,
    }
    # Treat a missing/empty argument as "exclude nothing".
    excluded = exclude_states or set()
    return all_states - excluded
def _create_flavor(self, **updates):
    """Build a Flavor object with m1.tiny-like defaults.

    Any keyword arguments override the default field values before the
    dict is converted into an objects.Flavor.
    """
    flavor = {'id': 1,
              'flavorid': 1,
              'name': 'm1.tiny',
              'memory_mb': 512,
              'vcpus': 1,
              'vcpu_weight': None,
              'root_gb': 1,
              'ephemeral_gb': 0,
              'rxtx_factor': 1,
              'swap': 0,
              'deleted': 0,
              'disabled': False,
              'is_public': True,
              'deleted_at': None,
              'created_at': datetime.datetime(2012, 1, 19, 18,
                                              49, 30, 877329),
              'updated_at': None,
              }
    if updates:
        flavor.update(updates)
    return objects.Flavor._from_db_object(self.context, objects.Flavor(),
                                          flavor)
def _create_instance_obj(self, params=None, flavor=None):
    """Create a test instance.

    ``params`` overrides instance fields after the defaults are applied;
    the special 'metadata' and 'system_metadata' keys are consumed first.
    """
    if not params:
        params = {}

    if flavor is None:
        flavor = self._create_flavor()

    now = timeutils.utcnow()

    instance = objects.Instance()
    instance.metadata = {}
    instance.metadata.update(params.pop('metadata', {}))
    instance.system_metadata = params.pop('system_metadata', {})
    instance._context = self.context
    instance.id = 1
    instance.uuid = uuidutils.generate_uuid()
    instance.cell_name = 'api!child'
    instance.vm_state = vm_states.ACTIVE
    instance.task_state = None
    instance.image_ref = FAKE_IMAGE_REF
    instance.reservation_id = 'r-fakeres'
    instance.user_id = self.user_id
    instance.project_id = self.project_id
    instance.host = 'fake_host'
    instance.node = NODENAME
    instance.instance_type_id = flavor.id
    instance.ami_launch_index = 0
    instance.memory_mb = 0
    instance.vcpus = 0
    instance.root_gb = 0
    instance.ephemeral_gb = 0
    instance.architecture = arch.X86_64
    instance.os_type = 'Linux'
    instance.locked = False
    instance.created_at = now
    instance.updated_at = now
    instance.launched_at = now
    instance.disable_terminate = False
    instance.info_cache = objects.InstanceInfoCache()
    instance.flavor = flavor
    instance.old_flavor = instance.new_flavor = None

    if params:
        instance.update(params)
    # Pretend the object is freshly loaded (no dirty fields).
    instance.obj_reset_changes()
    return instance
def test_create_quota_exceeded_messages(self):
    """The TooManyInstances message reports '20-40' or '40' as requested."""
    image_href = "image_href"
    image_id = 0
    instance_type = self._create_flavor()

    self.mox.StubOutWithMock(self.compute_api, "_get_image")
    self.mox.StubOutWithMock(quota.QUOTAS, "limit_check")
    self.mox.StubOutWithMock(quota.QUOTAS, "reserve")

    quotas = {'instances': 1, 'cores': 1, 'ram': 1}
    usages = {r: {'in_use': 1, 'reserved': 1} for r in
              ['instances', 'cores', 'ram']}
    quota_exception = exception.OverQuota(quotas=quotas,
                                          usages=usages, overs=['instances'])

    # Record the expected call sequence twice: once per create() attempt.
    for _unused in range(2):
        self.compute_api._get_image(self.context, image_href).AndReturn(
            (image_id, {}))
        quota.QUOTAS.limit_check(self.context, metadata_items=mox.IsA(int),
                                 project_id=mox.IgnoreArg(),
                                 user_id=mox.IgnoreArg())
        quota.QUOTAS.reserve(self.context, instances=40,
                             cores=mox.IsA(int),
                             expire=mox.IgnoreArg(),
                             project_id=mox.IgnoreArg(),
                             user_id=mox.IgnoreArg(),
                             ram=mox.IsA(int)).AndRaise(quota_exception)

    self.mox.ReplayAll()

    for min_count, message in [(20, '20-40'), (40, '40')]:
        try:
            self.compute_api.create(self.context, instance_type,
                                    "image_href", min_count=min_count,
                                    max_count=40)
        except exception.TooManyInstances as e:
            self.assertEqual(message, e.kwargs['req'])
        else:
            self.fail("Exception not raised")
def test_specified_port_and_multiple_instances_neutronv2(self):
    # Tests that if port is specified there is only one instance booting
    # (i.e max_count == 1) as we can't share the same port across multiple
    # instances.
    self.flags(network_api_class='nova.network.neutronv2.api.API')
    port = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
    address = '10.0.0.1'
    min_count = 1
    max_count = 2
    requested_networks = objects.NetworkRequestList(
        objects=[objects.NetworkRequest(address=address,
                                        port_id=port)])

    self.assertRaises(exception.MultiplePortsNotApplicable,
                      self.compute_api.create, self.context, 'fake_flavor',
                      'image_id',
                      min_count=min_count, max_count=max_count,
                      requested_networks=requested_networks)
def _test_specified_ip_and_multiple_instances_helper(self,
                                                     requested_networks):
    """Booting more than one instance with a fixed IP must be rejected.

    Requests a batch of two instances (max_count == 2) and expects
    InvalidFixedIpAndMaxCountRequest.
    """
    self.assertRaises(exception.InvalidFixedIpAndMaxCountRequest,
                      self.compute_api.create, self.context, "fake_flavor",
                      'image_id', min_count=1, max_count=2,
                      requested_networks=requested_networks)
# Fixed-IP + multi-instance rejection, for nova-network and neutronv2.

def test_specified_ip_and_multiple_instances(self):
    """Fixed IP with max_count > 1 is rejected (default network API)."""
    network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
    address = '10.0.0.1'
    requested_networks = objects.NetworkRequestList(
        objects=[objects.NetworkRequest(network_id=network,
                                        address=address)])
    self._test_specified_ip_and_multiple_instances_helper(
        requested_networks)

def test_specified_ip_and_multiple_instances_neutronv2(self):
    """Fixed IP with max_count > 1 is rejected (neutronv2 network API)."""
    self.flags(network_api_class='nova.network.neutronv2.api.API')
    network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
    address = '10.0.0.1'
    requested_networks = objects.NetworkRequestList(
        objects=[objects.NetworkRequest(network_id=network,
                                        address=address)])
    self._test_specified_ip_and_multiple_instances_helper(
        requested_networks)
def test_suspend(self):
    # Ensure instance can be suspended.
    instance = self._create_instance_obj()
    self.assertEqual(instance.vm_state, vm_states.ACTIVE)
    self.assertIsNone(instance.task_state)

    self.mox.StubOutWithMock(instance, 'save')
    self.mox.StubOutWithMock(self.compute_api,
                             '_record_action_start')
    # Cells 'api' deployments route through the cells RPC API instead of
    # talking to compute directly.
    if self.cell_type == 'api':
        rpcapi = self.compute_api.cells_rpcapi
    else:
        rpcapi = self.compute_api.compute_rpcapi
    self.mox.StubOutWithMock(rpcapi, 'suspend_instance')

    # Record phase: exact call order matters to mox verification.
    instance.save(expected_task_state=[None])
    self.compute_api._record_action_start(self.context,
                                          instance, instance_actions.SUSPEND)
    rpcapi.suspend_instance(self.context, instance)

    self.mox.ReplayAll()

    self.compute_api.suspend(self.context, instance)
    self.assertEqual(vm_states.ACTIVE, instance.vm_state)
    self.assertEqual(task_states.SUSPENDING,
                     instance.task_state)
def _test_suspend_fails(self, vm_state):
    """Suspending an instance in ``vm_state`` must raise."""
    params = dict(vm_state=vm_state)
    instance = self._create_instance_obj(params=params)
    self.assertIsNone(instance.task_state)
    self.assertRaises(exception.InstanceInvalidState,
                      self.compute_api.suspend,
                      self.context, instance)

def test_suspend_fails_invalid_states(self):
    """Every vm_state except ACTIVE rejects suspend."""
    invalid_vm_states = self._get_vm_states(set([vm_states.ACTIVE]))
    for state in invalid_vm_states:
        self._test_suspend_fails(state)
def test_resume(self):
    # Ensure instance can be resumed (if suspended).
    instance = self._create_instance_obj(
        params=dict(vm_state=vm_states.SUSPENDED))
    self.assertEqual(instance.vm_state, vm_states.SUSPENDED)
    self.assertIsNone(instance.task_state)

    self.mox.StubOutWithMock(instance, 'save')
    self.mox.StubOutWithMock(self.compute_api,
                             '_record_action_start')
    # Cells 'api' deployments route through the cells RPC API.
    if self.cell_type == 'api':
        rpcapi = self.compute_api.cells_rpcapi
    else:
        rpcapi = self.compute_api.compute_rpcapi
    self.mox.StubOutWithMock(rpcapi, 'resume_instance')

    # Record phase: save, action start, then the RPC cast.
    instance.save(expected_task_state=[None])
    self.compute_api._record_action_start(self.context,
                                          instance, instance_actions.RESUME)
    rpcapi.resume_instance(self.context, instance)

    self.mox.ReplayAll()

    self.compute_api.resume(self.context, instance)
    self.assertEqual(vm_states.SUSPENDED, instance.vm_state)
    self.assertEqual(task_states.RESUMING,
                     instance.task_state)
def test_start(self):
    """Starting a STOPPED instance sets POWERING_ON and casts the RPC."""
    params = dict(vm_state=vm_states.STOPPED)
    instance = self._create_instance_obj(params=params)

    self.mox.StubOutWithMock(instance, 'save')
    self.mox.StubOutWithMock(self.compute_api,
                             '_record_action_start')

    instance.save(expected_task_state=[None])
    self.compute_api._record_action_start(self.context,
                                          instance, instance_actions.START)

    # Cells 'api' deployments route through the cells RPC API.
    if self.cell_type == 'api':
        rpcapi = self.compute_api.cells_rpcapi
    else:
        rpcapi = self.compute_api.compute_rpcapi
    self.mox.StubOutWithMock(rpcapi, 'start_instance')
    rpcapi.start_instance(self.context, instance)

    self.mox.ReplayAll()

    self.compute_api.start(self.context, instance)
    self.assertEqual(task_states.POWERING_ON,
                     instance.task_state)

def test_start_invalid_state(self):
    """start() on an ACTIVE instance raises InstanceInvalidState."""
    instance = self._create_instance_obj()
    self.assertEqual(instance.vm_state, vm_states.ACTIVE)
    self.assertRaises(exception.InstanceInvalidState,
                      self.compute_api.start,
                      self.context, instance)

def test_start_no_host(self):
    """start() on an unscheduled instance (no host) raises NotReady."""
    params = dict(vm_state=vm_states.STOPPED, host='')
    instance = self._create_instance_obj(params=params)
    self.assertRaises(exception.InstanceNotReady,
                      self.compute_api.start,
                      self.context, instance)
def _test_stop(self, vm_state, force=False, clean_shutdown=True):
    """Drive stop()/force_stop() and verify task state and progress."""
    # Make sure 'progress' gets reset
    params = dict(task_state=None, progress=99, vm_state=vm_state)
    instance = self._create_instance_obj(params=params)

    self.mox.StubOutWithMock(instance, 'save')
    self.mox.StubOutWithMock(self.compute_api,
                             '_record_action_start')

    instance.save(expected_task_state=[None])
    self.compute_api._record_action_start(self.context,
                                          instance, instance_actions.STOP)

    # Cells 'api' deployments route through the cells RPC API.
    if self.cell_type == 'api':
        rpcapi = self.compute_api.cells_rpcapi
    else:
        rpcapi = self.compute_api.compute_rpcapi

    self.mox.StubOutWithMock(rpcapi, 'stop_instance')
    rpcapi.stop_instance(self.context, instance, do_cast=True,
                         clean_shutdown=clean_shutdown)

    self.mox.ReplayAll()

    if force:
        self.compute_api.force_stop(self.context, instance,
                                    clean_shutdown=clean_shutdown)
    else:
        self.compute_api.stop(self.context, instance,
                              clean_shutdown=clean_shutdown)
    self.assertEqual(task_states.POWERING_OFF,
                     instance.task_state)
    self.assertEqual(0, instance.progress)
# stop()/force_stop() variants delegating to the helpers above.

def test_stop(self):
    self._test_stop(vm_states.ACTIVE)

def test_stop_stopped_instance_with_bypass(self):
    # force_stop bypasses the ACTIVE-state check.
    self._test_stop(vm_states.STOPPED, force=True)

def test_stop_forced_shutdown(self):
    self._test_stop(vm_states.ACTIVE, force=True)

def test_stop_without_clean_shutdown(self):
    self._test_stop(vm_states.ACTIVE,
                    clean_shutdown=False)

def test_stop_forced_without_clean_shutdown(self):
    self._test_stop(vm_states.ACTIVE, force=True,
                    clean_shutdown=False)

def _test_stop_invalid_state(self, vm_state):
    """stop() on an instance in ``vm_state`` must raise."""
    params = dict(vm_state=vm_state)
    instance = self._create_instance_obj(params=params)
    self.assertRaises(exception.InstanceInvalidState,
                      self.compute_api.stop,
                      self.context, instance)

def test_stop_fails_invalid_states(self):
    """Every vm_state except ACTIVE/ERROR rejects stop()."""
    invalid_vm_states = self._get_vm_states(set([vm_states.ACTIVE,
                                                 vm_states.ERROR]))
    for state in invalid_vm_states:
        self._test_stop_invalid_state(state)

def test_stop_a_stopped_inst(self):
    params = {'vm_state': vm_states.STOPPED}
    instance = self._create_instance_obj(params=params)
    self.assertRaises(exception.InstanceInvalidState,
                      self.compute_api.stop,
                      self.context, instance)

def test_stop_no_host(self):
    # Unscheduled instances (no host) cannot be stopped.
    params = {'host': ''}
    instance = self._create_instance_obj(params=params)
    self.assertRaises(exception.InstanceNotReady,
                      self.compute_api.stop,
                      self.context, instance)
def _test_shelve(self, vm_state=vm_states.ACTIVE,
                 boot_from_volume=False, clean_shutdown=True):
    """Drive shelve() and verify the snapshot/offload RPC split.

    Volume-backed instances skip the snapshot and go straight to
    shelve_offload; image-backed ones snapshot first.
    """
    params = dict(task_state=None, vm_state=vm_state,
                  display_name='fake-name')
    instance = self._create_instance_obj(params=params)
    with contextlib.nested(
        mock.patch.object(self.compute_api, 'is_volume_backed_instance',
                          return_value=boot_from_volume),
        mock.patch.object(self.compute_api, '_create_image',
                          return_value=dict(id='fake-image-id')),
        mock.patch.object(instance, 'save'),
        mock.patch.object(self.compute_api, '_record_action_start'),
        mock.patch.object(self.compute_api.compute_rpcapi,
                          'shelve_instance'),
        mock.patch.object(self.compute_api.compute_rpcapi,
                          'shelve_offload_instance')
    ) as (
        volume_backed_inst, create_image, instance_save,
        record_action_start, rpcapi_shelve_instance,
        rpcapi_shelve_offload_instance
    ):
        self.compute_api.shelve(self.context, instance,
                                clean_shutdown=clean_shutdown)

        # assert field values set on the instance object
        self.assertEqual(task_states.SHELVING, instance.task_state)

        # assert our mock calls
        volume_backed_inst.assert_called_once_with(
            self.context, instance)
        instance_save.assert_called_once_with(expected_task_state=[None])
        record_action_start.assert_called_once_with(
            self.context, instance, instance_actions.SHELVE)
        if boot_from_volume:
            rpcapi_shelve_offload_instance.assert_called_once_with(
                self.context, instance=instance,
                clean_shutdown=clean_shutdown)
        else:
            rpcapi_shelve_instance.assert_called_once_with(
                self.context, instance=instance, image_id='fake-image-id',
                clean_shutdown=clean_shutdown)
# shelve() variants delegating to _test_shelve, plus invalid-state checks.

def test_shelve(self):
    self._test_shelve()

def test_shelve_stopped(self):
    self._test_shelve(vm_state=vm_states.STOPPED)

def test_shelve_paused(self):
    self._test_shelve(vm_state=vm_states.PAUSED)

def test_shelve_suspended(self):
    self._test_shelve(vm_state=vm_states.SUSPENDED)

def test_shelve_boot_from_volume(self):
    self._test_shelve(boot_from_volume=True)

def test_shelve_forced_shutdown(self):
    self._test_shelve(clean_shutdown=False)

def test_shelve_boot_from_volume_forced_shutdown(self):
    self._test_shelve(boot_from_volume=True,
                      clean_shutdown=False)

def _test_shelve_invalid_state(self, vm_state):
    """shelve() on an instance in ``vm_state`` must raise."""
    params = dict(vm_state=vm_state)
    instance = self._create_instance_obj(params=params)
    self.assertRaises(exception.InstanceInvalidState,
                      self.compute_api.shelve,
                      self.context, instance)

def test_shelve_fails_invalid_states(self):
    """Only ACTIVE/STOPPED/PAUSED/SUSPENDED instances may be shelved."""
    invalid_vm_states = self._get_vm_states(set([vm_states.ACTIVE,
                                                 vm_states.STOPPED,
                                                 vm_states.PAUSED,
                                                 vm_states.SUSPENDED]))
    for state in invalid_vm_states:
        self._test_shelve_invalid_state(state)
def _test_shelve_offload(self, clean_shutdown=True):
    """Drive shelve_offload() on a SHELVED instance and verify the cast."""
    params = dict(task_state=None, vm_state=vm_states.SHELVED)
    instance = self._create_instance_obj(params=params)
    with contextlib.nested(
        mock.patch.object(instance, 'save'),
        mock.patch.object(self.compute_api.compute_rpcapi,
                          'shelve_offload_instance')
    ) as (
        instance_save, rpcapi_shelve_offload_instance
    ):
        self.compute_api.shelve_offload(self.context, instance,
                                        clean_shutdown=clean_shutdown)

        # assert field values set on the instance object
        self.assertEqual(task_states.SHELVING_OFFLOADING,
                         instance.task_state)
        instance_save.assert_called_once_with(expected_task_state=[None])
        rpcapi_shelve_offload_instance.assert_called_once_with(
            self.context, instance=instance,
            clean_shutdown=clean_shutdown)

def test_shelve_offload(self):
    self._test_shelve_offload()

def test_shelve_offload_forced_shutdown(self):
    self._test_shelve_offload(clean_shutdown=False)

def _test_shelve_offload_invalid_state(self, vm_state):
    """shelve_offload() on an instance in ``vm_state`` must raise."""
    params = dict(vm_state=vm_state)
    instance = self._create_instance_obj(params=params)
    self.assertRaises(exception.InstanceInvalidState,
                      self.compute_api.shelve_offload,
                      self.context, instance)

def test_shelve_offload_fails_invalid_states(self):
    """Only SHELVED instances may be offloaded."""
    invalid_vm_states = self._get_vm_states(set([vm_states.SHELVED]))
    for state in invalid_vm_states:
        self._test_shelve_offload_invalid_state(state)
def _test_reboot_type(self, vm_state, reboot_type, task_state=None):
    # Ensure instance can be soft rebooted.
    inst = self._create_instance_obj()
    inst.vm_state = vm_state
    inst.task_state = task_state

    self.mox.StubOutWithMock(self.context, 'elevated')
    self.mox.StubOutWithMock(self.compute_api, '_record_action_start')
    self.mox.StubOutWithMock(inst, 'save')
    expected_task_state = [None]
    if reboot_type == 'HARD':
        # A hard reboot may interrupt any of these in-flight task states.
        expected_task_state.extend([task_states.REBOOTING,
                                    task_states.REBOOT_PENDING,
                                    task_states.REBOOT_STARTED,
                                    task_states.REBOOTING_HARD,
                                    task_states.RESUMING,
                                    task_states.UNPAUSING,
                                    task_states.SUSPENDING])
    inst.save(expected_task_state=expected_task_state)
    self.compute_api._record_action_start(self.context, inst,
                                          instance_actions.REBOOT)

    # Cells 'api' deployments route through the cells RPC API.
    if self.cell_type == 'api':
        rpcapi = self.compute_api.cells_rpcapi
    else:
        rpcapi = self.compute_api.compute_rpcapi
    self.mox.StubOutWithMock(rpcapi, 'reboot_instance')
    rpcapi.reboot_instance(self.context, instance=inst,
                           block_device_info=None,
                           reboot_type=reboot_type)
    self.mox.ReplayAll()

    self.compute_api.reboot(self.context, inst, reboot_type)

def _test_reboot_type_fails(self, reboot_type, **updates):
    """reboot() with the given instance field overrides must raise."""
    inst = self._create_instance_obj()
    inst.update(updates)
    self.assertRaises(exception.InstanceInvalidState,
                      self.compute_api.reboot,
                      self.context, inst, reboot_type)
# HARD/SOFT reboot state-matrix tests delegating to the helpers above:
# HARD reboots are allowed through most transient task states; SOFT
# reboots are only allowed from a quiescent ACTIVE instance.

def test_reboot_hard_active(self):
    self._test_reboot_type(vm_states.ACTIVE, 'HARD')

def test_reboot_hard_error(self):
    self._test_reboot_type(vm_states.ERROR, 'HARD')

def test_reboot_hard_rebooting(self):
    self._test_reboot_type(vm_states.ACTIVE, 'HARD',
                           task_state=task_states.REBOOTING)

def test_reboot_hard_reboot_started(self):
    self._test_reboot_type(vm_states.ACTIVE, 'HARD',
                           task_state=task_states.REBOOT_STARTED)

def test_reboot_hard_reboot_pending(self):
    self._test_reboot_type(vm_states.ACTIVE, 'HARD',
                           task_state=task_states.REBOOT_PENDING)

def test_reboot_hard_rescued(self):
    self._test_reboot_type_fails('HARD', vm_state=vm_states.RESCUED)

def test_reboot_hard_resuming(self):
    self._test_reboot_type(vm_states.ACTIVE,
                           'HARD', task_state=task_states.RESUMING)

def test_reboot_hard_pausing(self):
    self._test_reboot_type(vm_states.ACTIVE,
                           'HARD', task_state=task_states.PAUSING)

def test_reboot_hard_unpausing(self):
    self._test_reboot_type(vm_states.ACTIVE,
                           'HARD', task_state=task_states.UNPAUSING)

def test_reboot_hard_suspending(self):
    self._test_reboot_type(vm_states.ACTIVE,
                           'HARD', task_state=task_states.SUSPENDING)

def test_reboot_hard_error_not_launched(self):
    self._test_reboot_type_fails('HARD', vm_state=vm_states.ERROR,
                                 launched_at=None)

def test_reboot_soft(self):
    self._test_reboot_type(vm_states.ACTIVE, 'SOFT')

def test_reboot_soft_error(self):
    self._test_reboot_type_fails('SOFT', vm_state=vm_states.ERROR)

def test_reboot_soft_paused(self):
    self._test_reboot_type_fails('SOFT', vm_state=vm_states.PAUSED)

def test_reboot_soft_stopped(self):
    self._test_reboot_type_fails('SOFT', vm_state=vm_states.STOPPED)

def test_reboot_soft_suspended(self):
    self._test_reboot_type_fails('SOFT', vm_state=vm_states.SUSPENDED)

def test_reboot_soft_rebooting(self):
    self._test_reboot_type_fails('SOFT', task_state=task_states.REBOOTING)

def test_reboot_soft_rebooting_hard(self):
    self._test_reboot_type_fails('SOFT',
                                 task_state=task_states.REBOOTING_HARD)

def test_reboot_soft_reboot_started(self):
    self._test_reboot_type_fails('SOFT',
                                 task_state=task_states.REBOOT_STARTED)

def test_reboot_soft_reboot_pending(self):
    self._test_reboot_type_fails('SOFT',
                                 task_state=task_states.REBOOT_PENDING)

def test_reboot_soft_rescued(self):
    self._test_reboot_type_fails('SOFT', vm_state=vm_states.RESCUED)

def test_reboot_soft_error_not_launched(self):
    self._test_reboot_type_fails('SOFT', vm_state=vm_states.ERROR,
                                 launched_at=None)

def test_reboot_soft_resuming(self):
    self._test_reboot_type_fails('SOFT', task_state=task_states.RESUMING)

def test_reboot_soft_pausing(self):
    self._test_reboot_type_fails('SOFT', task_state=task_states.PAUSING)

def test_reboot_soft_unpausing(self):
    self._test_reboot_type_fails('SOFT', task_state=task_states.UNPAUSING)

def test_reboot_soft_suspending(self):
    self._test_reboot_type_fails('SOFT', task_state=task_states.SUSPENDING)
def _test_delete_resizing_part(self, inst, deltas):
    """Record the mox expectations for deleting a mid-resize instance.

    Adjusts ``deltas`` so that the quota for the old flavor is released
    in addition to the instance itself.
    """
    fake_db_migration = test_migration.fake_db_migration()
    migration = objects.Migration._from_db_object(
        self.context, objects.Migration(),
        fake_db_migration)
    inst.instance_type_id = migration.new_instance_type_id
    old_flavor = self._create_flavor(vcpus=1, memory_mb=512)
    deltas['cores'] = -old_flavor.vcpus
    deltas['ram'] = -old_flavor.memory_mb

    self.mox.StubOutWithMock(objects.Migration,
                             'get_by_instance_and_status')
    self.mox.StubOutWithMock(flavors, 'get_flavor')

    self.context.elevated().AndReturn(self.context)
    objects.Migration.get_by_instance_and_status(
        self.context, inst.uuid, 'post-migrating').AndReturn(migration)
    flavors.get_flavor(migration.old_instance_type_id).AndReturn(
        old_flavor)
def _test_delete_resized_part(self, inst):
    """Record the mox expectations for deleting a RESIZED instance.

    Deleting a resized-but-unconfirmed instance implicitly confirms the
    resize (synchronously, cast=False) to release the source resources.
    """
    migration = objects.Migration._from_db_object(
        self.context, objects.Migration(),
        test_migration.fake_db_migration())

    self.mox.StubOutWithMock(objects.Migration,
                             'get_by_instance_and_status')

    self.context.elevated().AndReturn(self.context)
    objects.Migration.get_by_instance_and_status(
        self.context, inst.uuid, 'finished').AndReturn(migration)
    self.compute_api._downsize_quota_delta(self.context, inst
                                           ).AndReturn('deltas')
    fake_quotas = objects.Quotas.from_reservations(self.context,
                                                   ['rsvs'])
    self.compute_api._reserve_quota_delta(self.context, 'deltas', inst,
                                          ).AndReturn(fake_quotas)
    self.compute_api._record_action_start(
        self.context, inst, instance_actions.CONFIRM_RESIZE)
    self.compute_api.compute_rpcapi.confirm_resize(
        self.context, inst, migration,
        migration['source_compute'], fake_quotas.reservations, cast=False)
def _test_delete_shelved_part(self, inst):
    """Record the expected snapshot deletion for a shelved instance.

    The sentinel stored in system_metadata['shelved_image_id'] selects
    which image-delete outcome (success / not found / not authorized /
    unexpected error) the test exercises.
    """
    image_api = self.compute_api.image_api
    self.mox.StubOutWithMock(image_api, 'delete')

    snapshot_id = inst.system_metadata.get('shelved_image_id')
    if snapshot_id == SHELVED_IMAGE:
        image_api.delete(self.context, snapshot_id).AndReturn(True)
    elif snapshot_id == SHELVED_IMAGE_NOT_FOUND:
        image_api.delete(self.context, snapshot_id).AndRaise(
            exception.ImageNotFound(image_id=snapshot_id))
    elif snapshot_id == SHELVED_IMAGE_NOT_AUTHORIZED:
        image_api.delete(self.context, snapshot_id).AndRaise(
            exception.ImageNotAuthorized(image_id=snapshot_id))
    elif snapshot_id == SHELVED_IMAGE_EXCEPTION:
        image_api.delete(self.context, snapshot_id).AndRaise(
            test.TestingException("Unexpected error"))
def _test_downed_host_part(self, inst, updates, delete_time, delete_type):
    """Record the local-delete expectations when the compute host is down.

    The API deletes the instance directly: drop the info cache, emit the
    <delete_type>.start/.end notifications, deallocate networking, and
    destroy the DB record.  ``updates`` is mutated to the expected final
    field values.
    """
    inst.info_cache.delete()
    compute_utils.notify_about_instance_usage(
        self.compute_api.notifier, self.context, inst,
        '%s.start' % delete_type)
    self.context.elevated().AndReturn(self.context)
    self.compute_api.network_api.deallocate_for_instance(
        self.context, inst)
    # soft_delete* ends in SOFT_DELETED, everything else in DELETED.
    state = ('soft' in delete_type and vm_states.SOFT_DELETED or
             vm_states.DELETED)
    updates.update({'vm_state': state,
                    'task_state': None,
                    'terminated_at': delete_time})
    inst.save()

    updates.update({'deleted_at': delete_time,
                    'deleted': True})
    fake_inst = fake_instance.fake_db_instance(**updates)
    db.instance_destroy(self.context, inst.uuid,
                        constraint=None).AndReturn(fake_inst)
    compute_utils.notify_about_instance_usage(
        self.compute_api.notifier,
        self.context, inst, '%s.end' % delete_type,
        system_metadata=inst.system_metadata)
    def _test_delete(self, delete_type, **attrs):
        """Drive delete/soft_delete/force_delete through the full mox script.

        ``delete_type`` selects the compute API method invoked at the end
        ('delete', 'soft_delete' or 'force_delete'); ``attrs`` override
        fields on the instance being deleted (vm_state, host, ...).  The
        expectations recorded below branch on those attributes and on
        self.cell_type, mirroring the API-cell vs compute-cell code paths.
        """
        reservations = ['fake-resv']
        inst = self._create_instance_obj()
        inst.update(attrs)
        inst._context = self.context
        # Deleting an instance gives back its quota.
        deltas = {'instances': -1,
                  'cores': -inst.vcpus,
                  'ram': -inst.memory_mb}
        delete_time = datetime.datetime(1955, 11, 5, 9, 30,
                                        tzinfo=iso8601.iso8601.Utc())
        timeutils.set_time_override(delete_time)
        task_state = (delete_type == 'soft_delete' and
                      task_states.SOFT_DELETING or task_states.DELETING)
        updates = {'progress': 0, 'task_state': task_state}
        if delete_type == 'soft_delete':
            updates['deleted_at'] = delete_time
        self.mox.StubOutWithMock(inst, 'save')
        self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
                                 'get_by_instance_uuid')
        self.mox.StubOutWithMock(quota.QUOTAS, 'reserve')
        self.mox.StubOutWithMock(self.context, 'elevated')
        self.mox.StubOutWithMock(db, 'service_get_by_compute_host')
        self.mox.StubOutWithMock(self.compute_api.servicegroup_api,
                                 'service_is_up')
        self.mox.StubOutWithMock(self.compute_api, '_downsize_quota_delta')
        self.mox.StubOutWithMock(self.compute_api, '_reserve_quota_delta')
        self.mox.StubOutWithMock(self.compute_api, '_record_action_start')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(inst.info_cache, 'delete')
        self.mox.StubOutWithMock(self.compute_api.network_api,
                                 'deallocate_for_instance')
        self.mox.StubOutWithMock(db, 'instance_system_metadata_get')
        self.mox.StubOutWithMock(db, 'instance_destroy')
        self.mox.StubOutWithMock(compute_utils,
                                 'notify_about_instance_usage')
        self.mox.StubOutWithMock(quota.QUOTAS, 'commit')
        rpcapi = self.compute_api.compute_rpcapi
        self.mox.StubOutWithMock(rpcapi, 'confirm_resize')
        # Shelved instances additionally expect an image_api.delete().
        if (inst.vm_state in
            (vm_states.SHELVED, vm_states.SHELVED_OFFLOADED)):
            self._test_delete_shelved_part(inst)
        if self.cell_type == 'api':
            rpcapi = self.compute_api.cells_rpcapi
        self.mox.StubOutWithMock(rpcapi, 'terminate_instance')
        self.mox.StubOutWithMock(rpcapi, 'soft_delete_instance')
        objects.BlockDeviceMappingList.get_by_instance_uuid(
            self.context, inst.uuid).AndReturn([])
        inst.save()
        if inst.task_state == task_states.RESIZE_FINISH:
            self._test_delete_resizing_part(inst, deltas)
        quota.QUOTAS.reserve(self.context, project_id=inst.project_id,
                             user_id=inst.user_id,
                             expire=mox.IgnoreArg(),
                             **deltas).AndReturn(reservations)
        # NOTE(comstud): This is getting messy. But what we are wanting
        # to test is:
        # If cells is enabled and we're the API cell:
        #   * Cast to cells_rpcapi.<method> with reservations=None
        #   * Commit reservations
        # Otherwise:
        #   * Check for downed host
        #   * If downed host:
        #     * Clean up instance, destroying it, sending notifications.
        #       (Tested in _test_downed_host_part())
        #     * Commit reservations
        #   * If not downed host:
        #     * Record the action start.
        #     * Cast to compute_rpcapi.<method> with the reservations
        cast = True
        commit_quotas = True
        soft_delete = False
        if self.cell_type != 'api':
            if inst.vm_state == vm_states.RESIZED:
                self._test_delete_resized_part(inst)
            if inst.vm_state == vm_states.SOFT_DELETED:
                soft_delete = True
            if inst.vm_state != vm_states.SHELVED_OFFLOADED:
                self.context.elevated().AndReturn(self.context)
                db.service_get_by_compute_host(
                    self.context, inst.host).AndReturn(
                        test_service.fake_service)
                # 'down-host' is the magic hostname that makes the service
                # look dead, forcing the local-delete path.
                self.compute_api.servicegroup_api.service_is_up(
                    mox.IsA(objects.Service)).AndReturn(
                        inst.host != 'down-host')
            if (inst.host == 'down-host' or
                    inst.vm_state == vm_states.SHELVED_OFFLOADED):
                self._test_downed_host_part(inst, updates, delete_time,
                                            delete_type)
                cast = False
            else:
                # Happens on the manager side
                commit_quotas = False
        if cast:
            if self.cell_type != 'api':
                self.compute_api._record_action_start(self.context, inst,
                                                      instance_actions.DELETE)
            if commit_quotas or soft_delete:
                cast_reservations = None
            else:
                cast_reservations = reservations
            if delete_type == 'soft_delete':
                rpcapi.soft_delete_instance(self.context, inst,
                                            reservations=cast_reservations)
            elif delete_type in ['delete', 'force_delete']:
                rpcapi.terminate_instance(self.context, inst, [],
                                          reservations=cast_reservations)
        if commit_quotas:
            # Local delete or when we're testing API cell.
            quota.QUOTAS.commit(self.context, reservations,
                                project_id=inst.project_id,
                                user_id=inst.user_id)
        self.mox.ReplayAll()
        getattr(self.compute_api, delete_type)(self.context, inst)
        # The instance object must reflect every DB update we expected.
        for k, v in updates.items():
            self.assertEqual(inst[k], v)
        self.mox.UnsetStubs()
    # Thin wrappers that drive _test_delete() with a particular delete
    # type and starting instance state.
    def test_delete(self):
        self._test_delete('delete')
    def test_delete_if_not_launched(self):
        self._test_delete('delete', launched_at=None)
    def test_delete_in_resizing(self):
        self._test_delete('delete',
                          task_state=task_states.RESIZE_FINISH)
    def test_delete_in_resized(self):
        self._test_delete('delete', vm_state=vm_states.RESIZED)
    def test_delete_shelved(self):
        fake_sys_meta = {'shelved_image_id': SHELVED_IMAGE}
        self._test_delete('delete',
                          vm_state=vm_states.SHELVED,
                          system_metadata=fake_sys_meta)
    def test_delete_shelved_offloaded(self):
        fake_sys_meta = {'shelved_image_id': SHELVED_IMAGE}
        self._test_delete('delete',
                          vm_state=vm_states.SHELVED_OFFLOADED,
                          system_metadata=fake_sys_meta)
    def test_delete_shelved_image_not_found(self):
        fake_sys_meta = {'shelved_image_id': SHELVED_IMAGE_NOT_FOUND}
        self._test_delete('delete',
                          vm_state=vm_states.SHELVED_OFFLOADED,
                          system_metadata=fake_sys_meta)
    def test_delete_shelved_image_not_authorized(self):
        fake_sys_meta = {'shelved_image_id': SHELVED_IMAGE_NOT_AUTHORIZED}
        self._test_delete('delete',
                          vm_state=vm_states.SHELVED_OFFLOADED,
                          system_metadata=fake_sys_meta)
    def test_delete_shelved_exception(self):
        fake_sys_meta = {'shelved_image_id': SHELVED_IMAGE_EXCEPTION}
        self._test_delete('delete',
                          vm_state=vm_states.SHELVED,
                          system_metadata=fake_sys_meta)
    def test_delete_with_down_host(self):
        # 'down-host' makes service_is_up() return False in _test_delete,
        # exercising the local-delete path.
        self._test_delete('delete', host='down-host')
    def test_delete_soft_with_down_host(self):
        self._test_delete('soft_delete', host='down-host')
    def test_delete_soft(self):
        self._test_delete('soft_delete')
    def test_delete_forced(self):
        # force_delete is exercised from every vm_state; the shelved
        # states are additionally run with a shelved image present.
        fake_sys_meta = {'shelved_image_id': SHELVED_IMAGE}
        for vm_state in self._get_vm_states():
            if vm_state in (vm_states.SHELVED, vm_states.SHELVED_OFFLOADED):
                self._test_delete('force_delete',
                                  vm_state=vm_state,
                                  system_metadata=fake_sys_meta)
            self._test_delete('force_delete', vm_state=vm_state)
    def test_delete_fast_if_host_not_set(self):
        """An instance with no host is destroyed locally ('fast path').

        No compute was ever scheduled for the instance, so delete() skips
        the RPC cast (except in the API cell, which still forwards to the
        child cell) and destroys the DB record directly.
        """
        inst = self._create_instance_obj()
        inst.host = ''
        quotas = quotas_obj.Quotas(self.context)
        updates = {'progress': 0, 'task_state': task_states.DELETING}
        self.mox.StubOutWithMock(inst, 'save')
        self.mox.StubOutWithMock(db,
                                 'block_device_mapping_get_all_by_instance')
        self.mox.StubOutWithMock(db, 'constraint')
        self.mox.StubOutWithMock(db, 'instance_destroy')
        self.mox.StubOutWithMock(self.compute_api, '_create_reservations')
        self.mox.StubOutWithMock(compute_utils,
                                 'notify_about_instance_usage')
        if self.cell_type == 'api':
            rpcapi = self.compute_api.cells_rpcapi
        else:
            rpcapi = self.compute_api.compute_rpcapi
        self.mox.StubOutWithMock(rpcapi, 'terminate_instance')
        db.block_device_mapping_get_all_by_instance(
            self.context, inst.uuid, use_slave=False).AndReturn([])
        inst.save()
        self.compute_api._create_reservations(self.context,
                                              inst, inst.task_state,
                                              inst.project_id, inst.user_id
                                              ).AndReturn(quotas)
        if self.cell_type == 'api':
            # API cell still casts down; reservations handled elsewhere.
            rpcapi.terminate_instance(
                self.context, inst,
                mox.IsA(objects.BlockDeviceMappingList),
                reservations=None)
        else:
            # Local delete: notify, destroy the row, notify again.
            compute_utils.notify_about_instance_usage(
                self.compute_api.notifier, self.context,
                inst, 'delete.start')
            db.constraint(host=mox.IgnoreArg()).AndReturn('constraint')
            delete_time = datetime.datetime(1955, 11, 5, 9, 30,
                                            tzinfo=iso8601.iso8601.Utc())
            updates['deleted_at'] = delete_time
            updates['deleted'] = True
            fake_inst = fake_instance.fake_db_instance(**updates)
            db.instance_destroy(self.context, inst.uuid,
                                constraint='constraint').AndReturn(fake_inst)
            compute_utils.notify_about_instance_usage(
                self.compute_api.notifier, self.context,
                inst, 'delete.end',
                system_metadata=inst.system_metadata)
        self.mox.ReplayAll()
        self.compute_api.delete(self.context, inst)
        for k, v in updates.items():
            self.assertEqual(inst[k], v)
    def _fake_do_delete(context, instance, bdms,
                        rservations=None, local=False):
        # No-op delete callback handed to _local_delete() by the tests
        # below.
        # NOTE(review): defined without 'self', so when accessed as
        # self._fake_do_delete the bound call shifts every argument by
        # one -- harmless only because the body does nothing.
        # 'rservations' is presumably a typo for 'reservations'; left
        # unchanged in case any caller passes it by keyword -- TODO
        # confirm before renaming.
        pass
    def test_local_delete_with_deleted_volume(self):
        """Volume already gone: VolumeNotFound from terminate_connection
        is swallowed and the local delete still completes (BDM destroyed,
        instance destroyed, end notification sent).
        """
        bdms = [objects.BlockDeviceMapping(
                **fake_block_device.FakeDbBlockDeviceDict(
                    {'id': 42, 'volume_id': 'volume_id',
                     'source_type': 'volume', 'destination_type': 'volume',
                     'delete_on_termination': False}))]
        inst = self._create_instance_obj()
        inst._context = self.context
        self.mox.StubOutWithMock(inst, 'destroy')
        self.mox.StubOutWithMock(self.context, 'elevated')
        self.mox.StubOutWithMock(inst.info_cache, 'delete')
        self.mox.StubOutWithMock(self.compute_api.network_api,
                                 'deallocate_for_instance')
        self.mox.StubOutWithMock(db, 'instance_system_metadata_get')
        self.mox.StubOutWithMock(compute_utils,
                                 'notify_about_instance_usage')
        self.mox.StubOutWithMock(self.compute_api.volume_api,
                                 'terminate_connection')
        self.mox.StubOutWithMock(objects.BlockDeviceMapping, 'destroy')
        inst.info_cache.delete()
        compute_utils.notify_about_instance_usage(
            self.compute_api.notifier, self.context,
            inst, 'delete.start')
        self.context.elevated().MultipleTimes().AndReturn(self.context)
        if self.cell_type != 'api':
            self.compute_api.network_api.deallocate_for_instance(
                self.context, inst)
        # The volume backend reports the volume as already deleted.
        self.compute_api.volume_api.terminate_connection(
            mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).\
            AndRaise(exception.VolumeNotFound('volume_id'))
        bdms[0].destroy()
        inst.destroy()
        compute_utils.notify_about_instance_usage(
            self.compute_api.notifier, self.context,
            inst, 'delete.end',
            system_metadata=inst.system_metadata)
        self.mox.ReplayAll()
        self.compute_api._local_delete(self.context, inst, bdms,
                                       'delete',
                                       self._fake_do_delete)
    def test_local_delete_without_info_cache(self):
        """_local_delete() copes with an instance whose info_cache is None."""
        inst = self._create_instance_obj()
        with contextlib.nested(
            mock.patch.object(inst, 'destroy'),
            mock.patch.object(self.context, 'elevated'),
            mock.patch.object(self.compute_api.network_api,
                              'deallocate_for_instance'),
            mock.patch.object(db, 'instance_system_metadata_get'),
            mock.patch.object(compute_utils,
                              'notify_about_instance_usage')
        ) as (
            inst_destroy, context_elevated, net_api_deallocate_for_instance,
            db_instance_system_metadata_get, notify_about_instance_usage
        ):
            # NOTE(review): the mox-style calls below (.MultipleTimes(),
            # .AndReturn()) are made on mock.patch MagicMocks, so they
            # record no expectations and are effectively no-ops --
            # presumably copied from a mox-based sibling test.  The real
            # assertion here is that _local_delete() does not blow up
            # once info_cache is set to None.  TODO confirm.
            compute_utils.notify_about_instance_usage(
                self.compute_api.notifier, self.context,
                inst, 'delete.start')
            self.context.elevated().MultipleTimes().AndReturn(self.context)
            if self.cell_type != 'api':
                self.compute_api.network_api.deallocate_for_instance(
                    self.context, inst)
            inst.destroy()
            compute_utils.notify_about_instance_usage(
                self.compute_api.notifier, self.context,
                inst, 'delete.end',
                system_metadata=inst.system_metadata)
            inst.info_cache = None
            self.compute_api._local_delete(self.context, inst, [],
                                           'delete',
                                           self._fake_do_delete)
    def test_delete_disabled(self):
        """delete() is a silent no-op when disable_terminate is set."""
        inst = self._create_instance_obj()
        inst.disable_terminate = True
        # Stubbed with no expectations recorded: any DB update would fail
        # mox verification, proving nothing was touched.
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.ReplayAll()
        self.compute_api.delete(self.context, inst)
    def test_delete_soft_rollback(self):
        """A failing instance.save() propagates out of soft_delete()."""
        inst = self._create_instance_obj()
        self.mox.StubOutWithMock(db,
                                 'block_device_mapping_get_all_by_instance')
        self.mox.StubOutWithMock(inst, 'save')
        delete_time = datetime.datetime(1955, 11, 5)
        timeutils.set_time_override(delete_time)
        db.block_device_mapping_get_all_by_instance(
            self.context, inst.uuid, use_slave=False).AndReturn([])
        # save() blowing up mid-soft-delete must not be swallowed.
        inst.save().AndRaise(test.TestingException)
        self.mox.ReplayAll()
        self.assertRaises(test.TestingException,
                          self.compute_api.soft_delete, self.context, inst)
    def _test_confirm_resize(self, mig_ref_passed=False):
        """confirm_resize() commits the downsize quota delta and casts
        confirm_resize to the source compute.

        When ``mig_ref_passed`` is True the migration object is handed in
        by the caller; otherwise confirm_resize() must look up the
        'finished' migration itself.
        """
        params = dict(vm_state=vm_states.RESIZED)
        fake_inst = self._create_instance_obj(params=params)
        fake_mig = objects.Migration._from_db_object(
            self.context, objects.Migration(),
            test_migration.fake_db_migration())
        self.mox.StubOutWithMock(self.context, 'elevated')
        self.mox.StubOutWithMock(objects.Migration,
                                 'get_by_instance_and_status')
        self.mox.StubOutWithMock(self.compute_api, '_downsize_quota_delta')
        self.mox.StubOutWithMock(self.compute_api, '_reserve_quota_delta')
        self.mox.StubOutWithMock(fake_mig, 'save')
        self.mox.StubOutWithMock(self.compute_api, '_record_action_start')
        self.mox.StubOutWithMock(self.compute_api.compute_rpcapi,
                                 'confirm_resize')
        self.context.elevated().AndReturn(self.context)
        if not mig_ref_passed:
            objects.Migration.get_by_instance_and_status(
                self.context, fake_inst['uuid'], 'finished').AndReturn(
                    fake_mig)
        self.compute_api._downsize_quota_delta(self.context,
                                               fake_inst).AndReturn('deltas')
        resvs = ['resvs']
        fake_quotas = objects.Quotas.from_reservations(self.context, resvs)
        self.compute_api._reserve_quota_delta(self.context, 'deltas',
                                              fake_inst).AndReturn(fake_quotas)
        def _check_mig(expected_task_state=None):
            # Migration status must already be 'confirming' when saved.
            self.assertEqual('confirming', fake_mig.status)
        fake_mig.save().WithSideEffects(_check_mig)
        if self.cell_type:
            fake_quotas.commit()
        self.compute_api._record_action_start(self.context, fake_inst,
                                              'confirmResize')
        # In a cells setup the reservations are committed above, so the
        # cast carries an empty reservations list.
        self.compute_api.compute_rpcapi.confirm_resize(
            self.context, fake_inst, fake_mig, 'compute-source',
            [] if self.cell_type else fake_quotas.reservations)
        self.mox.ReplayAll()
        if mig_ref_passed:
            self.compute_api.confirm_resize(self.context, fake_inst,
                                            migration=fake_mig)
        else:
            self.compute_api.confirm_resize(self.context, fake_inst)
    def test_confirm_resize(self):
        # confirm_resize() looks up the 'finished' migration itself.
        self._test_confirm_resize()
    def test_confirm_resize_with_migration_ref(self):
        # confirm_resize() accepts an explicitly passed migration object.
        self._test_confirm_resize(mig_ref_passed=True)
    def _test_revert_resize(self):
        """revert_resize() reserves the reverse upsize quota delta and
        casts revert_resize to the destination compute.
        """
        params = dict(vm_state=vm_states.RESIZED)
        fake_inst = self._create_instance_obj(params=params)
        fake_mig = objects.Migration._from_db_object(
            self.context, objects.Migration(),
            test_migration.fake_db_migration())
        self.mox.StubOutWithMock(self.context, 'elevated')
        self.mox.StubOutWithMock(objects.Migration,
                                 'get_by_instance_and_status')
        self.mox.StubOutWithMock(self.compute_api,
                                 '_reverse_upsize_quota_delta')
        self.mox.StubOutWithMock(self.compute_api, '_reserve_quota_delta')
        self.mox.StubOutWithMock(fake_inst, 'save')
        self.mox.StubOutWithMock(fake_mig, 'save')
        self.mox.StubOutWithMock(self.compute_api, '_record_action_start')
        self.mox.StubOutWithMock(self.compute_api.compute_rpcapi,
                                 'revert_resize')
        self.context.elevated().AndReturn(self.context)
        objects.Migration.get_by_instance_and_status(
            self.context, fake_inst['uuid'], 'finished').AndReturn(
                fake_mig)
        self.compute_api._reverse_upsize_quota_delta(
            self.context, fake_mig).AndReturn('deltas')
        resvs = ['resvs']
        fake_quotas = objects.Quotas.from_reservations(self.context, resvs)
        self.compute_api._reserve_quota_delta(self.context, 'deltas',
                                              fake_inst).AndReturn(fake_quotas)
        def _check_state(expected_task_state=None):
            # Instance must be flagged RESIZE_REVERTING before save().
            self.assertEqual(task_states.RESIZE_REVERTING,
                             fake_inst.task_state)
        fake_inst.save(expected_task_state=[None]).WithSideEffects(
            _check_state)
        def _check_mig(expected_task_state=None):
            # Migration record must be marked 'reverting' before save().
            self.assertEqual('reverting', fake_mig.status)
        fake_mig.save().WithSideEffects(_check_mig)
        if self.cell_type:
            fake_quotas.commit()
        self.compute_api._record_action_start(self.context, fake_inst,
                                              'revertResize')
        # Cells commit the reservations above, so the cast carries [].
        self.compute_api.compute_rpcapi.revert_resize(
            self.context, fake_inst, fake_mig, 'compute-dest',
            [] if self.cell_type else fake_quotas.reservations)
        self.mox.ReplayAll()
        self.compute_api.revert_resize(self.context, fake_inst)
    def test_revert_resize(self):
        # Happy-path revert of a finished resize.
        self._test_revert_resize()
    def test_revert_resize_concurent_fail(self):
        """Quota reservation is rolled back when instance.save() loses a
        race on the expected task state while reverting a resize.

        NOTE(review): 'concurent' looks like a typo for 'concurrent';
        left unchanged since the method name drives test discovery.
        """
        params = dict(vm_state=vm_states.RESIZED)
        fake_inst = self._create_instance_obj(params=params)
        fake_mig = objects.Migration._from_db_object(
            self.context, objects.Migration(),
            test_migration.fake_db_migration())
        self.mox.StubOutWithMock(self.context, 'elevated')
        self.mox.StubOutWithMock(objects.Migration,
                                 'get_by_instance_and_status')
        self.mox.StubOutWithMock(self.compute_api,
                                 '_reverse_upsize_quota_delta')
        self.mox.StubOutWithMock(self.compute_api, '_reserve_quota_delta')
        self.mox.StubOutWithMock(fake_inst, 'save')
        self.context.elevated().AndReturn(self.context)
        objects.Migration.get_by_instance_and_status(
            self.context, fake_inst['uuid'], 'finished').AndReturn(fake_mig)
        delta = ['delta']
        self.compute_api._reverse_upsize_quota_delta(
            self.context, fake_mig).AndReturn(delta)
        resvs = ['resvs']
        fake_quotas = objects.Quotas.from_reservations(self.context, resvs)
        self.compute_api._reserve_quota_delta(
            self.context, delta, fake_inst).AndReturn(fake_quotas)
        # Simulate another request having changed the task state already.
        exc = exception.UnexpectedTaskStateError(
            instance_uuid=fake_inst['uuid'],
            actual={'task_state': task_states.RESIZE_REVERTING},
            expected={'task_state': [None]})
        fake_inst.save(expected_task_state=[None]).AndRaise(exc)
        fake_quotas.rollback()
        self.mox.ReplayAll()
        self.assertRaises(exception.UnexpectedTaskStateError,
                          self.compute_api.revert_resize,
                          self.context,
                          fake_inst)
    def _test_resize(self, flavor_id_passed=True,
                     same_host=False, allow_same_host=False,
                     project_id=None,
                     extra_kwargs=None,
                     same_flavor=False,
                     clean_shutdown=True):
        """Drive resize()/migrate() through the full mox script.

        ``flavor_id_passed`` distinguishes a resize (new flavor supplied)
        from a cold migration (same flavor).  The other flags steer the
        scheduler filter properties, quota expectations and the instance's
        project ownership.
        """
        if extra_kwargs is None:
            extra_kwargs = {}
        self.flags(allow_resize_to_same_host=allow_same_host)
        params = {}
        if project_id is not None:
            # To test instance w/ different project id than context (admin)
            params['project_id'] = project_id
        fake_inst = self._create_instance_obj(params=params)
        self.mox.StubOutWithMock(flavors, 'get_flavor_by_flavor_id')
        self.mox.StubOutWithMock(self.compute_api, '_upsize_quota_delta')
        self.mox.StubOutWithMock(self.compute_api, '_reserve_quota_delta')
        self.mox.StubOutWithMock(fake_inst, 'save')
        self.mox.StubOutWithMock(self.compute_api, '_record_action_start')
        self.mox.StubOutWithMock(self.compute_api.compute_task_api,
                                 'resize_instance')
        current_flavor = fake_inst.get_flavor()
        if flavor_id_passed:
            new_flavor = self._create_flavor(id=200, flavorid='new-flavor-id',
                                             name='new_flavor', disabled=False)
            if same_flavor:
                new_flavor.id = current_flavor.id
            flavors.get_flavor_by_flavor_id(
                'new-flavor-id',
                read_deleted='no').AndReturn(new_flavor)
        else:
            new_flavor = current_flavor
        # Resizing to the same flavor is rejected early (except on the
        # compute cell), so no further expectations apply in that case.
        if (self.cell_type == 'compute' or
                not (flavor_id_passed and same_flavor)):
            resvs = ['resvs']
            project_id, user_id = quotas_obj.ids_from_instance(self.context,
                                                               fake_inst)
            fake_quotas = objects.Quotas.from_reservations(self.context,
                                                           resvs)
            if flavor_id_passed:
                self.compute_api._upsize_quota_delta(
                    self.context, mox.IsA(objects.Flavor),
                    mox.IsA(objects.Flavor)).AndReturn('deltas')
                self.compute_api._reserve_quota_delta(self.context, 'deltas',
                    fake_inst).AndReturn(fake_quotas)
            def _check_state(expected_task_state=None):
                # save() must see the instance prepped for resize.
                self.assertEqual(task_states.RESIZE_PREP,
                                 fake_inst.task_state)
                self.assertEqual(fake_inst.progress, 0)
                for key, value in extra_kwargs.items():
                    self.assertEqual(value, getattr(fake_inst, key))
            fake_inst.save(expected_task_state=[None]).WithSideEffects(
                _check_state)
            if allow_same_host:
                filter_properties = {'ignore_hosts': []}
            else:
                filter_properties = {'ignore_hosts': [fake_inst['host']]}
            if flavor_id_passed:
                expected_reservations = fake_quotas.reservations
            else:
                expected_reservations = []
            if self.cell_type == 'api':
                fake_quotas.commit()
                expected_reservations = []
            mig = objects.Migration()
            def _get_migration(context=None):
                return mig
            def _check_mig():
                # The migration record created by the API must carry the
                # old/new flavor ids and the proper migration_type.
                self.assertEqual(fake_inst.uuid, mig.instance_uuid)
                self.assertEqual(current_flavor.id,
                                 mig.old_instance_type_id)
                self.assertEqual(new_flavor.id,
                                 mig.new_instance_type_id)
                self.assertEqual('finished', mig.status)
                if new_flavor.id != current_flavor.id:
                    self.assertEqual('resize', mig.migration_type)
                else:
                    self.assertEqual('migration', mig.migration_type)
            self.stubs.Set(objects, 'Migration', _get_migration)
            self.mox.StubOutWithMock(self.context, 'elevated')
            self.mox.StubOutWithMock(mig, 'create')
            self.context.elevated().AndReturn(self.context)
            mig.create().WithSideEffects(_check_mig)
            if flavor_id_passed:
                self.compute_api._record_action_start(self.context, fake_inst,
                                                      'resize')
            else:
                self.compute_api._record_action_start(self.context, fake_inst,
                                                      'migrate')
            scheduler_hint = {'filter_properties': filter_properties}
            self.compute_api.compute_task_api.resize_instance(
                self.context, fake_inst, extra_kwargs,
                scheduler_hint=scheduler_hint,
                flavor=mox.IsA(objects.Flavor),
                reservations=expected_reservations,
                clean_shutdown=clean_shutdown)
        self.mox.ReplayAll()
        if flavor_id_passed:
            self.compute_api.resize(self.context, fake_inst,
                                    flavor_id='new-flavor-id',
                                    clean_shutdown=clean_shutdown,
                                    **extra_kwargs)
        else:
            self.compute_api.resize(self.context, fake_inst,
                                    clean_shutdown=clean_shutdown,
                                    **extra_kwargs)
    def _test_migrate(self, *args, **kwargs):
        # Cold migration is resize without a new flavor id.
        self._test_resize(*args, flavor_id_passed=False, **kwargs)
    # Thin wrappers driving _test_resize()/_test_migrate() with the
    # host/flavor/project combinations they exercise.
    def test_resize(self):
        self._test_resize()
    def test_resize_with_kwargs(self):
        self._test_resize(extra_kwargs=dict(cow='moo'))
    def test_resize_same_host_and_allowed(self):
        self._test_resize(same_host=True, allow_same_host=True)
    def test_resize_same_host_and_not_allowed(self):
        self._test_resize(same_host=True, allow_same_host=False)
    def test_resize_different_project_id(self):
        self._test_resize(project_id='different')
    def test_resize_forced_shutdown(self):
        self._test_resize(clean_shutdown=False)
    def test_migrate(self):
        self._test_migrate()
    def test_migrate_with_kwargs(self):
        self._test_migrate(extra_kwargs=dict(cow='moo'))
    def test_migrate_same_host_and_allowed(self):
        self._test_migrate(same_host=True, allow_same_host=True)
    def test_migrate_same_host_and_not_allowed(self):
        self._test_migrate(same_host=True, allow_same_host=False)
    def test_migrate_different_project_id(self):
        self._test_migrate(project_id='different')
def test_resize_invalid_flavor_fails(self):
self.mox.StubOutWithMock(flavors, 'get_flavor_by_flavor_id')
# Should never reach these.
self.mox.StubOutWithMock(self.compute_api, '_reserve_quota_delta')
self.mox.StubOutWithMock(quota.QUOTAS, 'commit')
self.mox.StubOutWithMock(self.compute_api, '_record_action_start')
self.mox.StubOutWithMock(self.compute_api.compute_task_api,
'resize_instance')
fake_inst = self._create_instance_obj()
exc = exception.FlavorNotFound(flavor_id='flavor-id')
flavors.get_flavor_by_flavor_id('flavor-id',
read_deleted='no').AndRaise(exc)
self.mox.ReplayAll()
with mock.patch.object(fake_inst, 'save') as mock_save:
self.assertRaises(exception.FlavorNotFound,
self.compute_api.resize, self.context,
fake_inst, flavor_id='flavor-id')
self.assertFalse(mock_save.called)
def test_resize_disabled_flavor_fails(self):
self.mox.StubOutWithMock(flavors, 'get_flavor_by_flavor_id')
# Should never reach these.
self.mox.StubOutWithMock(self.compute_api, '_reserve_quota_delta')
self.mox.StubOutWithMock(quota.QUOTAS, 'commit')
self.mox.StubOutWithMock(self.compute_api, '_record_action_start')
self.mox.StubOutWithMock(self.compute_api.compute_task_api,
'resize_instance')
fake_inst = self._create_instance_obj()
fake_flavor = self._create_flavor(id=200, flavorid='flavor-id',
name='foo', disabled=True)
flavors.get_flavor_by_flavor_id(
'flavor-id', read_deleted='no').AndReturn(fake_flavor)
self.mox.ReplayAll()
with mock.patch.object(fake_inst, 'save') as mock_save:
self.assertRaises(exception.FlavorNotFound,
self.compute_api.resize, self.context,
fake_inst, flavor_id='flavor-id')
self.assertFalse(mock_save.called)
@mock.patch.object(flavors, 'get_flavor_by_flavor_id')
def test_resize_to_zero_disk_flavor_fails(self, get_flavor_by_flavor_id):
fake_inst = self._create_instance_obj()
fake_flavor = self._create_flavor(id=200, flavorid='flavor-id',
name='foo', root_gb=0)
get_flavor_by_flavor_id.return_value = fake_flavor
self.assertRaises(exception.CannotResizeDisk,
self.compute_api.resize, self.context,
fake_inst, flavor_id='flavor-id')
    def test_resize_quota_exceeds_fails(self):
        """OverQuota from the reservation surfaces as TooManyInstances
        and the instance is never saved.
        """
        self.mox.StubOutWithMock(flavors, 'get_flavor_by_flavor_id')
        self.mox.StubOutWithMock(self.compute_api, '_upsize_quota_delta')
        self.mox.StubOutWithMock(self.compute_api, '_reserve_quota_delta')
        # Should never reach these.
        self.mox.StubOutWithMock(quota.QUOTAS, 'commit')
        self.mox.StubOutWithMock(self.compute_api, '_record_action_start')
        self.mox.StubOutWithMock(self.compute_api.compute_task_api,
                                 'resize_instance')
        fake_inst = self._create_instance_obj()
        fake_flavor = self._create_flavor(id=200, flavorid='flavor-id',
                                          name='foo', disabled=False)
        flavors.get_flavor_by_flavor_id(
            'flavor-id', read_deleted='no').AndReturn(fake_flavor)
        deltas = dict(resource=0)
        self.compute_api._upsize_quota_delta(
            self.context, mox.IsA(objects.Flavor),
            mox.IsA(objects.Flavor)).AndReturn(deltas)
        # Build the OverQuota payload the quota driver would raise with.
        usage = dict(in_use=0, reserved=0)
        quotas = {'resource': 0}
        usages = {'resource': usage}
        overs = ['resource']
        over_quota_args = dict(quotas=quotas,
                               usages=usages,
                               overs=overs)
        self.compute_api._reserve_quota_delta(self.context, deltas,
                                              fake_inst).AndRaise(
            exception.OverQuota(**over_quota_args))
        self.mox.ReplayAll()
        with mock.patch.object(fake_inst, 'save') as mock_save:
            self.assertRaises(exception.TooManyInstances,
                              self.compute_api.resize, self.context,
                              fake_inst, flavor_id='flavor-id')
            self.assertFalse(mock_save.called)
    def test_pause(self):
        """pause() flips the task state to PAUSING, records the action
        and casts pause_instance to the right rpcapi for the cell type.
        """
        # Ensure instance can be paused.
        instance = self._create_instance_obj()
        self.assertEqual(instance.vm_state, vm_states.ACTIVE)
        self.assertIsNone(instance.task_state)
        self.mox.StubOutWithMock(instance, 'save')
        self.mox.StubOutWithMock(self.compute_api,
                                 '_record_action_start')
        if self.cell_type == 'api':
            rpcapi = self.compute_api.cells_rpcapi
        else:
            rpcapi = self.compute_api.compute_rpcapi
        self.mox.StubOutWithMock(rpcapi, 'pause_instance')
        instance.save(expected_task_state=[None])
        self.compute_api._record_action_start(self.context,
                                              instance,
                                              instance_actions.PAUSE)
        rpcapi.pause_instance(self.context, instance)
        self.mox.ReplayAll()
        self.compute_api.pause(self.context, instance)
        # vm_state is untouched until the compute manager acts; only the
        # task state changes at the API layer.
        self.assertEqual(vm_states.ACTIVE, instance.vm_state)
        self.assertEqual(task_states.PAUSING,
                         instance.task_state)
def _test_pause_fails(self, vm_state):
params = dict(vm_state=vm_state)
instance = self._create_instance_obj(params=params)
self.assertIsNone(instance.task_state)
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.pause,
self.context, instance)
def test_pause_fails_invalid_states(self):
invalid_vm_states = self._get_vm_states(set([vm_states.ACTIVE]))
for state in invalid_vm_states:
self._test_pause_fails(state)
    def test_unpause(self):
        """unpause() flips the task state to UNPAUSING, records the action
        and casts unpause_instance to the right rpcapi for the cell type.
        """
        # Ensure instance can be unpaused.
        params = dict(vm_state=vm_states.PAUSED)
        instance = self._create_instance_obj(params=params)
        self.assertEqual(instance.vm_state, vm_states.PAUSED)
        self.assertIsNone(instance.task_state)
        self.mox.StubOutWithMock(instance, 'save')
        self.mox.StubOutWithMock(self.compute_api,
                                 '_record_action_start')
        if self.cell_type == 'api':
            rpcapi = self.compute_api.cells_rpcapi
        else:
            rpcapi = self.compute_api.compute_rpcapi
        self.mox.StubOutWithMock(rpcapi, 'unpause_instance')
        instance.save(expected_task_state=[None])
        self.compute_api._record_action_start(self.context,
                                              instance,
                                              instance_actions.UNPAUSE)
        rpcapi.unpause_instance(self.context, instance)
        self.mox.ReplayAll()
        self.compute_api.unpause(self.context, instance)
        # vm_state stays PAUSED until the compute manager completes.
        self.assertEqual(vm_states.PAUSED, instance.vm_state)
        self.assertEqual(task_states.UNPAUSING, instance.task_state)
def test_live_migrate_active_vm_state(self):
instance = self._create_instance_obj()
self._live_migrate_instance(instance)
def test_live_migrate_paused_vm_state(self):
paused_state = dict(vm_state=vm_states.PAUSED)
instance = self._create_instance_obj(params=paused_state)
self._live_migrate_instance(instance)
    @mock.patch.object(objects.Instance, 'save')
    @mock.patch.object(objects.InstanceAction, 'action_start')
    def _live_migrate_instance(self, instance, _save, _action):
        """live_migrate() sets MIGRATING and forwards to the proper task
        API (cells rpcapi in the API cell, conductor otherwise).
        """
        # TODO(gilliard): This logic is upside-down (different
        # behaviour depending on which class this method is mixed-into. Once
        # we have cellsv2 we can remove this kind of logic from this test
        if self.cell_type == 'api':
            api = self.compute_api.cells_rpcapi
        else:
            api = conductor.api.ComputeTaskAPI
        with mock.patch.object(api, 'live_migrate_instance') as task:
            self.compute_api.live_migrate(self.context, instance,
                                          block_migration=True,
                                          disk_over_commit=True,
                                          host_name='fake_dest_host')
            self.assertEqual(task_states.MIGRATING, instance.task_state)
            task.assert_called_once_with(self.context, instance,
                                         'fake_dest_host',
                                         block_migration=True,
                                         disk_over_commit=True)
    def test_swap_volume_volume_api_usage(self):
        """Walk swap_volume() through each validation failure and verify
        the fake cinder state machine is always rolled back.
        """
        # This test ensures that volume_id arguments are passed to volume_api
        # and that volumes return to previous states in case of error.
        def fake_vol_api_begin_detaching(context, volume_id):
            self.assertTrue(uuidutils.is_uuid_like(volume_id))
            volumes[volume_id]['status'] = 'detaching'
        def fake_vol_api_roll_detaching(context, volume_id):
            self.assertTrue(uuidutils.is_uuid_like(volume_id))
            if volumes[volume_id]['status'] == 'detaching':
                volumes[volume_id]['status'] = 'in-use'
        def fake_vol_api_reserve(context, volume_id):
            self.assertTrue(uuidutils.is_uuid_like(volume_id))
            self.assertEqual(volumes[volume_id]['status'], 'available')
            volumes[volume_id]['status'] = 'attaching'
        def fake_vol_api_unreserve(context, volume_id):
            self.assertTrue(uuidutils.is_uuid_like(volume_id))
            if volumes[volume_id]['status'] == 'attaching':
                volumes[volume_id]['status'] = 'available'
        def fake_swap_volume_exc(context, instance, old_volume_id,
                                 new_volume_id):
            raise AttributeError  # Random exception
        # Should fail if VM state is not valid
        instance = fake_instance.fake_instance_obj(None, **{
                   'vm_state': vm_states.BUILDING,
                   'launched_at': timeutils.utcnow(),
                   'locked': False,
                   'availability_zone': 'fake_az',
                   'uuid': 'fake'})
        volumes = {}
        old_volume_id = uuidutils.generate_uuid()
        volumes[old_volume_id] = {'id': old_volume_id,
                                  'display_name': 'old_volume',
                                  'attach_status': 'attached',
                                  'instance_uuid': 'fake',
                                  'size': 5,
                                  'status': 'in-use'}
        new_volume_id = uuidutils.generate_uuid()
        volumes[new_volume_id] = {'id': new_volume_id,
                                  'display_name': 'new_volume',
                                  'attach_status': 'detached',
                                  'instance_uuid': None,
                                  'size': 5,
                                  'status': 'available'}
        self.assertRaises(exception.InstanceInvalidState,
                          self.compute_api.swap_volume, self.context, instance,
                          volumes[old_volume_id], volumes[new_volume_id])
        instance['vm_state'] = vm_states.ACTIVE
        instance['task_state'] = None
        # Should fail if old volume is not attached
        volumes[old_volume_id]['attach_status'] = 'detached'
        self.assertRaises(exception.VolumeUnattached,
                          self.compute_api.swap_volume, self.context, instance,
                          volumes[old_volume_id], volumes[new_volume_id])
        self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
        self.assertEqual(volumes[new_volume_id]['status'], 'available')
        volumes[old_volume_id]['attach_status'] = 'attached'
        # Should fail if old volume's instance_uuid is not that of the instance
        volumes[old_volume_id]['instance_uuid'] = 'fake2'
        self.assertRaises(exception.InvalidVolume,
                          self.compute_api.swap_volume, self.context, instance,
                          volumes[old_volume_id], volumes[new_volume_id])
        self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
        self.assertEqual(volumes[new_volume_id]['status'], 'available')
        volumes[old_volume_id]['instance_uuid'] = 'fake'
        # Should fail if new volume is attached
        volumes[new_volume_id]['attach_status'] = 'attached'
        self.assertRaises(exception.InvalidVolume,
                          self.compute_api.swap_volume, self.context, instance,
                          volumes[old_volume_id], volumes[new_volume_id])
        self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
        self.assertEqual(volumes[new_volume_id]['status'], 'available')
        volumes[new_volume_id]['attach_status'] = 'detached'
        # Should fail if new volume is smaller than the old volume
        volumes[new_volume_id]['size'] = 4
        self.assertRaises(exception.InvalidVolume,
                          self.compute_api.swap_volume, self.context, instance,
                          volumes[old_volume_id], volumes[new_volume_id])
        self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
        self.assertEqual(volumes[new_volume_id]['status'], 'available')
        volumes[new_volume_id]['size'] = 5
        # Fail call to swap_volume
        self.stubs.Set(self.compute_api.volume_api, 'begin_detaching',
                       fake_vol_api_begin_detaching)
        self.stubs.Set(self.compute_api.volume_api, 'roll_detaching',
                       fake_vol_api_roll_detaching)
        self.stubs.Set(self.compute_api.volume_api, 'reserve_volume',
                       fake_vol_api_reserve)
        self.stubs.Set(self.compute_api.volume_api, 'unreserve_volume',
                       fake_vol_api_unreserve)
        self.stubs.Set(self.compute_api.compute_rpcapi, 'swap_volume',
                       fake_swap_volume_exc)
        self.assertRaises(AttributeError,
                          self.compute_api.swap_volume, self.context, instance,
                          volumes[old_volume_id], volumes[new_volume_id])
        # The RPC failure must have rolled both volumes back.
        self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
        self.assertEqual(volumes[new_volume_id]['status'], 'available')
        # Should succeed
        self.stubs.Set(self.compute_api.compute_rpcapi, 'swap_volume',
                       lambda c, instance, old_volume_id, new_volume_id: True)
        self.compute_api.swap_volume(self.context, instance,
                                     volumes[old_volume_id],
                                     volumes[new_volume_id])
    def _test_snapshot_and_backup(self, is_snapshot=True,
                                  with_base_ref=False, min_ram=None,
                                  min_disk=None,
                                  create_fails=False,
                                  instance_vm_state=vm_states.ACTIVE):
        """Common driver for the snapshot/backup API tests.

        Records the expected image-service create and compute RPC calls
        with mox, invokes compute_api.snapshot() or backup(), and checks
        the image metadata sent, the task state set on the instance, and
        (when create_fails) that the image-service error propagates.

        NOTE(review): the with_base_ref parameter is accepted but never
        referenced in this body -- confirm whether base-image-ref
        assertions were intended here.
        """
        params = dict(locked=True)
        instance = self._create_instance_obj(params=params)
        instance.vm_state = instance_vm_state
        # 'cache_in_nova' is for testing non-inheritable properties
        # 'user_id' should also not be carried from sys_meta into
        # image property...since it should be set explicitly by
        # _create_image() in compute api.
        fake_image_meta = {
            'is_public': True,
            'name': 'base-name',
            'properties': {
                'user_id': 'meow',
                'foo': 'bar',
                'blah': 'bug?',
                'cache_in_nova': 'dropped',
            },
        }
        image_type = is_snapshot and 'snapshot' or 'backup'
        # Metadata the compute API is expected to send to the image
        # service: inheritable props carried over, plus the extra_props
        # below, minus 'user_id' and 'cache_in_nova'.
        sent_meta = {
            'is_public': False,
            'name': 'fake-name',
            'properties': {
                'user_id': self.context.user_id,
                'instance_uuid': instance.uuid,
                'image_type': image_type,
                'foo': 'bar',
                'blah': 'bug?',
                'cow': 'moo',
                'cat': 'meow',
            },
        }
        if is_snapshot:
            if min_ram is not None:
                fake_image_meta['min_ram'] = min_ram
                sent_meta['min_ram'] = min_ram
            if min_disk is not None:
                fake_image_meta['min_disk'] = min_disk
                sent_meta['min_disk'] = min_disk
        else:
            sent_meta['properties']['backup_type'] = 'fake-backup-type'
        extra_props = dict(cow='moo', cat='meow')
        self.mox.StubOutWithMock(utils, 'get_image_from_system_metadata')
        self.mox.StubOutWithMock(self.compute_api.image_api,
                                 'create')
        self.mox.StubOutWithMock(instance, 'save')
        self.mox.StubOutWithMock(self.compute_api.compute_rpcapi,
                                 'snapshot_instance')
        self.mox.StubOutWithMock(self.compute_api.compute_rpcapi,
                                 'backup_instance')
        if not is_snapshot:
            # backup() additionally rejects volume-backed instances, so
            # that check must be expected (and answer False) first.
            self.mox.StubOutWithMock(self.compute_api,
                                     'is_volume_backed_instance')
            self.compute_api.is_volume_backed_instance(self.context,
                instance).AndReturn(False)
        utils.get_image_from_system_metadata(
            instance.system_metadata).AndReturn(fake_image_meta)
        fake_image = dict(id='fake-image-id')
        mock_method = self.compute_api.image_api.create(
                self.context, sent_meta)
        if create_fails:
            mock_method.AndRaise(test.TestingException())
        else:
            mock_method.AndReturn(fake_image)
        def check_state(expected_task_state=None):
            # Verify the task state set by the API at the moment
            # instance.save() is called.
            expected_state = (is_snapshot and
                              task_states.IMAGE_SNAPSHOT_PENDING or
                              task_states.IMAGE_BACKUP)
            self.assertEqual(expected_state, instance.task_state)
        if not create_fails:
            instance.save(expected_task_state=[None]).WithSideEffects(
                check_state)
            if is_snapshot:
                self.compute_api.compute_rpcapi.snapshot_instance(
                        self.context, instance, fake_image['id'])
            else:
                self.compute_api.compute_rpcapi.backup_instance(
                        self.context, instance, fake_image['id'],
                        'fake-backup-type', 'fake-rotation')
        self.mox.ReplayAll()
        got_exc = False
        try:
            if is_snapshot:
                res = self.compute_api.snapshot(self.context, instance,
                                                'fake-name',
                                                extra_properties=extra_props)
            else:
                res = self.compute_api.backup(self.context, instance,
                                              'fake-name',
                                              'fake-backup-type',
                                              'fake-rotation',
                                              extra_properties=extra_props)
            self.assertEqual(fake_image, res)
        except test.TestingException:
            got_exc = True
        self.assertEqual(create_fails, got_exc)
        # Explicit unset because this helper runs multiple scenarios per
        # test method (e.g. test_backup loops over vm_states).
        self.mox.UnsetStubs()
def test_snapshot(self):
self._test_snapshot_and_backup()
def test_snapshot_fails(self):
self._test_snapshot_and_backup(create_fails=True)
def test_snapshot_invalid_state(self):
instance = self._create_instance_obj()
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.snapshot,
self.context, instance, 'fake-name')
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_BACKUP
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.snapshot,
self.context, instance, 'fake-name')
instance.vm_state = vm_states.BUILDING
instance.task_state = None
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.snapshot,
self.context, instance, 'fake-name')
def test_snapshot_with_base_image_ref(self):
self._test_snapshot_and_backup(with_base_ref=True)
def test_snapshot_min_ram(self):
self._test_snapshot_and_backup(min_ram=42)
def test_snapshot_min_disk(self):
self._test_snapshot_and_backup(min_disk=42)
def test_backup(self):
for state in [vm_states.ACTIVE, vm_states.STOPPED,
vm_states.PAUSED, vm_states.SUSPENDED]:
self._test_snapshot_and_backup(is_snapshot=False,
instance_vm_state=state)
def test_backup_fails(self):
self._test_snapshot_and_backup(is_snapshot=False, create_fails=True)
def test_backup_invalid_state(self):
instance = self._create_instance_obj()
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.backup,
self.context, instance, 'fake-name',
'fake', 'fake')
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_BACKUP
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.backup,
self.context, instance, 'fake-name',
'fake', 'fake')
instance.vm_state = vm_states.BUILDING
instance.task_state = None
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.backup,
self.context, instance, 'fake-name',
'fake', 'fake')
def test_backup_with_base_image_ref(self):
self._test_snapshot_and_backup(is_snapshot=False,
with_base_ref=True)
def test_backup_volume_backed_instance(self):
instance = self._create_instance_obj()
with mock.patch.object(self.compute_api,
'is_volume_backed_instance',
return_value=True) as mock_is_volume_backed:
self.assertRaises(exception.InvalidRequest,
self.compute_api.backup, self.context,
instance, 'fake-name', 'weekly',
3, extra_properties={})
mock_is_volume_backed.assert_called_once_with(self.context,
instance)
    def _test_snapshot_volume_backed(self, quiesce_required, quiesce_fails,
                                     vm_state=vm_states.ACTIVE):
        """Exercise snapshot_volume_backed() in three passes.

        Pass 1: no block devices defined.  Pass 2: one volume BDM, which
        must appear in the image's block_device_mapping property as a
        snapshot entry.  Pass 3: image-supplied mappings, of which only
        the 'ami' one should survive into the new image.  quiesce_required
        / quiesce_fails control whether the guest quiesce RPC is expected
        and whether it raises; quiesce is only attempted for ACTIVE
        instances.
        """
        params = dict(locked=True, vm_state=vm_state)
        instance = self._create_instance_obj(params=params)
        instance['root_device_name'] = 'vda'
        instance_bdms = []
        image_meta = {
            'id': 'fake-image-id',
            'properties': {'mappings': []},
            'status': 'fake-status',
            'location': 'far-away',
            'owner': 'fake-tenant',
        }
        # What the image-service create stub must receive.
        expect_meta = {
            'name': 'test-snapshot',
            'properties': {'root_device_name': 'vda',
                           'mappings': 'DONTCARE'},
            'size': 0,
            'is_public': False
        }
        # quiesced[0] / quiesced[1] record whether quiesce / unquiesce ran.
        quiesced = [False, False]
        quiesce_expected = not quiesce_fails and vm_state == vm_states.ACTIVE
        if quiesce_required:
            image_meta['properties']['os_require_quiesce'] = 'yes'
            expect_meta['properties']['os_require_quiesce'] = 'yes'
        def fake_get_all_by_instance(context, instance, use_slave=False):
            # Deep copy so the API cannot mutate our fixture list.
            return copy.deepcopy(instance_bdms)
        def fake_image_create(context, image_meta, data=None):
            self.assertThat(image_meta, matchers.DictMatches(expect_meta))
        def fake_volume_get(context, volume_id):
            return {'id': volume_id, 'display_description': ''}
        def fake_volume_create_snapshot(context, volume_id, name, description):
            return {'id': '%s-snapshot' % volume_id}
        def fake_quiesce_instance(context, instance):
            if quiesce_fails:
                raise exception.InstanceQuiesceNotSupported(
                    instance_id=instance['uuid'], reason='test')
            quiesced[0] = True
        def fake_unquiesce_instance(context, instance, mapping=None):
            quiesced[1] = True
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_get_all_by_instance)
        self.stubs.Set(self.compute_api.image_api, 'create',
                       fake_image_create)
        self.stubs.Set(self.compute_api.volume_api, 'get',
                       fake_volume_get)
        self.stubs.Set(self.compute_api.volume_api, 'create_snapshot_force',
                       fake_volume_create_snapshot)
        self.stubs.Set(self.compute_api.compute_rpcapi, 'quiesce_instance',
                       fake_quiesce_instance)
        self.stubs.Set(self.compute_api.compute_rpcapi, 'unquiesce_instance',
                       fake_unquiesce_instance)
        # No block devices defined
        self.compute_api.snapshot_volume_backed(
            self.context, instance, copy.deepcopy(image_meta), 'test-snapshot')
        bdm = fake_block_device.FakeDbBlockDeviceDict(
                {'no_device': False, 'volume_id': '1', 'boot_index': 0,
                 'connection_info': 'inf', 'device_name': '/dev/vda',
                 'source_type': 'volume', 'destination_type': 'volume'})
        instance_bdms.append(bdm)
        expect_meta['properties']['bdm_v2'] = True
        expect_meta['properties']['block_device_mapping'] = []
        expect_meta['properties']['block_device_mapping'].append(
            {'guest_format': None, 'boot_index': 0, 'no_device': None,
             'image_id': None, 'volume_id': None, 'disk_bus': None,
             'volume_size': None, 'source_type': 'snapshot',
             'device_type': None, 'snapshot_id': '1-snapshot',
             'device_name': '/dev/vda',
             'destination_type': 'volume', 'delete_on_termination': False})
        # All the db_only fields and the volume ones are removed
        self.compute_api.snapshot_volume_backed(
            self.context, instance, copy.deepcopy(image_meta), 'test-snapshot')
        self.assertEqual(quiesce_expected, quiesced[0])
        self.assertEqual(quiesce_expected, quiesced[1])
        image_mappings = [{'virtual': 'ami', 'device': 'vda'},
                          {'device': 'vda', 'virtual': 'ephemeral0'},
                          {'device': 'vdb', 'virtual': 'swap'},
                          {'device': 'vdc', 'virtual': 'ephemeral1'}]
        image_meta['properties']['mappings'] = image_mappings
        expect_meta['properties']['mappings'] = [
            {'virtual': 'ami', 'device': 'vda'}]
        # Reset the flags for the third pass.
        quiesced = [False, False]
        # Check that the mappings from the image properties are included
        self.compute_api.snapshot_volume_backed(
            self.context, instance, copy.deepcopy(image_meta), 'test-snapshot')
        self.assertEqual(quiesce_expected, quiesced[0])
        self.assertEqual(quiesce_expected, quiesced[1])
    def test_snapshot_volume_backed(self):
        """Plain volume-backed snapshot: no quiesce involved."""
        self._test_snapshot_volume_backed(False, False)

    def test_snapshot_volume_backed_with_quiesce(self):
        """Quiesce is required by the image and succeeds."""
        self._test_snapshot_volume_backed(True, False)

    def test_snapshot_volume_backed_with_quiesce_skipped(self):
        """Quiesce unsupported but not required: snapshot still works."""
        self._test_snapshot_volume_backed(False, True)

    def test_snapshot_volume_backed_with_quiesce_exception(self):
        """Quiesce required but unsupported: the snapshot fails."""
        self.assertRaises(exception.NovaException,
                          self._test_snapshot_volume_backed, True, True)

    def test_snapshot_volume_backed_with_quiesce_stopped(self):
        """Quiesce failure is ignored for a STOPPED instance."""
        self._test_snapshot_volume_backed(True, True,
                                          vm_state=vm_states.STOPPED)
    def test_volume_snapshot_create(self):
        """volume_snapshot_create looks up the BDM owner and RPCs the
        create to that instance's compute, returning a snapshot dict.
        """
        volume_id = '1'
        create_info = {'id': 'eyedee'}
        fake_bdm = fake_block_device.FakeDbBlockDeviceDict({
                'id': 123,
                'device_name': '/dev/sda2',
                'source_type': 'volume',
                'destination_type': 'volume',
                'connection_info': "{'fake': 'connection_info'}",
                'volume_id': 1,
                'boot_index': -1})
        fake_bdm['instance'] = fake_instance.fake_db_instance()
        fake_bdm['instance_uuid'] = fake_bdm['instance']['uuid']
        fake_bdm = objects.BlockDeviceMapping._from_db_object(
                self.context, objects.BlockDeviceMapping(),
                fake_bdm, expected_attrs=['instance'])
        self.mox.StubOutWithMock(objects.BlockDeviceMapping,
                                 'get_by_volume_id')
        self.mox.StubOutWithMock(self.compute_api.compute_rpcapi,
                'volume_snapshot_create')
        # Expected call order: BDM lookup by volume, then the RPC cast.
        objects.BlockDeviceMapping.get_by_volume_id(
                self.context, volume_id,
                expected_attrs=['instance']).AndReturn(fake_bdm)
        self.compute_api.compute_rpcapi.volume_snapshot_create(self.context,
                fake_bdm['instance'], volume_id, create_info)
        self.mox.ReplayAll()
        snapshot = self.compute_api.volume_snapshot_create(self.context,
                volume_id, create_info)
        expected_snapshot = {
            'snapshot': {
                'id': create_info['id'],
                'volumeId': volume_id,
            },
        }
        self.assertEqual(snapshot, expected_snapshot)
    def test_volume_snapshot_delete(self):
        """volume_snapshot_delete looks up the BDM owner and RPCs the
        delete to that instance's compute host.
        """
        volume_id = '1'
        snapshot_id = '2'
        fake_bdm = fake_block_device.FakeDbBlockDeviceDict({
                'id': 123,
                'device_name': '/dev/sda2',
                'source_type': 'volume',
                'destination_type': 'volume',
                'connection_info': "{'fake': 'connection_info'}",
                'volume_id': 1,
                'boot_index': -1})
        fake_bdm['instance'] = fake_instance.fake_db_instance()
        fake_bdm['instance_uuid'] = fake_bdm['instance']['uuid']
        fake_bdm = objects.BlockDeviceMapping._from_db_object(
                self.context, objects.BlockDeviceMapping(),
                fake_bdm, expected_attrs=['instance'])
        self.mox.StubOutWithMock(objects.BlockDeviceMapping,
                                 'get_by_volume_id')
        self.mox.StubOutWithMock(self.compute_api.compute_rpcapi,
                'volume_snapshot_delete')
        # Expected call order: BDM lookup by volume, then the RPC cast.
        objects.BlockDeviceMapping.get_by_volume_id(
                self.context, volume_id,
                expected_attrs=['instance']).AndReturn(fake_bdm)
        self.compute_api.compute_rpcapi.volume_snapshot_delete(self.context,
                fake_bdm['instance'], volume_id, snapshot_id, {})
        self.mox.ReplayAll()
        self.compute_api.volume_snapshot_delete(self.context, volume_id,
                snapshot_id, {})
def _test_boot_volume_bootable(self, is_bootable=False):
def get_vol_data(*args, **kwargs):
return {'bootable': is_bootable}
block_device_mapping = [{
'id': 1,
'device_name': 'vda',
'no_device': None,
'virtual_name': None,
'snapshot_id': None,
'volume_id': '1',
'delete_on_termination': False,
}]
expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},
'size': 0, 'status': 'active'}
with mock.patch.object(self.compute_api.volume_api, 'get',
side_effect=get_vol_data):
if not is_bootable:
self.assertRaises(exception.InvalidBDMVolumeNotBootable,
self.compute_api._get_bdm_image_metadata,
self.context, block_device_mapping)
else:
meta = self.compute_api._get_bdm_image_metadata(self.context,
block_device_mapping)
self.assertEqual(expected_meta, meta)
def test_boot_volume_non_bootable(self):
self._test_boot_volume_bootable(False)
def test_boot_volume_bootable(self):
self._test_boot_volume_bootable(True)
def test_boot_volume_basic_property(self):
block_device_mapping = [{
'id': 1,
'device_name': 'vda',
'no_device': None,
'virtual_name': None,
'snapshot_id': None,
'volume_id': '1',
'delete_on_termination': False,
}]
fake_volume = {"volume_image_metadata":
{"min_ram": 256, "min_disk": 128, "foo": "bar"}}
with mock.patch.object(self.compute_api.volume_api, 'get',
return_value=fake_volume):
meta = self.compute_api._get_bdm_image_metadata(
self.context, block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
    def test_boot_volume_snapshot_basic_property(self):
        """When the BDM references a snapshot, the metadata is resolved
        via get_snapshot() -> volume -> volume_image_metadata."""
        block_device_mapping = [{
            'id': 1,
            'device_name': 'vda',
            'no_device': None,
            'virtual_name': None,
            'snapshot_id': '2',
            'volume_id': None,
            'delete_on_termination': False,
        }]
        fake_volume = {"volume_image_metadata":
                       {"min_ram": 256, "min_disk": 128, "foo": "bar"}}
        fake_snapshot = {"volume_id": "1"}
        with contextlib.nested(
            mock.patch.object(self.compute_api.volume_api, 'get',
                              return_value=fake_volume),
            mock.patch.object(self.compute_api.volume_api, 'get_snapshot',
                              return_value=fake_snapshot)) as (
                volume_get, volume_get_snapshot):
            meta = self.compute_api._get_bdm_image_metadata(
                self.context, block_device_mapping)
            self.assertEqual(256, meta['min_ram'])
            self.assertEqual(128, meta['min_disk'])
            self.assertEqual('active', meta['status'])
            self.assertEqual('bar', meta['properties']['foo'])
            # The snapshot is resolved first, then its volume fetched.
            volume_get_snapshot.assert_called_once_with(self.context,
                block_device_mapping[0]['snapshot_id'])
            volume_get.assert_called_once_with(self.context,
                fake_snapshot['volume_id'])
def _create_instance_with_disabled_disk_config(self, object=False):
sys_meta = {"image_auto_disk_config": "Disabled"}
params = {"system_metadata": sys_meta}
instance = self._create_instance_obj(params=params)
if object:
return instance
return obj_base.obj_to_primitive(instance)
def _setup_fake_image_with_disabled_disk_config(self):
self.fake_image = {
'id': 1,
'name': 'fake_name',
'status': 'active',
'properties': {"auto_disk_config": "Disabled"},
}
def fake_show(obj, context, image_id, **kwargs):
return self.fake_image
fake_image.stub_out_image_service(self.stubs)
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
return self.fake_image['id']
def test_resize_with_disabled_auto_disk_config_fails(self):
fake_inst = self._create_instance_with_disabled_disk_config(
object=True)
self.assertRaises(exception.AutoDiskConfigDisabledByImage,
self.compute_api.resize,
self.context, fake_inst,
auto_disk_config=True)
def test_create_with_disabled_auto_disk_config_fails(self):
image_id = self._setup_fake_image_with_disabled_disk_config()
self.assertRaises(exception.AutoDiskConfigDisabledByImage,
self.compute_api.create, self.context,
"fake_flavor", image_id, auto_disk_config=True)
def test_rebuild_with_disabled_auto_disk_config_fails(self):
fake_inst = self._create_instance_with_disabled_disk_config(
object=True)
image_id = self._setup_fake_image_with_disabled_disk_config()
self.assertRaises(exception.AutoDiskConfigDisabledByImage,
self.compute_api.rebuild,
self.context,
fake_inst,
image_id,
"new password",
auto_disk_config=True)
    @mock.patch.object(objects.Instance, 'save')
    @mock.patch.object(objects.Instance, 'get_flavor')
    @mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
    @mock.patch.object(compute_api.API, '_get_image')
    @mock.patch.object(compute_api.API, '_check_auto_disk_config')
    @mock.patch.object(compute_api.API, '_checks_for_create_and_rebuild')
    @mock.patch.object(compute_api.API, '_record_action_start')
    def test_rebuild(self, _record_action_start,
            _checks_for_create_and_rebuild, _check_auto_disk_config,
            _get_image, bdm_get_by_instance_uuid, get_flavor, instance_save):
        """Rebuild with the same image delegates to the conductor's
        rebuild_instance with the expected arguments and refreshes the
        instance's system metadata.
        """
        # Mock args map to decorators bottom-up: _record_action_start is
        # the innermost decorator and therefore the first mock argument.
        orig_system_metadata = {}
        instance = fake_instance.fake_instance_obj(self.context,
                vm_state=vm_states.ACTIVE, cell_name='fake-cell',
                launched_at=timeutils.utcnow(),
                system_metadata=orig_system_metadata,
                expected_attrs=['system_metadata'])
        get_flavor.return_value = test_flavor.fake_flavor
        flavor = instance.get_flavor()
        image_href = ''
        image = {"min_ram": 10, "min_disk": 1,
                 "properties": {'architecture': arch.X86_64}}
        admin_pass = ''
        files_to_inject = []
        bdms = []
        _get_image.return_value = (None, image)
        bdm_get_by_instance_uuid.return_value = bdms
        with mock.patch.object(self.compute_api.compute_task_api,
                'rebuild_instance') as rebuild_instance:
            self.compute_api.rebuild(self.context, instance, image_href,
                    admin_pass, files_to_inject)
            rebuild_instance.assert_called_once_with(self.context,
                    instance=instance, new_pass=admin_pass,
                    injected_files=files_to_inject, image_ref=image_href,
                    orig_image_ref=image_href,
                    orig_sys_metadata=orig_system_metadata, bdms=bdms,
                    preserve_ephemeral=False, host=instance.host, kwargs={})
        _check_auto_disk_config.assert_called_once_with(image=image)
        _checks_for_create_and_rebuild.assert_called_once_with(self.context,
                None, image, flavor, {}, [])
        # Rebuild repopulates system_metadata from the image.
        self.assertNotEqual(orig_system_metadata, instance.system_metadata)
    @mock.patch.object(objects.Instance, 'save')
    @mock.patch.object(objects.Instance, 'get_flavor')
    @mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
    @mock.patch.object(compute_api.API, '_get_image')
    @mock.patch.object(compute_api.API, '_check_auto_disk_config')
    @mock.patch.object(compute_api.API, '_checks_for_create_and_rebuild')
    @mock.patch.object(compute_api.API, '_record_action_start')
    def test_rebuild_change_image(self, _record_action_start,
            _checks_for_create_and_rebuild, _check_auto_disk_config,
            _get_image, bdm_get_by_instance_uuid, get_flavor, instance_save):
        """Rebuild onto a different image passes both old and new image
        refs to the conductor and picks up image properties (vm_mode)
        from the new image.
        """
        orig_system_metadata = {}
        get_flavor.return_value = test_flavor.fake_flavor
        orig_image_href = 'orig_image'
        orig_image = {"min_ram": 10, "min_disk": 1,
                      "properties": {'architecture': arch.X86_64,
                                     'vm_mode': 'hvm'}}
        new_image_href = 'new_image'
        new_image = {"min_ram": 10, "min_disk": 1,
                     "properties": {'architecture': arch.X86_64,
                                    'vm_mode': 'xen'}}
        admin_pass = ''
        files_to_inject = []
        bdms = []
        instance = fake_instance.fake_instance_obj(self.context,
                vm_state=vm_states.ACTIVE, cell_name='fake-cell',
                launched_at=timeutils.utcnow(),
                system_metadata=orig_system_metadata,
                expected_attrs=['system_metadata'],
                image_ref=orig_image_href,
                vm_mode=vm_mode.HVM)
        flavor = instance.get_flavor()
        def get_image(context, image_href):
            # _get_image is called for both the old and the new image ref.
            if image_href == new_image_href:
                return (None, new_image)
            if image_href == orig_image_href:
                return (None, orig_image)
        _get_image.side_effect = get_image
        bdm_get_by_instance_uuid.return_value = bdms
        with mock.patch.object(self.compute_api.compute_task_api,
                'rebuild_instance') as rebuild_instance:
            self.compute_api.rebuild(self.context, instance, new_image_href,
                    admin_pass, files_to_inject)
            rebuild_instance.assert_called_once_with(self.context,
                    instance=instance, new_pass=admin_pass,
                    injected_files=files_to_inject, image_ref=new_image_href,
                    orig_image_ref=orig_image_href,
                    orig_sys_metadata=orig_system_metadata, bdms=bdms,
                    preserve_ephemeral=False, host=instance.host, kwargs={})
        _check_auto_disk_config.assert_called_once_with(image=new_image)
        _checks_for_create_and_rebuild.assert_called_once_with(self.context,
                None, new_image, flavor, {}, [])
        # vm_mode must be refreshed from the new image's properties.
        self.assertEqual(vm_mode.XEN, instance.vm_mode)
def _test_check_injected_file_quota_onset_file_limit_exceeded(self,
side_effect):
injected_files = [
{
"path": "/etc/banner.txt",
"contents": "foo"
}
]
with mock.patch.object(quota.QUOTAS, 'limit_check',
side_effect=side_effect):
self.compute_api._check_injected_file_quota(
self.context, injected_files)
def test_check_injected_file_quota_onset_file_limit_exceeded(self):
# This is the first call to limit_check.
side_effect = exception.OverQuota(overs='injected_files')
self.assertRaises(exception.OnsetFileLimitExceeded,
self._test_check_injected_file_quota_onset_file_limit_exceeded,
side_effect)
def test_check_injected_file_quota_onset_file_path_limit(self):
# This is the second call to limit_check.
side_effect = (mock.DEFAULT,
exception.OverQuota(overs='injected_file_path_bytes'))
self.assertRaises(exception.OnsetFilePathLimitExceeded,
self._test_check_injected_file_quota_onset_file_limit_exceeded,
side_effect)
def test_check_injected_file_quota_onset_file_content_limit(self):
# This is the second call to limit_check but with different overs.
side_effect = (mock.DEFAULT,
exception.OverQuota(overs='injected_file_content_bytes'))
self.assertRaises(exception.OnsetFileContentLimitExceeded,
self._test_check_injected_file_quota_onset_file_limit_exceeded,
side_effect)
    @mock.patch('nova.objects.Quotas.commit')
    @mock.patch('nova.objects.Quotas.reserve')
    @mock.patch('nova.objects.Instance.save')
    @mock.patch('nova.objects.InstanceAction.action_start')
    def test_restore(self, action_start, instance_save, quota_reserve,
                     quota_commit):
        """Restoring a SOFT_DELETED instance sets RESTORING, RPCs the
        compute host, and commits the quota reservation."""
        instance = self._create_instance_obj()
        instance.vm_state = vm_states.SOFT_DELETED
        instance.task_state = None
        instance.save()
        with mock.patch.object(self.compute_api, 'compute_rpcapi') as rpc:
            self.compute_api.restore(self.context, instance)
            rpc.restore_instance.assert_called_once_with(self.context,
                                                         instance)
        self.assertEqual(instance.task_state, task_states.RESTORING)
        self.assertEqual(1, quota_commit.call_count)
def test_external_instance_event(self):
instances = [
objects.Instance(uuid='uuid1', host='host1'),
objects.Instance(uuid='uuid2', host='host1'),
objects.Instance(uuid='uuid3', host='host2'),
]
events = [
objects.InstanceExternalEvent(instance_uuid='uuid1'),
objects.InstanceExternalEvent(instance_uuid='uuid2'),
objects.InstanceExternalEvent(instance_uuid='uuid3'),
]
self.compute_api.compute_rpcapi = mock.MagicMock()
self.compute_api.external_instance_event(self.context,
instances, events)
method = self.compute_api.compute_rpcapi.external_instance_event
method.assert_any_call(self.context, instances[0:2], events[0:2])
method.assert_any_call(self.context, instances[2:], events[2:])
self.assertEqual(2, method.call_count)
def test_volume_ops_invalid_task_state(self):
instance = self._create_instance_obj()
self.assertEqual(instance.vm_state, vm_states.ACTIVE)
instance.task_state = 'Any'
volume_id = uuidutils.generate_uuid()
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.attach_volume,
self.context, instance, volume_id)
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.detach_volume,
self.context, instance, volume_id)
new_volume_id = uuidutils.generate_uuid()
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.swap_volume,
self.context, instance,
volume_id, new_volume_id)
@mock.patch.object(cinder.API, 'get',
side_effect=exception.CinderConnectionFailed(reason='error'))
def test_get_bdm_image_metadata_with_cinder_down(self, mock_get):
bdms = [objects.BlockDeviceMapping(
**fake_block_device.FakeDbBlockDeviceDict(
{
'id': 1,
'volume_id': 1,
'source_type': 'volume',
'destination_type': 'volume',
'device_name': 'vda',
}))]
self.assertRaises(exception.CinderConnectionFailed,
self.compute_api._get_bdm_image_metadata,
self.context,
bdms, legacy_bdm=True)
    @mock.patch.object(cinder.API, 'get')
    @mock.patch.object(cinder.API, 'check_attach',
                       side_effect=exception.InvalidVolume(reason='error'))
    def test_validate_bdm_with_error_volume(self, mock_check_attach, mock_get):
        # Tests that an InvalidVolume exception raised from
        # volume_api.check_attach due to the volume status not being
        # 'available' results in _validate_bdm re-raising InvalidVolume.
        instance = self._create_instance_obj()
        instance_type = self._create_flavor()
        volume_id = 'e856840e-9f5b-4894-8bde-58c6e29ac1e8'
        volume_info = {'status': 'error',
                       'attach_status': 'detached',
                       'id': volume_id}
        mock_get.return_value = volume_info
        bdms = [objects.BlockDeviceMapping(
                **fake_block_device.FakeDbBlockDeviceDict(
                {
                 'boot_index': 0,
                 'volume_id': volume_id,
                 'source_type': 'volume',
                 'destination_type': 'volume',
                 'device_name': 'vda',
                }))]
        self.assertRaises(exception.InvalidVolume,
                          self.compute_api._validate_bdm,
                          self.context,
                          instance, instance_type, bdms)
        # The volume must be fetched, then validated for attachment.
        mock_get.assert_called_once_with(self.context, volume_id)
        mock_check_attach.assert_called_once_with(
            self.context, volume_info, instance=instance)
@mock.patch.object(cinder.API, 'get_snapshot',
side_effect=exception.CinderConnectionFailed(reason='error'))
@mock.patch.object(cinder.API, 'get',
side_effect=exception.CinderConnectionFailed(reason='error'))
def test_validate_bdm_with_cinder_down(self, mock_get, mock_get_snapshot):
instance = self._create_instance_obj()
instance_type = self._create_flavor()
bdm = [objects.BlockDeviceMapping(
**fake_block_device.FakeDbBlockDeviceDict(
{
'id': 1,
'volume_id': 1,
'source_type': 'volume',
'destination_type': 'volume',
'device_name': 'vda',
'boot_index': 0,
}))]
bdms = [objects.BlockDeviceMapping(
**fake_block_device.FakeDbBlockDeviceDict(
{
'id': 1,
'snapshot_id': 1,
'source_type': 'volume',
'destination_type': 'volume',
'device_name': 'vda',
'boot_index': 0,
}))]
self.assertRaises(exception.CinderConnectionFailed,
self.compute_api._validate_bdm,
self.context,
instance, instance_type, bdm)
self.assertRaises(exception.CinderConnectionFailed,
self.compute_api._validate_bdm,
self.context,
instance, instance_type, bdms)
    def _test_create_db_entry_for_new_instance_with_cinder_error(self,
            expected_exception):
        """Verify create_db_entry_for_new_instance destroys the instance
        when BDM validation fails with the given exception.

        The caller is expected to have patched cinder.API.get so that
        validation raises (or returns a bad volume).
        """
        # Mock arguments map to decorators bottom-up:
        # mock_create -> _populate_instance_for_create,
        # mock_names -> _populate_instance_names,
        # mock_ensure -> ensure_default,
        # mock_inst_create -> objects.Instance.create.
        @mock.patch.object(objects.Instance, 'create')
        @mock.patch.object(compute_api.SecurityGroupAPI, 'ensure_default')
        @mock.patch.object(compute_api.API, '_populate_instance_names')
        @mock.patch.object(compute_api.API, '_populate_instance_for_create')
        def do_test(self, mock_create, mock_names, mock_ensure,
                    mock_inst_create):
            instance = self._create_instance_obj()
            instance['display_name'] = 'FAKE_DISPLAY_NAME'
            instance['shutdown_terminate'] = False
            instance_type = self._create_flavor()
            fake_image = {
                'id': 'fake-image-id',
                'properties': {'mappings': []},
                'status': 'fake-status',
                'location': 'far-away'}
            fake_security_group = None
            fake_num_instances = 1
            fake_index = 1
            bdm = [objects.BlockDeviceMapping(
                    **fake_block_device.FakeDbBlockDeviceDict(
                    {
                     'id': 1,
                     'volume_id': 1,
                     'source_type': 'volume',
                     'destination_type': 'volume',
                     'device_name': 'vda',
                     'boot_index': 0,
                     }))]
            with mock.patch.object(instance, "destroy") as destroy:
                self.assertRaises(expected_exception,
                                  self.compute_api.
                                  create_db_entry_for_new_instance,
                                  self.context,
                                  instance_type,
                                  fake_image,
                                  instance,
                                  fake_security_group,
                                  bdm,
                                  fake_num_instances,
                                  fake_index)
                # Failed validation must clean up the half-created record.
                destroy.assert_called_once_with()
        # We use a nested method so we can decorate with the mocks.
        do_test(self)
    @mock.patch.object(cinder.API, 'get',
             side_effect=exception.CinderConnectionFailed(reason='error'))
    def test_create_db_entry_for_new_instancewith_cinder_down(self, mock_get):
        """A cinder outage during BDM validation aborts instance creation."""
        self._test_create_db_entry_for_new_instance_with_cinder_error(
            expected_exception=exception.CinderConnectionFailed)

    @mock.patch.object(cinder.API, 'get',
                       return_value={'id': 1, 'status': 'error',
                                     'attach_status': 'detached'})
    def test_create_db_entry_for_new_instancewith_error_volume(self, mock_get):
        """A volume in 'error' state aborts instance creation."""
        self._test_create_db_entry_for_new_instance_with_cinder_error(
            expected_exception=exception.InvalidVolume)
    def _test_rescue(self, vm_state=vm_states.ACTIVE, rescue_password=None,
                     rescue_image=None, clean_shutdown=True):
        """Common driver for rescue() tests.

        Mocks the BDM lookup, the volume-backed check (False), the
        instance save, the action record, and the compute RPC, then
        asserts rescue() set RESCUING and forwarded all parameters.
        """
        instance = self._create_instance_obj(params={'vm_state': vm_state})
        bdms = []
        with contextlib.nested(
            mock.patch.object(objects.BlockDeviceMappingList,
                              'get_by_instance_uuid', return_value=bdms),
            mock.patch.object(self.compute_api, 'is_volume_backed_instance',
                              return_value=False),
            mock.patch.object(instance, 'save'),
            mock.patch.object(self.compute_api, '_record_action_start'),
            mock.patch.object(self.compute_api.compute_rpcapi,
                              'rescue_instance')
        ) as (
            bdm_get_by_instance_uuid, volume_backed_inst, instance_save,
            record_action_start, rpcapi_rescue_instance
        ):
            self.compute_api.rescue(self.context, instance,
                                    rescue_password=rescue_password,
                                    rescue_image_ref=rescue_image,
                                    clean_shutdown=clean_shutdown)
            # assert field values set on the instance object
            self.assertEqual(task_states.RESCUING, instance.task_state)
            # assert our mock calls
            bdm_get_by_instance_uuid.assert_called_once_with(
                self.context, instance.uuid)
            volume_backed_inst.assert_called_once_with(
                self.context, instance, bdms)
            instance_save.assert_called_once_with(expected_task_state=[None])
            record_action_start.assert_called_once_with(
                self.context, instance, instance_actions.RESCUE)
            rpcapi_rescue_instance.assert_called_once_with(
                self.context, instance=instance,
                rescue_password=rescue_password,
                rescue_image_ref=rescue_image,
                clean_shutdown=clean_shutdown)
    def test_rescue_active(self):
        """Rescue from the default ACTIVE state."""
        self._test_rescue()

    def test_rescue_stopped(self):
        """Rescue is allowed from STOPPED."""
        self._test_rescue(vm_state=vm_states.STOPPED)

    def test_rescue_error(self):
        """Rescue is allowed from ERROR."""
        self._test_rescue(vm_state=vm_states.ERROR)

    def test_rescue_with_password(self):
        """An explicit rescue password is forwarded to the RPC."""
        self._test_rescue(rescue_password='fake-password')

    def test_rescue_with_image(self):
        """An explicit rescue image ref is forwarded to the RPC."""
        self._test_rescue(rescue_image='fake-image')

    def test_rescue_forced_shutdown(self):
        """clean_shutdown=False is forwarded to the RPC."""
        self._test_rescue(clean_shutdown=False)
    def test_unrescue(self):
        """unrescue() sets UNRESCUING, records the action, and RPCs the
        compute host."""
        instance = self._create_instance_obj(
            params={'vm_state': vm_states.RESCUED})
        with contextlib.nested(
            mock.patch.object(instance, 'save'),
            mock.patch.object(self.compute_api, '_record_action_start'),
            mock.patch.object(self.compute_api.compute_rpcapi,
                              'unrescue_instance')
        ) as (
            instance_save, record_action_start, rpcapi_unrescue_instance
        ):
            self.compute_api.unrescue(self.context, instance)
            # assert field values set on the instance object
            self.assertEqual(task_states.UNRESCUING, instance.task_state)
            # assert our mock calls
            instance_save.assert_called_once_with(expected_task_state=[None])
            record_action_start.assert_called_once_with(
                self.context, instance, instance_actions.UNRESCUE)
            rpcapi_unrescue_instance.assert_called_once_with(
                self.context, instance=instance)
    def test_set_admin_password_invalid_state(self):
        # Tests that InstanceInvalidState is raised when not ACTIVE.
        instance = self._create_instance_obj({'vm_state': vm_states.STOPPED})
        self.assertRaises(exception.InstanceInvalidState,
                          self.compute_api.set_admin_password,
                          self.context, instance)
    def test_set_admin_password(self):
        # Ensure instance can have its admin password set.
        instance = self._create_instance_obj()
        # Mock args map to decorators bottom-up: the innermost decorator
        # (set_admin_password RPC) is the first argument.
        @mock.patch.object(objects.Instance, 'save')
        @mock.patch.object(self.compute_api, '_record_action_start')
        @mock.patch.object(self.compute_api.compute_rpcapi,
                           'set_admin_password')
        def do_test(compute_rpcapi_mock, record_mock, instance_save_mock):
            # call the API
            self.compute_api.set_admin_password(self.context, instance)
            # make our assertions
            instance_save_mock.assert_called_once_with(
                expected_task_state=[None])
            record_mock.assert_called_once_with(
                self.context, instance, instance_actions.CHANGE_PASSWORD)
            # No password argument given, so None is forwarded.
            compute_rpcapi_mock.assert_called_once_with(
                self.context, instance=instance, new_pass=None)
        do_test()
def _test_attach_interface_invalid_state(self, state):
instance = self._create_instance_obj(
params={'vm_state': state})
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.attach_interface,
self.context, instance, '', '', '', [])
def test_attach_interface_invalid_state(self):
for state in [vm_states.BUILDING, vm_states.DELETED,
vm_states.ERROR, vm_states.RESCUED,
vm_states.RESIZED, vm_states.SOFT_DELETED,
vm_states.SUSPENDED, vm_states.SHELVED,
vm_states.SHELVED_OFFLOADED]:
self._test_attach_interface_invalid_state(state)
def _test_detach_interface_invalid_state(self, state):
instance = self._create_instance_obj(
params={'vm_state': state})
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.detach_interface,
self.context, instance, '', '', '', [])
def test_detach_interface_invalid_state(self):
for state in [vm_states.BUILDING, vm_states.DELETED,
vm_states.ERROR, vm_states.RESCUED,
vm_states.RESIZED, vm_states.SOFT_DELETED,
vm_states.SUSPENDED, vm_states.SHELVED,
vm_states.SHELVED_OFFLOADED]:
self._test_detach_interface_invalid_state(state)
def test_check_and_transform_bdm(self):
instance_type = self._create_flavor()
base_options = {'uuid': 'fake_uuid',
'image_ref': 'fake_image_ref',
'metadata': {}}
image_meta = {'status': 'active',
'name': 'image_name',
'deleted': False,
'container_format': 'bare',
'id': 'image_id'}
legacy_bdm = False
block_device_mapping = [{'boot_index': 0,
'device_name': None,
'image_id': 'image_id',
'source_type': 'image'},
{'device_name': '/dev/vda',
'source_type': 'volume',
'device_type': None,
'volume_id': 'volume_id'}]
self.assertRaises(exception.InvalidRequest,
self.compute_api._check_and_transform_bdm,
self.context, base_options, instance_type,
image_meta, 1, 1, block_device_mapping, legacy_bdm)
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(objects.InstanceAction, 'action_start')
@mock.patch.object(compute_rpcapi.ComputeAPI, 'pause_instance')
@mock.patch.object(objects.Instance, 'get_by_uuid')
@mock.patch.object(compute_api.API, '_get_instances_by_filters',
return_value=[])
@mock.patch.object(compute_api.API, '_create_instance')
def test_skip_policy_check(self, mock_create, mock_get_ins_by_filters,
mock_get, mock_pause, mock_action, mock_save):
policy.reset()
rules = {'compute:pause': common_policy.parse_rule('!'),
'compute:get': common_policy.parse_rule('!'),
'compute:get_all': common_policy.parse_rule('!'),
'compute:create': common_policy.parse_rule('!')}
policy.set_rules(common_policy.Rules(rules))
instance = self._create_instance_obj()
mock_get.return_value = instance
self.assertRaises(exception.PolicyNotAuthorized,
self.compute_api.pause, self.context, instance)
api = compute_api.API(skip_policy_check=True)
api.pause(self.context, instance)
self.assertRaises(exception.PolicyNotAuthorized,
self.compute_api.get, self.context, instance.uuid)
api = compute_api.API(skip_policy_check=True)
api.get(self.context, instance.uuid)
self.assertRaises(exception.PolicyNotAuthorized,
self.compute_api.get_all, self.context)
api = compute_api.API(skip_policy_check=True)
api.get_all(self.context)
self.assertRaises(exception.PolicyNotAuthorized,
self.compute_api.create, self.context, None, None)
api = compute_api.API(skip_policy_check=True)
api.create(self.context, None, None)
@mock.patch.object(compute_api.API, '_get_instances_by_filters')
def test_tenant_to_project_conversion(self, mock_get):
mock_get.return_value = []
api = compute_api.API()
api.get_all(self.context, search_opts={'tenant_id': 'foo'})
filters = mock_get.call_args_list[0][0][1]
self.assertEqual({'project_id': 'foo'}, filters)
class ComputeAPIUnitTestCase(_ComputeAPIUnitTestMixIn, test.NoDBTestCase):
def setUp(self):
super(ComputeAPIUnitTestCase, self).setUp()
self.compute_api = compute_api.API()
self.cell_type = None
def test_resize_same_flavor_fails(self):
self.assertRaises(exception.CannotResizeToSameFlavor,
self._test_resize, same_flavor=True)
class ComputeAPIAPICellUnitTestCase(_ComputeAPIUnitTestMixIn,
test.NoDBTestCase):
def setUp(self):
super(ComputeAPIAPICellUnitTestCase, self).setUp()
self.flags(cell_type='api', enable=True, group='cells')
self.compute_api = compute_cells_api.ComputeCellsAPI()
self.cell_type = 'api'
def test_resize_same_flavor_fails(self):
self.assertRaises(exception.CannotResizeToSameFlavor,
self._test_resize, same_flavor=True)
class ComputeAPIComputeCellUnitTestCase(_ComputeAPIUnitTestMixIn,
test.NoDBTestCase):
def setUp(self):
super(ComputeAPIComputeCellUnitTestCase, self).setUp()
self.flags(cell_type='compute', enable=True, group='cells')
self.compute_api = compute_api.API()
self.cell_type = 'compute'
def test_resize_same_flavor_passes(self):
self._test_resize(same_flavor=True)
class DiffDictTestCase(test.NoDBTestCase):
"""Unit tests for _diff_dict()."""
def test_no_change(self):
old = dict(a=1, b=2, c=3)
new = dict(a=1, b=2, c=3)
diff = compute_api._diff_dict(old, new)
self.assertEqual(diff, {})
def test_new_key(self):
old = dict(a=1, b=2, c=3)
new = dict(a=1, b=2, c=3, d=4)
diff = compute_api._diff_dict(old, new)
self.assertEqual(diff, dict(d=['+', 4]))
def test_changed_key(self):
old = dict(a=1, b=2, c=3)
new = dict(a=1, b=4, c=3)
diff = compute_api._diff_dict(old, new)
self.assertEqual(diff, dict(b=['+', 4]))
def test_removed_key(self):
old = dict(a=1, b=2, c=3)
new = dict(a=1, c=3)
diff = compute_api._diff_dict(old, new)
self.assertEqual(diff, dict(b=['-']))
class SecurityGroupAPITest(test.NoDBTestCase):
def setUp(self):
super(SecurityGroupAPITest, self).setUp()
self.secgroup_api = compute_api.SecurityGroupAPI()
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id,
self.project_id)
@mock.patch('nova.objects.security_group.SecurityGroupList.'
'get_by_instance')
def test_get_instance_security_groups(self, mock_get):
groups = objects.SecurityGroupList()
groups.objects = [objects.SecurityGroup(name='foo'),
objects.SecurityGroup(name='bar')]
mock_get.return_value = groups
names = self.secgroup_api.get_instance_security_groups(self.context,
'fake-uuid')
self.assertEqual([{'name': 'bar'}, {'name': 'foo'}], sorted(names))
self.assertEqual(1, mock_get.call_count)
self.assertEqual('fake-uuid', mock_get.call_args_list[0][0][1].uuid)<|fim▁end|> | |
<|file_name|>managers.py<|end_file_name|><|fim▁begin|>"""Model managers for Reversion."""
try:
set
except NameError:
from sets import Set as set # Python 2.3 fallback.
from django.contrib.contenttypes.models import ContentType
from django.db import models
class VersionManager(models.Manager):
"""Manager for Version models."""
def get_for_object(self, object):
"""Returns all the versions of the given Revision, ordered by date created."""
content_type = ContentType.objects.get_for_model(object)
return self.filter(content_type=content_type, object_id=unicode(object.pk)).order_by("pk").select_related().order_by("pk")
def get_unique_for_object(self,obj):
"""Returns unique versions associated with the object."""
versions = self.get_for_object(obj)
changed_versions = []<|fim▁hole|> serialized_data = version.serialized_data
if serialized_data in known_serialized_data:
continue
known_serialized_data.add(serialized_data)
changed_versions.append(version)
return changed_versions
def get_for_date(self, object, date):
"""Returns the latest version of an object for the given date."""
try:
return self.get_for_object(object).filter(revision__date_created__lte=date).order_by("-pk")[0]
except IndexError:
raise self.model.DoesNotExist
def get_deleted(self, model_class):
"""Returns all the deleted versions for the given model class."""
live_ids = [unicode(row[0]) for row in model_class._default_manager.all().values_list("pk")]
content_type = ContentType.objects.get_for_model(model_class)
deleted_ids = self.filter(content_type=content_type).exclude(object_id__in=live_ids).order_by().values_list("object_id").distinct()
deleted = []
for object_id, in deleted_ids:
deleted.append(self.get_deleted_object(model_class, object_id))
return deleted
def get_deleted_object(self, model_class, object_id):
"""
Returns the version corresponding to the deletion of the object with
the given id.
"""
try:
content_type = ContentType.objects.get_for_model(model_class)
return self.filter(content_type=content_type, object_id=unicode(object_id)).order_by("-pk").select_related()[0]
except IndexError:
raise self.model.DoesNotExist<|fim▁end|> | known_serialized_data = set()
for version in versions: |
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>################################
# Author : septicmk
# Date : 2015/07/24 19:41:26
# FileName : test_utils.py
################################
import shutil
import tempfile
import unittest
from numpy import vstack
from pyspark import SparkContext
class PySparkTestCase(unittest.TestCase):
def setUp(self):
class_name = self.__class__.__name__
self.sc = SparkContext('local', class_name)
self.sc._jvm.System.setProperty("spark.ui.showConsoleProgress", "false")
log4j = self.sc._jvm.org.apache.log4j
log4j.LogManager.getRootLogger().setLevel(log4j.Level.FATAL)
def tearDown(self):
self.sc.stop()
# To avoid Akka rebinding to the same port, since it doesn't unbind
# immediately on shutdown
self.sc._jvm.System.clearProperty("spark.driver.port")<|fim▁hole|> super(PySparkTestCaseWithOutputDir, self).setUp()
self.outputdir = tempfile.mkdtemp()
def tearDown(self):
super(PySparkTestCaseWithOutputDir, self).tearDown()
shutil.rmtree(self.outputdir)
class LocalTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
class LocalTestCaseWithOutputDir(LocalTestCase):
def setUp(self):
super(LocalTestCaseWithOutputDir, self).setUp()
self.outputdir = tempfile.mktemp()
def tearDown(self):
super(LocalTestCaseWithOutputDir, self).tearDown()
shutil.rmtree(self.outputdir)<|fim▁end|> |
class PySparkTestCaseWithOutputDir(PySparkTestCase):
def setUp(self): |
<|file_name|>test_TxnProcessor.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from unittest import TestCase
from mock import Mock, patch
from qrl.core.misc import logger
from qrl.core.processors.TxnProcessor import TxnProcessor
from qrl.core.ChainManager import ChainManager
from qrl.core.State import State
from qrl.core.OptimizedAddressState import OptimizedAddressState
from qrl.core.txs.TransferTransaction import TransferTransaction
from qrl.core.TransactionPool import TransactionPool
from tests.misc.helper import replacement_getTime
from qrl.core.p2p.p2pfactory import P2PFactory
logger.initialize_default()
def make_tx(txhash=b'hashbrownies', fee=1, autospec=TransferTransaction, PK=b'publickey', **kwargs):
return Mock(autospec=autospec, txhash=txhash, fee=fee, PK=PK, **kwargs)
@patch('qrl.core.processors.TxnProcessor.logger')
@patch('qrl.core.txs.Transaction.Transaction.get_slave')
class TestTxnProcessor(TestCase):
def setUp(self):
m_state = Mock(name='A Mock State', autospec=State)
m_state.get_address_state.return_value = Mock(name='A Mock AddressState', autospec=OptimizedAddressState)
self.chain_manager = Mock(autospec=ChainManager)
self.chain_manager._state = m_state
tx_attrs = {
'validate.return_value': True, # Custom validation for different Transaction Types
'validate_extended.return_value': True, # Master/slave XMSS tree validation; balance & fee, OTS key reuse
'validate_transaction_pool.return_value': True # checks for OTS key reuse within TransactionPool only
}
self.tx1 = make_tx(name='Mock TX 1', **tx_attrs)
self.tx2 = make_tx(name='Mock TX 2', **tx_attrs)
self.tx3 = make_tx(name='Mock TX 3', **tx_attrs)
self.tx4 = make_tx(name='Mock TX 4', **tx_attrs)
self.m_txpool = Mock(autospec=TransactionPool)
self.m_txpool.get_pending_transaction.side_effect = [(self.tx1, replacement_getTime()),
(self.tx2, replacement_getTime()),
(self.tx3, replacement_getTime()),
(self.tx4, replacement_getTime())]
self.m_broadcast_tx = Mock(autospec=P2PFactory.broadcast_tx)
self.txnprocessor = TxnProcessor(chain_manager=self.chain_manager,
transaction_pool_obj=self.m_txpool,
broadcast_tx=self.m_broadcast_tx)
def test_txnprocessor_all_valid(self, m_get_slave, m_logger):
# Transaction.get_slave() gives you the slave's Qaddress, if the TXN is signed by a slave XMSS tree.
# If it's a normal TXN signed by the master XMSS tree, it returns None. Since we mocked out validate_extended(),
# it doesn't really matter what we set here. It's just to make things explicit. Also because class-level patch
# cannot extend into the setUp() function, only the test_* functions.
m_get_slave.return_value = b'PUBLICKEY'
tx_results = [t for t in self.txnprocessor]
self.assertEqual([True, True, True, True], tx_results)
self.assertEqual(4, self.m_txpool.add_tx_to_pool.call_count)
self.assertEqual(4, self.m_broadcast_tx.call_count)<|fim▁hole|> self.chain_manager.validate_all.return_value = False
tx_results = []
for t in self.txnprocessor:
tx_results.append(t)
self.chain_manager.validate_all.return_value = True
self.assertEqual([False, True, True, True], tx_results)
self.assertEqual(3, self.m_txpool.add_tx_to_pool.call_count)
self.assertEqual(3, self.m_broadcast_tx.call_count)
def test_txnprocessor_tx_validate_extended_fail(self, m_get_slave, m_logger):
m_get_slave.return_value = None
self.chain_manager.validate_all.return_value = True
tx_results = []
for t in self.txnprocessor:
tx_results.append(t)
if len(tx_results) == 3:
self.chain_manager.validate_all.return_value = True
else:
self.chain_manager.validate_all.return_value = False
m_logger.info.assert_called()
self.assertEqual([True, False, False, True], tx_results)
self.assertEqual(2, self.m_txpool.add_tx_to_pool.call_count)
self.assertEqual(2, self.m_broadcast_tx.call_count)
def test_txnprocessor_tx_validate_transaction_pool_fail(self, m_get_slave, m_logger):
m_get_slave.return_value = None
tx_results = []
for t in self.txnprocessor:
tx_results.append(t)
if len(tx_results) < 2:
self.chain_manager.validate_all.return_value = True
else:
self.chain_manager.validate_all.return_value = False
m_logger.info.assert_called()
self.assertEqual([True, True, False, False], tx_results)
self.assertEqual(2, self.m_txpool.add_tx_to_pool.call_count)
self.assertEqual(2, self.m_broadcast_tx.call_count)
def test_txnprocessor_tx_all_failure_modes(self, m_get_slave, m_logger):
m_get_slave.return_value = None
tx_results = []
self.chain_manager.validate_all.return_value = True
for t in self.txnprocessor:
tx_results.append(t)
self.chain_manager.validate_all.return_value = False
m_logger.info.assert_called()
self.assertEqual([True, False, False, False], tx_results)
self.assertEqual(1, self.m_txpool.add_tx_to_pool.call_count)
self.assertEqual(1, self.m_broadcast_tx.call_count)
def test_empty(self, m_get_slave, m_logger):
m_get_slave.return_value = None
self.m_txpool.get_pending_transaction.side_effect = None
self.m_txpool.get_pending_transaction.return_value = None
tx_results = [t for t in self.txnprocessor]
self.assertEqual([], tx_results)
self.m_txpool.add_tx_to_pool.assert_not_called()
self.m_broadcast_tx.assert_not_called()<|fim▁end|> |
def test_txnprocessor_tx_validate_fail(self, m_get_slave, m_logger):
m_get_slave.return_value = None |
<|file_name|>data.py<|end_file_name|><|fim▁begin|>import torch
def get_data(params):
batch_size = params["batch_size"]
M, N, K = params["problem"]["size"]
flop = (2.0 * M * N * K)
params["problem"]["flop_estimated"] = flop * params["nb_epoch"] * batch_size<|fim▁hole|> n = params["problem"]["size"][1]
k = params["problem"]["size"][2]
matr_1 = torch.randn(batch_size, m, n)
matr_2 = torch.randn(batch_size, n, k)
return matr_1, matr_2<|fim▁end|> | m = params["problem"]["size"][0] |
<|file_name|>host.html.0.js<|end_file_name|><|fim▁begin|><|fim▁hole|>Polymer('x-foo')<|fim▁end|> | |
<|file_name|>fs.rs<|end_file_name|><|fim▁begin|>// This file is part of the uutils coreutils package.
//
// (c) Joseph Crail <[email protected]>
// (c) Jian Zeng <anonymousknight96 AT gmail.com>
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
#[cfg(target_os = "redox")]
extern crate termion;
#[cfg(unix)]
use super::libc;
use std::env;
use std::fs;
#[cfg(target_os = "redox")]
use std::io;
use std::io::{Error, ErrorKind};
use std::io::Result as IOResult;
use std::path::{Component, Path, PathBuf};
use std::borrow::Cow;
pub fn resolve_relative_path<'a>(path: &'a Path) -> Cow<'a, Path> {
if path.components().all(|e| e != Component::ParentDir) {
return path.into();
}
let root = Component::RootDir.as_os_str();
let mut result = env::current_dir().unwrap_or(PathBuf::from(root));
for comp in path.components() {
match comp {
Component::ParentDir => {
if let Ok(p) = result.read_link() {
result = p;
}
result.pop();
}
Component::CurDir => (),
Component::RootDir | Component::Normal(_) | Component::Prefix(_) => {
result.push(comp.as_os_str())
}
}
}
result.into()
}<|fim▁hole|>#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum CanonicalizeMode {
None,
Normal,
Existing,
Missing,
}
fn resolve<P: AsRef<Path>>(original: P) -> IOResult<PathBuf> {
const MAX_LINKS_FOLLOWED: u32 = 255;
let mut followed = 0;
let mut result = original.as_ref().to_path_buf();
loop {
if followed == MAX_LINKS_FOLLOWED {
return Err(Error::new(
ErrorKind::InvalidInput,
"maximum links followed",
));
}
match fs::symlink_metadata(&result) {
Err(e) => return Err(e),
Ok(ref m) if !m.file_type().is_symlink() => break,
Ok(..) => {
followed += 1;
match fs::read_link(&result) {
Ok(path) => {
result.pop();
result.push(path);
}
Err(e) => {
return Err(e);
}
}
}
}
}
Ok(result)
}
pub fn canonicalize<P: AsRef<Path>>(original: P, can_mode: CanonicalizeMode) -> IOResult<PathBuf> {
// Create an absolute path
let original = original.as_ref();
let original = if original.is_absolute() {
original.to_path_buf()
} else {
env::current_dir().unwrap().join(original)
};
let mut result = PathBuf::new();
let mut parts = vec![];
// Split path by directory separator; add prefix (Windows-only) and root
// directory to final path buffer; add remaining parts to temporary
// vector for canonicalization.
for part in original.components() {
match part {
Component::Prefix(_) | Component::RootDir => {
result.push(part.as_os_str());
}
Component::CurDir => (),
Component::ParentDir => {
parts.pop();
}
Component::Normal(_) => {
parts.push(part.as_os_str());
}
}
}
// Resolve the symlinks where possible
if !parts.is_empty() {
for part in parts[..parts.len() - 1].iter() {
result.push(part);
if can_mode == CanonicalizeMode::None {
continue;
}
match resolve(&result) {
Err(e) => match can_mode {
CanonicalizeMode::Missing => continue,
_ => return Err(e),
},
Ok(path) => {
result.pop();
result.push(path);
}
}
}
result.push(parts.last().unwrap());
match resolve(&result) {
Err(e) => {
if can_mode == CanonicalizeMode::Existing {
return Err(e);
}
}
Ok(path) => {
result.pop();
result.push(path);
}
}
}
Ok(result)
}
#[cfg(unix)]
pub fn is_stdin_interactive() -> bool {
unsafe { libc::isatty(libc::STDIN_FILENO) == 1 }
}
#[cfg(windows)]
pub fn is_stdin_interactive() -> bool {
false
}
#[cfg(target_os = "redox")]
pub fn is_stdin_interactive() -> bool {
termion::is_tty(&io::stdin())
}
#[cfg(unix)]
pub fn is_stdout_interactive() -> bool {
unsafe { libc::isatty(libc::STDOUT_FILENO) == 1 }
}
#[cfg(windows)]
pub fn is_stdout_interactive() -> bool {
false
}
#[cfg(target_os = "redox")]
pub fn is_stdout_interactive() -> bool {
termion::is_tty(&io::stdout())
}
#[cfg(unix)]
pub fn is_stderr_interactive() -> bool {
unsafe { libc::isatty(libc::STDERR_FILENO) == 1 }
}
#[cfg(windows)]
pub fn is_stderr_interactive() -> bool {
false
}
#[cfg(target_os = "redox")]
pub fn is_stderr_interactive() -> bool {
termion::is_tty(&io::stderr())
}<|fim▁end|> | |
<|file_name|>ClientCssHelper.js<|end_file_name|><|fim▁begin|>var logger = require('../logging').getLogger(__LOGGER__);
var {PAGE_CSS_NODE_ID} = require('../constants');
var Q = require('q');
var PageUtil = require('./PageUtil')
var loadedCss = {};
module.exports = {
registerPageLoad: function registerPageLoad() {
if (SERVER_SIDE) {
throw new Error("ClientCssHelper.registerPageLoad can't be called server-side");
}
// for each css node in the head that the react-server server wrote to the response, note it down in the cache, so that
// we can remove it on a page to page transition.
var serverWrittenLinkNodes = document.head.querySelectorAll(`link[${PAGE_CSS_NODE_ID}],style[${PAGE_CSS_NODE_ID}]`);
for (var i = 0; i < serverWrittenLinkNodes.length; i++) {
var key, styleNode = serverWrittenLinkNodes[i];
if (styleNode.href) {
key = normalizeLocalUrl(styleNode.href);
} else {
key = styleNode.innerHTML;
}
loadedCss[key] = styleNode;
}
},
ensureCss: function ensureCss(routeName, pageObject) {
if (SERVER_SIDE) {
throw new Error("ClientCssHelper.registerPageLoad can't be called server-side");
}
return Q.all(PageUtil.standardizeStyles(pageObject.getHeadStylesheets())).then(newCss => {
var newCssByKey = {};
newCss
.filter(style => !!style)
.forEach(style => {newCssByKey[this._keyFromStyleSheet(style)] = style});
// first, remove the unneeded CSS link elements.
Object.keys(loadedCss).forEach(loadedCssKey => {
if (!newCssByKey[loadedCssKey]) {
// remove the corresponding node from the DOM.
logger.debug("Removing stylesheet: " + loadedCssKey);
var node = loadedCss[loadedCssKey];
node.parentNode.removeChild(node);
delete loadedCss[loadedCssKey];
}
});
// next add the style URLs that weren't already loaded.<|fim▁hole|> // document, so we need to add it.
logger.debug("Adding stylesheet: " + newCssKey);
var style = newCssByKey[newCssKey];
var styleTag;
if (style.href) {
styleTag = document.createElement('link');
styleTag.rel = 'stylesheet';
styleTag.href = style.href;
// If we _can_ wait for the CSS to be loaded before
// proceeding, let's do so.
if ('onload' in styleTag) {
var dfd = Q.defer();
styleTag.onload = dfd.resolve;
retval = dfd.promise;
}
} else {
styleTag = document.createElement('style');
styleTag.innerHTML = style.text;
}
styleTag.type = style.type;
styleTag.media = style.media;
loadedCss[newCssKey] = styleTag;
document.head.appendChild(styleTag);
} else {
logger.debug(`Stylesheet already loaded (no-op): ${newCssKey}`);
}
return retval;
}));
});
},
_keyFromStyleSheet: function(style) {
return normalizeLocalUrl(style.href) || style.text;
},
}
function normalizeLocalUrl(url) {
// Step 1: make the url protocol less first. This helps recognizing http://0.0.0.0:3001/common.css
// and //0.0.0.0:3001/common.css as the same file.
// Step 2: The browser will give us a full URL even if we only put a
// path in on the server. So, if we're comparing against just
// a path here we need to strip the base off to avoid a flash
// of unstyled content.
if (typeof url === 'string') {
url = url
.replace(/^http[s]?:/, '')
.replace(new RegExp("^//" + location.host), '');
}
return url;
}<|fim▁end|> | return Q.all(Object.keys(newCssByKey).map(newCssKey => {
var retval;
if (!loadedCss[newCssKey]) {
// this means that the CSS is not currently present in the |
<|file_name|>AdxDataSetMetadata.java<|end_file_name|><|fim▁begin|>package org.hisp.dhis.dxf2.adx;
/*
* Copyright (c) 2015, UiO
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.xerces.util.XMLChar;
import org.hisp.dhis.dataelement.DataElementCategory;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetElement;<|fim▁hole|> */
public class AdxDataSetMetadata
{
// Lookup category options per cat option combo
private final Map<Integer, Map<String, String>> categoryOptionMap;
AdxDataSetMetadata( DataSet dataSet )
throws AdxException
{
categoryOptionMap = new HashMap<>();
Set<DataElementCategoryCombo> catCombos = new HashSet<>();
catCombos.add( dataSet.getCategoryCombo() );
for ( DataSetElement element : dataSet.getDataSetElements() )
{
catCombos.add( element.getResolvedCategoryCombo() );
}
for ( DataElementCategoryCombo categoryCombo : catCombos )
{
for ( DataElementCategoryOptionCombo catOptCombo : categoryCombo.getOptionCombos() )
{
addExplodedCategoryAttributes( catOptCombo );
}
}
}
private void addExplodedCategoryAttributes( DataElementCategoryOptionCombo coc )
throws AdxException
{
Map<String, String> categoryAttributes = new HashMap<>();
if ( !coc.isDefault() )
{
for ( DataElementCategory category : coc.getCategoryCombo().getCategories() )
{
String categoryCode = category.getCode();
if ( categoryCode == null || !XMLChar.isValidName( categoryCode ) )
{
throw new AdxException(
"Category code for " + category.getName() + " is missing or invalid: " + categoryCode );
}
String catOptCode = category.getCategoryOption( coc ).getCode();
if ( catOptCode == null || catOptCode.isEmpty() )
{
throw new AdxException(
"CategoryOption code for " + category.getCategoryOption( coc ).getName() + " is missing" );
}
categoryAttributes.put( categoryCode, catOptCode );
}
}
categoryOptionMap.put( coc.getId(), categoryAttributes );
}
public Map<String, String> getExplodedCategoryAttributes( int cocId )
{
return this.categoryOptionMap.get( cocId );
}
}<|fim▁end|> |
/**
* @author bobj |
<|file_name|>softlayer_vs_ip.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-<|fim▁hole|>DOCUMENTATION = '''
---
module: softlayer_vs_ip
short_description: Retrieves instance ip addresses from Softlayer
description:
- Retrieves instance ip addresses of all adapters from Softlayer
- the result is stored in the "result" dict entry of he registered variable
requirements:
- Requires SoftLayer python client
- Requires Ansible
options:
api_key:
description:
- SoftLayer API Key
default: null
sl_username:
description:
- SoftLayer username
default: null
fqdn:
description:
- The fully qualified domain name of the instance.
type: string
required: true
author: scoss
notes:
- Instead of supplying api_key and username, .softlayer or env variables
'''
from ansible.module_utils.basic import *
import SoftLayer
import sys
import logging
import time
from softlayer_vs_basic import *
class IpAddressReader(SoftlayerVirtualServerBasic):
def __init__(self, sl_client, instance_config):
SoftlayerVirtualServerBasic.__init__(self, sl_client, instance_config)
def read_ip_address(self):
sl_instance = self.sl_virtual_guest.getObject(id=self.get_vs_id(True), mask='primaryBackendIpAddress, primaryIpAddress')
return sl_instance
def main():
module_helper = AnsibleModule(
argument_spec = dict(
SLClientConfig.arg_spec().items() + VSInstanceConfigBasic.arg_spec().items()
)
)
sl_client_config = SLClientConfig(module_helper.params)
sl_client = SoftLayer.Client(username=sl_client_config.sl_username, api_key=sl_client_config.api_key)
vs = IpAddressReader(sl_client,
VSInstanceConfigBasic(ansible_config=module_helper.params))
try:
module_helper.exit_json(changed=False, result=vs.read_ip_address())
except Exception as se:
module_helper.fail_json(changed=False, msg=str(se))
main()<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import itertools
import tempfile
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
callable_default_counter = itertools.count()
callable_default = lambda: next(callable_default_counter)
temp_storage = FileSystemStorage(location=tempfile.mkdtemp())
class BoundaryModel(models.Model):
positive_integer = models.PositiveIntegerField(null=True, blank=True)
class Defaults(models.Model):
name = models.CharField(max_length=255, default='class default value')
def_date = models.DateField(default=datetime.date(1980, 1, 1))
value = models.IntegerField(default=42)
callable_default = models.IntegerField(default=callable_default)
class ChoiceModel(models.Model):
"""For ModelChoiceField and ModelMultipleChoiceField tests."""
CHOICES = [
('', 'No Preference'),
('f', 'Foo'),
('b', 'Bar'),
]
INTEGER_CHOICES = [
(None, 'No Preference'),
(1, 'Foo'),
(2, 'Bar'),<|fim▁hole|> (None, 'No Preference'),
('f', 'Foo'),
('b', 'Bar'),
]
name = models.CharField(max_length=10)
choice = models.CharField(max_length=2, blank=True, choices=CHOICES)
choice_string_w_none = models.CharField(
max_length=2, blank=True, null=True, choices=STRING_CHOICES_WITH_NONE)
choice_integer = models.IntegerField(choices=INTEGER_CHOICES, blank=True,
null=True)
@python_2_unicode_compatible
class ChoiceOptionModel(models.Model):
"""Destination for ChoiceFieldModel's ForeignKey.
Can't reuse ChoiceModel because error_message tests require that it have no instances."""
name = models.CharField(max_length=10)
class Meta:
ordering = ('name',)
def __str__(self):
return 'ChoiceOption %d' % self.pk
class ChoiceFieldModel(models.Model):
"""Model with ForeignKey to another model, for testing ModelForm
generation with ModelChoiceField."""
choice = models.ForeignKey(ChoiceOptionModel, blank=False,
default=lambda: ChoiceOptionModel.objects.get(name='default'))
choice_int = models.ForeignKey(ChoiceOptionModel, blank=False, related_name='choice_int',
default=lambda: 1)
multi_choice = models.ManyToManyField(ChoiceOptionModel, blank=False, related_name='multi_choice',
default=lambda: ChoiceOptionModel.objects.filter(name='default'))
multi_choice_int = models.ManyToManyField(ChoiceOptionModel, blank=False, related_name='multi_choice_int',
default=lambda: [1])
class OptionalMultiChoiceModel(models.Model):
multi_choice = models.ManyToManyField(ChoiceOptionModel, blank=False, related_name='not_relevant',
default=lambda: ChoiceOptionModel.objects.filter(name='default'))
multi_choice_optional = models.ManyToManyField(ChoiceOptionModel, blank=True,
related_name='not_relevant2')
class FileModel(models.Model):
file = models.FileField(storage=temp_storage, upload_to='tests')
@python_2_unicode_compatible
class Group(models.Model):
name = models.CharField(max_length=10)
def __str__(self):
return '%s' % self.name
class Cheese(models.Model):
name = models.CharField(max_length=100)
class Article(models.Model):
content = models.TextField()<|fim▁end|> | ]
STRING_CHOICES_WITH_NONE = [ |
<|file_name|>mks.py<|end_file_name|><|fim▁begin|># Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain<|fim▁hole|># a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg

# Option group holding all MKS (remote console) related settings.
mks_group = cfg.OptGroup('mks', title='MKS Options')

mks_opts = [
    cfg.StrOpt('mksproxy_base_url',
               default='http://127.0.0.1:6090/',
               help='Location of MKS web console proxy, in the form '
                    '"http://127.0.0.1:6090/"'),
    cfg.BoolOpt('enabled',
                default=False,
                help='Enable MKS related features'),
]

# Single canonical option list shared by register_opts()/list_opts().
ALL_MKS_OPTS = mks_opts
def register_opts(conf):
    """Register the 'mks' option group and its options on *conf*.

    :param conf: an oslo.config ConfigOpts instance.
    """
    conf.register_group(mks_group)
    # PEP 8: no spaces around '=' in keyword arguments (was "group = mks_group").
    conf.register_opts(ALL_MKS_OPTS, group=mks_group)
def list_opts():
return {mks_group: ALL_MKS_OPTS}<|fim▁end|> | |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for sms_relay project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable<|fim▁hole|>Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import djcelery
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "sms_relay.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sms_relay.settings")
# Configure Celery to discover tasks via Django's app loading.
djcelery.setup_loader()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)<|fim▁end|> | named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
|
<|file_name|>gen_test2111b.py<|end_file_name|><|fim▁begin|>mcinif='mcini_gen2'<|fim▁hole|>update_prec=0.04
update_mf=False
update_part=500
import sys
sys.path.append(pathdir)
import run_echoRD as rE
rE.echoRD_job(mcinif=mcinif,mcpick=mcpick,runname=runname,wdir=wdir,pathdir=pathdir,update_prec=update_prec,update_mf=update_mf,update_part=update_part,hdf5pick=False)<|fim▁end|> | runname='gen_test2111b'
mcpick='gen_test2b.pickle'
pathdir='/beegfs/work/ka_oj4748/echoRD'
wdir='/beegfs/work/ka_oj4748/gen_tests' |
<|file_name|>final.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'final.ui'
#
# Created by: PyQt5 UI code generator 5.8.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from forecastiopy import *
import datetime
import sys
from ubidots import ApiClient
import time
import webbrowser
from threading import Thread
import numpy as np
import skfuzzy as fuzz
from skfuzzy import control as ctrl
import os.path
import serial
# Import SPI library (for hardware SPI) and MCP3008 library.
import Adafruit_SSD1306
# Raspberry Pi pin configuration:
RST = 32
# 128x32 display with hardware I2C:
disp = Adafruit_SSD1306.SSD1306_128_32(rst=RST)
import Adafruit_GPIO.SPI as SPI
import Adafruit_MCP3008
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
# Serial link settings — presumably the board reporting room conditions.
# NOTE(review): the port is opened unconditionally at import time; the app
# crashes here if /dev/ttyUSB0 is absent — consider a guarded/lazy open.
PORT = '/dev/ttyUSB0'
BAUD_RATE = 9600
# Open serial port
ser = serial.Serial(PORT, BAUD_RATE)
class MovieSplashScreen(QSplashScreen):
    """Splash screen that plays an animated QMovie and ignores mouse clicks."""

    def __init__(self, movie, parent = None):
        # Seek to the first frame so frameRect() reports a valid size.
        movie.jumpToFrame(0)
        blank = QPixmap(movie.frameRect().size())
        QSplashScreen.__init__(self, blank)
        self.movie = movie
        # Repaint whenever the movie advances to a new frame.
        self.movie.frameChanged.connect(self.repaint)

    def showEvent(self, event):
        # Begin playback as soon as the splash becomes visible.
        self.movie.start()

    def hideEvent(self, event):
        self.movie.stop()

    def paintEvent(self, event):
        painter = QPainter(self)
        frame = self.movie.currentPixmap()
        # Clip the window to the frame's mask, then blit the frame.
        self.setMask(frame.mask())
        painter.drawPixmap(0, 0, frame)

    def sizeHint(self):
        return self.movie.scaledSize()

    def mousePressEvent(self, mouse_event):
        # Swallow clicks so the splash cannot be dismissed early.
        pass
class Ui_system(object):
    """Generated UI container plus hand-written slots for the fuzzy HVAC app."""

    # Completion flags polled by the splash-screen busy loops (loading1/loading2).
    done1 = False  # set by Update_Average when the daily refresh finishes
    done2 = False  # set by Update_Current when the current-conditions refresh finishes
    done3 = False  # not used in the visible portion of the file
    # Cached readings written by Update_Average:
    t = 0      # 48-hour average temperature (°C)
    c = 0      # 48-hour average cloud cover (0..1)
    b = 0      # battery percentage from Update_Battery
    eco = 0    # presumably the computed economy level — set outside this view
    roomt = 0  # presumably room temperature — set outside this view
    roomh = 0  # presumably room humidity — set outside this view
    def setupUi(self, system):
        """Build both toolbox pages (Qt Designer generated — do not hand-edit
        logic here; regenerate from final.ui instead).

        Page 1 ("Fuzzy_system"): forecast/battery inputs, defuzzification
        output, and the RUN SYSTEM button. Page 2 ("Room_Conditions"):
        room sensor readouts and appliance status indicators.
        """
        system.setObjectName("system")
        system.resize(800, 600)
        system.setToolTip("")
        system.setStyleSheet("background-color: rgb(44, 0, 30);")
        # --- Page 1: fuzzy-system variables ---
        self.Fuzzy_system = QtWidgets.QWidget()
        self.Fuzzy_system.setEnabled(True)
        self.Fuzzy_system.setGeometry(QtCore.QRect(0, 0, 800, 538))
        self.Fuzzy_system.setObjectName("Fuzzy_system")
        self.title_1 = QtWidgets.QLabel(self.Fuzzy_system)
        self.title_1.setGeometry(QtCore.QRect(150, -20, 503, 85))
        self.title_1.setStyleSheet("font: 36pt \"Peace Sans\";\n"
                                   "color: rgb(233, 84, 32);")
        self.title_1.setObjectName("title_1")
        # Clock labels, refreshed every second by timer1/timer2.
        self.time_hours = QtWidgets.QLabel(self.Fuzzy_system)
        self.time_hours.setGeometry(QtCore.QRect(576, 60, 121, 121))
        self.time_hours.setStyleSheet("font: 76pt \"Slim Joe\";\n"
                                      "color:rgb(238, 247, 251);")
        self.time_hours.setObjectName("time_hours")
        self.time_min = QtWidgets.QLabel(self.Fuzzy_system)
        self.time_min.setGeometry(QtCore.QRect(710, 80, 67, 41))
        self.time_min.setStyleSheet("font: 26pt \"Big John\";\n"
                                    "color:rgb(238, 247, 251);")
        self.time_min.setText("")
        self.time_min.setObjectName("time_min")
        self.time_hours.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
        self.time_min.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
        self.timer1 = QtCore.QTimer()
        self.timer1.setInterval(1000)
        self.timer1.timeout.connect(self.Time)
        self.timer1.start()
        self.date = QtWidgets.QLabel(self.Fuzzy_system)
        self.date.setGeometry(QtCore.QRect(700, 130, 101, 21))
        self.date.setStyleSheet("font: 10pt \"Big John\";\n"
                                "color:rgb(238, 247, 251);")
        self.date.setText("")
        self.date.setObjectName("date")
        self.date.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
        self.timer2 = QtCore.QTimer()
        self.timer2.setInterval(1000)
        self.timer2.timeout.connect(self.Date)
        self.timer2.start()
        # RUN SYSTEM button; timer5 also re-runs the system every 5 minutes.
        self.run_system = QtWidgets.QPushButton(self.Fuzzy_system)
        self.run_system.setGeometry(QtCore.QRect(230, 480, 361, 51))
        self.run_system.setStyleSheet("color: rgb(255, 255, 255);\n"
                                      "font: 11pt \"Big John\";")
        self.run_system.setObjectName("run_system")
        self.run_system.clicked.connect(self.Run_System)
        self.timer5 = QtCore.QTimer()
        self.timer5.setInterval(1000 * 300)
        self.timer5.timeout.connect(self.Run_System)
        self.timer5.start()
        self.avg_temp_txt = QtWidgets.QLabel(self.Fuzzy_system)
        self.avg_temp_txt.setGeometry(QtCore.QRect(0, 100, 121, 51))
        # NOTE(review): "rgbrgb" below is malformed CSS from the generator.
        self.avg_temp_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
                                        "color:rgbrgb(85, 85, 255);")
        self.avg_temp_txt.setObjectName("avg_temp_txt")
        self.avg_temp_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.temp_icon = QtWidgets.QLabel(self.Fuzzy_system)
        self.temp_icon.setGeometry(QtCore.QRect(340, 110, 32, 32))
        self.temp_icon.setStyleSheet("font: 26pt \"Big John\";\n"
                                     "color:rgb(174, 167, 159)")
        self.temp_icon.setObjectName("temp_icon")
        self.avg_cc_txt = QtWidgets.QLabel(self.Fuzzy_system)
        self.avg_cc_txt.setGeometry(QtCore.QRect(0, 170, 121, 51))
        self.avg_cc_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
                                      "color:rgb(85, 85, 255);")
        self.avg_cc_txt.setObjectName("avg_cc_txt")
        self.avg_cc_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.avg_batt_txt = QtWidgets.QLabel(self.Fuzzy_system)
        self.avg_batt_txt.setGeometry(QtCore.QRect(0, 240, 121, 51))
        self.avg_batt_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
                                        "color:rgb(85, 85, 255);")
        self.avg_batt_txt.setObjectName("avg_batt_txt")
        self.avg_batt_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        # Battery is re-read every 15 minutes.
        self.timer3 = QtCore.QTimer()
        self.timer3.setInterval(1000 * 900)
        self.timer3.timeout.connect(self.Update_Battery)
        self.timer3.start()
        self.battery_percent_but = QtWidgets.QPushButton(self.Fuzzy_system)
        self.battery_percent_but.setGeometry(QtCore.QRect(120, 250, 221, 32))
        self.battery_percent_but.setStyleSheet("font: 75 11pt \"Moon\";\n"
                                               "color: rgb(200, 226, 240);")
        self.battery_percent_but.clicked.connect(self.Batt_Percent)
        self.battery_percent_but.setObjectName("battery_percent_but")
        self.batt_icon = QtWidgets.QLabel(self.Fuzzy_system)
        self.batt_icon.setGeometry(QtCore.QRect(340, 250, 32, 32))
        self.batt_icon.setStyleSheet("font: 26pt \"Big John\";\n"
                                     "color:rgb(174, 167, 159)")
        self.batt_icon.setObjectName("batt_icon")
        self.cloud_icon = QtWidgets.QLabel(self.Fuzzy_system)
        self.cloud_icon.setGeometry(QtCore.QRect(340, 180, 32, 32))
        self.cloud_icon.setStyleSheet("font: 26pt \"Big John\";\n"
                                      "color:rgb(174, 167, 159)")
        self.cloud_icon.setObjectName("cloud_icon")
        self.average_cc_but = QtWidgets.QPushButton(self.Fuzzy_system)
        self.average_cc_but.setGeometry(QtCore.QRect(120, 180, 221, 32))
        self.average_cc_but.setStyleSheet("font: 75 11pt \"Moon\";\n"
                                          "color: rgb(200, 226, 240);")
        self.average_cc_but.setObjectName("average_cc_but")
        self.average_cc_but.clicked.connect(self.Avg_CC)
        self.defuzz_txt = QtWidgets.QLabel(self.Fuzzy_system)
        self.defuzz_txt.setGeometry(QtCore.QRect(240, 380, 161, 71))
        self.defuzz_txt.setStyleSheet("font: 40pt \"Big John\";\n"
                                      "color:rgb(238, 247, 251);")
        self.defuzz_txt.setObjectName("defuzz_txt")
        self.defuzz_but = QtWidgets.QPushButton(self.Fuzzy_system)
        self.defuzz_but.setGeometry(QtCore.QRect(50, 400, 179, 32))
        self.defuzz_but.setStyleSheet("font: 11pt \"Peace Sans\";\n"
                                      "color: rgb(34, 139, 34)")
        self.defuzz_but.setObjectName("defuzz_but")
        self.defuzz_but.clicked.connect(self.Defuzz)
        self.eco_level_but = QtWidgets.QPushButton(self.Fuzzy_system)
        self.eco_level_but.setGeometry(QtCore.QRect(450, 400, 179, 32))
        self.eco_level_but.setStyleSheet("font: 11pt \"Peace Sans\";\n"
                                         "color: rgb(34, 139, 34)")
        self.eco_level_but.setObjectName("eco_level_but")
        self.eco_level_but.clicked.connect(self.Eco)
        self.temp_but = QtWidgets.QPushButton(self.Fuzzy_system)
        self.temp_but.setGeometry(QtCore.QRect(500, 200, 161, 26))
        self.temp_but.setStyleSheet("color:rgb(200, 226, 240);\n"
                                    "font: 75 11pt \"Moon\";")
        self.temp_but.setObjectName("temp_but")
        self.temp_but.clicked.connect(self.DarkSky)
        self.average_temp_but = QtWidgets.QPushButton(self.Fuzzy_system)
        self.average_temp_but.setGeometry(QtCore.QRect(120, 110, 221, 32))
        self.average_temp_but.setStyleSheet("font: 75 11pt \"Moon\";\n"
                                            "color: rgb(200, 226, 240);")
        self.average_temp_but.setObjectName("average_temp_but")
        self.average_temp_but.clicked.connect(self.Avg_temp)
        self.cloud_cover_but = QtWidgets.QPushButton(self.Fuzzy_system)
        self.cloud_cover_but.setGeometry(QtCore.QRect(500, 270, 161, 26))
        self.cloud_cover_but.setStyleSheet("color:rgb(200, 226, 240);\n"
                                           "font: 75 11pt \"Moon\";")
        self.cloud_cover_but.setObjectName("cloud_cover_but")
        self.cloud_cover_but.clicked.connect(self.DarkSky)
        self.temp_text = QtWidgets.QLabel(self.Fuzzy_system)
        self.temp_text.setGeometry(QtCore.QRect(662, 180, 131, 61))
        self.temp_text.setStyleSheet("font: 75 32pt \"Moon\";\n"
                                     "color:rgb(233, 99, 94);")
        self.temp_text.setObjectName("temp_text")
        self.temp_text.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.eco_level_txt = QtWidgets.QLabel(self.Fuzzy_system)
        self.eco_level_txt.setGeometry(QtCore.QRect(640, 380, 61, 71))
        self.eco_level_txt.setStyleSheet("font: 40pt \"Big John\";\n"
                                         "color:rgb(238, 247, 251);")
        self.eco_level_txt.setObjectName("eco_level_txt")
        self.cloud_cover_txt = QtWidgets.QLabel(self.Fuzzy_system)
        self.cloud_cover_txt.setGeometry(QtCore.QRect(662, 250, 131, 61))
        self.cloud_cover_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
                                           "color:rgb(233, 99, 94);")
        self.cloud_cover_txt.setObjectName("cloud_cover_txt")
        self.cloud_cover_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.refresh_current = QtWidgets.QToolButton(self.Fuzzy_system)
        self.refresh_current.setGeometry(QtCore.QRect(610, 330, 88, 31))
        self.refresh_current.setStyleSheet("font: 11pt \"Peace Sans\";\n"
                                           "color: rgb(34, 139, 34)")
        self.refresh_current.setObjectName("refresh_current")
        self.refresh_current.clicked.connect(self.loading2)
        self.refresh_avg = QtWidgets.QToolButton(self.Fuzzy_system)
        self.refresh_avg.setGeometry(QtCore.QRect(150, 300, 88, 31))
        self.refresh_avg.setStyleSheet("font: 11pt \"Peace Sans\";\n"
                                       "color: rgb(34, 139, 34)")
        self.refresh_avg.setObjectName("refresh_avg")
        self.refresh_avg.clicked.connect(self.loading1)
        # Daily averages auto-refresh once every 24 hours.
        self.timer4 = QtCore.QTimer()
        self.timer4.setInterval(1000 * 86400)
        self.timer4.timeout.connect(self.loading1)
        self.timer4.start()
        self.dark_sky_1 = QtWidgets.QToolButton(self.Fuzzy_system)
        self.dark_sky_1.setGeometry(QtCore.QRect(640, 510, 158, 23))
        self.dark_sky_1.setStyleSheet("font: 25 10pt \"Ubuntu\";\n"
                                      "color: rgb(85, 170, 255)")
        self.dark_sky_1.setObjectName("dark_sky_1")
        self.dark_sky_1.clicked.connect(self.DarkSky)
        # Restore the generator's z-order.
        self.title_1.raise_()
        self.time_hours.raise_()
        self.time_min.raise_()
        self.date.raise_()
        self.run_system.raise_()
        self.avg_temp_txt.raise_()
        self.avg_cc_txt.raise_()
        self.avg_batt_txt.raise_()
        self.defuzz_txt.raise_()
        self.average_temp_but.raise_()
        self.temp_icon.raise_()
        self.average_cc_but.raise_()
        self.cloud_icon.raise_()
        self.battery_percent_but.raise_()
        self.batt_icon.raise_()
        self.cloud_cover_but.raise_()
        self.temp_text.raise_()
        self.defuzz_but.raise_()
        self.eco_level_but.raise_()
        self.eco_level_txt.raise_()
        self.temp_but.raise_()
        self.cloud_cover_txt.raise_()
        self.refresh_current.raise_()
        self.refresh_avg.raise_()
        self.dark_sky_1.raise_()
        system.addItem(self.Fuzzy_system, "")
        # --- Page 2: room conditions ---
        self.Room_Conditions = QtWidgets.QWidget()
        self.Room_Conditions.setGeometry(QtCore.QRect(0, 0, 800, 538))
        self.Room_Conditions.setObjectName("Room_Conditions")
        self.title_2 = QtWidgets.QLabel(self.Room_Conditions)
        self.title_2.setGeometry(QtCore.QRect(130, -20, 521, 85))
        self.title_2.setStyleSheet("font: 36pt \"Peace Sans\";\n"
                                   "color: rgb(233, 84, 32);")
        self.title_2.setObjectName("title_2")
        self.room_temp_txt = QtWidgets.QLabel(self.Room_Conditions)
        self.room_temp_txt.setGeometry(QtCore.QRect(2, 90, 131, 61))
        self.room_temp_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
                                         "color:rgb(238, 247, 251);")
        self.room_temp_txt.setObjectName("room_temp_txt")
        self.room_temp_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.room_hum_but = QtWidgets.QPushButton(self.Room_Conditions)
        self.room_hum_but.setGeometry(QtCore.QRect(490, 110, 161, 26))
        self.room_hum_but.setStyleSheet("color:rgb(233, 99, 94);\n"
                                        "font: 75 11pt \"Moon\";")
        self.room_hum_but.setObjectName("room_hum_but")
        self.room_hum_but.clicked.connect(self.Room_hum_browser)
        self.room_hum_txt = QtWidgets.QLabel(self.Room_Conditions)
        self.room_hum_txt.setGeometry(QtCore.QRect(660, 90, 131, 61))
        self.room_hum_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
                                        "color:rgb(238, 247, 251);")
        self.room_hum_txt.setObjectName("room_hum_txt")
        self.room_hum_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.room_temp_but = QtWidgets.QPushButton(self.Room_Conditions)
        self.room_temp_but.setGeometry(QtCore.QRect(140, 110, 161, 26))
        self.room_temp_but.setStyleSheet("color:rgb(233, 99, 94);\n"
                                        "font: 75 11pt \"Moon\";")
        self.room_temp_but.setObjectName("room_temp_but")
        self.room_temp_but.clicked.connect(self.Room_temp_browser)
        # Appliance status indicators: green "_on" and red "_off" labels.
        self.heater_on = QtWidgets.QLabel(self.Room_Conditions)
        self.heater_on.setGeometry(QtCore.QRect(230, 310, 61, 61))
        self.heater_on.setStyleSheet("font: 75 26pt \"Moon\";\n"
                                     "color: rgb(0, 255, 0);")
        self.heater_on.setObjectName("heater_on")
        self.cooler_on = QtWidgets.QLabel(self.Room_Conditions)
        self.cooler_on.setGeometry(QtCore.QRect(230, 380, 61, 61))
        self.cooler_on.setStyleSheet("font: 75 26pt \"Moon\";\n"
                                     "color: rgb(0, 255, 0);")
        self.cooler_on.setObjectName("cooler_on")
        self.heater_off = QtWidgets.QLabel(self.Room_Conditions)
        self.heater_off.setGeometry(QtCore.QRect(300, 310, 61, 61))
        self.heater_off.setStyleSheet("font: 75 26pt \"Moon\";\n"
                                      "color: rgb(255, 0, 0);\n"
                                      "")
        self.heater_off.setObjectName("heater_off")
        self.cooler_off = QtWidgets.QLabel(self.Room_Conditions)
        self.cooler_off.setGeometry(QtCore.QRect(300, 380, 61, 61))
        self.cooler_off.setStyleSheet("font: 75 26pt \"Moon\";\n"
                                      "color: rgb(255, 0, 0);")
        self.cooler_off.setObjectName("cooler_off")
        self.heater = QtWidgets.QLabel(self.Room_Conditions)
        self.heater.setGeometry(QtCore.QRect(150, 330, 71, 31))
        self.heater.setStyleSheet("font: 11pt \"Peace Sans\";\n"
                                  "color:rgb(85, 85, 255);")
        self.heater.setObjectName("heater")
        self.cooler = QtWidgets.QLabel(self.Room_Conditions)
        self.cooler.setGeometry(QtCore.QRect(150, 400, 71, 31))
        self.cooler.setStyleSheet("color:rgb(85, 85, 255);\n"
                                  "font: 11pt \"Peace Sans\";")
        self.cooler.setObjectName("cooler")
        self.dehumid_on = QtWidgets.QLabel(self.Room_Conditions)
        self.dehumid_on.setGeometry(QtCore.QRect(490, 380, 61, 61))
        self.dehumid_on.setStyleSheet("font: 75 26pt \"Moon\";\n"
                                      "color: rgb(0, 255, 0);")
        self.dehumid_on.setObjectName("dehumid_on")
        self.humid_off = QtWidgets.QLabel(self.Room_Conditions)
        self.humid_off.setGeometry(QtCore.QRect(420, 310, 61, 61))
        self.humid_off.setStyleSheet("font: 75 26pt \"Moon\";\n"
                                     "color: rgb(255, 0, 0);")
        self.humid_off.setObjectName("humid_off")
        self.humid_on = QtWidgets.QLabel(self.Room_Conditions)
        self.humid_on.setGeometry(QtCore.QRect(490, 310, 61, 61))
        self.humid_on.setStyleSheet("font: 75 26pt \"Moon\";\n"
                                    "color: rgb(0, 255, 0);")
        self.humid_on.setObjectName("humid_on")
        self.dehumid_off = QtWidgets.QLabel(self.Room_Conditions)
        self.dehumid_off.setGeometry(QtCore.QRect(420, 380, 61, 61))
        self.dehumid_off.setStyleSheet("font: 75 26pt \"Moon\";\n"
                                       "color: rgb(255, 0, 0);")
        self.dehumid_off.setObjectName("dehumid_off")
        self.humidifier = QtWidgets.QLabel(self.Room_Conditions)
        self.humidifier.setGeometry(QtCore.QRect(560, 330, 101, 31))
        self.humidifier.setStyleSheet("font: 11pt \"Peace Sans\";\n"
                                      "color:rgb(85, 85, 255);")
        self.humidifier.setObjectName("humidifier")
        self.dehumidifier = QtWidgets.QLabel(self.Room_Conditions)
        self.dehumidifier.setGeometry(QtCore.QRect(560, 400, 121, 31))
        self.dehumidifier.setStyleSheet("font: 11pt \"Peace Sans\";\n"
                                        "color:rgb(85, 85, 255);")
        self.dehumidifier.setObjectName("dehumidifier")
        self.running = QtWidgets.QLabel(self.Room_Conditions)
        self.running.setGeometry(QtCore.QRect(230, 170, 331, 41))
        self.running.setStyleSheet("color: rgb(255, 255, 0);\n"
                                   "font: 14pt \"Big John\";")
        self.running.setObjectName("running")
        self.run_eco_level = QtWidgets.QLabel(self.Room_Conditions)
        self.run_eco_level.setGeometry(QtCore.QRect(350, 220, 81, 61))
        self.run_eco_level.setStyleSheet("font: 40pt \"Big John\";\n"
                                         "color: rgb(255, 255, 255);\n"
                                         "")
        # NOTE(review): duplicated setObjectName call kept from the generator.
        self.run_eco_level.setObjectName("run_eco_level")
        self.run_eco_level.setObjectName("run_eco_level")
        self.run_eco_level.setText("--")
        self.run_eco_level.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.open_ubidots = QtWidgets.QPushButton(self.Room_Conditions)
        self.open_ubidots.setGeometry(QtCore.QRect(230, 460, 361, 51))
        self.open_ubidots.setStyleSheet("color: rgb(255, 255, 255);\n"
                                        "font: 11pt \"Big John\";")
        self.open_ubidots.setObjectName("open_ubidots")
        self.open_ubidots.clicked.connect(self.Open_ubidots)
        self.dark_sky_2 = QtWidgets.QToolButton(self.Room_Conditions)
        self.dark_sky_2.setGeometry(QtCore.QRect(640, 490, 158, 23))
        self.dark_sky_2.setStyleSheet("font: 25 10pt \"Ubuntu\";\n"
                                      "color: rgb(85, 170, 255)")
        self.dark_sky_2.setObjectName("dark_sky_2")
        system.addItem(self.Room_Conditions, "")
        # Apply translated strings and finalize.
        self.retranslateUi(system)
        system.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(system)
    def retranslateUi(self, system):
        """Install all user-visible strings/tooltips (Qt Designer generated).

        Called once from setupUi; uses QCoreApplication.translate so the
        strings can be localized.
        """
        _translate = QtCore.QCoreApplication.translate
        system.setWindowTitle(_translate("system", "ToolBox"))
        # Page 1 strings.
        self.title_1.setText(_translate("system", "SYSTEM VARIABLES"))
        self.time_hours.setText(_translate("system", "<html><head/><body><p align=\"right\"><br/></p></body></html>"))
        self.run_system.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">RUN SYSTEM IN OBTAINED ECONOMY LEVEL</span></p></body></html>"))
        self.run_system.setText(_translate("system", "RUN SYSTEM"))
        self.avg_temp_txt.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.temp_icon.setText(_translate("system", "<html><head/><body><p><img src=\":/icons/Icons/thermometer.png\"/></p></body></html>"))
        self.avg_cc_txt.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.avg_batt_txt.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.battery_percent_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt; font-weight:600; color:#e95420;\">VIEW PLOT IN UBIDOTS</span></p></body></html>"))
        self.battery_percent_but.setText(_translate("system", "BATTERY PERCENTAGE"))
        self.batt_icon.setText(_translate("system", "<html><head/><body><p><img src=\":/icons/Icons/battery.png\"/></p></body></html>"))
        self.cloud_icon.setText(_translate("system", "<html><head/><body><p><img src=\":/icons/Icons/cloudy.png\"/></p></body></html>"))
        self.average_cc_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt; font-weight:600; color:#e95420;\">VIEW PLOT IN UBIDOTS</span></p></body></html>"))
        self.average_cc_but.setText(_translate("system", "AVERAGE CLOUD COVER"))
        self.defuzz_txt.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.defuzz_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">DEFUZZIFY THE INPUTS</span></p></body></html>"))
        self.defuzz_but.setText(_translate("system", "DEFUZZIFICATION"))
        self.eco_level_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">Log DATA</span></p></body></html>"))
        self.eco_level_but.setText(_translate("system", "ECONOMY LEVEL"))
        self.temp_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt; font-weight:600; color:#e95420;\">WEATHER FORECAST</span></p></body></html>"))
        self.temp_but.setText(_translate("system", "TEMPERATURE"))
        self.average_temp_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt; font-weight:600; color:#e95420;\">VIEW PLOT IN UBIDOTS</span></p></body></html>"))
        self.average_temp_but.setText(_translate("system", "AVERAGE TEMPERATURE"))
        self.cloud_cover_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt; font-weight:600; color:#e95420;\">WEATHER FORECAST</span></p></body></html>"))
        self.cloud_cover_but.setText(_translate("system", "CLOUD COVER"))
        self.temp_text.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.eco_level_txt.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.cloud_cover_txt.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.refresh_current.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">REFRESH DATA</span></p></body></html>"))
        self.refresh_current.setText(_translate("system", "REFRESH"))
        self.refresh_avg.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">REFRESH DATA</span></p></body></html>"))
        self.refresh_avg.setText(_translate("system", "REFRESH"))
        self.dark_sky_1.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">DARK SKY</span></p></body></html>"))
        self.dark_sky_1.setText(_translate("system", "POWERED BY DARK SKY"))
        system.setItemText(system.indexOf(self.Fuzzy_system), _translate("system", "Page 1"))
        # Page 2 strings.
        self.title_2.setText(_translate("system", "ROOM CONDITIONS"))
        self.room_temp_txt.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.room_hum_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt; font-weight:600; color:#e95420;\">VIEW PLOT IN UBIDOTS</span></p></body></html>"))
        self.room_hum_but.setText(_translate("system", "HUMIDITY"))
        self.room_hum_txt.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.room_temp_but.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt; font-weight:600; color:#e95420;\">VIEW PLOT IN UBIDOTS</span></p></body></html>"))
        self.room_temp_but.setText(_translate("system", "TEMPERATURE"))
        self.heater_on.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.cooler_on.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.heater_off.setText(_translate("system", "<html><head/><body><p align=\"right\"><br/></p></body></html>"))
        self.cooler_off.setText(_translate("system", "<html><head/><body><p align=\"right\"><br/></p></body></html>"))
        self.heater.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">HEATER STATUS</span></p></body></html>"))
        self.heater.setText(_translate("system", "<html><head/><body><p align=\"right\">HEATER</p></body></html>"))
        self.cooler.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">COOLER STATUS</span></p></body></html>"))
        self.cooler.setText(_translate("system", "<html><head/><body><p align=\"right\">COOLER</p></body></html>"))
        self.dehumid_on.setText(_translate("system", "<html><head/><body><p align=\"right\"><br/></p></body></html>"))
        self.humid_off.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.humid_on.setText(_translate("system", "<html><head/><body><p align=\"right\"><br/></p></body></html>"))
        self.dehumid_off.setText(_translate("system", "<html><head/><body><p><br/></p></body></html>"))
        self.humidifier.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">HUMIDIFIER STATUS</span></p></body></html>"))
        self.humidifier.setText(_translate("system", "HUMIDIFIER"))
        self.dehumidifier.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">DEHUMIDIFIER STATUS</span></p></body></html>"))
        self.dehumidifier.setText(_translate("system", "DEHUMIDIFIER"))
        self.running.setText(_translate("system", "<html><head/><body><p align=\"center\">RUNNING IN ECONOMY LEVEL</p></body></html>"))
        self.run_eco_level.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">VIEW PLOT</span></p></body></html>"))
        self.run_eco_level.setText(_translate("system", "<html><head/><body><p align=\"center\"><br/></p></body></html>"))
        self.open_ubidots.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">OPEN UBIDOTS IN WEB BROWSER</span></p></body></html>"))
        self.open_ubidots.setText(_translate("system", "OPEN UBIDOTS"))
        self.dark_sky_2.setToolTip(_translate("system", "<html><head/><body><p align=\"center\"><span style=\" font-family:\'Moon\'; font-size:9pt; font-weight:600; color:#e95420;\">DARK SKY</span></p></body></html>"))
        self.dark_sky_2.setText(_translate("system", "POWERED BY DARK SKY"))
        system.setItemText(system.indexOf(self.Room_Conditions), _translate("system", "Page 2"))
def DarkSky(self):
webbrowser.open('https://darksky.net/poweredby/', new = 2)
def Time(self):
self.time_hours.setText(QtCore.QTime.currentTime().toString("h"))
self.time_min.setText(QtCore.QTime.currentTime().toString("mm"))
def Date(self):
self.date.setText(QtCore.QDate.currentDate().toString("ddd, MMM d"))
def loading1(self):
self.done1 = False
movie = QMovie("Icons/loading.gif")
splash = MovieSplashScreen(movie)
splash.setMask(splash.mask())
splash.show()
test1 = Thread(target = self.Update_Average).start()
while not self.done1:
app.processEvents()
splash.finish(system)
def Update_Average(self):
f = open('Ubidots_APIkey.txt', 'r')
apikey = f.readline().strip()
f.close()
api = ApiClient(token = apikey)
try:
temp = api.get_variable("58d76383762542260cf36d8f")
cloud_cover = api.get_variable("58d76394762542260a851a05")
batt = api.get_variable("58d763aa762542260cf36f24")
except ValueError:
print('Unable to obtain variable')
f = open('DS_APIkey.txt','r')
apikey = f.read()
f.close()
Bangalore = [12.9716, 77.5946]
fio = ForecastIO.ForecastIO(apikey,
units=ForecastIO.ForecastIO.UNITS_SI,
lang=ForecastIO.ForecastIO.LANG_ENGLISH,
latitude=Bangalore[0], longitude=Bangalore[1],
)
tempc = 0
clouds = 0
if fio.has_hourly() is True:
hourly = FIOHourly.FIOHourly(fio)
for hour in range(0, 48):
tempc = tempc + float(str(hourly.get_hour(hour)['temperature']))
clouds = clouds + float(str(hourly.get_hour(hour)['cloudCover']))
else:
print('No Hourly data')
self.t = round(tempc / 48, 2)
self.c = round(clouds / 48, 2)
self.b = self.Update_Battery()
try:
temp.save_value({'value': self.t})
cloud_cover.save_value({'value': self.c})
batt.save_value({'value': self.b})
time.sleep(1)
except:
print('Value not sent')
self.avg_temp_txt.setText('{:0.01f}°'.format(self.t))
self.avg_cc_txt.setText('{}%'.format(int(self.c*100)))
self.avg_batt_txt.setText('{}%'.format(self.b))
self.done1 = True
def loading2(self):
self.done2 = False
movie = QMovie("Icons/loading.gif")
splash = MovieSplashScreen(movie)
splash.setMask(splash.mask())
splash.show()
test = Thread(target = self.Update_Current).start()
while not self.done2:
app.processEvents()
splash.finish(system)
def Batt_Percent(self):
webbrowser.open('https://app.ubidots.com/ubi/getchart/page/R2kbUV5P5DSJVlXdTfMOXflxNtM', new = 2)
def Avg_CC(self):
webbrowser.open('https://app.ubidots.com/ubi/getchart/page/0f62Hh2lV0PMO8-p_X7DYFyNnd4', new = 2)
def Avg_temp(self):
webbrowser.open('https://app.ubidots.com/ubi/getchart/page/DlD6wC0uiipZzD3nbBT_Xty6myk', new = 2)
    def Update_Battery(self):
        """Read the battery voltage via the MCP3008 ADC, show the percentage
        on the SSD1306 OLED, and return it.

        Mapping: >=13.6 V -> 100%, 11.6..13.6 V -> linear 0..100%,
        otherwise 0 (treated as a connection error).
        NOTE(review): the Ubidots variable fetched below is immediately
        overwritten and never used — presumably leftover code; the blocking
        sleeps (5 s + 2 s + 1 s) also freeze the caller for ~8 s.
        """
        f = open('Ubidots_APIkey.txt', 'r')
        apikey = f.readline().strip()
        f.close()
        api = ApiClient(token = apikey)
        try:
            batt = api.get_variable("58d763aa762542260cf36f24")
        except ValueError:
            print('Value Error')
        # Initialize library.
        disp.begin()
        time.sleep(5)
        width = disp.width
        height = disp.height
        # Clear display.
        disp.clear()
        disp.display()
        # 1-bit image buffer matching the OLED resolution.
        image = Image.new('1', (width, height))
        # Get drawing object to draw on image.
        draw = ImageDraw.Draw(image)
        # Load default font.
        font = ImageFont.load_default()
        # Alternatively load a TTF font. Make sure the .ttf font file is in the same directory as the python script!
        # Some other nice fonts to try: http://www.dafont.com/bitmap.php
        #font = ImageFont.truetype('Minecraftia.ttf', 8)
        # Hardware SPI configuration:
        SPI_PORT = 0
        SPI_DEVICE = 0
        mcp = Adafruit_MCP3008.MCP3008(spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE))
        # Main program loop.
        time.sleep(2)
        # Draw a black filled box to clear the image.
        draw.rectangle((0,0,width,height), outline=0, fill=0)
        # Channel 0 carries the divided battery voltage (10-bit reading).
        value = mcp.read_adc(0)
        volts = ((value*3.3)) / float(1023) #voltage divider voltage
        volts = volts * 5.7 #actual voltage
        volts = round(volts,2)
        if (volts >=13.6):
            batt = 100
            print('100% Battery')
            draw.text((0, 0), 'Battery percent at: ',font=font, fill = 255)
            draw.text((50, 20),str(batt) , font=font, fill = 255)
            disp.image(image)
            disp.display()
            time.sleep(1)
        elif (volts > 11.6):
            # Linear map of the 11.6..13.6 V window onto 0..100%.
            batt = round ((volts - 11.6) * 50,1)
            print(batt,'% Battery')
            draw.text((10, 0), 'Battery percent at: ',font=font, fill = 255)
            draw.text((45, 20),str(batt) , font=font, fill = 255)
            disp.image(image)
            disp.display()
            time.sleep(1)
        else:
            batt = 0
            print('Connection Error')
            draw.text((55, 10),':(' , font=font, fill = 255)
            disp.image(image)
            disp.display()
        # Print the ADC values.
        # Pause time.
        time.sleep(1)
        return(batt)
def Update_Current(self):
f = open('DS_APIkey.txt','r')
apikey = f.read()
f.close()
Bangalore = [12.9716, 77.5946]
fio = ForecastIO.ForecastIO(apikey,
units=ForecastIO.ForecastIO.UNITS_SI,
lang=ForecastIO.ForecastIO.LANG_ENGLISH,
latitude=Bangalore[0], longitude=Bangalore[1],
)
if fio.has_currently() is True:
currently = FIOCurrently.FIOCurrently(fio)
self.temp_text.setText('{:0.01f}°'.format(currently.temperature))
self.cloud_cover_txt.setText('{}%'.format(int(currently.cloudCover * 100)))
else:
print('No Currently data')
self.done2 = True
def Defuzz(self):
# New Antecedent/Consequent objects hold universe variables and membership
# functions
batt_percent = ctrl.Antecedent(np.arange(0, 100, 1), 'Battery_percentage')
temp = ctrl.Antecedent(np.arange(15, 30, 1), 'Temperature')
cloud_cover = ctrl.Antecedent(np.arange(0, 1, 0.01), 'Cloud_cover')
eco_level = ctrl.Consequent(np.arange(1, 4, 0.01), 'Economy_level')
# Battery membership function population
batt_percent['Low_battery'] = fuzz.trapmf(batt_percent.universe, [0, 0, 20, 30])
batt_percent['Medium_battery'] = fuzz.trapmf(batt_percent.universe, [20, 25, 75, 80])
batt_percent['High_battery'] = fuzz.trapmf(batt_percent.universe, [75, 80, 100, 100])
# Temperature membership function population
temp['Low_temperature'] = fuzz.trapmf(temp.universe, [0, 0, 18, 20])
temp['Medium_temperature'] = fuzz.trapmf(temp.universe, [18, 20, 24, 26])
temp['High_temperature'] = fuzz.trapmf(temp.universe, [24 , 26, 30, 30])
# Cloud_cover membership function population
cloud_cover['Minimum_clouds'] = fuzz.trapmf(cloud_cover.universe, [0, 0, 0.20, 0.25])
cloud_cover['Medium_clouds'] = fuzz.trapmf(cloud_cover.universe, [0.20, 0.25, 0.65, 0.70])
cloud_cover['High_clouds'] = fuzz.trapmf(cloud_cover.universe, [0.65, 0.70, 1, 1])
# Custom membership functions can be built interactively with a familiar,
# Pythonic API
eco_level['Critical'] = fuzz.trimf(eco_level.universe, [0, 1.0, 2.0])
eco_level['Alert'] = fuzz.trimf(eco_level.universe, [1.75, 2.25, 2.75])
eco_level['Normal'] = fuzz.trimf(eco_level.universe, [2.5, 3.0, 3.5])
eco_level['Economyless'] = fuzz.trimf(eco_level.universe, [3.25, 4.0, 5.0])
# Rules
rule1 = ctrl.Rule(batt_percent['Low_battery'] &
(~temp['High_temperature']),
eco_level['Critical'])
rule2 = ctrl.Rule(batt_percent['Low_battery'] &
temp['High_temperature'] &
cloud_cover['High_clouds'],
eco_level['Critical'])
rule3 = ctrl.Rule(batt_percent['Low_battery'] &
temp['High_temperature'] &
(~cloud_cover['High_clouds']),
eco_level['Alert'])
rule4 = ctrl.Rule(batt_percent['Medium_battery'] &
temp['Low_temperature'] &
(~cloud_cover['High_clouds']),
eco_level['Alert'])
rule5 = ctrl.Rule(batt_percent['Medium_battery'] &
temp['Low_temperature'] &
cloud_cover['High_clouds'],
eco_level['Critical'])
rule6 = ctrl.Rule(batt_percent['Medium_battery'] &
(~temp['Low_temperature']) &
(~cloud_cover['High_clouds']),
eco_level['Normal'])
rule7 = ctrl.Rule(batt_percent['Medium_battery'] &
(~temp['Low_temperature']) &
cloud_cover['High_clouds'],
eco_level['Alert'])
rule8 = ctrl.Rule(batt_percent['High_battery'] &
temp['Low_temperature'] &
(~cloud_cover['High_clouds']),
eco_level['Normal'])
rule9 = ctrl.Rule(batt_percent['High_battery'] &
temp['Low_temperature'] &
cloud_cover['High_clouds'],
eco_level['Alert'])
rule10 = ctrl.Rule(batt_percent['High_battery'] &
(~temp['Low_temperature']) &
(~cloud_cover['High_clouds']),
eco_level['Economyless'])
rule11 = ctrl.Rule(batt_percent['High_battery'] &
(~temp['Low_temperature']) &
cloud_cover['High_clouds'],
eco_level['Normal'])
eco_ctrl = ctrl.ControlSystem([rule1, rule2, rule3, rule4,
rule5, rule6, rule7, rule8,
rule9, rule10, rule11])
eco_mode = ctrl.ControlSystemSimulation(eco_ctrl)
# Pass inputs to the ControlSystem using Antecedent labels with Pythonic API
# Note: if you like passing many inputs all at once, use .inputs(dict_of_data)
eco_mode.input['Temperature'] = self.t
eco_mode.input['Cloud_cover'] = self.c
eco_mode.input['Battery_percentage'] = self.b
# Crunch the numbers<|fim▁hole|> self.defuzz_txt.setText(format(defuzz,'.2f'))
self.eco = int(defuzz + 0.5)
def Eco(self):
if (self.eco < 1):
self.eco = 1
self.eco_level_txt.setNum(self.eco)
self.run_eco_level.setNum(self.eco)
filename1 = datetime.datetime.now().strftime("%Y.%m.%d_%H:%M")
save_path = 'Logs/'
complete_path = os.path.join(save_path, filename1+'.log')
f = open(complete_path, 'w')
if (self.t == 0) or (self.c == 0) or (self.b == 0):
f.write('Data Unavailable, running in economy level 1')
else:
f.write('Average Temperature is: ' + str(self.t) + ' °C' + '\n')
f.write('Average Cloud Cover is: ' + str(self.c) + ' %' + '\n')
f.write('Battery level is: ' + str(self.b) + '%' + '\n')
f.write('Economy Level is: ' + str(self.eco) + '\n')
f.close()
else:
self.eco_level_txt.setNum(self.eco)
self.run_eco_level.setNum(self.eco)
filename1 = datetime.datetime.now().strftime("%Y.%m.%d_%H:%M")
save_path = 'Logs/'
complete_path = os.path.join(save_path, filename1+'.txt')
f = open(complete_path, 'w')
if (self.t == 0) or (self.c == 0) or (self.b == 0):
f.write('Data Unavailable, running in economy level 1')
else:
f.write('Average Temperature is: ' + str(self.t) + ' °C' + '\n')
f.write('Average Cloud Cover is: ' + str(self.c) + ' %' + '\n')
f.write('Battery level is: ' + str(self.b) + ' % ' + '\n')
f.write('Economy Level is: ' + str(self.eco) + '\n')
f.close()
def Room_cond(self):
if ser.isOpen():
ser.close()
ser.open()
ser.isOpen()
ser.write('s'.encode())
time.sleep(2)
response = ser.readline().strip().decode()
hum = float(response[:5])
temp = float(response[5:])
f = open('Ubidots_APIkey.txt', 'r')
apikey = f.readline().strip()
f.close()
api = ApiClient(token = apikey)
try:
roomtemp = api.get_variable("58d763b8762542260a851bd1")
roomhumidity = api.get_variable("58d763c57625422609b8d088")
except ValueError:
print('Unable to obtain variable')
self.roomt = temp
self.roomh = hum
try:
roomtemp.save_value({'value': self.roomt})
roomhumidity.save_value({'value': self.roomh})
time.sleep(1)
except:
pass
self.room_temp_txt.setText(format(self.roomt,'.2f'))
self.room_hum_txt.setText(format(self.roomh,'.2f'))
def Room_temp_browser(self):
webbrowser.open('https://app.ubidots.com/ubi/getchart/page/G284654CCK1E77kbBR7zmpBDNkw', new = 2)
def Room_hum_browser(self):
webbrowser.open('https://app.ubidots.com/ubi/getchart/page/qgaJ95jUNq91E3aVxJsNo7NphbU', new = 2)
def Run_System(self):
f = open('Ubidots_APIkey.txt', 'r')
apikey = f.readline().strip()
f.close()
api = ApiClient(token = apikey)
self.cooler_on.setText(' ')
self.heater_on.setText(' ')
self.humid_on.setText(' ')
self.dehumid_on.setText(' ')
self.cooler_off.setText(' ')
self.heater_off.setText(' ')
self.humid_off.setText(' ')
self.dehumid_off.setText(' ')
self.Room_cond()
try:
cooler = api.get_variable("58d768e0762542260a855c7a")
heater = api.get_variable("58d768eb7625422609b91152")
humidifier = api.get_variable("58d768f8762542260cf3b292")
exhaust = api.get_variable("58d76907762542260dfad769")
except ValueError:
print('Unable to obtain variable')
cooler.save_value({'value': 0})
heater.save_value({'value': 0})
humidifier.save_value({'value': 0})
exhaust.save_value({'value': 0})
if (self.eco < 1):
self.run_eco_level.setText('--')
elif (self.eco == 1):
t = self.roomt
h = self.roomh
if (t >= 35):
ser.write('c'.encode())
self.cooler_on.setText('ON')
self.heater_off.setText('OFF')
cooler.save_value({'value': 1})
heater.save_value({'value': 0})
time.sleep(1)
if (t <= 15):
ser.write('f'.encode())
self.heater_on.setText('ON')
self.cooler_off.setText('OFF')
heater.save_value({'value': 1})
cooler.save_value({'value': 0})
time.sleep(1)
if (h <= 25):
ser.write('h'.encode())
self.humid_on.setText('ON')
self.dehumid_off.setText('OFF')
humidifier.save_value({'value': 1})
exhaust.save_value({'value': 0})
time.sleep(1)
if (h >= 80):
ser.write('e'.encode())
self.dehumid_on.setText('ON')
self.humid_off.setText('OFF')
exhaust.save_value({'value': 1})
humidifier.save_value({'value': 0})
time.sleep(1)
if ((h > 25 and h < 80)):
self.humid_off.setText('OFF')
self.dehumid_off.setText('OFF')
humidifier.save_value({'value': 0})
exhaust.save_value({'value': 0})
time.sleep(1)
if ((t > 15) and (t < 35)):
self.cooler_off.setText('OFF')
self.heater_off.setText('OFF')
cooler.save_value({'value': 0})
heater.save_value({'value': 0})
time.sleep(1)
elif (self.eco == 2):
t = self.roomt
h = self.roomh
if (t >= 32):
ser.write('c'.encode())
self.cooler_on.setText('ON')
self.heater_off.setText('OFF')
cooler.save_value({'value': 1})
heater.save_value({'value': 0})
time.sleep(1)
if (t <= 18):
ser.write('f'.encode())
self.heater_on.setText('ON')
self.cooler_off.setText('OFF')
heater.save_value({'value': 1})
cooler.save_value({'value': 0})
time.sleep(1)
if (h <= 30):
ser.write('h'.encode())
self.humid_on.setText('ON')
self.dehumid_off.setText('OFF')
humidifier.save_value({'value': 1})
exhaust.save_value({'value': 0})
time.sleep(1)
if (h >= 70):
ser.write('e'.encode())
self.dehumid_on.setText('ON')
self.humid_off.setText('OFF')
exhaust.save_value({'value': 1})
humidifier.save_value({'value': 0})
time.sleep(1)
if ((h > 30 and h < 70)):
self.humid_off.setText('OFF')
self.dehumid_off.setText('OFF')
exhaust.save_value({'value': 0})
humidifier.save_value({'value': 0})
time.sleep(1)
if ((t > 18) and (t < 32)):
self.cooler_off.setText('OFF')
self.heater_off.setText('OFF')
cooler.save_value({'value': 0})
heater.save_value({'value': 0})
time.sleep(1)
elif (self.eco == 3):
t = self.roomt
h = self.roomh
if (t >= 30):
ser.write('c'.encode())
self.cooler_on.setText('ON')
self.heater_off.setText('OFF')
cooler.save_value({'value': 1})
heater.save_value({'value': 0})
time.sleep(1)
if (t <= 20):
ser.write('f'.encode())
self.heater_on.setText('ON')
self.cooler_off.setText('OFF')
heater.save_value({'value': 1})
cooler.save_value({'value': 0})
time.sleep(1)
if (h <= 40):
ser.write('h'.encode())
self.humid_on.setText('ON')
self.dehumid_off.setText('OFF')
humidifier.save_value({'value': 1})
exhaust.save_value({'value': 0})
time.sleep(1)
if (h >= 60):
ser.write('e'.encode())
self.dehumid_on.setText('ON')
self.humid_off.setText('OFF')
exhaust.save_value({'value': 1})
humidifier.save_value({'value': 0})
time.sleep(1)
if ((h > 40 and h < 60)):
self.humid_off.setText('OFF')
self.dehumid_off.setText('OFF')
exhaust.save_value({'value': 0})
humidifier.save_value({'value': 0})
time.sleep(1)
if ((t > 20) and (t < 30)):
self.cooler_off.setText('OFF')
self.heater_off.setText('OFF')
cooler.save_value({'value': 0})
heater.save_value({'value': 0})
time.sleep(1)
elif (self.eco == 4):
t = self.roomt
h = self.roomh
if (t >= 27):
ser.write('c'.encode())
self.cooler_on.setText('ON')
self.heater_off.setText('OFF')
cooler.save_value({'value': 1})
heater.save_value({'value': 0})
time.sleep(1)
if (t <= 22):
ser.write('f'.encode())
self.heater_on.setText('ON')
self.cooler_off.setText('OFF')
heater.save_value({'value': 1})
cooler.save_value({'value': 0})
time.sleep(1)
if (h <= 25):
ser.write('h'.encode())
self.humid_on.setText('ON')
self.dehumid_off.setText('OFF')
humidifier.save_value({'value': 1})
exhaust.save_value({'value': 0})
time.sleep(1)
if (h >= 50):
ser.write('e'.encode())
self.dehumid_on.setText('ON')
self.humid_off.setText('OFF')
exhaust.save_value({'value': 1})
humidifier.save_value({'value': 0})
time.sleep(1)
if ((h > 25) and (h < 50)):
self.humid_off.setText('OFF')
self.dehumid_off.setText('OFF')
exhaust.save_value({'value': 0})
humidifier.save_value({'value': 0})
time.sleep(1)
if ((t > 22) and (t < 27)):
self.cooler_off.setText('OFF')
self.heater_off.setText('OFF')
cooler.save_value({'value': 0})
heater.save_value({'value': 0})
time.sleep(1)
def Open_ubidots(self):
webbrowser.open('https://app.ubidots.com/ubi/public/getdashboard/page/P8OAd8cR6dtoL6aO4AQ384euynE', new = 2)
import system_rc
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
system = QtWidgets.QToolBox()
ui = Ui_system()
ui.setupUi(system)
system.move(QApplication.desktop().screen().rect().center() - system.rect().center())
system.show()
sys.exit(app.exec_())<|fim▁end|> | eco_mode.compute()
defuzz = eco_mode.output['Economy_level']
|
<|file_name|>wrappers.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-gogo.
// source: wrappers.proto
// DO NOT EDIT!
/*
Package types is a generated protocol buffer package.
It is generated from these files:
wrappers.proto
It has these top-level messages:
DoubleValue
FloatValue
Int64Value
UInt64Value
Int32Value
UInt32Value
BoolValue
StringValue
BytesValue
*/
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
import sort "sort"
import strconv "strconv"
import reflect "reflect"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// Wrapper message for `double`.
//
// The JSON representation for `DoubleValue` is JSON number.
type DoubleValue struct {
// The double value.
Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *DoubleValue) Reset() { *m = DoubleValue{} }
func (*DoubleValue) ProtoMessage() {}
func (*DoubleValue) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{0} }
func (*DoubleValue) XXX_WellKnownType() string { return "DoubleValue" }
func (m *DoubleValue) GetValue() float64 {
if m != nil {
return m.Value
}
return 0
}
// Wrapper message for `float`.
//
// The JSON representation for `FloatValue` is JSON number.
type FloatValue struct {
// The float value.
Value float32 `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *FloatValue) Reset() { *m = FloatValue{} }
func (*FloatValue) ProtoMessage() {}
func (*FloatValue) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{1} }
func (*FloatValue) XXX_WellKnownType() string { return "FloatValue" }
func (m *FloatValue) GetValue() float32 {
if m != nil {
return m.Value
}
return 0
}
// Wrapper message for `int64`.
//
// The JSON representation for `Int64Value` is JSON string.
type Int64Value struct {
// The int64 value.
Value int64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *Int64Value) Reset() { *m = Int64Value{} }
func (*Int64Value) ProtoMessage() {}
func (*Int64Value) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{2} }
func (*Int64Value) XXX_WellKnownType() string { return "Int64Value" }
func (m *Int64Value) GetValue() int64 {
if m != nil {
return m.Value
}
return 0
}
// Wrapper message for `uint64`.
//
// The JSON representation for `UInt64Value` is JSON string.
type UInt64Value struct {
// The uint64 value.
Value uint64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *UInt64Value) Reset() { *m = UInt64Value{} }
func (*UInt64Value) ProtoMessage() {}
func (*UInt64Value) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{3} }
func (*UInt64Value) XXX_WellKnownType() string { return "UInt64Value" }
func (m *UInt64Value) GetValue() uint64 {
if m != nil {
return m.Value
}
return 0
}
// Wrapper message for `int32`.
//
// The JSON representation for `Int32Value` is JSON number.
type Int32Value struct {
// The int32 value.
Value int32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *Int32Value) Reset() { *m = Int32Value{} }
func (*Int32Value) ProtoMessage() {}
func (*Int32Value) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{4} }
func (*Int32Value) XXX_WellKnownType() string { return "Int32Value" }
func (m *Int32Value) GetValue() int32 {
if m != nil {
return m.Value
}
return 0
}
// Wrapper message for `uint32`.
//
// The JSON representation for `UInt32Value` is JSON number.
type UInt32Value struct {
// The uint32 value.
Value uint32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *UInt32Value) Reset() { *m = UInt32Value{} }
func (*UInt32Value) ProtoMessage() {}
func (*UInt32Value) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{5} }
func (*UInt32Value) XXX_WellKnownType() string { return "UInt32Value" }
func (m *UInt32Value) GetValue() uint32 {
if m != nil {
return m.Value
}
return 0
}
// Wrapper message for `bool`.
//
// The JSON representation for `BoolValue` is JSON `true` and `false`.
type BoolValue struct {
// The bool value.
Value bool `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *BoolValue) Reset() { *m = BoolValue{} }
func (*BoolValue) ProtoMessage() {}
func (*BoolValue) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{6} }
func (*BoolValue) XXX_WellKnownType() string { return "BoolValue" }
func (m *BoolValue) GetValue() bool {
if m != nil {
return m.Value
}
return false
}
// Wrapper message for `string`.
//
// The JSON representation for `StringValue` is JSON string.
type StringValue struct {
// The string value.
Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *StringValue) Reset() { *m = StringValue{} }
func (*StringValue) ProtoMessage() {}
func (*StringValue) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{7} }
func (*StringValue) XXX_WellKnownType() string { return "StringValue" }
func (m *StringValue) GetValue() string {
if m != nil {
return m.Value
}
return ""
}
// Wrapper message for `bytes`.
//
// The JSON representation for `BytesValue` is JSON string.
type BytesValue struct {
// The bytes value.
Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *BytesValue) Reset() { *m = BytesValue{} }
func (*BytesValue) ProtoMessage() {}
func (*BytesValue) Descriptor() ([]byte, []int) { return fileDescriptorWrappers, []int{8} }
func (*BytesValue) XXX_WellKnownType() string { return "BytesValue" }
func (m *BytesValue) GetValue() []byte {
if m != nil {
return m.Value
}
return nil
}
func init() {
proto.RegisterType((*DoubleValue)(nil), "google.protobuf.DoubleValue")
proto.RegisterType((*FloatValue)(nil), "google.protobuf.FloatValue")
proto.RegisterType((*Int64Value)(nil), "google.protobuf.Int64Value")
proto.RegisterType((*UInt64Value)(nil), "google.protobuf.UInt64Value")
proto.RegisterType((*Int32Value)(nil), "google.protobuf.Int32Value")
proto.RegisterType((*UInt32Value)(nil), "google.protobuf.UInt32Value")
proto.RegisterType((*BoolValue)(nil), "google.protobuf.BoolValue")
proto.RegisterType((*StringValue)(nil), "google.protobuf.StringValue")
proto.RegisterType((*BytesValue)(nil), "google.protobuf.BytesValue")
}
func (this *DoubleValue) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*DoubleValue)
if !ok {
that2, ok := that.(DoubleValue)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if this.Value != that1.Value {
if this.Value < that1.Value {
return -1
}
return 1
}
return 0
}
func (this *FloatValue) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*FloatValue)
if !ok {
that2, ok := that.(FloatValue)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if this.Value != that1.Value {
if this.Value < that1.Value {
return -1
}
return 1
}
return 0
}
func (this *Int64Value) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*Int64Value)
if !ok {
that2, ok := that.(Int64Value)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if this.Value != that1.Value {
if this.Value < that1.Value {
return -1
}
return 1
}
return 0
}
func (this *UInt64Value) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*UInt64Value)
if !ok {
that2, ok := that.(UInt64Value)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if this.Value != that1.Value {
if this.Value < that1.Value {
return -1
}
return 1
}
return 0
}
func (this *Int32Value) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*Int32Value)
if !ok {
that2, ok := that.(Int32Value)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if this.Value != that1.Value {
if this.Value < that1.Value {
return -1
}
return 1
}
return 0
}
func (this *UInt32Value) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*UInt32Value)
if !ok {
that2, ok := that.(UInt32Value)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if this.Value != that1.Value {
if this.Value < that1.Value {
return -1
}
return 1
}
return 0
}
func (this *BoolValue) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*BoolValue)
if !ok {
that2, ok := that.(BoolValue)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if this.Value != that1.Value {
if !this.Value {
return -1
}
return 1
}
return 0
}
func (this *StringValue) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*StringValue)
if !ok {
that2, ok := that.(StringValue)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if this.Value != that1.Value {
if this.Value < that1.Value {
return -1
}
return 1
}
return 0
}
func (this *BytesValue) Compare(that interface{}) int {
if that == nil {
if this == nil {
return 0
}
return 1
}
that1, ok := that.(*BytesValue)
if !ok {
that2, ok := that.(BytesValue)
if ok {
that1 = &that2
} else {
return 1
}
}
if that1 == nil {
if this == nil {
return 0
}
return 1
} else if this == nil {
return -1
}
if c := bytes.Compare(this.Value, that1.Value); c != 0 {
return c
}
return 0
}
func (this *DoubleValue) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*DoubleValue)
if !ok {
that2, ok := that.(DoubleValue)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Value != that1.Value {
return false
}
return true
}
func (this *FloatValue) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*FloatValue)
if !ok {
that2, ok := that.(FloatValue)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Value != that1.Value {
return false
}
return true
}
func (this *Int64Value) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*Int64Value)
if !ok {
that2, ok := that.(Int64Value)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Value != that1.Value {
return false
}
return true
}
func (this *UInt64Value) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true<|fim▁hole|> that1, ok := that.(*UInt64Value)
if !ok {
that2, ok := that.(UInt64Value)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Value != that1.Value {
return false
}
return true
}
func (this *Int32Value) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*Int32Value)
if !ok {
that2, ok := that.(Int32Value)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Value != that1.Value {
return false
}
return true
}
func (this *UInt32Value) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*UInt32Value)
if !ok {
that2, ok := that.(UInt32Value)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Value != that1.Value {
return false
}
return true
}
func (this *BoolValue) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*BoolValue)
if !ok {
that2, ok := that.(BoolValue)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Value != that1.Value {
return false
}
return true
}
func (this *StringValue) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*StringValue)
if !ok {
that2, ok := that.(StringValue)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if this.Value != that1.Value {
return false
}
return true
}
func (this *BytesValue) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
}
that1, ok := that.(*BytesValue)
if !ok {
that2, ok := that.(BytesValue)
if ok {
that1 = &that2
} else {
return false
}
}
if that1 == nil {
if this == nil {
return true
}
return false
} else if this == nil {
return false
}
if !bytes.Equal(this.Value, that1.Value) {
return false
}
return true
}
func (this *DoubleValue) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.DoubleValue{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func (this *FloatValue) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.FloatValue{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func (this *Int64Value) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.Int64Value{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func (this *UInt64Value) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.UInt64Value{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func (this *Int32Value) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.Int32Value{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func (this *UInt32Value) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.UInt32Value{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func (this *BoolValue) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.BoolValue{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func (this *StringValue) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.StringValue{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func (this *BytesValue) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 5)
s = append(s, "&types.BytesValue{")
s = append(s, "Value: "+fmt.Sprintf("%#v", this.Value)+",\n")
s = append(s, "}")
return strings.Join(s, "")
}
func valueToGoStringWrappers(v interface{}, typ string) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv)
}
func extensionToGoStringWrappers(m github_com_gogo_protobuf_proto.Message) string {
e := github_com_gogo_protobuf_proto.GetUnsafeExtensionsMap(m)
if e == nil {
return "nil"
}
s := "proto.NewUnsafeXXX_InternalExtensions(map[int32]proto.Extension{"
keys := make([]int, 0, len(e))
for k := range e {
keys = append(keys, int(k))
}
sort.Ints(keys)
ss := []string{}
for _, k := range keys {
ss = append(ss, strconv.Itoa(k)+": "+e[int32(k)].GoString())
}
s += strings.Join(ss, ",") + "})"
return s
}
func (m *DoubleValue) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *DoubleValue) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.Value != 0 {
dAtA[i] = 0x9
i++
i = encodeFixed64Wrappers(dAtA, i, uint64(math.Float64bits(float64(m.Value))))
}
return i, nil
}
func (m *FloatValue) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *FloatValue) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.Value != 0 {
dAtA[i] = 0xd
i++
i = encodeFixed32Wrappers(dAtA, i, uint32(math.Float32bits(float32(m.Value))))
}
return i, nil
}
func (m *Int64Value) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Int64Value) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.Value != 0 {
dAtA[i] = 0x8
i++
i = encodeVarintWrappers(dAtA, i, uint64(m.Value))
}
return i, nil
}
func (m *UInt64Value) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *UInt64Value) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.Value != 0 {
dAtA[i] = 0x8
i++
i = encodeVarintWrappers(dAtA, i, uint64(m.Value))
}
return i, nil
}
func (m *Int32Value) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Int32Value) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.Value != 0 {
dAtA[i] = 0x8
i++
i = encodeVarintWrappers(dAtA, i, uint64(m.Value))
}
return i, nil
}
func (m *UInt32Value) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *UInt32Value) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.Value != 0 {
dAtA[i] = 0x8
i++
i = encodeVarintWrappers(dAtA, i, uint64(m.Value))
}
return i, nil
}
func (m *BoolValue) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *BoolValue) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if m.Value {
dAtA[i] = 0x8
i++
if m.Value {
dAtA[i] = 1
} else {
dAtA[i] = 0
}
i++
}
return i, nil
}
func (m *StringValue) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *StringValue) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Value) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintWrappers(dAtA, i, uint64(len(m.Value)))
i += copy(dAtA[i:], m.Value)
}
return i, nil
}
func (m *BytesValue) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *BytesValue) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Value) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintWrappers(dAtA, i, uint64(len(m.Value)))
i += copy(dAtA[i:], m.Value)
}
return i, nil
}
// encodeFixed64Wrappers writes v into dAtA starting at offset as eight
// little-endian bytes and returns the offset just past the written value.
// The caller must guarantee that dAtA has room for offset+8 bytes.
func encodeFixed64Wrappers(dAtA []byte, offset int, v uint64) int {
	for i := uint(0); i < 8; i++ {
		dAtA[offset+int(i)] = uint8(v >> (8 * i))
	}
	return offset + 8
}
// encodeFixed32Wrappers writes v into dAtA starting at offset as four
// little-endian bytes and returns the offset just past the written value.
// The caller must guarantee that dAtA has room for offset+4 bytes.
func encodeFixed32Wrappers(dAtA []byte, offset int, v uint32) int {
	for i := uint(0); i < 4; i++ {
		dAtA[offset+int(i)] = uint8(v >> (8 * i))
	}
	return offset + 4
}
// encodeVarintWrappers writes v into dAtA at offset using base-128 varint
// encoding (low seven bits per byte, high bit set on continuation bytes)
// and returns the offset just past the last byte written.
func encodeVarintWrappers(dAtA []byte, offset int, v uint64) int {
	for ; v >= 0x80; v >>= 7 {
		dAtA[offset] = uint8(v) | 0x80
		offset++
	}
	dAtA[offset] = uint8(v)
	return offset + 1
}
func NewPopulatedDoubleValue(r randyWrappers, easy bool) *DoubleValue {
this := &DoubleValue{}
this.Value = float64(r.Float64())
if r.Intn(2) == 0 {
this.Value *= -1
}
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedFloatValue(r randyWrappers, easy bool) *FloatValue {
this := &FloatValue{}
this.Value = float32(r.Float32())
if r.Intn(2) == 0 {
this.Value *= -1
}
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedInt64Value(r randyWrappers, easy bool) *Int64Value {
this := &Int64Value{}
this.Value = int64(r.Int63())
if r.Intn(2) == 0 {
this.Value *= -1
}
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedUInt64Value(r randyWrappers, easy bool) *UInt64Value {
this := &UInt64Value{}
this.Value = uint64(uint64(r.Uint32()))
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedInt32Value(r randyWrappers, easy bool) *Int32Value {
this := &Int32Value{}
this.Value = int32(r.Int31())
if r.Intn(2) == 0 {
this.Value *= -1
}
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedUInt32Value(r randyWrappers, easy bool) *UInt32Value {
this := &UInt32Value{}
this.Value = uint32(r.Uint32())
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedBoolValue(r randyWrappers, easy bool) *BoolValue {
this := &BoolValue{}
this.Value = bool(bool(r.Intn(2) == 0))
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedStringValue(r randyWrappers, easy bool) *StringValue {
this := &StringValue{}
this.Value = string(randStringWrappers(r))
if !easy && r.Intn(10) != 0 {
}
return this
}
func NewPopulatedBytesValue(r randyWrappers, easy bool) *BytesValue {
this := &BytesValue{}
v1 := r.Intn(100)
this.Value = make([]byte, v1)
for i := 0; i < v1; i++ {
this.Value[i] = byte(r.Intn(256))
}
if !easy && r.Intn(10) != 0 {
}
return this
}
type randyWrappers interface {
Float32() float32
Float64() float64
Int63() int64
Int31() int32
Uint32() uint32
Intn(n int) int
}
// randUTF8RuneWrappers returns a random alphanumeric rune: one of the 62
// characters 0-9, A-Z or a-z, chosen uniformly via r.Intn(62).
func randUTF8RuneWrappers(r randyWrappers) rune {
	ru := r.Intn(62)
	switch {
	case ru < 10:
		// 0..9 map to '0'..'9'.
		return rune(ru + '0')
	case ru < 36:
		// 10..35 map to 'A'..'Z'.
		return rune(ru + 'A' - 10)
	default:
		// 36..61 map to 'a'..'z'.
		return rune(ru + 'a' - 36)
	}
}
func randStringWrappers(r randyWrappers) string {
v2 := r.Intn(100)
tmps := make([]rune, v2)
for i := 0; i < v2; i++ {
tmps[i] = randUTF8RuneWrappers(r)
}
return string(tmps)
}
func randUnrecognizedWrappers(r randyWrappers, maxFieldNumber int) (dAtA []byte) {
l := r.Intn(5)
for i := 0; i < l; i++ {
wire := r.Intn(4)
if wire == 3 {
wire = 5
}
fieldNumber := maxFieldNumber + r.Intn(100)
dAtA = randFieldWrappers(dAtA, r, fieldNumber, wire)
}
return dAtA
}
func randFieldWrappers(dAtA []byte, r randyWrappers, fieldNumber int, wire int) []byte {
key := uint32(fieldNumber)<<3 | uint32(wire)
switch wire {
case 0:
dAtA = encodeVarintPopulateWrappers(dAtA, uint64(key))
v3 := r.Int63()
if r.Intn(2) == 0 {
v3 *= -1
}
dAtA = encodeVarintPopulateWrappers(dAtA, uint64(v3))
case 1:
dAtA = encodeVarintPopulateWrappers(dAtA, uint64(key))
dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
case 2:
dAtA = encodeVarintPopulateWrappers(dAtA, uint64(key))
ll := r.Intn(100)
dAtA = encodeVarintPopulateWrappers(dAtA, uint64(ll))
for j := 0; j < ll; j++ {
dAtA = append(dAtA, byte(r.Intn(256)))
}
default:
dAtA = encodeVarintPopulateWrappers(dAtA, uint64(key))
dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
}
return dAtA
}
// encodeVarintPopulateWrappers appends the base-128 varint encoding of v
// to dAtA and returns the extended slice.
func encodeVarintPopulateWrappers(dAtA []byte, v uint64) []byte {
	for ; v >= 0x80; v >>= 7 {
		dAtA = append(dAtA, uint8(v)|0x80)
	}
	return append(dAtA, uint8(v))
}
func (m *DoubleValue) Size() (n int) {
var l int
_ = l
if m.Value != 0 {
n += 9
}
return n
}
func (m *FloatValue) Size() (n int) {
var l int
_ = l
if m.Value != 0 {
n += 5
}
return n
}
func (m *Int64Value) Size() (n int) {
var l int
_ = l
if m.Value != 0 {
n += 1 + sovWrappers(uint64(m.Value))
}
return n
}
func (m *UInt64Value) Size() (n int) {
var l int
_ = l
if m.Value != 0 {
n += 1 + sovWrappers(uint64(m.Value))
}
return n
}
func (m *Int32Value) Size() (n int) {
var l int
_ = l
if m.Value != 0 {
n += 1 + sovWrappers(uint64(m.Value))
}
return n
}
func (m *UInt32Value) Size() (n int) {
var l int
_ = l
if m.Value != 0 {
n += 1 + sovWrappers(uint64(m.Value))
}
return n
}
func (m *BoolValue) Size() (n int) {
var l int
_ = l
if m.Value {
n += 2
}
return n
}
func (m *StringValue) Size() (n int) {
var l int
_ = l
l = len(m.Value)
if l > 0 {
n += 1 + l + sovWrappers(uint64(l))
}
return n
}
func (m *BytesValue) Size() (n int) {
var l int
_ = l
l = len(m.Value)
if l > 0 {
n += 1 + l + sovWrappers(uint64(l))
}
return n
}
// sovWrappers reports how many bytes the base-128 varint encoding of x
// occupies: one byte per seven bits of payload, minimum one byte.
func sovWrappers(x uint64) (n int) {
	n = 1
	for x >= 0x80 {
		x >>= 7
		n++
	}
	return n
}
// sozWrappers reports the varint-encoded size of x after ZigZag encoding,
// which maps signed values of small magnitude to small unsigned values.
func sozWrappers(x uint64) (n int) {
	zigzag := (x << 1) ^ uint64(int64(x)>>63)
	return sovWrappers(zigzag)
}
// String renders the wrapper in the gogoproto debug format, e.g.
// "&DoubleValue{Value:1.5,}".  A nil receiver renders as "nil".
func (this *DoubleValue) String() string {
	if this == nil {
		return "nil"
	}
	return fmt.Sprintf("&DoubleValue{Value:%v,}", this.Value)
}
func (this *FloatValue) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&FloatValue{`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`}`,
}, "")
return s
}
func (this *Int64Value) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Int64Value{`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`}`,
}, "")
return s
}
func (this *UInt64Value) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&UInt64Value{`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`}`,
}, "")
return s
}
func (this *Int32Value) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Int32Value{`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`}`,
}, "")
return s
}
func (this *UInt32Value) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&UInt32Value{`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`}`,
}, "")
return s
}
func (this *BoolValue) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&BoolValue{`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`}`,
}, "")
return s
}
func (this *StringValue) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&StringValue{`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`}`,
}, "")
return s
}
func (this *BytesValue) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&BytesValue{`,
`Value:` + fmt.Sprintf("%v", this.Value) + `,`,
`}`,
}, "")
return s
}
// valueToStringWrappers formats a pointer for debug output: "nil" for a
// nil pointer, otherwise "*" followed by the pointed-to value.  Passing a
// non-nilable kind panics inside reflect, as in the original.
func valueToStringWrappers(v interface{}) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	return fmt.Sprintf("*%v", reflect.Indirect(rv).Interface())
}
func (m *DoubleValue) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: DoubleValue: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: DoubleValue: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 1 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
var v uint64
if (iNdEx + 8) > l {
return io.ErrUnexpectedEOF
}
iNdEx += 8
v = uint64(dAtA[iNdEx-8])
v |= uint64(dAtA[iNdEx-7]) << 8
v |= uint64(dAtA[iNdEx-6]) << 16
v |= uint64(dAtA[iNdEx-5]) << 24
v |= uint64(dAtA[iNdEx-4]) << 32
v |= uint64(dAtA[iNdEx-3]) << 40
v |= uint64(dAtA[iNdEx-2]) << 48
v |= uint64(dAtA[iNdEx-1]) << 56
m.Value = float64(math.Float64frombits(v))
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *FloatValue) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: FloatValue: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: FloatValue: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 5 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
var v uint32
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
iNdEx += 4
v = uint32(dAtA[iNdEx-4])
v |= uint32(dAtA[iNdEx-3]) << 8
v |= uint32(dAtA[iNdEx-2]) << 16
v |= uint32(dAtA[iNdEx-1]) << 24
m.Value = float32(math.Float32frombits(v))
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *Int64Value) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Int64Value: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Int64Value: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
m.Value = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Value |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *UInt64Value) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: UInt64Value: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: UInt64Value: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
m.Value = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Value |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *Int32Value) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Int32Value: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Int32Value: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
m.Value = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Value |= (int32(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *UInt32Value) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: UInt32Value: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: UInt32Value: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
m.Value = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Value |= (uint32(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *BoolValue) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: BoolValue: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: BoolValue: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
var v int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.Value = bool(v != 0)
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *StringValue) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: StringValue: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: StringValue: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthWrappers
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Value = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func (m *BytesValue) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: BytesValue: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: BytesValue: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
}
var byteLen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowWrappers
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if byteLen < 0 {
return ErrInvalidLengthWrappers
}
postIndex := iNdEx + byteLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Value = append(m.Value[:0], dAtA[iNdEx:postIndex]...)
if m.Value == nil {
m.Value = []byte{}
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipWrappers(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
// skipWrappers returns the number of bytes, counted from the start of dAtA,
// occupied by the wire-format field whose key (tag) begins at dAtA[0].  It
// is used to step over unknown fields during unmarshaling.  Group fields
// (wire type 3) are skipped recursively until the matching end-group tag
// (wire type 4) is found.
func skipWrappers(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		// Decode the key varint carrying the field number and wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowWrappers
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint: consume bytes until one without the continuation bit.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowWrappers
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
			return iNdEx, nil
		case 1:
			// Fixed 64-bit value.
			iNdEx += 8
			return iNdEx, nil
		case 2:
			// Length-delimited: a varint length followed by that many bytes.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowWrappers
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			// FIX: reject a negative (overflowed) length BEFORE advancing
			// the index; previously iNdEx was moved first, so a corrupt
			// length could shift the index backwards before the error check.
			if length < 0 {
				return 0, ErrInvalidLengthWrappers
			}
			iNdEx += length
			return iNdEx, nil
		case 3:
			// Start-group: skip nested fields until the matching end-group.
			for {
				var innerWire uint64
				var start int = iNdEx
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return 0, ErrIntOverflowWrappers
					}
					if iNdEx >= l {
						return 0, io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					innerWire |= (uint64(b) & 0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				innerWireType := int(innerWire & 0x7)
				if innerWireType == 4 {
					break
				}
				next, err := skipWrappers(dAtA[start:])
				if err != nil {
					return 0, err
				}
				iNdEx = start + next
			}
			return iNdEx, nil
		case 4:
			// Bare end-group tag: nothing further to consume.
			return iNdEx, nil
		case 5:
			// Fixed 32-bit value.
			iNdEx += 4
			return iNdEx, nil
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
	}
	panic("unreachable")
}

var (
	// ErrInvalidLengthWrappers is returned when a length-delimited field
	// reports a negative (overflowed) length.
	ErrInvalidLengthWrappers = fmt.Errorf("proto: negative length found during unmarshaling")
	// ErrIntOverflowWrappers is returned when a varint does not terminate
	// within the 64 bits it may legally occupy.
	ErrIntOverflowWrappers = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("wrappers.proto", fileDescriptorWrappers) }
var fileDescriptorWrappers = []byte{
// 281 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0xe2, 0x2b, 0x2f, 0x4a, 0x2c,
0x28, 0x48, 0x2d, 0x2a, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x4f, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0x85, 0xf0, 0x92, 0x4a, 0xd3, 0x94, 0x94, 0xb9, 0xb8, 0x5d, 0xf2, 0x4b, 0x93, 0x72,
0x52, 0xc3, 0x12, 0x73, 0x4a, 0x53, 0x85, 0x44, 0xb8, 0x58, 0xcb, 0x40, 0x0c, 0x09, 0x46, 0x05,
0x46, 0x0d, 0xc6, 0x20, 0x08, 0x47, 0x49, 0x89, 0x8b, 0xcb, 0x2d, 0x27, 0x3f, 0xb1, 0x04, 0x8b,
0x1a, 0x26, 0x24, 0x35, 0x9e, 0x79, 0x25, 0x66, 0x26, 0x58, 0xd4, 0x30, 0xc3, 0xd4, 0x28, 0x73,
0x71, 0x87, 0xe2, 0x52, 0xc4, 0x82, 0x6a, 0x90, 0xb1, 0x11, 0x16, 0x35, 0xac, 0x68, 0x06, 0x61,
0x55, 0xc4, 0x0b, 0x53, 0xa4, 0xc8, 0xc5, 0xe9, 0x94, 0x9f, 0x9f, 0x83, 0x45, 0x09, 0x07, 0x92,
0x39, 0xc1, 0x25, 0x45, 0x99, 0x79, 0xe9, 0x58, 0x14, 0x71, 0x22, 0x39, 0xc8, 0xa9, 0xb2, 0x24,
0xb5, 0x18, 0x8b, 0x1a, 0x1e, 0xa8, 0x1a, 0xa7, 0x2e, 0xc6, 0x0b, 0x0f, 0xe5, 0x18, 0x6e, 0x3c,
0x94, 0x63, 0xf8, 0xf0, 0x50, 0x8e, 0xf1, 0xc7, 0x43, 0x39, 0xc6, 0x86, 0x47, 0x72, 0x8c, 0x2b,
0x1e, 0xc9, 0x31, 0x9e, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c,
0x2f, 0x1e, 0xc9, 0x31, 0x7c, 0x00, 0x89, 0x3f, 0x96, 0x63, 0xe4, 0x12, 0x4e, 0xce, 0xcf, 0xd5,
0x43, 0x8b, 0x0e, 0x27, 0xde, 0x70, 0x68, 0x7c, 0x05, 0x80, 0x44, 0x02, 0x18, 0xa3, 0x58, 0x4b,
0x2a, 0x0b, 0x52, 0x8b, 0x17, 0x30, 0x32, 0xfe, 0x60, 0x64, 0x5c, 0xc4, 0xc4, 0xec, 0x1e, 0xe0,
0xb4, 0x8a, 0x49, 0xce, 0x1d, 0xa2, 0x2b, 0x00, 0xaa, 0x4b, 0x2f, 0x3c, 0x35, 0x27, 0xc7, 0x3b,
0x2f, 0xbf, 0x3c, 0x2f, 0x04, 0xa4, 0x38, 0x89, 0x0d, 0x6c, 0x9c, 0x31, 0x20, 0x00, 0x00, 0xff,
0xff, 0xac, 0x8b, 0x9f, 0x55, 0xfd, 0x01, 0x00, 0x00,
}<|fim▁end|> | }
return false
}
|
<|file_name|>networkRead.java<|end_file_name|><|fim▁begin|>import java.io.IOException;
import java.net.Socket;
import java.util.Scanner;
/**
 * Runnable that reads newline-terminated commands from a network socket,
 * publishes each line into the shared data object, logs it, and forwards it
 * to the Arduino writer.  A "ping" line refreshes a 2-second keep-alive
 * deadline; once the shared alive flag drops, shutdown commands are sent
 * to the Arduino.
 *
 * NOTE(review): Scanner.hasNextLine() blocks until input or EOF, so while
 * no data arrives the loop is stuck inside hasNextLine() and keepAlive()
 * cannot fire -- confirm whether the timeout is expected to trigger during
 * a silent connection.
 */
public class networkRead implements Runnable
{
    //Objects
    Scanner read;              // line-oriented reader over the socket's input stream
    debugLogger myLogger;      // shared logger for diagnostics
    sharedData myData;         // cross-thread state (alive flags, last network line)
    arduinoWrite ardWriter;    // forwards received commands to the serial port
    Socket pipe;               // the network connection being read

    //Variables
    String data;               // most recently received line; cleared each loop pass
    long startTime;            // time the last "ping" (or startup) was seen
    long endTime;              // keep-alive deadline: startTime + 2000 ms
    boolean dead = false;      // NOTE(review): never read or written after init -- appears unused

    /**
     * Wires up collaborators and opens a Scanner over the socket's input
     * stream.  On failure the error is logged but construction continues,
     * leaving {@code read} null.
     *
     * NOTE(review): the error text says "write" but the failure is opening
     * the input (read) side of the socket.
     */
    public networkRead(Socket tempSocket, sharedData tempData, debugLogger tempLog, arduinoWrite tempWriter)
    {
        pipe = tempSocket;
        myData = tempData;
        myLogger = tempLog;
        ardWriter = tempWriter;
        try
        {
            read = new Scanner(tempSocket.getInputStream());
        }
        catch (IOException e)
        {
            myLogger.writeLog("ERROR: Failure to write to network socket!");
            myLogger.writeLog("\n\n");
            e.printStackTrace();
            myLogger.writeLog("\n\n");
        }
    }

    /**
     * Main loop: while the shared alive flag holds, read lines, publish and
     * forward them, refresh the keep-alive on "ping", then check the
     * deadline.  On exit, issue shutdown commands to the Arduino.
     */
    public void run()
    {
        data = "myData";  // placeholder so the first data.equals() call is non-null safe
        startTime = System.currentTimeMillis();
        endTime = startTime + 2000;  // initial 2 s keep-alive window
        while (myData.getAlive())
        {
            if(pipe.isConnected())
            {
                if(read.hasNextLine())
                {
                    //Read data
                    data = read.nextLine();
                    data = data.trim();
                    //Store Data in shared object
                    myData.setReadNet(data);
                    //Log the input
                    myLogger.writeLog("Read From Server: \t" + data);
                    //Pass input to arduino
                    ardWriter.writeToSerial(data);
                }
            }
            // A heartbeat from the server pushes the deadline 2 s into the future.
            if(data.equals("ping"))
            {
                startTime = System.currentTimeMillis();
                endTime = startTime + 2000;
            }

            keepAlive();
            data = "";  // consume the line so a stale "ping" cannot refresh the deadline twice

        }
        // Alive flag dropped: command the Arduino into a safe stopped state.
        ardWriter.writeToSerial("BB");
        ardWriter.writeToSerial("EVF");
        ardWriter.writeToSerial("OFF");
        myLogger.writeLog("CRITICAL: CONNECTION LOST!");
    }

    /**
     * Marks both the Arduino and network sides dead in the shared state if
     * the keep-alive deadline has passed.
     */
    public void keepAlive()
    {
        long temp = System.currentTimeMillis();
        if((temp >endTime))
        {
            myData.setAliveArd(false);
            myData.setAliveNet(false);
        }
    }
}
data = "";
|
<|file_name|>honest_node.rs<|end_file_name|><|fim▁begin|>use crate::protocol::{Node, Digest, Nid, SignedMessage, Slot, View, StateMachine};
use std::sync::mpsc::{Sender, Receiver};
use crate::network::Network;
/// A protocol participant that follows the `Node` message rules.
///
/// Pairs the replicated `state_machine` it drives with the `network`
/// handle used to exchange `SignedMessage`s with peers.
pub(crate) struct HonestNode<S: StateMachine> {
    pub(crate) state_machine: S,
    pub(crate) network: Network<SignedMessage>,
}
impl<S: StateMachine> Node<S> for HonestNode<S> {
    /// Entry point for messages delivered from the network.
    /// TODO: stub -- panics via `unimplemented!()` when called.
    fn recv_message(&self, msg: SignedMessage) {
        unimplemented!()
    }
    /// Callback for when `(digest, view, slot)` reaches the prepared state.
    /// TODO: stub -- panics via `unimplemented!()` when called.
    fn prepared(&self, digest: Digest, view: View, slot: Slot, nid: Nid) {
        unimplemented!()
    }
    /// Callback for when `(digest, view, slot)` is committed locally.
    /// TODO: stub -- panics via `unimplemented!()` when called.
    fn commited_local(&self, digest: Digest, view: View, slot: Slot, nid: Nid) {
        unimplemented!()
    }
}
<|file_name|>32e5974ada25_add_neutron_resources_table.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Add standard attribute table
Revision ID: 32e5974ada25
Revises: 13cfb89f881a
Create Date: 2015-09-10 00:22:47.618593
"""
# revision identifiers, used by Alembic.
revision = '32e5974ada25'
down_revision = '13cfb89f881a'
from alembic import op
import sqlalchemy as sa
TABLES = ('ports', 'networks', 'subnets', 'subnetpools', 'securitygroups',
'floatingips', 'routers', 'securitygrouprules')
def upgrade():
    """Create the standardattributes table and add a nullable
    standard_attr_id column to every resource table in TABLES."""
    op.create_table(
        'standardattributes',
        sa.Column('id', sa.BigInteger(), autoincrement=True),
        sa.Column('resource_type', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # Each table needs its own Column instance, so build one per iteration.
    for resource_table in TABLES:
        op.add_column(resource_table,
                      sa.Column('standard_attr_id', sa.BigInteger(),
                                nullable=True))
<|file_name|>cli.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
def gettext(s):
    """Identity translation marker for the string literals below.

    Replaces the original ``gettext = lambda s: s`` binding (PEP 8 E731
    discourages assigning a lambda to a name); a ``def`` also gives the
    callable a useful ``__name__`` in tracebacks.
    """
    return s


urlpatterns = []
def configure(**extra):
    """Assemble a complete Django settings mapping for the CMS test suite
    and apply it via ``settings.configure``.

    Keyword arguments in ``extra`` override any of the defaults below, so
    individual test runs can tweak single settings.  Side effects: sets the
    DJANGO_SETTINGS_MODULE environment variable to this module and calls
    ``django.setup()`` at the end.
    """
    from django.conf import settings
    os.environ['DJANGO_SETTINGS_MODULE'] = 'cms.test_utils.cli'
    # Baseline settings; every key below can be overridden through **extra.
    defaults = dict(
        CACHE_BACKEND='locmem:///',
        DEBUG=True,
        DATABASE_SUPPORTS_TRANSACTIONS=True,
        # In-memory SQLite keeps test runs fast and leaves no files behind.
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            }
        },
        SITE_ID=1,
        USE_I18N=True,
        MEDIA_ROOT='/media/',
        STATIC_ROOT='/static/',
        CMS_MEDIA_ROOT='/cms-media/',
        CMS_MEDIA_URL='/cms-media/',
        MEDIA_URL='/media/',
        STATIC_URL='/static/',
        ADMIN_MEDIA_PREFIX='/static/admin/',
        EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend',
        SECRET_KEY='key',
        # Template engine pointed at the bundled test project's templates.
        TEMPLATES=[
            {
                'BACKEND': 'django.template.backends.django.DjangoTemplates',
                'DIRS': [
                    os.path.abspath(
                        os.path.join(os.path.dirname(__file__),
                                     'project',
                                     'templates')
                    )
                ],
                'OPTIONS': {
                    'context_processors': [
                        "django.contrib.auth.context_processors.auth",
                        "django.template.context_processors.i18n",
                        "django.template.context_processors.debug",
                        "django.template.context_processors.request",
                        "django.template.context_processors.media",
                        'django.template.context_processors.csrf',
                        "cms.context_processors.media",
                        "sekizai.context_processors.sekizai",
                        "django.template.context_processors.static",
                    ],
                    'loaders': (
                        'django.template.loaders.filesystem.Loader',
                        'django.template.loaders.app_directories.Loader',
                        'django.template.loaders.eggs.Loader',
                    ),
                },
            },
        ],
        MIDDLEWARE_CLASSES=[
            'django.contrib.sessions.middleware.SessionMiddleware',
            'cms.middleware.multilingual.MultilingualURLMiddleware',
            'django.contrib.auth.middleware.AuthenticationMiddleware',
            'django.contrib.messages.middleware.MessageMiddleware',
            'django.middleware.common.CommonMiddleware',
            'django.contrib.admindocs.middleware.XViewMiddleware',
            'django.middleware.csrf.CsrfViewMiddleware',
            'cms.middleware.user.CurrentUserMiddleware',
            'cms.middleware.page.CurrentPageMiddleware',
            'cms.middleware.toolbar.ToolbarMiddleware',
        ],
        # Django contrib apps plus the CMS, its plugins, and test-only apps.
        INSTALLED_APPS=[
            'django.contrib.auth',
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.admin',
            'django.contrib.sites',
            'django.contrib.staticfiles',
            'cms',
            'menus',
            'mptt',
            'cms.test_utils.project.sampleapp',
            'cms.test_utils.project.placeholderapp',
            'cms.test_utils.project.pluginapp',
            'cms.test_utils.project.pluginapp.plugins.manytomany_rel',
            'cms.test_utils.project.pluginapp.plugins.extra_context',
            'cms.test_utils.project.pluginapp.plugins.meta',
            'cms.test_utils.project.fakemlng',
            'cms.test_utils.project.fileapp',
            'cms.plugins.text',
            'cms.plugins.picture',
            'cms.plugins.file',
            'cms.plugins.flash',
            'cms.plugins.link',
            'cms.plugins.snippet',
            'cms.plugins.googlemap',
            'cms.plugins.teaser',
            'cms.plugins.video',
            'cms.plugins.twitter',
            'cms.plugins.inherit',
            'reversion',
            'sekizai',
        ],
        # i18n setup: five site languages, with per-site and fallback maps.
        LANGUAGE_CODE="en",
        LANGUAGES=(
            ('en', gettext('English')),
            ('fr', gettext('French')),
            ('de', gettext('German')),
            ('pt-BR', gettext("Brazil")),
            ('nl', gettext("Dutch")),
        ),
        CMS_LANGUAGES=(
            ('en', gettext('English')),
            ('fr', gettext('French')),
            ('de', gettext('German')),
            ('pt-BR', gettext("Brazil")),
            ('nl', gettext("Dutch")),
        ),
        CMS_FRONTEND_LANGUAGES=(
            'fr',
            'de',
            'nl',
        ),
        CMS_LANGUAGE_CONF={
            'de': ['fr', 'en'],
            'en': ['fr', 'de'],
        },
        CMS_SITE_LANGUAGES={
            1: ['en', 'de', 'fr', 'pt-BR'],
            2: ['de', 'fr'],
            3: ['nl'],
        },
        CMS_TEMPLATES=(
            ('col_two.html', gettext('two columns')),
            ('col_three.html', gettext('three columns')),
            ('nav_playground.html', gettext('navigation examples')),
        ),
        # Restrict which plugins each placeholder accepts.
        CMS_PLACEHOLDER_CONF={
            'col_sidebar': {
                'plugins': ('FilePlugin', 'FlashPlugin', 'LinkPlugin',
                            'PicturePlugin', 'TextPlugin', 'SnippetPlugin'),
                'name': gettext("sidebar column")
            },
            'col_left': {
                'plugins': ('FilePlugin', 'FlashPlugin', 'LinkPlugin',
                            'PicturePlugin', 'TextPlugin', 'SnippetPlugin',
                            'GoogleMapPlugin',),
                'name': gettext("left column")
            },
            'col_right': {
                'plugins': ('FilePlugin', 'FlashPlugin', 'LinkPlugin',
                            'PicturePlugin', 'TextPlugin', 'SnippetPlugin',
                            'GoogleMapPlugin',),
                'name': gettext("right column")
            },
            'extra_context': {
                "plugins": ('TextPlugin',),
                "extra_context": {"width": 250},
                "name": "extra context"
            },
        },
        CMS_SOFTROOT=True,
        CMS_MODERATOR=True,
        CMS_PERMISSION=True,
        CMS_PUBLIC_FOR='all',
        # Zero durations disable caching so tests always see fresh content.
        CMS_CACHE_DURATIONS={
            'menus': 0,
            'content': 0,
            'permissions': 0,
        },
        CMS_APPHOOKS=[],
        CMS_REDIRECTS=True,
        CMS_SEO_FIELDS=True,
        CMS_FLAT_URLS=False,
        CMS_MENU_TITLE_OVERWRITE=True,
        CMS_HIDE_UNTRANSLATED=False,
        CMS_URL_OVERWRITE=True,
        CMS_SHOW_END_DATE=True,
        CMS_SHOW_START_DATE=True,
        CMS_PLUGIN_PROCESSORS=tuple(),
        CMS_PLUGIN_CONTEXT_PROCESSORS=tuple(),
        CMS_SITE_CHOICES_CACHE_KEY='CMS:site_choices',
        CMS_PAGE_CHOICES_CACHE_KEY='CMS:page_choices',
        CMS_NAVIGATION_EXTENDERS=(
            ('cms.test_utils.project.sampleapp.menu_extender.get_nodes',
             'SampleApp Menu'),
        ),
        TEST_RUNNER='cms.test_utils.runners.NormalTestRunner',
        JUNIT_OUTPUT_DIR='.',
        TIME_TESTS=False,
        ROOT_URLCONF='cms.test_utils.cli',
        # MD5 hashing keeps user creation in tests fast.
        PASSWORD_HASHERS=(
            'django.contrib.auth.hashers.MD5PasswordHasher',
        )
    )
    # Disable migrations for Django 1.7+
    class DisableMigrations(object):
        # Claim to know every app so the migration loader asks us ...
        def __contains__(self, item):
            return True
        # ... and answer with a module name that does not exist, which
        # makes Django treat every app as having no migrations.
        def __getitem__(self, item):
            return "notmigrations"
    defaults['MIGRATION_MODULES'] = DisableMigrations()
    defaults.update(extra)
    settings.configure(**defaults)
    import django
    django.setup()
) |
<|file_name|>test__reshape_vector_args.py<|end_file_name|><|fim▁begin|># (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Unit tests for
:func:`iris.fileformats.pp_rules._reshape_vector_args`.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
from iris.fileformats.pp_rules import _reshape_vector_args
class TestEmpty(tests.IrisTest):
def test(self):
result = _reshape_vector_args([])
self.assertEqual(result, [])
class TestSingleArg(tests.IrisTest):
def _check(self, result, expected):
self.assertEqual(len(result), len(expected))
for result_arr, expected_arr in zip(result, expected):
self.assertArrayEqual(result_arr, expected_arr)
def test_nochange(self):
points = np.array([[1, 2, 3], [4, 5, 6]])
result = _reshape_vector_args([(points, (0, 1))])
expected = [points]
self._check(result, expected)
def test_bad_dimensions(self):
points = np.array([[1, 2, 3], [4, 5, 6]])
with self.assertRaisesRegexp(ValueError, 'Length'):
_reshape_vector_args([(points, (0, 1, 2))])
def test_scalar(self):
points = 5
result = _reshape_vector_args([(points, ())])
expected = [points]
self._check(result, expected)
def test_nonarray(self):
points = [[1, 2, 3], [4, 5, 6]]
result = _reshape_vector_args([(points, (0, 1))])
expected = [np.array(points)]
self._check(result, expected)
def test_transpose(self):
points = np.array([[1, 2, 3], [4, 5, 6]])
result = _reshape_vector_args([(points, (1, 0))])
expected = [points.T]
self._check(result, expected)
def test_extend(self):
points = np.array([[1, 2, 3, 4], [21, 22, 23, 24], [31, 32, 33, 34]])
result = _reshape_vector_args([(points, (1, 3))])
expected = [points.reshape(1, 3, 1, 4)]
self._check(result, expected)
class TestMultipleArgs(tests.IrisTest):
def _check(self, result, expected):
self.assertEqual(len(result), len(expected))
for result_arr, expected_arr in zip(result, expected):
self.assertArrayEqual(result_arr, expected_arr)
def test_nochange(self):
a1 = np.array([[1, 2, 3], [4, 5, 6]])
a2 = np.array([[0, 2, 4], [7, 8, 9]])
result = _reshape_vector_args([(a1, (0, 1)), (a2, (0, 1))])
expected = [a1, a2]
self._check(result, expected)
def test_array_and_scalar(self):
a1 = [[1, 2, 3], [3, 4, 5]]
a2 = 5
result = _reshape_vector_args([(a1, (0, 1)), (a2, ())])
expected = [a1, np.array([[5]])]
self._check(result, expected)
def test_transpose(self):
a1 = np.array([[1, 2, 3], [4, 5, 6]])
a2 = np.array([[0, 2, 4], [7, 8, 9]])
result = _reshape_vector_args([(a1, (0, 1)), (a2, (1, 0))])
expected = [a1, a2.T]
self._check(result, expected)
def test_incompatible(self):
# Does not enforce compatibility of results.
a1 = np.array([1, 2])
a2 = np.array([1, 2, 3])
result = _reshape_vector_args([(a1, (0,)), (a2, (0,))])
expected = [a1, a2]
self._check(result, expected)
def test_extend(self):
a1 = np.array([[1, 2, 3], [4, 5, 6]])
a2 = np.array([11, 12, 13])
result = _reshape_vector_args([(a1, (0, 1)), (a2, (1,))])
expected = [a1, a2.reshape(1, 3)]
self._check(result, expected)
def test_extend_transpose(self):
a1 = np.array([[1, 2, 3], [4, 5, 6]])
a2 = np.array([11, 12, 13])
result = _reshape_vector_args([(a1, (1, 0)), (a2, (1,))])
expected = [a1.T, a2.reshape(1, 3)]
self._check(result, expected)
def test_double_extend(self):
a1 = np.array([[1, 2, 3], [4, 5, 6]])
a2 = np.array(1)
result = _reshape_vector_args([(a1, (0, 2)), (a2, ())])
expected = [a1.reshape(2, 1, 3), a2.reshape(1, 1, 1)]
self._check(result, expected)
def test_triple(self):
a1 = np.array([[1, 2, 3, 4]])
a2 = np.array([3, 4])
a3 = np.array(7)
result = _reshape_vector_args([(a1, (0, 2)), (a2, (1,)), (a3, ())])
expected = [a1.reshape(1, 1, 4),
a2.reshape(1, 2, 1),
a3.reshape(1, 1, 1)]
self._check(result, expected)
if __name__ == "__main__":
tests.main()<|fim▁end|> | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License |
<|file_name|>mail.js<|end_file_name|><|fim▁begin|>/**
* Created by GYX on 15/6/27.
*/
var nodeMailer = require('nodemailer');
var Imap = require("imap");
var MailParser = require("mailparser").MailParser;
var imapconn =null;
function mail(option) {
this.smtp = option.smtp || "";
this.smtpPort = option.smtpPort || "";
this.imap = option.imap || "";
this.imapPort = option.imapPort || "";
this.mailAddress = option.mailAddress || "";
this.password = option.password || "";
this.transporter=null;
this._mailbox=null;
this._cb=null;
this._onerror=null;
}
mail.prototype.setMailOption = function(otherOption){
this.smtp = otherOption.smtp || "";
this.smtpPort = otherOption.smtpPort || "";
this.imap = otherOption.imap || "";
this.imapPort = otherOption.imapPort || "";
this.mailAddress = otherOption.mailAddress || "";
this.password = otherOption.password || "";
};
//当然你也可以直接通过变量获取
mail.prototype.getMailOption = function(){
return {smtp:this.smtp,smtpPort:this.smtpPort,
imap:this.imap,imapPort:this.imapPort,
mailAddress:this.mailAddress,password:this.password};
};
/**
* var mailOptions = {
from: 'Fred Foo <[email protected]>', // sender address
to: '[email protected], [email protected]', // list of receivers
subject: 'Hello ', // Subject line
text: 'Hello world ', // plaintext body
html: '<b>Hello world </b>' // html body
};
callback =function(error, info){
if(error){
console.log(error);
}else{
console.log('Message sent: ' +info.message);
}};
*/
//发送邮件
mail.prototype.sendMail=function(mailOptions,callback){
if(!this.smtp||!this.smtpPort||!this.mailAddress||!this.password){
return {success:0,error:"Error,mail option is not enough"};
}
this.transporter = nodeMailer.createTransport("SMTP",{
host:this.smtp,
port:this.smtpPort,
secureConnection:true,
auth: {
user: this.mailAddress,
pass: this.password
}
});
this.transporter.sendMail(mailOptions,callback);
};
//停止SMTP连接
mail.prototype.stopSMTPConnection = function(){
if(this.transporter==null)
return {success:0,error:"please start smtp again"};
this.transporter.close();
};
/** searchFilter:
* case 'ALL':
case 'ANSWERED':
case 'DELETED':
case 'DRAFT':
case 'FLAGGED':
case 'NEW':
case 'SEEN':
case 'RECENT':
case 'OLD':
case 'UNANSWERED':
case 'UNDELETED':
case 'UNDRAFT':
case 'UNFLAGGED':
case 'UNSEEN':
*/
/**
* 北航邮箱的文件夹
* { INBOX: { attribs: [], delimiter: '/', children: null, parent: null },
Drafts:
{ attribs: [ '\\Drafts' ],
delimiter: '/',
children: null,
parent: null,
special_use_attrib: '\\Drafts' },
'Sent Items':
{ attribs: [ '\\Sent' ],
delimiter: '/',
children: null,
parent: null,
special_use_attrib: '\\Sent' },
Trash:
{ attribs: [ '\\Trash' ],
delimiter: '/',
children: null,
parent: null,
special_use_attrib: '\\Trash' },
'Junk E-mail':
{ attribs: [ '\\Junk' ],
delimiter: '/',
children: null,
parent: null,
special_use_attrib: '\\Junk' },
'Virus Items': { attribs: [], delimiter: '/', children: null, parent: null } }
*/
mail.prototype.openBox=function(mailbox,searchFilter,cb,onerror) {
this._cb =cb;<|fim▁hole|> if(!this.imap||!this.imapPort||!this.mailAddress||!this.password){
return {success:0,error:"Error,mail option is not enough"};
}
imapconn= new Imap({
user:this.mailAddress,
password:this.password,
host:this.imap,
port:this.imapPort,
tls: true,
tlsOptions: { rejectUnauthorized: false },
attachments:false
});
imapconn.once('error', this._onerror);
imapconn.once('ready',function(){
self._openbox(searchFilter);
});
imapconn.connect();
}
else
{
self._openbox(searchFilter);
}
};
mail.prototype.getFullMail=function(mailbox,messageId,cb,onerror) {
this._cb =cb;
this._mailbox = mailbox;
this._onerror = onerror;
var self= this;
if(!imapconn){
if(!this.imap||!this.imapPort||!this.mailAddress||!this.password){
return {success:0,error:"Error,mail option is not enough"};
}
imapconn= new Imap({
user:this.mailAddress,
password:this.password,
host:this.imap,
port:this.imapPort,
tls: true,
tlsOptions: { rejectUnauthorized: false },
attachments:false
});
imapconn.once('error', this._onerror);
imapconn.once('ready',function(){
self._getFullMail(messageId);
});
imapconn.connect();
}
else
{
self._getFullMail(messageId);
}
};
mail.prototype.killImap= function(){
imapconn.end();
imapconn=undefined;
};
mail.prototype._openbox =function(searchFilter){
var self = this;
imapconn.openBox(this._mailbox, false,
function(err, box){
if (err) throw err;
imapconn.search(searchFilter, function(err, results) {
if (err) throw err;
if(results.length>0) {
var f = imapconn.fetch(results, {bodies: 'HEADER'});
f.on('message', function (msg) {
var mailparser = new MailParser();
msg.on('body', function (stream, info) {
stream.pipe(mailparser);
mailparser.on("end", function (mail) {
mail.messageId = mail.headers["message-id"];
delete mail.headers;
self._cb(mail);
})
});
});
f.once('error', function (err) {
console.log('Fetch error: ' + err);
});
}
});
});
};
mail.prototype._getFullMail=function(messageId){
var self = this;
imapconn.openBox(this._mailbox, false,
function(err, box){
if (err) throw err;
imapconn.search([["header","message-id",messageId]], function(err, results) {
if (err) throw err;
if(results.length>0){
var f = imapconn.fetch(results, { bodies: '' });
f.on('message', function(msg) {
var mailparser = new MailParser();
msg.on('body', function(stream, info) {
stream.pipe( mailparser );
mailparser.on("end",function( mail ){
mail.messageId=mail.headers["message-id"];
delete mail.headers;
self._cb(mail);
console.log(mail);
})
});
});
f.once('error', function(err) {
console.log('Fetch error: ' + err);
});
}
});
});
};
mail.prototype.imapTest =function(cb){
if(!this.imap||!this.imapPort||!this.mailAddress||!this.password){
return {success:0,error:"Error,mail option is not enough"};
}
conn= new Imap({
user:this.mailAddress,
password:this.password,
host:this.imap,
port:this.imapPort,
tls: true,
tlsOptions: { rejectUnauthorized: false },
connTimeout:1000
});
conn.once("error",function(){
cb(103,"无法连接到imap服务器");
});
conn.once('ready',function(){
cb(0,'success');
});
conn.connect();
};
mail.prototype.getAll=function(cb){
this.openBox("INBOX",["ALL"],cb);
};
mail.prototype.getSince =function(time,cb){
this.openBox("INBOX",[["SINCE",time]],cb);
};
mail.prototype.getUnseen=function(cb){
this.open("INBOX",["UNSEEN"],cb);
};
module.exports = mail;<|fim▁end|> | this._mailbox = mailbox;
this._onerror = onerror;
var self= this;
if(!imapconn){ |
<|file_name|>docs_test.py<|end_file_name|><|fim▁begin|>from selenium_test_case import SeleniumTestCase
class DocsTest(SeleniumTestCase):
def test_links_between_pages(self):
self.open_path('/help')
self.assert_text_present('Frequently Asked Questions')
self.click_and_wait('link=Terms of Service')
self.assert_text_present('Terms of Service for Google Resource Finder')
self.click_and_wait('link=Privacy')
self.assert_text_present('Google Resource Finder Privacy Policy')
self.click_and_wait('link=Help')
self.assert_text_present('Frequently Asked Questions')
def test_languages(self):
# English (en)
self.open_path('/help?lang=en')
self.assert_text_present('Frequently Asked Questions')
self.click_and_wait('link=Terms of Service')
self.assert_text_present('Terms of Service for Google Resource Finder')
self.click_and_wait('link=Privacy')
self.assert_text_present('Google Resource Finder Privacy Policy')
self.click_and_wait('link=Help')
self.assert_text_present('Frequently Asked Questions')
# Spanish (es-419)
self.open_path('/help?lang=es')
self.assert_text_present('Preguntas frecuentes')
self.click_and_wait('link=Condiciones del servicio')
self.assert_text_present(
'Condiciones del servicio del Buscador de recursos de Google')
self.click_and_wait(u'link=Privacidad')
self.assert_text_present(
u'Pol\u00edtica de privacidad del Buscador de recursos de Google')
self.click_and_wait(u'link=Ayuda')
self.assert_text_present('Preguntas frecuentes')
# French (fr)
self.open_path('/help?lang=fr')
self.assert_text_present(u'Questions fr\u00e9quentes')
self.click_and_wait('link=Conditions d\'utilisation')
self.assert_text_present(
u'Conditions d\'utilisation de Google Resource Finder')
self.click_and_wait(u'link=Confidentialit\u00e9')
self.assert_text_present(
u'R\u00e8gles de confidentialit\u00e9 de Google Resource Finder')
self.click_and_wait(u'link=Aide')
self.assert_text_present(u'Questions fr\u00e9quentes')
# Kreyol (ht)
self.open_path('/help?lang=ht')
self.assert_text_present(u'Kesyon Div\u00e8s Moun Poze Tout Tan')
self.click_and_wait(u'link=Kondisyon S\u00e8vis yo')
self.assert_text_present(
u'Kondisyon S\u00e8vis pou Resource Finder Google')
self.click_and_wait(u'link=Vi prive')
self.assert_text_present(u'Politik Resp\u00e8 Pou Moun ak ' +
u'\u201cResource Finder\u201d nan Google')
<|fim▁hole|><|fim▁end|> | self.click_and_wait(u'link=Ed')
self.assert_text_present(u'Kesyon Div\u00e8s Moun Poze Tout Tan') |
<|file_name|>model.py<|end_file_name|><|fim▁begin|>import logging
from .data import DefinedTable
logger = logging.getLogger(__name__)
def ensure_tables():
"""When called, ensure that all the tables that we need are created in the
database. The real work is supplied by the DefinedTable base class
"""
for tab in [Subject, ExpCondition]:
logger.debug("Creating table %s", tab.get_table_name())
tab.ensure_table()
class Subject(DefinedTable):
"""An experimental subject that we are tracking in an experimental condition
"""
@classmethod
def get_table_name(self):
return "Subjects"
@classmethod
def get_key_name(self):
return "subject_id"
def __init__(
self,
subject_id=None,
first_name=None,
last_name=None,
email=None,
exp_condition=None
):
self.subject_id = subject_id
self.first_name = first_name
self.last_name = last_name
self.email = email
self.exp_condition = exp_condition
def errors(self):
if not self.subject_id:
yield "Missing subject ID"
if not self.exp_condition:
yield "Missing Experimental Condition"
class ExpCondition(DefinedTable):<|fim▁hole|>
@classmethod
def get_table_name(self):
return "Conditions"
@classmethod
def get_key_name(self):
return "condition_id"
def __init__(
self,
condition_id=None,
condition_name=None,
description=None
):
self.condition_id = condition_id
self.condition_name = condition_name
self.description = description<|fim▁end|> | """A single experimental condition that any number of subjects may be a part of
""" |
<|file_name|>str.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use cssparser::{self, Color, RGBA};
use js::conversions::{FromJSValConvertible, ToJSValConvertible, latin1_to_string};
use js::jsapi::{JSContext, JSString, HandleValue, MutableHandleValue};
use js::jsapi::{JS_GetTwoByteStringCharsAndLength, JS_StringHasLatin1Chars};
use js::rust::ToString;
use libc::c_char;
use num_lib::ToPrimitive;
use opts;
use std::ascii::AsciiExt;
use std::borrow::ToOwned;
use std::char;
use std::convert::AsRef;
use std::ffi::CStr;
use std::fmt;
use std::iter::{Filter, Peekable};
use std::ops::{Deref, DerefMut};
use std::ptr;
use std::slice;
use std::str::{CharIndices, FromStr, Split, from_utf8};
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq, Deserialize, Serialize, Hash, Debug)]
pub struct DOMString(String);
impl !Send for DOMString {}
impl DOMString {
pub fn new() -> DOMString {
DOMString(String::new())
}
// FIXME(ajeffrey): implement more of the String methods on DOMString?
pub fn push_str(&mut self, string: &str) {
self.0.push_str(string)
}
pub fn clear(&mut self) {
self.0.clear()
}
}
impl Default for DOMString {
fn default() -> Self {
DOMString(String::new())
}
}
impl Deref for DOMString {
type Target = str;
#[inline]
fn deref(&self) -> &str {
&self.0
}
}
impl DerefMut for DOMString {
#[inline]
fn deref_mut(&mut self) -> &mut str {
&mut self.0
}
}
impl AsRef<str> for DOMString {
fn as_ref(&self) -> &str {
&self.0
}
}
impl fmt::Display for DOMString {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl PartialEq<str> for DOMString {
fn eq(&self, other: &str) -> bool {
&**self == other
}
}
impl<'a> PartialEq<&'a str> for DOMString {
fn eq(&self, other: &&'a str) -> bool {
&**self == *other
}
}
impl From<String> for DOMString {
fn from(contents: String) -> DOMString {
DOMString(contents)
}
}
impl<'a> From<&'a str> for DOMString {
fn from(contents: &str) -> DOMString {
DOMString::from(String::from(contents))
}
}
impl From<DOMString> for String {
fn from(contents: DOMString) -> String {
contents.0
}
}
impl Into<Vec<u8>> for DOMString {
fn into(self) -> Vec<u8> {
self.0.into()
}
}
// https://heycam.github.io/webidl/#es-DOMString
impl ToJSValConvertible for DOMString {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
(**self).to_jsval(cx, rval);
}
}
/// Behavior for stringification of `JSVal`s.
#[derive(PartialEq)]
pub enum StringificationBehavior {
/// Convert `null` to the string `"null"`.
Default,
/// Convert `null` to the empty string.
Empty,
}
/// Convert the given `JSString` to a `DOMString`. Fails if the string does not
/// contain valid UTF-16.
pub unsafe fn jsstring_to_str(cx: *mut JSContext, s: *mut JSString) -> DOMString {
let latin1 = JS_StringHasLatin1Chars(s);
DOMString(if latin1 {
latin1_to_string(cx, s)
} else {
let mut length = 0;
let chars = JS_GetTwoByteStringCharsAndLength(cx, ptr::null(), s, &mut length);
assert!(!chars.is_null());
let potentially_ill_formed_utf16 = slice::from_raw_parts(chars, length as usize);
let mut s = String::with_capacity(length as usize);
for item in char::decode_utf16(potentially_ill_formed_utf16.iter().cloned()) {
match item {
Ok(c) => s.push(c),
Err(_) => {
// FIXME: Add more info like document URL in the message?
macro_rules! message {
() => {
"Found an unpaired surrogate in a DOM string. \
If you see this in real web content, \
please comment on https://github.com/servo/servo/issues/6564"
}
}
if opts::get().replace_surrogates {
error!(message!());
s.push('\u{FFFD}');
} else {
panic!(concat!(message!(), " Use `-Z replace-surrogates` \
on the command line to make this non-fatal."));
}
}
}
}
s
})
}
// https://heycam.github.io/webidl/#es-DOMString
impl FromJSValConvertible for DOMString {
type Config = StringificationBehavior;
unsafe fn from_jsval(cx: *mut JSContext,
value: HandleValue,
null_behavior: StringificationBehavior)
-> Result<DOMString, ()> {
if null_behavior == StringificationBehavior::Empty &&
value.get().is_null() {
Ok(DOMString::new())
} else {
let jsstr = ToString(cx, value);
if jsstr.is_null() {
debug!("ToString failed");
Err(())
} else {
Ok(jsstring_to_str(cx, jsstr))
}
}
}
}
impl Extend<char> for DOMString {
fn extend<I>(&mut self, iterable: I) where I: IntoIterator<Item=char> {
self.0.extend(iterable)
}
}
pub type StaticCharVec = &'static [char];
pub type StaticStringVec = &'static [&'static str];
/// Whitespace as defined by HTML5 § 2.4.1.
// TODO(SimonSapin) Maybe a custom Pattern can be more efficient?
const WHITESPACE: &'static [char] = &[' ', '\t', '\x0a', '\x0c', '\x0d'];
pub fn is_whitespace(s: &str) -> bool {
s.chars().all(char_is_whitespace)
}
#[inline]
pub fn char_is_whitespace(c: char) -> bool {
WHITESPACE.contains(&c)
}
/// A "space character" according to:
///
/// https://html.spec.whatwg.org/multipage/#space-character
pub static HTML_SPACE_CHARACTERS: StaticCharVec = &[
'\u{0020}',
'\u{0009}',
'\u{000a}',
'\u{000c}',
'\u{000d}',
];
pub fn split_html_space_chars<'a>(s: &'a str) ->
Filter<Split<'a, StaticCharVec>, fn(&&str) -> bool> {
fn not_empty(&split: &&str) -> bool { !split.is_empty() }
s.split(HTML_SPACE_CHARACTERS).filter(not_empty as fn(&&str) -> bool)
}
fn is_ascii_digit(c: &char) -> bool {
match *c {
'0'...'9' => true,
_ => false,
}
}
fn read_numbers<I: Iterator<Item=char>>(mut iter: Peekable<I>) -> Option<i64> {
match iter.peek() {
Some(c) if is_ascii_digit(c) => (),
_ => return None,
}
iter.take_while(is_ascii_digit).map(|d| {
d as i64 - '0' as i64
}).fold(Some(0i64), |accumulator, d| {
accumulator.and_then(|accumulator| {
accumulator.checked_mul(10)
}).and_then(|accumulator| {
accumulator.checked_add(d)
})
})
}
/// Shared implementation to parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers> or
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
fn do_parse_integer<T: Iterator<Item=char>>(input: T) -> Option<i64> {
let mut input = input.skip_while(|c| {
HTML_SPACE_CHARACTERS.iter().any(|s| s == c)
}).peekable();
let sign = match input.peek() {
None => return None,
Some(&'-') => {
input.next();
-1
},
Some(&'+') => {
input.next();
1
},
Some(_) => 1,
};
let value = read_numbers(input);
value.and_then(|value| value.checked_mul(sign))
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-integers>.
pub fn parse_integer<T: Iterator<Item=char>>(input: T) -> Option<i32> {
do_parse_integer(input).and_then(|result| {
result.to_i32()
})
}
/// Parse an integer according to
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-non-negative-integers>
pub fn parse_unsigned_integer<T: Iterator<Item=char>>(input: T) -> Option<u32> {
do_parse_integer(input).and_then(|result| {
result.to_u32()
})
}
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum LengthOrPercentageOrAuto {
Auto,
Percentage(f32),
Length(Au),
}
/// TODO: this function can be rewritten to return Result<LengthOrPercentage, _>
/// Parses a dimension value per HTML5 § 2.4.4.4. If unparseable, `Auto` is
/// returned.
/// https://html.spec.whatwg.org/multipage/#rules-for-parsing-dimension-values
pub fn parse_length(mut value: &str) -> LengthOrPercentageOrAuto {
// Steps 1 & 2 are not relevant
// Step 3
value = value.trim_left_matches(WHITESPACE);
// Step 4
if value.is_empty() {
return LengthOrPercentageOrAuto::Auto
}
// Step 5
if value.starts_with("+") {
value = &value[1..]
}
// Steps 6 & 7
match value.chars().nth(0) {
Some('0'...'9') => {},
_ => return LengthOrPercentageOrAuto::Auto,
}
// Steps 8 to 13
// We trim the string length to the minimum of:
// 1. the end of the string
// 2. the first occurence of a '%' (U+0025 PERCENT SIGN)
// 3. the second occurrence of a '.' (U+002E FULL STOP)
// 4. the occurrence of a character that is neither a digit nor '%' nor '.'
// Note: Step 10 is directly subsumed by FromStr::from_str
let mut end_index = value.len();
let (mut found_full_stop, mut found_percent) = (false, false);
for (i, ch) in value.chars().enumerate() {
match ch {
'0'...'9' => continue,
'%' => {
found_percent = true;
end_index = i;
break
}
'.' if !found_full_stop => {
found_full_stop = true;
continue
}
_ => {
end_index = i;
break
}
}
}
value = &value[..end_index];
if found_percent {
let result: Result<f32, _> = FromStr::from_str(value);
match result {
Ok(number) => return LengthOrPercentageOrAuto::Percentage((number as f32) / 100.0),
Err(_) => return LengthOrPercentageOrAuto::Auto,
}
}
match FromStr::from_str(value) {
Ok(number) => LengthOrPercentageOrAuto::Length(Au::from_f64_px(number)),
Err(_) => LengthOrPercentageOrAuto::Auto,
}
}
/// https://html.spec.whatwg.org/multipage/#rules-for-parsing-a-legacy-font-size
pub fn parse_legacy_font_size(mut input: &str) -> Option<&'static str> {
// Steps 1 & 2 are not relevant
// Step 3
input = input.trim_matches(WHITESPACE);
enum ParseMode {
RelativePlus,
RelativeMinus,
Absolute,
}
let mut input_chars = input.chars().peekable();
let parse_mode = match input_chars.peek() {
// Step 4
None => return None,
// Step 5
Some(&'+') => {
let _ = input_chars.next(); // consume the '+'
ParseMode::RelativePlus
}
Some(&'-') => {
let _ = input_chars.next(); // consume the '-'
ParseMode::RelativeMinus
}
Some(_) => ParseMode::Absolute,
};
// Steps 6, 7, 8
let mut value = match read_numbers(input_chars) {
Some(v) => v,
None => return None,
};
// Step 9
match parse_mode {
ParseMode::RelativePlus => value = 3 + value,
ParseMode::RelativeMinus => value = 3 - value,
ParseMode::Absolute => (),
}
// Steps 10, 11, 12
Some(match value {
n if n >= 7 => "xxx-large",
6 => "xx-large",
5 => "x-large",
4 => "large",
3 => "medium",
2 => "small",
n if n <= 1 => "x-small",
_ => unreachable!(),
})
}
/// Parses a legacy color per HTML5 § 2.4.6. If unparseable, `Err` is returned.
pub fn parse_legacy_color(mut input: &str) -> Result<RGBA, ()> {
// Steps 1 and 2.
if input.is_empty() {
return Err(())
}
// Step 3.
input = input.trim_matches(WHITESPACE);
// Step 4.
if input.eq_ignore_ascii_case("transparent") {
return Err(())
}
// Step 5.<|fim▁hole|> return Ok(rgba);
}
// Step 6.
if input.len() == 4 {
match (input.as_bytes()[0],
hex(input.as_bytes()[1] as char),
hex(input.as_bytes()[2] as char),
hex(input.as_bytes()[3] as char)) {
(b'#', Ok(r), Ok(g), Ok(b)) => {
return Ok(RGBA {
red: (r as f32) * 17.0 / 255.0,
green: (g as f32) * 17.0 / 255.0,
blue: (b as f32) * 17.0 / 255.0,
alpha: 1.0,
})
}
_ => {}
}
}
// Step 7.
let mut new_input = String::new();
for ch in input.chars() {
if ch as u32 > 0xffff {
new_input.push_str("00")
} else {
new_input.push(ch)
}
}
let mut input = &*new_input;
// Step 8.
for (char_count, (index, _)) in input.char_indices().enumerate() {
if char_count == 128 {
input = &input[..index];
break
}
}
// Step 9.
if input.as_bytes()[0] == b'#' {
input = &input[1..]
}
// Step 10.
let mut new_input = Vec::new();
for ch in input.chars() {
if hex(ch).is_ok() {
new_input.push(ch as u8)
} else {
new_input.push(b'0')
}
}
let mut input = new_input;
// Step 11.
while input.is_empty() || (input.len() % 3) != 0 {
input.push(b'0')
}
// Step 12.
let mut length = input.len() / 3;
let (mut red, mut green, mut blue) = (&input[..length],
&input[length..length * 2],
&input[length * 2..]);
// Step 13.
if length > 8 {
red = &red[length - 8..];
green = &green[length - 8..];
blue = &blue[length - 8..];
length = 8
}
// Step 14.
while length > 2 && red[0] == b'0' && green[0] == b'0' && blue[0] == b'0' {
red = &red[1..];
green = &green[1..];
blue = &blue[1..];
length -= 1
}
// Steps 15-20.
return Ok(RGBA {
red: hex_string(red).unwrap() as f32 / 255.0,
green: hex_string(green).unwrap() as f32 / 255.0,
blue: hex_string(blue).unwrap() as f32 / 255.0,
alpha: 1.0,
});
fn hex(ch: char) -> Result<u8, ()> {
match ch {
'0'...'9' => Ok((ch as u8) - b'0'),
'a'...'f' => Ok((ch as u8) - b'a' + 10),
'A'...'F' => Ok((ch as u8) - b'A' + 10),
_ => Err(()),
}
}
fn hex_string(string: &[u8]) -> Result<u8, ()> {
match string.len() {
0 => Err(()),
1 => hex(string[0] as char),
_ => {
let upper = try!(hex(string[0] as char));
let lower = try!(hex(string[1] as char));
Ok((upper << 4) | lower)
}
}
}
}
#[derive(Clone, Eq, PartialEq, Hash, Debug)]
pub struct LowercaseString {
inner: String,
}
impl LowercaseString {
pub fn new(s: &str) -> LowercaseString {
LowercaseString {
inner: s.to_lowercase(),
}
}
}
impl Deref for LowercaseString {
type Target = str;
#[inline]
fn deref(&self) -> &str {
&*self.inner
}
}
/// Creates a String from the given null-terminated buffer.
/// Panics if the buffer does not contain UTF-8.
pub unsafe fn c_str_to_string(s: *const c_char) -> String {
from_utf8(CStr::from_ptr(s).to_bytes()).unwrap().to_owned()
}
pub fn str_join<I, T>(strs: I, join: &str) -> String
where I: IntoIterator<Item=T>, T: AsRef<str>,
{
strs.into_iter().enumerate().fold(String::new(), |mut acc, (i, s)| {
if i > 0 { acc.push_str(join); }
acc.push_str(s.as_ref());
acc
})
}
// Lifted from Rust's StrExt implementation, which is being removed.
pub fn slice_chars(s: &str, begin: usize, end: usize) -> &str {
assert!(begin <= end);
let mut count = 0;
let mut begin_byte = None;
let mut end_byte = None;
// This could be even more efficient by not decoding,
// only finding the char boundaries
for (idx, _) in s.char_indices() {
if count == begin { begin_byte = Some(idx); }
if count == end { end_byte = Some(idx); break; }
count += 1;
}
if begin_byte.is_none() && count == begin { begin_byte = Some(s.len()) }
if end_byte.is_none() && count == end { end_byte = Some(s.len()) }
match (begin_byte, end_byte) {
(None, _) => panic!("slice_chars: `begin` is beyond end of string"),
(_, None) => panic!("slice_chars: `end` is beyond end of string"),
(Some(a), Some(b)) => unsafe { s.slice_unchecked(a, b) }
}
}
// searches a character index in CharIndices
// returns indices.count if not found
pub fn search_index(index: usize, indices: CharIndices) -> isize {
let mut character_count = 0;
for (character_index, _) in indices {
if character_index == index {
return character_count;
}
character_count += 1
}
character_count
}<|fim▁end|> | if let Ok(Color::RGBA(rgba)) = cssparser::parse_color_keyword(input) { |
<|file_name|>ConversationSkinDirective.js<|end_file_name|><|fim▁begin|>// Copyright 2014 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Controller for the conversation skin.
*/
// Note: This file should be assumed to be in an IIFE, and the constants below
// should only be used within this file.
var TIME_FADEOUT_MSEC = 100;
var TIME_HEIGHT_CHANGE_MSEC = 500;
var TIME_FADEIN_MSEC = 100;
var TIME_NUM_CARDS_CHANGE_MSEC = 500;
oppia.animation('.conversation-skin-responses-animate-slide', function() {
return {
removeClass: function(element, className, done) {
if (className !== 'ng-hide') {
done();
return;
}
element.hide().slideDown(400, done);
},
addClass: function(element, className, done) {
if (className !== 'ng-hide') {
done();
return;
}
element.slideUp(400, done);
}
};
});
// Collapse/expand animation for the tutor card on narrow viewports. When the
// tutor card is hidden (addClass 'ng-hide') it shrinks towards the Oppia
// avatar in the supplemental card; removeClass reverses the motion using the
// geometry captured during the hide.
oppia.animation('.conversation-skin-animate-tutor-card-on-narrow', function() {
  // Geometry captured when the card is hidden, so the reverse (show)
  // animation can restore the card to its original position and size.
  var tutorCardLeft, tutorCardWidth, tutorCardHeight, oppiaAvatarLeft;
  var tutorCardAnimatedLeft, tutorCardAnimatedWidth;
  var beforeAddClass = function(element, className, done) {
    if (className !== 'ng-hide') {
      done();
      return;
    }
    var tutorCard = element;
    var supplementalCard = $('.conversation-skin-supplemental-card-container');
    var oppiaAvatar = $('.conversation-skin-oppia-avatar.show-tutor-card');
    oppiaAvatarLeft = supplementalCard.position().left +
      supplementalCard.width() - oppiaAvatar.width();
    tutorCardLeft = tutorCard.position().left;
    tutorCardWidth = tutorCard.width();
    tutorCardHeight = tutorCard.height();
    // If the card horizontally overlaps the avatar, only slide it as far as
    // the avatar; otherwise collapse it fully at the avatar's position.
    if (tutorCard.offset().left + tutorCardWidth > oppiaAvatar.offset().left) {
      var animationLength = Math.min(oppiaAvatarLeft - tutorCard.offset().left,
        tutorCardWidth);
      tutorCardAnimatedLeft = tutorCardLeft + animationLength;
      tutorCardAnimatedWidth = tutorCardWidth - animationLength;
    } else {
      tutorCardAnimatedLeft = oppiaAvatarLeft;
      tutorCardAnimatedWidth = 0;
    }
    oppiaAvatar.hide();
    tutorCard.css({
      'min-width': 0
    });
    tutorCard.animate({
      left: tutorCardAnimatedLeft,
      width: tutorCardAnimatedWidth,
      height: 0,
      opacity: 1
    }, 500, function() {
      oppiaAvatar.show();
      // Clear inline styles so the stylesheet rules apply again.
      tutorCard.css({
        left: '',
        width: '',
        height: '',
        opacity: '',
        'min-width': ''
      });
      done();
    });
  };
  var removeClass = function(element, className, done) {
    if (className !== 'ng-hide') {
      done();
      return;
    }
    var tutorCard = element;
    $('.conversation-skin-oppia-avatar.show-tutor-card').hide(0, function() {
      // Start from the collapsed geometry recorded by beforeAddClass and
      // animate back to the original position and size.
      tutorCard.css({
        left: tutorCardAnimatedLeft,
        width: tutorCardAnimatedWidth,
        height: 0,
        opacity: 0,
        'min-width': 0
      });
      tutorCard.animate({
        left: tutorCardLeft,
        width: tutorCardWidth,
        height: tutorCardHeight,
        opacity: 1
      }, 500, function() {
        tutorCard.css({
          left: '',
          width: '',
          height: '',
          opacity: '',
          'min-width': ''
        });
        done();
      });
    });
  };
  return {
    beforeAddClass: beforeAddClass,
    removeClass: removeClass
  };
});
// Width animation for the supplemental card when it appears
// ('animate-to-two-cards') or disappears ('animate-to-one-card'). The
// trigger class is removed again once the animation finishes, and the
// returned closer handles AngularJS animation cancellation.
oppia.animation('.conversation-skin-animate-cards', function() {
  // This removes the newly-added class once the animation is finished.
  var animateCards = function(element, className, done) {
    var supplementalCardElt = jQuery(element).find(
      '.conversation-skin-supplemental-card-container');
    if (className === 'animate-to-two-cards') {
      var supplementalWidth = supplementalCardElt.width();
      supplementalCardElt.css({
        width: 0,
        'min-width': '0',
        opacity: '0'
      });
      // Grow the supplemental card to its natural width, then fade it in.
      supplementalCardElt.animate({
        width: supplementalWidth
      }, TIME_NUM_CARDS_CHANGE_MSEC, function() {
        supplementalCardElt.animate({
          opacity: '1'
        }, TIME_FADEIN_MSEC, function() {
          supplementalCardElt.css({
            width: '',
            'min-width': '',
            opacity: ''
          });
          jQuery(element).removeClass('animate-to-two-cards');
          done();
        });
      });
      // Cancellation handler: clear inline styles and stop the animation.
      return function(cancel) {
        if (cancel) {
          supplementalCardElt.css({
            width: '',
            'min-width': '',
            opacity: ''
          });
          supplementalCardElt.stop();
          jQuery(element).removeClass('animate-to-two-cards');
        }
      };
    } else if (className === 'animate-to-one-card') {
      supplementalCardElt.css({
        opacity: 0,
        'min-width': 0
      });
      supplementalCardElt.animate({
        width: 0
      }, TIME_NUM_CARDS_CHANGE_MSEC, function() {
        jQuery(element).removeClass('animate-to-one-card');
        done();
      });
      return function(cancel) {
        if (cancel) {
          supplementalCardElt.css({
            opacity: '',
            'min-width': '',
            width: ''
          });
          supplementalCardElt.stop();
          jQuery(element).removeClass('animate-to-one-card');
        }
      };
    } else {
      return;
    }
  };
  return {
    addClass: animateCards
  };
});
// Cross-fades the tutor card's contents: fades the old content out, animates
// the card height to the pre-measured height of the hidden "future" card,
// then fades the new content in.
oppia.animation('.conversation-skin-animate-card-contents', function() {
  var animateCardChange = function(element, className, done) {
    if (className !== 'animate-card-change') {
      return;
    }
    var currentHeight = element.height();
    // The upcoming card is rendered off-screen so its height can be measured.
    var expectedNextHeight = $(
      '.conversation-skin-future-tutor-card ' +
      '.conversation-skin-tutor-card-content'
    ).height();
    // Fix the current card height, so that it does not change during the
    // animation, even though its contents might.
    element.css('height', currentHeight);
    jQuery(element).animate({
      opacity: 0
    }, TIME_FADEOUT_MSEC).animate({
      height: expectedNextHeight
    }, TIME_HEIGHT_CHANGE_MSEC).animate({
      opacity: 1
    }, TIME_FADEIN_MSEC, function() {
      element.css('height', '');
      done();
    });
    // Cancellation handler: restore inline styles and stop queued animations.
    return function(cancel) {
      if (cancel) {
        element.css('opacity', '1.0');
        element.css('height', '');
        element.stop();
      }
    };
  };
  return {
    addClass: animateCardChange
  };
});
// The conversation skin: the main learner view. Renders the transcript of
// tutor cards, handles answer submission, and manages the responsive
// one-card / two-card layout.
oppia.directive('conversationSkin', [function() {
  return {
    restrict: 'E',
    scope: {},
    templateUrl: 'skins/Conversation',
    controller: [
      '$scope', '$timeout', '$rootScope', '$window', '$translate',
      'messengerService', 'oppiaPlayerService', 'urlService', 'focusService',
      'LearnerViewRatingService', 'windowDimensionsService',
      'playerTranscriptService', 'LearnerParamsService',
      'playerPositionService', 'explorationRecommendationsService',
      'StatsReportingService', 'UrlInterpolationService',
      function(
          $scope, $timeout, $rootScope, $window, $translate,
          messengerService, oppiaPlayerService, urlService, focusService,
          LearnerViewRatingService, windowDimensionsService,
          playerTranscriptService, LearnerParamsService,
          playerPositionService, explorationRecommendationsService,
          StatsReportingService, UrlInterpolationService) {
        $scope.CONTINUE_BUTTON_FOCUS_LABEL = 'continueButton';
        // The exploration domain object.
        $scope.exploration = null;
        // The minimum width, in pixels, needed to be able to show two cards
        // side-by-side.
        var TWO_CARD_THRESHOLD_PX = 960;
        var TIME_PADDING_MSEC = 250;
        var TIME_SCROLL_MSEC = 600;
        var MIN_CARD_LOADING_DELAY_MSEC = 950;
        var CONTENT_FOCUS_LABEL_PREFIX = 'content-focus-label-';
        // Whether the learner has submitted at least one answer; used to
        // decide whether to warn before the page is unloaded.
        var hasInteractedAtLeastOnce = false;
        // Guard that prevents double-submission of an answer.
        var _answerIsBeingProcessed = false;
        // Focus label to apply once the pending interaction is shown.
        var _nextFocusLabel = null;
        // This variable is used only when viewport is narrow.
        // Indicates whether the tutor card is displayed.
        var tutorCardIsDisplayedIfNarrow = true;
        $scope.explorationId = oppiaPlayerService.getExplorationId();
        $scope.isInPreviewMode = oppiaPlayerService.isInPreviewMode();
        $scope.isIframed = urlService.isIframed();
        $rootScope.loadingMessage = 'Loading';
        $scope.hasFullyLoaded = false;
        $scope.recommendedExplorationSummaries = [];
        $scope.OPPIA_AVATAR_IMAGE_URL = (
          UrlInterpolationService.getStaticImageUrl(
            '/avatar/oppia_black_72px.png'));
        $scope.activeCard = null;
        $scope.numProgressDots = 0;
        $scope.arePreviousResponsesShown = false;
        // Data about the upcoming card, rendered off-screen so the
        // height-change animation knows its target size.
        $scope.upcomingStateName = null;
        $scope.upcomingContentHtml = null;
        $scope.upcomingInlineInteractionHtml = null;
        $scope.helpCardHtml = null;
        $scope.helpCardHasContinueButton = false;
        // Default avatar, replaced asynchronously by the user's picture.
        $scope.profilePicture = (
          UrlInterpolationService.getStaticImageUrl(
            '/avatar/user_blue_72px.png'));
        $scope.DEFAULT_TWITTER_SHARE_MESSAGE_PLAYER =
          GLOBALS.DEFAULT_TWITTER_SHARE_MESSAGE_PLAYER;
        oppiaPlayerService.getUserProfileImage().then(function(result) {
          $scope.profilePicture = result;
        });
        // Resets the help card to its empty (hidden) state.
        $scope.clearHelpCard = function() {
          $scope.helpCardHtml = null;
          $scope.helpCardHasContinueButton = false;
        };
        // Focus label associated with the content of the card at `index`.
        $scope.getContentFocusLabel = function(index) {
          return CONTENT_FOCUS_LABEL_PREFIX + index;
        };
// If the exploration is iframed, send data to its parent about its
// height so that the parent can be resized as necessary.
$scope.lastRequestedHeight = 0;
$scope.lastRequestedScroll = false;
$scope.adjustPageHeight = function(scroll, callback) {
$timeout(function() {
var newHeight = document.body.scrollHeight;
if (Math.abs($scope.lastRequestedHeight - newHeight) > 50.5 ||
(scroll && !$scope.lastRequestedScroll)) {
// Sometimes setting iframe height to the exact content height
// still produces scrollbar, so adding 50 extra px.
newHeight += 50;
messengerService.sendMessage(messengerService.HEIGHT_CHANGE, {
height: newHeight,
scroll: scroll
});
$scope.lastRequestedHeight = newHeight;
$scope.lastRequestedScroll = scroll;
}
if (callback) {
callback();
}
}, 100);
};
$scope.isOnTerminalCard = function() {
return $scope.activeCard &&
$scope.exploration.isStateTerminal($scope.activeCard.stateName);
};
var isSupplementalCardNonempty = function(card) {
return !$scope.exploration.isInteractionInline(card.stateName);
};
$scope.isCurrentSupplementalCardNonempty = function() {
return $scope.activeCard && isSupplementalCardNonempty(
$scope.activeCard);
};
// Navigates to the currently-active card, and resets the 'show previous
// responses' setting.
var _navigateToActiveCard = function() {
var index = playerPositionService.getActiveCardIndex();
$scope.activeCard = playerTranscriptService.getCard(index);
$scope.arePreviousResponsesShown = false;
$scope.clearHelpCard();
tutorCardIsDisplayedIfNarrow = true;
if (_nextFocusLabel && playerTranscriptService.isLastCard(index)) {
focusService.setFocusIfOnDesktop(_nextFocusLabel);
} else {
focusService.setFocusIfOnDesktop(
$scope.getContentFocusLabel(index));
}
};
var animateToTwoCards = function(doneCallback) {
$scope.isAnimatingToTwoCards = true;
$timeout(function() {
$scope.isAnimatingToTwoCards = false;
if (doneCallback) {
doneCallback();
}
}, TIME_NUM_CARDS_CHANGE_MSEC + TIME_FADEIN_MSEC + TIME_PADDING_MSEC);
};
var animateToOneCard = function(doneCallback) {
$scope.isAnimatingToOneCard = true;
$timeout(function() {
$scope.isAnimatingToOneCard = false;
if (doneCallback) {
doneCallback();
}
}, TIME_NUM_CARDS_CHANGE_MSEC);
};
$scope.isCurrentCardAtEndOfTranscript = function() {
return playerTranscriptService.isLastCard(
playerPositionService.getActiveCardIndex());
};
var _addNewCard = function(
stateName, newParams, contentHtml, interactionHtml) {
playerTranscriptService.addNewCard(
stateName, newParams, contentHtml, interactionHtml);
if (newParams) {
LearnerParamsService.init(newParams);
}
$scope.numProgressDots++;
var totalNumCards = playerTranscriptService.getNumCards();
var previousSupplementalCardIsNonempty = (
totalNumCards > 1 &&
isSupplementalCardNonempty(
playerTranscriptService.getCard(totalNumCards - 2)));
var nextSupplementalCardIsNonempty = isSupplementalCardNonempty(
playerTranscriptService.getLastCard());
if (totalNumCards > 1 && $scope.canWindowFitTwoCards() &&
!previousSupplementalCardIsNonempty &&
nextSupplementalCardIsNonempty) {
playerPositionService.setActiveCardIndex(
$scope.numProgressDots - 1);
animateToTwoCards(function() {});
} else if (
totalNumCards > 1 && $scope.canWindowFitTwoCards() &&
previousSupplementalCardIsNonempty &&
!nextSupplementalCardIsNonempty) {
animateToOneCard(function() {
playerPositionService.setActiveCardIndex(
$scope.numProgressDots - 1);
});
} else {
playerPositionService.setActiveCardIndex(
$scope.numProgressDots - 1);
}
if ($scope.exploration.isStateTerminal(stateName)) {
explorationRecommendationsService.getRecommendedSummaryDicts(
$scope.exploration.getAuthorRecommendedExpIds(stateName),
function(summaries) {
$scope.recommendedExplorationSummaries = summaries;
});
}
};
$scope.toggleShowPreviousResponses = function() {
$scope.arePreviousResponsesShown = !$scope.arePreviousResponsesShown;
};
$scope.initializePage = function() {
$scope.waitingForOppiaFeedback = false;
hasInteractedAtLeastOnce = false;
$scope.recommendedExplorationSummaries = [];
playerPositionService.init(_navigateToActiveCard);
oppiaPlayerService.init(function(exploration, initHtml, newParams) {
$scope.exploration = exploration;
$scope.isLoggedIn = oppiaPlayerService.isLoggedIn();
_nextFocusLabel = focusService.generateFocusLabel();
_addNewCard(
exploration.initStateName,
newParams,
initHtml,
oppiaPlayerService.getInteractionHtml(
exploration.initStateName, _nextFocusLabel));
$rootScope.loadingMessage = '';
$scope.hasFullyLoaded = true;
// If the exploration is embedded, use the exploration language
// as site language. If the exploration language is not supported
// as site language, English is used as default.
var langCodes = $window.GLOBALS.SUPPORTED_SITE_LANGUAGES.map(
function(language) {
return language.id;
});
if ($scope.isIframed) {
var explorationLanguageCode = (
oppiaPlayerService.getExplorationLanguageCode());
if (langCodes.indexOf(explorationLanguageCode) !== -1) {
$translate.use(explorationLanguageCode);
} else {
$translate.use('en');
}
}
$scope.adjustPageHeight(false, null);
$window.scrollTo(0, 0);
focusService.setFocusIfOnDesktop(_nextFocusLabel);
});
};
$scope.submitAnswer = function(answer, interactionRulesService) {
// For some reason, answers are getting submitted twice when the
// submit button is clicked. This guards against that.
if (_answerIsBeingProcessed ||
!$scope.isCurrentCardAtEndOfTranscript() ||
$scope.activeCard.destStateName) {
return;
}
$scope.clearHelpCard();
_answerIsBeingProcessed = true;
hasInteractedAtLeastOnce = true;
$scope.waitingForOppiaFeedback = true;
var _oldStateName = playerTranscriptService.getLastCard().stateName;
playerTranscriptService.addNewAnswer(answer);
var timeAtServerCall = new Date().getTime();
oppiaPlayerService.submitAnswer(
answer, interactionRulesService, function(
newStateName, refreshInteraction, feedbackHtml, contentHtml,
newParams) {
// Do not wait if the interaction is supplemental -- there's
// already a delay bringing in the help card.
var millisecsLeftToWait = (
!$scope.exploration.isInteractionInline(_oldStateName) ? 1.0 :
Math.max(MIN_CARD_LOADING_DELAY_MSEC - (
new Date().getTime() - timeAtServerCall),
1.0));
$timeout(function() {
$scope.waitingForOppiaFeedback = false;
var pairs = (
playerTranscriptService.getLastCard().answerFeedbackPairs);
var lastAnswerFeedbackPair = pairs[pairs.length - 1];
if (_oldStateName === newStateName) {
// Stay on the same card.
playerTranscriptService.addNewFeedback(feedbackHtml);
if (feedbackHtml &&
!$scope.exploration.isInteractionInline(
$scope.activeCard.stateName)) {
$scope.helpCardHtml = feedbackHtml;
}
if (refreshInteraction) {
// Replace the previous interaction with another of the
// same type.
_nextFocusLabel = focusService.generateFocusLabel();
playerTranscriptService.updateLatestInteractionHtml(
oppiaPlayerService.getInteractionHtml(
newStateName, _nextFocusLabel) +
oppiaPlayerService.getRandomSuffix());
}
focusService.setFocusIfOnDesktop(_nextFocusLabel);
scrollToBottom();
} else {
// There is a new card. If there is no feedback, move on
// immediately. Otherwise, give the learner a chance to read
// the feedback, and display a 'Continue' button.
_nextFocusLabel = focusService.generateFocusLabel();
playerTranscriptService.setDestination(newStateName);
// These are used to compute the dimensions for the next card.
$scope.upcomingStateName = newStateName;
$scope.upcomingParams = newParams;
$scope.upcomingContentHtml = (
contentHtml + oppiaPlayerService.getRandomSuffix());
var _isNextInteractionInline = (
$scope.exploration.isInteractionInline(newStateName));
$scope.upcomingInlineInteractionHtml = (
_isNextInteractionInline ?
oppiaPlayerService.getInteractionHtml(
newStateName, _nextFocusLabel
) + oppiaPlayerService.getRandomSuffix() : '');
if (feedbackHtml) {
playerTranscriptService.addNewFeedback(feedbackHtml);
if (!$scope.exploration.isInteractionInline(
$scope.activeCard.stateName)) {
$scope.helpCardHtml = feedbackHtml;
$scope.helpCardHasContinueButton = true;
}
_nextFocusLabel = $scope.CONTINUE_BUTTON_FOCUS_LABEL;
focusService.setFocusIfOnDesktop(_nextFocusLabel);
scrollToBottom();
} else {
playerTranscriptService.addNewFeedback(feedbackHtml);
$scope.showPendingCard(
newStateName,
newParams,
contentHtml + oppiaPlayerService.getRandomSuffix());
}
}
_answerIsBeingProcessed = false;
}, millisecsLeftToWait);
}
);
};
$scope.startCardChangeAnimation = false;
$scope.showPendingCard = function(
newStateName, newParams, newContentHtml) {
$scope.startCardChangeAnimation = true;
$timeout(function() {
var newInteractionHtml = oppiaPlayerService.getInteractionHtml(
newStateName, _nextFocusLabel);
// Note that newInteractionHtml may be null.
if (newInteractionHtml) {
newInteractionHtml += oppiaPlayerService.getRandomSuffix();
}
_addNewCard(
newStateName, newParams, newContentHtml, newInteractionHtml);
$scope.upcomingStateName = null;
$scope.upcomingParams = null;
$scope.upcomingContentHtml = null;
$scope.upcomingInlineInteractionHtml = null;
}, TIME_FADEOUT_MSEC + 0.1 * TIME_HEIGHT_CHANGE_MSEC);
$timeout(function() {
focusService.setFocusIfOnDesktop(_nextFocusLabel);
scrollToTop();
},
TIME_FADEOUT_MSEC + TIME_HEIGHT_CHANGE_MSEC + 0.5 * TIME_FADEIN_MSEC);
$timeout(function() {
$scope.startCardChangeAnimation = false;
},
TIME_FADEOUT_MSEC + TIME_HEIGHT_CHANGE_MSEC + TIME_FADEIN_MSEC +
TIME_PADDING_MSEC);
};
var scrollToBottom = function() {
$timeout(function() {
var tutorCard = $('.conversation-skin-main-tutor-card');
if (tutorCard.length === 0) {
return;
}
var tutorCardBottom = (
tutorCard.offset().top + tutorCard.outerHeight());
if ($(window).scrollTop() + $(window).height() < tutorCardBottom) {
$('html, body').animate({
scrollTop: tutorCardBottom - $(window).height() + 12
}, {
duration: TIME_SCROLL_MSEC,
easing: 'easeOutQuad'
});
}
}, 100);
};
var scrollToTop = function() {
$timeout(function() {
$('html, body').animate({
scrollTop: 0
}, 800, 'easeOutQuart');
return false;
});
};
$scope.submitUserRating = function(ratingValue) {
LearnerViewRatingService.submitUserRating(ratingValue);
};
$scope.$on('ratingUpdated', function() {
$scope.userRating = LearnerViewRatingService.getUserRating();
});
$window.addEventListener('beforeunload', function(e) {
if (hasInteractedAtLeastOnce && !$scope.isInPreviewMode &&
!$scope.exploration.isStateTerminal(
playerTranscriptService.getLastCard().stateName)) {
StatsReportingService.recordMaybeLeaveEvent(
playerTranscriptService.getLastStateName(),
LearnerParamsService.getAllParams());
var confirmationMessage = (
'If you navigate away from this page, your progress on the ' +
'exploration will be lost.');
(e || $window.event).returnValue = confirmationMessage;
return confirmationMessage;
}
});
$scope.windowWidth = windowDimensionsService.getWidth();
$window.onresize = function() {
$scope.adjustPageHeight(false, null);
$scope.windowWidth = windowDimensionsService.getWidth();
};
$window.addEventListener('scroll', function() {
fadeDotsOnScroll();
fixSupplementOnScroll();
});
var fadeDotsOnScroll = function() {
var progressDots = $('.conversation-skin-progress-dots');
var progressDotsTop = progressDots.height();
var newOpacity = Math.max(
(progressDotsTop - $(window).scrollTop()) / progressDotsTop, 0);
progressDots.css({
opacity: newOpacity
});
};
var fixSupplementOnScroll = function() {
var supplementCard = $('div.conversation-skin-supplemental-card');
var topMargin = $('.navbar-container').height() - 20;
if ($(window).scrollTop() > topMargin) {
supplementCard.addClass(
'conversation-skin-supplemental-card-fixed');
} else {
supplementCard.removeClass(
'conversation-skin-supplemental-card-fixed');
}
};
$scope.canWindowFitTwoCards = function() {
return $scope.windowWidth >= TWO_CARD_THRESHOLD_PX;
};
$scope.isViewportNarrow = function() {
return $scope.windowWidth < TWO_CARD_THRESHOLD_PX;
};
$scope.isWindowTall = function() {
return document.body.scrollHeight > $window.innerHeight;
};
$scope.isScreenNarrowAndShowingTutorCard = function() {
if (!$scope.isCurrentSupplementalCardNonempty()) {
return $scope.isViewportNarrow();
}
return $scope.isViewportNarrow() &&
tutorCardIsDisplayedIfNarrow;
};
$scope.isScreenNarrowAndShowingSupplementalCard = function() {
return $scope.isViewportNarrow() &&
!tutorCardIsDisplayedIfNarrow;
};
$scope.showTutorCardIfScreenIsNarrow = function() {
if ($scope.isViewportNarrow()) {
tutorCardIsDisplayedIfNarrow = true;
}
};
$scope.showSupplementalCardIfScreenIsNarrow = function() {
if ($scope.isViewportNarrow()) {
tutorCardIsDisplayedIfNarrow = false;
}
};
        // Kick off the initial exploration load and wire up the rating
        // widget.
        $scope.initializePage();
        LearnerViewRatingService.init(function(userRating) {
          $scope.userRating = userRating;
        });
        // Collection context, if this exploration was opened from one.
        $scope.collectionId = GLOBALS.collectionId;
        $scope.collectionTitle = GLOBALS.collectionTitle;
      }
    ]
  };
}]);
<|file_name|>cli.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import logging
import argparse
from .imdb import find_movies
logger = logging.getLogger('mrot')
def parse_args(argv=None):
    """Parse command line arguments for the mrot CLI.

    Args:
        argv: Optional list of argument strings. Defaults to ``sys.argv[1:]``
            (argparse's own default), so existing callers are unaffected;
            passing an explicit list makes the function unit-testable.

    Returns:
        argparse.Namespace with ``movie_name``, ``concurrency``, ``delta``
        and ``quiet`` attributes.
    """
    parser = argparse.ArgumentParser(prog='mrot', description='Show movie ratings over time.',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('movie_name', help='the name of the movie')

    # Optional arguments
    parser.add_argument("-c", "--concurrency", type=int, default=2,
                        help="maximum number of concurrent requests to the wayback machine")
    parser.add_argument("-d", "--delta", type=int, default=365, help="minimum number of days between two ratings")
    parser.add_argument("-q", "--quiet", action="store_true", help="don't print progress")

    return parser.parse_args(argv)
def main():
    """Entry point: parse arguments, look up the movie and plot its ratings."""
    args = parse_args()

    # logging.WARNING is the canonical name (WARN is a legacy alias).
    logging.basicConfig(level=(logging.WARNING if args.quiet else logging.INFO))

    # Don't allow more than 10 concurrent requests to the wayback machine.
    concurrency = min(args.concurrency, 10)

    # Find the movies corresponding to the given movie name
    imdb_movies = find_movies(args.movie_name)
    if imdb_movies:
        # Show rating for the first movie matching the given name
        imdb_movie = imdb_movies[0]
        imdb_movie.plot_ratings(concurrency, args.delta)
    else:
        logger.info('Movie not found')
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var chownr = require('chownr')
var tar = require('tar-stream')
var pump = require('pump')
var mkdirp = require('mkdirp')
var fs = require('fs')
var path = require('path')
var os = require('os')
var win32 = os.platform() === 'win32'
// Default no-op used for optional hooks (ignore/map callbacks).
var noop = function () {}

// Identity transform: returns its argument unchanged.
var echo = function (name) {
  return name
}

// On Windows, entry names are normalized to forward slashes and characters
// that are invalid in Windows paths are replaced with '_'; on other
// platforms names pass through untouched.
var windowsSafeName = function (name) {
  return name.replace(/\\/g, '/').replace(/[:?<>|]/g, '_')
}
var normalize = win32 ? windowsSafeName : echo
// Returns an iterator-style function that walks the tree rooted at cwd in
// breadth-first order. Each call stats the next entry and invokes
// callback(err, relativeName, stat); a call with no arguments signals that
// the walk is finished. `stat` is fs.stat or fs.lstat depending on whether
// symlinks are dereferenced.
var statAll = function (fs, stat, cwd, ignore, entries, sort) {
  var queue = entries || ['.']
  return function loop (callback) {
    if (!queue.length) return callback()
    var next = queue.shift()
    var nextAbs = path.join(cwd, next)
    stat(nextAbs, function (err, stat) {
      if (err) return callback(err)
      if (!stat.isDirectory()) return callback(null, next, stat)
      fs.readdir(nextAbs, function (err, files) {
        if (err) return callback(err)
        if (sort) files.sort()
        for (var i = 0; i < files.length; i++) {
          // Ignored entries never enter the queue.
          if (!ignore(path.join(cwd, next, files[i]))) queue.push(path.join(next, files[i]))
        }
        callback(null, next, stat)
      })
    })
  }
}
var strip = function (map, level) {
return function (header) {
header.name = header.name.split('/').slice(level).join('/')
var linkname = header.linkname
if (linkname && (header.type === 'link' || path.isAbsolute(linkname))) {
header.linkname = linkname.split('/').slice(level).join('/')
}
return map(header)
}
}
// Returns a tar-stream pack stream containing the directory tree rooted at
// cwd. Entries are statted lazily, one at a time, and streamed into the
// pack. opts supports filtering (ignore/filter), header rewriting
// (map/mapStream/strip), permission overrides (umask/dmode/fmode/
// readable/writable), a custom fs implementation, and reuse of an existing
// pack stream.
exports.pack = function (cwd, opts) {
  if (!cwd) cwd = '.'
  if (!opts) opts = {}
  var xfs = opts.fs || fs
  var ignore = opts.ignore || opts.filter || noop
  var map = opts.map || noop
  var mapStream = opts.mapStream || echo
  var statNext = statAll(xfs, opts.dereference ? xfs.stat : xfs.lstat, cwd, ignore, opts.entries, opts.sort)
  var strict = opts.strict !== false
  var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
  var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
  var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0
  var pack = opts.pack || tar.pack()
  var finish = opts.finish || noop
  if (opts.strip) map = strip(map, opts.strip)
  // readable/writable force the corresponding permission bits onto every
  // entry (555/444 resp. 333/222 for directories/files).
  if (opts.readable) {
    dmode |= parseInt(555, 8)
    fmode |= parseInt(444, 8)
  }
  if (opts.writable) {
    dmode |= parseInt(333, 8)
    fmode |= parseInt(222, 8)
  }
  // Resolves a symlink's target before emitting its header.
  var onsymlink = function (filename, header) {
    xfs.readlink(path.join(cwd, filename), function (err, linkname) {
      if (err) return pack.destroy(err)
      header.linkname = normalize(linkname)
      pack.entry(header, onnextentry)
    })
  }
  // Converts one stat result into a tar entry, then schedules the next.
  var onstat = function (err, filename, stat) {
    if (err) return pack.destroy(err)
    // No filename means the directory walk is exhausted.
    if (!filename) {
      if (opts.finalize !== false) pack.finalize()
      return finish(pack)
    }
    if (stat.isSocket()) return onnextentry() // tar does not support sockets...
    var header = {
      name: normalize(filename),
      mode: (stat.mode | (stat.isDirectory() ? dmode : fmode)) & umask,
      mtime: stat.mtime,
      size: stat.size,
      type: 'file',
      uid: stat.uid,
      gid: stat.gid
    }
    if (stat.isDirectory()) {
      header.size = 0
      header.type = 'directory'
      header = map(header) || header
      return pack.entry(header, onnextentry)
    }
    if (stat.isSymbolicLink()) {
      header.size = 0
      header.type = 'symlink'
      header = map(header) || header
      return onsymlink(filename, header)
    }
    // TODO: add fifo etc...
    header = map(header) || header
    if (!stat.isFile()) {
      if (strict) return pack.destroy(new Error('unsupported type for ' + filename))
      return onnextentry()
    }
    // pack.entry returns a writable entry stream for regular files; pipe
    // the (possibly user-transformed) file contents into it.
    var entry = pack.entry(header, onnextentry)
    if (!entry) return
    var rs = mapStream(xfs.createReadStream(path.join(cwd, filename)), header)
    rs.on('error', function (err) { // always forward errors on destroy
      entry.destroy(err)
    })
    pump(rs, entry)
  }
  var onnextentry = function (err) {
    if (err) return pack.destroy(err)
    statNext(onstat)
  }
  onnextentry()
  return pack
}
// process.getuid is unavailable on Windows; report -1 ("unknown") there.
var processGetuid = function () {
  if (process.getuid) return process.getuid()
  return -1
}

// process.umask can be missing in some environments; fall back to 0.
var processUmask = function () {
  if (process.umask) return process.umask()
  return 0
}
// Returns a tar-stream extract stream that writes the archive's entries
// into cwd. Supports the same header-rewriting and permission options as
// pack, plus chown-as-root (opts.chown), mtime restoration (opts.utimes)
// and a fallback that copies hard links as files (hardlinkAsFilesFallback).
exports.extract = function (cwd, opts) {
  if (!cwd) cwd = '.'
  if (!opts) opts = {}
  var xfs = opts.fs || fs
  var ignore = opts.ignore || opts.filter || noop
  var map = opts.map || noop
  var mapStream = opts.mapStream || echo
  // Only chown when running as root on a non-Windows platform.
  var own = opts.chown !== false && !win32 && processGetuid() === 0
  var extract = opts.extract || tar.extract()
  // Stack of [dirName, mtime] pairs for ancestor directories, used to
  // re-apply directory mtimes after children are written into them.
  var stack = []
  var now = new Date()
  var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
  var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
  var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0
  var strict = opts.strict !== false
  if (opts.strip) map = strip(map, opts.strip)
  if (opts.readable) {
    dmode |= parseInt(555, 8)
    fmode |= parseInt(444, 8)
  }
  if (opts.writable) {
    dmode |= parseInt(333, 8)
    fmode |= parseInt(222, 8)
  }
  var utimesParent = function (name, cb) { // we just set the mtime on the parent dir again everytime we write an entry
    var top
    while ((top = head(stack)) && name.slice(0, top[0].length) !== top[0]) stack.pop()
    if (!top) return cb()
    xfs.utimes(top[0], now, top[1], cb)
  }
  // Restores the entry's recorded mtime, then refreshes the parent
  // directory's mtime which was disturbed by the write.
  var utimes = function (name, header, cb) {
    if (opts.utimes === false) return cb()
    if (header.type === 'directory') return xfs.utimes(name, now, header.mtime, cb)
    if (header.type === 'symlink') return utimesParent(name, cb) // TODO: how to set mtime on link?
    xfs.utimes(name, now, header.mtime, function (err) {
      if (err) return cb(err)
      utimesParent(name, cb)
    })
  }
  // Applies mode and (when running as root) ownership to the entry. Uses
  // the l-variants for symlinks so the link itself is modified.
  var chperm = function (name, header, cb) {
    var link = header.type === 'symlink'
    var chmod = link ? xfs.lchmod : xfs.chmod
    var chown = link ? xfs.lchown : xfs.chown
    if (!chmod) return cb()
    var mode = (header.mode | (header.type === 'directory' ? dmode : fmode)) & umask
    chmod(name, mode, function (err) {
      if (err) return cb(err)
      if (!own) return cb()
      if (!chown) return cb()
      chown(name, header.uid, header.gid, cb)
    })
  }
  extract.on('entry', function (header, stream, next) {
    header = map(header) || header
    header.name = normalize(header.name)
    // path.join('/', ...) collapses '..' segments so entries cannot name a
    // path above cwd.
    var name = path.join(cwd, path.join('/', header.name))
    if (ignore(name, header)) {
      stream.resume()
      return next()
    }
    // Common completion handler: restore times and permissions.
    var stat = function (err) {
      if (err) return next(err)
      utimes(name, header, function (err) {
        if (err) return next(err)
        if (win32) return next()
        chperm(name, header, next)
      })
    }
    var onsymlink = function () {
      if (win32) return next() // skip symlinks on win for now before it can be tested
      xfs.unlink(name, function () {
        xfs.symlink(header.linkname, name, stat)
      })
    }
    var onlink = function () {
      if (win32) return next() // skip links on win for now before it can be tested
      xfs.unlink(name, function () {
        var srcpath = path.join(cwd, path.join('/', header.linkname))
        xfs.link(srcpath, name, function (err) {
          // Some filesystems forbid hard links; optionally fall back to
          // copying the link target as a regular file.
          if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) {
            stream = xfs.createReadStream(srcpath)
            return onfile()
          }
          stat(err)
        })
      })
    }
    var onfile = function () {
      var ws = xfs.createWriteStream(name)
      var rs = mapStream(stream, header)
      ws.on('error', function (err) { // always forward errors on destroy
        rs.destroy(err)
      })
      pump(rs, ws, function (err) {
        if (err) return next(err)
        ws.on('close', stat)
      })
    }
    if (header.type === 'directory') {
      stack.push([name, header.mtime])
      return mkdirfix(name, {
        fs: xfs, own: own, uid: header.uid, gid: header.gid
      }, stat)
    }
    // Make sure no component of the target directory is an existing
    // non-directory before creating it.
    var dir = path.dirname(name)
    validate(xfs, dir, path.join(cwd, '.'), function (err, valid) {
      if (err) return next(err)
      if (!valid) return next(new Error(dir + ' is not a valid path'))
      mkdirfix(dir, {
        fs: xfs, own: own, uid: header.uid, gid: header.gid
      }, function (err) {
        if (err) return next(err)
        switch (header.type) {
          case 'file': return onfile()
          case 'link': return onlink()
          case 'symlink': return onsymlink()
        }
        if (strict) return next(new Error('unsupported type for ' + name + ' (' + header.type + ')'))
        stream.resume()
        next()
      })
    })
  })
  if (opts.finish) extract.on('finish', opts.finish)
  return extract
}
// Walks upwards from `name` to `root` and calls cb(err, valid), where
// valid is false if any intermediate component exists but is not a real
// directory. NOTE(review): this appears intended to stop entries from
// being written through e.g. a previously-extracted symlink — confirm
// against upstream before relying on it.
function validate (fs, name, root, cb) {
  if (name === root) return cb(null, true)
  fs.lstat(name, function (err, st) {
    if (err && err.code !== 'ENOENT') return cb(err)
    // A missing component or a genuine directory is fine; keep walking up.
    if (err || st.isDirectory()) return validate(fs, path.join(name, '..'), root, cb)
    cb(null, false)
  })
}
// mkdir -p that additionally chowns any directories it actually created
// (`made` is the top-most created dir) to the entry's uid/gid when
// opts.own is set (i.e. when extracting as root).
function mkdirfix (name, opts, cb) {
  mkdirp(name, {fs: opts.fs}, function (err, made) {
    if (!err && made && opts.own) {
      chownr(made, opts.uid, opts.gid, cb)
    } else {
      cb(err)
    }
  })
}
// Returns the last element of a list, or null when the list is empty.
var head = function (list) {
  if (!list.length) return null
  return list[list.length - 1]
}
<|file_name|>Json.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2012 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Heuristic that inspects XHR responses and window.postMessage payloads for
 * JSON-like content and reports objects that fail strict JSON parsing.
 */
DOMSnitch.Heuristics.Json = function() {
  /*
  this._dbg = DOMSnitch.Heuristics.LightDbg.getInstance();
  document.addEventListener("Eval", this._handleEval.bind(this), true);
  */

  // Workaround for eval().
  // More info at http://radi.r-n-d.org/2011/02/evil-magic-of-eval.html
  var collector = DOMSnitch.Heuristics.XhrCollector.getInstance();
  collector.addListener(this._checkXhr.bind(this));
  window.addEventListener("message", this._checkPostMsg.bind(this), true);
}
DOMSnitch.Heuristics.Json.prototype = {
  // Validates data that looks like JSON. Reports a medium-severity finding
  // when the payload resembles JSON but fails strict parsing, or when it
  // still fails after common JS-literal relaxations (suggesting embedded
  // code).
  _checkJsonValidity: function(recordInfo) {
    if(!recordInfo.jsData) {
      return;
    }

    var code = 0; // None
    var notes = "";
    var jsData = recordInfo.jsData;
    var canParse = true;
    var hasCode = false;

    // Unwrap a surrounding "(...)" wrapper, if present.
    if(jsData[0] == "(" && jsData[jsData.length - 1] == ")") {
      jsData = jsData.substring(1, jsData.length - 1);
    }

    if(this._isJson(jsData)) {
      try {
        JSON.parse(jsData);
      } catch (e) {
        canParse = false;
      }

      // Relax common JS-literal constructs (array elisions, unquoted keys,
      // single-quoted strings) and re-parse; if even the relaxed form
      // fails, the payload probably contains executable code.
      jsData = jsData.replace(/,\]/g, ",null]");
      jsData = jsData.replace(/\[,/g, "[null,");
      jsData = jsData.replace(/,,/g, ",null,");
      jsData = jsData.replace(/,,/g, ",null,");
      jsData = jsData.replace(/{([\w_]+):/g, "{\"$1\":");
      jsData = jsData.replace(/,([\w_]+):/g, ",\"$1\":");
      jsData = jsData.replace(/'(\w+)'/g, "\"$1\"");

      try {
        JSON.parse(jsData);
      } catch (e) {
        hasCode = true;
      }

      if(!canParse) {
        code = 2; // Medium
        notes += "Malformed JSON object.\n";
      }

      if(!canParse && hasCode) {
        code = 2; // Medium
        notes += "Found code in JSON object.\n";
      }
    }

    if(code > 1) {
      var data = "JSON object:\n" + recordInfo.jsData;
      if(!!recordInfo.debugInfo) {
        data += "\n\n-----\n\n";
        data += "Raw stack trace:\n" + recordInfo.debugInfo;
      }

      var record = {
        documentUrl: location.href,
        type: recordInfo.type,
        data: data,
        callStack: [],
        gid: recordInfo.globalId,
        env: {},
        scanInfo: {code: code, notes: notes}
      };

      this._report(record);
    }
  },

  // Handler for window.postMessage events; validates the message payload.
  _checkPostMsg: function(event) {
    var code = this._stripBreakers(event.data);
    var globalId = event.origin + "#InvalidJson";
    window.setTimeout(
      this._checkJsonValidity.bind(
        this,
        {
          jsData: code,
          globalId: globalId,
          type: "Invalid JSON"
        }
      ),
      10
    );
  },

  // Handler for captured XHR completions; validates the response body.
  _checkXhr: function(event) {
    var xhr = event.xhr;
    var code = this._stripBreakers(xhr.responseBody);
    var globalId = xhr.requestUrl + "#InvalidJson";
    window.setTimeout(
      this._checkJsonValidity.bind(
        this,
        {
          jsData: code,
          globalId: globalId,
          type: "Invalid JSON"
        }
      ),
      10
    );
  },

  // Handler for intercepted eval() calls, delivered via DOM attributes.
  // NOTE(review): relies on this._dbg, whose initialization is commented
  // out in the constructor — confirm before re-enabling the Eval listener.
  _handleEval: function(event) {
    var elem = event.target.documentElement;
    var args = JSON.parse(elem.getAttribute("evalArgs"));
    var code = args[0];
    var globalId = elem.getAttribute("evalGid");
    var debugInfo = this._dbg.collectStackTrace();

    elem.removeAttribute("evalArgs");
    elem.removeAttribute("evalGid");

    window.setTimeout(
      this._checkJsonValidity.bind(
        this,
        {
          jsData: code,
          globalId: globalId,
          type: "Invalid JSON",
          debugInfo: debugInfo
        }
      ),
      10
    );
  },

  // Cheap shape test: looks like an object/array literal and does not
  // contain obvious statement keywords.
  _isJson: function(jsData) {
    var seemsJson = /\{.+\}/.test(jsData);
    seemsJson = seemsJson || /\[.+\]/.test(jsData);
    seemsJson = seemsJson && !(/(function|while|if)[\s\w]*\(/.test(jsData));
    seemsJson = seemsJson && !(/(try|else)\s*\{/.test(jsData));

    return seemsJson;
  },

  // Forwards a finding to the extension's background page.
  _report: function(obj) {
    chrome.extension.sendRequest({type: "log", record: obj});
  },

  // Returns jsData starting at the first "{" or "[" (whichever occurs
  // first), stripping anti-XSSI breakers such as ")]}'". Returns the
  // string unchanged when neither bracket occurs. (Unused locals that
  // computed lastIndexOf values have been removed.)
  _stripBreakers: function(jsData) {
    var cIdx = jsData.indexOf("{");
    var aIdx = jsData.indexOf("[");
    var idx = 0;

    if(cIdx > -1 && aIdx > -1) {
      idx = cIdx > aIdx ? aIdx : cIdx;
    } else if(cIdx > -1) {
      idx = cIdx;
    } else if(aIdx > -1) {
      idx = aIdx;
    }

    return jsData.substring(idx);
  }
}
<|file_name|>ProductDirective.java<|end_file_name|><|fim▁begin|><|fim▁hole|>/**
* Copyright (c) 2016-2020, Michael Yang 杨福海 ([email protected]).
* <p>
* Licensed under the GNU Lesser General Public License (LGPL) ,Version 3.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.gnu.org/licenses/lgpl-3.0.txt
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.jpress.module.product.directive;
import com.jfinal.aop.Inject;
import com.jfinal.template.Env;
import com.jfinal.template.io.Writer;
import com.jfinal.template.stat.Scope;
import io.jboot.utils.StrUtil;
import io.jboot.web.directive.annotation.JFinalDirective;
import io.jboot.web.directive.base.JbootDirectiveBase;
import io.jpress.module.product.model.Product;
import io.jpress.module.product.service.ProductService;
/**
* @author Michael Yang 杨福海 ([email protected])
* @version V1.0
*/
@JFinalDirective("product")
public class ProductDirective extends JbootDirectiveBase {
@Inject
private ProductService service;
@Override
public void onRender(Env env, Scope scope, Writer writer) {
String idOrSlug = getPara(0, scope);
Product product = getProduct(idOrSlug);
if (product == null) {
return;
}
scope.setLocal("product", product);
renderBody(env, scope, writer);
}
private Product getProduct(String idOrSlug) {
return StrUtil.isNumeric(idOrSlug)
? service.findById(idOrSlug)
: service.findFirstBySlug(idOrSlug);
}
@Override
public boolean hasEnd() {
return true;
}
}<|fim▁end|> | |
'use strict';

// Load every gulp task from ./tasks so each task can live in its own file.
require('require-dir')('./tasks');
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.