prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>Select.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { DropdownProps } from '../../modules/Dropdown';
import { default as DropdownDivider } from '../../modules/Dropdown/DropdownDivider';
import { default as DropdownHeader } from '../../modules/Dropdown/DropdownHeader';
import { default as DropdownItem } from '../../modules/Dropdown/DropdownItem';
import { default as DropdownMenu } from '../../modules/Dropdown/DropdownMenu';
export interface SelectProps extends DropdownProps {
}
interface SelectComponent extends React.StatelessComponent<SelectProps> {
Divider: typeof DropdownDivider;
Header: typeof DropdownHeader;<|fim▁hole|> Item: typeof DropdownItem;
Menu: typeof DropdownMenu;
}
declare const Select: SelectComponent;
export default Select;<|fim▁end|> | |
<|file_name|>cmapview.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
cmapview.cpp
-------------------
begin : Mon Mar 19 2001
copyright : (C) 2001 by Kmud Developer Team
(C) 2007 Tomas Mecir <[email protected]>
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "cmapview.h"
#include <QAction>
#include <QPushButton>
#include <QScrollArea>
#include <QActionGroup>
#include "cmapmanager.h"
#include "cmapzone.h"
#include "cmapzonemanager.h"
#include "cmaplevel.h"
#include "cmappath.h"
#include "cmapview.h"
#include "cmapelement.h"
#include "cmapwidget.h"
#include "cmapviewstatusbar.h"
#include "cmaptoolbase.h"
#include "cmapclipboard.h"
#include "cmapcmdelementproperties.h"
#include <kselectaction.h>
#include <ktoggleaction.h>
#include <kactioncollection.h>
#include <kdebug.h>
#include <kiconloader.h>
#include <klocale.h>
#include <kmessagebox.h>
#include <kstandarddirs.h>
#include <kundostack.h>
#include <kinputdialog.h>
CMapView::CMapView(CMapManager *manager,QWidget *parent) : KXmlGuiWindow(parent)
{
kDebug() << "CMapView::CMapView create view";
setCaption (i18n ("Mapper"));
setAttribute (Qt::WA_DeleteOnClose, false); // do not delete on close
mapManager = manager;
currentLevel = 0;
setFocusPolicy(Qt::StrongFocus);
setWindowFlags (Qt::Widget);
m_clipboard = new CMapClipboard(mapManager, this, actionCollection());
initMenus();
// set up the menus
setHelpMenuEnabled (false);
scroller = new QScrollArea(this);
scroller->setWidgetResizable(true);
scroller->setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOn);
scroller->setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOn);
setCentralWidget(scroller);
mapWidget = new CMapWidget(this, mapManager, scroller);
scroller->setWidget(mapWidget);
mapWidget->show();
statusbar = new CMapViewStatusbar(manager, this);
setStatusBar(statusbar);
cmdFollowMode = new QPushButton(i18n("Follow Moves"),statusbar);
cmdFollowMode->setIcon(BarIcon("kmud_follow.png"));
cmdFollowMode->setCheckable(true);
cmdFollowMode->setFocusProxy(this);
statusbar->addFollowButton(cmdFollowMode);
cmdCreateMode = new QPushButton(i18n("Auto Create"),statusbar);
cmdCreateMode->setIcon(BarIcon("kmud_create.png"));
cmdCreateMode->setCheckable(true);
cmdCreateMode->setFocusProxy(this);
statusbar->addFollowButton(cmdCreateMode);
// getMapData()->createModeActive = m_toolsCreate->isChecked();
//FIXME_jp : get settings for status bar instead of defaults
//FIXME_jp : Add proper resize instead of test size
changed();
}
CMapView::~CMapView()
{
kDebug() << "CMapView::~CMapView()";
}
void CMapView::initGUI()
{
createGUI(KStandardDirs::locate("appdata", "kmuddymapperpart.rc"));
mapWidget->initContexMenus();
enableViewControls(false);
}
void CMapView::initMenus()
{
kDebug() << "begin initMenus";
kDebug() << "Main collection is "<<actionCollection();
// Edit menu
mapManager->getCommandHistory()->createUndoAction(actionCollection(), "editUndo");
mapManager->getCommandHistory()->createRedoAction(actionCollection(), "editRedo");
// Tools menu
m_toolsGrid = new KToggleAction (this);
m_toolsGrid->setText ( i18n("&Grid"));
m_toolsGrid->setIcon (BarIcon("kmud_grid.png"));
connect (m_toolsGrid, SIGNAL (triggered()), this, SLOT(slotToolsGrid()));
actionCollection()->addAction ("toolsGrid", m_toolsGrid);
m_toolsUpLevel = new QAction (this);
m_toolsUpLevel->setText ( i18n("Display Upper Level"));
m_toolsUpLevel->setIcon (BarIcon("arrow-up"));
connect (m_toolsUpLevel, SIGNAL (triggered()), this, SLOT(slotToolsLevelUp()));
actionCollection()->addAction ("toolsLevelUp", m_toolsUpLevel);
m_toolsDownLevel = new QAction (this);
m_toolsDownLevel->setText ( i18n("Display Lower Level"));
m_toolsDownLevel->setIcon (BarIcon("arrow-down"));
connect (m_toolsDownLevel, SIGNAL (triggered()), this, SLOT(slotToolsLevelDown()));
actionCollection()->addAction ("toolsLevelDown", m_toolsDownLevel);
m_toolsDeleteLevel = new QAction (this);
m_toolsDeleteLevel->setText ( i18n("Delete Current Level"));
m_toolsDeleteLevel->setIcon (BarIcon("edit-delete"));
connect (m_toolsDeleteLevel, SIGNAL (triggered()), this, SLOT(slotToolsLevelDelete()));
actionCollection()->addAction ("toolsLevelDelete", m_toolsDeleteLevel);
m_toolsCreateZone = new QAction (this);
m_toolsCreateZone->setText ( i18n("Create New Zone"));
m_toolsCreateZone->setIcon (BarIcon("task-new"));
connect (m_toolsCreateZone, SIGNAL (triggered()), this, SLOT(slotToolsZoneCreate()));
actionCollection()->addAction ("toolsZoneCreate", m_toolsCreateZone);
m_toolsDeleteZone = new QAction (this);
m_toolsDeleteZone->setText ( i18n("Delete Current Zone"));
m_toolsDeleteZone->setIcon (BarIcon("edit-delete"));
connect (m_toolsDeleteZone, SIGNAL (triggered()), this, SLOT(slotToolsZoneDelete()));
actionCollection()->addAction ("toolsZoneDelete", m_toolsDeleteZone);
// View menu
m_viewUpperLevel = new KToggleAction (this);
m_viewUpperLevel->setText ( i18n("Map Upper Level"));
connect (m_viewUpperLevel, SIGNAL (triggered()), this, SLOT(slotViewUpperLevel()));
actionCollection()->addAction ("viewUpperLevel", m_viewUpperLevel);
m_viewLowerLevel = new KToggleAction (this);
m_viewLowerLevel->setText ( i18n("Map Lower Level"));
connect (m_viewLowerLevel, SIGNAL (triggered()), this, SLOT(slotViewLowerLevel()));
actionCollection()->addAction ("viewLowerLevel", m_viewLowerLevel);
// Room Popup Actions
QAction *action;
action = new QAction (this);
action->setText (i18n("Set &Current Position"));
connect (action, SIGNAL (triggered()), this, SLOT(slotRoomSetCurrentPos()));
actionCollection()->addAction ("roomCurrentPos", action);
action = new QAction (this);
action->setText (i18n("Set Room to &Login Point"));
connect (action, SIGNAL (triggered()), this, SLOT(slotRoomSetLogin()));
actionCollection()->addAction ("roomLoginPoint", action);
action = new QAction (this);
action->setText (i18n("&Speed walk to room"));
connect (action, SIGNAL (triggered()), this, SLOT(slotRoomSpeedwalkTo()));
actionCollection()->addAction ("roomWalkTo", action);
action = new QAction (this);
action->setText (i18n("&Delete room"));
action->setIcon (SmallIcon("edit-delete"));
connect (action, SIGNAL (triggered()), this, SLOT(slotRoomDelete()));
actionCollection()->addAction ("roomDelete", action);
action = new QAction (this);
action->setText (i18n("&Properties"));
action->setIcon (SmallIcon("document-properties"));
connect (action, SIGNAL (triggered()), this, SLOT(slotRoomProperties()));
actionCollection()->addAction ("roomProperties", action);
// Text Popup Actions
action = new QAction (this);
action->setText (i18n("&Delete Text"));
action->setIcon (SmallIcon("edit-delete"));
connect (action, SIGNAL (triggered()), this, SLOT(slotTextDelete()));
actionCollection()->addAction ("textDelete", action);
action = new QAction (this);
action->setText (i18n("&Properties"));
action->setIcon (SmallIcon("document-properties"));
connect (action, SIGNAL (triggered()), this, SLOT(slotTextProperties()));
actionCollection()->addAction ("textProperties", action);
// Path Popup Actions
action = new KToggleAction (this);
action->setText (i18n("&One way"));
connect (action, SIGNAL (triggered()), this, SLOT(slotPathOneWay()));
actionCollection()->addAction ("pathOneWay", action);
action = new KToggleAction (this);
action->setText (i18n("&Two way"));
connect (action, SIGNAL (triggered()), this, SLOT(slotPathTwoWay()));
actionCollection()->addAction ("pathTwoWay", action);
action = new QAction (this);
action->setText (i18n("&Add Bend"));
connect (action, SIGNAL (triggered()), this, SLOT(slotPathAddBend()));
actionCollection()->addAction ("pathAddBend", action);
action = new QAction (this);
action->setText (i18n("&Remove Segment"));
connect (action, SIGNAL (triggered()), this, SLOT(slotPathDelBend()));
actionCollection()->addAction ("pathDelBend", action);
action = new QAction (this);
action->setText (i18n("&Edit Bends"));
connect (action, SIGNAL (triggered()), this, SLOT(slotPathEditBends()));
actionCollection()->addAction ("pathEditBends", action);
action = new QAction (this);
action->setText (i18n("&Delete Path"));<|fim▁hole|> actionCollection()->addAction ("pathDelete", action);
action = new QAction (this);
action->setText (i18n("&Properties"));
connect (action, SIGNAL (triggered()), this, SLOT(slotPathProperties()));
actionCollection()->addAction ("pathPorperties", action);
QStringList labelPos;
labelPos.append(i18n("Hide"));
labelPos.append(mapManager->directionToText(NORTH,""));
labelPos.append(mapManager->directionToText(NORTHEAST,""));
labelPos.append(mapManager->directionToText(EAST,""));
labelPos.append(mapManager->directionToText(SOUTHEAST,""));
labelPos.append(mapManager->directionToText(SOUTH,""));
labelPos.append(mapManager->directionToText(SOUTHWEST,""));
labelPos.append(mapManager->directionToText(WEST,""));
labelPos.append(mapManager->directionToText(NORTHWEST,""));
labelPos.append(i18n("Custom"));
labelMenu = new KSelectAction (this);
labelMenu->setText (i18n("&Label"));
connect (labelMenu, SIGNAL (triggered()), this, SLOT(slotChangeLabelPos()));
actionCollection()->addAction ("labelMenu", labelMenu);
labelMenu->setItems(labelPos);
// tool action group
m_toolGroup = new QActionGroup (this);
}
void CMapView::readOptions()
{
CMapData *data = mapManager->getMapData();
m_toolsGrid->setChecked(data->gridVisable);
m_viewLowerLevel->setChecked(data->showLowerLevel);
m_viewUpperLevel->setChecked(data->showUpperLevel);
}
/** Used to get the currently viewed zone */
CMapZone *CMapView::getCurrentlyViewedZone()
{
return currentLevel ? currentLevel->getZone() : 0;
}
/** Used to get the currently viewed level */
CMapLevel *CMapView::getCurrentlyViewedLevel()
{
return currentLevel;
}
/** Used to set the current level. This is for internal use */
void CMapView::setLevel(CMapLevel *level)
{
currentLevel = level;
}
void CMapView::playerPositionChanged(CMapRoom *room)
{
if (!room) return;
if (room->getLevel() != currentLevel)
showPosition(room->getLevel(), false);
}
void CMapView::setSelectedElement(CMapElement *element)
{
m_selectedElement = element;
}
void CMapView::setSelectedPos(QPoint pos)
{
m_selectedPos = pos;
}
CMapElement *CMapView::getSelectedElement()
{
return m_selectedElement;
}
/** Used to let the map manager know if it should register the focus of this widget */
bool CMapView::acceptFocus()
{
return true;
}
/** This method is called when an element is added */
void CMapView::addedElement(CMapElement *element)
{
if (isElementVisible(element))
{
checkSize(element->getHighPos());
mapWidget->update();
}
}
/** This method is called when an element is deleted */
void CMapView::deletedElement(CMapLevel *deletedFromLevel)
{
CMapLevel *upperLevel = getCurrentlyViewedLevel()->getNextLevel();
CMapLevel *lowerLevel = getCurrentlyViewedLevel()->getPrevLevel();
if (deletedFromLevel == getCurrentlyViewedLevel())
mapWidget->update();
if (upperLevel && mapManager->getMapData()->showUpperLevel)
if (deletedFromLevel == upperLevel)
mapWidget->update();
if (lowerLevel && mapManager->getMapData()->showLowerLevel)
if (deletedFromLevel == lowerLevel)
mapWidget->update();
}
/** This method is called when an element is changed */
void CMapView::changedElement(CMapElement *element)
{
if (isElementVisible(element))
{
checkSize(element->getHighPos());
mapWidget->update();
}
if (element == mapManager->getCurrentRoom())
statusbar->setRoom(mapManager->getCurrentRoom()->getLabel());
}
/** This method is called when a map level is changed */
void CMapView::changedLevel(CMapLevel *level)
{
if (!isLevelVisible(level)) return;
changed();
}
/** Used to find out if a level is visible in the view */
bool CMapView::isLevelVisible(CMapLevel *level)
{
CMapLevel *vlevel = getCurrentlyViewedLevel();
if (!vlevel) return false;
if (level == vlevel)
return true;
if (level == vlevel->getPrevLevel())
return true;
if (level == vlevel->getNextLevel())
return true;
return false;
}
/** Used to find out if a element is visiable in the view */
bool CMapView::isElementVisible(CMapElement *element)
{
return isLevelVisible(element->getLevel());
}
/**
* Used to enable/disable the view actions
* @param If true then enable the actions otherwise disable the actions
*/
void CMapView::enableViewControls(bool enabled)
{
if (!mapManager->getMapData()) return; // if we don't have mapData, we're going down
enableNonViewActions(enabled);
m_clipboard->enableActions(enabled);
m_toolsUpLevel->setEnabled(enabled);
m_toolsDownLevel->setEnabled(enabled);
m_toolsDeleteLevel->setEnabled(enabled);
m_toolsCreateZone->setEnabled(enabled);
m_toolsDeleteZone->setEnabled(enabled);
}
/**
* This method is used to disable/enable mapper actions that are not done by enableViewControls()
* @param If true then enable the actions otherwise disable the actions
*/
void CMapView::enableNonViewActions(bool enabled)
{
m_toolsGrid->setEnabled(enabled);
}
void CMapView::requestPaint()
{
mapWidget->update();
}
void CMapView::changed()
{
maxSize = QSize(0,0);
CMapLevel *level = getCurrentlyViewedLevel();
if (!level) {
mapWidget->update();
return;
}
CMapLevel *upperLevel = level->getNextLevel();
CMapLevel *lowerLevel = level->getPrevLevel();
QPoint size(0,0);
// Calc the size the widget should be
QList<CMapElement *> lst = level->getAllElements();
foreach (CMapElement *element, lst)
{
if (element->getHighX()>size.x()) size.setX(element->getHighX());
if (element->getHighY()>size.y()) size.setY(element->getHighY());
}
if (upperLevel && mapManager->getMapData()->showUpperLevel)
{
lst = upperLevel->getAllElements();
foreach (CMapElement *element, lst)
{
if (element->getHighX()>size.x()) size.setX(element->getHighX());
if (element->getHighY()>size.y()) size.setY(element->getHighY());
}
}
if (lowerLevel && mapManager->getMapData()->showLowerLevel)
{
lst = lowerLevel->getAllElements();
foreach (CMapElement *element, lst)
{
if (element->getHighX()>size.x()) size.setX(element->getHighX());
if (element->getHighY()>size.y()) size.setY(element->getHighY());
}
}
checkSize(size);
mapWidget->update();
}
/** Tell this map widget to display a different level. view wiil
* center on the first room */
void CMapView::showPosition(CMapLevel *level,bool centerView)
{
QPoint pos(0,0);
if (!level->getRoomList()->isEmpty())
{
CMapRoom *room = level->getRoomList()->first();
pos.setX(room->getX());
pos.setY(room->getY());
}
showPosition(pos,level,centerView);
}
void CMapView::showPosition(CMapRoom *room,bool centerView)
{
QPoint pos(0,0);
if (room)
{
pos.setX(room->getX());
pos.setY(room->getY());
showPosition(pos,room->getLevel(),centerView);
}
}
/** Tell this map widget to display a different zone */
void CMapView::showPosition(QPoint pos,CMapLevel *level,bool centerView)
{
if ((!centerView) && (getCurrentlyViewedLevel() == level)) return;
if (!level) { changed(); return; }
setLevel(level);
changed();
enableViewControls(true);
// Center on the position
if (centerView)
scroller->ensureVisible(pos.x(),pos.y(), width()/2, height()/2);
// Update the status bar
statusbar->setZone(mapManager->getZone());
statusbar->setLevel(level);
CMapRoom *cur = mapManager->getCurrentRoom();
statusbar->setRoom(cur ? cur->getLabel() : "");
mapManager->activeViewChanged();
}
/** This is used ensure a location is visiable for views that scroll */
void CMapView::ensureVisible(QPoint pos)
{
scroller->ensureVisible(pos.x(),pos.y(),10,10);
}
/** Used to calculate the correct size for the widget */
void CMapView::checkSize(QPoint pos)
{
if (pos.x() > maxSize.width()) maxSize.setWidth(pos.x());
if (pos.y() > maxSize.height()) maxSize.setHeight(pos.y());
int view_x = width();
int view_y = height();
if (maxSize.width() > view_x)
view_x = maxSize.width();
if (maxSize.height() > view_y)
view_y = maxSize.height();
QSize grid = mapManager->getMapData()->gridSize;
view_x += grid.width() * 3;
view_y += grid.height() * 3;
if (view_y != mapWidget->height() || view_x != mapWidget->width())
mapWidget->setFixedSize(view_x, view_y);
}
/** Get the max x cord of all elements */
int CMapView::getMaxX(void)
{
return maxSize.width();
}
/** Get the max y cord of all elements */
int CMapView::getMaxY(void)
{
return maxSize.height();
}
void CMapView::setFollowMode(bool follow)
{
cmdFollowMode->setChecked(follow);
}
bool CMapView::getFollowMode(void)
{
return cmdFollowMode->isChecked();
}
void CMapView::setCreateMode(bool follow)
{
cmdCreateMode->setChecked(follow);
}
bool CMapView::getCreateMode(void)
{
return cmdCreateMode->isChecked();
}
// Tools slots
void CMapView::slotToolsGrid()
{
mapManager->getMapData()->gridVisable = m_toolsGrid->isChecked();
mapManager->redrawAllViews();
}
void CMapView::levelShift(bool up)
{
CMapLevel *level = getCurrentlyViewedLevel();
level = up ? level->getNextLevel() : level->getPrevLevel();
if (level) {
showPosition(level, false);
return;
}
if (KMessageBox::warningYesNo (NULL, i18n("There is no level in that direction. Do you want to create a new one?"),i18n("KMuddy Mapper")) != KMessageBox::Yes) return;
mapManager->createLevel(up ? UP : DOWN);
}
void CMapView::slotToolsLevelUp()
{
levelShift(true);
}
void CMapView::slotToolsLevelDown()
{
levelShift(false);
}
void CMapView::slotToolsLevelDelete()
{
CMapLevel *level = getCurrentlyViewedLevel();
if (!level) return;
int count = mapManager->getZone()->levelCount();
if (count <= 1) return;
if (KMessageBox::warningYesNo (NULL,i18n("Are you sure that you want to delete the current level?"),i18n("KMuddy Mapper")) != KMessageBox::Yes) return;
mapManager->deleteLevel(level);
}
void CMapView::slotToolsZoneCreate()
{
bool ok;
QString name = KInputDialog::getText(i18n("KMuddy Mapper"), i18n("Please enter the name of the new zone:"), QString(), &ok);
if (!ok) return;
if (!name.length()) return;
mapManager->zoneManager()->createZone(name);
}
void CMapView::slotToolsZoneDelete()
{
CMapZoneManager *zm = mapManager->zoneManager();
if (KMessageBox::warningYesNo (NULL,i18n("Are you sure that you want to delete the current zone? This cannot be undone."),i18n("KMuddy Mapper")) != KMessageBox::Yes) return;
zm->deleteZone(zm->activeZone());
}
void CMapView::slotViewUpperLevel()
{
mapManager->getMapData()->showUpperLevel = m_viewUpperLevel->isChecked();
mapManager->redrawAllViews();
}
void CMapView::slotViewLowerLevel()
{
mapManager->getMapData()->showLowerLevel = m_viewLowerLevel->isChecked();
mapManager->redrawAllViews();
}
/** Used to room under the point the current room */
void CMapView::slotRoomSetCurrentPos(void)
{
mapManager->setCurrentRoom((CMapRoom *)m_selectedElement);
}
/** Used to room under the point the login room */
void CMapView::slotRoomSetLogin(void)
{
mapManager->setLoginRoom((CMapRoom *)m_selectedElement);
}
/** Used to set speedwalk to the room under the pointer */
void CMapView::slotRoomSpeedwalkTo(void)
{
mapManager->walkPlayerTo((CMapRoom *)m_selectedElement);
}
/** Used to delete the room under the pointer */
void CMapView::slotRoomDelete(void)
{
mapManager->deleteElement(m_selectedElement);
}
/** Used to display the properties of the room under the pointer */
void CMapView::slotRoomProperties(void)
{
mapManager->propertiesRoom((CMapRoom *)m_selectedElement);
}
/** Used to make the path under the pointer one way */
void CMapView::slotPathOneWay(void)
{
mapManager->makePathOneWay((CMapPath *)m_selectedElement);
}
/** Used to make the path under the pointer two way */
void CMapView::slotPathTwoWay(void)
{
mapManager->makePathTwoWay((CMapPath *)m_selectedElement);
}
/** Used to add a bend to the path under the pointer */
void CMapView::slotPathAddBend(void)
{
kDebug() << "CMapView::CMapManager slotPathAddBend";
mapManager->openCommandGroup(i18n("Add Bend"));
CMapPath *path = (CMapPath *)m_selectedElement;
path->addBendWithUndo(m_selectedPos);
if (path->getOpsitePath())
{
path->getOpsitePath()->addBendWithUndo(m_selectedPos);
}
m_clipboard->slotUnselectAll();
path->setEditMode(true);
changedElement(path);
mapManager->closeCommandGroup();
}
/** Used to delete the path segment under the pointer */
void CMapView::slotPathDelBend(void)
{
mapManager->openCommandGroup(i18n("Delete Path Segment"));
CMapPath *path = (CMapPath *)m_selectedElement;
int seg = path->mouseInPathSeg(m_selectedPos, getCurrentlyViewedZone());
path->deletePathSegWithUndo(seg);
if (path->getOpsitePath())
{
int seg = path->getOpsitePath()->mouseInPathSeg(m_selectedPos, getCurrentlyViewedZone());
path->getOpsitePath()->deletePathSegWithUndo(seg);
}
mapManager->changedElement(path);
mapManager->closeCommandGroup();
}
/** Used to edit the bends of the path under the pointer */
void CMapView::slotPathEditBends(void)
{
CMapPath *path = (CMapPath *)m_selectedElement;
m_clipboard->slotUnselectAll();
path->setEditMode(true);
mapManager->changedElement(path);
}
/** Used to delete the path under the pointer */
void CMapView::slotPathDelete(void)
{
mapManager->deleteElement(m_selectedElement);
}
/** Used to display the properties of the path under the pointer */
void CMapView::slotPathProperties(void)
{
mapManager->propertiesPath((CMapPath *)m_selectedElement);
}
/** Used to delete the text element under the pointer */
void CMapView::slotTextDelete(void)
{
mapManager->deleteElement(m_selectedElement);
}
/** Used to display the text properties of the text element under the pointer */
void CMapView::slotTextProperties(void)
{
mapManager->propertiesText((CMapText *)m_selectedElement);
}
/** Used to change the position of room/zone labels */
void CMapView::slotChangeLabelPos()
{
if (m_selectedElement->getElementType()==ROOM)
{
CMapRoom *room = (CMapRoom *)m_selectedElement;
CMapCmdElementProperties *command = new CMapCmdElementProperties(mapManager,i18n("Change room label position"),room);
command->getOrgProperties().writeEntry("LabelPos",(int)room->getLabelPosition());
switch(labelMenu->currentItem())
{
case 0 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::HIDE); break;
case 1 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::NORTH); break;
case 2 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::NORTHEAST); break;
case 3 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::EAST); break;
case 4 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::SOUTHEAST); break;
case 5 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::SOUTH); break;
case 6 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::SOUTHWEST); break;
case 7 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::WEST); break;
case 8 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::NORTHWEST); break;
case 9 : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::CUSTOM); break;
default : command->getNewProperties().writeEntry("LabelPos",(int)CMapRoom::HIDE); break;
}
mapManager->addCommand(command);
}
if (m_selectedElement->getElementType()==ZONE)
{
CMapZone *zone = (CMapZone *)m_selectedElement;
CMapCmdElementProperties *command = new CMapCmdElementProperties(mapManager,i18n("Change zone label position"),zone);
command->getOrgProperties().writeEntry("LabelPos",(int)zone->getLabelPosition());
switch(labelMenu->currentItem())
{
case 0 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::HIDE); break;
case 1 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::NORTH); break;
case 2 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::NORTHEAST); break;
case 3 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::EAST); break;
case 4 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::SOUTHEAST); break;
case 5 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::SOUTH); break;
case 6 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::SOUTHWEST); break;
case 7 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::WEST); break;
case 8 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::NORTHWEST); break;
case 9 : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::CUSTOM); break;
default : command->getNewProperties().writeEntry("LabelPos",(int)CMapZone::HIDE); break;
}
mapManager->addCommand(command);
}
}
int CMapView::getWidth(void)
{
if (mapWidget->width() > scroller->viewport()->width())
return mapWidget->width();
return scroller->viewport()->width();
}
int CMapView::getHeight(void)
{
if (mapWidget->height() > scroller->viewport()->height())
return mapWidget->height();
return scroller->viewport()->height();
}
void CMapView::setCursor ( const QCursor & cursor)
{
if (mapWidget)
mapWidget->setCursor(cursor);
}
void CMapView::resizeEvent (QResizeEvent *)
{
changed();
}
/** Used to set the view to active */
void CMapView::focusInEvent(QFocusEvent *)
{
}
void CMapView::closeEvent(QCloseEvent *)
{
}
void CMapView::slotWidgetBeingClosed()
{
}
void CMapView::slotDockWindowClose()
{
}
bool CMapView::queryClose()
{
emit closed();
return true;
}<|fim▁end|> | action->setIcon (SmallIcon("edit-delete"));
connect (action, SIGNAL (triggered()), this, SLOT(slotPathDelete())); |
<|file_name|>Context.java<|end_file_name|><|fim▁begin|>// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package codeu.chat.client.core;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collection;
import codeu.chat.common.BasicView;
import codeu.chat.common.User;
import codeu.chat.util.Uuid;
import codeu.chat.util.connections.ConnectionSource;
public final class Context {
private final BasicView view;
private final Controller controller;
public Context(ConnectionSource source) {
this.view = new View(source);
this.controller = new Controller(source);
}
<|fim▁hole|> return user == null ?
null :
new UserContext(user, view, controller);
}
public Iterable<UserContext> allUsers() {
final Collection<UserContext> users = new ArrayList<>();
for (final User user : view.getUsers()) {
users.add(new UserContext(user, view, controller));
}
return users;
}
}<|fim▁end|> | public UserContext create(String name) {
final User user = controller.newUser(name); |
<|file_name|>ApplicationModule.java<|end_file_name|><|fim▁begin|>package com.example.dagger2_hello_world;
import android.content.Context;
import android.support.annotation.NonNull;
import javax.inject.Named;
import dagger.Binds;
import dagger.Module;
import dagger.Provides;
@Module
public abstract class ApplicationModule {
@Provides
@NonNull
static Application myApplication() {
return Application.getInstance();
}
@Binds
@NonNull
@Named(ApplicationScope.TAG)<|fim▁hole|>
@Binds
@NonNull
abstract android.app.Application application(Application application);
}<|fim▁end|> | abstract Context context(Application application); |
<|file_name|>LocationScale.js<|end_file_name|><|fim▁begin|>define([
'dojo/_base/declare',
'dojo/dom-construct',
'JBrowse/View/Track/BlockBased',
'JBrowse/Util'],
function(
declare,
dom,
BlockBased,
Util
) {
return declare(BlockBased,
/**
* @lends JBrowse.View.Track.LocationScale.prototype
*/
{
/**
* This track is for (e.g.) position and sequence information that should
* always stay visible at the top of the view.
* @constructs
*/
constructor: function( args ) {//name, labelClass, posHeight) {
this.loaded = true;
this.labelClass = args.labelClass;
this.posHeight = args.posHeight;
this.height = Math.round( args.posHeight * 1.2 );
},
// this track has no track label or track menu, stub them out
makeTrackLabel: function() {},
makeTrackMenu: function() {},
fillBlock: function( args ) {
var blockIndex = args.blockIndex;
var block = args.block;
var leftBase = args.leftBase;
var scale = args.scale;
var thisB = this;
// find the number that is within 2 px of the left boundary of
// the block that ends with the most zeroes, or a 5 if no
// zeroes
var labelNumber = this.chooseLabel( args );
var labelOffset = (leftBase+1-labelNumber)*scale/10;
// console.log( leftBase+1, labelNumber, labelOffset );
var posLabel = document.createElement("div");
var numtext = Util.addCommas( labelNumber );
posLabel.className = this.labelClass;
// give the position label a negative left offset in ex's to
// more-or-less center it over the left boundary of the block
posLabel.style.left = "-" + Number(numtext.length)/1.7 + labelOffset + "ex";
posLabel.appendChild( document.createTextNode( numtext ) );
block.domNode.appendChild(posLabel);
var highlight = this.browser.getHighlight();
if( highlight && highlight.ref == this.refSeq.name ) {
this.renderRegionHighlight( args, highlight );
}
var bookmarks = this.browser.getBookmarks();
if( bookmarks ) {
this.renderRegionBookmark( args, bookmarks, this.refSeq.name, true );
}
this.heightUpdate( Math.round( this.posHeight*1.2 ), blockIndex);
args.finishCallback();
},
chooseLabel: function( viewArgs ) {
var left = viewArgs.leftBase + 1;
var width = viewArgs.rightBase - left + 1;
var scale = viewArgs.scale;
for( var mod = 1000000; mod > 0; mod /= 10 ) {
if( left % mod * scale <= 3 )
return left - left%mod;
}
return left;
}
<|fim▁hole|>});<|fim▁end|> | }); |
<|file_name|>rev.go<|end_file_name|><|fim▁begin|>// The command line tool for running Revel apps.
package main
import (
"flag"
"fmt"
"io"
"os"
"strings"
"text/template"
)
// Cribbed from the genius organization of the "go" command.
type Command struct {
Run func(args []string)<|fim▁hole|>
func (cmd *Command) Name() string {
name := cmd.UsageLine
i := strings.Index(name, " ")
if i >= 0 {
name = name[:i]
}
return name
}
var commands = []*Command{
cmdNew,
cmdRun,
cmdBuild,
cmdPackage,
cmdClean,
cmdTest,
}
func main() {
fmt.Fprintf(os.Stdout, header)
flag.Usage = func() { usage(1) }
flag.Parse()
args := flag.Args()
if len(args) < 1 || args[0] == "help" {
if len(args) == 1 {
usage(0)
}
if len(args) > 1 {
for _, cmd := range commands {
if cmd.Name() == args[1] {
tmpl(os.Stdout, helpTemplate, cmd)
return
}
}
}
usage(2)
}
if args[0] == "flags" {
fmt.Println("Available flags:")
flag.PrintDefaults()
return
}
// Commands use panic to abort execution when something goes wrong.
// Panics are logged at the point of error. Ignore those.
defer func() {
if err := recover(); err != nil {
if _, ok := err.(LoggedError); !ok {
// This panic was not expected / logged.
panic(err)
}
os.Exit(1)
}
}()
for _, cmd := range commands {
if cmd.Name() == args[0] {
cmd.Run(args[1:])
return
}
}
errorf("unknown command %q\nRun 'revel help' for usage.\n", args[0])
}
func errorf(format string, args ...interface{}) {
// Ensure the user's command prompt starts on the next line.
if !strings.HasSuffix(format, "\n") {
format += "\n"
}
fmt.Fprintf(os.Stderr, format, args...)
panic(LoggedError{}) // Panic instead of os.Exit so that deferred will run.
}
const header = `~
~ revel! http://robfig.github.com/revel
~
`
const usageTemplate = `usage: revel [flags] command [arguments]
The commands are:
{{range .}}
{{.Name | printf "%-11s"}} {{.Short}}{{end}}
Use "revel help [command]" for more information.
The flags are:
`
var helpTemplate = `usage: revel {{.UsageLine}}
{{.Long}}
`
func usage(exitCode int) {
tmpl(os.Stderr, usageTemplate, commands)
flag.PrintDefaults()
fmt.Println()
os.Exit(exitCode)
}
func tmpl(w io.Writer, text string, data interface{}) {
t := template.New("top")
template.Must(t.Parse(text))
if err := t.Execute(w, data); err != nil {
panic(err)
}
}<|fim▁end|> | UsageLine, Short, Long string
} |
<|file_name|>redis_wrapper.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import redis, frappe, re
import cPickle as pickle
from frappe.utils import cstr
class RedisWrapper(redis.Redis):
"""Redis client that will automatically prefix conf.db_name"""
def make_key(self, key, user=None):
if user:
if user == True:
user = frappe.session.user
key = "user:{0}:{1}".format(user, key)
return "{0}|{1}".format(frappe.conf.db_name, key).encode('utf-8')
def set_value(self, key, val, user=None, expires_in_sec=None):
"""Sets cache value.<|fim▁hole|>
:param key: Cache key
:param val: Value to be cached
:param user: Prepends key with User
:param expires_in_sec: Expire value of this key in X seconds
"""
key = self.make_key(key, user)
if not expires_in_sec:
frappe.local.cache[key] = val
try:
if expires_in_sec:
self.setex(key, pickle.dumps(val), expires_in_sec)
else:
self.set(key, pickle.dumps(val))
except redis.exceptions.ConnectionError:
return None
def get_value(self, key, generator=None, user=None, expires=False):
"""Returns cache value. If not found and generator function is
given, it will call the generator.
:param key: Cache key.
:param generator: Function to be called to generate a value if `None` is returned.
:param expires: If the key is supposed to be with an expiry, don't store it in frappe.local
"""
original_key = key
key = self.make_key(key, user)
if key in frappe.local.cache:
val = frappe.local.cache[key]
else:
val = None
try:
val = self.get(key)
except redis.exceptions.ConnectionError:
pass
if val is not None:
val = pickle.loads(val)
if not expires:
if val is None and generator:
val = generator()
self.set_value(original_key, val, user=user)
else:
frappe.local.cache[key] = val
return val
def get_all(self, key):
ret = {}
for k in self.get_keys(key):
ret[key] = self.get_value(k)
return ret
def get_keys(self, key):
"""Return keys starting with `key`."""
try:
key = self.make_key(key + "*")
return self.keys(key)
except redis.exceptions.ConnectionError:
regex = re.compile(cstr(key).replace("|", "\|").replace("*", "[\w]*"))
return [k for k in frappe.local.cache.keys() if regex.match(k)]
def delete_keys(self, key):
"""Delete keys with wildcard `*`."""
try:
self.delete_value(self.get_keys(key), make_keys=False)
except redis.exceptions.ConnectionError:
pass
def delete_key(self, *args, **kwargs):
self.delete_value(*args, **kwargs)
def delete_value(self, keys, user=None, make_keys=True):
"""Delete value, list of values."""
if not isinstance(keys, (list, tuple)):
keys = (keys, )
for key in keys:
if make_keys:
key = self.make_key(key)
try:
self.delete(key)
except redis.exceptions.ConnectionError:
pass
if key in frappe.local.cache:
del frappe.local.cache[key]
def lpush(self, key, value):
super(redis.Redis, self).lpush(self.make_key(key), value)
def rpush(self, key, value):
super(redis.Redis, self).rpush(self.make_key(key), value)
def lpop(self, key):
return super(redis.Redis, self).lpop(self.make_key(key))
def llen(self, key):
return super(redis.Redis, self).llen(self.make_key(key))
def hset(self, name, key, value):
if not name in frappe.local.cache:
frappe.local.cache[name] = {}
frappe.local.cache[name][key] = value
try:
super(redis.Redis, self).hset(self.make_key(name), key, pickle.dumps(value))
except redis.exceptions.ConnectionError:
pass
def hgetall(self, name):
return {key: pickle.loads(value) for key, value in
super(redis.Redis, self).hgetall(self.make_key(name)).iteritems()}
def hget(self, name, key, generator=None):
if not name in frappe.local.cache:
frappe.local.cache[name] = {}
if key in frappe.local.cache[name]:
return frappe.local.cache[name][key]
value = None
try:
value = super(redis.Redis, self).hget(self.make_key(name), key)
except redis.exceptions.ConnectionError:
pass
if value:
value = pickle.loads(value)
frappe.local.cache[name][key] = value
elif generator:
value = generator()
try:
self.hset(name, key, value)
except redis.exceptions.ConnectionError:
pass
return value
def hdel(self, name, key):
if name in frappe.local.cache:
if key in frappe.local.cache[name]:
del frappe.local.cache[name][key]
try:
super(redis.Redis, self).hdel(self.make_key(name), key)
except redis.exceptions.ConnectionError:
pass
def hdel_keys(self, name_starts_with, key):
"""Delete hash names with wildcard `*` and key"""
for name in frappe.cache().get_keys(name_starts_with):
name = name.split("|", 1)[1]
self.hdel(name, key)
def hkeys(self, name):
try:
return super(redis.Redis, self).hkeys(self.make_key(name))
except redis.exceptions.ConnectionError:
return []<|fim▁end|> | |
<|file_name|>whisper.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2008 Orbitz WorldWide
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# This module is an implementation of the Whisper database API
# Here is the basic layout of a whisper data file
#
# File = Header,Data
# Header = Metadata,ArchiveInfo+
# Metadata = lastUpdate,maxRetention,xFilesFactor,archiveCount
# ArchiveInfo = Offset,SecondsPerPoint,Points
# Data = Archive+
# Archive = Point+
# Point = timestamp,value
#
# NOTE: the lastUpdate field is deprecated, do not use it!
import os, struct, time
try:
import fcntl
CAN_LOCK = True
except ImportError:
CAN_LOCK = False
LOCK = False
CACHE_HEADERS = False
__headerCache = {}
longFormat = "!L"
longSize = struct.calcsize(longFormat)
floatFormat = "!f"
floatSize = struct.calcsize(floatFormat)
timestampFormat = "!L"
timestampSize = struct.calcsize(timestampFormat)
valueFormat = "!d"
valueSize = struct.calcsize(valueFormat)
pointFormat = "!Ld"
pointSize = struct.calcsize(pointFormat)
metadataFormat = "!2LfL"
metadataSize = struct.calcsize(metadataFormat)
archiveInfoFormat = "!3L"
archiveInfoSize = struct.calcsize(archiveInfoFormat)
debug = startBlock = endBlock = lambda *a,**k: None
class WhisperException(Exception):
"""Base class for whisper exceptions."""
class InvalidConfiguration(WhisperException):
"""Invalid configuration."""
class InvalidTimeInterval(WhisperException):
"""Invalid time interval."""
class TimestampNotCovered(WhisperException):
"""Timestamp not covered by any archives in this database."""
def enableDebug():
global open, debug, startBlock, endBlock
class open(file):
def __init__(self,*args,**kwargs):
file.__init__(self,*args,**kwargs)
self.writeCount = 0
self.readCount = 0
def write(self,data):
self.writeCount += 1
debug('WRITE %d bytes #%d' % (len(data),self.writeCount))
return file.write(self,data)
def read(self,bytes):
self.readCount += 1
debug('READ %d bytes #%d' % (bytes,self.readCount))
return file.read(self,bytes)
def debug(message):
print 'DEBUG :: %s' % message
__timingBlocks = {}
def startBlock(name):
__timingBlocks[name] = time.time()
def endBlock(name):
debug("%s took %.5f seconds" % (name,time.time() - __timingBlocks.pop(name)))
def __readHeader(fh):
info = __headerCache.get(fh.name)
if info: return info
#startBlock('__readHeader')
originalOffset = fh.tell()
fh.seek(0)
packedMetadata = fh.read(metadataSize)
(lastUpdate,maxRetention,xff,archiveCount) = struct.unpack(metadataFormat,packedMetadata)
archives = []
for i in xrange(archiveCount):
packedArchiveInfo = fh.read(archiveInfoSize)
(offset,secondsPerPoint,points) = struct.unpack(archiveInfoFormat,packedArchiveInfo)
archiveInfo = {
'offset' : offset,
'secondsPerPoint' : secondsPerPoint,
'points' : points,
'retention' : secondsPerPoint * points,
'size' : points * pointSize,
}
archives.append(archiveInfo)
fh.seek(originalOffset)
info = {
#'lastUpdate' : lastUpdate, # Deprecated
'maxRetention' : maxRetention,
'xFilesFactor' : xff,
'archives' : archives,
}
if CACHE_HEADERS:
__headerCache[fh.name] = info
#endBlock('__readHeader')
return info
def __changeLastUpdate(fh):
return #XXX Make this a NOP, use os.stat(filename).st_mtime instead
startBlock('__changeLastUpdate()')
originalOffset = fh.tell()
fh.seek(0) #Based on assumption that first field is lastUpdate
now = int( time.time() )
packedTime = struct.pack(timestampFormat,now)
fh.write(packedTime)
fh.seek(originalOffset)
endBlock('__changeLastUpdate()')
def create(path,archiveList,xFilesFactor=0.5):
"""create(path,archiveList,xFilesFactor=0.5)
path is a string
archiveList is a list of archives, each of which is of the form (secondsPerPoint,numberOfPoints)
xFilesFactor specifies the fraction of data points in a propagation interval that must have known values for a propagation to occur
"""
#Validate archive configurations...
if not archiveList:
raise InvalidConfiguration("You must specify at least one archive configuration!")
archiveList.sort(key=lambda a: a[0]) #sort by precision (secondsPerPoint)
for i,archive in enumerate(archiveList):
if i == len(archiveList) - 1: break
next = archiveList[i+1]
if not (archive[0] < next[0]):
raise InvalidConfiguration("You cannot configure two archives "
"with the same precision %s,%s" % (archive,next))
if (next[0] % archive[0]) != 0:
raise InvalidConfiguration("Higher precision archives' precision "
"must evenly divide all lower precision archives' precision %s,%s" \
% (archive[0],next[0]))
retention = archive[0] * archive[1]
nextRetention = next[0] * next[1]
if not (nextRetention > retention):
raise InvalidConfiguration("Lower precision archives must cover "
"larger time intervals than higher precision archives %s,%s" \<|fim▁hole|> % (archive,next))
#Looks good, now we create the file and write the header
if os.path.exists(path):
raise InvalidConfiguration("File %s already exists!" % path)
fh = open(path,'wb')
if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
lastUpdate = struct.pack( timestampFormat, int(time.time()) )
oldest = sorted([secondsPerPoint * points for secondsPerPoint,points in archiveList])[-1]
maxRetention = struct.pack( longFormat, oldest )
xFilesFactor = struct.pack( floatFormat, float(xFilesFactor) )
archiveCount = struct.pack(longFormat, len(archiveList))
packedMetadata = lastUpdate + maxRetention + xFilesFactor + archiveCount
fh.write(packedMetadata)
headerSize = metadataSize + (archiveInfoSize * len(archiveList))
archiveOffsetPointer = headerSize
for secondsPerPoint,points in archiveList:
archiveInfo = struct.pack(archiveInfoFormat, archiveOffsetPointer, secondsPerPoint, points)
fh.write(archiveInfo)
archiveOffsetPointer += (points * pointSize)
zeroes = '\x00' * (archiveOffsetPointer - headerSize)
fh.write(zeroes)
fh.close()
def __propagate(fh,timestamp,xff,higher,lower):
lowerIntervalStart = timestamp - (timestamp % lower['secondsPerPoint'])
lowerIntervalEnd = lowerIntervalStart + lower['secondsPerPoint']
fh.seek(higher['offset'])
packedPoint = fh.read(pointSize)
(higherBaseInterval,higherBaseValue) = struct.unpack(pointFormat,packedPoint)
if higherBaseInterval == 0:
higherFirstOffset = higher['offset']
else:
timeDistance = lowerIntervalStart - higherBaseInterval
pointDistance = timeDistance / higher['secondsPerPoint']
byteDistance = pointDistance * pointSize
higherFirstOffset = higher['offset'] + (byteDistance % higher['size'])
higherPoints = lower['secondsPerPoint'] / higher['secondsPerPoint']
higherSize = higherPoints * pointSize
relativeFirstOffset = higherFirstOffset - higher['offset']
relativeLastOffset = (relativeFirstOffset + higherSize) % higher['size']
higherLastOffset = relativeLastOffset + higher['offset']
fh.seek(higherFirstOffset)
if higherFirstOffset < higherLastOffset: #we don't wrap the archive
seriesString = fh.read(higherLastOffset - higherFirstOffset)
else: #We do wrap the archive
higherEnd = higher['offset'] + higher['size']
seriesString = fh.read(higherEnd - higherFirstOffset)
fh.seek(higher['offset'])
seriesString += fh.read(higherLastOffset - higher['offset'])
#Now we unpack the series data we just read
byteOrder,pointTypes = pointFormat[0],pointFormat[1:]
points = len(seriesString) / pointSize
seriesFormat = byteOrder + (pointTypes * points)
unpackedSeries = struct.unpack(seriesFormat, seriesString)
#And finally we construct a list of values
neighborValues = [None] * points
currentInterval = lowerIntervalStart
step = higher['secondsPerPoint']
for i in xrange(0,len(unpackedSeries),2):
pointTime = unpackedSeries[i]
if pointTime == currentInterval:
neighborValues[i/2] = unpackedSeries[i+1]
currentInterval += step
#Propagate aggregateValue to propagate from neighborValues if we have enough known points
knownValues = [v for v in neighborValues if v is not None]
if not knownValues:
return False
knownPercent = float(len(knownValues)) / float(len(neighborValues))
if knownPercent >= xff: #we have enough data to propagate a value!
aggregateValue = float(sum(knownValues)) / float(len(knownValues)) #TODO another CF besides average?
myPackedPoint = struct.pack(pointFormat,lowerIntervalStart,aggregateValue)
fh.seek(lower['offset'])
packedPoint = fh.read(pointSize)
(lowerBaseInterval,lowerBaseValue) = struct.unpack(pointFormat,packedPoint)
if lowerBaseInterval == 0: #First propagated update to this lower archive
fh.seek(lower['offset'])
fh.write(myPackedPoint)
else: #Not our first propagated update to this lower archive
timeDistance = lowerIntervalStart - lowerBaseInterval
pointDistance = timeDistance / lower['secondsPerPoint']
byteDistance = pointDistance * pointSize
lowerOffset = lower['offset'] + (byteDistance % lower['size'])
fh.seek(lowerOffset)
fh.write(myPackedPoint)
return True
else:
return False
def update(path,value,timestamp=None):
"""update(path,value,timestamp=None)
path is a string
value is a float
timestamp is either an int or float
"""
value = float(value)
fh = open(path,'r+b')
return file_update(fh, value, timestamp)
def file_update(fh, value, timestamp):
if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
header = __readHeader(fh)
now = int( time.time() )
if timestamp is None: timestamp = now
timestamp = int(timestamp)
diff = now - timestamp
if not ((diff < header['maxRetention']) and diff >= 0):
raise TimestampNotCovered("Timestamp not covered by any archives in "
"this database.")
for i,archive in enumerate(header['archives']): #Find the highest-precision archive that covers timestamp
if archive['retention'] < diff: continue
lowerArchives = header['archives'][i+1:] #We'll pass on the update to these lower precision archives later
break
#First we update the highest-precision archive
myInterval = timestamp - (timestamp % archive['secondsPerPoint'])
myPackedPoint = struct.pack(pointFormat,myInterval,value)
fh.seek(archive['offset'])
packedPoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedPoint)
if baseInterval == 0: #This file's first update
fh.seek(archive['offset'])
fh.write(myPackedPoint)
baseInterval,baseValue = myInterval,value
else: #Not our first update
timeDistance = myInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
myOffset = archive['offset'] + (byteDistance % archive['size'])
fh.seek(myOffset)
fh.write(myPackedPoint)
#Now we propagate the update to lower-precision archives
#startBlock('update propagation')
higher = archive
for lower in lowerArchives:
if not __propagate(fh,myInterval,header['xFilesFactor'],higher,lower): break
higher = lower
#endBlock('update propagation')
__changeLastUpdate(fh)
fh.close()
def update_many(path,points):
"""update_many(path,points)
path is a string
points is a list of (timestamp,value) points
"""
if not points: return
points = [ (int(t),float(v)) for (t,v) in points]
points.sort(key=lambda p: p[0],reverse=True) #order points by timestamp, newest first
fh = open(path,'r+b')
return file_update_many(fh, points)
def file_update_many(fh, points):
if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
header = __readHeader(fh)
now = int( time.time() )
archives = iter( header['archives'] )
currentArchive = archives.next()
#debug(' update_many currentArchive=%s' % str(currentArchive))
currentPoints = []
for point in points:
age = now - point[0]
#debug(' update_many iterating points, point=%s age=%d' % (str(point),age))
while currentArchive['retention'] < age: #we can't fit any more points in this archive
#debug(' update_many this point is too old to fit here, currentPoints=%d' % len(currentPoints))
if currentPoints: #commit all the points we've found that it can fit
currentPoints.reverse() #put points in chronological order
__archive_update_many(fh,header,currentArchive,currentPoints)
currentPoints = []
try:
currentArchive = archives.next()
#debug(' update_many using next archive %s' % str(currentArchive))
except StopIteration:
#debug(' update_many no more archives!')
currentArchive = None
break
if not currentArchive: break #drop remaining points that don't fit in the database
#debug(' update_many adding point=%s' % str(point))
currentPoints.append(point)
#debug(' update_many done iterating points')
if currentArchive and currentPoints: #don't forget to commit after we've checked all the archives
currentPoints.reverse()
__archive_update_many(fh,header,currentArchive,currentPoints)
__changeLastUpdate(fh)
fh.close()
def __archive_update_many(fh,header,archive,points):
step = archive['secondsPerPoint']
#startBlock('__archive_update_many file=%s archive=%s points=%d' % (fh.name,step,len(points)))
alignedPoints = [ (timestamp - (timestamp % step), value)
for (timestamp,value) in points ]
#Create a packed string for each contiguous sequence of points
#startBlock('__archive_update_many string packing')
packedStrings = []
previousInterval = None
currentString = ""
for (interval,value) in alignedPoints:
#debug('__archive_update_many iterating alignedPoint at %s' % interval)
if (not previousInterval) or (interval == previousInterval + step):
#debug('__archive_update_many was expected, packing onto currentString')
currentString += struct.pack(pointFormat,interval,value)
previousInterval = interval
else:
numberOfPoints = len(currentString) / pointSize
startInterval = previousInterval - (step * (numberOfPoints-1))
#debug('__archive_update_many was NOT expected, appending to packedStrings startInterval=%s currentString=%d bytes' % (startInterval,len(currentString)))
packedStrings.append( (startInterval,currentString) )
currentString = struct.pack(pointFormat,interval,value)
previousInterval = interval
if currentString:
#startInterval = previousInterval - (step * len(currentString) / pointSize) + step
numberOfPoints = len(currentString) / pointSize
startInterval = previousInterval - (step * (numberOfPoints-1))
#debug('__archive_update_many done iterating alignedPoints, remainder currentString of %d bytes, startInterval=%s' % (len(currentString),startInterval))
packedStrings.append( (startInterval,currentString) )
#endBlock('__archive_update_many string packing')
#Read base point and determine where our writes will start
fh.seek(archive['offset'])
packedBasePoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedBasePoint)
if baseInterval == 0: #This file's first update
#debug('__archive_update_many first update')
baseInterval = packedStrings[0][0] #use our first string as the base, so we start at the start
#debug('__archive_update_many baseInterval is %s' % baseInterval)
#Write all of our packed strings in locations determined by the baseInterval
#startBlock('__archive_update_many write() operations')
for (interval,packedString) in packedStrings:
timeDistance = interval - baseInterval
pointDistance = timeDistance / step
byteDistance = pointDistance * pointSize
myOffset = archive['offset'] + (byteDistance % archive['size'])
fh.seek(myOffset)
archiveEnd = archive['offset'] + archive['size']
bytesBeyond = (myOffset + len(packedString)) - archiveEnd
#debug(' __archive_update_many myOffset=%d packedString=%d archiveEnd=%d bytesBeyond=%d' % (myOffset,len(packedString),archiveEnd,bytesBeyond))
if bytesBeyond > 0:
fh.write( packedString[:-bytesBeyond] )
#debug('We wrapped an archive!')
assert fh.tell() == archiveEnd, "archiveEnd=%d fh.tell=%d bytesBeyond=%d len(packedString)=%d" % (archiveEnd,fh.tell(),bytesBeyond,len(packedString))
fh.seek( archive['offset'] )
fh.write( packedString[-bytesBeyond:] ) #safe because it can't exceed the archive (retention checking logic above)
else:
fh.write(packedString)
#endBlock('__archive_update_many write() operations')
#Now we propagate the updates to lower-precision archives
#startBlock('__archive_update_many propagation')
higher = archive
lowerArchives = [arc for arc in header['archives'] if arc['secondsPerPoint'] > archive['secondsPerPoint']]
#debug('__archive_update_many I have %d lower archives' % len(lowerArchives))
for lower in lowerArchives:
fit = lambda i: i - (i % lower['secondsPerPoint'])
lowerIntervals = [fit(p[0]) for p in alignedPoints]
uniqueLowerIntervals = set(lowerIntervals)
#debug(' __archive_update_many points=%d unique=%d' % (len(alignedPoints),len(uniqueLowerIntervals)))
propagateFurther = False
for interval in uniqueLowerIntervals:
#debug(' __archive_update_many propagating from %d to %d, interval=%d' % (higher['secondsPerPoint'],lower['secondsPerPoint'],interval))
if __propagate(fh,interval,header['xFilesFactor'],higher,lower):
propagateFurther = True
#debug(' __archive_update_many Successful propagation!')
#debug(' __archive_update_many propagateFurther=%s' % propagateFurther)
if not propagateFurther: break
higher = lower
#endBlock('__archive_update_many propagation')
#endBlock('__archive_update_many file=%s archive=%s points=%d' % (fh.name,step,len(points)))
def info(path):
"""info(path)
path is a string
"""
fh = open(path,'rb')
info = __readHeader(fh)
fh.close()
return info
def fetch(path,fromTime,untilTime=None):
"""fetch(path,fromTime,untilTime=None)
path is a string
fromTime is an epoch time
untilTime is also an epoch time, but defaults to now
"""
fh = open(path,'rb')
return file_fetch(fh, fromTime, untilTime)
def file_fetch(fh, fromTime, untilTime):
header = __readHeader(fh)
now = int( time.time() )
if untilTime is None:
untilTime = now
fromTime = int(fromTime)
untilTime = int(untilTime)
oldestTime = now - header['maxRetention']
if fromTime < oldestTime:
fromTime = oldestTime
if not (fromTime < untilTime):
raise InvalidTimeInterval("Invalid time interval")
if untilTime > now:
untilTime = now
if untilTime < fromTime:
untilTime = now
diff = now - fromTime
for archive in header['archives']:
if archive['retention'] >= diff: break
fromInterval = int( fromTime - (fromTime % archive['secondsPerPoint']) ) + archive['secondsPerPoint']
untilInterval = int( untilTime - (untilTime % archive['secondsPerPoint']) ) + archive['secondsPerPoint']
fh.seek(archive['offset'])
packedPoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedPoint)
if baseInterval == 0:
step = archive['secondsPerPoint']
points = (untilInterval - fromInterval) / step
timeInfo = (fromInterval,untilInterval,step)
valueList = [None] * points
return (timeInfo,valueList)
#Determine fromOffset
timeDistance = fromInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
fromOffset = archive['offset'] + (byteDistance % archive['size'])
#Determine untilOffset
timeDistance = untilInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
untilOffset = archive['offset'] + (byteDistance % archive['size'])
#Read all the points in the interval
fh.seek(fromOffset)
if fromOffset < untilOffset: #If we don't wrap around the archive
seriesString = fh.read(untilOffset - fromOffset)
else: #We do wrap around the archive, so we need two reads
archiveEnd = archive['offset'] + archive['size']
seriesString = fh.read(archiveEnd - fromOffset)
fh.seek(archive['offset'])
seriesString += fh.read(untilOffset - archive['offset'])
#Now we unpack the series data we just read (anything faster than unpack?)
byteOrder,pointTypes = pointFormat[0],pointFormat[1:]
points = len(seriesString) / pointSize
seriesFormat = byteOrder + (pointTypes * points)
unpackedSeries = struct.unpack(seriesFormat, seriesString)
#And finally we construct a list of values (optimize this!)
valueList = [None] * points #pre-allocate entire list for speed
currentInterval = fromInterval
step = archive['secondsPerPoint']
for i in xrange(0,len(unpackedSeries),2):
pointTime = unpackedSeries[i]
if pointTime == currentInterval:
pointValue = unpackedSeries[i+1]
valueList[i/2] = pointValue #in-place reassignment is faster than append()
currentInterval += step
fh.close()
timeInfo = (fromInterval,untilInterval,step)
return (timeInfo,valueList)<|fim▁end|> | |
<|file_name|>availability_sets_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class AvailabilitySetsOperations(object):
"""AvailabilitySetsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An objec model deserializer.
:ivar api_version: Client Api Version. Constant value: "2017-03-30".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-03-30"
self.config = config
def create_or_update(
self, resource_group_name, availability_set_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Create or update an availability set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param availability_set_name: The name of the availability set.
:type availability_set_name: str
:param parameters: Parameters supplied to the Create Availability Set
operation.
:type parameters:
~azure.mgmt.compute.v2017_03_30.models.AvailabilitySet
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: AvailabilitySet or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.compute.v2017_03_30.models.AvailabilitySet or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'AvailabilitySet')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('AvailabilitySet', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def delete(
self, resource_group_name, availability_set_name, custom_headers=None, raw=False, **operation_config):
"""Delete an availability set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param availability_set_name: The name of the availability set.
:type availability_set_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: OperationStatusResponse or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get(
self, resource_group_name, availability_set_name, custom_headers=None, raw=False, **operation_config):
"""Retrieves information about an availability set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param availability_set_name: The name of the availability set.
:type availability_set_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: AvailabilitySet or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.compute.v2017_03_30.models.AvailabilitySet or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('AvailabilitySet', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def list(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Lists all availability sets in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of AvailabilitySet
:rtype:
~azure.mgmt.compute.v2017_03_30.models.AvailabilitySetPaged[~azure.mgmt.compute.v2017_03_30.models.AvailabilitySet]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""<|fim▁hole|> def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.AvailabilitySetPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.AvailabilitySetPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def list_available_sizes(
self, resource_group_name, availability_set_name, custom_headers=None, raw=False, **operation_config):
"""Lists all available virtual machine sizes that can be used to create a
new virtual machine in an existing availability set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param availability_set_name: The name of the availability set.
:type availability_set_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of VirtualMachineSize
:rtype:
~azure.mgmt.compute.v2017_03_30.models.VirtualMachineSizePaged[~azure.mgmt.compute.v2017_03_30.models.VirtualMachineSize]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}/vmSizes'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'availabilitySetName': self._serialize.url("availability_set_name", availability_set_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.VirtualMachineSizePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.VirtualMachineSizePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized<|fim▁end|> | |
<|file_name|>201511251405_33a1d6f25951_add_timetable_related_tables.py<|end_file_name|><|fim▁begin|>"""Add timetable related tables
Revision ID: 33a1d6f25951
Revises: 225d0750c216
Create Date: 2015-11-25 14:05:51.856236
"""
import sqlalchemy as sa
from alembic import op
from indico.core.db.sqlalchemy import PyIntEnum, UTCDateTime
from indico.modules.events.timetable.models.entries import TimetableEntryType
# revision identifiers, used by Alembic.
revision = '33a1d6f25951'
down_revision = '225d0750c216'
def upgrade():
# Break
op.create_table(
'breaks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('duration', sa.Interval(), nullable=False),
sa.Column('text_color', sa.String(), nullable=False),
sa.Column('background_color', sa.String(), nullable=False),
sa.Column('room_name', sa.String(), nullable=False),
sa.Column('inherit_location', sa.Boolean(), nullable=False),
sa.Column('address', sa.Text(), nullable=False),
sa.Column('venue_id', sa.Integer(), nullable=True, index=True),
sa.Column('venue_name', sa.String(), nullable=False),
sa.Column('room_id', sa.Integer(), nullable=True, index=True),
sa.CheckConstraint("(room_id IS NULL) OR (venue_name = '' AND room_name = '')",
name='no_custom_location_if_room'),
sa.CheckConstraint("(venue_id IS NULL) OR (venue_name = '')", name='no_venue_name_if_venue_id'),
sa.CheckConstraint("(room_id IS NULL) OR (venue_id IS NOT NULL)", name='venue_id_if_room_id'),
sa.CheckConstraint("NOT inherit_location OR (venue_id IS NULL AND room_id IS NULL AND venue_name = '' AND "
"room_name = '' AND address = '')", name='inherited_location'),
sa.CheckConstraint("(text_color = '') = (background_color = '')", name='both_or_no_colors'),
sa.CheckConstraint("text_color != '' AND background_color != ''", name='colors_not_empty'),
sa.ForeignKeyConstraint(['room_id'], ['roombooking.rooms.id']),
sa.ForeignKeyConstraint(['venue_id'], ['roombooking.locations.id']),
sa.ForeignKeyConstraint(['venue_id', 'room_id'], ['roombooking.rooms.location_id', 'roombooking.rooms.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)<|fim▁hole|>
# TimetableEntry
op.create_table(
'timetable_entries',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('parent_id', sa.Integer(), nullable=True, index=True),
sa.Column('session_block_id', sa.Integer(), nullable=True, index=True, unique=True),
sa.Column('contribution_id', sa.Integer(), nullable=True, index=True, unique=True),
sa.Column('break_id', sa.Integer(), nullable=True, index=True, unique=True),
sa.Column('type', PyIntEnum(TimetableEntryType), nullable=False),
sa.Column('start_dt', UTCDateTime, nullable=False),
sa.Index('ix_timetable_entries_start_dt_desc', sa.text('start_dt DESC')),
sa.CheckConstraint('type != 1 OR parent_id IS NULL', name='valid_parent'),
sa.CheckConstraint('type != 1 OR (contribution_id IS NULL AND break_id IS NULL AND '
'session_block_id IS NOT NULL)', name='valid_session_block'),
sa.CheckConstraint('type != 2 OR (session_block_id IS NULL AND break_id IS NULL AND '
'contribution_id IS NOT NULL)', name='valid_contribution'),
sa.CheckConstraint('type != 3 OR (contribution_id IS NULL AND session_block_id IS NULL AND '
'break_id IS NOT NULL)', name='valid_break'),
sa.ForeignKeyConstraint(['break_id'], ['events.breaks.id']),
sa.ForeignKeyConstraint(['contribution_id'], ['events.contributions.id']),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.ForeignKeyConstraint(['parent_id'], ['events.timetable_entries.id']),
sa.ForeignKeyConstraint(['session_block_id'], ['events.session_blocks.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('timetable_entries', schema='events')
op.drop_table('breaks', schema='events')<|fim▁end|> | |
<|file_name|>collectd.py<|end_file_name|><|fim▁begin|># Copyright 2015-2016 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
minemeld.collectd
Provides a client to collectd for storing metrics.
"""
import socket
import logging
LOG = logging.getLogger(__name__)
class CollectdClient(object):
"""Collectd client.
Args:
path (str): path to the collectd unix socket
"""
def __init__(self, path):
self.path = path
self.socket = None
def _open_socket(self):
if self.socket is not None:
return
_socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
_socket.connect(self.path)
self.socket = _socket
def _readline(self):
result = ''<|fim▁hole|> data = None
while data != '\n':
data = self.socket.recv(1)
if data == '\n' or data is None:
return result
result += data
def _send_cmd(self, command):
self._open_socket()
self.socket.send(command+'\n')
ans = self._readline()
status, message = ans.split(None, 1)
status = int(status)
if status < 0:
raise RuntimeError('Error communicating with collectd %s' %
message)
message = [message]
for _ in range(status):
message.append(self._readline())
return status, '\n'.join(message)
def flush(self, identifier=None, timeout=None):
cmd = 'FLUSH'
if timeout is not None:
cmd += ' timeout=%d' % timeout
if identifier is not None:
cmd += ' identifier=%s' % identifier
self._send_cmd(
cmd
)
def putval(self, identifier, value, timestamp='N',
type_='minemeld_counter', hostname='minemeld', interval=None):
if isinstance(timestamp, int):
timestamp = '%d' % timestamp
identifier = '/'.join([hostname, identifier, type_])
command = 'PUTVAL %s' % identifier
if interval is not None:
command += ' interval=%d' % interval
command += ' %s:%d' % (timestamp, value)
self._send_cmd(command)<|fim▁end|> | |
<|file_name|>qhyper.ts<|end_file_name|><|fim▁begin|>/* This is a conversion from LIB-R-MATH to Typescript/Javascript
Copyright (C) 2018 Jacob K.F. Bogers [email protected]
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { debug } from 'debug';
import { ML_ERR_return_NAN, R_Q_P01_boundaries } from '@common/logger';
import { lfastchoose } from '@lib/special/choose';
import { R_DT_qIv } from '@dist/exp/expm1';
import { DBL_EPSILON } from '@lib/r-func';
import type { QHyperFunctionMap, CalcQHyper } from './qhyper_wasm';
const printer_qhyper = debug('qhyper');
const _d = new Float64Array(7);
const ixr = 0;
const isum = 1;
const ixb = 2;
const iterm = 3;
const iNR = 4;
const iNB = 5;
// init
let backendTinyN: CalcQHyper = cpuBackendTinyN;
let backendBigN: CalcQHyper = cpuBackendBigN;
export function registerBackend(fns: QHyperFunctionMap): void {
backendTinyN = fns.calcTinyN;
backendBigN = fns.calcBigN;
}
export function unRegisterBackend(): boolean {
const previous = !!backendTinyN && !!backendBigN;
backendTinyN = cpuBackendTinyN;
backendBigN = cpuBackendBigN;
return previous;
}
function cpuBackendTinyN(sum: number, term: number, p: number, xr: number, end: number, xb: number, NB: number, NR: number): number {
while (sum < p && xr < end) {
//xr++
xr++;
NB++;
term *= (NR / xr) * (xb / NB);
sum += term;
xb--;
NR--;
}
return xr;
}
function cpuBackendBigN(sum: number, term: number, p: number, xr: number, end: number, xb: number, NB: number, NR: number): number {
while (sum < p && xr < end) {
xr++;
NB++;
term += Math.log((NR / xr) * (xb / NB));
sum += Math.exp(term);
xb--;
NR--;
}
return xr;
}
export function qhyper(
p: number,
nr: number,
nb: number,
n: number,
lowerTail = true,
logP = false
): number {
if (isNaN(p) || isNaN(nr) || isNaN(nb) || isNaN(n)) {
return NaN;
}
if (!isFinite(p) || !isFinite(nr) || !isFinite(nb) || !isFinite(n)) {
return ML_ERR_return_NAN(printer_qhyper);
}
_d[iNR] = Math.round(nr);
_d[iNB] = Math.round(nb);
const N = _d[iNR] + _d[iNB];
n = Math.round(n);
if (_d[iNR] < 0 || _d[iNB] < 0 || n < 0 || n > N) return ML_ERR_return_NAN(printer_qhyper);
/* Goal: Find xr (= #{red balls in sample}) such that
* phyper(xr, NR,NB, n) >= p > phyper(xr - 1, NR,NB, n)
*/
const xstart = Math.max(0, n - _d[iNB]);
const xend = Math.min(n, _d[iNR]);
const rc = R_Q_P01_boundaries(lowerTail, logP, p, xstart, xend);
if (rc !== undefined) {
return rc;
}
_d[ixr] = xstart;
_d[ixb] = n - _d[ixr]; /* always ( = #{black balls in sample} ) */
const small_N = N < 1000; /* won't have underflow in product below */
/* if N is small, term := product.ratio( bin.coef );
otherwise work with its logarithm to protect against underflow */<|fim▁hole|> -
lfastchoose(N, n);
if (small_N) _d[iterm] = Math.exp(_d[iterm]);
_d[iNR] -= _d[ixr];
_d[iNB] -= _d[ixb];
if (!lowerTail || logP) {
p = R_DT_qIv(lowerTail, logP, p);
}
p *= 1 - 1000 * DBL_EPSILON; /* was 64, but failed on FreeBSD sometimes */
_d[isum] = small_N ? _d[iterm] : Math.exp(_d[iterm]);
// for speed, removed if (small_N) out of the while loop
return (small_N) ? backendTinyN(
_d[isum],
_d[iterm],
p,
_d[ixr],
xend,
_d[ixb],
_d[iNB],
_d[iNR]
) : backendBigN(
_d[isum],
_d[iterm],
p,
_d[ixr],
xend,
_d[ixb],
_d[iNB],
_d[iNR]
);
}<|fim▁end|> | _d[iterm] =
lfastchoose(_d[iNR], _d[ixr])
+
lfastchoose(_d[iNB], _d[ixb]) |
<|file_name|>matrix_test.py<|end_file_name|><|fim▁begin|># Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington and Kai Nagel
# See opus_core/LICENSE
import os
import opus_matsim.sustain_city.tests as test_dir
from opus_core.tests import opus_unittest
from opus_core.store.csv_storage import csv_storage
from urbansim.datasets.travel_data_dataset import TravelDataDataset
from numpy import *
import numpy
from opus_core.logger import logger
class MatrixTest(opus_unittest.OpusTestCase):
""" Testing access to travel data values stored in numpy arrays
"""
def setUp(self):
print "Entering setup"
# get sensitivity test path
self.test_dir_path = test_dir.__path__[0]
# get location to travel data table
self.input_directory = os.path.join( self.test_dir_path, 'data', 'travel_cost')
logger.log_status("input_directory: %s" % self.input_directory)
# check source file
if not os.path.exists( self.input_directory ):
raise('File not found! %s' % self.input_directory)
print "Leaving setup"
def test_run(self):
print "Entering test run"
# This test loads an exising travel data as a TravelDataSet (numpy array)
# and accesses single (pre-known) values to validate the conversion process
# (numpy array into standard python list).
#
# Here an example:
# my_list = [[1,2,3],
# [4,5,6],
# [7,8,9]]
#
# my_list[0][1] should be = 2
# my_list[2][2] should be = 9
table_name = 'travel_data'
travel_data_attribute = 'single_vehicle_to_work_travel_cost'
# location of pre-calculated MATSim travel costs
in_storage = csv_storage(storage_location = self.input_directory)
# create travel data set (travel costs)
travel_data_set = TravelDataDataset( in_storage=in_storage, in_table_name=table_name )
travel_data_attribute_mat = travel_data_set.get_attribute_as_matrix(travel_data_attribute, fill=31)
# converting from numpy array into a 2d list
travel_list = numpy.atleast_2d(travel_data_attribute_mat).tolist()
# get two values for validation
value1 = int(travel_list[1][1]) # should be = 0
value2 = int(travel_list[2][1]) # should be = 120
logger.log_status('First validation value should be 0. Current value is %i' % value1)
logger.log_status('Second validation value should be 120. Current value is %i' % value2)
self.assertTrue( value1 == 0 )
self.assertTrue( value2 == 120 )
# self.dump_travel_list(travel_list) # for debugging
print "Leaving test run"
def dump_travel_list(self, travel_list):
''' Dumping travel_list for debugging reasons...<|fim▁hole|> dest = os.path.join( os.environ['OPUS_HOME'], 'opus_matsim', 'tmp')
if not os.path.exists(dest):
os.makedirs(dest)
travel = os.path.join(dest, 'travelFile.txt')
f = open(travel, "w")
f.write( str(travel_list) )
f.close()
if __name__ == "__main__":
#mt = MatrixTest() # for debugging
#mt.test_run() # for debugging
opus_unittest.main()<|fim▁end|> | '''
|
<|file_name|>successive_narrowing.py<|end_file_name|><|fim▁begin|>import deep_architect.searchers.common as se
import numpy as np
# NOTE: this searcher does not do any budget adjustment and needs to be
# combined with an evaluator that does.
class SuccessiveNarrowing(se.Searcher):
def __init__(self, search_space_fn, num_initial_samples, reduction_factor,
reset_default_scope_upon_sample):
se.Searcher.__init__(self, search_space_fn,
reset_default_scope_upon_sample)
self.num_initial_samples = num_initial_samples
self.reduction_factor = reduction_factor
self.vals = [None for _ in range(num_initial_samples)]
self.num_remaining = num_initial_samples
self.idx = 0
self.queue = []
for _ in range(num_initial_samples):
inputs, outputs = search_space_fn()
hyperp_value_lst = se.random_specify(outputs)
self.queue.append(hyperp_value_lst)
def sample(self):
assert self.idx < len(self.queue)
hyperp_value_lst = self.queue[self.idx]
(inputs, outputs) = self.search_space_fn()
se.specify(outputs, hyperp_value_lst)
idx = self.idx
self.idx += 1
return inputs, outputs, hyperp_value_lst, {"idx": idx}
def update(self, val, searcher_eval_token):
assert self.num_remaining > 0
idx = searcher_eval_token["idx"]
assert self.vals[idx] is None
self.vals[idx] = val
self.num_remaining -= 1
# generate the next round of architectures by keeping the best ones.
if self.num_remaining == 0:
num_samples = int(self.reduction_factor * len(self.queue))
assert num_samples > 0
top_idxs = np.argsort(self.vals)[::-1][:num_samples]
self.queue = [self.queue[idx] for idx in top_idxs]
self.vals = [None for _ in range(num_samples)]
self.num_remaining = num_samples
self.idx = 0
# run simple successive narrowing on a single machine.
def run_successive_narrowing(search_space_fn, num_initial_samples,
initial_budget, get_evaluator, extract_val_fn,
num_samples_reduction_factor,
budget_increase_factor, num_rounds,
get_evaluation_logger):
num_samples = num_initial_samples
searcher = SuccessiveNarrowing(search_space_fn, num_initial_samples,
num_samples_reduction_factor)
evaluation_id = 0
for round_idx in range(num_rounds):<|fim▁hole|> evaluator = get_evaluator(budget)
for idx in range(num_samples):
(inputs, outputs, hyperp_value_lst,
searcher_eval_token) = searcher.sample()
results = evaluator.eval(inputs, outputs)
val = extract_val_fn(results)
searcher.update(val, searcher_eval_token)
logger = get_evaluation_logger(evaluation_id)
logger.log_config(hyperp_value_lst, searcher_eval_token)
logger.log_results(results)
evaluation_id += 1
num_samples = int(num_samples_reduction_factor * num_samples)<|fim▁end|> | budget = initial_budget * (budget_increase_factor**round_idx) |
<|file_name|>elementwise.py<|end_file_name|><|fim▁begin|>import string
import numpy
import six
import cupy
from cupy import carray
from cupy import cuda
from cupy import util
six_range = six.moves.range
six_zip = six.moves.zip
def _get_simple_elementwise_kernel(
params, operation, name, preamble,
loop_prep='', after_loop='', options=()):
module_code = string.Template('''
${preamble}
extern "C" __global__ void ${name}(${params}) {
${loop_prep};
CUPY_FOR(i, _ind.size()) {
_ind.set(i);
${operation};
}
${after_loop};
}
''').substitute(
params=params,
operation=operation,
name=name,
preamble=preamble,
loop_prep=loop_prep,
after_loop=after_loop)
module = carray.compile_with_cache(module_code, options)
return module.get_function(name)
_typenames = {
numpy.dtype('float64'): 'double',
numpy.dtype('float32'): 'float',
numpy.dtype('float16'): 'float16',
numpy.dtype('int64'): 'long long',
numpy.dtype('int32'): 'int',
numpy.dtype('int16'): 'short',
numpy.dtype('int8'): 'signed char',
numpy.dtype('uint64'): 'unsigned long long',
numpy.dtype('uint32'): 'unsigned int',
numpy.dtype('uint16'): 'unsigned short',
numpy.dtype('uint8'): 'unsigned char',
numpy.dtype('bool'): 'bool',
}
_scalar_type = (int, float, bool) + tuple(t.type for t in _typenames.keys())
def _get_typename(dtype):
if dtype is None:
raise ValueError('dtype is None')
return _typenames[numpy.dtype(dtype)]
def _check_args(args):
dev = cuda.Device()
cp_array = cupy.ndarray
scalar_type = _scalar_type
for arg in args:
if isinstance(arg, cp_array):
if arg.data.device != dev:
raise ValueError('Array device must be same as the current '
'device: array device = %d while current = %d'
% (arg.device.id, dev.id))
elif not isinstance(arg, scalar_type):
raise TypeError('Unsupported type %s' % type(arg))
def _get_args_info(args):
ret = []
carray_Indexer = carray.Indexer
ret_append = ret.append
for a in args:
t = type(a)
if t == carray_Indexer:
dtype = None
else:
dtype = a.dtype.type
ret_append((t, dtype, a.ndim))
return tuple(ret)
def _get_kernel_params(params, args_info):
ret = []
for p, a in six_zip(params, args_info):
type, dtype, ndim = a
is_array = type is cupy.ndarray
if type is carray.Indexer:
t = 'CIndexer<%d>' % ndim
else:
t = _get_typename(dtype)
if is_array:
t = 'CArray<%s, %d>' % (t, ndim)
ret.append('%s%s %s%s' % ('const ' if p.is_const else '',
t,
'_raw_' if is_array and not p.raw else '',
p.name))
return ', '.join(ret)
def _reduce_dims(args, params, shape):
ndim = len(shape)
if ndim <= 1:
return args, shape
cp_array = cupy.ndarray
is_array_flags = [not p.raw and isinstance(a, cp_array)
for p, a in six_zip(params, args)]
args_strides = [a._strides for a, f in six_zip(args, is_array_flags) if f]
src_shape = shape
shape = list(src_shape)
cnt = 0
for i in six_range(1, ndim):
j = i - 1
shape_i = shape[i]
shape_j = shape[j]
if shape_j == 1:
continue
for strides in args_strides:
if strides[i] * shape_i != strides[j]:
cnt += 1
axis = j
break
else:
shape[i] *= shape_j
shape[j] = 1
if shape[-1] != 1:
cnt += 1
axis = -1
if not cnt:
return args, src_shape
elif cnt == 1:
new_shape = shape[axis],
args = list(args)
for i, a in enumerate(args):
if is_array_flags[i]:
a = args[i] = a.view()
a._shape = new_shape
a._strides = a._strides[axis],
return args, new_shape
new_shape = tuple([dim for dim in shape if dim != 1])
args = list(args)
for i, a in enumerate(args):
if is_array_flags[i]:
a = args[i] = a.view()
a._shape = new_shape
a._strides = tuple(
[st for st, sh in six_zip(a._strides, shape) if sh != 1])
return args, new_shape
class ParameterInfo(object):
def __init__(self, str, is_const):
self.name = None
self.dtype = None
self.ctype = None
self.raw = False
self.is_const = is_const
s = tuple(i for i in str.split() if len(i) != 0)
if len(s) < 2:
raise Exception('Syntax error: %s' % str)
t, self.name = s[-2:]
if t == 'CIndexer':
pass
elif len(t) == 1:
self.ctype = t
else:
dtype = numpy.dtype(t)
self.dtype = dtype.type
if dtype.name != t:
raise ValueError('Wrong type %s' % t)
self.ctype = _get_typename(self.dtype)
for i in s[:-2]:
if i == 'raw':
self.raw = True
else:
raise Exception('Unknown keyward "%s"' % i)
@util.memoize()
def _get_param_info(s, is_const):
if len(s) == 0:
return ()
return tuple([ParameterInfo(i, is_const) for i in s.strip().split(',')])
@util.memoize()
def _decide_params_type(in_params, out_params, in_args_dtype, out_args_dtype):
type_dict = {}
if out_args_dtype:
assert len(out_params) == len(out_args_dtype)
for p, a in six_zip(out_params, out_args_dtype):
if a is None:
raise TypeError('Output arguments must be cupy.ndarray')
if p.dtype is not None:
if a != p.dtype:
raise TypeError(
'Type is mismatched. %s %s %s' % (p.name, a, p.dtype))
elif p.ctype in type_dict:
t = type_dict[p.ctype]
if t != a:
raise TypeError(
'Type is mismatched. %s %s %s %s' % (
p.name, a, t, p.ctype))
else:
type_dict[p.ctype] = a
assert len(in_params) == len(in_args_dtype)
unknown_ctype = []
for p, a in six_zip(in_params, in_args_dtype):
if a is None:
if p.dtype is None:
unknown_ctype.append(p.ctype)
else:
if p.dtype is not None:
if a != p.dtype:
raise TypeError(
'Type is mismatched. %s %s %s' % (p.name, a, p.dtype))
elif p.ctype in type_dict:
t = type_dict[p.ctype]
if t != a:
raise TypeError(
'Type is mismatched. %s %s %s %s' % (
p.name, a, t, p.ctype))
else:
type_dict[p.ctype] = a
in_types = tuple([type_dict[p.ctype] if p.dtype is None else p.dtype
for p in in_params])
out_types = tuple([type_dict[p.ctype] if p.dtype is None else p.dtype
for p in out_params])
return in_types, out_types, tuple(type_dict.items())
def _broadcast(args, params, use_size):
value = [a if not p.raw and isinstance(a, cupy.ndarray) else None
for p, a in six_zip(params, args)]
if use_size:
for i in value:
if i is None:
break
else:
raise ValueError("Specified 'size' can be used only "
"if all of the ndarray are 'raw'.")
else:
for i in value:
if i is not None:
break
else:
raise ValueError('Loop size is Undecided')
brod = cupy.broadcast(*value)
value = [b if a is None else a
for a, b in six_zip(brod.values, args)]
return value, brod.shape
def _get_out_args(out_args, out_types, out_shape):
if not out_args:
return [cupy.empty(out_shape, t) for t in out_types]
for a in out_args:
if not isinstance(a, cupy.ndarray):
raise TypeError(
'Output arguments type must be cupy.ndarray')
if a.shape != out_shape:
raise ValueError('Out shape is mismatched')
return out_args
def _get_out_args_with_params(out_args, out_types, out_shape, out_params):
if not out_args:
for p in out_params:
if p.raw:
raise ValueError('Output array size is Undecided')
return [cupy.empty(out_shape, t) for t in out_types]
for a, p in six_zip(out_args, out_params):
if not isinstance(a, cupy.ndarray):
raise TypeError(
'Output arguments type must be cupy.ndarray')
if a.shape != out_shape and not p.raw:
raise ValueError('Out shape is mismatched')
return out_args
@util.memoize(for_each_device=True)
def _get_elementwise_kernel(args_info, types, params, operation, name,
preamble, kwargs):
kernel_params = _get_kernel_params(params, args_info)
types_preamble = '\n'.join(
'typedef %s %s;' % (_get_typename(v), k) for k, v in types)
preamble = types_preamble + '\n' + preamble
op = []
for p, a in six_zip(params, args_info):
if not p.raw and a[0] == cupy.ndarray:
if p.is_const:
fmt = 'const {t} {n} = _raw_{n}[_ind.get()];'
else:
fmt = '{t} &{n} = _raw_{n}[_ind.get()];'
op.append(fmt.format(t=p.ctype, n=p.name))
op.append(operation)
operation = '\n'.join(op)
return _get_simple_elementwise_kernel(
kernel_params, operation, name,
preamble, **dict(kwargs))
class ElementwiseKernel(object):
"""User-defined elementwise kernel.
This class can be used to define an elementwise kernel with or without
broadcasting.
The kernel is compiled at an invocation of the
:meth:`~ElementwiseKernel.__call__` method,
which is cached for each device.
The compiled binary is also cached into a file under the
``$HOME/.cupy/kernel_cache/`` directory with a hashed file name. The cached
binary is reused by other processes.
Args:
in_params (str): Input argument list.
out_params (str): Output argument list.
operation (str): The body in the loop written in CUDA-C/C++.
name (str): Name of the kernel function. It should be set for
readability of the performance profiling.
reduce_dims (bool): If False, the shapes of array arguments are
kept within the kernel invocation. The shapes are reduced
(i.e., the arrays are reshaped without copy to the minimum
ndims) by default. It may make the kernel fast by reducing the
index calculations.
options (list): Options passed to the nvcc command.
preamble (str): Fragment of the CUDA-C/C++ code that is inserted at the
top of the cu file.
loop_prep (str): Fragment of the CUDA-C/C++ code that is inserted at
the top of the kernel function definition and above the ``for``
loop.
after_loop (str): Fragment of the CUDA-C/C++ code that is inserted at
the bottom of the kernel function definition.
"""
def __init__(self, in_params, out_params, operation,
name='kernel', reduce_dims=True, preamble='', **kwargs):
self.in_params = _get_param_info(in_params, True)
self.out_params = _get_param_info(out_params, False)
self.nin = len(self.in_params)
self.nout = len(self.out_params)
self.nargs = self.nin + self.nout
param_rest = _get_param_info('CIndexer _ind', False)
self.params = self.in_params + self.out_params + param_rest
self.operation = operation
self.name = name
self.reduce_dims = reduce_dims
self.preamble = preamble
self.kwargs = frozenset(kwargs.items())
names = [p.name for p in self.in_params + self.out_params]
if 'i' in names:
raise ValueError("Can not use 'i' as a parameter name")
def __call__(self, *args, **kwargs):
"""Compiles and invokes the elementwise kernel.
The compilation runs only if the kernel is not cached. Note that the
kernels with different argument dtypes or ndims are not compatible. It
means that single ElementwiseKernel object may be compiled into
multiple kernel binaries.
Args:
args: Argumens of the kernel.
size (int): Range size of the indices. If specified, the variable
``n`` is set to this value. Otherwise, the result of
broadcasting is used to determine the value of ``n``.
Returns:
Arrays are returned according to the ``out_params`` argument of the
``__init__`` method.
"""
size = kwargs.pop('size', None)
if kwargs:
raise TypeError('Wrong arguments %s' % kwargs)
n_args = len(args)
if n_args != self.nin and n_args != self.nargs:
raise TypeError('Wrong number of arguments for %s' % self.name)
_check_args(args)
values, shape = _broadcast(args, self.params, size is not None)
in_args = values[:self.nin]
out_args = values[self.nin:]
cp_array = cupy.ndarray
in_ndarray_types = tuple(
[a.dtype.type if isinstance(a, cp_array) else None
for a in in_args])
out_ndarray_types = tuple(
[a.dtype.type if isinstance(a, cp_array) else None
for a in out_args])
in_types, out_types, types = _decide_params_type(
self.in_params, self.out_params,
in_ndarray_types, out_ndarray_types)
out_args = _get_out_args_with_params(
out_args, out_types, shape, self.out_params)
if self.nout == 1:
ret = out_args[0]
else:
ret = tuple(out_args)
if size is not None:
shape = size,
if 0 in shape:
return ret
inout_args = [x if isinstance(x, cp_array) else t(x)
for x, t in six_zip(in_args, in_types)]
inout_args += out_args
if self.reduce_dims:
inout_args, shape = _reduce_dims(
inout_args, self.params, shape)
indexer = carray.Indexer(shape)
inout_args.append(indexer)
args_info = _get_args_info(inout_args)
kern = _get_elementwise_kernel(
args_info, types, self.params, self.operation,
self.name, self.preamble, self.kwargs)
kern.linear_launch(indexer.size, inout_args)
return ret
@util.memoize(for_each_device=True)
def _get_ufunc_kernel(in_types, out_types, routine, args_info, out_raw_types,
params, name, preamble):
kernel_params = _get_kernel_params(params, args_info)
types = []
op = []
for i, x in enumerate(in_types):
types.append('typedef %s in%d_type;' % (_get_typename(x), i))
if args_info[i][0] is cupy.ndarray:
op.append(
'const in{0}_type in{0} = _raw_in{0}[_ind.get()];'.format(i))
for i, x in enumerate(out_types):
types.append('typedef %s out%d_type;' % (_get_typename(x), i))
op.append('{1} &out{0} = _raw_out{0}[_ind.get()];'.format(
i, _get_typename(out_raw_types[i])))
op.append(routine)
operation = '\n'.join(op)
types.append(preamble)
preamble = '\n'.join(types)
return _get_simple_elementwise_kernel(
kernel_params, operation, name, preamble)
def _guess_routine_from_in_types(ops, in_types):
for op in ops:
for dst, src in six_zip(op[0], in_types):
if not numpy.can_cast(src, dst):
break
else:
return op
return None
def _guess_routine_from_dtype(ops, dtype):
for op in ops:
for t in op[1]:
if t != dtype:
break
else:
return op
return None
def _guess_routine(name, cache, ops, in_args, dtype):
if dtype is None:
key = tuple([numpy.dtype(type(i)).type
if isinstance(i, (int, float, bool)) else i.dtype.type
for i in in_args])
else:
key = dtype
op = cache.get(key, ())
if op is ():
if dtype is None:
op = _guess_routine_from_in_types(ops, key)
else:
op = _guess_routine_from_dtype(ops, key)
cache[key] = op
if op:
return op
raise TypeError('Wrong type of arguments for %s' % name)
class ufunc(object):
"""Universal function.
Attributes:
name (str): The name of the universal function.
nin (int): Number of input arguments.
nout (int): Number of output arguments.
nargs (int): Number of all arguments.
"""
def __init__(self, name, nin, nout, ops, preamble='', doc=''):
self.name = name
self.nin = nin
self.nout = nout
self.nargs = nin + nout
self._ops = ops
self._preamble = preamble
self.__doc__ = doc
_in_params = tuple(
ParameterInfo('T in%d' % i, True)
for i in six_range(nin))
_out_params = tuple(
ParameterInfo('T out%d' % i, False)
for i in six_range(nout))
self._params = _in_params + _out_params + (
ParameterInfo('CIndexer _ind', False),)
self._routine_cache = {}
def __repr__(self):<|fim▁hole|> @property
def types(self):
"""A list of type signatures.
Each type signature is represented by type character codes of inputs
and outputs separated by '->'.
"""
types = []
for in_types, out_types, _ in self._ops:
in_str = ''.join([numpy.dtype(t).char for t in in_types])
out_str = ''.join([numpy.dtype(t).char for t in out_types])
types.append('%s->%s' % (in_str, out_str))
return types
def __call__(self, *args, **kwargs):
"""Applies the universal function to arguments elementwise.
Args:
args: Input arguments. Each of them can be a cupy.ndarray object or
a scalar. The output arguments can be omitted or be specified
by the ``out`` argument.
out (cupy.ndarray): Output array. It outputs to new arrays
default.
dtype: Data type specifier.
Returns:
Output array or a tuple of output arrays.
"""
out = kwargs.pop('out', None)
dtype = kwargs.pop('dtype', None)
if dtype is not None:
dtype = numpy.dtype(dtype).type
if kwargs:
raise TypeError('Wrong arguments %s' % kwargs)
n_args = len(args)
if n_args != self.nin and n_args != self.nargs:
raise TypeError('Wrong number of arguments for %s' % self.name)
if out is None:
in_args = args[:self.nin]
out_args = args[self.nin:]
else:
if self.nout != 1:
raise ValueError("Cannot use 'out' in %s" % self.name)
if n_args != self.nin:
raise ValueError("Cannot specify 'out' as both "
"a positional and keyword argument")
in_args = args
out_args = out,
args += out_args
_check_args(args)
broad = cupy.broadcast(*args)
shape = broad.shape
in_types, out_types, routine = _guess_routine(
self.name, self._routine_cache, self._ops, in_args, dtype)
out_args = _get_out_args(out_args, out_types, shape)
if self.nout == 1:
ret = out_args[0]
else:
ret = tuple(out_args)
if 0 in shape:
return ret
inout_args = [x if isinstance(x, cupy.ndarray) else t(x)
for x, t in six_zip(broad.values, in_types)]
inout_args.extend(out_args)
inout_args, shape = _reduce_dims(inout_args, self._params, shape)
indexer = carray.Indexer(shape)
inout_args.append(indexer)
args_info = _get_args_info(inout_args)
out_raw_types = tuple([x.dtype.type for x in out_args])
kern = _get_ufunc_kernel(
in_types, out_types, routine,
args_info, out_raw_types,
self._params, self.name, self._preamble)
kern.linear_launch(indexer.size, inout_args)
return ret
def create_ufunc(name, ops, routine=None, preamble='', doc=''):
_ops = []
for t in ops:
if not isinstance(t, tuple):
typ = t
rt = routine
else:
typ, rt = t
types = typ.split('->')
if len(types) == 1:
in_types = out_types = tuple(types)
else:
in_types, out_types = map(tuple, types)
in_types = tuple([numpy.dtype(t).type for t in in_types])
out_types = tuple([numpy.dtype(t).type for t in out_types])
_ops.append((in_types, out_types, rt))
return ufunc(name, len(_ops[0][0]), len(_ops[0][1]), _ops, preamble, doc)
_id = 'out0 = in0'
copy = create_ufunc(
'cupy_copy',
('?->?', 'b->b', 'B->B', 'h->h', 'H->H', 'i->i', 'I->I', 'l->l', 'L->L',
'q->q', 'Q->Q', 'e->e', 'f->f', 'd->d'),
_id)
copy_where = create_ufunc(
'cupy_copy_where',
('??->?', 'b?->b', 'B?->B', 'h?->h', 'H?->H', 'i?->i', 'I?->I', 'l?->l',
'L?->L', 'q?->q', 'Q?->Q', 'e?->e', 'f?->f', 'd?->d'),
'if (in1) out0 = in0')
_divmod = create_ufunc(
'cupy_divmod',
('bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l', 'LL->L',
'qq->q', 'QQ->Q', 'ee->e', 'ff->f', 'dd->d'),
'out0_type a = _floor_divide(in0, in1); out0 = a; out1 = in0 - a * in1')<|fim▁end|> | return "<ufunc '%s'>" % self.name
|
<|file_name|>spu_isa.py<|end_file_name|><|fim▁begin|># Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from corepy.spre.spe import Instruction, DispatchInstruction, Register
from spu_insts import *
__doc__="""
ISA for the Cell Broadband Engine's SPU.
"""
class lqx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':452}
cycles = (1, 6, 0)
class stqx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':324}
cycles = (1, 6, 0)
class cbx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':468}
cycles = (1, 4, 0)
class chx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':469}
cycles = (1, 4, 0)
class cwx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':470}
cycles = (1, 4, 0)
class cdx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':471}
cycles = (1, 4, 0)
class ah(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':200}
cycles = (0, 2, 0)
class a(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':192}
cycles = (0, 2, 0)
class sfh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':72}
cycles = (0, 2, 0)
class sf(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':64}
cycles = (0, 2, 0)
class addx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':832}
cycles = (0, 2, 0)
class cg(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':194}
cycles = (0, 2, 0)
class cgx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':834}
cycles = (0, 2, 0)
class sfx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':833}
cycles = (0, 2, 0)
class bg(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':66}
cycles = (0, 2, 0)
<|fim▁hole|> params = {'OPCD':835}
cycles = (0, 2, 0)
class mpy(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':964}
cycles = (0, 7, 0)
class mpyu(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':972}
cycles = (0, 7, 0)
class mpyh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':965}
cycles = (0, 7, 0)
class mpys(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':967}
cycles = (0, 7, 0)
class mpyhh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':966}
cycles = (0, 7, 0)
class mpyhha(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':838}
cycles = (0, 7, 0)
class mpyhhu(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':974}
cycles = (0, 7, 0)
class mpyhhau(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':846}
cycles = (0, 7, 0)
class clz(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':677}
cycles = (0, 2, 0)
class cntb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':692}
cycles = (0, 4, 0)
class fsmb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':438}
cycles = (1, 4, 0)
class fsmh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':437}
cycles = (1, 4, 0)
class fsm(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':436}
cycles = (1, 4, 0)
class gbb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':434}
cycles = (1, 4, 0)
class gbh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':433}
cycles = (1, 4, 0)
class gb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':432}
cycles = (1, 4, 0)
class avgb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':211}
cycles = (0, 4, 0)
class absdb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':83}
cycles = (0, 4, 0)
class sumb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':595}
cycles = (0, 4, 0)
class xsbh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':694}
cycles = (0, 2, 0)
class xshw(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':686}
cycles = (0, 2, 0)
class xswd(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':678}
cycles = (0, 2, 0)
class and_(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':193}
cycles = (0, 2, 0)
class andc(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':705}
cycles = (0, 2, 0)
class or_(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':65}
cycles = (0, 2, 0)
class orc(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':713}
cycles = (0, 2, 0)
class orx(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':496}
cycles = (1, 4, 0)
class xor(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':577}
cycles = (0, 2, 0)
class nand(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':201}
cycles = (0, 2, 0)
class nor(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':73}
cycles = (0, 2, 0)
class eqv(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':585}
cycles = (0, 2, 0)
class shlh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':95}
cycles = (0, 4, 0)
class shl(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':91}
cycles = (0, 4, 0)
class shlqbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':475}
cycles = (1, 4, 0)
class shlqby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':479}
cycles = (1, 4, 0)
class shlqbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':463}
cycles = (1, 4, 0)
class roth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':92}
cycles = (0, 4, 0)
class rot(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':88}
cycles = (0, 4, 0)
class rotqby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':476}
cycles = (1, 4, 0)
class rotqbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':460}
cycles = (1, 4, 0)
class rotqbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':472}
cycles = (1, 4, 0)
class rothm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':93}
cycles = (0, 4, 0)
class rotm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':89}
cycles = (0, 4, 0)
class rotqmby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':477}
cycles = (1, 4, 0)
class rotqmbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':461}
cycles = (1, 4, 0)
class rotqmbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':473}
cycles = (1, 4, 0)
class rotmah(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':94}
cycles = (0, 4, 0)
class rotma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':90}
cycles = (0, 4, 0)
class heq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':984}
cycles = (0, 2, 0)
class hgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':600}
cycles = (0, 2, 0)
class hlgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':728}
cycles = (0, 2, 0)
class ceqb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':976}
cycles = (0, 2, 0)
class ceqh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':968}
cycles = (0, 2, 0)
class ceq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':960}
cycles = (0, 2, 0)
class cgtb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':592}
cycles = (0, 2, 0)
class cgth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':584}
cycles = (0, 2, 0)
class cgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':576}
cycles = (0, 2, 0)
class clgtb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':720}
cycles = (0, 2, 0)
class clgth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':712}
cycles = (0, 2, 0)
class clgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':704}
cycles = (0, 2, 0)
class bi(Instruction):
machine_inst = OPCD_A_D_E
params = {'OPCD':424}
cycles = (1, 4, 0)
class iret(Instruction):
machine_inst = OPCD_A_D_E
params = {'OPCD':426}
cycles = (1, 4, 0)
class bisled(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':427}
cycles = (1, 4, 0)
class bisl(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':425}
cycles = (1, 4, 0)
class biz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':296}
cycles = (1, 4, 0)
class binz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':297}
cycles = (1, 4, 0)
class bihz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':294}
cycles = (1, 4, 0)
class bihnz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':299}
cycles = (1, 4, 0)
# TODO - can we check that if P is set then RO is zero as required?
class hbr(DispatchInstruction):
cycles = (1, 15, 0)
dispatch = (
(OPCD_RO_A_P, {'OPCD':428}),
(OPCD_LBL9_A_P, {'OPCD':428}))
class fa(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':708}
cycles = (0, 6, 0)
class dfa(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':716}
cycles = (0, 13, 6)
class fs(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':709}
cycles = (0, 6, 0)
class dfs(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':717}
cycles = (0, 13, 6)
class fm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':710}
cycles = (0, 6, 0)
class dfm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':718}
cycles = (0, 13, 6)
class dfma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':860}
cycles = (0, 13, 6)
class dfnms(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':862}
cycles = (0, 13, 6)
class dfms(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':861}
cycles = (0, 13, 6)
class dfnma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':863}
cycles = (0, 13, 6)
class frest(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':440}
cycles = (1, 4, 0)
class frsqest(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':441}
cycles = (1, 4, 0)
class fi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':980}
cycles = (0, 7, 0)
class frds(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':953}
cycles = (0, 13, 6)
class fesd(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':952}
cycles = (0, 13, 6)
class fceq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':962}
cycles = (0, 2, 0)
class fcmeq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':970}
cycles = (0, 2, 0)
class fcgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':706}
cycles = (0, 2, 0)
class fcmgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':714}
cycles = (0, 2, 0)
class fscrwr(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':954}
cycles = (0, 7, 0)
class fscrrd(Instruction):
machine_inst = OPCD_T
params = {'OPCD':920}
cycles = (0, 13, 6)
class stop(Instruction):
machine_inst = OPCD_STOP_SIG
params = {'OPCD':0}
cycles = (1, 4, 0)
class stopd(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':320}
cycles = (1, 4, 0)
class lnop(Instruction):
machine_inst = OPCD
params = {'OPCD':1}
cycles = (1, 0, 0)
class nop(Instruction):
machine_inst = OPCD_T
params = {'OPCD':513}
cycles = (0, 0, 0)
class sync(Instruction):
machine_inst = OPCD_CF
params = {'OPCD':2}
cycles = (1, 4, 0)
class dsync(Instruction):
machine_inst = OPCD
params = {'OPCD':3}
cycles = (1, 4, 0)
class mfspr(Instruction):
machine_inst = OPCD_SA_T
params = {'OPCD':12}
cycles = (1, 6, 0)
class mtspr(Instruction):
machine_inst = OPCD_SA_T
params = {'OPCD':268}
cycles = (1, 6, 0)
class rdch(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':13}
cycles = (1, 6, 0)
class rchcnt(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':15}
cycles = (1, 6, 0)
class wrch(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':269}
cycles = (1, 6, 0)
class mpya(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':12}
cycles = (0, 7, 0)
class selb(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':8}
cycles = (0, 2, 0)
class shufb(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':11}
cycles = (1, 4, 0)
class fma(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':14}
cycles = (0, 6, 0)
class fnms(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':13}
cycles = (0, 6, 0)
class fms(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':15}
cycles = (0, 6, 0)
class cbd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':500}
cycles = (1, 4, 0)
class chd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':501}
cycles = (1, 4, 0)
class cwd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':502}
cycles = (1, 4, 0)
class cdd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':503}
cycles = (1, 4, 0)
class shlhi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':127}
cycles = (0, 4, 0)
class shli(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':123}
cycles = (0, 4, 0)
class shlqbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':507}
cycles = (1, 4, 0)
class shlqbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':511}
cycles = (1, 4, 0)
class rothi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':124}
cycles = (0, 4, 0)
class roti(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':120}
cycles = (0, 4, 0)
class rotqbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':508}
cycles = (1, 4, 0)
class rotqbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':504}
cycles = (1, 4, 0)
class rothmi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':125}
cycles = (0, 4, 0)
class rotmi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':121}
cycles = (0, 4, 0)
class rotqmbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':509}
cycles = (1, 4, 0)
class rotqmbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':505}
cycles = (1, 4, 0)
class rotmahi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':126}
cycles = (0, 4, 0)
class rotmai(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':122}
cycles = (0, 4, 0)
class csflt(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':474}
cycles = (0, 7, 0)
class cflts(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':472}
cycles = (0, 7, 0)
class cuflt(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':475}
cycles = (0, 7, 0)
class cfltu(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':473}
cycles = (0, 7, 0)
class lqd(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':52}
cycles = (1, 6, 0)
class stqd(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':36}
cycles = (1, 6, 0)
class ahi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':29}
cycles = (0, 2, 0)
class ai(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':28}
cycles = (0, 2, 0)
class sfhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':13}
cycles = (0, 2, 0)
class sfi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':12}
cycles = (0, 2, 0)
class mpyi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':116}
cycles = (0, 7, 0)
class mpyui(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':117}
cycles = (0, 7, 0)
class andbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':22}
cycles = (0, 2, 0)
class andhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':21}
cycles = (0, 2, 0)
class andi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':20}
cycles = (0, 2, 0)
class orbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':6}
cycles = (0, 2, 0)
class orhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':5}
cycles = (0, 2, 0)
class ori(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':4}
cycles = (0, 2, 0)
class xorbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':70}
cycles = (0, 2, 0)
class xorhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':69}
cycles = (0, 2, 0)
class xori(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':68}
cycles = (0, 2, 0)
class heqi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':127}
cycles = (0, 2, 0)
class hgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':79}
cycles = (0, 2, 0)
class hlgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':95}
cycles = (0, 2, 0)
class ceqbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':126}
cycles = (0, 2, 0)
class ceqhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':125}
cycles = (0, 2, 0)
class ceqi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':124}
cycles = (0, 2, 0)
class cgtbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':78}
cycles = (0, 2, 0)
class cgthi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':77}
cycles = (0, 2, 0)
class cgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':76}
cycles = (0, 2, 0)
class clgtbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':94}
cycles = (0, 2, 0)
class clgthi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':93}
cycles = (0, 2, 0)
class clgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':92}
cycles = (0, 2, 0)
class lqa(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':97}
cycles = (1, 6, 0)
class lqr(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':103}
cycles = (1, 6, 0)
class stqa(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':65}
cycles = (1, 6, 0)
class stqr(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':71}
cycles = (1, 6, 0)
class ilh(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':131}
cycles = (0, 2, 0)
class ilhu(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':130}
cycles = (0, 2, 0)
class il(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':129}
cycles = (0, 2, 0)
class iohl(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':193}
cycles = (0, 2, 0)
class fsmbi(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':101}
cycles = (1, 4, 0)
class br(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':100}),
(OPCD_LBL16, {'OPCD':100}))
# TODO - how can I do absolute branches?
class bra(Instruction):
machine_inst = OPCD_I16
params = {'OPCD':96}
cycles = (1, 4, 0)
# TODO - I16 has two zero bits appended, do I handle this correctly?
# What is the correct way, anyway?
class brsl(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':102}),
(OPCD_LBL16_T, {'OPCD':102}))
class brasl(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':98}
cycles = (1, 4, 0)
class brnz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':66}),
(OPCD_LBL16_T, {'OPCD':66}))
class brz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':64}),
(OPCD_LBL16_T, {'OPCD':64}))
class brhnz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':70}),
(OPCD_LBL16, {'OPCD':70}))
class brhz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':68}),
(OPCD_LBL16, {'OPCD':68}))
class hbra(Instruction):
machine_inst = OPCD_LBL9_I16
params = {'OPCD':8}
cycles = (1, 15, 0)
class hbrr(DispatchInstruction):
cycles = (1, 15, 0)
dispatch = (
(OPCD_ROA_I16, {'OPCD':9}),
(OPCD_LBL9_LBL16, {'OPCD':9}))
class ila(Instruction):
machine_inst = OPCD_I18_T
params = {'OPCD':33}
cycles = (0, 2, 0)<|fim▁end|> | class bgx(Instruction):
machine_inst = OPCD_B_A_T |
<|file_name|>UnsafeSSLConnection.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.mariotaku.twidere.util.http;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.microedition.io.Connector;
import javax.microedition.io.SecurityInfo;
import javax.microedition.io.SocketConnection;
import javax.microedition.io.StreamConnection;
import repackaged.com.sun.midp.pki.X509Certificate;
import repackaged.com.sun.midp.ssl.SSLStreamConnection;
/**
*
* @author mariotaku
*/
public final class UnsafeSSLConnection implements StreamConnection {
private final SSLStreamConnection sc;
UnsafeSSLConnection(final String host, final int port) throws IOException {
final SocketConnection tcp = (SocketConnection) Connector.open("socket://" + host + ":" + port);
tcp.setSocketOption(SocketConnection.DELAY, 0);
final InputStream tcpIn = tcp.openInputStream();
final OutputStream tcpOut = tcp.openOutputStream();
sc = new SSLStreamConnection(host, port, tcpIn, tcpOut);
}
public synchronized OutputStream openOutputStream() throws IOException {
return sc.openOutputStream();
}
public synchronized InputStream openInputStream() throws IOException {
return sc.openInputStream();
}
public DataOutputStream openDataOutputStream() throws IOException {
return sc.openDataOutputStream();
}
public DataInputStream openDataInputStream() throws IOException {
return sc.openDataInputStream();
}
public X509Certificate getServerCertificate() {
return sc.getServerCertificate();
}
public SecurityInfo getSecurityInfo() throws IOException {
return sc.getSecurityInfo();
}
public synchronized void close() throws IOException {
sc.close();
}
public static UnsafeSSLConnection open(final String host, final int port) throws IOException {
if (host == null && port < 0) {
return new UnsafeSSLConnection("127.0.0.1", 443);
} else if (host != null) {
return new UnsafeSSLConnection(host, 443);<|fim▁hole|>}<|fim▁end|> | }
return new UnsafeSSLConnection(host, port);
} |
<|file_name|>player_controller.hpp<|end_file_name|><|fim▁begin|>//
// SuperTuxKart - a fun racing game with go-kart
// Copyright (C) 2004-2005 Steve Baker <[email protected]>
// Copyright (C) 2006 Joerg Henrichs, Steve Baker
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 3
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#ifndef HEADER_PLAYERKART_HPP
#define HEADER_PLAYERKART_HPP
#include "config/player.hpp"
#include "karts/controller/controller.hpp"
class AbstractKart;
class Player;
class SFXBase;
/** PlayerKart manages control events from the player and moves
* them to the Kart
*
* \ingroup controller
*/
class PlayerController : public Controller
{
private:
int m_steer_val, m_steer_val_l, m_steer_val_r;
int m_prev_accel;
bool m_prev_brake;
bool m_prev_nitro;
float m_penalty_time;
SFXBase *m_bzzt_sound;
SFXBase *m_wee_sound;
SFXBase *m_ugh_sound;
SFXBase *m_grab_sound;
SFXBase *m_full_sound;
void steer(float, int);
public:
PlayerController (AbstractKart *kart,
StateManager::ActivePlayer *_player,
unsigned int player_index);
~PlayerController ();
void update (float);
void action (PlayerAction action, int value);
void handleZipper (bool play_sound);
void collectedItem (const Item &item, int add_info=-1,
float previous_energy=0);
virtual void skidBonusTriggered();
virtual void setPosition (int p);
virtual void finishedRace (float time);
virtual bool isPlayerController() const {return true;}
virtual bool isNetworkController() const { return false; }
virtual void reset ();
void resetInputState ();
virtual void crashed (const AbstractKart *k) {}
virtual void crashed (const Material *m) {}
// ------------------------------------------------------------------------
/** Player will always be able to get a slipstream bonus. */<|fim▁hole|> // ------------------------------------------------------------------------
/** Callback whenever a new lap is triggered. Used by the AI
* to trigger a recomputation of the way to use. */
virtual void newLap(int lap) {}
};
#endif<|fim▁end|> | virtual bool disableSlipstreamBonus() const { return false; } |
<|file_name|>pseudo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import random as rand
class PrimeTester(object):
def solovay_strassen(self, primo, acuracidade=5):
nro_tentativas = 0
if primo == 2 or primo == 3:
return (nro_tentativas, True)
if primo < 2:
raise ValueError('Entrada < 2')
if primo % 2 == 0:
raise ValueError('Entrada % 2 == 0')
for _ in range(acuracidade):
nro_tentativas += 1
a = rand.randint(2, primo - 1)
res = self.adrien_legendre(a, primo)
potencia = self.potencia(a, (primo - 1) // 2, primo)
if res == 0 or potencia != res % primo:
return (nro_tentativas, False)
return (nro_tentativas, True)
def adrien_legendre(self, a, primo):
if a == 0 or a == 1:
return a
if a % 2 == 0:
res = self.adrien_legendre(a // 2, primo)
if ((primo ** 2) - 1) & 8 != 0:
res = -res
else:
res = self.adrien_legendre(primo % a, a)
if (a - 1) * (primo - 1) & 4 != 0:
res = -res
return res
def fatora(self, n):
exp2 = 0
while n % 2 == 0:
n = n // 2
exp2 += 1
return exp2, n
def testacandidato(self, primo_candidato, primo, expoente, resto):<|fim▁hole|> if primo_candidato == 1 or primo_candidato == primo - 1:
return False
for _ in range(expoente):
primo_candidato = self.potencia(primo_candidato, 2, primo)
if primo_candidato == primo - 1:
return False
return True
def miller_rabin(self, primo, acuracidade=5):
nro_tentativas = 0
if primo == 2 or primo == 3:
return (nro_tentativas, True)
if primo < 2:
return (nro_tentativas, False)
if primo % 2 == 0:
return (nro_tentativas, False)
expoente, resto = self.fatora(primo - 1)
for _ in range(acuracidade):
nro_tentativas += 1
possivelmente_primo = rand.randint(2, primo - 2)
if self.testacandidato(possivelmente_primo, primo, expoente, resto):
return (nro_tentativas, False)
return (nro_tentativas, True)
def potencia(self, base, exp, modulo):
res = 1
base = base % modulo
while exp > 0:
if exp % 2 == 1:
res = (res * base) % modulo
exp = exp >> 1
base = (base * base) % modulo
return res
class Twister(object):
def __init__(self, index=0):
self.N = 624
self.M = 397
self.mersenne_twister = [x for x in range(624)]
self.index = 0
self.index_tamanho = 64
self.__magic__ = rand.getrandbits(64)
def alimentar(self, seed):
self.index = 0
self.mersenne_twister[0] = seed
for i in range(1, self.N):
self.mersenne_twister[i] = (
1812433253 * (self.mersenne_twister[i - 1] ^ (self.mersenne_twister[i - 1] >> 30)) + i)
def extrair(self, bits):
if bits in [64, 128, 256, 512, 1024, 2048, 4096]:
self.__magic__ = rand.getrandbits(bits)
else:
raise 'Precisa ser algum desses valores: {64, 128, 256, 512, 1024, 2048, 4096}'
if self.index == 0:
self.gerar()
y = self.mersenne_twister[self.index]
y = self.mascara(y)
self.index = (self.index + 1) % len(self.mersenne_twister)
return y
def mascara(self, y):
y ^= (y >> 11)
y ^= (y << 7) & 0x9d2c5680
y ^= (y << 15) & 0xefc60000
y ^= (y >> 18)
return y
def gerar(self):
for i in range(self.N):
y = (self.mersenne_twister[i] and 0x80000000) + (self.mersenne_twister[(i + 1) % self.N] and 0x7fffffff)
self.mersenne_twister[i] = self.mersenne_twister[(i + 397) % self.N] ^ (y >> 1)
if y % 2 != 0:
# Original : MT[i] := MT[i] xor (2567483615) // 0x9908b0df
# Hacked : MT[i] := MT[i] mod (2567483615) // 0x9908b0df
# https://en.wikipedia.org/wiki/Mersenne_Twister
self.mersenne_twister[i] %= self.__magic__<|fim▁end|> | primo_candidato = self.potencia(primo_candidato, resto, primo)
|
<|file_name|>avx.rs<|end_file_name|><|fim▁begin|>//! Mask reductions implementation for `x86` and `x86_64` targets with `AVX`
/// `x86`/`x86_64` 256-bit `AVX` implementation
/// FIXME: it might be faster here to do two `_mm_movmask_epi8`
#[cfg(target_feature = "avx")]
macro_rules! x86_m8x32_avx_impl {
($id:ident) => {
impl All for $id {
#[inline]
#[target_feature(enable = "avx")]
unsafe fn all(self) -> bool {
#[cfg(target_arch = "x86")]
use crate::arch::x86::_mm256_testc_si256;
#[cfg(target_arch = "x86_64")]
use crate::arch::x86_64::_mm256_testc_si256;
_mm256_testc_si256(
crate::mem::transmute(self),
crate::mem::transmute($id::splat(true)),
) != 0
}
}
impl Any for $id {
#[inline]
#[target_feature(enable = "avx")]
unsafe fn any(self) -> bool {
#[cfg(target_arch = "x86")]
use crate::arch::x86::_mm256_testz_si256;
#[cfg(target_arch = "x86_64")]
use crate::arch::x86_64::_mm256_testz_si256;
_mm256_testz_si256(
crate::mem::transmute(self),
crate::mem::transmute(self),
) == 0
}
}
};
}
/// `x86`/`x86_64` 256-bit m32x8 `AVX` implementation
macro_rules! x86_m32x8_avx_impl {
($id:ident) => {
impl All for $id {
#[inline]
#[target_feature(enable = "sse")]
unsafe fn all(self) -> bool {
#[cfg(target_arch = "x86")]
use crate::arch::x86::_mm256_movemask_ps;
#[cfg(target_arch = "x86_64")]
use crate::arch::x86_64::_mm256_movemask_ps;
// _mm256_movemask_ps(a) creates a 8bit mask containing the
// most significant bit of each lane of `a`. If all bits are
// set, then all 8 lanes of the mask are true.
_mm256_movemask_ps(crate::mem::transmute(self)) == 0b_1111_1111_i32
}
}
impl Any for $id {
#[inline]
#[target_feature(enable = "sse")]
unsafe fn any(self) -> bool {
#[cfg(target_arch = "x86")]
use crate::arch::x86::_mm256_movemask_ps;
#[cfg(target_arch = "x86_64")]
use crate::arch::x86_64::_mm256_movemask_ps;
_mm256_movemask_ps(crate::mem::transmute(self)) != 0
}<|fim▁hole|> }
};
}
/// `x86`/`x86_64` 256-bit m64x4 `AVX` implementation
macro_rules! x86_m64x4_avx_impl {
($id:ident) => {
impl All for $id {
#[inline]
#[target_feature(enable = "sse")]
unsafe fn all(self) -> bool {
#[cfg(target_arch = "x86")]
use crate::arch::x86::_mm256_movemask_pd;
#[cfg(target_arch = "x86_64")]
use crate::arch::x86_64::_mm256_movemask_pd;
// _mm256_movemask_pd(a) creates a 4bit mask containing the
// most significant bit of each lane of `a`. If all bits are
// set, then all 4 lanes of the mask are true.
_mm256_movemask_pd(crate::mem::transmute(self)) == 0b_1111_i32
}
}
impl Any for $id {
#[inline]
#[target_feature(enable = "sse")]
unsafe fn any(self) -> bool {
#[cfg(target_arch = "x86")]
use crate::arch::x86::_mm256_movemask_pd;
#[cfg(target_arch = "x86_64")]
use crate::arch::x86_64::_mm256_movemask_pd;
_mm256_movemask_pd(crate::mem::transmute(self)) != 0
}
}
};
}<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from .v2016_09_01.models import *<|fim▁end|> | # coding=utf-8
# -------------------------------------------------------------------------- |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url, include
from snippets import views
from rest_framework.routers import DefaultRouter
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'snippets', views.SnippetViewSet)
router.register(r'users', views.UserViewSet)
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browsable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))<|fim▁hole|><|fim▁end|> | ] |
<|file_name|>authv.js<|end_file_name|><|fim▁begin|>Vue.component('snackbar', require('./components/material/snackbar.vue'));<|fim▁hole|><|fim▁end|> | Vue.component('show-snackbar', require('./components/material/show-snackbar.vue')); |
<|file_name|>UIRichEdit.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h"
#include <textserv.h>
#pragma comment(lib, "riched20.lib")
// These constants are for backward compatibility. They are the
// sizes used for initialization and reset in RichEdit 1.0
namespace DirectUICore {
const LONG cInitTextMax = (32 * 1024) - 1;
EXTERN_C const IID IID_ITextServices = { // 8d33f740-cf58-11ce-a89d-00aa006cadc5
0x8d33f740,
0xcf58,
0x11ce,
{0xa8, 0x9d, 0x00, 0xaa, 0x00, 0x6c, 0xad, 0xc5}
};
EXTERN_C const IID IID_ITextHost = { /* c5bdd8d0-d26e-11ce-a89e-00aa006cadc5 */
0xc5bdd8d0,
0xd26e,
0x11ce,
{0xa8, 0x9e, 0x00, 0xaa, 0x00, 0x6c, 0xad, 0xc5}
};
#ifndef LY_PER_INCH
#define LY_PER_INCH 1440
#endif
#ifndef HIMETRIC_PER_INCH
#define HIMETRIC_PER_INCH 2540
#endif
class CTxtWinHost : public ITextHost
{
public:
CTxtWinHost();
BOOL Init(CRichEditUI *re , const CREATESTRUCT *pcs);
virtual ~CTxtWinHost();
ITextServices* GetTextServices(void) { return pserv; }
void SetClientRect(RECT *prc);
RECT* GetClientRect() { return &rcClient; }
BOOL GetWordWrap(void) { return fWordWrap; }
void SetWordWrap(BOOL fWordWrap);
BOOL GetReadOnly();
void SetReadOnly(BOOL fReadOnly);
void SetFont(HFONT hFont);
void SetColor(DWORD dwColor);
SIZEL* GetExtent();
void SetExtent(SIZEL *psizelExtent);
void LimitText(LONG nChars);
BOOL IsCaptured();
BOOL GetAllowBeep();
void SetAllowBeep(BOOL fAllowBeep);
WORD GetDefaultAlign();
void SetDefaultAlign(WORD wNewAlign);
BOOL GetRichTextFlag();
void SetRichTextFlag(BOOL fNew);
LONG GetDefaultLeftIndent();
void SetDefaultLeftIndent(LONG lNewIndent);
BOOL SetSaveSelection(BOOL fSaveSelection);
HRESULT OnTxInPlaceDeactivate();
HRESULT OnTxInPlaceActivate(LPCRECT prcClient);
BOOL GetActiveState(void) { return fInplaceActive; }
BOOL DoSetCursor(RECT *prc, POINT *pt);
void SetTransparent(BOOL fTransparent);
void GetControlRect(LPRECT prc);
LONG SetAccelPos(LONG laccelpos);
WCHAR SetPasswordChar(WCHAR chPasswordChar);
void SetDisabled(BOOL fOn);
LONG SetSelBarWidth(LONG lSelBarWidth);
BOOL GetTimerState();
void SetCharFormat(CHARFORMAT2W &c);
void SetParaFormat(PARAFORMAT2 &p);
// -----------------------------
// IUnknown interface
// -----------------------------
virtual HRESULT _stdcall QueryInterface(REFIID riid, void **ppvObject);
virtual ULONG _stdcall AddRef(void);
virtual ULONG _stdcall Release(void);
// -----------------------------
// ITextHost interface
// -----------------------------
virtual HDC TxGetDC();
virtual INT TxReleaseDC(HDC hdc);
virtual BOOL TxShowScrollBar(INT fnBar, BOOL fShow);
virtual BOOL TxEnableScrollBar (INT fuSBFlags, INT fuArrowflags);
virtual BOOL TxSetScrollRange(INT fnBar, LONG nMinPos, INT nMaxPos, BOOL fRedraw);
virtual BOOL TxSetScrollPos (INT fnBar, INT nPos, BOOL fRedraw);
virtual void TxInvalidateRect(LPCRECT prc, BOOL fMode);
virtual void TxViewChange(BOOL fUpdate);
virtual BOOL TxCreateCaret(HBITMAP hbmp, INT xWidth, INT yHeight);
virtual BOOL TxShowCaret(BOOL fShow);
virtual BOOL TxSetCaretPos(INT x, INT y);
virtual BOOL TxSetTimer(UINT idTimer, UINT uTimeout);
virtual void TxKillTimer(UINT idTimer);
virtual void TxScrollWindowEx (INT dx, INT dy, LPCRECT lprcScroll, LPCRECT lprcClip, HRGN hrgnUpdate, LPRECT lprcUpdate, UINT fuScroll);
virtual void TxSetCapture(BOOL fCapture);
virtual void TxSetFocus();
virtual void TxSetCursor(HCURSOR hcur, BOOL fText);
virtual BOOL TxScreenToClient (LPPOINT lppt);
virtual BOOL TxClientToScreen (LPPOINT lppt);
virtual HRESULT TxActivate( LONG * plOldState );
virtual HRESULT TxDeactivate( LONG lNewState );
virtual HRESULT TxGetClientRect(LPRECT prc);
virtual HRESULT TxGetViewInset(LPRECT prc);
virtual HRESULT TxGetCharFormat(const CHARFORMATW **ppCF );
virtual HRESULT TxGetParaFormat(const PARAFORMAT **ppPF);
virtual COLORREF TxGetSysColor(int nIndex);
virtual HRESULT TxGetBackStyle(TXTBACKSTYLE *pstyle);
virtual HRESULT TxGetMaxLength(DWORD *plength);
virtual HRESULT TxGetScrollBars(DWORD *pdwScrollBar);
virtual HRESULT TxGetPasswordChar(TCHAR *pch);
virtual HRESULT TxGetAcceleratorPos(LONG *pcp);
virtual HRESULT TxGetExtent(LPSIZEL lpExtent);
virtual HRESULT OnTxCharFormatChange (const CHARFORMATW * pcf);
virtual HRESULT OnTxParaFormatChange (const PARAFORMAT * ppf);
virtual HRESULT TxGetPropertyBits(DWORD dwMask, DWORD *pdwBits);
virtual HRESULT TxNotify(DWORD iNotify, void *pv);
virtual HIMC TxImmGetContext(void);
virtual void TxImmReleaseContext(HIMC himc);
virtual HRESULT TxGetSelectionBarWidth (LONG *lSelBarWidth);
private:
CRichEditUI *m_re;
ULONG cRefs; // Reference Count
ITextServices *pserv; // pointer to Text Services object
// Properties
DWORD dwStyle; // style bits
unsigned fEnableAutoWordSel :1; // enable Word style auto word selection?
unsigned fWordWrap :1; // Whether control should word wrap
unsigned fAllowBeep :1; // Whether beep is allowed
unsigned fRich :1; // Whether control is rich text
unsigned fSaveSelection :1; // Whether to save the selection when inactive
unsigned fInplaceActive :1; // Whether control is inplace active
unsigned fTransparent :1; // Whether control is transparent
unsigned fTimer :1; // A timer is set
unsigned fCaptured :1;
LONG lSelBarWidth; // Width of the selection bar
LONG cchTextMost; // maximum text size
DWORD dwEventMask; // DoEvent mask to pass on to parent window
LONG icf;
LONG ipf;
RECT rcClient; // Client Rect for this control
SIZEL sizelExtent; // Extent array
CHARFORMAT2W cf; // Default character format
PARAFORMAT2 pf; // Default paragraph format
LONG laccelpos; // Accelerator position
WCHAR chPasswordChar; // Password character
};
// Convert Pixels on the X axis to Himetric
LONG DXtoHimetricX(LONG dx, LONG xPerInch)
{
return (LONG) MulDiv(dx, HIMETRIC_PER_INCH, xPerInch);
}
// Convert Pixels on the Y axis to Himetric
LONG DYtoHimetricY(LONG dy, LONG yPerInch)
{
return (LONG) MulDiv(dy, HIMETRIC_PER_INCH, yPerInch);
}
HRESULT InitDefaultCharFormat(CRichEditUI* re, CHARFORMAT2W* pcf, HFONT hfont)
{
memset(pcf, 0, sizeof(CHARFORMAT2W));
LOGFONT lf;
if( !hfont )
hfont = re->GetManager()->GetFont(re->GetFont());
::GetObject(hfont, sizeof(LOGFONT), &lf);
DWORD dwColor = re->GetTextColor();
pcf->cbSize = sizeof(CHARFORMAT2W);
pcf->crTextColor = RGB(GetBValue(dwColor), GetGValue(dwColor), GetRValue(dwColor));
LONG yPixPerInch = GetDeviceCaps(re->GetManager()->GetPaintDC(), LOGPIXELSY);
pcf->yHeight = -lf.lfHeight * LY_PER_INCH / yPixPerInch;
pcf->yOffset = 0;
pcf->dwEffects = 0;
pcf->dwMask = CFM_SIZE | CFM_OFFSET | CFM_FACE | CFM_CHARSET | CFM_COLOR | CFM_BOLD | CFM_ITALIC | CFM_UNDERLINE;
if(lf.lfWeight >= FW_BOLD)
pcf->dwEffects |= CFE_BOLD;
if(lf.lfItalic)
pcf->dwEffects |= CFE_ITALIC;
if(lf.lfUnderline)
pcf->dwEffects |= CFE_UNDERLINE;
pcf->bCharSet = lf.lfCharSet;
pcf->bPitchAndFamily = lf.lfPitchAndFamily;
#ifdef _UNICODE
_tcscpy(pcf->szFaceName, lf.lfFaceName);
#else
//need to thunk pcf->szFaceName to a standard char string.in this case it's easy because our thunk is also our copy
MultiByteToWideChar(CP_ACP, 0, lf.lfFaceName, LF_FACESIZE, pcf->szFaceName, LF_FACESIZE) ;
#endif
return S_OK;
}
HRESULT InitDefaultParaFormat(CRichEditUI* re, PARAFORMAT2* ppf)
{
memset(ppf, 0, sizeof(PARAFORMAT2));
ppf->cbSize = sizeof(PARAFORMAT2);
ppf->dwMask = PFM_ALL;
ppf->wAlignment = PFA_LEFT;
ppf->cTabCount = 1;
ppf->rgxTabs[0] = lDefaultTab;
return S_OK;
}
HRESULT CreateHost(CRichEditUI *re, const CREATESTRUCT *pcs, CTxtWinHost **pptec)
{
HRESULT hr = E_FAIL;
//GdiSetBatchLimit(1);
CTxtWinHost *phost = new CTxtWinHost();
if(phost)
{
if (phost->Init(re, pcs))
{
*pptec = phost;
hr = S_OK;
}
}
if (FAILED(hr))
{
delete phost;
}
return TRUE;
}
CTxtWinHost::CTxtWinHost() : m_re(NULL)
{
::ZeroMemory(&cRefs, sizeof(CTxtWinHost) - offsetof(CTxtWinHost, cRefs));
cchTextMost = cInitTextMax;
laccelpos = -1;
}
CTxtWinHost::~CTxtWinHost()
{
pserv->OnTxInPlaceDeactivate();
pserv->Release();
}
////////////////////// Create/Init/Destruct Commands ///////////////////////
BOOL CTxtWinHost::Init(CRichEditUI *re, const CREATESTRUCT *pcs)
{
IUnknown *pUnk;
HRESULT hr;
m_re = re;
// Initialize Reference count
cRefs = 1;
// Create and cache CHARFORMAT for this control
if(FAILED(InitDefaultCharFormat(re, &cf, NULL)))
goto err;
// Create and cache PARAFORMAT for this control
if(FAILED(InitDefaultParaFormat(re, &pf)))
goto err;
// edit controls created without a window are multiline by default
// so that paragraph formats can be
dwStyle = ES_MULTILINE;
// edit controls are rich by default
fRich = re->IsRich();
cchTextMost = re->GetLimitText();
if (pcs )
{
dwStyle = pcs->style;
if ( !(dwStyle & (ES_AUTOHSCROLL | WS_HSCROLL)) )
{
fWordWrap = TRUE;
}
}
if( !(dwStyle & ES_LEFT) )
{
if(dwStyle & ES_CENTER)
pf.wAlignment = PFA_CENTER;
else if(dwStyle & ES_RIGHT)
pf.wAlignment = PFA_RIGHT;
}
fInplaceActive = TRUE;
// Create Text Services component
if(FAILED(CreateTextServices(NULL, this, &pUnk)))
goto err;
hr = pUnk->QueryInterface(IID_ITextServices,(void **)&pserv);
// Whether the previous call succeeded or failed we are done
// with the private interface.
pUnk->Release();
if(FAILED(hr))
{
goto err;
}
// Set window text
if(pcs && pcs->lpszName)
{
#ifdef _UNICODE
if(FAILED(pserv->TxSetText((TCHAR *)pcs->lpszName)))
goto err;
#else
size_t iLen = _tcslen(pcs->lpszName);
LPWSTR lpText = new WCHAR[iLen + 1];
::ZeroMemory(lpText, (iLen + 1) * sizeof(WCHAR));
::MultiByteToWideChar(CP_ACP, 0, pcs->lpszName, -1, (LPWSTR)lpText, iLen) ;
if(FAILED(pserv->TxSetText((LPWSTR)lpText))) {
delete[] lpText;
goto err;
}
delete[] lpText;
#endif
}
return TRUE;
err:
return FALSE;
}
///////////////////////////////// IUnknown ////////////////////////////////
HRESULT CTxtWinHost::QueryInterface(REFIID riid, void **ppvObject)
{
HRESULT hr = E_NOINTERFACE;
*ppvObject = NULL;
if (IsEqualIID(riid, IID_IUnknown)
|| IsEqualIID(riid, IID_ITextHost))
{
AddRef();
*ppvObject = (ITextHost *) this;
hr = S_OK;
}
return hr;
}
ULONG CTxtWinHost::AddRef(void)
{
return ++cRefs;
}
ULONG CTxtWinHost::Release(void)
{
ULONG c_Refs = --cRefs;
if (c_Refs == 0)
{
delete this;
}
return c_Refs;
}
///////////////////////////////// Far East Support //////////////////////////////////////
HIMC CTxtWinHost::TxImmGetContext(void)
{
return NULL;
}
void CTxtWinHost::TxImmReleaseContext(HIMC himc)
{
//::ImmReleaseContext( hwnd, himc );
}
//////////////////////////// ITextHost Interface ////////////////////////////
HDC CTxtWinHost::TxGetDC()
{
return m_re->GetManager()->GetPaintDC();
}
int CTxtWinHost::TxReleaseDC(HDC hdc)
{
return 1;
}
BOOL CTxtWinHost::TxShowScrollBar(INT fnBar, BOOL fShow)
{
CScrollBarUI* pVerticalScrollBar = m_re->GetVerticalScrollBar();
CScrollBarUI* pHorizontalScrollBar = m_re->GetHorizontalScrollBar();
if( fnBar == SB_VERT && pVerticalScrollBar ) {
pVerticalScrollBar->SetVisible(fShow == TRUE);
}
else if( fnBar == SB_HORZ && pHorizontalScrollBar ) {
pHorizontalScrollBar->SetVisible(fShow == TRUE);
}
else if( fnBar == SB_BOTH ) {
if( pVerticalScrollBar ) pVerticalScrollBar->SetVisible(fShow == TRUE);
if( pHorizontalScrollBar ) pHorizontalScrollBar->SetVisible(fShow == TRUE);
}
return TRUE;
}
BOOL CTxtWinHost::TxEnableScrollBar (INT fuSBFlags, INT fuArrowflags)
{
if( fuSBFlags == SB_VERT ) {
m_re->EnableScrollBar(true, m_re->GetHorizontalScrollBar() != NULL);
m_re->GetVerticalScrollBar()->SetVisible(fuArrowflags != ESB_DISABLE_BOTH);
}
else if( fuSBFlags == SB_HORZ ) {
m_re->EnableScrollBar(m_re->GetVerticalScrollBar() != NULL, true);
m_re->GetHorizontalScrollBar()->SetVisible(fuArrowflags != ESB_DISABLE_BOTH);
}
else if( fuSBFlags == SB_BOTH ) {
m_re->EnableScrollBar(true, true);
m_re->GetVerticalScrollBar()->SetVisible(fuArrowflags != ESB_DISABLE_BOTH);
m_re->GetHorizontalScrollBar()->SetVisible(fuArrowflags != ESB_DISABLE_BOTH);
}
return TRUE;
}
BOOL CTxtWinHost::TxSetScrollRange(INT fnBar, LONG nMinPos, INT nMaxPos, BOOL fRedraw)
{
CScrollBarUI* pVerticalScrollBar = m_re->GetVerticalScrollBar();
CScrollBarUI* pHorizontalScrollBar = m_re->GetHorizontalScrollBar();
if( fnBar == SB_VERT && pVerticalScrollBar ) {
if( nMaxPos - nMinPos - rcClient.bottom + rcClient.top <= 0 ) {
pVerticalScrollBar->SetVisible(false);
}
else {
pVerticalScrollBar->SetVisible(true);
pVerticalScrollBar->SetScrollRange(nMaxPos - nMinPos - rcClient.bottom + rcClient.top);
}
}
else if( fnBar == SB_HORZ && pHorizontalScrollBar ) {
if( nMaxPos - nMinPos - rcClient.right + rcClient.left <= 0 ) {
pHorizontalScrollBar->SetVisible(false);
}
else {
pHorizontalScrollBar->SetVisible(true);
pHorizontalScrollBar->SetScrollRange(nMaxPos - nMinPos - rcClient.right + rcClient.left);
}
}
return TRUE;
}
BOOL CTxtWinHost::TxSetScrollPos (INT fnBar, INT nPos, BOOL fRedraw)
{
CScrollBarUI* pVerticalScrollBar = m_re->GetVerticalScrollBar();
CScrollBarUI* pHorizontalScrollBar = m_re->GetHorizontalScrollBar();
if( fnBar == SB_VERT && pVerticalScrollBar ) {
pVerticalScrollBar->SetScrollPos(nPos);
}
else if( fnBar == SB_HORZ && pHorizontalScrollBar ) {
pHorizontalScrollBar->SetScrollPos(nPos);
}
return TRUE;
}
void CTxtWinHost::TxInvalidateRect(LPCRECT prc, BOOL fMode)
{
if( prc == NULL ) {
m_re->GetManager()->Invalidate(rcClient);
return;
}
RECT rc = *prc;
m_re->GetManager()->Invalidate(rc);
}
void CTxtWinHost::TxViewChange(BOOL fUpdate)
{
if( m_re->OnTxViewChanged() ) m_re->Invalidate();
}
BOOL CTxtWinHost::TxCreateCaret(HBITMAP hbmp, INT xWidth, INT yHeight)
{
return ::CreateCaret(m_re->GetManager()->GetPaintWindow(), hbmp, xWidth, yHeight);
}
BOOL CTxtWinHost::TxShowCaret(BOOL fShow)
{
if(fShow)
return ::ShowCaret(m_re->GetManager()->GetPaintWindow());
else
return ::HideCaret(m_re->GetManager()->GetPaintWindow());
}
BOOL CTxtWinHost::TxSetCaretPos(INT x, INT y)
{
return ::SetCaretPos(x, y);
}
BOOL CTxtWinHost::TxSetTimer(UINT idTimer, UINT uTimeout)
{
fTimer = TRUE;
return m_re->GetManager()->SetTimer(m_re, idTimer, uTimeout) == TRUE;
}
void CTxtWinHost::TxKillTimer(UINT idTimer)
{
m_re->GetManager()->KillTimer(m_re, idTimer);
fTimer = FALSE;
}
void CTxtWinHost::TxScrollWindowEx (INT dx, INT dy, LPCRECT lprcScroll, LPCRECT lprcClip, HRGN hrgnUpdate, LPRECT lprcUpdate, UINT fuScroll)
{
return;
}
void CTxtWinHost::TxSetCapture(BOOL fCapture)
{
if (fCapture) m_re->GetManager()->SetCapture();
else m_re->GetManager()->ReleaseCapture();
fCaptured = fCapture;
}
void CTxtWinHost::TxSetFocus()
{
m_re->SetFocus();
}
void CTxtWinHost::TxSetCursor(HCURSOR hcur, BOOL fText)
{
::SetCursor(hcur);
}
BOOL CTxtWinHost::TxScreenToClient(LPPOINT lppt)
{
return ::ScreenToClient(m_re->GetManager()->GetPaintWindow(), lppt);
}
BOOL CTxtWinHost::TxClientToScreen(LPPOINT lppt)
{
return ::ClientToScreen(m_re->GetManager()->GetPaintWindow(), lppt);
}
HRESULT CTxtWinHost::TxActivate(LONG *plOldState)
{
return S_OK;
}
HRESULT CTxtWinHost::TxDeactivate(LONG lNewState)
{
return S_OK;
}
HRESULT CTxtWinHost::TxGetClientRect(LPRECT prc)
{
*prc = rcClient;
GetControlRect(prc);
return NOERROR;
}
HRESULT CTxtWinHost::TxGetViewInset(LPRECT prc)
{
prc->left = prc->right = prc->top = prc->bottom = 0;
return NOERROR;
}
HRESULT CTxtWinHost::TxGetCharFormat(const CHARFORMATW **ppCF)
{
*ppCF = &cf;
return NOERROR;
}
HRESULT CTxtWinHost::TxGetParaFormat(const PARAFORMAT **ppPF)
{
*ppPF = &pf;
return NOERROR;
}
COLORREF CTxtWinHost::TxGetSysColor(int nIndex)
{
return ::GetSysColor(nIndex);
}
HRESULT CTxtWinHost::TxGetBackStyle(TXTBACKSTYLE *pstyle)
{
*pstyle = !fTransparent ? TXTBACK_OPAQUE : TXTBACK_TRANSPARENT;
return NOERROR;
}
HRESULT CTxtWinHost::TxGetMaxLength(DWORD *pLength)
{
*pLength = cchTextMost;
return NOERROR;
}
HRESULT CTxtWinHost::TxGetScrollBars(DWORD *pdwScrollBar)
{
*pdwScrollBar = dwStyle & (WS_VSCROLL | WS_HSCROLL | ES_AUTOVSCROLL |
ES_AUTOHSCROLL | ES_DISABLENOSCROLL);
return NOERROR;
}
HRESULT CTxtWinHost::TxGetPasswordChar(TCHAR *pch)
{
#ifdef _UNICODE
*pch = chPasswordChar;
#else
::WideCharToMultiByte(CP_ACP, 0, &chPasswordChar, 1, pch, 1, NULL, NULL) ;
#endif
return NOERROR;
}
HRESULT CTxtWinHost::TxGetAcceleratorPos(LONG *pcp)
{
*pcp = laccelpos;
return S_OK;
}
HRESULT CTxtWinHost::OnTxCharFormatChange(const CHARFORMATW *pcf)
{
return S_OK;
}
HRESULT CTxtWinHost::OnTxParaFormatChange(const PARAFORMAT *ppf)
{
return S_OK;
}
HRESULT CTxtWinHost::TxGetPropertyBits(DWORD dwMask, DWORD *pdwBits)
{
DWORD dwProperties = 0;
if (fRich)
{
dwProperties = TXTBIT_RICHTEXT;
}
if (dwStyle & ES_MULTILINE)
{
dwProperties |= TXTBIT_MULTILINE;
}
if (dwStyle & ES_READONLY)
{
dwProperties |= TXTBIT_READONLY;
}
if (dwStyle & ES_PASSWORD)
{
dwProperties |= TXTBIT_USEPASSWORD;
}
if (!(dwStyle & ES_NOHIDESEL))
{
dwProperties |= TXTBIT_HIDESELECTION;
}
if (fEnableAutoWordSel)
{
dwProperties |= TXTBIT_AUTOWORDSEL;
}
if (fWordWrap)
{
dwProperties |= TXTBIT_WORDWRAP;
}
if (fAllowBeep)
{
dwProperties |= TXTBIT_ALLOWBEEP;
}
if (fSaveSelection)
{
dwProperties |= TXTBIT_SAVESELECTION;
}
*pdwBits = dwProperties & dwMask;
return NOERROR;
}
HRESULT CTxtWinHost::TxNotify(DWORD iNotify, void *pv)
{
if( iNotify == EN_REQUESTRESIZE ) {
RECT rc;
REQRESIZE *preqsz = (REQRESIZE *)pv;
GetControlRect(&rc);
rc.bottom = rc.top + preqsz->rc.bottom;
rc.right = rc.left + preqsz->rc.right;
SetClientRect(&rc);
return S_OK;
}
m_re->OnTxNotify(iNotify, pv);
return S_OK;
}
HRESULT CTxtWinHost::TxGetExtent(LPSIZEL lpExtent)
{
*lpExtent = sizelExtent;
return S_OK;
}
HRESULT CTxtWinHost::TxGetSelectionBarWidth (LONG *plSelBarWidth)
{
*plSelBarWidth = lSelBarWidth;
return S_OK;
}
void CTxtWinHost::SetWordWrap(BOOL fWordWrap)
{
fWordWrap = fWordWrap;
pserv->OnTxPropertyBitsChange(TXTBIT_WORDWRAP, fWordWrap ? TXTBIT_WORDWRAP : 0);
}
BOOL CTxtWinHost::GetReadOnly()
{
return (dwStyle & ES_READONLY) != 0;
}
void CTxtWinHost::SetReadOnly(BOOL fReadOnly)
{
if (fReadOnly)
{
dwStyle |= ES_READONLY;
}
else<|fim▁hole|> dwStyle &= ~ES_READONLY;
}
pserv->OnTxPropertyBitsChange(TXTBIT_READONLY,
fReadOnly ? TXTBIT_READONLY : 0);
}
void CTxtWinHost::SetFont(HFONT hFont)
{
if( hFont == NULL ) return;
LOGFONT lf;
::GetObject(hFont, sizeof(LOGFONT), &lf);
LONG yPixPerInch = ::GetDeviceCaps(m_re->GetManager()->GetPaintDC(), LOGPIXELSY);
cf.yHeight = -lf.lfHeight * LY_PER_INCH / yPixPerInch;
if(lf.lfWeight >= FW_BOLD)
cf.dwEffects |= CFE_BOLD;
if(lf.lfItalic)
cf.dwEffects |= CFE_ITALIC;
if(lf.lfUnderline)
cf.dwEffects |= CFE_UNDERLINE;
cf.bCharSet = lf.lfCharSet;
cf.bPitchAndFamily = lf.lfPitchAndFamily;
#ifdef _UNICODE
_tcscpy(cf.szFaceName, lf.lfFaceName);
#else
//need to thunk pcf->szFaceName to a standard char string.in this case it's easy because our thunk is also our copy
MultiByteToWideChar(CP_ACP, 0, lf.lfFaceName, LF_FACESIZE, cf.szFaceName, LF_FACESIZE) ;
#endif
pserv->OnTxPropertyBitsChange(TXTBIT_CHARFORMATCHANGE,
TXTBIT_CHARFORMATCHANGE);
}
void CTxtWinHost::SetColor(DWORD dwColor)
{
cf.crTextColor = RGB(GetBValue(dwColor), GetGValue(dwColor), GetRValue(dwColor));
pserv->OnTxPropertyBitsChange(TXTBIT_CHARFORMATCHANGE,
TXTBIT_CHARFORMATCHANGE);
}
SIZEL* CTxtWinHost::GetExtent()
{
return &sizelExtent;
}
void CTxtWinHost::SetExtent(SIZEL *psizelExtent)
{
sizelExtent = *psizelExtent;
pserv->OnTxPropertyBitsChange(TXTBIT_EXTENTCHANGE, TXTBIT_EXTENTCHANGE);
}
void CTxtWinHost::LimitText(LONG nChars)
{
cchTextMost = nChars;
if( cchTextMost <= 0 ) cchTextMost = cInitTextMax;
pserv->OnTxPropertyBitsChange(TXTBIT_MAXLENGTHCHANGE, TXTBIT_MAXLENGTHCHANGE);
}
BOOL CTxtWinHost::IsCaptured()
{
return fCaptured;
}
BOOL CTxtWinHost::GetAllowBeep()
{
return fAllowBeep;
}
void CTxtWinHost::SetAllowBeep(BOOL fAllowBeep)
{
fAllowBeep = fAllowBeep;
pserv->OnTxPropertyBitsChange(TXTBIT_ALLOWBEEP,
fAllowBeep ? TXTBIT_ALLOWBEEP : 0);
}
WORD CTxtWinHost::GetDefaultAlign()
{
return pf.wAlignment;
}
void CTxtWinHost::SetDefaultAlign(WORD wNewAlign)
{
pf.wAlignment = wNewAlign;
// Notify control of property change
pserv->OnTxPropertyBitsChange(TXTBIT_PARAFORMATCHANGE, 0);
}
BOOL CTxtWinHost::GetRichTextFlag()
{
return fRich;
}
void CTxtWinHost::SetRichTextFlag(BOOL fNew)
{
fRich = fNew;
pserv->OnTxPropertyBitsChange(TXTBIT_RICHTEXT,
fNew ? TXTBIT_RICHTEXT : 0);
}
LONG CTxtWinHost::GetDefaultLeftIndent()
{
return pf.dxOffset;
}
void CTxtWinHost::SetDefaultLeftIndent(LONG lNewIndent)
{
pf.dxOffset = lNewIndent;
pserv->OnTxPropertyBitsChange(TXTBIT_PARAFORMATCHANGE, 0);
}
void CTxtWinHost::SetClientRect(RECT *prc)
{
rcClient = *prc;
LONG xPerInch = ::GetDeviceCaps(m_re->GetManager()->GetPaintDC(), LOGPIXELSX);
LONG yPerInch = ::GetDeviceCaps(m_re->GetManager()->GetPaintDC(), LOGPIXELSY);
sizelExtent.cx = DXtoHimetricX(rcClient.right - rcClient.left, xPerInch);
sizelExtent.cy = DYtoHimetricY(rcClient.bottom - rcClient.top, yPerInch);
pserv->OnTxPropertyBitsChange(TXTBIT_VIEWINSETCHANGE, TXTBIT_VIEWINSETCHANGE);
}
BOOL CTxtWinHost::SetSaveSelection(BOOL f_SaveSelection)
{
BOOL fResult = f_SaveSelection;
fSaveSelection = f_SaveSelection;
// notify text services of property change
pserv->OnTxPropertyBitsChange(TXTBIT_SAVESELECTION,
fSaveSelection ? TXTBIT_SAVESELECTION : 0);
return fResult;
}
HRESULT CTxtWinHost::OnTxInPlaceDeactivate()
{
HRESULT hr = pserv->OnTxInPlaceDeactivate();
if (SUCCEEDED(hr))
{
fInplaceActive = FALSE;
}
return hr;
}
HRESULT CTxtWinHost::OnTxInPlaceActivate(LPCRECT prcClient)
{
fInplaceActive = TRUE;
HRESULT hr = pserv->OnTxInPlaceActivate(prcClient);
if (FAILED(hr))
{
fInplaceActive = FALSE;
}
return hr;
}
BOOL CTxtWinHost::DoSetCursor(RECT *prc, POINT *pt)
{
RECT rc = prc ? *prc : rcClient;
// Is this in our rectangle?
if (PtInRect(&rc, *pt))
{
RECT *prcClient = (!fInplaceActive || prc) ? &rc : NULL;
pserv->OnTxSetCursor(DVASPECT_CONTENT, -1, NULL, NULL, m_re->GetManager()->GetPaintDC(),
NULL, prcClient, pt->x, pt->y);
return TRUE;
}
return FALSE;
}
void CTxtWinHost::GetControlRect(LPRECT prc)
{
prc->top = rcClient.top;
prc->bottom = rcClient.bottom;
prc->left = rcClient.left;
prc->right = rcClient.right;
}
void CTxtWinHost::SetTransparent(BOOL f_Transparent)
{
fTransparent = f_Transparent;
// notify text services of property change
pserv->OnTxPropertyBitsChange(TXTBIT_BACKSTYLECHANGE, 0);
}
LONG CTxtWinHost::SetAccelPos(LONG l_accelpos)
{
LONG laccelposOld = l_accelpos;
laccelpos = l_accelpos;
// notify text services of property change
pserv->OnTxPropertyBitsChange(TXTBIT_SHOWACCELERATOR, 0);
return laccelposOld;
}
WCHAR CTxtWinHost::SetPasswordChar(WCHAR ch_PasswordChar)
{
WCHAR chOldPasswordChar = chPasswordChar;
chPasswordChar = ch_PasswordChar;
// notify text services of property change
pserv->OnTxPropertyBitsChange(TXTBIT_USEPASSWORD,
(chPasswordChar != 0) ? TXTBIT_USEPASSWORD : 0);
return chOldPasswordChar;
}
void CTxtWinHost::SetDisabled(BOOL fOn)
{
cf.dwMask |= CFM_COLOR | CFM_DISABLED;
cf.dwEffects |= CFE_AUTOCOLOR | CFE_DISABLED;
if( !fOn )
{
cf.dwEffects &= ~CFE_DISABLED;
}
pserv->OnTxPropertyBitsChange(TXTBIT_CHARFORMATCHANGE,
TXTBIT_CHARFORMATCHANGE);
}
LONG CTxtWinHost::SetSelBarWidth(LONG l_SelBarWidth)
{
LONG lOldSelBarWidth = lSelBarWidth;
lSelBarWidth = l_SelBarWidth;
if (lSelBarWidth)
{
dwStyle |= ES_SELECTIONBAR;
}
else
{
dwStyle &= (~ES_SELECTIONBAR);
}
pserv->OnTxPropertyBitsChange(TXTBIT_SELBARCHANGE, TXTBIT_SELBARCHANGE);
return lOldSelBarWidth;
}
BOOL CTxtWinHost::GetTimerState()
{
return fTimer;
}
void CTxtWinHost::SetCharFormat(CHARFORMAT2W &c)
{
cf = c;
}
void CTxtWinHost::SetParaFormat(PARAFORMAT2 &p)
{
pf = p;
}
/////////////////////////////////////////////////////////////////////////////////////
//
//
CRichEditUI::CRichEditUI() : m_pTwh(NULL), m_bVScrollBarFixing(false), m_bWantTab(true), m_bWantReturn(true),
m_bWantCtrlReturn(true), m_bRich(true), m_bReadOnly(false), m_bWordWrap(false), m_dwTextColor(0), m_iFont(-1),
m_iLimitText(cInitTextMax), m_lTwhStyle(ES_MULTILINE)
{
}
CRichEditUI::~CRichEditUI()
{
if( m_pTwh ) {
m_pTwh->Release();
m_pManager->RemoveMessageFilter(this);
}
}
LPCTSTR CRichEditUI::GetClass() const
{
return _T("RichEditUI");
}
LPVOID CRichEditUI::GetInterface(LPCTSTR pstrName)
{
if( _tcscmp(pstrName, _T("RichEdit")) == 0 ) return static_cast<CRichEditUI*>(this);
return CContainerUI::GetInterface(pstrName);
}
UINT CRichEditUI::GetControlFlags() const
{
if( !IsEnabled() ) return CControlUI::GetControlFlags();
return UIFLAG_SETCURSOR | UIFLAG_TABSTOP;
}
bool CRichEditUI::IsWantTab()
{
return m_bWantTab;
}
void CRichEditUI::SetWantTab(bool bWantTab)
{
m_bWantTab = bWantTab;
}
bool CRichEditUI::IsWantReturn()
{
return m_bWantReturn;
}
void CRichEditUI::SetWantReturn(bool bWantReturn)
{
m_bWantReturn = bWantReturn;
}
bool CRichEditUI::IsWantCtrlReturn()
{
return m_bWantCtrlReturn;
}
void CRichEditUI::SetWantCtrlReturn(bool bWantCtrlReturn)
{
m_bWantCtrlReturn = bWantCtrlReturn;
}
bool CRichEditUI::IsRich()
{
return m_bRich;
}
void CRichEditUI::SetRich(bool bRich)
{
m_bRich = bRich;
if( m_pTwh ) m_pTwh->SetRichTextFlag(bRich);
}
bool CRichEditUI::IsReadOnly()
{
return m_bReadOnly;
}
void CRichEditUI::SetReadOnly(bool bReadOnly)
{
m_bReadOnly = bReadOnly;
if( m_pTwh ) m_pTwh->SetReadOnly(bReadOnly);
}
bool CRichEditUI::GetWordWrap()
{
return m_bWordWrap;
}
void CRichEditUI::SetWordWrap(bool bWordWrap)
{
m_bWordWrap = bWordWrap;
if( m_pTwh ) m_pTwh->SetWordWrap(bWordWrap);
}
int CRichEditUI::GetFont()
{
return m_iFont;
}
void CRichEditUI::SetFont(int index)
{
m_iFont = index;
if( m_pTwh ) {
m_pTwh->SetFont(GetManager()->GetFont(m_iFont));
}
}
void CRichEditUI::SetFont(LPCTSTR pStrFontName, int nSize, bool bBold, bool bUnderline, bool bItalic)
{
if( m_pTwh ) {
LOGFONT lf = { 0 };
::GetObject(::GetStockObject(DEFAULT_GUI_FONT), sizeof(LOGFONT), &lf);
_tcscpy(lf.lfFaceName, pStrFontName);
lf.lfCharSet = DEFAULT_CHARSET;
lf.lfHeight = -nSize;
if( bBold ) lf.lfWeight += FW_BOLD;
if( bUnderline ) lf.lfUnderline = TRUE;
if( bItalic ) lf.lfItalic = TRUE;
HFONT hFont = ::CreateFontIndirect(&lf);
if( hFont == NULL ) return;
m_pTwh->SetFont(hFont);
::DeleteObject(hFont);
}
}
LONG CRichEditUI::GetWinStyle()
{
return m_lTwhStyle;
}
void CRichEditUI::SetWinStyle(LONG lStyle)
{
m_lTwhStyle = lStyle;
}
DWORD CRichEditUI::GetTextColor()
{
return m_dwTextColor;
}
void CRichEditUI::SetTextColor(DWORD dwTextColor)
{
m_dwTextColor = dwTextColor;
if( m_pTwh ) {
m_pTwh->SetColor(dwTextColor);
}
}
int CRichEditUI::GetLimitText()
{
return m_iLimitText;
}
void CRichEditUI::SetLimitText(int iChars)
{
m_iLimitText = iChars;
if( m_pTwh ) {
m_pTwh->LimitText(m_iLimitText);
}
}
long CRichEditUI::GetTextLength(DWORD dwFlags) const
{
GETTEXTLENGTHEX textLenEx;
textLenEx.flags = dwFlags;
#ifdef _UNICODE
textLenEx.codepage = 1200;
#else
textLenEx.codepage = CP_ACP;
#endif
LRESULT lResult;
TxSendMessage(EM_GETTEXTLENGTHEX, (WPARAM)&textLenEx, 0, &lResult);
return (long)lResult;
}
CStdString CRichEditUI::GetText() const
{
long lLen = GetTextLength(GTL_DEFAULT);
LPTSTR lpText = NULL;
GETTEXTEX gt;
gt.flags = GT_DEFAULT;
#ifdef _UNICODE
gt.cb = sizeof(TCHAR) * (lLen + 1) ;
gt.codepage = 1200;
lpText = new TCHAR[lLen + 1];
::ZeroMemory(lpText, (lLen + 1) * sizeof(TCHAR));
#else
gt.cb = sizeof(TCHAR) * lLen * 2 + 1;
gt.codepage = CP_ACP;
lpText = new TCHAR[lLen * 2 + 1];
::ZeroMemory(lpText, (lLen * 2 + 1) * sizeof(TCHAR));
#endif
gt.lpDefaultChar = NULL;
gt.lpUsedDefChar = NULL;
TxSendMessage(EM_GETTEXTEX, (WPARAM)>, (LPARAM)lpText, 0);
CStdString sText(lpText);
delete[] lpText;
return sText;
}
void CRichEditUI::SetText(LPCTSTR pstrText)
{
m_sText = pstrText;
if( !m_pTwh ) return;
SetSel(0, -1);
ReplaceSel(pstrText, FALSE);
}
bool CRichEditUI::GetModify() const
{
if( !m_pTwh ) return false;
LRESULT lResult;
TxSendMessage(EM_GETMODIFY, 0, 0, &lResult);
return (BOOL)lResult == TRUE;
}
void CRichEditUI::SetModify(bool bModified) const
{
TxSendMessage(EM_SETMODIFY, bModified, 0, 0);
}
void CRichEditUI::GetSel(CHARRANGE &cr) const
{
TxSendMessage(EM_EXGETSEL, 0, (LPARAM)&cr, 0);
}
void CRichEditUI::GetSel(long& nStartChar, long& nEndChar) const
{
CHARRANGE cr;
TxSendMessage(EM_EXGETSEL, 0, (LPARAM)&cr, 0);
nStartChar = cr.cpMin;
nEndChar = cr.cpMax;
}
int CRichEditUI::SetSel(CHARRANGE &cr)
{
LRESULT lResult;
TxSendMessage(EM_EXSETSEL, 0, (LPARAM)&cr, &lResult);
return (int)lResult;
}
int CRichEditUI::SetSel(long nStartChar, long nEndChar)
{
CHARRANGE cr;
cr.cpMin = nStartChar;
cr.cpMax = nEndChar;
LRESULT lResult;
TxSendMessage(EM_EXSETSEL, 0, (LPARAM)&cr, &lResult);
return (int)lResult;
}
void CRichEditUI::ReplaceSel(LPCTSTR lpszNewText, bool bCanUndo)
{
#ifdef _UNICODE
TxSendMessage(EM_REPLACESEL, (WPARAM) bCanUndo, (LPARAM)lpszNewText, 0);
#else
int iLen = _tcslen(lpszNewText);
LPWSTR lpText = new WCHAR[iLen + 1];
::ZeroMemory(lpText, (iLen + 1) * sizeof(WCHAR));
::MultiByteToWideChar(CP_ACP, 0, lpszNewText, -1, (LPWSTR)lpText, iLen) ;
TxSendMessage(EM_REPLACESEL, (WPARAM) bCanUndo, (LPARAM)lpText, 0);
delete[] lpText;
#endif
}
void CRichEditUI::ReplaceSelW(LPCWSTR lpszNewText, bool bCanUndo)
{
TxSendMessage(EM_REPLACESEL, (WPARAM) bCanUndo, (LPARAM)lpszNewText, 0);
}
CStdString CRichEditUI::GetSelText() const
{
if( !m_pTwh ) return CStdString();
CHARRANGE cr;
cr.cpMin = cr.cpMax = 0;
TxSendMessage(EM_EXGETSEL, 0, (LPARAM)&cr, 0);
LPWSTR lpText = NULL;
lpText = new WCHAR[cr.cpMax - cr.cpMin + 1];
::ZeroMemory(lpText, (cr.cpMax - cr.cpMin + 1) * sizeof(WCHAR));
TxSendMessage(EM_GETSELTEXT, 0, (LPARAM)lpText, 0);
CStdString sText;
sText = (LPCWSTR)lpText;
delete[] lpText;
return sText;
}
int CRichEditUI::SetSelAll()
{
return SetSel(0, -1);
}
int CRichEditUI::SetSelNone()
{
return SetSel(-1, 0);
}
bool CRichEditUI::GetZoom(int& nNum, int& nDen) const
{
LRESULT lResult;
TxSendMessage(EM_GETZOOM, (WPARAM)&nNum, (LPARAM)&nDen, &lResult);
return (BOOL)lResult == TRUE;
}
bool CRichEditUI::SetZoom(int nNum, int nDen)
{
if (nNum < 0 || nNum > 64) return false;
if (nDen < 0 || nDen > 64) return false;
LRESULT lResult;
TxSendMessage(EM_SETZOOM, nNum, nDen, &lResult);
return (BOOL)lResult == TRUE;
}
bool CRichEditUI::SetZoomOff()
{
LRESULT lResult;
TxSendMessage(EM_SETZOOM, 0, 0, &lResult);
return (BOOL)lResult == TRUE;
}
WORD CRichEditUI::GetSelectionType() const
{
LRESULT lResult;
TxSendMessage(EM_SELECTIONTYPE, 0, 0, &lResult);
return (WORD)lResult;
}
bool CRichEditUI::GetAutoURLDetect() const
{
LRESULT lResult;
TxSendMessage(EM_GETAUTOURLDETECT, 0, 0, &lResult);
return (BOOL)lResult == TRUE;
}
bool CRichEditUI::SetAutoURLDetect(bool bAutoDetect)
{
LRESULT lResult;
TxSendMessage(EM_AUTOURLDETECT, bAutoDetect, 0, &lResult);
return (BOOL)lResult == FALSE;
}
DWORD CRichEditUI::GetEventMask() const
{
LRESULT lResult;
TxSendMessage(EM_GETEVENTMASK, 0, 0, &lResult);
return (DWORD)lResult;
}
DWORD CRichEditUI::SetEventMask(DWORD dwEventMask)
{
LRESULT lResult;
TxSendMessage(EM_SETEVENTMASK, 0, dwEventMask, &lResult);
return (DWORD)lResult;
}
CStdString CRichEditUI::GetTextRange(long nStartChar, long nEndChar) const
{
TEXTRANGEW tr = { 0 };
tr.chrg.cpMin = nStartChar;
tr.chrg.cpMax = nEndChar;
LPWSTR lpText = NULL;
lpText = new WCHAR[nEndChar - nStartChar + 1];
::ZeroMemory(lpText, (nEndChar - nStartChar + 1) * sizeof(WCHAR));
tr.lpstrText = lpText;
TxSendMessage(EM_GETTEXTRANGE, 0, (LPARAM)&tr, 0);
CStdString sText;
sText = (LPCWSTR)lpText;
delete[] lpText;
return sText;
}
void CRichEditUI::HideSelection(bool bHide, bool bChangeStyle)
{
TxSendMessage(EM_HIDESELECTION, bHide, bChangeStyle, 0);
}
void CRichEditUI::ScrollCaret()
{
TxSendMessage(EM_SCROLLCARET, 0, 0, 0);
}
int CRichEditUI::InsertText(long nInsertAfterChar, LPCTSTR lpstrText, bool bCanUndo)
{
int nRet = SetSel(nInsertAfterChar, nInsertAfterChar);
ReplaceSel(lpstrText, bCanUndo);
return nRet;
}
int CRichEditUI::AppendText(LPCTSTR lpstrText, bool bCanUndo)
{
int nRet = SetSel(-1, -1);
ReplaceSel(lpstrText, bCanUndo);
return nRet;
}
DWORD CRichEditUI::GetDefaultCharFormat(CHARFORMAT2 &cf) const
{
cf.cbSize = sizeof(CHARFORMAT2);
LRESULT lResult;
TxSendMessage(EM_GETCHARFORMAT, 0, (LPARAM)&cf, &lResult);
return (DWORD)lResult;
}
bool CRichEditUI::SetDefaultCharFormat(CHARFORMAT2 &cf)
{
if( !m_pTwh ) return false;
cf.cbSize = sizeof(CHARFORMAT2);
LRESULT lResult;
TxSendMessage(EM_SETCHARFORMAT, 0, (LPARAM)&cf, &lResult);
if( (BOOL)lResult == TRUE ) {
CHARFORMAT2W cfw;
cfw.cbSize = sizeof(CHARFORMAT2W);
TxSendMessage(EM_GETCHARFORMAT, 1, (LPARAM)&cfw, 0);
m_pTwh->SetCharFormat(cfw);
return true;
}
return false;
}
DWORD CRichEditUI::GetSelectionCharFormat(CHARFORMAT2 &cf) const
{
cf.cbSize = sizeof(CHARFORMAT2);
LRESULT lResult;
TxSendMessage(EM_GETCHARFORMAT, 1, (LPARAM)&cf, &lResult);
return (DWORD)lResult;
}
bool CRichEditUI::SetSelectionCharFormat(CHARFORMAT2 &cf)
{
if( !m_pTwh ) return false;
cf.cbSize = sizeof(CHARFORMAT2);
LRESULT lResult;
TxSendMessage(EM_SETCHARFORMAT, SCF_SELECTION, (LPARAM)&cf, &lResult);
return (BOOL)lResult == TRUE;
}
bool CRichEditUI::SetWordCharFormat(CHARFORMAT2 &cf)
{
if( !m_pTwh ) return false;
cf.cbSize = sizeof(CHARFORMAT2);
LRESULT lResult;
TxSendMessage(EM_SETCHARFORMAT, SCF_SELECTION|SCF_WORD, (LPARAM)&cf, &lResult);
return (BOOL)lResult == TRUE;
}
DWORD CRichEditUI::GetParaFormat(PARAFORMAT2 &pf) const
{
pf.cbSize = sizeof(PARAFORMAT2);
LRESULT lResult;
TxSendMessage(EM_GETPARAFORMAT, 0, (LPARAM)&pf, &lResult);
return (DWORD)lResult;
}
bool CRichEditUI::SetParaFormat(PARAFORMAT2 &pf)
{
if( !m_pTwh ) return false;
pf.cbSize = sizeof(PARAFORMAT2);
LRESULT lResult;
TxSendMessage(EM_SETPARAFORMAT, 0, (LPARAM)&pf, &lResult);
if( (BOOL)lResult == TRUE ) {
m_pTwh->SetParaFormat(pf);
return true;
}
return false;
}
bool CRichEditUI::Redo()
{
if( !m_pTwh ) return false;
LRESULT lResult;
TxSendMessage(EM_REDO, 0, 0, &lResult);
return (BOOL)lResult == TRUE;
}
bool CRichEditUI::Undo()
{
if( !m_pTwh ) return false;
LRESULT lResult;
TxSendMessage(EM_UNDO, 0, 0, &lResult);
return (BOOL)lResult == TRUE;
}
void CRichEditUI::Clear()
{
TxSendMessage(WM_CLEAR, 0, 0, 0);
}
void CRichEditUI::Copy()
{
TxSendMessage(WM_COPY, 0, 0, 0);
}
void CRichEditUI::Cut()
{
TxSendMessage(WM_CUT, 0, 0, 0);
}
void CRichEditUI::Paste()
{
TxSendMessage(WM_PASTE, 0, 0, 0);
}
int CRichEditUI::GetLineCount() const
{
if( !m_pTwh ) return 0;
LRESULT lResult;
TxSendMessage(EM_GETLINECOUNT, 0, 0, &lResult);
return (int)lResult;
}
CStdString CRichEditUI::GetLine(int nIndex, int nMaxLength) const
{
LPWSTR lpText = NULL;
lpText = new WCHAR[nMaxLength + 1];
::ZeroMemory(lpText, (nMaxLength + 1) * sizeof(WCHAR));
*(LPWORD)lpText = (WORD)nMaxLength;
TxSendMessage(EM_GETLINE, nIndex, (LPARAM)lpText, 0);
CStdString sText;
sText = (LPCWSTR)lpText;
delete[] lpText;
return sText;
}
int CRichEditUI::LineIndex(int nLine) const
{
LRESULT lResult;
TxSendMessage(EM_LINEINDEX, nLine, 0, &lResult);
return (int)lResult;
}
int CRichEditUI::LineLength(int nLine) const
{
LRESULT lResult;
TxSendMessage(EM_LINELENGTH, nLine, 0, &lResult);
return (int)lResult;
}
bool CRichEditUI::LineScroll(int nLines, int nChars)
{
LRESULT lResult;
TxSendMessage(EM_LINESCROLL, nChars, nLines, &lResult);
return (BOOL)lResult == TRUE;
}
CPoint CRichEditUI::GetCharPos(long lChar) const
{
CPoint pt;
TxSendMessage(EM_POSFROMCHAR, (WPARAM)&pt, (LPARAM)lChar, 0);
return pt;
}
long CRichEditUI::LineFromChar(long nIndex) const
{
if( !m_pTwh ) return 0L;
LRESULT lResult;
TxSendMessage(EM_EXLINEFROMCHAR, 0, nIndex, &lResult);
return (long)lResult;
}
CPoint CRichEditUI::PosFromChar(UINT nChar) const
{
POINTL pt;
TxSendMessage(EM_POSFROMCHAR, (WPARAM)&pt, nChar, 0);
return CPoint(pt.x, pt.y);
}
int CRichEditUI::CharFromPos(CPoint pt) const
{
POINTL ptl = {pt.x, pt.y};
if( !m_pTwh ) return 0;
LRESULT lResult;
TxSendMessage(EM_CHARFROMPOS, 0, (LPARAM)&ptl, &lResult);
return (int)lResult;
}
void CRichEditUI::EmptyUndoBuffer()
{
TxSendMessage(EM_EMPTYUNDOBUFFER, 0, 0, 0);
}
UINT CRichEditUI::SetUndoLimit(UINT nLimit)
{
if( !m_pTwh ) return 0;
LRESULT lResult;
TxSendMessage(EM_SETUNDOLIMIT, (WPARAM) nLimit, 0, &lResult);
return (UINT)lResult;
}
long CRichEditUI::StreamIn(int nFormat, EDITSTREAM &es)
{
if( !m_pTwh ) return 0L;
LRESULT lResult;
TxSendMessage(EM_STREAMIN, nFormat, (LPARAM)&es, &lResult);
return (long)lResult;
}
long CRichEditUI::StreamOut(int nFormat, EDITSTREAM &es)
{
if( !m_pTwh ) return 0L;
LRESULT lResult;
TxSendMessage(EM_STREAMOUT, nFormat, (LPARAM)&es, &lResult);
return (long)lResult;
}
void CRichEditUI::DoInit()
{
CREATESTRUCT cs;
cs.style = m_lTwhStyle;
cs.x = 0;
cs.y = 0;
cs.cy = 0;
cs.cx = 0;
cs.lpszName = m_sText.GetData();
CreateHost(this, &cs, &m_pTwh);
if( m_pTwh ) {
m_pTwh->SetTransparent(TRUE);
LRESULT lResult;
m_pTwh->GetTextServices()->TxSendMessage(EM_SETLANGOPTIONS, 0, 0, &lResult);
m_pTwh->OnTxInPlaceActivate(NULL);
m_pManager->AddMessageFilter(this);
}
}
HRESULT CRichEditUI::TxSendMessage(UINT msg, WPARAM wparam, LPARAM lparam, LRESULT *plresult) const
{
if( m_pTwh ) {
if( msg == WM_KEYDOWN && TCHAR(wparam) == VK_RETURN ) {
if( !m_bWantReturn || (::GetKeyState(VK_CONTROL) < 0 && !m_bWantCtrlReturn) ) {
if( m_pManager != NULL ) m_pManager->SendNotify((CControlUI*)this, _T("return"));
return S_OK;
}
}
return m_pTwh->GetTextServices()->TxSendMessage(msg, wparam, lparam, plresult);
}
return S_FALSE;
}
IDropTarget* CRichEditUI::GetTxDropTarget()
{
IDropTarget *pdt = NULL;
if( m_pTwh->GetTextServices()->TxGetDropTarget(&pdt) == NOERROR ) return pdt;
return NULL;
}
bool CRichEditUI::OnTxViewChanged()
{
return true;
}
void CRichEditUI::OnTxNotify(DWORD iNotify, void *pv)
{
}
// ¶àÐзÇrich¸ñʽµÄricheditÓÐÒ»¸ö¹ö¶¯Ìõbug£¬ÔÚ×îºóÒ»ÐÐÊÇ¿ÕÐÐʱ£¬LineDownºÍSetScrollPosÎÞ·¨¹ö¶¯µ½×îºó
// ÒýÈëiPos¾ÍÊÇΪÁËÐÞÕýÕâ¸öbug
void CRichEditUI::SetScrollPos(SIZE szPos)
{
int cx = 0;
int cy = 0;
if( m_pVerticalScrollBar && m_pVerticalScrollBar->IsVisible() ) {
int iLastScrollPos = m_pVerticalScrollBar->GetScrollPos();
m_pVerticalScrollBar->SetScrollPos(szPos.cy);
cy = m_pVerticalScrollBar->GetScrollPos() - iLastScrollPos;
}
if( m_pHorizontalScrollBar && m_pHorizontalScrollBar->IsVisible() ) {
int iLastScrollPos = m_pHorizontalScrollBar->GetScrollPos();
m_pHorizontalScrollBar->SetScrollPos(szPos.cx);
cx = m_pHorizontalScrollBar->GetScrollPos() - iLastScrollPos;
}
if( cy != 0 ) {
int iPos = 0;
if( m_pTwh && !m_bRich && m_pVerticalScrollBar && m_pVerticalScrollBar->IsVisible() )
iPos = m_pVerticalScrollBar->GetScrollPos();
WPARAM wParam = MAKEWPARAM(SB_THUMBPOSITION, m_pVerticalScrollBar->GetScrollPos());
TxSendMessage(WM_VSCROLL, wParam, 0L, 0);
if( m_pTwh && !m_bRich && m_pVerticalScrollBar && m_pVerticalScrollBar->IsVisible() ) {
if( cy > 0 && m_pVerticalScrollBar->GetScrollPos() <= iPos )
m_pVerticalScrollBar->SetScrollPos(iPos);
}
}
if( cx != 0 ) {
WPARAM wParam = MAKEWPARAM(SB_THUMBPOSITION, m_pHorizontalScrollBar->GetScrollPos());
TxSendMessage(WM_HSCROLL, wParam, 0L, 0);
}
}
void CRichEditUI::LineUp()
{
TxSendMessage(WM_VSCROLL, SB_LINEUP, 0L, 0);
}
void CRichEditUI::LineDown()
{
int iPos = 0;
if( m_pTwh && !m_bRich && m_pVerticalScrollBar && m_pVerticalScrollBar->IsVisible() )
iPos = m_pVerticalScrollBar->GetScrollPos();
TxSendMessage(WM_VSCROLL, SB_LINEDOWN, 0L, 0);
if( m_pTwh && !m_bRich && m_pVerticalScrollBar && m_pVerticalScrollBar->IsVisible() ) {
if( m_pVerticalScrollBar->GetScrollPos() <= iPos )
m_pVerticalScrollBar->SetScrollPos(m_pVerticalScrollBar->GetScrollRange());
}
}
void CRichEditUI::PageUp()
{
TxSendMessage(WM_VSCROLL, SB_PAGEUP, 0L, 0);
}
void CRichEditUI::PageDown()
{
TxSendMessage(WM_VSCROLL, SB_PAGEDOWN, 0L, 0);
}
void CRichEditUI::HomeUp()
{
TxSendMessage(WM_VSCROLL, SB_TOP, 0L, 0);
}
void CRichEditUI::EndDown()
{
TxSendMessage(WM_VSCROLL, SB_BOTTOM, 0L, 0);
}
void CRichEditUI::LineLeft()
{
TxSendMessage(WM_HSCROLL, SB_LINELEFT, 0L, 0);
}
void CRichEditUI::LineRight()
{
TxSendMessage(WM_HSCROLL, SB_LINERIGHT, 0L, 0);
}
void CRichEditUI::PageLeft()
{
TxSendMessage(WM_HSCROLL, SB_PAGELEFT, 0L, 0);
}
void CRichEditUI::PageRight()
{
TxSendMessage(WM_HSCROLL, SB_PAGERIGHT, 0L, 0);
}
void CRichEditUI::HomeLeft()
{
TxSendMessage(WM_HSCROLL, SB_LEFT, 0L, 0);
}
void CRichEditUI::EndRight()
{
TxSendMessage(WM_HSCROLL, SB_RIGHT, 0L, 0);
}
void CRichEditUI::DoEvent(TEventUI& event)
{
if( !IsMouseEnabled() && event.Type > UIEVENT__MOUSEBEGIN && event.Type < UIEVENT__MOUSEEND ) {
if( m_pParent != NULL ) m_pParent->DoEvent(event);
else CControlUI::DoEvent(event);
return;
}
if( event.Type == UIEVENT_SETCURSOR && IsEnabled() )
{
if( m_pTwh && m_pTwh->DoSetCursor(NULL, &event.ptMouse) ) {
return;
}
}
if( event.Type == UIEVENT_SETFOCUS ) {
if( m_pTwh ) {
m_pTwh->OnTxInPlaceActivate(NULL);
m_pTwh->GetTextServices()->TxSendMessage(WM_SETFOCUS, 0, 0, 0);
}
}
if( event.Type == UIEVENT_KILLFOCUS ) {
if( m_pTwh ) {
m_pTwh->OnTxInPlaceActivate(NULL);
m_pTwh->GetTextServices()->TxSendMessage(WM_KILLFOCUS, 0, 0, 0);
}
}
if( event.Type == UIEVENT_TIMER ) {
if( m_pTwh ) {
m_pTwh->GetTextServices()->TxSendMessage(WM_TIMER, event.wParam, event.lParam, 0);
}
}
if( event.Type == UIEVENT_SCROLLWHEEL ) {
if( (event.wKeyState & MK_CONTROL) != 0 ) {
return;
}
}
if( event.Type == UIEVENT_BUTTONDOWN || event.Type == UIEVENT_DBLCLICK )
{
return;
}
if( event.Type == UIEVENT_MOUSEMOVE )
{
return;
}
if( event.Type == UIEVENT_BUTTONUP )
{
return;
}
if( event.Type == UIEVENT_MOUSEENTER )
{
return;
}
if( event.Type == UIEVENT_MOUSELEAVE )
{
return;
}
if( event.Type > UIEVENT__KEYBEGIN && event.Type < UIEVENT__KEYEND )
{
return;
}
CContainerUI::DoEvent(event);
}
SIZE CRichEditUI::EstimateSize(SIZE szAvailable)
{
//return CSize(m_rcItem); // ÕâÖÖ·½Ê½ÔÚµÚÒ»´ÎÉèÖôóС֮ºó¾Í´óС²»±äÁË
return CContainerUI::EstimateSize(szAvailable);
}
void CRichEditUI::SetPos(RECT rc)
{
CControlUI::SetPos(rc);
rc = m_rcItem;
rc.left += m_rcInset.left;
rc.top += m_rcInset.top;
rc.right -= m_rcInset.right;
rc.bottom -= m_rcInset.bottom;
bool bVScrollBarVisiable = false;
if( m_pVerticalScrollBar && m_pVerticalScrollBar->IsVisible() ) {
bVScrollBarVisiable = true;
rc.right -= m_pVerticalScrollBar->GetFixedWidth();
}
if( m_pHorizontalScrollBar && m_pHorizontalScrollBar->IsVisible() ) {
rc.bottom -= m_pHorizontalScrollBar->GetFixedHeight();
}
if( m_pTwh ) {
m_pTwh->SetClientRect(&rc);
if( bVScrollBarVisiable && (!m_pVerticalScrollBar->IsVisible() || m_bVScrollBarFixing) ) {
LONG lWidth = rc.right - rc.left + m_pVerticalScrollBar->GetFixedWidth();
LONG lHeight = 0;
SIZEL szExtent = { -1, -1 };
m_pTwh->GetTextServices()->TxGetNaturalSize(
DVASPECT_CONTENT,
GetManager()->GetPaintDC(),
NULL,
NULL,
TXTNS_FITTOCONTENT,
&szExtent,
&lWidth,
&lHeight);
if( lHeight > rc.bottom - rc.top ) {
m_pVerticalScrollBar->SetVisible(true);
m_pVerticalScrollBar->SetScrollPos(0);
m_bVScrollBarFixing = true;
}
else {
if( m_bVScrollBarFixing ) {
m_pVerticalScrollBar->SetVisible(false);
m_bVScrollBarFixing = false;
}
}
}
}
if( m_pVerticalScrollBar != NULL && m_pVerticalScrollBar->IsVisible() ) {
RECT rcScrollBarPos = { rc.right, rc.top, rc.right + m_pVerticalScrollBar->GetFixedWidth(), rc.bottom};
m_pVerticalScrollBar->SetPos(rcScrollBarPos);
}
if( m_pHorizontalScrollBar != NULL && m_pHorizontalScrollBar->IsVisible() ) {
RECT rcScrollBarPos = { rc.left, rc.bottom, rc.right, rc.bottom + m_pHorizontalScrollBar->GetFixedHeight()};
m_pHorizontalScrollBar->SetPos(rcScrollBarPos);
}
for( int it = 0; it < m_items.GetSize(); it++ ) {
CControlUI* pControl = static_cast<CControlUI*>(m_items[it]);
if( !pControl->IsVisible() ) continue;
if( pControl->IsFloat() ) {
SetFloatPos(it);
}
else {
pControl->SetPos(rc); // ËùÓзÇfloat×ӿؼþ·Å´óµ½Õû¸ö¿Í»§Çø
}
}
}
void CRichEditUI::DoPaint(HDC hDC, const RECT& rcPaint)
{
RECT rcTemp = { 0 };
if( !::IntersectRect(&rcTemp, &rcPaint, &m_rcItem) ) return;
CRenderClip clip;
CRenderClip::GenerateClip(hDC, rcTemp, clip);
CControlUI::DoPaint(hDC, rcPaint);
if( m_pTwh ) {
RECT rc;
m_pTwh->GetControlRect(&rc);
// Remember wparam is actually the hdc and lparam is the update
// rect because this message has been preprocessed by the window.
m_pTwh->GetTextServices()->TxDraw(
DVASPECT_CONTENT, // Draw Aspect
/*-1*/0, // Lindex
NULL, // Info for drawing optimazation
NULL, // target device information
hDC, // Draw device HDC
NULL, // Target device HDC
(RECTL*)&rc, // Bounding client rectangle
NULL, // Clipping rectangle for metafiles
(RECT*)&rcPaint, // Update rectangle
NULL, // Call back function
NULL, // Call back parameter
0); // What view of the object
if( m_bVScrollBarFixing ) {
LONG lWidth = rc.right - rc.left + m_pVerticalScrollBar->GetFixedWidth();
LONG lHeight = 0;
SIZEL szExtent = { -1, -1 };
m_pTwh->GetTextServices()->TxGetNaturalSize(
DVASPECT_CONTENT,
GetManager()->GetPaintDC(),
NULL,
NULL,
TXTNS_FITTOCONTENT,
&szExtent,
&lWidth,
&lHeight);
if( lHeight <= rc.bottom - rc.top ) {
NeedUpdate();
}
}
}
if( m_items.GetSize() > 0 ) {
RECT rc = m_rcItem;
rc.left += m_rcInset.left;
rc.top += m_rcInset.top;
rc.right -= m_rcInset.right;
rc.bottom -= m_rcInset.bottom;
if( m_pVerticalScrollBar && m_pVerticalScrollBar->IsVisible() ) rc.right -= m_pVerticalScrollBar->GetFixedWidth();
if( m_pHorizontalScrollBar && m_pHorizontalScrollBar->IsVisible() ) rc.bottom -= m_pHorizontalScrollBar->GetFixedHeight();
if( !::IntersectRect(&rcTemp, &rcPaint, &rc) ) {
for( int it = 0; it < m_items.GetSize(); it++ ) {
CControlUI* pControl = static_cast<CControlUI*>(m_items[it]);
if( !pControl->IsVisible() ) continue;
if( !::IntersectRect(&rcTemp, &rcPaint, &pControl->GetPos()) ) continue;
if( pControl ->IsFloat() ) {
if( !::IntersectRect(&rcTemp, &m_rcItem, &pControl->GetPos()) ) continue;
pControl->DoPaint(hDC, rcPaint);
}
}
}
else {
CRenderClip childClip;
CRenderClip::GenerateClip(hDC, rcTemp, childClip);
for( int it = 0; it < m_items.GetSize(); it++ ) {
CControlUI* pControl = static_cast<CControlUI*>(m_items[it]);
if( !pControl->IsVisible() ) continue;
if( !::IntersectRect(&rcTemp, &rcPaint, &pControl->GetPos()) ) continue;
if( pControl ->IsFloat() ) {
if( !::IntersectRect(&rcTemp, &m_rcItem, &pControl->GetPos()) ) continue;
CRenderClip::UseOldClipBegin(hDC, childClip);
pControl->DoPaint(hDC, rcPaint);
CRenderClip::UseOldClipEnd(hDC, childClip);
}
else {
if( !::IntersectRect(&rcTemp, &rc, &pControl->GetPos()) ) continue;
pControl->DoPaint(hDC, rcPaint);
}
}
}
}
if( m_pVerticalScrollBar != NULL && m_pVerticalScrollBar->IsVisible() ) {
if( ::IntersectRect(&rcTemp, &rcPaint, &m_pVerticalScrollBar->GetPos()) ) {
m_pVerticalScrollBar->DoPaint(hDC, rcPaint);
}
}
if( m_pHorizontalScrollBar != NULL && m_pHorizontalScrollBar->IsVisible() ) {
if( ::IntersectRect(&rcTemp, &rcPaint, &m_pHorizontalScrollBar->GetPos()) ) {
m_pHorizontalScrollBar->DoPaint(hDC, rcPaint);
}
}
}
void CRichEditUI::SetAttribute(LPCTSTR pstrName, LPCTSTR pstrValue)
{
if( _tcscmp(pstrName, _T("vscrollbar")) == 0 ) {
if( _tcscmp(pstrValue, _T("true")) == 0 ) m_lTwhStyle |= ES_DISABLENOSCROLL | WS_VSCROLL;
}
if( _tcscmp(pstrName, _T("autovscroll")) == 0 ) {
if( _tcscmp(pstrValue, _T("true")) == 0 ) m_lTwhStyle |= ES_AUTOVSCROLL;
}
else if( _tcscmp(pstrName, _T("hscrollbar")) == 0 ) {
if( _tcscmp(pstrValue, _T("true")) == 0 ) m_lTwhStyle |= ES_DISABLENOSCROLL | WS_HSCROLL;
}
if( _tcscmp(pstrName, _T("autohscroll")) == 0 ) {
if( _tcscmp(pstrValue, _T("true")) == 0 ) m_lTwhStyle |= ES_AUTOHSCROLL;
}
else if( _tcscmp(pstrName, _T("wanttab")) == 0 ) {
SetWantTab(_tcscmp(pstrValue, _T("true")) == 0);
}
else if( _tcscmp(pstrName, _T("wantreturn")) == 0 ) {
SetWantReturn(_tcscmp(pstrValue, _T("true")) == 0);
}
else if( _tcscmp(pstrName, _T("wantctrlreturn")) == 0 ) {
SetWantCtrlReturn(_tcscmp(pstrValue, _T("true")) == 0);
}
else if( _tcscmp(pstrName, _T("rich")) == 0 ) {
SetRich(_tcscmp(pstrValue, _T("true")) == 0);
}
else if( _tcscmp(pstrName, _T("multiline")) == 0 ) {
if( _tcscmp(pstrValue, _T("false")) == 0 ) m_lTwhStyle &= ~ES_MULTILINE;
}
else if( _tcscmp(pstrName, _T("readonly")) == 0 ) {
if( _tcscmp(pstrValue, _T("true")) == 0 ) { m_lTwhStyle |= ES_READONLY; m_bReadOnly = true; }
}
else if( _tcscmp(pstrName, _T("password")) == 0 ) {
if( _tcscmp(pstrValue, _T("true")) == 0 ) m_lTwhStyle |= ES_PASSWORD;
}
else if( _tcscmp(pstrName, _T("align")) == 0 ) {
if( _tcsstr(pstrValue, _T("left")) != NULL ) {
m_lTwhStyle &= ~(ES_CENTER | ES_RIGHT);
m_lTwhStyle |= ES_LEFT;
}
if( _tcsstr(pstrValue, _T("center")) != NULL ) {
m_lTwhStyle &= ~(ES_LEFT | ES_RIGHT);
m_lTwhStyle |= ES_CENTER;
}
if( _tcsstr(pstrValue, _T("right")) != NULL ) {
m_lTwhStyle &= ~(ES_LEFT | ES_CENTER);
m_lTwhStyle |= ES_RIGHT;
}
}
else if( _tcscmp(pstrName, _T("font")) == 0 ) SetFont(_ttoi(pstrValue));
else if( _tcscmp(pstrName, _T("textcolor")) == 0 ) {
while( *pstrValue > _T('\0') && *pstrValue <= _T(' ') ) pstrValue = ::CharNext(pstrValue);
if( *pstrValue == _T('#')) pstrValue = ::CharNext(pstrValue);
LPTSTR pstr = NULL;
DWORD clrColor = _tcstoul(pstrValue, &pstr, 16);
SetTextColor(clrColor);
}
else CContainerUI::SetAttribute(pstrName, pstrValue);
}
LRESULT CRichEditUI::MessageHandler(UINT uMsg, WPARAM wParam, LPARAM lParam, bool& bHandled)
{
if( !IsVisible() || !IsEnabled() ) return 0;
if( !IsMouseEnabled() && uMsg >= WM_MOUSEFIRST && uMsg <= WM_MOUSELAST ) return 0;
if( uMsg == WM_MOUSEWHEEL && (LOWORD(wParam) & MK_CONTROL) == 0 ) return 0;
bool bWasHandled = true;
if( (uMsg >= WM_MOUSEFIRST && uMsg <= WM_MOUSELAST) || uMsg == WM_SETCURSOR ) {
if( !m_pTwh->IsCaptured() ) {
switch (uMsg) {
case WM_LBUTTONDOWN:
case WM_LBUTTONUP:
case WM_LBUTTONDBLCLK:
case WM_RBUTTONDOWN:
case WM_RBUTTONUP:
{
POINT pt = { GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam) };
CControlUI* pHover = GetManager()->FindControl(pt);
if(pHover != this) {
bWasHandled = false;
return 0;
}
}
break;
}
}
// Mouse message only go when captured or inside rect
DWORD dwHitResult = m_pTwh->IsCaptured() ? HITRESULT_HIT : HITRESULT_OUTSIDE;
if( dwHitResult == HITRESULT_OUTSIDE ) {
RECT rc;
m_pTwh->GetControlRect(&rc);
POINT pt = { GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam) };
if( uMsg == WM_MOUSEWHEEL ) ::ScreenToClient(GetManager()->GetPaintWindow(), &pt);
if( ::PtInRect(&rc, pt) && !GetManager()->IsCaptured() ) dwHitResult = HITRESULT_HIT;
}
if( dwHitResult != HITRESULT_HIT ) return 0;
if( uMsg == WM_SETCURSOR ) bWasHandled = false;
else if( uMsg == WM_LBUTTONDOWN || uMsg == WM_LBUTTONDBLCLK || uMsg == WM_RBUTTONDOWN ) {
SetFocus();
}
}
#ifdef _UNICODE
else if( uMsg >= WM_KEYFIRST && uMsg <= WM_KEYLAST ) {
#else
else if( (uMsg >= WM_KEYFIRST && uMsg <= WM_KEYLAST) || uMsg == WM_CHAR || uMsg == WM_IME_CHAR ) {
#endif
if( !IsFocused() ) return 0;
}
else if( uMsg == WM_CONTEXTMENU ) {
POINT pt = { GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam) };
::ScreenToClient(GetManager()->GetPaintWindow(), &pt);
CControlUI* pHover = GetManager()->FindControl(pt);
if(pHover != this) {
bWasHandled = false;
return 0;
}
}
else
{
switch( uMsg ) {
case WM_HELP:
bWasHandled = false;
break;
default:
return 0;
}
}
LRESULT lResult = 0;
HRESULT Hr = TxSendMessage(uMsg, wParam, lParam, &lResult);
if( Hr == S_OK ) bHandled = bWasHandled;
else if( (uMsg >= WM_KEYFIRST && uMsg <= WM_KEYLAST) || uMsg == WM_CHAR || uMsg == WM_IME_CHAR )
bHandled = bWasHandled;
else if( uMsg >= WM_MOUSEFIRST && uMsg <= WM_MOUSELAST ) {
if( m_pTwh->IsCaptured() ) bHandled = bWasHandled;
}
return lResult;
}
} // namespace DirectUICore<|fim▁end|> | { |
<|file_name|>namespaces.js<|end_file_name|><|fim▁begin|>var namespaces =<|fim▁hole|><|fim▁end|> | [
[ "shyft", "namespaceshyft.html", "namespaceshyft" ]
]; |
<|file_name|>iot_lcd.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import print_function
import time
import pyupm_grove as grove
import pyupm_i2clcd as lcd
import pyupm_th02 as th02
import pyupm_guvas12d as upmUV
import pyupm_grovemoisture as upmMoisture
from phant import Phant
import requests
from iot_utils import *
__author__ = 'KT Kirk'
# Initialize Jhd1313m1 at 0x3E (LCD_ADDRESS) and 0x62 (RGB_ADDRESS)
myLcd = lcd.Jhd1313m1(0, 0x3E, 0x62)
myLcd.setColor(53, 249, 39 ) # Green
myLcd.setCursor(0,0)
myLcd.write('IoT')
# Instantiate a Grove Moisture sensor on analog pin A1
moisture = upmMoisture.GroveMoisture(1)
# Create the light sensor object using AI2 pin 2
light = grove.GroveLight(2)
# Instantiate a UV sensor on analog pin A3
uv = upmUV.GUVAS12D(3);
# analog voltage, usually 3.3 or 5.0
GUVAS12D_AREF = 5.0;
SAMPLES_PER_QUERY = 1024;
# Create the temperature sensor object using AIO pin 0
i2c_th = th02.TH02()
#
p = Phant(keys["publicKey"],
'device', 'temp', 'humidity', 'light', "uv", "moisture",
private_key=keys["privateKey"])
device = open("/factory/serial_number").read().strip('\n')
while(True):
temp = i2c_th.getTemperature()
humid = i2c_th.getHumidity()
lux_val = light.value()
uv_val = uv.value(GUVAS12D_AREF, SAMPLES_PER_QUERY)
moisture_val = moisture.value()
myLcd.setCursor(1, 0)
try:
p.log(device, temp, humid, lux_val, uv_val, moisture_val)
except requests.exceptions.ConnectionError as e:
print("Connection error with data.sparkfun.com")
myLcd.setColor(255, 0, 0) # Red
myLcd.write("Error")
else:
myLcd.setColor(53, 39, 249) # Bl
myLcd.write("Sent Bytes: {}".format(p.remaining_bytes))
<|fim▁hole|> #data = p.get()
#print(data['temp'])
time.sleep(60 * 5)<|fim▁end|> | |
<|file_name|>linear_search.py<|end_file_name|><|fim▁begin|>def linear_search(lst,size,value):
i = 0
while i < size:
if lst[i] == value:
return i
i = i + 1
return -1
def main():
lst = [-31, 0, 1, 2, 2, 4, 65, 83, 99, 782]
size = len(lst)
original_list = ""
value = int(input("\nInput a value to search for: "))
print("\nOriginal Array: ")
for i in lst:<|fim▁hole|>
index = linear_search(lst,size,value)
if index == -1:
print(str(value) + " was not found in that array\n")
else:
print(str(value) + " was found at index " + str(index))
if __name__ == '__main__':
main()<|fim▁end|> | original_list += str(i) + " "
print(original_list)
print("\nLinear Search Big O Notation:\n--> Best Case: O(1)\n--> Average Case: O(n)\n--> Worst Case: O(n)\n") |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>use std::env;
use std::process::Command;
fn main() {
if std::env::var("DOCS_RS").is_ok() {
// Skip everything when building docs on docs.rs
return;
}
// On Windows Rust always links against release version of MSVC runtime, thus requires Release
// build here.
let build_type = if cfg!(all(debug_assertions, not(windows))) {
"Debug"
} else {
"Release"
};
let out_dir = env::var("OUT_DIR").unwrap();
// Force forward slashes on Windows too so that is plays well with our dumb `Makefile`
let mediasoup_out_dir = format!("{}/out", out_dir.replace('\\', "/"));
// Add C++ std lib
#[cfg(target_os = "linux")]
{
let path = Command::new(env::var("CXX").unwrap_or_else(|_| "c++".to_string()))
.arg("--print-file-name=libstdc++.a")
.output()
.expect("Failed to start")
.stdout;
println!(
"cargo:rustc-link-search=native={}",
String::from_utf8_lossy(&path)
.trim()
.strip_suffix("libstdc++.a")
.expect("Failed to strip suffix"),
);
println!("cargo:rustc-link-lib=static=stdc++");
}
#[cfg(any(
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd",
target_os = "netbsd"
))]
{
let path = Command::new(env::var("CXX").unwrap_or_else(|_| "c++".to_string()))
.arg("--print-file-name=libc++.a")
.output()
.expect("Failed to start")
.stdout;
println!(
"cargo:rustc-link-search=native={}",
String::from_utf8_lossy(&path)
.trim()
.strip_suffix("libc++.a")
.expect("Failed to strip suffix"),
);
println!("cargo:rustc-link-lib=static=c++");
}
#[cfg(target_os = "macos")]
{
let path = Command::new("xcrun")
.args(&["--show-sdk-path"])
.output()
.expect("Failed to start")
.stdout;
let libpath = format!(
"{}/usr/lib",
String::from_utf8(path)
.expect("Failed to decode path")
.trim()
);
println!("cargo:rustc-link-search={}", libpath);
println!("cargo:rustc-link-lib=dylib=c++");
println!("cargo:rustc-link-lib=dylib=c++abi");
}
#[cfg(target_os = "windows")]
{
// Nothing special is needed so far
}
// Build
if !Command::new("make")
.arg("libmediasoup-worker")
.env("MEDIASOUP_OUT_DIR", &mediasoup_out_dir)
.env("MEDIASOUP_BUILDTYPE", &build_type)
// Force forward slashes on Windows too, otherwise Meson thinks path is not absolute 🤷
.env("INSTALL_DIR", &out_dir.replace('\\', "/"))
.spawn()
.expect("Failed to start")
.wait()
.expect("Wasn't running")
.success()
{
panic!("Failed to build libmediasoup-worker")
}
#[cfg(target_os = "windows")]
{
let dot_a = format!("{}/libmediasoup-worker.a", out_dir);
let dot_lib = format!("{}/mediasoup-worker.lib", out_dir);
// Meson builds `libmediasoup-worker.a` on Windows instead of `*.lib` file under MinGW
if std::path::Path::new(&dot_a).exists() {
std::fs::copy(&dot_a, &dot_lib).unwrap_or_else(|error| {
panic!(
"Failed to copy static library from {} to {}: {}",
dot_a, dot_lib, error<|fim▁hole|> });
}
// These are required by libuv on Windows
println!("cargo:rustc-link-lib=psapi");
println!("cargo:rustc-link-lib=user32");
println!("cargo:rustc-link-lib=advapi32");
println!("cargo:rustc-link-lib=iphlpapi");
println!("cargo:rustc-link-lib=userenv");
println!("cargo:rustc-link-lib=ws2_32");
// These are required by OpenSSL on Windows
println!("cargo:rustc-link-lib=ws2_32");
println!("cargo:rustc-link-lib=gdi32");
println!("cargo:rustc-link-lib=advapi32");
println!("cargo:rustc-link-lib=crypt32");
println!("cargo:rustc-link-lib=user32");
}
if env::var("KEEP_BUILD_ARTIFACTS") != Ok("1".to_string()) {
// Clean
if !Command::new("make")
.arg("clean-all")
.env("MEDIASOUP_OUT_DIR", &mediasoup_out_dir)
.spawn()
.expect("Failed to start")
.wait()
.expect("Wasn't running")
.success()
{
panic!("Failed to clean libmediasoup-worker")
}
}
println!("cargo:rustc-link-lib=static=mediasoup-worker");
println!("cargo:rustc-link-search=native={}", out_dir);
}<|fim▁end|> | ) |
<|file_name|>CanvasSetup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
New Drawing class to create new mark and style on axes.
"""
# from copy import deepcopy, copy
from decimal import Decimal
import numpy as np
import toyplot
# from .Admixture import AdmixEdges
# for setting values from iterables
ITERABLE = (list, tuple, np.ndarray)
class GridSetup:
"""
Returns Canvas and Cartesian axes objects to fit a grid of trees.
"""
def __init__(self, nrows, ncols, width, height, layout):
# style args can include height/width, nrows, ncols, shared,...
self.nrows = nrows
self.ncols = ncols
self.width = width
self.height = height
self.layout = layout
# get .canvas and .axes
self.get_tree_dims()
self.get_canvas_and_axes()
def get_canvas_and_axes(self):
"""
Set .canvas and .axes objects
"""
self.canvas = toyplot.Canvas(
height=self.height,
width=self.width,
)
self.axes = [
self.canvas.cartesian(
grid=(self.nrows, self.ncols, i),
padding=10,
margin=25,
)
for i in range(self.nrows * self.ncols)
]
def get_tree_dims(self):
"""
get height and width if not set by user
"""
if self.ncols * self.nrows < 4:
minx = 250
miny = 250
else:
minx = 200
miny = 140
# wider than tall
if self.layout in ("d", "u"):
self.width = (
self.width if self.width
else min(750, minx * self.ncols)
)
self.height = (
self.height if self.height
else min(750, miny * self.nrows)
)
else:
self.height = (
self.height if self.height
else min(750, minx * self.nrows)
)
self.width = (
self.width if self.width
else min(750, miny * self.ncols)
)
class CanvasSetup:
"""
Returns Canvas and Cartesian axes objects
"""
def __init__(self, tree, axes, style):
# args includes axes
self.tree = tree
self.axes = axes<|fim▁hole|> self.style = style
self.canvas = None
self.external_axis = False
# get the longest name for dimension fitting
self.lname = 0
if not all([i is None for i in self.style.tip_labels]):
self.lname = max([len(str(i)) for i in self.style.tip_labels])
# ntips and shape to fit with provided args
self.get_dims_from_tree_size()
# fills canvas and axes
self.get_canvas_and_axes()
# expand the domain/extents for the text
# self.fit_tip_labels()
# ticks for tree and scalebar
self.add_axes_style()
def get_dims_from_tree_size(self):
"""
Calculate reasonable canvas height and width for tree given N tips
"""
if self.style.layout == "c":
radius = max(
[0] + [i for i in [self.style.height, self.style.width] if i])
if not radius:
radius = 400
self.style.width = self.style.height = radius
return
if self.style.layout in ("r", "l"):
# height fit by tree size
if not self.style.height:
self.style.height = max(275, min(1000, 18 * self.tree.ntips))
# width fit by name size
if not self.style.width:
self.style.width = max(250, min(500, 250 + 5 * self.lname))
else:
# height fit by name size
if not self.style.height:
self.style.height = max(250, min(500, 250 + 5 * self.lname))
# width fit by tree size
if not self.style.width:
self.style.width = max(350, min(1000, 18 * self.tree.ntips))
def get_canvas_and_axes(self):
"""
"""
if self.axes is not None:
self.canvas = None
self.external_axis = True
else:
self.canvas = toyplot.Canvas(
height=self.style.height,
width=self.style.width,
)
self.axes = self.canvas.cartesian(
padding=self.style.padding
)
def add_axes_style(self):
"""
"""
# style axes with padding and show axes
self.axes.padding = self.style.padding
if not self.external_axis:
self.axes.show = True
if not self.style.scalebar:
self.axes.show = False
# scalebar
if self.style.scalebar:
if self.style.layout in ("r", "l"):
nticks = max((3, np.floor(self.style.width / 100).astype(int)))
self.axes.y.show = False
self.axes.x.show = True
self.axes.x.ticks.show = True
# generate locations
if self.style.use_edge_lengths:
th = self.tree.treenode.height
else:
th = self.tree.treenode.get_farthest_leaf(True)[1] + 1
if self.style.layout == "r":
top = self.style.xbaseline - th
else:
top = self.style.xbaseline + th
locs = np.linspace(self.style.xbaseline, top, nticks)
# auto-formatter for axes ticks labels
zer = abs(min(0, Decimal(locs[1]).adjusted()))
fmt = "{:." + str(zer) + "f}"
self.axes.x.ticks.locator = toyplot.locator.Explicit(
locations=locs,
labels=[fmt.format(i) for i in np.abs(locs)],
)
elif self.style.layout in ("u", "d"):
nticks = max((3, np.floor(self.style.height / 100).astype(int)))
self.axes.x.show = False
self.axes.y.show = True
self.axes.y.ticks.show = True
# generate locations
if self.style.use_edge_lengths:
th = self.tree.treenode.height
else:
th = self.tree.treenode.get_farthest_leaf(True)[1] + 1
if self.style.layout == "d":
top = self.style.ybaseline + th
else:
top = self.style.ybaseline - th
locs = np.linspace(self.style.ybaseline, top, nticks)
# auto-formatter for axes ticks labels
zer = abs(min(0, Decimal(locs[1]).adjusted()))
fmt = "{:." + str(zer) + "f}"
self.axes.y.ticks.locator = toyplot.locator.Explicit(
locations=locs,
labels=[fmt.format(i) for i in np.abs(locs)],
)
# elif self.style.layout == "d":
# nticks = max((3, np.floor(self.style.height / 100).astype(int)))
# self.axes.x.show = False
# self.axes.y.show = True
# self.axes.y.ticks.show = True
# # generate locations
# locs = np.linspace(0, self.tree.treenode.height, nticks)
# # auto-formatter for axes ticks labels
# zer = abs(min(0, Decimal(locs[1]).adjusted()))
# fmt = "{:." + str(zer) + "f}"
# self.axes.y.ticks.locator = toyplot.locator.Explicit(
# locations=locs,
# labels=[fmt.format(i) for i in np.abs(locs)],
# )
# def fit_tip_labels(self):
# """
# DEPRECATED SINCE V2 since Mark now sets its own extents correctly.
# Modifies display range to ensure tip labels fit. This is a bit hackish
# still. The problem is that the 'extents' range of the rendered text
# is not totally correct. So we add a little buffer here. Should add for
# user to be able to modify this if needed. If not using edge lengths
# then need to use unit length for treeheight.
# """
# # bail on unrooted for now; TODO
# if self.style.layout == "c":
# return
# # if names
# if self.lname:
# # get ratio of names to tree in plot
# ratio = max(self.lname / 10, 0.15)
# # have tree figure make up 85% of plot
# if self.style.use_edge_lengths:
# addon = self.tree.treenode.height
# else:
# addon = self.tree.treenode.get_farthest_leaf(True)[1] + 1
# addon *= ratio
# # modify display for layout
# if self.style.layout == "r":
# self.axes.x.domain.max = (addon / 2.) + self.style.xbaseline
# elif self.style.layout == "l":
# self.axes.x.domain.min = (-addon / 2.) + self.style.xbaseline
# # self.axes.x.domain.min -= self.style.xbaseline
# elif self.style.layout == "d":
# self.axes.y.domain.min = (-addon / 2.) + self.style.ybaseline
# elif self.style.layout == "u":
# self.axes.y.domain.max = (addon / 2.) + self.style.ybaseline
# # print(addon, ratio, self.axes.x.domain.min, self.axes.x.domain.max)<|fim▁end|> | |
<|file_name|>test_awsssm.py<|end_file_name|><|fim▁begin|>import pretend
import pytest
from botocore.exceptions import ClientError
from configstore.backends.awsssm import AwsSsmBackend
def test_awsssm_init_bad_install(monkeypatch):
monkeypatch.setattr('configstore.backends.awsssm.boto3', None)
with pytest.raises(ImportError):
AwsSsmBackend()
def test_awsssm_success(monkeypatch):
response = {'Parameter': {
'Value': 'postgres://localhost/app',
}}
fake_client = pretend.stub(
get_parameter=pretend.call_recorder(lambda Name, WithDecryption: response),
)
fake_boto3 = pretend.stub(
client=pretend.call_recorder(lambda service: fake_client),
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend()
value = b.get_setting('DATABASE_URL')
assert value == 'postgres://localhost/app'
assert fake_boto3.client.calls == [pretend.call('ssm')]
assert fake_client.get_parameter.calls == [
pretend.call(Name='DATABASE_URL', WithDecryption=True),
]
<|fim▁hole|>def test_awsssm_success_with_prefix(monkeypatch):
response = {'Parameter': {
'Value': 'off',
}}
fake_client = pretend.stub(
get_parameter=pretend.call_recorder(lambda Name, WithDecryption: response),
)
fake_boto3 = pretend.stub(
client=pretend.call_recorder(lambda service: fake_client),
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend('/myapp/staging/')
value = b.get_setting('DEBUG')
assert value == 'off'
assert fake_boto3.client.calls == [pretend.call('ssm')]
assert fake_client.get_parameter.calls == [
pretend.call(Name='/myapp/staging/DEBUG', WithDecryption=True),
]
def test_awsssm_missing(monkeypatch):
error = ClientError({'Error': {'Code': 'ParameterNotFound'}}, 'get_parameter')
fake_client = pretend.stub(
get_parameter=pretend.raiser(error),
)
fake_boto3 = pretend.stub(
client=lambda service: fake_client,
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend()
value = b.get_setting('/app1/TEMPLATE_DEBUG')
assert value is None
def test_awsssm_missing_with_prefix(monkeypatch):
error = ClientError({'Error': {'Code': 'ParameterNotFound'}}, 'get_parameter')
fake_client = pretend.stub(
get_parameter=pretend.raiser(error),
)
fake_boto3 = pretend.stub(
client=lambda service: fake_client,
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend('/app1/')
value = b.get_setting('TEMPLATE_DEBUG')
assert value is None
def test_awsssm_error(monkeypatch):
error = ClientError({'Error': {'Code': 'SomethingBad'}}, 'get_parameter')
fake_client = pretend.stub(
get_parameter=pretend.raiser(error),
)
fake_boto3 = pretend.stub(
client=lambda service: fake_client,
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend('/app1/')
with pytest.raises(ClientError):
b.get_setting('TEMPLATE_DEBUG')<|fim▁end|> | |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>//! Error types and conversion functions.
use std::error::Error;
use std::fmt;
use std::sync::Arc;
use rodio::decoder::DecoderError;
use rodio::PlayError;
/// An enum containing all kinds of game framework errors.
#[derive(Debug, Clone)]
pub enum GameError {
/// An error in the filesystem layout
FilesystemError(String),
/// An error in the config file
ConfigError(String),
/// Happens when an `winit::event_loop::EventLoopProxy` attempts to
/// wake up an `winit::event_loop::EventLoop` that no longer exists.
EventLoopError(String),
/// An error trying to load a resource, such as getting an invalid image file.
ResourceLoadError(String),
/// Unable to find a resource; the `Vec` is the paths it searched for and associated errors
ResourceNotFound(String, Vec<(std::path::PathBuf, GameError)>),
/// Something went wrong in the renderer
RenderError(String),
/// Something went wrong in the audio playback
AudioError(String),
/// Something went wrong trying to set or get window properties.
WindowError(String),
/// Something went wrong trying to create a window
WindowCreationError(Arc<glutin::CreationError>),
/// Something went wrong trying to read from a file
#[allow(clippy::upper_case_acronyms)]
IOError(Arc<std::io::Error>),
/// Something went wrong trying to load/render a font
FontError(String),
/// Something went wrong applying video settings.
VideoError(String),
/// Something went wrong compiling shaders
ShaderProgramError(gfx::shade::ProgramError),
/// Something went wrong with the `gilrs` gamepad-input library.
GamepadError(String),
/// Something went wrong with the `lyon` shape-tesselation library.
LyonError(String),
/// You tried to use MSAA on canvases with GLES, which isn't supported.
CanvasMSAAError,
/// A custom error type for use by users of ggez.
/// This lets you handle custom errors that may happen during your game (such as, trying to load a malformed file for a level)
/// using the same mechanism you handle ggez's other errors.
///
/// Please include an informative message with the error.
CustomError(String),
}
impl fmt::Display for GameError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
GameError::ConfigError(ref s) => write!(f, "Config error: {}", s),
GameError::ResourceLoadError(ref s) => write!(f, "Error loading resource: {}", s),
GameError::ResourceNotFound(ref s, ref paths) => write!(
f,
"Resource not found: {}, searched in paths {:?}",
s, paths
),
GameError::WindowError(ref e) => write!(f, "Window creation error: {}", e),
GameError::CustomError(ref s) => write!(f, "Custom error: {}", s),
GameError::CanvasMSAAError => write!(f, "You tried to use MSAA on canvases with GLES, which isn't supported, as our implementation depends on a fragment shader workaround, which doesn't work with GLES 300"),
_ => write!(f, "GameError {:?}", self),
}
}
}
impl Error for GameError {
fn cause(&self) -> Option<&dyn Error> {
match *self {
GameError::WindowCreationError(ref e) => Some(&**e),
GameError::IOError(ref e) => Some(&**e),
GameError::ShaderProgramError(ref e) => Some(e),
_ => None,
}
}
}
/// A convenient result type consisting of a return type and a `GameError`
pub type GameResult<T = ()> = Result<T, GameError>;
impl From<std::io::Error> for GameError {<|fim▁hole|> fn from(e: std::io::Error) -> GameError {
GameError::IOError(Arc::new(e))
}
}
impl From<toml::de::Error> for GameError {
fn from(e: toml::de::Error) -> GameError {
let errstr = format!("TOML decode error: {}", e.to_string());
GameError::ConfigError(errstr)
}
}
impl From<toml::ser::Error> for GameError {
fn from(e: toml::ser::Error) -> GameError {
let errstr = format!("TOML error (possibly encoding?): {}", e.to_string());
GameError::ConfigError(errstr)
}
}
impl From<zip::result::ZipError> for GameError {
fn from(e: zip::result::ZipError) -> GameError {
let errstr = format!("Zip error: {}", e.to_string());
GameError::ResourceLoadError(errstr)
}
}
impl From<DecoderError> for GameError {
fn from(e: DecoderError) -> GameError {
let errstr = format!("Audio decoder error: {:?}", e);
GameError::AudioError(errstr)
}
}
impl From<PlayError> for GameError {
fn from(e: PlayError) -> GameError {
let errstr = format!("Audio playing error: {:?}", e);
GameError::AudioError(errstr)
}
}
impl From<image::ImageError> for GameError {
fn from(e: image::ImageError) -> GameError {
let errstr = format!("Image load error: {}", e.to_string());
GameError::ResourceLoadError(errstr)
}
}
impl From<gfx::PipelineStateError<std::string::String>> for GameError {
fn from(e: gfx::PipelineStateError<std::string::String>) -> GameError {
let errstr = format!(
"Error constructing pipeline!\nThis should probably not be \
happening; it probably means an error in a shader or \
something.\nError was: {:?}",
e
);
GameError::VideoError(errstr)
}
}
impl From<gfx::mapping::Error> for GameError {
fn from(e: gfx::mapping::Error) -> GameError {
let errstr = format!("Buffer mapping error: {:?}", e);
GameError::VideoError(errstr)
}
}
impl<S, D> From<gfx::CopyError<S, D>> for GameError
where
S: fmt::Debug,
D: fmt::Debug,
{
fn from(e: gfx::CopyError<S, D>) -> GameError {
let errstr = format!("Memory copy error: {:?}", e);
GameError::VideoError(errstr)
}
}
impl From<gfx::CombinedError> for GameError {
fn from(e: gfx::CombinedError) -> GameError {
let errstr = format!("Texture+view load error: {}", e.to_string());
GameError::VideoError(errstr)
}
}
impl From<gfx::texture::CreationError> for GameError {
fn from(e: gfx::texture::CreationError) -> GameError {
gfx::CombinedError::from(e).into()
}
}
impl From<gfx::ResourceViewError> for GameError {
fn from(e: gfx::ResourceViewError) -> GameError {
gfx::CombinedError::from(e).into()
}
}
impl From<gfx::TargetViewError> for GameError {
fn from(e: gfx::TargetViewError) -> GameError {
gfx::CombinedError::from(e).into()
}
}
impl<T> From<gfx::UpdateError<T>> for GameError
where
T: fmt::Debug + fmt::Display + 'static,
{
fn from(e: gfx::UpdateError<T>) -> GameError {
let errstr = format!("Buffer update error: {}", e);
GameError::VideoError(errstr)
}
}
impl From<gfx::shade::ProgramError> for GameError {
fn from(e: gfx::shade::ProgramError) -> GameError {
GameError::ShaderProgramError(e)
}
}
impl<T> From<winit::event_loop::EventLoopClosed<T>> for GameError {
fn from(_: glutin::event_loop::EventLoopClosed<T>) -> GameError {
let e = "An event loop proxy attempted to wake up an event loop that no longer exists."
.to_owned();
GameError::EventLoopError(e)
}
}
impl From<glutin::CreationError> for GameError {
fn from(s: glutin::CreationError) -> GameError {
GameError::WindowCreationError(Arc::new(s))
}
}
impl From<glutin::ContextError> for GameError {
fn from(s: glutin::ContextError) -> GameError {
GameError::RenderError(format!("OpenGL context error: {}", s))
}
}
impl From<gilrs::Error> for GameError {
fn from(s: gilrs::Error) -> GameError {
let errstr = format!("Gamepad error: {}", s);
GameError::GamepadError(errstr)
}
}
impl From<lyon::lyon_tessellation::TessellationError> for GameError {
fn from(s: lyon::lyon_tessellation::TessellationError) -> GameError {
let errstr = format!(
"Error while tesselating shape (did you give it an infinity or NaN?): {:?}",
s
);
GameError::LyonError(errstr)
}
}
impl From<lyon::lyon_tessellation::geometry_builder::GeometryBuilderError> for GameError {
fn from(s: lyon::lyon_tessellation::geometry_builder::GeometryBuilderError) -> GameError {
let errstr = format!(
"Error while building geometry (did you give it too many vertices?): {:?}",
s
);
GameError::LyonError(errstr)
}
}<|fim▁end|> | |
<|file_name|>bug-44.js<|end_file_name|><|fim▁begin|>describe('async methods', () => {
it('commits multiple valid inserts to the database', done => {
const methodUnderTest = async (unitOfWork) => {
const insert = { method: 'insert' };
await new unitOfWork.Users(getUserWithEmail('1')).save(null, insert);
await new unitOfWork.Users(getUserWithEmail('2')).save(null, insert);
await new unitOfWork.Users(getUserWithEmail('3')).save(null, insert);<|fim▁hole|> .then(() => bookshelf.Users.count())
.then(count => expect(count).to.equal('3'))
.then(() => done(), done);
})
});<|fim▁end|> | };
new SomeService(methodUnderTest)
.runMethodUnderTest() |
<|file_name|>argument.py<|end_file_name|><|fim▁begin|>__author__ = 'tahsmith'
from operator import add
import os
from cmake.context import Context
from functools import reduce
class VariableReference(object):
def __init__(self, tokens):
self.name = tokens[0]
def evaluate(self, ctx):
"""
Perform any nested interpolations and give the value of the variable, or None.
:type ctx: Context
"""
return ctx.variable_lookup(self.name.evaluate(ctx))
class EnvironmentVariableReference(object):
def __init__(self, tokens):
self.name = tokens
def evaluate(self, ctx):
"""
Perform any nested interpolations and give the value of the variable, or None.
:type ctx: Context
"""
name = self.name.evaluate(ctx)
if name in os.environ:<|fim▁hole|> else:
return
class StringFragment(object):
def __init__(self, tokens):
self.token = tokens[0]
def evaluate(self, ctx):
return self.token
class InterpolatedString(object):
def __init__(self, tokens):
self.tokens = tokens
def evaluate(self, ctx):
"""
Perform any substitutions in each token and join into one string.
:type ctx: Context
"""
return reduce(add, (token.evaluate(ctx) for token in self.tokens))
class ArgumentList(object):
def __init__(self, tokens):
self.tokens = tokens
def evaluate(self, ctx):
"""
Process the argument tokens, performing interpolations and splitting semi-colon delimited lists.
:param ctx: map of variables for performing substitutions.
:return: list of strings
"""
# Interpolate tokens.
list = (token.evaluate(ctx) for token in self.tokens)
# Split lists.
list = [item for token in list for item in token.split(';')]
return list<|fim▁end|> | return os.environ[name] |
<|file_name|>angular-ui-dashboard.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard', ['ui.bootstrap', 'ui.sortable']);
angular.module('ui.dashboard')
.directive('dashboard', ['WidgetModel', 'WidgetDefCollection', '$modal', 'DashboardState', '$log', function (WidgetModel, WidgetDefCollection, $modal, DashboardState, $log) {
return {
restrict: 'A',
templateUrl: function(element, attr) {
return attr.templateUrl ? attr.templateUrl : 'template/dashboard.html';
},
scope: true,
controller: ['$scope', '$attrs', function (scope, attrs) {
// default options
var defaults = {
stringifyStorage: true,
hideWidgetSettings: false,
hideWidgetClose: false,
settingsModalOptions: {
templateUrl: 'template/widget-settings-template.html',
controller: 'WidgetSettingsCtrl'
},
onSettingsClose: function(result, widget) { // NOTE: dashboard scope is also passed as 3rd argument
jQuery.extend(true, widget, result);
},
onSettingsDismiss: function(reason) { // NOTE: dashboard scope is also passed as 2nd argument
$log.info('widget settings were dismissed. Reason: ', reason);
}
};
// from dashboard="options"
// scope.options = scope.$eval(attrs.dashboard);
// extend default settingsModalOptions
// scope.options.settingsModalOptions = scope.options.settingsModalOptions || {};
// extend options with defaults
// angular.extend(defaults.settingsModalOptions, scope.options.settingsModalOptions);
// angular.extend(scope.options.settingsModalOptions, defaults.settingsModalOptions);
// angular.extend(defaults, scope.options);
// angular.extend(scope.options, defaults);
// from dashboard="options"
scope.options = scope.$eval(attrs.dashboard);
// Deep options
scope.options.settingsModalOptions = scope.options.settingsModalOptions || {};
_.each(['settingsModalOptions'], function(key) {
// Ensure it exists on scope.options
scope.options[key] = scope.options[key] || {};
// Set defaults
_.defaults(scope.options[key], defaults[key]);
});
// Shallow options
_.defaults(scope.options, defaults);
// jQuery.extend(true, defaults, scope.options);
// jQuery.extend(scope.options, defaults);
var sortableDefaults = {
stop: function () {
scope.saveDashboard();
},
handle: '.widget-header'
};
scope.sortableOptions = angular.extend({}, sortableDefaults, scope.options.sortableOptions || {});
}],
link: function (scope) {
// Save default widget config for reset
scope.defaultWidgets = scope.options.defaultWidgets;
//scope.widgetDefs = scope.options.widgetDefinitions;
scope.widgetDefs = new WidgetDefCollection(scope.options.widgetDefinitions);
var count = 1;
// Instantiate new instance of dashboard state
scope.dashboardState = new DashboardState(
scope.options.storage,
scope.options.storageId,
scope.options.storageHash,
scope.widgetDefs,
scope.options.stringifyStorage
);
/**
* Instantiates a new widget on the dashboard
* @param {Object} widgetToInstantiate The definition object of the widget to be instantiated
*/
scope.addWidget = function (widgetToInstantiate, doNotSave) {
var defaultWidgetDefinition = scope.widgetDefs.getByName(widgetToInstantiate.name);
if (!defaultWidgetDefinition) {
throw 'Widget ' + widgetToInstantiate.name + ' is not found.';
}
// Determine the title for the new widget
var title;
if (widgetToInstantiate.title) {
title = widgetToInstantiate.title;
} else if (defaultWidgetDefinition.title) {
title = defaultWidgetDefinition.title;
} else {
title = 'Widget ' + count++;
}
// Deep extend a new object for instantiation
widgetToInstantiate = jQuery.extend(true, {}, defaultWidgetDefinition, widgetToInstantiate);
// Instantiation
var widget = new WidgetModel(widgetToInstantiate, {
title: title
});
scope.widgets.push(widget);
if (!doNotSave) {
scope.saveDashboard();
}
};
/**
* Removes a widget instance from the dashboard
* @param {Object} widget The widget instance object (not a definition object)
*/
scope.removeWidget = function (widget) {
scope.widgets.splice(_.indexOf(scope.widgets, widget), 1);
scope.saveDashboard();
};
/**
* Opens a dialog for setting and changing widget properties
* @param {Object} widget The widget instance object
*/
scope.openWidgetSettings = function (widget) {
// Set up $modal options
var options = _.defaults(
{ scope: scope },
widget.settingsModalOptions,
scope.options.settingsModalOptions);
// Ensure widget is resolved
options.resolve = {
widget: function () {
return widget;
}
};
// Create the modal
var modalInstance = $modal.open(options);
var onClose = widget.onSettingsClose || scope.options.onSettingsClose;
var onDismiss = widget.onSettingsDismiss || scope.options.onSettingsDismiss;
// Set resolve and reject callbacks for the result promise
modalInstance.result.then(
function (result) {
// Call the close callback
onClose(result, widget, scope);
//AW Persist title change from options editor
scope.$emit('widgetChanged', widget);
},
function (reason) {
// Call the dismiss callback
onDismiss(reason, scope);
}
);
};
/**
* Remove all widget instances from dashboard
*/
scope.clear = function (doNotSave) {
scope.widgets = [];
if (doNotSave === true) {
return;
}
scope.saveDashboard();
};
/**
* Used for preventing default on click event
* @param {Object} event A click event
* @param {Object} widgetDef A widget definition object
*/
scope.addWidgetInternal = function (event, widgetDef) {
event.preventDefault();
scope.addWidget(widgetDef);
};
/**
* Uses dashboardState service to save state
*/
scope.saveDashboard = function (force) {
if (!scope.options.explicitSave) {
scope.dashboardState.save(scope.widgets);
} else {
if (!angular.isNumber(scope.options.unsavedChangeCount)) {
scope.options.unsavedChangeCount = 0;
}
if (force) {
scope.options.unsavedChangeCount = 0;
scope.dashboardState.save(scope.widgets);
} else {
++scope.options.unsavedChangeCount;
}
}
};
/**
* Wraps saveDashboard for external use.
*/
scope.externalSaveDashboard = function() {
scope.saveDashboard(true);
};
/**
* Clears current dash and instantiates widget definitions
* @param {Array} widgets Array of definition objects
*/
scope.loadWidgets = function (widgets) {
// AW dashboards are continuously saved today (no "save" button).
//scope.defaultWidgets = widgets;
scope.savedWidgetDefs = widgets;
scope.clear(true);
_.each(widgets, function (widgetDef) {
scope.addWidget(widgetDef, true);
});
};
/**
* Resets widget instances to default config
* @return {[type]} [description]
*/
scope.resetWidgetsToDefault = function () {
scope.loadWidgets(scope.defaultWidgets);
scope.saveDashboard();
};
// Set default widgets array
var savedWidgetDefs = scope.dashboardState.load();
// Success handler
function handleStateLoad(saved) {
scope.options.unsavedChangeCount = 0;
if (saved && saved.length) {
scope.loadWidgets(saved);
} else if (scope.defaultWidgets) {
scope.loadWidgets(scope.defaultWidgets);
} else {
scope.clear(true);
}
}
if (angular.isArray(savedWidgetDefs)) {
handleStateLoad(savedWidgetDefs);
} else if (savedWidgetDefs && angular.isObject(savedWidgetDefs) && angular.isFunction(savedWidgetDefs.then)) {
savedWidgetDefs.then(handleStateLoad, handleStateLoad);
} else {
handleStateLoad();
}
// expose functionality externally
// functions are appended to the provided dashboard options
scope.options.addWidget = scope.addWidget;
scope.options.loadWidgets = scope.loadWidgets;
scope.options.saveDashboard = scope.externalSaveDashboard;
// save state
scope.$on('widgetChanged', function (event) {
event.stopPropagation();
scope.saveDashboard();
});
}
};
}]);
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.directive('dashboardLayouts', ['LayoutStorage', '$timeout', '$modal',
function(LayoutStorage, $timeout, $modal) {
return {
scope: true,
templateUrl: function(element, attr) {
return attr.templateUrl ? attr.templateUrl : 'template/dashboard-layouts.html';
},
link: function(scope, element, attrs) {
scope.options = scope.$eval(attrs.dashboardLayouts);
var layoutStorage = new LayoutStorage(scope.options);
scope.layouts = layoutStorage.layouts;
scope.createNewLayout = function() {
var newLayout = {
title: 'Custom',
defaultWidgets: scope.options.defaultWidgets || []
};
layoutStorage.add(newLayout);
scope.makeLayoutActive(newLayout);
layoutStorage.save();
return newLayout;
};
scope.removeLayout = function(layout) {
layoutStorage.remove(layout);
layoutStorage.save();
};
scope.makeLayoutActive = function(layout) {
var current = layoutStorage.getActiveLayout();
if (current && current.dashboard.unsavedChangeCount) {
var modalInstance = $modal.open({
templateUrl: 'template/save-changes-modal.html',
resolve: {
layout: function() {
return layout;
}
},
controller: 'SaveChangesModalCtrl'
});
// Set resolve and reject callbacks for the result promise
modalInstance.result.then(
function() {
current.dashboard.saveDashboard();
scope._makeLayoutActive(layout);
},
function() {
scope._makeLayoutActive(layout);
}
);
} else {
scope._makeLayoutActive(layout);
}
};
scope._makeLayoutActive = function(layout) {
angular.forEach(scope.layouts, function(l) {
if (l !== layout) {
l.active = false;
} else {
l.active = true;
}
});
layoutStorage.save();
};
scope.isActive = function(layout) {
return !!layout.active;
};
scope.editTitle = function(layout) {
var input = element.find('input[data-layout="' + layout.id + '"]');
layout.editingTitle = true;
$timeout(function() {
input.focus()[0].setSelectionRange(0, 9999);
});
};
// saves whatever is in the title input as the new title
scope.saveTitleEdit = function(layout) {
layout.editingTitle = false;
layoutStorage.save();
};
scope.options.saveLayouts = function() {
layoutStorage.save(true);
};
scope.options.addWidget = function() {
var layout = layoutStorage.getActiveLayout();
if (layout) {
layout.dashboard.addWidget.apply(layout.dashboard, arguments);
}
};<|fim▁hole|> scope.options.loadWidgets = function() {
var layout = layoutStorage.getActiveLayout();
if (layout) {
layout.dashboard.loadWidgets.apply(layout.dashboard, arguments);
}
};
scope.options.saveDashboard = function() {
var layout = layoutStorage.getActiveLayout();
if (layout) {
layout.dashboard.saveDashboard.apply(layout.dashboard, arguments);
}
};
var sortableDefaults = {
stop: function() {
scope.options.saveLayouts();
},
};
scope.sortableOptions = angular.extend({}, sortableDefaults, scope.options.sortableOptions || {});
}
};
}
]);
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.directive('widget', ['$injector', function ($injector) {
return {
controller: 'DashboardWidgetCtrl',
link: function (scope) {
var widget = scope.widget;
var dataModelType = widget.dataModelType;
// set up data source
if (dataModelType) {
var DataModelConstructor; // data model constructor function
if (angular.isFunction(dataModelType)) {
DataModelConstructor = dataModelType;
} else if (angular.isString(dataModelType)) {
$injector.invoke([dataModelType, function (DataModelType) {
DataModelConstructor = DataModelType;
}]);
} else {
throw new Error('widget dataModelType should be function or string');
}
var ds;
if (widget.dataModelArgs) {
ds = new DataModelConstructor(widget.dataModelArgs);
} else {
ds = new DataModelConstructor();
}
widget.dataModel = ds;
ds.setup(widget, scope);
ds.init();
scope.$on('$destroy', _.bind(ds.destroy,ds));
}
// Compile the widget template, emit add event
scope.compileTemplate();
scope.$emit('widgetAdded', widget);
}
};
}]);
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.factory('LayoutStorage', function() {
var noopStorage = {
setItem: function() {
},
getItem: function() {
},
removeItem: function() {
}
};
function LayoutStorage(options) {
var defaults = {
storage: noopStorage,
storageHash: '',
stringifyStorage: true
};
angular.extend(defaults, options);
angular.extend(options, defaults);
this.id = options.storageId;
this.storage = options.storage;
this.storageHash = options.storageHash;
this.stringifyStorage = options.stringifyStorage;
this.widgetDefinitions = options.widgetDefinitions;
this.defaultLayouts = options.defaultLayouts;
this.widgetButtons = options.widgetButtons;
this.explicitSave = options.explicitSave;
this.defaultWidgets = options.defaultWidgets;
this.settingsModalOptions = options.settingsModalOptions;
this.onSettingsClose = options.onSettingsClose;
this.onSettingsDismiss = options.onSettingsDismiss;
this.options = options;
this.options.unsavedChangeCount = 0;
this.layouts = [];
this.states = {};
this.load();
this._ensureActiveLayout();
}
LayoutStorage.prototype = {
add: function(layouts) {
if (!angular.isArray(layouts)) {
layouts = [layouts];
}
var self = this;
angular.forEach(layouts, function(layout) {
layout.dashboard = layout.dashboard || {};
layout.dashboard.storage = self;
layout.dashboard.storageId = layout.id = self._getLayoutId.call(self,layout);
layout.dashboard.widgetDefinitions = self.widgetDefinitions;
layout.dashboard.stringifyStorage = false;
layout.dashboard.defaultWidgets = layout.defaultWidgets || self.defaultWidgets;
layout.dashboard.widgetButtons = self.widgetButtons;
layout.dashboard.explicitSave = self.explicitSave;
layout.dashboard.settingsModalOptions = self.settingsModalOptions;
layout.dashboard.onSettingsClose = self.onSettingsClose;
layout.dashboard.onSettingsDismiss = self.onSettingsDismiss;
self.layouts.push(layout);
});
},
remove: function(layout) {
var index = this.layouts.indexOf(layout);
if (index >= 0) {
this.layouts.splice(index, 1);
delete this.states[layout.id];
// check for active
if (layout.active && this.layouts.length) {
var nextActive = index > 0 ? index - 1 : 0;
this.layouts[nextActive].active = true;
}
}
},
save: function() {
var state = {
layouts: this._serializeLayouts(),
states: this.states,
storageHash: this.storageHash
};
if (this.stringifyStorage) {
state = JSON.stringify(state);
}
this.storage.setItem(this.id, state);
this.options.unsavedChangeCount = 0;
},
load: function() {
var serialized = this.storage.getItem(this.id);
this.clear();
if (serialized) {
// check for promise
if (angular.isObject(serialized) && angular.isFunction(serialized.then)) {
this._handleAsyncLoad(serialized);
} else {
this._handleSyncLoad(serialized);
}
} else {
this._addDefaultLayouts();
}
},
clear: function() {
this.layouts = [];
this.states = {};
},
setItem: function(id, value) {
this.states[id] = value;
this.save();
},
getItem: function(id) {
return this.states[id];
},
removeItem: function(id) {
delete this.states[id];
this.save();
},
getActiveLayout: function() {
var len = this.layouts.length;
for (var i = 0; i < len; i++) {
var layout = this.layouts[i];
if (layout.active) {
return layout;
}
}
return false;
},
_addDefaultLayouts: function() {
var self = this;
angular.forEach(this.defaultLayouts, function(layout) {
self.add(angular.extend({}, layout));
});
},
_serializeLayouts: function() {
var result = [];
angular.forEach(this.layouts, function(l) {
result.push({
title: l.title,
id: l.id,
active: l.active,
defaultWidgets: l.dashboard.defaultWidgets
});
});
return result;
},
_handleSyncLoad: function(serialized) {
var deserialized;
if (this.stringifyStorage) {
try {
deserialized = JSON.parse(serialized);
} catch (e) {
this._addDefaultLayouts();
return;
}
} else {
deserialized = serialized;
}
if (this.storageHash !== deserialized.storageHash) {
this._addDefaultLayouts();
return;
}
this.states = deserialized.states;
this.add(deserialized.layouts);
},
_handleAsyncLoad: function(promise) {
var self = this;
promise.then(
angular.bind(self, this._handleSyncLoad),
angular.bind(self, this._addDefaultLayouts)
);
},
_ensureActiveLayout: function() {
for (var i = 0; i < this.layouts.length; i++) {
var layout = this.layouts[i];
if (layout.active) {
return;
}
}
if (this.layouts[0]) {
this.layouts[0].active = true;
}
},
_getLayoutId: function(layout) {
if (layout.id) {
return layout.id;
}
var max = 0;
for (var i = 0; i < this.layouts.length; i++) {
var id = this.layouts[i].id;
max = Math.max(max, id * 1);
}
return max + 1;
}
};
return LayoutStorage;
});
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.factory('DashboardState', ['$log', '$q', function ($log, $q) {
function DashboardState(storage, id, hash, widgetDefinitions, stringify) {
this.storage = storage;
this.id = id;
this.hash = hash;
this.widgetDefinitions = widgetDefinitions;
this.stringify = stringify;
}
DashboardState.prototype = {
/**
* Takes array of widget instance objects, serializes,
* and saves state.
*
* @param {Array} widgets scope.widgets from dashboard directive
* @return {Boolean} true on success, false on failure
*/
save: function (widgets) {
if (!this.storage) {
return true;
}
var serialized = _.map(widgets, function (widget) {
var widgetObject = {
title: widget.title,
name: widget.name,
style: widget.style,
size: widget.size,
dataModelOptions: widget.dataModelOptions,
storageHash: widget.storageHash,
attrs: widget.attrs
};
return widgetObject;
});
var item = { widgets: serialized, hash: this.hash };
if (this.stringify) {
item = JSON.stringify(item);
}
this.storage.setItem(this.id, item);
return true;
},
/**
* Loads dashboard state from the storage object.
* Can handle a synchronous response or a promise.
*
* @return {Array|Promise} Array of widget definitions or a promise
*/
load: function () {
if (!this.storage) {
return null;
}
var serialized;
// try loading storage item
serialized = this.storage.getItem( this.id );
if (serialized) {
// check for promise
if (angular.isObject(serialized) && angular.isFunction(serialized.then)) {
return this._handleAsyncLoad(serialized);
}
// otherwise handle synchronous load
return this._handleSyncLoad(serialized);
} else {
return null;
}
},
_handleSyncLoad: function(serialized) {
var deserialized, result = [];
if (!serialized) {
return null;
}
if (this.stringify) {
try { // to deserialize the string
deserialized = JSON.parse(serialized);
} catch (e) {
// bad JSON, log a warning and return
$log.warn('Serialized dashboard state was malformed and could not be parsed: ', serialized);
return null;
}
}
else {
deserialized = serialized;
}
// check hash against current hash
if (deserialized.hash !== this.hash) {
$log.info('Serialized dashboard from storage was stale (old hash: ' + deserialized.hash + ', new hash: ' + this.hash + ')');
this.storage.removeItem(this.id);
return null;
}
// Cache widgets
var savedWidgetDefs = deserialized.widgets;
// instantiate widgets from stored data
for (var i = 0; i < savedWidgetDefs.length; i++) {
// deserialized object
var savedWidgetDef = savedWidgetDefs[i];
// widget definition to use
var widgetDefinition = this.widgetDefinitions.getByName(savedWidgetDef.name);
// check for no widget
if (!widgetDefinition) {
// no widget definition found, remove and return false
$log.warn('Widget with name "' + savedWidgetDef.name + '" was not found in given widget definition objects');
continue;
}
// check widget-specific storageHash
if (widgetDefinition.hasOwnProperty('storageHash') && widgetDefinition.storageHash !== savedWidgetDef.storageHash) {
// widget definition was found, but storageHash was stale, removing storage
$log.info('Widget Definition Object with name "' + savedWidgetDef.name + '" was found ' +
'but the storageHash property on the widget definition is different from that on the ' +
'serialized widget loaded from storage. hash from storage: "' + savedWidgetDef.storageHash + '"' +
', hash from WDO: "' + widgetDefinition.storageHash + '"');
continue;
}
// push instantiated widget to result array
result.push(savedWidgetDef);
}
return result;
},
_handleAsyncLoad: function(promise) {
var self = this;
var deferred = $q.defer();
promise.then(
// success
function(res) {
var result = self._handleSyncLoad(res);
if (result) {
deferred.resolve(result);
} else {
deferred.reject(result);
}
},
// failure
function(res) {
deferred.reject(res);
}
);
return deferred.promise;
}
};
return DashboardState;
}]);
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.factory('WidgetDataModel', function () {
function WidgetDataModel() {
}
WidgetDataModel.prototype = {
setup: function (widget, scope) {
this.dataAttrName = widget.dataAttrName;
this.dataModelOptions = widget.dataModelOptions;
this.widgetScope = scope;
},
updateScope: function (data) {
this.widgetScope.widgetData = data;
},
init: function () {
// to be overridden by subclasses
},
destroy: function () {
// to be overridden by subclasses
}
};
return WidgetDataModel;
});
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.factory('WidgetDefCollection', function () {
function WidgetDefCollection(widgetDefs) {
this.push.apply(this, widgetDefs);
// build (name -> widget definition) map for widget lookup by name
var map = {};
_.each(widgetDefs, function (widgetDef) {
map[widgetDef.name] = widgetDef;
});
this.map = map;
}
WidgetDefCollection.prototype = Object.create(Array.prototype);
WidgetDefCollection.prototype.getByName = function (name) {
return this.map[name];
};
return WidgetDefCollection;
});
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.factory('WidgetModel', function () {
// constructor for widget model instances
function WidgetModel(Class, overrides) {
var defaults = {
title: 'Widget',
name: Class.name,
attrs: Class.attrs,
dataAttrName: Class.dataAttrName,
dataModelType: Class.dataModelType,
dataModelArgs: Class.dataModelArgs, // used in data model constructor, not serialized
//AW Need deep copy of options to support widget options editing
dataModelOptions: Class.dataModelOptions,
settingsModalOptions: Class.settingsModalOptions,
onSettingsClose: Class.onSettingsClose,
onSettingsDismiss: Class.onSettingsDismiss,
style: Class.style || {},
size: Class.size || {},
enableVerticalResize: (Class.enableVerticalResize === false) ? false : true
};
overrides = overrides || {};
angular.extend(this, angular.copy(defaults), overrides);
this.containerStyle = { width: '33%' }; // default width
this.contentStyle = {};
this.updateContainerStyle(this.style);
if (Class.templateUrl) {
this.templateUrl = Class.templateUrl;
} else if (Class.template) {
this.template = Class.template;
} else {
var directive = Class.directive || Class.name;
this.directive = directive;
}
if (this.size && _.has(this.size, 'height')) {
this.setHeight(this.size.height);
}
if (this.style && _.has(this.style, 'width')) { //TODO deprecate style attribute
this.setWidth(this.style.width);
}
if (this.size && _.has(this.size, 'width')) {
this.setWidth(this.size.width);
}
}
WidgetModel.prototype = {
// sets the width (and widthUnits)
setWidth: function (width, units) {
width = width.toString();
units = units || width.replace(/^[-\.\d]+/, '') || '%';
this.widthUnits = units;
width = parseFloat(width);
if (width < 0) {
return false;
}
if (units === '%') {
width = Math.min(100, width);
width = Math.max(0, width);
}
this.containerStyle.width = width + '' + units;
this.updateSize(this.containerStyle);
return true;
},
setHeight: function (height) {
this.contentStyle.height = height;
this.updateSize(this.contentStyle);
},
setStyle: function (style) {
this.style = style;
this.updateContainerStyle(style);
},
updateSize: function (size) {
angular.extend(this.size, size);
},
updateContainerStyle: function (style) {
angular.extend(this.containerStyle, style);
}
};
return WidgetModel;
});
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.controller('SaveChangesModalCtrl', ['$scope', '$modalInstance', 'layout', function ($scope, $modalInstance, layout) {
// add layout to scope
$scope.layout = layout;
$scope.ok = function () {
$modalInstance.close();
};
$scope.cancel = function () {
$modalInstance.dismiss();
};
}]);
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.controller('DashboardWidgetCtrl', ['$scope', '$element', '$compile', '$window', '$timeout',
function($scope, $element, $compile, $window, $timeout) {
$scope.status = {
isopen: false
};
// Fills "container" with compiled view
$scope.makeTemplateString = function() {
var widget = $scope.widget;
// First, build template string
var templateString = '';
if (widget.templateUrl) {
// Use ng-include for templateUrl
templateString = '<div ng-include="\'' + widget.templateUrl + '\'"></div>';
} else if (widget.template) {
// Direct string template
templateString = widget.template;
} else {
// Assume attribute directive
templateString = '<div ' + widget.directive;
// Check if data attribute was specified
if (widget.dataAttrName) {
widget.attrs = widget.attrs || {};
widget.attrs[widget.dataAttrName] = 'widgetData';
}
// Check for specified attributes
if (widget.attrs) {
// First check directive name attr
if (widget.attrs[widget.directive]) {
templateString += '="' + widget.attrs[widget.directive] + '"';
}
// Add attributes
_.each(widget.attrs, function(value, attr) {
// make sure we aren't reusing directive attr
if (attr !== widget.directive) {
templateString += ' ' + attr + '="' + value + '"';
}
});
}
templateString += '></div>';
}
return templateString;
};
$scope.grabResizer = function(e) {
var widget = $scope.widget;
var widgetElm = $element.find('.widget');
// ignore middle- and right-click
if (e.which !== 1) {
return;
}
e.stopPropagation();
e.originalEvent.preventDefault();
// get the starting horizontal position
var initX = e.clientX;
// console.log('initX', initX);
// Get the current width of the widget and dashboard
var pixelWidth = widgetElm.width();
var pixelHeight = widgetElm.height();
var widgetStyleWidth = widget.containerStyle.width;
var widthUnits = widget.widthUnits;
var unitWidth = parseFloat(widgetStyleWidth);
// create marquee element for resize action
var $marquee = angular.element('<div class="widget-resizer-marquee" style="height: ' + pixelHeight + 'px; width: ' + pixelWidth + 'px;"></div>');
widgetElm.append($marquee);
// determine the unit/pixel ratio
var transformMultiplier = unitWidth / pixelWidth;
// updates marquee with preview of new width
var mousemove = function(e) {
var curX = e.clientX;
var pixelChange = curX - initX;
var newWidth = pixelWidth + pixelChange;
$marquee.css('width', newWidth + 'px');
};
// sets new widget width on mouseup
var mouseup = function(e) {
// remove listener and marquee
jQuery($window).off('mousemove', mousemove);
$marquee.remove();
// calculate change in units
var curX = e.clientX;
var pixelChange = curX - initX;
var unitChange = Math.round(pixelChange * transformMultiplier * 100) / 100;
// add to initial unit width
var newWidth = unitWidth * 1 + unitChange;
widget.setWidth(newWidth + widthUnits);
$scope.$emit('widgetChanged', widget);
$scope.$apply();
$scope.$broadcast('widgetResized', {
width: newWidth
});
};
jQuery($window).on('mousemove', mousemove).one('mouseup', mouseup);
};
//TODO refactor
$scope.grabSouthResizer = function(e) {
var widgetElm = $element.find('.widget');
// ignore middle- and right-click
if (e.which !== 1) {
return;
}
e.stopPropagation();
e.originalEvent.preventDefault();
// get the starting horizontal position
var initY = e.clientY;
// console.log('initX', initX);
// Get the current width of the widget and dashboard
var pixelWidth = widgetElm.width();
var pixelHeight = widgetElm.height();
// create marquee element for resize action
var $marquee = angular.element('<div class="widget-resizer-marquee" style="height: ' + pixelHeight + 'px; width: ' + pixelWidth + 'px;"></div>');
widgetElm.append($marquee);
// updates marquee with preview of new height
var mousemove = function(e) {
var curY = e.clientY;
var pixelChange = curY - initY;
var newHeight = pixelHeight + pixelChange;
$marquee.css('height', newHeight + 'px');
};
// sets new widget width on mouseup
var mouseup = function(e) {
// remove listener and marquee
jQuery($window).off('mousemove', mousemove);
$marquee.remove();
// calculate height change
var curY = e.clientY;
var pixelChange = curY - initY;
//var widgetContainer = widgetElm.parent(); // widget container responsible for holding widget width and height
var widgetContainer = widgetElm.find('.widget-content');
var diff = pixelChange;
var height = parseInt(widgetContainer.css('height'), 10);
var newHeight = (height + diff);
//$scope.widget.style.height = newHeight + 'px';
$scope.widget.setHeight(newHeight + 'px');
$scope.$emit('widgetChanged', $scope.widget);
$scope.$apply(); // make AngularJS to apply style changes
$scope.$broadcast('widgetResized', {
height: newHeight
});
};
jQuery($window).on('mousemove', mousemove).one('mouseup', mouseup);
};
// replaces widget title with input
$scope.editTitle = function(widget) {
var widgetElm = $element.find('.widget');
widget.editingTitle = true;
// HACK: get the input to focus after being displayed.
$timeout(function() {
widgetElm.find('form.widget-title input:eq(0)').focus()[0].setSelectionRange(0, 9999);
});
};
// saves whatever is in the title input as the new title
$scope.saveTitleEdit = function(widget) {
widget.editingTitle = false;
$scope.$emit('widgetChanged', widget);
};
$scope.compileTemplate = function() {
var container = $scope.findWidgetContainer($element);
var templateString = $scope.makeTemplateString();
var widgetElement = angular.element(templateString);
container.empty();
container.append(widgetElement);
$compile(widgetElement)($scope);
};
$scope.findWidgetContainer = function(element) {
// widget placeholder is the first (and only) child of .widget-content
return element.find('.widget-content');
};
}
]);
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard')
.controller('WidgetSettingsCtrl', ['$scope', '$modalInstance', 'widget', function ($scope, $modalInstance, widget) {
// add widget to scope
$scope.widget = widget;
// set up result object
$scope.result = jQuery.extend(true, {}, widget);
$scope.ok = function () {
$modalInstance.close($scope.result);
};
$scope.cancel = function () {
$modalInstance.dismiss('cancel');
};
}]);
angular.module("ui.dashboard").run(["$templateCache", function($templateCache) {
$templateCache.put("template/alt-dashboard.html",
"<div>\n" +
" <div class=\"btn-toolbar\" ng-if=\"!options.hideToolbar\">\n" +
" <div class=\"btn-group\" ng-if=\"!options.widgetButtons\">\n" +
" <span class=\"dropdown\" on-toggle=\"toggled(open)\">\n" +
" <button type=\"button\" class=\"btn btn-primary dropdown-toggle\" ng-disabled=\"disabled\">\n" +
" Button dropdown <span class=\"caret\"></span>\n" +
" </button>\n" +
" <ul class=\"dropdown-menu\" role=\"menu\">\n" +
" <li ng-repeat=\"widget in widgetDefs\">\n" +
" <a href=\"#\" ng-click=\"addWidgetInternal($event, widget);\" class=\"dropdown-toggle\">{{widget.name}}</a>\n" +
" </li>\n" +
" </ul>\n" +
" </span>\n" +
" </div>\n" +
"\n" +
" <div class=\"btn-group\" ng-if=\"options.widgetButtons\">\n" +
" <button ng-repeat=\"widget in widgetDefs\"\n" +
" ng-click=\"addWidgetInternal($event, widget);\" type=\"button\" class=\"btn btn-primary\">\n" +
" {{widget.name}}\n" +
" </button>\n" +
" </div>\n" +
"\n" +
" <button class=\"btn btn-warning\" ng-click=\"resetWidgetsToDefault()\">Default Widgets</button>\n" +
"\n" +
" <button ng-if=\"options.storage && options.explicitSave\" ng-click=\"options.saveDashboard()\" class=\"btn btn-success\" ng-hide=\"!options.unsavedChangeCount\">{{ !options.unsavedChangeCount ? \"Alternative - No Changes\" : \"Save\" }}</button>\n" +
"\n" +
" <button ng-click=\"clear();\" ng-hide=\"!widgets.length\" type=\"button\" class=\"btn btn-info\">Clear</button>\n" +
" </div>\n" +
"\n" +
" <div ui-sortable=\"sortableOptions\" ng-model=\"widgets\" class=\"dashboard-widget-area\">\n" +
" <div ng-repeat=\"widget in widgets\" ng-style=\"widget.style\" class=\"widget-container\" widget>\n" +
" <div class=\"widget panel panel-default\">\n" +
" <div class=\"widget-header panel-heading\">\n" +
" <h3 class=\"panel-title\">\n" +
" <span class=\"widget-title\" ng-dblclick=\"editTitle(widget)\" ng-hide=\"widget.editingTitle\">{{widget.title}}</span>\n" +
" <form action=\"\" class=\"widget-title\" ng-show=\"widget.editingTitle\" ng-submit=\"saveTitleEdit(widget)\">\n" +
" <input type=\"text\" ng-model=\"widget.title\" class=\"form-control\">\n" +
" </form>\n" +
" <span class=\"label label-primary\" ng-if=\"!options.hideWidgetName\">{{widget.name}}</span>\n" +
" <span ng-click=\"removeWidget(widget);\" class=\"glyphicon glyphicon-remove\" ng-if=\"!options.hideWidgetClose\"></span>\n" +
" <span ng-click=\"openWidgetSettings(widget);\" class=\"glyphicon glyphicon-cog\" ng-if=\"!options.hideWidgetSettings\"></span>\n" +
" </h3>\n" +
" </div>\n" +
" <div class=\"panel-body widget-content\"></div>\n" +
" <div class=\"widget-ew-resizer\" ng-mousedown=\"grabResizer($event)\"></div>\n" +
" </div>\n" +
" </div>\n" +
" </div>\n" +
"</div>\n"
);
$templateCache.put("template/dashboard-layouts.html",
"<ul ui-sortable=\"sortableOptions\" ng-model=\"layouts\" class=\"nav nav-tabs layout-tabs\">\n" +
" <li ng-repeat=\"layout in layouts\" ng-class=\"{ active: layout.active }\">\n" +
" <a ng-click=\"makeLayoutActive(layout)\">\n" +
" <span ng-dblclick=\"editTitle(layout)\" ng-show=\"!layout.editingTitle\">{{layout.title}}</span>\n" +
" <form action=\"\" class=\"layout-title\" ng-show=\"layout.editingTitle\" ng-submit=\"saveTitleEdit(layout)\">\n" +
" <input type=\"text\" ng-model=\"layout.title\" class=\"form-control\" data-layout=\"{{layout.id}}\">\n" +
" </form>\n" +
" <span ng-click=\"removeLayout(layout)\" class=\"glyphicon glyphicon-remove remove-layout-icon\"></span>\n" +
" <!-- <span class=\"glyphicon glyphicon-pencil\"></span> -->\n" +
" <!-- <span class=\"glyphicon glyphicon-remove\"></span> -->\n" +
" </a>\n" +
" </li>\n" +
" <li>\n" +
" <a ng-click=\"createNewLayout()\">\n" +
" <span class=\"glyphicon glyphicon-plus\"></span>\n" +
" </a>\n" +
" </li>\n" +
"</ul>\n" +
"<div ng-repeat=\"layout in layouts | filter:isActive\" dashboard=\"layout.dashboard\" template-url=\"template/dashboard.html\"></div>"
);
$templateCache.put("template/dashboard.html",
"<div>\n" +
" <div class=\"btn-toolbar\" ng-if=\"!options.hideToolbar\">\n" +
" <div class=\"btn-group\" ng-if=\"!options.widgetButtons\">\n" +
" <span class=\"dropdown\" on-toggle=\"toggled(open)\">\n" +
" <button type=\"button\" class=\"btn btn-primary dropdown-toggle\" ng-disabled=\"disabled\">\n" +
" Button dropdown <span class=\"caret\"></span>\n" +
" </button>\n" +
" <ul class=\"dropdown-menu\" role=\"menu\">\n" +
" <li ng-repeat=\"widget in widgetDefs\">\n" +
" <a href=\"#\" ng-click=\"addWidgetInternal($event, widget);\" class=\"dropdown-toggle\"><span class=\"label label-primary\">{{widget.name}}</span></a>\n" +
" </li>\n" +
" </ul>\n" +
" </span>\n" +
" </div>\n" +
" <div class=\"btn-group\" ng-if=\"options.widgetButtons\">\n" +
" <button ng-repeat=\"widget in widgetDefs\"\n" +
" ng-click=\"addWidgetInternal($event, widget);\" type=\"button\" class=\"btn btn-primary\">\n" +
" {{widget.name}}\n" +
" </button>\n" +
" </div>\n" +
"\n" +
" <button class=\"btn btn-warning\" ng-click=\"resetWidgetsToDefault()\">Default Widgets</button>\n" +
"\n" +
" <button ng-if=\"options.storage && options.explicitSave\" ng-click=\"options.saveDashboard()\" class=\"btn btn-success\" ng-disabled=\"!options.unsavedChangeCount\">{{ !options.unsavedChangeCount ? \"all saved\" : \"save changes (\" + options.unsavedChangeCount + \")\" }}</button>\n" +
"\n" +
" <button ng-click=\"clear();\" type=\"button\" class=\"btn btn-info\">Clear</button>\n" +
" </div>\n" +
"\n" +
" <div ui-sortable=\"sortableOptions\" ng-model=\"widgets\" class=\"dashboard-widget-area\">\n" +
" <div ng-repeat=\"widget in widgets\" ng-style=\"widget.containerStyle\" class=\"widget-container\" widget>\n" +
" <div class=\"widget panel panel-default\">\n" +
" <div class=\"widget-header panel-heading\">\n" +
" <h3 class=\"panel-title\">\n" +
" <span class=\"widget-title\" ng-dblclick=\"editTitle(widget)\" ng-hide=\"widget.editingTitle\">{{widget.title}}</span>\n" +
" <form action=\"\" class=\"widget-title\" ng-show=\"widget.editingTitle\" ng-submit=\"saveTitleEdit(widget)\">\n" +
" <input type=\"text\" ng-model=\"widget.title\" class=\"form-control\">\n" +
" </form>\n" +
" <span class=\"label label-primary\" ng-if=\"!options.hideWidgetName\">{{widget.name}}</span>\n" +
" <span ng-click=\"removeWidget(widget);\" class=\"glyphicon glyphicon-remove\" ng-if=\"!options.hideWidgetClose\"></span>\n" +
" <span ng-click=\"openWidgetSettings(widget);\" class=\"glyphicon glyphicon-cog\" ng-if=\"!options.hideWidgetSettings\"></span>\n" +
" </h3>\n" +
" </div>\n" +
" <div class=\"panel-body widget-content\" ng-style=\"widget.contentStyle\"></div>\n" +
" <div class=\"widget-ew-resizer\" ng-mousedown=\"grabResizer($event)\"></div>\n" +
" <div ng-if=\"widget.enableVerticalResize\" class=\"widget-s-resizer\" ng-mousedown=\"grabSouthResizer($event)\"></div>\n" +
" </div>\n" +
" </div>\n" +
" </div>\n" +
"</div>"
);
$templateCache.put("template/save-changes-modal.html",
"<div class=\"modal-header\">\n" +
" <button type=\"button\" class=\"close\" data-dismiss=\"modal\" aria-hidden=\"true\" ng-click=\"cancel()\">×</button>\n" +
" <h3>Unsaved Changes to \"{{layout.title}}\"</h3>\n" +
"</div>\n" +
"\n" +
"<div class=\"modal-body\">\n" +
" <p>You have {{layout.dashboard.unsavedChangeCount}} unsaved changes on this dashboard. Would you like to save them?</p>\n" +
"</div>\n" +
"\n" +
"<div class=\"modal-footer\">\n" +
" <button type=\"button\" class=\"btn btn-default\" ng-click=\"cancel()\">Don't Save</button>\n" +
" <button type=\"button\" class=\"btn btn-primary\" ng-click=\"ok()\">Save</button>\n" +
"</div>"
);
$templateCache.put("template/widget-default-content.html",
""
);
$templateCache.put("template/widget-settings-template.html",
"<div class=\"modal-header\">\n" +
" <button type=\"button\" class=\"close\" data-dismiss=\"modal\" aria-hidden=\"true\" ng-click=\"cancel()\">×</button>\n" +
" <h3>Widget Options <small>{{widget.title}}</small></h3>\n" +
"</div>\n" +
"\n" +
"<div class=\"modal-body\">\n" +
" <form name=\"form\" novalidate class=\"form-horizontal\">\n" +
" <div class=\"form-group\">\n" +
" <label for=\"widgetTitle\" class=\"col-sm-2 control-label\">Title</label>\n" +
" <div class=\"col-sm-10\">\n" +
" <input type=\"text\" class=\"form-control\" name=\"widgetTitle\" ng-model=\"result.title\">\n" +
" </div>\n" +
" </div>\n" +
" <div ng-if=\"widget.settingsModalOptions.partialTemplateUrl\"\n" +
" ng-include=\"widget.settingsModalOptions.partialTemplateUrl\"></div>\n" +
" </form>\n" +
"</div>\n" +
"\n" +
"<div class=\"modal-footer\">\n" +
" <button type=\"button\" class=\"btn btn-default\" ng-click=\"cancel()\">Cancel</button>\n" +
" <button type=\"button\" class=\"btn btn-primary\" ng-click=\"ok()\">OK</button>\n" +
"</div>"
);
}]);<|fim▁end|> | |
<|file_name|>test_oopsreferences.py<|end_file_name|><|fim▁begin|># Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests of the oopsreferences core."""
__metaclass__ = type
from datetime import (
datetime,
timedelta,
)
from pytz import utc
from lp.registry.model.oopsreferences import referenced_oops
from lp.services.database.interfaces import IStore
from lp.services.messages.model.message import (
Message,
MessageSet,
)
from lp.testing import (
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import DatabaseFunctionalLayer
class TestOopsReferences(TestCaseWithFactory):
layer = DatabaseFunctionalLayer
def setUp(self):
super(TestOopsReferences, self).setUp()
self.store = IStore(Message)
def test_oops_in_messagechunk(self):
oopsid = "OOPS-abcdef1234"
MessageSet().fromText('foo', "foo %s bar" % oopsid)
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=1", {}))
self.failUnlessEqual(
set(),
referenced_oops(now + day, now + day, "product=1", {}))
def test_oops_in_messagesubject(self):
oopsid = "OOPS-abcdef1234"
self.factory.makeEmailMessage()
MessageSet().fromText("Crash with %s" % oopsid, "body")
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=1", {}))
self.failUnlessEqual(
set(),
referenced_oops(now + day, now + day, "product=1", {}))
def test_oops_in_bug_title(self):
oopsid = "OOPS-abcdef1234"
bug = self.factory.makeBug()
with person_logged_in(bug.owner):
bug.title = "Crash with %s" % oopsid
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=1", {}))
self.failUnlessEqual(
set(),
referenced_oops(now + day, now + day, "product=1", {}))
def test_oops_in_bug_description(self):
oopsid = "OOPS-abcdef1234"
bug = self.factory.makeBug()
with person_logged_in(bug.owner):
bug.description = "Crash with %s" % oopsid
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=1", {}))
self.failUnlessEqual(
set(),
referenced_oops(now + day, now + day, "product=1", {}))
def test_oops_in_question_title(self):
oopsid = "OOPS-abcdef1234"
question = self.factory.makeQuestion(title="Crash with %s" % oopsid)
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=%(product)s",
{'product': question.product.id}))
self.failUnlessEqual(
set([]),
referenced_oops(now + day, now + day, "product=%(product)s",
{'product': question.product.id}))
def test_oops_in_question_wrong_context(self):
oopsid = "OOPS-abcdef1234"
question = self.factory.makeQuestion(title="Crash with %s" % oopsid)
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.store.flush()
self.failUnlessEqual(
set(),
referenced_oops(now - day, now, "product=%(product)s",
{'product': question.product.id + 1}))
def test_oops_in_question_description(self):
oopsid = "OOPS-abcdef1234"
question = self.factory.makeQuestion(
description="Crash with %s" % oopsid)
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=%(product)s",
{'product': question.product.id}))
self.failUnlessEqual(
set([]),
referenced_oops(now + day, now + day, "product=%(product)s",
{'product': question.product.id}))
def test_oops_in_question_whiteboard(self):
oopsid = "OOPS-abcdef1234"
question = self.factory.makeQuestion()
with person_logged_in(question.owner):
question.whiteboard = "Crash with %s" % oopsid
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=%(product)s",
{'product': question.product.id}))
self.failUnlessEqual(
set([]),
referenced_oops(now + day, now + day, "product=%(product)s",
{'product': question.product.id}))
def test_oops_in_question_distribution(self):
oopsid = "OOPS-abcdef1234"
distro = self.factory.makeDistribution()
question = self.factory.makeQuestion(target=distro)
with person_logged_in(question.owner):
question.whiteboard = "Crash with %s" % oopsid
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid]),
referenced_oops(now - day, now, "distribution=%(distribution)s",
{'distribution': distro.id}))
self.failUnlessEqual(
set([]),
referenced_oops(now + day, now + day,
"distribution=%(distribution)s", {'distribution': distro.id}))
def test_referenced_oops_in_urls_bug_663249(self):
# Sometimes OOPS ids appears as part of an URL. These should could as
# a reference even though they are not formatted specially - this
# requires somewhat special handling in the reference calculation
# function.
oopsid_old = "OOPS-abcdef1234"
oopsid_new = "OOPS-4321"
bug_old = self.factory.makeBug()
bug_new = self.factory.makeBug()
with person_logged_in(bug_old.owner):
bug_old.description = (
"foo https://lp-oops.canonical.com/oops.py?oopsid=%s bar"
% oopsid_old)
with person_logged_in(bug_new.owner):
bug_new.description = (
"foo https://oops.canonical.com/oops.py?oopsid=%s bar"
% oopsid_new)
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.failUnlessEqual(
set([oopsid_old, oopsid_new]),
referenced_oops(now - day, now, "product=1", {}))<|fim▁hole|><|fim▁end|> | self.failUnlessEqual(
set([]),
referenced_oops(now + day, now + day, "product=1", {})) |
<|file_name|>gpio.py<|end_file_name|><|fim▁begin|>GPIO_HUB_RST_N = 30
GPIO_UBLOX_RST_N = 32
GPIO_UBLOX_SAFEBOOT_N = 33
GPIO_UBLOX_PWR_EN = 34
GPIO_STM_RST_N = 124
GPIO_STM_BOOT0 = 134
def gpio_init(pin, output):
try:
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
f.write(b"out" if output else b"in")
except Exception as e:<|fim▁hole|> try:
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
f.write(b"1" if high else b"0")
except Exception as e:
print(f"Failed to set gpio {pin} value: {e}")<|fim▁end|> | print(f"Failed to set gpio {pin} direction: {e}")
def gpio_set(pin, high): |
<|file_name|>badge.ts<|end_file_name|><|fim▁begin|>import * as Router from 'koa-router'
import * as moment from 'moment'
import { get } from 'lodash'
import { QueryOptions } from '../route'
import { BlockchainService } from '../../domainService'<|fim▁hole|>
interface Badge {
readonly workId: string,
readonly datePublished: string
}
export class BadgeRoute extends WorkRoute {
constructor(service: BlockchainService) {
super(service)
}
getBadgeHTML(badge: Badge) {
return `
<html><head><style> body,html,div { margin: 0; padding: 0 }</style><link href="https://fonts.googleapis.com/css?family=Roboto" rel="stylesheet"></head>
<body> <div style=" width: 165px; height: 50px; background-color: white; font-family: Roboto; font-size: 12px; border: 1px solid #CDCDCD; border-radius: 4px; box-shadow: 0 2px 0 0 #F0F0F0;">
<a href="https://alpha.po.et/works/${badge.workId}" target="_blank" style=" color: #35393E; text-decoration: none; display: flex; flex-direction: row; height: 50px">
<img src="https://alpha.po.et/images/quill64.png" style=" width: 31px; height: 31px; margin-top: 8px; margin-left: 8px; margin-right: 8px; background-color: #393534; color: #35393E; font-family: Roboto;">
<div><p style="padding-top: 10px; line-height: 15px; margin: 0; font-size: 10pt; font-weight: bold; text-align: left;">Verified on po.et</p>
<p style="text-align: left; line-height: 15px; margin: 0; font-size: 10px; padding-top: 1px; font-size: 8px; font-family: Roboto; font-weight: bold; line-height: 13px; color: #707070;">${moment(parseInt(badge.datePublished, 10)).format('MMMM Do YYYY, HH:mm')}</p>
</div></a></div></body></html>
`
}
getBadgeHTMLEmpty() {
return `
<html><head><style> body,html,div { margin: 0; padding: 0 }</style><link href="https://fonts.googleapis.com/css?family=Roboto" rel="stylesheet"></head>
<body> <div style=" width: 165px; height: 50px; background-color: white; font-family: Roboto; font-size: 12px; border: 1px solid #CDCDCD; border-radius: 4px; box-shadow: 0 2px 0 0 #F0F0F0;">
<a href="https://alpha.po.et/" target="_blank" style=" color: #35393E; text-decoration: none; display: flex; flex-direction: row; height: 50px">
<img src="https://alpha.po.et/images/quill64.png" style=" width: 31px; height: 31px; margin-top: 8px; margin-left: 8px; margin-right: 8px; background-color: #393534; color: #35393E; font-family: Roboto;">
<div><p style="padding-top: 15px; line-height: 15px; margin: 0; font-size: 10pt; font-weight: bold; text-align: left;">Pending on po.et</p>
</div></a></div></body></html>
`
}
getParseCtx(ctx: any) {
const { profileId, workId } = ctx.query
ctx.request.query.attribute = `id<>${workId}`
ctx.request.query.owner = profileId
return ctx
}
async getBadge(opts: QueryOptions): Promise<Badge> {
// TODO we need to remove the dependency
// the this.getCollection
const work = await this.getCollection(opts)
const sanetizedWork = get(work, '[0]', undefined)
return sanetizedWork && {
workId: get(sanetizedWork, 'id', ''),
datePublished: get(sanetizedWork, 'attributes.datePublished', '')
}
}
async getBadgeRoute(ctx: any): Promise<void> {
const parseCtx = this.getParseCtx(ctx)
const opts = this.getParamOpts(parseCtx)
const badge = await this.getBadge(opts)
ctx.body = badge ? this.getBadgeHTML(badge) : this.getBadgeHTMLEmpty()
}
addRoutes(router: Router): any {
router.get('/badge', this.getBadgeRoute.bind(this))
}
}<|fim▁end|> | import { WorkRoute } from './work' |
<|file_name|>test_filter.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from textwrap import dedent
from pants.backend.core.targets.dependencies import Dependencies
from pants.backend.core.targets.doc import Page
from pants.backend.core.tasks.filter import Filter
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.python.targets.python_library import PythonLibrary
from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary
from pants.base.exceptions import TaskError
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants_test.tasks.task_test_base import ConsoleTaskTestBase
class BaseFilterTest(ConsoleTaskTestBase):
@property
def alias_groups(self):
return BuildFileAliases(
targets={
'target': Dependencies,
'java_library': JavaLibrary,
'page': Page,
'python_library': PythonLibrary,
'python_requirement_library': PythonRequirementLibrary,
}
)
@classmethod
def task_type(cls):
return Filter
class FilterEmptyTargetsTest(BaseFilterTest):
def test_no_filters(self):
self.assert_console_output()
def test_type(self):
self.assert_console_output(options={'type': ['page']})
self.assert_console_output(options={'type': ['java_library']})
def test_regex(self):
self.assert_console_output(options={'regex': ['^common']})
self.assert_console_output(options={'regex': ['-^common']})
class FilterTest(BaseFilterTest):
def setUp(self):
super(FilterTest, self).setUp()
requirement_injected = set()
def add_to_build_file(path, name, *deps):
if path not in requirement_injected:
self.add_to_build_file(path, "python_requirement_library(name='foo')")
requirement_injected.add(path)
all_deps = ["'{0}'".format(dep) for dep in deps] + ["':foo'"]
self.add_to_build_file(path, dedent("""
python_library(name='{name}',
dependencies=[{all_deps}],
tags=['{tag}']
)
""".format(name=name, tag=name + "_tag", all_deps=','.join(all_deps))))
add_to_build_file('common/a', 'a')
add_to_build_file('common/b', 'b')
add_to_build_file('common/c', 'c')
add_to_build_file('overlaps', 'one', 'common/a', 'common/b')
add_to_build_file('overlaps', 'two', 'common/a', 'common/c')
add_to_build_file('overlaps', 'three', 'common/a', 'overlaps:one')
def test_roots(self):
self.assert_console_output(
'common/a:a',
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
targets=self.targets('common/::'),
extra_targets=self.targets('overlaps/::')
)
def test_nodups(self):
targets = [self.target('common/b')] * 2
self.assertEqual(2, len(targets))
self.assert_console_output(
'common/b:b',
targets=targets
)
def test_no_filters(self):
self.assert_console_output(
'common/a:a',
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
'overlaps:one',
'overlaps:two',
'overlaps:three',
'overlaps:foo',
targets=self.targets('::')
)
def test_filter_type(self):
self.assert_console_output(
'common/a:a',
'common/b:b',
'common/c:c',
'overlaps:one',
'overlaps:two',
'overlaps:three',
targets=self.targets('::'),
options={'type': ['python_library']}
)
self.assert_console_output(
'common/a:foo',
'common/b:foo',
'common/c:foo',
'overlaps:foo',
targets=self.targets('::'),
options={'type': ['-python_library']}
)
self.assert_console_output(
'common/a:a',
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
'overlaps:one',
'overlaps:two',
'overlaps:three',
'overlaps:foo',
targets=self.targets('::'),
# Note that the comma is inside the string, so these are ORed.
options={'type': ['python_requirement_library,python_library']}
)
def test_filter_multiple_types(self):
# A target can only have one type, so the output should be empty.
self.assert_console_output(
targets=self.targets('::'),
options={'type': ['python_requirement_library', 'python_library']}
)
def test_filter_target(self):
self.assert_console_output(
'common/a:a',
'overlaps:foo',
targets=self.targets('::'),
options={'target': ['common/a,overlaps/:foo']}
)
self.assert_console_output(
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
'overlaps:two',
'overlaps:three',
targets=self.targets('::'),
options={'target': ['-common/a:a,overlaps:one,overlaps:foo']}
)
def test_filter_ancestor(self):
self.assert_console_output(
'common/a:a',
'common/a:foo',
'common/b:b',
'common/b:foo',
'overlaps:one',
'overlaps:foo',
targets=self.targets('::'),
options={'ancestor': ['overlaps:one,overlaps:foo']}
)
self.assert_console_output(
'common/c:c',
'common/c:foo',
'overlaps:two',
'overlaps:three',
targets=self.targets('::'),
options={'ancestor': ['-overlaps:one,overlaps:foo']}
)
def test_filter_ancestor_out_of_context(self):
"""Tests that targets outside of the context used as filters are parsed before use."""
# Add an additional un-injected target, and then use it as a filter.
self.add_to_build_file("blacklist", "target(name='blacklist', dependencies=['common/a'])")
self.assert_console_output(
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
'overlaps:one',
'overlaps:two',
'overlaps:three',
'overlaps:foo',
targets=self.targets('::'),
options={'ancestor': ['-blacklist']}
)
def test_filter_ancestor_not_passed_targets(self):
"""Tests filtering targets based on an ancestor not in that list of targets."""
# Add an additional un-injected target, and then use it as a filter.
self.add_to_build_file("blacklist", "target(name='blacklist', dependencies=['common/a'])")
self.assert_console_output(
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
targets=self.targets('common/::'), # blacklist is not in the list of targets
options={'ancestor': ['-blacklist']}
)
self.assert_console_output(
'common/a:a', # a: _should_ show up if we don't filter.
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
targets=self.targets('common/::'),
options={'ancestor': []}
)
def test_filter_regex(self):
self.assert_console_output(
'common/a:a',
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
targets=self.targets('::'),
options={'regex': ['^common']}
)
self.assert_console_output(
'common/a:foo',
'common/b:foo',
'common/c:foo',
'overlaps:one',
'overlaps:two',
'overlaps:three',
'overlaps:foo',
targets=self.targets('::'),
options={'regex': ['+foo,^overlaps']}
)
self.assert_console_output(
'overlaps:one',
'overlaps:two',
'overlaps:three',
targets=self.targets('::'),
options={'regex': ['-^common,foo$']}
)
# Invalid regex.
self.assert_console_raises(TaskError,
targets=self.targets('::'),
options={'regex': ['abc)']}
)
def test_filter_tag_regex(self):<|fim▁hole|> options={'tag_regex': ['+e(?=e)']}
)
# Removals.
self.assert_console_output(
'common/a:a',
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
'overlaps:foo',
'overlaps:three',
targets=self.targets('::'),
options={'tag_regex': ['-one|two']}
)
# Invalid regex.
self.assert_console_raises(TaskError,
targets=self.targets('::'),
options={'tag_regex': ['abc)']}
)
def test_filter_tag(self):
# One match.
self.assert_console_output(
'common/a:a',
targets=self.targets('::'),
options={'tag': ['+a_tag']}
)
# Two matches.
self.assert_console_output(
'common/a:a',
'common/b:b',
targets=self.targets('::'),
options={'tag': ['+a_tag,b_tag']}
)
# One removal.
self.assert_console_output(
'common/a:a',
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
'overlaps:foo',
'overlaps:two',
'overlaps:three',
targets=self.targets('::'),
options={'tag': ['-one_tag']}
)
# Two removals.
self.assert_console_output(
'common/a:a',
'common/a:foo',
'common/b:b',
'common/b:foo',
'common/c:c',
'common/c:foo',
'overlaps:foo',
'overlaps:three',
targets=self.targets('::'),
options={'tag': ['-one_tag,two_tag']}
)
# No match.
self.assert_console_output(
targets=self.targets('::'),
options={'tag': ['+abcdefg_tag']}
)
# No match due to AND of separate predicates.
self.assert_console_output(
targets=self.targets('::'),
options={'tag': ['a_tag', 'b_tag']}
)<|fim▁end|> | # Filter two.
self.assert_console_output(
'overlaps:three',
targets=self.targets('::'), |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Scrapy settings for miner project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'miner'
SPIDER_MODULES = ['miner.spiders']
NEWSPIDER_MODULE = 'miner.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'miner (+http://www.yourdomain.com)'
ITEM_PIPELINES = {<|fim▁hole|> BOT_NAME+'.pipelines.BXJPostDigestMongoPipeline': 303,
}
MONGODB_URI = "localhost"
MONGODB_PORT = 27017
MONGODB_DB = "HupuMiner"<|fim▁end|> | BOT_NAME+'.pipelines.BXJDailyPostDigestPipeline': 300,
BOT_NAME+'.pipelines.BXJPostDigestDuplicatesPipeline': 301,
BOT_NAME+'.pipelines.BXJPostDigestJsonWirterPipeline': 302, |
<|file_name|>exercise2_60.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
https://mitpress.mit.edu/sicp/full-text/book/book-Z-H-16.html#%_thm_2.60
"""
from Chapter2.themes.lisp_list_structured_data import car, cdr, cons, lisp_list, nil, print_lisp_list
from Chapter2.themes.sequences_as_conventional_interfaces import accumulate
def element_of_set(x, set):
"""Tests if x is element of set with a representation of sets that allows duplicates"""
if set is nil:
return False
if x == car(set):
return True
return element_of_set(x, cdr(set))
def adjoin_set(x, set):
"""Adds x to set"""
return cons(x, set)
def union_set(set1, set2):
"""Computes union of set1 and set2"""
return accumulate(adjoin_set, set2, set1)
def intersection_set(set1, set2):<|fim▁hole|> return cons(car(set1), intersection_set(cdr(set1), set2))
return intersection_set(cdr(set1), set2)
def run_the_magic():
s1 = lisp_list(2, 3, 2, 1, 3, 2, 2)
s2 = lisp_list(1, 1, 3)
s3 = lisp_list(1, 2, 3)
print(element_of_set(3, s1))
print_lisp_list(adjoin_set(4, s1))
print_lisp_list(intersection_set(s1, s2))
print_lisp_list(union_set(s1, s2))
from timeit import Timer
t1_element_of = Timer(stmt='element_of_set(3, %(s1)s)' % locals(),
setup='from Chapter2.exercise2_60 import element_of_set')
t2_element_of = Timer(stmt='element_of_set(3, %(s1)s)' % locals(),
setup='from Chapter2.sets_as_unordered_lists import element_of_set')
t1_adjoin = Timer(stmt='adjoin_set(4, %(s1)s)' % locals(), setup='from Chapter2.exercise2_60 import adjoin_set')
t2_adjoin = Timer(stmt='adjoin_set(4, %(s3)s)' % locals(),
setup='from Chapter2.sets_as_unordered_lists import adjoin_set')
t1_intersection = Timer(stmt='intersection_set(%(s1)s, %(s2)s)' % locals(),
setup='from Chapter2.exercise2_60 import intersection_set')
t2_intersection = Timer(stmt='intersection_set(%(s1)s, %(s3)s)' % locals(),
setup='from Chapter2.sets_as_unordered_lists import intersection_set')
t1_union = Timer(stmt='union_set(%(s1)s, %(s2)s)' % locals(),
setup='from Chapter2.exercise2_60 import union_set')
t2_union = Timer(stmt='union_set(%(s1)s, %(s2)s)' % locals(),
setup='from Chapter2.exercise2_59 import union_set')
header = '-----------Timing for *%s* operation'
def do_timing(timer1, timer2, op_name):
print(header % op_name)
t1 = timer1.timeit()
t2 = timer2.timeit()
print('-> With duplicate: %s' % t1)
print('-> Without duplicate: %s' % t2)
do_timing(t1_element_of, t2_element_of, 'element_of_set')
do_timing(t1_adjoin, t2_adjoin, 'adjoin_set')
do_timing(t2_intersection, t2_intersection, 'intersection_set')
do_timing(t1_union, t2_union, 'union_set')
print('The representation using unordered list with duplicates is better suited for applications where there are '
'many insertions in the data structure')
if __name__ == "__main__":
run_the_magic()<|fim▁end|> | """Computes intersection of set1 and set2"""
if set1 is nil or set2 is nil:
return nil
if element_of_set(car(set1), set2): |
<|file_name|>test-until.ts<|end_file_name|><|fim▁begin|>// Source file from duniter: Crypto-currency software to manage libre currency such as Ğ1
// Copyright (C) 2018 Cedric Moreau <[email protected]>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
import {TestingServer} from "./toolbox"
const UNTIL_TIMEOUT = 115000;
export function until(server:TestingServer, eventName:string, count:number) {
let counted = 0;
const max = count == undefined ? 1 : count;
return new Promise(function (resolve, reject) {
let finished = false;
server._server.on(eventName, function () {
counted++;
if (counted == max) {
if (!finished) {
finished = true;
resolve();<|fim▁hole|> });
setTimeout(function() {
if (!finished) {
finished = true;
reject('Received ' + counted + '/' + count + ' ' + eventName + ' after ' + UNTIL_TIMEOUT + ' ms');
}
}, UNTIL_TIMEOUT);
});
}<|fim▁end|> | }
} |
<|file_name|>test_nmf.py<|end_file_name|><|fim▁begin|>import numpy as np
from scipy import linalg
from sklearn.decomposition import nmf
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import raises
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
random_state = np.random.mtrand.RandomState(0)
@raises(ValueError)
def test_initialize_nn_input():
# Test NNDSVD behaviour on negative input
nmf._initialize_nmf(-np.ones((2, 2)), 2)
def test_initialize_nn_output():
# Test that NNDSVD does not return negative values
data = np.abs(random_state.randn(10, 10))
for var in (None, 'a', 'ar'):
W, H = nmf._initialize_nmf(data, 10, random_state=0)
assert_false((W < 0).any() or (H < 0).any())
def test_initialize_close():
# Test NNDSVD error
# Test that _initialize_nmf error is less than the standard deviation of
# the entries in the matrix.
A = np.abs(random_state.randn(10, 10))
W, H = nmf._initialize_nmf(A, 10)
error = linalg.norm(np.dot(W, H) - A)
sdev = linalg.norm(A - A.mean())
assert_true(error <= sdev)
def test_initialize_variants():
# Test NNDSVD variants correctness
# Test that the variants 'a' and 'ar' differ from basic NNDSVD only where
# the basic version has zeros.
data = np.abs(random_state.randn(10, 10))
W0, H0 = nmf._initialize_nmf(data, 10, variant=None)
Wa, Ha = nmf._initialize_nmf(data, 10, variant='a')
War, Har = nmf._initialize_nmf(data, 10, variant='ar', random_state=0)
for ref, evl in ((W0, Wa), (W0, War), (H0, Ha), (H0, Har)):
assert_true(np.allclose(evl[ref != 0], ref[ref != 0]))
@raises(ValueError)
def test_projgrad_nmf_fit_nn_input():
# Test model fit behaviour on negative input
A = -np.ones((2, 2))
m = nmf.ProjectedGradientNMF(n_components=2, init=None, random_state=0)
m.fit(A)
def test_projgrad_nmf_fit_nn_output():
# Test that the decomposition does not contain negative values
A = np.c_[5 * np.ones(5) - np.arange(1, 6),
5 * np.ones(5) + np.arange(1, 6)]
for init in (None, 'nndsvd', 'nndsvda', 'nndsvdar'):
model = nmf.ProjectedGradientNMF(n_components=2, init=init,
random_state=0)
transf = model.fit_transform(A)
assert_false((model.components_ < 0).any() or
(transf < 0).any())
def test_projgrad_nmf_fit_close():
# Test that the fit is not too far away
pnmf = nmf.ProjectedGradientNMF(5, init='nndsvda', random_state=0)
X = np.abs(random_state.randn(6, 5))
assert_less(pnmf.fit(X).reconstruction_err_, 0.05)
def test_nls_nn_output():
# Test that NLS solver doesn't return negative values<|fim▁hole|> A = np.arange(1, 5).reshape(1, -1)
Ap, _, _ = nmf._nls_subproblem(np.dot(A.T, -A), A.T, A, 0.001, 100)
assert_false((Ap < 0).any())
def test_nls_close():
# Test that the NLS results should be close
A = np.arange(1, 5).reshape(1, -1)
Ap, _, _ = nmf._nls_subproblem(np.dot(A.T, A), A.T, np.zeros_like(A),
0.001, 100)
assert_true((np.abs(Ap - A) < 0.01).all())
def test_projgrad_nmf_transform():
# Test that NMF.transform returns close values
# (transform uses scipy.optimize.nnls for now)
A = np.abs(random_state.randn(6, 5))
m = nmf.ProjectedGradientNMF(n_components=5, init='nndsvd', random_state=0)
transf = m.fit_transform(A)
assert_true(np.allclose(transf, m.transform(A), atol=1e-2, rtol=0))
def test_n_components_greater_n_features():
# Smoke test for the case of more components than features.
A = np.abs(random_state.randn(30, 10))
nmf.ProjectedGradientNMF(n_components=15, sparseness='data',
random_state=0).fit(A)
def test_projgrad_nmf_sparseness():
# Test sparseness
# Test that sparsity constraints actually increase sparseness in the
# part where they are applied.
A = np.abs(random_state.randn(10, 10))
m = nmf.ProjectedGradientNMF(n_components=5, random_state=0).fit(A)
data_sp = nmf.ProjectedGradientNMF(n_components=5, sparseness='data',
random_state=0).fit(A).data_sparseness_
comp_sp = nmf.ProjectedGradientNMF(n_components=5, sparseness='components',
random_state=0).fit(A).comp_sparseness_
assert_greater(data_sp, m.data_sparseness_)
assert_greater(comp_sp, m.comp_sparseness_)
def test_sparse_input():
# Test that sparse matrices are accepted as input
from scipy.sparse import csc_matrix
A = np.abs(random_state.randn(10, 10))
A[:, 2 * np.arange(5)] = 0
T1 = nmf.ProjectedGradientNMF(n_components=5, init='random',
random_state=999).fit_transform(A)
A_sparse = csc_matrix(A)
pg_nmf = nmf.ProjectedGradientNMF(n_components=5, init='random',
random_state=999)
T2 = pg_nmf.fit_transform(A_sparse)
assert_array_almost_equal(pg_nmf.reconstruction_err_,
linalg.norm(A - np.dot(T2, pg_nmf.components_),
'fro'))
assert_array_almost_equal(T1, T2)
# same with sparseness
T2 = nmf.ProjectedGradientNMF(
n_components=5, init='random', sparseness='data',
random_state=999).fit_transform(A_sparse)
T1 = nmf.ProjectedGradientNMF(
n_components=5, init='random', sparseness='data',
random_state=999).fit_transform(A)
def test_sparse_transform():
# Test that transform works on sparse data. Issue #2124
from scipy.sparse import csc_matrix
A = np.abs(random_state.randn(5, 4))
A[A > 1.0] = 0
A = csc_matrix(A)
model = nmf.NMF()
A_fit_tr = model.fit_transform(A)
A_tr = model.transform(A)
# This solver seems pretty inconsistent
assert_array_almost_equal(A_fit_tr, A_tr, decimal=2)
if __name__ == '__main__':
import nose
nose.run(argv=['', __file__])<|fim▁end|> | |
<|file_name|>WSModificationRepository.java<|end_file_name|><|fim▁begin|>package com.compomics.pride_asa_pipeline.core.repository.impl.webservice;
import com.compomics.pride_asa_pipeline.core.model.modification.source.PRIDEModificationFactory;
import com.compomics.pride_asa_pipeline.core.model.modification.impl.AsapModificationAdapter;
import com.compomics.pride_asa_pipeline.core.repository.ModificationRepository;
import com.compomics.pride_asa_pipeline.model.Modification;
import com.compomics.util.pride.PrideWebService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import uk.ac.ebi.pride.archive.web.service.model.assay.AssayDetail;
/**
*
* @author Kenneth Verheggen
*/
public class WSModificationRepository implements ModificationRepository {
private static final Logger LOGGER = Logger.getLogger(WSModificationRepository.class);
@Override
public List<Modification> getModificationsByPeptideId(long peptideId) {
throw new UnsupportedOperationException("Currently not supported through the webservice");
<|fim▁hole|>
@Override
public List<Modification> getModificationsByExperimentId(String experimentId) {
LOGGER.debug("Loading modifications for experimentid " + experimentId);
List<Modification> modifications = new ArrayList<>();
AsapModificationAdapter adapter = new AsapModificationAdapter();
try {
AssayDetail assayDetail = PrideWebService.getAssayDetail(String.valueOf(experimentId));
for (String aPtmName : assayDetail.getPtmNames()) {
PRIDEModificationFactory.getInstance().getModification(adapter, aPtmName);
}
LOGGER.debug("Finished loading modifications for pride experiment with id " + experimentId);
return modifications;
} catch (IOException ex) {
LOGGER.error(ex);
}
return modifications;
}
}<|fim▁end|> | }
|
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>pub fn parse_line(line: &str) -> Vec<String> {
let mut vec = Vec::with_capacity(32);
let mut tmp_str = String::with_capacity(line.len());
let mut in_quotes = false;
for c in line.chars() {
match c {
' ' => {
if in_quotes {
tmp_str.push(' ');
continue;
}
if !tmp_str.is_empty() {
vec.push(tmp_str);
}
tmp_str = String::with_capacity(line.len());
}
'"' => {
in_quotes = !in_quotes;
}
_ => {
tmp_str.push(c);
}
}
}
if !tmp_str.is_empty() {
vec.push(tmp_str);
}
vec
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_single_arg() {
let args = parse_line("abc");
assert_eq!(args.len(), 1);
assert_eq!(args[0], "abc");
let args = parse_line("abc ");
assert_eq!(args.len(), 1);
assert_eq!(args[0], "abc");
let args = parse_line("abc ");
assert_eq!(args.len(), 1);
assert_eq!(args[0], "abc");
}
#[test]
fn parse_two_args() {
let args = parse_line("abc 123");
assert_eq!(args.len(), 2);
assert_eq!(args[0], "abc");
assert_eq!(args[1], "123");
let args = parse_line("abc 123");
assert_eq!(args.len(), 2);
assert_eq!(args[0], "abc");
assert_eq!(args[1], "123");
}
#[test]
fn parse_multiargs() {
let args = parse_line("abc 123 def");
assert_eq!(args.len(), 3);
assert_eq!(args[0], "abc");
assert_eq!(args[1], "123");
assert_eq!(args[2], "def");
let args = parse_line("abc 123 def 456");
assert_eq!(args.len(), 4);
assert_eq!(args[0], "abc");
assert_eq!(args[1], "123");
assert_eq!(args[2], "def");
assert_eq!(args[3], "456");
}
#[test]
fn parse_quotes() {
let args = parse_line("\"abc 123\" def");
assert_eq!(args.len(), 2);
assert_eq!(args[0], "abc 123");
assert_eq!(args[1], "def");
let args = parse_line("abc\" \"123 def");
assert_eq!(args.len(), 2);
assert_eq!(args[0], "abc 123");
assert_eq!(args[1], "def");
let args = parse_line("\" \"123 def");<|fim▁hole|> assert_eq!(args.len(), 2);
assert_eq!(args[0], " 123");
assert_eq!(args[1], "def");
let args = parse_line("\" abc");
assert_eq!(args.len(), 1);
assert_eq!(args[0], " abc");
}
}<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate serde;
#[macro_use]
extern crate serde_derive;
mod gen;
pub use gen::*;
<|fim▁hole|> use super::*;
#[test]
fn get_perms() {
Role::all_roles();
}
}<|fim▁end|> | #[cfg(test)]
mod tests { |
<|file_name|>shortener.cpp<|end_file_name|><|fim▁begin|>/****************************************************************
* This file is distributed under the following license:
*
* Copyright (C) 2010, Bernd Stramm
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
****************************************************************/
#include <QRegExp>
#include <QString>
#include <QStringList>
#include "shortener.h"
#include "network-if.h"
namespace chronicon {
Shortener::Shortener (QObject *parent)
:QObject (parent),
network (0)
{
}
void
Shortener::SetNetwork (NetworkIF * net)
{
network = net;
connect (network, SIGNAL (ShortenReply (QUuid, QString, QString, bool)),
this, SLOT (CatchShortening (QUuid, QString, QString, bool)));
}
void
Shortener::ShortenHttp (QString status, bool & wait)
{
if (network == 0) {
emit DoneShortening (status);
return;
}
QRegExp regular ("(https?://)(\\S*)");
status.append (" ");
QStringList linkList;
QStringList wholeList;
int where (0), offset(0), lenSub(0);
QString link, beforeLink;
while ((where = regular.indexIn (status,offset)) > 0) {
lenSub = regular.matchedLength();
beforeLink = status.mid (offset, where - offset);
link = regular.cap(0);
if ((!link.contains ("bit.ly"))
&& (!link.contains ("twitpic.com"))
&& (link.length() > QString("http://bit.ly/12345678").length())) {
linkList << link;
}
wholeList << beforeLink;
wholeList << link;
offset = where + lenSub;
}
wholeList << status.mid (offset, -1);
shortenTag = QUuid::createUuid();
if (linkList.isEmpty ()) {
wait = false;
} else {
messageParts[shortenTag] = wholeList;
linkParts [shortenTag] = linkList;
network->ShortenHttp (shortenTag,linkList);
wait = true;
}
}
void
Shortener::CatchShortening (QUuid tag, QString shortUrl, QString longUrl, bool good)
{
/// replace the longUrl with shortUrl in the messageParts[tag]
// remove the longUrl from the linkParts[tag]
// if the linkParts[tag] is empty, we have replaced all the links
// so send append all the messageParts[tag] and finish the message
if (messageParts.find(tag) == messageParts.end()) {
return; // extra, perhaps duplicates in original, or not for me
}
if (linkParts.find(tag) == linkParts.end()) {
return;
}
QStringList::iterator chase;
for (chase = messageParts[tag].begin();
chase != messageParts[tag].end();
chase++) {
if (*chase == longUrl) {
*chase = shortUrl;
}
}
linkParts[tag].removeAll (longUrl);
if (linkParts[tag].isEmpty()) {
QString message = messageParts[tag].join (QString());<|fim▁hole|>
} // namespace<|fim▁end|> | emit DoneShortening (message);
messageParts.erase (tag);
}
} |
<|file_name|>Transformer.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<|fim▁hole|>
/**
*
* <p>This api is experimental and thus the classes and the interfaces returned are subject to change.</p>
*/
public interface Transformer
extends
Emitter,
Receiver,
Stage {
}<|fim▁end|> | package org.drools.runtime.pipeline;
|
<|file_name|>apple_ios_base.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::old_io::{Command, IoError, OtherIoError};
use target::TargetOptions;
use self::Arch::*;
#[allow(non_camel_case_types)]
#[derive(Copy)]
pub enum Arch {
Armv7,
Armv7s,
Arm64,
I386,
X86_64
}
impl Arch {
pub fn to_string(&self) -> &'static str {
match self {
&Armv7 => "armv7",
&Armv7s => "armv7s",
&Arm64 => "arm64",
&I386 => "i386",
&X86_64 => "x86_64"
}
}
}
pub fn get_sdk_root(sdk_name: &str) -> String {
let res = Command::new("xcrun")
.arg("--show-sdk-path")
.arg("-sdk")
.arg(sdk_name)
.spawn()
.and_then(|c| c.wait_with_output())
.and_then(|output| {
if output.status.success() {
Ok(String::from_utf8(output.output).unwrap())
} else {
Err(IoError {
kind: OtherIoError,
desc: "process exit with error",
detail: String::from_utf8(output.error).ok()})
}
});
match res {
Ok(output) => output.trim().to_string(),
Err(e) => panic!("failed to get {} SDK path: {}", sdk_name, e)
}
}
fn pre_link_args(arch: Arch) -> Vec<String> {
let sdk_name = match arch {
Armv7 | Armv7s | Arm64 => "iphoneos",
I386 | X86_64 => "iphonesimulator"
};
let arch_name = arch.to_string();
vec!["-arch".to_string(), arch_name.to_string(),
"-Wl,-syslibroot".to_string(), get_sdk_root(sdk_name)]
}
fn target_cpu(arch: Arch) -> String {
match arch {
Armv7 => "cortex-a8", // iOS7 is supported on iPhone 4 and higher
Armv7s => "cortex-a9",
Arm64 => "cyclone",
I386 => "generic",
X86_64 => "x86-64",
}.to_string()
}
pub fn opts(arch: Arch) -> TargetOptions {
TargetOptions {
cpu: target_cpu(arch),
dynamic_linking: false,
executables: true,
// Although there is an experimental implementation of LLVM which
// supports SS on armv7 it wasn't approved by Apple, see:
// http://lists.cs.uiuc.edu/pipermail/llvm-commits/Week-of-Mon-20140505/216350.html
// It looks like it might be never accepted to upstream LLVM.
//
// SS might be also enabled on Arm64 as it has builtin support in LLVM
// but I haven't tested it through yet
morestack: false,
pre_link_args: pre_link_args(arch),
.. super::apple_base::opts()
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>dropdown.js<|end_file_name|><|fim▁begin|>;(function () {
var defaults = {
"wrap": "this",//下拉列表用什么包裹,"this"为按钮自身,主流的插件放在"body"中
"data": null,
"formName": "",
"zIndex": 999
};
function Dropdown(opt) {
var $this = $(this),
id = $this.attr("id"),
height = $this.outerHeight(),
name = !!opt.formName ? opt.formName : id,
liArray = [],
aTop = height / 2 - 3;
$this.css("position", "relative").append("<i class='dropdown_arrow' style='top:" + aTop + "px; right:10px'></i>");
if ($this.find("span").length < 1) {
$this.append("<span></span>");
}
$this.append("<input type='hidden' name='" + name + "'>");
if ($(".dropdown_menu[data-target='#" + id + "']").length < 1 && !!opt.data) {
var wrap;
if (opt.wrap == "this") {
wrap = $this
} else {
wrap = $(opt.wrap)
}
wrap.append("<ul class='dropdown_menu' data-target='#" + id + "'></ul>")
}
var $menu = $(".dropdown_menu[data-target='#" + id + "']").hide().css({"position": "absolute", "zIndex": opt.zIndex});
if (!!opt.data) {
for (var i = 0; i < opt.data.length; i++) {
liArray.push("<li data-text='" + opt.data[i].text + "' data-value='" + opt.data[i].value + "' >" + opt.data[i].text + "</li>");
}
$menu.html(liArray.join(""));
}
BindEvents($this, $menu, true);
}
function Reload(data) {
var $this = $(this),
id = $this.attr("id"),
$menu = $(".dropdown_menu[data-target='#" + id + "']"),
liArray = [];
if (typeof data != "object") {
return
}
for (var i = 0; i < data.length; i++) {
liArray.push("<li data-text='" + data[i].text + "' data-value='" + data[i].value + "' >" + data[i].text + "</li>");
}
$menu.html(liArray.join(""));
BindEvents($this, $menu, false);
}
function setValue(text, value) {
var $this = $(this);
$this.data("value", value).find("span:eq(0)").html(text).end().find("input:hidden").val(value);
}
function setDisable (val){
}
function BindEvents($this, $menu, init) {
if(init){
$("body").on("click",function(){
$menu.hide();
});
}
$this.off("click").on("click", function () {
var width = $this.outerWidth(),
height = $this.outerHeight(),
offset = $this.offset();
var border = parseInt($menu.css("borderLeftWidth")) + parseInt($menu.css("borderRightWidth"));<|fim▁hole|> var padding = parseInt($menu.css("paddingLeft")) + parseInt($menu.css("paddingRight"));
$menu.css({"width": (width - border - padding), "top": offset.top + height, "left": offset.left}).show();
$this.trigger("dropdown.show");
return false;
});
$menu.delegate("li", "click", function () {
var text = $(this).data("text");
var value = $(this).data("value");
setValue.call($this[0], text, value);
$this.trigger("dropdown.set", [text, value]);
$menu.hide();
}).delegate("li", "mouseenter", function () {
$(this).addClass("cursor").siblings("li").removeClass("cursor");
}).mouseleave(function () {
$menu.hide();
});
}
//初始化下拉组件的方法
$.fn.dropdown = function (opt) {
var opts = defaults;
$.extend(true, opts, opt);
return this.each(function () {
Dropdown.call(this, opts);
});
};
//重载下拉框数据的方法
$.fn.menuReload = function (data) {
return this.each(function () {
Reload.call(this, data);
});
};
//设置下拉框的值
$.fn.setValue = function (data) {
return this.each(function () {
setValue.call(this, data.text, data.value);
});
};
$.fn.setDisable = function (val) {
return this.each(function () {
setDisable.call(this, val);
});
};
})();<|fim▁end|> | |
<|file_name|>VisualizerCanvasPoincare.cpp<|end_file_name|><|fim▁begin|>#include "VisualizerCanvasPoincare.h"
#include <QMouseEvent>
#include <assert.h>
#include "VisualizerContext3D.h"
#include <cmath>
#include <limits>
#include <boost/foreach.hpp>
#include <algorithm>
#define pi 3.14159256
double log(double,int){
assert(false);
return 0;
}
VisualizerCanvasPoincare::VisualizerCanvasPoincare(QWidget* parent, VisualizerContext3D* context)
: VisualizerCanvas(parent, context), m_context(context), m_sphere_model(0), m_data_models(0)
{
setBackgroundColor(1.f, 1.f, 1.f);
m_mode = NAVIGATION;
snapViewAroundData();
}
VisualizerCanvasPoincare::~VisualizerCanvasPoincare(void)
{
deleteModels();
}
void VisualizerCanvasPoincare::refresh() {
deleteModels();
}
void VisualizerCanvasPoincare::setXAxisDescription(string desc) { m_xAxisDescription = desc; }
void VisualizerCanvasPoincare::setYAxisDescription(string desc) { m_yAxisDescription = desc; }
void VisualizerCanvasPoincare::setZAxisDescription(string desc) { m_zAxisDescription = desc; }
void VisualizerCanvasPoincare::addData(VisualizerData* data) {
makeCurrent();
VisualizerData3D* data_3d = dynamic_cast<VisualizerData3D*>( data );
assert( data_3d );
VisualizerCanvas::addData( data_3d );
}
void VisualizerCanvasPoincare::clearData() {
deleteModels();
VisualizerCanvas::clearData();
}
void VisualizerCanvasPoincare::initializeGL() {
glEnable(GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
}
void VisualizerCanvasPoincare::resizeGL(int width, int height) {
makeCurrent();
glViewport(0,0, width, height);
setViewport();
}
void VisualizerCanvasPoincare::paintGL() {
clearCanvas();
setViewport();
paintSphere();
paintData();
paintAxisLabels();
}
void VisualizerCanvasPoincare::setViewport() {
makeCurrent();
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(-m_viewport.width / 2.0f , m_viewport.width / 2.0f ,
-m_viewport.height / 2.0f, m_viewport.height / 2.0f,
0.0, 10.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef( m_translation.x, m_translation.y, -1.5f );
glRotatef( m_rotation.x, 1.0f, 0.0f, 0.0f );
glRotatef( m_rotation.y, 0.0f, 1.0f, 0.0f );
// Kippen der gesamten Kugel um 90 Grad um s1, damit die Darstellung derjenigen von MATLAB
// gleicht:
glRotatef( -90.0f, 1.0f, 0.0f, 0.0f );
}
void VisualizerCanvasPoincare::clearCanvas() {
makeCurrent();
glClearColor(getBackgroundColor().red, getBackgroundColor().green, getBackgroundColor().blue, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
}
void VisualizerCanvasPoincare::paintSphere() {
makeCurrent();
if (!m_sphere_model) m_sphere_model = createSphereModel();
glCallList(m_sphere_model);
}
void VisualizerCanvasPoincare::paintData() {
makeCurrent();
if ( m_data_models.empty() ) {
BOOST_FOREACH( VisualizerData* data, getData() ) {
m_data_models.push_back( createDataModel( data ) );
}
}
BOOST_FOREACH( GLuint model, m_data_models ) glCallList(model);
}
void VisualizerCanvasPoincare::paintAxisLabels() {
glColor4f(0.0, 0.0, 0.0, 1.0);
const double dist = 1.23f;
const double off = 0.02 * dist;
QFont font;
font.setPixelSize( 51.0f / m_viewport.height );
renderText(dist, off, off, "s1", font);
renderText( off, dist, off, "s2", font);
renderText( off, off, dist, "s3", font);
}
GLuint VisualizerCanvasPoincare::createSphereModel() {
makeCurrent();
const double sphere_radius = 1.0f; // Radius der Kugel beträgt 1.<|fim▁hole|>
glNewList(sphere, GL_COMPILE);
glEnable(GL_BLEND);
glColor4f( 1-getBackgroundColor().red+0.6,
1-getBackgroundColor().green+0.6,
1-getBackgroundColor().blue+0.6,
0.8f );
glLineStipple(3, 0xAAAA);
glEnable(GL_LINE_STIPPLE);
for ( double phi = delta_phi; phi < pi; phi += delta_phi ) {
double z_pos = sphere_radius * cos(phi);
double radius = sphere_radius * sin(phi);
drawCircle( Point(0.0f, 0.0f, z_pos), radius, Point(1.0f, 0.0f, 0.0f), 0.0f );
}
glPushMatrix();
glRotatef(90.0, 1.0, 0.0, 0.0);
for ( double phi = 0; phi < pi; phi += delta_phi ) {
drawCircle( Point(0.0f, 0.0, 0.0f), sphere_radius, Point(0.0f, 1.0f, 0.0f), phi / pi * 180.0 );
}
glPopMatrix();
glDisable(GL_LINE_STIPPLE);
// Zeichne die Achsen ein:
drawAxes();
glDisable(GL_BLEND);
glEndList();
return sphere;
}
void VisualizerCanvasPoincare::drawAxes() {
drawAxis( Axis( Angle( 0.0, 0.0), RGBAColor(0.0, 0.0, 0.0, 1.0) ) );
drawAxis( Axis( Angle(-90.0, 0.0), RGBAColor(0.0, 0.0, 0.0, 1.0) ) );
drawAxis( Axis( Angle( 0.0, 90.0), RGBAColor(0.0, 0.0, 0.0, 1.0) ) );
}
void VisualizerCanvasPoincare::drawAxis( const Axis& a ) {
makeCurrent();
glPushMatrix();
glRotatef(a.direction.theta, 0.0, 1.0, 0.0);
glRotatef(a.direction.phi, 0.0, 0.0, 1.0);
const double length = 1.18f;
glColor4f(a.color.r, a.color.g, a.color.b, a.color.a);
glBegin(GL_LINES);
glVertex3f( 0.0f, 0.0f, 0.0f);
glVertex3f(length, 0.0f, 0.0f);
glEnd();
glPushMatrix();
glTranslatef(length, 0.0, 0.0);
const double width = 0.03;
glBegin(GL_TRIANGLES);
glVertex3f(-width * 3, 0.0, -width);
glVertex3f(-width * 3, 0.0, width);
glVertex3f(0.0, 0.0, 0.0);
glEnd();
glRotatef(90, 1.0, 0.0, 0.0);
glBegin(GL_TRIANGLES);
glVertex3f(-width * 3, 0.0, -width);
glVertex3f(-width * 3, 0.0, width);
glVertex3f(0.0, 0.0, 0.0);
glEnd();
glPopMatrix();
glPopMatrix();
}
GLuint VisualizerCanvasPoincare::createDataModel( VisualizerData* raw_data ) {
makeCurrent();
VisualizerData3D* data = dynamic_cast<VisualizerData3D*>( raw_data );
assert( data );
GLuint data_model = glGenLists(1);
glNewList(data_model, GL_COMPILE);
glEnable(GL_BLEND);
if ( data->isVisible() ) {
switch ( data->getStyle() ) {
case VisualizerData::LINE:
case VisualizerData::DASHED_LINE:
case VisualizerData::DASHED_DOTTED_LINE:
drawDataAsLines(data); break;
case VisualizerData::CROSS:
case VisualizerData::SQUARE:
drawDataAsCrosses(data); break;
default: assert(false);
}
}
glDisable(GL_BLEND);
glEndList();
return data_model;
}
vector<double> VisualizerCanvasPoincare::getAlphaValues( VisualizerData3D* data ) {
vector<double> intensities;
intensities.resize( data->getData().size(), 1.0f );
if ( data->hasAnnotation("Intensity") && data->isTransparent() ) {
intensities = data->getAnnotation("Intensity");
double highest = -std::numeric_limits<double>::infinity();
double lowest = std::numeric_limits<double>::infinity();
for (uint i = 0; i < intensities.size(); ++i) {
if ( intensities[i] > highest ) highest = intensities[i];
if ( intensities[i] < lowest ) lowest = intensities[i];
}
lowest = log(lowest + 1.0f , 10);
highest = log(highest + 1.0f, 10);
if ( (highest - lowest) / highest < 1e-6 ) { // Im Falle, das alle Intensitäten (nahezu) gleich sind...
return vector<double>( data->getData().size(), 1.0f ); // Gleichverteilung zurückgeben, denn die Intensitäten sind ja auch gleichverteilt.
}
for (uint i = 0; i < intensities.size(); ++i) {
intensities[i] = ( log( intensities[i] + 1.0f , 10 ) - lowest ) / (highest - lowest);
if ( intensities[i] < 0.10 ) {
intensities[i] = 0.0;
}
}
}
return intensities;
}
void VisualizerCanvasPoincare::drawDataAsLines( VisualizerData3D* data ) {
if (data->getStyle() == VisualizerData::DASHED_LINE) {
glLineStipple (1, 0xAAAA);
glEnable(GL_LINE_STIPPLE);
}
if (data->getStyle() == VisualizerData::DASHED_DOTTED_LINE) {
glLineStipple (1, 0x1C47);
glEnable(GL_LINE_STIPPLE);
}
vector<double> alpha = getAlphaValues(data);
glBegin( GL_LINE_STRIP );
for (uint i = 0; i < data->getData().size(); ++i) {
VisualizerData3D::Point p = data->getData()[i];
glColor4f(data->getColorRed(),data->getColorGreen(),data->getColorBlue(),alpha[i]);
glVertex3f( p.x, p.y, p.z );
}
glEnd();
if ( glIsEnabled( GL_LINE_STIPPLE ) ) glDisable(GL_LINE_STIPPLE);
}
void VisualizerCanvasPoincare::drawDataAsCrosses( VisualizerData3D* data ) {
const double cross = 3.0f / 100.0f;
vector<double> alpha = getAlphaValues(data);
if ( data->isBold() ) glLineWidth( 5.0f );
for (uint i = 0; i < data->getData().size(); ++i) {
VisualizerData3D::Point p = data->getData()[i];
glBegin( GL_LINES );
glColor4f(data->getColorRed(),data->getColorGreen(),data->getColorBlue(),alpha[i]);
glVertex3f( p.x - cross / 2.0, p.y, p.z );
glVertex3f( p.x + cross / 2.0, p.y, p.z );
glVertex3f( p.x, p.y - cross / 2.0f, p.z );
glVertex3f( p.x, p.y + cross / 2.0f, p.z );
glVertex3f( p.x, p.y, p.z - cross / 2.0f );
glVertex3f( p.x, p.y, p.z + cross / 2.0f );
glEnd();
}
if ( data->isBold() ) glLineWidth( 1.0f );
}
void VisualizerCanvasPoincare::drawCircle( Point center, double radius, Point rotationVector, double angle ) {
glPushMatrix();
glTranslatef(center.x, center.y, center.z);
glRotatef(angle, rotationVector.x, rotationVector.y, rotationVector.z);
const double delta_phi = 2*pi / 100;
glBegin(GL_LINE_LOOP);
for ( double phi = 0; phi < 2*pi; phi += delta_phi ) {
glVertex3f( radius * cos(phi), radius * sin(phi), 0.0f );
}
glEnd();
glPopMatrix();
}
void VisualizerCanvasPoincare::deleteModels() {
makeCurrent();
if (m_sphere_model) {
glDeleteLists(m_sphere_model, 1);
m_sphere_model = 0;
}
if (!m_data_models.empty()) {
BOOST_FOREACH( GLuint model, m_data_models ) {
glDeleteLists(model, 1);
}
m_data_models.clear();
}
}
void VisualizerCanvasPoincare::mousePressEvent(QMouseEvent *event) {
m_last_mouse_pos.first = event->x();
m_last_mouse_pos.second = event->y();
}
void VisualizerCanvasPoincare::mouseReleaseEvent(QMouseEvent *) {
}
void VisualizerCanvasPoincare::mouseMoveEvent(QMouseEvent *event) {
pair<int, int> current( event->x(), event->y() );
pair<int, int> last = m_last_mouse_pos;
if (m_mode == NAVIGATION) {
switch (event->buttons()) {
case Qt::LeftButton:
rotate( current.first - last.first, current.second - last.second );
break;
case Qt::RightButton:
translate( current.first - last.first, current.second - last.second );
break;
}
updateGL();
}
m_last_mouse_pos = current;
}
void VisualizerCanvasPoincare::wheelEvent(QWheelEvent* event) {
int numDegrees = event->delta() / 8;
int numSteps = numDegrees / 15;
if (event->orientation() == Qt::Vertical) {
if ( (m_viewport.width > 2 * numSteps / 10.0f) && (m_viewport.height > 2 * numSteps / 10.0f) ) {
m_viewport.width -= numSteps / 10.0f;
m_viewport.height -= numSteps / 10.0f;
}
setViewport();
updateGL();
event->accept();
}
}
void VisualizerCanvasPoincare::snapViewAroundData() {
m_rotation.x = m_rotation.y = m_rotation.z = 0.0f;
m_translation.x = m_translation.y = m_translation.z = 0.0f;
m_viewport.width = m_viewport.height = 3.0f;
setViewport();
if ( isVisible() ) {
makeCurrent();
updateGL();
}
}
void VisualizerCanvasPoincare::updateVisualizerData(VisualizerData*) {
deleteModels();
updateGL();
}
void VisualizerCanvasPoincare::rotate( int x, int y ) {
const double px2deg = 0.5 / 3.0 * m_viewport.width;
m_rotation.x += y * px2deg;
m_rotation.y += x * px2deg;
if (m_rotation.x > 90.0f) m_rotation.x = 90.0f;
if (m_rotation.x < -90.0f) m_rotation.x = -90.0f;
}
void VisualizerCanvasPoincare::translate( int x, int y ) {
float diminisher = 600.0f / m_viewport.width;
m_translation.x += x / diminisher;
m_translation.y -= y / diminisher;
}<|fim▁end|> | const double delta_phi = pi / 12.0; // Kugel wird in 15-Grad-Schritten unterteilt.
GLuint sphere = glGenLists(1); |
<|file_name|>a.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | #![allow(unused)]
#![crate_type = "proc-macro"] |
<|file_name|>mutation.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Facebook, Inc. and its affiliates.<|fim▁hole|> *
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! Tracking of commit mutations (amends, rebases, etc.)
use std::io::{Read, Write};
use anyhow::{anyhow, Result};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use vlqencoding::{VLQDecode, VLQEncode};
use crate::node::{Node, ReadNodeExt, WriteNodeExt};
#[derive(Clone, PartialEq, PartialOrd)]
pub struct MutationEntry {
pub succ: Node,
pub preds: Vec<Node>,
pub split: Vec<Node>,
pub op: String,
pub user: String,
pub time: i64,
pub tz: i32,
pub extra: Vec<(Box<[u8]>, Box<[u8]>)>,
}
/// Default size for a buffer that will be used for serializing a mutation entry.
///
/// This is:
/// * Version (1 byte)
/// * Successor hash (20 bytes)
/// * Predecessor count (1 byte)
/// * Predecessor hash (20 bytes) - most entries have only one predecessor
/// * Operation (~7 bytes) - e.g. amend, rebase
/// * User (~40 bytes)
/// * Time (4-8 bytes)
/// * Timezone (~2 bytes)
/// * Extra count (1 byte)
pub const DEFAULT_ENTRY_SIZE: usize = 100;
const DEFAULT_VERSION: u8 = 1;
impl MutationEntry {
pub fn serialize(&self, w: &mut dyn Write) -> Result<()> {
w.write_u8(DEFAULT_VERSION)?;
w.write_node(&self.succ)?;
w.write_vlq(self.preds.len())?;
for pred in self.preds.iter() {
w.write_node(pred)?;
}
w.write_vlq(self.split.len())?;
for split in self.split.iter() {
w.write_node(split)?;
}
w.write_vlq(self.op.len())?;
w.write_all(self.op.as_bytes())?;
w.write_vlq(self.user.len())?;
w.write_all(&self.user.as_bytes())?;
w.write_f64::<BigEndian>(self.time as f64)?;
w.write_vlq(self.tz)?;
w.write_vlq(self.extra.len())?;
for (key, value) in self.extra.iter() {
w.write_vlq(key.len())?;
w.write_all(&key)?;
w.write_vlq(value.len())?;
w.write_all(&value)?;
}
Ok(())
}
pub fn deserialize(r: &mut dyn Read) -> Result<Self> {
enum EntryFormat {
FloatDate,
Latest,
}
let format = match r.read_u8()? {
0 => return Err(anyhow!("invalid mutation entry version: 0")),
1..=4 => {
// These versions stored the date as an f64.
EntryFormat::FloatDate
}
5 => EntryFormat::Latest,
v => return Err(anyhow!("unsupported mutation entry version: {}", v)),
};
let succ = r.read_node()?;
let pred_count = r.read_vlq()?;
let mut preds = Vec::with_capacity(pred_count);
for _ in 0..pred_count {
preds.push(r.read_node()?);
}
let split_count = r.read_vlq()?;
let mut split = Vec::with_capacity(split_count);
for _ in 0..split_count {
split.push(r.read_node()?);
}
let op_len = r.read_vlq()?;
let mut op = vec![0; op_len];
r.read_exact(&mut op)?;
let op = String::from_utf8(op)?;
let user_len = r.read_vlq()?;
let mut user = vec![0; user_len];
r.read_exact(&mut user)?;
let user = String::from_utf8(user)?;
let time = match format {
EntryFormat::FloatDate => {
// The date was stored as a floating point number. We
// actually want an integer, so truncate and convert.
r.read_f64::<BigEndian>()?.trunc() as i64
}
_ => r.read_vlq()?,
};
let tz = r.read_vlq()?;
let extra_count = r.read_vlq()?;
let mut extra = Vec::with_capacity(extra_count);
for _ in 0..extra_count {
let key_len = r.read_vlq()?;
let mut key = vec![0; key_len];
r.read_exact(&mut key)?;
let value_len = r.read_vlq()?;
let mut value = vec![0; value_len];
r.read_exact(&mut value)?;
extra.push((key.into_boxed_slice(), value.into_boxed_slice()));
}
Ok(MutationEntry {
succ,
preds,
split,
op,
user,
time,
tz,
extra,
})
}
/// Return true if this represents an 1:1 commit replacement.
/// A split or a fold are not 1:1 replacement.
///
/// Note: Resolving divergence should use multiple 1:1 records
/// and are considered 1:1 replacements.
///
/// If this function returns true, `preds` is ensured to only
/// have one item.
pub fn is_one_to_one(&self) -> bool {
self.split.is_empty() && self.preds.len() == 1
}
}<|fim▁end|> | |
<|file_name|>test_identifier_detail.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from nose.tools import * # flake8: noqa
import urlparse
from api.base.settings.defaults import API_BASE
from website.identifiers.model import Identifier
from tests.base import ApiTestCase
from osf_tests.factories import (
RegistrationFactory,
AuthUserFactory,
IdentifierFactory,
NodeFactory,
)
class TestIdentifierDetail(ApiTestCase):
def setUp(self):
super(TestIdentifierDetail, self).setUp()
self.user = AuthUserFactory()<|fim▁hole|> self.registration = RegistrationFactory(creator=self.user, is_public=True)
self.registration_identifier = IdentifierFactory(referent=self.registration)
self.registration_url = '/{}identifiers/{}/'.format(API_BASE, self.registration_identifier._id)
self.node = NodeFactory(creator=self.user, is_public=True)
self.node_identifier = IdentifierFactory(referent=self.node)
self.node_url = '/{}identifiers/{}/'.format(API_BASE, self.node_identifier._id)
self.registration_res = self.app.get(self.registration_url)
self.registration_data = self.registration_res.json['data']
self.node_res = self.app.get(self.node_url)
self.node_data = self.node_res.json['data']
def test_identifier_detail_success_registration(self):
assert_equal(self.registration_res.status_code, 200)
assert_equal(self.registration_res.content_type, 'application/vnd.api+json')
def test_identifier_detail_success_node(self):
assert_equal(self.node_res.status_code, 200)
assert_equal(self.node_res.content_type, 'application/vnd.api+json')
def test_identifier_detail_returns_correct_referent_registration(self):
path = urlparse.urlparse(self.registration_data['relationships']['referent']['links']['related']['href']).path
assert_equal('/{}registrations/{}/'.format(API_BASE, self.registration._id), path)
def test_identifier_detail_returns_correct_referent_node(self):
path = urlparse.urlparse(self.node_data['relationships']['referent']['links']['related']['href']).path
assert_equal('/{}nodes/{}/'.format(API_BASE, self.node._id), path)
def test_identifier_detail_returns_correct_category_registration(self):
assert_equal(self.registration_data['attributes']['category'], self.registration_identifier.category)
def test_identifier_detail_returns_correct_category_node(self):
assert_equal(self.node_data['attributes']['category'], self.node_identifier.category)
def test_identifier_detail_returns_correct_value_registration(self):
assert_equal(self.registration_data['attributes']['value'], self.registration_identifier.value)
def test_identifier_detail_returns_correct_value_node(self):
assert_equal(self.node_data['attributes']['value'], self.node_identifier.value)<|fim▁end|> | |
<|file_name|>PropertiesFactory.java<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> * @author Eric D. Dill [email protected]
* @author James D. Martin [email protected]
* Copyright © 2010-2013 North Carolina State University. All rights reserved
*/
package email;
import java.util.Properties;
public class PropertiesFactory {
public static Properties getGmailProperties() {
Properties gmail = new Properties();
gmail.put("mail.smtp.auth", "true");
gmail.put("mail.smtp.starttls.enable", "true");
gmail.put("mail.smtp.host", "smtp.gmail.com");
gmail.put("mail.smtp.port", "587");
return gmail;
}
}<|fim▁end|> | |
<|file_name|>PerLengthPhaseImpedance.java<|end_file_name|><|fim▁begin|>/**
*/
package CIM15.IEC61970.Wires;
import CIM15.IEC61970.Core.IdentifiedObject;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.BasicInternalEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Per Length Phase Impedance</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link CIM15.IEC61970.Wires.PerLengthPhaseImpedance#getLineSegments <em>Line Segments</em>}</li>
* <li>{@link CIM15.IEC61970.Wires.PerLengthPhaseImpedance#getConductorCount <em>Conductor Count</em>}</li>
* <li>{@link CIM15.IEC61970.Wires.PerLengthPhaseImpedance#getPhaseImpedanceData <em>Phase Impedance Data</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class PerLengthPhaseImpedance extends IdentifiedObject {
/**
* The cached value of the '{@link #getLineSegments() <em>Line Segments</em>}' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getLineSegments()
* @generated
* @ordered
*/
protected EList<ACLineSegment> lineSegments;
/**
* The default value of the '{@link #getConductorCount() <em>Conductor Count</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getConductorCount()
* @generated
* @ordered
*/
protected static final int CONDUCTOR_COUNT_EDEFAULT = 0;
/**
* The cached value of the '{@link #getConductorCount() <em>Conductor Count</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getConductorCount()
* @generated
* @ordered
*/
protected int conductorCount = CONDUCTOR_COUNT_EDEFAULT;
/**
* This is true if the Conductor Count attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean conductorCountESet;
/**
* The cached value of the '{@link #getPhaseImpedanceData() <em>Phase Impedance Data</em>}' reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getPhaseImpedanceData()
* @generated
* @ordered
*/
protected EList<PhaseImpedanceData> phaseImpedanceData;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected PerLengthPhaseImpedance() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return WiresPackage.Literals.PER_LENGTH_PHASE_IMPEDANCE;
}
/**
* Returns the value of the '<em><b>Line Segments</b></em>' reference list.
* The list contents are of type {@link CIM15.IEC61970.Wires.ACLineSegment}.
* It is bidirectional and its opposite is '{@link CIM15.IEC61970.Wires.ACLineSegment#getPhaseImpedance <em>Phase Impedance</em>}'.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Line Segments</em>' reference list isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Line Segments</em>' reference list.
* @see CIM15.IEC61970.Wires.ACLineSegment#getPhaseImpedance
* @generated
*/
public EList<ACLineSegment> getLineSegments() {
if (lineSegments == null) {
lineSegments = new BasicInternalEList<ACLineSegment>(ACLineSegment.class);
}
return lineSegments;
}
/**
* Returns the value of the '<em><b>Conductor Count</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Conductor Count</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Conductor Count</em>' attribute.
* @see #isSetConductorCount()
* @see #unsetConductorCount()
* @see #setConductorCount(int)
* @generated
*/
public int getConductorCount() {
return conductorCount;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Wires.PerLengthPhaseImpedance#getConductorCount <em>Conductor Count</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Conductor Count</em>' attribute.
* @see #isSetConductorCount()
* @see #unsetConductorCount()
* @see #getConductorCount()
* @generated
*/
public void setConductorCount(int newConductorCount) {
conductorCount = newConductorCount;
conductorCountESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Wires.PerLengthPhaseImpedance#getConductorCount <em>Conductor Count</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetConductorCount()
* @see #getConductorCount()
* @see #setConductorCount(int)
* @generated
*/
public void unsetConductorCount() {
conductorCount = CONDUCTOR_COUNT_EDEFAULT;
conductorCountESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Wires.PerLengthPhaseImpedance#getConductorCount <em>Conductor Count</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Conductor Count</em>' attribute is set.
* @see #unsetConductorCount()
* @see #getConductorCount()
* @see #setConductorCount(int)
* @generated
*/
public boolean isSetConductorCount() {
return conductorCountESet;
}
/**
* Returns the value of the '<em><b>Phase Impedance Data</b></em>' reference list.
* The list contents are of type {@link CIM15.IEC61970.Wires.PhaseImpedanceData}.
* It is bidirectional and its opposite is '{@link CIM15.IEC61970.Wires.PhaseImpedanceData#getPhaseImpedance <em>Phase Impedance</em>}'.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Phase Impedance Data</em>' reference list isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Phase Impedance Data</em>' reference list.
* @see CIM15.IEC61970.Wires.PhaseImpedanceData#getPhaseImpedance
* @generated
*/
public EList<PhaseImpedanceData> getPhaseImpedanceData() {
if (phaseImpedanceData == null) {
phaseImpedanceData = new BasicInternalEList<PhaseImpedanceData>(PhaseImpedanceData.class);
}
return phaseImpedanceData;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@SuppressWarnings("unchecked")
@Override
public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__LINE_SEGMENTS:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getLineSegments()).basicAdd(otherEnd, msgs);
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__PHASE_IMPEDANCE_DATA:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getPhaseImpedanceData()).basicAdd(otherEnd, msgs);
}
return super.eInverseAdd(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__LINE_SEGMENTS:
return ((InternalEList<?>)getLineSegments()).basicRemove(otherEnd, msgs);
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__PHASE_IMPEDANCE_DATA:
return ((InternalEList<?>)getPhaseImpedanceData()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**<|fim▁hole|> */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__LINE_SEGMENTS:
return getLineSegments();
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__CONDUCTOR_COUNT:
return getConductorCount();
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__PHASE_IMPEDANCE_DATA:
return getPhaseImpedanceData();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__LINE_SEGMENTS:
getLineSegments().clear();
getLineSegments().addAll((Collection<? extends ACLineSegment>)newValue);
return;
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__CONDUCTOR_COUNT:
setConductorCount((Integer)newValue);
return;
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__PHASE_IMPEDANCE_DATA:
getPhaseImpedanceData().clear();
getPhaseImpedanceData().addAll((Collection<? extends PhaseImpedanceData>)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__LINE_SEGMENTS:
getLineSegments().clear();
return;
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__CONDUCTOR_COUNT:
unsetConductorCount();
return;
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__PHASE_IMPEDANCE_DATA:
getPhaseImpedanceData().clear();
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__LINE_SEGMENTS:
return lineSegments != null && !lineSegments.isEmpty();
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__CONDUCTOR_COUNT:
return isSetConductorCount();
case WiresPackage.PER_LENGTH_PHASE_IMPEDANCE__PHASE_IMPEDANCE_DATA:
return phaseImpedanceData != null && !phaseImpedanceData.isEmpty();
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (conductorCount: ");
if (conductorCountESet) result.append(conductorCount); else result.append("<unset>");
result.append(')');
return result.toString();
}
} // PerLengthPhaseImpedance<|fim▁end|> | * <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated |
<|file_name|>EfuseCommand.cpp<|end_file_name|><|fim▁begin|>#include "../Resource/ResourceManager.h"
#include "../Resource/Handle/DLHandle.h"
#include "../Resource/Handle/DAHandle.h"
#include "../Err/FlashToolErrorCodeDef.h"
#include "../Conn/Connection.h"
#include "../Err/Exception.h"
#include "../Logger/Log.h"
#include "EfuseCommand.h"
#include "../Utility/IniItem.h"
#define sbc_pub_key_hash \
sbc_pub_key_u.r_sbc_pub_key_hash
#define _sbc_key_E \
sbc_pub_key_u.w_sbc_pub_key.key_e
#define _sbc_key_N \
sbc_pub_key_u.w_sbc_pub_key.key_n
#define sbc_pub_key_hash1 \
sbc_pub_key1_u.r_sbc_pub_key_hash1
#define _sbc_key1_E \
sbc_pub_key1_u.w_sbc_pub_key1.key_e
#define _sbc_key1_N \
sbc_pub_key1_u.w_sbc_pub_key1.key_n
#define EfuseOpt2Text(opt) \
((opt)==EFUSE_ENABLE ? "on" : "off")
#define DumpEfuseOpt(p, sec) \
LOGI(#sec " = %s", EfuseOpt2Text(p->sec))
#define DumpEfuseOptEx(p, sec, key) \
LOGI("%s = %s", key.c_str(), EfuseOpt2Text(p.sec))
#define DumpInteger(p, sec) \
LOGI(#sec " = %d", p->sec)
#define DumpIntegerEx(p, sec, key) \
LOGI("%s = %d", key.c_str(), p.sec)
#define DumpShortHex(p, sec) \
LOGI(#sec " = %04x", p->sec)
#define DumpIntHex(p, sec) \
LOGI(#sec " = %08x", p->sec)
#define DumpIntHexEx(p, sec, key) \
LOGI("%s = %08x", key.c_str(), p.sec)
#define DumpText(p, sec) \
LOGI(#sec " = %s", (p->sec).buf)
#define DumpTextEx(p, sec, key) \
LOGI("%s = %s", key.c_str(), (p.sec).buf)
#define DumpBinary(p, sec) \
LOGI(#sec " = %s", Binary2Str(p->sec).c_str())
#define DumpBinaryEx(p, sec, key) \
LOGI("%s = %s", key.c_str(), Binary2Str(p.sec).c_str())
#define DumpEfuseSTBOpt(p, sec) \
LOGI(#sec " = %s", EfuseOpt2Text(p.sec))
#define DumpSTBKeyName(p, sec) \
LOGI(#sec " = %s", p.sec)
#define DumpSTBText(p, sec) \
LOGI(#sec " = %s", (p.sec).buf)
#define DumpSTBBinary(p, sec) \
LOGI(#sec " = %s", Binary2Str(p.sec).c_str())
#define SaveEfuseOpt(f, p, sec) \
fprintf(f, #sec " = %s\r\n", EfuseOpt2Text(p->sec))
#define SaveEfuseOptEx(f, p, sec, key) \
fprintf(f, "%s =%s\r\n", key.c_str(), EfuseOpt2Text(p.sec))
#define SaveInteger(f, p, sec) \
fprintf(f, #sec " = %d\r\n", p->sec)
#define SaveIntegerEx(f, p, sec, key) \
fprintf(f, "%s = %d\r\n", key.c_str(), p.sec)
#define SaveShortHex(f, p, sec) \
fprintf(f, #sec " = %04x\r\n", p->sec)
#define SaveIntHex(f, p, sec) \
fprintf(f, #sec " = %08x\r\n", p->sec)
#define SaveIntHexEx(f, p, sec, key) \
fprintf(f, "%s = %08x\r\n", key.c_str(), p->sec)
#define SaveBinary(f, p, sec) \
fprintf(f, #sec " = %s\r\n", Binary2Str(p->sec).c_str())
#define SaveBinaryEx(f, p, sec, key) \
fprintf(f, "%s = %s\r\n", key.c_str(), Binary2Str(p.sec).c_str())
#define SaveSTBKeyName(f, p, sec) \
fprintf(f, #sec " = %s\r\n", p.sec)
#define SaveSTBBinary(f, p, sec) \
fprintf(f, #sec " = %s\r\n", Binary2Str(p.sec).c_str())
using std::string;
namespace APCore
{
// class EfuseCommand
EfuseCommand::EfuseCommand(
APKey key,
EfuseSetting *p)
: ICommand(key),
p_setting_(p),
read_only_(false),
stb_lock_enable_(false),
stb_key_enable_(false)
{
}
EfuseCommand::~EfuseCommand()
{
}
void EfuseCommand::exec(
const QSharedPointer<Connection> &conn)
{
Q_ASSERT(p_setting_ != NULL);
Connect(conn.data());
if(!read_only_)
WriteAll(conn.data());
ReadAll(conn.data());
}
void EfuseCommand::Connect(Connection *conn)
{
conn->ConnectBROM();
IniItem item("option.ini", "BLOWN", "brom");
bool brom_blown = item.GetBooleanValue();
switch (conn->boot_result().m_bbchip_type)
{
case MT6573:
case MT6575:
case MT6577:
case MT6589:
break;
case MT8135:
TryConnectDA(conn);
break;
default:
if(!brom_blown){
conn->ConnectDA();
}
break;
}
}
void EfuseCommand::TryConnectDA(Connection *conn)
{
if (GET_DA_HANDLE_T(conn->ap_key()) != NULL &&
GET_DL_HANDLE_T(conn->ap_key()) != NULL)
{
conn->ConnectDA();
}
}
void EfuseCommand::WriteAll(Connection *conn)
{
DumpSetting();
LOGI("Writing Efuse registers...");
int ret = FlashTool_WriteEfuseAll_Ex(
conn->FTHandle(),
p_setting_->CommonArg(),
p_setting_->SecureArg(),
p_setting_->LockArg(),
p_setting_->StbKeyArg(),
p_setting_->extraArg());
if (S_DONE != ret)
{
LOGI("Write Efuse registers failed: %s",
StatusToString(ret));
THROW_APP_EXCEPTION(ret, "");
}
else
{
LOGI("Efuse registers written successfully");
}
}
void EfuseCommand::ReadAll(Connection *conn)
{
string rb_file = p_setting_->ReadbackFile();
if (!rb_file.empty())
{
Efuse_Common_Arg common_arg;// = { 0 };
memset(&common_arg, 0, sizeof(common_arg));
Efuse_Secure_Arg secure_arg;// = { 0 };
memset(&secure_arg, 0, sizeof(secure_arg));
Efuse_Lock_Arg lock_arg;// = { 0 };
memset(&lock_arg, 0, sizeof(lock_arg));
Efuse_STB_Key_Arg stb_arg;// = { 0 };
memset(&stb_arg, 0, sizeof(stb_arg));
Efuse_Extra_Arg extra_arg;// = { 0 };
memset(&extra_arg, 0, sizeof(extra_arg));
char spare_buf[64] = { 0 };
char ackey_buf[16] = { 0 };
char sbcpk_buf[32] = { 0 };
char sbcpk1_buf[32] = { 0 };
char stbkey_bufs[EFUSE_STK_KEY_NUMBER][32] = {{0}};
char stbkey_names[EFUSE_STK_KEY_NUMBER][64] = {{0}};
common_arg.spare.buf = spare_buf;
common_arg.spare.buf_len = sizeof(spare_buf);
secure_arg.ac_key.buf = ackey_buf;
secure_arg.ac_key.buf_len = sizeof(ackey_buf);
secure_arg.sbc_pub_key_hash.buf = sbcpk_buf;
secure_arg.sbc_pub_key_hash.buf_len = sizeof(sbcpk_buf);
secure_arg.sbc_pub_key_hash1.buf = sbcpk1_buf;
secure_arg.sbc_pub_key_hash1.buf_len = sizeof(sbcpk1_buf);
char extra_bufs[END_KEY][64] = {{0}};
for(int i = 0; i < EFUSE_STK_KEY_NUMBER; i++)
{
stb_arg.stb_blow_keys[i].key_name = stbkey_names[i];
stb_arg.stb_blow_keys[i].stb_key.buf = stbkey_bufs[i];
stb_arg.stb_blow_keys[i].stb_key.buf_len = sizeof(stbkey_bufs[i]);
}
for(int i = 0; i < END_KEY; i++)
{
extra_arg.items[i].data.key_pair.key.buf = extra_bufs[i];
}
LOGI("Reading Efuse registers...");
int ret = FlashTool_ReadEfuseAll_Ex(
conn->FTHandle(),
&common_arg,
&secure_arg,
&lock_arg,
&stb_arg,
&extra_arg);
if (S_DONE != ret)
{
LOGI("Read Efuse registers failed: %s",
StatusToString(ret));
THROW_APP_EXCEPTION(ret, "");
}
else
{
LOGI("Efuse registers read successfully");
DumpResult(rb_file,
&common_arg,
&secure_arg,
&lock_arg,
&stb_arg,
&extra_arg);
}
LOGI("Efuse Dump done");
}
}
string EfuseCommand::Binary2Str(
const _BUFFER &ref)
{
string str;
char tmp[16];
for (U32 i=0; i<ref.buf_len; i+=sizeof(U32))
{
sprintf(tmp, "%08X ", *(U32*)(ref.buf+i));
str += tmp;
}
return str;
}
void EfuseCommand::DumpCommonArg(
const Efuse_Common_Arg *common,
bool to_write /*= true*/)
{
LOGI("========== Efuse Common Setting ==========");
DumpEfuseOpt(common, emmc_boot_dis);
DumpEfuseOpt(common, nand_boot_dis);
DumpEfuseOpt(common, nand_boot_speedup_dis);
DumpEfuseOpt(common, pid_vid_custom_en);
DumpEfuseOpt(common, ufs_boot_dis);
DumpEfuseOpt(common, sbc_pub_hash_dis);
DumpEfuseOpt(common, sbc_pub_hash1_dis);
if (to_write)
{
DumpEfuseOpt(common, spare_blow);
}
if (common->spare.buf_len > 0)
{
if (to_write)
DumpText(common, spare);
else
DumpBinary(common, spare);
}
DumpInteger(common, usbdl_type);
if (to_write)
{
DumpEfuseOpt(common, usb_id_blow);
}
if (common->usb_id_blow == EFUSE_ENABLE || !to_write)
{
DumpShortHex(common, usb_pid);
DumpShortHex(common, usb_vid);
}
}
void EfuseCommand::DumpSecureArg(
const Efuse_Secure_Arg *secure,
bool to_write /*= true*/)
{
LOGI("========== Efuse Secure Setting ==========");
DumpEfuseOpt(secure, acc_en);
DumpEfuseOpt(secure, sbc_en);
DumpEfuseOpt(secure, daa_en);
DumpEfuseOpt(secure, sla_en);
DumpEfuseOpt(secure, ack_en);
DumpEfuseOpt(secure, jtag_dis);
DumpEfuseOpt(secure, jtag_sw_con);
DumpEfuseOpt(secure, dbgport_lock_dis);
DumpEfuseOpt(secure, md1_sbc_en);
DumpEfuseOpt(secure, c2k_sbc_en);
DumpEfuseOpt(secure, pl_ar_en);
DumpEfuseOpt(secure, pk_cus_en);
if (to_write)
{
DumpEfuseOpt(secure, ac_key_blow);
}
if(secure->ac_key.buf_len > 0)
{
if (to_write)
DumpText(secure, ac_key);
else
DumpBinary(secure, ac_key);
}
if (to_write)
{
DumpEfuseOpt(secure, sbc_pubk_blow);
if(secure->_sbc_key_E.buf_len > 0)
{
LOGI("sbc_public_key_e = %s",
secure->_sbc_key_E.buf);
}
if(secure->_sbc_key_N.buf_len > 0)
{
LOGI("sbc_public_key_n = %s",
secure->_sbc_key_N.buf);
}
}
else if (secure->sbc_pub_key_hash.buf_len > 0)
{
DumpBinary(secure, sbc_pub_key_hash);
}
if (to_write)
{
DumpEfuseOpt(secure, sbc_pubk1_blow);
<|fim▁hole|> }
if(secure->_sbc_key1_N.buf_len > 0)
{
LOGI("sbc_public_key1_n = %s",
secure->_sbc_key1_N.buf);
}
}
else if (secure->sbc_pub_key_hash1.buf_len > 0)
{
DumpBinary(secure, sbc_pub_key_hash1);
}
}
void EfuseCommand::DumpLockArg(
const Efuse_Lock_Arg *lock)
{
LOGI("========== Efuse Lock Setting ==========");
DumpEfuseOpt(lock, common_ctrl_lock);
DumpEfuseOpt(lock, usb_id_lock);
DumpEfuseOpt(lock, spare_lock);
DumpEfuseOpt(lock, sec_ctrl_lock);
DumpEfuseOpt(lock, ackey_lock);
DumpEfuseOpt(lock, sbc_pubk_hash_lock);
DumpEfuseOpt(lock, sec_msc_lock);
DumpEfuseOpt(lock, custk_lock);
DumpEfuseOpt(lock, sbc_pubk_hash1_lock);
}
void EfuseCommand::DumpSTBLockArg(const STB_Lock_PARAM *stb_lock)
{
LOGI("========== Efuse STB Lock Setting ==========");
DumpEfuseOpt(stb_lock, stb_key_g7_lock);
DumpEfuseOpt(stb_lock, stb_key_g6_lock);
DumpEfuseOpt(stb_lock, stb_key_g5_lock);
DumpEfuseOpt(stb_lock, stb_key_g4_lock);
DumpEfuseOpt(stb_lock, stb_key_g3_lock);
DumpEfuseOpt(stb_lock, stb_key_g2_lock);
DumpEfuseOpt(stb_lock, stb_key_g1_lock);
DumpEfuseOpt(stb_lock, stb_key_g0_lock);
DumpEfuseOpt(stb_lock, stb_key_operatorid_lock);
DumpEfuseOpt(stb_lock, stb_key_chipid_lock);
DumpEfuseOpt(stb_lock, stb_key_sn_lock);
}
void EfuseCommand::DumpSTBArg(const Efuse_STB_Key_Arg *stb,
bool to_write)
{
if(stb_lock_enable_)
DumpSTBLockArg(&stb->stb_lock);
if(!stb_key_enable_)
return;
LOGI("========== Efuse STB Key Setting ==========");
if (to_write)
{
DumpEfuseOpt(stb, stb_key_chipid_blow);
}
if (stb->stb_key_chipid_blow == EFUSE_ENABLE || !to_write)
{
DumpShortHex(stb, stb_key_chipID);
}
if(to_write)
{
DumpEfuseOpt(stb, stb_key_operatorid_blow);
}
if(stb->stb_key_operatorid_blow == EFUSE_ENABLE || !to_write)
{
DumpShortHex(stb, stb_key_operatorid);
}
for(int i = 0; i < 16; i++)
{
if (to_write)
{
DumpEfuseSTBOpt(stb->stb_blow_keys[i], key_blow);
}
if(stb->stb_blow_keys[i].stb_key.buf_len > 0 &&
stb->stb_blow_keys[i].key_name != NULL &&
strlen(stb->stb_blow_keys[i].key_name) > 0)
{
DumpSTBKeyName(stb->stb_blow_keys[i], key_name);
if (to_write)
DumpSTBText(stb->stb_blow_keys[i], stb_key);
else
DumpSTBBinary(stb->stb_blow_keys[i], stb_key);
}
}
}
#define KeyToString(key) EfuseExtraKeyToString(key)
void EfuseCommand::DumpExtraArg(const Efuse_Extra_Arg *extra,
bool to_write)
{
LOGI("========== Efuse Extra Setting ==========");
for(int i = 0; i < END_KEY; i++)
{
std::string key = KeyToString((EFUSE_KEY)extra->items[i].key);
switch(extra->items[i].type)
{
case T_BOOLEAN:
DumpEfuseOptEx(extra->items[i], data.enable, key);
break;
case T_INT:
DumpIntHexEx(extra->items[i], data.iPair.value, key);
break;
case T_BUF:
if(extra->items[i].data.key_pair.key.buf_len > 0)
{
if(to_write)
DumpTextEx(extra->items[i], data.key_pair.key, key);
else
DumpBinaryEx(extra->items[i], data.key_pair.key, key);
}
break;
}
}
}
void EfuseCommand::DumpSetting()
{
DumpCommonArg(p_setting_->CommonArg());
DumpSecureArg(p_setting_->SecureArg());
DumpLockArg(p_setting_->LockArg());
DumpSTBArg(p_setting_->StbKeyArg());
DumpExtraArg(p_setting_->extraArg());
}
void EfuseCommand::DumpResult(
const std::string &rb_file,
const Efuse_Common_Arg *common,
const Efuse_Secure_Arg *secure,
const Efuse_Lock_Arg *lock,
const Efuse_STB_Key_Arg *stb,
const Efuse_Extra_Arg *extra)
{
FILE *fp = fopen(rb_file.c_str(), "wb");
if (NULL == fp)
{
LOGE("Failed to open file: %s",
rb_file.c_str());
}
else
{
SaveCommonArg(fp, common);
SaveSecureArgR(fp, secure);
SaveLockArg(fp, lock);
SaveStbArg(fp, stb);
SaveExtraArg(fp, extra);
fclose(fp);
}
DumpCommonArg(common, false);
DumpSecureArg(secure, false);
DumpLockArg(lock);
DumpSTBArg(stb, false);
DumpExtraArg(extra, false);
}
void EfuseCommand::SaveCommonArg(
FILE *rb_file,
const Efuse_Common_Arg *common)
{
fprintf(rb_file,
"========== Efuse Common Setting ==========\r\n");
SaveEfuseOpt(rb_file, common, emmc_boot_dis);
SaveEfuseOpt(rb_file, common, nand_boot_dis);
SaveEfuseOpt(rb_file, common, nand_boot_speedup_dis);
SaveEfuseOpt(rb_file, common, pid_vid_custom_en);
SaveEfuseOpt(rb_file, common, sbc_pub_hash_dis);
SaveEfuseOpt(rb_file, common, sbc_pub_hash1_dis);
if (common->spare.buf_len > 0)
{
SaveBinary(rb_file, common, spare);
}
SaveInteger(rb_file, common, usbdl_type);
if (common->usb_id_blow == EFUSE_ENABLE)
{
SaveShortHex(rb_file, common, usb_pid);
SaveShortHex(rb_file, common, usb_vid);
}
fprintf(rb_file, "\r\n");
}
void EfuseCommand::SaveSecureArgR(
FILE *rb_file,
const Efuse_Secure_Arg *secure)
{
fprintf(rb_file,
"========== Efuse Secure Setting ==========\r\n");
SaveEfuseOpt(rb_file, secure, acc_en);
SaveEfuseOpt(rb_file, secure, sbc_en);
SaveEfuseOpt(rb_file, secure, daa_en);
SaveEfuseOpt(rb_file, secure, sla_en);
SaveEfuseOpt(rb_file, secure, ack_en);
SaveEfuseOpt(rb_file, secure, jtag_dis);
SaveEfuseOpt(rb_file, secure, jtag_sw_con);
SaveEfuseOpt(rb_file, secure, rom_cmd_dis);
SaveEfuseOpt(rb_file, secure, dbgport_lock_dis);
SaveEfuseOpt(rb_file, secure, md1_sbc_en);
SaveEfuseOpt(rb_file, secure, c2k_sbc_en);
SaveEfuseOpt(rb_file, secure, pl_ar_en);
SaveEfuseOpt(rb_file, secure, pk_cus_en);
if (secure->ac_key.buf_len > 0)
{
SaveBinary(rb_file, secure, ac_key);
}
if (secure->sbc_pub_key_hash.buf_len > 0)
{
SaveBinary(rb_file, secure, sbc_pub_key_hash);
}
if (secure->sbc_pub_key_hash1.buf_len > 0)
{
SaveBinary(rb_file, secure, sbc_pub_key_hash1);
}
fprintf(rb_file, "\r\n");
}
void EfuseCommand::SaveLockArg(
FILE *rb_file,
const Efuse_Lock_Arg *lock)
{
fprintf(rb_file,
"========== Efuse Lock Setting ==========\r\n");
SaveEfuseOpt(rb_file, lock, common_ctrl_lock);
SaveEfuseOpt(rb_file, lock, usb_id_lock);
SaveEfuseOpt(rb_file, lock, spare_lock);
SaveEfuseOpt(rb_file, lock, sec_ctrl_lock);
SaveEfuseOpt(rb_file, lock, ackey_lock);
SaveEfuseOpt(rb_file, lock, sbc_pubk_hash_lock);
SaveEfuseOpt(rb_file, lock, sec_msc_lock);
SaveEfuseOpt(rb_file, lock, custk_lock);
SaveEfuseOpt(rb_file, lock, sbc_pubk_hash1_lock);
fprintf(rb_file, "\r\n");
}
void EfuseCommand::SaveStbArg(FILE *rb_file,
const Efuse_STB_Key_Arg *stb)
{
if(stb_lock_enable_)
SaveSTBKeyLockArg(rb_file, &stb->stb_lock);
if(!stb_key_enable_)
return;
fprintf(rb_file,
"========== Efuse STB Key Setting ==========\r\n");
if (stb->stb_key_chipid_blow == EFUSE_ENABLE)
{
SaveShortHex(rb_file, stb, stb_key_chipID);
}
if(stb->stb_key_operatorid_blow == EFUSE_ENABLE)
{
SaveShortHex(rb_file, stb, stb_key_operatorid);
}
for(int i = 0; i < 16; i++)
{
if(stb->stb_blow_keys[i].stb_key.buf_len > 0 &&
stb->stb_blow_keys[i].key_name != NULL &&
strlen(stb->stb_blow_keys[i].key_name) > 0)
{
SaveSTBKeyName(rb_file, stb->stb_blow_keys[i], key_name);
SaveSTBBinary(rb_file, stb->stb_blow_keys[i], stb_key);
}
}
fprintf(rb_file, "\r\n");
}
void EfuseCommand::SaveSTBKeyLockArg(FILE *rb_file,
const STB_Lock_PARAM *stb_lock)
{
fprintf(rb_file,
"========== Efuse STB Key Lock Setting ==========\r\n");
SaveEfuseOpt(rb_file, stb_lock, stb_key_g7_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_g6_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_g5_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_g4_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_g3_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_g2_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_g1_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_g0_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_operatorid_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_chipid_lock);
SaveEfuseOpt(rb_file, stb_lock, stb_key_sn_lock);
fprintf(rb_file, "\r\n");
}
void EfuseCommand::SaveExtraArg(FILE *rb_file,
const Efuse_Extra_Arg *extra)
{
fprintf(rb_file,
"==========Efuse Extra Setting =======\r\n");
for(int i = 0; i < END_KEY; i++)
{
std::string key = KeyToString((EFUSE_KEY)extra->items[i].key);
switch(extra->items[i].type)
{
case T_BOOLEAN:
SaveEfuseOptEx(rb_file, extra->items[i], data.enable, key);
break;
case T_INT:
SaveIntegerEx(rb_file, extra->items[i], data.iPair.value, key);
break;
case T_BUF:
if(extra->items[i].data.key_pair.key.buf_len > 0)
{
SaveBinaryEx(rb_file, extra->items[i], data.key_pair.key, key);
}
break;
}
}
fprintf(rb_file, "\r\n");
}
} /*namespace APCore*/<|fim▁end|> | if(secure->_sbc_key1_E.buf_len > 0)
{
LOGI("sbc_public_key1_e = %s",
secure->_sbc_key1_E.buf); |
<|file_name|>util.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Several methods to simplify expressions involving unit objects.
"""
from __future__ import division
from sympy.utilities.exceptions import SymPyDeprecationWarning
from sympy import Add, Function, Mul, Pow, Rational, Tuple, sympify
from sympy.core.compatibility import reduce, Iterable
from sympy.physics.units.dimensions import Dimension, dimsys_default
from sympy.physics.units.quantities import Quantity
from sympy.physics.units.prefixes import Prefix
from sympy.utilities.iterables import sift
def dim_simplify(expr):
"""
NOTE: this function could be deprecated in the future.
Simplify expression by recursively evaluating the dimension arguments.
This function proceeds to a very rough dimensional analysis. It tries to
simplify expression with dimensions, and it deletes all what multiplies a
dimension without being a dimension. This is necessary to avoid strange
behavior when Add(L, L) be transformed into Mul(2, L).
"""
SymPyDeprecationWarning(
deprecated_since_version="1.2",
feature="dimensional simplification function",
issue=13336,
useinstead="don't use",
).warn()
_, expr = Quantity._collect_factor_and_dimension(expr)
return expr
def _get_conversion_matrix_for_expr(expr, target_units):
from sympy import Matrix
expr_dim = Dimension(Quantity.get_dimensional_expr(expr))
dim_dependencies = dimsys_default.get_dimensional_dependencies(expr_dim, mark_dimensionless=True)
target_dims = [Dimension(Quantity.get_dimensional_expr(x)) for x in target_units]
canon_dim_units = {i for x in target_dims for i in dimsys_default.get_dimensional_dependencies(x, mark_dimensionless=True)}
canon_expr_units = {i for i in dim_dependencies}
if not canon_expr_units.issubset(canon_dim_units):
return None
canon_dim_units = sorted(canon_dim_units)
camat = Matrix([[dimsys_default.get_dimensional_dependencies(i, mark_dimensionless=True).get(j, 0) for i in target_dims] for j in canon_dim_units])
exprmat = Matrix([dim_dependencies.get(k, 0) for k in canon_dim_units])
res_exponents = camat.solve_least_squares(exprmat, method=None)
return res_exponents
def convert_to(expr, target_units):
"""
Convert ``expr`` to the same expression with all of its units and quantities<|fim▁hole|> represented as factors of ``target_units``, whenever the dimension is compatible.
``target_units`` may be a single unit/quantity, or a collection of
units/quantities.
Examples
========
>>> from sympy.physics.units import speed_of_light, meter, gram, second, day
>>> from sympy.physics.units import mile, newton, kilogram, atomic_mass_constant
>>> from sympy.physics.units import kilometer, centimeter
>>> from sympy.physics.units import convert_to
>>> convert_to(mile, kilometer)
25146*kilometer/15625
>>> convert_to(mile, kilometer).n()
1.609344*kilometer
>>> convert_to(speed_of_light, meter/second)
299792458*meter/second
>>> convert_to(day, second)
86400*second
>>> 3*newton
3*newton
>>> convert_to(3*newton, kilogram*meter/second**2)
3*kilogram*meter/second**2
>>> convert_to(atomic_mass_constant, gram)
1.66053904e-24*gram
Conversion to multiple units:
>>> convert_to(speed_of_light, [meter, second])
299792458*meter/second
>>> convert_to(3*newton, [centimeter, gram, second])
300000*centimeter*gram/second**2
Conversion to Planck units:
>>> from sympy.physics.units import gravitational_constant, hbar
>>> convert_to(atomic_mass_constant, [gravitational_constant, speed_of_light, hbar]).n()
7.62950196312651e-20*gravitational_constant**(-0.5)*hbar**0.5*speed_of_light**0.5
"""
if not isinstance(target_units, (Iterable, Tuple)):
target_units = [target_units]
if isinstance(expr, Add):
return Add.fromiter(convert_to(i, target_units) for i in expr.args)
expr = sympify(expr)
if not isinstance(expr, Quantity) and expr.has(Quantity):
expr = expr.replace(lambda x: isinstance(x, Quantity), lambda x: x.convert_to(target_units))
def get_total_scale_factor(expr):
if isinstance(expr, Mul):
return reduce(lambda x, y: x * y, [get_total_scale_factor(i) for i in expr.args])
elif isinstance(expr, Pow):
return get_total_scale_factor(expr.base) ** expr.exp
elif isinstance(expr, Quantity):
return expr.scale_factor
return expr
depmat = _get_conversion_matrix_for_expr(expr, target_units)
if depmat is None:
return expr
expr_scale_factor = get_total_scale_factor(expr)
return expr_scale_factor * Mul.fromiter((1/get_total_scale_factor(u) * u) ** p for u, p in zip(target_units, depmat))
def quantity_simplify(expr):
if expr.is_Atom:
return expr
if not expr.is_Mul:
return expr.func(*map(quantity_simplify, expr.args))
if expr.has(Prefix):
coeff, args = expr.as_coeff_mul(Prefix)
args = list(args)
for arg in args:
if isinstance(arg, Pow):
coeff = coeff * (arg.base.scale_factor ** arg.exp)
else:
coeff = coeff * arg.scale_factor
expr = coeff
coeff, args = expr.as_coeff_mul(Quantity)
args_pow = [arg.as_base_exp() for arg in args]
quantity_pow, other_pow = sift(args_pow, lambda x: isinstance(x[0], Quantity), binary=True)
quantity_pow_by_dim = sift(quantity_pow, lambda x: x[0].dimension)
# Just pick the first quantity:
ref_quantities = [i[0][0] for i in quantity_pow_by_dim.values()]
new_quantities = [
Mul.fromiter(
(quantity*i.scale_factor/quantity.scale_factor)**p for i, p in v)
if len(v) > 1 else v[0][0]**v[0][1]
for quantity, (k, v) in zip(ref_quantities, quantity_pow_by_dim.items())]
return coeff*Mul.fromiter(other_pow)*Mul.fromiter(new_quantities)<|fim▁end|> | |
<|file_name|>multichecksum.py<|end_file_name|><|fim▁begin|># !/usr/bin/env python
__author__ = "Andrew Hankinson ([email protected])"
__version__ = "1.5"
__date__ = "2011"
__copyright__ = "Creative Commons Attribution"
__license__ = """The MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE."""
import multiprocessing
from optparse import OptionParser
import os
import sys
import hashlib
import codecs
import re
from pybagit.exceptions import *
from functools import reduce
# declare a default hashalgorithm
HASHALG = 'sha1'
ENCODING = "utf-8"
def write_manifest(datadir, encoding, update=False):
bag_root = os.path.split(os.path.abspath(datadir))[0]
manifest_file = os.path.join(bag_root, "manifest-{0}.txt".format(HASHALG))
checksums = dict()
files_to_checksum = set(dirwalk(datadir))
if update and os.path.isfile(manifest_file):
for line in codecs.open(manifest_file, 'rb', encoding):
checksum, file_ = line.strip().split(' ', 1)
full_file = os.path.join(bag_root, file_)
if full_file in files_to_checksum:
files_to_checksum.remove(full_file)
checksums[os.path.join(bag_root, file_)] = checksum
p = multiprocessing.Pool(processes=multiprocessing.cpu_count())
result = p.map_async(csumfile, files_to_checksum)
checksums.update((k, v) for v, k in result.get())
p.close()
p.join()
mfile = codecs.open(manifest_file, 'wb', encoding)
for file_, checksum in sorted(checksums.iteritems()):
rp = os.path.relpath(file_, bag_root)
fl = ensure_unix_pathname(rp).decode(ENCODING, 'replace')
mfile.write(u"{0} {1}\n".format(checksum, fl))
mfile.close()
def dirwalk(datadir):
datafiles = []
for dirpath, dirnames, filenames in os.walk(datadir):
for fn in filenames:
datafiles.append(os.path.join(dirpath, fn))
return datafiles
def csumfile(filename):
""" Based on
http://abstracthack.wordpress.com/2007/10/19/calculating-md5-checksum/
"""
hashalg = getattr(hashlib, HASHALG)() # == 'hashlib.md5' or 'hashlib.sha1'
blocksize = 0x10000
def __upd(m, data):
m.update(data)
return m
fd = open(filename, 'rb')
try:
contents = iter(lambda: fd.read(blocksize), "")<|fim▁hole|>
return (m.hexdigest(), filename)
def ensure_unix_pathname(pathname):
# it's only windows we have to worry about
if sys.platform != "win32":
return pathname
replace = re.compile(r"\\", re.UNICODE)
fnm = re.sub(replace, "/", pathname)
return fnm
if __name__ == "__main__":
parser = OptionParser()
usage = "%prog [options] arg1 arg2"
parser.add_option(
"-a",
"--algorithm",
action="store",
help="checksum algorithm to use (sha1|md5)")
parser.add_option(
"-c",
"--encoding",
action="store",
help="File encoding to write manifest")
parser.add_option(
"-u",
"--update",
action="store_true",
help="Only update new/removed files")
(options, args) = parser.parse_args()
if options.algorithm:
if not options.algorithm in ('md5', 'sha1'):
raise BagCheckSumNotValid(
'You must specify either "md5" or "sha1" as the checksum algorithm')
HASHALG = options.algorithm
if options.encoding:
ENCODING = options.encoding
if len(args) < 1:
parser.error("You must specify a data directory")
write_manifest(args[0], ENCODING, update=options.update)<|fim▁end|> | m = reduce(__upd, contents, hashalg)
finally:
fd.close() |
<|file_name|>SelectionSort.java<|end_file_name|><|fim▁begin|>/**
* Created by avinashvundyala on 09/07/16.
*/
public class SelectionSort {
public int[] iterativeSelectionSort(int[] arr) {
int temp, smallIdx;
for(int i = 0; i < arr.length; i++) {
smallIdx = i;
for(int j = i; j < arr.length; j++) {
if(arr[smallIdx] > arr[j]) {
smallIdx = j;
}
}
temp = arr[i];
arr[i] = arr[smallIdx];
arr[smallIdx] = temp;
}
return arr;
}
public int[] recursiveSelectionSort(int[] arr, int startIdx) {
if(startIdx == arr.length) {
return arr;
}
int temp, smallIdx;
smallIdx = startIdx;
for(int j = startIdx; j < arr.length; j++) {<|fim▁hole|> }
}
temp = arr[startIdx];
arr[startIdx] = arr[smallIdx];
arr[smallIdx] = temp;
return recursiveSelectionSort(arr, startIdx + 1);
}
public void printArray(int[] array) {
for(int number: array) {
System.out.print(String.valueOf(number) + " ");
}
}
public static void main(String args[]) {
SelectionSort ss = new SelectionSort();
int[] unsortedArray1 = {6,4,2,1,9,0};
System.out.print("Unsoreted Array Sorting: ");
ss.printArray(ss.iterativeSelectionSort(unsortedArray1));
int[] unsortedArray2 = {1,2,3,4,5,6};
System.out.print("\nAlready Soreted Array Sorting: ");
ss.printArray(ss.iterativeSelectionSort(unsortedArray2));
int[] unsortedArray3 = {6,5,4,3,2,1};
System.out.print("\nUnsoreted Array Sorting: ");
ss.printArray(ss.iterativeSelectionSort(unsortedArray3));
int[] unsortedArray4 = {6,4,2,1,9,0};
System.out.print("\nUnsoreted Array Sorting: ");
ss.printArray(ss.recursiveSelectionSort(unsortedArray4, 0));
int[] unsortedArray5 = {1,2,3,4,5,6};
System.out.print("\nAlready Soreted Array Sorting: ");
ss.printArray(ss.recursiveSelectionSort(unsortedArray5, 0));
int[] unsortedArray6 = {6,5,4,3,2,1};
System.out.print("\nUnsoreted Array Sorting: ");
ss.printArray(ss.recursiveSelectionSort(unsortedArray6, 0));
}
}<|fim▁end|> | if(arr[smallIdx] > arr[j]) {
smallIdx = j; |
<|file_name|>authors.js<|end_file_name|><|fim▁begin|>const debug = require('ghost-ignition').debug('api:canary:utils:serializers:input:authors');
const slugFilterOrder = require('./utils/slug-filter-order');
const utils = require('../../index');
function setDefaultOrder(frame) {
if (!frame.options.order && frame.options.filter) {
frame.options.autoOrder = slugFilterOrder('users', frame.options.filter);
}
if (!frame.options.order && !frame.options.autoOrder) {
frame.options.order = 'name asc';<|fim▁hole|> }
}
module.exports = {
browse(apiConfig, frame) {
debug('browse');
if (utils.isContentAPI(frame)) {
setDefaultOrder(frame);
}
},
read(apiConfig, frame) {
debug('read');
if (utils.isContentAPI(frame)) {
setDefaultOrder(frame);
}
}
};<|fim▁end|> | |
<|file_name|>array.js<|end_file_name|><|fim▁begin|>//Version-IE: < 9
if ( 'function' !== typeof Array.prototype.reduce ) {
Array.prototype.reduce = function( callback /*, initialValue*/ ) {
'use strict';
if ( null === this || 'undefined' === typeof this ) {
throw new TypeError(
'Array.prototype.reduce called on null or undefined' );
}
if ( 'function' !== typeof callback ) {
throw new TypeError( callback + ' is not a function' );
}
var t = Object( this ), len = t.length >>> 0, k = 0, value;
if ( arguments.length >= 2 ) {
value = arguments[1];
} else {
while ( k < len && ! k in t ) k++;
if ( k >= len )
throw new TypeError('Reduce of empty array with no initial value');
value = t[ k++ ];
}
for ( ; k < len ; k++ ) {
if ( k in t ) {
value = callback( value, t[k], k, t );
}
}
return value;
};
}
//Version-IE: < 9
if ( 'function' !== typeof Array.prototype.reduceRight ) {
Array.prototype.reduceRight = function( callback /*, initialValue*/ ) {
'use strict';
if ( null === this || 'undefined' === typeof this ) {
throw new TypeError(
'Array.prototype.reduce called on null or undefined' );
}
if ( 'function' !== typeof callback ) {
throw new TypeError( callback + ' is not a function' );
}
var t = Object( this ), len = t.length >>> 0, k = len - 1, value;
if ( arguments.length >= 2 ) {
value = arguments[1];
} else {
while ( k >= 0 && ! k in t ) k--;
if ( k < 0 )
throw new TypeError('Reduce of empty array with no initial value');
value = t[ k-- ];
}
for ( ; k >= 0 ; k-- ) {
if ( k in t ) {
value = callback( value, t[k], k, t );
}
}
return value;
};
}
//Version-IE: < 9
if (!Array.prototype.filter)
{
Array.prototype.filter = function(fun /*, thisArg */)
{
"use strict";
if (this === void 0 || this === null)
throw new TypeError();
var t = Object(this);
var len = t.length >>> 0;
if (typeof fun !== "function")
throw new TypeError();
var res = [];
var thisArg = arguments.length >= 2 ? arguments[1] : void 0;
for (var i = 0; i < len; i++)
{
if (i in t)
{
var val = t[i];
// NOTE: Technically this should Object.defineProperty at
// the next index, as push can be affected by
// properties on Object.prototype and Array.prototype.
// But that method's new, and collisions should be
// rare, so use the more-compatible alternative.
if (fun.call(<span style="line-height: normal;">thisArg</span><span style="line-height: normal;">, val, i, t))</span>
res.push(val);
}
}
return res;
};
}
//Version-IE: < 9
if (!Array.prototype.every) {
Array.prototype.every = function (callbackfn, thisArg) {
"use strict";
var T, k;
if (this == null) {
throw new TypeError("this is null or not defined");
}
// 1. Let O be the result of calling ToObject passing the this
// value as the argument.
var O = Object(this);
// 2. Let lenValue be the result of calling the Get internal method of O with the argument "length".
// 3. Let len be ToUint32(lenValue).
var len = O.length >>> 0;
// 4. If IsCallable(callbackfn) is false, throw a TypeError exception.
if (typeof callbackfn !== "function") {
throw new TypeError();
}
// 5. If thisArg was supplied, let T be thisArg; else let T be undefined.
if (arguments.length > 1) {
T = thisArg;
}
// 6. Let k be 0.
k = 0;
// 7. Repeat, while k < len
while (k < len) {
var kValue;
// a. Let Pk be ToString(k).
// This is implicit for LHS operands of the in operator
// b. Let kPresent be the result of calling the HasProperty internal
// method of O with argument Pk.
// This step can be combined with c
// c. If kPresent is true, then
if (k in O) {
// i. Let kValue be the result of calling the Get internal method of O with argument Pk.
kValue = O[k];
// ii. Let testResult be the result of calling the Call internal method
// of callbackfn with T as the this value and argument list
// containing kValue, k, and O.
var testResult = callbackfn.call(T, kValue, k, O);
// iii. If ToBoolean(testResult) is false, return false.
if (!testResult) {
return false;
}
}
k++;
}
return true;
};
}
//Version-IE: < 9
if (!Array.prototype.some)
{
Array.prototype.some = function(fun /*, thisArg */)
{
'use strict';
if (this === void 0 || this === null)
throw new TypeError();
var t = Object(this);
var len = t.length >>> 0;
if (typeof fun !== 'function')
throw new TypeError();
var thisArg = arguments.length >= 2 ? arguments[1] : void 0;
for (var i = 0; i < len; i++)
{
if (i in t && fun.call(thisArg, t[i], i, t))
return true;
}
return false;
};
}
// Production steps of ECMA-262, Edition 5, 15.4.4.19
// Reference: http://es5.github.com/#x15.4.4.19
//Version-IE: < 9
if (!Array.prototype.map) {
Array.prototype.map = function (callback, thisArg) {
var T, A, k;
if (this == null) {
throw new TypeError(" this is null or not defined");
}
// 1. Let O be the result of calling ToObject passing the |this| value as the argument.
var O = Object(this);
// 2. Let lenValue be the result of calling the Get internal method of O with the argument "length".
// 3. Let len be ToUint32(lenValue).
var len = O.length >>> 0;
// 4. If IsCallable(callback) is false, throw a TypeError exception.
// See: http://es5.github.com/#x9.11
if (typeof callback !== "function") {
throw new TypeError(callback + " is not a function");
}
// 5. If thisArg was supplied, let T be thisArg; else let T be undefined.
if (arguments.length > 1) {
T = thisArg;
}
// 6. Let A be a new array created as if by the expression new Array( len) where Array is
// the standard built-in constructor with that name and len is the value of len.
A = new Array(len);
// 7. Let k be 0
k = 0;
// 8. Repeat, while k < len
while (k < len) {
var kValue, mappedValue;
// a. Let Pk be ToString(k).
// This is implicit for LHS operands of the in operator
// b. Let kPresent be the result of calling the HasProperty internal method of O with argument Pk.
// This step can be combined with c
// c. If kPresent is true, then
if (k in O) {
// i. Let kValue be the result of calling the Get internal method of O with argument Pk.
kValue = O[k];
// ii. Let mappedValue be the result of calling the Call internal method of callback
// with T as the this value and argument list containing kValue, k, and O.
mappedValue = callback.call(T, kValue, k, O);
// iii. Call the DefineOwnProperty internal method of A with arguments
// Pk, Property Descriptor {Value: mappedValue, Writable: true, Enumerable: true, Configurable: true},
// and false.
// In browsers that support Object.defineProperty, use the following:
// Object.defineProperty( A, k, { value: mappedValue, writable: true, enumerable: true, configurable: true });
// For best browser support, use the following:
A[k] = mappedValue;
}
// d. Increase k by 1.
k++;
}
// 9. return A
return A;
};
}
// Production steps of ECMA-262, Edition 5, 15.4.4.18
// Reference: http://es5.github.com/#x15.4.4.18
//Version-IE: < 9
if (!Array.prototype.forEach) {
Array.prototype.forEach = function (callback, thisArg) {
var T, k;
if (this == null) {
throw new TypeError(" this is null or not defined");
}
// 1. Let O be the result of calling ToObject passing the |this| value as the argument.
var O = Object(this);
// 2. Let lenValue be the result of calling the Get internal method of O with the argument "length".
// 3. Let len be ToUint32(lenValue).
var len = O.length >>> 0;
// 4. If IsCallable(callback) is false, throw a TypeError exception.
// See: http://es5.github.com/#x9.11
if (typeof callback !== "function") {
throw new TypeError(callback + " is not a function");
}
// 5. If thisArg was supplied, let T be thisArg; else let T be undefined.
if (arguments.length > 1) {
T = thisArg;
}
// 6. Let k be 0
k = 0;<|fim▁hole|>
var kValue;
// a. Let Pk be ToString(k).
// This is implicit for LHS operands of the in operator
// b. Let kPresent be the result of calling the HasProperty internal method of O with argument Pk.
// This step can be combined with c
// c. If kPresent is true, then
if (k in O) {
// i. Let kValue be the result of calling the Get internal method of O with argument Pk.
kValue = O[k];
// ii. Call the Call internal method of callback with T as the this value and
// argument list containing kValue, k, and O.
callback.call(T, kValue, k, O);
}
// d. Increase k by 1.
k++;
}
// 8. return undefined
};
}<|fim▁end|> |
// 7. Repeat, while k < len
while (k < len) { |
<|file_name|>flat_map.rs<|end_file_name|><|fim▁begin|>use consumer::*;
use std::cell::RefCell;
use std::rc::Rc;
use stream::*;
struct FlatmapState<C, F> {
child: Rc<RefCell<Option<C>>>,
func: F,
}
impl<C, F, I, S> Consumer<I> for FlatmapState<C, F>
where F: FnMut(I) -> S,
C: Consumer<S::Item>,
S: Stream
{
fn emit(&mut self, item: I) -> bool {
if self.child.borrow().is_none() {
return false;
}
let stream = (self.func)(item);
stream.consume(self.child.clone());
self.child.borrow().is_some()
}
}
impl<C, T> Consumer<T> for Rc<RefCell<Option<C>>>
where C: Consumer<T>
{
fn emit(&mut self, item: T) -> bool {
let mut consumer = self.borrow_mut();
if let Some(ref mut consumer) = *consumer {
if consumer.emit(item) {
return true;
}
}
*consumer = None;
false
}
}
#[must_use = "stream adaptors are lazy and do nothing unless consumed"]
pub struct Flatmap<S, F> {
func: F,
stream: S,
}
impl<S, F> Flatmap<S, F> {
pub fn new(stream: S, func: F) -> Self {
Flatmap {
func: func,
stream: stream,
}
}<|fim▁hole|> where S: Stream,
F: FnMut(S::Item) -> SO,
SO: Stream
{
type Item = SO::Item;
fn consume<C>(self, consumer: C)
where C: Consumer<Self::Item>
{
self.stream.consume(FlatmapState {
child: Rc::new(RefCell::new(Some(consumer))),
func: self.func,
});
}
}<|fim▁end|> | }
impl<S, F, SO> Stream for Flatmap<S, F> |
<|file_name|>Charset5Main.java<|end_file_name|><|fim▁begin|>package com.cloudhopper.commons.charset.demo;
/*
* #%L
* ch-commons-charset
* %%
* Copyright (C) 2012 Cloudhopper by Twitter
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.cloudhopper.commons.charset.CharsetUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author joelauer
*/
public class Charset5Main {
private static final Logger logger = LoggerFactory.getLogger(Charset5Main.class);
static public void main(String[] args) throws Exception {
String sourceString = "h\u6025\u20ACllo";
String targetString = CharsetUtil.normalize(sourceString, CharsetUtil.CHARSET_UTF_8);
logger.debug("source string: " + sourceString);<|fim▁hole|> logger.debug("target string: " + targetString);
}
}<|fim▁end|> | |
<|file_name|>treeview.js<|end_file_name|><|fim▁begin|>/**
* guy
* 异步树
* @class BI.TreeView
* @extends BI.Pane
*/
BI.TreeView = BI.inherit(BI.Pane, {
_defaultConfig: function () {
return BI.extend(BI.TreeView.superclass._defaultConfig.apply(this, arguments), {
_baseCls: "bi-tree",
paras: {
selectedValues: {}
},
itemsCreator: BI.emptyFn
});
},
_init: function () {
BI.TreeView.superclass._init.apply(this, arguments);
var o = this.options;
this._stop = false;
this._createTree();
this.tip = BI.createWidget({
type: "bi.loading_bar",
invisible: true,
handler: BI.bind(this._loadMore, this)
});
BI.createWidget({
type: "bi.vertical",
scrollable: true,
scrolly: false,
element: this,
items: [this.tip]
});
if(BI.isNotNull(o.value)) {
this.setSelectedValue(o.value);
}
if (BI.isIE9Below && BI.isIE9Below()) {
this.element.addClass("hack");
}
},
_createTree: function () {
this.id = "bi-tree" + BI.UUID();
if (this.nodes) {
this.nodes.destroy();
}
if (this.tree) {
this.tree.destroy();
}
this.tree = BI.createWidget({
type: "bi.layout",
element: "<ul id='" + this.id + "' class='ztree'></ul>"
});
BI.createWidget({
type: "bi.default",
element: this.element,
items: [this.tree]
});
},
// 选择节点触发方法
_selectTreeNode: function (treeId, treeNode) {
this.fireEvent(BI.Controller.EVENT_CHANGE, BI.Events.CLICK, treeNode, this);
this.fireEvent(BI.TreeView.EVENT_CHANGE, treeNode, this);
},
// 配置属性
_configSetting: function () {
var paras = this.options.paras;
var self = this;
var setting = {
async: {
enable: true,
url: getUrl,
autoParam: ["id", "name"], // 节点展开异步请求自动提交id和name
otherParam: BI.cjkEncodeDO(paras) // 静态参数
},
check: {
enable: true
},
data: {
key: {
title: "title",
name: "text" // 节点的name属性替换成text
},
simpleData: {
enable: true // 可以穿id,pid属性的对象数组
}
},
view: {
showIcon: false,
expandSpeed: "",
nameIsHTML: true, // 节点可以用html标签代替
dblClickExpand: false
},
callback: {
beforeExpand: beforeExpand,
onAsyncSuccess: onAsyncSuccess,
onAsyncError: onAsyncError,
beforeCheck: beforeCheck,
onCheck: onCheck,
onExpand: onExpand,
onCollapse: onCollapse,
onClick: onClick
}
};
var className = "dark", perTime = 100;
function onClick (event, treeId, treeNode) {
// 当前点击节点的状态是半选,且为true_part, 则将其改为false_part,使得点击半选后切换到的是全选
var checked = treeNode.checked;
var status = treeNode.getCheckStatus();
if(status.half === true && status.checked === true) {
checked = false;
}
// 更新此node的check状态, 影响父子关联,并调用beforeCheck和onCheck回调
self.nodes.checkNode(treeNode, !checked, true, true);
}
function getUrl (treeId, treeNode) {
var parentNode = self._getParentValues(treeNode);
treeNode.times = treeNode.times || 1;
var param = "id=" + treeNode.id
+ "×=" + (treeNode.times++)
+ "&parentValues= " + _global.encodeURIComponent(BI.jsonEncode(parentNode))
+ "&checkState=" + _global.encodeURIComponent(BI.jsonEncode(treeNode.getCheckStatus()));
return "&" + param;
}
function beforeExpand (treeId, treeNode) {
if (!treeNode.isAjaxing) {
if (!treeNode.children) {
treeNode.times = 1;
ajaxGetNodes(treeNode, "refresh");
}
return true;
}
BI.Msg.toast("Please Wait。", "warning"); // 不展开节点,也不触发onExpand事件
return false;
}
function onAsyncSuccess (event, treeId, treeNode, msg) {
treeNode.halfCheck = false;
if (!msg || msg.length === 0 || /^<html>[\s,\S]*<\/html>$/gi.test(msg) || self._stop) {
return;
}
var zTree = self.nodes;
var totalCount = treeNode.count || 0;
// 尝试去获取下一组节点,若获取值为空数组,表示获取完成
// TODO by GUY
if (treeNode.children.length > totalCount) {
treeNode.count = treeNode.children.length;
BI.delay(function () {
ajaxGetNodes(treeNode);
}, perTime);
} else {
// treeNode.icon = "";
zTree.updateNode(treeNode);
zTree.selectNode(treeNode.children[0]);
// className = (className === "dark" ? "":"dark");
}
}
function onAsyncError (event, treeId, treeNode, XMLHttpRequest, textStatus, errorThrown) {
var zTree = self.nodes;
BI.Msg.toast("Error!", "warning");
// treeNode.icon = "";
// zTree.updateNode(treeNode);
}
function ajaxGetNodes (treeNode, reloadType) {
var zTree = self.nodes;
if (reloadType == "refresh") {
zTree.updateNode(treeNode); // 刷新一下当前节点,如果treeNode.xxx被改了的话
}
zTree.reAsyncChildNodes(treeNode, reloadType, true); // 强制加载子节点,reloadType === refresh为先清空再加载,否则为追加到现有子节点之后
}
function beforeCheck (treeId, treeNode) {
treeNode.halfCheck = false;
if (treeNode.checked === true) {
// 将展开的节点halfCheck设为false,解决展开节点存在halfCheck=true的情况 guy
// 所有的半选状态都需要取消halfCheck=true的情况
function track (children) {
BI.each(children, function (i, ch) {
if (ch.halfCheck === true) {
ch.halfCheck = false;
track(ch.children);
}
});
}
track(treeNode.children);
var treeObj = self.nodes;
var nodes = treeObj.getSelectedNodes();
BI.$.each(nodes, function (index, node) {
node.halfCheck = false;
});
}
var status = treeNode.getCheckStatus();
// 当前点击节点的状态是半选,且为true_part, 则将其改为false_part,使得点击半选后切换到的是全选
if(status.half === true && status.checked === true) {
treeNode.checked = false;
}
}
function onCheck (event, treeId, treeNode) {
self._selectTreeNode(treeId, treeNode);
}
function onExpand (event, treeId, treeNode) {
treeNode.halfCheck = false;
}
function onCollapse (event, treeId, treeNode) {
}
return setting;
},
_getParentValues: function (treeNode) {
if (!treeNode.getParentNode()) {
return [];
}
var parentNode = treeNode.getParentNode();
var result = this._getParentValues(parentNode);
result = result.concat([this._getNodeValue(parentNode)]);
return result;
},
_getNodeValue: function (node) {
// 去除标红
return node.value == null ? BI.replaceAll(node.text.replace(/<[^>]+>/g, ""), " ", " ") : node.value;
},
// 获取半选框值
_getHalfSelectedValues: function (map, node) {
var self = this;
var checkState = node.getCheckStatus();
// 将未选的去掉
if (checkState.checked === false && checkState.half === false) {
return;
}
// 如果节点已展开,并且是半选
if (BI.isNotEmptyArray(node.children) && checkState.half === true) {
var children = node.children;
BI.each(children, function (i, ch) {
self._getHalfSelectedValues(map, ch);
});
return;
}
var parent = node.parentValues || self._getParentValues(node);
var path = parent.concat(this._getNodeValue(node));
// 当前节点是全选的,因为上面的判断已经排除了不选和半选
if (BI.isNotEmptyArray(node.children) || checkState.half === false) {
this._buildTree(map, path);
return;
}
// 剩下的就是半选不展开的节点,因为不知道里面是什么情况,所以借助selectedValues(这个是完整的选中情况)
var storeValues = BI.deepClone(this.options.paras.selectedValues);
var treeNode = this._getTree(storeValues, path);
this._addTreeNode(map, parent, this._getNodeValue(node), treeNode);
},
// 获取的是以values最后一个节点为根的子树
_getTree: function (map, values) {
var cur = map;
BI.any(values, function (i, value) {
if (cur[value] == null) {
return true;
}
cur = cur[value];
});
return cur;
},
// 以values为path一路向里补充map, 并在末尾节点添加key: value节点
_addTreeNode: function (map, values, key, value) {
var cur = map;
BI.each(values, function (i, value) {
if (cur[value] == null) {
cur[value] = {};
}
cur = cur[value];
});
cur[key] = value;
},
// 构造树节点
_buildTree: function (map, values) {
var cur = map;
BI.each(values, function (i, value) {
if (cur[value] == null) {
cur[value] = {};
}
cur = cur[value];
});
},
// 获取选中的值
_getSelectedValues: function () {
var self = this;
var hashMap = {};
var rootNoots = this.nodes.getNodes();
track(rootNoots); // 可以看到这个方法没有递归调用,所以在_getHalfSelectedValues中需要关心全选的节点
function track (nodes) {
BI.each(nodes, function (i, node) {
var checkState = node.getCheckStatus();
if (checkState.checked === true || checkState.half === true) {
if (checkState.half === true) {
self._getHalfSelectedValues(hashMap, node);
} else {
var parentValues = node.parentValues || self._getParentValues(node);
var values = parentValues.concat([self._getNodeValue(node)]);
self._buildTree(hashMap, values);
}
}
});
}
return hashMap;
},
// 处理节点
_dealWidthNodes: function (nodes) {
var self = this, o = this.options;
var ns = BI.Tree.arrayFormat(nodes);
BI.each(ns, function (i, n) {
n.title = n.title || n.text || n.value;
n.isParent = n.isParent || n.parent;
// 处理标红
if (BI.isKey(o.paras.keyword)) {
n.text = BI.$("<div>").__textKeywordMarked__(n.text, o.paras.keyword, n.py).html();
} else {
n.text = BI.htmlEncode(n.text + "");
}
});
return nodes;
},
_loadMore: function () {
var self = this, o = this.options;
this.tip.setLoading();
var op = BI.extend({}, o.paras, {
times: ++this.times
});
o.itemsCreator(op, function (res) {
if (self._stop === true) {
return;
}
var hasNext = !!res.hasNext, nodes = res.items || [];
if (!hasNext) {
self.tip.setEnd();
} else {
self.tip.setLoaded();
}
if (nodes.length > 0) {
self.nodes.addNodes(null, self._dealWidthNodes(nodes));
}
});
},
// 生成树内部方法
_initTree: function (setting) {
var self = this, o = this.options;
self.fireEvent(BI.Events.INIT);
this.times = 1;
var tree = this.tree;
tree.empty();
this.loading();
this.tip.setVisible(false);
var callback = function (nodes) {
if (self._stop === true) {
return;
}
self.nodes = BI.$.fn.zTree.init(tree.element, setting, nodes);
};
var op = BI.extend({}, o.paras, {
times: 1
});<|fim▁hole|> return;
}
var hasNext = !!res.hasNext, nodes = res.items || [];
if (nodes.length > 0) {
callback(self._dealWidthNodes(nodes));
}
self.setTipVisible(nodes.length <= 0);
self.loaded();
if (!hasNext) {
self.tip.invisible();
} else {
self.tip.setLoaded();
}
op.times === 1 && self.fireEvent(BI.Events.AFTERINIT);
});
},
// 构造树结构,
initTree: function (nodes, setting) {
var setting = setting || {
async: {
enable: false
},
check: {
enable: false
},
data: {
key: {
title: "title",
name: "text"
},
simpleData: {
enable: true
}
},
view: {
showIcon: false,
expandSpeed: "",
nameIsHTML: true
},
callback: {}
};
this.nodes = BI.$.fn.zTree.init(this.tree.element, setting, nodes);
},
start: function () {
this._stop = false;
},
stop: function () {
this._stop = true;
},
// 生成树方法
stroke: function (config) {
delete this.options.keyword;
BI.extend(this.options.paras, config);
var setting = this._configSetting();
this._createTree();
this.start();
this._initTree(setting);
},
populate: function () {
this.stroke.apply(this, arguments);
},
hasChecked: function () {
var treeObj = this.nodes;
return treeObj.getCheckedNodes(true).length > 0;
},
checkAll: function (checked) {
function setNode (children) {
BI.each(children, function (i, child) {
child.halfCheck = false;
setNode(child.children);
});
}
if (!this.nodes) {
return;
}
BI.each(this.nodes.getNodes(), function (i, node) {
node.halfCheck = false;
setNode(node.children);
});
this.nodes.checkAllNodes(checked);
},
expandAll: function (flag) {
this.nodes && this.nodes.expandAll(flag);
},
// 设置树节点的状态
setValue: function (value, param) {
this.checkAll(false);
this.updateValue(value, param);
this.refresh();
},
setSelectedValue: function (value) {
this.options.paras.selectedValues = BI.deepClone(value || {});
},
updateValue: function (values, param) {
if (!this.nodes) {
return;
}
param || (param = "value");
var treeObj = this.nodes;
BI.each(values, function (v, op) {
var nodes = treeObj.getNodesByParam(param, v, null);
BI.each(nodes, function (j, node) {
BI.extend(node, {checked: true}, op);
treeObj.updateNode(node);
});
});
},
refresh: function () {
this.nodes && this.nodes.refresh();
},
getValue: function () {
if (!this.nodes) {
return null;
}
return this._getSelectedValues();
},
destroyed: function () {
this.stop();
this.nodes && this.nodes.destroy();
}
});
BI.extend(BI.TreeView, {
REQ_TYPE_INIT_DATA: 1,
REQ_TYPE_ADJUST_DATA: 2,
REQ_TYPE_SELECT_DATA: 3,
REQ_TYPE_GET_SELECTED_DATA: 4
});
BI.TreeView.EVENT_CHANGE = "EVENT_CHANGE";
BI.TreeView.EVENT_INIT = BI.Events.INIT;
BI.TreeView.EVENT_AFTERINIT = BI.Events.AFTERINIT;
BI.shortcut("bi.tree_view", BI.TreeView);<|fim▁end|> |
o.itemsCreator(op, function (res) {
if (self._stop === true) { |
<|file_name|>plate.py<|end_file_name|><|fim▁begin|># -----------------------------------------------------------------------------
# Copyright (c) 2016--, The Plate Mapper Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import platemap as pm
class Plate(pm.base.PMObject):
_table = 'plate'
@classmethod
def plates(cls, finalized=False):
"""Returns all plates available in the system
Parameters
----------
finalized: bool, optional
Whether to only grab finalized plates. Default False.
Returns
-------
list of Plate objects
All plates in the system
"""
sql = "SELECT plate_id FROM barcodes.plate"
if finalized:
sql += " WHERE finalized = TRUE"
sql += " ORDER BY created_on DESC"
with pm.sql.TRN:
pm.sql.TRN.add(sql)
return [cls(p) for p in pm.sql.TRN.execute_fetchflatten()]
@classmethod
def create(cls, barcode, name, person, rows, cols):
r"""Creates a new plate object
Parameters
----------
barcode : str
The barcode assigned to the plate
name : str
Identifying name for the plate
person : Person object
The person creating the plate
rows : int
Number of rows on the plate
cols : int
Number of columns in the plate
Returns
-------
Plate object
New plate object
Raises
------
DuplicateError
Plate with given barcode already exists
DeveloperError
Barcode already assigned to something else
"""
plate_sql = """INSERT INTO barcodes.plate
(plate_id, plate, rows, cols, person_id)
VALUES (%s, %s, %s, %s, %s)
"""
barcode_sql = """UPDATE barcodes.barcode
SET assigned_on = NOW()
WHERE barcode = %s
"""
with pm.sql.TRN:
if cls.exists(barcode):
raise pm.exceptions.DuplicateError(barcode, 'plate')
if pm.util.check_barcode_assigned(barcode):
raise pm.exceptions.DeveloperError(
'Barcode %s already assigned!' % barcode)
pm.sql.TRN.add(plate_sql, [barcode, name, rows, cols, person.id])
pm.sql.TRN.add(barcode_sql, [barcode])
pm.sql.TRN.execute()
return cls(barcode)
@staticmethod
def delete(cls, barcode):
r"""Delete a plate from the system
Parameters
----------
barcode : str
The plate barcode
"""
raise NotImplementedError()
@staticmethod
def exists(barcode):
r"""Checks if a plate already exists
Parameters
----------
barcode : str
Barcode for plate
Returns
-------
bool
Whether plate already exists (True) or not (False)
"""
sql = "SELECT EXISTS(SELECT * FROM barcodes.plate WHERE plate_id = %s)"
with pm.sql.TRN:
pm.sql.TRN.add(sql, [barcode])
return pm.sql.TRN.execute_fetchlast()
def _check_finalized(self):
"""Locks down changes to plate if already finalized
Raises
------
EditError
Trying to change values of a finalized plate
"""
if self.finalized:
raise pm.exceptions.EditError(self.id)
def __getitem__(self, pos):
"""
Returns the sample at a given position on the plate
Parameters
----------
pos : tuple of int
The plate well to get sample for
Returns
-------
Sample object or None
Sample at the positon, or None if no sample.
Raises
------
IndexError
Position given is outside of plate
Notes
-----
Passed a tuple, so called as sample = plate[row, col]
"""
sql = """SELECT sample_id
FROM barcodes.plates_samples
WHERE plate_id = %s AND plate_row = %s and plate_col = %s
"""
with pm.sql.TRN:
row, col = pos[0], pos[1]
rows, cols = self.shape
if row < 0 or row >= rows or col < 0 or col >= cols:
raise IndexError('Position %d, %d not on plate' % (row, col))
pm.sql.TRN.add(sql, [self.id, row, col])
sid = pm.sql.TRN.execute_fetchlast()
return None if sid is None else pm.sample.Sample(sid)
def __setitem__(self, pos, value):
"""
Adds the sample at a given position on the plate
Parameters
----------
pos : tuple of int
The plate well to add sample at
value : Sample object or None
The sample to add, or None to remove sample from position
Raises
------
IndexError
Position given is outside of plate
Notes
-----
Passed a tuple, so called as plate[row, col] = Sample()
"""
# Need to get around postgres not having upsert in postgres < 9.5
# So do this slightly hacky workaround
# http://www.the-art-of-web.com/sql/upsert/
upsert_sql = """WITH upsert AS (<|fim▁hole|> UPDATE barcodes.plates_samples
SET sample_id = %s
WHERE plate_id = %s AND plate_row = %s
AND plate_col = %s
RETURNING *)
INSERT INTO barcodes.plates_samples
(sample_id, plate_id, plate_row, plate_col)
SELECT %s, %s, %s, %s WHERE NOT EXISTS (
SELECT * FROM upsert)
"""
delete_sql = """DELETE FROM barcodes.plates_samples
WHERE plate_id = %s AND plate_row = %s
AND plate_col = %s"""
with pm.sql.TRN:
self._check_finalized()
row, col = pos[0], pos[1]
rows, cols = self.shape
if row < 0 or row >= rows or col < 0 or col >= cols:
raise IndexError('Position %d, %d not on plate' % (row, col))
if value is not None:
pm.sql.TRN.add(upsert_sql, [value.id, self.id, row, col,
value.id, self.id, row, col])
else:
pm.sql.TRN.add(delete_sql, [self.id, row, col])
@property
def name(self):
"""Name of the plate
Returns
-------
str
Name of the plate
"""
return self._get_property('plate')
@property
def finalized(self):
"""Finalized status of the plate
Returns
-------
bool
If the plate is finalized (True) or not (False)
"""
return self._get_property('finalized')
@property
def shape(self):
"""Shaple of the plate
Returns
-------
tuple of int
Plate dimensions in the form (rows, cols)
"""
sql = "SELECT rows, cols FROM barcodes.plate WHERE plate_id = %s"
with pm.sql.TRN:
pm.sql.TRN.add(sql, [self.id])
return tuple(pm.sql.TRN.execute_fetchindex()[0])
@property
def samples(self):
"""List of samples in the plate, ordered by row down the plate
Returns
-------
list of Sample objects
Samples on the plate, ordered by row.
Sample at [0, 0], followed by [0, 1], [0, 2], etc.
"""
sql = """SELECT sample_id
FROM barcodes.plates_samples
WHERE plate_id = %s
ORDER BY plate_row, plate_col
"""
with pm.sql.TRN:
pm.sql.TRN.add(sql, [self.id])
return [pm.sample.Sample(s) for s in
pm.sql.TRN.execute_fetchflatten()]
@property
def platemap(self):
"""Samples on the plate, mapped as list of lists
Returns
-------
list of list of Sample objects or None
Samples on the plate, with None if no sample at the position
"""
sql = """SELECT plate_row::varchar || plate_col::varchar, sample_id
FROM barcodes.plates_samples
WHERE plate_id = %s
ORDER BY plate_row, plate_col
"""
with pm.sql.TRN:
rows, cols = self.shape
pm.sql.TRN.add(sql, [self.id])
# Turn the returned rows into a dict keyed to the combined
# rowcol created by the sql query
samples = dict(pm.sql.TRN.execute_fetchindex())
ret = []
# Loop over each sample and add None of no sample in position
for r in range(rows):
ret.append([])
for c in range(cols):
samp = samples.get('%d%d' % (r, c), None)
ret[r].append(pm.sample.Sample(samp)
if samp is not None else None)
return ret
# -------- functions ----------------
def to_html(self):
"""Builds an HTML table representation of the plate
Returns
-------
str
HTML representation of the plate
Notes
-----
The class `plate` is added to the table for css styling.
"""
samples = self.platemap
rows, cols = self.shape
table = ['<table class="plate"><tr><th></th>']
# Add column header
for col in range(1, cols + 1):
table.append('<th>%d</th>' % col)
table.append('</tr>')
for row in range(rows):
table.append('<tr><th>%s</th>' % chr(65 + row))
for col in range(cols):
samp = samples[row][col]
table.append('<td>%s</td>' %
samp.name if samp is not None else '<td></td>')
table.append('</tr>')
table.append('</table>')
return ''.join(table)
def finalize(self):
"""Finalizes plate by flagging it in the DB"""
sql = "UPDATE barcodes.plate SET finalized = 'T' WHERE plate_id = %s"
with pm.sql.TRN:
pm.sql.TRN.add(sql, [self.id])
def revert(self, user):
"""Reverts the plate from finalized to editable
Parameters
----------
user : User object
User requesting the revert
Raises
------
AssignError
User is not an admin
"""
# Make sure user is admin
if not user.check_access('Admin'):
raise pm.exceptions.AssignError('User %s is not admin!' % user)
sql = "UPDATE barcodes.plate SET finalized = 'F' WHERE plate_id = %s"
with pm.sql.TRN:
pm.sql.TRN.add(sql, [self.id])<|fim▁end|> | |
<|file_name|>account_payment_order.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2015 Compassion CH (Nicolas Tran)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, models
class AccountPaymentOrder(models.Model):
_inherit = 'account.payment.order'
<|fim▁hole|> Replace action to propose upload SEPA file to FDS.
:return: window action
"""
action = super(AccountPaymentOrder, self).open2generated()
if self.payment_method_id.code == 'sepa_credit_transfer':
upload_obj = self.env['payment.order.upload.sepa.wizard']
attachment_id = action['res_id']
upload_wizard = upload_obj.create({
'attachment_id': attachment_id,
'payment_order_id': self.id,
})
del action['view_id']
action.update({
'res_model': upload_obj._name,
'res_id': upload_wizard.id,
'flags': {'initial_mode': 'edit'},
'attachment_id': attachment_id
})
return action<|fim▁end|> | @api.multi
def open2generated(self):
""" |
<|file_name|>test_cred_client.py<|end_file_name|><|fim▁begin|># Copyright 2016 Hewlett Packard Enterprise Development LP
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from tempest.lib.common import cred_client
from tempest.tests import base
class TestCredClientV2(base.TestCase):
def setUp(self):
super(TestCredClientV2, self).setUp()
self.identity_client = mock.MagicMock()
self.projects_client = mock.MagicMock()
self.users_client = mock.MagicMock()
self.roles_client = mock.MagicMock()
self.creds_client = cred_client.V2CredsClient(self.identity_client,
self.projects_client,
self.users_client,
self.roles_client)
def test_create_project(self):
self.projects_client.create_tenant.return_value = {
'tenant': 'a_tenant'
}
res = self.creds_client.create_project('fake_name', 'desc')
self.assertEqual('a_tenant', res)
self.projects_client.create_tenant.assert_called_once_with(
name='fake_name', description='desc')
def test_delete_project(self):
self.creds_client.delete_project('fake_id')
self.projects_client.delete_tenant.assert_called_once_with(
'fake_id')
<|fim▁hole|> self.identity_client = mock.MagicMock()
self.projects_client = mock.MagicMock()
self.users_client = mock.MagicMock()
self.roles_client = mock.MagicMock()
self.domains_client = mock.MagicMock()
self.domains_client.list_domains.return_value = {
'domains': [{'id': 'fake_domain_id'}]
}
self.creds_client = cred_client.V3CredsClient(self.identity_client,
self.projects_client,
self.users_client,
self.roles_client,
self.domains_client,
'fake_domain')
def test_create_project(self):
self.projects_client.create_project.return_value = {
'project': 'a_tenant'
}
res = self.creds_client.create_project('fake_name', 'desc')
self.assertEqual('a_tenant', res)
self.projects_client.create_project.assert_called_once_with(
name='fake_name', description='desc', domain_id='fake_domain_id')
def test_delete_project(self):
self.creds_client.delete_project('fake_id')
self.projects_client.delete_project.assert_called_once_with(
'fake_id')<|fim▁end|> |
class TestCredClientV3(base.TestCase):
def setUp(self):
super(TestCredClientV3, self).setUp() |
<|file_name|>test_paralleltools.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
__author__ = """Co-Pierre Georg ([email protected])"""
import sys
from src.paralleltools import Parallel
#-------------------------------------------------------------------------
#<|fim▁hole|>
"""
VARIABLES
"""
args = sys.argv
config_file_name = args[1]
"""
CODE
"""
parallel = Parallel()
parallel.create_config_files(config_file_name)<|fim▁end|> | # conftools.py is a simple module to manage .xml configuration files
#
#-------------------------------------------------------------------------
if __name__ == '__main__': |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># encoding: utf-8<|fim▁hole|># ## Imports
from threading import local as __local
# Expose these as importable from the top-level `web.core` namespace.
from .application import Application
from .util import lazy
# ## Module Globals
__all__ = ['local', 'Application', 'lazy'] # Symbols exported by this package.
# This is to support the web.ext.local extension, and allow for early importing of the variable.
local = __local()<|fim▁end|> | |
<|file_name|>EntityTypeValidator.java<|end_file_name|><|fim▁begin|>/**
*
* $Id$
*/
package Variation_Diff.validation;
/**
* A sample validator interface for {@link Variation_Diff.EntityType}.
* This doesn't really do anything, and it's not a real EMF artifact.
* It was generated by the org.eclipse.emf.examples.generator.validator plug-in to illustrate how EMF's code generator can be extended.<|fim▁hole|> */
public interface EntityTypeValidator
{
boolean validate();
boolean validateType(String value);
}<|fim▁end|> | * This can be disabled with -vmargs -Dorg.eclipse.emf.examples.generator.validator=false. |
<|file_name|>gendata.py<|end_file_name|><|fim▁begin|>class Project(object):
def __init__(self, name, start, end):
self.name = name
self.start = start
self.end = end
def __repr__(self):
return "Project '%s from %s to %s" % (
self.name, self.start.isoformat(), self.end.isoformat()<|fim▁hole|><|fim▁end|> | ) |
<|file_name|>volume.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 Nexenta Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`nexenta.volume` -- Driver to store volumes on Nexenta Appliance
=====================================================================
.. automodule:: nexenta.volume
.. moduleauthor:: Yuriy Taraday <[email protected]>
"""
from nova import exception
from nova import flags
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova.volume import driver
from nova.volume import nexenta
from nova.volume.nexenta import jsonrpc
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
nexenta_opts = [
cfg.StrOpt('nexenta_host',
default='',
help='IP address of Nexenta SA'),
cfg.IntOpt('nexenta_rest_port',
default=2000,
help='HTTP port to connect to Nexenta REST API server'),
cfg.StrOpt('nexenta_rest_protocol',
default='auto',
help='Use http or https for REST connection (default auto)'),
cfg.StrOpt('nexenta_user',
default='admin',
help='User name to connect to Nexenta SA'),
cfg.StrOpt('nexenta_password',
default='nexenta',
help='Password to connect to Nexenta SA'),
cfg.IntOpt('nexenta_iscsi_target_portal_port',
default=3260,
help='Nexenta target portal port'),
cfg.StrOpt('nexenta_volume',
default='nova',
help='pool on SA that will hold all volumes'),
cfg.StrOpt('nexenta_target_prefix',
default='iqn.1986-03.com.sun:02:nova-',
help='IQN prefix for iSCSI targets'),
cfg.StrOpt('nexenta_target_group_prefix',
default='nova/',
help='prefix for iSCSI target groups on SA'),
cfg.StrOpt('nexenta_blocksize',
default='',
help='block size for volumes (blank=default,8KB)'),
cfg.BoolOpt('nexenta_sparse',
default=False,
help='flag to create sparse volumes'),
]
FLAGS.register_opts(nexenta_opts)
class NexentaDriver(driver.ISCSIDriver): # pylint: disable=R0921
"""Executes volume driver commands on Nexenta Appliance."""
def __init__(self):
super(NexentaDriver, self).__init__()
def do_setup(self, context):
protocol = FLAGS.nexenta_rest_protocol
auto = protocol == 'auto'
if auto:
protocol = 'http'
self.nms = jsonrpc.NexentaJSONProxy(
'%s://%s:%s/rest/nms/' % (protocol, FLAGS.nexenta_host,
FLAGS.nexenta_rest_port),
FLAGS.nexenta_user, FLAGS.nexenta_password, auto=auto)
def check_for_setup_error(self):
"""Verify that the volume for our zvols exists.
:raise: :py:exc:`LookupError`
"""
if not self.nms.volume.object_exists(FLAGS.nexenta_volume):
raise LookupError(_("Volume %s does not exist in Nexenta SA"),
FLAGS.nexenta_volume)
@staticmethod
def _get_zvol_name(volume_name):
"""Return zvol name that corresponds given volume name."""
return '%s/%s' % (FLAGS.nexenta_volume, volume_name)
@staticmethod
def _get_target_name(volume_name):
"""Return iSCSI target name to access volume."""
return '%s%s' % (FLAGS.nexenta_target_prefix, volume_name)
@staticmethod
def _get_target_group_name(volume_name):
"""Return Nexenta iSCSI target group name for volume."""
return '%s%s' % (FLAGS.nexenta_target_group_prefix, volume_name)
def create_volume(self, volume):
"""Create a zvol on appliance.
:param volume: volume reference
"""
self.nms.zvol.create(
self._get_zvol_name(volume['name']),
'%sG' % (volume['size'],),
FLAGS.nexenta_blocksize, FLAGS.nexenta_sparse)
def delete_volume(self, volume):
"""Destroy a zvol on appliance.
:param volume: volume reference
"""
try:
self.nms.zvol.destroy(self._get_zvol_name(volume['name']), '')
except nexenta.NexentaException as exc:
if "zvol has children" in exc.args[1]:
raise exception.VolumeIsBusy
else:
raise
def create_snapshot(self, snapshot):
"""Create snapshot of existing zvol on appliance.
:param snapshot: shapshot reference
"""
self.nms.zvol.create_snapshot(
self._get_zvol_name(snapshot['volume_name']),
snapshot['name'], '')
def create_volume_from_snapshot(self, volume, snapshot):
"""Create new volume from other's snapshot on appliance.
:param volume: reference of volume to be created
:param snapshot: reference of source snapshot
"""
self.nms.zvol.clone(
'%s@%s' % (self._get_zvol_name(snapshot['volume_name']),
snapshot['name']),
self._get_zvol_name(volume['name']))
def delete_snapshot(self, snapshot):
"""Delete volume's snapshot on appliance.
:param snapshot: shapshot reference
"""
try:
self.nms.snapshot.destroy(
'%s@%s' % (self._get_zvol_name(snapshot['volume_name']),
snapshot['name']),
'')
except nexenta.NexentaException as exc:
if "snapshot has dependent clones" in exc.args[1]:
raise exception.SnapshotIsBusy
else:
raise
def local_path(self, volume):
"""Return local path to existing local volume.
We never have local volumes, so it raises NotImplementedError.
:raise: :py:exc:`NotImplementedError`
"""
LOG.error(_("Call to local_path should not happen."
" Verify that use_local_volumes flag is turned off."))
raise NotImplementedError
def _do_export(self, _ctx, volume, ensure=False):
"""Do all steps to get zvol exported as LUN 0 at separate target.
:param volume: reference of volume to be exported
:param ensure: if True, ignore errors caused by already existing
resources<|fim▁hole|> zvol_name = self._get_zvol_name(volume['name'])
target_name = self._get_target_name(volume['name'])
target_group_name = self._get_target_group_name(volume['name'])
try:
self.nms.iscsitarget.create_target({'target_name': target_name})
except nexenta.NexentaException as exc:
if not ensure or 'already configured' not in exc.args[1]:
raise
else:
LOG.info(_('Ignored target creation error "%s"'
' while ensuring export'), exc)
try:
self.nms.stmf.create_targetgroup(target_group_name)
except nexenta.NexentaException as exc:
if not ensure or 'already exists' not in exc.args[1]:
raise
else:
LOG.info(_('Ignored target group creation error "%s"'
' while ensuring export'), exc)
try:
self.nms.stmf.add_targetgroup_member(target_group_name,
target_name)
except nexenta.NexentaException as exc:
if not ensure or 'already exists' not in exc.args[1]:
raise
else:
LOG.info(_('Ignored target group member addition error "%s"'
' while ensuring export'), exc)
try:
self.nms.scsidisk.create_lu(zvol_name, {})
except nexenta.NexentaException as exc:
if not ensure or 'in use' not in exc.args[1]:
raise
else:
LOG.info(_('Ignored LU creation error "%s"'
' while ensuring export'), exc)
try:
self.nms.scsidisk.add_lun_mapping_entry(zvol_name, {
'target_group': target_group_name,
'lun': '0'})
except nexenta.NexentaException as exc:
if not ensure or 'view entry exists' not in exc.args[1]:
raise
else:
LOG.info(_('Ignored LUN mapping entry addition error "%s"'
' while ensuring export'), exc)
return '%s:%s,1 %s' % (FLAGS.nexenta_host,
FLAGS.nexenta_iscsi_target_portal_port,
target_name)
def create_export(self, _ctx, volume):
"""Create new export for zvol.
:param volume: reference of volume to be exported
:return: iscsiadm-formatted provider location string
"""
loc = self._do_export(_ctx, volume, ensure=False)
return {'provider_location': loc}
def ensure_export(self, _ctx, volume):
"""Recreate parts of export if necessary.
:param volume: reference of volume to be exported
"""
self._do_export(_ctx, volume, ensure=True)
def remove_export(self, _ctx, volume):
"""Destroy all resources created to export zvol.
:param volume: reference of volume to be unexported
"""
zvol_name = self._get_zvol_name(volume['name'])
target_name = self._get_target_name(volume['name'])
target_group_name = self._get_target_group_name(volume['name'])
self.nms.scsidisk.delete_lu(zvol_name)
try:
self.nms.stmf.destroy_targetgroup(target_group_name)
except nexenta.NexentaException as exc:
# We assume that target group is already gone
LOG.warn(_('Got error trying to destroy target group'
' %(target_group)s, assuming it is already gone: %(exc)s'),
{'target_group': target_group_name, 'exc': exc})
try:
self.nms.iscsitarget.delete_target(target_name)
except nexenta.NexentaException as exc:
# We assume that target is gone as well
LOG.warn(_('Got error trying to delete target %(target)s,'
' assuming it is already gone: %(exc)s'),
{'target': target_name, 'exc': exc})<|fim▁end|> | :return: iscsiadm-formatted provider location string
""" |
<|file_name|>htckbdhandler.cpp<|end_file_name|><|fim▁begin|>/*
* Jeremy Compostella <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or (at
* your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
*/
#include "htckbdhandler.h"
#include <QFile>
#include <QTextStream>
#include <QSocketNotifier>
#include <QDebug>
#include <fcntl.h>
#include <unistd.h>
#include <htckbdmap.h>
#include <linux/input.h>
htcKbdHandler::htcKbdHandler(const QString &device)
: modifiers(Qt::NoModifier), capsLock(false)
{
setObjectName("Htc keyboard Handler");
kbdFD = ::open(device.toLocal8Bit().constData(), O_RDONLY, 0);
if (kbdFD >= 0) {
m_notify = new QSocketNotifier(kbdFD, QSocketNotifier::Read, this);
connect(m_notify, SIGNAL(activated(int)), this, SLOT(processEvent()));
} else
qWarning("Cannot open %s", device.toLocal8Bit().constData());
}
htcKbdHandler::~htcKbdHandler() { }
void htcKbdHandler::processEvent()
{
#define MAX_EVENT 10
struct input_event events[MAX_EVENT];
unsigned int i;
int n = read(kbdFD, &events, sizeof(struct input_event) * MAX_EVENT);
for (i = 0 ; i < n / sizeof(struct input_event) ; ++i)
processEvent(events[i]);
}<|fim▁hole|>
void htcKbdHandler::processEvent(struct input_event event)
{
static struct input_event previous = { {0, 0}, 0, 0, 0};
static uint previous_nb = 0;
struct QWSKeyMap key;
if (event.code > keyMSize || event.code == 0)
return;
#define MIN_REPEAT 5
key = htcuniversalKeyMap[event.code];
if ((event.code == previous.code &&
(previous.value == 0 ? !event.value : event.value)) &&
(key.key_code == Qt::Key_Control || key.key_code == Qt::Key_Shift ||
key.key_code == Qt::Key_CapsLock || key.key_code == Qt::Key_Alt ||
previous_nb++ <= MIN_REPEAT))
return;
if (event.code != previous.code)
previous_nb = 0;
if (key.key_code == Qt::Key_Control)
modifiers ^= Qt::ControlModifier;
if (key.key_code == Qt::Key_Shift)
modifiers ^= Qt::ShiftModifier;
if (key.key_code == Qt::Key_Alt)
modifiers ^= Qt::AltModifier;
if (key.key_code == Qt::Key_CapsLock && event.value == 0)
capsLock = !capsLock;
ushort unicode = key.unicode;
if (modifiers & Qt::ShiftModifier && !capsLock)
unicode = key.shift_unicode;
else if (modifiers & Qt::ControlModifier)
unicode = key.ctrl_unicode;
else if (modifiers & Qt::AltModifier)
unicode = key.alt_unicode;
else if (capsLock && !(modifiers & Qt::ShiftModifier))
unicode = key.shift_unicode;
processKeyEvent(unicode, key.key_code, modifiers, event.value != 0, false);
previous = event;
}<|fim▁end|> | |
<|file_name|>query_external_sheets_permanent_table.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def query_external_sheets_permanent_table(dataset_id):
# [START bigquery_query_external_sheets_perm]
from google.cloud import bigquery
import google.auth
# Create credentials with Drive & BigQuery API scopes.
# Both APIs must be enabled for your project before running this code.
#
# If you are using credentials from gcloud, you must authorize the<|fim▁hole|> # gcloud auth application-default login \
# --scopes=https://www.googleapis.com/auth/drive,https://www.googleapis.com/auth/cloud-platform
credentials, project = google.auth.default(
scopes=[
"https://www.googleapis.com/auth/drive",
"https://www.googleapis.com/auth/bigquery",
]
)
# Construct a BigQuery client object.
client = bigquery.Client(credentials=credentials, project=project)
# TODO(developer): Set dataset_id to the ID of the dataset to fetch.
# dataset_id = "your-project.your_dataset"
# Configure the external data source.
dataset = client.get_dataset(dataset_id)
table_id = "us_states"
schema = [
bigquery.SchemaField("name", "STRING"),
bigquery.SchemaField("post_abbr", "STRING"),
]
table = bigquery.Table(dataset.table(table_id), schema=schema)
external_config = bigquery.ExternalConfig("GOOGLE_SHEETS")
# Use a shareable link or grant viewing access to the email address you
# used to authenticate with BigQuery (this example Sheet is public).
sheet_url = (
"https://docs.google.com/spreadsheets"
"/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing"
)
external_config.source_uris = [sheet_url]
external_config.options.skip_leading_rows = 1 # Optionally skip header row.
external_config.options.range = (
"us-states!A20:B49" # Optionally set range of the sheet to query from.
)
table.external_data_configuration = external_config
# Create a permanent table linked to the Sheets file.
table = client.create_table(table) # Make an API request.
# Example query to find states starting with "W".
sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id)
query_job = client.query(sql) # Make an API request.
# Wait for the query to complete.
w_states = list(query_job)
print(
"There are {} states with names starting with W in the selected range.".format(
len(w_states)
)
)
# [END bigquery_query_external_sheets_perm]<|fim▁end|> | # application first with the following command:
# |
<|file_name|>project-cache-issue-31849.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #31849: the problem here was actually a performance
// cliff, but I'm adding the test for reference.
pub trait Upcast<T> {
fn upcast(self) -> T;
}
impl<S1, S2, T1, T2> Upcast<(T1, T2)> for (S1,S2)
where S1: Upcast<T1>,
S2: Upcast<T2>,
{
fn upcast(self) -> (T1, T2) { (self.0.upcast(), self.1.upcast()) }
}
impl Upcast<()> for ()
{
fn upcast(self) -> () { () }
}
pub trait ToStatic {
type Static: 'static;
fn to_static(self) -> Self::Static where Self: Sized;
}
impl<T, U> ToStatic for (T, U)
where T: ToStatic,<|fim▁hole|> fn to_static(self) -> Self::Static { (self.0.to_static(), self.1.to_static()) }
}
impl ToStatic for ()
{
type Static = ();
fn to_static(self) -> () { () }
}
trait Factory {
type Output;
fn build(&self) -> Self::Output;
}
impl<S,T> Factory for (S, T)
where S: Factory,
T: Factory,
S::Output: ToStatic,
<S::Output as ToStatic>::Static: Upcast<S::Output>,
{
type Output = (S::Output, T::Output);
fn build(&self) -> Self::Output { (self.0.build().to_static().upcast(), self.1.build()) }
}
impl Factory for () {
type Output = ();
fn build(&self) -> Self::Output { () }
}
fn main() {
// More parens, more time.
let it = ((((((((((),()),()),()),()),()),()),()),()),());
it.build();
}<|fim▁end|> | U: ToStatic
{
type Static = (T::Static, U::Static); |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import sys
import os
import pkg_resources
VERSION = 0.5
<|fim▁hole|>
def resource_path(relative_path):
base_path = getattr(sys, '_MEIPASS', script_path)
full_path = os.path.join(base_path, relative_path)
if os.path.isfile(full_path):
return full_path
else:
return pkg_resources.resource_filename(__name__, relative_path)<|fim▁end|> | script_path = os.path.dirname(sys.argv[0]) |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
pub mod daemon;<|fim▁hole|>pub mod state;<|fim▁end|> | pub mod error;
pub mod reducers; |
<|file_name|>initdatabase.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-<|fim▁hole|>
conn = mysql.connector.connect(host="localhost",user="spike",password="valentine", database="drupal")
cann = mysql.connector.connect(host="localhost",user="spike",password="valentine", database="content_delivery_weather")
cursor = conn.cursor()
cursar = cann.cursor()
cursor.execute("""SELECT uid, mail FROM users""")
rows = cursor.fetchall()
for row in rows:
if row[0] != 0:
print('{0} : {1} '.format(row[0], row[1]))
#print('UPDATE new_v4_users_probes_edit SET email = {0} WHERE uid = {1}'.format(row[1], row[0]))
cursar.execute("""UPDATE new_v4_users_probes_edit SET email = %s WHERE userid = %s""",(row[1], row[0]))
cursar.execute("""SELECT probename, probeid FROM new_v4_sonde""")
rows = cursar.fetchall()
for row in rows:
cursar.execute("""SHOW TABLES LIKE %s""",("%" + row[0] + "%",))
rowsbis = cursar.fetchall()
for rowbis in rowsbis:
result = rowbis[0].split("_")
month = 1 + int(result[4])
s = "01/" + str(month) + "/" + result[3]
timestamp = time.mktime(datetime.datetime.strptime(s, "%d/%m/%Y").timetuple())
print('{0} : {1} year: {2} month: {3} timestamp: {4}'.format(row[0], rowbis[0], result[3], result[4], round(timestamp,0)))
cursar.execute("""SELECT firsttime FROM new_v4_sonde WHERE probeid = %s""",(row[1],))
rowsbisbis = cursar.fetchall()
for rowbisbis in rowsbisbis:
if rowbisbis[0] == None:
cursar.execute("""UPDATE new_v4_sonde SET firsttime = %s WHERE probeid = %s""",(timestamp,row[1]))
print('firsttime: {0}'.format(rowbisbis[0],))
conn.close()
cann.close()<|fim▁end|> |
import mysql.connector
import time
import datetime |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
.. module:: radical.pilot.controller.pilot_launcher_worker
.. moduleauthor:: Ole Weidner <[email protected]>
"""
__copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu"
__license__ = "MIT"
from unit_manager_controller import UnitManagerController<|fim▁hole|><|fim▁end|> | from pilot_manager_controller import PilotManagerController
from input_file_transfer_worker import InputFileTransferWorker
from output_file_transfer_worker import OutputFileTransferWorker |
<|file_name|>clip-docs-2.ts<|end_file_name|><|fim▁begin|>import * as clipboardy from 'clipboardy'
const rows = clipboardy.readSync();
const transformed = rows.split('\n')
.map(row => {
const cells = row.split('\t').map(cell => (cell || '').replace(/\n/g, ' ').trim());
return ` /** ${cells[1]} */\n ${cells[0].replace(/^.*\./g, '')}`;<|fim▁hole|>clipboardy.writeSync(transformed);
console.log(transformed);<|fim▁end|> | })
.join('\n');
|
<|file_name|>Editor.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004-2010, The Dojo Foundation All Rights Reserved.
Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
*/
if(!dojo._hasResource["dijit.Editor"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code.
dojo._hasResource["dijit.Editor"] = true;
dojo.provide("dijit.Editor");
dojo.require("dijit._editor.RichText");
dojo.require("dijit.Toolbar");
dojo.require("dijit.ToolbarSeparator");
dojo.require("dijit._editor._Plugin");
dojo.require("dijit._editor.plugins.EnterKeyHandling");
dojo.require("dijit._editor.range");
dojo.require("dijit._Container");
dojo.require("dojo.i18n");
dojo.require("dijit.layout._LayoutWidget");
dojo.require("dijit._editor.range");
dojo.requireLocalization("dijit._editor", "commands", null, "ROOT,ar,ca,cs,da,de,el,es,fi,fr,he,hu,it,ja,kk,ko,nb,nl,pl,pt,pt-pt,ro,ru,sk,sl,sv,th,tr,zh,zh-tw");
dojo.declare(
"dijit.Editor",
dijit._editor.RichText,
{
// summary:
// A rich text Editing widget
//
// description:
// This widget provides basic WYSIWYG editing features, based on the browser's
// underlying rich text editing capability, accompanied by a toolbar (`dijit.Toolbar`).
// A plugin model is available to extend the editor's capabilities as well as the
// the options available in the toolbar. Content generation may vary across
// browsers, and clipboard operations may have different results, to name
// a few limitations. Note: this widget should not be used with the HTML
// <TEXTAREA> tag -- see dijit._editor.RichText for details.
// plugins: Object[]
// A list of plugin names (as strings) or instances (as objects)
// for this widget.
//
// When declared in markup, it might look like:
// | plugins="['bold',{name:'dijit._editor.plugins.FontChoice', command:'fontName', generic:true}]"
plugins: null,
// extraPlugins: Object[]
// A list of extra plugin names which will be appended to plugins array
extraPlugins: null,
constructor: function(){
// summary:
// Runs on widget initialization to setup arrays etc.
// tags:
// private
if(!dojo.isArray(this.plugins)){
this.plugins=["undo","redo","|","cut","copy","paste","|","bold","italic","underline","strikethrough","|",
"insertOrderedList","insertUnorderedList","indent","outdent","|","justifyLeft","justifyRight","justifyCenter","justifyFull",
"dijit._editor.plugins.EnterKeyHandling" /*, "createLink"*/];
}
this._plugins=[];
this._editInterval = this.editActionInterval * 1000;
//IE will always lose focus when other element gets focus, while for FF and safari,
//when no iframe is used, focus will be lost whenever another element gets focus.
//For IE, we can connect to onBeforeDeactivate, which will be called right before
//the focus is lost, so we can obtain the selected range. For other browsers,
//no equivelent of onBeforeDeactivate, so we need to do two things to make sure
//selection is properly saved before focus is lost: 1) when user clicks another
//element in the page, in which case we listen to mousedown on the entire page and
//see whether user clicks out of a focus editor, if so, save selection (focus will
//only lost after onmousedown event is fired, so we can obtain correct caret pos.)
//2) when user tabs away from the editor, which is handled in onKeyDown below.
if(dojo.isIE){
this.events.push("onBeforeDeactivate");
this.events.push("onBeforeActivate");
}
},
postMixInProperties: function() {
// summary:
// Extension to make sure a deferred is in place before certain functions
// execute, like making sure all the plugins are properly inserted.
// Set up a deferred so that the value isn't applied to the editor
// until all the plugins load, needed to avoid timing condition
// reported in #10537.
this.setValueDeferred = new dojo.Deferred();
this.inherited(arguments);
},
postCreate: function(){
//for custom undo/redo, if enabled.
this._steps=this._steps.slice(0);
this._undoedSteps=this._undoedSteps.slice(0);
if(dojo.isArray(this.extraPlugins)){
this.plugins=this.plugins.concat(this.extraPlugins);
}
this.inherited(arguments);
this.commands = dojo.i18n.getLocalization("dijit._editor", "commands", this.lang);
if(!this.toolbar){
// if we haven't been assigned a toolbar, create one
this.toolbar = new dijit.Toolbar({
dir: this.dir,
lang: this.lang
});
this.header.appendChild(this.toolbar.domNode);
}
dojo.forEach(this.plugins, this.addPlugin, this);
// Okay, denote the value can now be set.
this.setValueDeferred.callback(true);
dojo.addClass(this.iframe.parentNode, "dijitEditorIFrameContainer");
dojo.addClass(this.iframe, "dijitEditorIFrame");
dojo.attr(this.iframe, "allowTransparency", true);
if(dojo.isWebKit){
// Disable selecting the entire editor by inadvertant double-clicks.
// on buttons, title bar, etc. Otherwise clicking too fast on
// a button such as undo/redo selects the entire editor.
dojo.style(this.domNode, "KhtmlUserSelect", "none");
}
this.toolbar.startup();
this.onNormalizedDisplayChanged(); //update toolbar button status
},
destroy: function(){
dojo.forEach(this._plugins, function(p){
if(p && p.destroy){
p.destroy();
}
});
this._plugins=[];
this.toolbar.destroyRecursive();
delete this.toolbar;
this.inherited(arguments);
},
addPlugin: function(/*String||Object*/plugin, /*Integer?*/index){
// summary:
// takes a plugin name as a string or a plugin instance and
// adds it to the toolbar and associates it with this editor
// instance. The resulting plugin is added to the Editor's
// plugins array. If index is passed, it's placed in the plugins
// array at that index. No big magic, but a nice helper for
// passing in plugin names via markup.
//
// plugin: String, args object or plugin instance
//
// args:
// This object will be passed to the plugin constructor
//
// index: Integer
// Used when creating an instance from
// something already in this.plugins. Ensures that the new
// instance is assigned to this.plugins at that index.
var args=dojo.isString(plugin)?{name:plugin}:plugin;
if(!args.setEditor){
var o={"args":args,"plugin":null,"editor":this};
dojo.publish(dijit._scopeName + ".Editor.getPlugin",[o]);
if(!o.plugin){
var pc = dojo.getObject(args.name);
if(pc){
o.plugin=new pc(args);
}
}
if(!o.plugin){
console.warn('Cannot find plugin',plugin);
return;
}
plugin=o.plugin;
}
if(arguments.length > 1){
this._plugins[index] = plugin;
}else{
this._plugins.push(plugin);
}
plugin.setEditor(this);
if(dojo.isFunction(plugin.setToolbar)){
plugin.setToolbar(this.toolbar);
}
},
//the following 3 functions are required to make the editor play nice under a layout widget, see #4070
startup: function(){
// summary:
// Exists to make Editor work as a child of a layout widget.
// Developers don't need to call this method.
// tags:
// protected
//console.log('startup',arguments);
},
resize: function(size){
// summary:
// Resize the editor to the specified size, see `dijit.layout._LayoutWidget.resize`
if(size){
// we've been given a height/width for the entire editor (toolbar + contents), calls layout()
// to split the allocated size between the toolbar and the contents
dijit.layout._LayoutWidget.prototype.resize.apply(this, arguments);
}
/*
else{
// do nothing, the editor is already laid out correctly. The user has probably specified
// the height parameter, which was used to set a size on the iframe
}
*/
},
layout: function(){
// summary:
// Called from `dijit.layout._LayoutWidget.resize`. This shouldn't be called directly
// tags:
// protected
// Converts the iframe (or rather the <div> surrounding it) to take all the available space
// except what's needed for the header (toolbars) and footer (breadcrumbs, etc).
// A class was added to the iframe container and some themes style it, so we have to
// calc off the added margins and padding too. See tracker: #10662
var areaHeight = (this._contentBox.h -
(this.getHeaderHeight() + this.getFooterHeight() +
dojo._getPadBorderExtents(this.iframe.parentNode).h +
dojo._getMarginExtents(this.iframe.parentNode).h));
this.editingArea.style.height = areaHeight + "px";
if(this.iframe){
this.iframe.style.height="100%";
}
this._layoutMode = true;
},
_onIEMouseDown: function(/*Event*/ e){
// summary:
// IE only to prevent 2 clicks to focus
// tags:
// private
var outsideClientArea;
// IE 8's componentFromPoint is broken, which is a shame since it
// was smaller code, but oh well. We have to do this brute force
// to detect if the click was scroller or not.
var b = this.document.body;
var clientWidth = b.clientWidth;
var clientHeight = b.clientHeight;
var clientLeft = b.clientLeft;
var offsetWidth = b.offsetWidth;
var offsetHeight = b.offsetHeight;
var offsetLeft = b.offsetLeft;
//Check for vertical scroller click.
bodyDir = b.dir?b.dir.toLowerCase():""
if(bodyDir != "rtl"){
if(clientWidth < offsetWidth && e.x > clientWidth && e.x < offsetWidth){
// Check the click was between width and offset width, if so, scroller
outsideClientArea = true;
}
}else{
// RTL mode, we have to go by the left offsets.
if(e.x < clientLeft && e.x > offsetLeft){
// Check the click was between width and offset width, if so, scroller
outsideClientArea = true;
}
}
if(!outsideClientArea){
// Okay, might be horiz scroller, check that.
if(clientHeight < offsetHeight && e.y > clientHeight && e.y < offsetHeight){
// Horizontal scroller.
outsideClientArea = true;
}
}
if(!outsideClientArea){
delete this._cursorToStart; // Remove the force to cursor to start position.
delete this._savedSelection; // new mouse position overrides old selection
if(e.target.tagName == "BODY"){
setTimeout(dojo.hitch(this, "placeCursorAtEnd"), 0);
}
this.inherited(arguments);
}
},
onBeforeActivate: function(e){
this._restoreSelection();
},
onBeforeDeactivate: function(e){
// summary:
// Called on IE right before focus is lost. Saves the selected range.
// tags:
// private
if(this.customUndo){
this.endEditing(true);
}
//in IE, the selection will be lost when other elements get focus,
//let's save focus before the editor is deactivated
if(e.target.tagName != "BODY"){
this._saveSelection();
}
//console.log('onBeforeDeactivate',this);
},
/* beginning of custom undo/redo support */
// customUndo: Boolean
// Whether we shall use custom undo/redo support instead of the native
// browser support. By default, we only enable customUndo for IE, as it
// has broken native undo/redo support. Note: the implementation does
// support other browsers which have W3C DOM2 Range API implemented.
// It was also enabled on WebKit, to fix undo/redo enablement. (#9613)
customUndo: dojo.isIE || dojo.isWebKit,
// editActionInterval: Integer
// When using customUndo, not every keystroke will be saved as a step.
// Instead typing (including delete) will be grouped together: after
// a user stops typing for editActionInterval seconds, a step will be
// saved; if a user resume typing within editActionInterval seconds,
// the timeout will be restarted. By default, editActionInterval is 3
// seconds.
editActionInterval: 3,
beginEditing: function(cmd){
// summary:
// Called to note that the user has started typing alphanumeric characters, if it's not already noted.
// Deals with saving undo; see editActionInterval parameter.
// tags:
// private
if(!this._inEditing){
this._inEditing=true;
this._beginEditing(cmd);
}
if(this.editActionInterval>0){
if(this._editTimer){
clearTimeout(this._editTimer);
}
this._editTimer = setTimeout(dojo.hitch(this, this.endEditing), this._editInterval);
}
},
_steps:[],
_undoedSteps:[],
execCommand: function(cmd){
// summary:
// Main handler for executing any commands to the editor, like paste, bold, etc.
// Called by plugins, but not meant to be called by end users.
// tags:
// protected
if(this.customUndo && (cmd == 'undo' || cmd == 'redo')){
return this[cmd]();
}else{
if(this.customUndo){
this.endEditing();
this._beginEditing();
}
var r;
try{
r = this.inherited('execCommand', arguments);
if(dojo.isWebKit && cmd == 'paste' && !r){ //see #4598: safari does not support invoking paste from js
throw { code: 1011 }; // throw an object like Mozilla's error
}
}catch(e){
//TODO: when else might we get an exception? Do we need the Mozilla test below?
if(e.code == 1011 /* Mozilla: service denied */ && /copy|cut|paste/.test(cmd)){
// Warn user of platform limitation. Cannot programmatically access clipboard. See ticket #4136
var sub = dojo.string.substitute,
accel = {cut:'X', copy:'C', paste:'V'};
alert(sub(this.commands.systemShortcut,
[this.commands[cmd], sub(this.commands[dojo.isMac ? 'appleKey' : 'ctrlKey'], [accel[cmd]])]));
}
r = false;
}
if(this.customUndo){
this._endEditing();
}
return r;
}
},
queryCommandEnabled: function(cmd){
// summary:
// Returns true if specified editor command is enabled.
// Used by the plugins to know when to highlight/not highlight buttons.
// tags:
// protected
if(this.customUndo && (cmd == 'undo' || cmd == 'redo')){
return cmd == 'undo' ? (this._steps.length > 1) : (this._undoedSteps.length > 0);
}else{
return this.inherited('queryCommandEnabled',arguments);
}
},
_moveToBookmark: function(b){
// summary:
// Selects the text specified in bookmark b
// tags:
// private
var bookmark = b.mark;
var mark = b.mark;
var col = b.isCollapsed;
var r, sNode, eNode, sel;
if(mark){
if(dojo.isIE){
if(dojo.isArray(mark)){
//IE CONTROL, have to use the native bookmark.
bookmark = [];
dojo.forEach(mark,function(n){
bookmark.push(dijit.range.getNode(n,this.editNode));
},this);
dojo.withGlobal(this.window,'moveToBookmark',dijit,[{mark: bookmark, isCollapsed: col}]);
}else{
if(mark.startContainer && mark.endContainer){
// Use the pseudo WC3 range API. This works better for positions
// than the IE native bookmark code.
sel = dijit.range.getSelection(this.window);
if(sel && sel.removeAllRanges){
sel.removeAllRanges();
r = dijit.range.create(this.window);
sNode = dijit.range.getNode(mark.startContainer,this.editNode);
eNode = dijit.range.getNode(mark.endContainer,this.editNode);
if(sNode && eNode){
// Okay, we believe we found the position, so add it into the selection
// There are cases where it may not be found, particularly in undo/redo, when
// IE changes the underlying DOM on us (wraps text in a <p> tag or similar.
// So, in those cases, don't bother restoring selection.
r.setStart(sNode,mark.startOffset);
r.setEnd(eNode,mark.endOffset);
sel.addRange(r);
}
}
}
}
}else{//w3c range
sel = dijit.range.getSelection(this.window);
if(sel && sel.removeAllRanges){
sel.removeAllRanges();
r = dijit.range.create(this.window);
sNode = dijit.range.getNode(mark.startContainer,this.editNode);
eNode = dijit.range.getNode(mark.endContainer,this.editNode);
if(sNode && eNode){
// Okay, we believe we found the position, so add it into the selection
// There are cases where it may not be found, particularly in undo/redo, when
// formatting as been done and so on, so don't restore selection then.
r.setStart(sNode,mark.startOffset);
r.setEnd(eNode,mark.endOffset);
sel.addRange(r);
}
}
}
}
},
_changeToStep: function(from, to){
// summary:
// Reverts editor to "to" setting, from the undo stack.
// tags:
// private
this.setValue(to.text);
var b=to.bookmark;
if(!b){ return; }
this._moveToBookmark(b);
},
undo: function(){
// summary:
// Handler for editor undo (ex: ctrl-z) operation
// tags:
// private
//console.log('undo');
var ret = false;
if(!this._undoRedoActive){
this._undoRedoActive = true;<|fim▁hole|> var s=this._steps.pop();
if(s && this._steps.length>0){
this.focus();
this._changeToStep(s,this._steps[this._steps.length-1]);
this._undoedSteps.push(s);
this.onDisplayChanged();
delete this._undoRedoActive;
ret = true;
}
delete this._undoRedoActive;
}
return ret;
},
redo: function(){
// summary:
// Handler for editor redo (ex: ctrl-y) operation
// tags:
// private
//console.log('redo');
var ret = false;
if(!this._undoRedoActive){
this._undoRedoActive = true;
this.endEditing(true);
var s=this._undoedSteps.pop();
if(s && this._steps.length>0){
this.focus();
this._changeToStep(this._steps[this._steps.length-1],s);
this._steps.push(s);
this.onDisplayChanged();
ret = true;
}
delete this._undoRedoActive;
}
return ret;
},
endEditing: function(ignore_caret){
// summary:
// Called to note that the user has stopped typing alphanumeric characters, if it's not already noted.
// Deals with saving undo; see editActionInterval parameter.
// tags:
// private
if(this._editTimer){
clearTimeout(this._editTimer);
}
if(this._inEditing){
this._endEditing(ignore_caret);
this._inEditing=false;
}
},
_getBookmark: function(){
// summary:
// Get the currently selected text
// tags:
// protected
var b=dojo.withGlobal(this.window,dijit.getBookmark);
var tmp=[];
if(b && b.mark){
var mark = b.mark;
if(dojo.isIE){
// Try to use the pseudo range API on IE for better accuracy.
var sel = dijit.range.getSelection(this.window);
if(!dojo.isArray(mark)){
if(sel){
var range;
if(sel.rangeCount){
range = sel.getRangeAt(0);
}
if(range){
b.mark = range.cloneRange();
}else{
b.mark = dojo.withGlobal(this.window,dijit.getBookmark);
}
}
}else{
// Control ranges (img, table, etc), handle differently.
dojo.forEach(b.mark,function(n){
tmp.push(dijit.range.getIndex(n,this.editNode).o);
},this);
b.mark = tmp;
}
}
try{
if(b.mark && b.mark.startContainer){
tmp=dijit.range.getIndex(b.mark.startContainer,this.editNode).o;
b.mark={startContainer:tmp,
startOffset:b.mark.startOffset,
endContainer:b.mark.endContainer===b.mark.startContainer?tmp:dijit.range.getIndex(b.mark.endContainer,this.editNode).o,
endOffset:b.mark.endOffset};
}
}catch(e){
b.mark = null;
}
}
return b;
},
_beginEditing: function(cmd){
// summary:
// Called when the user starts typing alphanumeric characters.
// Deals with saving undo; see editActionInterval parameter.
// tags:
// private
if(this._steps.length === 0){
// You want to use the editor content without post filtering
// to make sure selection restores right for the 'initial' state.
// and undo is called. So not using this.savedContent, as it was 'processed'
// and the line-up for selections may have been altered.
this._steps.push({'text':dijit._editor.getChildrenHtml(this.editNode),'bookmark':this._getBookmark()});
}
},
_endEditing: function(ignore_caret){
// summary:
// Called when the user stops typing alphanumeric characters.
// Deals with saving undo; see editActionInterval parameter.
// tags:
// private
// Avoid filtering to make sure selections restore.
var v = dijit._editor.getChildrenHtml(this.editNode);
this._undoedSteps=[];//clear undoed steps
this._steps.push({text: v, bookmark: this._getBookmark()});
},
onKeyDown: function(e){
// summary:
// Handler for onkeydown event.
// tags:
// private
//We need to save selection if the user TAB away from this editor
//no need to call _saveSelection for IE, as that will be taken care of in onBeforeDeactivate
if(!dojo.isIE && !this.iframe && e.keyCode == dojo.keys.TAB && !this.tabIndent){
this._saveSelection();
}
if(!this.customUndo){
this.inherited(arguments);
return;
}
var k = e.keyCode, ks = dojo.keys;
if(e.ctrlKey && !e.altKey){//undo and redo only if the special right Alt + z/y are not pressed #5892
if(k == 90 || k == 122){ //z
dojo.stopEvent(e);
this.undo();
return;
}else if(k == 89 || k == 121){ //y
dojo.stopEvent(e);
this.redo();
return;
}
}
this.inherited(arguments);
switch(k){
case ks.ENTER:
case ks.BACKSPACE:
case ks.DELETE:
this.beginEditing();
break;
case 88: //x
case 86: //v
if(e.ctrlKey && !e.altKey && !e.metaKey){
this.endEditing();//end current typing step if any
if(e.keyCode == 88){
this.beginEditing('cut');
//use timeout to trigger after the cut is complete
setTimeout(dojo.hitch(this, this.endEditing), 1);
}else{
this.beginEditing('paste');
//use timeout to trigger after the paste is complete
setTimeout(dojo.hitch(this, this.endEditing), 1);
}
break;
}
//pass through
default:
if(!e.ctrlKey && !e.altKey && !e.metaKey && (e.keyCode<dojo.keys.F1 || e.keyCode>dojo.keys.F15)){
this.beginEditing();
break;
}
//pass through
case ks.ALT:
this.endEditing();
break;
case ks.UP_ARROW:
case ks.DOWN_ARROW:
case ks.LEFT_ARROW:
case ks.RIGHT_ARROW:
case ks.HOME:
case ks.END:
case ks.PAGE_UP:
case ks.PAGE_DOWN:
this.endEditing(true);
break;
//maybe ctrl+backspace/delete, so don't endEditing when ctrl is pressed
case ks.CTRL:
case ks.SHIFT:
case ks.TAB:
break;
}
},
_onBlur: function(){
// summary:
// Called from focus manager when focus has moved away from this editor
// tags:
// protected
//this._saveSelection();
this.inherited('_onBlur',arguments);
this.endEditing(true);
},
_saveSelection: function(){
// summary:
// Save the currently selected text in _savedSelection attribute
// tags:
// private
this._savedSelection=this._getBookmark();
//console.log('save selection',this._savedSelection,this);
},
_restoreSelection: function(){
// summary:
// Re-select the text specified in _savedSelection attribute;
// see _saveSelection().
// tags:
// private
if(this._savedSelection){
// Clear off cursor to start, we're deliberately going to a selection.
delete this._cursorToStart;
// only restore the selection if the current range is collapsed
// if not collapsed, then it means the editor does not lose
// selection and there is no need to restore it
if(dojo.withGlobal(this.window,'isCollapsed',dijit)){
this._moveToBookmark(this._savedSelection);
}
delete this._savedSelection;
}
},
onClick: function(){
// summary:
// Handler for when editor is clicked
// tags:
// protected
this.endEditing(true);
this.inherited(arguments);
},
_setDisabledAttr: function(/*Boolean*/ value){
var disableFunc = dojo.hitch(this, function(){
if((!this.disabled && value) || (!this._buttonEnabledPlugins && value)){
// Disable editor: disable all enabled buttons and remember that list
this._buttonEnabledPlugins = dojo.filter(this._plugins, function(p){
if(p && p.button && !p.button.get("disabled")){
p.button.set("disabled", true);
return true;
}
return false;
});
}else if(this.disabled && !value){
// Enable editor: we only want to enable the buttons that should be
// enabled (for example, the outdent button shouldn't be enabled if the current
// text can't be outdented).
dojo.forEach(this._buttonEnabledPlugins, function(p){
p.button.attr("disabled", false);
p.updateState && p.updateState(); // just in case something changed, like caret position
});
}
});
this.setValueDeferred.addCallback(disableFunc);
this.inherited(arguments);
},
_setStateClass: function(){
this.inherited(arguments);
// Let theme set the editor's text color based on editor enabled/disabled state.
// We need to jump through hoops because the main document (where the theme CSS is)
// is separate from the iframe's document.
if(this.document && this.document.body){
dojo.style(this.document.body, "color", dojo.style(this.iframe, "color"));
}
}
}
);
// Register the "default plugins", ie, the built-in editor commands
dojo.subscribe(dijit._scopeName + ".Editor.getPlugin",null,function(o){
if(o.plugin){ return; }
var args = o.args, p;
var _p = dijit._editor._Plugin;
var name = args.name;
switch(name){
case "undo": case "redo": case "cut": case "copy": case "paste": case "insertOrderedList":
case "insertUnorderedList": case "indent": case "outdent": case "justifyCenter":
case "justifyFull": case "justifyLeft": case "justifyRight": case "delete":
case "selectAll": case "removeFormat": case "unlink":
case "insertHorizontalRule":
p = new _p({ command: name });
break;
case "bold": case "italic": case "underline": case "strikethrough":
case "subscript": case "superscript":
p = new _p({ buttonClass: dijit.form.ToggleButton, command: name });
break;
case "|":
p = new _p({ button: new dijit.ToolbarSeparator(), setEditor: function(editor) {this.editor = editor;} });
}
// console.log('name',name,p);
o.plugin=p;
});
}<|fim▁end|> | this.endEditing(true); |
<|file_name|>replication_controller.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// If you make changes to this file, you should also make the corresponding change in ReplicaSet.
package replication
import (
"reflect"
"sort"
"sync"
"time"
"github.com/golang/glog"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/errors"
"k8s.io/kubernetes/pkg/api/unversioned"
"k8s.io/kubernetes/pkg/api/v1"
"k8s.io/kubernetes/pkg/client/cache"
clientset "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset"
unversionedcore "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset/typed/core/unversioned"
"k8s.io/kubernetes/pkg/client/record"
"k8s.io/kubernetes/pkg/controller"
"k8s.io/kubernetes/pkg/controller/framework"
"k8s.io/kubernetes/pkg/controller/framework/informers"
"k8s.io/kubernetes/pkg/labels"
"k8s.io/kubernetes/pkg/runtime"
"k8s.io/kubernetes/pkg/util"
utilerrors "k8s.io/kubernetes/pkg/util/errors"
"k8s.io/kubernetes/pkg/util/metrics"
utilruntime "k8s.io/kubernetes/pkg/util/runtime"
"k8s.io/kubernetes/pkg/util/wait"
"k8s.io/kubernetes/pkg/util/workqueue"
"k8s.io/kubernetes/pkg/watch"
)
const (
// We'll attempt to recompute the required replicas of all replication controllers
// that have fulfilled their expectations at least this often. This recomputation
// happens based on contents in local pod storage.
// Full Resync shouldn't be needed at all in a healthy system. This is a protection
// against disappearing objects and watch notification, that we believe should not
// happen at all.
// TODO: We should get rid of it completely in the fullness of time.
FullControllerResyncPeriod = 10 * time.Minute
// Realistic value of the burstReplica field for the replication manager based off
// performance requirements for kubernetes 1.0.
BurstReplicas = 500
// We must avoid counting pods until the pod store has synced. If it hasn't synced, to
// avoid a hot loop, we'll wait this long between checks.
PodStoreSyncedPollPeriod = 100 * time.Millisecond
// The number of times we retry updating a replication controller's status.
statusUpdateRetries = 1
)
func getRCKind() unversioned.GroupVersionKind {
return v1.SchemeGroupVersion.WithKind("ReplicationController")
}
// ReplicationManager is responsible for synchronizing ReplicationController objects stored
// in the system with actual running pods.
// TODO: this really should be called ReplicationController. The only reason why it's a Manager
// is to distinguish this type from API object "ReplicationController". We should fix this.
type ReplicationManager struct {
kubeClient clientset.Interface
podControl controller.PodControlInterface
// internalPodInformer is used to hold a personal informer. If we're using
// a normal shared informer, then the informer will be started for us. If
// we have a personal informer, we must start it ourselves. If you start
// the controller using NewReplicationManager(passing SharedInformer), this
// will be null
internalPodInformer framework.SharedIndexInformer
// An rc is temporarily suspended after creating/deleting these many replicas.
// It resumes normal action after observing the watch events for them.
burstReplicas int
// To allow injection of syncReplicationController for testing.
syncHandler func(rcKey string) error
// A TTLCache of pod creates/deletes each rc expects to see.
expectations *controller.UIDTrackingControllerExpectations
// A store of replication controllers, populated by the rcController
rcStore cache.StoreToReplicationControllerLister
// Watches changes to all replication controllers
rcController *framework.Controller
// A store of pods, populated by the podController
podStore cache.StoreToPodLister
// Watches changes to all pods
podController framework.ControllerInterface
// podStoreSynced returns true if the pod store has been synced at least once.
// Added as a member to the struct to allow injection for testing.
podStoreSynced func() bool
lookupCache *controller.MatchingCache
// Controllers that need to be synced
queue *workqueue.Type
// garbageCollectorEnabled denotes if the garbage collector is enabled. RC
// manager behaves differently if GC is enabled.
garbageCollectorEnabled bool
}
// NewReplicationManager creates a replication manager
func NewReplicationManager(podInformer framework.SharedIndexInformer, kubeClient clientset.Interface, resyncPeriod controller.ResyncPeriodFunc, burstReplicas int, lookupCacheSize int, garbageCollectorEnabled bool) *ReplicationManager {
eventBroadcaster := record.NewBroadcaster()
eventBroadcaster.StartLogging(glog.Infof)
eventBroadcaster.StartRecordingToSink(&unversionedcore.EventSinkImpl{Interface: kubeClient.Core().Events("")})
return newReplicationManager(
eventBroadcaster.NewRecorder(api.EventSource{Component: "replication-controller"}),
podInformer, kubeClient, resyncPeriod, burstReplicas, lookupCacheSize, garbageCollectorEnabled)
}
// newReplicationManager configures a replication manager with the specified event recorder
func newReplicationManager(eventRecorder record.EventRecorder, podInformer framework.SharedIndexInformer, kubeClient clientset.Interface, resyncPeriod controller.ResyncPeriodFunc, burstReplicas int, lookupCacheSize int, garbageCollectorEnabled bool) *ReplicationManager {
if kubeClient != nil && kubeClient.Core().GetRESTClient().GetRateLimiter() != nil {
metrics.RegisterMetricAndTrackRateLimiterUsage("replication_controller", kubeClient.Core().GetRESTClient().GetRateLimiter())
}
rm := &ReplicationManager{
kubeClient: kubeClient,
podControl: controller.RealPodControl{
KubeClient: kubeClient,
Recorder: eventRecorder,
},
burstReplicas: burstReplicas,
expectations: controller.NewUIDTrackingControllerExpectations(controller.NewControllerExpectations()),
queue: workqueue.New(),
garbageCollectorEnabled: garbageCollectorEnabled,
}
rm.rcStore.Indexer, rm.rcController = framework.NewIndexerInformer(
&cache.ListWatch{
ListFunc: func(options api.ListOptions) (runtime.Object, error) {
return rm.kubeClient.Core().ReplicationControllers(api.NamespaceAll).List(options)
},
WatchFunc: func(options api.ListOptions) (watch.Interface, error) {
return rm.kubeClient.Core().ReplicationControllers(api.NamespaceAll).Watch(options)
},
},
&api.ReplicationController{},
// TODO: Can we have much longer period here?
FullControllerResyncPeriod,
framework.ResourceEventHandlerFuncs{
AddFunc: rm.enqueueController,
UpdateFunc: rm.updateRC,
// This will enter the sync loop and no-op, because the controller has been deleted from the store.
// Note that deleting a controller immediately after scaling it to 0 will not work. The recommended
// way of achieving this is by performing a `stop` operation on the controller.
DeleteFunc: rm.enqueueController,
},
cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc},
)
podInformer.AddEventHandler(framework.ResourceEventHandlerFuncs{
AddFunc: rm.addPod,
// This invokes the rc for every pod change, eg: host assignment. Though this might seem like overkill
// the most frequent pod update is status, and the associated rc will only list from local storage, so
// it should be ok.
UpdateFunc: rm.updatePod,
DeleteFunc: rm.deletePod,
})
rm.podStore.Indexer = podInformer.GetIndexer()
rm.podController = podInformer.GetController()
rm.syncHandler = rm.syncReplicationController
rm.podStoreSynced = rm.podController.HasSynced
rm.lookupCache = controller.NewMatchingCache(lookupCacheSize)
return rm
}
// NewReplicationManagerFromClientForIntegration creates a new ReplicationManager that runs its own informer. It disables event recording for use in integration tests.
func NewReplicationManagerFromClientForIntegration(kubeClient clientset.Interface, resyncPeriod controller.ResyncPeriodFunc, burstReplicas int, lookupCacheSize int) *ReplicationManager {
podInformer := informers.CreateSharedPodIndexInformer(kubeClient, resyncPeriod())
garbageCollectorEnabled := false
rm := newReplicationManager(&record.FakeRecorder{}, podInformer, kubeClient, resyncPeriod, burstReplicas, lookupCacheSize, garbageCollectorEnabled)
rm.internalPodInformer = podInformer
return rm
}
// NewReplicationManagerFromClient creates a new ReplicationManager that runs its own informer.
func NewReplicationManagerFromClient(kubeClient clientset.Interface, resyncPeriod controller.ResyncPeriodFunc, burstReplicas int, lookupCacheSize int) *ReplicationManager {
podInformer := informers.CreateSharedPodIndexInformer(kubeClient, resyncPeriod())
garbageCollectorEnabled := false
rm := NewReplicationManager(podInformer, kubeClient, resyncPeriod, burstReplicas, lookupCacheSize, garbageCollectorEnabled)
rm.internalPodInformer = podInformer
return rm
}
// SetEventRecorder replaces the event recorder used by the replication manager
// with the given recorder. Only used for testing.
func (rm *ReplicationManager) SetEventRecorder(recorder record.EventRecorder) {
// TODO: Hack. We can't cleanly shutdown the event recorder, so benchmarks
// need to pass in a fake.
rm.podControl = controller.RealPodControl{KubeClient: rm.kubeClient, Recorder: recorder}
}
// Run begins watching and syncing.
func (rm *ReplicationManager) Run(workers int, stopCh <-chan struct{}) {
defer utilruntime.HandleCrash()
glog.Infof("Starting RC Manager")
go rm.rcController.Run(stopCh)
go rm.podController.Run(stopCh)
for i := 0; i < workers; i++ {
go wait.Until(rm.worker, time.Second, stopCh)
}
if rm.internalPodInformer != nil {
go rm.internalPodInformer.Run(stopCh)
}
<-stopCh
glog.Infof("Shutting down RC Manager")
rm.queue.ShutDown()
}
// getPodController returns the controller managing the given pod.
// TODO: Surface that we are ignoring multiple controllers for a single pod.
// TODO: use ownerReference.Controller to determine if the rc controls the pod.
func (rm *ReplicationManager) getPodController(pod *api.Pod) *api.ReplicationController {
// look up in the cache, if cached and the cache is valid, just return cached value
if obj, cached := rm.lookupCache.GetMatchingObject(pod); cached {
controller, ok := obj.(*api.ReplicationController)
if !ok {
// This should not happen
glog.Errorf("lookup cache does not return a ReplicationController object")
return nil
}
if cached && rm.isCacheValid(pod, controller) {
return controller
}
}
// if not cached or cached value is invalid, search all the rc to find the matching one, and update cache
controllers, err := rm.rcStore.GetPodControllers(pod)
if err != nil {
glog.V(4).Infof("No controllers found for pod %v, replication manager will avoid syncing", pod.Name)
return nil
}
// In theory, overlapping controllers is user error. This sorting will not prevent
// oscillation of replicas in all cases, eg:
// rc1 (older rc): [(k1=v1)], replicas=1 rc2: [(k2=v2)], replicas=2
// pod: [(k1:v1), (k2:v2)] will wake both rc1 and rc2, and we will sync rc1.
// pod: [(k2:v2)] will wake rc2 which creates a new replica.
if len(controllers) > 1 {
// More than two items in this list indicates user error. If two replication-controller
// overlap, sort by creation timestamp, subsort by name, then pick
// the first.
glog.Errorf("user error! more than one replication controller is selecting pods with labels: %+v", pod.Labels)
sort.Sort(OverlappingControllers(controllers))
}
// update lookup cache
rm.lookupCache.Update(pod, &controllers[0])
return &controllers[0]
}
// isCacheValid check if the cache is valid
func (rm *ReplicationManager) isCacheValid(pod *api.Pod, cachedRC *api.ReplicationController) bool {
exists, err := rm.rcStore.Exists(cachedRC)
// rc has been deleted or updated, cache is invalid
if err != nil || !exists || !isControllerMatch(pod, cachedRC) {
return false
}
return true
}
// isControllerMatch take a Pod and ReplicationController, return whether the Pod and ReplicationController are matching
// TODO(mqliang): This logic is a copy from GetPodControllers(), remove the duplication
func isControllerMatch(pod *api.Pod, rc *api.ReplicationController) bool {
if rc.Namespace != pod.Namespace {
return false
}
labelSet := labels.Set(rc.Spec.Selector)
selector := labels.Set(rc.Spec.Selector).AsSelector()
// If an rc with a nil or empty selector creeps in, it should match nothing, not everything.
if labelSet.AsSelector().Empty() || !selector.Matches(labels.Set(pod.Labels)) {
return false
}
return true
}
// callback when RC is updated
func (rm *ReplicationManager) updateRC(old, cur interface{}) {
oldRC := old.(*api.ReplicationController)
curRC := cur.(*api.ReplicationController)
// We should invalidate the whole lookup cache if a RC's selector has been updated.
//
// Imagine that you have two RCs:
// * old RC1
// * new RC2
// You also have a pod that is attached to RC2 (because it doesn't match RC1 selector).
// Now imagine that you are changing RC1 selector so that it is now matching that pod,
// in such case, we must invalidate the whole cache so that pod could be adopted by RC1
//
// This makes the lookup cache less helpful, but selector update does not happen often,
// so it's not a big problem
if !reflect.DeepEqual(oldRC.Spec.Selector, curRC.Spec.Selector) {
rm.lookupCache.InvalidateAll()
}
// You might imagine that we only really need to enqueue the
// controller when Spec changes, but it is safer to sync any
// time this function is triggered. That way a full informer
// resync can requeue any controllers that don't yet have pods
// but whose last attempts at creating a pod have failed (since
// we don't block on creation of pods) instead of those
// controllers stalling indefinitely. Enqueueing every time
// does result in some spurious syncs (like when Status.Replica
// is updated and the watch notification from it retriggers
// this function), but in general extra resyncs shouldn't be
// that bad as rcs that haven't met expectations yet won't
// sync, and all the listing is done using local stores.
if oldRC.Status.Replicas != curRC.Status.Replicas {
glog.V(4).Infof("Observed updated replica count for rc: %v, %d->%d", curRC.Name, oldRC.Status.Replicas, curRC.Status.Replicas)
}
rm.enqueueController(cur)
}
// When a pod is created, enqueue the controller that manages it and update it's expectations.
func (rm *ReplicationManager) addPod(obj interface{}) {
pod := obj.(*api.Pod)
rc := rm.getPodController(pod)
if rc == nil {
return
}
rcKey, err := controller.KeyFunc(rc)
if err != nil {
glog.Errorf("Couldn't get key for replication controller %#v: %v", rc, err)
return
}
if pod.DeletionTimestamp != nil {
// on a restart of the controller manager, it's possible a new pod shows up in a state that
// is already pending deletion. Prevent the pod from being a creation observation.
rm.deletePod(pod)
return
}
rm.expectations.CreationObserved(rcKey)
rm.enqueueController(rc)
}
// When a pod is updated, figure out what controller/s manage it and wake them
// up. If the labels of the pod have changed we need to awaken both the old
// and new controller. old and cur must be *api.Pod types.
func (rm *ReplicationManager) updatePod(old, cur interface{}) {
if api.Semantic.DeepEqual(old, cur) {
// A periodic relist will send update events for all known pods.
return
}
curPod := cur.(*api.Pod)
oldPod := old.(*api.Pod)
glog.V(4).Infof("Pod %s updated, objectMeta %+v -> %+v.", curPod.Name, oldPod.ObjectMeta, curPod.ObjectMeta)
labelChanged := !reflect.DeepEqual(curPod.Labels, oldPod.Labels)
if curPod.DeletionTimestamp != nil {
// when a pod is deleted gracefully it's deletion timestamp is first modified to reflect a grace period,
// and after such time has passed, the kubelet actually deletes it from the store. We receive an update
// for modification of the deletion timestamp and expect an rc to create more replicas asap, not wait
// until the kubelet actually deletes the pod. This is different from the Phase of a pod changing, because
// an rc never initiates a phase change, and so is never asleep waiting for the same.
rm.deletePod(curPod)
if labelChanged {
// we don't need to check the oldPod.DeletionTimestamp because DeletionTimestamp cannot be unset.
rm.deletePod(oldPod)
}
return
}
// Only need to get the old controller if the labels changed.
// Enqueue the oldRC before the curRC to give curRC a chance to adopt the oldPod.
if labelChanged {
// If the old and new rc are the same, the first one that syncs
// will set expectations preventing any damage from the second.
if oldRC := rm.getPodController(oldPod); oldRC != nil {
rm.enqueueController(oldRC)
}
}
if curRC := rm.getPodController(curPod); curRC != nil {
rm.enqueueController(curRC)
}
}
// When a pod is deleted, enqueue the controller that manages the pod and update its expectations.
// obj could be an *api.Pod, or a DeletionFinalStateUnknown marker item.
func (rm *ReplicationManager) deletePod(obj interface{}) {
pod, ok := obj.(*api.Pod)
// When a delete is dropped, the relist will notice a pod in the store not
// in the list, leading to the insertion of a tombstone object which contains
// the deleted key/value. Note that this value might be stale. If the pod
// changed labels the new rc will not be woken up till the periodic resync.
if !ok {
tombstone, ok := obj.(cache.DeletedFinalStateUnknown)
if !ok {
glog.Errorf("Couldn't get object from tombstone %#v", obj)
return
}
pod, ok = tombstone.Obj.(*api.Pod)
if !ok {
glog.Errorf("Tombstone contained object that is not a pod %#v", obj)
return<|fim▁hole|> rcKey, err := controller.KeyFunc(rc)
if err != nil {
glog.Errorf("Couldn't get key for replication controller %#v: %v", rc, err)
return
}
rm.expectations.DeletionObserved(rcKey, controller.PodKey(pod))
rm.enqueueController(rc)
}
}
// obj could be an *api.ReplicationController, or a DeletionFinalStateUnknown marker item.
func (rm *ReplicationManager) enqueueController(obj interface{}) {
key, err := controller.KeyFunc(obj)
if err != nil {
glog.Errorf("Couldn't get key for object %+v: %v", obj, err)
return
}
// TODO: Handle overlapping controllers better. Either disallow them at admission time or
// deterministically avoid syncing controllers that fight over pods. Currently, we only
// ensure that the same controller is synced for a given pod. When we periodically relist
// all controllers there will still be some replica instability. One way to handle this is
// by querying the store for all controllers that this rc overlaps, as well as all
// controllers that overlap this rc, and sorting them.
rm.queue.Add(key)
}
// worker runs a worker thread that just dequeues items, processes them, and marks them done.
// It enforces that the syncHandler is never invoked concurrently with the same key.
func (rm *ReplicationManager) worker() {
workFunc := func() bool {
key, quit := rm.queue.Get()
if quit {
return true
}
defer rm.queue.Done(key)
err := rm.syncHandler(key.(string))
if err != nil {
glog.Errorf("Error syncing replication controller: %v", err)
}
return false
}
for {
if quit := workFunc(); quit {
glog.Infof("replication controller worker shutting down")
return
}
}
}
// manageReplicas checks and updates replicas for the given replication controller.
func (rm *ReplicationManager) manageReplicas(filteredPods []*api.Pod, rc *api.ReplicationController) {
diff := len(filteredPods) - int(rc.Spec.Replicas)
rcKey, err := controller.KeyFunc(rc)
if err != nil {
glog.Errorf("Couldn't get key for replication controller %#v: %v", rc, err)
return
}
if diff < 0 {
diff *= -1
if diff > rm.burstReplicas {
diff = rm.burstReplicas
}
// TODO: Track UIDs of creates just like deletes. The problem currently
// is we'd need to wait on the result of a create to record the pod's
// UID, which would require locking *across* the create, which will turn
// into a performance bottleneck. We should generate a UID for the pod
// beforehand and store it via ExpectCreations.
rm.expectations.ExpectCreations(rcKey, diff)
var wg sync.WaitGroup
wg.Add(diff)
glog.V(2).Infof("Too few %q/%q replicas, need %d, creating %d", rc.Namespace, rc.Name, rc.Spec.Replicas, diff)
for i := 0; i < diff; i++ {
go func() {
defer wg.Done()
var err error
if rm.garbageCollectorEnabled {
var trueVar = true
controllerRef := &api.OwnerReference{
APIVersion: getRCKind().GroupVersion().String(),
Kind: getRCKind().Kind,
Name: rc.Name,
UID: rc.UID,
Controller: &trueVar,
}
err = rm.podControl.CreatePodsWithControllerRef(rc.Namespace, rc.Spec.Template, rc, controllerRef)
} else {
err = rm.podControl.CreatePods(rc.Namespace, rc.Spec.Template, rc)
}
if err != nil {
// Decrement the expected number of creates because the informer won't observe this pod
glog.V(2).Infof("Failed creation, decrementing expectations for controller %q/%q", rc.Namespace, rc.Name)
rm.expectations.CreationObserved(rcKey)
rm.enqueueController(rc)
utilruntime.HandleError(err)
}
}()
}
wg.Wait()
} else if diff > 0 {
if diff > rm.burstReplicas {
diff = rm.burstReplicas
}
glog.V(2).Infof("Too many %q/%q replicas, need %d, deleting %d", rc.Namespace, rc.Name, rc.Spec.Replicas, diff)
// No need to sort pods if we are about to delete all of them
if rc.Spec.Replicas != 0 {
// Sort the pods in the order such that not-ready < ready, unscheduled
// < scheduled, and pending < running. This ensures that we delete pods
// in the earlier stages whenever possible.
sort.Sort(controller.ActivePods(filteredPods))
}
// Snapshot the UIDs (ns/name) of the pods we're expecting to see
// deleted, so we know to record their expectations exactly once either
// when we see it as an update of the deletion timestamp, or as a delete.
// Note that if the labels on a pod/rc change in a way that the pod gets
// orphaned, the rs will only wake up after the expectations have
// expired even if other pods are deleted.
deletedPodKeys := []string{}
for i := 0; i < diff; i++ {
deletedPodKeys = append(deletedPodKeys, controller.PodKey(filteredPods[i]))
}
// We use pod namespace/name as a UID to wait for deletions, so if the
// labels on a pod/rc change in a way that the pod gets orphaned, the
// rc will only wake up after the expectation has expired.
rm.expectations.ExpectDeletions(rcKey, deletedPodKeys)
var wg sync.WaitGroup
wg.Add(diff)
for i := 0; i < diff; i++ {
go func(ix int) {
defer wg.Done()
if err := rm.podControl.DeletePod(rc.Namespace, filteredPods[ix].Name, rc); err != nil {
// Decrement the expected number of deletes because the informer won't observe this deletion
podKey := controller.PodKey(filteredPods[ix])
glog.V(2).Infof("Failed to delete %v due to %v, decrementing expectations for controller %q/%q", podKey, err, rc.Namespace, rc.Name)
rm.expectations.DeletionObserved(rcKey, podKey)
rm.enqueueController(rc)
utilruntime.HandleError(err)
}
}(i)
}
wg.Wait()
}
}
// syncReplicationController will sync the rc with the given key if it has had its expectations fulfilled, meaning
// it did not expect to see any more of its pods created or deleted. This function is not meant to be invoked
// concurrently with the same key.
func (rm *ReplicationManager) syncReplicationController(key string) error {
trace := util.NewTrace("syncReplicationController: " + key)
defer trace.LogIfLong(250 * time.Millisecond)
startTime := time.Now()
defer func() {
glog.V(4).Infof("Finished syncing controller %q (%v)", key, time.Now().Sub(startTime))
}()
if !rm.podStoreSynced() {
// Sleep so we give the pod reflector goroutine a chance to run.
time.Sleep(PodStoreSyncedPollPeriod)
glog.Infof("Waiting for pods controller to sync, requeuing rc %v", key)
rm.queue.Add(key)
return nil
}
obj, exists, err := rm.rcStore.Indexer.GetByKey(key)
if !exists {
glog.Infof("Replication Controller has been deleted %v", key)
rm.expectations.DeleteExpectations(key)
return nil
}
if err != nil {
glog.Infof("Unable to retrieve rc %v from store: %v", key, err)
rm.queue.Add(key)
return err
}
rc := *obj.(*api.ReplicationController)
// Check the expectations of the rc before counting active pods, otherwise a new pod can sneak in
// and update the expectations after we've retrieved active pods from the store. If a new pod enters
// the store after we've checked the expectation, the rc sync is just deferred till the next relist.
rcKey, err := controller.KeyFunc(&rc)
if err != nil {
glog.Errorf("Couldn't get key for replication controller %#v: %v", rc, err)
return err
}
trace.Step("ReplicationController restored")
rcNeedsSync := rm.expectations.SatisfiedExpectations(rcKey)
trace.Step("Expectations restored")
// TODO: Do the List and Filter in a single pass, or use an index.
var filteredPods []*api.Pod
if rm.garbageCollectorEnabled {
// list all pods to include the pods that don't match the rc's selector
// anymore but has the stale controller ref.
podList, err := rm.podStore.Pods(rc.Namespace).List(labels.Everything())
if err != nil {
glog.Errorf("Error getting pods for rc %q: %v", key, err)
rm.queue.Add(key)
return err
}
cm := controller.NewPodControllerRefManager(rm.podControl, rc.ObjectMeta, labels.Set(rc.Spec.Selector).AsSelector(), getRCKind())
matchesAndControlled, matchesNeedsController, controlledDoesNotMatch := cm.Classify(podList.Items)
for _, pod := range matchesNeedsController {
err := cm.AdoptPod(pod)
// continue to next pod if adoption fails.
if err != nil {
// If the pod no longer exists, don't even log the error.
if !errors.IsNotFound(err) {
utilruntime.HandleError(err)
}
} else {
matchesAndControlled = append(matchesAndControlled, pod)
}
}
filteredPods = matchesAndControlled
// remove the controllerRef for the pods that no longer have matching labels
var errlist []error
for _, pod := range controlledDoesNotMatch {
err := cm.ReleasePod(pod)
if err != nil {
errlist = append(errlist, err)
}
}
if len(errlist) != 0 {
aggregate := utilerrors.NewAggregate(errlist)
// push the RC into work queue again. We need to try to free the
// pods again otherwise they will stuck with the stale
// controllerRef.
rm.queue.Add(key)
return aggregate
}
} else {
podList, err := rm.podStore.Pods(rc.Namespace).List(labels.Set(rc.Spec.Selector).AsSelector())
if err != nil {
glog.Errorf("Error getting pods for rc %q: %v", key, err)
rm.queue.Add(key)
return err
}
filteredPods = controller.FilterActivePods(podList.Items)
}
if rcNeedsSync && rc.DeletionTimestamp == nil {
rm.manageReplicas(filteredPods, &rc)
}
trace.Step("manageReplicas done")
// Count the number of pods that have labels matching the labels of the pod
// template of the replication controller, the matching pods may have more
// labels than are in the template. Because the label of podTemplateSpec is
// a superset of the selector of the replication controller, so the possible
// matching pods must be part of the filteredPods.
fullyLabeledReplicasCount := 0
templateLabel := labels.Set(rc.Spec.Template.Labels).AsSelector()
for _, pod := range filteredPods {
if templateLabel.Matches(labels.Set(pod.Labels)) {
fullyLabeledReplicasCount++
}
}
// Always updates status as pods come up or die.
if err := updateReplicaCount(rm.kubeClient.Core().ReplicationControllers(rc.Namespace), rc, len(filteredPods), fullyLabeledReplicasCount); err != nil {
// Multiple things could lead to this update failing. Requeuing the controller ensures
// we retry with some fairness.
glog.V(2).Infof("Failed to update replica count for controller %v/%v; requeuing; error: %v", rc.Namespace, rc.Name, err)
rm.enqueueController(&rc)
}
return nil
}<|fim▁end|> | }
}
glog.V(4).Infof("Pod %s/%s deleted through %v, timestamp %+v, labels %+v.", pod.Namespace, pod.Name, utilruntime.GetCaller(), pod.DeletionTimestamp, pod.Labels)
if rc := rm.getPodController(pod); rc != nil { |
<|file_name|>CVar.cpp<|end_file_name|><|fim▁begin|>#include <stdio.h>
#include <sstream>
#include <bitset>
#include "CVar.h"
#include "../dbnet/db_net.hpp"
CVar::CVar(int iWid, int iType, std::string sRem) : m_iWid(iWid), m_iType(iType), m_sRem(sRem)
{
m_bSync = false;
}
void CVar::AddAlias(int iBit, std::string sRem)
{
CAlias* pNewAlias = new CAlias(iBit, sRem);
CBase::AddItem(pNewAlias);
}
CAlias* CVar::GetLastAlias()
{
return CBase::GetLastItem();
}
/*
SAlias* CVar::GetAliasByBit(int iBit)
{
for (CAlias* pAlias = m_pAliases; pAlias; pAlias = pAlias->m_pNext)
{
if (pAlias->m_iBit == iBit)
{
return pAlias;
}
}
return NULL;
}
*/
bool CVar::Sync()
{
++m_iAttempt;
if (DBnet_ReadReg(m_iWid, m_iType, m_Data) == SUCCESS)
{
time(&m_SyncTime);
m_bSync = true;
return true;
}
else
{
m_bSync = false;
return false;
}
}
std::string CVar::GetData()
{
switch (m_iType)
{
case TYPE_INT:
{
std::bitset<8> bset(*(int *)m_Data);
return bset.to_string();
}
case TYPE_LONG:
{
std::bitset<16> bset(*(int *)m_Data);
return bset.to_string();
}
<|fim▁hole|> char sRet[256];
sprintf(sRet, "%f", *(float *)m_Data);
return sRet;
}
default:
return "";
}
}
float CVar::GetDataF()
{
return *(float *)m_Data;
}
long CVar::GetDataL()
{
std::string sData = GetData();
int long lData = std::stol(sData,nullptr,2);
return lData;
}
std::string CVar::Write()
{
std::stringstream s;
if (m_bSync)
{
s << "wid: " << GetWid() << "\t hodnota: " << GetData() << "\t " << GetTime() << "\t" << GetAttemptCount() << "\t;" << GetRem() << std::endl;
if (m_iType == TYPE_INT || m_iType == TYPE_LONG)
{
for (CAlias* pAlias = m_pFirst; pAlias; pAlias = pAlias->m_pNext)
{
if (pAlias)
{
std::bitset<16> bset(*(long *)m_Data);
s << " bit: " << pAlias->m_iBit << "\t hodnota: " << (bset[pAlias->m_iBit] ? "ON" : "OFF") << "\t\t\t\t\t\t;" << pAlias->m_sRem << std::endl;
}
}
}
}
else
{
s << "wid: " << GetWid() << "\t nesynchronizováno!!!\t\t\t\t" << GetAttemptCount() << "\t;" << GetRem() << std::endl;
}
return s.str();
}
std::string CVar::GetTime()
{
struct tm* timeinfo = localtime(&m_SyncTime);
std::stringstream strs;
strs << 1900 + timeinfo->tm_year << "-" << 1 + timeinfo->tm_mon << "-" << timeinfo->tm_mday << " ";
strs << timeinfo->tm_hour << ":";
timeinfo->tm_min < 10 ? (strs << "0" << timeinfo->tm_min << ":") : (strs << timeinfo->tm_min << ":");
timeinfo->tm_sec < 10 ? (strs << "0" << timeinfo->tm_sec) : (strs << timeinfo->tm_sec);
return strs.str();
}<|fim▁end|> | case TYPE_FLOAT:
{ |
<|file_name|>dynamic.to.top.min.js<|end_file_name|><|fim▁begin|>/*
* Dynamic To Top Plugin
* http://www.mattvarone.com
*
* By Matt Varone
* @sksmatt
*
*/
var mv_dynamic_to_top;(function($,mv_dynamic_to_top){jQuery.fn.DynamicToTop=function(options){var defaults={text:mv_dynamic_to_top.text,min:parseInt(mv_dynamic_to_top.min,10),fade_in:600,fade_out:400,speed:parseInt(mv_dynamic_to_top.speed,10),easing:mv_dynamic_to_top.easing,version:mv_dynamic_to_top.version,id:'dynamic-to-top'},settings=$.extend(defaults,options);if(settings.version===""||settings.version==='0'){settings.text='<span> </span>';}<|fim▁hole|>var $toTop=$('<a href=\"#\" id=\"'+settings.id+'\"></a>').html(settings.text);$toTop.hide().appendTo('body').click(function(){$('html, body').stop().animate({scrollTop:0},settings.speed,settings.easing);return false;});$(window).scroll(function(){var sd=jQuery(window).scrollTop();if(typeof document.body.style.maxHeight==="undefined"){$toTop.css({'position':'absolute','top':sd+$(window).height()-mv_dynamic_to_top.margin});}
if(sd>settings.min){$toTop.fadeIn(settings.fade_in);}else{$toTop.fadeOut(settings.fade_out);}});};$('body').DynamicToTop();})(jQuery,mv_dynamic_to_top);<|fim▁end|> | if(!$.isFunction(settings.easing)){settings.easing='linear';} |
<|file_name|>agent.rs<|end_file_name|><|fim▁begin|>#![allow(non_snake_case)]
use std::collections::HashMap;
use request::Handler;
use serde_json;
use error::ConsulResult;
use std::error::Error;
use super::{Service, RegisterService, TtlHealthCheck};
/// Agent can be used to query the Agent endpoints
pub struct Agent{
handler: Handler
}
/// AgentMember represents a cluster member known to the agent
#[derive(Serialize, Deserialize)]
pub struct AgentMember {
Name: String,
Addr: String,
Port: u16,
Tags: HashMap<String, String>,
Status: usize,
ProtocolMin: u8,
ProtocolMax: u8,
ProtocolCur: u8,
DelegateMin: u8,
DelegateMax: u8,
DelegateCur: u8
}
impl Agent {
pub fn new(address: &str) -> Agent {
Agent {
handler: Handler::new(&format!("{}/v1/agent", address))
}
}
pub fn services(&self) -> ConsulResult<HashMap<String, Service>> {
let result = self.handler.get("services")?;
serde_json::from_str(&result)
.map_err(|e| e.description().to_owned())
}
pub fn members(&self) -> ConsulResult<Vec<AgentMember>> {
let result = self.handler.get("members")?;
serde_json::from_str(&result)
.map_err(|e| e.description().to_owned())
}
pub fn register(&self, service: RegisterService) -> ConsulResult<()> {<|fim▁hole|> if let Err(e) = self.handler.put("service/register", json_str, Some("application/json")) {
Err(format!("Consul: Error registering a service. Err:{}", e))
}
else {
Ok(())
}
}
pub fn register_ttl_check(&self, health_check: TtlHealthCheck) -> ConsulResult<()> {
let json_str = serde_json::to_string(&health_check)
.map_err(|e| e.description().to_owned())?;
if let Err(e) = self.handler.put("check/register", json_str, Some("application/json")) {
Err(format!("Consul: Error registering a health check. Err:{}", e))
}
else {
Ok(())
}
}
pub fn check_pass(&self, service_id: String) -> ConsulResult<()> {
let uri = format!("check/pass/{}", service_id);
self.handler.get(&uri)?;
Ok(())
}
pub fn get_self_name(&self) -> ConsulResult<Option<String>> {
let result = self.handler.get("self")?;
let json_data = serde_json::from_str(&result)
.map_err(|e| e.description().to_owned())?;
Ok(super::get_string(&json_data, &["Config", "NodeName"]))
}
pub fn get_self_address(&self) -> ConsulResult<Option<String>> {
let result = self.handler.get("self")?;
let json_data = serde_json::from_str(&result)
.map_err(|e| e.description().to_owned())?;
Ok(super::get_string(&json_data, &["Config", "AdvertiseAddr"]))
}
}<|fim▁end|> | let json_str = serde_json::to_string(&service)
.map_err(|e| e.description().to_owned())?;
|
<|file_name|>dataerrorview.directive.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2021 Inera AB (http://www.inera.se)
*
* This file is part of sklintyg (https://github.com/sklintyg).
*<|fim▁hole|> *
* sklintyg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
angular.module('StatisticsApp').directive('dataerrorview', function() {
'use strict';
return {
restrict: 'A',
transclude: true,
replace: true,
scope: {
errorPageUrl: '=',
showError: '='
},
templateUrl: '/components/directives/dataerrorview/dataerrorview.html'
};
});<|fim▁end|> | * sklintyg is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version. |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import operator
from django.core.urlresolvers import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_post_parameters
from horizon import exceptions
from horizon import forms
from horizon import tables
from openstack_dashboard import api
from .forms import CreateUserForm, UpdateUserForm
from .tables import UsersTable
class IndexView(tables.DataTableView):
table_class = UsersTable
template_name = 'admin/users/index.html'
def get_data(self):
users = []
try:
users = api.keystone.user_list(self.request)
except:
exceptions.handle(self.request,
_('Unable to retrieve user list.'))
return users
class UpdateView(forms.ModalFormView):
form_class = UpdateUserForm
template_name = 'admin/users/update.html'
success_url = reverse_lazy('horizon:admin:users:index')
@method_decorator(sensitive_post_parameters('password',
'confirm_password'))
def dispatch(self, *args, **kwargs):
return super(UpdateView, self).dispatch(*args, **kwargs)
def get_object(self):
if not hasattr(self, "_object"):
try:
self._object = api.keystone.user_get(self.request,
self.kwargs['user_id'],
admin=True)
except:
redirect = reverse("horizon:admin:users:index")
exceptions.handle(self.request,
_('Unable to update user.'),<|fim▁hole|> redirect=redirect)
return self._object
def get_context_data(self, **kwargs):
context = super(UpdateView, self).get_context_data(**kwargs)
context['user'] = self.get_object()
return context
#trunglq add
def get_initial(self):
user = self.get_object()
try:
print user.secretkey
return {'id': user.id,
'name': user.name,
'tenant_id': getattr(user, 'tenantId', None),
'email': user.email,
'secretkey': user.secretkey}
except Exception as err:
return {'id': user.id,
'name': user.name,
'tenant_id': getattr(user, 'tenantId', None),
'email': user.email}
#end
class CreateView(forms.ModalFormView):
form_class = CreateUserForm
template_name = 'admin/users/create.html'
success_url = reverse_lazy('horizon:admin:users:index')
@method_decorator(sensitive_post_parameters('password',
'confirm_password'))
def dispatch(self, *args, **kwargs):
return super(CreateView, self).dispatch(*args, **kwargs)
def get_form_kwargs(self):
kwargs = super(CreateView, self).get_form_kwargs()
try:
roles = api.keystone.role_list(self.request)
except:
redirect = reverse("horizon:admin:users:index")
exceptions.handle(self.request,
_("Unable to retrieve user roles."),
redirect=redirect)
roles.sort(key=operator.attrgetter("id"))
kwargs['roles'] = roles
return kwargs
def get_initial(self):
default_role = api.keystone.get_default_role(self.request)
return {'role_id': getattr(default_role, "id", None)}<|fim▁end|> | |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>export { piMatch } from './pimatch'
export { titleCapitalize } from './titlecapitalization'<|fim▁hole|><|fim▁end|> | export { wordScore } from './wordscore' |
<|file_name|>gblackboard.cpp<|end_file_name|><|fim▁begin|>#include "gblackboard_p.h"
#include "gblackboard_p_p.h"
#include "gghosttree_p.h"
#include "gghostnode_p.h"
typedef QHash<QQmlEngine *, QPointer<GBlackboard> > GlobalBlackboards;
Q_GLOBAL_STATIC(GlobalBlackboards, theGlobalBlackboards)
// class GBlackboard
GBlackboard::GBlackboard(QObject *parent)
: QObject(*new GBlackboardPrivate(), parent)
{
Q_D(GBlackboard);
d->targetNode = qobject_cast<GGhostNode *>(parent);
if (d->targetNode) {
connect(d->targetNode, &GGhostNode::statusChanged,
[this](Ghost::Status status) {
if (Ghost::StandBy == status) {
this->clear();
}
});
d->masterTree = d->targetNode->masterTree();
} else {
d->masterTree = qobject_cast<GGhostTree *>(parent);
if (d->masterTree) {
connect(d->masterTree, &GGhostTree::statusChanged,
[this](Ghost::Status status) {
if (Ghost::StandBy == status) {
this->clear();
}
});
}
}
}
GBlackboard *GBlackboard::qmlAttachedProperties(QObject *target)
{
return new GBlackboard(target);
}
GBlackboard *GBlackboard::globalBlackboard() const
{
Q_D(const GBlackboard);
if (d->globalBlackboard) {
return d->globalBlackboard;
}
QQmlContext *context = qmlContext(d->parent);
if (nullptr == context) {
Q_CHECK_PTR(context);
return nullptr;
}
QQmlEngine *engine = context->engine();
if (nullptr == engine) {
Q_CHECK_PTR(engine);
return nullptr;
}
QPointer<GBlackboard> blackboard = theGlobalBlackboards()->value(engine);
if (blackboard.isNull()) {
blackboard = new GBlackboard(engine);
theGlobalBlackboards()->insert(engine, blackboard);
}
GBlackboardPrivate *_this = const_cast<GBlackboardPrivate *>(d);
_this->globalBlackboard = blackboard.data();
return d->globalBlackboard;
}
GBlackboard *GBlackboard::sharedBlackboard() const
{
Q_D(const GBlackboard);
if (d->sharedBlackboard) {
return d->sharedBlackboard;
}
if (!d->masterTree) {
qWarning("GtGhost : Master tree is null.");
return nullptr;
}
QObject *attached = qmlAttachedPropertiesObject<GBlackboard>(d->masterTree);
if (attached) {
GBlackboardPrivate *_this = const_cast<GBlackboardPrivate *>(d);
_this->sharedBlackboard = qobject_cast<GBlackboard *>(attached);
}
return d->sharedBlackboard;
}
bool GBlackboard::has(const QString &key) const
{
Q_D(const GBlackboard);
return d->datas.contains(key);
}
void GBlackboard::set(const QString &key, const QJSValue &value)
{<|fim▁hole|> d->datas.remove(key);
} else {
d->datas.insert(key, value);
}
}
QJSValue GBlackboard::get(const QString &key) const
{
Q_D(const GBlackboard);
return d->datas.value(key);
}
void GBlackboard::unset(const QString &key)
{
Q_D(GBlackboard);
d->datas.remove(key);
}
void GBlackboard::clear()
{
Q_D(GBlackboard);
d->datas.clear();
}
// class GBlackboardPrivate
GBlackboardPrivate::GBlackboardPrivate()
: masterTree(nullptr)
, targetNode(nullptr)
, globalBlackboard(nullptr)
, sharedBlackboard(nullptr)
{
}
GBlackboardPrivate::~GBlackboardPrivate()
{
}<|fim▁end|> | Q_D(GBlackboard);
if (value.isUndefined()) { |
<|file_name|>PaginationControlsView_spec.js<|end_file_name|><|fim▁begin|>var createFakeModel = function () { return sinon.createStubInstance(Backbone.Model); };
var createFakeCollection = function () { return sinon.createStubInstance(Backbone.Collection); };
requireMock.requireWithStubs(
{
'models/SearchParamsModel': sinon.stub().returns(createFakeModel()),
'collections/SearchResultsCollection': sinon.stub().returns(createFakeCollection())
},
[
'views/right_column/results_footer/PaginationControlsView',
'collections/SearchResultsCollection',
'models/SearchParamsModel',
'lib/Mediator'
],
function (
PaginationControlsView,
SearchResultsCollection,
SearchParamsModel,
Mediator
) {
describe('Pagination Controls View', function () {
var mediator,
resultsCollection,
searchParamsModel,
view;
beforeEach(function () {
mediator = sinon.stub(new Mediator());
resultsCollection = new SearchResultsCollection();
resultsCollection.getLastPageNumber = sinon.stub().returns(7);
resultsCollection.getPageNumber = sinon.stub().returns(4);
resultsCollection.getTotalResultsCount = sinon.stub().returns(10);
searchParamsModel = new SearchParamsModel();
searchParamsModel.setPageNumber = sinon.spy();
searchParamsModel.get = sinon.stub();
view = new PaginationControlsView({
collection: resultsCollection,
model: searchParamsModel
});
view.setMediator(mediator);
view.render();
});
describe('Basic rendering and appearance', function () {
it('creates a container element with class .pagination', function () {
expect(view.$el).toHaveClass('pagination');
});
it('has a first page button', function () {
expect(view.$('a[title="Go to page 1"]').length).toEqual(1);
});
it('has a last page button', function () {
expect(view.$('a[title="Go to page 7"]').length).toEqual(1);
});
it('has a previous page button', function () {
expect(view.$('a.prev').length).toEqual(1);
});
it('has a next page button', function () {
expect(view.$('a.next').length).toEqual(1);
});
});
describe('the visible page numbers with 7 pages of results', function () {
var range,
options;
// key: current page
// value: the page numbers that should be visible and clickable
options = {
1: [1, 2, 7],
2: [1, 2, 3, 7],
3: [1, 2, 3, 4, 7],
4: [1, 2, 3, 4, 5, 6, 7],
5: [1, 4, 5, 6, 7],
6: [1, 5, 6, 7],
7: [1, 6, 7]
};
range = [1, 2, 3, 4, 5, 6, 7];
_.each(options, function (visiblePages, currentPage) {
it('gives links for pages ' + visiblePages + ' and only those pages when on page ' + currentPage, function () {
var hiddenPages = _.difference(range, visiblePages);
resultsCollection.getPageNumber = sinon.stub().returns(currentPage);
view.render();
_.each(visiblePages, function (visiblePage) {
expect(view.$el.html()).toContain('title="Go to page ' + visiblePage + '"');
});
_.each(hiddenPages, function (hiddenPage) {
expect(view.$el.html()).not.toContain('title="Go to page ' + hiddenPage + '"');
});
});
});
});
describe('a single page of results', function () {
beforeEach(function () {
resultsCollection.getLastPageNumber = sinon.stub().returns(1);
resultsCollection.getPageNumber = sinon.stub().returns(1);
});
it('is hidden', function () {
view.onSearchComplete();
expect(view.$el).toHaveClass('hidden');
});
});
describe('no results', function () {
beforeEach(function () {
resultsCollection.getTotalResultsCount = sinon.stub().returns(0);
});
it('is hidden', function () {
view.showControls();
view.onSearchComplete();
expect(view.$el).toHaveClass('hidden');
});
});
describe('updating the search params model', function () {
it('decrements the searchParamsModel pageNumber value when the prev page button is clicked', function () {
view.onClickPrevPageButton();
expect(searchParamsModel.setPageNumber).toHaveBeenCalledWith(3);
});
it('sets the searchParamsModel pageNumber value to 1 when the first page button is clicked', function () {
view.onClickPageSelector({
target: view.$('a[title="Go to page 1"]')[0]
});
expect(searchParamsModel.setPageNumber).toHaveBeenCalledWith(1);
});
it('increments the searchParamsModel pageNumber value when the next page button is clicked', function () {
view.onClickNextPageButton();
expect(searchParamsModel.setPageNumber).toHaveBeenCalledWith(5);
});
});
describe('triggering the "search:refinedSearch" event', function () {
it('triggers when a page number is clicked', function () {
view.onClickPageSelector({
target: {
text: '1'
}
});
expect(mediator.trigger).toHaveBeenCalledWith('search:refinedSearch');
});
it('triggers when the previous page button is clicked', function () {
view.onClickPrevPageButton();
expect(mediator.trigger).toHaveBeenCalledWith('search:refinedSearch');
});
it('triggers when the next page button is clicked', function () {
view.onClickNextPageButton();
expect(mediator.trigger).toHaveBeenCalledWith('search:refinedSearch');
});
});
describe('mediated event handling', function () {
beforeEach(function () {
mediator = new Mediator();
view.setMediator(mediator);
});
it('hides itself when the app goes home', function () {
// guard assertion
expect(view.$el).not.toHaveClass('hidden');
mediator.trigger('app:home');
expect(view.$el).toHaveClass('hidden');
});
it('hides itself when a new search is intiated', function () {
// guard assertion
expect(view.$el).not.toHaveClass('hidden');
mediator.trigger('search:initiated');
expect(view.$el).toHaveClass('hidden');
});
it('shows itself when a new set of search results is ready', function () {
view.hideControls();
mediator.trigger('search:complete');
expect(view.$el).not.toHaveClass('hidden');
});
<|fim▁hole|> it('shows itself when an in-progress search is canceled if there are previous results', function () {
view.hideControls();
mediator.trigger('search:displayPreviousResults');
expect(view.$el).not.toHaveClass('hidden');
});
});
});
});<|fim▁end|> | |
<|file_name|>test_stl.cpp<|end_file_name|><|fim▁begin|>#include "test_stl.h"
#include <QSet>
#include <QtTest/QtTest>
#include <vector>
#include <cstring>
#include <cwchar>
#include "../src/utils/stl.h"
void STLTest::testBufferArry() {
using utils::BufferArray;
// constructor
std::string test1("123456789abcdefg");
BufferArray buffer(test1);
QVERIFY(test1.size() + 1 == buffer.size());
QVERIFY(buffer.capacity() == buffer.size());
QVERIFY(strncmp(test1.data(), buffer.data(), test1.size()) == 0);
// operator[]
QVERIFY(test1[0] == '1');
QVERIFY(test1[1] == '2');
QVERIFY(test1[2] == '3');
// reserve
buffer.resize(30);
QVERIFY(buffer.capacity() == 30);
// shrink_to_fit
buffer.shrink_to_fit();
QVERIFY(buffer.capacity() == buffer.size());
// resize
buffer.resize(9);
std::string test2("12345678");
QVERIFY(test2.size() + 1 == buffer.size());
QVERIFY(buffer.capacity() > buffer.size());
QVERIFY(strncmp(test2.data(), buffer.data(), test2.size()) == 0);
// shrink_to_fit
buffer.shrink_to_fit();
QVERIFY(buffer.capacity() == buffer.size());
#ifdef UTILS_CXX11_MODE<|fim▁hole|> buffer = std::move(other_buffer);
QVERIFY(test3.size() + 1 == buffer.size());
QVERIFY(buffer.capacity() == buffer.size());
QVERIFY(strncmp(test3.data(), buffer.data(), test3.size()) == 0);
// constructor2
const char test_string[] = "abcdefg";
size_t test_size = sizeof(test_string);
buffer = BufferArray(test_string);
QVERIFY(test_size == buffer.size());
QVERIFY(buffer.capacity() == buffer.size());
QVERIFY(memcmp(test_string, buffer.data(), test_size) == 0);
#endif
}
void STLTest::testWBufferArry() {
using utils::WBufferArray;
// constructor
std::wstring test1(L"123456789abcdefg");
WBufferArray buffer(test1);
QVERIFY(test1.size() + 1 == buffer.size());
QVERIFY(buffer.capacity() == buffer.size());
QVERIFY(wcsncmp(test1.data(), buffer.data(), test1.size()) == 0);
// operator[]
QVERIFY(test1[0] == L'1');
QVERIFY(test1[1] == L'2');
QVERIFY(test1[2] == L'3');
// reserve
buffer.resize(30);
QVERIFY(buffer.capacity() == 30);
// shrink_to_fit
buffer.shrink_to_fit();
QVERIFY(buffer.capacity() == buffer.size());
// resize
buffer.resize(9);
std::wstring test2(L"12345678");
QVERIFY(test2.size() + 1 == buffer.size());
QVERIFY(buffer.capacity() > buffer.size());
QVERIFY(wcsncmp(test2.data(), buffer.data(), test2.size()) == 0);
#ifdef UTILS_CXX11_MODE
// move
std::wstring test3(L"gqjdiw913abc_123d");
WBufferArray other_buffer(test3);
buffer = std::move(other_buffer);
QVERIFY(test3.size() + 1 == buffer.size());
QVERIFY(buffer.capacity() == buffer.size());
QVERIFY(wcsncmp(test3.data(), buffer.data(), test3.size()) == 0);
// constructor2
const wchar_t test_string[] = L"abcdefg";
size_t test_size = sizeof(test_string) / sizeof(wchar_t);
buffer = WBufferArray(test_string);
QVERIFY(test_size == buffer.size());
QVERIFY(buffer.capacity() == buffer.size());
QVERIFY(memcmp(test_string, buffer.data(), test_size) == 0);
#endif
}
QTEST_APPLESS_MAIN(STLTest)<|fim▁end|> | // move
std::string test3("gqjdiw913abc_123d");
BufferArray other_buffer(test3); |
<|file_name|>Renderer.cpp<|end_file_name|><|fim▁begin|>#include "Renderer.h"
#include "Core/Windows/Window.h"
#include <Resources/ResourceCache.h>
namespace uut
{
UUT_MODULE_IMPLEMENT(Renderer)
{}
Renderer::Renderer()
: _screenSize(0)
{
}
Renderer::~Renderer()
{
}
//////////////////////////////////////////////////////////////////////////////
bool Renderer::OnInit()
{
if (!Super::OnInit())
return false;
ModuleInstance<ResourceCache> cache;
cache->AddResource(CreateMonoTexture(Color32::White), "white");
cache->AddResource(CreateMonoTexture(Color32::Black), "black");
<|fim▁hole|>
void Renderer::OnDone()
{
}
SharedPtr<Texture2D> Renderer::CreateMonoTexture(const Color32& color)
{
auto tex = CreateTexture(Vector2i(1), TextureAccess::Static);
uint32_t* buf = static_cast<uint32_t*>(tex->Lock());
if (buf == nullptr)
return nullptr;
buf[0] = color.ToInt();
tex->Unlock();
return tex;
}
}<|fim▁end|> | return true;
} |
<|file_name|>1.cpp<|end_file_name|><|fim▁begin|>//
// Created by icebeetle on 18-6-23.
//
#include<bits/stdc++.h>
using namespace std;
struct _t {
int x, y, z, t;
explicit _t(int xx = 0, int yy = 0, int zz = 0, int tt = 0) : x(xx), y(yy), z(zz), t(tt) {};
friend bool operator<(const _t &a, const _t &b) {
return a.t < b.t;
}
};
const int SIZE_T = 200000;
const int SIZE = 510;
_t peo[SIZE_T];
inline int read_t(int a, int b, int c, int d) {
return (a * 60 * 60 + b * 60 + c) * 1000 + d;
}
int chess[SIZE][SIZE];
int main(int argc, char *argv[]) {
int n, m, k;
while (scanf("%d%d%d", &n, &m, &k) != EOF) {
for (int i = 0; i < k; ++i) {
int x, y, z, a, b, c, d;
scanf("%d%d%d%d:%d:%d.%d", &x, &y, &z, &a, &b, &c, &d);
peo[i] = _t(x - 1, y - 1, z, read_t(a, b, c, d));
}
sort(peo, peo + k);
int cnt = 0;
int mcnt = 0;
int ansid = -1;
memset(chess, 0, sizeof(chess));
for (int i = 0; i < k; ++i) {
if (peo[i].z == 0) {
if (chess[peo[i].x][peo[i].y] == 0) cnt++;
chess[peo[i].x][peo[i].y]++;
} else {
if (chess[peo[i].x][peo[i].y] == 1) cnt--;
chess[peo[i].x][peo[i].y]--;
}<|fim▁hole|> mcnt = cnt;
ansid = i;
}
}
memset(chess, 0, sizeof(chess));
for (int i = 0; i <= ansid; ++i) {
if (peo[i].z == 0) {
if (chess[peo[i].x][peo[i].y] == 0) cnt++;
chess[peo[i].x][peo[i].y]++;
} else {
if (chess[peo[i].x][peo[i].y] == 1) cnt--;
chess[peo[i].x][peo[i].y]--;
}
}
for (int i = 0; i < n; ++i) {
for (int j = 0; j < m; ++j)printf(chess[i][j] ? "1" : "0");
printf("\n");
}
}
return 0;
}<|fim▁end|> | if (mcnt <= cnt) { |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from django.conf import settings
from datetime import datetime
import uuid
User = settings.AUTH_USER_MODEL
def generate_new_uuid():
return str(uuid.uuid4())
class behaviourExperimentType_model(models.Model):
# BE CAREFUL About migrations that add unique fields !!!!!!!!!!!!! e.g. UUID
# https: // docs.djangoproject.com / en / 1.9 / howto / writing - migrations / # migrations-that-add-unique-fields
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
about = models.CharField(max_length=60, blank=True)
public = models.BooleanField (default = False, blank=True)
public_set_date = models.DateTimeField (default=datetime.now)
description = models.TextField(max_length=1000, blank=True)
created = models.DateTimeField(auto_now_add=True)
creator = models.ForeignKey(User, related_name='behaviouralExperiment_own')
users_with_access = models.ManyToManyField (User, related_name='behaviouralExperiment_accessable', through = 'shareBehaviouralExperiment')
experimentDefinition = models.ForeignKey("experimentType_model")
environmentDefinition = models.ForeignKey("environmentType_model")
class Meta:
#unique_together = ("creator","experimentDefinition","environmentDefinition")
ordering = ["-created"]
def __unicode__(self):
return "id: %s" % (self.uuid, )
def save(self, *args, **kwargs):
if self.uuid is not None:
try:
orig = behaviourExperimentType_model.objects.get(uuid=self.uuid)
if orig.public != self.public:
self.public_set_date = datetime.now()
except: #If it is the first time that is being created then .get() fails and throws an exception
pass
super(behaviourExperimentType_model, self).save(*args, **kwargs)
#### ENVIRONMENT ##########
class environmentType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
wormStatus = models.ForeignKey("wormStatusType_model")
plateConfiguration = models.ForeignKey("plateConfigurationType_model")
obstacle = models.ManyToManyField("obstacleLocationType_model",blank=True)
crowding = models.ForeignKey("crowdingType_model")
envTemp = models.FloatField(('Environmental Temperature'), default=20)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class wormStatusType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoorDFromPlateCentre = models.FloatField(blank=False)
angleRelativeXaxis = models.FloatField(validators=[MinValueValidator(0),MaxValueValidator(6.28318)],blank=False)
wormData = models.ForeignKey("wormDataType_model")
#class Meta:
#unique_together = ("xCoordFromPlateCentre","yCoorDFromPlateCentre","angleRelativeXaxis","wormData")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class wormDataType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
MALE = 'M'
FEMALEHERMAPHRODITES = 'FH'
GENDERTYPE = (
(MALE,"Male"),
(FEMALEHERMAPHRODITES,"Female Hermaphrodites"),
)
gender = models.CharField(max_length=60, blank=False,choices=GENDERTYPE, default=FEMALEHERMAPHRODITES)
age = models.PositiveIntegerField(blank=False)
stageOfLifeCycle = models.PositiveIntegerField(blank=False,validators=[MinValueValidator(1),MaxValueValidator(4)])
timeOffFood = models.PositiveIntegerField(blank=False)
#class Meta:
#unique_together = ("gender","age","stageOfLifeCycle","timeOffFood")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class crowdingType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#These parameters wormsDistributionInPlate and wormsInPlate are fo
wormsDistributionInPlate = models.CharField(max_length=60, blank=True)
wormsInPlate = models.PositiveIntegerField(validators=[MinValueValidator(1)],default=1,blank=False,)
#class Meta:
#unique_together = ("wormsDistributionInPlate","wormsInPlate")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class obstacleLocationType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoorDFromPlateCentre = models.FloatField(blank=False)
Stiffness = models.FloatField(validators=[MinValueValidator(0)],blank=False)
CYLINDER = 'CY'
CUBE = 'CU'
HEXAGON = 'HE'
SHAPETYPE = (
(CYLINDER,"cylinder"),
(CUBE,"cube"),
(HEXAGON,"hexagon"),
)
shape = models.CharField(max_length=60, blank=False,choices=SHAPETYPE, default=CYLINDER)
Cylinder = models.ForeignKey("CylinderType_model",null=True, blank=True)
Cube = models.ForeignKey("CubeType_model",null=True, blank=True)
Hexagon = models.ForeignKey("HexagonType_model",null=True, blank=True)
#class Meta:
#unique_together = ("shape","xCoordFromPlateCentre","yCoorDFromPlateCentre","angleRelativeXaxis","Stiffness","Cylinder","Cube","Hexagon","Hair")
def __unicode__(self):
return "id: %s" % (self.uuid,)
class plateConfigurationType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
WATER = 'W'
GELATIN = 'G'
AGAR = 'A'
BOTTOMMATERIALTYPE = (
(WATER,"water"),
(GELATIN,"gelatin"),
(AGAR,"agar"),
)
lid = models.BooleanField(blank=False,default=False)
bottomMaterial = models.CharField (max_length=60, blank=False,choices=BOTTOMMATERIALTYPE, default=AGAR)
dryness = models.FloatField(blank=False,validators=[MinValueValidator(0)])
CYLINDER = 'CY'
CUBE = 'CU'
HEXAGON = 'HE'
SHAPETYPE = (
(CYLINDER,"cylinder"),
(CUBE,"cube"),
(HEXAGON,"hexagon"),
)
shape = models.CharField(max_length=60, blank=False,choices=SHAPETYPE, default=CYLINDER)
Cylinder = models.ForeignKey("CylinderType_model",null=True, blank=True)
Cube = models.ForeignKey("CubeType_model",null=True, blank=True)
Hexagon = models.ForeignKey("HexagonType_model",null=True, blank=True)
#class Meta:
#unique_together = ("lid","bottomMaterial","dryness","shape","Cylinder","Cube","Hexagon")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class CubeType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
depth = models.FloatField(validators=[MinValueValidator(0)],blank=False)
side1Length = models.FloatField(validators=[MinValueValidator(0)],blank=False)
side2Length = models.FloatField(validators=[MinValueValidator(0)],blank=False)
#class Meta:
#unique_together = ("depth", "side1Length", "side2Length")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class CylinderType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
length = models.FloatField(validators=[MinValueValidator(0)], blank=False)
radius = models.FloatField(validators=[MinValueValidator(0)], blank=False)
#class Meta:
#unique_together = ("length", "radius")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class HexagonType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
depth = models.FloatField(validators=[MinValueValidator(0)],blank=False)
sideLength = models.FloatField(validators=[MinValueValidator(0)],blank=False)
#class Meta:
#unique_together = ("depth", "sideLength")
def __unicode__(self):
return "id: %s" % (self.uuid, )
##### EXPERIMENT ####
class experimentType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#It is possible to have different elements of interaction
description = models.TextField(max_length=1000, blank=True)
experimentDuration = models.PositiveIntegerField(blank=False, default=10000)
# The following ManyToManyField relations do not have an explicit definition table since we do not see need to associate extra data to the relationship
# https://docs.djangoproject.com/en/dev/ref/models/fields/#django.db.models.ManyToManyField
#
#GE: Check how can we ensure that at least one of them is defined
#
interactionAtSpecificTime = models.ManyToManyField("interactionAtSpecificTimeType_model",blank=True, null=True )
interactionFromt0tot1 = models.ManyToManyField("interactionFromt0tot1Type_model",blank=True, null=True)
experimentWideConf = models.ManyToManyField("experimentWideConfType_model",blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
## Experiments at specific time
class interactionAtSpecificTimeType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
# Only one of them at each object
#name = models.CharField(max_length=60, blank=True)
description = models.TextField(max_length=1000, blank=True, default='No description provided')
eventTime = models.FloatField(blank=False, default=100)
MECHANOSENSATION = 'MS'
CHEMOTAXIS ='CT'
TERMOTAXIS ='TT'
GALVANOTAXIS = 'GT'
PHOTOTAXIS = 'PT'
EXPERIMENTCATEGORY = (
(MECHANOSENSATION,"mechanosensation"),
(CHEMOTAXIS,"chemotaxis"),
(TERMOTAXIS,"termotaxis"),
(GALVANOTAXIS,"galvanotaxis"),
(PHOTOTAXIS,"phototaxis"),
)
experimentCategory = models.CharField(max_length=60, blank=False,choices=EXPERIMENTCATEGORY, default=MECHANOSENSATION)
#GE: Revise to force the user to fill one of the followings
mechanosensation = models.ForeignKey("mechanosensationTimeEventType_model", blank=True, null=True)
chemotaxis = models.ForeignKey("chemotaxisTimeEventType_model", blank=True, null=True)
termotaxis = models.ForeignKey("termotaxisTimeEventType_model", blank=True, null=True)
galvanotaxis = models.ForeignKey("galvanotaxisTimeEventType_model", blank=True, null=True)
phototaxis = models.ForeignKey("phototaxisTimeEventType_model", blank=True, null=True)
#name = models.CharField(max_length=60, blank=True)
#class Meta:
#unique_together = ("eventTime","mechanosensation","chemotaxis","termotaxis","galvanotaxis", "phototaxis")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class mechanosensationTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
PLATETAP = 'PT'
DIRECTWORMTOUCH = 'DWT'
INTERACTIONOPTIONS = (
(PLATETAP,"plateTap"),
(DIRECTWORMTOUCH,"directWormTouch"),
)
interactionType = models.CharField(max_length=60, blank=False,choices=INTERACTIONOPTIONS, default=DIRECTWORMTOUCH)
directTouch = models.ForeignKey("directTouchType_model", blank=True, null=True)
plateTap = models.ForeignKey("plateTapType_model", blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class directTouchType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
EYEBROW = 'EB'
VONFREYHAIR = 'VFH'
PLATINIUMWIRE = 'PW'
TOUCHINSTRUMENTTYPE = (
(EYEBROW,"Eyebrow"),
(VONFREYHAIR,"Von Frey hair"),
(PLATINIUMWIRE,"Platinium wire"),
)
directTouchInstrument = models.CharField(max_length=60, blank=False, choices=TOUCHINSTRUMENTTYPE, default=EYEBROW)
touchDistance = models.FloatField(blank=False, validators=[MinValueValidator(0),MaxValueValidator(1.0)])
touchAngle = models.FloatField(blank=False, validators=[MinValueValidator(0),MaxValueValidator(360)])
appliedForce = models.FloatField(blank=False,validators=[MinValueValidator(0),
MaxValueValidator(100)])
#class Meta:
#unique_together = ("directTouchInstrument", "appliedForce","touchDistance","touchAngle")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class plateTapType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
appliedForce = models.FloatField(blank=False,validators=[MinValueValidator(0),
MaxValueValidator(100)]) #In the GUI the max is 1 to reflect 1mN, I'll leave it to 100 to avoid breaking if we make slight changes to support a bit more
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
<|fim▁hole|> description = models.TextField(max_length=1000, blank=True)
DYNAMICDROPTEST = 'DDT'
CHEMOTAXISOPTIONS = (
(DYNAMICDROPTEST,"Dynamic drop test"),
)
chemotaxisType = models.CharField(max_length=60, blank=False,choices=CHEMOTAXISOPTIONS, default=DYNAMICDROPTEST)
dynamicDropTestConf = models.ForeignKey("dynamicDropTestType_model", blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class staticPointSourceType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
dropQuantity = models.FloatField(blank=False,)
chemical = models.ForeignKey("chemicalType_model",blank=False)
chemicalConcentration = models.FloatField(blank=False)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoordFromPlateCentre = models.FloatField(blank=False)
#class Meta:
#unique_together = ("dropQuantity","chemical","chemicalConcentration","xCoordFromPlateCentre","yCoordFromPlateCentre")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class dynamicDropTestType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
dropQuantity = models.FloatField(blank=False,)
chemical = models.ForeignKey("chemicalType_model",blank=False)
chemicalConcentration = models.FloatField(blank=False)
xCoordFromPlateCentre = models.FloatField(blank=False)
yCoordFromPlateCentre = models.FloatField(blank=False)
#class Meta:
#unique_together = ("dropQuantity","chemical","chemicalConcentration","xCoordFromPlateCentre","yCoordFromPlateCentre")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemicalType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
'''
From NeuronsIDtable-NTU-EditV3.xlsx (Si elegans GDrive)
lysine
cAMP
biotin
Na+
Cl-
heavy metals
copper
cadmium
SDS - Sodium dodecyl sulfate
quinine
'''
NONE = 'None'
NACL = 'NaCl'
BIOTIN = 'biotin'
ETHANOL = 'ethanol'
BUTANONE = 'butanone'
COPPERSULPHATE = 'CuSO4'
SODIUMDODECYLSULFATE = 'SDS - Sodium dodecyl sulfate'
QUININE = 'quinine' # C20H24N2O2
BENZALDEHYDE='benzaldehyde'
DIACETYL='diacetyl'
SODIUMAZIDE='NaN3'
CHEMICALS = (
(NONE, 'None'),
(NACL, "Sodium chloride"),
(BIOTIN, "Biotin"),
(ETHANOL, "Ethanol"),
(BUTANONE, "Butanone"),
(COPPERSULPHATE, "Copper sulphate"),
(SODIUMDODECYLSULFATE, "Sodium dodecyl sulfate"),
(QUININE, "Quinine"),
(BENZALDEHYDE, "Benzaldehyde"),
(DIACETYL, "Diacetyl"),
(SODIUMAZIDE, "Sodium azide"),
)
diffusionCoefficient = models.FloatField (blank=False, default=0)
chemical_name = models.CharField(max_length=60, blank=False, choices=CHEMICALS, default=NONE)
isVolatile = models.BooleanField(blank=False, default=False)
#GE: How can I make a validation so that In case in not volatile this should be empty
volatilitySpeed = models.FloatField(validators=[MinValueValidator(0)],blank=True,null=True)
#class Meta:
#unique_together = ("isVolatile","volatilitySpeed","chemical_name")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class termotaxisTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid )
class pointSourceHeatAvoidanceType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
temperature = models.FloatField(blank=False) #Understood as Celsius
#We consider worm size as 1
heatPointDistance = models.FloatField(blank=False, validators=[MinValueValidator(0),MaxValueValidator(1)])
# heatPointAngle we are not considering it. We will consider that heat is exposed perpendicular to the worm and in a small distance to the worm
# heatPointAngle = models.FloatField(blank=False, validators=[MinValueValidator(0),MaxValueValidator(6.28318)])
#class Meta:
#unique_together = ("temperature","heatPointDistance","heatPointAngle")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class galvanotaxisTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class phototaxisTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class electricShockType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
amplitude = models.FloatField (blank=False)
shockDuration = models.PositiveIntegerField (blank = False)
shockFrequency = models.FloatField (blank = False) # Provide in shocks / sec
#class Meta:y
#unique_together = ("waveLength","intensity","lightingPointDistance","lightingPointAngle")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class pointSourceLightType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
waveLength = models.FloatField(blank=False, validators=[MinValueValidator(0), MaxValueValidator(255)])
#Ask Kofi Categorical vs Wavelength in 10nm .- 1um?
intensity = models.FloatField(blank=False, validators=[MinValueValidator(0), MaxValueValidator(255)])
#Ask Kofi
#The intensity values used by most neuroscientist range from -3 to 0; (log I/20 mW). In my simulations I have been using values from 0 to 255.
'''The values below refer to the point of the worm, considering the worm as a cylinder
Worm's size is considered as 1. Therefore, the max value of lightingPointDistance is 1'''
lightingPointDistance = models.FloatField(blank=False, validators=[MinValueValidator(0), MaxValueValidator(1)])
#lightingPointAngle we are not considering it. We will consider that light is exposed perpendicular to the plate
#lightingPointAngle = models.FloatField(blank=False, validators=[MinValueValidator(0), MaxValueValidator(6.28318)])
'''lightBeamRadius is to have width value to calculate which neurons are lighted, if width=1 all worm is covered'''
lightBeamRadius = models.FloatField(blank=False, default=0.1, validators=[MinValueValidator(0), MaxValueValidator(1)])
#class Meta:
#unique_together = ("waveLength","intensity","lightingPointDistance","lightingPointAngle")
def __unicode__(self):
return "id: %s" % (self.uuid, )
## Experiments from t0 to t1
class interactionFromt0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True, default='No description provided')
eventStartTime = models.FloatField(blank=False, default=100)
eventStopTime = models.FloatField(blank=False, default=1000)
MECHANOSENSATION = 'MS'
CHEMOTAXIS ='CT'
TERMOTAXIS ='TT'
GALVANOTAXIS = 'GT'
PHOTOTAXIS = 'PT'
EXPERIMENTCATEGORY = (
(MECHANOSENSATION,"mechanosensation"),
(CHEMOTAXIS,"chemotaxis"),
(TERMOTAXIS,"termotaxis"),
(GALVANOTAXIS,"galvanotaxis"),
(PHOTOTAXIS,"phototaxis"),
)
experimentCategory = models.CharField(max_length=60, blank=False,choices=EXPERIMENTCATEGORY, default=MECHANOSENSATION)
#GE: Revise to force the user to fill one of the followings
mechanosensation = models.ForeignKey("mechanosensationTimet0tot1Type_model", blank=True, null=True)
chemotaxis = models.ForeignKey("chemotaxisTimet0tot1Type_model", blank=True, null=True)
termotaxis = models.ForeignKey("termotaxisTimet0tot1Type_model", blank=True, null=True)
galvanotaxis = models.ForeignKey("galvanotaxisTimet0tot1Type_model", blank=True, null=True)
phototaxis = models.ForeignKey("phototaxisTimet0tot1Type_model", blank=True, null=True)
#class Meta:
#unique_together = ("eventStartTime","eventStopTime","mechanosensation","chemotaxis", "termotaxis","galvanotaxis", "phototaxis")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class mechanosensationTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class termotaxisTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
TEMPERATURECHANGEINTIME = 'TC'
POINTSOURCEHEATAVOIDANCE = 'PS'
TERMOTAXISOPTIONS = (
(TEMPERATURECHANGEINTIME,"temperatureChangeInTime"),
(POINTSOURCEHEATAVOIDANCE,"pointsourceheatavoidance"),
)
termotaxisType = models.CharField(max_length=60, blank=False,choices=TERMOTAXISOPTIONS, default=TEMPERATURECHANGEINTIME)
temperatureChangeInTime = models.ForeignKey("temperatureChangeInTimeType_model",blank=True, null=True)
pointSourceHeatAvoidance = models.ForeignKey("pointSourceHeatAvoidanceType_model",blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class temperatureChangeInTimeType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
initialTemperature = models.FloatField(blank=False,validators=[MinValueValidator(0)])
finalTemperature = models.FloatField(blank=False,validators=[MinValueValidator(0)])
#class Meta:
#unique_together = ("initialTemperature","finalTemperature")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid )
class galvanotaxisTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True, default='')
ELECTRICSHOCK = 'ES'
GALVANOTAXISOPTIONS = (
(ELECTRICSHOCK,"Electric shocks"),
)
galvanotaxisType = models.CharField(max_length=60, blank=False,choices=GALVANOTAXISOPTIONS, default=ELECTRICSHOCK)
electricShockConf = models.ForeignKey("electricShockType_model", blank=True, null=True)
def __unicode__(self):
return "id: %s" % (self.uuid )
class phototaxisTimet0tot1Type_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
POINTSOURCELIGHT = 'PSL'
PHOTOTAXISOPTIONS = (
(POINTSOURCELIGHT,"pointsourcelight"),
)
phototaxisType = models.CharField(max_length=60, blank=False,choices=PHOTOTAXISOPTIONS, default=POINTSOURCELIGHT)
pointSourceLightConf = models.ForeignKey("pointSourceLightType_model", blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
# Experiment wide experiment type
class experimentWideConfType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True, default='No description provided')
MECHANOSENSATION ='MS'
CHEMOTAXIS = 'CT'
TERMOTAXIS = 'TT'
GALVANOTAXIS = 'GT'
PHOTOTAXIS = 'PT'
EXPERIMENTCATEGORY = (
(MECHANOSENSATION,"mechanosensation"),
(CHEMOTAXIS,"chemotaxis"),
(TERMOTAXIS,"termotaxis"),
(GALVANOTAXIS,"galvanotaxis"),
(PHOTOTAXIS,"phototaxis"),
)
experimentCategory = models.CharField(max_length=60, blank=False,choices=EXPERIMENTCATEGORY, default=MECHANOSENSATION)
#GE: Revise to force the user to fill one of the followings
mechanosensation = models.ForeignKey("mechanosensationExpWideType_model", blank=True, null=True)
chemotaxis = models.ForeignKey("chemotaxisExperimentWideType_model", blank=True, null=True)
termotaxis = models.ForeignKey("termotaxisExperimentWideType_model", blank=True, null=True)
galvanotaxis = models.ForeignKey("galvanotaxisExperimentWideType_model", blank=True, null=True)
phototaxis = models.ForeignKey("phototaxisExperimentWideType_model", blank=True, null=True)
#class Meta:
#unique_together = ("mechanosensation","chemotaxis","termotaxis","galvanotaxis","phototaxis")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class mechanosensationExpWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class termotaxisExperimentWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
LINEARTHERMALGRADIENT = 'LT'
TERMOTAXIS = (
(LINEARTHERMALGRADIENT,"linearThermalGradient"),
)
termotaxisType = models.CharField(max_length=60, blank=False,choices=TERMOTAXIS, default=LINEARTHERMALGRADIENT)
linearThermalGradient = models.ForeignKey("linearThermalGradientType_model",blank=True, null=True)
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class linearThermalGradientType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
temperatureRightHorizonal = models.FloatField(blank=False)
temperatureLeftHorizontal = models.FloatField(blank=False)
#class Meta:
#unique_together = ("temperatureRightHorizonal","temperatureLeftHorizontal")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisExperimentWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
description = models.TextField(max_length=1000, blank=True)
STATICPOINTSOURCE = 'SPS'
CHEMICALQUADRANTS1 = 'CQ1'
CHEMICALQUADRANTS2 = 'CQ2'
CHEMICALQUADRANTS4 = 'CQ4'
OSMOTICRING = 'OR'
CHEMICALCATEGORY = (
(STATICPOINTSOURCE,"Static point source"),
(CHEMICALQUADRANTS1,"chemicalquadrants1"),
(CHEMICALQUADRANTS2,"chemicalquadrants2"),
(CHEMICALQUADRANTS4,"chemicalquadrants4"),
(OSMOTICRING,"osmoticring"),
)
chemicalCategory = models.CharField(max_length=60, blank=False,choices=CHEMICALCATEGORY, default=CHEMICALQUADRANTS1)
staticPointSourceConf = models.ForeignKey("staticPointSourceType_model", blank=True, null=True)
chemotaxisQuadrants1 = models.ForeignKey("chemotaxisQuadrantsType_1_model", blank=True, null=True)
chemotaxisQuadrants2 = models.ForeignKey("chemotaxisQuadrantsType_2_model", blank=True, null=True)
chemotaxisQuadrants4 = models.ForeignKey("chemotaxisQuadrantsType_4_model", blank=True, null=True)
osmoticRing = models.ForeignKey("osmoticRingType_model", blank=True, null=True)
#class Meta:
#unique_together = ("chemicalCategory","chemotaxisQuadrants","osmoticRing")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisQuadrantsType_1_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
quadrantChemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_1_1', blank=False)
quadrantChemicalConcentration = models.FloatField(blank=False) #Provide in 1 mol / l = Molar = 1M
#class Meta:
#unique_together = ("quadrantsPlacement","numberOfQuadrants","quadrantChemical","quadrantBarrierChemical","quadrantChemicalConcentration","quadrantBarrierChemicalConcentration" )
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisQuadrantsType_2_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
quadrant_1_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_2_1', blank=False)
quadrant_2_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_2_2', blank=False)
quadrant_1_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrant_2_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrantBarrierChemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_2_Barrier', blank=False)
quadrantBarrierChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
#class Meta:
#unique_together = ("quadrantsPlacement","numberOfQuadrants","quadrantChemical","quadrantBarrierChemical","quadrantChemicalConcentration","quadrantBarrierChemicalConcentration" )
def __unicode__(self):
return "id: %s" % (self.uuid, )
class chemotaxisQuadrantsType_4_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
quadrant_1_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_1', blank=False)
quadrant_2_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_2', blank=False)
quadrant_3_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_3', blank=False)
quadrant_4_Chemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_4', blank=False)
quadrant_1_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrant_2_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrant_3_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrant_4_ChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
quadrantBarrierChemical = models.ForeignKey("chemicalType_model",related_name='access_quadrant_4_Barrier', blank=False)
quadrantBarrierChemicalConcentration = models.FloatField(blank=False)#Provide in 1 mol / l = Molar = 1M
#class Meta:
#unique_together = ("quadrantsPlacement","numberOfQuadrants","quadrantChemical","quadrantBarrierChemical","quadrantChemicalConcentration","quadrantBarrierChemicalConcentration" )
def __unicode__(self):
return "id: %s" % (self.uuid, )
class osmoticRingType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
ringChemical = models.ForeignKey("chemicalType_model", blank=False)
chemicalConcentration = models.FloatField(blank=False) #Provide in 1 mol / l = Molar = 1M
internalRadius = models.FloatField(blank=False,validators=[MinValueValidator(0)])
externalRadius = models.FloatField(blank=False,validators=[MinValueValidator(0)])
#class Meta:
#unique_together = ("ringChemical","chemicalConcentration","externalRadius","internalRadius")
def __unicode__(self):
return "id: %s" % (self.uuid, )
class galvanotaxisExperimentWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class phototaxisExperimentWideType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
#Add a type selector if an experiment type of this is added
#Add a foreign key to the defined experiment model
#class Meta:
#unique_together = ()
def __unicode__(self):
return "id: %s" % (self.uuid, )
class shareBehaviouralExperiment(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid)
user = models.ForeignKey(User)
behaviouralExperiment = models.ForeignKey (behaviourExperimentType_model)
shared_date = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = ("user","behaviouralExperiment")
def __unicode__(self):
return "id: %s_%s" % (self.user,self.behaviouralExperiment )<|fim▁end|> | class chemotaxisTimeEventType_model(models.Model):
uuid = models.CharField(('Unique Identifier'), max_length=36, primary_key=True, default=generate_new_uuid) |
<|file_name|>helloWorldController.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
app.get('/hello/world', function(request, response){
var responseObject = { "hello": "world"}
response.send(responseObject);
});
};
module.exports = helloWorldController;<|fim▁end|> | var helloWorldController = function(app){ |
<|file_name|>good-edit.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnDestroy, OnInit, Self } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Subscription, BehaviorSubject, combineLatest } from 'rxjs';
import { switchMap, map } from 'rxjs/operators';
import { MatSnackBar } from '@angular/material';
import { ErrorService, ContextService, NavigationService, NavigationActivatedRoute, MetaService } from '../../../../../angular';
import { Good, Facility, Locale, ProductCategory, ProductType, Organisation, Brand, Model, VendorProduct, VatRate, Ownership, InternalOrganisation, Part, GoodIdentificationType, ProductNumber } from '../../../../../domain';
import { PullRequest, Sort, Equals } from '../../../../../framework';
import { MetaDomain } from '../../../../../meta';
import { Fetcher } from '../../Fetcher';
import { StateService } from '../../../..';
@Component({
templateUrl: './good-edit.component.html',
providers: [ContextService]
})
export class GoodEditComponent implements OnInit, OnDestroy {
m: MetaDomain;
good: Good;
add: boolean;
edit: boolean;
subTitle: string;
facility: Facility;
locales: Locale[];
categories: ProductCategory[];
productTypes: ProductType[];
manufacturers: Organisation[];
brands: Brand[];
selectedBrand: Brand;
models: Model[];
selectedModel: Model;
vendorProduct: VendorProduct;
vatRates: VatRate[];
ownerships: Ownership[];
organisations: Organisation[];
addBrand = false;
addModel = false;
parts: Part[];
goodIdentificationTypes: GoodIdentificationType[];
productNumber: ProductNumber;
private subscription: Subscription;
private refresh$: BehaviorSubject<Date>;
private fetcher: Fetcher;
constructor(
@Self() public allors: ContextService,
public navigationService: NavigationService,
public metaService: MetaService,
private errorService: ErrorService,
private route: ActivatedRoute,
private snackBar: MatSnackBar,
private stateService: StateService) {
this.m = this.metaService.m;
this.refresh$ = new BehaviorSubject<Date>(undefined);
this.fetcher = new Fetcher(this.stateService, this.metaService.pull);
}
public ngOnInit(): void {
const { m, pull, x } = this.metaService;
this.subscription = combineLatest(this.route.url, this.refresh$, this.stateService.internalOrganisationId$)
.pipe(
switchMap(([, refresh, internalOrganisationId]) => {
const navRoute = new NavigationActivatedRoute(this.route);
const id = navRoute.id();
const add = !id;
let pulls = [
this.fetcher.locales,
this.fetcher.internalOrganisation,
pull.VatRate(),
pull.GoodIdentificationType(),
pull.ProductCategory({ sort: new Sort(m.ProductCategory.Name) }),
pull.Part({
include: {
Brand: x,
Model: x
},
sort: new Sort(m.Part.Name),
})
];
if (!add) {
pulls = [
...pulls,
pull.Good({
object: id,
include: {
Part: {
Brand: x,
Model: x
},
PrimaryPhoto: x,
ProductCategories: x,
GoodIdentifications: x,
Photos: x,
ElectronicDocuments: x,
LocalisedNames: {
Locale: x,
},
LocalisedDescriptions: {
Locale: x,
},
LocalisedComments: {
Locale: x,
},
},
}),
];
}
return this.allors.context.load('Pull', new PullRequest({ pulls }))
.pipe(
map((loaded) => ({ loaded, add }))
);
})
)
.subscribe(({ loaded, add }) => {
this.allors.context.reset();
const internalOrganisation = loaded.objects.InternalOrganisation as InternalOrganisation;
this.facility = internalOrganisation.DefaultFacility;
this.good = loaded.objects.Good as Good;
this.categories = loaded.collections.ProductCategories as ProductCategory[];
this.parts = loaded.collections.Parts as Part[];
this.vatRates = loaded.collections.VatRates as VatRate[];
this.goodIdentificationTypes = loaded.collections.GoodIdentificationTypes as GoodIdentificationType[];
this.locales = loaded.collections.AdditionalLocales as Locale[];
const vatRateZero = this.vatRates.find((v: VatRate) => v.Rate === 0);
const goodNumberType = this.goodIdentificationTypes.find((v) => v.UniqueId === 'b640630d-a556-4526-a2e5-60a84ab0db3f');
if (add) {
this.add = !(this.edit = false);
this.good = this.allors.context.create('Good') as Good;
this.good.VatRate = vatRateZero;
this.productNumber = this.allors.context.create('ProductNumber') as ProductNumber;
this.productNumber.GoodIdentificationType = goodNumberType;
this.good.AddGoodIdentification(this.productNumber);
this.vendorProduct = this.allors.context.create('VendorProduct') as VendorProduct;
this.vendorProduct.Product = this.good;
this.vendorProduct.InternalOrganisation = internalOrganisation;
} else {
this.edit = !(this.add = false);
this.productNumber = this.good.GoodIdentifications.find(v => v.GoodIdentificationType === goodNumberType);
}
},
(error: any) => {
this.errorService.handle(error);
this.navigationService.back();
},
);<|fim▁hole|> if (this.subscription) {
this.subscription.unsubscribe();
}
}
public refresh(): void {
this.refresh$.next(new Date());
}
public save(): void {
this.allors.context.save()
.subscribe(() => {
this.navigationService.back();
},
(error: Error) => {
this.errorService.handle(error);
});
}
public update(): void {
this.allors.context.save()
.subscribe(() => {
this.snackBar.open('Successfully saved.', 'close', { duration: 5000 });
this.goBack();
},
(error: Error) => {
this.errorService.handle(error);
});
}
public goBack(): void {
window.history.back();
}
}<|fim▁end|> | }
public ngOnDestroy(): void { |
<|file_name|>defs.rs<|end_file_name|><|fim▁begin|>//-
// Copyright (c) 2016, 2017, Jason Lingle
//
// This file is part of Ensync.
//
// Ensync is free software: you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the Free Software
// Foundation, either version 3 of the License, or (at your option) any later
// version.
//
// Ensync is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
// FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
// details.
//
// You should have received a copy of the GNU General Public License along with
// Ensync. If not, see <http://www.gnu.org/licenses/>.
use std::ffi::{OsStr, OsString};
use std::fmt;
/// Type for content hashes of regular files and for blob identifiers on the
/// server.
///
/// In practise, this is a 256-bit SHA-3 sum.
pub type HashId = [u8; 32];
/// The sentinal hash value indicating an uncomputed hash.
///
/// One does not compare hashes against this, since the hashes on files can be
/// out-of-date anyway and must be computed when the file is uploaded in any
/// case.
pub const UNKNOWN_HASH: HashId = [0; 32];
/// The name of the directory which is a sibling to the configuration and which
/// is the root of Ensync's private data.
pub const PRIVATE_DIR_NAME: &'static str = "internal.ensync";
/// Prefix of invasive temporary files (i.e., those created implicitly by the
/// sync process).
pub const INVASIVE_TMP_PREFIX: &'static str = "ensync_tmp_";
/// Wraps a `HashId` to display it in hexadecimal format.
#[derive(Clone, Copy)]
pub struct DisplayHash(pub HashId);
impl fmt::Display for DisplayHash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}\
{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}\
{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}\
{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}",
self.0[0],
self.0[1],
self.0[2],
self.0[3],
self.0[4],
self.0[5],
self.0[6],
self.0[7],
self.0[8],
self.0[9],
self.0[10],
self.0[11],
self.0[12],
self.0[13],
self.0[14],
self.0[15],
self.0[16],
self.0[17],
self.0[18],
self.0[19],
self.0[20],
self.0[21],
self.0[22],
self.0[23],
self.0[24],
self.0[25],
self.0[26],
self.0[27],
self.0[28],
self.0[29],
self.0[30],
self.0[31]
)
}
}
// These were originally defined to `mode_t`, `off_t`, `time_t`, and `ino_t`
// when we planned to use the POSIX API directly.
pub type FileMode = u32;
pub type FileSize = u64;
pub type FileTime = i64;
pub type FileInode = u64;
/// Shallow data about a file in the sync process, excluding its name.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FileData {
/// A directory. The only immediate data is its mode. In a file stream, the
/// receiver must either push the new directory or request it to be
/// discarded.
Directory(FileMode),
/// A regular file. Data is mode, size in bytes, last modified, content
/// hash. Note that the content hash may be incorrect, and always will be
/// for files freshly streamed off the client filesystem.
Regular(FileMode, FileSize, FileTime, HashId),
/// A symbolic link. The only data is its actual content.
Symlink(OsString),
/// Any other type of non-regular file.
Special,
}
impl FileData {
/// If both `self` and `other` have a `FileMode`, set `self`'s mode to
/// `other`'s.
pub fn transrich_unix_mode(&mut self, other: &FileData) {
match *self {
FileData::Directory(ref mut dst)
| FileData::Regular(ref mut dst, _, _, _) => match *other {
FileData::Directory(src) | FileData::Regular(src, _, _, _) => {
*dst = src
}
_ => (),
},
_ => (),
}
}
/// Returns whether this `FileData` is a directory.
pub fn is_dir(&self) -> bool {
match *self {<|fim▁hole|> _ => false,
}
}
/// Returns whether both `self` and `other` are regular files and `self`'s
/// modification time is greater than `other`'s.
pub fn newer_than(&self, other: &Self) -> bool {
match (self, other) {
(
&FileData::Regular(_, _, tself, _),
&FileData::Regular(_, _, tother, _),
) => tself > tother,
_ => false,
}
}
/// Returns whether this file object and another one represent the same
/// content.
///
/// This is slightly less strict than a full equality test, ignoring some
/// of the fields for regular files.
pub fn matches(&self, that: &FileData) -> bool {
use self::FileData::*;
match (self, that) {
(&Directory(m1), &Directory(m2)) => m1 == m2,
(&Regular(m1, _, t1, ref h1), &Regular(m2, _, t2, ref h2)) => {
m1 == m2 && t1 == t2 && *h1 == *h2
}
(&Symlink(ref t1), &Symlink(ref t2)) => *t1 == *t2,
(&Special, &Special) => true,
_ => false,
}
}
/// Returns whether non-metadata about this file and another one match.
pub fn matches_data(&self, that: &FileData) -> bool {
use self::FileData::*;
match (self, that) {
(&Directory(m1), &Directory(m2)) => m1 == m2,
(&Regular(m1, _, _, ref h1), &Regular(m2, _, _, ref h2)) => {
m1 == m2 && *h1 == *h2
}
(&Symlink(ref t1), &Symlink(ref t2)) => *t1 == *t2,
(&Special, &Special) => true,
_ => false,
}
}
/// Returns whether this file object and another one have the same content
/// except for file mode.
pub fn matches_content(&self, that: &FileData) -> bool {
use self::FileData::*;
match (self, that) {
(&Directory(_), &Directory(_)) => true,
(&Regular(_, _, _, ref h1), &Regular(_, _, _, ref h2)) => {
*h1 == *h2
}
(&Symlink(ref t1), &Symlink(ref t2)) => *t1 == *t2,
(&Special, &Special) => true,
_ => false,
}
}
}
/// Convenience for passing a file name and data together.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct File<'a>(pub &'a OsStr, pub &'a FileData);
pub fn is_dir(fd: Option<&FileData>) -> bool {
match fd {
Some(&FileData::Directory(_)) => true,
_ => false,
}
}
#[cfg(test)]
pub mod test_helpers {
use std::ffi::{OsStr, OsString};
pub fn oss(s: &str) -> OsString {
OsStr::new(s).to_owned()
}
}
#[cfg(test)]
mod test {
use super::test_helpers::*;
use super::*;
#[test]
fn file_newer_than() {
let older = FileData::Regular(0o777, 0, 42, [1; 32]);
let newer = FileData::Regular(0o666, 0, 56, [2; 32]);
assert!(newer.newer_than(&older));
assert!(!older.newer_than(&newer));
assert!(!FileData::Special.newer_than(&older));
assert!(!newer.newer_than(&FileData::Special));
}
#[test]
fn file_matches() {
let f1 = FileData::Regular(0o777, 0, 42, [1; 32]);
let f2 = FileData::Regular(0o666, 0, 56, [1; 32]);
let f3 = FileData::Regular(0o777, 0, 42, [2; 32]);
let f4 = FileData::Regular(0o777, 0, 42, [1; 32]);
let d1 = FileData::Directory(0o777);
let d2 = FileData::Directory(0o666);
let s1 = FileData::Symlink(oss("foo"));
let s2 = FileData::Symlink(oss("bar"));
let s3 = FileData::Symlink(oss("foo"));
let special = FileData::Special;
assert!(f1.matches(&f1));
assert!(f1.matches(&f4));
assert!(!f1.matches(&f2));
assert!(!f1.matches(&f3));
assert!(!f1.matches(&d1));
assert!(!f1.matches(&s1));
assert!(!f1.matches(&special));
assert!(d1.matches(&d1));
assert!(!d1.matches(&d2));
assert!(!d1.matches(&f1));
assert!(s1.matches(&s1));
assert!(s1.matches(&s3));
assert!(!s1.matches(&s2));
assert!(!s1.matches(&special));
assert!(special.matches(&special));
assert!(!special.matches(&f1));
assert!(f1.matches_content(&f1));
assert!(f1.matches_content(&f4));
assert!(f1.matches_content(&f2));
assert!(!f1.matches_content(&f3));
assert!(!f1.matches_content(&d1));
assert!(!f1.matches_content(&s1));
assert!(!f1.matches_content(&special));
assert!(d1.matches_content(&d1));
assert!(d1.matches_content(&d2));
assert!(!d1.matches_content(&f1));
assert!(s1.matches_content(&s1));
assert!(s1.matches_content(&s3));
assert!(!s1.matches_content(&s2));
assert!(!s1.matches_content(&special));
assert!(special.matches_content(&special));
assert!(!special.matches_content(&f1));
}
}<|fim▁end|> | FileData::Directory(_) => true, |
<|file_name|>usa2Low.min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:bdecbb5008fcaa6e28cdb99aada4e0c3dc8dfa6bd53bb4d6e63005c7538ee886<|fim▁hole|>size 30020<|fim▁end|> | |
<|file_name|>search_results.js<|end_file_name|><|fim▁begin|>var Entry = require("entry");
var reds = require("reds");
var transliterate = require("transliteration");
module.exports = function (req, callback) {
var blogID = req.blog.id;
// We couldn't find a search query
if (!req.query.q) {
return callback(null, []);
}
var q = transliterate(req.query.q);
var search = reds.createSearch("blog:" + blogID + ":search");
<|fim▁hole|>
for (var i in ids) ids[i] = parseFloat(ids[i]);
Entry.get(blogID, ids, function (entries) {
return callback(null, entries);
});
});
};<|fim▁end|> | search.query(q).end(function (err, ids) {
if (err) return callback(err); |
<|file_name|>problems.rs<|end_file_name|><|fim▁begin|>#[derive(PartialEq, Debug)]
pub enum ProblemType {
MAX,<|fim▁hole|><|fim▁end|> | MIN,
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.