prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>make.js<|end_file_name|><|fim▁begin|>// Make screen: lists different control activities and allows selecting which one to use
var MakeScreen = Screen.extend({
enter: function(){
// Display this screen
this.display('make_screen');
// Setup button clicks
this.html.find(".btn-play").off().click(function(){ fabrica.navigation.go("/make/play"); });
this.html.find(".btn-upload").off().click(function(){ fabrica.navigation.go("/make/upload"); });
},
});<|fim▁hole|><|fim▁end|> |
screens.make = new MakeScreen(); |
<|file_name|>EXI_DeviceMemoryCard.cpp<|end_file_name|><|fim▁begin|>// Copyright 2008 Dolphin Emulator Project
// Licensed under GPLv2+
// Refer to the license.txt file included.
#include "Core/HW/EXI/EXI_DeviceMemoryCard.h"
#include <array>
#include <cstring>
#include <memory>
#include <string>
#include "Common/ChunkFile.h"
#include "Common/CommonPaths.h"
#include "Common/CommonTypes.h"
#include "Common/FileUtil.h"
#include "Common/IniFile.h"
#include "Common/Logging/Log.h"
#include "Common/NandPaths.h"
#include "Common/StringUtil.h"
#include "Core/ConfigManager.h"
#include "Core/CoreTiming.h"
#include "Core/HW/EXI/EXI.h"
#include "Core/HW/EXI/EXI_Channel.h"
#include "Core/HW/EXI/EXI_Device.h"
#include "Core/HW/GCMemcard/GCMemcard.h"
#include "Core/HW/GCMemcard/GCMemcardDirectory.h"
#include "Core/HW/GCMemcard/GCMemcardRaw.h"
#include "Core/HW/Memmap.h"
#include "Core/HW/Sram.h"
#include "Core/HW/SystemTimers.h"
#include "Core/Movie.h"
#include "DiscIO/Enums.h"
#include "DiscIO/NANDContentLoader.h"
namespace ExpansionInterface
{
#define MC_STATUS_BUSY 0x80
#define MC_STATUS_UNLOCKED 0x40
#define MC_STATUS_SLEEP 0x20
#define MC_STATUS_ERASEERROR 0x10
#define MC_STATUS_PROGRAMEERROR 0x08
#define MC_STATUS_READY 0x01
#define SIZE_TO_Mb (1024 * 8 * 16)
static const u32 MC_TRANSFER_RATE_READ = 512 * 1024;
static const u32 MC_TRANSFER_RATE_WRITE = (u32)(96.125f * 1024.0f);
static std::array<CoreTiming::EventType*, 2> s_et_cmd_done;
static std::array<CoreTiming::EventType*, 2> s_et_transfer_complete;
// Takes care of the nasty recovery of the 'this' pointer from card_index,
// stored in the userdata parameter of the CoreTiming event.
void CEXIMemoryCard::EventCompleteFindInstance(u64 userdata,
std::function<void(CEXIMemoryCard*)> callback)
{
int card_index = (int)userdata;
CEXIMemoryCard* pThis =
(CEXIMemoryCard*)ExpansionInterface::FindDevice(EXIDEVICE_MEMORYCARD, card_index);
if (pThis == nullptr)
{
pThis = (CEXIMemoryCard*)ExpansionInterface::FindDevice(EXIDEVICE_MEMORYCARDFOLDER, card_index);
}
if (pThis)
{
callback(pThis);
}
}
void CEXIMemoryCard::CmdDoneCallback(u64 userdata, s64 cyclesLate)
{
EventCompleteFindInstance(userdata, [](CEXIMemoryCard* instance) { instance->CmdDone(); });
}
void CEXIMemoryCard::TransferCompleteCallback(u64 userdata, s64 cyclesLate)
{
EventCompleteFindInstance(userdata,
[](CEXIMemoryCard* instance) { instance->TransferComplete(); });
}
void CEXIMemoryCard::Init()
{
static constexpr char DONE_PREFIX[] = "memcardDone";
static constexpr char TRANSFER_COMPLETE_PREFIX[] = "memcardTransferComplete";
static_assert(s_et_cmd_done.size() == s_et_transfer_complete.size(), "Event array size differs");
for (unsigned int i = 0; i < s_et_cmd_done.size(); ++i)
{
std::string name = DONE_PREFIX;
name += static_cast<char>('A' + i);
s_et_cmd_done[i] = CoreTiming::RegisterEvent(name, CmdDoneCallback);
name = TRANSFER_COMPLETE_PREFIX;
name += static_cast<char>('A' + i);
s_et_transfer_complete[i] = CoreTiming::RegisterEvent(name, TransferCompleteCallback);
}
}
void CEXIMemoryCard::Shutdown()
{
s_et_cmd_done.fill(nullptr);
s_et_transfer_complete.fill(nullptr);
}
CEXIMemoryCard::CEXIMemoryCard(const int index, bool gciFolder) : card_index(index)
{
_assert_msg_(EXPANSIONINTERFACE, static_cast<std::size_t>(index) < s_et_cmd_done.size(),
"Trying to create invalid memory card index %d.", index);
// NOTE: When loading a save state, DMA completion callbacks (s_et_transfer_complete) and such
// may have been restored, we need to anticipate those arriving.
interruptSwitch = 0;
m_bInterruptSet = 0;
command = 0;
status = MC_STATUS_BUSY | MC_STATUS_UNLOCKED | MC_STATUS_READY;
m_uPosition = 0;
memset(programming_buffer, 0, sizeof(programming_buffer));
// Nintendo Memory Card EXI IDs
// 0x00000004 Memory Card 59 4Mbit
// 0x00000008 Memory Card 123 8Mb
// 0x00000010 Memory Card 251 16Mb
// 0x00000020 Memory Card 507 32Mb
// 0x00000040 Memory Card 1019 64Mb
// 0x00000080 Memory Card 2043 128Mb
// 0x00000510 16Mb "bigben" card
// card_id = 0xc243;
card_id = 0xc221; // It's a Nintendo brand memcard
// The following games have issues with memory cards bigger than 16Mb
// Darkened Skye GDQE6S GDQP6S
// WTA Tour Tennis GWTEA4 GWTJA4 GWTPA4
// Disney Sports : Skate Boarding GDXEA4 GDXPA4 GDXJA4
// Disney Sports : Soccer GDKEA4
// Wallace and Gromit in Pet Zoo GWLE6L GWLX6L
// Use a 16Mb (251 block) memory card for these games
bool useMC251;
IniFile gameIni = SConfig::GetInstance().LoadGameIni();
gameIni.GetOrCreateSection("Core")->Get("MemoryCard251", &useMC251, false);
u16 sizeMb = useMC251 ? MemCard251Mb : MemCard2043Mb;
if (gciFolder)
{
SetupGciFolder(sizeMb);
}
else
{
SetupRawMemcard(sizeMb);
}
memory_card_size = memorycard->GetCardId() * SIZE_TO_Mb;
u8 header[20] = { 0 };
memorycard->Read(0, static_cast<s32>(ArraySize(header)), header);
SetCardFlashID(header, card_index);
}
void CEXIMemoryCard::SetupGciFolder(u16 sizeMb)
{
DiscIO::Region region = SConfig::GetInstance().m_region;
const std::string& game_id = SConfig::GetInstance().GetGameID();
u32 CurrentGameId = 0;
if (game_id.length() >= 4 && game_id != "00000000" && game_id != TITLEID_SYSMENU_STRING)
CurrentGameId = BE32((u8*)game_id.c_str());
const bool shift_jis = region == DiscIO::Region::NTSC_J;
std::string strDirectoryName = File::GetUserPath(D_GCUSER_IDX);
if (Movie::IsPlayingInput() && Movie::IsConfigSaved() && Movie::IsUsingMemcard(card_index) &&
Movie::IsStartingFromClearSave())
strDirectoryName += "Movie" DIR_SEP;
strDirectoryName = strDirectoryName + SConfig::GetDirectoryForRegion(region) + DIR_SEP +
StringFromFormat("Card %c", 'A' + card_index);
if (!File::Exists(strDirectoryName)) // first use of memcard folder, migrate automatically
{
MigrateFromMemcardFile(strDirectoryName + DIR_SEP, card_index);
}
else if (!File::IsDirectory(strDirectoryName))
{
if (File::Rename(strDirectoryName, strDirectoryName + ".original"))
{
PanicAlertT("%s was not a directory, moved to *.original", strDirectoryName.c_str());
MigrateFromMemcardFile(strDirectoryName + DIR_SEP, card_index);
}
else // we tried but the user wants to crash
{
// TODO more user friendly abort<|fim▁hole|> strDirectoryName.c_str());
exit(0);
}
}
memorycard = std::make_unique<GCMemcardDirectory>(strDirectoryName + DIR_SEP, card_index, sizeMb,
shift_jis, region, CurrentGameId);
}
void CEXIMemoryCard::SetupRawMemcard(u16 sizeMb)
{
std::string filename = (card_index == 0) ? SConfig::GetInstance().m_strMemoryCardA :
SConfig::GetInstance().m_strMemoryCardB;
if (Movie::IsPlayingInput() && Movie::IsConfigSaved() && Movie::IsUsingMemcard(card_index) &&
Movie::IsStartingFromClearSave())
filename = File::GetUserPath(D_GCUSER_IDX) +
StringFromFormat("Movie%s.raw", (card_index == 0) ? "A" : "B");
if (sizeMb == MemCard251Mb)
{
filename.insert(filename.find_last_of("."), ".251");
}
memorycard = std::make_unique<MemoryCard>(filename, card_index, sizeMb);
}
CEXIMemoryCard::~CEXIMemoryCard()
{
CoreTiming::RemoveEvent(s_et_cmd_done[card_index]);
CoreTiming::RemoveEvent(s_et_transfer_complete[card_index]);
}
bool CEXIMemoryCard::UseDelayedTransferCompletion() const
{
return true;
}
bool CEXIMemoryCard::IsPresent() const
{
return true;
}
void CEXIMemoryCard::CmdDone()
{
status |= MC_STATUS_READY;
status &= ~MC_STATUS_BUSY;
m_bInterruptSet = 1;
ExpansionInterface::UpdateInterrupts();
}
void CEXIMemoryCard::TransferComplete()
{
// Transfer complete, send interrupt
ExpansionInterface::GetChannel(card_index)->SendTransferComplete();
}
void CEXIMemoryCard::CmdDoneLater(u64 cycles)
{
CoreTiming::RemoveEvent(s_et_cmd_done[card_index]);
CoreTiming::ScheduleEvent((int)cycles, s_et_cmd_done[card_index], (u64)card_index);
}
void CEXIMemoryCard::SetCS(int cs)
{
if (cs) // not-selected to selected
{
m_uPosition = 0;
}
else
{
switch (command)
{
case cmdSectorErase:
if (m_uPosition > 2)
{
memorycard->ClearBlock(address & (memory_card_size - 1));
status |= MC_STATUS_BUSY;
status &= ~MC_STATUS_READY;
//???
CmdDoneLater(5000);
}
break;
case cmdChipErase:
if (m_uPosition > 2)
{
// TODO: Investigate on HW, I (LPFaint99) believe that this only
// erases the system area (Blocks 0-4)
memorycard->ClearAll();
status &= ~MC_STATUS_BUSY;
}
break;
case cmdPageProgram:
if (m_uPosition >= 5)
{
int count = m_uPosition - 5;
int i = 0;
status &= ~0x80;
while (count--)
{
memorycard->Write(address, 1, &(programming_buffer[i++]));
i &= 127;
address = (address & ~0x1FF) | ((address + 1) & 0x1FF);
}
CmdDoneLater(5000);
}
break;
}
}
}
bool CEXIMemoryCard::IsInterruptSet()
{
if (interruptSwitch)
return m_bInterruptSet;
return false;
}
void CEXIMemoryCard::TransferByte(u8& byte)
{
DEBUG_LOG(EXPANSIONINTERFACE, "EXI MEMCARD: > %02x", byte);
if (m_uPosition == 0)
{
command = byte; // first byte is command
byte = 0xFF; // would be tristate, but we don't care.
switch (command) // This seems silly, do we really need it?
{
case cmdNintendoID:
case cmdReadArray:
case cmdArrayToBuffer:
case cmdSetInterrupt:
case cmdWriteBuffer:
case cmdReadStatus:
case cmdReadID:
case cmdReadErrorBuffer:
case cmdWakeUp:
case cmdSleep:
case cmdClearStatus:
case cmdSectorErase:
case cmdPageProgram:
case cmdExtraByteProgram:
case cmdChipErase:
DEBUG_LOG(EXPANSIONINTERFACE, "EXI MEMCARD: command %02x at position 0. seems normal.",
command);
break;
default:
WARN_LOG(EXPANSIONINTERFACE, "EXI MEMCARD: command %02x at position 0", command);
break;
}
if (command == cmdClearStatus)
{
status &= ~MC_STATUS_PROGRAMEERROR;
status &= ~MC_STATUS_ERASEERROR;
status |= MC_STATUS_READY;
m_bInterruptSet = 0;
byte = 0xFF;
m_uPosition = 0;
}
}
else
{
switch (command)
{
case cmdNintendoID:
//
// Nintendo card:
// 00 | 80 00 00 00 10 00 00 00
// "bigben" card:
// 00 | ff 00 00 05 10 00 00 00 00 00 00 00 00 00 00
// we do it the Nintendo way.
if (m_uPosition == 1)
byte = 0x80; // dummy cycle
else
byte = (u8)(memorycard->GetCardId() >> (24 - (((m_uPosition - 2) & 3) * 8)));
break;
case cmdReadArray:
switch (m_uPosition)
{
case 1: // AD1
address = byte << 17;
byte = 0xFF;
break;
case 2: // AD2
address |= byte << 9;
break;
case 3: // AD3
address |= (byte & 3) << 7;
break;
case 4: // BA
address |= (byte & 0x7F);
break;
}
if (m_uPosition > 1) // not specified for 1..8, anyway
{
memorycard->Read(address & (memory_card_size - 1), 1, &byte);
// after 9 bytes, we start incrementing the address,
// but only the sector offset - the pointer wraps around
if (m_uPosition >= 9)
address = (address & ~0x1FF) | ((address + 1) & 0x1FF);
}
break;
case cmdReadStatus:
// (unspecified for byte 1)
byte = status;
break;
case cmdReadID:
if (m_uPosition == 1) // (unspecified)
byte = (u8)(card_id >> 8);
else
byte = (u8)((m_uPosition & 1) ? (card_id) : (card_id >> 8));
break;
case cmdSectorErase:
switch (m_uPosition)
{
case 1: // AD1
address = byte << 17;
break;
case 2: // AD2
address |= byte << 9;
break;
}
byte = 0xFF;
break;
case cmdSetInterrupt:
if (m_uPosition == 1)
{
interruptSwitch = byte;
}
byte = 0xFF;
break;
case cmdChipErase:
byte = 0xFF;
break;
case cmdPageProgram:
switch (m_uPosition)
{
case 1: // AD1
address = byte << 17;
break;
case 2: // AD2
address |= byte << 9;
break;
case 3: // AD3
address |= (byte & 3) << 7;
break;
case 4: // BA
address |= (byte & 0x7F);
break;
}
if (m_uPosition >= 5)
programming_buffer[((m_uPosition - 5) & 0x7F)] = byte; // wrap around after 128 bytes
byte = 0xFF;
break;
default:
WARN_LOG(EXPANSIONINTERFACE, "EXI MEMCARD: unknown command byte %02x", byte);
byte = 0xFF;
}
}
m_uPosition++;
DEBUG_LOG(EXPANSIONINTERFACE, "EXI MEMCARD: < %02x", byte);
}
void CEXIMemoryCard::DoState(PointerWrap& p)
{
// for movie sync, we need to save/load memory card contents (and other data) in savestates.
// otherwise, we'll assume the user wants to keep their memcards and saves separate,
// unless we're loading (in which case we let the savestate contents decide, in order to stay
// aligned with them).
bool storeContents = (Movie::IsMovieActive());
p.Do(storeContents);
if (storeContents)
{
p.Do(interruptSwitch);
p.Do(m_bInterruptSet);
p.Do(command);
p.Do(status);
p.Do(m_uPosition);
p.Do(programming_buffer);
p.Do(address);
memorycard->DoState(p);
p.Do(card_index);
}
}
IEXIDevice* CEXIMemoryCard::FindDevice(TEXIDevices device_type, int customIndex)
{
if (device_type != m_device_type)
return nullptr;
if (customIndex != card_index)
return nullptr;
return this;
}
// DMA reads are preceded by all of the necessary setup via IMMRead
// read all at once instead of single byte at a time as done by IEXIDevice::DMARead
void CEXIMemoryCard::DMARead(u32 _uAddr, u32 _uSize)
{
memorycard->Read(address, _uSize, Memory::GetPointer(_uAddr));
if ((address + _uSize) % BLOCK_SIZE == 0)
{
INFO_LOG(EXPANSIONINTERFACE, "reading from block: %x", address / BLOCK_SIZE);
}
// Schedule transfer complete later based on read speed
CoreTiming::ScheduleEvent(_uSize * (SystemTimers::GetTicksPerSecond() / MC_TRANSFER_RATE_READ),
s_et_transfer_complete[card_index], (u64)card_index);
}
// DMA writes are preceded by all of the necessary setup via IMMWrite
// write all at once instead of single byte at a time as done by IEXIDevice::DMAWrite
void CEXIMemoryCard::DMAWrite(u32 _uAddr, u32 _uSize)
{
memorycard->Write(address, _uSize, Memory::GetPointer(_uAddr));
if (((address + _uSize) % BLOCK_SIZE) == 0)
{
INFO_LOG(EXPANSIONINTERFACE, "writing to block: %x", address / BLOCK_SIZE);
}
// Schedule transfer complete later based on write speed
CoreTiming::ScheduleEvent(_uSize * (SystemTimers::GetTicksPerSecond() / MC_TRANSFER_RATE_WRITE),
s_et_transfer_complete[card_index], (u64)card_index);
}
} // namespace ExpansionInterface<|fim▁end|> | PanicAlertT("%s is not a directory, failed to move to *.original.\n Verify your "
"write permissions or move the file outside of Dolphin", |
<|file_name|>work_request_log_entry.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016, 2018, 2019, Oracle and/or its affiliates. All rights reserved.
// Code generated. DO NOT EDIT.
// Object Storage Service API
//
// Common set of Object Storage and Archive Storage APIs for managing buckets, objects, and related resources.
//
package objectstorage
import (
"github.com/oracle/oci-go-sdk/common"
)
// WorkRequestLogEntry The representation of WorkRequestLogEntry
type WorkRequestLogEntry struct {
// Human-readable log message.
Message *string `mandatory:"false" json:"message"`
// The date and time the log message was written, as described in
// RFC 3339 (https://tools.ietf.org/rfc/rfc3339), section 14.29.
Timestamp *common.SDKTime `mandatory:"false" json:"timestamp"`
}
<|fim▁hole|> return common.PointerString(m)
}<|fim▁end|> | func (m WorkRequestLogEntry) String() string { |
<|file_name|>asyncload.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Shows how to use BitmapManager to asynchronously load a Bitmap from a file.
Run this snippet providing a list of filenames of (high resolution) pictures:
$ ./asyncload.py /path/to/mypics/*.jpg anotherpic.png nonexistent.png
Press space to sequentially load the pictures. A rotating rectangle appears
during the time the picture file is being loaded to show how the main thread
is not affected by the load operation.
Press 'f' to display the frame time graph, which should show no significant
glitches while loading
'''
import sys
import libavg
from libavg import player
APP_RESOLUTION = (640, 480)
class AsyncLoadApp(libavg.AVGApp):
def init(self):
'''
Create placeholders for the example. A single ImageNode is used to show
the pictures.
'''
self.__imageNode = libavg.avg.ImageNode(pos=(10, 20), parent=self._parentNode)
self.__spinner = libavg.avg.RectNode(color='222222',
fillopacity=1, size=(40, 40), active=False,
pos=(10, self._parentNode.size.y - 50), parent=self._parentNode)
self.__infoNode = libavg.avg.WordsNode(text='Press space to load the first image',
fontsize=11, pos=(10, 5), parent=self._parentNode)
self.__pics = sys.argv[1:]
self.__currentPic = -1
player.subscribe(player.ON_FRAME, self.__onFrame)
def onKeyDown(self, event):
'''
Intercept a space keypress and trigger the request.
'''
if event.keystring == 'space':
self.__requestNextBitmap()
def __requestNextBitmap(self):
'''
Ask the BitmapManager to load a new file. loadBitmap() call returns immediately.
'''
self.__currentPic = (self.__currentPic + 1) % len(self.__pics)
libavg.avg.BitmapManager.get().loadBitmap(self.__pics[self.__currentPic],
self.__onBitmapLoaded)
self.__spinner.active = True
self.__spinner.angle = 0
def __onBitmapLoaded(self, bmp):
'''
This callback is invoked by BitmapManager, 'bmp' can be either a Bitmap instance
or a RuntimeError instance (hence checking for Exception is consistent).
'''
self.__spinner.active = False
if isinstance(bmp, Exception):
self.__infoNode.text = ('Error loading '
'image %s : %s' % (self.__pics[self.__currentPic], str(bmp)))
self.__imageNode.href = ''
else:
self.__infoNode.text = ('Loaded %s, '
'press space for the next one' % self.__pics[self.__currentPic])
self.__setBitmapAndResize(bmp)
def __setBitmapAndResize(self, bmp):
originalSize = bmp.getSize()
if originalSize.x > originalSize.y:
ratio = (APP_RESOLUTION[0] - 20) / originalSize.x
else:
ratio = (APP_RESOLUTION[1] - 40) / originalSize.y
self.__imageNode.setBitmap(bmp)
self.__imageNode.size = originalSize * ratio
def __onFrame(self):
if self.__spinner.active:
self.__spinner.angle += 0.05
if len(sys.argv) == 1:
print 'Usage: %s <filename> [<filename> [<filename> [..]]]' % sys.argv[0]
sys.exit(1)
AsyncLoadApp.start(resolution=APP_RESOLUTION)<|fim▁end|> | |
<|file_name|>parser.js<|end_file_name|><|fim▁begin|>'use strict';
if (/^((?!chrome).)*safari/i.test(navigator.userAgent)){
alert("We have detected you are using Safari. Please switch to Chrome or Firefox to properly use this app.");
}
var weekAbbrev = {
Mo: "monday",
Tu: "tuesday",
We: "wednesday",
Th: "thursday",
Fr: "friday",
Sa: "saturday",
Su: "sunday"
};
var badString = function(str){
return str == null || str.trim().length === 0;
}
//returns an ics object
var parseCourseworkString = function(){
<|fim▁hole|> var cs = document.getElementById("classes").value.trim(),
quarterLength = document.getElementById("weeks").value.trim(),
calObj = ics(),
startDate = document.getElementById("startDate").value.trim() + " ";
if(badString(cs)){ alert("Please copy paste in the Axess course table"); return; }
if(badString(startDate)){ alert("Please input start date in the MM/DD/YYYY format"); return; }
if(badString(quarterLength) || !_.isNumber(parseInt(quarterLength)) || parseInt(quarterLength) < 1){
alert("Please insert a valid number of weeks in the quarter.");
return;
}
var counter = 0;
//removes discrepancy between Firefox and Chrome copy pasting.
var prelimFilter = _.chain(cs.split("\n")).filter(function(row){
return row.trim().length > 0
}).value().join('\n').split('Academic Calendar Deadlines');
_.chain(prelimFilter).map(function(row){
return _.compact(row.split("\n"));
}).filter(function(items){
if(items.length != 6 && items.length > 3){
counter ++;
}
return items.length === 6;
}).map(function(items){
var name = items[0],
desc = items[1] + " Unit: " + items[2] + " Grading:" + items[3],
location = items[5],
timeObj = items[4].split(" "),
timeStart = new Date(startDate + timeObj[1].substr(0, timeObj[1].length - 2) + " " + timeObj[1].substr(-2)),
timeEnd = new Date(startDate + timeObj[3].substr(0, timeObj[3].length - 2) + " " + timeObj[3].substr(-2));
if(timeStart===null || timeEnd === null || timeStart.toString()==="Invalid Date" || timeEnd.toString()==="Invalid Date"){
alert("Please input a correct start date format of MM/DD/YYYY");
throw "Badly formatted Start Date (╯°□°)╯︵ ┻━┻";
}
var wkNumber = timeStart.getWeek(),
repeat = timeObj[0].match(/.{1,2}/g).join(','),
shiftedStart = Date.today().setWeek(wkNumber).sunday().last()[weekAbbrev[repeat.split(',')[0]]]().at(timeObj[1]), //Alterations to the dates because the library acts strangely
shiftedEnd = Date.today().setWeek(wkNumber).sunday().last()[weekAbbrev[repeat.split(',')[0]]]().at(timeObj[3]);
calObj.addEvent(name, desc, location, shiftedStart, shiftedEnd, repeat, quarterLength * repeat.split(',').length);
});
calObj.download("schedule", ".ics");
if(counter > 0){
alert(counter + (counter > 1 ? " classes ": " class ") + "failed to be exported. The formatting was weird.")
}
}<|fim▁end|> | |
<|file_name|>defines_3.js<|end_file_name|><|fim▁begin|>var searchData=
[
['ime_5faddr_5fmax',['IME_ADDR_MAX',['../_a_p_i_8h.html#a6d369ee1e214daea8bf939aa817b5d00',1,'API.h']]],
['input',['INPUT',['../_a_p_i_8h.html#a1bb283bd7893b9855e2f23013891fc82',1,'API.h']]],
['input_5fanalog',['INPUT_ANALOG',['../_a_p_i_8h.html#a877f7490feac007f3a904ece06afe87a',1,'API.h']]],
['input_5ffloating',['INPUT_FLOATING',['../_a_p_i_8h.html#ac31084f7ffdfd4325b3703718fce74ea',1,'API.h']]],
['interrupt_5fedge_5fboth',['INTERRUPT_EDGE_BOTH',['../_a_p_i_8h.html#ab0ce5d2283faeb80389f8b54a925a15b',1,'API.h']]],
['interrupt_5fedge_5ffalling',['INTERRUPT_EDGE_FALLING',['../_a_p_i_8h.html#a5d01e5bd9626ca29af3e1e9385e58427',1,'API.h']]],<|fim▁hole|><|fim▁end|> | ['interrupt_5fedge_5frising',['INTERRUPT_EDGE_RISING',['../_a_p_i_8h.html#a8bd8f2fe1b638ebff63e702d14880b12',1,'API.h']]]
]; |
<|file_name|>io.py<|end_file_name|><|fim▁begin|>import threading
from ctypes import POINTER, Structure, byref, c_char, c_char_p, c_int, c_size_t
from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom, check_sized_string, check_string,
)
from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p
from django.utils import six
from django.utils.encoding import force_bytes
# ### The WKB/WKT Reader/Writer structures and pointers ###
class WKTReader_st(Structure):
pass
class WKTWriter_st(Structure):
pass
class WKBReader_st(Structure):
pass
class WKBWriter_st(Structure):
pass
WKT_READ_PTR = POINTER(WKTReader_st)
WKT_WRITE_PTR = POINTER(WKTWriter_st)
WKB_READ_PTR = POINTER(WKBReader_st)
WKB_WRITE_PTR = POINTER(WKBReader_st)
# WKTReader routines
wkt_reader_create = GEOSFuncFactory('GEOSWKTReader_create', restype=WKT_READ_PTR)
wkt_reader_destroy = GEOSFuncFactory('GEOSWKTReader_destroy', argtypes=[WKT_READ_PTR])
wkt_reader_read = GEOSFuncFactory(
'GEOSWKTReader_read', argtypes=[WKT_READ_PTR, c_char_p], restype=GEOM_PTR, errcheck=check_geom
)
# WKTWriter routines
wkt_writer_create = GEOSFuncFactory('GEOSWKTWriter_create', restype=WKT_WRITE_PTR)
wkt_writer_destroy = GEOSFuncFactory('GEOSWKTWriter_destroy', argtypes=[WKT_WRITE_PTR])
wkt_writer_write = GEOSFuncFactory(
'GEOSWKTWriter_write', argtypes=[WKT_WRITE_PTR, GEOM_PTR], restype=geos_char_p, errcheck=check_string
)
wkt_writer_get_outdim = GEOSFuncFactory(
'GEOSWKTWriter_getOutputDimension', argtypes=[WKT_WRITE_PTR], restype=c_int
)
wkt_writer_set_outdim = GEOSFuncFactory(
'GEOSWKTWriter_setOutputDimension', argtypes=[WKT_WRITE_PTR, c_int]
)
wkt_writer_set_trim = GEOSFuncFactory('GEOSWKTWriter_setTrim', argtypes=[WKT_WRITE_PTR, c_char])
wkt_writer_set_precision = GEOSFuncFactory('GEOSWKTWriter_setRoundingPrecision', argtypes=[WKT_WRITE_PTR, c_int])
# WKBReader routines
wkb_reader_create = GEOSFuncFactory('GEOSWKBReader_create', restype=WKB_READ_PTR)
wkb_reader_destroy = GEOSFuncFactory('GEOSWKBReader_destroy', argtypes=[WKB_READ_PTR])
class WKBReadFunc(GEOSFuncFactory):
# Although the function definitions take `const unsigned char *`
# as their parameter, we use c_char_p here so the function may
# take Python strings directly as parameters. Inside Python there
# is not a difference between signed and unsigned characters, so
# it is not a problem.
argtypes = [WKB_READ_PTR, c_char_p, c_size_t]
restype = GEOM_PTR
errcheck = staticmethod(check_geom)
wkb_reader_read = WKBReadFunc('GEOSWKBReader_read')
wkb_reader_read_hex = WKBReadFunc('GEOSWKBReader_readHEX')
# WKBWriter routines
wkb_writer_create = GEOSFuncFactory('GEOSWKBWriter_create', restype=WKB_WRITE_PTR)
wkb_writer_destroy = GEOSFuncFactory('GEOSWKBWriter_destroy', argtypes=[WKB_WRITE_PTR])
# WKB Writing prototypes.
class WKBWriteFunc(GEOSFuncFactory):
argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)]
restype = c_uchar_p
errcheck = staticmethod(check_sized_string)
wkb_writer_write = WKBWriteFunc('GEOSWKBWriter_write')
wkb_writer_write_hex = WKBWriteFunc('GEOSWKBWriter_writeHEX')
# WKBWriter property getter/setter prototypes.
class WKBWriterGet(GEOSFuncFactory):
argtypes = [WKB_WRITE_PTR]
restype = c_int
class WKBWriterSet(GEOSFuncFactory):
argtypes = [WKB_WRITE_PTR, c_int]
wkb_writer_get_byteorder = WKBWriterGet('GEOSWKBWriter_getByteOrder')
wkb_writer_set_byteorder = WKBWriterSet('GEOSWKBWriter_setByteOrder')
wkb_writer_get_outdim = WKBWriterGet('GEOSWKBWriter_getOutputDimension')
wkb_writer_set_outdim = WKBWriterSet('GEOSWKBWriter_setOutputDimension')
wkb_writer_get_include_srid = WKBWriterGet('GEOSWKBWriter_getIncludeSRID', restype=c_char)
wkb_writer_set_include_srid = WKBWriterSet('GEOSWKBWriter_setIncludeSRID', argtypes=[WKB_WRITE_PTR, c_char])
# ### Base I/O Class ###
class IOBase(GEOSBase):
"Base class for GEOS I/O objects."
def __init__(self):
# Getting the pointer with the constructor.
self.ptr = self._constructor()
# Loading the real destructor function at this point as doing it in
# __del__ is too late (import error).
self._destructor.func = self._destructor.get_func(
*self._destructor.args, **self._destructor.kwargs
)
def __del__(self):
# Cleaning up with the appropriate destructor.
try:
self._destructor(self._ptr)
except (AttributeError, TypeError):
pass # Some part might already have been garbage collected
# ### Base WKB/WKT Reading and Writing objects ###
# Non-public WKB/WKT reader classes for internal use because
# their `read` methods return _pointers_ instead of GEOSGeometry
# objects.
class _WKTReader(IOBase):
_constructor = wkt_reader_create
_destructor = wkt_reader_destroy
ptr_type = WKT_READ_PTR
def read(self, wkt):
if not isinstance(wkt, (bytes, six.string_types)):
raise TypeError
return wkt_reader_read(self.ptr, force_bytes(wkt))
class _WKBReader(IOBase):
_constructor = wkb_reader_create
_destructor = wkb_reader_destroy
ptr_type = WKB_READ_PTR
def read(self, wkb):
"Returns a _pointer_ to C GEOS Geometry object from the given WKB."
if isinstance(wkb, six.memoryview):
wkb_s = bytes(wkb)
return wkb_reader_read(self.ptr, wkb_s, len(wkb_s))
elif isinstance(wkb, (bytes, six.string_types)):
return wkb_reader_read_hex(self.ptr, wkb, len(wkb))
else:
raise TypeError
# ### WKB/WKT Writer Classes ###
class WKTWriter(IOBase):
_constructor = wkt_writer_create
_destructor = wkt_writer_destroy
ptr_type = WKT_WRITE_PTR
_trim = False
_precision = None
def __init__(self, dim=2, trim=False, precision=None):
super(WKTWriter, self).__init__()
if bool(trim) != self._trim:
self.trim = trim
if precision is not None:
self.precision = precision
self.outdim = dim
def write(self, geom):
"Returns the WKT representation of the given geometry."
return wkt_writer_write(self.ptr, geom.ptr)
@property
def outdim(self):
return wkt_writer_get_outdim(self.ptr)
@outdim.setter
def outdim(self, new_dim):
if new_dim not in (2, 3):
raise ValueError('WKT output dimension must be 2 or 3')
wkt_writer_set_outdim(self.ptr, new_dim)
@property
def trim(self):
return self._trim
@trim.setter
def trim(self, flag):
if bool(flag) != self._trim:
self._trim = bool(flag)
wkt_writer_set_trim(self.ptr, b'\x01' if flag else b'\x00')
@property
def precision(self):
return self._precision
@precision.setter
def precision(self, precision):
if (not isinstance(precision, int) or precision < 0) and precision is not None:
raise AttributeError('WKT output rounding precision must be non-negative integer or None.')
if precision != self._precision:
self._precision = precision
wkt_writer_set_precision(self.ptr, -1 if precision is None else precision)
class WKBWriter(IOBase):
_constructor = wkb_writer_create
_destructor = wkb_writer_destroy
ptr_type = WKB_WRITE_PTR
def __init__(self, dim=2):
super(WKBWriter, self).__init__()
self.outdim = dim
def _handle_empty_point(self, geom):
from django.contrib.gis.geos import Point
if isinstance(geom, Point) and geom.empty:
if self.srid:
# PostGIS uses POINT(NaN NaN) for WKB representation of empty
# points. Use it for EWKB as it's a PostGIS specific format.
# https://trac.osgeo.org/postgis/ticket/3181
geom = Point(float('NaN'), float('NaN'), srid=geom.srid)
else:
raise ValueError('Empty point is not representable in WKB.')
return geom
def write(self, geom):
"Returns the WKB representation of the given geometry."
from django.contrib.gis.geos import Polygon
geom = self._handle_empty_point(geom)
wkb = wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t()))
if isinstance(geom, Polygon) and geom.empty:
# Fix GEOS output for empty polygon.<|fim▁hole|>
def write_hex(self, geom):
"Returns the HEXEWKB representation of the given geometry."
from django.contrib.gis.geos.polygon import Polygon
geom = self._handle_empty_point(geom)
wkb = wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t()))
if isinstance(geom, Polygon) and geom.empty:
wkb = wkb[:-16] + b'0' * 8
return wkb
# ### WKBWriter Properties ###
# Property for getting/setting the byteorder.
def _get_byteorder(self):
return wkb_writer_get_byteorder(self.ptr)
def _set_byteorder(self, order):
if order not in (0, 1):
raise ValueError('Byte order parameter must be 0 (Big Endian) or 1 (Little Endian).')
wkb_writer_set_byteorder(self.ptr, order)
byteorder = property(_get_byteorder, _set_byteorder)
# Property for getting/setting the output dimension.
@property
def outdim(self):
return wkb_writer_get_outdim(self.ptr)
@outdim.setter
def outdim(self, new_dim):
if new_dim not in (2, 3):
raise ValueError('WKB output dimension must be 2 or 3')
wkb_writer_set_outdim(self.ptr, new_dim)
# Property for getting/setting the include srid flag.
@property
def srid(self):
return bool(ord(wkb_writer_get_include_srid(self.ptr)))
@srid.setter
def srid(self, include):
if include:
flag = b'\x01'
else:
flag = b'\x00'
wkb_writer_set_include_srid(self.ptr, flag)
# `ThreadLocalIO` object holds instances of the WKT and WKB reader/writer
# objects that are local to the thread. The `GEOSGeometry` internals
# access these instances by calling the module-level functions, defined
# below.
class ThreadLocalIO(threading.local):
wkt_r = None
wkt_w = None
wkb_r = None
wkb_w = None
ewkb_w = None
thread_context = ThreadLocalIO()
# These module-level routines return the I/O object that is local to the
# thread. If the I/O object does not exist yet it will be initialized.
def wkt_r():
if not thread_context.wkt_r:
thread_context.wkt_r = _WKTReader()
return thread_context.wkt_r
def wkt_w(dim=2, trim=False, precision=None):
if not thread_context.wkt_w:
thread_context.wkt_w = WKTWriter(dim=dim, trim=trim, precision=precision)
else:
thread_context.wkt_w.outdim = dim
thread_context.wkt_w.trim = trim
thread_context.wkt_w.precision = precision
return thread_context.wkt_w
def wkb_r():
if not thread_context.wkb_r:
thread_context.wkb_r = _WKBReader()
return thread_context.wkb_r
def wkb_w(dim=2):
if not thread_context.wkb_w:
thread_context.wkb_w = WKBWriter(dim=dim)
else:
thread_context.wkb_w.outdim = dim
return thread_context.wkb_w
def ewkb_w(dim=2):
if not thread_context.ewkb_w:
thread_context.ewkb_w = WKBWriter(dim=dim)
thread_context.ewkb_w.srid = True
else:
thread_context.ewkb_w.outdim = dim
return thread_context.ewkb_w<|fim▁end|> | # See https://trac.osgeo.org/geos/ticket/680.
wkb = wkb[:-8] + b'\0' * 4
return six.memoryview(wkb) |
<|file_name|>test_resource_flags.py<|end_file_name|><|fim▁begin|>import os
import tempfile
from rest_framework import status
from hs_core.hydroshare import resource
from .base import HSRESTTestCase
class TestPublicResourceFlagsEndpoint(HSRESTTestCase):
def setUp(self):
super(TestPublicResourceFlagsEndpoint, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.rtype = 'GenericResource'
self.title = 'My Test resource'
res = resource.create_resource(self.rtype,
self.user,
self.title)<|fim▁hole|> {'subject': {'value': 'sub-1'}}
]
file_one = "test1.txt"
open(file_one, "w").close()
self.file_one = open(file_one, "r")
self.txt_file_path = os.path.join(self.tmp_dir, 'text.txt')
txt = open(self.txt_file_path, 'w')
txt.write("Hello World\n")
txt.close()
self.rtype = 'GenericResource'
self.title = 'My Test resource'
res_two = resource.create_resource(self.rtype,
self.user,
self.title,
files=(self.file_one,),
metadata=metadata_dict)
self.pid = res.short_id
self.pid_two = res_two.short_id
self.resources_to_delete.append(self.pid)
self.resources_to_delete.append(self.pid_two)
def test_set_resource_flag_make_public(self):
flag_url = "/hsapi/resource/%s/flag/" % self.pid
response = self.client.post(flag_url, {
"t": "make_public"
}, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
flag_url = "/hsapi/resource/%s/flag/" % self.pid_two
response = self.client.post(flag_url, {
"t": "make_public"
}, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_set_resource_flag_make_private(self):
flag_url = "/hsapi/resource/%s/flag/" % self.pid
response = self.client.post(flag_url, {
"t": "make_private"
}, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_set_resource_flag_make_discoverable(self):
flag_url = "/hsapi/resource/%s/flag/" % self.pid_two
response = self.client.post(flag_url, {
"t": "make_discoverable"
}, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_set_resource_flag_make_not_discoverable(self):
flag_url = "/hsapi/resource/%s/flag/" % self.pid
response = self.client.post(flag_url, {
"t": "make_not_discoverable"
}, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_set_resource_flag_make_not_shareable(self):
flag_url = "/hsapi/resource/%s/flag/" % self.pid
response = self.client.post(flag_url, {
"t": "make_not_shareable"
}, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_set_resource_flag_make_shareable(self):
flag_url = "/hsapi/resource/%s/flag/" % self.pid
response = self.client.post(flag_url, {
"t": "make_shareable"
}, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)<|fim▁end|> |
metadata_dict = [
{'description': {'abstract': 'My test abstract'}}, |
<|file_name|>test_bm_node.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Bare-Metal DB testcase for BareMetalNode
"""
from nova import exception
from nova.tests.virt.baremetal.db import base
from nova.tests.virt.baremetal.db import utils
from nova.virt.baremetal import db
class BareMetalNodesTestCase(base.BMDBTestCase):
def _create_nodes(self):
nodes = [
utils.new_bm_node(pm_address='0', service_host="host1",
memory_mb=100000, cpus=100, local_gb=10000),
utils.new_bm_node(pm_address='1', service_host="host2",
instance_uuid='A',
memory_mb=100000, cpus=100, local_gb=10000),
utils.new_bm_node(pm_address='2', service_host="host2",
memory_mb=1000, cpus=1, local_gb=1000),
utils.new_bm_node(pm_address='3', service_host="host2",
memory_mb=1000, cpus=2, local_gb=1000),
utils.new_bm_node(pm_address='4', service_host="host2",
memory_mb=2000, cpus=1, local_gb=1000),
utils.new_bm_node(pm_address='5', service_host="host2",
memory_mb=2000, cpus=2, local_gb=1000),
]
self.ids = []
for n in nodes:
ref = db.bm_node_create(self.context, n)
self.ids.append(ref['id'])
def test_get_all(self):
r = db.bm_node_get_all(self.context)
self.assertEqual(r, [])
self._create_nodes()
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 6)
def test_get(self):
self._create_nodes()
r = db.bm_node_get(self.context, self.ids[0])
self.assertEqual(r['pm_address'], '0')
r = db.bm_node_get(self.context, self.ids[1])
self.assertEqual(r['pm_address'], '1')
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, -1)
def test_get_by_service_host(self):
self._create_nodes()
r = db.bm_node_get_all(self.context, service_host=None)
self.assertEqual(len(r), 6)
r = db.bm_node_get_all(self.context, service_host="host1")
self.assertEqual(len(r), 1)
self.assertEqual(r[0]['pm_address'], '0')
r = db.bm_node_get_all(self.context, service_host="host2")
self.assertEqual(len(r), 5)
pmaddrs = [x['pm_address'] for x in r]
self.assertIn('1', pmaddrs)
self.assertIn('2', pmaddrs)
self.assertIn('3', pmaddrs)
self.assertIn('4', pmaddrs)
self.assertIn('5', pmaddrs)
r = db.bm_node_get_all(self.context, service_host="host3")
self.assertEqual(r, [])
def test_get_associated(self):
self._create_nodes()
r = db.bm_node_get_associated(self.context, service_host=None)
self.assertEqual(len(r), 1)
self.assertEqual(r[0]['pm_address'], '1')
r = db.bm_node_get_unassociated(self.context, service_host=None)
self.assertEqual(len(r), 5)
pmaddrs = [x['pm_address'] for x in r]
self.assertIn('0', pmaddrs)
self.assertIn('2', pmaddrs)
self.assertIn('3', pmaddrs)
self.assertIn('4', pmaddrs)
self.assertIn('5', pmaddrs)
def test_destroy(self):
self._create_nodes()
db.bm_node_destroy(self.context, self.ids[0])
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, self.ids[0])
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 5)
def test_destroy_with_interfaces(self):
self._create_nodes()
if_a_id = db.bm_interface_create(self.context, self.ids[0],
'aa:aa:aa:aa:aa:aa', None, None)
if_b_id = db.bm_interface_create(self.context, self.ids[0],
'bb:bb:bb:bb:bb:bb', None, None)
if_x_id = db.bm_interface_create(self.context, self.ids[1],
'11:22:33:44:55:66', None, None)
db.bm_node_destroy(self.context, self.ids[0])
self.assertRaises(
exception.NovaException,
db.bm_interface_get,
self.context, if_a_id)
self.assertRaises(
exception.NovaException,
db.bm_interface_get,
self.context, if_b_id)
# Another node's interface is not affected
if_x = db.bm_interface_get(self.context, if_x_id)
self.assertEqual(self.ids[1], if_x['bm_node_id'])
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, self.ids[0])
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 5)
def test_find_free(self):
self._create_nodes()
fn = db.bm_node_find_free(self.context, 'host2')
self.assertEqual(fn['pm_address'], '2')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=500, cpus=2, local_gb=100)
self.assertEqual(fn['pm_address'], '3')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=1001, cpus=1, local_gb=1000)
self.assertEqual(fn['pm_address'], '4')
fn = db.bm_node_find_free(self.context, 'host2',<|fim▁hole|> memory_mb=2000, cpus=2, local_gb=1000)
self.assertEqual(fn['pm_address'], '5')
# check memory_mb
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2001, cpus=2, local_gb=1000)
self.assertIsNone(fn)
# check cpus
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=3, local_gb=1000)
self.assertIsNone(fn)
# check local_gb
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=2, local_gb=1001)
self.assertIsNone(fn)<|fim▁end|> | memory_mb=2000, cpus=1, local_gb=1000)
self.assertEqual(fn['pm_address'], '4')
fn = db.bm_node_find_free(self.context, 'host2', |
<|file_name|>updateShortReadParentCount.py<|end_file_name|><|fim▁begin|>import argparse
import sys
import logging
import os
import csv
class ReadItem:
def __init__(self, sequence, totalCount):
self.Sequence = sequence
self.TotalCount = totalCount
self.SampleMap = {}
class AnnotationItem:
def __init__(self, sequence, totalCount, category, counts):
self.Sequence = sequence
self.TotalCount = totalCount
self.Categories = [category]
self.Counts = counts
def getValue(value):
return value.TotalCount
def getFilename(value):
return value[1]
def match(logger, input, names, annotated, maxMapped, maxNumber, minReadCount, minSampleCount, outputPrefix):
logger.info("Reading short reads:" + input + " ...")
shortReadMap = {}<|fim▁hole|> shortFileList = []
with open(input, 'r') as sr:
for line in sr:
parts = line.rstrip().split('\t')
shortFileList.append(parts)
shortFileList = sorted(shortFileList, key=getFilename)
for parts in shortFileList:
sampleFile = parts[0]
sample = parts[1]
shortReadFiles.append(sample)
logger.info(" Reading " + sampleFile + " ...")
with open(sampleFile, 'r') as fin:
fin.readline()
for line in fin:
reads = line.rstrip().split('\t')
count = int(reads[1])
seq = reads[2].rstrip()
if not seq in shortReadMap:
ri = ReadItem(seq, count)
shortReadMap[seq] = ri
else:
ri = shortReadMap[seq]
ri.TotalCount += count
ri.SampleMap[sample] = count
if minSampleCount > 1 or minReadCount > 1:
shortReads = []
for read in shortReadMap.values():
validSampleCount = len([v for v in read.SampleMap.values() if v >= minReadCount])
if validSampleCount >= minSampleCount:
shortReads.append(read)
else:
shortReads = shortReadMap.values()
shortReads = sorted(shortReads, key=getValue, reverse=True)
if len(shortReads) > maxNumber:
shortReads = shortReads[0:maxNumber]
logger.info("Reading max mapped reads:" + maxMapped + " ...")
maxmappedReads = {}
with open(maxMapped, 'r') as sr:
for line in sr:
parts = line.split('\t')
logger.info(" Reading " + parts[0] + " ...")
with open(parts[0], 'r') as fin:
while True:
qname = fin.readline().rstrip()
if not qname:
break
seq = fin.readline()
fin.readline()
fin.readline()
if qname.endswith("_"):
maxmappedReads[seq.rstrip()] = 1
cnames = names.split(",")
logger.info("Reading annotated reads:" + annotated + " ...")
annotatedReadMap = {}
annotatedFiles = []
with open(annotated, 'r') as annolist:
iIndex = -1
for row in annolist:
parts = row.split('\t')
annofile = parts[0]
iIndex = iIndex + 1
category = cnames[iIndex]
logger.info(" Reading " + annofile + " ...")
with open(annofile, 'r') as sr:
annotatedFiles = sr.readline().rstrip().split('\t')[1:]
for line in sr:
parts = line.rstrip().split('\t')
seq = parts[0]
if seq not in annotatedReadMap:
totalCount = sum(int(p) for p in parts[1:])
annotatedReadMap[seq] = AnnotationItem(seq, totalCount, category, parts[1:])
else:
annotatedReadMap[seq].Categories.append(category)
annotatedReads = sorted(annotatedReadMap.values(), key=getValue, reverse=True)
output = outputPrefix + ".tsv"
logger.info("Writing explain result:" + output + " ...")
with open(output, "w") as sw:
sw.write("ShortRead\tShortReadCount\tShortReadLength\t" + "\t".join(["SRS_" + f for f in shortReadFiles]) + "\tIsMaxMapped\tParentRead\tParentReadCount\tParentReadCategory\t" + "\t".join(["PRS_" + f for f in annotatedFiles]) + "\n")
emptyAnnotation = "\t\t\t\t" + "\t".join(["" for af in annotatedFiles]) + "\n"
for shortRead in shortReads:
shortSeq = shortRead.Sequence
shortSeqCount = shortRead.TotalCount
seqMap = shortRead.SampleMap
sw.write("%s\t%s\t%d" % (shortSeq, shortSeqCount, len(shortSeq)))
for fname in shortReadFiles:
if fname in seqMap:
sw.write("\t%s" % seqMap[fname])
else:
sw.write("\t0")
sw.write("\t" + str(shortSeq in maxmappedReads))
bFound = False
for annotatedRead in annotatedReads:
annoSeq = annotatedRead.Sequence
if shortSeq in annoSeq:
bFound = True
sw.write("\t%s\t%s\t%s\t%s\n" % (annoSeq, annotatedRead.TotalCount, "/".join(annotatedRead.Categories[0]), "\t".join(annotatedRead.Counts)))
break
if not bFound:
sw.write(emptyAnnotation)
logger.info("Done.")
def main():
parser = argparse.ArgumentParser(description="Matching short reads with annotated reads.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
DEBUG=False
NOT_DEBUG = not DEBUG
parser.add_argument('-i', '--input', action='store', nargs='?', help='Input short reads', required=NOT_DEBUG)
parser.add_argument('-m', '--maxMapped', action='store', nargs='?', help='Input reads exceed maximum mapping to genome', required=NOT_DEBUG)
parser.add_argument('-a', '--annotated', action='store', nargs='?', help='Input annotated reads', required=NOT_DEBUG)
parser.add_argument('-n', '--names', action='store', nargs='?', help='Input annotated reads categories, split by ''', required=NOT_DEBUG)
parser.add_argument('--maxNumber', action='store', default=100, nargs='?', help='Input number of top short reads for annotation')
parser.add_argument('--minReadCount', action='store', default=3, nargs='?', help='Input minimum copy of short reads in sample for annotation')
parser.add_argument('--minSampleCount', action='store', default=2, nargs='?', help='Input minimum number of sample with valid read count')
parser.add_argument('-o', '--output', action='store', nargs='?', default="-", help="Output prefix of matched reads file", required=NOT_DEBUG)
if NOT_DEBUG and len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
if DEBUG:
args.input = "T:/Shared/Labs/Vickers Lab/Tiger/projects/20180809_smallRNA_269_933_2002_human/data_visualization/short_reads_source/result/match__fileList1.list"
args.maxMapped = "T:/Shared/Labs/Vickers Lab/Tiger/projects/20180809_smallRNA_269_933_2002_human/data_visualization/short_reads_source/result/match__fileList2.list"
args.annotated = "T:/Shared/Labs/Vickers Lab/Tiger/projects/20180809_smallRNA_269_933_2002_human/data_visualization/short_reads_source/result/match__fileList3.list"
args.names = "Host miRNA,Host tRNA,Host snRNA,Host snoRNA,Host rRNA,Host other small RNA,Host Genome,Microbiome Bacteria,Environment Bacteria,Fungus,Non host tRNA,Non host rRNA"
#args.names = "Host miRNA,Host tRNA"
args.output = "T:/Shared/Labs/Vickers Lab/Tiger/projects/20180809_smallRNA_269_933_2002_human/data_visualization/short_reads_source/result/match2"
logger = logging.getLogger('updateCount')
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)-8s - %(message)s')
match(logger, args.input, args.names, args.annotated, args.maxMapped, args.maxNumber, args.minReadCount, args.minSampleCount, args.output)
if __name__ == "__main__":
main()<|fim▁end|> | shortReadFiles = []
|
<|file_name|>cholesky_op_test.py<|end_file_name|><|fim▁begin|>"""Tests for tensorflow.ops.tf.Cholesky."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
<|fim▁hole|>import tensorflow as tf
class CholeskyOpTest(tf.test.TestCase):
def _verifyCholesky(self, x):
with self.test_session() as sess:
# Verify that LL^T == x.
if x.ndim == 2:
chol = tf.cholesky(x)
verification = tf.matmul(chol,
chol,
transpose_a=False,
transpose_b=True)
else:
chol = tf.batch_cholesky(x)
verification = tf.batch_matmul(chol, chol, adj_x=False, adj_y=True)
chol_np, verification_np = sess.run([chol, verification])
self.assertAllClose(x, verification_np)
self.assertShapeEqual(x, chol)
# Check that the cholesky is lower triangular, and has positive diagonal
# elements.
if chol_np.shape[-1] > 0:
chol_reshaped = np.reshape(chol_np, (-1, chol_np.shape[-2],
chol_np.shape[-1]))
for chol_matrix in chol_reshaped:
self.assertAllClose(chol_matrix, np.tril(chol_matrix))
self.assertTrue((np.diag(chol_matrix) > 0.0).all())
def testBasic(self):
self._verifyCholesky(np.array([[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]))
def testBatch(self):
simple_array = np.array([[[1., 0.], [0., 5.]]]) # shape (1, 2, 2)
self._verifyCholesky(simple_array)
self._verifyCholesky(np.vstack((simple_array, simple_array)))
odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]])
self._verifyCholesky(np.vstack((odd_sized_array, odd_sized_array)))
# Generate random positive-definite matrices.
matrices = np.random.rand(10, 5, 5)
for i in xrange(10):
matrices[i] = np.dot(matrices[i].T, matrices[i])
self._verifyCholesky(matrices)
def testNonSquareMatrix(self):
with self.assertRaises(ValueError):
tf.cholesky(np.array([[1., 2., 3.], [3., 4., 5.]]))
def testWrongDimensions(self):
tensor3 = tf.constant([1., 2.])
with self.assertRaises(ValueError):
tf.cholesky(tensor3)
def testNotInvertible(self):
# The input should be invertible.
with self.test_session():
with self.assertRaisesOpError("LLT decomposition was not successful. The "
"input might not be valid."):
# All rows of the matrix below add to zero
self._verifyCholesky(np.array([[1., -1., 0.], [-1., 1., -1.], [0., -1.,
1.]]))
def testEmpty(self):
self._verifyCholesky(np.empty([0, 2, 2]))
self._verifyCholesky(np.empty([2, 0, 0]))
if __name__ == "__main__":
tf.test.main()<|fim▁end|> | import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin |
<|file_name|>bibrank_tag_based_indexer_unit_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for the ranking engine."""
__revision__ = "$Id$"
from invenio.importutils import lazy_import
from invenio.testutils import make_test_suite, run_test_suite, InvenioTestCase
bibrank_tag_based_indexer = lazy_import('invenio.bibrank_tag_based_indexer')
split_ranges = lazy_import('invenio.bibrank:split_ranges')<|fim▁hole|>
def test_union_dicts(self):
"""bibrank tag based indexer - union dicts"""
self.assertEqual({1: 5, 2: 6, 3: 9, 4: 10, 10: 1}, bibrank_tag_based_indexer.union_dicts({1: 5, 2: 6, 3: 9}, {3:9, 4:10, 10: 1}))
def test_split_ranges(self):
"""bibrank tag based indexer - split ranges"""
self.assertEqual([[0, 500], [600, 1000]], split_ranges("0-500,600-1000"))
TEST_SUITE = make_test_suite(TestListSetOperations,)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)<|fim▁end|> |
class TestListSetOperations(InvenioTestCase):
"""Test list set operations.""" |
<|file_name|>Scripting.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,shlex,shutil,traceback,errno,sys,stat
from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node
build_dir_override=None
no_climb_commands=['configure']
default_cmd="build"
def waf_entry_point(current_directory,version,wafdir):
Logs.init_log()
if Context.WAFVERSION!=version:
Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir))
sys.exit(1)
if'--version'in sys.argv:
Context.run_dir=current_directory
ctx=Context.create_context('options')
ctx.curdir=current_directory
ctx.parse_args()
sys.exit(0)
Context.waf_dir=wafdir
Context.launch_dir=current_directory
no_climb=os.environ.get('NOCLIMB',None)
if not no_climb:
for k in no_climb_commands:
if k in sys.argv:
no_climb=True
break
cur=current_directory
while cur:
lst=os.listdir(cur)
if Options.lockfile in lst:
env=ConfigSet.ConfigSet()
try:
env.load(os.path.join(cur,Options.lockfile))
ino=os.stat(cur)[stat.ST_INO]
except Exception:
pass
else:
for x in[env.run_dir,env.top_dir,env.out_dir]:
if Utils.is_win32:
if cur==x:
load=True
break
else:
try:
ino2=os.stat(x)[stat.ST_INO]
except OSError:
pass
else:
if ino==ino2:
load=True
break
else:
Logs.warn('invalid lock file in %s'%cur)
load=False
if load:
Context.run_dir=env.run_dir
Context.top_dir=env.top_dir
Context.out_dir=env.out_dir
break
if not Context.run_dir:
if Context.WSCRIPT_FILE in lst:
Context.run_dir=cur
next=os.path.dirname(cur)
if next==cur:
break
cur=next
if no_climb:
break
if not Context.run_dir:
if'-h'in sys.argv or'--help'in sys.argv:
Logs.warn('No wscript file found: the help message may be incomplete')
Context.run_dir=current_directory
ctx=Context.create_context('options')
ctx.curdir=current_directory
ctx.parse_args()
sys.exit(0)
Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE)
sys.exit(1)
try:
os.chdir(Context.run_dir)
except OSError:
Logs.error('Waf: The folder %r is unreadable'%Context.run_dir)
sys.exit(1)
try:
set_main_module(Context.run_dir+os.sep+Context.WSCRIPT_FILE)
except Errors.WafError ,e:
Logs.pprint('RED',e.verbose_msg)
Logs.error(str(e))
sys.exit(1)
except Exception ,e:
Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e)
traceback.print_exc(file=sys.stdout)
sys.exit(2)
try:
run_commands()
except Errors.WafError ,e:
if Logs.verbose>1:
Logs.pprint('RED',e.verbose_msg)
Logs.error(e.msg)
sys.exit(1)
except SystemExit:
raise
except Exception ,e:
traceback.print_exc(file=sys.stdout)
sys.exit(2)
except KeyboardInterrupt:
Logs.pprint('RED','Interrupted')
sys.exit(68)
def set_main_module(file_path):
Context.g_module=Context.load_module(file_path)
Context.g_module.root_path=file_path
def set_def(obj):
name=obj.__name__
if not name in Context.g_module.__dict__:
setattr(Context.g_module,name,obj)
for k in[update,dist,distclean,distcheck,update]:
set_def(k)
if not'init'in Context.g_module.__dict__:
Context.g_module.init=Utils.nada
if not'shutdown'in Context.g_module.__dict__:
Context.g_module.shutdown=Utils.nada
if not'options'in Context.g_module.__dict__:
Context.g_module.options=Utils.nada
def parse_options():
Context.create_context('options').execute()
if not Options.commands:
Options.commands=[default_cmd]
Options.commands=[x for x in Options.commands if x!='options']
Logs.verbose=Options.options.verbose
Logs.init_log()
if Options.options.zones:
Logs.zones=Options.options.zones.split(',')
if not Logs.verbose:
Logs.verbose=1
elif Logs.verbose>0:
Logs.zones=['runner']
if Logs.verbose>2:
Logs.zones=['*']
def run_command(cmd_name):
ctx=Context.create_context(cmd_name)
ctx.log_timer=Utils.Timer()
ctx.options=Options.options
ctx.cmd=cmd_name
ctx.execute()
return ctx
def run_commands():
parse_options()
run_command('init')
while Options.commands:
cmd_name=Options.commands.pop(0)
ctx=run_command(cmd_name)
Logs.info('%r finished successfully (%s)'%(cmd_name,str(ctx.log_timer)))
run_command('shutdown')
def _can_distclean(name):
for k in'.o .moc .exe'.split():
if name.endswith(k):
return True
return False
def distclean_dir(dirname):
for(root,dirs,files)in os.walk(dirname):
for f in files:
if _can_distclean(f):
fname=root+os.sep+f
try:
os.unlink(fname)
except OSError:
Logs.warn('Could not remove %r'%fname)
for x in[Context.DBFILE,'config.log']:
try:
os.unlink(x)
except OSError:
pass
try:
shutil.rmtree('c4che')
except OSError:
pass
def distclean(ctx):
'''removes the build directory'''
lst=os.listdir('.')
for f in lst:
if f==Options.lockfile:
try:
proj=ConfigSet.ConfigSet(f)
except IOError:
Logs.warn('Could not read %r'%f)
continue
if proj['out_dir']!=proj['top_dir']:
try:
shutil.rmtree(proj['out_dir'])
except IOError:
pass
except OSError ,e:
if e.errno!=errno.ENOENT:
Logs.warn('project %r cannot be removed'%proj[Context.OUT])
else:
distclean_dir(proj['out_dir'])
for k in(proj['out_dir'],proj['top_dir'],proj['run_dir']):
try:
os.remove(os.path.join(k,Options.lockfile))
except OSError ,e:
if e.errno!=errno.ENOENT:
Logs.warn('file %r cannot be removed'%f)
if f.startswith('.waf')and not Options.commands:
shutil.rmtree(f,ignore_errors=True)
class Dist(Context.Context):
'''creates an archive containing the project source code'''
cmd='dist'
fun='dist'
algo='tar.bz2'
ext_algo={}
def execute(self):
self.recurse([os.path.dirname(Context.g_module.root_path)])
self.archive()
def archive(self):
import tarfile
arch_name=self.get_arch_name()
try:
self.base_path
except AttributeError:
self.base_path=self.path
node=self.base_path.make_node(arch_name)
try:
node.delete()
except Exception:
pass
files=self.get_files()
if self.algo.startswith('tar.'):
tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.',''))
for x in files:
self.add_tar_file(x,tar)
tar.close()
elif self.algo=='zip':
import zipfile
zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED)
for x in files:
archive_name=self.get_base_name()+'/'+x.path_from(self.base_path)
zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED)
zip.close()
else:
self.fatal('Valid algo types are tar.bz2, tar.gz or zip')
try:
from hashlib import sha1 as sha
except ImportError:
from sha import sha
try:
digest=" (sha=%r)"%sha(node.read()).hexdigest()
except Exception:
digest=''
Logs.info('New archive created: %s%s'%(self.arch_name,digest))
def get_tar_path(self,node):
return node.abspath()
def add_tar_file(self,x,tar):
p=self.get_tar_path(x)
tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path))
tinfo.uid=0
tinfo.gid=0
tinfo.uname='root'
tinfo.gname='root'
fu=None
try:
fu=open(p,'rb')
tar.addfile(tinfo,fileobj=fu)
finally:
if fu:
fu.close()
def get_tar_prefix(self):
try:
return self.tar_prefix
except AttributeError:
return self.get_base_name()
def get_arch_name(self):
try:
self.arch_name
except AttributeError:
self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo)
return self.arch_name
def get_base_name(self):
try:
self.base_name
except AttributeError:
appname=getattr(Context.g_module,Context.APPNAME,'noname')
version=getattr(Context.g_module,Context.VERSION,'1.0')
self.base_name=appname+'-'+version
return self.base_name
def get_excl(self):
try:
return self.excl
except AttributeError:
self.excl=Node.exclude_regs+' **/waf-1.7.* **/.waf-1.7* **/waf3-1.7.* **/.waf3-1.7* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
nd=self.root.find_node(Context.out_dir)
if nd:
self.excl+=' '+nd.path_from(self.base_path)
return self.excl
def get_files(self):
try:
files=self.files
except AttributeError:
files=self.base_path.ant_glob('**/*',excl=self.get_excl())
return files
def dist(ctx):
'''makes a tarball for redistributing the sources'''
pass
class DistCheck(Dist):
fun='distcheck'
cmd='distcheck'
def execute(self):
self.recurse([os.path.dirname(Context.g_module.root_path)])
self.archive()
self.check()
def check(self):
import tempfile,tarfile
t=None
		try:
			t=tarfile.open(self.get_arch_name())
			for x in t:
t.extract(x)
finally:
if t:
t.close()
cfg=[]
if Options.options.distcheck_args:
cfg=shlex.split(Options.options.distcheck_args)
else:
cfg=[x for x in sys.argv if x.startswith('-')]
instdir=tempfile.mkdtemp('.inst',self.get_base_name())
ret=Utils.subprocess.Popen([sys.argv[0],'configure','install','uninstall','--destdir='+instdir]+cfg,cwd=self.get_base_name()).wait()
if ret:
raise Errors.WafError('distcheck failed with code %i'%ret)
if os.path.exists(instdir):
raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir)
shutil.rmtree(self.get_base_name())
def distcheck(ctx):
'''checks if the project compiles (tarball from 'dist')'''
pass
def update(ctx):
'''updates the plugins from the *waflib/extras* directory'''
lst=Options.options.files.split(',')
if not lst:
lst=[x for x in Utils.listdir(Context.waf_dir+'/waflib/extras')if x.endswith('.py')]
for x in lst:
tool=x.replace('.py','')
try:
Configure.download_tool(tool,force=True,ctx=ctx)
except Errors.WafError:
Logs.error('Could not find the tool %s in the remote repository'%x)
def autoconfigure(execute_method):
def execute(self):
if not Configure.autoconfig:
return execute_method(self)
env=ConfigSet.ConfigSet()
do_config=False
try:
env.load(os.path.join(Context.top_dir,Options.lockfile))
except Exception:
Logs.warn('Configuring the project')
do_config=True
else:
if env.run_dir!=Context.run_dir:
do_config=True
else:
h=0
for f in env['files']:
h=hash((h,Utils.readf(f,'rb')))
do_config=h!=env.hash
if do_config:
Options.commands.insert(0,self.cmd)
Options.commands.insert(0,'configure')
return
return execute_method(self)
return execute
Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute)
# pibrellaMidi.py
from mido import MidiFile
from time import sleep
import pibrella
""" fade test
pibrella.light.red.fade(0,100,10)
sleep(11)
pibrella.light.red.fade(100,0,10)
sleep(11)
"""
""" start
pibrella.buzzer.note(-9)
sleep(.9)
pibrella.buzzer.off()
sleep(0.1)
pibrella.buzzer.note(-9)
sleep(0.9)
pibrella.buzzer.off()
sleep(0.1)
pibrella.buzzer.note(-9)
sleep(0.9)
pibrella.buzzer.off()
sleep(0.1)
pibrella.buzzer.note(3)
sleep(0.9)
pibrella.buzzer.off()
"""
<|fim▁hole|>pibrella.buzzer.note(0)
sleep(1.25)
pibrella.buzzer.note(-7)
sleep(2)
pibrella.buzzer.off()
"""
""" Mike notes for success likely bond theme
and need a calibration mode
push button: yellow goes on, then as you turn it the light can change until the light changes
press red button again to go back to operational state
"""
""" it knows it is a comment """
mid = MidiFile('bond.mid')
for i, track in enumerate(mid.tracks):
print('Track ')
print(track.name)
if track.name == '':
for message in track:
if message.type == 'note_on':
# print('Turn on ')
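				# MIDI note number 69 is A4 (440 Hz); the offset below assumes
				# pibrella.buzzer.note(0) plays that same A, so MIDI note numbers
				# map directly onto the buzzer's semitone scale.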
note = message.note - 69
print(note)
pibrella.buzzer.note(note)
duration = 0.0 + message.time
elif message.type == 'note_off':
print(duration)
duration = message.time - duration
if duration > 0:
sleep(duration/1000.0)
pibrella.buzzer.off()
pibrella.buzzer.off()
// daemon.go
// Package daemon exposes the functions that occur on the host server
// that the Docker daemon is running.
//
// In implementing the various functions of the daemon, there is often
// a method-specific struct for configuring the runtime behavior.
package daemon // import "github.com/docker/docker/daemon"
import (
"context"
"fmt"
"io/ioutil"
"math/rand"
"net"
"net/url"
"os"
"path"
"path/filepath"
"runtime"
"strings"
"sync"
"time"
"github.com/docker/docker/pkg/fileutils"
"google.golang.org/grpc"
"github.com/containerd/containerd"
"github.com/containerd/containerd/defaults"
"github.com/containerd/containerd/pkg/dialer"
"github.com/containerd/containerd/remotes/docker"
"github.com/docker/distribution/reference"
"github.com/docker/docker/api/types"
containertypes "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/swarm"
"github.com/docker/docker/builder"
"github.com/docker/docker/container"
"github.com/docker/docker/daemon/config"
"github.com/docker/docker/daemon/discovery"
"github.com/docker/docker/daemon/events"
"github.com/docker/docker/daemon/exec"
"github.com/docker/docker/daemon/images"
"github.com/docker/docker/daemon/logger"
"github.com/docker/docker/daemon/network"
"github.com/docker/docker/errdefs"
"github.com/moby/buildkit/util/resolver"
"github.com/moby/buildkit/util/tracing"
"github.com/sirupsen/logrus"
// register graph drivers
_ "github.com/docker/docker/daemon/graphdriver/register"
"github.com/docker/docker/daemon/stats"
dmetadata "github.com/docker/docker/distribution/metadata"
"github.com/docker/docker/dockerversion"
"github.com/docker/docker/image"
"github.com/docker/docker/layer"
"github.com/docker/docker/libcontainerd"
libcontainerdtypes "github.com/docker/docker/libcontainerd/types"
"github.com/docker/docker/pkg/idtools"
"github.com/docker/docker/pkg/locker"
"github.com/docker/docker/pkg/plugingetter"
"github.com/docker/docker/pkg/sysinfo"
"github.com/docker/docker/pkg/system"
"github.com/docker/docker/pkg/truncindex"
"github.com/docker/docker/plugin"
pluginexec "github.com/docker/docker/plugin/executor/containerd"
refstore "github.com/docker/docker/reference"
"github.com/docker/docker/registry"
"github.com/docker/docker/runconfig"
volumesservice "github.com/docker/docker/volume/service"
"github.com/docker/libnetwork"
"github.com/docker/libnetwork/cluster"
nwconfig "github.com/docker/libnetwork/config"
"github.com/pkg/errors"
"golang.org/x/sync/semaphore"
)
// ContainersNamespace is the name of the namespace used for users containers
const (
ContainersNamespace = "moby"
)
var (
errSystemNotSupported = errors.New("the Docker daemon is not supported on this platform")
)
// Daemon holds information about the Docker daemon.
type Daemon struct {
ID string
repository string
containers container.Store
containersReplica container.ViewDB
execCommands *exec.Store
imageService *images.ImageService
idIndex *truncindex.TruncIndex
configStore *config.Config
statsCollector *stats.Collector
defaultLogConfig containertypes.LogConfig
RegistryService registry.Service
EventsService *events.Events
netController libnetwork.NetworkController
volumes *volumesservice.VolumesService
discoveryWatcher discovery.Reloader
root string
seccompEnabled bool
apparmorEnabled bool
shutdown bool
idMapping *idtools.IdentityMapping
// TODO: move graphDrivers field to an InfoService
graphDrivers map[string]string // By operating system
PluginStore *plugin.Store // todo: remove
pluginManager *plugin.Manager
linkIndex *linkIndex
containerdCli *containerd.Client
containerd libcontainerdtypes.Client
defaultIsolation containertypes.Isolation // Default isolation mode on Windows
clusterProvider cluster.Provider
cluster Cluster
genericResources []swarm.GenericResource
metricsPluginListener net.Listener
machineMemory uint64
seccompProfile []byte
seccompProfilePath string
diskUsageRunning int32
pruneRunning int32
hosts map[string]bool // hosts stores the addresses the daemon is listening on
startupDone chan struct{}
attachmentStore network.AttachmentStore
attachableNetworkLock *locker.Locker
}
// StoreHosts stores the addresses the daemon is listening on
func (daemon *Daemon) StoreHosts(hosts []string) {
if daemon.hosts == nil {
daemon.hosts = make(map[string]bool)
}
for _, h := range hosts {
daemon.hosts[h] = true
}
}
// HasExperimental returns whether the experimental features of the daemon are enabled or not
func (daemon *Daemon) HasExperimental() bool {
return daemon.configStore != nil && daemon.configStore.Experimental
}
// Features returns the features map from configStore
func (daemon *Daemon) Features() *map[string]bool {
return &daemon.configStore.Features
}
// NewResolveOptionsFunc returns a call back function to resolve "registry-mirrors" and
// "insecure-registries" for buildkit
func (daemon *Daemon) NewResolveOptionsFunc() resolver.ResolveOptionsFunc {
return func(ref string) docker.ResolverOptions {
var (
registryKey = "docker.io"
mirrors = make([]string, len(daemon.configStore.Mirrors))
m = map[string]resolver.RegistryConf{}
)
// must trim "https://" or "http://" prefix
for i, v := range daemon.configStore.Mirrors {
if uri, err := url.Parse(v); err == nil {
v = uri.Host
}
mirrors[i] = v
}
// set "registry-mirrors"
m[registryKey] = resolver.RegistryConf{Mirrors: mirrors}
// set "insecure-registries"
for _, v := range daemon.configStore.InsecureRegistries {
if uri, err := url.Parse(v); err == nil {
v = uri.Host
}
plainHTTP := true
m[v] = resolver.RegistryConf{
PlainHTTP: &plainHTTP,
}
}
def := docker.ResolverOptions{
Client: tracing.DefaultClient,
}
parsed, err := reference.ParseNormalizedNamed(ref)
if err != nil {
return def
}
host := reference.Domain(parsed)
c, ok := m[host]
if !ok {
return def
}
if len(c.Mirrors) > 0 {
// TODO ResolverOptions.Host is deprecated; ResolverOptions.Hosts should be used
def.Host = func(string) (string, error) {
return c.Mirrors[rand.Intn(len(c.Mirrors))], nil
}
}
// TODO ResolverOptions.PlainHTTP is deprecated; ResolverOptions.Hosts should be used
if c.PlainHTTP != nil {
def.PlainHTTP = *c.PlainHTTP
}
return def
}
}
func (daemon *Daemon) restore() error {
var mapLock sync.Mutex
containers := make(map[string]*container.Container)
logrus.Info("Loading containers: start.")
dir, err := ioutil.ReadDir(daemon.repository)
if err != nil {
return err
}
// parallelLimit is the maximum number of parallel startup jobs that we
	// allow (this is the limit used for all startup semaphores). The multiplier
// (128) was chosen after some fairly significant benchmarking -- don't change
// it unless you've tested it significantly (this value is adjusted if
// RLIMIT_NOFILE is small to avoid EMFILE).
parallelLimit := adjustParallelLimit(len(dir), 128*runtime.NumCPU())
// Re-used for all parallel startup jobs.
var group sync.WaitGroup
sem := semaphore.NewWeighted(int64(parallelLimit))
for _, v := range dir {
group.Add(1)
go func(id string) {
defer group.Done()
_ = sem.Acquire(context.Background(), 1)
defer sem.Release(1)
container, err := daemon.load(id)
if err != nil {
logrus.Errorf("Failed to load container %v: %v", id, err)
return
}
if !system.IsOSSupported(container.OS) {
logrus.Errorf("Failed to load container %v: %s (%q)", id, system.ErrNotSupportedOperatingSystem, container.OS)
return
}
// Ignore the container if it does not support the current driver being used by the graph
currentDriverForContainerOS := daemon.graphDrivers[container.OS]
if (container.Driver == "" && currentDriverForContainerOS == "aufs") || container.Driver == currentDriverForContainerOS {
rwlayer, err := daemon.imageService.GetLayerByID(container.ID, container.OS)
if err != nil {
logrus.Errorf("Failed to load container mount %v: %v", id, err)
return
}
container.RWLayer = rwlayer
logrus.Debugf("Loaded container %v, isRunning: %v", container.ID, container.IsRunning())
mapLock.Lock()
containers[container.ID] = container
mapLock.Unlock()
} else {
logrus.Debugf("Cannot load container %s because it was created with another graph driver.", container.ID)
}
}(v.Name())
}
group.Wait()
removeContainers := make(map[string]*container.Container)
restartContainers := make(map[*container.Container]chan struct{})
activeSandboxes := make(map[string]interface{})
for _, c := range containers {
group.Add(1)
go func(c *container.Container) {
defer group.Done()
_ = sem.Acquire(context.Background(), 1)
defer sem.Release(1)
if err := daemon.registerName(c); err != nil {
logrus.Errorf("Failed to register container name %s: %s", c.ID, err)
mapLock.Lock()
delete(containers, c.ID)
mapLock.Unlock()
return
}
if err := daemon.Register(c); err != nil {
logrus.Errorf("Failed to register container %s: %s", c.ID, err)
mapLock.Lock()
delete(containers, c.ID)
mapLock.Unlock()
return
}
// The LogConfig.Type is empty if the container was created before docker 1.12 with default log driver.
// We should rewrite it to use the daemon defaults.
// Fixes https://github.com/docker/docker/issues/22536
if c.HostConfig.LogConfig.Type == "" {
if err := daemon.mergeAndVerifyLogConfig(&c.HostConfig.LogConfig); err != nil {
logrus.Errorf("Failed to verify log config for container %s: %q", c.ID, err)
}
}
}(c)
}
group.Wait()
for _, c := range containers {
group.Add(1)
go func(c *container.Container) {
defer group.Done()
_ = sem.Acquire(context.Background(), 1)
defer sem.Release(1)
daemon.backportMountSpec(c)
if err := daemon.checkpointAndSave(c); err != nil {
logrus.WithError(err).WithField("container", c.ID).Error("error saving backported mountspec to disk")
}
daemon.setStateCounter(c)
logrus.WithFields(logrus.Fields{
"container": c.ID,
"running": c.IsRunning(),
"paused": c.IsPaused(),
}).Debug("restoring container")
var (
err error
alive bool
ec uint32
exitedAt time.Time
process libcontainerdtypes.Process
)
alive, _, process, err = daemon.containerd.Restore(context.Background(), c.ID, c.InitializeStdio)
if err != nil && !errdefs.IsNotFound(err) {
logrus.Errorf("Failed to restore container %s with containerd: %s", c.ID, err)
return
}
if !alive && process != nil {
ec, exitedAt, err = process.Delete(context.Background())
if err != nil && !errdefs.IsNotFound(err) {
logrus.WithError(err).Errorf("Failed to delete container %s from containerd", c.ID)
return
}
} else if !daemon.configStore.LiveRestoreEnabled {
if err := daemon.kill(c, c.StopSignal()); err != nil && !errdefs.IsNotFound(err) {
logrus.WithError(err).WithField("container", c.ID).Error("error shutting down container")
return
}
}
if c.IsRunning() || c.IsPaused() {
c.RestartManager().Cancel() // manually start containers because some need to wait for swarm networking
if c.IsPaused() && alive {
s, err := daemon.containerd.Status(context.Background(), c.ID)
if err != nil {
logrus.WithError(err).WithField("container", c.ID).
Errorf("Failed to get container status")
} else {
logrus.WithField("container", c.ID).WithField("state", s).
Info("restored container paused")
switch s {
case containerd.Paused, containerd.Pausing:
// nothing to do
case containerd.Stopped:
alive = false
case containerd.Unknown:
logrus.WithField("container", c.ID).
Error("Unknown status for container during restore")
default:
// running
c.Lock()
c.Paused = false
daemon.setStateCounter(c)
if err := c.CheckpointTo(daemon.containersReplica); err != nil {
logrus.WithError(err).WithField("container", c.ID).
Error("Failed to update stopped container state")
}
c.Unlock()
}
}
}
if !alive {
c.Lock()
c.SetStopped(&container.ExitStatus{ExitCode: int(ec), ExitedAt: exitedAt})
daemon.Cleanup(c)
if err := c.CheckpointTo(daemon.containersReplica); err != nil {
logrus.Errorf("Failed to update stopped container %s state: %v", c.ID, err)
}
c.Unlock()
}
// we call Mount and then Unmount to get BaseFs of the container
if err := daemon.Mount(c); err != nil {
// The mount is unlikely to fail. However, in case mount fails
// the container should be allowed to restore here. Some functionalities
// (like docker exec -u user) might be missing but container is able to be
// stopped/restarted/removed.
// See #29365 for related information.
// The error is only logged here.
logrus.Warnf("Failed to mount container on getting BaseFs path %v: %v", c.ID, err)
} else {
if err := daemon.Unmount(c); err != nil {
logrus.Warnf("Failed to umount container on getting BaseFs path %v: %v", c.ID, err)
}
}
c.ResetRestartManager(false)
if !c.HostConfig.NetworkMode.IsContainer() && c.IsRunning() {
options, err := daemon.buildSandboxOptions(c)
if err != nil {
logrus.Warnf("Failed build sandbox option to restore container %s: %v", c.ID, err)
}
mapLock.Lock()
activeSandboxes[c.NetworkSettings.SandboxID] = options
mapLock.Unlock()
}
}
// get list of containers we need to restart
// Do not autostart containers which
// has endpoints in a swarm scope
// network yet since the cluster is
// not initialized yet. We will start
// it after the cluster is
// initialized.
if daemon.configStore.AutoRestart && c.ShouldRestart() && !c.NetworkSettings.HasSwarmEndpoint && c.HasBeenStartedBefore {
mapLock.Lock()
restartContainers[c] = make(chan struct{})
mapLock.Unlock()
} else if c.HostConfig != nil && c.HostConfig.AutoRemove {
mapLock.Lock()
removeContainers[c.ID] = c
mapLock.Unlock()
}
c.Lock()
if c.RemovalInProgress {
// We probably crashed in the middle of a removal, reset
// the flag.
//
// We DO NOT remove the container here as we do not
// know if the user had requested for either the
// associated volumes, network links or both to also
// be removed. So we put the container in the "dead"
// state and leave further processing up to them.
logrus.Debugf("Resetting RemovalInProgress flag from %v", c.ID)
c.RemovalInProgress = false
c.Dead = true
if err := c.CheckpointTo(daemon.containersReplica); err != nil {
logrus.Errorf("Failed to update RemovalInProgress container %s state: %v", c.ID, err)
}
}
c.Unlock()
}(c)
}
group.Wait()
daemon.netController, err = daemon.initNetworkController(daemon.configStore, activeSandboxes)
if err != nil {
return fmt.Errorf("Error initializing network controller: %v", err)
}
// Now that all the containers are registered, register the links
for _, c := range containers {
group.Add(1)
go func(c *container.Container) {
_ = sem.Acquire(context.Background(), 1)
if err := daemon.registerLinks(c, c.HostConfig); err != nil {
logrus.Errorf("failed to register link for container %s: %v", c.ID, err)
}
sem.Release(1)
group.Done()
}(c)
}
group.Wait()
for c, notifier := range restartContainers {
group.Add(1)
go func(c *container.Container, chNotify chan struct{}) {
_ = sem.Acquire(context.Background(), 1)
logrus.Debugf("Starting container %s", c.ID)
// ignore errors here as this is a best effort to wait for children to be
// running before we try to start the container
children := daemon.children(c)
timeout := time.NewTimer(5 * time.Second)
defer timeout.Stop()
for _, child := range children {
if notifier, exists := restartContainers[child]; exists {
select {
case <-notifier:
case <-timeout.C:
}
}
}
// Make sure networks are available before starting
daemon.waitForNetworks(c)
if err := daemon.containerStart(c, "", "", true); err != nil {
logrus.Errorf("Failed to start container %s: %s", c.ID, err)
}
close(chNotify)
sem.Release(1)
group.Done()
}(c, notifier)
}
group.Wait()
for id := range removeContainers {
group.Add(1)
go func(cid string) {
_ = sem.Acquire(context.Background(), 1)
if err := daemon.ContainerRm(cid, &types.ContainerRmConfig{ForceRemove: true, RemoveVolume: true}); err != nil {
logrus.Errorf("Failed to remove container %s: %s", cid, err)
}
sem.Release(1)
group.Done()
}(id)
}
group.Wait()
// any containers that were started above would already have had this done,
// however we need to now prepare the mountpoints for the rest of the containers as well.
// This shouldn't cause any issue running on the containers that already had this run.
// This must be run after any containers with a restart policy so that containerized plugins
// can have a chance to be running before we try to initialize them.
for _, c := range containers {
// if the container has restart policy, do not
// prepare the mountpoints since it has been done on restarting.
// This is to speed up the daemon start when a restart container
// has a volume and the volume driver is not available.
if _, ok := restartContainers[c]; ok {
continue
} else if _, ok := removeContainers[c.ID]; ok {
// container is automatically removed, skip it.
continue
}
group.Add(1)
go func(c *container.Container) {
_ = sem.Acquire(context.Background(), 1)
if err := daemon.prepareMountPoints(c); err != nil {
logrus.Error(err)
}
sem.Release(1)
group.Done()
}(c)
}
group.Wait()
logrus.Info("Loading containers: done.")
return nil
}
// RestartSwarmContainers restarts any autostart container which has a
// swarm endpoint.
func (daemon *Daemon) RestartSwarmContainers() {
ctx := context.Background()
// parallelLimit is the maximum number of parallel startup jobs that we
	// allow (this is the limit used for all startup semaphores). The multiplier
// (128) was chosen after some fairly significant benchmarking -- don't change
// it unless you've tested it significantly (this value is adjusted if
// RLIMIT_NOFILE is small to avoid EMFILE).
parallelLimit := adjustParallelLimit(len(daemon.List()), 128*runtime.NumCPU())
var group sync.WaitGroup
sem := semaphore.NewWeighted(int64(parallelLimit))
for _, c := range daemon.List() {
if !c.IsRunning() && !c.IsPaused() {
// Autostart all the containers which has a
// swarm endpoint now that the cluster is
// initialized.
if daemon.configStore.AutoRestart && c.ShouldRestart() && c.NetworkSettings.HasSwarmEndpoint && c.HasBeenStartedBefore {
group.Add(1)
go func(c *container.Container) {
if err := sem.Acquire(ctx, 1); err != nil {
// ctx is done.
group.Done()
return
}
if err := daemon.containerStart(c, "", "", true); err != nil {
logrus.Error(err)
}
sem.Release(1)
group.Done()
}(c)
}
}
}
group.Wait()
}
// waitForNetworks is used during daemon initialization when starting up containers
// It ensures that all of a container's networks are available before the daemon tries to start the container.
// In practice it just makes sure the discovery service is available for containers which use a network that require discovery.
func (daemon *Daemon) waitForNetworks(c *container.Container) {
if daemon.discoveryWatcher == nil {
return
}
// Make sure if the container has a network that requires discovery that the discovery service is available before starting
for netName := range c.NetworkSettings.Networks {
// If we get `ErrNoSuchNetwork` here, we can assume that it is due to discovery not being ready
// Most likely this is because the K/V store used for discovery is in a container and needs to be started
if _, err := daemon.netController.NetworkByName(netName); err != nil {
if _, ok := err.(libnetwork.ErrNoSuchNetwork); !ok {
continue
}
// use a longish timeout here due to some slowdowns in libnetwork if the k/v store is on anything other than --net=host
// FIXME: why is this slow???
dur := 60 * time.Second
timer := time.NewTimer(dur)
logrus.Debugf("Container %s waiting for network to be ready", c.Name)
select {
case <-daemon.discoveryWatcher.ReadyCh():
case <-timer.C:
}
timer.Stop()
return
}
}
}
func (daemon *Daemon) children(c *container.Container) map[string]*container.Container {
return daemon.linkIndex.children(c)
}
// parents returns the names of the parent containers of the container
// with the given name.
func (daemon *Daemon) parents(c *container.Container) map[string]*container.Container {
return daemon.linkIndex.parents(c)
}
func (daemon *Daemon) registerLink(parent, child *container.Container, alias string) error {
fullName := path.Join(parent.Name, alias)
if err := daemon.containersReplica.ReserveName(fullName, child.ID); err != nil {
if err == container.ErrNameReserved {
logrus.Warnf("error registering link for %s, to %s, as alias %s, ignoring: %v", parent.ID, child.ID, alias, err)
return nil
}
return err
}
daemon.linkIndex.link(parent, child, fullName)
return nil
}
// DaemonJoinsCluster informs the daemon has joined the cluster and provides
// the handler to query the cluster component
func (daemon *Daemon) DaemonJoinsCluster(clusterProvider cluster.Provider) {
daemon.setClusterProvider(clusterProvider)
}
// DaemonLeavesCluster informs the daemon has left the cluster
func (daemon *Daemon) DaemonLeavesCluster() {
// Daemon is in charge of removing the attachable networks with
// connected containers when the node leaves the swarm
daemon.clearAttachableNetworks()
// We no longer need the cluster provider, stop it now so that
// the network agent will stop listening to cluster events.
daemon.setClusterProvider(nil)
// Wait for the networking cluster agent to stop
daemon.netController.AgentStopWait()
// Daemon is in charge of removing the ingress network when the
// node leaves the swarm. Wait for job to be done or timeout.
// This is called also on graceful daemon shutdown. We need to
// wait, because the ingress release has to happen before the
// network controller is stopped.
if done, err := daemon.ReleaseIngress(); err == nil {
timeout := time.NewTimer(5 * time.Second)
defer timeout.Stop()
select {
case <-done:
case <-timeout.C:
logrus.Warn("timeout while waiting for ingress network removal")
}
} else {
logrus.Warnf("failed to initiate ingress network removal: %v", err)
}
daemon.attachmentStore.ClearAttachments()
}
// setClusterProvider sets a component for querying the current cluster state.
func (daemon *Daemon) setClusterProvider(clusterProvider cluster.Provider) {
daemon.clusterProvider = clusterProvider
daemon.netController.SetClusterProvider(clusterProvider)
daemon.attachableNetworkLock = locker.New()
}
// IsSwarmCompatible verifies if the current daemon
// configuration is compatible with the swarm mode
func (daemon *Daemon) IsSwarmCompatible() error {
if daemon.configStore == nil {
return nil
}
return daemon.configStore.IsSwarmCompatible()
}
// NewDaemon sets up everything for the daemon to be able to service
// requests from the webserver.
func NewDaemon(ctx context.Context, config *config.Config, pluginStore *plugin.Store) (daemon *Daemon, err error) {
setDefaultMtu(config)
registryService, err := registry.NewService(config.ServiceOptions)
if err != nil {
return nil, err
}
// Ensure that we have a correct root key limit for launching containers.
if err := ModifyRootKeyLimit(); err != nil {
logrus.Warnf("unable to modify root key limit, number of containers could be limited by this quota: %v", err)
}
// Ensure we have compatible and valid configuration options
if err := verifyDaemonSettings(config); err != nil {
return nil, err
}
// Do we have a disabled network?
config.DisableBridge = isBridgeNetworkDisabled(config)
// Setup the resolv.conf
setupResolvConf(config)
// Verify the platform is supported as a daemon
if !platformSupported {
return nil, errSystemNotSupported
}
// Validate platform-specific requirements
if err := checkSystem(); err != nil {
return nil, err
}
idMapping, err := setupRemappedRoot(config)
if err != nil {
return nil, err
}
rootIDs := idMapping.RootPair()
if err := setupDaemonProcess(config); err != nil {
return nil, err
}
// set up the tmpDir to use a canonical path
tmp, err := prepareTempDir(config.Root, rootIDs)
if err != nil {
return nil, fmt.Errorf("Unable to get the TempDir under %s: %s", config.Root, err)
}
realTmp, err := fileutils.ReadSymlinkedDirectory(tmp)
if err != nil {
return nil, fmt.Errorf("Unable to get the full path to the TempDir (%s): %s", tmp, err)
}
if isWindows {
if _, err := os.Stat(realTmp); err != nil && os.IsNotExist(err) {
if err := system.MkdirAll(realTmp, 0700); err != nil {
return nil, fmt.Errorf("Unable to create the TempDir (%s): %s", realTmp, err)
}
}
os.Setenv("TEMP", realTmp)
os.Setenv("TMP", realTmp)
} else {
os.Setenv("TMPDIR", realTmp)
}
d := &Daemon{
configStore: config,
PluginStore: pluginStore,
startupDone: make(chan struct{}),
}
// Ensure the daemon is properly shutdown if there is a failure during
// initialization
defer func() {
if err != nil {
if err := d.Shutdown(); err != nil {
logrus.Error(err)
}
}
}()
if err := d.setGenericResources(config); err != nil {
return nil, err
}
// set up SIGUSR1 handler on Unix-like systems, or a Win32 global event
// on Windows to dump Go routine stacks
stackDumpDir := config.Root
if execRoot := config.GetExecRoot(); execRoot != "" {
stackDumpDir = execRoot
}
d.setupDumpStackTrap(stackDumpDir)
if err := d.setupSeccompProfile(); err != nil {
return nil, err
}
// Set the default isolation mode (only applicable on Windows)
if err := d.setDefaultIsolation(); err != nil {
return nil, fmt.Errorf("error setting default isolation mode: %v", err)
}
if err := configureMaxThreads(config); err != nil {
logrus.Warnf("Failed to configure golang's threads limit: %v", err)
}
// ensureDefaultAppArmorProfile does nothing if apparmor is disabled
if err := ensureDefaultAppArmorProfile(); err != nil {
logrus.Errorf(err.Error())
}
daemonRepo := filepath.Join(config.Root, "containers")
if err := idtools.MkdirAllAndChown(daemonRepo, 0700, rootIDs); err != nil {
return nil, err
}
// Create the directory where we'll store the runtime scripts (i.e. in
// order to support runtimeArgs)
daemonRuntimes := filepath.Join(config.Root, "runtimes")
if err := system.MkdirAll(daemonRuntimes, 0700); err != nil {
return nil, err
}
if err := d.loadRuntimes(); err != nil {
return nil, err
}
if isWindows {
if err := system.MkdirAll(filepath.Join(config.Root, "credentialspecs"), 0); err != nil {
return nil, err
}
}
// On Windows we don't support the environment variable, or a user supplied graphdriver
// as Windows has no choice in terms of which graphdrivers to use. It's a case of
// running Windows containers on Windows - windowsfilter, running Linux containers on Windows,
// lcow. Unix platforms however run a single graphdriver for all containers, and it can
// be set through an environment variable, a daemon start parameter, or chosen through
// initialization of the layerstore through driver priority order for example.
d.graphDrivers = make(map[string]string)
layerStores := make(map[string]layer.Store)
if isWindows {
d.graphDrivers[runtime.GOOS] = "windowsfilter"
if system.LCOWSupported() {
d.graphDrivers["linux"] = "lcow"
}
} else {
driverName := os.Getenv("DOCKER_DRIVER")
if driverName == "" {
driverName = config.GraphDriver
} else {
logrus.Infof("Setting the storage driver from the $DOCKER_DRIVER environment variable (%s)", driverName)
}
d.graphDrivers[runtime.GOOS] = driverName // May still be empty. Layerstore init determines instead.
}
d.RegistryService = registryService
logger.RegisterPluginGetter(d.PluginStore)
metricsSockPath, err := d.listenMetricsSock()
if err != nil {
return nil, err
}
registerMetricsPluginCallback(d.PluginStore, metricsSockPath)
gopts := []grpc.DialOption{
grpc.WithInsecure(),
grpc.WithBackoffMaxDelay(3 * time.Second),
grpc.WithDialer(dialer.Dialer),
// TODO(stevvooe): We may need to allow configuration of this on the client.
grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(defaults.DefaultMaxRecvMsgSize)),
grpc.WithDefaultCallOptions(grpc.MaxCallSendMsgSize(defaults.DefaultMaxSendMsgSize)),
}
if config.ContainerdAddr != "" {
d.containerdCli, err = containerd.New(config.ContainerdAddr, containerd.WithDefaultNamespace(config.ContainerdNamespace), containerd.WithDialOpts(gopts), containerd.WithTimeout(60*time.Second))
if err != nil {
return nil, errors.Wrapf(err, "failed to dial %q", config.ContainerdAddr)
}
}
	createPluginExec := func(m *plugin.Manager) (plugin.Executor, error) {
		var pluginCli *containerd.Client

		// Windows is not currently using containerd, keep the
		// client as nil
if config.ContainerdAddr != "" {
pluginCli, err = containerd.New(config.ContainerdAddr, containerd.WithDefaultNamespace(config.ContainerdPluginNamespace), containerd.WithDialOpts(gopts), containerd.WithTimeout(60*time.Second))
if err != nil {
return nil, errors.Wrapf(err, "failed to dial %q", config.ContainerdAddr)
}
}
return pluginexec.New(ctx, getPluginExecRoot(config.Root), pluginCli, config.ContainerdPluginNamespace, m)
}
// Plugin system initialization should happen before restore. Do not change order.
d.pluginManager, err = plugin.NewManager(plugin.ManagerConfig{
Root: filepath.Join(config.Root, "plugins"),
ExecRoot: getPluginExecRoot(config.Root),
Store: d.PluginStore,
CreateExecutor: createPluginExec,
RegistryService: registryService,
LiveRestoreEnabled: config.LiveRestoreEnabled,
LogPluginEvent: d.LogPluginEvent, // todo: make private
AuthzMiddleware: config.AuthzMiddleware,
})
if err != nil {
return nil, errors.Wrap(err, "couldn't create plugin manager")
}
if err := d.setupDefaultLogConfig(); err != nil {
return nil, err
}
for operatingSystem, gd := range d.graphDrivers {
layerStores[operatingSystem], err = layer.NewStoreFromOptions(layer.StoreOptions{
Root: config.Root,
MetadataStorePathTemplate: filepath.Join(config.Root, "image", "%s", "layerdb"),
GraphDriver: gd,
GraphDriverOptions: config.GraphOptions,
IDMapping: idMapping,
PluginGetter: d.PluginStore,
ExperimentalEnabled: config.Experimental,
OS: operatingSystem,
})
if err != nil {
return nil, err
}
// As layerstore initialization may set the driver
d.graphDrivers[operatingSystem] = layerStores[operatingSystem].DriverName()
}
// Configure and validate the kernels security support. Note this is a Linux/FreeBSD
// operation only, so it is safe to pass *just* the runtime OS graphdriver.
if err := configureKernelSecuritySupport(config, d.graphDrivers[runtime.GOOS]); err != nil {
return nil, err
}
imageRoot := filepath.Join(config.Root, "image", d.graphDrivers[runtime.GOOS])
ifs, err := image.NewFSStoreBackend(filepath.Join(imageRoot, "imagedb"))
if err != nil {
return nil, err
}
lgrMap := make(map[string]image.LayerGetReleaser)
for os, ls := range layerStores {
lgrMap[os] = ls
}
imageStore, err := image.NewImageStore(ifs, lgrMap)
if err != nil {
return nil, err
}
d.volumes, err = volumesservice.NewVolumeService(config.Root, d.PluginStore, rootIDs, d)
if err != nil {
return nil, err
}
trustKey, err := loadOrCreateTrustKey(config.TrustKeyPath)
if err != nil {
return nil, err
}
trustDir := filepath.Join(config.Root, "trust")
if err := system.MkdirAll(trustDir, 0700); err != nil {
return nil, err
}
// We have a single tag/reference store for the daemon globally. However, it's
// stored under the graphdriver. On host platforms which only support a single
// container OS, but multiple selectable graphdrivers, this means depending on which
// graphdriver is chosen, the global reference store is under there. For
// platforms which support multiple container operating systems, this is slightly
// more problematic as where does the global ref store get located? Fortunately,
// for Windows, which is currently the only daemon supporting multiple container
// operating systems, the list of graphdrivers available isn't user configurable.
// For backwards compatibility, we just put it under the windowsfilter
// directory regardless.
refStoreLocation := filepath.Join(imageRoot, `repositories.json`)
rs, err := refstore.NewReferenceStore(refStoreLocation)
if err != nil {
return nil, fmt.Errorf("Couldn't create reference store repository: %s", err)
}
distributionMetadataStore, err := dmetadata.NewFSMetadataStore(filepath.Join(imageRoot, "distribution"))
if err != nil {
return nil, err
}
// Discovery is only enabled when the daemon is launched with an address to advertise. When
// initialized, the daemon is registered and we can store the discovery backend as it's read-only
if err := d.initDiscovery(config); err != nil {
return nil, err
}
sysInfo := sysinfo.New(false)
// Check if Devices cgroup is mounted, it is hard requirement for container security,
// on Linux.
if runtime.GOOS == "linux" && !sysInfo.CgroupDevicesEnabled {
return nil, errors.New("Devices cgroup isn't mounted")
}
d.ID = trustKey.PublicKey().KeyID()
d.repository = daemonRepo
d.containers = container.NewMemoryStore()
if d.containersReplica, err = container.NewViewDB(); err != nil {
return nil, err
}
d.execCommands = exec.NewStore()
d.idIndex = truncindex.NewTruncIndex([]string{})
d.statsCollector = d.newStatsCollector(1 * time.Second)
d.EventsService = events.New()
d.root = config.Root
d.idMapping = idMapping
d.seccompEnabled = sysInfo.Seccomp
d.apparmorEnabled = sysInfo.AppArmor
d.linkIndex = newLinkIndex()
// TODO: imageStore, distributionMetadataStore, and ReferenceStore are only
// used above to run migration. They could be initialized in ImageService
// if migration is called from daemon/images. layerStore might move as well.
d.imageService = images.NewImageService(images.ImageServiceConfig{
ContainerStore: d.containers,
DistributionMetadataStore: distributionMetadataStore,
EventsService: d.EventsService,
ImageStore: imageStore,
LayerStores: layerStores,
MaxConcurrentDownloads: *config.MaxConcurrentDownloads,
MaxConcurrentUploads: *config.MaxConcurrentUploads,
MaxDownloadAttempts: *config.MaxDownloadAttempts,
ReferenceStore: rs,
RegistryService: registryService,
TrustKey: trustKey,
})
go d.execCommandGC()
d.containerd, err = libcontainerd.NewClient(ctx, d.containerdCli, filepath.Join(config.ExecRoot, "containerd"), config.ContainerdNamespace, d)
if err != nil {
return nil, err
}
if err := d.restore(); err != nil {
return nil, err
}
close(d.startupDone)
// FIXME: this method never returns an error
info, _ := d.SystemInfo()
engineInfo.WithValues(
dockerversion.Version,
dockerversion.GitCommit,
info.Architecture,
info.Driver,
info.KernelVersion,
info.OperatingSystem,
info.OSType,
info.OSVersion,
info.ID,
).Set(1)
engineCpus.Set(float64(info.NCPU))
engineMemory.Set(float64(info.MemTotal))
gd := ""
for os, driver := range d.graphDrivers {
if len(gd) > 0 {
gd += ", "
}
gd += driver
if len(d.graphDrivers) > 1 {
gd = fmt.Sprintf("%s (%s)", gd, os)
}
}
logrus.WithFields(logrus.Fields{
"version": dockerversion.Version,
"commit": dockerversion.GitCommit,
"graphdriver(s)": gd,
}).Info("Docker daemon")
return d, nil
}
// DistributionServices returns services controlling daemon storage
func (daemon *Daemon) DistributionServices() images.DistributionServices {
return daemon.imageService.DistributionServices()
}
func (daemon *Daemon) waitForStartupDone() {
<-daemon.startupDone
}
func (daemon *Daemon) shutdownContainer(c *container.Container) error {
stopTimeout := c.StopTimeout()
	// If the container fails to exit within stopTimeout seconds of SIGTERM, then force kill it
if err := daemon.containerStop(c, stopTimeout); err != nil {
return fmt.Errorf("Failed to stop container %s with error: %v", c.ID, err)
}
// Wait without timeout for the container to exit.
// Ignore the result.
<-c.Wait(context.Background(), container.WaitConditionNotRunning)
return nil
}
// ShutdownTimeout returns the timeout (in seconds) before containers are forcibly
// killed during shutdown. The default timeout can be configured both on the daemon
// and per container, and the longest timeout will be used. A grace-period of
// 5 seconds is added to the configured timeout.
//
// A negative (-1) timeout means "indefinitely", which means that containers
// are not forcibly killed, and the daemon shuts down after all containers exit.
func (daemon *Daemon) ShutdownTimeout() int {
shutdownTimeout := daemon.configStore.ShutdownTimeout
if shutdownTimeout < 0 {
return -1
}
if daemon.containers == nil {
return shutdownTimeout
}
graceTimeout := 5
for _, c := range daemon.containers.List() {
stopTimeout := c.StopTimeout()
if stopTimeout < 0 {
return -1
}
if stopTimeout+graceTimeout > shutdownTimeout {
shutdownTimeout = stopTimeout + graceTimeout
}
}
return shutdownTimeout
}
// Shutdown stops the daemon.
func (daemon *Daemon) Shutdown() error {
daemon.shutdown = true
// Keep mounts and networking running on daemon shutdown if
// we are to keep containers running and restore them.
if daemon.configStore.LiveRestoreEnabled && daemon.containers != nil {
// check if there are any running containers, if none we should do some cleanup
if ls, err := daemon.Containers(&types.ContainerListOptions{}); len(ls) != 0 || err != nil {
// metrics plugins still need some cleanup
daemon.cleanupMetricsPlugins()
return nil
}
}
if daemon.containers != nil {
logrus.Debugf("daemon configured with a %d seconds minimum shutdown timeout", daemon.configStore.ShutdownTimeout)
logrus.Debugf("start clean shutdown of all containers with a %d seconds timeout...", daemon.ShutdownTimeout())
daemon.containers.ApplyAll(func(c *container.Container) {
if !c.IsRunning() {
return
}
logrus.Debugf("stopping %s", c.ID)
if err := daemon.shutdownContainer(c); err != nil {
logrus.Errorf("Stop container error: %v", err)
return
}
if mountid, err := daemon.imageService.GetLayerMountID(c.ID, c.OS); err == nil {
daemon.cleanupMountsByID(mountid)
}
logrus.Debugf("container stopped %s", c.ID)
})
}
if daemon.volumes != nil {
if err := daemon.volumes.Shutdown(); err != nil {
logrus.Errorf("Error shutting down volume store: %v", err)
}
}
if daemon.imageService != nil {
daemon.imageService.Cleanup()
}
// If we are part of a cluster, clean up cluster's stuff
if daemon.clusterProvider != nil {
logrus.Debugf("start clean shutdown of cluster resources...")
daemon.DaemonLeavesCluster()
}
daemon.cleanupMetricsPlugins()
// Shutdown plugins after containers and layerstore. Don't change the order.
daemon.pluginShutdown()
// trigger libnetwork Stop only if it's initialized
if daemon.netController != nil {
daemon.netController.Stop()
}
if daemon.containerdCli != nil {
daemon.containerdCli.Close()
}
return daemon.cleanupMounts()
}
// Mount sets container.BaseFS
// (is it not set coming in? why is it unset?)
func (daemon *Daemon) Mount(container *container.Container) error {
if container.RWLayer == nil {
return errors.New("RWLayer of container " + container.ID + " is unexpectedly nil")
}
dir, err := container.RWLayer.Mount(container.GetMountLabel())
if err != nil {
return err
}
logrus.Debugf("container mounted via layerStore: %v", dir)
if container.BaseFS != nil && container.BaseFS.Path() != dir.Path() {
// The mount path reported by the graph driver should always be trusted on Windows, since the
// volume path for a given mounted layer may change over time. This should only be an error
// on non-Windows operating systems.
if runtime.GOOS != "windows" {
daemon.Unmount(container)
return fmt.Errorf("Error: driver %s is returning inconsistent paths for container %s ('%s' then '%s')",
daemon.imageService.GraphDriverForOS(container.OS), container.ID, container.BaseFS, dir)
}
}
container.BaseFS = dir // TODO: combine these fields
return nil
}
// Unmount unsets the container base filesystem
func (daemon *Daemon) Unmount(container *container.Container) error {
if container.RWLayer == nil {
return errors.New("RWLayer of container " + container.ID + " is unexpectedly nil")
}
if err := container.RWLayer.Unmount(); err != nil {
logrus.Errorf("Error unmounting container %s: %s", container.ID, err)
return err
}
return nil
}
// Subnets returns the IPv4 and IPv6 subnets of networks that are managed by Docker.
func (daemon *Daemon) Subnets() ([]net.IPNet, []net.IPNet) {
var v4Subnets []net.IPNet
var v6Subnets []net.IPNet
managedNetworks := daemon.netController.Networks()
for _, managedNetwork := range managedNetworks {
v4infos, v6infos := managedNetwork.Info().IpamInfo()
for _, info := range v4infos {
if info.IPAMData.Pool != nil {
v4Subnets = append(v4Subnets, *info.IPAMData.Pool)
}
}
for _, info := range v6infos {
if info.IPAMData.Pool != nil {
v6Subnets = append(v6Subnets, *info.IPAMData.Pool)
}
}
}
return v4Subnets, v6Subnets
}
// prepareTempDir prepares and returns the default directory to use
// for temporary files.
// If it doesn't exist, it is created. If it exists, its content is removed.
func prepareTempDir(rootDir string, rootIdentity idtools.Identity) (string, error) {
var tmpDir string
if tmpDir = os.Getenv("DOCKER_TMPDIR"); tmpDir == "" {
tmpDir = filepath.Join(rootDir, "tmp")
newName := tmpDir + "-old"
if err := os.Rename(tmpDir, newName); err == nil {
go func() {
if err := os.RemoveAll(newName); err != nil {
logrus.Warnf("failed to delete old tmp directory: %s", newName)
}
}()
} else if !os.IsNotExist(err) {
logrus.Warnf("failed to rename %s for background deletion: %s. Deleting synchronously", tmpDir, err)
if err := os.RemoveAll(tmpDir); err != nil {
logrus.Warnf("failed to delete old tmp directory: %s", tmpDir)
}
}
}
// We don't remove the content of tmpdir if it's not the default,
// it may hold things that do not belong to us.
return tmpDir, idtools.MkdirAllAndChown(tmpDir, 0700, rootIdentity)
}
func (daemon *Daemon) setGenericResources(conf *config.Config) error {
genericResources, err := config.ParseGenericResources(conf.NodeGenericResources)
if err != nil {
return err
}
daemon.genericResources = genericResources
return nil
}
func setDefaultMtu(conf *config.Config) {
// do nothing if the config does not have the default 0 value.
if conf.Mtu != 0 {
return
}
conf.Mtu = config.DefaultNetworkMtu
}
// IsShuttingDown tells whether the daemon is shutting down or not
func (daemon *Daemon) IsShuttingDown() bool {
return daemon.shutdown
}
// initDiscovery initializes the discovery watcher for this daemon.
func (daemon *Daemon) initDiscovery(conf *config.Config) error {
advertise, err := config.ParseClusterAdvertiseSettings(conf.ClusterStore, conf.ClusterAdvertise)
if err != nil {
if err == discovery.ErrDiscoveryDisabled {
return nil
}
return err
}
conf.ClusterAdvertise = advertise
discoveryWatcher, err := discovery.Init(conf.ClusterStore, conf.ClusterAdvertise, conf.ClusterOpts)
if err != nil {
return fmt.Errorf("discovery initialization failed (%v)", err)
}
daemon.discoveryWatcher = discoveryWatcher
return nil
}
func isBridgeNetworkDisabled(conf *config.Config) bool {
return conf.BridgeConfig.Iface == config.DisableNetworkBridge
}
func (daemon *Daemon) networkOptions(dconfig *config.Config, pg plugingetter.PluginGetter, activeSandboxes map[string]interface{}) ([]nwconfig.Option, error) {
options := []nwconfig.Option{}
if dconfig == nil {
return options, nil
}
options = append(options, nwconfig.OptionExperimental(dconfig.Experimental))
options = append(options, nwconfig.OptionDataDir(dconfig.Root))
options = append(options, nwconfig.OptionExecRoot(dconfig.GetExecRoot()))
dd := runconfig.DefaultDaemonNetworkMode()
dn := runconfig.DefaultDaemonNetworkMode().NetworkName()
options = append(options, nwconfig.OptionDefaultDriver(string(dd)))
options = append(options, nwconfig.OptionDefaultNetwork(dn))
if strings.TrimSpace(dconfig.ClusterStore) != "" {
kv := strings.Split(dconfig.ClusterStore, "://")
if len(kv) != 2 {
return nil, errors.New("kv store daemon config must be of the form KV-PROVIDER://KV-URL")
}
options = append(options, nwconfig.OptionKVProvider(kv[0]))
options = append(options, nwconfig.OptionKVProviderURL(kv[1]))
}
if len(dconfig.ClusterOpts) > 0 {
options = append(options, nwconfig.OptionKVOpts(dconfig.ClusterOpts))
}
if daemon.discoveryWatcher != nil {
options = append(options, nwconfig.OptionDiscoveryWatcher(daemon.discoveryWatcher))
}
if dconfig.ClusterAdvertise != "" {
options = append(options, nwconfig.OptionDiscoveryAddress(dconfig.ClusterAdvertise))
}
options = append(options, nwconfig.OptionLabels(dconfig.Labels))
options = append(options, driverOptions(dconfig)...)
if len(dconfig.NetworkConfig.DefaultAddressPools.Value()) > 0 {
options = append(options, nwconfig.OptionDefaultAddressPoolConfig(dconfig.NetworkConfig.DefaultAddressPools.Value()))
}
if daemon.configStore != nil && daemon.configStore.LiveRestoreEnabled && len(activeSandboxes) != 0 {
options = append(options, nwconfig.OptionActiveSandboxes(activeSandboxes))
}
if pg != nil {
options = append(options, nwconfig.OptionPluginGetter(pg))
}
options = append(options, nwconfig.OptionNetworkControlPlaneMTU(dconfig.NetworkControlPlaneMTU))
return options, nil
}
// GetCluster returns the cluster
func (daemon *Daemon) GetCluster() Cluster {
return daemon.cluster
}
// SetCluster sets the cluster
func (daemon *Daemon) SetCluster(cluster Cluster) {
daemon.cluster = cluster
}
func (daemon *Daemon) pluginShutdown() {
manager := daemon.pluginManager
// Check for a valid manager object. In error conditions, daemon init can fail
// and shutdown called, before plugin manager is initialized.
if manager != nil {
manager.Shutdown()
}
}
// PluginManager returns current pluginManager associated with the daemon
func (daemon *Daemon) PluginManager() *plugin.Manager { // set up before daemon to avoid this method
return daemon.pluginManager
}
// PluginGetter returns current pluginStore associated with the daemon
func (daemon *Daemon) PluginGetter() *plugin.Store {
return daemon.PluginStore
}
// CreateDaemonRoot creates the root for the daemon
func CreateDaemonRoot(config *config.Config) error {
// get the canonical path to the Docker root directory
var realRoot string
if _, err := os.Stat(config.Root); err != nil && os.IsNotExist(err) {
realRoot = config.Root
} else {
realRoot, err = fileutils.ReadSymlinkedDirectory(config.Root)
if err != nil {
return fmt.Errorf("Unable to get the full path to root (%s): %s", config.Root, err)
}
}
idMapping, err := setupRemappedRoot(config)
if err != nil {
return err
}
return setupDaemonRoot(config, realRoot, idMapping.RootPair())
}
// checkpointAndSave grabs a container lock to safely call container.CheckpointTo
func (daemon *Daemon) checkpointAndSave(container *container.Container) error {
container.Lock()
defer container.Unlock()
if err := container.CheckpointTo(daemon.containersReplica); err != nil {
return fmt.Errorf("Error saving container state: %v", err)
}
return nil
}
// because the CLI sends a -1 when it wants to unset the swappiness value
// we need to clear it on the server side
func fixMemorySwappiness(resources *containertypes.Resources) {
if resources.MemorySwappiness != nil && *resources.MemorySwappiness == -1 {
resources.MemorySwappiness = nil
}
}
// GetAttachmentStore returns current attachment store associated with the daemon
func (daemon *Daemon) GetAttachmentStore() *network.AttachmentStore {
return &daemon.attachmentStore
}
// IdentityMapping returns uid/gid mapping or a SID (in the case of Windows) for the builder
func (daemon *Daemon) IdentityMapping() *idtools.IdentityMapping {
return daemon.idMapping
}
// ImageService returns the Daemon's ImageService
func (daemon *Daemon) ImageService() *images.ImageService {
return daemon.imageService
}
// BuilderBackend returns the backend used by builder
func (daemon *Daemon) BuilderBackend() builder.Backend {
return struct {
*Daemon
*images.ImageService
}{daemon, daemon.imageService}
}
# h5_in_memory.py
"""
HDF5 in memory object
Britton Smith <[email protected]>
"""
import h5py
import numpy as np
import sys
class H5InMemory(object):
def __init__(self, fh):
self.attrs = {}
if fh is None:
self.data = {}
return
if isinstance(fh, str):
fh = h5py.File(fh, "r")
if hasattr(fh, "attrs"):
self.attrs = dict([(attr, fh.attrs[attr]) \
for attr in fh.attrs])
if isinstance(fh, h5py.Dataset):
self.value = fh.value
elif isinstance(fh, np.ndarray):
self.value = fh
else:
self.data = dict([(field, H5InMemory(fh[field])) \
for field in fh.keys()])
if isinstance(fh, h5py.File):
fh.close()
def create_group(self, key):
self.data[key] = H5InMemory(None)
return self.data[key]
def create_dataset(self, key, data=None):
self.data[key] = H5InMemory(data)
return self.data[key]
def __getitem__(self, key):
return self.data[key]
def __setitem__(self, key, value):
self.data[key] = value
def __delitem__(self, key):
del self.data[key]
def __iter__(self):
for key in self.data:
yield key
def iterkeys(self):
for key in self.data:
yield key
def __contains__(self, key):
return key in self.data
def __repr__(self):
if hasattr(self, "value"):
return "<H5InMemory Data object %s>" % \
str(self.value.shape)
		else:
			return "<H5InMemory Group object (%d items)>" % \
				len(self.keys())
def __str__(self):
return self.__repr__()
def __iter__(self):
for field in self.keys():
yield field
def keys(self):
if hasattr(self, "data"):
return self.data.keys()
return None
def save(self, fh):
top = False
if isinstance(fh, str):
top = True
fh = h5py.File(fh, "w")
for attr in self.attrs:
fh.attrs[attr] = self.attrs[attr]
if hasattr(self, "data"):
for field in self:
if hasattr(self.data[field], "data"):
self.data[field].save(fh.create_group(field))
else:
dfh = fh.create_dataset(field,
data=self.data[field].value)
self.data[field].save(dfh)
if top:
			fh.close()
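# Usage sketch (not part of the original module): build a small tree in memory,
# write it out with save(), and read it back. The file and field names are
# arbitrary examples.
if __name__ == "__main__":
	demo = H5InMemory(None)
	demo.attrs["description"] = "in-memory demo"
	grid = demo.create_group("grid")
	grid.create_dataset("density", data=np.linspace(0.0, 1.0, 8))
	grid["density"].attrs["units"] = "g/cm**3"
	demo.save("demo_output.h5")
	reloaded = H5InMemory("demo_output.h5")
	print(reloaded["grid"]["density"].value)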
// OSimpleKeyIndexDefinitionTest.java
package com.orientechnologies.orient.core.index;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.orientechnologies.common.collection.OCompositeKey;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
@Test
@SuppressWarnings("unchecked")
public class OSimpleKeyIndexDefinitionTest {
private OSimpleKeyIndexDefinition simpleKeyIndexDefinition;
@BeforeMethod
public void beforeMethod() {
simpleKeyIndexDefinition = new OSimpleKeyIndexDefinition(OType.INTEGER, OType.STRING);
}
@Test
public void testGetFields() {
Assert.assertTrue(simpleKeyIndexDefinition.getFields().isEmpty());
}
@Test
public void testGetClassName() {
Assert.assertNull(simpleKeyIndexDefinition.getClassName());
}
@Test
public void testCreateValueSimpleKey() {
final OSimpleKeyIndexDefinition keyIndexDefinition = new OSimpleKeyIndexDefinition(OType.INTEGER);
final Object result = keyIndexDefinition.createValue("2");
Assert.assertEquals(result, 2);
}
@Test
public void testCreateValueCompositeKeyListParam() {
final Object result = simpleKeyIndexDefinition.createValue(Arrays.asList("2", "3"));
final OCompositeKey compositeKey = new OCompositeKey(Arrays.asList(2, "3"));
Assert.assertEquals(result, compositeKey);
}
@Test
public void testCreateValueCompositeKeyNullListParam() {
final Object result = simpleKeyIndexDefinition.createValue(Arrays.asList((Object) null));
Assert.assertNull(result);
}
@Test
public void testNullParamListItem() {
final Object result = simpleKeyIndexDefinition.createValue(Arrays.asList("2", null));
Assert.assertNull(result);
}
@Test
public void testWrongParamTypeListItem() {
final Object result = simpleKeyIndexDefinition.createValue(Arrays.asList("a", "3"));
Assert.assertNull(result);
}
@Test
public void testCreateValueCompositeKey() {
final Object result = simpleKeyIndexDefinition.createValue("2", "3");
final OCompositeKey compositeKey = new OCompositeKey(Arrays.asList(2, "3"));
Assert.assertEquals(result, compositeKey);
}
@Test
public void testCreateValueCompositeKeyNullParamList() {
final Object result = simpleKeyIndexDefinition.createValue((List<?>) null);
		Assert.assertNull(result);
	}
@Test
public void testCreateValueCompositeKeyNullParam() {
final Object result = simpleKeyIndexDefinition.createValue((Object) null);
Assert.assertNull(result);
}
@Test
public void testCreateValueCompositeKeyEmptyList() {
final Object result = simpleKeyIndexDefinition.createValue(Collections.<Object> emptyList());
Assert.assertNull(result);
}
@Test
public void testNullParamItem() {
final Object result = simpleKeyIndexDefinition.createValue("2", null);
Assert.assertNull(result);
}
@Test
public void testWrongParamType() {
final Object result = simpleKeyIndexDefinition.createValue("a", "3");
Assert.assertNull(result);
}
@Test
public void testParamCount() {
Assert.assertEquals(simpleKeyIndexDefinition.getParamCount(), 2);
}
@Test
public void testParamCountOneItem() {
final OSimpleKeyIndexDefinition keyIndexDefinition = new OSimpleKeyIndexDefinition(OType.INTEGER);
Assert.assertEquals(keyIndexDefinition.getParamCount(), 1);
}
@Test
public void testGetKeyTypes() {
Assert.assertEquals(simpleKeyIndexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.STRING });
}
@Test
public void testGetKeyTypesOneType() {
final OSimpleKeyIndexDefinition keyIndexDefinition = new OSimpleKeyIndexDefinition(OType.BOOLEAN);
Assert.assertEquals(keyIndexDefinition.getTypes(), new OType[] { OType.BOOLEAN });
}
@Test
public void testReload() {
final ODatabaseDocumentTx databaseDocumentTx = new ODatabaseDocumentTx("memory:osimplekeyindexdefinitiontest");
databaseDocumentTx.create();
final ODocument storeDocument = simpleKeyIndexDefinition.toStream();
storeDocument.save();
final ODocument loadDocument = databaseDocumentTx.load(storeDocument.getIdentity());
final OSimpleKeyIndexDefinition loadedKeyIndexDefinition = new OSimpleKeyIndexDefinition();
loadedKeyIndexDefinition.fromStream(loadDocument);
databaseDocumentTx.drop();
Assert.assertEquals(loadedKeyIndexDefinition, simpleKeyIndexDefinition);
}
@Test(expectedExceptions = OIndexException.class)
public void testGetDocumentValueToIndex() {
simpleKeyIndexDefinition.getDocumentValueToIndex(new ODocument());
}
}<|fim▁end|> | }
|
<|file_name|>timer.rs<|end_file_name|><|fim▁begin|>use libc::{uint32_t, c_void};
use std::mem;
use sys::timer as ll;
pub fn get_ticks() -> u32 {
unsafe { ll::SDL_GetTicks() }
}
pub fn get_performance_counter() -> u64 {
unsafe { ll::SDL_GetPerformanceCounter() }
}
pub fn get_performance_frequency() -> u64 {
unsafe { ll::SDL_GetPerformanceFrequency() }
}
pub fn delay(ms: u32) {
unsafe { ll::SDL_Delay(ms) }
}
pub type TimerCallback<'a> = Box<FnMut() -> u32+'a+Sync>;
#[unstable = "Unstable because of move to unboxed closures and `box` syntax"]
pub struct Timer<'a> {
callback: Option<Box<TimerCallback<'a>>>,
_delay: u32,
raw: ll::SDL_TimerID,
}
impl<'a> Timer<'a> {
/// Constructs a new timer using the boxed closure `callback`.
/// The timer is started immediately, it will be cancelled either:
/// * when the timer is dropped
/// * or when the callback returns a non-positive continuation interval
pub fn new(delay: u32, callback: TimerCallback<'a>) -> Timer<'a> {
unsafe {
let callback = Box::new(callback);
let timer_id = ll::SDL_AddTimer(delay,
Some(c_timer_callback),
mem::transmute_copy(&callback));
Timer {
callback: Some(callback),
_delay: delay,
raw: timer_id,
}
}
}
/// Returns the closure as a trait-object and cancels the timer
/// by consuming it...
pub fn into_inner(mut self) -> TimerCallback<'a> {
*self.callback.take().unwrap()
}
}
#[unsafe_destructor]
impl<'a> Drop for Timer<'a> {
fn drop(&mut self) {
let ret = unsafe { ll::SDL_RemoveTimer(self.raw) };
if ret != 1 {
println!("error dropping timer {}, maybe already removed.", self.raw);
}
}
}
extern "C" fn c_timer_callback(_interval: u32, param: *const c_void) -> uint32_t {
unsafe {
let f: *const Box<Fn() -> u32> = mem::transmute(param);
(*f)() as uint32_t
}
}
#[cfg(test)] use std::sync::{StaticMutex, MUTEX_INIT};
#[cfg(test)] static TIMER_INIT_LOCK: StaticMutex = MUTEX_INIT;
<|fim▁hole|> let _running = TIMER_INIT_LOCK.lock().unwrap();
::sdl::init(::sdl::INIT_TIMER).unwrap();
let local_num = Arc::new(Mutex::new(0));
let timer_num = local_num.clone();
let _timer = Timer::new(20, Box::new(|| {
        // increment the counter one tick at a time until it reaches 9
        // tick again in 20ms after each increment
//
let mut num = timer_num.lock().unwrap();
if *num < 9 {
*num += 1;
20
} else { 0 }
}));
    delay(250); // leave time for the ~10 ticks (about 200ms) plus a small buffer
let num = local_num.lock().unwrap(); // read the number back
    assert_eq!(*num, 9); // it should have incremented all the way up to 9
}
#[test]
fn test_timer_runs_at_least_once() {
use std::sync::{Arc, Mutex};
let _running = TIMER_INIT_LOCK.lock().unwrap();
::sdl::init(::sdl::INIT_TIMER).unwrap();
let local_flag = Arc::new(Mutex::new(false));
let timer_flag = local_flag.clone();
let _timer = Timer::new(20, Box::new(|| {
let mut flag = timer_flag.lock().unwrap();
*flag = true; 0
}));
delay(50);
let flag = local_flag.lock().unwrap();
assert_eq!(*flag, true);
}
#[test]
fn test_timer_can_be_recreated() {
use std::sync::{Arc, Mutex};
let _running = TIMER_INIT_LOCK.lock().unwrap();
::sdl::init(::sdl::INIT_TIMER).unwrap();
let local_num = Arc::new(Mutex::new(0));
let timer_num = local_num.clone();
// run the timer once and reclaim its closure
let timer_1 = Timer::new(20, Box::new(move|| {
let mut num = timer_num.lock().unwrap();
*num += 1; // increment the number
0 // do not run timer again
}));
// reclaim closure after timer runs
delay(50);
let closure = timer_1.into_inner();
// create a second timer and increment again
let _timer_2 = Timer::new(20, closure);
delay(50);
// check that timer was incremented twice
let num = local_num.lock().unwrap();
assert_eq!(*num, 2);
}<|fim▁end|> | #[test]
fn test_timer_runs_multiple_times() {
use std::sync::{Arc, Mutex}; |
<|file_name|>match_view.py<|end_file_name|><|fim▁begin|>import html
import inflect
import titlecase
from flask import url_for
from shared.pd_exception import DoesNotExistException
from .. import APP, importing
from ..data import match
from ..view import View
@APP.route('/match/<int:match_id>/')
def show_match(match_id: int) -> str:
view = Match(match.get_match(match_id))
return view.page()
# pylint: disable=no-self-use,too-many-instance-attributes
class Match(View):
def __init__(self, viewed_match: match.Match) -> None:
super().__init__()
if not viewed_match:<|fim▁hole|> self.id = viewed_match.id
self.comment = viewed_match.comment
self.format_name = viewed_match.format_name()
self.players_string = ' vs '.join([p.name for p in viewed_match.players])
self.players_string_safe = ' vs '.join([player_link(p.name) for p in viewed_match.players])
self.module_string = ', '.join([m.name for m in viewed_match.modules])
if not viewed_match.games:
self.no_games = True
return
self.game_one = viewed_match.games[0]
self.has_game_two = False
self.has_game_three = False
if len(viewed_match.games) > 1:
self.has_game_two = True
self.game_two = viewed_match.games[1]
if len(viewed_match.games) > 2:
self.has_game_three = True
self.game_three = viewed_match.games[2]
if viewed_match.has_unexpected_third_game is None:
importing.reimport(viewed_match)
self.has_unexpected_third_game = viewed_match.has_unexpected_third_game
if viewed_match.is_tournament is None:
importing.reimport(viewed_match)
self.is_tournament = viewed_match.is_tournament
def og_title(self) -> str:
return self.players_string
def og_url(self) -> str:
return url_for('show_match', match_id=self.id, _external=True)
def og_description(self) -> str:
p = inflect.engine()
fmt = titlecase.titlecase(p.a(self.format_name))
description = '{fmt} match.'.format(fmt=fmt)
return description
def player_link(name: str) -> str:
url = url_for('show_person', person=name)
return '<a href="{url}">{name}</a>'.format(url=html.escape(url), name=html.escape(name))<|fim▁end|> | raise DoesNotExistException()
self.match = viewed_match |
<|file_name|>issue-2735-3.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
// This test should behave exactly like issue-2735-2
struct defer<'a> {
b: &'a Cell<bool>,
}
impl<'a> Drop for defer<'a> {
fn drop(&mut self) {
self.b.set(true);
}
}<|fim▁hole|> defer {
b: b
}
}
pub fn main() {
let dtor_ran = &Cell::new(false);
defer(dtor_ran);
assert!(dtor_ran.get());
}<|fim▁end|> |
fn defer(b: &Cell<bool>) -> defer { |
<|file_name|>match.rs<|end_file_name|><|fim▁begin|>// rustfmt-normalize_comments: true
// Match expressions.
fn foo() {
// A match expression.
match x {
// Some comment.
a => foo(),
b if 0 < 42 => foo(),
c => { // Another comment.
// Comment.
an_expression;
foo()
}
Foo(ref bar) =>
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
Pattern1 | Pattern2 | Pattern3 => false,
Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn |
Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => {
blah
}
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn => meh,
Patternnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnn if looooooooooooooooooong_guard => meh,
Patternnnnnnnnnnnnnnnnnnnnnnnnn |
Patternnnnnnnnnnnnnnnnnnnnnnnnn if looooooooooooooooooooooooooooooooooooooooong_guard =>
meh,
// Test that earlier patterns can take the guard space
(aaaa, bbbbb, ccccccc, aaaaa, bbbbbbbb, cccccc, aaaa, bbbbbbbb, cccccc, dddddd) |
Patternnnnnnnnnnnnnnnnnnnnnnnnn if loooooooooooooooooooooooooooooooooooooooooong_guard => {}
_ => {}
ast::PathParameters::AngleBracketedParameters(ref data) if data.lifetimes.len() > 0 ||
data.types.len() > 0 ||
data.bindings.len() > 0 => {}
}
let whatever = match something {
/// DOC COMMENT!
Some(_) => 42,
// Comment on an attribute.
#[an_attribute]
// Comment after an attribute.
None => 0,
#[rustfmt::skip]
Blurb => { }
};
}
// Test that a match on an overflow line is laid out properly.
fn main() {
let sub_span =
match xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
Some(sub_span) => Some(sub_span),
None => sub_span,
};
}
// Test that one-line bodies align.
fn main() {
match r {
Variableeeeeeeeeeeeeeeeee => ( "variable",
vec!("id", "name", "qualname",
"value", "type", "scopeid"),
true,
true),
Enummmmmmmmmmmmmmmmmmmmm => ("enum",
vec!("id","qualname","scopeid","value"),
true,
true),
Variantttttttttttttttttttttttt => ("variant",
vec!("id",
"name",
"qualname",
"type",
"value",
"scopeid"),
true,
true),
};
match x{
y=>{/*Block with comment. Preserve me.*/ }
z=>{stmt();} }
}
fn matches() {
match 1 {
-1 => 10,
1 => 1, // foo
2 => 2,
// bar
3 => 3,
_ => 0 // baz
}
}
fn match_skip() {
let _ = match Some(1) {
#[rustfmt::skip]
Some( n ) => n,
None => 1,
};
}
fn issue339() {
match a {
b => {}
c => { }
d => {
}
e => {
}
// collapsing here is safe
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff => {
}
// collapsing here exceeds line length
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffg => {
}
h => { // comment above block
}
i => {
} // comment below block
j => {
// comment inside block
}
j2 => {
// comments inside...
} // ... and after
// TODO uncomment when vertical whitespace is handled better
// k => {
//
// // comment with WS above
// }
// l => {
// // comment with ws below
//
// }
m => {
} n => { } o =>
{
}
p => { // Don't collapse me
} q => { } r =>
{
}
s => 0, // s comment
// t comment
t => 1,
u => 2,
v => {
} /* funky block
* comment */
// final comment
}
}
fn issue355() {
match mac {
a => println!("a", b),
b => vec!(1, 2),
c => vec!(3; 4),
d => {
println!("a", b)
}
e => {
vec!(1, 2)
}
f => {
vec!(3; 4)
}
h => println!("a", b), // h comment
i => vec!(1, 2), // i comment
j => vec!(3; 4), // j comment
// k comment
k => println!("a", b),
// l comment
l => vec!(1, 2),
// m comment
m => vec!(3; 4),
// Rewrite splits macro
nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => println!("a", b),
// Rewrite splits macro
oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo => vec!(1, 2),
// Macro support fails to recognise this macro as splittable
// We push the whole expr to a new line, TODO split this macro as well
pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp => vec!(3; 4),
// q, r and s: Rewrite splits match arm
qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq => println!("a", b),
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr => vec!(1, 2),
ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss => vec!(3; 4),
// Funky bracketing styles
t => println!{"a", b},
u => vec!{1, 2},
v => vec!{3; 4},
w => println!["a", b],
x => vec![1, 2],
y =>vec![3; 4],
// Brackets with comments
tc => println!{"a", b}, // comment
uc => vec!{1, 2}, // comment
vc =>vec!{3; 4}, // comment
wc =>println!["a", b], // comment
xc => vec![1,2], // comment
yc => vec![3; 4], // comment
yd =>
looooooooooooooooooooooooooooooooooooooooooooooooooooooooong_func(aaaaaaaaaa,
bbbbbbbbbb,
cccccccccc,
dddddddddd),
}
}
fn issue280() {
{
match x {
CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch ==
'\n',
ast::ItemConst(ref typ, ref expr) => self.process_static_or_const_item(item,
&typ,
&expr),
}
}
}
fn issue383() {
match resolution.last_private {LastImport{..} => false, _ => true};
}
fn issue507() {
match 1 {
1 => unsafe { std::intrinsics::abort() },<|fim▁hole|> }
}
fn issue508() {
match s.type_id() {
Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLCanvasElement))) => true,
Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLObjectElement))) => s.has_object_data(),
Some(NodeTypeId::Element(_)) => false,
}
}
fn issue496() {{{{
match def {
def::DefConst(def_id) | def::DefAssociatedConst(def_id) =>
match const_eval::lookup_const_by_id(cx.tcx, def_id, Some(self.pat.id)) {
Some(const_expr) => { x }}}}}}}
fn issue494() {
{
match stmt.node {
hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) =>
result.push(
StmtRef::Mirror(
Box::new(Stmt { span: stmt.span,
kind: StmtKind::Expr {
scope: cx.tcx.region_maps.node_extent(id),
expr: expr.to_ref() } }))),
}
}
}
fn issue386() {
match foo {
BiEq | BiLt | BiLe | BiNe | BiGt | BiGe =>
true,
BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem |
BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr =>
false,
}
}
fn guards() {
match foo {
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
if fooooooooooooooooooooo &&
(bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb || cccccccccccccccccccccccccccccccccccccccc) => {}
}
}
fn issue1371() {
Some(match type_ {
sfEvtClosed => Closed,
sfEvtResized => {
let e = unsafe { *event.size.as_ref() };
Resized {
width: e.width,
height: e.height,
}
}
sfEvtLostFocus => LostFocus,
sfEvtGainedFocus => GainedFocus,
sfEvtTextEntered => {
TextEntered {
unicode: unsafe {
::std::char::from_u32((*event.text.as_ref()).unicode)
.expect("Invalid unicode encountered on TextEntered event")
},
}
}
sfEvtKeyPressed => {
let e = unsafe { event.key.as_ref() };
KeyPressed {
code: unsafe { ::std::mem::transmute(e.code) },
alt: e.alt.to_bool(),
ctrl: e.control.to_bool(),
shift: e.shift.to_bool(),
system: e.system.to_bool(),
}
}
sfEvtKeyReleased => {
let e = unsafe { event.key.as_ref() };
KeyReleased {
code: unsafe { ::std::mem::transmute(e.code) },
alt: e.alt.to_bool(),
ctrl: e.control.to_bool(),
shift: e.shift.to_bool(),
system: e.system.to_bool(),
}
}
})
}
fn issue1395() {
let bar = Some(true);
let foo = Some(true);
let mut x = false;
bar.and_then(|_| {
match foo {
None => None,
Some(b) => {
x = true;
Some(b)
}
}
});
}
fn issue1456() {
Ok(Recording {
artists: match reader.evaluate(".//mb:recording/mb:artist-credit/mb:name-credit")? {
Nodeset(nodeset) => {
let res: Result<Vec<ArtistRef>, ReadError> = nodeset
.iter()
.map(|node| {
XPathNodeReader::new(node, &context).and_then(|r| ArtistRef::from_xml(&r))
})
.collect();
res?
}
_ => Vec::new(),
},
})
}
fn issue1460() {
let _ = match foo {
REORDER_BUFFER_CHANGE_INTERNAL_SPEC_INSERT => "internal_spec_insert_internal_spec_insert_internal_spec_insert",
_ => "reorder_something",
};
}
fn issue525() {
foobar(f, "{}", match *self {
TaskState::Started => "started",
TaskState::Success => "success",
TaskState::Failed => "failed",
});
}
// #1838, #1839
fn match_with_near_max_width() {
let (this_line_uses_99_characters_and_is_formatted_properly, x012345) = match some_expression {
_ => unimplemented!(),
};
let (should_be_formatted_like_the_line_above_using_100_characters, x0) = match some_expression {
_ => unimplemented!(),
};
let (should_put_the_brace_on_the_next_line_using_101_characters, x0000) = match some_expression
{
_ => unimplemented!(),
};
match m {
Variant::Tag | Variant::Tag2 | Variant::Tag3 | Variant::Tag4 | Variant::Tag5 | Variant::Tag6 =>
{}
}
}
fn match_with_trailing_spaces() {
match x {
Some(..) => 0,
None => 1,
}
}
fn issue_2099() {
let a = match x {
};
let b = match x {
};
match x {}
}
// #2021
impl<'tcx> Const<'tcx> {
pub fn from_constval<'a>() -> Const<'tcx> {
let val = match *cv {
ConstVal::Variant(_) | ConstVal::Aggregate(..) | ConstVal::Unevaluated(..) => bug!("MIR must not use `{:?}` (aggregates are expanded to MIR rvalues)", cv),
};
}
}
// #2151
fn issue_2151() {
match either {
x => {
}y => ()
}
}
// #2152
fn issue_2152() {
match m {
"aaaaaaaaaaaaa" | "bbbbbbbbbbbbb" | "cccccccccccccccccccccccccccccccccccccccccccc" if true => {}
"bind" | "writev" | "readv" | "sendmsg" | "recvmsg" if android && (aarch64 || x86_64) => true,
}
}
// #2376
// Preserve block around expressions with condition.
fn issue_2376() {
let mut x = None;
match x {
Some(0) => {
for i in 1..11 {
x = Some(i);
}
}
Some(ref mut y) => {
while *y < 10 {
*y += 1;
}
}
None => {
while let None = x {
x = Some(10);
}
}
}
}
// #2621
// Strip leading `|` in match arm patterns
fn issue_2621() {
let x = Foo::A;
match x {
Foo::A => println!("No vert single condition"),
Foo::B | Foo::C => println!("Center vert two conditions"),
| Foo::D => println!("Preceding vert single condition"),
| Foo::E
| Foo::F => println!("Preceding vert over two lines"),
Foo::G |
Foo::H => println!("Trailing vert over two lines"),
// Comment on its own line
| Foo::I => println!("With comment"), // Comment after line
}
}
fn issue_2377() {
match tok {
Tok::Not
| Tok::BNot
| Tok::Plus
| Tok::Minus
| Tok::PlusPlus
| Tok::MinusMinus
| Tok::Void
| Tok::Delete if prec <= 16 => {
// code here...
}
Tok::TypeOf if prec <= 16 => {}
}
}
// #3040
fn issue_3040() {
{
match foo {
DevtoolScriptControlMsg::WantsLiveNotifications(id, to_send) => {
match documents.find_window(id) {
Some(window) => devtools::handle_wants_live_notifications(window.upcast(), to_send),
None => return warn!("Message sent to closed pipeline {}.", id),
}
}
}
}
}
// #3030
fn issue_3030() {
match input.trim().parse::<f64>() {
Ok(val)
if !(
// A valid number is the same as what rust considers to be valid,
// except for +1., NaN, and Infinity.
val.is_infinite() || val
.is_nan() || input.ends_with(".") || input.starts_with("+")
)
=> {
}
}
}
fn issue_3005() {
match *token {
Token::Dimension {
value, ref unit, ..
} if num_context.is_ok(context.parsing_mode, value) =>
{
return NoCalcLength::parse_dimension(context, value, unit)
.map(LengthOrPercentage::Length)
.map_err(|()| location.new_unexpected_token_error(token.clone()));
},
}
}
// #3774
fn issue_3774() {
{
{
{
match foo {
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachab(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreacha!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachabl(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachae!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable!(),
Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => rrunreachable!(),
}
}
}
}
}<|fim▁end|> | _ => (), |
<|file_name|>test_pdf_reader.py<|end_file_name|><|fim▁begin|># This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
import pytest
from io import BytesIO
from rinoh.backend.pdf import cos
from rinoh.backend.pdf.reader import PDFObjectReader
def test_read_boolean():
def test_boolean(bytes_boolean, boolean):
reader = PDFObjectReader(BytesIO(bytes_boolean))
result = reader.next_item()
assert isinstance(result, cos.Boolean) and bool(result) == boolean
test_boolean(b'true', True)
test_boolean(b'false', False)
def test_read_integer():
def test_integer(bytes_integer, integer):
reader = PDFObjectReader(BytesIO(bytes_integer))
result = reader.next_item()
assert isinstance(result, cos.Integer) and result == integer
test_integer(b'123', 123)
test_integer(b'43445', 43445)
test_integer(b'+17', 17)
test_integer(b'-98', -98)
test_integer(b'0', 0)
def test_read_real():<|fim▁hole|>
test_real(b'34.5', 34.5)
test_real(b'-3.62', -3.62)
test_real(b'+123.6', 123.6)
test_real(b'4.', 4.0)
test_real(b'-.002', -.002)
test_real(b'0.0', 0.0)
def test_read_name():
def test_name(bytes_name, unicode_name):
reader = PDFObjectReader(BytesIO(bytes_name))
result = reader.next_item()
assert isinstance(result, cos.Name) and str(result) == unicode_name
test_name(b'/Adobe#20Green', 'Adobe Green')
test_name(b'/PANTONE#205757#20CV', 'PANTONE 5757 CV')
test_name(b'/paired#28#29parentheses', 'paired()parentheses')
test_name(b'/The_Key_of_F#23_Minor', 'The_Key_of_F#_Minor')
test_name(b'/A#42', 'AB')
def test_read_dictionary():
input = b"""
<< /Type /Example
/Subtype /DictionaryExample
/Version 0.01
/IntegerItem 12
/StringItem (a string)
/Subdictionary << /Item1 0.4
/Item2 true
/LastItem (not!)
/VeryLastItem (OK)
>>
>>"""
reader = PDFObjectReader(BytesIO(input))
result = reader.next_item()
expected = cos.Dictionary([('Type', cos.Name('Example')),
('Subtype', cos.Name('DictionaryExample')),
('Version', cos.Real(0.01)),
('IntegerItem', cos.Integer(12)),
('StringItem', cos.String('a string')),
('Subdictionary', cos.Dictionary(
[('Item1', cos.Real(0.4)),
('Item2', cos.Boolean(True)),
('LastItem', cos.String('not!')),
('VeryLastItem', cos.String('OK'))]))])
assert isinstance(result, cos.Dictionary)
assert dict(result) == dict(expected)<|fim▁end|> | def test_real(bytes_real, real):
reader = PDFObjectReader(BytesIO(bytes_real))
result = reader.next_item()
assert isinstance(result, cos.Real) and result == real |
<|file_name|>release.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""SIP Flask Master Device package."""
import logging
__subsystem__ = 'TangoControl'
__service_name__ = 'FlaskMaster'
__version_info__ = (1, 3, 0)
__version__ = '.'.join(map(str, __version_info__))
__service_id__ = ':'.join(map(str, (__subsystem__,
__service_name__,
__version__)))
LOG = logging.getLogger('sip.tc.flask_master')
__all__ = [
'__subsystem__',
'__service_name__',
'__version__',<|fim▁hole|>]<|fim▁end|> | '__service_id__',
'LOG' |
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>/**
* @summary Returns deep equality between objects
* {@link https://gist.github.com/egardner/efd34f270cc33db67c0246e837689cb9}
* @param obj1
* @param obj2
* @return {boolean}
* @private
*/
export function deepEqual(obj1, obj2) {
if (obj1 === obj2) {
return true;
}
else if (isObject(obj1) && isObject(obj2)) {
if (Object.keys(obj1).length !== Object.keys(obj2).length) {
return false;
}
for (const prop of Object.keys(obj1)) {
if (!deepEqual(obj1[prop], obj2[prop])) {
return false;<|fim▁hole|> else {
return false;
}
}
function isObject(obj) {
return typeof obj === 'object' && obj != null;
}<|fim▁end|> | }
}
return true;
} |
<|file_name|>static_server.js<|end_file_name|><|fim▁begin|>var http = require("http"),
url = require("url"),
path = require("path"),
fs = require("fs"),
DS = "/";
var settings = {
port: 8080,
indexFile: "index.html",
folder: {
serverside: "serverside",
clientside: "clientside",
static: "static_server",
admin: "admin"
}
};
var paths = {};
paths.origin = process.cwd();
paths.base = paths.origin.slice(0, -1 * settings.folder.serverside.length - 1);
paths.clientside = paths.base + DS + settings.folder.clientside;
paths.serverside = paths.base + DS + settings.folder.serverside;
paths.static = paths.base + DS + settings.folder.serverside + DS + settings.folder.static;
clientside_exists = false;
fs.exists(paths.clientside, function(exists) {
clientside_exists = exists;
});
<|fim▁hole|>
var DS_admin = DS + settings.folder.admin + DS;
if (uri.slice(0, DS_admin.length) === DS_admin) {
filename = path.join(paths.static, uri);
} else if (clientside_exists) {
filename = path.join(paths.clientside, uri);
} else {
filename = path.join(paths.origin, uri);
}
fs.exists(filename, function(exists) {
if (!exists) {
response.writeHead(404, {"Content-Type": "text/plain"});
response.write("404 Not Found\n");
response.end();
return;
}
if (fs.statSync(filename).isDirectory()) {
var indexFound = false;
var rawlist = fs.readdirSync(filename);
var filelist = [];
rawlist.forEach(function(element) {
if (!fs.statSync(path.join(filename, element)).isDirectory() && !indexFound) {
if (element === settings.indexFile) {
indexFound = true;
} else {
filelist.push(element);
}
}
});
if (filelist.length > 0 && !indexFound) {
response.writeHead(200, {"Content-Type": "text/plain"});
response.write(JSON.stringify(filelist));
response.end();
return;
}
filename = path.join(filename, settings.indexFile);
}
fs.readFile(filename, "binary", function(err, file) {
if (err) {
response.writeHead(500, {"Content-Type": "text/plain"});
response.write(err + "\n");
response.end();
return;
}
response.writeHead(200);
response.write(file, "binary");
response.end();
});
});
});
static_server.listen(parseInt(settings.port, 10));
module.exports = static_server;<|fim▁end|> | var static_server = http.createServer(function(request, response) {
var uri = url.parse(request.url).pathname,
filename; |
<|file_name|>advanced_settings_menu.cpp<|end_file_name|><|fim▁begin|>/*****************************
* advance_settings_menu.cpp *
*****************************/
/****************************************************************************
* Written By Mark Pelletier 2017 - Aleph Objects, Inc. *
* Written By Marcio Teixeira 2018 - Aleph Objects, Inc. *
* *
* This program is free software: you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation, either version 3 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* To view a copy of the GNU General Public License, go to the following *
* location: <https://www.gnu.org/licenses/>. *
****************************************************************************/
#include "../config.h"
#if ENABLED(TOUCH_UI_FTDI_EVE) && !defined(TOUCH_UI_LULZBOT_BIO)
#include "screens.h"
using namespace FTDI;
using namespace ExtUI;
using namespace Theme;
void AdvancedSettingsMenu::onRedraw(draw_mode_t what) {
if (what & BACKGROUND) {
CommandProcessor cmd;
cmd.cmd(CLEAR_COLOR_RGB(Theme::bg_color))
.cmd(CLEAR(true,true,true));
}
#ifdef TOUCH_UI_PORTRAIT
#if EITHER(HAS_CASE_LIGHT, SENSORLESS_HOMING)
#define GRID_ROWS 9
#else
#define GRID_ROWS 8
#endif
#define GRID_COLS 2
#define RESTORE_DEFAULTS_POS BTN_POS(1,1), BTN_SIZE(2,1)
#define DISPLAY_POS BTN_POS(1,2), BTN_SIZE(1,1)
#define INTERFACE_POS BTN_POS(2,2), BTN_SIZE(1,1)
#define ZPROBE_ZOFFSET_POS BTN_POS(1,3), BTN_SIZE(1,1)
#define STEPS_PER_MM_POS BTN_POS(2,3), BTN_SIZE(1,1)
#define FILAMENT_POS BTN_POS(1,4), BTN_SIZE(1,1)
#define VELOCITY_POS BTN_POS(2,4), BTN_SIZE(1,1)
#define TMC_CURRENT_POS BTN_POS(1,5), BTN_SIZE(1,1)
#define ACCELERATION_POS BTN_POS(2,5), BTN_SIZE(1,1)
#define ENDSTOPS_POS BTN_POS(1,6), BTN_SIZE(1,1)
#define JERK_POS BTN_POS(2,6), BTN_SIZE(1,1)
#define OFFSETS_POS BTN_POS(1,7), BTN_SIZE(1,1)
#define BACKLASH_POS BTN_POS(2,7), BTN_SIZE(1,1)
#define CASE_LIGHT_POS BTN_POS(1,8), BTN_SIZE(1,1)
#define TMC_HOMING_THRS_POS BTN_POS(2,8), BTN_SIZE(1,1)
#if EITHER(HAS_CASE_LIGHT, SENSORLESS_HOMING)
#define BACK_POS BTN_POS(1,9), BTN_SIZE(2,1)
#else
#define BACK_POS BTN_POS(1,8), BTN_SIZE(2,1)
#endif
#else
#define GRID_ROWS 6
#define GRID_COLS 3
#define ZPROBE_ZOFFSET_POS BTN_POS(1,1), BTN_SIZE(1,1)
#define CASE_LIGHT_POS BTN_POS(1,4), BTN_SIZE(1,1)
#define STEPS_PER_MM_POS BTN_POS(2,1), BTN_SIZE(1,1)
#define TMC_CURRENT_POS BTN_POS(3,1), BTN_SIZE(1,1)
#define TMC_HOMING_THRS_POS BTN_POS(3,2), BTN_SIZE(1,1)
#define BACKLASH_POS BTN_POS(3,3), BTN_SIZE(1,1)
#define FILAMENT_POS BTN_POS(1,3), BTN_SIZE(1,1)
#define ENDSTOPS_POS BTN_POS(3,4), BTN_SIZE(1,1)
#define DISPLAY_POS BTN_POS(3,5), BTN_SIZE(1,1)<|fim▁hole|> #define ACCELERATION_POS BTN_POS(2,3), BTN_SIZE(1,1)
#define JERK_POS BTN_POS(2,4), BTN_SIZE(1,1)
#define OFFSETS_POS BTN_POS(1,2), BTN_SIZE(1,1)
#define BACK_POS BTN_POS(3,6), BTN_SIZE(1,1)
#endif
if (what & FOREGROUND) {
CommandProcessor cmd;
cmd.colors(normal_btn)
.font(Theme::font_medium)
.enabled(ENABLED(HAS_BED_PROBE))
.tag(2) .button( ZPROBE_ZOFFSET_POS, GET_TEXT_F(MSG_ZPROBE_ZOFFSET))
.enabled(ENABLED(HAS_CASE_LIGHT))
.tag(16).button( CASE_LIGHT_POS, GET_TEXT_F(MSG_CASE_LIGHT))
.tag(3) .button( STEPS_PER_MM_POS, GET_TEXT_F(MSG_STEPS_PER_MM))
.enabled(ENABLED(HAS_TRINAMIC_CONFIG))
.tag(13).button( TMC_CURRENT_POS, GET_TEXT_F(MSG_TMC_CURRENT))
.enabled(ENABLED(SENSORLESS_HOMING))
.tag(14).button( TMC_HOMING_THRS_POS, GET_TEXT_F(MSG_TMC_HOMING_THRS))
.enabled(EITHER(HAS_MULTI_HOTEND, BLTOUCH))
.tag(4) .button( OFFSETS_POS, GET_TEXT_F(TERN(HAS_MULTI_HOTEND, MSG_OFFSETS_MENU, MSG_RESET_BLTOUCH)))
.enabled(EITHER(LIN_ADVANCE, FILAMENT_RUNOUT_SENSOR))
.tag(11).button( FILAMENT_POS, GET_TEXT_F(MSG_FILAMENT))
.tag(12).button( ENDSTOPS_POS, GET_TEXT_F(MSG_LCD_ENDSTOPS))
.tag(15).button( DISPLAY_POS, GET_TEXT_F(MSG_DISPLAY_MENU))
.tag(9) .button( INTERFACE_POS, GET_TEXT_F(MSG_INTERFACE))
.tag(10).button( RESTORE_DEFAULTS_POS, GET_TEXT_F(MSG_RESTORE_DEFAULTS))
.tag(5) .button( VELOCITY_POS, GET_TEXT_F(MSG_VELOCITY))
.tag(6) .button( ACCELERATION_POS, GET_TEXT_F(MSG_ACCELERATION))
.tag(7) .button( JERK_POS, GET_TEXT_F(TERN(HAS_JUNCTION_DEVIATION, MSG_JUNCTION_DEVIATION, MSG_JERK)))
.enabled(ENABLED(BACKLASH_GCODE))
.tag(8).button( BACKLASH_POS, GET_TEXT_F(MSG_BACKLASH))
.colors(action_btn)
.tag(1).button( BACK_POS, GET_TEXT_F(MSG_BACK));
}
}
bool AdvancedSettingsMenu::onTouchEnd(uint8_t tag) {
switch (tag) {
case 1: SaveSettingsDialogBox::promptToSaveSettings(); break;
#if HAS_BED_PROBE
case 2: GOTO_SCREEN(ZOffsetScreen); break;
#endif
case 3: GOTO_SCREEN(StepsScreen); break;
case 4:
#if HAS_MULTI_HOTEND
GOTO_SCREEN(NozzleOffsetScreen);
#elif ENABLED(BLTOUCH)
injectCommands_P(PSTR("M280 P0 S60"));
#endif
break;
case 5: GOTO_SCREEN(MaxVelocityScreen); break;
case 6: GOTO_SCREEN(DefaultAccelerationScreen); break;
case 7: GOTO_SCREEN(TERN(HAS_JUNCTION_DEVIATION, JunctionDeviationScreen, JerkScreen)); break;
#if ENABLED(BACKLASH_GCODE)
case 8: GOTO_SCREEN(BacklashCompensationScreen); break;
#endif
case 9: GOTO_SCREEN(InterfaceSettingsScreen); LockScreen::check_passcode(); break;
case 10: GOTO_SCREEN(RestoreFailsafeDialogBox); LockScreen::check_passcode(); break;
#if EITHER(LIN_ADVANCE, FILAMENT_RUNOUT_SENSOR)
case 11: GOTO_SCREEN(FilamentMenu); break;
#endif
case 12: GOTO_SCREEN(EndstopStatesScreen); break;
#if HAS_TRINAMIC_CONFIG
case 13: GOTO_SCREEN(StepperCurrentScreen); break;
#endif
#if ENABLED(SENSORLESS_HOMING)
case 14: GOTO_SCREEN(StepperBumpSensitivityScreen); break;
#endif
case 15: GOTO_SCREEN(DisplayTuningScreen); break;
#if HAS_CASE_LIGHT
case 16: GOTO_SCREEN(CaseLightScreen); break;
#endif
default: return false;
}
return true;
}
#endif // TOUCH_UI_FTDI_EVE<|fim▁end|> | #define INTERFACE_POS BTN_POS(1,5), BTN_SIZE(2,1)
#define RESTORE_DEFAULTS_POS BTN_POS(1,6), BTN_SIZE(2,1)
#define VELOCITY_POS BTN_POS(2,2), BTN_SIZE(1,1) |
<|file_name|>test_summary2_unifiles.py<|end_file_name|><|fim▁begin|>import unittest, time, sys, random, math, getpass
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_import as h2i, h2o_util, h2o_browse as h2b, h2o_print as h2p
import h2o_summ
DO_TRY_SCIPY = False
if getpass.getuser()=='kevin' or getpass.getuser()=='jenkins':
DO_TRY_SCIPY = True
DO_MEDIAN = True
# FIX!. we seem to lose accuracy with fewer bins -> more iterations. Maybe we're leaking or ??
# this test failed (if run as user kevin) with 10 bins
MAX_QBINS = 1000 # pass
MAX_QBINS = 1000 # pass
# this one doesn't fail with 10 bins
# this failed. interestingly got same number as 1000 bin summary2 (the 7.433..
# on runifA.csv (2nd col?)
# MAX_QBINS = 20
# Exception: h2o quantile multipass is not approx. same as sort algo. h2o_util.assertApproxEqual failed comparing 7.43337413296 and 8.26268245. {'tol': 2e-07}.
MAX_QBINS = 27
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED
SEED = h2o.setup_random_seed()
h2o.init()
@classmethod
def tearDownClass(cls):
# h2o.sleep(3600)
h2o.tear_down_cloud()
def test_summary2_unifiles(self):
SYNDATASETS_DIR = h2o.make_syn_dir()
# new with 1000 bins. copy expected from R
tryList = [
('cars.csv', 'c.hex', [
(None, None,None,None,None,None),
('economy (mpg)', None,None,None,None,None),
('cylinders', None,None,None,None,None),
],
),
('runifA.csv', 'A.hex', [
(None, 1.00, 25.00, 50.00, 75.00, 100.0),
('x', -99.9, -44.7, 8.26, 58.00, 91.7),
],
),
# colname, (min, 25th, 50th, 75th, max)
('runif.csv', 'x.hex', [
(None, 1.00, 5000.0, 10000.0, 15000.0, 20000.00),
('D', -5000.00, -3735.0, -2443, -1187.0, 99.8),
('E', -100000.0, -49208.0, 1783.8, 50621.9, 100000.0),
('F', -1.00, -0.4886, 0.00868, 0.5048, 1.00),
],
),
('runifB.csv', 'B.hex', [<|fim▁hole|> ),
('runifC.csv', 'C.hex', [
(None, 1.00, 25002.00, 50002.00, 75002.00, 100000.00),
('x', -100.00, -50.45, -1.135, 49.28, 100.00),
],
),
]
timeoutSecs = 15
trial = 1
n = h2o.nodes[0]
lenNodes = len(h2o.nodes)
timeoutSecs = 60
for (csvFilename, hex_key, expectedCols) in tryList:
csvPathname = csvFilename
csvPathnameFull = h2i.find_folder_and_filename('smalldata', csvPathname, returnFullPath=True)
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname,
schema='put', hex_key=hex_key, timeoutSecs=10, doSummary=False)
print "Parse result['destination_key']:", parseResult['destination_key']
# We should be able to see the parse result?
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'])
print "\n" + csvFilename
numRows = inspect["numRows"]
numCols = inspect["numCols"]
# okay to get more cols than we want
# okay to vary MAX_QBINS because we adjust the expected accuracy
summaryResult = h2o_cmd.runSummary(key=hex_key, max_qbins=MAX_QBINS)
h2o.verboseprint("summaryResult:", h2o.dump_json(summaryResult))
summaries = summaryResult['summaries']
scipyCol = 0
for expected, column in zip(expectedCols, summaries):
colname = column['colname']
if expected[0]:
self.assertEqual(colname, expected[0]), colname, expected[0]
else:
# if the colname is None, skip it (so we don't barf on strings on the h2o quantile page
scipyCol += 1
continue
quantile = 0.5 if DO_MEDIAN else .999
# h2o has problem if a list of columns (or dictionary) is passed to 'column' param
q = h2o.nodes[0].quantiles(source_key=hex_key, column=column['colname'],
quantile=quantile, max_qbins=MAX_QBINS, multiple_pass=2, interpolation_type=7) # for comparing to summary2
qresult = q['result']
qresult_single = q['result_single']
h2p.blue_print("h2o quantiles result:", qresult)
h2p.blue_print("h2o quantiles result_single:", qresult_single)
h2p.blue_print("h2o quantiles iterations:", q['iterations'])
h2p.blue_print("h2o quantiles interpolated:", q['interpolated'])
print h2o.dump_json(q)
# ('', '1.00', '25002.00', '50002.00', '75002.00', '100000.00'),
coltype = column['type']
nacnt = column['nacnt']
stats = column['stats']
stattype= stats['type']
print stattype
# FIX! we should compare mean and sd to expected?
# enums don't have mean or sd?
if stattype!='Enum':
mean = stats['mean']
sd = stats['sd']
zeros = stats['zeros']
mins = stats['mins']
maxs = stats['maxs']
print "colname:", colname, "mean (2 places):", h2o_util.twoDecimals(mean)
print "colname:", colname, "std dev. (2 places):", h2o_util.twoDecimals(sd)
pct = stats['pct']
print "pct:", pct
print ""
# the thresholds h2o used, should match what we expected
expectedPct= [0.01, 0.05, 0.1, 0.25, 0.33, 0.5, 0.66, 0.75, 0.9, 0.95, 0.99]
pctile = stats['pctile']
# figure out the expected max error
# use this for comparing to sklearn/sort
if expected[1] and expected[5]:
expectedRange = expected[5] - expected[1]
# because of floor and ceil effects due we potentially lose 2 bins (worst case)
# the extra bin for the max value, is an extra bin..ignore
expectedBin = expectedRange/(MAX_QBINS-2)
maxErr = 0.5 * expectedBin # should we have some fuzz for fp?
else:
print "Test won't calculate max expected error"
maxErr = 0
# hack..assume just one None is enough to ignore for cars.csv
if expected[1]:
h2o_util.assertApproxEqual(mins[0], expected[1], tol=maxErr, msg='min is not approx. expected')
if expected[2]:
h2o_util.assertApproxEqual(pctile[3], expected[2], tol=maxErr, msg='25th percentile is not approx. expected')
if expected[3]:
h2o_util.assertApproxEqual(pctile[5], expected[3], tol=maxErr, msg='50th percentile (median) is not approx. expected')
if expected[4]:
h2o_util.assertApproxEqual(pctile[7], expected[4], tol=maxErr, msg='75th percentile is not approx. expected')
if expected[5]:
h2o_util.assertApproxEqual(maxs[0], expected[5], tol=maxErr, msg='max is not approx. expected')
hstart = column['hstart']
hstep = column['hstep']
hbrk = column['hbrk']
hcnt = column['hcnt']
for b in hcnt:
# should we be able to check for a uniform distribution in the files?
e = .1 * numRows
# self.assertAlmostEqual(b, .1 * rowCount, delta=.01*rowCount,
# msg="Bins not right. b: %s e: %s" % (b, e))
if stattype!='Enum':
pt = h2o_util.twoDecimals(pctile)
print "colname:", colname, "pctile (2 places):", pt
mx = h2o_util.twoDecimals(maxs)
mn = h2o_util.twoDecimals(mins)
print "colname:", colname, "maxs: (2 places):", mx
print "colname:", colname, "mins: (2 places):", mn
# FIX! we should do an exec and compare using the exec quantile too
actual = mn[0], pt[3], pt[5], pt[7], mx[0]
print "min/25/50/75/max colname:", colname, "(2 places):", actual
print "maxs colname:", colname, "(2 places):", mx
print "mins colname:", colname, "(2 places):", mn
# don't check if colname is empty..means it's a string and scipy doesn't parse right?
# need to ignore the car names
if colname!='' and expected[scipyCol]:
# don't do for enums
# also get the median with a sort (h2o_summ.percentileOnSortedlist()
h2o_summ.quantile_comparisons(
csvPathnameFull,
skipHeader=True,
col=scipyCol,
datatype='float',
quantile=0.5 if DO_MEDIAN else 0.999,
# FIX! ignore for now
h2oSummary2=pctile[5 if DO_MEDIAN else 10],
h2oQuantilesApprox=qresult_single,
h2oQuantilesExact=qresult,
h2oSummary2MaxErr=maxErr,
)
if False and h2o_util.approxEqual(pctile[5], 0.990238116744, tol=0.002, msg='stop here'):
raise Exception("stopping to look")
scipyCol += 1
trial += 1
if __name__ == '__main__':
h2o.unit_main()<|fim▁end|> | (None, 1.00, 2501.00, 5001.00, 7501.00, 10000.00),
            ('x', -100.00, -50.1, 0.974, 51.7, 100.00),
], |
<|file_name|>hidden_submit_button.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2016 Google Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This creates a form submit button that is not displayed, in order that the form may be submitted
* using the enter key.
*/
export function makeHiddenSubmitButton() {
let element = document.createElement('button');
element.type = 'submit';
element.className = 'hidden-submit-button';
// We apply these styles directly to the element rather than by styling the class in order to
// avoid them being overridden accidentally.
element.style.margin = '0';
element.style.border = '0';
element.style.padding = '0';<|fim▁hole|> element.style.width = '0';
element.style.height = '0';
element.style.overflow = 'hidden';
element.tabIndex = -1;
return element;
}<|fim▁end|> | |
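// Illustrative usage (an assumption, not part of the original module): the
// element returned by makeHiddenSubmitButton() is meant to be appended to a
// <form> so that pressing Enter in any of its inputs submits the form without
// a visible submit control. The selector and handler names are hypothetical.
//
//   const form = document.querySelector('form.search') as HTMLFormElement;
//   form.appendChild(makeHiddenSubmitButton());
//   form.addEventListener('submit', event => {
//     event.preventDefault();
//     // run the actual query here
//   });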
<|file_name|>user_prompts.py<|end_file_name|><|fim▁begin|>from tests.support.asserts import assert_error, assert_dialog_handled
from tests.support.fixtures import create_dialog
from tests.support.inline import inline
alert_doc = inline("<script>window.alert()</script>")
def get_window_rect(session):
return session.transport.send(
"GET", "session/{session_id}/window/rect".format(**vars(session)))
def test_handle_prompt_dismiss_and_notify():
"""TODO"""
def test_handle_prompt_accept_and_notify():
"""TODO"""
def test_handle_prompt_ignore():
"""TODO"""
def test_handle_prompt_accept(new_session, add_browser_capabilites):
_, session = new_session({"capabilities": {"alwaysMatch": add_browser_capabilites({"unhandledPromptBehavior": "accept"})}})
session.url = inline("<title>WD doc title</title>")
create_dialog(session)("alert", text="dismiss #1", result_var="dismiss1")
response = get_window_rect(session)
assert response.status == 200
assert_dialog_handled(session, "dismiss #1")
create_dialog(session)("confirm", text="dismiss #2", result_var="dismiss2")
response = get_window_rect(session)
assert response.status == 200
assert_dialog_handled(session, "dismiss #2")
create_dialog(session)("prompt", text="dismiss #3", result_var="dismiss3")
response = get_window_rect(session)
assert response.status == 200
assert_dialog_handled(session, "dismiss #3")
def test_handle_prompt_missing_value(session, create_dialog):
session.url = inline("<title>WD doc title</title>")
create_dialog("alert", text="dismiss #1", result_var="dismiss1")
response = get_window_rect(session)
assert_error(response, "unexpected alert open")
assert_dialog_handled(session, "dismiss #1")
create_dialog("confirm", text="dismiss #2", result_var="dismiss2")<|fim▁hole|> assert_error(response, "unexpected alert open")
assert_dialog_handled(session, "dismiss #2")
create_dialog("prompt", text="dismiss #3", result_var="dismiss3")
response = get_window_rect(session)
assert_error(response, "unexpected alert open")
assert_dialog_handled(session, "dismiss #3")<|fim▁end|> |
response = get_window_rect(session) |
<|file_name|>0020_auto_20201213_2014.py<|end_file_name|><|fim▁begin|># Generated by Django 3.1.4 on 2020-12-13 20:14
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import django_ca.models
class Migration(migrations.Migration):
dependencies = [
('django_ca', '0019_certificate_autogenerated'),
]
operations = [
migrations.CreateModel(
name='AcmeAccount',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(default=django.utils.timezone.now)),
('updated', models.DateTimeField(auto_now=True)),
('pem', models.TextField(unique=True, validators=[django_ca.models.pem_validator], verbose_name='Public key')),
('thumbprint', models.CharField(max_length=64)),
('slug', models.SlugField(default=django_ca.models.acme_slug, unique=True)),
('kid', models.URLField(unique=True, validators=[django.core.validators.URLValidator(schemes=('http', 'https'))], verbose_name='Key ID')),
('status', models.CharField(choices=[('valid', 'Valid'), ('deactivated', 'Deactivated'), ('revoked', 'Revoked')], default='valid', max_length=12)),
('contact', models.TextField(blank=True, help_text='Contact addresses for this account, one per line.')),
('terms_of_service_agreed', models.BooleanField(default=False)),
],
options={
'verbose_name': 'ACME Account',
'verbose_name_plural': 'ACME Accounts',
},
bases=(django_ca.models.DjangoCAModel, ),
),
migrations.AddField(
model_name='certificateauthority',
name='acme_enabled',
field=models.BooleanField(default=False, help_text='Whether it is possible to use ACME for this CA.', verbose_name='Enable ACME'),
),
migrations.AddField(
model_name='certificateauthority',
name='acme_requires_contact',
field=models.BooleanField(default=True, help_text='If this CA requires a contact address during account registration.', verbose_name='Requires contact'),
),
migrations.AddField(
model_name='certificateauthority',
name='caa_identity',
field=models.CharField(blank=True, help_text='CAA identity for this CA (NOTE: Not currently used!).', max_length=32, verbose_name='CAA identity'),
),
migrations.AddField(
model_name='certificateauthority',
name='terms_of_service',
field=models.URLField(blank=True, help_text='URL to Terms of Service for this CA', verbose_name='Terms of Service'),
),
migrations.AddField(
model_name='certificateauthority',
name='website',
field=models.URLField(blank=True, help_text='Website for your CA.'),
),
migrations.CreateModel(
name='AcmeOrder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(default=django_ca.models.acme_slug, unique=True)),
('status', models.CharField(choices=[('invalid', 'Invalid'), ('pending', 'Pending'), ('processing', 'Processing'), ('ready', 'Ready'), ('valid', 'Valid')], default='pending', max_length=10)),
('expires', models.DateTimeField(default=django_ca.models.acme_order_expires)),
('not_before', models.DateTimeField(null=True)),
('not_after', models.DateTimeField(null=True)),
('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='orders', to='django_ca.acmeaccount')),
],
options={
'verbose_name': 'ACME Order',
'verbose_name_plural': 'ACME Orders',
},
bases=(django_ca.models.DjangoCAModel, ),<|fim▁hole|> ),
migrations.CreateModel(
name='AcmeCertificate',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(default=django_ca.models.acme_slug, unique=True)),
('csr', models.TextField(verbose_name='CSR')),
('cert', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='django_ca.certificate')),
('order', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='django_ca.acmeorder')),
],
options={
'verbose_name': 'ACME Certificate',
'verbose_name_plural': 'ACME Certificate',
},
),
migrations.CreateModel(
name='AcmeAuthorization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(default=django_ca.models.acme_slug, unique=True)),
('type', models.CharField(choices=[('dns', 'DNS')], default='dns', max_length=8)),
('value', models.CharField(max_length=255)),
('status', models.CharField(choices=[('pending', 'Pending'), ('valid', 'Valid'), ('invalid', 'Invalid'), ('deactivated', 'Deactivated'), ('expired', 'Expired'), ('revoked', 'Revoked')], default='pending', max_length=12)),
('wildcard', models.BooleanField(default=False)),
('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='authorizations', to='django_ca.acmeorder')),
],
options={
'verbose_name': 'ACME Authorization',
'verbose_name_plural': 'ACME Authorizations',
'unique_together': {('order', 'type', 'value')},
},
),
migrations.AddField(
model_name='acmeaccount',
name='ca',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='django_ca.certificateauthority', verbose_name='Certificate Authority'),
),
migrations.CreateModel(
name='AcmeChallenge',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(default=django_ca.models.acme_slug, unique=True)),
('type', models.CharField(choices=[('http-01', 'HTTP Challenge'), ('dns-01', 'DNS Challenge'), ('tls-alpn-01', 'TLS ALPN Challenge')], max_length=12)),
('status', models.CharField(choices=[('pending', 'Pending'), ('processing', 'Processing'), ('valid', 'Valid'), ('invalid', 'Name')], default='pending', max_length=12)),
('validated', models.DateTimeField(blank=True, null=True)),
('error', models.CharField(blank=True, max_length=64)),
('token', models.CharField(blank=True, default=django_ca.models.acme_token, max_length=64)),
('auth', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='challenges', to='django_ca.acmeauthorization')),
],
options={
'verbose_name': 'ACME Challenge',
'verbose_name_plural': 'ACME Challenges',
'unique_together': {('auth', 'type')},
},
),
migrations.AlterUniqueTogether(
name='acmeaccount',
unique_together={('ca', 'thumbprint')},
),
]<|fim▁end|> | |
<|file_name|>actionsAtoms.ts<|end_file_name|><|fim▁begin|>import { atom } from 'jotai';
import { Resource } from '../models';<|fim▁hole|>export const resourcesToCheckAtom = atom<Array<Resource>>([]);
export const resourcesToDisacknowledgeAtom = atom<Array<Resource>>([]);<|fim▁end|> |
export const selectedResourcesAtom = atom<Array<Resource>>([]);
export const resourcesToAcknowledgeAtom = atom<Array<Resource>>([]);
export const resourcesToSetDowntimeAtom = atom<Array<Resource>>([]); |
<|file_name|>kane.py<|end_file_name|><|fim▁begin|>__all__ = ['Kane']
from sympy import Symbol, zeros, Matrix, diff, solve_linear_system_LU, eye
from sympy.utilities import default_sort_key
from sympy.physics.mechanics.essential import ReferenceFrame, dynamicsymbols
from sympy.physics.mechanics.particle import Particle
from sympy.physics.mechanics.point import Point
from sympy.physics.mechanics.rigidbody import RigidBody
class Kane(object):
"""Kane's method object.
This object is used to do the "book-keeping" as you go through and form
equations of motion in the way Kane presents in:
Kane, T., Levinson, D. Dynamics Theory and Applications. 1985 McGraw-Hill
The attributes are for equations in the form [M] udot = forcing.
Very Important Warning: simp is set to True by default, to the advantage of
smaller, simpler systems. If your system is large, it will lead to
slowdowns; however turning it off might have negative implications in
numerical evaluation. Care needs to be taken to appropriately reduce
expressions generated with simp==False, as they might be too large
themselves. Computing the relationship between independent and dependent
speeds (when dealing with non-holonomic systems) benefits from simp being
set to True (during the .speeds() method); the same is true for
linearization of non-holonomic systems. If numerical evaluations are
    unsuccessful with simp==False, try setting simp to True only for these
methods; this provides some compromise between the two options.
Attributes
==========
auxiliary : Matrix
If applicable, the set of auxiliary Kane's
equations used to solve for non-contributing
forces.
mass_matrix : Matrix
The system's mass matrix
forcing : Matrix
The system's forcing vector
simp : Boolean
Flag determining whether simplification of symbolic matrix
inversion can occur or not
mass_matrix_full : Matrix
The "mass matrix" for the u's and q's
forcing_full : Matrix
The "forcing vector" for the u's and q's
Examples
========
    This is a simple example for a one degree of freedom translational
spring-mass-damper.
In this example, we first need to do the kinematics.
This involves creating generalized speeds and coordinates and their
derivatives.
Then we create a point and set its velocity in a frame::
>>> from sympy import symbols
>>> from sympy.physics.mechanics import dynamicsymbols, ReferenceFrame
>>> from sympy.physics.mechanics import Point, Particle, Kane
>>> q, u = dynamicsymbols('q u')
>>> qd, ud = dynamicsymbols('q u', 1)
>>> m, c, k = symbols('m c k')
>>> N = ReferenceFrame('N')
>>> P = Point('P')
>>> P.set_vel(N, u * N.x)
    Next we need to arrange/store information in the way the Kane object requires.
The kinematic differential equations need to be stored in a dict.
A list of forces/torques must be constructed, where each entry in the list
is a (Point, Vector) or (ReferenceFrame, Vector) tuple, where the Vectors
represent the Force or Torque.
Next a particle needs to be created, and it needs to have a point and mass
assigned to it.
Finally, a list of all bodies and particles needs to be created::
>>> kd = [qd - u]
>>> FL = [(P, (-k * q - c * u) * N.x)]
>>> pa = Particle('pa', P, m)
>>> BL = [pa]
Finally we can generate the equations of motion.
First we create the Kane object and supply an inertial frame.
Next we pass it the generalized speeds.
Then we pass it the kinematic differential equation dict.
Next we form FR* and FR to complete: Fr + Fr* = 0.
We have the equations of motion at this point.
    It makes sense to rearrange them though, so we calculate the mass matrix and
the forcing terms, for E.o.M. in the form: [MM] udot = forcing, where MM is
the mass matrix, udot is a vector of the time derivatives of the
generalized speeds, and forcing is a vector representing "forcing" terms::
>>> KM = Kane(N)
>>> KM.coords([q])
>>> KM.speeds([u])
>>> KM.kindiffeq(kd)
>>> (fr, frstar) = KM.kanes_equations(FL, BL)
>>> MM = KM.mass_matrix
>>> forcing = KM.forcing
>>> rhs = MM.inv() * forcing
>>> rhs
[-(c*u(t) + k*q(t))/m]
>>> KM.linearize()[0]
[0, 1]
[k, c]
Please look at the documentation pages for more information on how to
perform linearization and how to deal with dependent coordinates & speeds,
    and how to deal with bringing non-contributing forces into evidence.
"""
simp = True
def __init__(self, frame):
"""Supply the inertial frame for Kane initialization. """
# Big storage things
self._inertial = frame
self._forcelist = None
self._bodylist = None
self._fr = None
self._frstar = None
self._rhs = None
self._aux_eq = None
# States
self._q = None
self._qdep = []
self._qdot = None
self._u = None
self._udep = []
self._udot = None
self._uaux = None
# Differential Equations Matrices
self._k_d = None
self._f_d = None
self._k_kqdot = None
self._k_ku = None
self._f_k = None
# Constraint Matrices
self._f_h = Matrix([])
self._k_nh = Matrix([])
self._f_nh = Matrix([])
self._k_dnh = Matrix([])
self._f_dnh = Matrix([])
def _find_dynamicsymbols(self, inlist, insyms=[]):
"""Finds all non-supplied dynamicsymbols in the expressions."""
from sympy.core.function import AppliedUndef, Derivative
t = dynamicsymbols._t
temp_f = set().union(*[i.atoms(AppliedUndef) for i in inlist])
temp_d = set().union(*[i.atoms(Derivative) for i in inlist])
set_f = set([a for a in temp_f if a.args == (t,)])
set_d = set([a for a in temp_d if ((a.args[0] in set_f) and all([i == t
for i in a.variables]))])
return list(set.union(set_f, set_d) - set(insyms))
def _find_othersymbols(self, inlist, insyms=[]):
"""Finds all non-dynamic symbols in the expressions."""
return list(reduce(set.union, [i.atoms(Symbol) for i in inlist]) -
set(insyms))
def _mat_inv_mul(self, A, B):
"""Internal Function
Computes A^-1 * B symbolically w/ substitution, where B is not
necessarily a vector, but can be a matrix.
"""
# Note: investigate difficulty in only creating symbols for non-zero
# entries; this could speed things up, perhaps?
r1, c1 = A.shape
r2, c2 = B.shape
temp1 = Matrix(r1, c1, lambda i, j: Symbol('x' + str(j + r1 * i)))
temp2 = Matrix(r2, c2, lambda i, j: Symbol('y' + str(j + r2 * i)))
for i in range(len(temp1)):
if A[i] == 0:
temp1[i] = 0
for i in range(len(temp2)):
if B[i] == 0:
temp2[i] = 0
temp3 = []
for i in range(c2):
temp3.append(temp1.LUsolve(temp2.extract(range(r2), [i])))
temp3 = Matrix([i.T for i in temp3]).T
if Kane.simp == True:
temp3.simplify()
return temp3.subs(dict(zip(temp1, A))).subs(dict(zip(temp2, B)))
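    # Illustrative example (an assumption, not part of the original code): the
    # helper above behaves like A.inv() * B, but the inversion is carried out on
    # placeholder symbols so the LU solve stays cheap, e.g. with symbols a, b:
    #   A = Matrix([[a, 0], [0, b]]); B = Matrix([1, 1])
    #   self._mat_inv_mul(A, B)   # -> Matrix([1/a, 1/b])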
def coords(self, qind, qdep=[], coneqs=[]):
"""Supply all the generalized coordiantes in a list.
If some coordinates are dependent, supply them as part of qdep. Their
dependent nature will only show up in the linearization process though.
Parameters
==========
qind : list
A list of independent generalized coords
qdep : list
List of dependent coordinates
        coneqs : list
List of expressions which are equal to zero; these are the
configuration constraint equations
"""
if not isinstance(qind, (list, tuple)):
raise TypeError('Generalized coords. must be supplied in a list.')
self._q = qind + qdep
self._qdot = [diff(i, dynamicsymbols._t) for i in self._q]
if not isinstance(qdep, (list, tuple)):
raise TypeError('Dependent coordinates and constraints must each be '
'provided in their own list.')
if len(qdep) != len(coneqs):
raise ValueError('There must be an equal number of dependent '
'coordinates and constraints.')
coneqs = Matrix(coneqs)
self._qdep = qdep
self._f_h = coneqs
def speeds(self, uind, udep=[], coneqs=[], diffconeqs=None, u_auxiliary=[]):
"""Supply all the generalized speeds in a list.
If there are motion constraints or auxiliary speeds, they are provided
here as well.
Parameters
==========
uind : list
A list of independent generalized speeds
udep : list
Optional list of dependent speeds
coneqs : list
Optional List of constraint expressions; these are expressions
which are equal to zero which define a speed (motion) constraint.
diffconeqs : list
Optional, calculated automatically otherwise; list of constraint
equations; again equal to zero, but define an acceleration
constraint.
u_auxiliary : list
An optional list of auxiliary speeds used for bringing
non-contributing forces into evidence
"""
if not isinstance(uind, (list, tuple)):
raise TypeError('Generalized speeds must be supplied in a list.')
self._u = uind + udep
self._udot = [diff(i, dynamicsymbols._t) for i in self._u]
self._uaux = u_auxiliary
if not isinstance(udep, (list, tuple)):
raise TypeError('Dependent speeds and constraints must each be '
'provided in their own list.')
if len(udep) != len(coneqs):
raise ValueError('There must be an equal number of dependent '
'speeds and constraints.')
if diffconeqs != None:
if len(udep) != len(diffconeqs):
raise ValueError('There must be an equal number of dependent '
'speeds and constraints.')
if len(udep) != 0:
u = self._u
uzero = dict(zip(u, [0] * len(u)))
coneqs = Matrix(coneqs)
udot = self._udot
udotzero = dict(zip(udot, [0] * len(udot)))
self._udep = udep
self._f_nh = coneqs.subs(uzero)
self._k_nh = (coneqs - self._f_nh).jacobian(u)
# if no differentiated non holonomic constraints were given, calculate
if diffconeqs == None:
self._k_dnh = self._k_nh
self._f_dnh = (self._k_nh.diff(dynamicsymbols._t) * Matrix(u) +
self._f_nh.diff(dynamicsymbols._t))
else:
self._f_dnh = diffconeqs.subs(udotzero)
self._k_dnh = (diffconeqs - self._f_dnh).jacobian(udot)
o = len(u) # number of generalized speeds
m = len(udep) # number of motion constraints
p = o - m # number of independent speeds
# For a reminder, form of non-holonomic constraints is:
# B u + C = 0
B = self._k_nh.extract(range(m), range(o))
C = self._f_nh.extract(range(m), [0])
# We partition B into independent and dependent columns
# Ars is then -Bdep.inv() * Bind, and it relates dependent speeds to
# independent speeds as: udep = Ars uind, neglecting the C term here.
self._depB = B
self._depC = C
mr1 = B.extract(range(m), range(p))
ml1 = B.extract(range(m), range(p, o))
self._Ars = - self._mat_inv_mul(ml1, mr1)
def kindiffdict(self):
"""Returns the qdot's in a dictionary. """
if self._k_kqdot == None:
raise ValueError('Kin. diff. eqs need to be supplied first.')
sub_dict = solve_linear_system_LU(Matrix([self._k_kqdot.T,
-(self._k_ku * Matrix(self._u) + self._f_k).T]).T, self._qdot)
return sub_dict
def kindiffeq(self, kdeqs):
"""Supply all the kinematic differential equations in a list.
They should be in the form [Expr1, Expr2, ...] where Expri is equal to
zero
Parameters
==========
kdeqs : list (of Expr)
The list of kinematic differential equations
"""
if len(self._q) != len(kdeqs):
raise ValueError('There must be an equal number of kinematic '
'differential equations and coordinates.')
uaux = self._uaux
# dictionary of auxiliary speeds which are equal to zero
uaz = dict(zip(uaux, [0] * len(uaux)))
kdeqs = Matrix(kdeqs).subs(uaz)
qdot = self._qdot
qdotzero = dict(zip(qdot, [0] * len(qdot)))
u = self._u
uzero = dict(zip(u, [0] * len(u)))
f_k = kdeqs.subs(uzero).subs(qdotzero)
k_kqdot = (kdeqs.subs(uzero) - f_k).jacobian(Matrix(qdot))
k_ku = (kdeqs.subs(qdotzero) - f_k).jacobian(Matrix(u))
self._k_ku = self._mat_inv_mul(k_kqdot, k_ku)
self._f_k = self._mat_inv_mul(k_kqdot, f_k)
self._k_kqdot = eye(len(qdot))
def _form_fr(self, fl):
"""Form the generalized active force.
Computes the vector of generalized active forces.
Used to compute E.o.M. in the form Fr + Fr* = 0.
Parameters
==========
fl : list
Takes in a list of (Point, Vector) or (ReferenceFrame, Vector)
tuples which represent the force at a point or torque on a frame.
"""
if not isinstance(fl, (list, tuple)):
raise TypeError('Forces must be supplied in a list of: lists or '
'tuples.')
N = self._inertial
self._forcelist = fl[:]
u = self._u
o = len(u)
FR = zeros(o, 1)
# goes through each Fr (where this loop's i is r)
for i, v in enumerate(u):
# does this for each force pair in list (pair is w)
for j, w in enumerate(fl):
if isinstance(w[0], ReferenceFrame):
speed = w[0].ang_vel_in(N)
FR[i] += speed.diff(v, N) & w[1]
elif isinstance(w[0], Point):
speed = w[0].vel(N)
FR[i] += speed.diff(v, N) & w[1]
else:
raise TypeError('First entry in each force pair must be a '
'Point or ReferenceFrame.')
# for dependent speeds
if len(self._udep) != 0:
m = len(self._udep)
p = o - m
FRtilde = FR.extract(range(p), [0])
FRold = FR.extract(range(p, o), [0])
FRtilde += self._Ars.T * FRold
FR = FRtilde
self._fr = FR
return FR
def _form_frstar(self, bl):
"""Form the generalized inertia force.
Computes the vector of generalized inertia forces.
Used to compute E.o.M. in the form Fr + Fr* = 0.
Parameters
==========
bl : list
A list of all RigidBody's and Particle's in the system.
"""
if not isinstance(bl, (list, tuple)):
raise TypeError('Bodies must be supplied in a list.')
if self._fr == None:
raise ValueError('Calculate Fr first, please.')
t = dynamicsymbols._t
N = self._inertial
self._bodylist = bl
u = self._u # all speeds
udep = self._udep # dependent speeds
o = len(u)
p = o - len(udep)
udot = self._udot
udotzero = dict(zip(udot, [0] * len(udot)))
uaux = self._uaux
uauxdot = [diff(i, t) for i in uaux]
# dictionary of auxiliary speeds which are equal to zero
uaz = dict(zip(uaux, [0] * len(uaux)))
# dictionary of derivatives of auxiliary speeds which are equal to zero
uadz = dict(zip(uauxdot, [0] * len(uauxdot)))
# Form R*, T* for each body or particle in the list
# This is stored as a list of tuples [(r*, t*),...]
# Each tuple is for a body or particle
# Within each rs is a tuple and ts is a tuple
# These have the same structure: ([list], value)
# The list is the coefficients of rs/ts wrt udots, value is everything
# else in the expression
# Partial velocities are stored as a list of tuple; a tuple for each
# body
# Each tuple has two elements, lists which represent the partial
# velocity for each ur; The first list is translational partial
# velocities, the second list is rotational partial velocities
MM = zeros(o, o)
nonMM = zeros(o, 1)
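# MM accumulates the coefficients of the udots (the system mass matrix);
# nonMM accumulates the remaining, udot-free terms.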
rsts = []
partials = []
for i, v in enumerate(bl): # go through list of bodies, particles
if isinstance(v, RigidBody):
om = v.frame.ang_vel_in(N).subs(uadz).subs(uaz) # ang velocity
omp = v.frame.ang_vel_in(N) # ang velocity, for partials
alp = v.frame.ang_acc_in(N).subs(uadz).subs(uaz) # ang acc
ve = v.mc.vel(N).subs(uadz).subs(uaz) # velocity
vep = v.mc.vel(N) # velocity, for partials
acc = v.mc.acc(N).subs(uadz).subs(uaz) # acceleration
m = (v.mass).subs(uadz).subs(uaz)
I, P = v.inertia
I = I.subs(uadz).subs(uaz)
if P != v.mc:
# redefine I about mass center
# have I S/O, want I S/S*
# I S/O = I S/S* + I S*/O; I S/S* = I S/O - I S*/O
# This block of code needs to have a test written for it
print('This functionality has not been tested yet, '
'use at your own risk.')
f = v.frame
d = v.mc.pos_from(P)
I -= m * (((f.x | f.x) + (f.y | f.y) + (f.z | f.z)) *
(d & d) - (d | d))
templist = []
# One could think of r star as a collection of coefficients of
# the udots plus another term. What we do here is get all of
# these coefficients and store them in a list, then we get the
# "other" term and put the list and other term in a tuple, for
# each body/particle. The same is done for t star. The reason
# for this is to not let the expressions get too large; so we
# keep them separate for as long as possible
for j, w in enumerate(udot):
templist.append(-m * acc.diff(w, N))
other = -m.diff(t) * ve - m * acc.subs(udotzero)
rs = (templist, other)
templist = []
# see above note
for j, w in enumerate(udot):
templist.append(-I & alp.diff(w, N))
other = -((I.dt(v.frame) & om) + (I & alp.subs(udotzero))
+ (om ^ (I & om)))
ts = (templist, other)
tl1 = []
tl2 = []
# calculates the partials only once and stores them for later
for j, w in enumerate(u):
tl1.append(vep.diff(w, N))
tl2.append(omp.diff(w, N))
partials.append((tl1, tl2))
elif isinstance(v, Particle):
ve = v.point.vel(N).subs(uadz).subs(uaz)
vep = v.point.vel(N)
acc = v.point.acc(N).subs(uadz).subs(uaz)
m = v.mass.subs(uadz).subs(uaz)
templist = []
# see above note
for j, w in enumerate(udot):
templist.append(-m * acc.diff(w, N))
other = -m.diff(t) * ve - m * acc.subs(udotzero)
rs = (templist, other)
# We make an empty t star here so that the later code
# doesn't care whether it's operating on a body or a particle
ts = ([0] * len(u), 0)
tl1 = []
tl2 = []
# calculates the partials only once, makes 0's for angular
# partials so the later code is body/particle independent
for j, w in enumerate(u):
tl1.append(vep.diff(w, N))
tl2.append(0)
partials.append((tl1, tl2))
else:
raise TypeError('The body list needs RigidBody or '
'Particle as list elements.')
rsts.append((rs, ts))
# Use R*, T* and partial velocities to form FR*
FRSTAR = zeros(o, 1)
# does this for each body in the list
for i, v in enumerate(rsts):
rs, ts = v # unpack r*, t*
vps, ops = partials[i] # unpack vel. partials, ang. vel. partials
# Computes the mass matrix entries from r*; these are from the list
# in the rstar tuple
ii = 0
for x in vps:
for w in rs[0]:
MM[ii] += w & x
ii += 1
# Computes the mass matrix entries from t*; these are from the list
# in the tstar tuple
ii = 0
for x in ops:
for w in ts[0]:
MM[ii] += w & x
ii += 1
# Non mass matrix entries from rstar, from the other in the rstar
# tuple
for j, w in enumerate(vps):
nonMM[j] += w & rs[1]
# Non mass matrix entries from tstar, from the other in the tstar
# tuple
for j, w in enumerate(ops):
nonMM[j] += w & ts[1]
FRSTAR = MM * Matrix(udot) + nonMM
# For motion constraints, m is the number of constraints
# Really, one should just look at Kane's book for descriptions of this
# process
if len(self._udep) != 0:
FRSTARtilde = FRSTAR.extract(range(p), [0])
FRSTARold = FRSTAR.extract(range(p, o), [0])
FRSTARtilde += self._Ars.T * FRSTARold
FRSTAR = FRSTARtilde
MMi = MM.extract(range(p), range(o))
MMd = MM.extract(range(p, o), range(o))
MM = MMi + self._Ars.T * MMd
self._frstar = FRSTAR
zeroeq = self._fr + self._frstar
zeroeq = zeroeq.subs(udotzero)
self._k_d = MM
self._f_d = zeroeq
return FRSTAR
def kanes_equations(self, FL, BL):
""" Method to form Kane's equations, Fr + Fr* = 0.
Returns (Fr, Fr*). In the case where auxiliary generalized speeds are
present (say, s auxiliary speeds, o generalized speeds, and m motion
constraints), the returned vectors will be of length o - m + s.
The first o - m equations will be the constrained Kane's
equations, then the s auxiliary Kane's equations. These auxiliary
equations can be accessed with the auxiliary_eqs property.
Parameters
==========
FL : list
Takes in a list of (Point, Vector) or (ReferenceFrame, Vector)
tuples which represent the force at a point or torque on a frame.
BL : list
A list of all RigidBody's and Particle's in the system.
"""
if (self._q == None) or (self._u == None):
raise ValueError('Speeds and coordinates must be supplied first.')
if (self._k_kqdot == None):
raise ValueError('Supply kinematic differential equations, please.')
fr = self._form_fr(FL)
frstar = self._form_frstar(BL)
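# If auxiliary speeds were supplied, form Fr and Fr* a second time with
# only the auxiliary speeds active; those extra equations bring the
# non-contributing forces into evidence and are appended below.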
if self._uaux != []:
km = Kane(self._inertial)
km.coords(self._q)
km.speeds(self._uaux, u_auxiliary=self._uaux)
fraux = km._form_fr(FL)
frstaraux = km._form_frstar(BL)
self._aux_eq = fraux + frstaraux
self._fr = fr.col_join(fraux)
self._frstar = frstar.col_join(frstaraux)
return (self._fr, self._frstar)
else:
return (fr, frstar)
@property
def auxiliary_eqs(self):
if (self._fr == None) or (self._frstar == None):
raise ValueError('Need to compute Fr, Fr* first.')
if self._uaux == []:
raise ValueError('No auxiliary speeds have been declared.')
return self._aux_eq
def linearize(self):
""" Method used to generate linearized equations.
Note that for linearization, it is assumed that time is not perturbed,
but only the coordinates and speeds. The "forcing" vector's jacobian is
computed with respect to the state vector in the form [Qi, Qd, Ui, Ud].
This is the "f_lin_A" matrix.
It also finds any non-state dynamicsymbols and computes the jacobian of
the "forcing" vector with respect to them. This is the "f_lin_B"
matrix; if this is empty, an empty matrix is created.
Consider the following:
If our equations are: [M]qudot = f, where [M] is the full mass matrix,
qudot is a vector of the derivatives of the coordinates and speeds, and
f is the full forcing vector, the linearization process is as follows:
[M]qudot = [f_lin_A]qu + [f_lin_B]y, where qu is the state vector,
f_lin_A is the jacobian of the full forcing vector with respect to the
state vector, f_lin_B is the jacobian of the full forcing vector with
respect to any non-speed/coordinate dynamicsymbols which show up in the
full forcing vector, and y is a vector of those dynamic symbols (each
column in f_lin_B corresponds to a row of the y vector, each of which
is a non-speed/coordinate dynamicsymbol).
To get the traditional state-space A and B matrix, you need to multiply
the f_lin_A and f_lin_B matrices by the inverse of the mass matrix.
Caution needs to be taken when inverting large symbolic matrices;
substituting in numerical values before inverting will work better.
A tuple of (f_lin_A, f_lin_B, other_dynamicsymbols) is returned.
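For example, with ``KM`` a Kane instance on which kanes_equations has
already been called, the state-space matrices can be recovered
schematically (ideally after substituting in numerical values) as::
f_A, f_B, others = KM.linearize()
M_inv = KM.mass_matrix_full.inv()
A = M_inv * f_A
B = M_inv * f_B # only meaningful when f_B is non-empty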
"""
if (self._fr == None) or (self._frstar == None):
raise ValueError('Need to compute Fr, Fr* first.')
# Note that this is now unnecessary, and it should never be
# encountered; I still think it should be in here in case the user
# manually sets these matrices incorrectly.
for i in self._q:
if self._k_kqdot.diff(i) != 0 * self._k_kqdot:
raise ValueError('Matrix K_kqdot must not depend on any q.')
t = dynamicsymbols._t
uaux = self._uaux
uauxdot = [diff(i, t) for i in uaux]
# dictionary of auxiliary speeds & derivatives which are equal to zero<|fim▁hole|>
# Checking for dynamic symbols outside the dynamic differential
# equations; throws error if there is.
insyms = set(self._q + self._qdot + self._u + self._udot + uaux + uauxdot)
if any(self._find_dynamicsymbols(i, insyms) for i in [self._k_kqdot,
self._k_ku,
self._f_k,
self._k_dnh,
self._f_dnh,
self._k_d]):
raise ValueError('Cannot have dynamic symbols outside dynamic '
'forcing vector.')
other_dyns = list(self._find_dynamicsymbols(self._f_d.subs(subdict),
insyms))
# make it canonically ordered so the jacobian is canonical
other_dyns.sort(key=default_sort_key)
for i in other_dyns:
if diff(i, dynamicsymbols._t) in other_dyns:
raise ValueError('Cannot have derivatives of specified '
'quantities when linearizing forcing terms.')
o = len(self._u) # number of speeds
n = len(self._q) # number of coordinates
l = len(self._qdep) # number of configuration constraints
m = len(self._udep) # number of motion constraints
qi = Matrix(self._q[: n - l]) # independent coords
qd = Matrix(self._q[n - l: n]) # dependent coords; could be empty
ui = Matrix(self._u[: o - m]) # independent speeds
ud = Matrix(self._u[o - m: o]) # dependent speeds; could be empty
qdot = Matrix(self._qdot) # time derivatives of coordinates
# with equations in the form MM udot = forcing, expand that to:
# MM_full [q,u].T = forcing_full. This combines coordinates and
# speeds together for the linearization, which is necessary for the
# linearization process, due to dependent coordinates. f1 is the rows
# from the kinematic differential equations, f2 is the rows from the
# dynamic differential equations (and differentiated non-holonomic
# constraints).
f1 = self._k_ku * Matrix(self._u) + self._f_k
f2 = self._f_d
# Only want to do this if these matrices have been filled in, which
# occurs when there are dependent speeds
if m != 0:
f2 = self._f_d.col_join(self._f_dnh)
fnh = self._f_nh + self._k_nh * Matrix(self._u)
f1 = f1.subs(subdict)
f2 = f2.subs(subdict)
fh = self._f_h.subs(subdict)
fku = (self._k_ku * Matrix(self._u)).subs(subdict)
fkf = self._f_k.subs(subdict)
# In the code below, we are applying the chain rule by hand on these
# things. All the matrices have been changed into vectors (by
# multiplying the dynamic symbols which it is paired with), so we can
# take the jacobian of them. The basic operation is take the jacobian
# of the f1, f2 vectors wrt all of the q's and u's. f1 is a function of
# q, u, and t; f2 is a function of q, qdot, u, and t. In the code
# below, we are not considering perturbations in t. So if f1 is a
# function of the q's, u's but some of the q's or u's could be
# dependent on other q's or u's (qd's might be dependent on qi's, ud's
# might be dependent on ui's or qi's), so what we do is take the
# jacobian of the f1 term wrt qi's and qd's, the jacobian wrt the qd's
# gets multiplied by the jacobian of qd wrt qi, this is extended for
# the ud's as well. dqd_dqi is computed by taking a taylor expansion of
# the holonomic constraint equations about q*, treating q* - q as dq,
# separating into dqd (dependent q's) and dqi (independent q's) and then
# rearranging for dqd/dqi. This is again extended for the speeds.
# First case: configuration and motion constraints
if (l != 0) and (m != 0):
fh_jac_qi = fh.jacobian(qi)
fh_jac_qd = fh.jacobian(qd)
fnh_jac_qi = fnh.jacobian(qi)
fnh_jac_qd = fnh.jacobian(qd)
fnh_jac_ui = fnh.jacobian(ui)
fnh_jac_ud = fnh.jacobian(ud)
fku_jac_qi = fku.jacobian(qi)
fku_jac_qd = fku.jacobian(qd)
fku_jac_ui = fku.jacobian(ui)
fku_jac_ud = fku.jacobian(ud)
fkf_jac_qi = fkf.jacobian(qi)
fkf_jac_qd = fkf.jacobian(qd)
f1_jac_qi = f1.jacobian(qi)
f1_jac_qd = f1.jacobian(qd)
f1_jac_ui = f1.jacobian(ui)
f1_jac_ud = f1.jacobian(ud)
f2_jac_qi = f2.jacobian(qi)
f2_jac_qd = f2.jacobian(qd)
f2_jac_ui = f2.jacobian(ui)
f2_jac_ud = f2.jacobian(ud)
f2_jac_qdot = f2.jacobian(qdot)
dqd_dqi = - self._mat_inv_mul(fh_jac_qd, fh_jac_qi)
dud_dqi = self._mat_inv_mul(fnh_jac_ud, (fnh_jac_qd *
dqd_dqi - fnh_jac_qi))
dud_dui = - self._mat_inv_mul(fnh_jac_ud, fnh_jac_ui)
dqdot_dui = - self._k_kqdot.inv() * (fku_jac_ui +
fku_jac_ud * dud_dui)
dqdot_dqi = - self._k_kqdot.inv() * (fku_jac_qi + fkf_jac_qi +
(fku_jac_qd + fkf_jac_qd) * dqd_dqi + fku_jac_ud * dud_dqi)
f1_q = f1_jac_qi + f1_jac_qd * dqd_dqi + f1_jac_ud * dud_dqi
f1_u = f1_jac_ui + f1_jac_ud * dud_dui
f2_q = (f2_jac_qi + f2_jac_qd * dqd_dqi + f2_jac_qdot * dqdot_dqi +
f2_jac_ud * dud_dqi)
f2_u = f2_jac_ui + f2_jac_ud * dud_dui + f2_jac_qdot * dqdot_dui
# Second case: configuration constraints only
elif l != 0:
dqd_dqi = - self._mat_inv_mul(fh.jacobian(qd), fh.jacobian(qi))
dqdot_dui = - self._k_kqdot.inv() * fku.jacobian(ui)
dqdot_dqi = - self._k_kqdot.inv() * (fku.jacobian(qi) +
fkf.jacobian(qi) + (fku.jacobian(qd) + fkf.jacobian(qd)) *
dqd_dqi)
f1_q = (f1.jacobian(qi) + f1.jacobian(qd) * dqd_dqi)
f1_u = f1.jacobian(ui)
f2_jac_qdot = f2.jacobian(qdot)
f2_q = (f2.jacobian(qi) + f2.jacobian(qd) * dqd_dqi +
f2_jac_qdot * dqdot_dqi)
f2_u = f2.jacobian(ui) + f2_jac_qdot * dqdot_dui
# Third case: motion constraints only
elif m != 0:
dud_dqi = self._mat_inv_mul(fnh.jacobian(ud), - fnh.jacobian(qi))
dud_dui = - self._mat_inv_mul(fnh.jacobian(ud), fnh.jacobian(ui))
dqdot_dui = - self._k_kqdot.inv() * (fku.jacobian(ui) +
fku.jacobian(ud) * dud_dui)
dqdot_dqi = - self._k_kqdot.inv() * (fku.jacobian(qi) +
fkf.jacobian(qi) + fku.jacobian(ud) * dud_dqi)
f1_jac_ud = f1.jacobian(ud)
f2_jac_qdot = f2.jacobian(qdot)
f2_jac_ud = f2.jacobian(ud)
f1_q = f1.jacobian(qi) + f1_jac_ud * dud_dqi
f1_u = f1.jacobian(ui) + f1_jac_ud * dud_dui
f2_q = (f2.jacobian(qi) + f2_jac_qdot * dqdot_dqi + f2_jac_ud
* dud_dqi)
f2_u = (f2.jacobian(ui) + f2_jac_ud * dud_dui + f2_jac_qdot *
dqdot_dui)
# Fourth case: No constraints
else:
dqdot_dui = - self._k_kqdot.inv() * fku.jacobian(ui)
dqdot_dqi = - self._k_kqdot.inv() * (fku.jacobian(qi) +
fkf.jacobian(qi))
f1_q = f1.jacobian(qi)
f1_u = f1.jacobian(ui)
f2_jac_qdot = f2.jacobian(qdot)
f2_q = f2.jacobian(qi) + f2_jac_qdot * dqdot_dqi
f2_u = f2.jacobian(ui) + f2_jac_qdot * dqdot_dui
f_lin_A = -(f1_q.row_join(f1_u)).col_join(f2_q.row_join(f2_u))
if other_dyns:
f1_oths = f1.jacobian(other_dyns)
f2_oths = f2.jacobian(other_dyns)
f_lin_B = -f1_oths.col_join(f2_oths)
else:
f_lin_B = Matrix([])
return (f_lin_A, f_lin_B, Matrix(other_dyns))
@property
def mass_matrix(self):
# Returns the mass matrix, which is augmented by the differentiated non
# holonomic equations if necessary
if (self._frstar == None) & (self._fr == None):
raise ValueError('Need to compute Fr, Fr* first.')
return Matrix([self._k_d, self._k_dnh])
@property
def mass_matrix_full(self):
# Returns the mass matrix from above, augmented by kin diff's k_kqdot
if (self._frstar == None) & (self._fr == None):
raise ValueError('Need to compute Fr, Fr* first.')
o = len(self._u)
n = len(self._q)
return ((self._k_kqdot).row_join(zeros(n, o))).col_join((zeros(o,
n)).row_join(self.mass_matrix))
@property
def forcing(self):
# Returns the forcing vector, which is augmented by the differentiated
# non holonomic equations if necessary
if (self._frstar == None) & (self._fr == None):
raise ValueError('Need to compute Fr, Fr* first.')
return -Matrix([self._f_d, self._f_dnh])
@property
def forcing_full(self):
# Returns the forcing vector, augmented by the kinematic differential
# equations and the differentiated non-holonomic equations if necessary
if (self._frstar == None) & (self._fr == None):
raise ValueError('Need to compute Fr, Fr* first.')
f1 = self._k_ku * Matrix(self._u) + self._f_k
return -Matrix([f1, self._f_d, self._f_dnh])<|fim▁end|> | subdict = dict(zip(uaux + uauxdot, [0] * (len(uaux) + len(uauxdot)))) |
<|file_name|>states-int-test.js<|end_file_name|><|fim▁begin|>var flow = require('js-flow'),
assert = require('assert'),
tubes = require('evo-tubes');
describe('evo-states', function () {
var TIMEOUT = 60000;
var sandbox;
beforeEach(function (done) {
this.timeout(TIMEOUT);
(sandbox = new tubes.Sandbox())
.add(new tubes.Environment({ nodes: 4 }))
.add(new tubes.NeuronFactory())
.add(new tubes.Connector())
.add(new tubes.States())
.start(done);
});
afterEach(function (done) {
sandbox.cleanup(done);
});
it('synchronize', function (done) {
this.timeout(TIMEOUT);
var connector = sandbox.res('evo-connector');
flow.steps()
.next('clientsReady')
.next(function (next) {
flow.each([this.clients[0], this.clients[1]])
.keys()
.do(function (index, client, next) {
client.commit({ key: 'val' + index }, next);
})
.run(next);
})
.next(function (next) {<|fim▁hole|> });
}, next);
})
.with(sandbox.res('evo-states'))
.run(done)
});
});<|fim▁end|> | this.waitForSync({ key: 'key' }, function (data, client, index) {
return [0, 1].every(function (i) {
var nodeVal = data.d[connector.clients[i].localId];
return nodeVal && nodeVal.d == 'val' + i; |
<|file_name|>CommandObject.java<|end_file_name|><|fim▁begin|>/*
* Copyright 1997-2005 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,<|fim▁hole|> * CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package javax.activation;
import java.io.IOException;
/**
* JavaBeans components that are Activation Framework aware implement
* this interface to find out which command verb they're being asked
* to perform, and to obtain the DataHandler representing the
* data they should operate on. JavaBeans that don't implement
* this interface may be used as well. Such commands may obtain
* the data using the Externalizable interface, or using an
* application-specific method.<p>
*
* @since 1.6
*/
public interface CommandObject {
/**
* Initialize the Command with the verb it is requested to handle
* and the DataHandler that describes the data it will
* operate on. <b>NOTE:</b> it is acceptable for the caller
* to pass <i>null</i> as the value for <code>DataHandler</code>.
*
* @param verb The Command Verb this object refers to.
* @param dh The DataHandler.
*/
public void setCommandContext(String verb, DataHandler dh)
throws IOException;
}<|fim▁end|> | |
<|file_name|>tables.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! Analyze tracing data for edenscm
//!
//! This is edenscm application specific. It's not a general purposed library.
use serde_json::Value;
// use std::borrow::Cow;
use std::collections::BTreeMap as Map;
use tracing_collector::model::{IndexMap, TreeSpan, TreeSpans};
type Row = Map<String, Value>;
type Rows = Vec<Row>;
type Tables = Map<String, Rows>;
type TidSpans<'a> = IndexMap<(u64, u64), TreeSpans<&'a str>>;
// TODO: Make things more configurable.
/// Extract rows from tracing data. Output format is similar to NoSQL tables:
///
/// ```plain,ignore
/// {table_name: [{column_name: column_data}]}
/// ```
pub fn extract_tables(tid_spans: &TidSpans) -> Tables {
let mut tables = Map::new();
extract_dev_command_timers(&mut tables, tid_spans);
extract_other_tables(&mut tables, tid_spans);
tables
}
fn extract_dev_command_timers<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
let mut row = Row::new();
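// Helper: parse a string metadata value into an integer JSON value (0 if it
// fails to parse).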
let toint = |value: &str| -> Value { value.parse::<i64>().unwrap_or_default().into() };
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// By hgcommands, run.rs
"Run Command" => {
let duration = span.duration_millis().unwrap_or(0);
row.insert("command_duration".into(), duration.into());
row.insert("elapsed".into(), duration.into());
for (&name, &value) in span.meta.iter() {
match name {
"nice" => {
row.insert("nice".into(), toint(value));
}
"version" => {
// Truncate the "version" string. This matches the old telemetry behavior.
row.insert("version".into(), value[..34.min(value.len())].into());
}
"max_rss" => {
row.insert("maxrss".into(), toint(value));
}
"exit_code" => {
row.insert("errorcode".into(), toint(value));
}
"parent_names" => {
if let Ok(names) = serde_json::from_str::<Vec<String>>(value) {
let name = names.get(0).cloned().unwrap_or_default();
row.insert("parent".into(), name.into());
}
}
"args" => {
if let Ok(args) = serde_json::from_str::<Vec<String>>(value) {
// Normalize the first argument to "hg".
let mut full = "hg".to_string();
for arg in args.into_iter().skip(1) {
// Keep the length bounded.
if full.len() + arg.len() >= 256 {
full += " (truncated)";
break;
}
full += &" ";
// TODO: Use shell_escape once in tp2.
// full += &shell_escape::unix::escape(Cow::Owned(arg));
full += &arg;
}
row.insert("fullcommand".into(), full.into());
}
}
_ => {}
}
}
}
// The "log:command-row" event is used by code that wants to
// log to columns of the main command row easily.
"log:command-row" if span.is_event => {
extract_span(&span, &mut row);
}
_ => {}
}
}
}
tables.insert("dev_command_timers".into(), vec![row]);
}
fn extract_other_tables<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// The "log:create-row" event is used by code that wants to log
// to a entire new column in a specified table.
//
// The event is expected to have "table", and the rest of the
// metadata will be logged as-is.
"log:create-row" => {
let table_name = match span.meta.get("table") {
Some(&name) => name,
None => continue,
};
let mut row = Row::new();
extract_span(span, &mut row);
tables.entry(table_name.into()).or_default().push(row);<|fim▁hole|> }
}
/// Parse a span, extract its metadata to a row.
fn extract_span(span: &TreeSpan<&str>, row: &mut Row) {
for (&name, &value) in span.meta.iter() {
match name {
// Those keys are likely generated. Skip them.
"module_path" | "cat" | "line" | "name" => {}
// Attempt to convert it to an integer (since tracing data is
// string only).
_ => match value.parse::<i64>() {
Ok(i) => {
row.insert(name.into(), i.into());
}
_ => {
row.insert(name.into(), value.into());
}
},
}
}
}<|fim▁end|> | }
_ => {}
}
} |
<|file_name|>particle.js<|end_file_name|><|fim▁begin|>define(['../lib/circle', '../lib/aura', './collision', '../lib/vector', '../lib/dictionary'],function(Circle, Aura, collision, Vector, Dictionary){
var Particle = function(world, options) {
var options = options || {}
Circle.call(this, options);
//TODO: implement singleton for the world
this.world = world;
this.mass = this.radius;
// this.energy = 100;
this.speed = options.speed || 3;
// todo: move this somewhere else
this.directions = new Dictionary({
n: new Vector(0, 1),
s: new Vector(0, -1),
e: new Vector(1, 0),
w: new Vector(-1, 0),
ne: new Vector(1, 1),
se: new Vector(1, -1),
nw: new Vector(-1, 1),
sw: new Vector(-1, -1)
});
this.setDirection(options.direction || this.directions.random());
this.aura = new Aura(this);
};
// inheritance
Particle.prototype = Object.create(Circle.prototype);
Particle.prototype.act = function() {
this.move();
};
/**
* Change position of object based on direction
* and speed
*/
Particle.prototype.move = function() {
var pos = this.position.add(new Vector(this.direction.normalize().x * this.speed, this.direction.normalize().y * this.speed));
this.setPosition(pos);
};
/**
* React to collision depending on the type of object
*/
Particle.prototype.reactToCollision = function(other) {
//TODO: fix this
//http://en.wikipedia.org/wiki/Elastic_collision
//http://www.gamasutra.com/view/feature/131424/pool_hall_lessons_fast_accurate_.php?page=3
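// n is the unit collision normal pointing from the other particle to this
// one; a1/a2 are each particle's velocity component along n, and optimizedP
// is the shared impulse term from the elastic-collision references above.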
var n = this.position.sub(other.prevPosition).normalize();
var a1 = this.direction.dot(n);
var a2 = other.prevDirection.dot(n);
var optimizedP = (2 * (a1 - a2)) / (this.mass + other.mass);
var newDir = this.direction.sub(n.mult(optimizedP * other.mass));
// this.setPosition(this.prevPosition);
this.setDirection(newDir);
// this.move();
};
/**
* Needed to keep track of the previous direction
*/
Particle.prototype.setDirection = function(vector){
this.prevDirection = this.direction || vector;
this.direction = vector;
};
/**
* Render
*/
Particle.prototype.render = function(){
this.aura.render();
this.constructor.prototype.render.call(this);
};
<|fim▁hole|>
return Particle;
});<|fim▁end|> | Particle.prototype.setPosition = function(pos){
this.prevPosition = this.position || pos;
this.constructor.prototype.setPosition.call(this, pos);
}; |
<|file_name|>test_cython_optimize.py<|end_file_name|><|fim▁begin|>"""
Test Cython optimize zeros API functions: ``bisect``, ``ridder``, ``brenth``,
and ``brentq`` in `scipy.optimize.cython_optimize`, by finding the roots of a
3rd order polynomial given a sequence of constant terms, ``a0``, and fixed 1st,
2nd, and 3rd order terms in ``args``.
.. math::
f(x, a0, args) = ((args[2]*x + args[1])*x + args[0])*x + a0
The 3rd order polynomial function is written in Cython and called in a Python
wrapper named after the zero function. See the private ``_zeros`` Cython module
in `scipy.optimize.cython_optimize` for more information.
"""
import numpy.testing as npt
from scipy.optimize.cython_optimize import _zeros
# CONSTANTS
# Solve x**3 - A0 = 0 for A0 = [2.0, 2.1, ..., 2.9].
# The ARGS have 3 elements just to show how this could be done for any cubic
# polynomial.
A0 = tuple(-2.0 - x/10.0 for x in range(10)) # constant term
ARGS = (0.0, 0.0, 1.0) # 1st, 2nd, and 3rd order terms
XLO, XHI = 0.0, 2.0 # first and second bounds of zeros functions
# absolute and relative tolerances and max iterations for zeros functions
XTOL, RTOL, MITR = 0.001, 0.001, 10
EXPECTED = [(-a0) ** (1.0/3.0) for a0 in A0]
# = [1.2599210498948732,
# 1.2805791649874942,
# 1.300591446851387,
# 1.3200061217959123,
# 1.338865900164339,
# 1.3572088082974532,
# 1.375068867074141,
# 1.3924766500838337,
# 1.4094597464129783,
# 1.4260431471424087]
# test bisect
def test_bisect():
npt.assert_allclose(
EXPECTED,
list(
_zeros.loop_example('bisect', A0, ARGS, XLO, XHI, XTOL, RTOL, MITR)
),
rtol=RTOL, atol=XTOL
)
# test ridder
def test_ridder():
npt.assert_allclose(
EXPECTED,
list(
_zeros.loop_example('ridder', A0, ARGS, XLO, XHI, XTOL, RTOL, MITR)
),
rtol=RTOL, atol=XTOL
)
# test brenth
def test_brenth():
npt.assert_allclose(
EXPECTED,
list(
_zeros.loop_example('brenth', A0, ARGS, XLO, XHI, XTOL, RTOL, MITR)
),
rtol=RTOL, atol=XTOL
)
# test brentq
def test_brentq():
npt.assert_allclose(
EXPECTED,
list(
_zeros.loop_example('brentq', A0, ARGS, XLO, XHI, XTOL, RTOL, MITR)
),
rtol=RTOL, atol=XTOL
)
# test brentq with full output<|fim▁hole|> output = _zeros.full_output_example(
(A0[0],) + ARGS, XLO, XHI, XTOL, RTOL, MITR)
npt.assert_allclose(EXPECTED[0], output['root'], rtol=RTOL, atol=XTOL)
npt.assert_equal(6, output['iterations'])
npt.assert_equal(7, output['funcalls'])
npt.assert_equal(0, output['error_num'])<|fim▁end|> | def test_brentq_full_output(): |
<|file_name|>ipython_config.py<|end_file_name|><|fim▁begin|># Configuration file for ipython.
c = get_config() # noqa: F821
c.Completer.use_jedi = False
# ------------------------------------------------------------------------------
# InteractiveShellApp configuration
# ------------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.InteractiveShellApp.pylab = None
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.InteractiveShellApp.exec_PYTHONSTARTUP = True
# lines of code to run at IPython startup.
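# KBase narrative setup: preload the narrative magics, service API, widget
# manager and job helpers at startup.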
c.InteractiveShellApp.exec_lines = [
"import biokbase.narrative.magics",
"from biokbase.narrative.services import *",
"from biokbase.narrative.widgetmanager import WidgetManager",
"from biokbase.narrative.jobs import *",
]
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx',
# 'pyglet', 'qt', 'qt5', 'tk', 'wx').
# c.InteractiveShellApp.gui = None
# Reraise exceptions encountered loading IPython extensions?
# c.InteractiveShellApp.reraise_ipython_extension_failures = False
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.InteractiveShellApp.matplotlib = None
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.InteractiveShellApp.pylab_import_all = True
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = []
# Run the module as a script.
# c.InteractiveShellApp.module_to_run = ''
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.InteractiveShellApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = []
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
# ------------------------------------------------------------------------------
# TerminalIPythonApp configuration
# ------------------------------------------------------------------------------
# TerminalIPythonApp will inherit config from: BaseIPythonApplication,
# Application, InteractiveShellApp
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.TerminalIPythonApp.exec_PYTHONSTARTUP = True
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.TerminalIPythonApp.pylab = None
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.TerminalIPythonApp.verbose_crash = False
# Run the module as a script.
# c.TerminalIPythonApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Whether to overwrite existing config files when copying
# c.TerminalIPythonApp.overwrite = False
# Execute the given command string.
# c.TerminalIPythonApp.code_to_run = ''
# Set the log level by value or name.
# c.TerminalIPythonApp.log_level = 30
# lines of code to run at IPython startup.
# c.TerminalIPythonApp.exec_lines = []
# Suppress warning messages about legacy config files
# c.TerminalIPythonApp.ignore_old_config = False
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.TerminalIPythonApp.extra_config_file = u''
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.TerminalIPythonApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.TerminalIPythonApp.extra_extension = ''
# A file to be run
# c.TerminalIPythonApp.file_to_run = ''
# The IPython profile to use.
# c.TerminalIPythonApp.profile = u'default'
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.TerminalIPythonApp.matplotlib = None
# If a command or file is given via the command-line, e.g. 'ipython foo.py',
# start an interactive shell after executing the file or command.<|fim▁hole|># If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.TerminalIPythonApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.TerminalIPythonApp.ipython_dir = u''
# Whether to display a banner upon starting IPython.
# c.TerminalIPythonApp.display_banner = True
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.TerminalIPythonApp.copy_config_files = False
# List of files to run at IPython startup.
# c.TerminalIPythonApp.exec_files = []
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx',
# 'pyglet', 'qt', 'qt5', 'tk', 'wx').
# c.TerminalIPythonApp.gui = None
# Reraise exceptions encountered loading IPython extensions?
# c.TerminalIPythonApp.reraise_ipython_extension_failures = False
# A list of dotted module names of IPython extensions to load.
# c.TerminalIPythonApp.extensions = []
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# The Logging format template
# c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# ------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
# ------------------------------------------------------------------------------
# TerminalInteractiveShell will inherit config from: InteractiveShell
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = False
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.TerminalInteractiveShell.color_info = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.TerminalInteractiveShell.ast_transformers = []
#
# c.TerminalInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.TerminalInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.TerminalInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
# c.TerminalInteractiveShell.colors = 'LightBG'
# If True, anything that would be passed to the pager will be displayed as
# regular output instead.
# c.TerminalInteractiveShell.display_page = False
# Autoindent IPython code entered interactively.
# c.TerminalInteractiveShell.autoindent = True
#
# c.TerminalInteractiveShell.separate_in = '\n'
# Deprecated, use PromptManager.in2_template
# c.TerminalInteractiveShell.prompt_in2 = ' .\\D.: '
#
# c.TerminalInteractiveShell.separate_out = ''
# Deprecated, use PromptManager.in_template
# c.TerminalInteractiveShell.prompt_in1 = 'In [\\#]: '
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.TerminalInteractiveShell.autocall = 0
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.TerminalInteractiveShell.screen_length = 0
# Set the editor used by IPython (default to $EDITOR/vi/notepad).
# c.TerminalInteractiveShell.editor = 'vi'
# Deprecated, use PromptManager.justify
# c.TerminalInteractiveShell.prompts_pad_left = True
# The part of the banner to be printed before the profile
# c.TerminalInteractiveShell.banner1 = 'Python 2.7.6 (default, Nov 18 2013, 15:12:51) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.2.0-dev -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
#
# c.TerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard']
# The part of the banner to be printed after the profile
# c.TerminalInteractiveShell.banner2 = ''
#
# c.TerminalInteractiveShell.separate_out2 = ''
#
# c.TerminalInteractiveShell.wildcards_case_sensitive = True
#
# c.TerminalInteractiveShell.debug = False
# Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
# direct exit without any confirmation.
# c.TerminalInteractiveShell.confirm_exit = True
#
# c.TerminalInteractiveShell.ipython_dir = ''
#
# c.TerminalInteractiveShell.readline_remove_delims = '-/~'
# Start logging to the default log file in overwrite mode. Use `logappend` to
# specify a log file to **append** logs to.
# c.TerminalInteractiveShell.logstart = False
# The name of the logfile to use.
# c.TerminalInteractiveShell.logfile = ''
# The shell program to be used for paging.
# c.TerminalInteractiveShell.pager = 'less'
# Enable magic commands to be called without the leading %.
# c.TerminalInteractiveShell.automagic = True
# Save multi-line entries as one entry in readline history
# c.TerminalInteractiveShell.multiline_history = True
#
# c.TerminalInteractiveShell.readline_use = True
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). deep_reload()
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.TerminalInteractiveShell.deep_reload = False
# Start logging to the given file in append mode. Use `logfile` to specify a log
# file to **overwrite** logs to.
# c.TerminalInteractiveShell.logappend = ''
#
# c.TerminalInteractiveShell.xmode = 'Context'
#
# c.TerminalInteractiveShell.quiet = False
# Enable auto setting the terminal title.
# c.TerminalInteractiveShell.term_title = False
#
# c.TerminalInteractiveShell.object_info_string_level = 0
# Deprecated, use PromptManager.out_template
# c.TerminalInteractiveShell.prompt_out = 'Out[\\#]: '
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
# c.TerminalInteractiveShell.cache_size = 1000
# 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
# interactively (displaying output from expressions).
# c.TerminalInteractiveShell.ast_node_interactivity = 'last_expr'
# Automatically call the pdb debugger after every exception.
# c.TerminalInteractiveShell.pdb = False
# ------------------------------------------------------------------------------
# PromptManager configuration
# ------------------------------------------------------------------------------
# This is the primary interface for producing IPython's prompts.
# Output prompt. '\#' will be transformed to the prompt number
# c.PromptManager.out_template = 'Out[\\#]: '
# Continuation prompt.
# c.PromptManager.in2_template = ' .\\D.: '
# If True (default), each prompt will be right-aligned with the preceding one.
# c.PromptManager.justify = True
# Input prompt. '\#' will be transformed to the prompt number
# c.PromptManager.in_template = 'In [\\#]: '
#
# c.PromptManager.color_scheme = 'Linux'
# ------------------------------------------------------------------------------
# HistoryManager configuration
# ------------------------------------------------------------------------------
# A class to organize all history-related functionality in one place.
# HistoryManager will inherit config from: HistoryAccessor
# Should the history database include output? (default: no)
# c.HistoryManager.db_log_output = False
# Write to database every x commands (higher values save disk access & power).
# Values of 1 or less effectively disable caching.
# c.HistoryManager.db_cache_size = 0
# Path to file to use for SQLite history database.
#
# By default, IPython will put the history database in the IPython profile
# directory. If you would rather share one history among profiles, you can set
# this value in each, so that they are consistent.
#
# Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts.
# If you see IPython hanging, try setting this to something on a local disk,
# e.g::
#
# ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
# c.HistoryManager.hist_file = u''
# Options for configuring the SQLite connection
#
# These options are passed as keyword args to sqlite3.connect when establishing
# database connections.
# c.HistoryManager.connection_options = {}
# enable the SQLite history
#
# set enabled=False to disable the SQLite history, in which case there will be
# no stored history, no SQLite connection, and no background saving thread.
# This may be necessary in some threaded environments where IPython is embedded.
# c.HistoryManager.enabled = True
# ------------------------------------------------------------------------------
# ProfileDir configuration
# ------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = u''
# ------------------------------------------------------------------------------
# PlainTextFormatter configuration
# ------------------------------------------------------------------------------
# The default pretty-printer.
#
# This uses :mod:`IPython.lib.pretty` to compute the format data of the object.
# If the object cannot be pretty printed, :func:`repr` is used. See the
# documentation of :mod:`IPython.lib.pretty` for details on how to write pretty
# printers. Here is a simple example::
#
# def dtype_pprinter(obj, p, cycle):
# if cycle:
# return p.text('dtype(...)')
# if hasattr(obj, 'fields'):
# if obj.fields is None:
# p.text(repr(obj))
# else:
# p.begin_group(7, 'dtype([')
# for i, field in enumerate(obj.descr):
# if i > 0:
# p.text(',')
# p.breakable()
# p.pretty(field)
# p.end_group(7, '])')
# PlainTextFormatter will inherit config from: BaseFormatter
#
# c.PlainTextFormatter.type_printers = {}
# Truncate large collections (lists, dicts, tuples, sets) to this size.
#
# Set to 0 to disable truncation.
# c.PlainTextFormatter.max_seq_length = 1000
#
# c.PlainTextFormatter.float_precision = ''
#
# c.PlainTextFormatter.verbose = False
#
# c.PlainTextFormatter.deferred_printers = {}
#
# c.PlainTextFormatter.newline = '\n'
#
# c.PlainTextFormatter.max_width = 79
#
# c.PlainTextFormatter.pprint = True
#
# c.PlainTextFormatter.singleton_printers = {}
# ------------------------------------------------------------------------------
# IPCompleter configuration
# ------------------------------------------------------------------------------
# Extension of the completer class with IPython-specific features
# IPCompleter will inherit config from: Completer
# Instruct the completer to omit private method names
#
# Specifically, when completing on ``object.<tab>``.
#
# When 2 [default]: all names that start with '_' will be excluded.
#
# When 1: all 'magic' names (``__foo__``) will be excluded.
#
# When 0: nothing will be excluded.
# c.IPCompleter.omit__names = 2
# Whether to merge completion results into a single list
#
# If False, only the completion results from the first non-empty completer will
# be returned.
# c.IPCompleter.merge_completions = True
# Instruct the completer to use __all__ for the completion
#
# Specifically, when completing on ``object.<tab>``.
#
# When True: only those names in obj.__all__ will be included.
#
# When False [default]: the __all__ attribute is ignored
# c.IPCompleter.limit_to__all__ = False
# Activate greedy completion
#
# This will enable completion on elements of lists, results of function calls,
# etc., but can be unsafe because the code is actually evaluated on TAB.
# c.IPCompleter.greedy = False
# ------------------------------------------------------------------------------
# ScriptMagics configuration
# ------------------------------------------------------------------------------
# Magics for talking to scripts
#
# This defines a base `%%script` cell magic for running a cell with a program in
# a subprocess, and registers a few top-level magics that call %%script with
# common interpreters.
# Extra script cell magics to define
#
# This generates simple wrappers of `%%script foo` as `%%foo`.
#
# If you want to add script magics that aren't on your path, specify them in
# script_paths
# c.ScriptMagics.script_magics = []
# Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
#
# Only necessary for items in script_magics where the default path will not find
# the right interpreter.
# c.ScriptMagics.script_paths = {}
# ------------------------------------------------------------------------------
# StoreMagics configuration
# ------------------------------------------------------------------------------
# Lightweight persistence for python variables.
#
# Provides the %store magic.
# If True, any %store-d variables will be automatically restored when IPython
# starts.
# c.StoreMagics.autorestore = False<|fim▁end|> | # c.TerminalIPythonApp.force_interact = False
|
<|file_name|>declare.rs<|end_file_name|><|fim▁begin|>// https://rustbyexample.com/variable_bindings/declare.html
// http://rust-lang-ja.org/rust-by-example/variable_bindings/declare.html
fn main() {
// Declare a variable binding
let a_binding;
{
let x = 2;
// Initialize the binding
a_binding = x * x;
}
println!("a binding: {}", a_binding);
let another_binding;
<|fim▁hole|> // FIXME ^ Comment out this line
another_binding = 1;
println!("another binding: {}", another_binding);
}<|fim▁end|> | // Error! Use of uninitialized binding
// println!("another binding: {}", another_binding); // error[E0381]: use of possibly uninitialized variable: `another_binding` |
<|file_name|>penguin_cli.py<|end_file_name|><|fim▁begin|>import click
import os
import penguin.pdf as pdf
import penguin.utils as utils
def check_src(src):
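# Every src entry must be an existing directory or a Pdf file; anything else
# is rejected as a CLI usage error.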
if not all((map(utils.is_valid_source, src))):
raise click.BadParameter("src arguments must be either a valid directory"
" or pdf file.")
@click.group()
def penguin():
pass
@penguin.command()
@click.argument('src', nargs=-1)
@click.argument('dst')
@click.option('--bookmark', 'bookmark', flag_value='include-bookmarks',
default=True)
@click.option('--remove-blank-pages', 'rmblanks', flag_value='remove-blanks-pages',
default=False)
def combine(src, dst, bookmark, rmblanks):
"""Combine Pdf files from the source provided into the destination file.
:param src: The source Pdf file(s). src can either be a list of individual
files or directories containing Pdf files.
:param dst: The output file destination.
:param bookmark: True if the combined Pdf should include bookmarks.
:param rmblanks: True if blank pages should be removed from the combined Pdf.
"""
check_src(src)
combined_pdf = pdf.combine(src, bookmark, rmblanks)
<|fim▁hole|>@penguin.command()
@click.argument('src',)
@click.argument('pages', nargs=-1)
@click.argument('dst')
def split(src, pages, dst):
"""Split the specified pages from src into the the dst.
:param src: The source Pdf file (directory).
:param pages: The page number(s) to extract from each file.
:param dst: The output file destination.
"""
check_src(src)
combined_pdf = pdf.split(src, pages)
with open(dst, 'wb') as f:
combined_pdf.write(f)
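# Rough command-line usage sketch (the file names below are hypothetical, and the
# script is assumed to be invoked directly rather than via an installed entry point):
#   python penguin_cli.py combine chapter1.pdf scans/ merged.pdf --remove-blank-pages
#   python penguin_cli.py split merged.pdf 1 3 5 excerpt.pdf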
if __name__ == '__main__':
penguin()<|fim▁end|> | with open(dst, 'wb') as f:
combined_pdf.write(f)
|
<|file_name|>test_nonlin.py<|end_file_name|><|fim▁begin|>""" Unit tests for nonlinear solvers
Author: Ondrej Certik
May 2007
"""
from numpy.testing import assert_, dec, TestCase, run_module_suite
from scipy.optimize import nonlin
from numpy import matrix, diag, dot
from numpy.linalg import inv
import numpy as np
SOLVERS = [nonlin.anderson, nonlin.diagbroyden, nonlin.linearmixing,
nonlin.excitingmixing, nonlin.broyden1, nonlin.broyden2,
nonlin.newton_krylov]
MUST_WORK = [nonlin.anderson, nonlin.broyden1, nonlin.broyden2,
nonlin.newton_krylov]
#-------------------------------------------------------------------------------
# Test problems
#-------------------------------------------------------------------------------
def F(x):
x = np.asmatrix(x).T
d = matrix(diag([3,2,1.5,1,0.5]))
c = 0.01
f = -d*x - c*float(x.T*x)*x
return f
F.xin = [1,1,1,1,1]
F.KNOWN_BAD = []
def F2(x):
return x
F2.xin = [1,2,3,4,5,6]
F2.KNOWN_BAD = [nonlin.linearmixing, nonlin.excitingmixing]
def F3(x):
A = np.mat('-2 1 0; 1 -2 1; 0 1 -2')
b = np.mat('1 2 3')
return np.dot(A, x) - b
F3.xin = [1,2,3]
F3.KNOWN_BAD = []
def F4_powell(x):
A = 1e4
return [A*x[0]*x[1] - 1, np.exp(-x[0]) + np.exp(-x[1]) - (1 + 1/A)]
F4_powell.xin = [-1, -2]
F4_powell.KNOWN_BAD = [nonlin.linearmixing, nonlin.excitingmixing,
nonlin.diagbroyden]
from test_minpack import TestFSolve as F5_class
F5_object = F5_class()
def F5(x):
return F5_object.pressure_network(x, 4, np.array([.5, .5, .5, .5]))
F5.xin = [2., 0, 2, 0]
F5.KNOWN_BAD = [nonlin.excitingmixing, nonlin.linearmixing, nonlin.diagbroyden]
def F6(x):
x1, x2 = x
J0 = np.array([[ -4.256 , 14.7 ],
[ 0.8394989 , 0.59964207]])
v = np.array([(x1 + 3) * (x2**5 - 7) + 3*6,
np.sin(x2 * np.exp(x1) - 1)])
return -np.linalg.solve(J0, v)
F6.xin = [-0.5, 1.4]
F6.KNOWN_BAD = [nonlin.excitingmixing, nonlin.linearmixing, nonlin.diagbroyden]
#-------------------------------------------------------------------------------
# Tests
#-------------------------------------------------------------------------------
class TestNonlin(object):
"""
Check the Broyden methods for a few test problems.
broyden1, broyden2, and newton_krylov must succeed for
all functions. Some of the others don't -- tests in KNOWN_BAD are skipped.
"""
def _check_func(self, f, func, f_tol=1e-2):
x = func(f, f.xin, f_tol=f_tol, maxiter=200, verbose=0)
assert_(np.absolute(f(x)).max() < f_tol)
@dec.knownfailureif(True)
def _check_func_fail(self, *a, **kw):
pass
def test_problem(self):
for f in [F, F2, F3, F4_powell, F5, F6]:
for func in SOLVERS:
if func in f.KNOWN_BAD:
if func in MUST_WORK:
yield self._check_func_fail, f, func
continue
yield self._check_func, f, func
class TestSecant(TestCase):
"""Check that some Jacobian approximations satisfy the secant condition"""
xs = [np.array([1,2,3,4,5], float),
np.array([2,3,4,5,1], float),
np.array([3,4,5,1,2], float),
np.array([4,5,1,2,3], float),
np.array([9,1,9,1,3], float),
np.array([0,1,9,1,3], float),
np.array([5,5,7,1,1], float),
np.array([1,2,7,5,1], float),]
fs = [x**2 - 1 for x in xs]
def _check_secant(self, jac_cls, npoints=1, **kw):
"""
Check that the given Jacobian approximation satisfies secant
conditions for last `npoints` points.
"""
jac = jac_cls(**kw)
jac.setup(self.xs[0], self.fs[0], None)
for j, (x, f) in enumerate(zip(self.xs[1:], self.fs[1:])):
jac.update(x, f)
for k in xrange(min(npoints, j+1)):
dx = self.xs[j-k+1] - self.xs[j-k]
df = self.fs[j-k+1] - self.fs[j-k]
assert_(np.allclose(dx, jac.solve(df)))
# Check that the `npoints` secant bound is strict
if j >= npoints:
dx = self.xs[j-npoints+1] - self.xs[j-npoints]
df = self.fs[j-npoints+1] - self.fs[j-npoints]
assert_(not np.allclose(dx, jac.solve(df)))
def test_broyden1(self):
self._check_secant(nonlin.BroydenFirst)
def test_broyden2(self):
self._check_secant(nonlin.BroydenSecond)
def test_broyden1_update(self):
# Check that BroydenFirst update works as for a dense matrix
jac = nonlin.BroydenFirst(alpha=0.1)
jac.setup(self.xs[0], self.fs[0], None)
B = np.identity(5) * (-1/0.1)
for last_j, (x, f) in enumerate(zip(self.xs[1:], self.fs[1:])):
df = f - self.fs[last_j]
dx = x - self.xs[last_j]
B += (df - dot(B, dx))[:,None] * dx[None,:] / dot(dx, dx)
jac.update(x, f)
assert_(np.allclose(jac.todense(), B, rtol=1e-10, atol=1e-13))
def test_broyden2_update(self):
# Check that BroydenSecond update works as for a dense matrix
jac = nonlin.BroydenSecond(alpha=0.1)
jac.setup(self.xs[0], self.fs[0], None)
H = np.identity(5) * (-0.1)
for last_j, (x, f) in enumerate(zip(self.xs[1:], self.fs[1:])):
df = f - self.fs[last_j]
dx = x - self.xs[last_j]
H += (dx - dot(H, df))[:,None] * df[None,:] / dot(df, df)
jac.update(x, f)
assert_(np.allclose(jac.todense(), inv(H), rtol=1e-10, atol=1e-13))
def test_anderson(self):
# Anderson mixing (with w0=0) satisfies secant conditions
# for the last M iterates, see [Ey]_
#
# .. [Ey] V. Eyert, J. Comp. Phys., 124, 271 (1996).
self._check_secant(nonlin.Anderson, M=3, w0=0, npoints=3)
class TestLinear(TestCase):
"""Solve a linear equation;
some methods find the exact solution in a finite number of steps"""
def _check(self, jac, N, maxiter, complex=False, **kw):
np.random.seed(123)
A = np.random.randn(N, N)
if complex:
A = A + 1j*np.random.randn(N, N)
b = np.random.randn(N)
if complex:
b = b + 1j*np.random.randn(N)
def func(x):
return dot(A, x) - b
sol = nonlin.nonlin_solve(func, b*0, jac, maxiter=maxiter,
f_tol=1e-6, line_search=None, verbose=0)
assert_(np.allclose(dot(A, sol), b, atol=1e-6))
def test_broyden1(self):
# Broyden methods solve linear systems exactly in 2*N steps
self._check(nonlin.BroydenFirst(alpha=1.0), 20, 41, False)
self._check(nonlin.BroydenFirst(alpha=1.0), 20, 41, True)
<|fim▁hole|>
def test_anderson(self):
# Anderson is rather similar to Broyden, if given enough storage space
self._check(nonlin.Anderson(M=50, alpha=1.0), 20, 29, False)
self._check(nonlin.Anderson(M=50, alpha=1.0), 20, 29, True)
def test_krylov(self):
# Krylov methods solve linear systems exactly in N inner steps
self._check(nonlin.KrylovJacobian, 20, 2, False, inner_m=10)
self._check(nonlin.KrylovJacobian, 20, 2, True, inner_m=10)
class TestJacobianDotSolve(object):
"""Check that solve/dot methods in Jacobian approximations are consistent"""
def _func(self, x):
return x**2 - 1 + np.dot(self.A, x)
def _check_dot(self, jac_cls, complex=False, tol=1e-6, **kw):
np.random.seed(123)
N = 7
def rand(*a):
q = np.random.rand(*a)
if complex:
q = q + 1j*np.random.rand(*a)
return q
def assert_close(a, b, msg):
d = abs(a - b).max()
f = tol + abs(b).max()*tol
if d > f:
raise AssertionError('%s: err %g' % (msg, d))
self.A = rand(N, N)
# initialize
x0 = np.random.rand(N)
jac = jac_cls(**kw)
jac.setup(x0, self._func(x0), self._func)
# check consistency
for k in xrange(2*N):
v = rand(N)
if hasattr(jac, '__array__'):
Jd = np.array(jac)
if hasattr(jac, 'solve'):
Gv = jac.solve(v)
Gv2 = np.linalg.solve(Jd, v)
assert_close(Gv, Gv2, 'solve vs array')
if hasattr(jac, 'rsolve'):
Gv = jac.rsolve(v)
Gv2 = np.linalg.solve(Jd.T.conj(), v)
assert_close(Gv, Gv2, 'rsolve vs array')
if hasattr(jac, 'matvec'):
Jv = jac.matvec(v)
Jv2 = np.dot(Jd, v)
assert_close(Jv, Jv2, 'dot vs array')
if hasattr(jac, 'rmatvec'):
Jv = jac.rmatvec(v)
Jv2 = np.dot(Jd.T.conj(), v)
assert_close(Jv, Jv2, 'rmatvec vs array')
if hasattr(jac, 'matvec') and hasattr(jac, 'solve'):
Jv = jac.matvec(v)
Jv2 = jac.solve(jac.matvec(Jv))
assert_close(Jv, Jv2, 'dot vs solve')
if hasattr(jac, 'rmatvec') and hasattr(jac, 'rsolve'):
Jv = jac.rmatvec(v)
Jv2 = jac.rmatvec(jac.rsolve(Jv))
assert_close(Jv, Jv2, 'rmatvec vs rsolve')
x = rand(N)
jac.update(x, self._func(x))
def test_broyden1(self):
self._check_dot(nonlin.BroydenFirst, complex=False)
self._check_dot(nonlin.BroydenFirst, complex=True)
def test_broyden2(self):
self._check_dot(nonlin.BroydenSecond, complex=False)
self._check_dot(nonlin.BroydenSecond, complex=True)
def test_anderson(self):
self._check_dot(nonlin.Anderson, complex=False)
self._check_dot(nonlin.Anderson, complex=True)
def test_diagbroyden(self):
self._check_dot(nonlin.DiagBroyden, complex=False)
self._check_dot(nonlin.DiagBroyden, complex=True)
def test_linearmixing(self):
self._check_dot(nonlin.LinearMixing, complex=False)
self._check_dot(nonlin.LinearMixing, complex=True)
def test_excitingmixing(self):
self._check_dot(nonlin.ExcitingMixing, complex=False)
self._check_dot(nonlin.ExcitingMixing, complex=True)
def test_krylov(self):
self._check_dot(nonlin.KrylovJacobian, complex=False, tol=1e-4)
self._check_dot(nonlin.KrylovJacobian, complex=True, tol=1e-4)
class TestNonlinOldTests(TestCase):
""" Test case for a simple constrained entropy maximization problem
(the machine translation example of Berger et al in
Computational Linguistics, vol 22, num 1, pp 39--72, 1996.)
"""
def test_broyden1(self):
x= nonlin.broyden1(F,F.xin,iter=12,alpha=1)
assert_(nonlin.norm(x) < 1e-9)
assert_(nonlin.norm(F(x)) < 1e-9)
def test_broyden2(self):
x= nonlin.broyden2(F,F.xin,iter=12,alpha=1)
assert_(nonlin.norm(x) < 1e-9)
assert_(nonlin.norm(F(x)) < 1e-9)
def test_anderson(self):
x= nonlin.anderson(F,F.xin,iter=12,alpha=0.03,M=5)
assert_(nonlin.norm(x) < 0.33)
def test_linearmixing(self):
x = nonlin.linearmixing(F,F.xin,iter=60,alpha=0.5)
assert_(nonlin.norm(x) < 1e-7)
assert_(nonlin.norm(F(x)) < 1e-7)
def test_exciting(self):
x= nonlin.excitingmixing(F,F.xin,iter=20,alpha=0.5)
assert_(nonlin.norm(x) < 1e-5)
assert_(nonlin.norm(F(x)) < 1e-5)
def test_diagbroyden(self):
x= nonlin.diagbroyden(F,F.xin,iter=11,alpha=1)
assert_(nonlin.norm(x) < 1e-8)
assert_(nonlin.norm(F(x)) < 1e-8)
if __name__ == "__main__":
run_module_suite()<|fim▁end|> | def test_broyden2(self):
# Broyden methods solve linear systems exactly in 2*N steps
self._check(nonlin.BroydenSecond(alpha=1.0), 20, 41, False)
self._check(nonlin.BroydenSecond(alpha=1.0), 20, 41, True) |
<|file_name|>clipgrab_si.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="sl_SI">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>ClipGrab</name>
<message>
<location filename="clipgrab.cpp" line="37"/>
<source>Automatic language recognition</source>
<translation>Samodejno prepoznavanje jezika</translation>
</message>
<message>
<location filename="clipgrab.cpp" line="486"/>
<source>Downloading update … %1/%2 MBytes</source>
<translation type="unfinished">Prenos posodobitev … %1/%2 MBajtov</translation>
</message>
<message>
<location filename="clipgrab.cpp" line="495"/>
<source>There was an error while downloading the update.: %1</source>
<translation type="unfinished">Prišlo je do napake med prenosom posodobitev: %1</translation>
</message>
<message>
<location filename="clipgrab.cpp" line="509"/>
<source>The fingerprint of the downloaded update file could not be verified: %1 should have been %2</source>
<translation type="unfinished">Prstni odtis prenešene datoteke za posodobitev ni bilo mogoče preveriti: %1 moral bi biti %2</translation>
</message>
<message>
<location filename="clipgrab.cpp" line="521"/>
<source>Could not open update file.</source>
<translation type="unfinished">Ne morem odpreti datoteko za posodobitev.</translation>
</message>
</context>
<context>
<name>LoginDialog</name>
<message>
<location filename="login_dialog.ui" line="20"/>
<source>Confirmation or Login Required</source>
<translation type="unfinished">Zahtevana je potrditev ali prijava</translation>
</message>
<message>
<location filename="login_dialog.ui" line="54"/>
<source>Confirmation or login required</source>
<translation type="unfinished">Zahtevana je potrditev ali prijava</translation>
</message>
<message>
<location filename="login_dialog.ui" line="67"/>
<source>This video requires you to sign in or confirm your access before downloading it.</source>
<translation type="unfinished">Ta video zahteva, da se prijavite ali potrdite dostop pred prenosom.</translation>
</message>
<message>
<location filename="login_dialog.ui" line="100"/>
<source>about:blank</source>
<translation type="unfinished">about:blank</translation>
</message>
<message>
<location filename="login_dialog.ui" line="124"/>
<source>Remember login</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>MainWindow</name>
<message>
<location filename="mainwindow.cpp" line="249"/>
<location filename="mainwindow.cpp" line="261"/>
<source>Select Target</source>
<translation type="unfinished">Izberite cilj</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="331"/>
<source>Please wait while ClipGrab is loading information about the video ...</source>
<translation type="unfinished">Prosimo počakajte, da ClipGrab naloži informacije o videoposnetku ...</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="348"/>
<source>Please enter the link to the video you want to download in the field below.</source>
<translation type="unfinished">Vnesite v spodnje polje povezavo do videa, ki ga želite prenesti.</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="353"/>
<source>The link you have entered seems to not be recognised by any of the supported portals.<br/>Now ClipGrab will check if it can download a video from that site anyway.</source>
<translation type="unfinished">Zdi se, da noben podprt portal ni prepoznal povezave, ki ste jo vnesli.<br/>Zdaj bo ClipGrab preveril, če vseeno lahko prenesete video s te strani.</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="400"/>
<source>No downloadable video could be found.<br />Maybe you have entered the wrong link or there is a problem with your connection.</source>
<translation type="unfinished">Ni bilo možno najti nobenega videoposnetka za prenos.<br />Morda ste vnesli napačno povezavo ali pa imate težave z vašo povezavo.</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="413"/>
<source>ClipGrab - Select target path</source>
<translation type="unfinished">ClipGrab - Izberite ciljno pot</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="460"/>
<source>ClipGrab: Video discovered in your clipboard</source>
<translation type="unfinished">ClipGrab: Video je odkrit v vašem odložišču</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="460"/>
<source>ClipGrab has discovered the address of a compatible video in your clipboard. Click on this message to download it now.</source>
<translation type="unfinished">ClipGrab je odkril naslov združljivega videoposnetka v odložišču. Kliknite na to sporočilo, da ga zdaj naložite.</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="566"/>
<source>ClipGrab - Exit confirmation</source>
<translation type="unfinished">ClipGrab - Izhod iz potrditve</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="566"/>
<source>There is still at least one download in progress.<br />If you exit the program now, all downloads will be canceled and cannot be recovered later.<br />Do you really want to quit ClipGrab now?</source>
<translation type="unfinished">Še vedno se izvaja vsaj en prenos.<br />Če zdaj zapustite program bodo preklicani vsi prenosi in jih kasneje ne bo možno obnoviti.<br />Ali res želite zdaj zapustiti ClipGrab?</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="603"/>
<source>Download finished</source>
<translation type="unfinished">Prenos je končan</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="603"/>
<source>Downloading and converting “%title” is now finished.</source>
<translation type="unfinished">Prenos in pretvorbo “%title” sta zdaj končani.</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="607"/>
<source>All downloads finished</source>
<translation type="unfinished">Vsi prenosi so končani</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="607"/>
<source>ClipGrab has finished downloading and converting all selected videos.</source>
<translation type="unfinished">ClipGrab je končal prenos in pretvorbo vseh izbranih videoposnetkov.</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="672"/>
<location filename="mainwindow.cpp" line="674"/>
<location filename="mainwindow.cpp" line="681"/>
<location filename="mainwindow.cpp" line="683"/>
<source>ClipGrab</source>
<translation type="unfinished">ClipGrab</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="672"/>
<location filename="mainwindow.cpp" line="674"/>
<source> MiB</source>
<translation type="unfinished">> MB</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="674"/>
<source> KiB</source>
<translation type="unfinished"> KB</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="681"/>
<location filename="mainwindow.cpp" line="683"/>
<source>Currently no downloads in progress.</source>
<translation type="unfinished">Trenutno se ne izvaja noben prenos.</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="684"/>
<source>ClipGrab - Download and Convert Online Videos</source>
<translation type="unfinished">ClipGrab - Prenesi in pretvori spletne videoposnetke</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="880"/>
<source>&Open downloaded file</source>
<translation type="unfinished">&Odpri preneseno datoteko</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="881"/>
<source>Open &target folder</source>
<translation type="unfinished">Odpri &ciljno mapo</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="883"/>
<source>&Pause download</source>
<translation type="unfinished">Začasno &ustavi prenos</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="884"/>
<source>&Restart download</source>
<translation type="unfinished">Ponovno &začni prenos</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="885"/>
<source>&Cancel download</source>
<translation type="unfinished">Pre&kliči prenos</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="887"/>
<source>Copy &video link</source>
<translation type="unfinished">&Kopiraj povezavo do videoposnetka</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="888"/>
<source>Open video link in &browser</source>
<translation type="unfinished">Odpri video povezavo v &brskalniku</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="893"/>
<source>Resume download</source>
<translation type="unfinished">Nadaljuj s prenosom</translation>
</message>
<message>
<location filename="mainwindow.cpp" line="902"/>
<source>Show in &Finder</source>
<translation type="unfinished">Prikaži v &iskalniku</translation>
</message>
</context>
<context>
<name>MainWindowClass</name>
<message>
<location filename="mainwindow.ui" line="14"/>
<source>ClipGrab - Download and Convert Online Videos</source>
<translation type="unfinished">ClipGrab - Prenos in pretvorba spletnih videoposnetkov</translation>
</message>
<message>
<location filename="mainwindow.ui" line="136"/>
<source>Search</source>
<translation type="unfinished">Poišči</translation>
</message>
<message>
<location filename="mainwindow.ui" line="155"/>
<source>Enter keywords in the box below in order to search videos on YouTube</source>
<translation type="unfinished">Vnesite ključne besede v spodnje polje za iskanje video posnetkov na YouTube</translation>
</message>
<message>
<location filename="mainwindow.ui" line="163"/>
<source>about:blank</source>
<translation type="unfinished">about:blank</translation>
</message>
<message>
<location filename="mainwindow.ui" line="172"/>
<source>Downloads</source>
<translation type="unfinished">Prenosi</translation>
</message>
<message>
<location filename="mainwindow.ui" line="370"/>
<source>Grab this clip!</source>
<translation type="unfinished">Zgrabi ta posnetek!</translation>
</message>
<message>
<location filename="mainwindow.ui" line="203"/>
<source>Quality:</source>
<translation type="unfinished">Kakovost:</translation>
</message>
<message>
<location filename="mainwindow.ui" line="216"/>
<source>Please enter the link to the video you want to download in the field below.</source>
<translation type="unfinished">V spodnje polje vnesite povezavo do videa, ki ga želite prenesti.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="251"/>
<source>Format:</source>
<translation type="unfinished">Format:</translation>
</message>
<message>
<location filename="mainwindow.ui" line="290"/>
<source>Current Downloads:</source>
<translation type="unfinished">Trenutni prenosi:</translation>
</message>
<message>
<location filename="mainwindow.ui" line="300"/>
<source>Cancel selected download</source>
<translation type="unfinished">Prekliči izbran prenos</translation>
</message>
<message>
<location filename="mainwindow.ui" line="310"/>
<source>Open the target folder of the selected download</source>
<translation type="unfinished">Odpri ciljno mapo izbranega prenosa</translation>
</message>
<message>
<location filename="mainwindow.ui" line="330"/>
<source>Portal</source>
<translation type="unfinished">Portal</translation>
</message>
<message>
<location filename="mainwindow.ui" line="335"/>
<source>Title</source>
<translation type="unfinished">Naziv</translation>
</message>
<message>
<location filename="mainwindow.ui" line="340"/>
<source>Format</source>
<translation type="unfinished">Format</translation>
</message>
<message>
<location filename="mainwindow.ui" line="345"/>
<source>Progress</source>
<translation type="unfinished">Napredek</translation>
</message>
<message>
<location filename="mainwindow.ui" line="356"/>
<source>Pause selected download</source>
<translation type="unfinished">Premor izbranega prenosa</translation>
</message>
<message>
<location filename="mainwindow.ui" line="389"/>
<source>…</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="mainwindow.ui" line="397"/>
<source>Settings</source>
<translation type="unfinished">Nastavitve</translation>
</message>
<message>
<location filename="mainwindow.ui" line="410"/>
<source>General</source>
<translation type="unfinished">Splošno</translation>
</message>
<message>
<location filename="mainwindow.ui" line="434"/>
<source>Here you can adjust some general settings concerning the behaviour of ClipGrab.</source>
<translation type="unfinished">Tu lahko nastavite nekatere splošne nastavitve glede obnašanja ClipGraba.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="445"/>
<source>Target Path</source>
<translation type="unfinished">Ciljna pot</translation>
</message>
<message>
<location filename="mainwindow.ui" line="451"/>
<source>Always save at the last used path.</source>
<translation type="unfinished">Vedno shrani zadnjo uporabljeno pot.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="465"/>
<source>Here you can configure where the downloaded videos are to be saved.</source>
<translation type="unfinished">Tu lahko nastavite, kjer se shranjujejo naloženi videoposnetki.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="472"/>
<source>Browse ...</source>
<translation type="unfinished">Prebrskaj ...</translation>
</message>
<message>
<location filename="mainwindow.ui" line="479"/>
<source>Never ask for file name</source>
<translation type="unfinished">Nikoli ne vprašaj z aime datoteke</translation>
</message>
<message>
<location filename="mainwindow.ui" line="500"/>
<source>Metadata</source>
<translation type="unfinished">Metapodatki</translation>
</message>
<message>
<location filename="mainwindow.ui" line="506"/>
<source>Here you can configure if ClipGrab is supposed to add metadata (ID3 tags) to your mp3 files.</source>
<translation type="unfinished">Tukaj lahko doličite ali naj ClipGrab dodal metapodatke (ID3 oznake) v vaše mp3 datoteke.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="513"/>
<source>Use metadata</source>
<translation type="unfinished">Uporabi metapodatke</translation>
</message>
<message>
<location filename="mainwindow.ui" line="534"/>
<source>Clipboard</source>
<translation type="unfinished">Odložišče</translation>
</message>
<message>
<location filename="mainwindow.ui" line="540"/>
<source>Here you can configure how ClipGrab behaves when a downloadable video is discovered in your clipboard.</source>
<translation type="unfinished">Tukaj lahko nastavite kako naj se ClipGrab obnaša, ko je prenosljiv videoposnetek odkrit v vašem odložišču.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="547"/>
<source>Always download</source>
<translation type="unfinished">Vedno prenesi</translation>
</message>
<message>
<location filename="mainwindow.ui" line="554"/>
<source>Never download</source>
<translation type="unfinished">Nikoli ne prenesi</translation>
</message>
<message>
<location filename="mainwindow.ui" line="561"/>
<source>Always ask</source>
<translation type="unfinished">Vedno vprašaj</translation>
</message>
<message>
<location filename="mainwindow.ui" line="582"/>
<source>Notifications</source>
<translation type="unfinished">Obvestila</translation>
</message>
<message>
<location filename="mainwindow.ui" line="588"/>
<source>After each download</source>
<translation type="unfinished">Po vsakem prenosu</translation>
</message>
<message>
<location filename="mainwindow.ui" line="595"/>
<source>After all downloads have been completed</source>
<translation type="unfinished">Potem, ko so bili vsi prenosi zaključeni</translation>
</message>
<message>
<location filename="mainwindow.ui" line="615"/>
<source>Here you can configure when ClipGrab is supposed to display notifications.</source>
<translation type="unfinished">Tu lahko nastavite, kje naj bi ClipGrab prikaz obvestila.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="622"/>
<source>Never</source>
<translation type="unfinished">Nikoli</translation>
</message>
<message>
<location filename="mainwindow.ui" line="630"/>
<source>Proxy</source>
<translation type="unfinished">Proxy strežnik</translation>
</message>
<message>
<location filename="mainwindow.ui" line="636"/>
<source>Use a proxy server</source>
<translation type="unfinished">Uporabi Proxy strežnik</translation><|fim▁hole|> <translation type="unfinished">Nastavitve Proxy strežnika</translation>
</message>
<message>
<location filename="mainwindow.ui" line="655"/>
<source>Hostname/IP:</source>
<translation type="unfinished">Ime gostitelja/IP:</translation>
</message>
<message>
<location filename="mainwindow.ui" line="662"/>
<source>Port:</source>
<translation type="unfinished">Vrata:</translation>
</message>
<message>
<location filename="mainwindow.ui" line="689"/>
<source>Proxy type:</source>
<translation type="unfinished">Vrsta Proxy:</translation>
</message>
<message>
<location filename="mainwindow.ui" line="697"/>
<source>HTTP Proxy</source>
<translation type="unfinished">HTTP Proxy</translation>
</message>
<message>
<location filename="mainwindow.ui" line="702"/>
<source>Socks5 Proxy</source>
<translation type="unfinished">Socks5 Proxy</translation>
</message>
<message>
<location filename="mainwindow.ui" line="729"/>
<source>Proxy authentication</source>
<translation type="unfinished">Preverjanje pristnosti</translation>
</message>
<message>
<location filename="mainwindow.ui" line="735"/>
<source>Username:</source>
<translation type="unfinished">Uporabniško ime:</translation>
</message>
<message>
<location filename="mainwindow.ui" line="752"/>
<source>Password:</source>
<translation type="unfinished">Geslo:</translation>
</message>
<message>
<location filename="mainwindow.ui" line="765"/>
<source>Proxy requires authentication</source>
<translation type="unfinished">Proxy zahteva preverjanje pristnosti</translation>
</message>
<message>
<location filename="mainwindow.ui" line="773"/>
<source>Other</source>
<translation type="unfinished">Drugo</translation>
</message>
<message>
<location filename="mainwindow.ui" line="800"/>
<source>Remove finished downloads from list</source>
<translation type="unfinished">Odstrani s seznama končane prenose</translation>
</message>
<message>
<location filename="mainwindow.ui" line="779"/>
<source>Minimize ClipGrab to the system tray</source>
<translation type="unfinished">Pomanjšaj ClipGrab v programsko vrstico</translation>
</message>
<message>
<location filename="mainwindow.ui" line="786"/>
<source>Use WebM if possible</source>
<translation type="unfinished">Uporabi WebM, če je možno</translation>
</message>
<message>
<location filename="mainwindow.ui" line="793"/>
<source>Ignore SSL errors</source>
<translation type="unfinished">Prezri SSL napake</translation>
</message>
<message>
<location filename="mainwindow.ui" line="820"/>
<source>Remember logins</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="mainwindow.ui" line="827"/>
<source>Remember video quality</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="mainwindow.ui" line="839"/>
<source>Language</source>
<translation type="unfinished">Jezik</translation>
</message>
<message>
<location filename="mainwindow.ui" line="863"/>
<source>Here you can change the language of ClipGrab.</source>
<translation type="unfinished">Tukaj lahko spremenite jezik vmesnika ClipGrab</translation>
</message>
<message>
<location filename="mainwindow.ui" line="870"/>
<source>Please select a language from the list below. You have to restart ClipGrab in order to apply you selection.</source>
<translation type="unfinished">Izberite jezik s spodnjega seznama. Morali boste ponovno zagnati ClipGrab, da bo uveljavljena vaš izbor.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="907"/>
<source>Conversion</source>
<translation type="unfinished">Pretvorba</translation>
</message>
<message>
<location filename="mainwindow.ui" line="930"/>
<source>Experts can create custom presets for the video conversion here.</source>
<translation type="unfinished">Strokovnjaki tukaj lahko ustvarijo prednastavitve po meri za pretvorbo videoposnetkov.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="937"/>
<source>Format preset</source>
<translation type="unfinished">Format prednastavitve</translation>
</message>
<message>
<location filename="mainwindow.ui" line="947"/>
<source>Add new preset</source>
<translation type="unfinished">Dodaj novo prednastavitev</translation>
</message>
<message>
<location filename="mainwindow.ui" line="954"/>
<source>Remove selected preset</source>
<translation type="unfinished">Odstrani izbrano prednastavitev</translation>
</message>
<message>
<location filename="mainwindow.ui" line="977"/>
<source>Audio</source>
<translation type="unfinished">Zvok</translation>
</message>
<message>
<location filename="mainwindow.ui" line="989"/>
<location filename="mainwindow.ui" line="1041"/>
<source>Codec</source>
<translation type="unfinished">Kodek</translation>
</message>
<message>
<location filename="mainwindow.ui" line="1015"/>
<location filename="mainwindow.ui" line="1103"/>
<source>Bitrate (kb/s)</source>
<translation type="unfinished">Bitna hitrost (kb/s)</translation>
</message>
<message>
<location filename="mainwindow.ui" line="1022"/>
<source>Disable Audio</source>
<translation type="unfinished">Onemogoči zvok</translation>
</message>
<message>
<location filename="mainwindow.ui" line="1032"/>
<source>Video</source>
<translation type="unfinished">Video</translation>
</message>
<message>
<location filename="mainwindow.ui" line="1054"/>
<source>Frames/second</source>
<translation type="unfinished">Okvirjev/sekund</translation>
</message>
<message>
<location filename="mainwindow.ui" line="1113"/>
<source>Disable video</source>
<translation type="unfinished">Onemogoči video</translation>
</message>
<message>
<location filename="mainwindow.ui" line="1128"/>
<source>About</source>
<translation type="unfinished">Vizitka</translation>
</message>
<message>
<location filename="mainwindow.ui" line="1134"/>
<source><h2>Support ClipGrab!</h2>
<p>Only with your support, ClipGrab can remain free software!<br>So if you like ClipGrab and also want to help ensuring its further development, please consider making a donation.</p></source>
<translation type="unfinished"><h2>Podprite ClipGrab!</h2>
<p>Le z vašo podporo je ClipGrab lahko še vedno brezplačen program!<br>Torej, če vam je všeč ClipGrab in nam želite pomagati zagotoviti njegov nadaljnji razvoj, vas prosimo za donacijo.</p></translation>
</message>
<message>
<location filename="mainwindow.ui" line="1177"/>
<source><h2>Translation</h2>
ClipGrab is already available in many languages. If ClipGrab has not been translated into your language yet and if you want to contribute a translation, please check <a href="http://clipgrab.de/translate">http://clipgrab.de/translate</a> for further information.</source>
<translation type="unfinished"><h2>Prevod</h2>
ClipGrab je že na voljo v številnih jezikih. Če ClipGrab še ni preveden v vaš jezik in če želite, da prispevate svoj prevod, preverite <a href="http://clipgrab.de/translate">http://clipgrab.de/translate</a> za nadaljnje informacije.</translation>
</message>
<message>
<location filename="mainwindow.ui" line="1194"/>
<source><h2>Thanks</h2>
ClipGrab relies on the work of the Qt project and the ffmpeg team.<br>
Visit <a href="http://qt-project.org">qt-project.org</a> and <a href="http://ffmpeg.org">ffmpeg.org</a> for further information.</source>
<translation type="unfinished"><h2>Hvala</h2>
ClipGrab temelji na delu projekta Qt in ekipe ffmpeg.<br>
Obiščite <a href="http://qt-project.org">qt-project.org</a> in <a href="http://ffmpeg.org">ffmpeg.org</a> za več informacij.</translation>
</message>
</context>
<context>
<name>MetadataDialog</name>
<message>
<location filename="metadata-dialog.ui" line="14"/>
<source>ClipGrab - enter metadata</source>
<translation type="unfinished">ClipGrab - vnos metapodatkov</translation>
</message>
<message>
<location filename="metadata-dialog.ui" line="25"/>
<source>Please enter the metadata for your download. If you don't want to add metadata, just leave the fields empty.</source>
<translation type="unfinished">Vnesite metapodatke za vaš prenos. Če 'nočete dodati metapodatke, pustite prazna polja.</translation>
</message>
<message>
<location filename="metadata-dialog.ui" line="45"/>
<source>Title:</source>
<translation type="unfinished">Naziv:</translation>
</message>
<message>
<location filename="metadata-dialog.ui" line="52"/>
<source>Artist:</source>
<translation type="unfinished">Izvajalec:</translation>
</message>
</context>
<context>
<name>UpdateMessage</name>
<message>
<location filename="update_message.ui" line="14"/>
<source>Update for ClipGrab</source>
<translation type="unfinished">Posodobitev za ClipGrab</translation>
</message>
<message>
<location filename="update_message.ui" line="69"/>
<source>ClipGrab %1 is now available (you are using %2). Would you like to install the update?</source>
<translation type="unfinished">lipGrab %1 je zdaj na voljo (vi uporabljate %2). Želite namestiti posodobitev?</translation>
</message>
<message>
<location filename="update_message.ui" line="83"/>
<source>There is an update for your version of ClipGrab!</source>
<oldsource>Na voljo je posodobitev za vašo različico ClipGraba!</oldsource>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="update_message.ui" line="183"/>
<source>Skip this update</source>
<translation type="unfinished">Preskoči to posodobitev</translation>
</message>
<message>
<location filename="update_message.ui" line="193"/>
<source>Download update</source>
<translation type="unfinished">Prenesi posodobitev</translation>
</message>
<message>
<location filename="update_message.ui" line="204"/>
<source>about:blank</source>
<translation type="unfinished">about:blank</translation>
</message>
<message>
<location filename="update_message.ui" line="212"/>
<source>Remind me later</source>
<translation type="unfinished">Opomni me kasneje</translation>
</message>
<message>
<location filename="update_message.ui" line="229"/>
<source>The update will begin in just a moment …</source>
<translation type="unfinished">Posodobitev se bo začela čez nekaj trenutkov …</translation>
</message>
</context>
<context>
<name>converter_copy</name>
<message>
<location filename="converter_copy.cpp" line="28"/>
<source>Original</source>
<translation type="unfinished">Izvirnik</translation>
</message>
</context>
<context>
<name>converter_ffmpeg</name>
<message>
<location filename="converter_ffmpeg.cpp" line="212"/>
<source>MPEG4</source>
<translation type="unfinished">MPEG4</translation>
</message>
<message>
<location filename="converter_ffmpeg.cpp" line="213"/>
<source>WMV (Windows)</source>
<translation type="unfinished">WMV (Windows)</translation>
</message>
<message>
<location filename="converter_ffmpeg.cpp" line="214"/>
<source>OGG Theora</source>
<translation type="unfinished">OGG Theora</translation>
</message>
<message>
<location filename="converter_ffmpeg.cpp" line="215"/>
<source>MP3 (audio only)</source>
<translation type="unfinished">MP3 (samo zvok)</translation>
</message>
<message>
<location filename="converter_ffmpeg.cpp" line="216"/>
<source>OGG Vorbis (audio only)</source>
<translation type="unfinished">OGG Vorbis (samo zvok)</translation>
</message>
<message>
<location filename="converter_ffmpeg.cpp" line="217"/>
<source>Original (audio only)</source>
<translation type="unfinished">Izvirnik (samo zvok)</translation>
</message>
<message>
<location filename="converter_ffmpeg.cpp" line="345"/>
<source>No installed version of avconv or ffmpeg coud be found. Converting files and downloading 1080p videos from YouTube is not supported.</source>
<translation type="unfinished">Ni bilo možno najti nameščen avconv ali ffmpeg. Ni podprta pretvorba datotek in prenos 1080p videoposnetkov z YouTube.</translation>
</message>
<message>
<location filename="converter_ffmpeg.cpp" line="360"/>
<source>The installed version of %1 is outdated.
Downloading 1080p videos from YouTube is not supported.</source>
<translation type="unfinished">Nameščena različica %1 je zastarela</translation>
</message>
</context>
<context>
<name>messageDialog</name>
<message>
<location filename="message_dialog.ui" line="14"/>
<source>Dialog</source>
<translation type="unfinished">Pogovorno okno</translation>
</message>
<message>
<location filename="message_dialog.ui" line="35"/>
<source>Close this message</source>
<translation type="unfinished">Zapri to sporočilo</translation>
</message>
<message>
<location filename="message_dialog.ui" line="62"/>
<source>about:blank</source>
<translation type="unfinished">about:blank</translation>
</message>
</context>
<context>
<name>playlistDialog</name>
<message>
<location filename="playlist_dialog.ui" line="14"/>
<source>Dialog</source>
<translation type="unfinished">Pogovorno okno</translation>
</message>
<message>
<location filename="playlist_dialog.ui" line="35"/>
<source>Grab those clips!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="playlist_dialog.ui" line="58"/>
<source>Here you can set up the download of multiple videos at once.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="playlist_dialog.ui" line="65"/>
<source>Select all</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="playlist_dialog.ui" line="72"/>
<source>Add link to list</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="playlist_dialog.ui" line="79"/>
<source>Cancel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="playlist_dialog.ui" line="89"/>
<source>Deselect all</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>video</name>
<message>
<location filename="video.cpp" line="159"/>
<location filename="video.cpp" line="305"/>
<source>Converting ...</source>
<translation type="unfinished">Pretvorba ...</translation>
</message>
<message>
<location filename="video.cpp" line="200"/>
<source>Downloading ...</source>
<translation type="unfinished">Prenos ....</translation>
</message>
<message>
<location filename="video.cpp" line="200"/>
<source> MiB</source>
<translation type="unfinished"> MB</translation>
</message>
<message>
<location filename="video.cpp" line="370"/>
<source>Finished</source>
<translation type="unfinished">Končano</translation>
</message>
<message>
<location filename="video.cpp" line="376"/>
<source>Finished!</source>
<translation type="unfinished">Končano!</translation>
</message>
<message>
<location filename="video.cpp" line="515"/>
<source>Cancelled</source>
<translation type="unfinished">Preklicano</translation>
</message>
</context>
<context>
<name>video_clipfish</name>
<message>
<source>normal</source>
<translation type="obsolete">normalna</translation>
</message>
</context>
<context>
<name>video_dailymotion</name>
<message>
<location filename="video_dailymotion.cpp" line="83"/>
<source>HD (1080p)</source>
<translation type="unfinished">HD (1080p)</translation>
</message>
<message>
<location filename="video_dailymotion.cpp" line="84"/>
<source>HD (720p)</source>
<translation type="unfinished">HD (720p)</translation>
</message>
<message>
<location filename="video_dailymotion.cpp" line="85"/>
<source>480p</source>
<translation type="unfinished">480p</translation>
</message>
<message>
<location filename="video_dailymotion.cpp" line="86"/>
<source>380p</source>
<translation type="unfinished">380p</translation>
</message>
<message>
<location filename="video_dailymotion.cpp" line="87"/>
<source>240p</source>
<translation type="unfinished">240p</translation>
</message>
</context>
<context>
<name>video_facebook</name>
<message>
<location filename="video_facebook.cpp" line="111"/>
<location filename="video_facebook.cpp" line="112"/>
<location filename="video_facebook.cpp" line="141"/>
<source>HD</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="video_facebook.cpp" line="113"/>
<location filename="video_facebook.cpp" line="114"/>
<source>normal</source>
<translation type="unfinished">normalna</translation>
</message>
</context>
<context>
<name>video_heuristic</name>
<message>
<location filename="video_heuristic.cpp" line="75"/>
<location filename="video_heuristic.cpp" line="88"/>
<location filename="video_heuristic.cpp" line="100"/>
<location filename="video_heuristic.cpp" line="113"/>
<location filename="video_heuristic.cpp" line="128"/>
<location filename="video_heuristic.cpp" line="142"/>
<location filename="video_heuristic.cpp" line="155"/>
<location filename="video_heuristic.cpp" line="168"/>
<location filename="video_heuristic.cpp" line="182"/>
<source>normal</source>
<translation type="unfinished">normalna</translation>
</message>
</context>
<context>
<name>video_myspass</name>
<message>
<location filename="video_myspass.cpp" line="87"/>
<source>high</source>
<translation type="unfinished">visoka</translation>
</message>
</context>
<context>
<name>video_myvideo</name>
<message>
<source>normal</source>
<translation type="obsolete">normalna</translation>
</message>
</context>
<context>
<name>video_youtube</name>
<message>
<location filename="video_youtube.cpp" line="152"/>
<source>HD (1080p)</source>
<translation type="unfinished">HD (1080p)</translation>
</message>
<message>
<location filename="video_youtube.cpp" line="156"/>
<source>HD (1440p)</source>
<translation type="unfinished">HD (1440p)</translation>
</message>
<message>
<location filename="video_youtube.cpp" line="160"/>
<source>4K (2160p)</source>
<translation type="unfinished">4K (2160p)</translation>
</message>
<message>
<location filename="video_youtube.cpp" line="164"/>
<source>5K (2880p)</source>
<translation type="unfinished">5K (2880p)</translation>
</message>
<message>
<location filename="video_youtube.cpp" line="168"/>
<source>8K (4320p)</source>
<translation type="unfinished">8K (4320p)</translation>
</message>
<message>
<location filename="video_youtube.cpp" line="498"/>
<location filename="video_youtube.cpp" line="529"/>
<location filename="video_youtube.cpp" line="625"/>
<location filename="video_youtube.cpp" line="628"/>
<source>Original</source>
<translation type="unfinished">Izvirnik</translation>
</message>
<message>
<location filename="video_youtube.cpp" line="681"/>
<source>normal</source>
<translation type="unfinished">normalna</translation>
</message>
</context>
</TS><|fim▁end|> | </message>
<message>
<location filename="mainwindow.ui" line="646"/>
<source>Proxy settings</source> |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod renderer;
mod render_cache;
mod render_state;
mod lexeme_mapper;
mod line_numbers;
mod scrollable_region;
<|fim▁hole|>pub use self::render_state::RenderState;
pub use self::lexeme_mapper::{LexemeMapper, MappedLexeme};
pub use self::line_numbers::LineNumbers;
pub use self::scrollable_region::ScrollableRegion;<|fim▁end|> | pub use self::renderer::BufferRenderer;
pub use self::render_cache::RenderCache; |
<|file_name|>conv.go<|end_file_name|><|fim▁begin|>package tempconv
// CToF converts a Celsius temperature to Fahrenheit
func CToF(c Celsius) Fahrenheit {<|fim▁hole|>// FToC converts a Fahrenheit temperature to Celsius
func FToC(f Fahrenheit) Celsius {
return Celsius((f - 32) * 5 / 9)
}<|fim▁end|> | return Fahrenheit(c * 9 / 5 + 32)
}
|
<|file_name|>LoadingView.js<|end_file_name|><|fim▁begin|>/**
Loading progress view
@class LoadingView
@constructor
@return {object} instantiated LoadingView
**/<|fim▁hole|> var LoadingView = Backbone.View.extend({
/**
Constructor
@method initialize
**/
initialize: function () {
var self = this;
BB.comBroker.setService(BB.SERVICES.LOADING_VIEW,self);
self.collection.on('add', function(){
BB.comBroker.getService(BB.EVENTS.APP_STACK_VIEW).selectView(BB.Elements.DIGG_CONTAINER);
self.collection.off('add');
});
}
});
return LoadingView;
});<|fim▁end|> | define(['jquery', 'backbone'], function ($, Backbone) { |
<|file_name|>unix.py<|end_file_name|><|fim▁begin|># Implements twisted-based UNIX domain socket transport
import sys
from twisted.internet import reactor
from pysnmp.carrier.twisted.dgram.base import DgramTwistedTransport
from pysnmp.carrier import error
domainName = snmpLocalDomain = (1, 3, 6, 1, 2, 1, 100, 1, 13)
class UnixTwistedTransport(DgramTwistedTransport):
# AbstractTwistedTransport API
def openClientMode(self, iface=''):
try:
self._lport = reactor.connectUNIXDatagram(iface, self)
except Exception:
raise error.CarrierError(sys.exc_info()[1])
return self
def openServerMode(self, iface=None):
try:
self._lport = reactor.listenUNIXDatagram(iface, self)
except Exception:
raise error.CarrierError(sys.exc_info()[1])
return self
def closeTransport(self):
d = self._lport.stopListening()<|fim▁hole|>UnixTransport = UnixTwistedTransport<|fim▁end|> | d and d.addCallback(lambda x: None)
DgramTwistedTransport.closeTransport(self)
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from openpyxl import load_workbook
from libs.xlsxInterface import getLevelTypes, travesalLevels
import models
from prettyprint import pp
from functools import partial
from django.db import transaction
def generateLevels(levelDict, levelTypes, cellAndValues):
currentDict = levelDict
parentLevel = None
for index, (cell, value) in zip(range(len(cellAndValues)), cellAndValues):<|fim▁hole|> currentDict[value] = {}
level = models.Level()
level.title = value
currentDict[value]['levelObject'] = level
if parentLevel != None:
level.parent = parentLevel
else:
pass
level.levelType = levelType
level.save()
parentLevel = level
else:
parentLevel = currentDict[value]['levelObject']
currentDict = currentDict[value]
pass
@transaction.atomic
def generateLevelsFromExcelFile(xlsxFile, **args):
ltlt = args.pop('leftTopLevelTypeCell')
ltrb = args.pop('rightBottomLevelTypeCell')
lt = args.pop('leftTopLevelCell')
rb = args.pop('rightBottomLevelCell')
ws = load_workbook(xlsxFile, read_only = False).worksheets[0]
levelTypes = getLevelTypes(ws, ltlt, ltrb)
levelTypes = map(lambda x: models.LevelType(x), levelTypes)
levelDict = {}
travesalLevels(ws, lt, rb, partial(generateLevels, levelDict, levelTypes))
for levelType in levelTypes:
levelType.save()
pass
return levelDict
@transaction.atomic
def test():
# levelDict = generateLevelsFromExcelFile('test.xlsx',
# leftTopLevelTypeCell = 'B6',
# rightBottomLevelTypeCell = 'D7',
# leftTopLevelCell = 'B8',
# rightBottomLevelCell = 'D419')
# levels = models.Level.objects.all()
traversalLevels()
pass
@transaction.atomic
def clearAll():
map(lambda x: x.delete(), models.Level.objects.all())
map(lambda x: x.delete(), models.LevelType.objects.all())
def traversalLevels():
roots = models.Level.objects.filter(parent = None)
tableTemplate = u"""
<table>
<tbody>
{content}
</tbody>
</table>
"""
content = u""
for level in roots:
content += u"<TR>" + makeLevelsTable(level)[1]
print tableTemplate.format(content = content)
def makeLevelsTable(level):
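# Returns (row_count, html_fragment): row_count is how many table rows the
# subtree rooted at `level` occupies and is substituted into the rowspan
# placeholder of this level's cell; html_fragment already nests the children's rows.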
children = models.Level.objects.filter(parent = level)
rowSpan = len(children)
if rowSpan == 0:
return (1, u"<td>{title}</td></tr>\n".format(title = level.title))
rowSpan = 0
ret = u"<TD rowspan='{{rowspan}}'>{title}</TD>".format(title = level.title)
for i, child in zip(range(len(children)), children):
childRowCount, childRows = makeLevelsTable(child)
rowSpan += childRowCount
if i == 0:
ret += childRows
else:
ret += u"<tr>" + childRows
ret = ret.format(rowspan = rowSpan)
return (rowSpan, ret)
def createTestObject(**args):
title = args.pop('title')
description = args.pop('description', u'')
testObject = models.TestObject.objects.create()
testObject.title = title
testObject.description = description
testObject.save()
return testObject
def getAllTestObjects():
return models.TestObject.objects.all()
def getAllTestLevelTypes():
return models.LevelType.objects.all()
def addLevelTypeToTestObject(levelTypeId, testObjectId):
try:
lt = models.LevelType.objects.get(id = levelTypeId)
to = models.TestObject.objects.get(id = testObjectId)
to.add(lt)
except Exception, e:
print e
raise<|fim▁end|> | levelType = levelTypes[index]
if value not in currentDict: |
<|file_name|>anchorGenerator.py<|end_file_name|><|fim▁begin|># anchorGenerator
from models.anchor import *
# main function
if __name__=='__main__':
# TEMP: Wipe existing anchors
# anchors = Anchor.all(size=1000)
# Anchor.delete_all(anchors)
# THIS IS TEMPORARY:
anchors = {'Vaccination', 'Vaccinations', 'Vaccine', 'Vaccines', 'Inoculation', 'Immunization', 'Shot', 'Chickenpox', 'Disease', 'Diseases', 'Hepatitis A', 'Hepatitis B', 'infection', 'infections', 'measles', 'outbreak', 'mumps', 'rabies', 'tetanus', 'virus', 'autism'}
seed = 'vaccination'
for anchor in anchors:
a = Anchor.getOrCreate(anchor)
a.findInstances()
a.save()
"""
query = {
"size": 0,
"query": {
"filtered": {
"query": {
"query_string": {
"query": "*",
"analyze_wildcard": True
}
}
}
},
"aggs": {
"2": {
"terms": {
"field": "title",<|fim▁hole|> "size": 100,
"order": {
"_count": "desc"
}
}
}
}
}
response = es.search(index="crowdynews", body=query)
retrieved = now()
anchors = {}
# go through each retrieved document
for hit in response['aggregations']['2']['buckets']:
key = hit['key']
if validKey(key):
anchors[key] = hit['doc_count']
addBulk(anchors)
"""<|fim▁end|> | |
<|file_name|>DiscoveredDeviceParent.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * Copyright 2016 The OpenDCT Authors. All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opendct.tuning.discovery;
import opendct.config.options.DeviceOptions;
import java.net.InetAddress;
public interface DiscoveredDeviceParent extends DeviceOptions {
/**
* The unique name of this capture device parent.
* <p/>
* This should always return exactly the same name every time this device is detected. This is
* used to verify that we are not potentially loading a duplicate device.
*
* @return The unchangeable unique name of this capture device.
*/
public String getName();
/**
* The friendly/modifiable name of this capture device parent.
* <p/>
* This can be the same as the unique name, but this value should be user assignable.
*
* @return The modifiable name of this capture device parent.
*/
public String getFriendlyName();
/**
* The unique id of this capture device parent.
* <p/>
* This ID must be exactly the same every time this device is detected. This is used to verify
* that we are not potentially loading a duplicate device.
*
* @return The unique ID for this capture device.
*/
public int getParentId();
/**
* Is this a network device?
*
* @return <i>true</i> if this is a network device.
*/
public boolean isNetworkDevice();
/**
* Returns the local IP address to be used when streaming to this computer.
* <p/>
* Return <i>null</i> if this is not a network device.
*
* @return Returns the local IP address if this is a network device or <i>null</i>.
*/
public InetAddress getLocalAddress();
/**
* Returns the current IP address of the capture device parent.
* <p/>
* Return <i>null</i> if this is not a network device.
*
* @return Returns the remote IP address if this is a network device or <i>null</i>.
*/
public InetAddress getRemoteAddress();
/**
* Returns the unique IDs of all child devices for this parent device.
* <p/>
* This list is allowed to expand. When a capture device is detected, the device parent should
* always be added first.
*
* @return An array of the child devices by unique ID.
*/
public int[] getChildDevices();
}<|fim▁end|> | |
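// Hypothetical sketch (not part of OpenDCT): one way a network-attached parent
// device could satisfy the contract above. The class and field names are invented,
// and the class is left abstract so the inherited DeviceOptions methods and
// getChildDevices() do not have to be shown here.
abstract class ExampleNetworkDeviceParent implements DiscoveredDeviceParent {
    private final String uniqueName;      // must be identical on every detection
    private String friendlyName;          // user-assignable display name
    private final int parentId;           // stable unique id
    private final InetAddress localAddress;
    private final InetAddress remoteAddress;

    ExampleNetworkDeviceParent(String uniqueName, int parentId,
                               InetAddress localAddress, InetAddress remoteAddress) {
        this.uniqueName = uniqueName;
        this.friendlyName = uniqueName;
        this.parentId = parentId;
        this.localAddress = localAddress;
        this.remoteAddress = remoteAddress;
    }

    public String getName() { return uniqueName; }
    public String getFriendlyName() { return friendlyName; }
    public int getParentId() { return parentId; }
    public boolean isNetworkDevice() { return true; }
    public InetAddress getLocalAddress() { return localAddress; }
    public InetAddress getRemoteAddress() { return remoteAddress; }
}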
<|file_name|>KeyParser.ts<|end_file_name|><|fim▁begin|>/**
* KeyParser.ts
*
* Simple parsing logic to take vim key bindings / chords,
* and return a normalized object.
*/
export interface IKey {
character: string
shift: boolean
alt: boolean
control: boolean
meta: boolean
}
export interface IKeyChord {
chord: IKey[]
}
export const parseKeysFromVimString = (keys: string): IKeyChord => {
const chord: IKey[] = []
let idx = 0
while (idx < keys.length) {
if (keys[idx] !== "<") {
chord.push(parseKey(keys[idx]))
} else {
const endIndex = getNextCharacter(keys, idx + 1)
// Malformed if there isn't a corresponding '>'
if (endIndex === -1) {
return { chord }
}
const keyContents = keys.substring(idx + 1, endIndex)
chord.push(parseKey(keyContents))
idx = endIndex + 1
}
idx++
}
return {
chord,
}
}
const getNextCharacter = (str: string, startIndex: number): number => {
let i = startIndex
while (i < str.length) {
if (str[i] === ">") {
return i
}
i++
}
return -1
}
export const parseKey = (key: string): IKey => {
if (key.indexOf("-") === -1) {
return {
character: key,
shift: false,
alt: false,
control: false,
meta: false,
}
}
const hasControl = key.indexOf("c-") >= 0 || key.indexOf("C-") >= 0
const hasShift = key.indexOf("s-") >= 0 || key.indexOf("S-") >= 0
const hasAlt = key.indexOf("a-") >= 0 || key.indexOf("A-") >= 0
const hasMeta = key.indexOf("m-") >= 0 || key.indexOf("M-") >= 0
const lastIndexoFHyphen = key.lastIndexOf("-")
const finalKey = key.substring(lastIndexoFHyphen + 1, key.length)
return {
character: finalKey,
shift: hasShift,
alt: hasAlt,
control: hasControl,
meta: hasMeta,
}
}
// Parse a chord string (e.g. <c-s-p>) into textual descriptions of the relevant keys
// <c-s-p> -> ["control", "shift", "p"]
export const parseChordParts = (keys: string): string[] => {
const parsedKeys = parseKeysFromVimString(keys)
if (!parsedKeys || !parsedKeys.chord || parsedKeys.chord.length === 0) {
return null
}
const firstChord = parsedKeys.chord[0]
const chordParts: string[] = []
if (firstChord.meta) {
chordParts.push("meta")
}
if (firstChord.control) {
chordParts.push("control")
}
if (firstChord.alt) {<|fim▁hole|> if (firstChord.shift) {
chordParts.push("shift")
}
chordParts.push(firstChord.character)
return chordParts
}<|fim▁end|> | chordParts.push("alt")
}
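// Illustrative expectations, derived by reading the parsers above (not an exhaustive spec):
// parseKeysFromVimString("<c-s-p>").chord[0]
//   => { character: "p", shift: true, alt: false, control: true, meta: false }
// parseChordParts("<c-s-p>") => ["control", "shift", "p"]
// parseKeysFromVimString("gg").chord has two entries, one per bare character.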
|
<|file_name|>videoscraper.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
'''
videoscraper scraper for Exodus forks.
Nov 9 2018 - Checked
Updated and refactored by someone.
Originally created by others.
'''
import json, urllib, urlparse
from resources.lib.modules import client<|fim▁hole|>from resources.lib.modules import directstream
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['localhost']
self.base_link = 'http://127.0.0.1:16735'
def movie(self, imdb, title, localtitle, aliases, year):
try:
return urllib.urlencode({'imdb': imdb, 'title': title, 'year': year})
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
return urllib.urlencode({'imdb': imdb, 'title': tvshowtitle, 'year': year})
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
return urllib.urlencode({'imdb': imdb, 'title': title, 'year': data['year'], 'season': season, 'episode': episode})
except:
return
def sources(self, url, hostDict, hostprDict):
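        # Descriptive note (added for clarity): builds a query against the local resolver
        # service at base_link, parses its JSON reply, and maps GVIDEO and CDN entries into
        # the source dicts expected by the calling framework.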
sources = []
try:
if not url:
return sources
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
url = urlparse.urljoin(self.base_link, '/sources?%s' % urllib.urlencode(data))
r = client.request(url)
if not r: raise Exception()
result = json.loads(r)
try:
gvideos = [i['url'] for i in result if i['source'] == 'GVIDEO']
for url in gvideos:
gtag = directstream.googletag(url)[0]
sources.append({'source': 'gvideo', 'quality': gtag['quality'], 'language': 'en', 'url': gtag['url'], 'direct': True, 'debridonly': False})
except:
pass
try:
oloads = [i['url'] for i in result if i['source'] == 'CDN']
for url in oloads:
sources.append({'source': 'CDN', 'quality': 'HD', 'language': 'en', 'url': url, 'direct': False, 'debridonly': False})
except:
pass
return sources
except:
return sources
def resolve(self, url):
if 'googlevideo' in url:
return directstream.googlepass(url)
return url<|fim▁end|> | |
<|file_name|>32.d.ts<|end_file_name|><|fim▁begin|>import * as React from "react";
import { CarbonIconProps } from "../../";
declare const MobileAdd32: React.ForwardRefExoticComponent<<|fim▁hole|><|fim▁end|> | CarbonIconProps & React.RefAttributes<SVGSVGElement>
>;
export default MobileAdd32; |
<|file_name|>PathSimpleCopy.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#***************************************************************************
#* *
#* Copyright (c) 2015 Yorik van Havre <[email protected]> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
import FreeCAD,FreeCADGui,Path,PathGui
from PySide import QtCore,QtGui
"""Path SimpleCopy command"""
# Qt translation handling
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def translate(context, text, disambig=None):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def translate(context, text, disambig=None):
return QtGui.QApplication.translate(context, text, disambig)
<|fim▁hole|>
class CommandPathSimpleCopy:
def GetResources(self):
return {'Pixmap' : 'Path-SimpleCopy',
'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_SimpleCopy","Simple Copy"),
'Accel': "P, Y",
'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_SimpleCopy","Creates a non-parametric copy of another path")}
def IsActive(self):
        return FreeCAD.ActiveDocument is not None
def Activated(self):
# check that the selection contains exactly what we want
selection = FreeCADGui.Selection.getSelection()
if len(selection) != 1:
FreeCAD.Console.PrintError(translate("Path_SimpleCopy","Please select exactly one path object\n"))
return
if not(selection[0].isDerivedFrom("Path::Feature")):
FreeCAD.Console.PrintError(translate("Path_SimpleCopy","Please select exactly one path object\n"))
return
FreeCAD.ActiveDocument.openTransaction(translate("Path_SimpleCopy","Simple Copy"))
FreeCADGui.addModule("PathScripts.PathUtils")
FreeCADGui.doCommand('obj = FreeCAD.ActiveDocument.addObject("Path::Feature","'+selection[0].Name+ '_copy")')
FreeCADGui.doCommand('obj.Path = FreeCAD.ActiveDocument.'+selection[0].Name+'.Path')
FreeCADGui.doCommand('PathScripts.PathUtils.addToProject(obj)')
FreeCAD.ActiveDocument.commitTransaction()
FreeCAD.ActiveDocument.recompute()
if FreeCAD.GuiUp:
# register the FreeCAD command
FreeCADGui.addCommand('Path_SimpleCopy',CommandPathSimpleCopy())<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub mod ray_tracer; |
<|file_name|>issue-85943-no-suggest-unsized-indirection-in-where-clause.rs<|end_file_name|><|fim▁begin|>// Regression test for #85943: should not emit suggestions for adding
// indirection to type parameters in where-clauses when suggesting
// adding `?Sized`.
struct A<T>(T) where T: Send;
struct B(A<[u8]>);<|fim▁hole|><|fim▁end|> | //~^ ERROR the size for values of type
pub fn main() {
} |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import {flags} from '@heroku-cli/command'
import {cli} from 'cli-ux'
import BaseCommand from '../../../base'<|fim▁hole|>
export default class EventsIndex extends BaseCommand {
static description = 'list webhook events on an app'
static examples = [
'$ heroku webhooks:events',
]
static flags = {
app: flags.app(),
remote: flags.remote(),
pipeline: flags.pipeline({char: 'p', description: 'pipeline on which to list', hidden: true}),
}
async run() {
const {flags} = this.parse(EventsIndex)
const {path, display} = this.webhookType(flags)
cli.warn('heroku webhooks:event is deprecated, please use heroku webhooks:deliveries')
const {body: events} = await this.webhooksClient.get(`${path}/webhook-events`)
if (events.length === 0) {
this.log(`${display} has no events`)
} else {
events.sort((a: any, b: any) => Date.parse(a.created_at) - Date.parse(b.created_at))
cli.table(events, {
id: {
header: 'Event ID',
},
resource: {
get: (w: any) => w.payload.resource,
},
action: {
get: (w: any) => w.payload.action,
},
published_at: {
header: 'Published At', get: (w: any) => w.payload.published_at,
},
}, {
printLine: this.log,
})
}
}
}<|fim▁end|> | |
<|file_name|>error.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
minio.error
~~~~~~~~~~~~~~~~~~~
This module provides custom exception classes for Minio library
and API specific errors.
:copyright: (c) 2015, 2016, 2017 by Minio, Inc.
:license: Apache 2.0, see LICENSE for more details.
"""
from xml.etree import cElementTree
from xml.etree.cElementTree import ParseError
if hasattr(cElementTree, 'ParseError'):
    ## ParseError does not seem to have .message like other
    ## exceptions. Dynamically add a new attribute carrying
    ## the value of its msg attribute.
if not hasattr(ParseError, 'message'):
setattr(ParseError, 'message', ParseError.msg)
_ETREE_EXCEPTIONS = (ParseError, AttributeError, ValueError, TypeError)
else:
_ETREE_EXCEPTIONS = (SyntaxError, AttributeError, ValueError, TypeError)
class MinioError(Exception):
"""
Base class for all exceptions
:param message: User defined message.
"""
def __init__(self, message, **kwargs):
super(MinioError, self).__init__(**kwargs)
self.message = message
def __str__(self):
return "{name}: message: {message}".format(
name=self.__class__.__name__,
message=self.message
)
class InvalidEndpointError(MinioError):
"""
InvalidEndpointError is raised when input endpoint URL is invalid.
"""
pass
class InvalidBucketError(MinioError):
"""
InvalidBucketError is raised when input bucket name is invalid.
NOTE: Bucket names are validated based on Amazon S3 requirements.
"""
pass
class InvalidArgumentError(MinioError):
"""
InvalidArgumentError is raised when an unexpected
argument is received by the callee.
"""
pass
class InvalidSizeError(MinioError):
"""
InvalidSizeError is raised when an unexpected size mismatch occurs.
"""
pass
class InvalidXMLError(MinioError):
"""
InvalidXMLError is raised when an unexpected XML tag or
a missing tag is found during parsing.
"""
pass
class MultiDeleteError(object):
"""
Represents an error raised when trying to delete an object in a
Multi-Object Delete API call :class:`MultiDeleteError <MultiDeleteError>`
:object_name: Object name that had a delete error.
:error_code: Error code.
:error_message: Error message.
"""
def __init__(self, object_name, err_code, err_message):
self.object_name = object_name
self.error_code = err_code
self.error_message = err_message
def __str__(self):
string_format = '<MultiDeleteError: object_name: {} error_code: {}' \
' error_message: {}>'
return string_format.format(self.object_name,
self.error_code,
self.error_message)
class ResponseError(MinioError):
"""
ResponseError is raised when an API call doesn't succeed.
raises :exc:`ResponseError` accordingly.
:param response: Response from http client :class:`urllib3.HTTPResponse`.
"""
def __init__(self, response, method, bucket_name=None,
object_name=None):
super(ResponseError, self).__init__(message='')
# initialize parameter fields
self._response = response
self._xml = response.data
self.method = method
self.bucket_name = bucket_name
self.object_name = object_name
# initialize all ResponseError fields
self.code = ''
# Amz headers
self.request_id = ''
self.host_id = ''
self.region = ''
# handle the error
self._handle_error_response(bucket_name)
def get_exception(self):
"""
Gets the error exception derived from the initialization of
an ErrorResponse object
:return: The derived exception or ResponseError exception
"""
exception = known_errors.get(self.code)
if exception:
return exception(self)
else:
return self
def _handle_error_response(self, bucket_name=None):
"""
Sets error response uses xml body if available, otherwise
relies on HTTP headers.
"""
if not self._response.data:
self._set_error_response_without_body(bucket_name)
else:
self._set_error_response_with_body(bucket_name)
def _set_error_response_with_body(self, bucket_name=None):
"""
Sets all the error response fields with a valid response body.
Raises :exc:`ValueError` if invoked on a zero length body.
:param bucket_name: Optional bucket name resource at which error
occurred.
        :param object_name: Optional object name resource at which error
occurred.
"""
if len(self._response.data) == 0:
raise ValueError('response data has no body.')
try:
root = cElementTree.fromstring(self._response.data)
except _ETREE_EXCEPTIONS as error:
raise InvalidXMLError('"Error" XML is not parsable. '
'Message: {0}'.format(error.message))
for attribute in root:
if attribute.tag == 'Code':
self.code = attribute.text
elif attribute.tag == 'BucketName':
self.bucket_name = attribute.text
elif attribute.tag == 'Key':
self.object_name = attribute.text
elif attribute.tag == 'Message':
self.message = attribute.text
elif attribute.tag == 'RequestId':
self.request_id = attribute.text
elif attribute.tag == 'HostId':
self.host_id = attribute.text
# Set amz headers.
self._set_amz_headers()
def _set_error_response_without_body(self, bucket_name=None):
"""
Sets all the error response fields from response headers.
"""
if self._response.status == 404:
if bucket_name:
if self.object_name:
self.code = 'NoSuchKey'
self.message = self._response.reason
else:
self.code = 'NoSuchBucket'
self.message = self._response.reason
elif self._response.status == 409:
            self.code = 'Conflict'
self.message = 'The bucket you tried to delete is not empty.'
elif self._response.status == 403:
self.code = 'AccessDenied'
self.message = self._response.reason
elif self._response.status == 400:
self.code = 'BadRequest'
self.message = self._response.reason
elif self._response.status == 301:
self.code = 'PermanentRedirect'
self.message = self._response.reason
elif self._response.status == 307:
self.code = 'Redirect'
self.message = self._response.reason
elif self._response.status in [405, 501]:
self.code = 'MethodNotAllowed'
self.message = self._response.reason
elif self._response.status == 500:
self.code = 'InternalError'
self.message = 'Internal Server Error.'
else:
self.code = 'UnknownException'
self.message = self._response.reason
# Set amz headers.
self._set_amz_headers()
def _set_amz_headers(self):
"""
Sets x-amz-* error response fields from response headers.
"""
if self._response.headers:
# keeping x-amz-id-2 as part of amz_host_id.
if 'x-amz-id-2' in self._response.headers:
self.host_id = self._response.headers['x-amz-id-2']
if 'x-amz-request-id' in self._response.headers:
self.request_id = self._response.headers['x-amz-request-id']
# This is a new undocumented field, set only if available.
if 'x-amz-bucket-region' in self._response.headers:
self.region = self._response.headers['x-amz-bucket-region']
def __str__(self):
return ('ResponseError: code: {0}, message: {1},'
' bucket_name: {2}, object_name: {3}, request_id: {4},'
' host_id: {5}, region: {6}'.format(self.code,
self.message,
self.bucket_name,
self.object_name,
self.request_id,
self.host_id,
self.region))
# Common error responses listed here
# http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.htmlRESTErrorResponses
class KnownResponseError(MinioError):
def __init__(self, response_error, **kwargs):
super(KnownResponseError, self).__init__(message=self.message, **kwargs)
self.response_error = response_error
class AccessDenied(KnownResponseError):
message = 'Access Denied'
class AccountProblem(KnownResponseError):
message = 'There is a problem with your account that prevents the ' \
'operation from completing successfully.'
class AmbiguousGrantByEmailAddress(KnownResponseError):
message = 'The email address you provided is associated with ' \
'more than one account.'
class BadDigest(KnownResponseError):
message = 'The Content-MD5 you specified did not match what we received.'
class BucketAlreadyExists(KnownResponseError):
message = 'The requested bucket name is not available. The ' \
'bucket namespace is shared by all users of the system. ' \
'Please select a different name and try again.'
class BucketAlreadyOwnedByYou(KnownResponseError):
message = 'Your previous request to create the named bucket ' \
'succeeded and you already own it.'
class BucketNotEmpty(KnownResponseError):
message = 'The bucket you tried to delete is not empty.'
class CredentialNotSupported(KnownResponseError):
message = 'This request does not support credentials.'
class CrossLocationLoggingProhibited(KnownResponseError):
message = 'Cross-location logging not allowed. Buckets in one ' \
'geographic location cannot log information to a bucket ' \
'in another location.'
class EntityTooSmall(KnownResponseError):
message = 'Your proposed upload is smaller than the minimum a' \
'llowed object size.'
class EntityTooLarge(KnownResponseError):
message = 'Your proposed upload exceeds the maximum allowed object size.'
class ExpiredToken(KnownResponseError):
message = 'The provided token has expired.'
class IllegalVersioningConfigurationException(KnownResponseError):
message = 'Indicates that the versioning configuration specified ' \
'in the request is invalid.'
class IncompleteBody(KnownResponseError):
message = 'You did not provide the number of bytes specified by the ' \
'Content-Length HTTP header'
class IncorrectNumberOfFilesInPostRequest(KnownResponseError):
message = 'POST requires exactly one file upload per request.'
class InlineDataTooLarge(KnownResponseError):
message = 'Inline data exceeds the maximum allowed size.'
class InternalError(KnownResponseError):
message = 'We encountered an internal error. Please try again.'
class InvalidAccessKeyId(KnownResponseError):
message = 'The access key Id you provided does not exist in our records.'
class InvalidAddressingHeader(KnownResponseError):
message = 'You must specify the Anonymous role.'
class InvalidArgument(KnownResponseError):
message = 'Invalid Argument'
class InvalidBucketName(KnownResponseError):
message = 'The specified bucket is not valid.'
class InvalidBucketState(KnownResponseError):
message = 'The request is not valid with the current state of the bucket.'
class InvalidDigest(KnownResponseError):
message = 'The Content-MD5 you specified is not valid.'
class InvalidEncryptionAlgorithmError(KnownResponseError):
message = 'The encryption request you specified is not valid. ' \
'The valid value is AES256.'
class InvalidLocationConstraint(KnownResponseError):
message = 'The specified location constraint is not valid.'
class InvalidObjectState(KnownResponseError):
message = 'The operation is not valid for the current state of the object.'
class InvalidPart(KnownResponseError):
message = 'One or more of the specified parts could not be found. ' \
'The part might not have been uploaded, or the specified ' \
'entity tag might not have matched the part\'s entity tag'
class InvalidPartOrder(KnownResponseError):
message = 'The list of parts was not in ascending order.Parts list ' \
'must specified in order by part number.'
class InvalidPayer(KnownResponseError):
message = 'All access to this object has been disabled.'
class InvalidPolicyDocument(KnownResponseError):
message = 'The content of the form does not meet the conditions ' \
'specified in the policy document.'
class InvalidRange(KnownResponseError):
message = 'The requested range cannot be satisfied.'
class InvalidRequest(KnownResponseError):
message = 'Invalid Request'
class InvalidSecurity(KnownResponseError):
message = 'The provided security credentials are not valid.'
class InvalidSOAPRequest(KnownResponseError):
message = 'The SOAP request body is invalid.'
class InvalidStorageClass(KnownResponseError):
message = 'The storage class you specified is not valid.'
class InvalidTargetBucketForLogging(KnownResponseError):
message = 'The target bucket for logging does not exist, ' \
'is not owned by you, or does not have the appropriate ' \
'grants for the log-delivery group.'
class InvalidToken(KnownResponseError):
message = 'The provided token is malformed or otherwise invalid.'
class InvalidURI(KnownResponseError):
message = 'Couldn\'t parse the specified URI.'
class KeyTooLong(KnownResponseError):
message = 'Your key is too long.'
class MalformedACLError(KnownResponseError):
message = 'The XML you provided was not well-formed ' \
'or did not validate against our published schema.'
class MalformedPOSTRequest(KnownResponseError):
message = 'The body of your POST request is not ' \
'well-formed multipart/form-data.'
class MalformedXML(KnownResponseError):
message = 'This happens when the user sends malformed xml (xml that ' \
'doesn\'t conform to the published xsd) for the configuration.'
class MaxMessageLengthExceeded(KnownResponseError):
message = 'Your request was too big.'
class MaxPostPreDataLengthExceededError(KnownResponseError):
message = 'Your POST request fields preceding the ' \
'upload file were too large.'
class MetadataTooLarge(KnownResponseError):
message = 'Your metadata headers exceed the maximum allowed metadata size.'
class MethodNotAllowed(KnownResponseError):
message = 'The specified method is not allowed against this resource'
class MissingAttachment(KnownResponseError):
message = 'A SOAP attachment was expected, but none were found.'
class MissingContentLength(KnownResponseError):
message = 'You must provide the Content-Length HTTP header.'
class MissingRequestBodyError(KnownResponseError):
message = 'This happens when the user sends an empty xml document ' \
'as a request. The error message is, "Request body is empty."'
class MissingSecurityElement(KnownResponseError):
message = 'The SOAP 1.1 request is missing a security element.'
class MissingSecurityHeader(KnownResponseError):
message = 'Your request is missing a required header.'
class NoLoggingStatusForKey(KnownResponseError):
message = 'There is no such thing as a logging ' \
'status subresource for a key.'
class NoSuchBucket(KnownResponseError):
message = 'The specified bucket does not exist.'
class NoSuchKey(KnownResponseError):
message = 'The specified key does not exist.'
class NoSuchLifecycleConfiguration(KnownResponseError):
message = 'The lifecycle configuration does not exist.'
class NoSuchUpload(KnownResponseError):
message = 'The specified multipart upload does not exist. ' \
'The upload ID might be invalid, or the multipart \
upload might have been aborted or completed.'
class NoSuchVersion(KnownResponseError):
message = 'Indicates that the version ID specified in the ' \<|fim▁hole|> 'request does not match an existing version.'
class APINotImplemented(KnownResponseError):
message = 'A header you provided implies functionality ' \
'that is not implemented.'
class NotSignedUp(KnownResponseError):
message = 'Your account is not signed up.'
class NoSuchBucketPolicy(KnownResponseError):
message = 'The specified bucket does not have a bucket policy.'
class OperationAborted(KnownResponseError):
message = 'A conflicting conditional operation is currently in ' \
'progress against this resource. Try again.'
class PermanentRedirect(KnownResponseError):
message = 'The bucket you are attempting to access must be addressed ' \
'using the specified endpoint. Send all future requests ' \
'to this endpoint.'
class PreconditionFailed(KnownResponseError):
message = 'At least one of the preconditions you specified did not hold.'
class Redirect(KnownResponseError):
message = 'Temporary redirect.'
class RestoreAlreadyInProgress(KnownResponseError):
message = 'Object restore is already in progress.'
class RequestIsNotMultiPartContent(KnownResponseError):
message = 'Bucket POST must be of the enclosure-type multipart/form-data.'
class RequestTimeout(KnownResponseError):
message = 'Your socket connection to the server was not read ' \
'from or written to within the timeout period.'
class RequestTimeTooSkewed(KnownResponseError):
message = 'The difference between the request time and the ' \
'server\'s time is too large.'
class RequestTorrentOfBucketError(KnownResponseError):
message = 'Requesting the torrent file of a bucket is not permitted.'
class SignatureDoesNotMatch(KnownResponseError):
message = 'The request signature we calculated does not match the ' \
'signature you provided.'
class ServiceUnavailable(KnownResponseError):
message = 'Reduce your request rate.'
class SlowDown(KnownResponseError):
message = 'Reduce your request rate.'
class TemporaryRedirect(KnownResponseError):
message = 'You are being redirected to the bucket while DNS updates.'
class TokenRefreshRequired(KnownResponseError):
message = 'The provided token must be refreshed.'
class TooManyBuckets(KnownResponseError):
message = 'You have attempted to create more buckets than allowed.'
class UnexpectedContent(KnownResponseError):
message = 'This request does not support content.'
class UnresolvableGrantByEmailAddress(KnownResponseError):
message = 'The email address you provided does not match any account ' \
'on record.'
class UserKeyMustBeSpecified(KnownResponseError):
message = 'The bucket POST must contain the specified field name. ' \
'If it is specified, check the order of the fields.'
known_errors = {
'AccessDenied': AccessDenied,
    'AccountProblem': AccountProblem,
'AmbiguousGrantByEmailAddress': AmbiguousGrantByEmailAddress,
'BadDigest': BadDigest,
'BucketAlreadyExists': BucketAlreadyExists,
'BucketAlreadyOwnedByYou': BucketAlreadyOwnedByYou,
'BucketNotEmpty': BucketNotEmpty,
'CredentialNotSupported': CredentialNotSupported,
'CrossLocationLoggingProhibited': CrossLocationLoggingProhibited,
'EntityTooSmall': EntityTooSmall,
'EntityTooLarge': EntityTooLarge,
'ExpiredToken': ExpiredToken,
'IllegalVersioningConfigurationException': IllegalVersioningConfigurationException,
'IncompleteBody': IncompleteBody,
'IncorrectNumberOfFilesInPostRequest': IncorrectNumberOfFilesInPostRequest,
'InlineDataTooLarge': InlineDataTooLarge,
'InternalError': InternalError,
'InvalidAccessKeyId': InvalidAccessKeyId,
'InvalidAddressingHeader': InvalidAddressingHeader,
'InvalidArgument': InvalidArgument,
'InvalidBucketName': InvalidBucketName,
'InvalidBucketState': InvalidBucketState,
'InvalidDigest': InvalidDigest,
'InvalidEncryptionAlgorithmError': InvalidEncryptionAlgorithmError,
'InvalidLocationConstraint': InvalidLocationConstraint,
'InvalidObjectState': InvalidObjectState,
'InvalidPart': InvalidPart,
'InvalidPartOrder': InvalidPartOrder,
'InvalidPayer': InvalidPayer,
'InvalidPolicyDocument': InvalidPolicyDocument,
'InvalidRange': InvalidRange,
'InvalidRequest': InvalidRequest,
'InvalidSecurity': InvalidSecurity,
'InvalidSOAPRequest': InvalidSOAPRequest,
'InvalidStorageClass': InvalidStorageClass,
'InvalidTargetBucketForLogging': InvalidTargetBucketForLogging,
'InvalidToken': InvalidToken,
'InvalidURI': InvalidURI,
'KeyTooLong': KeyTooLong,
'MalformedACLError': MalformedACLError,
'MalformedPOSTRequest': MalformedPOSTRequest,
'MalformedXML': MalformedXML,
'MaxMessageLengthExceeded': MaxMessageLengthExceeded,
'MaxPostPreDataLengthExceededError': MaxPostPreDataLengthExceededError,
'MetadataTooLarge': MetadataTooLarge,
'MethodNotAllowed': MethodNotAllowed,
'MissingAttachment': MissingAttachment,
'MissingContentLength': MissingContentLength,
'MissingRequestBodyError': MissingRequestBodyError,
'MissingSecurityElement': MissingSecurityElement,
'MissingSecurityHeader': MissingSecurityHeader,
'NoLoggingStatusForKey': NoLoggingStatusForKey,
'NoSuchBucket': NoSuchBucket,
'NoSuchKey': NoSuchKey,
'NoSuchLifecycleConfiguration': NoSuchLifecycleConfiguration,
'NoSuchUpload': NoSuchUpload,
'NoSuchVersion': NoSuchVersion,
'NotImplemented': APINotImplemented,
'NotSignedUp': NotSignedUp,
'NoSuchBucketPolicy': NoSuchBucketPolicy,
'OperationAborted': OperationAborted,
'PermanentRedirect': PermanentRedirect,
'PreconditionFailed': PreconditionFailed,
'Redirect': Redirect,
'RestoreAlreadyInProgress': RestoreAlreadyInProgress,
'RequestIsNotMultiPartContent': RequestIsNotMultiPartContent,
'RequestTimeout': RequestTimeout,
'RequestTimeTooSkewed': RequestTimeTooSkewed,
'RequestTorrentOfBucketError': RequestTorrentOfBucketError,
'SignatureDoesNotMatch': SignatureDoesNotMatch,
'ServiceUnavailable': ServiceUnavailable,
'SlowDown': SlowDown,
'TemporaryRedirect': TemporaryRedirect,
'TokenRefreshRequired': TokenRefreshRequired,
'TooManyBuckets': TooManyBuckets,
'UnexpectedContent': UnexpectedContent,
'UnresolvableGrantByEmailAddress': UnresolvableGrantByEmailAddress,
'UserKeyMustBeSpecified': UserKeyMustBeSpecified,
}<|fim▁end|> | |
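# Illustrative usage sketch (not part of the original module): a caller that receives a
# failed API response typically raises the most specific exception available, e.g.:
#
#     err = ResponseError(http_response, 'GET', bucket_name='my-bucket')
#     raise err.get_exception()  # NoSuchBucket, AccessDenied, ... or the ResponseError itself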
<|file_name|>VisualLayers.ts<|end_file_name|><|fim▁begin|>// Work started at https://discourse.threejs.org/t/12503/35
import {Camera, Object3D, Renderer, Scene as ThreeScene, WebGLRenderer} from 'three'
/**
 * Allows rendering objects into one or more visual layers that are stacked on
* top of each other. Think of it like layers in Adobe Photoshop.
*/
export class VisualLayers {
__layers: Array<Layer> = []
__renderer: Renderer
__Scene: typeof ThreeScene
/**
* @param {THREE.Renderer} renderer The `THREE.Renderer` (f.e. `THREE.WebGLRenderer`) that
* will be used to render the layers.
* @param {typeof THREE.Scene} Scene The `THREE.Scene` class that will be used for each layer
* (one per layer). If not provided, `THREE.Scene` is used by default.
*/
// IDEA: Optionally accept different Scene types per layer.
constructor(renderer: Renderer, Scene = ThreeScene) {
this.__renderer = renderer
this.__Scene = Scene
}
/**
* Deletes all defined layers -- hence un-references contained objects so
* they can be garbage collected -- as if starting with a fresh new
* VisualLayers. Generally you should call this if you are getting rid
* of layering, or want to define a new set of layers, etc.
*/
dispose(): void {
this.__layers.length = 0
}
/**
* Defines a new layer.
* @param {LayerName} layerName The name to give the layer.
* @param {number} order The order it will have. The newly-defined layer will
* render above other layers that have lower numbers, and below other layers
* that have higher order numbers.
* @returns {Layer} The created object representing the layer.
*/
defineLayer(layerName: LayerName, order = 0): Layer {
const layer = this.__getOrMakeLayer(layerName)
const previousOrder = layer.order
layer.order = order
// Sort only if order changed.
if (previousOrder !== layer.order) this.__layers.sort((a, b) => a.order - b.order)
return layer
}
/**
* Set the visibility of one or more layers.
* @param {LayerNames} layerNames The name of a layer (or array of names of layers) that will have its (their) visibility set.
* @param {boolean} visible A boolean indicating whether the layer or layers should be visible.
*/
setLayerVisible(layerNames: LayerNames, visible: boolean) {
if (typeof layerNames == 'string') return this.__setLayerVisible(layerNames, visible)
for (const name of layerNames) this.__setLayerVisible(name, visible)
}
__setLayerVisible(layerName: LayerName, visible: boolean) {
const layer = this.__layers.find(l => l.name === layerName)
if (!layer) throw new Error('Can not set visibility of layer that does not exist.')
layer.visible = visible
}
/** Get a layer by name (if it doesn't exist, creates it with default order 0). */
__getOrMakeLayer(layerName: LayerName): Layer {
let layer = this.__layers.find(l => l.name === layerName)
if (!layer) {
layer = {name: layerName, backingScene: new this.__Scene(), order: 0, visible: true}
layer.backingScene.autoUpdate = false
this.__layers.push(layer)
}
<|fim▁hole|> /**
* Remove a layer.
* @param {LayerName} layerName The name of the layer to remove.
*/
removeLayer(layerName: LayerName): void {
const index = this.__layers.findIndex(l => {
if (l.name === layerName) {
l.backingScene.children.length = 0
return true
}
return false
})
if (index >= 0) this.__layers.splice(index, 1)
}
/**
* Check if a layer exists.
* @param {LayerName} layerName The name of the layer to check existence of.
* @returns {boolean} A boolean indicating if the layer exists.
*/
hasLayer(layerName: LayerName): boolean {
return this.__layers.some(l => l.name === layerName)
}
/**
* The number of layers.
* @readonly
* @type {number}
*/
get layerCount(): number {
return this.__layers.length
}
/**
* Add an object (anything that is or extends from THREE.Object3D) to the named layer (or named layers).
*
* @param {THREE.Object3D} obj The object to add. Must be an `instanceof THREE.Object3D`.
*
* @param {LayerNames} layerNames The name of a layer (or array of names of layers) that
* the object will be added to. If an object is added to multiple layers, the
* object will be rendered multiple times, once per layer.
*
* @param {boolean | undefined} withSubtree When true, this causes an object that was added into
 * specified layer(s) to be rendered with its (grand)children, rather than it
* being rendered only by itself without any of its (grand)children.
*
* It is useful for `withSubtree` to be set to `false` (the default) when you
 * want different parts of a single hierarchy to be rendered
 * in different layers.
*
* On the other hand, sometimes you have a whole tree that you want to put in
* a layer and don’t want to have to specify layers for all of the sub-nodes.
* Set `withSubtree` to `true` in this case to add a root node to a layer
* to render that whole subtree in that layer.
*
* It is easier to add a whole tree into a layer with `withSubtree` as
 * `true`. When `withSubtree` is `false`, each node in a subtree would
 * need to be added to a layer manually, but this allows more fine-grained control of
* which parts of a tree will render in various layers.
*/
addObjectToLayer(obj: Object3D, layerNames: LayerNames, withSubtree = false): void {
if (typeof layerNames == 'string') return this.__addObjectToLayer(obj, layerNames, withSubtree)
for (const name of layerNames) this.__addObjectToLayer(obj, name, withSubtree)
}
/**
* Similar to `addObjectToLayer`, but for adding multiple objects at once.
* @param {THREE.Object3D[]} objects An array of objects that are `instanceof THREE.Object3D`.
* @param {LayerNames} layerNames The layer or layers to add the objects to.
* @param {boolean | undefined} withSubtree Whether rendering of the objects will also render their
* children. See `withSubtree` of `addObjectToLayer` for more details.
*/
addObjectsToLayer(objects: Object3D[], layerNames: LayerNames, withSubtree = false): void {
for (const obj of objects) this.addObjectToLayer(obj, layerNames, withSubtree)
}
/**
* Add an object to all currently-defined layers.
* @param {THREE.Object3D} obj The object to add. Must be an `instanceof THREE.Object3D`.
* @param {boolean | undefined} withSubtree Whether rendering of the object will also render its
* children. See `withSubtree` of `addObjectToLayer` for more details.
*/
addObjectToAllLayers(obj: Object3D, withSubtree = false): void {
for (const layer of this.__layers) this.__addObjectToLayer(obj, layer.name, withSubtree)
}
/**
* Add a set of objects to all currently-defined layers.
* @param {THREE.Object3D[]} objects The list of `THREE.Object3D` instances to add.
* @param {boolean | undefined} withSubtree Whether rendering of the objects will also render their
* children. See `withSubtree` of `addObjectToLayer` for more details.
*/
addObjectsToAllLayers(objects: Object3D[], withSubtree = false): void {
for (const obj of objects) this.addObjectToAllLayers(obj, withSubtree)
}
readonly __emptyArray = Object.freeze([])
__addObjectToLayer(obj: Object3D, layerName: LayerName, withSubtree: boolean): void {
const layer = this.__getOrMakeLayer(layerName)
if (!this.__layerHasObject(layer, obj)) {
const proxy = Object.create(obj, withSubtree ? {} : {children: {get: () => this.__emptyArray}})
// We use `children.push()` here instead of `children.add()` so that the
// added child will not be removed from its parent in its original scene.
// This allows us to add an object to multiple layers, and to not
// interfere with the user's original tree.
layer.backingScene.children.push(proxy)
}
}
__layerHasObject(layer: Layer, obj: Object3D): boolean {
return layer.backingScene.children.some(proxy => (proxy as any).__proto__ === obj)
}
/**
* Remove an object from a layer or set of layers.
* @param {THREE.Object3D} obj The object to remove from the specified layer or layers.
* @param {LayerNames} layerNames The layer or layers from which to remove the object from.
*/
removeObjectFromLayer(obj: Object3D, layerNames: LayerNames): void {
if (typeof layerNames == 'string') {
const layer = this.__layers.find(l => l.name === layerNames)
return this.__removeObjectFromLayer(obj, layer)
}
for (const name of layerNames) {
const layer = this.__layers.find(l => l.name === name)
this.__removeObjectFromLayer(obj, layer)
}
}
/**
* Remove an object from a layer or set of layers.
* @param {THREE.Object3D[]} objects The objects to remove from the specified layer or layers.
* @param {LayerNames} layerNames The layer or layers from which to remove the object from.
*/
removeObjectsFromLayer(objects: Object3D[], layerNames: LayerNames): void {
for (const obj of objects) this.removeObjectFromLayer(obj, layerNames)
}
/**
* Remove the given object from all layers.
* @param {THREE.Object3D} obj The object to remove.
*/
removeObjectFromAllLayers(obj: Object3D): void {
for (const layer of this.__layers) this.__removeObjectFromLayer(obj, layer)
}
/**
* Remove the given objects from all layers they may belong to.
* @param {THREE.Object3D[]} objects The objects to remove.
*/
removeObjectsFromAllLayers(objects: Object3D[]): void {
for (const obj of objects) this.removeObjectFromAllLayers(obj)
}
__removeObjectFromLayer(obj: Object3D, layer: Layer | undefined) {
if (!layer) throw new Error('Can not remove object from layer that does not exist.')
const children = layer.backingScene.children
const index = children.findIndex(proxy => (proxy as any).__proto__ === obj)
if (index >= 0) {
children[index] = children[children.length - 1]
children.pop()
}
}
/**
* Render visible layers.
*
* @param {THREE.Camera} camera A THREE.Camera to render all the layers with.
*
* @param {BeforeAllCallback | undefined} beforeAll Optional: Called before rendering all layers. If not
 * supplied, the default value will turn off the renderer's auto clearing, so that
* each layer can be manually drawn stacked on top of each other.
*
* @param {BeforeEachCallback | undefined} beforeEach Optional: When the layers are being rendered in the order they are
* defined to be in, this callback will be called right before a layer is
* rendered. It will be passed the name of the layer that is about to be
* rendered. By default, this does nothing.
*
* @param {AfterEachCallback | undefined} afterEach Optional: When the layers are being rendered in the order
* they are defined to be in, this callback will be called right after a
* layer is rendered. It will be passed the name of the layer that was just
* rendered. The default is that `clearDepth()` will be called on a
* `WebGLRenderer` to ensure layers render on top of each other from low
* order to high order. If you provide your own callback, you'll have to
* remember to call `clearDepth` manually, unless you wish for layers to blend into
 * the same 3D space rather than appearing as separate scenes stacked on
* top of each other.
*/
// IDEA: Allow different cameras per layer? It may not be common, but could
// be useful for, for example, making background effects, etc.
render(
camera: Camera,
beforeAll: BeforeAllCallback = this.__defaultBeforeAllCallback,
beforeEach: BeforeEachCallback = this.__defaultBeforeEachCallback,
afterEach: AfterEachCallback = this.__defaultAfterEachCallback,
) {
beforeAll()
for (const layer of this.__layers) {
if (!layer.visible) continue
beforeEach(layer.name)
this.__renderer.render(layer.backingScene, camera)
afterEach(layer.name)
}
}
__defaultBeforeAllCallback = () => {
if (this.__renderer instanceof WebGLRenderer) {
this.__renderer.autoClear = false
this.__renderer.clear()
}
}
__defaultBeforeEachCallback = () => {}
__defaultAfterEachCallback = () => {
// By default, the depth of a WebGLRenderer is cleared, so that layers
// render on top of each other in order from lowest to highest order value.
if (this.__renderer instanceof WebGLRenderer) this.__renderer.clearDepth()
}
}
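// Illustrative usage sketch (not part of the original file; assumes an existing
// WebGLRenderer, a camera, and Object3D instances named `background` and `model`):
//
//   const layers = new VisualLayers(renderer)
//   layers.defineLayer('background', 0)
//   layers.defineLayer('foreground', 1)
//   layers.addObjectToLayer(background, 'background', true)
//   layers.addObjectToLayer(model, 'foreground')
//   // in the render loop, draws visible layers from lowest to highest order:
//   layers.render(camera)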
/** @typedef {string} LayerName */
type LayerName = string
/** @typedef {LayerName | LayerName[]} LayerNames */
type LayerNames = LayerName | LayerName[]
/** @typedef {{name: LayerName; backingScene: THREE.Scene; order: number; visible: boolean}} Layer */
type Layer = {name: LayerName; backingScene: THREE.Scene; order: number; visible: boolean}
/**
* @typedef {
* @function
* @param {LayerName} layerName
* } BeforeEachCallback
*/
type BeforeEachCallback = (layerName: LayerName) => void
/**
* @typedef {
* @function
* } BeforeAllCallback
*/
type BeforeAllCallback = () => void
/**
* @typedef {
* @function
* @param {LayerName} layerName
* @returns {void}
* } AfterEachCallback
*/
type AfterEachCallback = (layerName: LayerName) => void<|fim▁end|> | return layer
}
|
<|file_name|>knearest.cpp<|end_file_name|><|fim▁begin|>/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// Intel License Agreement
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of Intel Corporation may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
/****************************************************************************************\
* K-Nearest Neighbors Classifier *
\****************************************************************************************/
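// Illustrative usage sketch (added for clarity, not part of the original file; assumes
// row-sample training data and responses in CV_32FC1 cv::Mat objects):
//
//   CvKNearest knn;
//   knn.train(trainData, responses, cv::Mat(), false /*regression*/, 32 /*max_k*/, false);
//   cv::Mat results;
//   knn.find_nearest(samples, 5, &results, 0, 0, 0);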
// k Nearest Neighbors
CvKNearest::CvKNearest()
{
samples = 0;
clear();
}
CvKNearest::~CvKNearest()
{
clear();
}
CvKNearest::CvKNearest( const CvMat* _train_data, const CvMat* _responses,
const CvMat* _sample_idx, bool _is_regression, int _max_k )
{
samples = 0;
train( _train_data, _responses, _sample_idx, _is_regression, _max_k, false );
}
void CvKNearest::clear()
{
while( samples )
{
CvVectors* next_samples = samples->next;
cvFree( &samples->data.fl );
cvFree( &samples );
samples = next_samples;
}
var_count = 0;
total = 0;
max_k = 0;
}
int CvKNearest::get_max_k() const { return max_k; }
int CvKNearest::get_var_count() const { return var_count; }
bool CvKNearest::is_regression() const { return regression; }
int CvKNearest::get_sample_count() const { return total; }
bool CvKNearest::train( const CvMat* _train_data, const CvMat* _responses,
const CvMat* _sample_idx, bool _is_regression,
int _max_k, bool _update_base )
{
bool ok = false;
CvMat* responses = 0;
CV_FUNCNAME( "CvKNearest::train" );
__BEGIN__;
CvVectors* _samples = 0;
float** _data = 0;
int _count = 0, _dims = 0, _dims_all = 0, _rsize = 0;
if( !_update_base )
clear();
// Prepare training data and related parameters.
// Treat categorical responses as ordered - to prevent class label compression and
// to enable entering new classes in the updates
CV_CALL( cvPrepareTrainData( "CvKNearest::train", _train_data, CV_ROW_SAMPLE,
_responses, CV_VAR_ORDERED, 0, _sample_idx, true, (const float***)&_data,
&_count, &_dims, &_dims_all, &responses, 0, 0 ));
if( !responses )
CV_ERROR( CV_StsNoMem, "Could not allocate memory for responses" );
if( _update_base && _dims != var_count )
CV_ERROR( CV_StsBadArg, "The newly added data have different dimensionality" );
if( !_update_base )
{
if( _max_k < 1 )
CV_ERROR( CV_StsOutOfRange, "max_k must be a positive number" );
regression = _is_regression;
var_count = _dims;
max_k = _max_k;
}
_rsize = _count*sizeof(float);
CV_CALL( _samples = (CvVectors*)cvAlloc( sizeof(*_samples) + _rsize ));
_samples->next = samples;
_samples->type = CV_32F;
_samples->data.fl = _data;
_samples->count = _count;
total += _count;
samples = _samples;
memcpy( _samples + 1, responses->data.fl, _rsize );
ok = true;
__END__;
if( responses && responses->data.ptr != _responses->data.ptr )
cvReleaseMat(&responses);
return ok;
}
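// Descriptive note (added for clarity): brute-force neighbor search. For each query sample
// in [start, end), squared Euclidean distances to every stored training vector are computed
// and the k smallest are kept, together with their responses and, optionally, pointers to
// the neighbor vectors themselves.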
void CvKNearest::find_neighbors_direct( const CvMat* _samples, int k, int start, int end,
float* neighbor_responses, const float** neighbors, float* dist ) const
{
int i, j, count = end - start, k1 = 0, k2 = 0, d = var_count;
CvVectors* s = samples;
for( ; s != 0; s = s->next )
{
int n = s->count;
for( j = 0; j < n; j++ )
{
for( i = 0; i < count; i++ )
{
double sum = 0;
Cv32suf si;
const float* v = s->data.fl[j];
const float* u = (float*)(_samples->data.ptr + _samples->step*(start + i));
Cv32suf* dd = (Cv32suf*)(dist + i*k);
float* nr;
const float** nn;
int t, ii, ii1;
for( t = 0; t <= d - 4; t += 4 )
{
double t0 = u[t] - v[t], t1 = u[t+1] - v[t+1];
double t2 = u[t+2] - v[t+2], t3 = u[t+3] - v[t+3];
sum += t0*t0 + t1*t1 + t2*t2 + t3*t3;
}
for( ; t < d; t++ )
{
double t0 = u[t] - v[t];
sum += t0*t0;
}
si.f = (float)sum;
for( ii = k1-1; ii >= 0; ii-- )
if( si.i > dd[ii].i )
break;
if( ii >= k-1 )
continue;
nr = neighbor_responses + i*k;
nn = neighbors ? neighbors + (start + i)*k : 0;
for( ii1 = k2 - 1; ii1 > ii; ii1-- )
{
dd[ii1+1].i = dd[ii1].i;
nr[ii1+1] = nr[ii1];
if( nn ) nn[ii1+1] = nn[ii1];
}
dd[ii+1].i = si.i;
nr[ii+1] = ((float*)(s + 1))[j];
if( nn )
nn[ii+1] = v;
}
k1 = MIN( k1+1, k );
k2 = MIN( k1, k-1 );
}
}
}
float CvKNearest::write_results( int k, int k1, int start, int end,
const float* neighbor_responses, const float* dist,
CvMat* _results, CvMat* _neighbor_responses,
CvMat* _dist, Cv32suf* sort_buf ) const
{
float result = 0.f;
int i, j, j1, count = end - start;
double inv_scale = 1./k1;
int rstep = _results && !CV_IS_MAT_CONT(_results->type) ? _results->step/sizeof(result) : 1;
for( i = 0; i < count; i++ )
{
const Cv32suf* nr = (const Cv32suf*)(neighbor_responses + i*k);
float* dst;
float r;
if( _results || start+i == 0 )
{
if( regression )
{
double s = 0;
for( j = 0; j < k1; j++ )
s += nr[j].f;
r = (float)(s*inv_scale);
}
else
{
int prev_start = 0, best_count = 0, cur_count;
Cv32suf best_val;
for( j = 0; j < k1; j++ )
sort_buf[j].i = nr[j].i;
for( j = k1-1; j > 0; j-- )
{
bool swap_fl = false;
for( j1 = 0; j1 < j; j1++ )
if( sort_buf[j1].i > sort_buf[j1+1].i )
{
int t;
CV_SWAP( sort_buf[j1].i, sort_buf[j1+1].i, t );
swap_fl = true;
}
if( !swap_fl )
break;
}
best_val.i = 0;
for( j = 1; j <= k1; j++ )
if( j == k1 || sort_buf[j].i != sort_buf[j-1].i )
{
cur_count = j - prev_start;
if( best_count < cur_count )
{
best_count = cur_count;
best_val.i = sort_buf[j-1].i;
}
prev_start = j;
}
r = best_val.f;
}
if( start+i == 0 )
result = r;
if( _results )
_results->data.fl[(start + i)*rstep] = r;
}
if( _neighbor_responses )
{
dst = (float*)(_neighbor_responses->data.ptr +
(start + i)*_neighbor_responses->step);
for( j = 0; j < k1; j++ )
dst[j] = nr[j].f;
for( ; j < k; j++ )
dst[j] = 0.f;
}
if( _dist )
{
dst = (float*)(_dist->data.ptr + (start + i)*_dist->step);
for( j = 0; j < k1; j++ )
dst[j] = dist[j + i*k];
for( ; j < k; j++ )
dst[j] = 0.f;
}
}
return result;
}
struct P1 : cv::ParallelLoopBody {
P1(const CvKNearest* _pointer, int _buf_sz, int _k, const CvMat* __samples, const float** __neighbors,
int _k1, CvMat* __results, CvMat* __neighbor_responses, CvMat* __dist, float* _result)
{
pointer = _pointer;
k = _k;
_samples = __samples;
_neighbors = __neighbors;
k1 = _k1;
_results = __results;
_neighbor_responses = __neighbor_responses;
_dist = __dist;
result = _result;
buf_sz = _buf_sz;
}
const CvKNearest* pointer;
int k;
const CvMat* _samples;
const float** _neighbors;
int k1;
CvMat* _results;
CvMat* _neighbor_responses;
CvMat* _dist;
float* result;<|fim▁hole|> cv::AutoBuffer<float> buf(buf_sz);
for(int i = range.start; i < range.end; i += 1 )
{
float* neighbor_responses = &buf[0];
float* dist = neighbor_responses + 1*k;
Cv32suf* sort_buf = (Cv32suf*)(dist + 1*k);
pointer->find_neighbors_direct( _samples, k, i, i + 1,
neighbor_responses, _neighbors, dist );
float r = pointer->write_results( k, k1, i, i + 1, neighbor_responses, dist,
_results, _neighbor_responses, _dist, sort_buf );
if( i == 0 )
*result = r;
}
}
};
float CvKNearest::find_nearest( const CvMat* _samples, int k, CvMat* _results,
const float** _neighbors, CvMat* _neighbor_responses, CvMat* _dist ) const
{
float result = 0.f;
const int max_blk_count = 128, max_buf_sz = 1 << 12;
if( !samples )
CV_Error( CV_StsError, "The search tree must be constructed first using train method" );
if( !CV_IS_MAT(_samples) ||
CV_MAT_TYPE(_samples->type) != CV_32FC1 ||
_samples->cols != var_count )
CV_Error( CV_StsBadArg, "Input samples must be floating-point matrix (<num_samples>x<var_count>)" );
if( _results && (!CV_IS_MAT(_results) ||
(_results->cols != 1 && _results->rows != 1) ||
_results->cols + _results->rows - 1 != _samples->rows) )
CV_Error( CV_StsBadArg,
"The results must be 1d vector containing as much elements as the number of samples" );
if( _results && CV_MAT_TYPE(_results->type) != CV_32FC1 &&
(CV_MAT_TYPE(_results->type) != CV_32SC1 || regression))
CV_Error( CV_StsUnsupportedFormat,
"The results must be floating-point or integer (in case of classification) vector" );
if( k < 1 || k > max_k )
CV_Error( CV_StsOutOfRange, "k must be within 1..max_k range" );
if( _neighbor_responses )
{
if( !CV_IS_MAT(_neighbor_responses) || CV_MAT_TYPE(_neighbor_responses->type) != CV_32FC1 ||
_neighbor_responses->rows != _samples->rows || _neighbor_responses->cols != k )
CV_Error( CV_StsBadArg,
"The neighbor responses (if present) must be floating-point matrix of <num_samples> x <k> size" );
}
if( _dist )
{
if( !CV_IS_MAT(_dist) || CV_MAT_TYPE(_dist->type) != CV_32FC1 ||
_dist->rows != _samples->rows || _dist->cols != k )
CV_Error( CV_StsBadArg,
"The distances from the neighbors (if present) must be floating-point matrix of <num_samples> x <k> size" );
}
int count = _samples->rows;
int count_scale = k*2;
int blk_count0 = MIN( count, max_blk_count );
int buf_sz = MIN( blk_count0 * count_scale, max_buf_sz );
blk_count0 = MAX( buf_sz/count_scale, 1 );
blk_count0 += blk_count0 % 2;
blk_count0 = MIN( blk_count0, count );
buf_sz = blk_count0 * count_scale + k;
int k1 = get_sample_count();
k1 = MIN( k1, k );
cv::parallel_for_(cv::Range(0, count), P1(this, buf_sz, k, _samples, _neighbors, k1,
_results, _neighbor_responses, _dist, &result)
);
return result;
}
using namespace cv;
CvKNearest::CvKNearest( const Mat& _train_data, const Mat& _responses,
const Mat& _sample_idx, bool _is_regression, int _max_k )
{
samples = 0;
train(_train_data, _responses, _sample_idx, _is_regression, _max_k, false );
}
bool CvKNearest::train( const Mat& _train_data, const Mat& _responses,
const Mat& _sample_idx, bool _is_regression,
int _max_k, bool _update_base )
{
CvMat tdata = _train_data, responses = _responses, sidx = _sample_idx;
return train(&tdata, &responses, sidx.data.ptr ? &sidx : 0, _is_regression, _max_k, _update_base );
}
float CvKNearest::find_nearest( const Mat& _samples, int k, Mat* _results,
const float** _neighbors, Mat* _neighbor_responses,
Mat* _dist ) const
{
CvMat s = _samples, results, *presults = 0, nresponses, *pnresponses = 0, dist, *pdist = 0;
if( _results )
{
if(!(_results->data && (_results->type() == CV_32F ||
(_results->type() == CV_32S && regression)) &&
(_results->cols == 1 || _results->rows == 1) &&
_results->cols + _results->rows - 1 == _samples.rows) )
_results->create(_samples.rows, 1, CV_32F);
presults = &(results = *_results);
}
if( _neighbor_responses )
{
if(!(_neighbor_responses->data && _neighbor_responses->type() == CV_32F &&
_neighbor_responses->cols == k && _neighbor_responses->rows == _samples.rows) )
_neighbor_responses->create(_samples.rows, k, CV_32F);
pnresponses = &(nresponses = *_neighbor_responses);
}
if( _dist )
{
if(!(_dist->data && _dist->type() == CV_32F &&
_dist->cols == k && _dist->rows == _samples.rows) )
_dist->create(_samples.rows, k, CV_32F);
pdist = &(dist = *_dist);
}
return find_nearest(&s, k, presults, _neighbors, pnresponses, pdist );
}
float CvKNearest::find_nearest( const cv::Mat& _samples, int k, CV_OUT cv::Mat& results,
CV_OUT cv::Mat& neighborResponses, CV_OUT cv::Mat& dists) const
{
return find_nearest(_samples, k, &results, 0, &neighborResponses, &dists);
}
/* End of file */<|fim▁end|> | int buf_sz;
void operator()( const cv::Range& range ) const
{ |
<|file_name|>MedicalRoster.stories.js<|end_file_name|><|fim▁begin|>import React from "react";
import StorybookWrapper from "stories/helpers/storybookWrapper.js";
import baseProps from "stories/helpers/baseProps.js";
import Component, {<|fim▁hole|>
export default {
title: "Cards|Medical/MedicalRoster",
};
export const MedicalRoster = () => (
<StorybookWrapper
queries={[
MEDICAL_ROSTER_CREW_SUB,
MEDICAL_ROSTER_QUERY,
MEDICAL_ROSTER_SUB,
]}
>
<Component {...baseProps} />
</StorybookWrapper>
);<|fim▁end|> | MEDICAL_ROSTER_CREW_SUB,
MEDICAL_ROSTER_QUERY,
MEDICAL_ROSTER_SUB,
} from "components/views/MedicalRoster/index.js"; |
<|file_name|>CsvUpsertExecutor.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.util.csv;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.Base64;
import java.util.List;
import java.util.Properties;
import javax.annotation.Nullable;
import org.apache.commons.csv.CSVRecord;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.expression.function.EncodeFormat;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.IllegalDataException;
import org.apache.phoenix.schema.types.PBinary;
import org.apache.phoenix.schema.types.PBoolean;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PDataType.PDataCodec;
import org.apache.phoenix.schema.types.PTimestamp;
import org.apache.phoenix.schema.types.PVarbinary;
import org.apache.phoenix.util.ColumnInfo;
import org.apache.phoenix.util.DateUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.UpsertExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
/** {@link UpsertExecutor} over {@link CSVRecord}s. */
public class CsvUpsertExecutor extends UpsertExecutor<CSVRecord, String> {
private static final Logger LOG = LoggerFactory.getLogger(CsvUpsertExecutor.class);
protected final String arrayElementSeparator;
/** Testing constructor. Do not use in prod. */
@VisibleForTesting
protected CsvUpsertExecutor(Connection conn, List<ColumnInfo> columnInfoList,
PreparedStatement stmt, UpsertListener<CSVRecord> upsertListener,
String arrayElementSeparator) {
super(conn, columnInfoList, stmt, upsertListener);
this.arrayElementSeparator = arrayElementSeparator;
finishInit();
}
public CsvUpsertExecutor(Connection conn, String tableName,
List<ColumnInfo> columnInfoList, UpsertListener<CSVRecord> upsertListener,
String arrayElementSeparator) {
super(conn, tableName, columnInfoList, upsertListener);
this.arrayElementSeparator = arrayElementSeparator;
finishInit();
}
@Override
protected void execute(CSVRecord csvRecord) {
try {
if (csvRecord.size() < conversionFunctions.size()) {
String message = String.format("CSV record does not have enough values (has %d, but needs %d)",
csvRecord.size(), conversionFunctions.size());
throw new IllegalArgumentException(message);
}
for (int fieldIndex = 0; fieldIndex < conversionFunctions.size(); fieldIndex++) {
Object sqlValue = conversionFunctions.get(fieldIndex).apply(csvRecord.get(fieldIndex));
if (sqlValue != null) {
preparedStatement.setObject(fieldIndex + 1, sqlValue);
} else {
preparedStatement.setNull(fieldIndex + 1, dataTypes.get(fieldIndex).getSqlType());
}
}
preparedStatement.execute();
upsertListener.upsertDone(++upsertCount);
} catch (Exception e) {
if (LOG.isDebugEnabled()) {
// Even though this is an error we only log it with debug logging because we're notifying the
// listener, and it can do its own logging if needed
LOG.debug("Error on CSVRecord " + csvRecord, e);
}
upsertListener.errorOnRecord(csvRecord, e);
}
}
@Override
protected Function<String, Object> createConversionFunction(PDataType dataType) {
if (dataType.isArrayType()) {
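            // Array columns encode their element type as (element SQL type + ARRAY_TYPE_BASE);
            // subtracting the base recovers the element type for per-element conversion.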
return new ArrayDatatypeConversionFunction(
new StringToArrayConverter(
conn,
arrayElementSeparator,
PDataType.fromTypeId(dataType.getSqlType() - PDataType.ARRAY_TYPE_BASE)));
} else {
return new SimpleDatatypeConversionFunction(dataType, this.conn);
}
}
/**
* Performs typed conversion from String values to a given column value type.
*/
static class SimpleDatatypeConversionFunction implements Function<String, Object> {
private final PDataType dataType;
private final PDataCodec codec;
private final DateUtil.DateTimeParser dateTimeParser;
private final String binaryEncoding;
SimpleDatatypeConversionFunction(PDataType dataType, Connection conn) {
ReadOnlyProps props;
try {<|fim▁hole|> props = conn.unwrap(PhoenixConnection.class).getQueryServices().getProps();
} catch (SQLException e) {
throw new RuntimeException(e);
}
this.dataType = dataType;
PDataCodec codec = dataType.getCodec();
if(dataType.isCoercibleTo(PTimestamp.INSTANCE)) {
codec = DateUtil.getCodecFor(dataType);
// TODO: move to DateUtil
String dateFormat;
int dateSqlType = dataType.getResultSetSqlType();
if (dateSqlType == Types.DATE) {
dateFormat = props.get(QueryServices.DATE_FORMAT_ATTRIB,
DateUtil.DEFAULT_DATE_FORMAT);
} else if (dateSqlType == Types.TIME) {
dateFormat = props.get(QueryServices.TIME_FORMAT_ATTRIB,
DateUtil.DEFAULT_TIME_FORMAT);
} else {
dateFormat = props.get(QueryServices.TIMESTAMP_FORMAT_ATTRIB,
DateUtil.DEFAULT_TIMESTAMP_FORMAT);
}
String timeZoneId = props.get(QueryServices.DATE_FORMAT_TIMEZONE_ATTRIB,
QueryServicesOptions.DEFAULT_DATE_FORMAT_TIMEZONE);
this.dateTimeParser = DateUtil.getDateTimeParser(dateFormat, dataType, timeZoneId);
} else {
this.dateTimeParser = null;
}
this.codec = codec;
this.binaryEncoding = props.get(QueryServices.UPLOAD_BINARY_DATA_TYPE_ENCODING,
QueryServicesOptions.DEFAULT_UPLOAD_BINARY_DATA_TYPE_ENCODING);
}
@Nullable
@Override
public Object apply(@Nullable String input) {
if (input == null || input.isEmpty()) {
return null;
}
if (dataType == PTimestamp.INSTANCE) {
return DateUtil.parseTimestamp(input);
}
if (dateTimeParser != null) {
long epochTime = dateTimeParser.parseDateTime(input);
byte[] byteValue = new byte[dataType.getByteSize()];
codec.encodeLong(epochTime, byteValue, 0);
return dataType.toObject(byteValue);
} else if (dataType == PBoolean.INSTANCE) {
switch (input.toLowerCase()) {
case "true":
case "t":
case "1":
return Boolean.TRUE;
case "false":
case "f":
case "0":
return Boolean.FALSE;
default:
throw new RuntimeException("Invalid boolean value: '" + input
+ "', must be one of ['true','t','1','false','f','0']");
}
            } else if (dataType == PVarbinary.INSTANCE || dataType == PBinary.INSTANCE) {
EncodeFormat format = EncodeFormat.valueOf(binaryEncoding.toUpperCase());
Object object = null;
switch (format) {
case BASE64:
object = Base64.getDecoder().decode(input);
if (object == null) { throw new IllegalDataException(
"Input: [" + input + "] is not base64 encoded"); }
break;
case ASCII:
object = Bytes.toBytes(input);
break;
default:
throw new IllegalDataException("Unsupported encoding \"" + binaryEncoding + "\"");
}
return object;
}
return dataType.toObject(input);
}
}
/**
* Converts string representations of arrays into Phoenix arrays of the correct type.
*/
private static class ArrayDatatypeConversionFunction implements Function<String, Object> {
private final StringToArrayConverter arrayConverter;
private ArrayDatatypeConversionFunction(StringToArrayConverter arrayConverter) {
this.arrayConverter = arrayConverter;
}
@Nullable
@Override
public Object apply(@Nullable String input) {
try {
return arrayConverter.toArray(input);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
}<|fim▁end|> | |
<|file_name|>websocket.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
use dom::bindings::codegen::Bindings::WebSocketBinding;
use dom::bindings::codegen::Bindings::WebSocketBinding::{BinaryType, WebSocketMethods};
use dom::bindings::codegen::UnionTypes::StringOrStringSequence;
use dom::bindings::conversions::ToJSValConvertible;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::{DOMString, USVString, is_token};
use dom::blob::{Blob, BlobImpl};
use dom::closeevent::CloseEvent;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::eventtarget::EventTarget;
use dom::globalscope::GlobalScope;
use dom::messageevent::MessageEvent;
use dom_struct::dom_struct;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use js::jsapi::JSAutoCompartment;
use js::jsval::UndefinedValue;
use js::typedarray::{ArrayBuffer, CreateWith};
use net_traits::{CoreResourceMsg, FetchChannels};
use net_traits::{WebSocketDomAction, WebSocketNetworkEvent};
use net_traits::MessageData;
use net_traits::request::{RequestInit, RequestMode};
use script_runtime::CommonScriptMsg;
use script_runtime::ScriptThreadEventCategory::WebSocketEvent;
use servo_url::ServoUrl;
#[allow(unused_imports)] use std::ascii::AsciiExt;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::ptr;
use std::thread;
use task::{TaskOnce, TaskCanceller};
use task_source::TaskSource;
use task_source::networking::NetworkingTaskSource;
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
enum WebSocketRequestState {
Connecting = 0,
Open = 1,
Closing = 2,
Closed = 3,
}
// Close codes defined in https://tools.ietf.org/html/rfc6455#section-7.4.1
// Names are from https://github.com/mozilla/gecko-dev/blob/master/netwerk/protocol/websocket/nsIWebSocketChannel.idl
#[allow(dead_code)]
mod close_code {
pub const NORMAL: u16 = 1000;
pub const GOING_AWAY: u16 = 1001;
pub const PROTOCOL_ERROR: u16 = 1002;
pub const UNSUPPORTED_DATATYPE: u16 = 1003;
pub const NO_STATUS: u16 = 1005;
pub const ABNORMAL: u16 = 1006;
pub const INVALID_PAYLOAD: u16 = 1007;
pub const POLICY_VIOLATION: u16 = 1008;
pub const TOO_LARGE: u16 = 1009;
pub const EXTENSION_MISSING: u16 = 1010;
pub const INTERNAL_ERROR: u16 = 1011;
pub const TLS_FAILED: u16 = 1015;
}
pub fn close_the_websocket_connection(
address: Trusted<WebSocket>,
task_source: &NetworkingTaskSource,
canceller: &TaskCanceller,
code: Option<u16>,
reason: String,
) {
let close_task = CloseTask {
address: address,
failed: false,
code: code,
reason: Some(reason),
};
task_source.queue_with_canceller(close_task, &canceller).unwrap();
}
pub fn fail_the_websocket_connection(
address: Trusted<WebSocket>,
task_source: &NetworkingTaskSource,
canceller: &TaskCanceller,
) {
let close_task = CloseTask {
address: address,
failed: true,
code: Some(close_code::ABNORMAL),
reason: None,
};
task_source.queue_with_canceller(close_task, &canceller).unwrap();
}
#[dom_struct]
pub struct WebSocket {
eventtarget: EventTarget,
url: ServoUrl,
ready_state: Cell<WebSocketRequestState>,
buffered_amount: Cell<u64>,
clearing_buffer: Cell<bool>, //Flag to tell if there is a running thread to clear buffered_amount
#[ignore_malloc_size_of = "Defined in std"]
sender: DomRefCell<Option<IpcSender<WebSocketDomAction>>>,
binary_type: Cell<BinaryType>,
protocol: DomRefCell<String>, //Subprotocol selected by server
}
impl WebSocket {
fn new_inherited(url: ServoUrl) -> WebSocket {
WebSocket {
eventtarget: EventTarget::new_inherited(),
url: url,
ready_state: Cell::new(WebSocketRequestState::Connecting),
buffered_amount: Cell::new(0),
clearing_buffer: Cell::new(false),
sender: DomRefCell::new(None),
binary_type: Cell::new(BinaryType::Blob),
protocol: DomRefCell::new("".to_owned()),
}
}
fn new(global: &GlobalScope, url: ServoUrl) -> DomRoot<WebSocket> {
reflect_dom_object(Box::new(WebSocket::new_inherited(url)),
global, WebSocketBinding::Wrap)
}
/// <https://html.spec.whatwg.org/multipage/#dom-websocket>
pub fn Constructor(global: &GlobalScope,
url: DOMString,
protocols: Option<StringOrStringSequence>)
-> Fallible<DomRoot<WebSocket>> {
// Steps 1-2.
let url_record = ServoUrl::parse(&url).or(Err(Error::Syntax))?;
// Step 3.
match url_record.scheme() {
"ws" | "wss" => {},
_ => return Err(Error::Syntax),
}
// Step 4.
if url_record.fragment().is_some() {
return Err(Error::Syntax);
}
// Step 5.
let protocols = protocols.map_or(vec![], |p| {
match p {
StringOrStringSequence::String(string) => vec![string.into()],
StringOrStringSequence::StringSequence(seq) => {
seq.into_iter().map(String::from).collect()
},
}
});
// Step 6.
for (i, protocol) in protocols.iter().enumerate() {
// https://tools.ietf.org/html/rfc6455#section-4.1
// Handshake requirements, step 10
if protocols[i + 1..].iter().any(|p| p.eq_ignore_ascii_case(protocol)) {
return Err(Error::Syntax);
}
// https://tools.ietf.org/html/rfc6455#section-4.1
if !is_token(protocol.as_bytes()) {
return Err(Error::Syntax);
}
}
let ws = WebSocket::new(global, url_record.clone());
let address = Trusted::new(&*ws);
// Create the interface for communication with the resource thread
let (dom_action_sender, resource_action_receiver):
(IpcSender<WebSocketDomAction>,
IpcReceiver<WebSocketDomAction>) = ipc::channel().unwrap();
let (resource_event_sender, dom_event_receiver):
(IpcSender<WebSocketNetworkEvent>,
IpcReceiver<WebSocketNetworkEvent>) = ipc::channel().unwrap();
// Step 8.
let request = RequestInit {
url: url_record,
origin: global.origin().immutable().clone(),
mode: RequestMode::WebSocket { protocols },
..RequestInit::default()
};
let channels = FetchChannels::WebSocket {
event_sender: resource_event_sender,
action_receiver: resource_action_receiver,
};
let _ = global.core_resource_thread().send(CoreResourceMsg::Fetch(request, channels));
*ws.sender.borrow_mut() = Some(dom_action_sender);
let task_source = global.networking_task_source();
let canceller = global.task_canceller();
thread::spawn(move || {
while let Ok(event) = dom_event_receiver.recv() {
match event {
WebSocketNetworkEvent::ConnectionEstablished { protocol_in_use } => {
let open_thread = ConnectionEstablishedTask {
address: address.clone(),
protocol_in_use,
};
task_source.queue_with_canceller(open_thread, &canceller).unwrap();
},
WebSocketNetworkEvent::MessageReceived(message) => {
let message_thread = MessageReceivedTask {
address: address.clone(),
message: message,
};
task_source.queue_with_canceller(message_thread, &canceller).unwrap();
},
WebSocketNetworkEvent::Fail => {
fail_the_websocket_connection(address.clone(),
&task_source, &canceller);
},
WebSocketNetworkEvent::Close(code, reason) => {
close_the_websocket_connection(address.clone(),
&task_source, &canceller, code, reason);
},
}
}
});
// Step 7.
Ok(ws)
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-send
fn send_impl(&self, data_byte_len: u64) -> Fallible<bool> {
let return_after_buffer = match self.ready_state.get() {
WebSocketRequestState::Connecting => {
return Err(Error::InvalidState);
},
WebSocketRequestState::Open => false,
WebSocketRequestState::Closing | WebSocketRequestState::Closed => true,
};
let address = Trusted::new(self);
match data_byte_len.checked_add(self.buffered_amount.get()) {
None => panic!(),
Some(new_amount) => self.buffered_amount.set(new_amount)
};
if return_after_buffer {
return Ok(false);
}
if !self.clearing_buffer.get() && self.ready_state.get() == WebSocketRequestState::Open {
self.clearing_buffer.set(true);
let task = Box::new(BufferedAmountTask {
address: address,
});
let pipeline_id = self.global().pipeline_id();
self.global()
.script_chan()
.send(CommonScriptMsg::Task(WebSocketEvent, task, Some(pipeline_id)))
.unwrap();
}
Ok(true)
}
}
impl WebSocketMethods for WebSocket {
// https://html.spec.whatwg.org/multipage/#handler-websocket-onopen
event_handler!(open, GetOnopen, SetOnopen);
// https://html.spec.whatwg.org/multipage/#handler-websocket-onclose
event_handler!(close, GetOnclose, SetOnclose);
// https://html.spec.whatwg.org/multipage/#handler-websocket-onerror
event_handler!(error, GetOnerror, SetOnerror);
// https://html.spec.whatwg.org/multipage/#handler-websocket-onmessage
event_handler!(message, GetOnmessage, SetOnmessage);
// https://html.spec.whatwg.org/multipage/#dom-websocket-url
fn Url(&self) -> DOMString {
DOMString::from(self.url.as_str())
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-readystate
fn ReadyState(&self) -> u16 {
self.ready_state.get() as u16
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-bufferedamount
fn BufferedAmount(&self) -> u64 {
self.buffered_amount.get()
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-binarytype
fn BinaryType(&self) -> BinaryType {
self.binary_type.get()
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-binarytype
fn SetBinaryType(&self, btype: BinaryType) {
self.binary_type.set(btype)
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-protocol
fn Protocol(&self) -> DOMString {
DOMString::from(self.protocol.borrow().clone())
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-send
fn Send(&self, data: USVString) -> ErrorResult {
let data_byte_len = data.0.as_bytes().len() as u64;
let send_data = self.send_impl(data_byte_len)?;
if send_data {
let mut other_sender = self.sender.borrow_mut();
let my_sender = other_sender.as_mut().unwrap();
let _ = my_sender.send(WebSocketDomAction::SendMessage(MessageData::Text(data.0)));
}
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-send
fn Send_(&self, blob: &Blob) -> ErrorResult {
/* As per https://html.spec.whatwg.org/multipage/#websocket
the buffered amount needs to be clamped to u32, even though Blob.Size() is u64
If the buffer limit is reached in the first place, there are likely other major problems
*/
let data_byte_len = blob.Size();
let send_data = self.send_impl(data_byte_len)?;
if send_data {
let mut other_sender = self.sender.borrow_mut();
let my_sender = other_sender.as_mut().unwrap();
let bytes = blob.get_bytes().unwrap_or(vec![]);
let _ = my_sender.send(WebSocketDomAction::SendMessage(MessageData::Binary(bytes)));
}
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-websocket-close
fn Close(&self, code: Option<u16>, reason: Option<USVString>) -> ErrorResult {
if let Some(code) = code {
//Fail if the supplied code isn't normal and isn't reserved for libraries, frameworks, and applications
if code != close_code::NORMAL && (code < 3000 || code > 4999) {
return Err(Error::InvalidAccess);
}
}
if let Some(ref reason) = reason {
if reason.0.as_bytes().len() > 123 { //reason cannot be larger than 123 bytes
return Err(Error::Syntax);
}
}
match self.ready_state.get() {
WebSocketRequestState::Closing | WebSocketRequestState::Closed => {} //Do nothing
WebSocketRequestState::Connecting => { //Connection is not yet established
/*By setting the state to closing, the open function
will abort connecting the websocket*/
self.ready_state.set(WebSocketRequestState::Closing);
let address = Trusted::new(self);
let task_source = self.global().networking_task_source();
fail_the_websocket_connection(address, &task_source, &self.global().task_canceller());
}
WebSocketRequestState::Open => {
self.ready_state.set(WebSocketRequestState::Closing);
// Kick off _Start the WebSocket Closing Handshake_
// https://tools.ietf.org/html/rfc6455#section-7.1.2
let reason = reason.map(|reason| reason.0);
let mut other_sender = self.sender.borrow_mut();
let my_sender = other_sender.as_mut().unwrap();
let _ = my_sender.send(WebSocketDomAction::Close(code, reason));
}
}
Ok(()) //Return Ok
}
}
/// Task queued when *the WebSocket connection is established*.
/// <https://html.spec.whatwg.org/multipage/#feedback-from-the-protocol:concept-websocket-established>
struct ConnectionEstablishedTask {
address: Trusted<WebSocket>,
protocol_in_use: Option<String>,
}
impl TaskOnce for ConnectionEstablishedTask {
/// <https://html.spec.whatwg.org/multipage/#feedback-from-the-protocol:concept-websocket-established>
fn run_once(self) {
let ws = self.address.root();
// Step 1.
ws.ready_state.set(WebSocketRequestState::Open);
// Step 2: Extensions.
// TODO: Set extensions to extensions in use.
// Step 3.
if let Some(protocol_name) = self.protocol_in_use {
*ws.protocol.borrow_mut() = protocol_name;
};
// Step 4.
ws.upcast().fire_event(atom!("open"));
}
}
struct BufferedAmountTask {
address: Trusted<WebSocket>,
}
impl TaskOnce for BufferedAmountTask {
// See https://html.spec.whatwg.org/multipage/#dom-websocket-bufferedamount
//
// To be compliant with standards, we need to reset bufferedAmount only when the event loop
// reaches step 1. In our implementation, the bytes will already have been sent on a background
// thread.
fn run_once(self) {
let ws = self.address.root();
ws.buffered_amount.set(0);
ws.clearing_buffer.set(false);
}
}
struct CloseTask {
address: Trusted<WebSocket>,
failed: bool,
code: Option<u16>,
reason: Option<String>,
}
impl TaskOnce for CloseTask {
fn run_once(self) {
let ws = self.address.root();
if ws.ready_state.get() == WebSocketRequestState::Closed {
// Do nothing if already closed.
return;
}
// Perform _the WebSocket connection is closed_ steps.
// https://html.spec.whatwg.org/multipage/#closeWebSocket
// Step 1.
ws.ready_state.set(WebSocketRequestState::Closed);
// Step 2.
if self.failed {
ws.upcast().fire_event(atom!("error"));
}
// Step 3.
let clean_close = !self.failed;
let code = self.code.unwrap_or(close_code::NO_STATUS);
let reason = DOMString::from(self.reason.unwrap_or("".to_owned()));
let close_event = CloseEvent::new(&ws.global(),<|fim▁hole|> EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable,
clean_close,
code,
reason);
close_event.upcast::<Event>().fire(ws.upcast());
}
}
struct MessageReceivedTask {
address: Trusted<WebSocket>,
message: MessageData,
}
impl TaskOnce for MessageReceivedTask {
#[allow(unsafe_code)]
fn run_once(self) {
let ws = self.address.root();
debug!("MessageReceivedTask::handler({:p}): readyState={:?}", &*ws,
ws.ready_state.get());
// Step 1.
if ws.ready_state.get() != WebSocketRequestState::Open {
return;
}
// Step 2-5.
let global = ws.global();
// global.get_cx() returns a valid `JSContext` pointer, so this is safe.
unsafe {
let cx = global.get_cx();
let _ac = JSAutoCompartment::new(cx, ws.reflector().get_jsobject().get());
rooted!(in(cx) let mut message = UndefinedValue());
match self.message {
MessageData::Text(text) => text.to_jsval(cx, message.handle_mut()),
MessageData::Binary(data) => {
match ws.binary_type.get() {
BinaryType::Blob => {
let blob = Blob::new(&global, BlobImpl::new_from_bytes(data), "".to_owned());
blob.to_jsval(cx, message.handle_mut());
}
BinaryType::Arraybuffer => {
rooted!(in(cx) let mut array_buffer = ptr::null_mut());
assert!(ArrayBuffer::create(cx,
CreateWith::Slice(&data),
array_buffer.handle_mut())
.is_ok());
(*array_buffer).to_jsval(cx, message.handle_mut());
}
}
},
}
MessageEvent::dispatch_jsval(ws.upcast(), &global, message.handle());
}
}
}<|fim▁end|> | atom!("close"), |
<|file_name|>image_math.cpp<|end_file_name|><|fim▁begin|>/*
* image_math.cpp
* PHD Guiding
*
* Created by Craig Stark.
* Copyright (c) 2006-2010 Craig Stark.
* All rights reserved.
*
* This source code is distributed under the following "BSD" license
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* Neither the name of Craig Stark, Stark Labs nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "phd.h"
#include "image_math.h"
#include <wx/wfstream.h>
#include <wx/txtstrm.h>
#include <wx/tokenzr.h>
#include <algorithm>
int dbl_sort_func (double *first, double *second)
{
if (*first < *second)
return -1;
else if (*first > *second)
return 1;
return 0;
}
double CalcSlope(const ArrayOfDbl& y)
{
// Does a linear regression to calculate the slope
int nn = (int) y.GetCount();
if (nn < 2)
return 0.;
double s_xy = 0.0;
double s_y = 0.0;
for (int x = 0; x < nn; x++)
{
s_xy += (double)(x + 1) * y[x];
s_y += y[x];
}
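    // x takes the values 1..n in the loop above, so the sums have closed forms:
    // sum(x) = n(n+1)/2 and sum(x^2) = n(n+1)(2n+1)/6 = sx * (2n+1) / 3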
int sx = (nn * (nn + 1)) / 2;
int sxx = sx * (2 * nn + 1) / 3;
double s_x = (double) sx;
double s_xx = (double) sxx;
double n = (double) nn;
return (n * s_xy - (s_x * s_y)) / (n * s_xx - (s_x * s_x));
}
bool QuickLRecon(usImage& img)
{
// Does a simple debayer of luminance data only -- sliding 2x2 window
usImage tmp;
if (tmp.Init(img.Size))
{
pFrame->Alert(_("Memory allocation error"));
return true;
}
int const W = img.Size.GetWidth();
int RX, RY, RW, RH;
if (img.Subframe.IsEmpty())
{
RX = RY = 0;
RW = img.Size.GetWidth();
RH = img.Size.GetHeight();
}
else
{
RX = img.Subframe.GetX();
RY = img.Subframe.GetY();
RW = img.Subframe.GetWidth();
RH = img.Subframe.GetHeight();
tmp.Clear();
}
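// IX(x_, y_) maps subframe-relative coordinates to an index into the full-width image buffer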
#define IX(x_, y_) ((RY + (y_)) * W + RX + (x_))
unsigned short *d;
unsigned int t;
for (int y = 0; y <= RH - 2; y++)
{
d = &tmp.ImageData[IX(0, y)];
for (int x = 0; x <= RW - 2; x++)
{
t = img.ImageData[IX(x , y )];
t += img.ImageData[IX(x + 1, y )];
t += img.ImageData[IX(x , y + 1)];
t += img.ImageData[IX(x + 1, y + 1)];
*d++ = (unsigned short)(t >> 2);
}
// last col
t = img.ImageData[IX(RW - 1, y )];
t += img.ImageData[IX(RW - 1, y + 1)];
*d = (unsigned short)(t >> 1);
}
// last row
d = &tmp.ImageData[IX(0, RH - 1)];
for (int x = 0; x <= RW - 2; x++)
{
t = img.ImageData[IX(x , RH - 1)];
t += img.ImageData[IX(x + 1, RH - 1)];
*d++ = (unsigned short)(t >> 1);
}
// bottom-right pixel
*d = img.ImageData[IX(RW - 1, RH - 1)];
#undef IX
img.SwapImageData(tmp);
return false;
}
bool Median3(usImage& img)
{
usImage tmp;
tmp.Init(img.Size);
bool err;
if (img.Subframe.IsEmpty())
{
err = Median3(tmp.ImageData, img.ImageData, img.Size, wxRect(img.Size));
}
else
{
tmp.Clear();
err = Median3(tmp.ImageData, img.ImageData, img.Size, img.Subframe);
}
img.SwapImageData(tmp);
return err;
}
inline static void swap(unsigned short& a, unsigned short& b)
{
unsigned short const t = a;
a = b;
b = t;
}
inline static unsigned short median9(const unsigned short l[9])
{
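    // Partial selection network: l0..l4 always hold the five smallest values seen so far;
    // once all nine inputs have been folded in, the largest of those five is the median.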
unsigned short l0 = l[0], l1 = l[1], l2 = l[2], l3 = l[3], l4 = l[4];
unsigned short x;
x = l[5];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
if (x < l4) swap(x, l4);
x = l[6];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
if (x < l4) swap(x, l4);
x = l[7];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
if (x < l4) swap(x, l4);
x = l[8];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
if (x < l4) swap(x, l4);
if (l1 > l0) l0 = l1;
if (l2 > l0) l0 = l2;
if (l3 > l0) l0 = l3;
if (l4 > l0) l0 = l4;
return l0;
}
inline static unsigned short median8(const unsigned short l[8])
{
unsigned short l0 = l[0], l1 = l[1], l2 = l[2], l3 = l[3], l4 = l[4];
unsigned short x;
x = l[5];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
if (x < l4) swap(x, l4);
x = l[6];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
if (x < l4) swap(x, l4);
x = l[7];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
if (x < l4) swap(x, l4);
if (l2 > l0) swap(l2, l0);
if (l2 > l1) swap(l2, l1);
if (l3 > l0) swap(l3, l0);
if (l3 > l1) swap(l3, l1);
if (l4 > l0) swap(l4, l0);
if (l4 > l1) swap(l4, l1);
return (unsigned short)(((unsigned int) l0 + (unsigned int) l1) / 2);
}
inline static unsigned short median6(const unsigned short l[6])
{
unsigned short l0 = l[0], l1 = l[1], l2 = l[2], l3 = l[3];
unsigned short x;
x = l[4];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
x = l[5];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (x < l3) swap(x, l3);
if (l2 > l0) swap(l2, l0);
if (l2 > l1) swap(l2, l1);
if (l3 > l0) swap(l3, l0);
if (l3 > l1) swap(l3, l1);
return (unsigned short)(((unsigned int) l0 + (unsigned int) l1) / 2);
}
inline static unsigned short median5(const unsigned short l[5])
{
unsigned short l0 = l[0], l1 = l[1], l2 = l[2];
unsigned short x;
x = l[3];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
x = l[4];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (l1 > l0) l0 = l1;
if (l2 > l0) l0 = l2;
return l0;
}
inline static unsigned short median4(const unsigned short l[4])
{
unsigned short l0 = l[0], l1 = l[1], l2 = l[2];
unsigned short x;
x = l[3];
if (x < l0) swap(x, l0);
if (x < l1) swap(x, l1);
if (x < l2) swap(x, l2);
if (l2 > l0) swap(l2, l0);
if (l2 > l1) swap(l2, l1);
return (unsigned short)(((unsigned int) l0 + (unsigned int) l1) / 2);
}
inline static unsigned short median3(const unsigned short l[3])
{
unsigned short l0 = l[0], l1 = l[1], l2 = l[2];
if (l2 < l0) swap(l2, l0);
if (l2 < l1) swap(l2, l1);
if (l1 > l0) l0 = l1;
return l0;
}
bool Median3(unsigned short *dst, const unsigned short *src, const wxSize& size, const wxRect& rect)
{
int const W = size.GetWidth();
int const RX = rect.GetX();
int const RY = rect.GetY();
int const RW = rect.GetWidth();
int const RH = rect.GetHeight();
unsigned short a[9];
unsigned short *d;
#define IX(x_, y_) ((RY + (y_)) * W + RX + (x_))
// top row
d = &dst[IX(0, 0)];
// top-left corner
a[0] = src[IX(0, 0)];
a[1] = src[IX(1, 0)];
a[2] = src[IX(0, 1)];
a[3] = src[IX(1, 1)];
*d++ = median4(a);
// top row middle pixels
for (int x = 1; x <= RW - 2; x++)
{
a[0] = src[IX(x - 1, 0)];
a[1] = src[IX(x, 0)];
a[2] = src[IX(x + 1, 0)];
a[3] = src[IX(x - 1, 1)];
a[4] = src[IX(x, 1)];
a[5] = src[IX(x + 1, 1)];
*d++ = median6(a);
}
// top-right corner
a[0] = src[IX(RW - 2, 0)];
a[1] = src[IX(RW - 1, 0)];
a[2] = src[IX(RW - 2, 1)];
a[3] = src[IX(RW - 1, 1)];
*d = median4(a);
for (int y = 1; y <= RH - 2; y++)
{
d = &dst[IX(0, y)];
// leftmost pixel
a[0] = src[IX(0, y - 1)];
a[1] = src[IX(1, y - 1)];
a[2] = src[IX(0, y )];
a[3] = src[IX(1, y )];
a[4] = src[IX(0, y + 1)];
a[5] = src[IX(1, y + 1)];
*d++ = median6(a);
for (int x = 1; x <= RW - 2; x++)
{
a[0] = src[IX(x - 1, y - 1)];
a[1] = src[IX(x , y - 1)];
a[2] = src[IX(x + 1, y - 1)];
a[3] = src[IX(x - 1, y )];
a[4] = src[IX(x , y )];
a[5] = src[IX(x + 1, y )];
a[6] = src[IX(x - 1, y + 1)];
a[7] = src[IX(x , y + 1)];
a[8] = src[IX(x + 1, y + 1)];
*d++ = median9(a);
}
// rightmost pixel
a[0] = src[IX(RW - 2, y - 1)];
a[1] = src[IX(RW - 1, y - 1)];
a[2] = src[IX(RW - 2, y )];
a[3] = src[IX(RW - 1, y )];
a[4] = src[IX(RW - 2, y + 1)];
a[5] = src[IX(RW - 1, y + 1)];
*d++ = median6(a);
}
// bottom row
d = &dst[IX(0, RH - 1)];
// bottom-left corner
a[0] = src[IX(0, RH - 2)];
a[1] = src[IX(1, RH - 2)];
a[2] = src[IX(0, RH - 1)];
a[3] = src[IX(1, RH - 1)];
*d++ = median4(a);
// bottom row middle pixels
for (int x = 1; x <= RW - 2; x++)
{
a[0] = src[IX(x - 1, RH - 2)];
a[1] = src[IX(x , RH - 2)];
a[2] = src[IX(x + 1, RH - 2)];
a[3] = src[IX(x - 1, RH - 1)];
a[4] = src[IX(x , RH - 1)];
a[5] = src[IX(x + 1, RH - 1)];
*d++ = median6(a);
}
// bottom-right corner
a[0] = src[IX(RW - 2, RH - 2)];
a[1] = src[IX(RW - 1, RH - 2)];
a[2] = src[IX(RW - 2, RH - 1)];
a[3] = src[IX(RW - 1, RH - 1)];
*d = median4(a);
#undef IX
return false;
}
static unsigned short MedianBorderingPixels(const usImage& img, int x, int y)
{
unsigned short array[8];
int const xsize = img.Size.GetWidth();
int const ysize = img.Size.GetHeight();
if (x > 0 && y > 0 && x < xsize - 1 && y < ysize - 1)
{
array[0] = img.ImageData[(x-1) + (y-1) * xsize];
array[1] = img.ImageData[(x) + (y-1) * xsize];
array[2] = img.ImageData[(x+1) + (y-1) * xsize];
array[3] = img.ImageData[(x-1) + (y) * xsize];
array[4] = img.ImageData[(x+1) + (y) * xsize];
array[5] = img.ImageData[(x-1) + (y+1) * xsize];
array[6] = img.ImageData[(x) + (y+1) * xsize];
<|fim▁hole|> return median8(array);
}
if (x == 0 && y > 0 && y < ysize - 1)
{
// On left edge
array[0] = img.ImageData[(x) + (y - 1) * xsize];
array[1] = img.ImageData[(x) + (y + 1) * xsize];
array[2] = img.ImageData[(x + 1) + (y - 1) * xsize];
array[3] = img.ImageData[(x + 1) + (y) * xsize];
array[4] = img.ImageData[(x + 1) + (y + 1) * xsize];
return median5(array);
}
if (x == xsize - 1 && y > 0 && y < ysize - 1)
{
// On right edge
array[0] = img.ImageData[(x) + (y - 1) * xsize];
array[1] = img.ImageData[(x) + (y + 1) * xsize];
array[2] = img.ImageData[(x - 1) + (y - 1) * xsize];
array[3] = img.ImageData[(x - 1) + (y) * xsize];
array[4] = img.ImageData[(x - 1) + (y + 1) * xsize];
return median5(array);
}
if (y == 0 && x > 0 && x < xsize - 1)
{
// On bottom edge
array[0] = img.ImageData[(x - 1) + (y) * xsize];
array[1] = img.ImageData[(x - 1) + (y + 1) * xsize];
array[2] = img.ImageData[(x) + (y + 1) * xsize];
array[3] = img.ImageData[(x + 1) + (y) * xsize];
array[4] = img.ImageData[(x + 1) + (y + 1) * xsize];
return median5(array);
}
if (y == ysize - 1 && x > 0 && x < xsize - 1)
{
// On top edge
array[0] = img.ImageData[(x - 1) + (y) * xsize];
array[1] = img.ImageData[(x - 1) + (y - 1) * xsize];
array[2] = img.ImageData[(x) + (y - 1) * xsize];
array[3] = img.ImageData[(x + 1) + (y) * xsize];
array[4] = img.ImageData[(x + 1) + (y - 1) * xsize];
return median5(array);
}
if (x == 0 && y == 0)
{
// At lower left corner
array[0] = img.ImageData[(x + 1) + (y) * xsize];
array[1] = img.ImageData[(x) + (y + 1) * xsize];
array[2] = img.ImageData[(x + 1) + (y + 1) * xsize];
}
else if (x == 0 && y == ysize - 1)
{
// At upper left corner
array[0] = img.ImageData[(x + 1) + (y) * xsize];
array[1] = img.ImageData[(x) + (y - 1) * xsize];
array[2] = img.ImageData[(x + 1) + (y - 1) * xsize];
}
else if (x == xsize - 1 && y == ysize - 1)
{
// At upper right corner
array[0] = img.ImageData[(x - 1) + (y) * xsize];
array[1] = img.ImageData[(x) + (y - 1) * xsize];
array[2] = img.ImageData[(x - 1) + (y - 1) * xsize];
}
else if (x == xsize - 1 && y == 0)
{
// At lower right corner
array[0] = img.ImageData[(x - 1) + (y) * xsize];
array[1] = img.ImageData[(x) + (y + 1) * xsize];
array[2] = img.ImageData[(x - 1) + (y + 1) * xsize];
}
else
{
// unreachable
return 0;
}
return median3(array);
}
bool SquarePixels(usImage& img, float xsize, float ysize)
{
// Stretches one dimension to square up pixels
if (!img.ImageData)
return true;
if (xsize <= ysize)
return false;
// Move the existing data to a temp image
usImage tempimg;
if (tempimg.Init(img.Size))
{
pFrame->Alert(_("Memory allocation error"));
return true;
}
tempimg.SwapImageData(img);
// if X > Y, when viewing stock, Y is unnaturally stretched, so stretch X to match
double ratio = ysize / xsize;
int newsize = ROUND((float) tempimg.Size.GetWidth() * (1.0/ratio)); // make new image correct size
img.Init(newsize,tempimg.Size.GetHeight());
unsigned short *optr = img.ImageData;
int linesize = tempimg.Size.GetWidth(); // size of an original line
for (int y = 0; y < img.Size.GetHeight(); y++)
{
for (int x = 0; x < newsize; x++, optr++)
{
double oldposition = x * ratio;
int ind1 = (unsigned int) floor(oldposition);
int ind2 = (unsigned int) ceil(oldposition);
if (ind2 > (tempimg.Size.GetWidth() - 1))
ind2 = tempimg.Size.GetWidth() - 1;
double weight = ceil(oldposition) - oldposition;
            *optr = (unsigned short) (((float) *(tempimg.ImageData + y*linesize + ind1) * weight) + ((float) *(tempimg.ImageData + y*linesize + ind2) * (1.0 - weight)));
}
}
return false;
}
bool Subtract(usImage& light, const usImage& dark)
{
if (!light.ImageData || !dark.ImageData)
return true;
if (light.Size != dark.Size)
return true;
unsigned int left, top, width, height;
if (!light.Subframe.IsEmpty())
{
left = light.Subframe.GetLeft();
width = light.Subframe.GetWidth();
top = light.Subframe.GetTop();
height = light.Subframe.GetHeight();
}
else
{
left = top = 0;
width = light.Size.GetWidth();
height = light.Size.GetHeight();
}
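    // First pass: find the most negative (light - dark) difference so a constant pedestal
    // can be added in the second pass, keeping every subtracted pixel non-negative.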
int mindiff = 65535;
unsigned short *pl0 = &light.Pixel(left, top);
const unsigned short *pd0 = &dark.Pixel(left, top);
for (unsigned int r = 0; r < height;
r++, pl0 += light.Size.GetWidth(), pd0 += light.Size.GetWidth())
{
unsigned short *const endl = pl0 + width;
unsigned short *pl;
const unsigned short *pd;
for (pl = pl0, pd = pd0; pl < endl; pl++, pd++)
{
int diff = (int) *pl - (int) *pd;
if (diff < mindiff)
mindiff = diff;
}
}
int offset = 0;
if (mindiff < 0) // dark was lighter than light
offset = -mindiff;
pl0 = &light.Pixel(left, top);
pd0 = &dark.Pixel(left, top);
for (unsigned int r = 0; r < height;
r++, pl0 += light.Size.GetWidth(), pd0 += light.Size.GetWidth())
{
unsigned short *const endl = pl0 + width;
unsigned short *pl;
const unsigned short *pd;
for (pl = pl0, pd = pd0; pl < endl; pl++, pd++)
{
int newval = (int) *pl - (int) *pd + offset;
if (newval < 0) newval = 0; // shouldn't hit this...
else if (newval > 65535) newval = 65535;
*pl = (unsigned short) newval;
}
}
return false;
}
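// Median lookup over a two-level histogram: histo1 counts pixels by high byte (256 coarse bins),
// histo2 holds exact 16-bit counts. The coarse pass narrows the search to one 256-value range,
// so the fine pass only has to walk a small slice of the 65536 bins.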
inline static unsigned short histo_median(unsigned short histo1[256], unsigned short histo2[65536], int n)
{
n /= 2;
unsigned int i;
for (i = 0; i < 256; i++)
{
if (histo1[i] > n)
break;
n -= histo1[i];
}
for (i <<= 8; i < 65536; i++)
{
if (histo2[i] > n)
break;
n -= histo2[i];
}
return i;
}
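// Sliding-window median filter: each output pixel is the median of a (2*halfWidth+1)-square
// neighborhood, clamped at the image edges. The two-level histogram is updated incrementally,
// removing the column that leaves the window and adding the one that enters as the window slides right.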
static void MedianFilter(usImage& dst, const usImage& src, int halfWidth)
{
dst.Init(src.Size);
unsigned short *d = &dst.ImageData[0];
int const width = src.Size.GetWidth();
int const height = src.Size.GetHeight();
for (int y = 0; y < height; y++)
{
int top = std::max(0, y - halfWidth);
int bot = std::min(y + halfWidth, height - 1);
int left = 0;
int right = halfWidth;
// TODO: we initialize the histogram at the start of each row, but we could make this faster
// if we scan left to right, move down, scan right to left, move down so we never need to
// reinitialize the histogram
// initialize 2-level histogram
unsigned short histo1[256];
unsigned short histo2[65536];
memset(&histo1[0], 0, sizeof(histo1));
memset(&histo2[0], 0, sizeof(histo2));
for (int j = top; j <= bot; j++)
{
const unsigned short *p = &src.Pixel(left, j);
for (int i = left; i <= right; i++, p++)
{
++histo1[*p >> 8];
++histo2[*p];
}
}
unsigned int n = (right - left + 1) * (bot - top + 1);
// read off first value for this row
*d++ = histo_median(histo1, histo2, n);
// loop across remaining columns for this row
for (int i = 1; i < width; i++)
{
left = std::max(0, i - halfWidth);
right = std::min(i + halfWidth, width - 1);
// remove leftmost column
if (left > 0)
{
const unsigned short *p = &src.Pixel(left - 1, top);
for (int j = top; j <= bot; j++, p += width)
{
--histo1[*p >> 8];
--histo2[*p];
}
n -= (bot - top + 1);
}
// add new column on right
if (i + halfWidth <= width - 1)
{
const unsigned short *p = &src.Pixel(right, top);
for (int j = top; j <= bot; j++, p += width)
{
++histo1[*p >> 8];
++histo2[*p];
}
n += (bot - top + 1);
}
*d++ = histo_median(histo1, histo2, n);
}
}
}
struct ImageStatsWork
{
ImageStats stats;
usImage temp;
};
static void GetImageStats(ImageStatsWork& w, const usImage& img, const wxRect& win)
{
w.temp.Init(img.Size);
// Determine the mean and standard deviation
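    // Welford's online algorithm: 'a' is the running mean and 'q' accumulates the sum of
    // squared deviations, so mean and standard deviation come from a single pass over the pixels.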
double sum = 0.0;
double a = 0.0;
double q = 0.0;
double k = 1.0;
double km1 = 0.0;
const unsigned short *p0 = &img.Pixel(win.GetLeft(), win.GetTop());
unsigned short *dst = &w.temp.ImageData[0];
for (int y = 0; y < win.GetHeight(); y++)
{
const unsigned short *end = p0 + win.GetWidth();
for (const unsigned short *p = p0; p < end; p++)
{
*dst++ = *p;
double const x = (double) *p;
sum += x;
double const a0 = a;
a += (x - a) / k;
q += (x - a0) * (x - a);
km1 = k;
k += 1.0;
}
p0 += img.Size.GetWidth();
}
w.stats.mean = sum / km1;
w.stats.stdev = sqrt(q / km1);
int winPixels = win.GetWidth() * win.GetHeight();
unsigned short *tmp = &w.temp.ImageData[0];
std::nth_element(tmp, tmp + winPixels / 2, tmp + winPixels);
w.stats.median = tmp[winPixels / 2];
// replace each pixel with the absolute deviation from the median
unsigned short *p = tmp;
for (int i = 0; i < winPixels; i++)
{
unsigned short ad = (unsigned short) std::abs((int) *p - (int) w.stats.median);
*p++ = ad;
}
std::nth_element(tmp, tmp + winPixels / 2, tmp + winPixels);
w.stats.mad = tmp[winPixels / 2];
}
void DefectMapDarks::BuildFilteredDark()
{
enum { WINDOW = 15 };
filteredDark.Init(masterDark.Size);
MedianFilter(filteredDark, masterDark, WINDOW);
}
static wxString DefectMapMasterPath(int profileId)
{
int inst = pFrame->GetInstanceNumber();
return MyFrame::GetDarksDir() + PATHSEPSTR +
wxString::Format("PHD2_defect_map_master%s_%d.fit", inst > 1 ? wxString::Format("_%d", inst) : "", profileId);
}
static wxString DefectMapMasterPath()
{
return DefectMapMasterPath(pConfig->GetCurrentProfileId());
}
static wxString DefectMapFilterPath(int profileId)
{
int inst = pFrame->GetInstanceNumber();
return MyFrame::GetDarksDir() + PATHSEPSTR +
wxString::Format("PHD2_defect_map_master_filt%s_%d.fit", inst > 1 ? wxString::Format("_%d", inst) : "", profileId);
}
static wxString DefectMapFilterPath()
{
return DefectMapFilterPath(pConfig->GetCurrentProfileId());
}
void DefectMapDarks::SaveDarks(const wxString& notes)
{
masterDark.Save(DefectMapMasterPath(), notes);
filteredDark.Save(DefectMapFilterPath());
}
void DefectMapDarks::LoadDarks()
{
masterDark.Load(DefectMapMasterPath());
filteredDark.Load(DefectMapFilterPath());
}
struct BadPx
{
unsigned short x;
unsigned short y;
int v;
BadPx();
BadPx(int x_, int y_, int v_) : x(x_), y(y_), v(v_) { }
bool operator<(const BadPx& rhs) const { return v < rhs.v; }
};
typedef std::set<BadPx> BadPxSet;
struct DefectMapBuilderImpl
{
DefectMapDarks *darks;
ImageStatsWork w;
wxArrayString mapInfo;
int aggrCold;
int aggrHot;
BadPxSet coldPx;
BadPxSet hotPx;
BadPxSet::const_iterator coldPxThresh;
BadPxSet::const_iterator hotPxThresh;
unsigned int coldPxSelected;
unsigned int hotPxSelected;
bool threshValid;
DefectMapBuilderImpl()
:
darks(0),
aggrCold(100),
aggrHot(100),
threshValid(false)
{ }
};
DefectMapBuilder::DefectMapBuilder()
: m_impl(new DefectMapBuilderImpl())
{
}
DefectMapBuilder::~DefectMapBuilder()
{
delete m_impl;
}
inline static double AggrToSigma(int val)
{
    // Aggressiveness of 0 to 100 maps to sigma factor from 8.0 to 0.125
return exp2(3.0 - (6.0 / 100.0) * (double)val);
}
void DefectMapBuilder::Init(DefectMapDarks& darks)
{
m_impl->darks = &darks;
Debug.AddLine("DefectMapBuilder: Init");
::GetImageStats(m_impl->w, darks.masterDark,
wxRect(0, 0, darks.masterDark.Size.GetWidth(), darks.masterDark.Size.GetHeight()));
const ImageStats& stats = m_impl->w.stats;
Debug.AddLine("DefectMapBuilder: Dark N = %d Mean = %.f Median = %d Standard Deviation = %.f MAD=%d",
darks.masterDark.NPixels, stats.mean, stats.median, stats.stdev, stats.mad);
// load potential defects
int thresh = (int)(AggrToSigma(100) * stats.stdev);
Debug.AddLine("DefectMapBuilder: load potential defects thresh = %d", thresh);
usImage& dark = m_impl->darks->masterDark;
usImage& medianFilt = m_impl->darks->filteredDark;
m_impl->coldPx.clear();
m_impl->hotPx.clear();
for (int y = 0; y < dark.Size.GetHeight(); y++)
{
for (int x = 0; x < dark.Size.GetWidth(); x++)
{
int filt = (int) medianFilt.Pixel(x, y);
int val = (int) dark.Pixel(x, y);
int v = val - filt;
if (v > thresh)
{
m_impl->hotPx.insert(BadPx(x, y, v));
}
else if (-v > thresh)
{
m_impl->coldPx.insert(BadPx(x, y, -v));
}
}
}
Debug.AddLine("DefectMapBuilder: Loaded %d cold %d hot", m_impl->coldPx.size(), m_impl->hotPx.size());
}
const ImageStats& DefectMapBuilder::GetImageStats() const
{
return m_impl->w.stats;
}
void DefectMapBuilder::SetAggressiveness(int aggrCold, int aggrHot)
{
m_impl->aggrCold = std::max(0, std::min(100, aggrCold));
m_impl->aggrHot = std::max(0, std::min(100, aggrHot));
m_impl->threshValid = false;
}
static void FindThresh(DefectMapBuilderImpl *impl)
{
if (impl->threshValid)
return;
double multCold = AggrToSigma(impl->aggrCold);
double multHot = AggrToSigma(impl->aggrHot);
int coldThresh = (int) (multCold * impl->w.stats.stdev);
int hotThresh = (int) (multHot * impl->w.stats.stdev);
Debug.AddLine("DefectMap: find thresholds aggr:(%d,%d) sigma:(%.1f,%.1f) px:(%+d,%+d)",
impl->aggrCold, impl->aggrHot, multCold, multHot, -coldThresh, hotThresh);
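    // The BadPx sets are ordered by deviation value, so lower_bound yields the first pixel at or
    // above each threshold; everything from there to end() is selected as a defect.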
impl->coldPxThresh = impl->coldPx.lower_bound(BadPx(0, 0, coldThresh));
impl->hotPxThresh = impl->hotPx.lower_bound(BadPx(0, 0, hotThresh));
impl->coldPxSelected = std::distance(impl->coldPxThresh, impl->coldPx.end());
impl->hotPxSelected = std::distance(impl->hotPxThresh, impl->hotPx.end());
Debug.AddLine("DefectMap: find thresholds found (%d,%d)", impl->coldPxSelected, impl->hotPxSelected);
impl->threshValid = true;
}
int DefectMapBuilder::GetColdPixelCnt() const
{
FindThresh(m_impl);
return m_impl->coldPxSelected;
}
int DefectMapBuilder::GetHotPixelCnt() const
{
FindThresh(m_impl);
return m_impl->hotPxSelected;
}
inline static unsigned int emit_defects(DefectMap& defectMap, BadPxSet::const_iterator p0, BadPxSet::const_iterator p1, double stdev, int sign, bool verbose)
{
unsigned int cnt = 0;
for (BadPxSet::const_iterator it = p0; it != p1; ++it, ++cnt)
{
if (verbose)
{
int v = sign * it->v;
Debug.AddLine("DefectMap: defect @ (%d, %d) val = %d (%+.1f sigma)", it->x, it->y, v, stdev > 0.1 ? (double)v / stdev : 0.0);
}
defectMap.push_back(wxPoint(it->x, it->y));
}
return cnt;
}
void DefectMapBuilder::BuildDefectMap(DefectMap& defectMap, bool verbose) const
{
wxArrayString& info = m_impl->mapInfo;
double multCold = AggrToSigma(m_impl->aggrCold);
double multHot = AggrToSigma(m_impl->aggrHot);
const ImageStats& stats = m_impl->w.stats;
info.Clear();
info.push_back(wxString::Format("Generated: %s", wxDateTime::UNow().FormatISOCombined(' ')));
info.push_back(wxString::Format("Camera: %s", pCamera->Name));
info.push_back(wxString::Format("Dark Exposure Time: %d ms", m_impl->darks->masterDark.ImgExpDur));
info.push_back(wxString::Format("Dark Frame Count: %d", m_impl->darks->masterDark.ImgStackCnt));
info.push_back(wxString::Format("Aggressiveness, cold: %d", m_impl->aggrCold));
info.push_back(wxString::Format("Aggressiveness, hot: %d", m_impl->aggrHot));
info.push_back(wxString::Format("Sigma Thresh, cold: %.2f", multCold));
info.push_back(wxString::Format("Sigma Thresh, hot: %.2f", multHot));
info.push_back(wxString::Format("Mean: %.f", stats.mean));
info.push_back(wxString::Format("Stdev: %.f", stats.stdev));
info.push_back(wxString::Format("Median: %d", stats.median));
info.push_back(wxString::Format("MAD: %d", stats.mad));
int deltaCold = (int)(multCold * stats.stdev);
int deltaHot = (int)(multHot * stats.stdev);
info.push_back(wxString::Format("DeltaCold: %+d", -deltaCold));
info.push_back(wxString::Format("DeltaHot: %+d", deltaHot));
if (verbose) Debug.AddLine("DefectMap: deltaCold = %+d deltaHot = %+d", -deltaCold, deltaHot);
FindThresh(m_impl);
defectMap.clear();
unsigned int nr_cold = emit_defects(defectMap, m_impl->coldPxThresh, m_impl->coldPx.end(), stats.stdev, -1, verbose);
unsigned int nr_hot = emit_defects(defectMap, m_impl->hotPxThresh, m_impl->hotPx.end(), stats.stdev, +1, verbose);
if (verbose) Debug.AddLine("New defect map created, count=%d (cold=%d, hot=%d)", defectMap.size(), nr_cold, nr_hot);
}
const wxArrayString& DefectMapBuilder::GetMapInfo() const
{
return m_impl->mapInfo;
}
bool RemoveDefects(usImage& light, const DefectMap& defectMap)
{
// Check to make sure the light frame is valid
if (!light.ImageData)
return true;
if (!light.Subframe.IsEmpty())
{
// Step over each defect and replace the light value
// with the median of the surrounding pixels
for (DefectMap::const_iterator it = defectMap.begin(); it != defectMap.end(); ++it)
{
const wxPoint& pt = *it;
// Check to see if we are within the subframe before correcting the defect
if (light.Subframe.Contains(pt))
{
light.Pixel(pt.x, pt.y) = MedianBorderingPixels(light, pt.x, pt.y);
}
}
}
else
{
// Step over each defect and replace the light value
// with the median of the surrounding pixels
for (DefectMap::const_iterator it = defectMap.begin(); it != defectMap.end(); ++it)
{
int const x = it->x;
int const y = it->y;
if (x >= 0 && x < light.Size.GetWidth() && y >= 0 && y < light.Size.GetHeight())
{
light.Pixel(x, y) = MedianBorderingPixels(light, x, y);
}
}
}
return false;
}
wxString DefectMap::DefectMapFileName(int profileId)
{
int inst = pFrame->GetInstanceNumber();
return MyFrame::GetDarksDir() + PATHSEPSTR +
wxString::Format("PHD2_defect_map%s_%d.txt", inst > 1 ? wxString::Format("_%d", inst) : "", profileId);
}
bool DefectMap::ImportFromProfile(int srcId, int destId)
{
wxString sourceName;
wxString destName;
int rslt;
sourceName = DefectMapFileName(srcId);
destName = DefectMapFileName(destId);
rslt = wxCopyFile(sourceName, destName, true);
if (rslt != 1)
{
Debug.Write(wxString::Format("DefectMap::ImportFromProfile failed on defect map copy of %s to %s\n", sourceName, destName));
return false;
}
sourceName = DefectMapMasterPath(srcId);
destName = DefectMapMasterPath(destId);
rslt = wxCopyFile(sourceName, destName, true);
if (rslt != 1)
{
Debug.Write(wxString::Format("DefectMap::ImportFromProfile failed on defect map master dark copy of %s to %s\n", sourceName, destName));
return false;
}
sourceName = DefectMapFilterPath(srcId);
destName = DefectMapFilterPath(destId);
rslt = wxCopyFile(sourceName, destName, true);
if (rslt != 1)
{
Debug.Write(wxString::Format("DefectMap::ImportFromProfile failed on defect map master filtered dark copy of %s to %s\n", sourceName, destName));
return false;
}
return (true);
}
bool DefectMap::DefectMapExists(int profileId, bool showAlert)
{
bool bOk = false;
if (wxFileExists(DefectMapFileName(profileId)))
{
wxString fName = DefectMapMasterPath(profileId);
const wxSize& sensorSize = pCamera->DarkFrameSize();
if (sensorSize == UNDEFINED_FRAME_SIZE)
{
bOk = true;
Debug.AddLine("BPM check: undefined frame size for current camera");
}
else
{
fitsfile *fptr;
int status = 0; // CFITSIO status value MUST be initialized to zero!
if (PHD_fits_open_diskfile(&fptr, fName, READONLY, &status) == 0)
{
long fsize[2];
fits_get_img_size(fptr, 2, fsize, &status);
if (status == 0 && fsize[0] == sensorSize.x && fsize[1] == sensorSize.y)
bOk = true;
else
{
Debug.AddLine(wxString::Format("BPM check: failed geometry check - fits status = %d, cam dimensions = {%d,%d}, "
" BPM dimensions = {%d,%d}", status, sensorSize.x, sensorSize.y, fsize[0], fsize[1]));
if (showAlert)
pFrame->Alert(_("Bad-pixel map does not match the camera in this profile - it needs to be replaced."));
}
PHD_fits_close_file(fptr);
}
else
Debug.AddLine(wxString::Format("BPM check: fitsio error on open_diskfile = %d", status));
}
}
return bOk;
}
void DefectMap::Save(const wxArrayString& info) const
{
wxString filename = DefectMapFileName(m_profileId);
wxFileOutputStream oStream(filename);
wxTextOutputStream outText(oStream);
if (oStream.GetLastError() != wxSTREAM_NO_ERROR)
{
Debug.AddLine(wxString::Format("Failed to save defect map to %s", filename));
return;
}
outText << "# PHD2 Defect Map v1\n";
for (wxArrayString::const_iterator it = info.begin(); it != info.end(); ++it)
{
outText << "# " << *it << "\n";
}
outText << "# Defect count: " << ((unsigned int) size()) << "\n";
for (const_iterator it = begin(); it != end(); ++it)
{
outText << it->x << " " << it->y << "\n";
}
oStream.Close();
Debug.AddLine(wxString::Format("Saved defect map to %s", filename));
}
DefectMap::DefectMap()
: m_profileId(pConfig->GetCurrentProfileId())
{
}
DefectMap::DefectMap(int profileId)
: m_profileId(profileId)
{
}
bool DefectMap::FindDefect(const wxPoint& pt) const
{
return std::find(begin(), end(), pt) != end();
}
void DefectMap::AddDefect(const wxPoint& pt)
{
// first add the point
push_back(pt);
wxString filename = DefectMapFileName(m_profileId);
wxFile file(filename, wxFile::write_append);
wxFileOutputStream oStream(file);
wxTextOutputStream outText(oStream);
if (oStream.GetLastError() != wxSTREAM_NO_ERROR)
{
Debug.AddLine(wxString::Format("Failed to save defect map to %s", filename));
return;
}
outText << pt.x << " " << pt.y << "\n";
oStream.Close();
Debug.AddLine(wxString::Format("Saved defect map to %s", filename));
}
DefectMap *DefectMap::LoadDefectMap(int profileId)
{
wxString filename = DefectMapFileName(profileId);
Debug.AddLine(wxString::Format("Loading defect map file %s", filename));
if (!wxFileExists(filename))
{
Debug.AddLine(wxString::Format("Defect map file not found: %s", filename));
return 0;
}
wxFileInputStream iStream(filename);
wxTextInputStream inText(iStream);
// Re-initialize the defect map and parse the defect map file
if (iStream.GetLastError() != wxSTREAM_NO_ERROR)
{
Debug.AddLine(wxString::Format("Unexpected eof on defect map file %s", filename));
return 0;
}
DefectMap *defectMap = new DefectMap(profileId);
int linenum = 0;
while (!inText.GetInputStream().Eof())
{
wxString line = inText.ReadLine();
++linenum;
line.Trim(false); // trim leading whitespace
if (line.IsEmpty())
continue;
if (line.StartsWith("#"))
continue;
wxStringTokenizer tok(line);
wxString s1 = tok.GetNextToken();
wxString s2 = tok.GetNextToken();
long x, y;
if (s1.ToLong(&x) && s2.ToLong(&y))
{
defectMap->push_back(wxPoint(x, y));
}
else
{
Debug.AddLine(wxString::Format("DefectMap: ignore junk on line %d: %s", linenum, line));
}
}
Debug.AddLine(wxString::Format("Loaded %d defects", defectMap->size()));
return defectMap;
}
void DefectMap::DeleteDefectMap(int profileId)
{
wxString filename = DefectMapFileName(profileId);
if (wxFileExists(filename))
{
Debug.AddLine("Removing defect map file: " + filename);
wxRemoveFile(filename);
}
}<|fim▁end|> | array[7] = img.ImageData[(x+1) + (y+1) * xsize];
|
<|file_name|>XMLparamPrint.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
demos reading HiST camera parameters from XML file
"""
from histutils.hstxmlparse import xmlparam
from argparse import ArgumentParser
if __name__ == "__main__":
p = ArgumentParser()
p.add_argument("fn", help="xml filename to parse")
p = p.parse_args()
params = xmlparam(p.fn)
<|fim▁hole|><|fim▁end|> | print(params) |
<|file_name|>tokenize.test.js<|end_file_name|><|fim▁begin|>const assert = require('assert');
const md5 = require('blueimp-md5');
const createApp = require('../../lib');
describe('tokenize service', () => {
it('tokenizes and stems', () => {
const app = createApp();
const text = `what's the weather in vancouver`;
const hash = md5(text);
return app.service('tokenize').create({ text }).then(data => {
assert.equal(data._id, hash, 'id is MD5 hash of the string');
assert.deepEqual(data, {
_id: '873dd3d48eed1d576a4d5b1dcacd2348',
text: 'what\'s the weather in vancouver',
tokens: [ 'what', 's', 'the', 'weather', 'in', 'vancouver' ],
stems: [ 'what', 's', 'the', 'weather', 'in', 'vancouv' ]<|fim▁hole|> });
});
});
});<|fim▁end|> | |
<|file_name|>QuickSort.py<|end_file_name|><|fim▁begin|>"""
Author: Maneesh Divana <[email protected]>
Interpreter: Python 3.6.8
Quick Sort
Worst Case: O(n^2)
Average Case: O(nlog n)
Best Case: O(nlog n)
"""
from random import shuffle
def partition(arr: list, left: int, right: int) -> int:
"""Partitions the given array based on a pivot element,
then sorts the sub-arrays and returns the partition index"""
# Take the right most element as pivot
pivot = arr[right]
# i tracks the smallest element, currently invalid
i = left - 1
for j in range(left, right):
# Check if the current element is smaller than pivot element
if arr[j] <= pivot:
i += 1
# If so, swap the smallest element and the current element
arr[i], arr[j] = arr[j], arr[i]
# One final swap to put pivot element at its correct position
arr[i + 1], arr[right] = arr[right], arr[i + 1]
# Return the partition index
return i + 1
def qsort(arr: list, left: int, right: int) -> None:
"""Recursively partitions the given array and sorts based on
QuickSort algorithm."""
if left < right:
# Partition the array and get the partition index
p_idx = partition(arr, left, right)
# Recursively partition and sort the sub-arrays
qsort(arr, left, p_idx - 1)
qsort(arr, p_idx + 1, right)
if __name__ == "__main__":
ARR = list(range(0, 10))
shuffle(ARR)
LEFT = 0
RIGHT = len(ARR) - 1
print("\nQuickSort\n")
print("Input array:", ARR)<|fim▁hole|><|fim▁end|> | qsort(ARR, LEFT, RIGHT)
print("\nSorted array:", ARR, "\n") |
<|file_name|>handlebars.min.js<|end_file_name|><|fim▁begin|>hljs.registerLanguage("handlebars",(()=>{"use strict";function e(...e){
return e.map((e=>{return(n=e)?"string"==typeof n?n:n.source:null;var n
})).join("")}return n=>{const a={
"builtin-name":"action bindattr collection component concat debugger each each-in get hash if in input link-to loc log lookup mut outlet partial query-params render template textarea unbound unless view with yield"
},s=/\[.*?\]/,t=/[^\s!"#%&'()*+,.\/;<=>@\[\\\]^`{|}~]+/,i=e("(",/'.*?'/,"|",/".*?"/,"|",s,"|",t,"|",/\.|\//,")+"),r=e("(",s,"|",t,")(?==)"),l={
begin:i,lexemes:/[\w.\/]+/},c=n.inherit(l,{keywords:{<|fim▁hole|>className:"attr",begin:r,relevance:0,starts:{begin:/=/,end:/=/,starts:{
contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,c,o]}}},d={
contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,{begin:/as\s+\|/,
keywords:{keyword:"as"},end:/\|/,contains:[{begin:/\w+/}]},m,c,o],returnEnd:!0
},g=n.inherit(l,{className:"name",keywords:a,starts:n.inherit(d,{end:/\)/})})
;o.contains=[g];const b=n.inherit(l,{keywords:a,className:"name",
starts:n.inherit(d,{end:/\}\}/})}),u=n.inherit(l,{keywords:a,className:"name"
}),h=n.inherit(l,{className:"name",keywords:a,starts:n.inherit(d,{end:/\}\}/})})
;return{name:"Handlebars",
aliases:["hbs","html.hbs","html.handlebars","htmlbars"],case_insensitive:!0,
subLanguage:"xml",contains:[{begin:/\\\{\{/,skip:!0},{begin:/\\\\(?=\{\{)/,
skip:!0},n.COMMENT(/\{\{!--/,/--\}\}/),n.COMMENT(/\{\{!/,/\}\}/),{
className:"template-tag",begin:/\{\{\{\{(?!\/)/,end:/\}\}\}\}/,contains:[b],
starts:{end:/\{\{\{\{\//,returnEnd:!0,subLanguage:"xml"}},{
className:"template-tag",begin:/\{\{\{\{\//,end:/\}\}\}\}/,contains:[u]},{
className:"template-tag",begin:/\{\{#/,end:/\}\}/,contains:[b]},{
className:"template-tag",begin:/\{\{(?=else\}\})/,end:/\}\}/,keywords:"else"},{
className:"template-tag",begin:/\{\{(?=else if)/,end:/\}\}/,keywords:"else if"
},{className:"template-tag",begin:/\{\{\//,end:/\}\}/,contains:[u]},{
className:"template-variable",begin:/\{\{\{/,end:/\}\}\}/,contains:[h]},{
className:"template-variable",begin:/\{\{/,end:/\}\}/,contains:[h]}]}}})());<|fim▁end|> | literal:"true false undefined null"}}),o={begin:/\(/,end:/\)/},m={ |
<|file_name|>window.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the examples of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:BSD$
** You may use this file under the terms of the BSD license as follows:
**
** "Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions are
** met:
** * Redistributions of source code must retain the above copyright
** notice, this list of conditions and the following disclaimer.
** * Redistributions in binary form must reproduce the above copyright
** notice, this list of conditions and the following disclaimer in
** the documentation and/or other materials provided with the
** distribution.
** * Neither the name of Digia Plc and its Subsidiary(-ies) nor the names
** of its contributors may be used to endorse or promote products derived
** from this software without specific prior written permission.
**
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "renderarea.h"
#include "window.h"
#include <QtWidgets>
//! [0]
const int IdRole = Qt::UserRole;
//! [0]
//! [1]
Window::Window()
{
renderArea = new RenderArea;
shapeComboBox = new QComboBox;
shapeComboBox->addItem(tr("Polygon"), RenderArea::Polygon);
shapeComboBox->addItem(tr("Rectangle"), RenderArea::Rect);
shapeComboBox->addItem(tr("Rounded Rectangle"), RenderArea::RoundedRect);
shapeComboBox->addItem(tr("Ellipse"), RenderArea::Ellipse);
shapeComboBox->addItem(tr("Pie"), RenderArea::Pie);
shapeComboBox->addItem(tr("Chord"), RenderArea::Chord);
shapeComboBox->addItem(tr("Path"), RenderArea::Path);
shapeComboBox->addItem(tr("Line"), RenderArea::Line);
shapeComboBox->addItem(tr("Polyline"), RenderArea::Polyline);
shapeComboBox->addItem(tr("Arc"), RenderArea::Arc);
shapeComboBox->addItem(tr("Points"), RenderArea::Points);
shapeComboBox->addItem(tr("Text"), RenderArea::Text);
shapeComboBox->addItem(tr("Pixmap"), RenderArea::Pixmap);
shapeLabel = new QLabel(tr("&Shape:"));
shapeLabel->setBuddy(shapeComboBox);
//! [1]
//! [2]
penWidthSpinBox = new QSpinBox;
penWidthSpinBox->setRange(0, 20);
penWidthSpinBox->setSpecialValueText(tr("0 (cosmetic pen)"));
penWidthLabel = new QLabel(tr("Pen &Width:"));
penWidthLabel->setBuddy(penWidthSpinBox);
//! [2]
//! [3]
penStyleComboBox = new QComboBox;
penStyleComboBox->addItem(tr("Solid"), static_cast<int>(Qt::SolidLine));
penStyleComboBox->addItem(tr("Dash"), static_cast<int>(Qt::DashLine));
penStyleComboBox->addItem(tr("Dot"), static_cast<int>(Qt::DotLine));
penStyleComboBox->addItem(tr("Dash Dot"), static_cast<int>(Qt::DashDotLine));
penStyleComboBox->addItem(tr("Dash Dot Dot"), static_cast<int>(Qt::DashDotDotLine));
penStyleComboBox->addItem(tr("None"), static_cast<int>(Qt::NoPen));
penStyleLabel = new QLabel(tr("&Pen Style:"));
penStyleLabel->setBuddy(penStyleComboBox);
penCapComboBox = new QComboBox;
penCapComboBox->addItem(tr("Flat"), Qt::FlatCap);
penCapComboBox->addItem(tr("Square"), Qt::SquareCap);
penCapComboBox->addItem(tr("Round"), Qt::RoundCap);
penCapLabel = new QLabel(tr("Pen &Cap:"));
penCapLabel->setBuddy(penCapComboBox);
penJoinComboBox = new QComboBox;
penJoinComboBox->addItem(tr("Miter"), Qt::MiterJoin);
penJoinComboBox->addItem(tr("Bevel"), Qt::BevelJoin);
penJoinComboBox->addItem(tr("Round"), Qt::RoundJoin);
penJoinLabel = new QLabel(tr("Pen &Join:"));
penJoinLabel->setBuddy(penJoinComboBox);
//! [3]
//! [4]
brushStyleComboBox = new QComboBox;
brushStyleComboBox->addItem(tr("Linear Gradient"),
static_cast<int>(Qt::LinearGradientPattern));
brushStyleComboBox->addItem(tr("Radial Gradient"),
static_cast<int>(Qt::RadialGradientPattern));
brushStyleComboBox->addItem(tr("Conical Gradient"),
static_cast<int>(Qt::ConicalGradientPattern));
brushStyleComboBox->addItem(tr("Texture"), static_cast<int>(Qt::TexturePattern));
brushStyleComboBox->addItem(tr("Solid"), static_cast<int>(Qt::SolidPattern));
brushStyleComboBox->addItem(tr("Horizontal"), static_cast<int>(Qt::HorPattern));
brushStyleComboBox->addItem(tr("Vertical"), static_cast<int>(Qt::VerPattern));
brushStyleComboBox->addItem(tr("Cross"), static_cast<int>(Qt::CrossPattern));
brushStyleComboBox->addItem(tr("Backward Diagonal"), static_cast<int>(Qt::BDiagPattern));
brushStyleComboBox->addItem(tr("Forward Diagonal"), static_cast<int>(Qt::FDiagPattern));
brushStyleComboBox->addItem(tr("Diagonal Cross"), static_cast<int>(Qt::DiagCrossPattern));
brushStyleComboBox->addItem(tr("Dense 1"), static_cast<int>(Qt::Dense1Pattern));
brushStyleComboBox->addItem(tr("Dense 2"), static_cast<int>(Qt::Dense2Pattern));
brushStyleComboBox->addItem(tr("Dense 3"), static_cast<int>(Qt::Dense3Pattern));
brushStyleComboBox->addItem(tr("Dense 4"), static_cast<int>(Qt::Dense4Pattern));
brushStyleComboBox->addItem(tr("Dense 5"), static_cast<int>(Qt::Dense5Pattern));
brushStyleComboBox->addItem(tr("Dense 6"), static_cast<int>(Qt::Dense6Pattern));
brushStyleComboBox->addItem(tr("Dense 7"), static_cast<int>(Qt::Dense7Pattern));
brushStyleComboBox->addItem(tr("None"), static_cast<int>(Qt::NoBrush));
brushStyleLabel = new QLabel(tr("&Brush:"));
brushStyleLabel->setBuddy(brushStyleComboBox);
//! [4]
//! [5]
otherOptionsLabel = new QLabel(tr("Options:"));
//! [5] //! [6]
antialiasingCheckBox = new QCheckBox(tr("&Antialiasing"));
//! [6] //! [7]
transformationsCheckBox = new QCheckBox(tr("&Transformations"));
//! [7]
//! [8]
connect(shapeComboBox, SIGNAL(activated(int)),
this, SLOT(shapeChanged()));
connect(penWidthSpinBox, SIGNAL(valueChanged(int)),
this, SLOT(penChanged()));
connect(penStyleComboBox, SIGNAL(activated(int)),
this, SLOT(penChanged()));
connect(penCapComboBox, SIGNAL(activated(int)),
this, SLOT(penChanged()));
connect(penJoinComboBox, SIGNAL(activated(int)),
this, SLOT(penChanged()));
connect(brushStyleComboBox, SIGNAL(activated(int)),
this, SLOT(brushChanged()));
connect(antialiasingCheckBox, SIGNAL(toggled(bool)),
renderArea, SLOT(setAntialiased(bool)));
connect(transformationsCheckBox, SIGNAL(toggled(bool)),
renderArea, SLOT(setTransformed(bool)));
//! [8]
//! [9]
QGridLayout *mainLayout = new QGridLayout;
//! [9] //! [10]
mainLayout->setColumnStretch(0, 1);
mainLayout->setColumnStretch(3, 1);
mainLayout->addWidget(renderArea, 0, 0, 1, 4);
mainLayout->addWidget(shapeLabel, 2, 0, Qt::AlignRight);
mainLayout->addWidget(shapeComboBox, 2, 1);
mainLayout->addWidget(penWidthLabel, 3, 0, Qt::AlignRight);
mainLayout->addWidget(penWidthSpinBox, 3, 1);
mainLayout->addWidget(penStyleLabel, 4, 0, Qt::AlignRight);<|fim▁hole|> mainLayout->addWidget(penCapComboBox, 3, 3);
mainLayout->addWidget(penJoinLabel, 2, 2, Qt::AlignRight);
mainLayout->addWidget(penJoinComboBox, 2, 3);
mainLayout->addWidget(brushStyleLabel, 4, 2, Qt::AlignRight);
mainLayout->addWidget(brushStyleComboBox, 4, 3);
mainLayout->addWidget(otherOptionsLabel, 5, 0, Qt::AlignRight);
mainLayout->addWidget(antialiasingCheckBox, 5, 1, 1, 1, Qt::AlignRight);
mainLayout->addWidget(transformationsCheckBox, 5, 2, 1, 2, Qt::AlignRight);
setLayout(mainLayout);
shapeChanged();
penChanged();
brushChanged();
antialiasingCheckBox->setChecked(true);
setWindowTitle(tr("Basic Drawing"));
}
//! [10]
//! [11]
void Window::shapeChanged()
{
RenderArea::Shape shape = RenderArea::Shape(shapeComboBox->itemData(
shapeComboBox->currentIndex(), IdRole).toInt());
renderArea->setShape(shape);
}
//! [11]
//! [12]
void Window::penChanged()
{
int width = penWidthSpinBox->value();
Qt::PenStyle style = Qt::PenStyle(penStyleComboBox->itemData(
penStyleComboBox->currentIndex(), IdRole).toInt());
Qt::PenCapStyle cap = Qt::PenCapStyle(penCapComboBox->itemData(
penCapComboBox->currentIndex(), IdRole).toInt());
Qt::PenJoinStyle join = Qt::PenJoinStyle(penJoinComboBox->itemData(
penJoinComboBox->currentIndex(), IdRole).toInt());
renderArea->setPen(QPen(Qt::blue, width, style, cap, join));
}
//! [12]
//! [13]
void Window::brushChanged()
{
Qt::BrushStyle style = Qt::BrushStyle(brushStyleComboBox->itemData(
//! [13]
brushStyleComboBox->currentIndex(), IdRole).toInt());
//! [14]
if (style == Qt::LinearGradientPattern) {
QLinearGradient linearGradient(0, 0, 100, 100);
linearGradient.setColorAt(0.0, Qt::white);
linearGradient.setColorAt(0.2, Qt::green);
linearGradient.setColorAt(1.0, Qt::black);
renderArea->setBrush(linearGradient);
//! [14] //! [15]
} else if (style == Qt::RadialGradientPattern) {
QRadialGradient radialGradient(50, 50, 50, 70, 70);
radialGradient.setColorAt(0.0, Qt::white);
radialGradient.setColorAt(0.2, Qt::green);
radialGradient.setColorAt(1.0, Qt::black);
renderArea->setBrush(radialGradient);
} else if (style == Qt::ConicalGradientPattern) {
QConicalGradient conicalGradient(50, 50, 150);
conicalGradient.setColorAt(0.0, Qt::white);
conicalGradient.setColorAt(0.2, Qt::green);
conicalGradient.setColorAt(1.0, Qt::black);
renderArea->setBrush(conicalGradient);
//! [15] //! [16]
} else if (style == Qt::TexturePattern) {
renderArea->setBrush(QBrush(QPixmap(":/images/brick.png")));
//! [16] //! [17]
} else {
renderArea->setBrush(QBrush(Qt::green, style));
}
}
//! [17]<|fim▁end|> | mainLayout->addWidget(penStyleComboBox, 4, 1);
mainLayout->addWidget(penCapLabel, 3, 2, Qt::AlignRight); |
<|file_name|>category.js<|end_file_name|><|fim▁begin|>/// <reference path="/Content/base/ro/js/rocms.helpers.js" />
/// <reference path="base.js" />
function mapCategoriesToIds(categories) {
var result = $(categories).map(function () {
return {
heartId: this.heartId,
childrenCategories: this.childrenCategories ? mapCategoriesToIds(this.childrenCategories) : []
}
}).get();
return result;
}
function categoriesEditorLoaded(onSelected, context) {
blockUI();
if (context) {
$(context).on("click",
".category .toggler",
function () {
$(this).closest(".category").find(".child-categories").first().collapse('toggle');
var toggler = $(this);
if (toggler.is(".collapsed2")) {
toggler.removeClass("collapsed2");
} else {
toggler.addClass("collapsed2");
}
});
} else {
$("#categoryEditor").on("click",
".category .toggler",
function () {
$(this).closest(".category").find(".child-categories").first().collapse('toggle');
var toggler = $(this);
if (toggler.is(".collapsed2")) {
toggler.removeClass("collapsed2");
} else {
toggler.addClass("collapsed2");
}
});
}
var vm = {
childrenCategories: ko.observableArray(),
orderEditingEnabled: ko.observable(false),
createCategory: function () {
var self = this;
var category = $.extend(new App.Admin.Shop.Category(), App.Admin.Shop.CategoryFunctions);
category.newCategory(function () {
self.childrenCategories.push(category);
});
},
selectCategory: function (item) {
if (onSelected) {
onSelected(item);
}
},
enableEditOrder: function () {
this.orderEditingEnabled(!this.orderEditingEnabled());
},
saveOrder: function () {
blockUI();
var cats = mapCategoriesToIds(ko.toJS(this.childrenCategories));
postJSON("/api/shop/categories/order/update", cats,
function (result) {
})
.fail(function () {
smartAlert("Произошла ошибка. Если она будет повторяться - обратитесь к разработчикам.");
})
.always(function () {
unblockUI();
});
}
};
getJSON("/api/shop/categories/null/get", "", function (result) {
$(result).each(function () {
var res = $.extend(ko.mapping.fromJS(this, App.Admin.Shop.CategoryValidationMapping), App.Admin.Shop.CategoryFunctions);
res.hasChildren = ko.observable(true);
vm.childrenCategories.push(res);
});
})
.fail(function () {
smartAlert("Произошла ошибка. Если она будет повторяться - обратитесь к разработчикам.");
})
.always(function () {
unblockUI();
});
if (context) {
ko.applyBindings(vm, context[0]);
} else {
ko.applyBindings(vm);
}
}
App.Admin.Shop.CategoryValidationMapping = {
name: {
create: function (options) {
var res = ko.observable(options.data).extend({ required: true });
return res;
}
},
childrenCategories: {
create: function (options) {
            // written for a single element of the array
var res = $.extend(ko.mapping.fromJS(options.data, App.Admin.Shop.CategoryValidationMapping), App.Admin.Shop.CategoryFunctions);
return res;
}
},
parentCategory: {
create: function (options) {
if (options.data) {
var res = ko.observable(options.data);
return res;
} else {
return ko.observable({ name: "" });
}
}
}
};
$.extend(App.Admin.Shop.CategoryValidationMapping, App.Admin.HeartValidationMapping);
App.Admin.Shop.Category = function () {
var self = this;
$.extend(self, new App.Admin.Heart());
self.name = ko.observable().extend({ required: true });
self.description = ko.observable();
self.parentCategoryId = ko.observable();
self.imageId = ko.observable();
self.childrenCategories = ko.observableArray();
self.parentCategory = ko.observable({ name: "" });
self.hidden = ko.observable(false);
self.orderFormSpecs = ko.observableArray();
self.hasChildren = ko.observable(true);
//self.init = function (data) {
// self.heartId(data.heartId);
// self.parentCategoryId(data.parentCategoryId);
// self.name(data.name);
// self.description(data.description);
// self.imageId(data.imageId);
// self.hidden(data.hidden);
// if (data.parentCategory) {
// self.parentCategory(data.parentCategory);
// }
// $(data.childrenCategories).each(function () {
// self.childrenCategories.push(new App.Admin.Shop.Category(this));
// });
// $(data.orderFormSpecs).each(function () {
// self.orderFormSpecs.push(new App.Admin.Spec(this));
// });
//};
//if (data)
// self.init(data);
}
App.Admin.Shop.CategoryFunctions = {
initCategory: function () {
var self = this;
self.initHeart();
//var cats = self.childrenCategories();
//self.childrenCategories.removeAll();
//$(cats).each(function () {
// var res = $.extend(ko.mapping.fromJS(this, App.Admin.Shop.CategoryValidationMapping), App.Admin.Shop.CategoryFunctions);
// res.initCategory();
// self.childrenCategories.push(res);
//});
//$(data.orderFormSpecs).each(function () {
// self.orderFormSpecs.push(new App.Admin.Spec(this));
//});
if ($("#categoryDescription").length) {
$("#categoryDescription").val(self.description());
initContentEditor();
}
self.name.subscribe(function (val) {
if (val) {
if (!self.title()) {
self.title(val);
}
if (!self.description()) {
self.description(val);
}
}
});
},
prepareCategoryForUpdate: function () {
var self = this;
self.prepareHeartForUpdate();
var text = getTextFromEditor('categoryDescription');
if (text) {
self.description(text);
}
},
loadChildren: function (onSuccess) {
var self = this;
if (self.childrenCategories().length > 0) {
return;
};
getJSON("/api/shop/categories/" + self.heartId() + "/get", "", function (result) {
if (result.length == 0) {
self.hasChildren(false);
}
$(result).each(function () {
var res = $.extend(ko.mapping.fromJS(this, App.Admin.Shop.CategoryValidationMapping), App.Admin.Shop.CategoryFunctions);
res.hasChildren = ko.observable(true);
self.childrenCategories.push(res);
});
if (onSuccess) {
onSuccess();
}
})
.fail(function () {
smartAlert("Произошла ошибка. Если она будет повторяться - обратитесь к разработчикам.");
})
.always(function () {
unblockUI();
});
},
addChild: function () {
var self = this;
var category = $.extend(new App.Admin.Shop.Category(), App.Admin.Shop.CategoryFunctions);
category.parentCategoryId(self.heartId());
category.parentCategory().name = self.name();
category.parentCategory().id = self.heartId();
category.parentHeartId(self.heartId());
category.newCategory(function () {
if (self.childrenCategories().length > 0) {<|fim▁hole|> self.loadChildren(function () {
//self.childrenCategories.push(category);
});
}
self.hasChildren(true);
});
},
clearParentCategory: function () {
var self = this;
self.parentCategoryId("");
self.parentCategory({ name: "" });
},
editParentCategory: function () {
var self = this;
showCategoriesDialog(function (result) {
if (result.id != self.heartId()) {
self.parentCategory(result);
self.parentCategoryId(result.id);
}
else {
alert("Нельзя установить родительской категорией текущую категорию");
}
});
},
newCategory: function (onCreate) {
var self = this;
self.dialog("/api/shop/category/create", function () {
if (onCreate) {
onCreate();
}
});
},
edit: function () {
var self = this;
self.dialog("/api/shop/category/update", function () {
});
},
save: function (url, onSuccess) {
var self = this;
blockUI();
postJSON(url, ko.toJS(self), function (result) {
if (result.succeed) {
if (onSuccess) {
onSuccess(result.data);
}
}
})
.fail(function () {
smartAlert("Произошла ошибка. Если она будет повторяться - обратитесь к разработчикам.");
})
.always(function () {
unblockUI();
});
},
remove: function (item, parent) {
var self = this;
if (self.heartId()) {
blockUI();
var url = "/api/shop/category/" + self.heartId() + "/delete";
postJSON(url, "", function (result) {
if (result.succeed) {
parent.childrenCategories.remove(item);
if (parent.childrenCategories().length === 0) {
parent.hasChildren(false);
}
}
})
.fail(function () {
smartAlert("Произошла ошибка. Если она будет повторяться - обратитесь к разработчикам.");
})
.always(function () {
unblockUI();
});
}
},
pickImage: function () {
var self = this;
showImagePickDialog(function (imageData) {
self.imageId(imageData.ID);
$('.remove-image').show();
});
},
removeImage: function () {
var self = this;
self.imageId("");
$('.remove-image').hide();
},
addSpec: function () {
var self = this;
showSpecDialog(function (item) {
var result = $.grep(self.orderFormSpecs(), function (e) {
return e.specId() === item.specId();
});
if (result.length === 0) {
self.orderFormSpecs.push(item);
}
});
},
removeSpec: function (spec, parent) {
var self = this;
parent.orderFormSpecs.remove(function (item) {
return item.specId() === spec.specId();
});
},
moveUp: function (item, parent) {
var self = this;
var index = parent.childrenCategories.indexOf(item);
if (index <= 0) return false;
parent.childrenCategories.remove(item);
parent.childrenCategories.splice(index - 1, 0, item);
},
moveDown: function (item, parent) {
var self = this;
var index = parent.childrenCategories.indexOf(item);
if (index == parent.childrenCategories.length - 1) return false;
parent.childrenCategories.remove(item);
parent.childrenCategories.splice(index + 1, 0, item);
},
dialog: function (url, onSuccess) {
var self = this;
var dm = ko.validatedObservable(self);
var dialogContent = $("#categoryTemplate").tmpl();
var options = {
title: "Категория",
width: 900,
height: 650,
resizable: false,
modal: true,
open: function () {
var $form = $(this).find('form');
self.initCategory();
var parents = ko.observableArray();
parents.push({ title: "Нет", heartId: null, type: "Выберите..." });
if (self.parentCategoryId() && self.parentCategory()) {
parents.push({ heartId: self.parentCategory().id, title: self.parentCategory().name, type: 'Категории' });
}
var that = this;
var vm = {
dm: dm,
parents: parents
}
self.parentCategory.subscribe(function () {
vm.parents.removeAll();
vm.parents.push({ title: "Нет", heartId: null, type: "Выберите..." });
if (self.parentCategory().name) {
vm.parents.push({
heartId: self.parentCategory().id,
title: self.parentCategory().name,
type: 'Категории'
});
self.parentHeartId(self.parentCategory().id);
}
self.parentHeartId.notifySubscribers();
setTimeout(function () {
$(".withsearch").selectpicker('refresh');
}, 100);
});
ko.applyBindings(vm, that);
setTimeout(function () {
$(".withsearch").selectpicker();
}, 100);
},
buttons: [
{
text: "Сохранить",
click: function () {
var $form = $(this).find('form');
self.prepareCategoryForUpdate();
var $dialog = $(this);
//if ($("#categoryDescription", $form).length) {
// $("#categoryDescription", $form).val(self.description());
// initContentEditor();
//}
if (dm.isValid()) {
self.save(url,
function (result) {
if (result) {
self.heartId(result.id);
}
if (onSuccess) {
onSuccess();
}
$dialog.dialog("close");
});
} else {
dm.errors.showAllMessages();
}
}
},
{
text: "Закрыть",
click: function () {
$(this).dialog("close");
}
}
],
close: function () {
$(this).dialog('destroy');
dialogContent.remove();
}
};
dialogContent.dialog(options);
return dialogContent;
}
};
$.extend(App.Admin.Shop.CategoryFunctions, App.Admin.HeartFunctions);
function showCategoriesDialog(onSelected) {
blockUI();
var options = {
title: "Категории",
modal: true,
draggable: false,
resizable: false,
width: 900,
height: 650,
open: function () {
var $dialog = $(this).dialog("widget");
var that = this;
unblockUI();
categoriesEditorLoaded(function (item) {
if (onSelected) {
onSelected({ id: item.heartId(), name: item.name() });
}
$(that).dialog("close");
}, $dialog);
}
};
showDialogFromUrl("/ShopEditor/CategoriesEditor", options);
}<|fim▁end|> | self.childrenCategories.push(category);
}
else { |
<|file_name|>json-ui.component.ts<|end_file_name|><|fim▁begin|>/*
* Squidex Headless CMS
*
* @license<|fim▁hole|> * Copyright (c) Squidex UG (haftungsbeschränkt). All rights reserved.
*/
import { Component, Input } from '@angular/core';
import { FormGroup } from '@angular/forms';
import { FieldDto, JsonFieldPropertiesDto } from '@app/shared';
@Component({
selector: 'sqx-json-ui[field][fieldForm][properties]',
styleUrls: ['json-ui.component.scss'],
templateUrl: 'json-ui.component.html',
})
export class JsonUIComponent {
@Input()
public fieldForm!: FormGroup;
@Input()
public field!: FieldDto;
@Input()
public properties!: JsonFieldPropertiesDto;
}<|fim▁end|> | |
<|file_name|>request_headers.rs<|end_file_name|><|fim▁begin|>use std::fmt::{self};
use header;
/// The `Access-Control-Request-Headers` request header,
/// part of [CORS](http://www.w3.org/TR/cors/).
///
/// > The `Access-Control-Request-Headers` header indicates which headers will
/// > be used in the actual request as part of the preflight request.
///
/// Spec: www.w3.org/TR/cors/#access-control-request-headers-request-header
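///
/// Illustrative construction (a sketch added for clarity, not from the
/// original crate docs): the type below is a newtype over a `Vec<String>` of
/// header names, so a value can be built directly.
///
/// ```ignore
/// // hypothetical header names, for illustration only
/// let requested = AccessControlRequestHeaders(vec!["content-type".to_owned()]);
/// ```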
#[derive(Clone, PartialEq, Debug)]
pub struct AccessControlRequestHeaders(pub Vec<String>);
impl header::Header for AccessControlRequestHeaders {
#[inline]
fn header_name() -> &'static str {
"Access-Control-Request-Headers"
}
<|fim▁hole|>}
impl header::HeaderFormat for AccessControlRequestHeaders {
fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {
let AccessControlRequestHeaders(ref parts) = *self;
header::parsing::fmt_comma_delimited(f, parts.as_ref())
}
}<|fim▁end|> | fn parse_header(raw: &[Vec<u8>]) -> Option<AccessControlRequestHeaders> {
header::parsing::from_comma_delimited(raw).map(AccessControlRequestHeaders)
} |
<|file_name|>QuestionService.ts<|end_file_name|><|fim▁begin|>import Promise = require('bluebird');
import _ = require("underscore");
import CategoryRepository = require("./../datagateway/repository/CategoryRepository");
import ICategoryRepository = require("./../datagateway/repository/ICategoryRepository");
import IQuestionRepository = require("./../datagateway/repository/IQuestionRepository");
import IQuestionOptionRepository = require("./../datagateway/repository/IQuestionOptionRepository");
import QuestionRepository = require("./../datagateway/repository/QuestionRepository");
import QuestionOptionRepository = require("./../datagateway/repository/QuestionOptionRepository");
import ServiceBase = require("./ServiceBase");
import IQuestionService = require("./IQuestionService");
import ICategoryModel = require("./../domainmodel/ICategoryModel");
import IQuestionModel = require("./../domainmodel/IQuestionModel");
import IQuestionOptionModel = require("./../domainmodel/IQuestionOptionModel");
import Utilities = require("./../domainmodel/Utilities");
class QuestionService extends ServiceBase<IQuestionModel> implements IQuestionService {
private _categoryRep: ICategoryRepository;
private _questionRep: IQuestionRepository;
private _questionOptionRep: IQuestionOptionRepository;
constructor();
constructor(questionRep: IQuestionRepository = new QuestionRepository()) {
super(questionRep);
this._questionRep = questionRep;
this._questionOptionRep = new QuestionOptionRepository();
this._categoryRep = new CategoryRepository();
}
updateOption(option: IQuestionOptionModel, callback: (error: any, result: any) => void): void {
let returnOption: IQuestionOptionModel;
Promise.resolve(this._questionOptionRep.findById(option._id))
.then((dbOption: IQuestionOptionModel) => {
if (Utilities.isNullorEmpty(dbOption)) {
                    throw new Error("the question option does not exist, please try another!");
}
returnOption = dbOption;
return dbOption;
})
.then((dbOption: IQuestionOptionModel) => {
return this._questionOptionRep.update(option._id, { answer: option.answer });
})
.then((updateInfo: any) => {
returnOption.answer = option.answer;
callback(null, returnOption);
})
.catch((error: any) => {
callback(error, null);
});
}
getQuestionById(questionId, callback: (error: any, result: any) => void): void {
Promise.resolve(this._questionRep.getQuestionById(questionId))
            .then((findQuestion: IQuestionModel) => {
                if (Utilities.isNullorEmpty(findQuestion)) {
                    throw new Error("the question does not exist, please try another!");
                }
                return findQuestion;
}).then((item: IQuestionModel) => {
callback(null, item);
}).catch((error: any) => {
callback(error, null);
});
}
getQuestions(title: string, page: number, size: number, callback: (error: any, result: any) => void): void {
let totalCount: number = 0;
let skip: number = ((page - 1) * size);
let cond: Object = {};
if (!Utilities.isNullorEmpty(title)) {
cond = { title: { $regex: title, $options:"i" } };
}
Promise.resolve(this._questionRep.count(cond))
.then((totalNum: number) => {
totalCount = totalNum;
return totalNum;
})
.then((count: number) => {
console.log(title);
return this._questionRep.getQuestions(cond, skip, size);
})
.then((questionDataList: IQuestionModel[]) => {
let flatQuestions = _.map(questionDataList, (item: IQuestionModel) => {
return {
_id: item._id,
category: item.category.name,
title: item.title,
tip: item.tip,
create_date: item.create_date
};
});
callback(null, {
totalNum: totalCount,
data: flatQuestions
});
})
.catch((error: any) => {
callback(error, null);
});
}
private validtorOptions(question: IQuestionModel): void {
if (question.options.length !== 4) {
throw new Error("the question's option only have four item!");
}
let answerCount = _.countBy(question.options, (item: IQuestionOptionModel) => {
return item.isCorrect ? "Right" : "Wrong";
});
if (answerCount["Right"] !== 1) {
            throw new Error("the question's options must contain exactly one right answer!");
}
let checkAnswerField = _.every(question.options, (item: IQuestionOptionModel) => {
return _.has(item, "isCorrect");
});
if (!checkAnswerField) {
            throw new Error("the question's options must have an answer field, e.g. isCorrect!");
}
}
createQuestion(question: IQuestionModel, callback: (error: any, result: any) => void) {
this.validtorOptions(question);
let category: ICategoryModel;
Promise.resolve(this._categoryRep.findById(question.category))
.then((cat: ICategoryModel) => {
if (Utilities.isNullorEmpty(cat)) {
                    throw new Error("the question category does not exist, please try another!");
}
return cat;
})
.then((cat: ICategoryModel) => {
category = cat;
return this._questionRep.findOne({ title: question.title, category: question.category });
})
.then((findQuestions: IQuestionModel) => {
if (!Utilities.isNullorEmpty(findQuestions)) {
                    throw new Error("the question already exists under this category, please try another!");
}
if (!Utilities.isNullorEmpty(category.childrens)) {
                    throw new Error("the question category must be a leaf-level category (one without child categories), please try another!");
}
return this._questionOptionRep.createList(question.options);
})
.then((questionOptions: IQuestionOptionModel[]) => {
let rightAnswer = _.find(questionOptions, (item: IQuestionOptionModel) => {
return item.isCorrect ? item : null;
});
question.options = questionOptions;
question.correct = rightAnswer;
question.category = category;
return this._questionRep.create(question);
})
.then((qes: IQuestionModel) => {
callback(null, qes);
})
.catch((error: any) => {
callback(error, null);
});
}
updateQuestion(question: IQuestionModel, callback: (error: any, result: any) => void) {
this.validtorOptions(question);
let dbCategory: ICategoryModel;
let dbQuestionInfo: any;
let dbOptionIdsList = [];
let dbRightOptionId: any;
let rightOptionId: any;
Promise.resolve(this._categoryRep.findById(question.category))
.then((cat: ICategoryModel) => {
if (Utilities.isNullorEmpty(cat)) {
                    throw new Error("the question category does not exist, please try another!");
}
return cat;
})
.then((cat: ICategoryModel) => {
if (!Utilities.isNullorEmpty(cat.childrens)) {
                    throw new Error("the question category must be a leaf-level category (one without child categories), please try another!");
}
dbCategory = cat;
return this._questionRep.findById(question._id);
})
.then((getQuestion: IQuestionModel) => {
if (Utilities.isNullorEmpty(getQuestion)) {
                    throw new Error("the question cannot be found, please try another!");
}
_.forEach(question.options, (item: IQuestionOptionModel) => {
dbOptionIdsList.push(item._id);
});
return this._questionOptionRep.retrieve({ _id: { $in: dbOptionIdsList } });
})
.then((options: IQuestionOptionModel[]) => {
if (options.length !== 4) {
                    throw new Error("the question's options do not match the original ones!");
}
let newAnswer = _.find(question.options, (item: IQuestionOptionModel) => {
return item.isCorrect;
});<|fim▁hole|> let dbAnswer = _.find(options, (item: IQuestionOptionModel) => {
return item.isCorrect;
});
rightOptionId = newAnswer._id;
dbRightOptionId = dbAnswer._id;
let updateOption = { category: dbCategory._id, tip: question.tip, title: question.title, correct: rightOptionId };
return this._questionRep.update(question._id, updateOption);
})
.then((tmpQuestion: any) => {
dbQuestionInfo = tmpQuestion;
return this._questionOptionRep.update(dbRightOptionId, { isCorrect: false });
})
.then((updateWrongInfo: any) => {
return this._questionOptionRep.update(rightOptionId, { isCorrect: true });
})
.then((updateRightInfo: any) => {
callback(null, dbQuestionInfo);
})
.catch((error: any) => {
callback(error, null);
});
}
removeQuestion(id: string, callback: (error: any, result: any) => void) {
let dbQuestion: IQuestionModel;
let questionDelInfo: any;
Promise.resolve(this._questionRep.findById(id))
.then((item: IQuestionModel) => {
if (Utilities.isNullorEmpty(item)) {
                    throw new Error("the question cannot be found, please try another!");
}
dbQuestion = item;
return this._questionRep.remove(id);
}).then((delInfo: any) => {
questionDelInfo = delInfo;
return this._questionOptionRep.removeList({ _id: { $in: dbQuestion.options } });
}).then((delInfo: any) => {
callback(null, questionDelInfo);
}).catch((error: any) => {
callback(error, null);
});
}
}
Object.seal(QuestionService);
export = QuestionService;<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
extern crate multihash;
extern crate cid;
extern crate unicase;<|fim▁hole|>extern crate ethcore_bytes as bytes;
extern crate jsonrpc_core as core;
extern crate jsonrpc_http_server as http;
pub mod error;
mod route;
use std::thread;
use std::sync::{mpsc, Arc};
use std::net::{SocketAddr, IpAddr};
use core::futures::future::{self, FutureResult};
use core::futures::{self, Future};
use ethcore::client::BlockChainClient;
use http::hyper::header::{self, Vary, ContentType};
use http::hyper::{Method, StatusCode};
use http::hyper::{self, server};
use unicase::Ascii;
use error::ServerError;
use route::Out;
pub use http::{AccessControlAllowOrigin, Host, DomainsValidation};
/// Request/response handler
pub struct IpfsHandler {
/// Allowed CORS domains
cors_domains: Option<Vec<AccessControlAllowOrigin>>,
/// Hostnames allowed in the `Host` request header
allowed_hosts: Option<Vec<Host>>,
/// Reference to the Blockchain Client
client: Arc<BlockChainClient>,
}
impl IpfsHandler {
pub fn client(&self) -> &BlockChainClient {
&*self.client
}
pub fn new(cors: DomainsValidation<AccessControlAllowOrigin>, hosts: DomainsValidation<Host>, client: Arc<BlockChainClient>) -> Self {
IpfsHandler {
cors_domains: cors.into(),
allowed_hosts: hosts.into(),
client: client,
}
}
pub fn on_request(&self, req: hyper::Request) -> (Option<header::AccessControlAllowOrigin>, Out) {
match *req.method() {
Method::Get | Method::Post => {},
_ => return (None, Out::Bad("Invalid Request")),
}
if !http::is_host_allowed(&req, &self.allowed_hosts) {
return (None, Out::Bad("Disallowed Host header"));
}
let cors_header = http::cors_header(&req, &self.cors_domains);
if cors_header == http::CorsHeader::Invalid {
return (None, Out::Bad("Disallowed Origin header"));
}
let path = req.uri().path();
let query = req.uri().query();
return (cors_header.into(), self.route(path, query));
}
}
impl server::Service for IpfsHandler {
type Request = hyper::Request;
type Response = hyper::Response;
type Error = hyper::Error;
type Future = FutureResult<hyper::Response, hyper::Error>;
fn call(&self, request: Self::Request) -> Self::Future {
let (cors_header, out) = self.on_request(request);
let mut res = match out {
Out::OctetStream(bytes) => {
hyper::Response::new()
.with_status(StatusCode::Ok)
.with_header(ContentType::octet_stream())
.with_body(bytes)
},
Out::NotFound(reason) => {
hyper::Response::new()
.with_status(StatusCode::NotFound)
.with_header(ContentType::plaintext())
.with_body(reason)
},
Out::Bad(reason) => {
hyper::Response::new()
.with_status(StatusCode::BadRequest)
.with_header(ContentType::plaintext())
.with_body(reason)
}
};
if let Some(cors_header) = cors_header {
res.headers_mut().set(cors_header);
res.headers_mut().set(Vary::Items(vec![Ascii::new("Origin".into())]));
}
future::ok(res)
}
}
/// Add current interface (default: "127.0.0.1:5001") to list of allowed hosts
fn include_current_interface(mut hosts: Vec<Host>, interface: String, port: u16) -> Vec<Host> {
hosts.push(match port {
80 => interface,
_ => format!("{}:{}", interface, port),
}.into());
hosts
}
#[derive(Debug)]
pub struct Listening {
close: Option<futures::sync::oneshot::Sender<()>>,
thread: Option<thread::JoinHandle<()>>,
}
impl Drop for Listening {
fn drop(&mut self) {
self.close.take().unwrap().send(()).unwrap();
let _ = self.thread.take().unwrap().join();
}
}
pub fn start_server(
port: u16,
interface: String,
cors: DomainsValidation<AccessControlAllowOrigin>,
hosts: DomainsValidation<Host>,
client: Arc<BlockChainClient>
) -> Result<Listening, ServerError> {
let ip: IpAddr = interface.parse().map_err(|_| ServerError::InvalidInterface)?;
let addr = SocketAddr::new(ip, port);
let hosts: Option<Vec<_>> = hosts.into();
let hosts: DomainsValidation<_> = hosts.map(move |hosts| include_current_interface(hosts, interface, port)).into();
let (close, shutdown_signal) = futures::sync::oneshot::channel::<()>();
let (tx, rx) = mpsc::sync_channel(1);
let thread = thread::spawn(move || {
let send = |res| tx.send(res).expect("rx end is never dropped; qed");
let server = match server::Http::new().bind(&addr, move || {
Ok(IpfsHandler::new(cors.clone(), hosts.clone(), client.clone()))
}) {
Ok(server) => {
send(Ok(()));
server
},
Err(err) => {
send(Err(err));
return;
}
};
let _ = server.run_until(shutdown_signal.map_err(|_| {}));
});
	// Wait for server to start successfully.
rx.recv().expect("tx end is never dropped; qed")?;
Ok(Listening {
close: close.into(),
thread: thread.into(),
})
}<|fim▁end|> |
extern crate rlp;
extern crate ethcore;
extern crate ethcore_bigint as bigint; |
<|file_name|>variance-regions-direct.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we correctly infer variance for region parameters in
// various self-contained types.
// Regions that just appear in normal spots are contravariant:
#[rustc_variance]
struct Test2<'a, 'b, 'c> { //~ ERROR regions=[[-, -, -];[];[];[]]
x: &'a int,
y: &'b [int],
c: &'c str
}
// Those same annotations in function arguments become covariant:
#[rustc_variance]<|fim▁hole|>}
// Mutability induces invariance:
#[rustc_variance]
struct Test4<'a, 'b:'a> { //~ ERROR regions=[[-, o];[];[];[]]
x: &'a mut &'b int,
}
// Mutability induces invariance, even when in a
// contravariant context:
#[rustc_variance]
struct Test5<'a, 'b> { //~ ERROR regions=[[+, o];[];[];[]]
x: extern "Rust" fn(&'a mut &'b int),
}
// Invariance is a trap from which NO ONE CAN ESCAPE.
// In other words, even though the `&'b int` occurs in
// an argument list (which is contravariant), that
// argument list occurs in an invariant context.
#[rustc_variance]
struct Test6<'a, 'b> { //~ ERROR regions=[[-, o];[];[];[]]
x: &'a mut extern "Rust" fn(&'b int),
}
// No uses at all is bivariant:
#[rustc_variance]
struct Test7<'a> { //~ ERROR regions=[[*];[];[];[]]
x: int
}
// Try enums too.
#[rustc_variance]
enum Test8<'a, 'b, 'c:'b> { //~ ERROR regions=[[+, -, o];[];[];[]]
Test8A(extern "Rust" fn(&'a int)),
Test8B(&'b [int]),
Test8C(&'b mut &'c str),
}
fn main() {}<|fim▁end|> | struct Test3<'a, 'b, 'c> { //~ ERROR regions=[[+, +, +];[];[];[]]
x: extern "Rust" fn(&'a int),
y: extern "Rust" fn(&'b [int]),
c: extern "Rust" fn(&'c str), |
<|file_name|>rfc3414.py<|end_file_name|><|fim▁begin|>#
# This file is part of pyasn1-modules software.
#
# Copyright (c) 2005-2017, Ilya Etingof <[email protected]>
# License: http://pyasn1.sf.net/license.html
#
# SNMPv3 message syntax
#<|fim▁hole|>from pyasn1.type import constraint
from pyasn1.type import namedtype
from pyasn1.type import univ
class UsmSecurityParameters(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('msgAuthoritativeEngineID', univ.OctetString()),
namedtype.NamedType('msgAuthoritativeEngineBoots',
univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
namedtype.NamedType('msgAuthoritativeEngineTime',
univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))),
namedtype.NamedType('msgUserName',
univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, 32))),
namedtype.NamedType('msgAuthenticationParameters', univ.OctetString()),
namedtype.NamedType('msgPrivacyParameters', univ.OctetString())
)<|fim▁end|> | # ASN.1 source from:
# http://www.ietf.org/rfc/rfc3414.txt
# |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Created on 2013-10-11
#
# @author: Bartosz Nowak [email protected]
#
# This file is licensed GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007
from __future__ import unicode_literals
import unicodecsv
from datetime import datetime, timedelta
from pytz import timezone
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.views.generic import UpdateView, FormView, TemplateView, CreateView
from neonet.views import LoggedInMixin
from DamageReports import models
from DamageReports import forms
class DamageReports(LoggedInMixin, FormView):
template_name = 'DamageReports/list.html'
form_class = forms.DamageReportsDateFilter
now = datetime.now(timezone('Europe/Warsaw'))
yesterday = now - timedelta(days=1)
initial = {'date_from': yesterday, 'date_to': now}
def form_valid(self, form):
reports = models.DamageReport.objects.select_related('commodity').filter(date__range=(
form.cleaned_data['date_from'], form.cleaned_data['date_to']))
return self.render_to_response(self.get_context_data(form=form, reports=reports))
class DamageReportsCreate(LoggedInMixin, CreateView):
model = models.DamageReport
template_name = 'DamageReports/create.html'
form_class = forms.DamageReportForm
now = datetime.now(timezone('Europe/Warsaw'))
initial = {'date': now}
def get_success_url(self):
return reverse('DamageReports:damage_reports_view')
def form_valid(self, form):
report = form.save(commit=False)<|fim▁hole|>
class DamageReportsUpdate(LoggedInMixin, UpdateView):
model = models.DamageReport
template_name = 'DamageReports/update.html'
form_class = forms.DamageReportForm
def get_success_url(self):
return reverse('DamageReports:list')
def get_initial(self):
initial = self.initial.copy()
initial['ean'] = self.get_object().commodity.ean
return initial
class DamageReportsExport(LoggedInMixin, FormView):
template_name = 'DamageReports/export.html'
form_class = forms.DamageReportsDateFilter
now = datetime.now(timezone('Europe/Warsaw'))
yesterday = now - timedelta(days=1)
initial = {'date_from': yesterday, 'date_to': now}
def form_valid(self, form):
response = HttpResponse(content_type='text/csv')
response['content-disposition'] = 'attachment; filename="reports.csv.txt"'
data = models.DamageReport.objects.\
select_related('commodity', 'detection_time', 'category', 'further_action', 'user').\
filter(date__range=(form.cleaned_data['date_from'], form.cleaned_data['date_to']))
writer = unicodecsv.writer(response, delimiter=b';')
if not data:
            writer.writerow(['Nie znaleziono żadnych raportów'])
else:
for report in data:
row = ['', unicode(report.date), report.brand, report.commodity.__unicode__(), report.serial,
report.detection_time.detection_time, report.category.category, report.comments,
report.further_action.further_action, '', '',
(report.user.first_name + ' ' + report.user.last_name)
]
row = [element.strip() for element in row]
writer.writerow(row)
return response
class DamageReportsCharts(LoggedInMixin, TemplateView):
template_name = 'DamageReports/charts.html'
def get_context_data(self, **kwargs):
context = super(DamageReportsCharts, self).get_context_data(**kwargs)
context['chart'] = self._view()
return context
def _view(self):
self.a = {}
self.b = {}
self.c = {}
objects = models.DamageReport.objects.select_related('category').order_by('-date')
for report in objects:
_date = report.day_str()
if _date not in self.a:
self.a[_date] = 0
if _date not in self.b:
self.b[_date] = 0
if _date not in self.c:
self.c[_date] = 0
getattr(self, report.category.category.lower())[_date] += 1
reports = [{'data': [], 'name': 'A'},
{'data': [], 'name': 'B'},
{'data': [], 'name': 'C'}]
for k, v in self.a.iteritems():
reports[0]['data'].append([k, v])
for k, v in self.b.iteritems():
reports[1]['data'].append([k, v])
for k, v in self.c.iteritems():
reports[2]['data'].append([k, v])
return reports<|fim▁end|> | report.user = self.request.user
report.save()
return super(DamageReportsCreate, self).form_valid(form) |
<|file_name|>winston-syslog.js<|end_file_name|><|fim▁begin|>/*
* syslog.js: Transport for logging to a remote syslog consumer
*
* (C) 2011 Squeeks and Charlie Robbins
* MIT LICENCE
*
*/
var dgram = require('dgram'),
net = require('net'),
util = require('util'),
glossy = require('glossy'),
winston = require('winston'),
unix = require('unix-dgram'),
os = require('os');
var levels = Object.keys({
debug: 0,
info: 1,
notice: 2,
warning: 3,
error: 4,
crit: 5,
alert: 6,
emerg: 7
});
//
// ### function Syslog (options)
// #### @options {Object} Options for this instance.
// Constructor function for the Syslog Transport capable of sending
// RFC 3164 and RFC 5424 compliant messages.
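//
// Illustrative usage (a sketch for clarity, not taken from this file; the
// option names mirror the fields read in the constructor below, e.g. `host`,
// `port`, `protocol`):
//
//   var syslogTransport = new Syslog({ host: 'localhost', port: 514, protocol: 'udp4' });
//   // the instance can then be registered with a winston logger as a transport
//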
//
var Syslog = exports.Syslog = function (options) {
winston.Transport.call(this, options);
options = options || {};
// Set transport name
this.name = 'syslog';
//
// Setup connection state<|fim▁hole|> this.retries = 0;
this.queue = [];
//
// Merge the options for the target Syslog server.
//
this.host = options.host || 'localhost';
this.port = options.port || 514;
this.path = options.path || null;
this.protocol = options.protocol || 'udp4';
this.isDgram = /^udp|unix/.test(this.protocol);
if (!/^udp|unix|tcp/.test(this.protocol)) {
throw new Error('Invalid syslog protocol: ' + this.protocol);
}
if (/^unix/.test(this.protocol) && !this.path) {
throw new Error('`options.path` is required on unix dgram sockets.');
}
//
// Merge the default message options.
//
this.localhost = options.localhost || os.hostname();
this.type = options.type || 'BSD';
this.facility = options.facility || 'local0';
this.pid = options.pid || process.pid;
this.app_name = options.app_name || process.title;
//
// Setup our Syslog and network members for later use.
//
this.socket = null;
this.producer = new glossy.Produce({
type: this.type,
appName: this.app_name,
pid: this.pid,
facility: this.facility
});
};
//
// Inherit from `winston.Transport`.
//
util.inherits(Syslog, winston.Transport);
//
// Define a getter so that `winston.transports.Syslog`
// is available and thus backwards compatible.
//
winston.transports.Syslog = Syslog;
//
// ### function log (level, msg, [meta], callback)
// #### @level {string} Target level to log to
// #### @msg {string} Message to log
// #### @meta {Object} **Optional** Additional metadata to log.
// #### @callback {function} Continuation to respond to when complete.
// Core logging method exposed to Winston. Logs the `msg` and optional
// metadata, `meta`, to the specified `level`.
//
Syslog.prototype.log = function (level, msg, meta, callback) {
var self = this,
data = meta ? winston.clone(meta) : {},
syslogMsg,
buffer;
if (!~levels.indexOf(level)) {
return callback(new Error('Cannot log unknown syslog level: ' + level));
}
data.message = msg;
syslogMsg = this.producer.produce({
severity: level,
host: this.localhost,
date: new Date(),
message: meta ? JSON.stringify(data) : msg
});
//
// Attempt to connect to the socket
//
this.connect(function (err) {
if (err) {
//
// If there was an error enqueue the message
//
return self.queue.push(syslogMsg);
}
//
// On any error writing to the socket, enqueue the message
//
function onError (logErr) {
if (logErr) { self.queue.push(syslogMsg) }
self.emit('logged');
}
//
// Write to the `tcp*`, `udp*`, or `unix` socket.
//
if (self.isDgram) {
buffer = new Buffer(syslogMsg);
if (self.protocol.match(/^udp/)) {
self.socket.send(buffer, 0, buffer.length, self.port, self.host, onError);
}
else {
self.socket.send(buffer, 0, buffer.length, self.path, onError);
}
}
else {
self.socket.write(syslogMsg, 'utf8', onError);
}
});
callback(null, true);
};
//
// ### function connect (callback)
// #### @callback {function} Continuation to respond to when complete.
// Connects to the remote syslog server using `dgram` or `net` depending
// on the `protocol` for this instance.
//
Syslog.prototype.connect = function (callback) {
var self = this, readyEvent;
//
// If the socket already exists then respond
//
if (this.socket) {
return (!this.socket.readyState) || (this.socket.readyState === 'open')
? callback(null)
: callback(true);
}
//
// Create the appropriate socket type.
//
if (this.isDgram) {
if (self.protocol.match(/^udp/)) {
this.socket = new dgram.Socket(this.protocol);
}
else {
this.socket = new unix.createSocket('unix_dgram');
}
return callback(null);
}
else {
this.socket = new net.Socket({ type: this.protocol });
this.socket.setKeepAlive(true);
this.socket.setNoDelay();
readyEvent = 'connect';
}
//
// On any error writing to the socket, emit the `logged` event
// and the `error` event.
//
function onError (logErr) {
if (logErr) { self.emit('error', logErr) }
self.emit('logged');
}
//
  // Indicate to the caller that the socket is not ready. This
// will enqueue the current message for later.
//
callback(true);
//
// Listen to the appropriate events on the socket that
// was just created.
//
this.socket.on(readyEvent, function () {
//
// When the socket is ready, write the current queue
// to it.
//
self.socket.write(self.queue.join(''), 'utf8', onError);
self.emit('logged');
self.queue = [];
self.retries = 0;
self.connected = true;
}).on('error', function (ex) {
//
// TODO: Pass this error back up
//
}).on('end', function (ex) {
//
// Nothing needs to be done here.
//
}).on('close', function (ex) {
//
// Attempt to reconnect on lost connection(s), progressively
// increasing the amount of time between each try.
//
var interval = Math.pow(2, self.retries);
self.connected = false;
setTimeout(function () {
self.retries++;
self.socket.connect(self.port, self.host);
}, interval * 1000);
}).on('timeout', function () {
if (self.socket.readyState !== 'open') {
self.socket.destroy();
}
});
this.socket.connect(this.port, this.host);
};<|fim▁end|> | //
this.connected = false; |
<|file_name|>anoncreds.rs<|end_file_name|><|fim▁begin|>use futures::Future;
use serde_json;
use serde_json::{map::Map, Value};
use indy::{anoncreds, blob_storage, ledger};
use time;
use settings;
use utils::constants::{LIBINDY_CRED_OFFER, REQUESTED_ATTRIBUTES, PROOF_REQUESTED_PREDICATES, ATTRS, REV_STATE_JSON};
use utils::libindy::{wallet::get_wallet_handle, LibindyMock};
use utils::libindy::payments::{pay_for_txn, PaymentTxn};
use utils::libindy::ledger::*;
use utils::constants::{SCHEMA_ID, SCHEMA_JSON, SCHEMA_TXN, CREATE_SCHEMA_ACTION, CRED_DEF_ID, CRED_DEF_JSON, CRED_DEF_REQ, CREATE_CRED_DEF_ACTION, CREATE_REV_REG_DEF_ACTION, CREATE_REV_REG_DELTA_ACTION, REVOC_REG_TYPE, rev_def_json, REV_REG_ID, REV_REG_DELTA_JSON, REV_REG_JSON};
use error::prelude::*;
const BLOB_STORAGE_TYPE: &str = "default";
const REVOCATION_REGISTRY_TYPE: &str = "ISSUANCE_BY_DEFAULT";
pub fn libindy_verifier_verify_proof(proof_req_json: &str,
proof_json: &str,
schemas_json: &str,
credential_defs_json: &str,
rev_reg_defs_json: &str,
rev_regs_json: &str) -> VcxResult<bool> {
anoncreds::verifier_verify_proof(proof_req_json,
proof_json,
schemas_json,
credential_defs_json,
rev_reg_defs_json,
rev_regs_json)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_create_and_store_revoc_reg(issuer_did: &str, cred_def_id: &str, tails_path: &str, max_creds: u32) -> VcxResult<(String, String, String)> {
trace!("creating revocation: {}, {}, {}", cred_def_id, tails_path, max_creds);
let tails_config = json!({"base_dir": tails_path,"uri_pattern": ""}).to_string();
let writer = blob_storage::open_writer(BLOB_STORAGE_TYPE, &tails_config)
.wait()?;
let revoc_config = json!({"max_cred_num": max_creds, "issuance_type": REVOCATION_REGISTRY_TYPE}).to_string();
anoncreds::issuer_create_and_store_revoc_reg(get_wallet_handle(), issuer_did, None, "tag1", cred_def_id, &revoc_config, writer)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_create_and_store_credential_def(issuer_did: &str,
schema_json: &str,
tag: &str,
sig_type: Option<&str>,
config_json: &str) -> VcxResult<(String, String)> {
anoncreds::issuer_create_and_store_credential_def(get_wallet_handle(),
issuer_did,
schema_json,
tag,
sig_type,
config_json)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_issuer_create_credential_offer(cred_def_id: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() {
let rc = LibindyMock::get_result();
if rc != 0 { return Err(VcxError::from(VcxErrorKind::InvalidState)); };
return Ok(LIBINDY_CRED_OFFER.to_string());
}
anoncreds::issuer_create_credential_offer(get_wallet_handle(),
cred_def_id)
.wait()
.map_err(VcxError::from)
}
fn blob_storage_open_reader(base_dir: &str) -> VcxResult<i32> {
let tails_config = json!({"base_dir": base_dir,"uri_pattern": ""}).to_string();
blob_storage::open_reader("default", &tails_config)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_issuer_create_credential(cred_offer_json: &str,
cred_req_json: &str,
cred_values_json: &str,
rev_reg_id: Option<String>,
tails_file: Option<String>) -> VcxResult<(String, Option<String>, Option<String>)> {
if settings::indy_mocks_enabled() { return Ok((::utils::constants::CREDENTIAL_JSON.to_owned(), None, None)); }
let revocation = rev_reg_id.as_ref().map(String::as_str);
let blob_handle = match tails_file {
Some(x) => blob_storage_open_reader(&x)?,
None => -1,
};
anoncreds::issuer_create_credential(get_wallet_handle(),
cred_offer_json,
cred_req_json,
cred_values_json,
revocation,
blob_handle)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_prover_create_proof(proof_req_json: &str,
requested_credentials_json: &str,
master_secret_id: &str,
schemas_json: &str,
credential_defs_json: &str,
revoc_states_json: Option<&str>) -> VcxResult<String> {
if settings::indy_mocks_enabled() { return Ok(::utils::constants::PROOF_JSON.to_owned()); }
let revoc_states_json = revoc_states_json.unwrap_or("{}");
anoncreds::prover_create_proof(get_wallet_handle(),
proof_req_json,
requested_credentials_json,
master_secret_id,
schemas_json,
credential_defs_json,
revoc_states_json)
.wait()
.map_err(VcxError::from)
}
fn fetch_credentials(search_handle: i32, requested_attributes: Map<String, Value>) -> VcxResult<String> {
let mut v: Value = json!({});
for item_referent in requested_attributes.keys().into_iter() {
v[ATTRS][item_referent] =
serde_json::from_str(&anoncreds::prover_fetch_credentials_for_proof_req(search_handle, item_referent, 100).wait()?)
.map_err(|_| {
error!("Invalid Json Parsing of Object Returned from Libindy. Did Libindy change its structure?");
VcxError::from_msg(VcxErrorKind::InvalidConfiguration, "Invalid Json Parsing of Object Returned from Libindy. Did Libindy change its structure?")
})?
}
Ok(v.to_string())
}
fn close_search_handle(search_handle: i32) -> VcxResult<()> {
anoncreds::prover_close_credentials_search_for_proof_req(search_handle)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_prover_get_credentials_for_proof_req(proof_req: &str) -> VcxResult<String> {
let wallet_handle = get_wallet_handle();
    // this may be redundant since Prover::search_credentials will validate the proof request already.
let proof_request_json: Map<String, Value> = serde_json::from_str(proof_req)
.map_err(|err| VcxError::from_msg(VcxErrorKind::InvalidProofRequest, format!("Cannot deserialize ProofRequest: {:?}", err)))?;
// since the search_credentials_for_proof request validates that the proof_req is properly structured, this get()
// fn should never fail, unless libindy changes their formats.
let requested_attributes: Option<Map<String, Value>> = proof_request_json.get(REQUESTED_ATTRIBUTES)
.and_then(|v| {
serde_json::from_value(v.clone()).map_err(|_| {
error!("Invalid Json Parsing of Requested Attributes Retrieved From Libindy. Did Libindy change its structure?");
}).ok()
});
let requested_predicates: Option<Map<String, Value>> = proof_request_json.get(PROOF_REQUESTED_PREDICATES).and_then(|v| {
serde_json::from_value(v.clone()).map_err(|_| {
error!("Invalid Json Parsing of Requested Predicates Retrieved From Libindy. Did Libindy change its structure?");
}).ok()
});
    // handle special case of "empty because json is bad" vs "empty because no attributes selected"
if requested_attributes == None && requested_predicates == None {
return Err(VcxError::from_msg(VcxErrorKind::InvalidAttributesStructure, "Invalid Json Parsing of Requested Attributes Retrieved From Libindy"));
}
let mut fetch_attrs: Map<String, Value> = match requested_attributes {
Some(attrs) => attrs.clone(),
None => Map::new()
};
match requested_predicates {
Some(attrs) => fetch_attrs.extend(attrs),
None => ()
}
if 0 < fetch_attrs.len() {
let search_handle = anoncreds::prover_search_credentials_for_proof_req(wallet_handle, proof_req, None)
.wait()
.map_err(|ec| {
error!("Opening Indy Search for Credentials Failed");
ec
})?;
let creds: String = fetch_credentials(search_handle, fetch_attrs)?;
// should an error on closing a search handle throw an error, or just a warning?
        // for now we are just outputting to the user that there is an issue, and continuing on.
let _ = close_search_handle(search_handle);
Ok(creds)
} else {
Ok("{}".to_string())
}
}
pub fn libindy_prover_create_credential_req(prover_did: &str,
credential_offer_json: &str,
credential_def_json: &str) -> VcxResult<(String, String)> {
if settings::indy_mocks_enabled() { return Ok((::utils::constants::CREDENTIAL_REQ_STRING.to_owned(), String::new())); }
let master_secret_name = settings::DEFAULT_LINK_SECRET_ALIAS;
anoncreds::prover_create_credential_req(get_wallet_handle(),
prover_did,
credential_offer_json,
credential_def_json,
master_secret_name)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_prover_create_revocation_state(rev_reg_def_json: &str, rev_reg_delta_json: &str, cred_rev_id: &str, tails_file: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() { return Ok(REV_STATE_JSON.to_string()); }
let blob_handle = blob_storage_open_reader(tails_file)?;
anoncreds::create_revocation_state(blob_handle, rev_reg_def_json, rev_reg_delta_json, 100, cred_rev_id)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_prover_update_revocation_state(rev_reg_def_json: &str, rev_state_json: &str, rev_reg_delta_json: &str, cred_rev_id: &str, tails_file: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() { return Ok(REV_STATE_JSON.to_string()); }
let blob_handle = blob_storage_open_reader(tails_file)?;
anoncreds::update_revocation_state(blob_handle, rev_state_json, rev_reg_def_json, rev_reg_delta_json, 100, cred_rev_id)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_prover_store_credential(cred_id: Option<&str>,
cred_req_meta: &str,
cred_json: &str,
cred_def_json: &str,
rev_reg_def_json: Option<&str>) -> VcxResult<String> {
if settings::indy_mocks_enabled() { return Ok("cred_id".to_string()); }
anoncreds::prover_store_credential(get_wallet_handle(),
cred_id,
cred_req_meta,
cred_json,
cred_def_json,
rev_reg_def_json)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_prover_delete_credential(cred_id: &str) -> VcxResult<()>{
anoncreds::prover_delete_credential(get_wallet_handle(),
cred_id)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_prover_create_master_secret(master_secret_id: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() { return Ok(settings::DEFAULT_LINK_SECRET_ALIAS.to_string()); }
anoncreds::prover_create_master_secret(get_wallet_handle(),
Some(master_secret_id))
.wait()
.map_err(VcxError::from)
}
pub fn libindy_issuer_create_schema(issuer_did: &str,
name: &str,
version: &str,
attrs: &str) -> VcxResult<(String, String)> {
anoncreds::issuer_create_schema(issuer_did,
name,
version,
attrs)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_issuer_revoke_credential(tails_file: &str, rev_reg_id: &str, cred_rev_id: &str) -> VcxResult<String> {
let blob_handle = blob_storage_open_reader(tails_file)?;
anoncreds::issuer_revoke_credential(get_wallet_handle(), blob_handle, rev_reg_id, cred_rev_id)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_build_revoc_reg_def_request(submitter_did: &str,
rev_reg_def_json: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() { return Ok("".to_string()); }
ledger::build_revoc_reg_def_request(submitter_did, rev_reg_def_json)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_build_revoc_reg_entry_request(submitter_did: &str,
rev_reg_id: &str,
rev_def_type: &str,
value: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() { return Ok("".to_string()); }
ledger::build_revoc_reg_entry_request(submitter_did, rev_reg_id, rev_def_type, value)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_build_get_revoc_reg_def_request(submitter_did: &str, rev_reg_id: &str) -> VcxResult<String> {
ledger::build_get_revoc_reg_def_request(Some(submitter_did), rev_reg_id)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_parse_get_revoc_reg_def_response(rev_reg_def_json: &str) -> VcxResult<(String, String)> {
ledger::parse_get_revoc_reg_def_response(rev_reg_def_json)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_build_get_revoc_reg_delta_request(submitter_did: &str,
rev_reg_id: &str,
from: i64,
to: i64) -> VcxResult<String> {
ledger::build_get_revoc_reg_delta_request(Some(submitter_did),
rev_reg_id,
from,
to)
.wait()
.map_err(VcxError::from)
}
fn libindy_build_get_revoc_reg_request(submitter_did: &str, rev_reg_id: &str, timestamp: u64) -> VcxResult<String> {
ledger::build_get_revoc_reg_request(Some(submitter_did),
rev_reg_id,
timestamp as i64)
.wait()
.map_err(VcxError::from)
}
fn libindy_parse_get_revoc_reg_response(get_rev_reg_resp: &str) -> VcxResult<(String, String, u64)> {
ledger::parse_get_revoc_reg_response(get_rev_reg_resp)
.wait()
.map_err(VcxError::from)
}
pub fn libindy_parse_get_revoc_reg_delta_response(get_rev_reg_delta_response: &str)
-> VcxResult<(String, String, u64)> {
ledger::parse_get_revoc_reg_delta_response(get_rev_reg_delta_response)
.wait()
.map_err(VcxError::from)
}
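// Creates a schema locally from the given attribute list; writing it to the ledger
// is a separate step handled by publish_schema().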
pub fn create_schema(name: &str, version: &str, data: &str) -> VcxResult<(String, String)> {
if settings::indy_mocks_enabled() {
return Ok((SCHEMA_ID.to_string(), SCHEMA_JSON.to_string()));
}
let submitter_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID)?;
let (id, create_schema) = libindy_issuer_create_schema(&submitter_did, name, version, data)?;
Ok((id, create_schema))
}
pub fn build_schema_request(schema: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() {
return Ok(SCHEMA_TXN.to_string());
}
let submitter_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID)?;
let request = libindy_build_schema_request(&submitter_did, schema)?;
let request = append_txn_author_agreement_to_request(&request)?;
Ok(request)
}
pub fn publish_schema(schema: &str) -> VcxResult<Option<PaymentTxn>> {
if settings::indy_mocks_enabled() {
let inputs = vec!["pay:null:9UFgyjuJxi1i1HD".to_string()];
let outputs = serde_json::from_str::<Vec<::utils::libindy::payments::Output>>(r#"[{"amount":4,"extra":null,"recipient":"pay:null:xkIsxem0YNtHrRO"}]"#).unwrap();
return Ok(Some(PaymentTxn::from_parts(inputs, outputs, 1, false)));
}
let request = build_schema_request(schema)?;
let (payment, response) = pay_for_txn(&request, CREATE_SCHEMA_ACTION)?;
_check_schema_response(&response)?;
Ok(payment)
}
pub fn get_schema_json(schema_id: &str) -> VcxResult<(String, String)> {
if settings::indy_mocks_enabled() { return Ok((SCHEMA_ID.to_string(), SCHEMA_JSON.to_string())); }
let submitter_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID)?;
let schema_json = libindy_get_schema(&submitter_did, schema_id)?;
Ok((schema_id.to_string(), schema_json))
}
pub fn generate_cred_def(issuer_did: &str,
schema_json: &str,
tag: &str,
sig_type: Option<&str>,
support_revocation: Option<bool>) -> VcxResult<(String, String)> {
if settings::indy_mocks_enabled() {
return Ok((CRED_DEF_ID.to_string(), CRED_DEF_JSON.to_string()));
}
let config_json = json!({"support_revocation": support_revocation.unwrap_or(false)}).to_string();
libindy_create_and_store_credential_def(issuer_did,
schema_json,
tag,
sig_type,
&config_json)
}<|fim▁hole|> return Ok(CRED_DEF_REQ.to_string());
}
let cred_def_req = libindy_build_create_credential_def_txn(issuer_did, &cred_def_json)?;
let cred_def_req = append_txn_author_agreement_to_request(&cred_def_req)?;
Ok(cred_def_req)
}
pub fn publish_cred_def(issuer_did: &str, cred_def_json: &str) -> VcxResult<Option<PaymentTxn>> {
if settings::indy_mocks_enabled() {
let inputs = vec!["pay:null:9UFgyjuJxi1i1HD".to_string()];
let outputs = serde_json::from_str::<Vec<::utils::libindy::payments::Output>>(r#"[{"amount":4,"extra":null,"recipient":"pay:null:xkIsxem0YNtHrRO"}]"#).unwrap();
return Ok(Some(PaymentTxn::from_parts(inputs, outputs, 1, false)));
}
let cred_def_req = build_cred_def_request(issuer_did, &cred_def_json)?;
let (payment, _) = pay_for_txn(&cred_def_req, CREATE_CRED_DEF_ACTION)?;
Ok(payment)
}
pub fn get_cred_def_json(cred_def_id: &str) -> VcxResult<(String, String)> {
if settings::indy_mocks_enabled() { return Ok((CRED_DEF_ID.to_string(), CRED_DEF_JSON.to_string())); }
let cred_def_json = libindy_get_cred_def(cred_def_id)?;
Ok((cred_def_id.to_string(), cred_def_json))
}
pub fn generate_rev_reg(issuer_did: &str, cred_def_id: &str, tails_file: &str, max_creds: u32)
-> VcxResult<(String, String, String)> {
if settings::indy_mocks_enabled() { return Ok((REV_REG_ID.to_string(), rev_def_json(), "".to_string())); }
let (rev_reg_id, rev_reg_def_json, rev_reg_entry_json) =
libindy_create_and_store_revoc_reg(issuer_did,
cred_def_id,
tails_file,
max_creds)?;
Ok((rev_reg_id, rev_reg_def_json, rev_reg_entry_json))
}
pub fn build_rev_reg_request(issuer_did: &str, rev_reg_def_json: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() { return Ok("".to_string()); }
let rev_reg_def_req = libindy_build_revoc_reg_def_request(issuer_did, &rev_reg_def_json)?;
let rev_reg_def_req = append_txn_author_agreement_to_request(&rev_reg_def_req)?;
Ok(rev_reg_def_req)
}
pub fn publish_rev_reg_def(issuer_did: &str, rev_reg_def_json: &str) -> VcxResult<Option<PaymentTxn>> {
if settings::indy_mocks_enabled() { return Ok(None); }
let rev_reg_def_req = build_rev_reg_request(issuer_did, &rev_reg_def_json)?;
let (payment, _) = pay_for_txn(&rev_reg_def_req, CREATE_REV_REG_DEF_ACTION)?;
Ok(payment)
}
pub fn get_rev_reg_def_json(rev_reg_id: &str) -> VcxResult<(String, String)> {
if settings::indy_mocks_enabled() { return Ok((REV_REG_ID.to_string(), rev_def_json())); }
let submitter_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID)?;
libindy_build_get_revoc_reg_def_request(&submitter_did, rev_reg_id)
.and_then(|req| libindy_submit_request(&req))
.and_then(|response| libindy_parse_get_revoc_reg_def_response(&response))
}
pub fn build_rev_reg_delta_request(issuer_did: &str, rev_reg_id: &str, rev_reg_entry_json: &str)
-> VcxResult<String> {
let request = libindy_build_revoc_reg_entry_request(issuer_did, rev_reg_id, REVOC_REG_TYPE, rev_reg_entry_json)?;
let request = append_txn_author_agreement_to_request(&request)?;
Ok(request)
}
pub fn publish_rev_reg_delta(issuer_did: &str, rev_reg_id: &str, rev_reg_entry_json: &str)
-> VcxResult<(Option<PaymentTxn>, String)> {
let request = build_rev_reg_delta_request(issuer_did, rev_reg_id, rev_reg_entry_json)?;
pay_for_txn(&request, CREATE_REV_REG_DELTA_ACTION)
}
pub fn get_rev_reg_delta_json(rev_reg_id: &str, from: Option<u64>, to: Option<u64>)
-> VcxResult<(String, String, u64)> {
if settings::indy_mocks_enabled() { return Ok((REV_REG_ID.to_string(), REV_REG_DELTA_JSON.to_string(), 1)); }
let submitter_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID)?;
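    // Fill in defaults for the requested range: -1 for `from` and the current time for `to`.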
let from: i64 = if let Some(_from) = from { _from as i64 } else { -1 };
let to = if let Some(_to) = to { _to as i64 } else { time::get_time().sec };
libindy_build_get_revoc_reg_delta_request(&submitter_did, rev_reg_id, from, to)
.and_then(|req| libindy_submit_request(&req))
.and_then(|response| libindy_parse_get_revoc_reg_delta_response(&response))
}
pub fn get_rev_reg(rev_reg_id: &str, timestamp: u64) -> VcxResult<(String, String, u64)> {
if settings::indy_mocks_enabled() { return Ok((REV_REG_ID.to_string(), REV_REG_JSON.to_string(), 1)); }
let submitter_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID)?;
libindy_build_get_revoc_reg_request(&submitter_did, rev_reg_id, timestamp)
.and_then(|req| libindy_submit_request(&req))
.and_then(|response| libindy_parse_get_revoc_reg_response(&response))
}
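// Revokes a single credential and immediately publishes the resulting revocation
// registry delta to the ledger; returns the payment info along with the delta.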
pub fn revoke_credential(tails_file: &str, rev_reg_id: &str, cred_rev_id: &str) -> VcxResult<(Option<PaymentTxn>, String)> {
if settings::indy_mocks_enabled() {
let inputs = vec!["pay:null:9UFgyjuJxi1i1HD".to_string()];
let outputs = serde_json::from_str::<Vec<::utils::libindy::payments::Output>>(r#"[{"amount":4,"extra":null,"recipient":"pay:null:xkIsxem0YNtHrRO"}]"#).unwrap();
return Ok((Some(PaymentTxn::from_parts(inputs, outputs, 1, false)), REV_REG_DELTA_JSON.to_string()));
}
let submitter_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID)?;
let delta = libindy_issuer_revoke_credential(tails_file, rev_reg_id, cred_rev_id)?;
let (payment, _) = publish_rev_reg_delta(&submitter_did, rev_reg_id, &delta)?;
Ok((payment, delta))
}
pub fn libindy_to_unqualified(entity: &str) -> VcxResult<String> {
anoncreds::to_unqualified(entity)
.wait()
.map_err(VcxError::from)
}
fn _check_schema_response(response: &str) -> VcxResult<()> {
    // TODO: kept for backward compatibility, but we could actually handle the response better
match parse_response(response)? {
Response::Reply(_) => Ok(()),
Response::Reject(reject) => Err(VcxError::from_msg(VcxErrorKind::DuplicationSchema, format!("{:?}", reject))),
Response::ReqNACK(reqnack) => Err(VcxError::from_msg(VcxErrorKind::UnknownSchemaRejection, format!("{:?}", reqnack)))
}
}
pub fn generate_nonce() -> VcxResult<String> {
anoncreds::generate_nonce()
.wait()
.map_err(VcxError::from)
}
#[cfg(test)]
pub mod tests {
use super::*;
use utils::get_temp_dir_path;
extern crate serde_json;
extern crate rand;
use rand::Rng;
use settings;
use utils::constants::*;
use std::thread;
use std::time::Duration;
#[cfg(feature = "pool_tests")]
use utils::constants::TEST_TAILS_FILE;
use utils::devsetup::*;
pub fn create_schema(attr_list: &str) -> (String, String) {
let data = attr_list.to_string();
let schema_name: String = rand::thread_rng().gen_ascii_chars().take(25).collect::<String>();
let schema_version: String = format!("{}.{}", rand::thread_rng().gen::<u32>().to_string(),
rand::thread_rng().gen::<u32>().to_string());
let institution_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
libindy_issuer_create_schema(&institution_did, &schema_name, &schema_version, &data).unwrap()
}
pub fn create_schema_req(schema_json: &str) -> String {
let institution_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
let request = ::utils::libindy::ledger::libindy_build_schema_request(&institution_did, schema_json).unwrap();
append_txn_author_agreement_to_request(&request).unwrap()
}
pub fn create_and_write_test_schema(attr_list: &str) -> (String, String) {
let (schema_id, schema_json) = create_schema(attr_list);
let req = create_schema_req(&schema_json);
::utils::libindy::payments::pay_for_txn(&req, CREATE_SCHEMA_ACTION).unwrap();
thread::sleep(Duration::from_millis(1000));
(schema_id, schema_json)
}
pub fn create_and_store_credential_def(attr_list: &str, support_rev: bool) -> (String, String, String, String, u32, Option<String>) {
/* create schema */
let (schema_id, schema_json) = create_and_write_test_schema(attr_list);
let name: String = rand::thread_rng().gen_ascii_chars().take(25).collect::<String>();
let institution_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
/* create cred-def */
let mut revocation_details = json!({"support_revocation":support_rev});
if support_rev {
revocation_details["tails_file"] = json!(get_temp_dir_path(TEST_TAILS_FILE).to_str().unwrap().to_string());
revocation_details["max_creds"] = json!(10);
}
let handle = ::credential_def::create_and_publish_credentialdef("1".to_string(),
name,
institution_did.clone(),
schema_id.clone(),
"tag1".to_string(),
revocation_details.to_string()).unwrap();
thread::sleep(Duration::from_millis(1000));
let cred_def_id = ::credential_def::get_cred_def_id(handle).unwrap();
thread::sleep(Duration::from_millis(1000));
let (_, cred_def_json) = get_cred_def_json(&cred_def_id).unwrap();
let rev_reg_id = ::credential_def::get_rev_reg_id(handle).unwrap();
(schema_id, schema_json, cred_def_id, cred_def_json, handle, rev_reg_id)
}
pub fn create_credential_offer(attr_list: &str, revocation: bool) -> (String, String, String, String, String, Option<String>) {
let (schema_id, schema_json, cred_def_id, cred_def_json, _, rev_reg_id) = create_and_store_credential_def(attr_list, revocation);
let offer = ::utils::libindy::anoncreds::libindy_issuer_create_credential_offer(&cred_def_id).unwrap();
(schema_id, schema_json, cred_def_id, cred_def_json, offer, rev_reg_id)
}
pub fn create_credential_req(attr_list: &str, revocation: bool) -> (String, String, String, String, String, String, String, Option<String>) {
let (schema_id, schema_json, cred_def_id, cred_def_json, offer, rev_reg_id) = create_credential_offer(attr_list, revocation);
let institution_did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
let (req, req_meta) = ::utils::libindy::anoncreds::libindy_prover_create_credential_req(&institution_did, &offer, &cred_def_json).unwrap();
(schema_id, schema_json, cred_def_id, cred_def_json, offer, req, req_meta, rev_reg_id)
}
pub fn create_and_store_credential(attr_list: &str, revocation: bool) -> (String, String, String, String, String, String, String, String, Option<String>, Option<String>) {
let (schema_id, schema_json, cred_def_id, cred_def_json, offer, req, req_meta, rev_reg_id) = create_credential_req(attr_list, revocation);
/* create cred */
let credential_data = r#"{"address1": ["123 Main St"], "address2": ["Suite 3"], "city": ["Draper"], "state": ["UT"], "zip": ["84000"]}"#;
let encoded_attributes = ::issuer_credential::encode_attributes(&credential_data).unwrap();
let (rev_def_json, tails_file) = if revocation {
let (_id, json) = get_rev_reg_def_json(&rev_reg_id.clone().unwrap()).unwrap();
            (Some(json), Some(get_temp_dir_path(TEST_TAILS_FILE).to_str().unwrap().to_string()))
} else { (None, None) };
let (cred, cred_rev_id, _) = ::utils::libindy::anoncreds::libindy_issuer_create_credential(&offer, &req, &encoded_attributes, rev_reg_id.clone(), tails_file).unwrap();
/* store cred */
let cred_id = ::utils::libindy::anoncreds::libindy_prover_store_credential(None, &req_meta, &cred, &cred_def_json, rev_def_json.as_ref().map(String::as_str)).unwrap();
(schema_id, schema_json, cred_def_id, cred_def_json, offer, req, req_meta, cred_id, rev_reg_id, cred_rev_id)
}
pub fn create_proof() -> (String, String, String, String) {
let did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
let (schema_id, schema_json, cred_def_id, cred_def_json, _offer, _req, _req_meta, cred_id, _, _)
= create_and_store_credential(::utils::constants::DEFAULT_SCHEMA_ATTRS, false);
let proof_req = json!({
"nonce":"123432421212",
"name":"proof_req_1",
"version":"0.1",
"requested_attributes": json!({
"address1_1": json!({
"name":"address1",
"restrictions": [json!({ "issuer_did": did })]
}),
"zip_2": json!({
"name":"zip",
"restrictions": [json!({ "issuer_did": did })]
}),
"self_attest_3": json!({
"name":"self_attest",
}),
}),
"requested_predicates": json!({}),
}).to_string();
let requested_credentials_json = json!({
"self_attested_attributes":{
"self_attest_3": "my_self_attested_val"
},
"requested_attributes":{
"address1_1": {"cred_id": cred_id, "revealed": true},
"zip_2": {"cred_id": cred_id, "revealed": true}
},
"requested_predicates":{}
}).to_string();
let schema_json: serde_json::Value = serde_json::from_str(&schema_json).unwrap();
let schemas = json!({
schema_id: schema_json,
}).to_string();
let cred_def_json: serde_json::Value = serde_json::from_str(&cred_def_json).unwrap();
let cred_defs = json!({
cred_def_id: cred_def_json,
}).to_string();
libindy_prover_get_credentials_for_proof_req(&proof_req).unwrap();
let proof = libindy_prover_create_proof(
&proof_req,
&requested_credentials_json,
"main",
&schemas,
&cred_defs,
None).unwrap();
(schemas, cred_defs, proof_req, proof)
}
pub fn create_self_attested_proof() -> (String, String) {
let proof_req = json!({
"nonce":"123432421212",
"name":"proof_req_1",
"version":"0.1",
"requested_attributes": json!({
"address1_1": json!({
"name":"address1",
}),
"zip_2": json!({
"name":"zip",
}),
}),
"requested_predicates": json!({}),
}).to_string();
let requested_credentials_json = json!({
"self_attested_attributes":{
"address1_1": "my_self_attested_address",
"zip_2": "my_self_attested_zip"
},
"requested_attributes":{},
"requested_predicates":{}
}).to_string();
let schemas = json!({}).to_string();
let cred_defs = json!({}).to_string();
libindy_prover_get_credentials_for_proof_req(&proof_req).unwrap();
let proof = libindy_prover_create_proof(
&proof_req,
&requested_credentials_json,
"main",
&schemas,
&cred_defs,
None).unwrap();
(proof_req, proof)
}
pub fn create_proof_with_predicate(include_predicate_cred: bool) -> (String, String, String, String) {
let did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
let (schema_id, schema_json, cred_def_id, cred_def_json, _offer, _req, _req_meta, cred_id, _, _)
= create_and_store_credential(::utils::constants::DEFAULT_SCHEMA_ATTRS, false);
let proof_req = json!({
"nonce":"123432421212",
"name":"proof_req_1",
"version":"0.1",
"requested_attributes": json!({
"address1_1": json!({
"name":"address1",
"restrictions": [json!({ "issuer_did": did })]
}),
"self_attest_3": json!({
"name":"self_attest",
}),
}),
"requested_predicates": json!({
"zip_3": {"name":"zip", "p_type":">=", "p_value":18}
}),
}).to_string();
let requested_credentials_json;
if include_predicate_cred {
requested_credentials_json = json!({
"self_attested_attributes":{
"self_attest_3": "my_self_attested_val"
},
"requested_attributes":{
"address1_1": {"cred_id": cred_id, "revealed": true}
},
"requested_predicates":{
"zip_3": {"cred_id": cred_id}
}
}).to_string();
} else {
requested_credentials_json = json!({
"self_attested_attributes":{
"self_attest_3": "my_self_attested_val"
},
"requested_attributes":{
"address1_1": {"cred_id": cred_id, "revealed": true}
},
"requested_predicates":{
}
}).to_string();
}
let schema_json: serde_json::Value = serde_json::from_str(&schema_json).unwrap();
let schemas = json!({
schema_id: schema_json,
}).to_string();
let cred_def_json: serde_json::Value = serde_json::from_str(&cred_def_json).unwrap();
let cred_defs = json!({
cred_def_id: cred_def_json,
}).to_string();
libindy_prover_get_credentials_for_proof_req(&proof_req).unwrap();
let proof = libindy_prover_create_proof(
&proof_req,
&requested_credentials_json,
"main",
&schemas,
&cred_defs,
None).unwrap();
(schemas, cred_defs, proof_req, proof)
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_prover_verify_proof() {
let _setup = SetupLibraryWalletPool::init();
let (schemas, cred_defs, proof_req, proof) = create_proof();
let proof_validation = libindy_verifier_verify_proof(
&proof_req,
&proof,
&schemas,
&cred_defs,
"{}",
"{}",
).unwrap();
        assert!(proof_validation);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_prover_verify_proof_with_predicate_success_case() {
let _setup = SetupLibraryWalletPool::init();
let (schemas, cred_defs, proof_req, proof) = create_proof_with_predicate(true);
let proof_validation = libindy_verifier_verify_proof(
&proof_req,
&proof,
&schemas,
&cred_defs,
"{}",
"{}",
).unwrap();
        assert!(proof_validation);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_prover_verify_proof_with_predicate_fail_case() {
let _setup = SetupLibraryWalletPool::init();
let (schemas, cred_defs, proof_req, proof) = create_proof_with_predicate(false);
libindy_verifier_verify_proof(
&proof_req,
&proof,
&schemas,
&cred_defs,
"{}",
"{}",
).unwrap_err();
}
#[cfg(feature = "pool_tests")]
#[test]
fn tests_libindy_prover_get_credentials() {
let _setup = SetupLibraryWallet::init();
let proof_req = "{";
let result = libindy_prover_get_credentials_for_proof_req(&proof_req);
assert_eq!(result.unwrap_err().kind(), VcxErrorKind::InvalidProofRequest);
let proof_req = json!({
"nonce":"123432421212",
"name":"proof_req_1",
"version":"0.1",
"requested_attributes": json!({
"address1_1": json!({
"name":"address1",
}),
"zip_2": json!({
"name":"zip",
}),
}),
"requested_predicates": json!({}),
}).to_string();
let _result = libindy_prover_get_credentials_for_proof_req(&proof_req).unwrap();
let result_malformed_json = libindy_prover_get_credentials_for_proof_req("{}").unwrap_err();
assert_eq!(result_malformed_json.kind(), VcxErrorKind::InvalidAttributesStructure);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_issuer_revoke_credential() {
let _setup = SetupLibraryWalletPool::init();
let rc = libindy_issuer_revoke_credential(get_temp_dir_path(TEST_TAILS_FILE).to_str().unwrap(), "", "");
assert!(rc.is_err());
let (_, _, _, _, _, _, _, _, rev_reg_id, cred_rev_id)
= create_and_store_credential(::utils::constants::DEFAULT_SCHEMA_ATTRS, true);
let rc = ::utils::libindy::anoncreds::libindy_issuer_revoke_credential(get_temp_dir_path(TEST_TAILS_FILE).to_str().unwrap(), &rev_reg_id.unwrap(), &cred_rev_id.unwrap());
assert!(rc.is_ok());
}
#[test]
fn test_create_cred_def() {
let _setup = SetupMocks::init();
let (id, _) = generate_cred_def("did", SCHEMAS_JSON, "tag_1", None, Some(false)).unwrap();
assert_eq!(id, CRED_DEF_ID);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_create_cred_def_real() {
let _setup = SetupLibraryWalletPool::init();
let (schema_id, _) = ::utils::libindy::anoncreds::tests::create_and_write_test_schema(::utils::constants::DEFAULT_SCHEMA_ATTRS);
let (_, schema_json) = get_schema_json(&schema_id).unwrap();
let did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
let (_, cred_def_json) = generate_cred_def(&did, &schema_json, "tag_1", None, Some(true)).unwrap();
publish_cred_def(&did, &cred_def_json).unwrap();
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_rev_reg_def_fails_for_cred_def_created_without_revocation() {
let _setup = SetupLibraryWalletPool::init();
        // The cred def is created with support_revocation=false,
        // so revoc_reg_def will fail in libindy because the cred def doesn't have revocation keys
let (_, _, cred_def_id, _, _, _) = ::utils::libindy::anoncreds::tests::create_and_store_credential_def(::utils::constants::DEFAULT_SCHEMA_ATTRS, false);
let did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
let rc = generate_rev_reg(&did, &cred_def_id, get_temp_dir_path("path.txt").to_str().unwrap(), 2);
assert_eq!(rc.unwrap_err().kind(), VcxErrorKind::LibindyInvalidStructure);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_create_rev_reg_def() {
let _setup = SetupLibraryWalletPool::init();
let (schema_id, _) = ::utils::libindy::anoncreds::tests::create_and_write_test_schema(::utils::constants::DEFAULT_SCHEMA_ATTRS);
let (_, schema_json) = get_schema_json(&schema_id).unwrap();
let did = settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap();
let (cred_def_id, cred_def_json) = generate_cred_def(&did, &schema_json, "tag_1", None, Some(true)).unwrap();
publish_cred_def(&did, &cred_def_json).unwrap();
let (rev_reg_def_id, rev_reg_def_json, rev_reg_entry_json) = generate_rev_reg(&did, &cred_def_id, "tails.txt", 2).unwrap();
publish_rev_reg_def(&did, &rev_reg_def_json).unwrap();
publish_rev_reg_delta(&did, &rev_reg_def_id, &rev_reg_entry_json).unwrap();
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_get_rev_reg_def_json() {
let _setup = SetupLibraryWalletPool::init();
let attrs = r#"["address1","address2","city","state","zip"]"#;
let (_, _, _, _, _, rev_reg_id) =
::utils::libindy::anoncreds::tests::create_and_store_credential_def(attrs, true);
let rev_reg_id = rev_reg_id.unwrap();
let (id, _json) = get_rev_reg_def_json(&rev_reg_id).unwrap();
assert_eq!(id, rev_reg_id);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_get_rev_reg_delta_json() {
let _setup = SetupLibraryWalletPool::init();
let attrs = r#"["address1","address2","city","state","zip"]"#;
let (_, _, _, _, _, rev_reg_id) =
::utils::libindy::anoncreds::tests::create_and_store_credential_def(attrs, true);
let rev_reg_id = rev_reg_id.unwrap();
let (id, _delta, _timestamp) = get_rev_reg_delta_json(&rev_reg_id, None, None).unwrap();
assert_eq!(id, rev_reg_id);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_get_rev_reg() {
let _setup = SetupLibraryWalletPool::init();
let attrs = r#"["address1","address2","city","state","zip"]"#;
let (_, _, _, _, _, rev_reg_id) =
::utils::libindy::anoncreds::tests::create_and_store_credential_def(attrs, true);
let rev_reg_id = rev_reg_id.unwrap();
let (id, _rev_reg, _timestamp) = get_rev_reg(&rev_reg_id, time::get_time().sec as u64).unwrap();
assert_eq!(id, rev_reg_id);
}
#[cfg(feature = "pool_tests")]
#[test]
fn from_pool_ledger_with_id() {
let _setup = SetupLibraryWalletPool::init();
let (schema_id, _schema_json) = ::utils::libindy::anoncreds::tests::create_and_write_test_schema(::utils::constants::DEFAULT_SCHEMA_ATTRS);
let rc = get_schema_json(&schema_id);
let (_id, retrieved_schema) = rc.unwrap();
assert!(retrieved_schema.contains(&schema_id));
}
#[test]
fn from_ledger_schema_id() {
let _setup = SetupMocks::init();
let (id, retrieved_schema) = get_schema_json(SCHEMA_ID).unwrap();
assert_eq!(&retrieved_schema, SCHEMA_JSON);
assert_eq!(&id, SCHEMA_ID);
}
#[cfg(feature = "pool_tests")]
#[test]
fn test_revoke_credential() {
let _setup = SetupLibraryWalletPool::init();
let (_, _, _, _, _, _, _, _, rev_reg_id, cred_rev_id)
= ::utils::libindy::anoncreds::tests::create_and_store_credential(::utils::constants::DEFAULT_SCHEMA_ATTRS, true);
let rev_reg_id = rev_reg_id.unwrap();
let (_, first_rev_reg_delta, first_timestamp) = get_rev_reg_delta_json(&rev_reg_id, None, None).unwrap();
let (_, test_same_delta, test_same_timestamp) = get_rev_reg_delta_json(&rev_reg_id, None, None).unwrap();
assert_eq!(first_rev_reg_delta, test_same_delta);
assert_eq!(first_timestamp, test_same_timestamp);
let (payment, _revoked_rev_reg_delta) = revoke_credential(get_temp_dir_path(TEST_TAILS_FILE).to_str().unwrap(), &rev_reg_id, cred_rev_id.unwrap().as_str()).unwrap();
// Delta should change after revocation
let (_, second_rev_reg_delta, _) = get_rev_reg_delta_json(&rev_reg_id, Some(first_timestamp + 1), None).unwrap();
assert!(payment.is_some());
assert_ne!(first_rev_reg_delta, second_rev_reg_delta);
}
}<|fim▁end|> |
pub fn build_cred_def_request(issuer_did: &str, cred_def_json: &str) -> VcxResult<String> {
if settings::indy_mocks_enabled() { |
<|file_name|>borrowck-uniq-via-lend.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
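// Exercises the borrow checker on lending out an owned box: reading through the box is
// allowed alongside immutable or unrelated borrows, but rejected while it is mutably
// lent out (see the `//~ ERROR` annotations below).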
fn borrow(_v: &int) {}
fn local() {<|fim▁hole|> borrow(v);
}
fn local_rec() {
struct F { f: Box<int> }
let mut v = F {f: box 3};
borrow(v.f);
}
fn local_recs() {
struct F { f: G }
struct G { g: H }
struct H { h: Box<int> }
let mut v = F {f: G {g: H {h: box 3}}};
borrow(v.f.g.h);
}
fn aliased_imm() {
let mut v = box 3i;
let _w = &v;
borrow(v);
}
fn aliased_mut() {
let mut v = box 3i;
let _w = &mut v;
borrow(v); //~ ERROR cannot borrow `*v`
}
fn aliased_other() {
let mut v = box 3i;
let mut w = box 4i;
let _x = &mut w;
borrow(v);
}
fn aliased_other_reassign() {
let mut v = box 3i;
let mut w = box 4i;
let mut _x = &mut w;
_x = &mut v;
borrow(v); //~ ERROR cannot borrow `*v`
}
fn main() {
}<|fim▁end|> | let mut v = box 3i; |
<|file_name|>thread_pool.cpp<|end_file_name|><|fim▁begin|>#include <ncore/sys/wait.h>
#include "thread_pool.h"
namespace ncore
{
ThreadPool::ThreadPool()
: work_threads_(), running_(0)
{
exec_proc_.Register(this, &ThreadPool::DoJobs);
}
ThreadPool::~ThreadPool()
{
fini();
}
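// Creates the job-queue semaphore and the requested number of worker threads; returns
// false if thread_number is zero or any thread fails to initialize.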
bool ThreadPool::init(size_t thread_number)
{
if(!thread_number)
return false;
if(!job_queue_semaphore_.init(0, 512))
return false;
work_threads_.resize(thread_number);
for(size_t i = 0; i < thread_number; ++i)
{
ThreadPtr thread(new Thread());
if(thread == 0)
return false;
if(thread->init(exec_proc_) == false)
return false;
work_threads_[i] = std::move(thread);
}
return true;
}
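// Tears the pool down: aborts every worker, then cancels any jobs still waiting in the
// queue and signals their completion events so callers are not left blocked.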
void ThreadPool::fini()
{
for(auto thread = work_threads_.begin();
thread != work_threads_.end();
++thread)
{
(*thread)->Abort();
}
work_threads_.clear();
while(!job_queue_.empty())
{
JobPtr job = job_queue_.front();
job->Cancel();
job->completed_.Set();
job_queue_.pop();
}
}
bool ThreadPool::Start()
{
for(auto thread = work_threads_.begin();
thread != work_threads_.end();
++thread)
{
if(!(*thread)->Start())
return false;
}
return true;
}
void ThreadPool::Abort()
{
for(auto thread = work_threads_.begin();
thread != work_threads_.end();
++thread)
{
(*thread)->Abort();
}
}
bool ThreadPool::Join()
{
for(auto thread = work_threads_.begin();
thread != work_threads_.end();
++thread)
{
if(!(*thread)->Join())
return false;
}
return true;
}
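// Enqueues a job for the workers. Null jobs, jobs that are not ready, and jobs still
// pending from a previous submission are rejected; otherwise the job is pushed under
// the queue lock and the semaphore wakes one worker.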
void ThreadPool::QueueJob(JobPtr & ptr)
{
<|fim▁hole|> if(ptr == nullptr)
return;
if(!ptr->ready_)
return;
if(!ptr->completed_.Wait(0))
return;
if(!ptr->completed_.Reset())
return;
job_queue_lock_.Acquire();
job_queue_.push(ptr);
job_queue_lock_.Release();
job_queue_semaphore_.Increase(1);
}
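// Worker thread loop: blocks on the queue semaphore, pops the next job, runs it, and
// signals its completion event; a ThreadExceptionAbort exception ends the loop.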
void ThreadPool::DoJobs()
{
try
{
while(true)
{
JobPtr job(nullptr);
job_queue_semaphore_.WaitEx(Wait::kInfinity, true);
job_queue_lock_.Acquire();
job = job_queue_.front();
job_queue_.pop();
job_queue_lock_.Release();
if(job == nullptr)
continue;
++running_;
if(!job->Rest(0))
job->Do();
job->completed_.Set();
--running_;
}
}
catch (ThreadExceptionAbort e)
{
return;
}
}
}<|fim▁end|> | |
<|file_name|>TemplateTest.py<|end_file_name|><|fim▁begin|>"""Fonctionnal testing for "Python In HTML" (PIH) and "HTML In Python" (HIP).
"""
__author__ = "Didier Wenzek ([email protected])"
# The code under test is in the .. directory.
import sys
sys.path.append('..')
# We use the Python unit testing framework
import unittest
import thread, time
from util import *
class TemplateTest(unittest.TestCase):
"""Testing the Karrigell Scripting languages.
    A test is prepared with the addScript() and removeScript() methods,
    which are used to set up the root directory with scripts picked from
the data directory.
The Karrigell response is returned by the getPage() method
as a Page object with a status and a content attribute.
The expected response can be retrieved from the data repository with the
getGoldenFile() method.
The Karrigell response can then be tested against the expected response,
using any of the unittest methods like assertEqual().
"""
def setUp(self):
launchKarrigell()
def test_PIH_InsertPython(self):
"""<% and %> tags are used to insert python code in HTML."""
import time
addScript("time.pih")
page = getPage("/time.pih")
today = time.strftime("%d:%m:%y",time.localtime(time.time()))
expected = getGoldenFile("time.out", date=today)
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)<|fim▁hole|> addScript("value.pih")
page = getPage("/value.pih")
expected = getGoldenFile("value.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content + '\n', expected)
def test_PIH_Indentation(self):
"""Python indentation is managed using the <% end %> tag."""
addScript("indent.pih")
page = getPage("/indent.pih", 'POST', params={'hour':'22'})
expected = getGoldenFile("indent.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
def test_PIH_IndentTag(self):
"""Within <indent> tag HTML must follow Python indentation."""
addScript("indent_tag.pih")
page = getPage("/indent_tag.pih", 'POST', params={'hour':'22'})
expected = getGoldenFile("indent_tag.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
def test_PIH_EmbeddedBlocks(self):
"""Python blocks may be embedded."""
addScript("embedded.pih")
page = getPage("/embedded.pih")
expected = getGoldenFile("embedded.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
def test_HIP_Principes(self):
"""Literal text in python are printed to the response stream"""
addScript("the_smiths.hip")
page = getPage("/the_smiths.hip")
expected = getGoldenFile("the_smiths.out")
self.assertEqual(page.status, 200)
self.assertEqual(page.content, expected)
if __name__ == "__main__":
unittest.main()<|fim▁end|> |
def test_PIH_PrintValue(self):
"""<%= an %> tags are used to print python value.""" |
<|file_name|>check-rpc-mappings.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2009-2019 The Bitcoin Core developers
# Copyright (c) 2014-2019 The DigiByte Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Check RPC argument consistency."""
from collections import defaultdict
import os
import re
import sys
# Source files (relative to root) to scan for dispatch tables
SOURCES = [
"src/rpc/server.cpp",
"src/rpc/blockchain.cpp",
"src/rpc/mining.cpp",
"src/rpc/misc.cpp",
"src/rpc/net.cpp",
"src/rpc/rawtransaction.cpp",
"src/wallet/rpcwallet.cpp",
]<|fim▁hole|>SOURCE_CLIENT = 'src/rpc/client.cpp'
# Argument names that should be ignored in consistency checks
IGNORE_DUMMY_ARGS = {'dummy', 'arg0', 'arg1', 'arg2', 'arg3', 'arg4', 'arg5', 'arg6', 'arg7', 'arg8', 'arg9'}
class RPCCommand:
def __init__(self, name, args):
self.name = name
self.args = args
class RPCArgument:
def __init__(self, names, idx):
self.names = names
self.idx = idx
self.convert = False
def parse_string(s):
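    """Strip the surrounding double quotes from a C++ string literal."""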
assert s[0] == '"'
assert s[-1] == '"'
return s[1:-1]
def process_commands(fname):
"""Find and parse dispatch table in implementation file `fname`."""
cmds = []
in_rpcs = False
with open(fname, "r", encoding="utf8") as f:
for line in f:
line = line.rstrip()
if not in_rpcs:
if re.match("static const CRPCCommand .*\[\] =", line):
in_rpcs = True
else:
if line.startswith('};'):
in_rpcs = False
elif '{' in line and '"' in line:
m = re.search('{ *("[^"]*"), *("[^"]*"), *&([^,]*), *{([^}]*)} *},', line)
assert m, 'No match to table expression: %s' % line
name = parse_string(m.group(2))
args_str = m.group(4).strip()
if args_str:
args = [RPCArgument(parse_string(x.strip()).split('|'), idx) for idx, x in enumerate(args_str.split(','))]
else:
args = []
cmds.append(RPCCommand(name, args))
assert not in_rpcs and cmds, "Something went wrong with parsing the C++ file: update the regexps"
return cmds
def process_mapping(fname):
"""Find and parse conversion table in implementation file `fname`."""
cmds = []
in_rpcs = False
with open(fname, "r", encoding="utf8") as f:
for line in f:
line = line.rstrip()
if not in_rpcs:
if line == 'static const CRPCConvertParam vRPCConvertParams[] =':
in_rpcs = True
else:
if line.startswith('};'):
in_rpcs = False
elif '{' in line and '"' in line:
m = re.search('{ *("[^"]*"), *([0-9]+) *, *("[^"]*") *},', line)
assert m, 'No match to table expression: %s' % line
name = parse_string(m.group(1))
idx = int(m.group(2))
argname = parse_string(m.group(3))
cmds.append((name, idx, argname))
assert not in_rpcs and cmds
return cmds
def main():
root = sys.argv[1]
# Get all commands from dispatch tables
cmds = []
for fname in SOURCES:
cmds += process_commands(os.path.join(root, fname))
cmds_by_name = {}
for cmd in cmds:
cmds_by_name[cmd.name] = cmd
# Get current convert mapping for client
client = SOURCE_CLIENT
mapping = set(process_mapping(os.path.join(root, client)))
print('* Checking consistency between dispatch tables and vRPCConvertParams')
# Check mapping consistency
errors = 0
for (cmdname, argidx, argname) in mapping:
try:
rargnames = cmds_by_name[cmdname].args[argidx].names
except IndexError:
print('ERROR: %s argument %i (named %s in vRPCConvertParams) is not defined in dispatch table' % (cmdname, argidx, argname))
errors += 1
continue
if argname not in rargnames:
print('ERROR: %s argument %i is named %s in vRPCConvertParams but %s in dispatch table' % (cmdname, argidx, argname, rargnames), file=sys.stderr)
errors += 1
# Check for conflicts in vRPCConvertParams conversion
# All aliases for an argument must either be present in the
# conversion table, or not. Anything in between means an oversight
# and some aliases won't work.
for cmd in cmds:
for arg in cmd.args:
convert = [((cmd.name, arg.idx, argname) in mapping) for argname in arg.names]
if any(convert) != all(convert):
print('ERROR: %s argument %s has conflicts in vRPCConvertParams conversion specifier %s' % (cmd.name, arg.names, convert))
errors += 1
arg.convert = all(convert)
# Check for conversion difference by argument name.
# It is preferable for API consistency that arguments with the same name
# have the same conversion, so bin by argument name.
all_methods_by_argname = defaultdict(list)
converts_by_argname = defaultdict(list)
for cmd in cmds:
for arg in cmd.args:
for argname in arg.names:
all_methods_by_argname[argname].append(cmd.name)
converts_by_argname[argname].append(arg.convert)
for argname, convert in converts_by_argname.items():
if all(convert) != any(convert):
if argname in IGNORE_DUMMY_ARGS:
# these are testing or dummy, don't warn for them
continue
print('WARNING: conversion mismatch for argument named %s (%s)' %
(argname, list(zip(all_methods_by_argname[argname], converts_by_argname[argname]))))
sys.exit(errors > 0)
if __name__ == '__main__':
main()<|fim▁end|> | # Source file (relative to root) containing conversion mapping |
<|file_name|>recordio.hpp<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef __COMMON_RECORDIO_HPP__
#define __COMMON_RECORDIO_HPP__
#include <queue>
#include <string>
#include <utility>
#include <mesos/mesos.hpp>
#include <process/defer.hpp>
#include <process/dispatch.hpp>
#include <process/http.hpp>
#include <process/loop.hpp>
#include <process/owned.hpp>
#include <process/pid.hpp>
#include <process/process.hpp>
#include <stout/lambda.hpp>
#include <stout/nothing.hpp>
#include <stout/recordio.hpp>
#include <stout/result.hpp>
namespace mesos {
namespace internal {
namespace recordio {
namespace internal {
template <typename T>
class ReaderProcess;
} // namespace internal {
/**
* Provides RecordIO decoding on top of an http::Pipe::Reader.
* The caller is responsible for closing the http::Pipe::Reader
* when a failure is encountered or end-of-file is reached.
*
* TODO(bmahler): Since we currently do not have a generalized
* abstraction in libprocess for "streams" of asynchronous data
* (e.g. process::Stream<T>), we have to create a one-off wrapper
* here. In the future, this would be better expressed as "piping"
* data from a stream of raw bytes into a decoder, which yields a
* stream of typed data.
*/
template <typename T>
class Reader
{
public:
// We spawn `ReaderProcess` as a managed process to guarantee
// that it does not wait on itself (this would cause a deadlock!).
// See comments in `Connection::Data` for further details.
Reader(::recordio::Decoder<T>&& decoder,
process::http::Pipe::Reader reader)
: process(process::spawn(
new internal::ReaderProcess<T>(std::move(decoder), reader),<|fim▁hole|> virtual ~Reader()
{
// Note that we pass 'false' here to avoid injecting the
// termination event at the front of the queue. This is
// to ensure we don't drop any queued request dispatches
// which would leave the caller with a future stuck in
// a pending state.
process::terminate(process, false);
}
/**
* Returns the next piece of decoded data from the pipe.
* Returns error if an individual record could not be decoded.
* Returns none when end-of-file is reached.
* Returns failure when the pipe or decoder has failed.
*/
process::Future<Result<T>> read()
{
return process::dispatch(process, &internal::ReaderProcess<T>::read);
}
private:
process::PID<internal::ReaderProcess<T>> process;
};
/**
* This is a helper function that reads records from a `Reader`, applies
* a transformation to the records and writes to the pipe.
*
* Returns a failed future if there are any errors reading or writing.
* The future is satisfied when we get a EOF.
*
* TODO(vinod): Split this method into primitives that can transform a
* stream of bytes to a stream of typed records that can be further transformed.
* See the TODO above in `Reader` for further details.
*/
template <typename T>
process::Future<Nothing> transform(
process::Owned<Reader<T>>&& reader,
const std::function<std::string(const T&)>& func,
process::http::Pipe::Writer writer)
{
return process::loop(
None(),
[=]() {
return reader->read();
},
[=](const Result<T>& record) mutable
-> process::Future<process::ControlFlow<Nothing>> {
// This could happen if EOF is sent by the writer.
if (record.isNone()) {
return process::Break();
}
// This could happen if there is a de-serialization error.
if (record.isError()) {
return process::Failure(record.error());
}
// TODO(vinod): Instead of detecting that the reader went away only
// after attempting a write, leverage `writer.readerClosed` future.
if (!writer.write(func(record.get()))) {
return process::Failure("Write failed to the pipe");
}
return process::Continue();
});
}
namespace internal {
template <typename T>
class ReaderProcess : public process::Process<ReaderProcess<T>>
{
public:
ReaderProcess(
::recordio::Decoder<T>&& _decoder,
process::http::Pipe::Reader _reader)
: process::ProcessBase(process::ID::generate("__reader__")),
decoder(_decoder),
reader(_reader),
done(false) {}
virtual ~ReaderProcess() {}
process::Future<Result<T>> read()
{
if (!records.empty()) {
Result<T> record = std::move(records.front());
records.pop();
return record;
}
if (error.isSome()) {
return process::Failure(error->message);
}
if (done) {
return None();
}
auto waiter = process::Owned<process::Promise<Result<T>>>(
new process::Promise<Result<T>>());
waiters.push(std::move(waiter));
return waiters.back()->future();
}
protected:
virtual void initialize() override
{
consume();
}
virtual void finalize() override
{
// Fail any remaining waiters.
fail("Reader is terminating");
}
private:
void fail(const std::string& message)
{
error = Error(message);
while (!waiters.empty()) {
waiters.front()->fail(message);
waiters.pop();
}
}
void complete()
{
done = true;
while (!waiters.empty()) {
waiters.front()->set(Result<T>::none());
waiters.pop();
}
}
using process::Process<ReaderProcess<T>>::consume;
void consume()
{
reader.read()
.onAny(process::defer(this, &ReaderProcess::_consume, lambda::_1));
}
void _consume(const process::Future<std::string>& read)
{
if (!read.isReady()) {
fail("Pipe::Reader failure: " +
(read.isFailed() ? read.failure() : "discarded"));
return;
}
// Have we reached EOF?
if (read->empty()) {
complete();
return;
}
Try<std::deque<Try<T>>> decode = decoder.decode(read.get());
if (decode.isError()) {
fail("Decoder failure: " + decode.error());
return;
}
foreach (const Try<T>& record, decode.get()) {
if (!waiters.empty()) {
waiters.front()->set(Result<T>(std::move(record)));
waiters.pop();
} else {
records.push(std::move(record));
}
}
consume();
}
::recordio::Decoder<T> decoder;
process::http::Pipe::Reader reader;
std::queue<process::Owned<process::Promise<Result<T>>>> waiters;
std::queue<Result<T>> records;
bool done;
Option<Error> error;
};
} // namespace internal {
} // namespace recordio {
} // namespace internal {
} // namespace mesos {
#endif // __COMMON_RECORDIO_HPP__<|fim▁end|> | true)) {}
|
<|file_name|>multi_settings_source_test.go<|end_file_name|><|fim▁begin|>package infrastructure_test
import (
"errors"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
. "github.com/cloudfoundry/bosh-agent/infrastructure"
fakeinf "github.com/cloudfoundry/bosh-agent/infrastructure/fakes"
boshsettings "github.com/cloudfoundry/bosh-agent/settings"
)
var _ = Describe("MultiSettingsSource", func() {
var (
source SettingsSource
)
Context("when there are no sources", func() {
It("returns an error when there are no sources", func() {
_, err := NewMultiSettingsSource([]SettingsSource{}...)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("MultiSettingsSource requires to have at least one source"))
})
})
Context("when there is at least one source", func() {
var (
source1 fakeinf.FakeSettingsSource
source2 fakeinf.FakeSettingsSource
)
BeforeEach(func() {
source1 = fakeinf.FakeSettingsSource{
PublicKey: "fake-public-key-1",
PublicKeyErr: errors.New("fake-public-key-err-1"),
SettingsValue: boshsettings.Settings{AgentID: "fake-settings-1"},
SettingsErr: errors.New("fake-settings-err-1"),
}
source2 = fakeinf.FakeSettingsSource{
PublicKey: "fake-public-key-2",
PublicKeyErr: errors.New("fake-public-key-err-2"),
SettingsValue: boshsettings.Settings{AgentID: "fake-settings-2"},
SettingsErr: errors.New("fake-settings-err-2"),
}
})
JustBeforeEach(func() {
var err error
source, err = NewMultiSettingsSource(source1, source2)
Expect(err).ToNot(HaveOccurred())
})
Describe("PublicSSHKeyForUsername", func() {
Context("when the first source returns public key", func() {<|fim▁hole|> It("returns public key and public key error from the first source", func() {
publicKey, err := source.PublicSSHKeyForUsername("fake-username")
Expect(err).ToNot(HaveOccurred())
Expect(publicKey).To(Equal("fake-public-key-1"))
})
})
Context("when the second source returns public key", func() {
BeforeEach(func() {
source2.PublicKeyErr = nil
})
It("returns public key from the second source", func() {
publicKey, err := source.PublicSSHKeyForUsername("fake-username")
Expect(err).ToNot(HaveOccurred())
Expect(publicKey).To(Equal("fake-public-key-2"))
})
})
Context("when both sources fail to get ssh key", func() {
It("returns error from the second source", func() {
_, err := source.PublicSSHKeyForUsername("fake-username")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("fake-public-key-err-2"))
})
})
})
Describe("Settings", func() {
Context("when the first source returns settings", func() {
BeforeEach(func() {
source1.SettingsErr = nil
})
It("returns settings from the first source", func() {
settings, err := source.Settings()
Expect(err).ToNot(HaveOccurred())
Expect(settings).To(Equal(boshsettings.Settings{AgentID: "fake-settings-1"}))
})
})
Context("when both sources do not have settings", func() {
It("returns error from the second source", func() {
_, err := source.Settings()
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("fake-settings-err-2"))
})
})
Context("when the second source returns settings", func() {
BeforeEach(func() {
source2.SettingsErr = nil
})
It("returns settings from the second source", func() {
settings, err := source.Settings()
Expect(err).ToNot(HaveOccurred())
Expect(settings).To(Equal(boshsettings.Settings{AgentID: "fake-settings-2"}))
})
})
})
})
})<|fim▁end|> | BeforeEach(func() {
source1.PublicKeyErr = nil
})
|
<|file_name|>hash.py<|end_file_name|><|fim▁begin|>'''
Copyright (C) 2016-2019 Vanessa Sochat.
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
from spython.main import Client
from singularity.logger import bot
from singularity.analysis.reproduce.criteria import *
from singularity.analysis.reproduce.levels import *
from singularity.analysis.reproduce.utils import (
get_image_tar,
extract_content,
delete_image_tar,
extract_guts
)
import datetime
import hashlib
import sys
import os
import io
import re
Client.quiet = True
def get_image_hashes(image_path, version=None, levels=None):
'''get_image_hashes returns the hash for an image across all levels. This is the quickest,
easiest way to define a container's reproducibility on each level.
'''
if levels is None:
levels = get_levels(version=version)
hashes = dict()
# use a cached object for all
file_obj, tar = get_image_tar(image_path)
for level_name, level_filter in levels.items():
hashes[level_name] = get_image_hash(image_path,
level_filter=level_filter,
file_obj=file_obj,
tar=tar)
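    # Clean up the cached tar: depending on how get_image_tar extracted the image,
    # file_obj may be an open file handle to close or a temporary path to remove.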
try:
file_obj.close()
except:
tar.close()
if os.path.exists(file_obj):
os.remove(file_obj)
return hashes
def get_image_hash(image_path,
level=None,
level_filter=None,
include_files=None,
skip_files=None,
version=None,
file_obj=None,
tar=None):
'''get_image_hash will generate a sha1 hash of an image, depending on a level
of reproducibility specified by the user. (see function get_levels for descriptions)
the user can also provide a level_filter manually with level_filter (for custom levels)
:param level: the level of reproducibility to use, which maps to a set regular
expression to match particular files/folders in the image. Choices are in notes.
:param skip_files: an optional list of files to skip
:param include_files: an optional list of files to keep (only if level not defined)
:param version: the version to use. If not defined, default is 2.3
::notes
LEVEL DEFINITIONS
The level definitions come down to including folders/files in the comparison. For files
that Singularity produces on the fly that might be different (timestamps) but equal content
(eg for a replication) we hash the content ("assess_content") instead of the file.
'''
# First get a level dictionary, with description and regexp
if level_filter is not None:
file_filter = level_filter
elif level is None:
file_filter = get_level("RECIPE",
version=version,
include_files=include_files,
skip_files=skip_files)
<|fim▁hole|> skip_files=skip_files,
include_files=include_files)
close = False
if file_obj is None and tar is None:
file_obj, tar = get_image_tar(image_path)
close = True
hasher = hashlib.md5()
for member in tar:
member_name = member.name.replace('.','',1)
# For files, we either assess content, or include the file
if member.isdir() or member.issym():
continue
elif assess_content(member, file_filter):
content = extract_content(image_path, member.name)
hasher.update(content)
elif include_file(member.name, file_filter):
buf = member.tobuf()
hasher.update(buf)
digest = hasher.hexdigest()
# Close up / remove files
if close is True:
try:
file_obj.close()
except:
tar.close()
if os.path.exists(file_obj):
os.remove(file_obj)
return digest
def get_content_hashes(image_path,
level=None,
regexp=None,
include_files=None,
tag_root=True,
level_filter=None,
skip_files=None,
version=None,
include_sizes=True):
'''get_content_hashes is like get_image_hash, but it returns a complete dictionary
of file names (keys) and their respective hashes (values). This function is intended
for more research purposes and was used to generate the levels in the first place.
If include_sizes is True, we include a second data structure with sizes
'''
if level_filter is not None:
file_filter = level_filter
elif level is None:
file_filter = get_level("REPLICATE", version=version,
skip_files=skip_files,
include_files=include_files)
else:
file_filter = get_level(level,version=version,
skip_files=skip_files,
include_files=include_files)
results = extract_guts(image_path=image_path,
file_filter=file_filter,
tag_root=tag_root,
include_sizes=include_sizes)
return results
def get_image_file_hash(image_path):
    '''get_image_file_hash will return an md5 hash of the image file itself
    (the raw bytes on disk, independent of any reproducibility level).
    :param image_path: full path to the singularity image
    '''
'''
hasher = hashlib.md5()
with open(image_path, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hasher.update(chunk)
return hasher.hexdigest()<|fim▁end|> | else:
file_filter = get_level(level,version=version, |
<|file_name|>PageStoreSpec.js<|end_file_name|><|fim▁begin|>describe('paging', function(){
var welcomeHolder = app.PageStore.welcome;
var roomsHolder = app.PageStore.rooms;
beforeEach(function(){
app.PageStore.welcome = welcomeHolder;
app.PageStore.rooms = roomsHolder;
});
it('should have a welcome route that invokes a callback', function(){
var counter = 0;
app.PageStore.welcome = function(){counter++;};
app.PageActions.navigate({
dest: 'welcome'
});
expect(counter).to.equal(1);
});
it('should have a rooms route that passes the roomId to a callback', function(){
var id;
app.PageStore.rooms = function(roomId){id = roomId;};
app.PageActions.navigate({
dest: 'rooms',
props: '0'
});<|fim▁hole|> expect(id).to.equal('0');
});
it('should emit events when routing', function(){
var callcount = 0;
var callback = function(){callcount++;};
app.PageStore.addChangeListener(callback);
app.PageActions.navigate({
dest: 'welcome'
});
expect(callcount).to.equal(1);
app.PageActions.navigate({
dest: 'rooms'
});
expect(callcount).to.equal(2);
app.PageStore.removeChangeListener(callback);
});
});<|fim▁end|> | |
<|file_name|>DMF.py<|end_file_name|><|fim▁begin|>#coding:utf8
from baseclass.DeepRecommender import DeepRecommender
import numpy as np
from random import choice,random,randint,shuffle
from tool import config
import tensorflow as tf
#According to the paper, we only
class DMF(DeepRecommender):
def __init__(self,conf,trainingSet=None,testSet=None,fold='[1]'):
super(DMF, self).__init__(conf,trainingSet,testSet,fold)
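    # Builds one training batch: the first batch_size rows are observed (user, item, rating)
    # pairs; the remaining negative_sp * batch_size rows are sampled unobserved pairs with
    # rating 0. `rows` holds each user's interaction vector and `cols` each item's.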
def next_batch(self,i):
rows = np.zeros(((self.negative_sp+1)*self.batch_size,self.num_items))
cols = np.zeros(((self.negative_sp+1)*self.batch_size,self.num_users))
batch_idx = range(self.batch_size*i,self.batch_size*(i+1))
users = [self.data.trainingData[idx][0] for idx in batch_idx]
items = [self.data.trainingData[idx][1] for idx in batch_idx]
u_idx = [self.data.user[u] for u in users]
v_idx = [self.data.item[i] for i in items]
ratings = [float(self.data.trainingData[idx][2]) for idx in batch_idx]
for i,user in enumerate(users):
rows[i] = self.data.row(user)
for i,item in enumerate(items):
cols[i] = self.data.col(item)<|fim▁hole|> itemList = self.data.item.keys()
#negative sample
for i in range(self.negative_sp*self.batch_size):
u = choice(userList)
v = choice(itemList)
while self.data.contains(u,v):
u = choice(userList)
v = choice(itemList)
            rows[self.batch_size+i]=self.data.row(u)
            cols[self.batch_size+i]=self.data.col(v)
u_idx.append(self.data.user[u])
v_idx.append(self.data.item[v])
ratings.append(0)
return rows,cols,np.array(ratings),np.array(u_idx),np.array(v_idx)
def initModel(self):
super(DMF, self).initModel()
n_input_u = len(self.data.item)
n_input_i = len(self.data.user)
self.negative_sp = 5
self.n_hidden_u=[256,512]
self.n_hidden_i=[256,512]
        self.input_u = tf.placeholder(tf.float32, [None, n_input_u])
        self.input_i = tf.placeholder(tf.float32, [None, n_input_i])
def buildModel(self):
super(DMF, self).buildModel_tf()
initializer = tf.contrib.layers.xavier_initializer()
#user net
        user_W1 = tf.Variable(initializer([self.num_items, self.n_hidden_u[0]]))
self.user_out = tf.nn.relu(tf.matmul(self.input_u, user_W1))
self.regLoss = tf.nn.l2_loss(user_W1)
for i in range(1, len(self.n_hidden_u)):
            W = tf.Variable(initializer([self.n_hidden_u[i-1], self.n_hidden_u[i]]))
            b = tf.Variable(initializer([self.n_hidden_u[i]]))
self.regLoss = tf.add(self.regLoss,tf.nn.l2_loss(W))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(b))
self.user_out = tf.nn.relu(tf.add(tf.matmul(self.user_out, W), b))
#item net
        item_W1 = tf.Variable(initializer([self.num_users, self.n_hidden_i[0]]))
self.item_out = tf.nn.relu(tf.matmul(self.input_i, item_W1))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(item_W1))
for i in range(1, len(self.n_hidden_i)):
            W = tf.Variable(initializer([self.n_hidden_i[i-1], self.n_hidden_i[i]]))
            b = tf.Variable(initializer([self.n_hidden_i[i]]))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(W))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(b))
self.item_out = tf.nn.relu(tf.add(tf.matmul(self.item_out, W), b))
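        # Predicted score: cosine similarity between the learned user and item
        # representations, floored at 1e-6 so the log terms in the loss stay finite.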
norm_user_output = tf.sqrt(tf.reduce_sum(tf.square(self.user_out), axis=1))
norm_item_output = tf.sqrt(tf.reduce_sum(tf.square(self.item_out), axis=1))
self.y_ = tf.reduce_sum(tf.multiply(self.user_out, self.item_out), axis=1) / (
norm_item_output * norm_user_output)
self.y_ = tf.maximum(1e-6, self.y_)
self.loss = self.r*tf.log(self.y_) + (1 - self.r) * tf.log(1 - self.y_)#tf.nn.sigmoid_cross_entropy_with_logits(logits=self.y_,labels=self.r)
#self.loss = tf.nn.l2_loss(tf.subtract(self.y_,self.r))
self.loss = -tf.reduce_sum(self.loss)
reg_lambda = tf.constant(self.regU, dtype=tf.float32)
self.regLoss = tf.multiply(reg_lambda,self.regLoss)
self.loss = tf.add(self.loss,self.regLoss)
optimizer = tf.train.AdamOptimizer(self.lRate).minimize(self.loss)
self.U = np.zeros((self.num_users, self.n_hidden_u[-1]))
self.V = np.zeros((self.num_items, self.n_hidden_u[-1]))
init = tf.global_variables_initializer()
self.sess.run(init)
total_batch = int(len(self.data.trainingData)/ self.batch_size)
for epoch in range(self.maxIter):
shuffle(self.data.trainingData)
for i in range(total_batch):
users,items,ratings,u_idx,v_idx = self.next_batch(i)
shuffle_idx=np.random.permutation(range(len(users)))
users = users[shuffle_idx]
items = items[shuffle_idx]
ratings = ratings[shuffle_idx]
u_idx = u_idx[shuffle_idx]
v_idx = v_idx[shuffle_idx]
_,loss= self.sess.run([optimizer, self.loss], feed_dict={self.input_u: users,self.input_i:items,self.r:ratings})
print self.foldInfo, "Epoch:", '%04d' % (epoch + 1), "Batch:", '%03d' % (i + 1), "loss=", "{:.9f}".format(loss)
#save the output layer
U_embedding, V_embedding = self.sess.run([self.user_out, self.item_out], feed_dict={self.input_u: users,self.input_i:items})
for ue,u in zip(U_embedding,u_idx):
self.U[u]=ue
for ve,v in zip(V_embedding,v_idx):
self.V[v]=ve
self.normalized_V = np.sqrt(np.sum(self.V * self.V, axis=1))
self.normalized_U = np.sqrt(np.sum(self.U * self.U, axis=1))
self.ranking_performance()
print("Optimization Finished!")
def predictForRanking(self, u):
'invoked to rank all the items for the user'
if self.data.containsUser(u):
uid = self.data.user[u]
return np.divide(self.V.dot(self.U[uid]),self.normalized_U[uid]*self.normalized_V)
else:
return [self.data.globalMean] * self.num_items<|fim▁end|> |
userList = self.data.user.keys() |
<|file_name|>confirm_donation.js<|end_file_name|><|fim▁begin|>angular.module('bhima.controllers')
.controller('ConfirmDonationController', ConfirmDonationController);
ConfirmDonationController.$inject = [
'$scope', '$q', '$http', 'validate', 'connect', '$location', 'uuid', 'SessionService'
];
function ConfirmDonationController($scope, $q, $http, validate, connect, $location, uuid, Session) {
var vm = this,
dependencies = {},
session = $scope.session = {};
dependencies.donations = {
query : {
identifier : 'uuid',
tables : {
donations : {columns : ['uuid', 'date', 'is_received', 'confirmed_by']},
donor : {columns : ['id', 'name']},
employee : {columns : ['prenom', 'name::nom_employee', 'postnom']}
},
join : ['donor.id=donations.donor_id', 'donations.employee_id=employee.id'],
where : ['donations.is_received=1', 'AND', 'donations.is_confirmed=0']
}
};
$scope.project = Session.project;
$scope.user = Session.user;
function initialise(model) {
angular.extend($scope, model);
}
function confirmDonation(donationId) {
session.selected = $scope.donations.get(donationId);
loadDetails(donationId);
}
function loadDetails(donationId) {
dependencies.donationDetails = {
query : {
identifier : 'inventory_uuid',
tables : {
donations : {columns : ['uuid', 'donor_id', 'employee_id', 'date', 'is_received']},
donation_item : {columns : ['uuid::donationItemUuid']},
stock : {columns : ['inventory_uuid', 'tracking_number', 'purchase_order_uuid', 'quantity::stockQuantity', 'lot_number', 'entry_date']},
purchase : {columns : ['uuid::purchaseUuid', 'cost', 'currency_id', 'note']},
purchase_item : {columns : ['uuid::purchaseItemUuid', 'unit_price', 'quantity']}
},
join : [
'donations.uuid=donation_item.donation_uuid',
'donation_item.tracking_number=stock.tracking_number',
'stock.purchase_order_uuid=purchase.uuid',
'stock.inventory_uuid=purchase_item.inventory_uuid',
'purchase.uuid=purchase_item.purchase_uuid',
],
where : ['donations.uuid=' + donationId]
}
};
validate.refresh(dependencies, ['donationDetails'])
.then(initialise);
}
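  // confirmReception: post the donation to the journal, mark the donation as
  // confirmed, open the receipt/invoice view, and finally clear the selection.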
function confirmReception() {
writeToJournal()
.then(updateDonation)
.then(generateDocument)
.then(resetSelected)
.catch(handleError);
}
function updatePurchase () {
var purchase = {
uuid : session.selected.uuid,
confirmed : 1,
confirmed_by : $scope.user.id,
paid : 1
};
return connect.put('purchase', [purchase], ['uuid']);
}
function updateDonation () {
var donation = {
uuid : session.selected.uuid,
is_confirmed : 1,
confirmed_by : $scope.user.id
};
return connect.put('donations', [donation], ['uuid']);
}
function writeToJournal() {
var document_id = uuid();
var synthese = [];
// Distinct inventory
var unique = {};
var distinctInventory = [];
$scope.donationDetails.data.forEach(function (x) {
if (!unique[x.inventory_uuid]) {
distinctInventory.push(x);
unique[x.inventory_uuid] = true;
}
});
// End Distinct inventory
// Grouping by lot
var inventoryByLot = [];
distinctInventory.forEach(function (x) {
var lot = [];
lot = $scope.donationDetails.data.filter(function (item) {
return item.inventory_uuid === x.inventory_uuid;
});
inventoryByLot.push({
inventory_uuid : x.inventory_uuid,
purchase_price : x.unit_price,
currency_id : x.currency_id,
quantity : x.quantity,
lots : lot
});
});
// End Grouping by lot
inventoryByLot.forEach(function (item) {
var donation = { uuid : item.lots[0].uuid },
inventory_lots = [];
item.lots.forEach(function (lot) {
inventory_lots.push(lot.tracking_number);
});
synthese.push({
movement : { document_id : document_id },
inventory_uuid : item.inventory_uuid,
donation : donation,
tracking_numbers : inventory_lots,
quantity : item.quantity,
purchase_price : item.purchase_price,
currency_id : item.currency_id,
project_id : $scope.project.id
});
});
return $q.all(synthese.map(function (postingEntry) {
      // NOTE: the stock account (class 3) is debited and the donation account (?) is credited
      // GOAL: write each inventory item of the donation to the journal as its own transaction
return $http.post('posting_donation/', postingEntry);
}));
}
function paymentSuccess(result) {<|fim▁hole|> var purchase = {
uuid : session.selected.uuid,
paid : 1
};
return connect.put('purchase', [purchase], ['uuid']);
}
function generateDocument(res) {
$location.path('/invoice/confirm_donation/' + session.selected.uuid);
}
function handleError(error) {
console.log(error);
}
function resetSelected() {
session.selected = null;
validate.refresh(dependencies, ['donations'])
.then(initialise);
}
$scope.confirmDonation = confirmDonation;
$scope.confirmReception = confirmReception;
$scope.resetSelected = resetSelected;
// start the module up
validate.process(dependencies)
.then(initialise);
}<|fim▁end|> | |
<|file_name|>test_deconvolution.cpp<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright 2018 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
#include "mkldnn_test_common.hpp"
#include "gtest/gtest.h"
#include "mkldnn.hpp"
#include "mkldnn_debug.h"
namespace mkldnn {
using fmt = memory::format;
struct deconvolution_test_params {
const mkldnn::engine::kind engine_kind;
mkldnn::algorithm aalgorithm;
test_convolution_formats_t formats;
test_convolution_attr_t attr;
test_convolution_sizes_t sizes;
bool expect_to_fail;
mkldnn_status_t expected_status;
};
template <typename data_t>
void compute_bias_fwd(const test_convolution_sizes_t &c,
mkldnn::memory& dst, mkldnn::memory& bias) {
data_t *bias_data = (data_t *)bias.get_data_handle();
data_t *dst_data = (data_t *)dst.get_data_handle();
const memory::desc bias_d = bias.get_primitive_desc().desc();
const memory::desc dst_d = dst.get_primitive_desc().desc();
mkldnn::impl::parallel_nd(c.mb, c.ng, c.oc / c.ng, c.oh, c.ow,
[&](int n, int g, int oc, int oh, int ow) {
data_t b = bias_data[map_index(bias_d, g * c.oc / c.ng + oc)];
int oidx = n * c.oc * c.oh * c.ow
+ g * c.oc / c.ng * c.oh * c.ow
+ oc * c.oh * c.ow + oh * c.ow + ow;
dst_data[map_index(dst_d, oidx)] += b;
}
);
}
template <typename data_t>
void compute_bias_bwd(const test_convolution_sizes_t &c,
mkldnn::memory& dst, mkldnn::memory& bias) {
data_t *bias_data = (data_t *)bias.get_data_handle();
data_t *dst_data = (data_t *)dst.get_data_handle();
const memory::desc bias_d = bias.get_primitive_desc().desc();
const memory::desc dst_d = dst.get_primitive_desc().desc();
mkldnn::impl::parallel_nd(c.ng, c.oc / c.ng, [&](int g, int oc) {
int bidx = g * c.oc / c.ng + oc;
bias_data[map_index(bias_d, bidx)] = 0.0;
for (int mb = 0; mb < c.mb; ++mb) {
for (int oh = 0; oh < c.oh; ++oh) {
for (int ow = 0; ow < c.ow; ++ow) {
int oidx = mb * c.oc * c.oh * c.ow
+ g * c.oc / c.ng * c.oh * c.ow
+ oc * c.oh * c.ow + oh * c.ow + ow;
bias_data[map_index(bias_d, bidx)]
+= dst_data[map_index(dst_d, oidx)];
}
}
}
});
}
template <typename data_t>
void transpose_wei(const test_convolution_sizes_t &c,
mkldnn::memory& weights, mkldnn::memory& weights_tr) {
data_t *weights_data = (data_t *)weights.get_data_handle();
const memory::desc weights_d = weights.get_primitive_desc().desc();
data_t *weights_tr_data = (data_t *)weights_tr.get_data_handle();
const memory::desc weights_tr_d = weights_tr.get_primitive_desc().desc();
mkldnn::impl::parallel_nd(c.ng, c.oc / c.ng, c.ic / c.ng, c.kh, c.kw,
[&](int g, int oc, int ic, int kh, int kw) {
int widx = g * c.oc / c.ng * c.ic / c.ng * c.kh * c.kw
+ oc * c.ic / c.ng * c.kh * c.kw
+ ic * c.kh * c.kw + kh * c.kw + kw;
int widx_tr = g * c.oc / c.ng * c.ic / c.ng * c.kh * c.kw
+ ic * c.oc / c.ng * c.kh * c.kw
+ oc * c.kh * c.kw + kh * c.kw + kw;
weights_tr_data[map_index(weights_tr_d, widx_tr)]
= weights_data[map_index(weights_d, widx)];
}
);
}
template <typename data_t>
class deconvolution_test : public
::testing::TestWithParam<deconvolution_test_params> {
private:
std::shared_ptr<test_memory> src;
std::shared_ptr<test_memory> weights;
std::shared_ptr<test_memory> dst;
std::shared_ptr<test_memory> bias;
std::shared_ptr<memory::desc> dec_src_desc;
std::shared_ptr<memory::desc> dec_weights_desc;
std::shared_ptr<memory::desc> dec_bias_desc;
std::shared_ptr<memory::desc> dec_dst_desc;
std::shared_ptr<memory::desc> con_src_desc;
std::shared_ptr<memory::desc> con_bias_desc;
std::shared_ptr<memory::desc> con_dst_desc;
std::shared_ptr<memory::desc> con_weights_desc;
std::shared_ptr<engine> eng;
bool with_bias;
std::vector<int> padR;
protected:
virtual void SetUp() {
auto p = ::testing::TestWithParam<deconvolution_test_params>::GetParam();
catch_expected_failures([=](){Test();}, p.expect_to_fail,
p.expected_status);
}
void Test() {
auto p = ::testing::TestWithParam<deconvolution_test_params>::GetParam();
ASSERT_TRUE(p.engine_kind == engine::kind::cpu);
eng.reset(new engine(p.engine_kind, 0));
ASSERT_EQ(p.aalgorithm, algorithm::deconvolution_direct);
memory::data_type data_type = data_traits<data_t>::data_type;
test_convolution_sizes_t dd = p.sizes;
p.formats.bias_format = memory::format::format_undef;
with_bias = p.formats.bias_format != memory::format::format_undef;
memory::dims src_dims = {dd.mb, dd.ic, dd.ih, dd.iw};
memory::dims dst_dims = {dd.mb, dd.oc, dd.oh, dd.ow};
memory::dims weights_dims, c_weights_dims;
if (dd.ng > 1) {
weights_dims = { dd.ng, dd.oc / dd.ng, dd.ic / dd.ng, dd.kh, dd.kw };
c_weights_dims = { dd.ng, dd.ic / dd.ng, dd.oc / dd.ng, dd.kh, dd.kw };
} else {
weights_dims = { dd.oc, dd.ic, dd.kh, dd.kw };
c_weights_dims = { dd.ic, dd.oc, dd.kh, dd.kw };
}
memory::dims bias_dims;
if (with_bias) bias_dims = {dd.oc};
else bias_dims = {};
dec_src_desc.reset(new memory::desc(src_dims, data_type,
p.formats.src_format));
dec_dst_desc.reset(new memory::desc(dst_dims, data_type,
p.formats.src_format));
dec_weights_desc.reset(new memory::desc(weights_dims, data_type,
p.formats.weights_format));
dec_bias_desc.reset(new memory::desc(bias_dims, data_type,
p.formats.bias_format));
con_src_desc.reset(new memory::desc(dst_dims, data_type,
p.formats.src_format));
con_dst_desc.reset(new memory::desc(src_dims, data_type,
p.formats.src_format));
con_weights_desc.reset(new memory::desc(c_weights_dims, data_type,
p.formats.weights_format));
src.reset(new test_memory(*dec_src_desc, *eng));
weights.reset(new test_memory(*dec_weights_desc, *eng));
bias.reset(new test_memory(*dec_bias_desc, *eng));
dst.reset(new test_memory(*dec_dst_desc, *eng));
padR = {
right_padding(dd.oh, dd.ih, dd.kh, dd.padh, dd.strh, dd.dilh),
right_padding(dd.ow, dd.iw, dd.kw, dd.padw, dd.strw, dd.dilw)
};
Forward();
BackwardData();
BackwardWeights();
}
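    // Verification strategy: deconvolution is checked against the existing
    // convolution primitives by swapping the source/destination roles and using
    // transposed weights, e.g. the deconvolution forward pass is compared with a
    // convolution backward-data pass, so both paths must produce matching data.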
void Forward() {
auto aprop_kind = prop_kind::forward;
deconvolution_test_params p =
::testing::TestWithParam<deconvolution_test_params>::GetParam();
auto conv_src = test_memory(*con_src_desc, *eng);
auto conv_dst = src;
test_convolution_sizes_t dd = p.sizes;
fill_data<data_t>(src->get_size() / sizeof(data_t),
(data_t *)src->get().get_data_handle());
fill_data<data_t>(weights->get_size() / sizeof(data_t),
(data_t *)weights->get().get_data_handle());
if (with_bias) {
fill_data<data_t>(bias->get_size() / sizeof(data_t),
(data_t *)bias->get().get_data_handle());
}
auto weights_tr = memory({*con_weights_desc, *eng});
transpose_wei<data_t>(dd, weights->get(), weights_tr);
auto deconv_desc = with_bias ?
deconvolution_forward::desc(aprop_kind,
algorithm::deconvolution_direct, *dec_src_desc,
*dec_weights_desc, *dec_bias_desc, *dec_dst_desc,
{ dd.strh, dd.strw }, { dd.padh, dd.padw }, padR,
padding_kind::zero) :
deconvolution_forward::desc(aprop_kind,
algorithm::deconvolution_direct, *dec_src_desc,
*dec_weights_desc, *dec_dst_desc, { dd.strh, dd.strw },
{ dd.padh, dd.padw }, padR, padding_kind::zero);
auto deconv_primitive_desc = deconvolution_forward::primitive_desc(
deconv_desc, *eng);
auto deconv = with_bias ?
deconvolution_forward(deconv_primitive_desc, src->get(),
weights->get(), bias->get(), dst->get()) :
deconvolution_forward(deconv_primitive_desc, src->get(),
weights->get(), dst->get());
auto conv_desc = convolution_forward::desc(
prop_kind::forward_training, algorithm::convolution_direct,
*con_src_desc, *con_weights_desc, *con_dst_desc,
{ dd.strh, dd.strw }, { dd.padh, dd.padw }, padR,
padding_kind::zero);
auto conv_primitive_desc = convolution_forward::primitive_desc(
conv_desc, *eng);
auto conv_bwd_data_desc = convolution_backward_data::desc(
algorithm::convolution_direct, *con_src_desc,
*con_weights_desc, *con_dst_desc,
{ dd.strh, dd.strw }, { dd.padh, dd.padw }, padR,
padding_kind::zero);
auto conv_bwd_data_primitive_desc
= convolution_backward_data::primitive_desc(
conv_bwd_data_desc, *eng, conv_primitive_desc);
auto conv_bwd_data = convolution_backward_data(
conv_bwd_data_primitive_desc,
conv_dst->get(), weights_tr, conv_src.get());
std::vector<primitive> pipeline;
pipeline.push_back(deconv);
pipeline.push_back(conv_bwd_data);
stream(stream::kind::lazy).submit(pipeline).wait();
if(with_bias) compute_bias_fwd<data_t>(dd, conv_src.get(), bias->get());
compare_data<data_t>(conv_src.get(), dst->get());
}
void BackwardData() {
auto p = ::testing::TestWithParam<deconvolution_test_params>::GetParam();
auto conv_src = dst;
auto conv_dst = test_memory(*con_dst_desc, *eng);
test_convolution_sizes_t dd = p.sizes;
fill_data<data_t>(weights->get_size() / sizeof(data_t),
(data_t *)weights->get().get_data_handle());
fill_data<data_t>(dst->get_size() / sizeof(data_t),
(data_t *)dst->get().get_data_handle());
auto weights_tr = memory({*con_weights_desc, *eng});
transpose_wei<data_t>(dd, weights->get(), weights_tr);
auto deconv_desc = deconvolution_forward::desc(prop_kind::forward_training,
algorithm::deconvolution_direct, *dec_src_desc,
*dec_weights_desc, *dec_dst_desc, { dd.strh, dd.strw },
{ dd.padh, dd.padw }, padR, padding_kind::zero);
auto deconv_primitive_desc = deconvolution_forward::primitive_desc(
deconv_desc, *eng);
auto deconv_bwd_data_desc = deconvolution_backward_data::desc(
algorithm::deconvolution_direct, *dec_src_desc,
*dec_weights_desc, *dec_dst_desc,
{ dd.strh, dd.strw }, { dd.padh, dd.padw }, padR,
padding_kind::zero);
auto deconv_bwd_data_primitive_desc
= deconvolution_backward_data::primitive_desc(
deconv_bwd_data_desc, *eng, deconv_primitive_desc);
auto deconv_bwd_data = deconvolution_backward_data(
deconv_bwd_data_primitive_desc, dst->get(), weights->get(),
src->get());
auto conv_desc = convolution_forward::desc(
prop_kind::forward_training, algorithm::convolution_direct,
*con_src_desc, *con_weights_desc, *con_dst_desc,
{ dd.strh, dd.strw }, { dd.padh, dd.padw }, padR,
padding_kind::zero);
auto conv_primitive_desc = convolution_forward::primitive_desc(<|fim▁hole|>
std::vector<primitive> pipeline;
pipeline.push_back(deconv_bwd_data);
pipeline.push_back(conv);
stream(stream::kind::lazy).submit(pipeline).wait();
compare_data<data_t>(conv_dst.get(), src->get());
}
void BackwardWeights() {
auto p = ::testing::TestWithParam<deconvolution_test_params>::GetParam();
auto conv_src = dst;
auto conv_dst = src;
auto conv_weights = memory({*con_weights_desc, *eng});
test_convolution_sizes_t dd = p.sizes;
fill_data<data_t>(src->get_size() / sizeof(data_t),
(data_t *)src->get().get_data_handle());
fill_data<data_t>(dst->get_size() / sizeof(data_t),
(data_t *)dst->get().get_data_handle());
auto deconv_desc = deconvolution_forward::desc(prop_kind::forward_training,
algorithm::deconvolution_direct, *dec_src_desc,
*dec_weights_desc, *dec_bias_desc, *dec_dst_desc,
{ dd.strh, dd.strw }, { dd.padh, dd.padw }, padR, padding_kind::zero);
auto deconv_primitive_desc = deconvolution_forward::primitive_desc(
deconv_desc, *eng);
auto deconv_bwd_weights_desc = deconvolution_backward_weights::desc(
algorithm::deconvolution_direct, *dec_src_desc,
*dec_weights_desc, *dec_bias_desc, *dec_dst_desc,
{ dd.strh, dd.strw }, { dd.padh, dd.padw }, padR,
padding_kind::zero);
auto deconv_bwd_weights_primitive_desc
= deconvolution_backward_weights::primitive_desc(
deconv_bwd_weights_desc, *eng, deconv_primitive_desc);
auto deconv_bwd_weights = deconvolution_backward_weights(
deconv_bwd_weights_primitive_desc, src->get(), dst->get(),
weights->get(), bias->get());
auto conv_desc = convolution_forward::desc(
prop_kind::forward_training, algorithm::convolution_direct,
*con_src_desc, *con_weights_desc, *con_dst_desc,
{ dd.strh, dd.strw }, { dd.padh, dd.padw }, padR,
padding_kind::zero);
auto conv_primitive_desc = convolution_forward::primitive_desc(
conv_desc, *eng);
auto conv_bwd_weights_desc = convolution_backward_weights::desc(
algorithm::convolution_direct, *con_src_desc, *con_weights_desc,
*con_dst_desc, { dd.strh, dd.strw }, { dd.padh, dd.padw },
padR, padding_kind::zero);
auto conv_bwd_weights_primitive_desc =
convolution_backward_weights::primitive_desc(
conv_bwd_weights_desc, *eng, conv_primitive_desc);
auto conv_bwd_weights =
convolution_backward_weights(conv_bwd_weights_primitive_desc,
conv_src->get(), conv_dst->get(), conv_weights);
std::vector<primitive> pipeline;
pipeline.push_back(conv_bwd_weights);
pipeline.push_back(deconv_bwd_weights);
stream(stream::kind::lazy).submit(pipeline).wait();
auto weights_tr = memory({*con_weights_desc, *eng});
transpose_wei<data_t>(dd, weights->get(), weights_tr);
compare_data<data_t>(weights_tr, conv_weights);
if (with_bias) {
auto ref_bias = memory({*dec_bias_desc, *eng});
compute_bias_bwd<data_t>(dd, dst->get(), ref_bias);
compare_data<data_t>(ref_bias, bias->get());
}
}
};
using deconvolution_test_float = deconvolution_test<float>;
TEST_P(deconvolution_test_float, TestDeconvolution)
{
}
#define EXPAND_FORMATS(src, weights, bias, dst) \
{ mkldnn::memory::format::src, mkldnn::memory::format::weights, \
mkldnn::memory::format::bias, mkldnn::memory::format::dst }
#define ENGINE engine::kind::cpu
#define ALGORITHM mkldnn::deconvolution_direct
#define PARAMS(src, weights, bias, dst, ...) \
deconvolution_test_params { ENGINE, ALGORITHM, \
EXPAND_FORMATS(src, weights, bias, dst), {}, \
{__VA_ARGS__} }
#define INST_TEST_CASE(str, ...) INSTANTIATE_TEST_CASE_P( \
str, deconvolution_test_float, ::testing::Values(__VA_ARGS__))
#define FMT_BIAS x
#define FMT_DATA_BLOCKED nChw8c
#define FMT_WEIGHTS_BLOCKED Ohwi8o
INST_TEST_CASE(SimpleSmall_NCHW,
PARAMS(nchw, oihw, x, nchw,
2, 1, 6, 4, 4, 4, 4, 4, 3, 3, 1, 1, 1, 1),
PARAMS(nchw, oihw, x, nchw,
2, 1, 6, 2, 2, 4, 4, 4, 3, 3, 0, 0, 1, 1),
PARAMS(nhwc, oihw, x, nhwc,
2, 1, 6, 2, 2, 4, 4, 4, 3, 3, 0, 0, 1, 1),
PARAMS(nhwc, hwio, x, nhwc,
2, 1, 6, 4, 4, 4, 4, 4, 3, 3, 1, 1, 1, 1),
PARAMS(nhwc, hwio, x, nhwc,
2, 1, 6, 2, 2, 4, 4, 4, 3, 3, 0, 0, 1, 1),
PARAMS(nhwc, goihw, x, nhwc,
2, 2, 6, 4, 4, 4, 4, 4, 3, 3, 0, 0, 1, 1),
PARAMS(nhwc, hwigo, x, nhwc,
2, 2, 6, 4, 4, 4, 4, 4, 3, 3, 1, 1, 1, 1)
);
INST_TEST_CASE(SimpleSmall_Blocked,
PARAMS(FMT_DATA_BLOCKED, FMT_WEIGHTS_BLOCKED, FMT_BIAS, FMT_DATA_BLOCKED,
2, 1, 32, 12, 12, 32, 13, 13, 3, 3, 0, 0, 1, 1),
PARAMS(FMT_DATA_BLOCKED, FMT_WEIGHTS_BLOCKED, FMT_BIAS, FMT_DATA_BLOCKED,
2, 1, 32, 4, 4, 32, 3, 3, 3, 3, 1, 1, 1, 1),
PARAMS(FMT_DATA_BLOCKED, FMT_WEIGHTS_BLOCKED, FMT_BIAS, FMT_DATA_BLOCKED,
2, 1, 32, 4, 4, 32, 4, 4, 3, 3, 0, 0, 1, 1),
PARAMS(FMT_DATA_BLOCKED, FMT_WEIGHTS_BLOCKED, FMT_BIAS, FMT_DATA_BLOCKED,
2, 1, 32, 2, 2, 32, 3, 3, 3, 3, 0, 0, 1, 1),
PARAMS(FMT_DATA_BLOCKED, FMT_WEIGHTS_BLOCKED, FMT_BIAS, FMT_DATA_BLOCKED,
2, 1, 32, 2, 2, 32, 2, 2, 3, 3, 1, 1, 1, 1),
PARAMS(FMT_DATA_BLOCKED, FMT_WEIGHTS_BLOCKED, FMT_BIAS, FMT_DATA_BLOCKED,
2, 1, 48, 13, 13, 32, 13, 13, 3, 3, 1, 1, 1, 1),
PARAMS(FMT_DATA_BLOCKED, FMT_WEIGHTS_BLOCKED, FMT_BIAS, FMT_DATA_BLOCKED,
2, 1, 48, 11, 11, 32, 13, 13, 3, 3, 0, 0, 1, 1)
);
}<|fim▁end|> | conv_desc, *eng);
auto conv = convolution_forward(conv_primitive_desc, conv_src->get(),
weights_tr, conv_dst.get()); |
<|file_name|>urlPathHttpMatcher_test.go<|end_file_name|><|fim▁begin|>//Copyright (C) 2015 dhrapson
//This program is free software: you can redistribute it and/or modify
//it under the terms of the GNU General Public License as published by
//the Free Software Foundation, either version 3 of the License, or
//(at your option) any later version.
//This program is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//GNU General Public License for more details.
//You should have received a copy of the GNU General Public License
//along with this program. If not, see <http://www.gnu.org/licenses/>.
package configure_test
import (
. "github.com/dhrapson/resembleio/configure"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("UrlPathHttpMatcher", func() {
var (
path_regex string
matcher UrlPathHttpMatcher
err error
)
Describe("when using a valid regex", func() {
JustBeforeEach(func() {
matcher, err = NewUrlPathHttpMatcher(path_regex)
})
Context("when given an exactly matching regexp", func() {
BeforeEach(func() {
path_regex = `^/abc/def$`
})
It("should return true", func() {
Expect(err).NotTo(HaveOccurred())<|fim▁hole|> })
Context("when given a loosely matching regexp", func() {
BeforeEach(func() {
path_regex = `abc`
})
It("should return true", func() {
Expect(err).NotTo(HaveOccurred())
result := matcher.MatchUrlPath("/abc/def")
Expect(result).To(BeTrue())
})
})
Context("when given a non-matching URL path", func() {
BeforeEach(func() {
path_regex = `^/abc/ghi$`
})
It("should return false", func() {
Expect(err).NotTo(HaveOccurred())
result := matcher.MatchUrlPath("/abc/def")
Expect(result).To(BeFalse())
Expect(err).NotTo(HaveOccurred())
})
})
})
Describe("when using an invalid regex", func() {
BeforeEach(func() {
path_regex = `^abc++$`
})
It("should raise an error on creating the matcher", func() {
matcher, err = NewUrlPathHttpMatcher(path_regex)
Expect(err).To(HaveOccurred())
})
})
})<|fim▁end|> | result := matcher.MatchUrlPath("/abc/def")
Expect(result).To(BeTrue())
}) |
<|file_name|>AtMostXVerificationTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockitousage.verification;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.atMost;
import static org.mockito.Mockito.atMostOnce;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import java.util.List;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.exceptions.base.MockitoException;
import org.mockito.exceptions.verification.MoreThanAllowedActualInvocations;
import org.mockito.exceptions.verification.NoInteractionsWanted;
import org.mockitoutil.TestBase;
public class AtMostXVerificationTest extends TestBase {
@Mock private List<String> mock;
@Test
public void shouldVerifyAtMostXTimes() throws Exception {
mock.clear();
mock.clear();
verify(mock, atMost(2)).clear();
verify(mock, atMost(3)).clear();
try {
verify(mock, atMostOnce()).clear();
fail();
} catch (MoreThanAllowedActualInvocations e) {
}
}
@Test
public void shouldWorkWithArgumentMatchers() throws Exception {
mock.add("one");
verify(mock, atMost(5)).add(anyString());
try {
verify(mock, atMost(0)).add(anyString());
fail();
} catch (MoreThanAllowedActualInvocations e) {
}
}
@Test
public void shouldNotAllowNegativeNumber() throws Exception {
try {
verify(mock, atMost(-1)).clear();
fail();
} catch (MockitoException e) {
assertEquals("Negative value is not allowed here", e.getMessage());
}
}
@Test
public void shouldPrintDecentMessage() throws Exception {
mock.clear();
mock.clear();
try {
verify(mock, atMostOnce()).clear();
fail();
} catch (MoreThanAllowedActualInvocations e) {
assertEquals("\nWanted at most 1 time but was 2", e.getMessage());
}
}
@Test
public void shouldNotAllowInOrderMode() throws Exception {
mock.clear();
InOrder inOrder = inOrder(mock);
try {
inOrder.verify(mock, atMostOnce()).clear();
fail();
} catch (MockitoException e) {
assertEquals("AtMost is not implemented to work with InOrder", e.getMessage());
}
}
@Test
public void shouldMarkInteractionsAsVerified() throws Exception {
mock.clear();
mock.clear();
verify(mock, atMost(3)).clear();
verifyNoMoreInteractions(mock);
}
@Test
public void shouldDetectUnverifiedInMarkInteractionsAsVerified() throws Exception {
mock.clear();
mock.clear();
undesiredInteraction();
verify(mock, atMost(3)).clear();
try {
verifyNoMoreInteractions(mock);
fail();<|fim▁hole|> assertThat(e).hasMessageContaining("undesiredInteraction(");
}
}
private void undesiredInteraction() {
mock.add("");
}
}<|fim▁end|> | } catch (NoInteractionsWanted e) { |
<|file_name|>sockopt_unix.go<|end_file_name|><|fim▁begin|>// Copyright 2021 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//go:build !windows && !solaris
// +build !windows,!solaris
package transport
import (
"syscall"<|fim▁hole|>func setReusePort(network, address string, conn syscall.RawConn) error {
return conn.Control(func(fd uintptr) {
syscall.SetsockoptInt(int(fd), syscall.SOL_SOCKET, unix.SO_REUSEPORT, 1)
})
}
func setReuseAddress(network, address string, conn syscall.RawConn) error {
return conn.Control(func(fd uintptr) {
syscall.SetsockoptInt(int(fd), syscall.SOL_SOCKET, unix.SO_REUSEADDR, 1)
})
}<|fim▁end|> |
"golang.org/x/sys/unix"
)
|
<|file_name|>attrib32.C<|end_file_name|><|fim▁begin|>// PR c++/35315
typedef union { int i; } U __attribute__((transparent_union));
static void foo(U) {}
static void foo(int) {}
void bar()
{<|fim▁hole|>}
typedef union U1 { int i; } U2 __attribute__((transparent_union)); // { dg-warning "ignored" }
static void foo2(U1) {} // { dg-error "previously defined" }
static void foo2(U2) {} // { dg-error "redefinition" }
void bar2(U1 u1, U2 u2)
{
foo2(u1);
foo2(u2);
}
// PR c++/36410
struct A
{
typedef union
{
int i;
} B __attribute__((transparent_union));
};
void foo(A::B b)
{
b.i;
}<|fim▁end|> | foo(0); |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate log;
extern crate env_logger;
extern crate yak_client;
extern crate capnp;
extern crate log4rs;
extern crate rusqlite;
extern crate byteorder;
#[cfg(test)]
extern crate quickcheck;
#[cfg(test)]
extern crate rand;
use std::default::Default;
use std::net::{TcpListener, TcpStream};
use std::thread;
use std::io::{self,Read,Write};
use std::fmt;
use std::sync::{Arc,Mutex};
use std::clone::Clone;
use std::error::Error;
use std::path::Path;
use yak_client::{WireProtocol,Request,Response,Operation,Datum,SeqNo,YakError};
#[macro_use] mod store;
mod sqlite_store;
macro_rules! try_box {
($expr:expr) => (match $expr {
Ok(val) => val,
Err(err) => {
return Err(From::from(Box::new(err) as Box<Error + 'static>))
}
})
}
#[derive(Debug)]
enum ServerError {
CapnpError(capnp::Error),
CapnpNotInSchema(capnp::NotInSchema),
IoError(std::io::Error),
DownstreamError(YakError),
StoreError(Box<Error>),
}
impl fmt::Display for ServerError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self {
&ServerError::CapnpError(ref e) => e.fmt(f),
&ServerError::CapnpNotInSchema(ref e) => e.fmt(f),
&ServerError::IoError(ref e) => e.fmt(f),
&ServerError::DownstreamError(ref e) => e.fmt(f),
&ServerError::StoreError(ref e) => write!(f, "{}", e),
}
}
}
impl Error for ServerError {
fn description(&self) -> &str {
match self {
&ServerError::CapnpError(ref e) => e.description(),
&ServerError::CapnpNotInSchema(ref e) => e.description(),
&ServerError::IoError(ref e) => e.description(),<|fim▁hole|> &ServerError::StoreError(ref e) => e.description(),
}
}
}
struct DownStream<S: Read+Write> {
protocol: Arc<Mutex<WireProtocol<S>>>,
}
impl <S: ::std::fmt::Debug + Read + Write> ::std::fmt::Debug for DownStream<S>
where S: ::std::fmt::Debug + 'static {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match self.protocol.try_lock() {
Ok(ref proto) => write!(fmt, "DownStream{{ protocol: {:?} }}", &**proto),
Err(_) => write!(fmt, "DownStream{{ protocol: <locked> }}"),
}
}
}
impl<S> Clone for DownStream<S> where S: Read+Write {
fn clone(&self) -> DownStream<S> {
DownStream { protocol: self.protocol.clone() }
}
}
static LOG_FILE: &'static str = "log.toml";
pub fn main() {
if let Err(e) = log4rs::init_file(LOG_FILE, Default::default()) {
panic!("Could not init logger from file {}: {}", LOG_FILE, e);
}
match do_run() {
Ok(()) => info!("Terminated normally"),
Err(e) => panic!("Failed: {}", e),
}
}
fn do_run() -> Result<(), ServerError> {
let mut a = std::env::args().skip(1);
let storedir = a.next().unwrap();
let local : String = a.next().unwrap();
let next = match a.next() {
Some(ref addr) => Some(try!(DownStream::new(addr))),
None => None
};
let listener = TcpListener::bind(&local as &str).unwrap();
info!("listening started on {}, ready to accept", local);
let store = sqlite_store::SqliteStore::new(Path::new(&storedir)).unwrap();
for stream in listener.incoming() {
let next = next.clone();
let store = store.clone();
let sock = stream.unwrap();
let peer = sock.peer_addr().unwrap();
let _ = try!(thread::Builder::new().name(format!("C{}", peer)).spawn(move || {
debug!("Accept stream from {:?}", peer);
match Session::new(peer, sock, store, next).process_requests() {
Err(e) => report_session_errors(&e),
_ => ()
}
}));
}
Ok(())
}
fn report_session_errors(error: &Error) {
error!("Session failed with: {}", error);
while let Some(error) = error.cause() {
error!("\tCaused by: {}", error);
}
}
impl DownStream<TcpStream> {
fn new(addr: &str) -> Result<DownStream<TcpStream>, ServerError> {
debug!("Connect downstream: {:?}", addr);
let proto = try_box!(WireProtocol::connect(addr));
debug!("Connected downstream: {:?}", proto);
Ok(DownStream { protocol: Arc::new(Mutex::new(proto)) })
}
}
fn ptr_addr<T>(obj:&T) -> usize {
return obj as *const T as usize;
}
impl<S: Read+Write> DownStream<S> {
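  // Forward a request to the downstream replica and wait for its response.
  // The Mutex around the wire protocol keeps concurrent client sessions from
  // interleaving frames on the shared downstream connection.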
fn handle(&self, msg: &Request) -> Result<Response, ServerError> {
let mut wire = self.protocol.lock().unwrap();
debug!("Downstream: -> {:x}", ptr_addr(&msg));
try!(wire.send(msg));
debug!("Downstream wait: {:x}", ptr_addr(&msg));
let resp = try!(wire.read::<Response>());
debug!("Downstream: <- {:x}", ptr_addr(&resp));
resp.map(Ok).unwrap_or(Err(ServerError::DownstreamError(YakError::ProtocolError)))
}
}
struct Session<Id, S:Read+Write+'static, ST> {
id: Id,
protocol: WireProtocol<S>,
store: ST,
next: Option<DownStream<S>>
}
impl<Id: fmt::Display, S: Read+Write, ST:store::Store> Session<Id, S, ST> {
fn new(id: Id, conn: S, store: ST, next: Option<DownStream<S>>) -> Session<Id, S, ST> {
Session {
id: id,
protocol: WireProtocol::new(conn),
store: store,
next: next
}
}
fn process_requests(&mut self) -> Result<(), ServerError> {
trace!("{}: Waiting for message", self.id);
while let Some(msg) = try!(self.protocol.read::<Request>()) {
try!(self.process_one(msg));
}
Ok(())
}
fn process_one(&mut self, msg: Request) -> Result<(), ServerError> {
trace!("{}: Handle message: {:x}", self.id, ptr_addr(&msg));
let resp = match msg.operation {
Operation::Write { ref key, ref value } => {
let resp = try!(self.write(msg.sequence, &msg.space, &key, &value));
try!(self.send_downstream_or(&msg, resp))
},
Operation::Read { key } =>
try!(self.read(msg.sequence, &msg.space, &key)),
Operation::Subscribe => {
try!(self.subscribe(msg.sequence, &msg.space));
Response::Okay(msg.sequence)
},
};
trace!("Response: {:?}", resp);
try!(self.protocol.send(&resp));
Ok(())
}
fn send_downstream_or(&self, msg: &Request, default: Response) -> Result<Response, ServerError> {
match self.next {
Some(ref d) => d.handle(msg),
None => Ok(default),
}
}
fn read(&self, seq: SeqNo, space: &str, key: &[u8]) -> Result<Response, ServerError> {
let val = try_box!(self.store.read(space, key));
trace!("{}/{:?}: read:{:?}: -> {:?}", self.id, space, key, val);
let data = val.iter().map(|c| Datum { key: Vec::new(), content: c.clone() }).collect();
Ok(Response::OkayData(seq, data))
}
fn write(&self, seq: SeqNo, space: &str, key: &[u8], val: &[u8]) -> Result<Response, ServerError> {
trace!("{}/{:?}: write:{:?} -> {:?}", self.id, space, key, val);
try_box!(self.store.write(space, key, val));
Ok(Response::Okay(seq))
}
fn subscribe(&mut self, seq: SeqNo, space: &str) -> Result<(), ServerError> {
try!(self.protocol.send(&Response::Okay(seq)));
for d in try_box!(self.store.subscribe(space)) {
try!(self.protocol.send(&Response::Delivery(d)));
}
Ok(())
}
}
impl From<capnp::Error> for ServerError {
fn from(err: capnp::Error) -> ServerError {
ServerError::CapnpError(err)
}
}
impl From<capnp::NotInSchema> for ServerError {
fn from(err: capnp::NotInSchema) -> ServerError {
ServerError::CapnpNotInSchema(err)
}
}
impl From<io::Error> for ServerError {
fn from(err: io::Error) -> ServerError {
ServerError::IoError(err)
}
}
impl From<YakError> for ServerError {
fn from(err: YakError) -> ServerError {
ServerError::DownstreamError(err)
}
}
impl From<Box<Error>> for ServerError {
fn from(err: Box<Error>) -> ServerError {
ServerError::StoreError(err)
}
}<|fim▁end|> | &ServerError::DownstreamError(ref e) => e.description(), |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>"""Main entry points for scripts."""
from __future__ import print_function, division
from argparse import ArgumentParser
from collections import OrderedDict
from copy import copy
from datetime import datetime
import glob
import json
import logging
import math
import os
import scipy.stats
import numpy as np
from .version import __version__
from .psffuncs import gaussian_moffat_psf
from .psf import TabularPSF, GaussianMoffatPSF
from .io import read_datacube, write_results, read_results
from .fitting import (guess_sky, fit_galaxy_single, fit_galaxy_sky_multi,
fit_position_sky, fit_position_sky_sn_multi,
RegularizationPenalty)
from .utils import yxbounds
from .extern import ADR, Hyper_PSF3D_PL
__all__ = ["cubefit", "cubefit_subtract", "cubefit_plot"]
MODEL_SHAPE = (32, 32)
SPAXEL_SIZE = 0.43
MIN_NMAD = 2.5 # Minimum Number of Median Absolute Deviations above
# the minimum spaxel value in fit_position
LBFGSB_FACTOR = 1e10
REFWAVE = 5000. # reference wavelength in Angstroms for PSF params and ADR
POSITION_BOUND = 3. # Bound on fitted positions relative in initial positions
def snfpsf(wave, psfparams, header, psftype):
"""Create a 3-d PSF based on SNFactory-specific parameterization of
Gaussian + Moffat PSF parameters and ADR."""
# Get Gaussian+Moffat parameters at each wavelength.
relwave = wave / REFWAVE - 1.0
ellipticity = abs(psfparams[0]) * np.ones_like(wave)
alpha = np.abs(psfparams[1] +
psfparams[2] * relwave +
psfparams[3] * relwave**2)
# correlated parameters (coefficients determined externally)
sigma = 0.545 + 0.215 * alpha # Gaussian parameter
beta = 1.685 + 0.345 * alpha # Moffat parameter
eta = 1.040 + 0.0 * alpha # gaussian ampl. / moffat ampl.
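    # i.e. with relwave = wave/REFWAVE - 1, the Moffat width is
    # alpha(wave) = |psfparams[1] + psfparams[2]*relwave + psfparams[3]*relwave**2|,
    # and the remaining shape parameters (sigma, beta, eta) are affine functions
    # of alpha with the fixed coefficients above.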
# Atmospheric differential refraction (ADR): Because of ADR,
# the center of the PSF will be different at each wavelength,
# by an amount that we can determine (pretty well) from the
# atmospheric conditions and the pointing and angle of the
# instrument. We calculate the offsets here as a function of
# observation and wavelength and input these to the model.
# Correction to parallactic angle and airmass for 2nd-order effects
# such as MLA rotation, mechanical flexures or finite-exposure
# corrections. These values have been trained on faint-std star
# exposures.
#
# `predict_adr_params` uses 'AIRMASS', 'PARANG' and 'CHANNEL' keys
# in input dictionary.
delta, theta = Hyper_PSF3D_PL.predict_adr_params(header)
# check for crazy values of pressure and temperature, and assign default
# values.
pressure = header.get('PRESSURE', 617.)
if not 550. < pressure < 650.:
pressure = 617.
temp = header.get('TEMP', 2.)
if not -20. < temp < 20.:
temp = 2.
adr = ADR(pressure, temp, lref=REFWAVE, delta=delta, theta=theta)
adr_refract = adr.refract(0, 0, wave, unit=SPAXEL_SIZE)
# adr_refract[0, :] corresponds to x, adr_refract[1, :] => y
xctr, yctr = adr_refract
if psftype == 'gaussian-moffat':
return GaussianMoffatPSF(sigma, alpha, beta, ellipticity, eta,
yctr, xctr, MODEL_SHAPE, subpix=3)
elif psftype == 'tabular':
A = gaussian_moffat_psf(sigma, alpha, beta, ellipticity, eta,
yctr, xctr, MODEL_SHAPE, subpix=3)
return TabularPSF(A)
else:
raise ValueError("unknown psf type: " + repr(psftype))
def setup_logging(loglevel, logfname=None):
# if loglevel isn't an integer, parse it as "debug", "info", etc:
if not isinstance(loglevel, int):
loglevel = getattr(logging, loglevel.upper(), None)
if not isinstance(loglevel, int):
print('Invalid log level: %s' % loglevel)
exit(1)
# remove logfile if it already exists
if logfname is not None and os.path.exists(logfname):
os.remove(logfname)
logging.basicConfig(filename=logfname, format="%(levelname)s %(message)s",
level=loglevel)
def cubefit(argv=None):
DESCRIPTION = "Fit SN + galaxy model to SNFactory data cubes."
parser = ArgumentParser(prog="cubefit", description=DESCRIPTION)
parser.add_argument("configfile",
help="configuration file name (JSON format)")
parser.add_argument("outfile", help="Output file name (FITS format)")
parser.add_argument("--dataprefix", default="",
help="path prepended to data file names; default is "
"empty string")
parser.add_argument("--logfile", help="Write log to this file "
"(default: print to stdout)", default=None)
parser.add_argument("--loglevel", default="info",
help="one of: debug, info, warning (default is info)")
parser.add_argument("--diagdir", default=None,
help="If given, write intermediate diagnostic results "
"to this directory")
parser.add_argument("--refitgal", default=False, action="store_true",
help="Add an iteration where galaxy model is fit "
"using all epochs and then data/SN positions are "
"refit")
parser.add_argument("--mu_wave", default=0.07, type=float,
help="Wavelength regularization parameter. "
"Default is 0.07.")
parser.add_argument("--mu_xy", default=0.001, type=float,
help="Spatial regularization parameter. "
"Default is 0.001.")
parser.add_argument("--psftype", default="gaussian-moffat",
help="Type of PSF: 'gaussian-moffat' or 'tabular'. "
"Currently, tabular means generate a tabular PSF from "
"gaussian-moffat parameters.")
args = parser.parse_args(argv)
setup_logging(args.loglevel, logfname=args.logfile)
# record start time
tstart = datetime.now()
logging.info("cubefit v%s started at %s", __version__,
tstart.strftime("%Y-%m-%d %H:%M:%S"))
tsteps = OrderedDict() # finish time of each step.
logging.info("parameters: mu_wave={:.3g} mu_xy={:.3g} refitgal={}"
.format(args.mu_wave, args.mu_xy, args.refitgal))
logging.info(" psftype={}".format(args.psftype))
logging.info("reading config file")
with open(args.configfile) as f:
cfg = json.load(f)
# basic checks on config contents.
assert (len(cfg["filenames"]) == len(cfg["xcenters"]) ==
len(cfg["ycenters"]) == len(cfg["psf_params"]))
# -------------------------------------------------------------------------
# Load data cubes from the list of FITS files.
nt = len(cfg["filenames"])
logging.info("reading %d data cubes", nt)
cubes = []
for fname in cfg["filenames"]:
logging.debug(" reading %s", fname)
cubes.append(read_datacube(os.path.join(args.dataprefix, fname)))
wave = cubes[0].wave
nw = len(wave)
# assign some local variables for convenience
refs = cfg["refs"]
master_ref = cfg["master_ref"]
if master_ref not in refs:
raise ValueError("master ref choice must be one of the final refs (" +
" ".join(refs.astype(str)) + ")")
nonmaster_refs = [i for i in refs if i != master_ref]
nonrefs = [i for i in range(nt) if i not in refs]
# Ensure that all cubes have the same wavelengths.
if not all(np.all(cubes[i].wave == wave) for i in range(1, nt)):
raise ValueError("all data must have same wavelengths")
# -------------------------------------------------------------------------
# PSF for each observation
logging.info("setting up PSF for all %d epochs", nt)
psfs = [snfpsf(wave, cfg["psf_params"][i], cubes[i].header, args.psftype)
for i in range(nt)]
# -------------------------------------------------------------------------
# Initialize all model parameters to be fit
yctr0 = np.array(cfg["ycenters"])
xctr0 = np.array(cfg["xcenters"])
galaxy = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64)
sn = np.zeros((nt, nw), dtype=np.float64) # SN spectrum at each epoch
skys = np.zeros((nt, nw), dtype=np.float64) # Sky spectrum at each epoch
yctr = yctr0.copy()
xctr = xctr0.copy()
snctr = (0., 0.)
# For writing out to FITS
modelwcs = {"CRVAL1": -SPAXEL_SIZE * (MODEL_SHAPE[0] - 1) / 2.,
"CRPIX1": 1,
"CDELT1": SPAXEL_SIZE,
"CRVAL2": -SPAXEL_SIZE * (MODEL_SHAPE[1] - 1) / 2.,
"CRPIX2": 1,
"CDELT2": SPAXEL_SIZE,
"CRVAL3": cubes[0].header["CRVAL3"],
"CRPIX3": cubes[0].header["CRPIX3"],
"CDELT3": cubes[0].header["CDELT3"]}
# -------------------------------------------------------------------------
# Position bounds
# Bounds on data position: shape=(nt, 2)
xctrbounds = np.vstack((xctr - POSITION_BOUND, xctr + POSITION_BOUND)).T
yctrbounds = np.vstack((yctr - POSITION_BOUND, yctr + POSITION_BOUND)).T
snctrbounds = (-POSITION_BOUND, POSITION_BOUND)
# For data positions, check that bounds do not extend
# past the edge of the model and adjust the minbound and maxbound.
# This doesn't apply to SN position.
gshape = galaxy.shape[1:3] # model shape
for i in range(nt):
dshape = cubes[i].data.shape[1:3]
(yminabs, ymaxabs), (xminabs, xmaxabs) = yxbounds(gshape, dshape)
yctrbounds[i, 0] = max(yctrbounds[i, 0], yminabs)
yctrbounds[i, 1] = min(yctrbounds[i, 1], ymaxabs)
xctrbounds[i, 0] = max(xctrbounds[i, 0], xminabs)
xctrbounds[i, 1] = min(xctrbounds[i, 1], xmaxabs)
# -------------------------------------------------------------------------
# Guess sky
logging.info("guessing sky for all %d epochs", nt)
for i, cube in enumerate(cubes):
skys[i, :] = guess_sky(cube, npix=30)
# -------------------------------------------------------------------------
# Regularization penalty parameters
# Calculate rough average galaxy spectrum from all final refs.
spectra = np.zeros((len(refs), len(wave)), dtype=np.float64)
for j, i in enumerate(refs):
avg_spec = np.average(cubes[i].data, axis=(1, 2)) - skys[i]
mean_spec, bins, bn = scipy.stats.binned_statistic(wave, avg_spec,
                                                           bins=len(wave) // 10)
spectra[j] = np.interp(wave, bins[:-1] + np.diff(bins)[0]/2.,
mean_spec)
mean_gal_spec = np.average(spectra, axis=0)
# Ensure that there won't be any negative or tiny values in mean:
mean_floor = 0.1 * np.median(mean_gal_spec)
mean_gal_spec[mean_gal_spec < mean_floor] = mean_floor
galprior = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64)
regpenalty = RegularizationPenalty(galprior, mean_gal_spec, args.mu_xy,
args.mu_wave)
tsteps["setup"] = datetime.now()
# -------------------------------------------------------------------------
# Fit just the galaxy model to just the master ref.
data = cubes[master_ref].data - skys[master_ref, :, None, None]
weight = cubes[master_ref].weight
logging.info("fitting galaxy to master ref [%d]", master_ref)
galaxy = fit_galaxy_single(galaxy, data, weight,
(yctr[master_ref], xctr[master_ref]),
psfs[master_ref], regpenalty, LBFGSB_FACTOR)
if args.diagdir:
fname = os.path.join(args.diagdir, 'step1.fits')
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)
tsteps["fit galaxy to master ref"] = datetime.now()
# -------------------------------------------------------------------------
# Fit the positions of the other final refs
#
# Here we only use spaxels where the *model* has significant flux.
# We define "significant" as some number of median absolute deviations
# (MAD) above the minimum flux in the model. We (temporarily) set the
# weight of "insignificant" spaxels to zero during this process, then
# restore the original weight after we're done.
#
# If there are less than 20 "significant" spaxels, we do not attempt to
# fit the position, but simply leave it as is.
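    # (MAD here is the usual median absolute deviation: for the summed galaxy
    # image g, mad = median(|g - median(g)|), and spaxels fainter than
    # min(g) + MIN_NMAD * mad are excluded from the position fit.)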
logging.info("fitting position of non-master refs %s", nonmaster_refs)
for i in nonmaster_refs:
cube = cubes[i]
# Evaluate galaxy on this epoch for purpose of masking spaxels.
gal = psfs[i].evaluate_galaxy(galaxy, (cube.ny, cube.nx),
(yctr[i], xctr[i]))
# Set weight of low-valued spaxels to zero.
gal2d = gal.sum(axis=0) # Sum of gal over wavelengths
mad = np.median(np.abs(gal2d - np.median(gal2d)))
mask = gal2d > np.min(gal2d) + MIN_NMAD * mad
if mask.sum() < 20:
continue
weight = cube.weight * mask[None, :, :]
fctr, fsky = fit_position_sky(galaxy, cube.data, weight,
(yctr[i], xctr[i]), psfs[i],
(yctrbounds[i], xctrbounds[i]))
yctr[i], xctr[i] = fctr
skys[i, :] = fsky
tsteps["fit positions of other refs"] = datetime.now()
# -------------------------------------------------------------------------
# Redo model fit, this time including all final refs.
datas = [cubes[i].data for i in refs]
weights = [cubes[i].weight for i in refs]
ctrs = [(yctr[i], xctr[i]) for i in refs]
psfs_refs = [psfs[i] for i in refs]
logging.info("fitting galaxy to all refs %s", refs)
galaxy, fskys = fit_galaxy_sky_multi(galaxy, datas, weights, ctrs,
psfs_refs, regpenalty, LBFGSB_FACTOR)
# put fitted skys back in `skys`
for i,j in enumerate(refs):
skys[j, :] = fskys[i]
if args.diagdir:
fname = os.path.join(args.diagdir, 'step2.fits')
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)
tsteps["fit galaxy to all refs"] = datetime.now()
# -------------------------------------------------------------------------
# Fit position of data and SN in non-references
#
# Now we think we have a good galaxy model. We fix this and fit
# the relative position of the remaining epochs (which presumably
# all have some SN light). We simultaneously fit the position of
# the SN itself.
logging.info("fitting position of all %d non-refs and SN position",
len(nonrefs))
if len(nonrefs) > 0:
datas = [cubes[i].data for i in nonrefs]
weights = [cubes[i].weight for i in nonrefs]
psfs_nonrefs = [psfs[i] for i in nonrefs]
fyctr, fxctr, snctr, fskys, fsne = fit_position_sky_sn_multi(
galaxy, datas, weights, yctr[nonrefs], xctr[nonrefs],
snctr, psfs_nonrefs, LBFGSB_FACTOR, yctrbounds[nonrefs],<|fim▁hole|> yctr[nonrefs] = fyctr
xctr[nonrefs] = fxctr
for i,j in enumerate(nonrefs):
skys[j, :] = fskys[i]
sn[j, :] = fsne[i]
tsteps["fit positions of nonrefs & SN"] = datetime.now()
# -------------------------------------------------------------------------
# optional step(s)
if args.refitgal and len(nonrefs) > 0:
if args.diagdir:
fname = os.path.join(args.diagdir, 'step3.fits')
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)
# ---------------------------------------------------------------------
# Redo fit of galaxy, using ALL epochs, including ones with SN
# light. We hold the SN "fixed" simply by subtracting it from the
# data and fitting the remainder.
#
# This is slightly dangerous: any errors in the original SN
# determination, whether due to an incorrect PSF or ADR model
# or errors in the galaxy model will result in residuals. The
# galaxy model will then try to compensate for these.
#
# We should look at the galaxy model at the position of the SN
# before and after this step to see if there is a bias towards
# the galaxy flux increasing.
logging.info("fitting galaxy using all %d epochs", nt)
datas = [cube.data for cube in cubes]
weights = [cube.weight for cube in cubes]
ctrs = [(yctr[i], xctr[i]) for i in range(nt)]
# subtract SN from non-ref cubes.
for i in nonrefs:
s = psfs[i].point_source(snctr, datas[i].shape[1:3], ctrs[i])
# do *not* use in-place operation (-=) here!
datas[i] = cubes[i].data - sn[i, :, None, None] * s
galaxy, fskys = fit_galaxy_sky_multi(galaxy, datas, weights, ctrs,
psfs, regpenalty, LBFGSB_FACTOR)
for i in range(nt):
skys[i, :] = fskys[i] # put fitted skys back in skys
if args.diagdir:
fname = os.path.join(args.diagdir, 'step4.fits')
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)
# ---------------------------------------------------------------------
# Repeat step before last: fit position of data and SN in
# non-references
logging.info("re-fitting position of all %d non-refs and SN position",
len(nonrefs))
if len(nonrefs) > 0:
datas = [cubes[i].data for i in nonrefs]
weights = [cubes[i].weight for i in nonrefs]
psfs_nonrefs = [psfs[i] for i in nonrefs]
fyctr, fxctr, snctr, fskys, fsne = fit_position_sky_sn_multi(
galaxy, datas, weights, yctr[nonrefs], xctr[nonrefs],
snctr, psfs_nonrefs, LBFGSB_FACTOR, yctrbounds[nonrefs],
xctrbounds[nonrefs], snctrbounds)
# put fitted results back in parameter lists.
yctr[nonrefs] = fyctr
xctr[nonrefs] = fxctr
for i, j in enumerate(nonrefs):
skys[j, :] = fskys[i]
sn[j, :] = fsne[i]
# -------------------------------------------------------------------------
# Write results
logging.info("writing results to %s", args.outfile)
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, args.outfile)
# time info
logging.info("step times:")
maxlen = max(len(key) for key in tsteps)
fmtstr = " %2dm%02ds - %-" + str(maxlen) + "s"
tprev = tstart
for key, tstep in tsteps.items():
t = (tstep - tprev).seconds
logging.info(fmtstr, t//60, t%60, key)
tprev = tstep
tfinish = datetime.now()
logging.info("finished at %s", tfinish.strftime("%Y-%m-%d %H:%M:%S"))
t = (tfinish - tstart).seconds
logging.info("took %3dm%2ds", t // 60, t % 60)
return 0
def cubefit_subtract(argv=None):
DESCRIPTION = \
"""Subtract model determined by cubefit from the original data.
The "outnames" key in the supplied configuration file is used to
determine the output FITS file names. The input FITS header is passed
unaltered to the output file, with the following additions:
(1) A `HISTORY` entry. (2) `CBFT_SNX` and `CBFT_SNY` records giving
the cubefit-determined position of the SN relative to the center of
the data array (at the reference wavelength).
This script also writes fitted SN spectra to individual FITS files.
The "sn_outnames" configuration field determines the output filenames.
"""
import shutil
import fitsio
prog_name = "cubefit-subtract"
prog_name_ver = "{} v{}".format(prog_name, __version__)
parser = ArgumentParser(prog=prog_name, description=DESCRIPTION)
parser.add_argument("configfile", help="configuration file name "
"(JSON format), same as cubefit input.")
parser.add_argument("resultfile", help="Result FITS file from cubefit")
parser.add_argument("--dataprefix", default="",
help="path prepended to data file names; default is "
"empty string")
parser.add_argument("--outprefix", default="",
help="path prepended to output file names; default is "
"empty string")
args = parser.parse_args(argv)
setup_logging("info")
# get input & output filenames
with open(args.configfile) as f:
cfg = json.load(f)
fnames = [os.path.join(args.dataprefix, fname)
for fname in cfg["filenames"]]
outfnames = [os.path.join(args.outprefix, fname)
for fname in cfg["outnames"]]
# load results
results = read_results(args.resultfile)
epochs = results["epochs"]
sny, snx = results["snctr"]
if not len(epochs) == len(fnames) == len(outfnames):
raise RuntimeError("number of epochs in result file not equal to "
"number of input and output files in config file")
# subtract and write out.
for fname, outfname, epoch in zip(fnames, outfnames, epochs):
logging.info("writing %s", outfname)
shutil.copy(fname, outfname)
f = fitsio.FITS(outfname, "rw")
data = f[0].read()
data -= epoch["galeval"]
f[0].write(data)
f[0].write_history("galaxy subtracted by " + prog_name_ver)
f[0].write_key("CBFT_SNX", snx - epoch['xctr'],
comment="SN x offset from center at {:.0f} A [spaxels]"
.format(REFWAVE))
f[0].write_key("CBFT_SNY", sny - epoch['yctr'],
comment="SN y offset from center at {:.0f} A [spaxels]"
.format(REFWAVE))
f.close()
# output SN spectra to separate files.
sn_outnames = [os.path.join(args.outprefix, fname)
for fname in cfg["sn_outnames"]]
header = {"CRVAL1": results["header"]["CRVAL3"],
"CRPIX1": results["header"]["CRPIX3"],
"CDELT1": results["header"]["CDELT3"]}
for outfname, epoch in zip(sn_outnames, epochs):
logging.info("writing %s", outfname)
if os.path.exists(outfname): # avoid warning from clobber=True
os.remove(outfname)
with fitsio.FITS(outfname, "rw") as f:
f.write(epoch["sn"], extname="sn", header=header)
f[0].write_history("created by " + prog_name_ver)
return 0
def cubefit_plot(argv=None):
DESCRIPTION = """Plot results and diagnostics from cubefit"""
from .plotting import plot_timeseries, plot_epoch, plot_sn, plot_adr
# arguments are the same as cubefit except an output
parser = ArgumentParser(prog="cubefit-plot", description=DESCRIPTION)
parser.add_argument("configfile", help="configuration filename")
parser.add_argument("resultfile", help="Result filename from cubefit")
parser.add_argument("outprefix", help="output prefix")
parser.add_argument("--dataprefix", default="",
help="path prepended to data file names; default is "
"empty string")
parser.add_argument('-b', '--band', help='timeseries band (U, B, V). '
'Default is a 1000 A wide band in middle of cube.',
default=None, dest='band')
parser.add_argument('--idrfiles', nargs='+', default=None,
help='Prefix of IDR. If given, the cubefit SN '
'spectra are plotted against the production values.')
parser.add_argument("--diagdir", default=None,
help="If given, read intermediate diagnostic "
"results from this directory and include in plot(s)")
parser.add_argument("--plotepochs", default=False, action="store_true",
help="Make diagnostic plots for each epoch")
args = parser.parse_args(argv)
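    # Hypothetical invocation (file and directory names are illustrative only,
    # not taken from the source): the three positional arguments are required,
    # the remaining flags are optional.
    #
    #   cubefit-plot config.json result.fits plots/run1 \
    #       --dataprefix data/ --diagdir diagnostics/ --band B --plotepochs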
# Read in data
with open(args.configfile) as f:
cfg = json.load(f)
cubes = [read_datacube(os.path.join(args.dataprefix, fname), scale=False)
for fname in cfg["filenames"]]
results = OrderedDict()
# Diagnostic results at each step
if args.diagdir is not None:
fnames = sorted(glob.glob(os.path.join(args.diagdir, "step*.fits")))
for fname in fnames:
name = os.path.basename(fname).split(".")[0]
results[name] = read_results(fname)
# Final result (don't fail if not available)
if os.path.exists(args.resultfile):
results["final"] = read_results(args.resultfile)
# plot time series
plot_timeseries(cubes, results, band=args.band,
fname=(args.outprefix + '_timeseries.png'))
# Plot wave slices and sn, galaxy and sky spectra for all epochs.
if 'final' in results and args.plotepochs:
for i_t in range(len(cubes)):
plot_epoch(cubes[i_t], results['final']['epochs'][i_t],
fname=(args.outprefix + '_epoch%02d.png' % i_t))
# Plot result spectra against IDR spectra.
if 'final' in results and args.idrfiles is not None:
plot_sn(cfg['filenames'], results['final']['epochs']['sn'],
results['final']['wave'], args.idrfiles,
args.outprefix + '_sn.png')
# Plot the x-y coordinates of the adr versus wavelength
# (Skip this for now; contains no interesting information)
#plot_adr(cubes, cubes[0].wave, fname=(args.outprefix + '_adr.png'))
return 0<|fim▁end|> | xctrbounds[nonrefs], snctrbounds)
# put fitted results back in parameter lists. |
<|file_name|>minesweeper.js<|end_file_name|><|fim▁begin|>/******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };<|fim▁hole|>/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ({
/***/ 0:
/***/ function(module, exports, __webpack_require__) {
var __weex_template__ = __webpack_require__(241)
var __weex_style__ = __webpack_require__(242)
var __weex_script__ = __webpack_require__(243)
__weex_define__('@weex-component/c0df89e239d226d3a0c6f418314cda04', [], function(__weex_require__, __weex_exports__, __weex_module__) {
__weex_script__(__weex_module__, __weex_exports__, __weex_require__)
if (__weex_exports__.__esModule && __weex_exports__.default) {
__weex_module__.exports = __weex_exports__.default
}
__weex_module__.exports.template = __weex_template__
__weex_module__.exports.style = __weex_style__
})
__weex_bootstrap__('@weex-component/c0df89e239d226d3a0c6f418314cda04',undefined,undefined)
/***/ },
/***/ 241:
/***/ function(module, exports) {
module.exports = {
"type": "container",
"children": [
{
"type": "text",
"classList": [
"btn"
],
"attr": {
"value": function () {return this.board}
}
},
{
"type": "container",
"repeat": function () {return this.row},
"style": {
"flexDirection": "row",
"flex": 1
},
"children": [
{
"type": "container",
"repeat": function () {return this.col},
"style": {
"flex": 1
},
"children": [
{
"type": "text",
"attr": {
"tid": function () {return this.tid},
"around": function () {return this.around},
"value": function () {return this.text}
},
"events": {
"click": "onclick",
"longpress": "onlongpress"
},
"classList": function () {return [this.state, 'tile']}
}
]
}
]
},
{
"type": "text",
"events": {
"click": "restart"
},
"classList": [
"btn"
],
"attr": {
"value": "START"
}
}
]
}
/***/ },
/***/ 242:
/***/ function(module, exports) {
module.exports = {
"btn": {
"margin": 2,
"backgroundColor": "#e74c3c",
"color": "#ffffff",
"textAlign": "center",
"flex": 1,
"fontSize": 66,
"height": 80
},
"normal": {
"backgroundColor": "#95a5a6"
},
"open": {
"backgroundColor": "#34495e",
"color": "#ffffff"
},
"flag": {
"backgroundColor": "#95a5a6"
},
"tile": {
"margin": 2,
"fontSize": 56,
"height": 80,
"paddingTop": 0,
"textAlign": "center"
}
}
/***/ },
/***/ 243:
/***/ function(module, exports) {
module.exports = function(module, exports, __weex_require__){"use strict";
module.exports = {
data: function () {return {
size: 9,
max: 10,
board: 0,
row: [],
vector: [[-1, 0], [-1, -1], [0, -1], [1, -1], [1, 0], [1, 1], [0, 1], [-1, 1]],
strings: {
mine: "💣",
flag: "🚩",
win: "YOU WIN!",
lose: "YOU LOSE~"
},
finished: false
}},
methods: {
map: function map(x, y, callback) {
for (var i = 0; i < 8; ++i) {
var mx = x + this.vector[i][0];
var my = y + this.vector[i][1];
if (mx >= 0 && my >= 0 && mx < this.size && my < this.size) {
callback(this.row[mx].col[my]);
}
}
},
dfs: function dfs(tile) {
var pos = this.position(tile.tid);
var context = this;
tile.state = "open";
this.map(pos["x"], pos["y"], function (node) {
if (node.around == 0 && node.state == "normal") {
context.dfs(node);
} else {
context.display(node);
}
});
},
random: function random(min, max) {
return parseInt(Math.random() * (max - min) + min);
},
plant: function plant() {
var count = 0;
while (count < this.max) {
var x = this.random(0, this.size);
var y = this.random(0, this.size);
var tile = this.row[x].col[y];
if (tile.value == 0) {
++count;
tile.value = 1;
}
}
},
calculate: function calculate() {
for (var i = 0; i < this.size; ++i) {
for (var j = 0; j < this.size; ++j) {
var around = 0;
this.map(i, j, function (tile) {
around += tile.value;
});
this.row[i].col[j].around = around;
}
}
},
restart: function restart(e) {
var row = [];
var count = 0;
this.board = this.max;
this.finished = false;
for (var i = 0; i < this.size; ++i) {
var col = { "col": [] };
for (var j = 0; j < this.size; ++j) {
var tid = i * this.size + j;
col["col"][j] = {
tid: "" + tid,
state: "normal",
value: 0,
text: "",
around: 0
};
}
row[i] = col;
}
this.row = row;
this.plant();
this.calculate();
},
unfinished: function unfinished() {
var finished = this.finished;
if (this.finished) {
this.restart();
}
return !finished;
},
position: function position(tid) {
var row = parseInt(tid / this.size);
var col = tid % this.size;
return { x: row, y: col };
},
display: function display(tile) {
tile.state = "open";
tile.text = tile.around == 0 ? "" : tile.around;
},
tile: function tile(event) {
var tid = event.target.attr["tid"];
var pos = this.position(tid);
return this.row[pos["x"]].col[pos["y"]];
},
onclick: function onclick(event) {
if (this.unfinished()) {
var tile = this.tile(event);
if (tile.state == "normal") {
if (tile.value == 1) {
this.onfail();
} else {
this.display(tile);
if (tile.around == 0) {
this.dfs(tile);
}
this.judge();
}
}
}
},
onlongpress: function onlongpress(event) {
if (this.unfinished()) {
var tile = this.tile(event);
tile.state = tile.state == "flag" ? "normal" : "flag";
if (tile.state == "flag") {
--this.board;
tile.text = this.strings.flag;
} else {
++this.board;
tile.text = "";
}
this.judge();
}
},
foreach: function foreach(callback) {
for (var i = 0; i < this.size; ++i) {
for (var j = 0; j < this.size; ++j) {
callback(this.row[i].col[j]);
}
}
},
judge: function judge() {
var count = 0;
this.foreach(function (tile) {
if (tile.state == "open" || tile.state == "flag") {
++count;
}
});
if (count == this.size * this.size) {
this.finished = true;
this.board = this.strings.win;
}
},
onfail: function onfail() {
this.board = this.strings.lose;
this.finished = true;
var mine = this.strings.mine;
this.foreach(function (tile) {
if (tile.value == 1) {
tile.text = mine;
}
});
}
}
};}
/* generated by weex-loader */
/***/ }
/******/ });<|fim▁end|> |
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
|
<|file_name|>ClusterProcessHasLabelsId.java<|end_file_name|><|fim▁begin|>package com.silicolife.textmining.core.datastructures.dataaccess.database.dataaccess.implementation.model.core.entities;
// Generated 23/Mar/2015 16:36:00 by Hibernate Tools 4.3.1
import javax.persistence.Column;
import javax.persistence.Embeddable;
/**
* ClusterProcessHasLabelsId generated by hbm2java
*/
@Embeddable
public class ClusterProcessHasLabelsId implements java.io.Serializable {
private long cphClusterProcessId;
private long cphClusterLabelId;
public ClusterProcessHasLabelsId() {
}
public ClusterProcessHasLabelsId(long cphClusterProcessId, long cphClusterLabelId) {
this.cphClusterProcessId = cphClusterProcessId;
this.cphClusterLabelId = cphClusterLabelId;
}
@Column(name = "cph_cluster_process_id", nullable = false)
public long getCphClusterProcessId() {
return this.cphClusterProcessId;
}
public void setCphClusterProcessId(long cphClusterProcessId) {
this.cphClusterProcessId = cphClusterProcessId;
}
@Column(name = "cph_cluster_label_id", nullable = false)
public long getCphClusterLabelId() {
return this.cphClusterLabelId;
}
public void setCphClusterLabelId(long cphClusterLabelId) {
this.cphClusterLabelId = cphClusterLabelId;
}
public boolean equals(Object other) {
if ((this == other))
return true;
if ((other == null))
return false;<|fim▁hole|> return (this.getCphClusterProcessId() == castOther.getCphClusterProcessId()) && (this.getCphClusterLabelId() == castOther.getCphClusterLabelId());
}
public int hashCode() {
int result = 17;
result = 37 * result + (int) this.getCphClusterProcessId();
result = 37 * result + (int) this.getCphClusterLabelId();
return result;
}
}<|fim▁end|> | if (!(other instanceof ClusterProcessHasLabelsId))
return false;
ClusterProcessHasLabelsId castOther = (ClusterProcessHasLabelsId) other;
|
<|file_name|>maintenance.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 Outbrain Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package inst
import (
"github.com/github/orchestrator/go/config"
)
// Maintenance indicates a maintenance entry (also in the database)
type Maintenance struct {
MaintenanceId uint
Key InstanceKey
BeginTimestamp string
SecondsElapsed uint<|fim▁hole|> Reason string
}
var maintenanceOwner string = ""
func GetMaintenanceOwner() string {
if maintenanceOwner != "" {
return maintenanceOwner
}
return config.MaintenanceOwner
}
func SetMaintenanceOwner(owner string) {
maintenanceOwner = owner
}<|fim▁end|> | IsActive bool
Owner string |
<|file_name|>para_training_local.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# setup of the grid parameters
# default queue used for training
training_queue = { 'queue':'q1dm', 'memfree':'16G', 'pe_opt':'pe_mth 2', 'hvmem':'8G', 'io_big':True }
<|fim▁hole|>
# number of audio files that one job should preprocess
number_of_audio_files_per_job = 1000
preprocessing_queue = {}
# number of features that one job should extract
number_of_features_per_job = 600
extraction_queue = { 'queue':'q1d', 'memfree':'8G' }
# number of features that one job should project
number_of_projections_per_job = 600
projection_queue = { 'queue':'q1d', 'hvmem':'8G', 'memfree':'8G' }
# number of models that one job should enroll
number_of_models_per_enrol_job = 20
enrol_queue = { 'queue':'q1d', 'memfree':'4G', 'io_big':True }
# number of models that one score job should process
number_of_models_per_score_job = 20
score_queue = { 'queue':'q1d', 'memfree':'4G', 'io_big':True }
grid_type = 'local' # on Idiap grid<|fim▁end|> | # the queue that is used solely for the final ISV training step
isv_training_queue = { 'queue':'q1wm', 'memfree':'32G', 'pe_opt':'pe_mth 4', 'hvmem':'8G' } |
<|file_name|>WebMapTileServiceImageryProvider.js<|end_file_name|><|fim▁begin|>/*global define*/
define([
'../Core/combine',
'../Core/Credit',
'../Core/defaultValue',
'../Core/defined',
'../Core/defineProperties',
'../Core/DeveloperError',
'../Core/Event',
'../Core/freezeObject',
'../Core/isArray',
'../Core/objectToQuery',
'../Core/queryToObject',
'../Core/Rectangle',
'../Core/WebMercatorTilingScheme',
'../ThirdParty/Uri',
'../ThirdParty/when',
'./ImageryProvider'
], function(
combine,
Credit,
defaultValue,
defined,
defineProperties,
DeveloperError,
Event,
freezeObject,
isArray,
objectToQuery,
queryToObject,
Rectangle,
WebMercatorTilingScheme,
Uri,
when,
ImageryProvider) {
'use strict';
/**
* Provides tiled imagery served by {@link http://www.opengeospatial.org/standards/wmts|WMTS 1.0.0} compliant servers.
* This provider supports HTTP KVP-encoded and RESTful GetTile requests, but does not yet support the SOAP encoding.
*
* @alias WebMapTileServiceImageryProvider
* @constructor
*
* @param {Object} options Object with the following properties:
* @param {String} options.url The base URL for the WMTS GetTile operation (for KVP-encoded requests) or the tile-URL template (for RESTful requests). The tile-URL template should contain the following variables: {style}, {TileMatrixSet}, {TileMatrix}, {TileRow}, {TileCol}. The first two are optional if actual values are hardcoded or not required by the server. The {s} keyword may be used to specify subdomains.
* @param {String} [options.format='image/jpeg'] The MIME type for images to retrieve from the server.
* @param {String} options.layer The layer name for WMTS requests.
* @param {String} options.style The style name for WMTS requests.
* @param {String} options.tileMatrixSetID The identifier of the TileMatrixSet to use for WMTS requests.
* @param {Array} [options.tileMatrixLabels] A list of identifiers in the TileMatrix to use for WMTS requests, one per TileMatrix level.
* @param {Number} [options.tileWidth=256] The tile width in pixels.
* @param {Number} [options.tileHeight=256] The tile height in pixels.
* @param {TilingScheme} [options.tilingScheme] The tiling scheme corresponding to the organization of the tiles in the TileMatrixSet.
* @param {Object} [options.proxy] A proxy to use for requests. This object is expected to have a getURL function which returns the proxied URL.
* @param {Rectangle} [options.rectangle=Rectangle.MAX_VALUE] The rectangle covered by the layer.
* @param {Number} [options.minimumLevel=0] The minimum level-of-detail supported by the imagery provider.
* @param {Number} [options.maximumLevel] The maximum level-of-detail supported by the imagery provider, or undefined if there is no limit.
* @param {Ellipsoid} [options.ellipsoid] The ellipsoid. If not specified, the WGS84 ellipsoid is used.
* @param {Credit|String} [options.credit] A credit for the data source, which is displayed on the canvas.
* @param {String|String[]} [options.subdomains='abc'] The subdomains to use for the <code>{s}</code> placeholder in the URL template.
* If this parameter is a single string, each character in the string is a subdomain. If it is
* an array, each element in the array is a subdomain.
*
*
* @example
* // Example 1. USGS shaded relief tiles (KVP)
* var shadedRelief1 = new Cesium.WebMapTileServiceImageryProvider({
* url : 'http://basemap.nationalmap.gov/arcgis/rest/services/USGSShadedReliefOnly/MapServer/WMTS',
* layer : 'USGSShadedReliefOnly',
* style : 'default',
* format : 'image/jpeg',
* tileMatrixSetID : 'default028mm',
* // tileMatrixLabels : ['default028mm:0', 'default028mm:1', 'default028mm:2' ...],
* maximumLevel: 19,
* credit : new Cesium.Credit('U. S. Geological Survey')
* });
* viewer.imageryLayers.addImageryProvider(shadedRelief1);
*
* @example
* // Example 2. USGS shaded relief tiles (RESTful)
* var shadedRelief2 = new Cesium.WebMapTileServiceImageryProvider({
* url : 'http://basemap.nationalmap.gov/arcgis/rest/services/USGSShadedReliefOnly/MapServer/WMTS/tile/1.0.0/USGSShadedReliefOnly/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.jpg',
* layer : 'USGSShadedReliefOnly',
* style : 'default',
* format : 'image/jpeg',
* tileMatrixSetID : 'default028mm',
* maximumLevel: 19,
* credit : new Cesium.Credit('U. S. Geological Survey')
* });
* viewer.imageryLayers.addImageryProvider(shadedRelief2);
*
* @see ArcGisMapServerImageryProvider
* @see BingMapsImageryProvider
* @see GoogleEarthImageryProvider
* @see createOpenStreetMapImageryProvider
* @see SingleTileImageryProvider
* @see createTileMapServiceImageryProvider
* @see WebMapServiceImageryProvider
* @see UrlTemplateImageryProvider
*/
function WebMapTileServiceImageryProvider(options) {
options = defaultValue(options, defaultValue.EMPTY_OBJECT);
//>>includeStart('debug', pragmas.debug);
if (!defined(options.url)) {
throw new DeveloperError('options.url is required.');
}
if (!defined(options.layer)) {
throw new DeveloperError('options.layer is required.');
}
if (!defined(options.style)) {
throw new DeveloperError('options.style is required.');
}
if (!defined(options.tileMatrixSetID)) {
throw new DeveloperError('options.tileMatrixSetID is required.');
}
//>>includeEnd('debug');
this._url = options.url;
this._layer = options.layer;
this._style = options.style;
this._tileMatrixSetID = options.tileMatrixSetID;
this._tileMatrixLabels = options.tileMatrixLabels;
this._format = defaultValue(options.format, 'image/jpeg');
this._proxy = options.proxy;
this._tileDiscardPolicy = options.tileDiscardPolicy;
this._tilingScheme = defined(options.tilingScheme) ? options.tilingScheme : new WebMercatorTilingScheme({ ellipsoid : options.ellipsoid });
this._tileWidth = defaultValue(options.tileWidth, 256);
this._tileHeight = defaultValue(options.tileHeight, 256);
this._minimumLevel = defaultValue(options.minimumLevel, 0);
this._maximumLevel = options.maximumLevel;
this._rectangle = defaultValue(options.rectangle, this._tilingScheme.rectangle);
this._readyPromise = when.resolve(true);
// Check the number of tiles at the minimum level. If it's more than four,
// throw an exception, because starting at the higher minimum
// level will cause too many tiles to be downloaded and rendered.
var swTile = this._tilingScheme.positionToTileXY(Rectangle.southwest(this._rectangle), this._minimumLevel);
var neTile = this._tilingScheme.positionToTileXY(Rectangle.northeast(this._rectangle), this._minimumLevel);
var tileCount = (Math.abs(neTile.x - swTile.x) + 1) * (Math.abs(neTile.y - swTile.y) + 1);
//>>includeStart('debug', pragmas.debug);
if (tileCount > 4) {
throw new DeveloperError('The imagery provider\'s rectangle and minimumLevel indicate that there are ' + tileCount + ' tiles at the minimum level. Imagery providers with more than four tiles at the minimum level are not supported.');
}
//>>includeEnd('debug');
this._errorEvent = new Event();
var credit = options.credit;
this._credit = typeof credit === 'string' ? new Credit(credit) : credit;
this._subdomains = options.subdomains;
if (isArray(this._subdomains)) {
this._subdomains = this._subdomains.slice();
} else if (defined(this._subdomains) && this._subdomains.length > 0) {
this._subdomains = this._subdomains.split('');
} else {
this._subdomains = ['a', 'b', 'c'];
}
}
var defaultParameters = freezeObject({
service : 'WMTS',
version : '1.0.0',
request : 'GetTile'
});
function buildImageUrl(imageryProvider, col, row, level) {
var labels = imageryProvider._tileMatrixLabels;
var tileMatrix = defined(labels) ? labels[level] : level.toString();
var subdomains = imageryProvider._subdomains;
var url;
<|fim▁hole|>
if (imageryProvider._url.indexOf('{') >= 0) {
// resolve tile-URL template
url = imageryProvider._url
.replace('{style}', imageryProvider._style)
.replace('{Style}', imageryProvider._style)
.replace('{TileMatrixSet}', imageryProvider._tileMatrixSetID)
.replace('{TileMatrix}', tileMatrix)
.replace('{TileRow}', row.toString())
.replace('{TileCol}', col.toString())
.replace('{s}', subdomains[(col + row + level) % subdomains.length]);
}
else {
// build KVP request
var uri = new Uri(imageryProvider._url);
var queryOptions = queryToObject(defaultValue(uri.query, ''));
queryOptions = combine(defaultParameters, queryOptions);
queryOptions.tilematrix = tileMatrix;
queryOptions.layer = imageryProvider._layer;
queryOptions.style = imageryProvider._style;
queryOptions.tilerow = row;
queryOptions.tilecol = col;
queryOptions.tilematrixset = imageryProvider._tileMatrixSetID;
queryOptions.format = imageryProvider._format;
uri.query = objectToQuery(queryOptions);
url = uri.toString();
}
var proxy = imageryProvider._proxy;
if (defined(proxy)) {
url = proxy.getURL(url);
}
return url;
}
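    // Illustration only (endpoint, layer and tile indices below are hypothetical):
    // a template URL such as
    //   'https://example.com/wmts/tile/1.0.0/MyLayer/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.jpg'
    // is filled in by direct substitution above, while a bare endpoint such as
    //   'https://example.com/wmts'
    // is expanded into a KVP GetTile request along the lines of
    //   'https://example.com/wmts?service=WMTS&version=1.0.0&request=GetTile&layer=MyLayer&style=default
    //    &tilematrixset=default028mm&tilematrix=3&tilerow=2&tilecol=5&format=image%2Fjpeg'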
defineProperties(WebMapTileServiceImageryProvider.prototype, {
/**
* Gets the URL of the service hosting the imagery.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {String}
* @readonly
*/
url : {
get : function() {
return this._url;
}
},
/**
* Gets the proxy used by this provider.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Proxy}
* @readonly
*/
proxy : {
get : function() {
return this._proxy;
}
},
/**
* Gets the width of each tile, in pixels. This function should
* not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Number}
* @readonly
*/
tileWidth : {
get : function() {
return this._tileWidth;
}
},
/**
* Gets the height of each tile, in pixels. This function should
* not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Number}
* @readonly
*/
tileHeight : {
get : function() {
return this._tileHeight;
}
},
/**
* Gets the maximum level-of-detail that can be requested. This function should
* not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Number}
* @readonly
*/
maximumLevel : {
get : function() {
return this._maximumLevel;
}
},
/**
* Gets the minimum level-of-detail that can be requested. This function should
* not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Number}
* @readonly
*/
minimumLevel : {
get : function() {
return this._minimumLevel;
}
},
/**
* Gets the tiling scheme used by this provider. This function should
* not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {TilingScheme}
* @readonly
*/
tilingScheme : {
get : function() {
return this._tilingScheme;
}
},
/**
* Gets the rectangle, in radians, of the imagery provided by this instance. This function should
* not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Rectangle}
* @readonly
*/
rectangle : {
get : function() {
return this._rectangle;
}
},
/**
* Gets the tile discard policy. If not undefined, the discard policy is responsible
* for filtering out "missing" tiles via its shouldDiscardImage function. If this function
* returns undefined, no tiles are filtered. This function should
* not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {TileDiscardPolicy}
* @readonly
*/
tileDiscardPolicy : {
get : function() {
return this._tileDiscardPolicy;
}
},
/**
* Gets an event that is raised when the imagery provider encounters an asynchronous error. By subscribing
* to the event, you will be notified of the error and can potentially recover from it. Event listeners
* are passed an instance of {@link TileProviderError}.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Event}
* @readonly
*/
errorEvent : {
get : function() {
return this._errorEvent;
}
},
/**
* Gets the mime type of images returned by this imagery provider.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {String}
* @readonly
*/
format : {
get : function() {
return this._format;
}
},
/**
* Gets a value indicating whether or not the provider is ready for use.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Boolean}
* @readonly
*/
ready : {
value: true
},
/**
* Gets a promise that resolves to true when the provider is ready for use.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Promise.<Boolean>}
* @readonly
*/
readyPromise : {
get : function() {
return this._readyPromise;
}
},
/**
* Gets the credit to display when this imagery provider is active. Typically this is used to credit
* the source of the imagery. This function should not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Credit}
* @readonly
*/
credit : {
get : function() {
return this._credit;
}
},
/**
* Gets a value indicating whether or not the images provided by this imagery provider
* include an alpha channel. If this property is false, an alpha channel, if present, will
* be ignored. If this property is true, any images without an alpha channel will be treated
* as if their alpha is 1.0 everywhere. When this property is false, memory usage
* and texture upload time are reduced.
* @memberof WebMapTileServiceImageryProvider.prototype
* @type {Boolean}
* @readonly
*/
hasAlphaChannel : {
get : function() {
return true;
}
}
});
/**
* Gets the credits to be displayed when a given tile is displayed.
*
* @param {Number} x The tile X coordinate.
* @param {Number} y The tile Y coordinate.
* @param {Number} level The tile level;
* @returns {Credit[]} The credits to be displayed when the tile is displayed.
*
* @exception {DeveloperError} <code>getTileCredits</code> must not be called before the imagery provider is ready.
*/
WebMapTileServiceImageryProvider.prototype.getTileCredits = function(x, y, level) {
return undefined;
};
/**
* Requests the image for a given tile. This function should
* not be called before {@link WebMapTileServiceImageryProvider#ready} returns true.
*
* @param {Number} x The tile X coordinate.
* @param {Number} y The tile Y coordinate.
* @param {Number} level The tile level.
* @returns {Promise.<Image|Canvas>|undefined} A promise for the image that will resolve when the image is available, or
* undefined if there are too many active requests to the server, and the request
* should be retried later. The resolved image may be either an
* Image or a Canvas DOM object.
*
* @exception {DeveloperError} <code>requestImage</code> must not be called before the imagery provider is ready.
*/
WebMapTileServiceImageryProvider.prototype.requestImage = function(x, y, level) {
var url = buildImageUrl(this, x, y, level);
return ImageryProvider.loadImage(this, url);
};
/**
* Picking features is not currently supported by this imagery provider, so this function simply returns
* undefined.
*
* @param {Number} x The tile X coordinate.
* @param {Number} y The tile Y coordinate.
* @param {Number} level The tile level.
* @param {Number} longitude The longitude at which to pick features.
* @param {Number} latitude The latitude at which to pick features.
* @return {Promise.<ImageryLayerFeatureInfo[]>|undefined} A promise for the picked features that will resolve when the asynchronous
* picking completes. The resolved value is an array of {@link ImageryLayerFeatureInfo}
* instances. The array may be empty if no features are found at the given location.
* It may also be undefined if picking is not supported.
*/
WebMapTileServiceImageryProvider.prototype.pickFeatures = function(x, y, level, longitude, latitude) {
return undefined;
};
return WebMapTileServiceImageryProvider;
});<|fim▁end|> |