<|file_name|>applicationService.js<|end_file_name|><|fim▁begin|>import util from "../utils";
class ApplicationService {
getServiceFramework(controller) {
let sf = util.utilities.sf;
sf.moduleRoot = "PersonaBar";
sf.controller = controller;
return sf;
}
getGeneralSettings(callback) {
const sf = this.getServiceFramework("SEO");
sf.get("GetGeneralSettings", {}, callback);
}
updateGeneralSettings(payload, callback, failureCallback) {
const sf = this.getServiceFramework("SEO");
sf.post("UpdateGeneralSettings", payload, callback, failureCallback);
}
getRegexSettings(callback) {
const sf = this.getServiceFramework("SEO");
sf.get("GetRegexSettings", {}, callback);
}
updateRegexSettings(payload, callback, failureCallback) {
const sf = this.getServiceFramework("SEO");
sf.post("UpdateRegexSettings", payload, callback, failureCallback);
}
testUrl(pageId, queryString, customPageName, callback) {
const sf = this.getServiceFramework("SEO");
sf.get("TestUrl?pageId=" + pageId + "&queryString=" + encodeURIComponent(queryString) + "&customPageName=" + encodeURIComponent(customPageName), {}, callback);
}
testUrlRewrite(uri, callback) {
const sf = this.getServiceFramework("SEO");
sf.get("TestUrlRewrite?uri=" + encodeURIComponent(uri), {}, callback); // encode so reserved characters in the URI survive the query string
}
getSitemapSettings(callback) {
const sf = this.getServiceFramework("SEO");
sf.get("GetSitemapSettings", {}, callback);
}
updateSitemapSettings(payload, callback, failureCallback) {
const sf = this.getServiceFramework("SEO");
sf.post("UpdateSitemapSettings", payload, callback, failureCallback);
}
getSitemapProviders(callback) {
const sf = this.getServiceFramework("SEO");
sf.get("GetSitemapProviders", {}, callback);
}
updateSitemapProvider(payload, callback, failureCallback) {
const sf = this.getServiceFramework("SEO");
sf.post("UpdateSitemapProvider", payload, callback, failureCallback);
}
createVerification(verification, callback, failureCallback) {
const sf = this.getServiceFramework("SEO");
sf.post("CreateVerification?verification=" + encodeURIComponent(verification), {}, callback, failureCallback);
}
clearCache(callback, failureCallback) {
const sf = this.getServiceFramework("SEO");
sf.post("ResetCache", {}, callback, failureCallback);
}
getExtensionUrlProviders(callback) {
const sf = this.getServiceFramework("SEO");
sf.get("GetExtensionUrlProviders", {}, callback);
}
updateExtensionUrlProviderStatus(payload, callback, failureCallback) {
const sf = this.getServiceFramework("SEO");
sf.post("UpdateExtensionUrlProviderStatus", payload, callback, failureCallback);
}
}
const applicationService = new ApplicationService();
export default applicationService;<|fim▁end|>
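A minimal usage sketch for the service above. The import path and the settings field name are assumptions for illustration; only the getGeneralSettings and updateGeneralSettings signatures come from the file itself:
// Hypothetical consumer: read the SEO general settings, flip one flag, write them back.
import applicationService from "./services/applicationService"; // assumed path
applicationService.getGeneralSettings((settings) => {
    const payload = Object.assign({}, settings, { enableSystemGeneratedUrls: true }); // assumed field name
    applicationService.updateGeneralSettings(
        payload,
        () => console.log("SEO settings saved"),
        (err) => console.error("Saving SEO settings failed", err)
    );
});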
<|file_name|>mount_tr.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="tr">
<context>
<name>DeviceActionInfo</name>
<message>
<location filename="../actions/deviceaction_info.cpp" line="45"/>
<source>The device <b><nobr>"%1"</nobr></b> is connected.</source>
<translation type="unfinished"><b><nobr>"%1"</nobr></b> aygıtı bağlı.</translation>
</message>
<message>
<location filename="../actions/deviceaction_info.cpp" line="53"/>
<source>The device <b><nobr>"%1"</nobr></b> is removed.</source>
<translation type="unfinished"><b><nobr>"%1"</nobr></b> aygıtı çıkarıldı.</translation>
</message>
<message>
<location filename="../actions/deviceaction_info.cpp" line="59"/>
<source>Removable media/devices manager</source>
<translation type="unfinished">Çıkarılabilir ortam/aygıt yönetici</translation>
</message>
</context>
<context>
<name>LxQtMountConfiguration</name>
<message>
<source>LxQt Removable media manager settings</source>
<translation type="vanished">LxQt Çıkarılabilir ortam yönetici ayarları</translation>
</message>
<message>
<location filename="../lxqtmountconfiguration.ui" line="14"/>
<source>Removable Media Settings</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../lxqtmountconfiguration.ui" line="20"/>
<source>Behaviour</source>
<translation>Davranış</translation>
</message>
<message>
<location filename="../lxqtmountconfiguration.ui" line="26"/>
<source>When a device is connected </source>
<translation>Bir aygıt bağlandığında </translation>
</message>
<message>
<location filename="../lxqtmountconfiguration.cpp" line="44"/>
<source>Popup menu</source>
<translation>Açılır menü</translation>
</message>
<message>
<location filename="../lxqtmountconfiguration.cpp" line="45"/>
<source>Show info</source>
<translation>Bilgi görüntüle</translation>
</message>
<message>
<location filename="../lxqtmountconfiguration.cpp" line="46"/>
<source>Do nothing</source>
<translation>Hiç bir şey yapma</translation>
</message>
</context>
<context>
<name>MenuDiskItem</name>
<message>
<source>Click to access this device from other applications.</source>
<translation type="vanished">Diğer uygulamalardan bu aygıta erişmek için tıklayın.</translation>
</message>
<message>
<source>Click to eject this disc.</source>
<translation type="vanished">Bu diski çıkartmak için tıklayın.</translation>
</message>
<message>
<location filename="../menudiskitem.cpp" line="120"/>
<location filename="../menudiskitem.cpp" line="155"/>
<source>Removable media/devices manager</source>
<translation type="unfinished">Çıkarılabilir ortam/aygıt yönetici</translation>
</message>
<message>
<location filename="../menudiskitem.cpp" line="121"/>
<source>Mounting of <strong><nobr>"%1"</nobr></strong> failed: %2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../menudiskitem.cpp" line="156"/>
<source>Unmounting of <strong><nobr>"%1"</nobr></strong> failed: %2</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>MountButton</name>
<message>
<location filename="../mountbutton.cpp" line="38"/>
<source>Removable media/devices manager</source>
<translation>Çıkarılabilir ortam/aygıt yönetici</translation>
</message>
<message>
<source>The device <b><nobr>"%1"</nobr></b> is connected.</source>
<translation type="vanished"><b><nobr>"%1"</nobr></b> aygıtı bağlı.</translation>
</message>
<message>
<source>The device <b><nobr>"%1"</nobr></b> is removed.</source>
<translation type="vanished"><b><nobr>"%1"</nobr></b> aygıtı çıkarıldı.</translation>
</message>
<message>
<source>No devices Available.</source>
<translation type="vanished">Erişilebilir aygıt yok.</translation>
</message>
</context>
<context>
<name>Popup</name>
<message>
<location filename="../popup.cpp" line="68"/>
<source>No devices are available</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|>
<|file_name|>simple_worker.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate log;
extern crate bevy;
use std::collections::HashMap;
use std::sync::mpsc::{Receiver, Sender, channel};
use std::net::TcpStream;
use std::thread;
use bevy::net::worker::Worker;
use bevy::net::client::{ClientID};
use bevy::net::work::{Work,WorkIdentifier,WorkEvent,ClientEvent,WorkerEvent,WorkEventLoop,InputStorage};
use bevy::entity::{Entity, EntityTimeline, EntityResult, EntityReference};
use bevy::logging::simple_logger;
use bevy::asset::AssetManager;
use bevy::util::time::{Ticker, Duration};
use bevy::util::time;
use bevy::util::binary_io::{BinaryWrite, BinaryReadDep, BinaryWriteDep, BinaryError};
use bevy::math::{Vector3, Quaternion, Decomposed, Transform};
// A zone is a portion of a game world. It contains a map of tiles and cube entities. A new cube
// entity is created for each player. ZoneWork will take its Clients' inputs, use them to move their
// corresponding cubes, and send the updates to all of the Clients. ZoneWork implements the Work
// trait so that a Worker can run it.
struct ZoneWork {
clients: HashMap<ClientID, TcpStream>, // map unique ClientIDs to their connections
client_entities: HashMap<ClientID, EntityReference>, // map unique ClientIDs to their character
// in the form of a reference
update_duration: Duration,
worker_event_rx: Option<Receiver<WorkerEvent>>,
client_event_rx: Option<Receiver<ClientEvent>>,
client_event_tx: Sender<ClientEvent>,
entity_timeline: EntityTimeline,
id_message: Entity,
time_message: Entity,
asset_manager: AssetManager,
next_id: u32
}
impl ZoneWork {
pub fn new(updates_per_second: u32, mut asset_manager: AssetManager) -> ZoneWork {
let (client_message_tx, client_message_rx) = channel();
let id_message = Entity::new(asset_manager.get_entity_template("entity_templates/messages/id_assignment.json").unwrap(), 0u32);
let time_message = Entity::new(asset_manager.get_entity_template("entity_templates/messages/time.json").unwrap(), 0u32);
ZoneWork {
clients: HashMap::new(),
client_entities: HashMap::new(),
update_duration: Duration::milliseconds((1000.0f64 / updates_per_second as f64) as i64),
worker_event_rx: None,
entity_timeline: EntityTimeline::new(3),
asset_manager: asset_manager,
client_event_rx: Some(client_message_rx),
client_event_tx: client_message_tx,
id_message: id_message,
time_message: time_message,
next_id: 0
}
}
// Add map tiles to the current state
pub fn initialize_map(&mut self) {
let tile_template = self.asset_manager.get_entity_template("entity_templates/tile.json").unwrap();
let mut state = self.entity_timeline.get_current_state_mut().unwrap();
for x in 0..10 {
for y in 0..10 {
let mut new_tile = Entity::new(tile_template.clone(), (x+y*10) as u32);
new_tile.set("position", Vector3::<f32>::new((x*2) as f32, (y*2) as f32, -1.0)).unwrap();
state.add_entity(new_tile).unwrap();
}
}
}
// Consume the ZoneWork's ClientEvent Receiver and return it, if it exists
pub fn client_events(&mut self) -> Option<Receiver<ClientEvent>> {
self.client_event_rx.take()
}
// Consume the ZoneWork's WorkerEvent Receiver and return it, if it exists
pub fn worker_events(&mut self) -> Option<Receiver<WorkerEvent>> {
self.worker_event_rx.take()
}
pub fn get_unique_client_id(&mut self) -> u32 {
let id = self.next_id;
self.next_id += 1;
id
}
// Adds a new client from a TcpStream
pub fn add_client(&mut self, mut stream: TcpStream) {
// Load the cube template and create a new Cube entity
let cube_template = self.asset_manager.get_entity_template("entity_templates/cube.json").unwrap();
let id = self.get_unique_client_id();
let new_shape = Entity::new(cube_template, id);
self.client_entities.insert(id, new_shape.get_reference());
// Add the client's character to the current state
self.entity_timeline.get_current_state_mut().unwrap().add_entity(new_shape).unwrap();
self.clients.insert(id, stream.try_clone().unwrap());
self.send_id_assignment(id);
self.send_full_update(id);
// Get an asset manager for this client's message thread
let mut asset_manager = self.asset_manager.clone();
// Get a copy of the ClientEvent sender for this client's message thread
let client_event_tx = self.client_event_tx.clone();
// Listen for client messages on a new thread
thread::spawn(move || {
loop {
match EntityResult::binary_read_dep(&mut stream, &mut asset_manager) {
Ok(entity_result) => {
client_event_tx.send(ClientEvent::Message(id, entity_result)).unwrap();
},
Err(err) => {
match err {
BinaryError::UnexpectedEOF => {},
BinaryError::Io(err) => {error!("{:?}", err);},
}
break;
}
}
}
client_event_tx.send(ClientEvent::Disconnected(id)).unwrap();
});
}
// Process a single WorkerEvent
pub fn handle_worker_event(&mut self, worker_event: WorkerEvent) {
match worker_event {
WorkerEvent::AddClient(stream) => {
self.add_client(stream);
}
}
}
pub fn handle_client_event(&mut self, client_event: ClientEvent, client_input: &mut HashMap<ClientID, InputStorage>) {
match client_event {
ClientEvent::Disconnected(id) => {
// If a client disconnects, remove it from the game state
let reference = self.client_entities.remove(&id).unwrap();
self.clients.remove(&id);
client_input.remove(&id);
self.entity_timeline.get_current_state_mut().unwrap().remove_entity(reference).unwrap();
},
ClientEvent::Message(id, entity_result) => {
match entity_result {
EntityResult::Dynamic(input_entity) => {
// The only dynamic entity a client sends is PlayerInput
let input_duration = input_entity.get_duration("duration").unwrap();
let input_type = input_entity.get_string("type").unwrap();
// Add the input to the client's InputStorage
let mut input_storage = client_input.entry(id).or_insert_with(|| {InputStorage::new()});
input_storage.add_input(input_type, input_duration);
},
EntityResult::Static(_) => {
// the only static entity a player sends is GetTime
let mut stream = self.clients.get_mut(&id).unwrap();
self.time_message.set("time", time::get_time()).unwrap();
self.time_message.set("update_duration", self.update_duration.clone()).unwrap();
true.binary_write(stream).unwrap();
self.time_message.binary_write(stream).unwrap();
}
}
}
}
}
pub fn handle_player_input(&mut self, client_input: &mut HashMap<ClientID, InputStorage>) {
// input only modifies the current state
let mut current_state = self.entity_timeline.get_current_state_mut().unwrap();
// Iterate over every client InputStorage. If it has input, process that input
for (id, storage) in client_input.iter_mut() {
if storage.has_input() {
// Get the current player's position, rotation, and speed
let client_entity_reference = self.client_entities.get(&id).unwrap();
let (position, mut rotation, speed) = { // This is done for borrowing reasons
let client_entity = current_state.get_entity(&client_entity_reference).unwrap();
let position = client_entity.get_vector3f("position").unwrap().clone();
let rotation = client_entity.get_f32("rotation").unwrap().clone();
let speed = client_entity.get_f32("speed").unwrap().clone();
(position, rotation, speed)
};
// For each of the input types, remove up to update_duration from the input storage,
// then run that input type for the returned duration
match storage.remove_input("forward", &self.update_duration) {
Some(duration) => {
let input_milliseconds = duration.num_milliseconds();
let new_position = position +
move_vector(rotation, speed, input_milliseconds);
current_state.set(client_entity_reference, "position", new_position).unwrap();
},
None => {}
}
match storage.remove_input("back", &self.update_duration) {
Some(duration) => {
let input_milliseconds = duration.num_milliseconds();
let new_position = position -
move_vector(rotation, speed, input_milliseconds);
current_state.set(client_entity_reference, "position", new_position).unwrap();
},
None => {}
}
match storage.remove_input("rotate_counterclockwise", &self.update_duration) {
Some(duration) => {
let input_milliseconds = duration.num_milliseconds();
rotation += 0.001 * (input_milliseconds as f32);
current_state.set(client_entity_reference, "rotation", rotation).unwrap();
},
None => {}
}
match storage.remove_input("rotate_clockwise", &self.update_duration) {
Some(duration) => {
let input_milliseconds = duration.num_milliseconds();
rotation -= 0.001 * (input_milliseconds as f32);
current_state.set(client_entity_reference, "rotation", rotation).unwrap();
},
None => {}
}
}
}
}
pub fn send_full_update(&mut self, id: ClientID) {
let mut stream = self.clients.get_mut(&id).unwrap();
let mut states = self.entity_timeline.states.iter();
// Write the first state in full
match states.next() {
Some((time, state)) => {
false.binary_write(stream).unwrap();
state.binary_write_dep(stream, true).unwrap();
time.binary_write(stream).unwrap();
},
None => {}
}
// Only write changes for the other states
for (time, state) in states {
if state.has_changes() {
false.binary_write(stream).unwrap();
state.binary_write_dep(stream, false).unwrap();
time.binary_write(stream).unwrap();
}
}
}
// Send a client its entity id
pub fn send_id_assignment(&mut self, id: ClientID) {
let mut stream = self.clients.get_mut(&id).unwrap();
self.id_message.set("id", id).unwrap();
true.binary_write(stream).unwrap();
self.id_message.binary_write(stream).unwrap();
}
// Start the worker's event loop
pub fn start(&mut self) {
// Create a new map for client InputStorages
let mut client_input = HashMap::new();
// Create a new eventloop and listen for WorkerEvents, ClientEvents, and update ticks
let mut event_loop = WorkEventLoop::new();
// update the game state every update_duration
let mut ticker = Ticker::new(&self.update_duration);
event_loop.listen(ticker.event().unwrap());
event_loop.listen(self.worker_events().unwrap());
event_loop.listen(self.client_events().unwrap());
self.entity_timeline.new_state();
self.initialize_map();
loop {
let (event, _) = event_loop.get();
match event {
WorkEvent::Worker(event) => {
trace!("WorkerEvent: {:?}", event);
self.handle_worker_event(event);
},
WorkEvent::Client(event) => {
trace!("ClientEvent: {:?}", event);
self.handle_client_event(event, &mut client_input)
}
WorkEvent::Tick => {
// take up to update_duration from all stored player inputs and handle that amount of input
self.handle_player_input(&mut client_input);
// Send the changes since the last state to all current clients, if there are any changes
// This is simpler, but it would be more efficient to write the state to a buffer and then write the buffer to streams
if self.entity_timeline.get_current_state().unwrap().has_changes() {
for (_, client_stream) in self.clients.iter_mut() {
false.binary_write(client_stream).unwrap();
self.entity_timeline.get_current_state().unwrap()
.binary_write_dep(client_stream, false).unwrap();
self.entity_timeline.current_time.binary_write(client_stream).unwrap();
}
}
self.entity_timeline.new_state();
}
}
}
}
}
impl Work for ZoneWork {
/// Sets up communication between the Work and a Worker
fn set_worker_message_receiver(&mut self, receiver: Receiver<WorkerEvent>) {
self.worker_event_rx = Some(receiver);
}
/// Start the worker. Note: This should normally update the game state and send updates to clients
fn start(&mut self) {
self.start();
}
}
// Get an entity's displacement vector, given its speed, the amount of time it moves, and its rotation
fn move_vector(rotation: f32, speed: f32, milliseconds: i64) -> Vector3<f32> {
let distance = speed * (milliseconds as f32);
let transform = Decomposed {
scale: 1.0,
rot: Quaternion::new((rotation/2.0).cos(),0.0,0.0,(rotation/2.0).sin()),
disp: Vector3::<f32>::new(0.0,0.0,0.0)
};
transform.transform_vector(&Vector3::<f32>::new(0.0,distance,0.0))
}
// Identifies "Zone Work" entities and builds a ZoneWork using that entity's updates_per_second.
struct SimpleWorkIdentifier {
asset_manager: AssetManager
}
impl SimpleWorkIdentifier {
pub fn new(asset_manager: AssetManager) -> SimpleWorkIdentifier {
SimpleWorkIdentifier {
asset_manager: asset_manager
}
}
}
impl WorkIdentifier for SimpleWorkIdentifier {
fn work_from_entity(&self, entity: &Entity) -> Option<Box<Work+Send+'static>> {
if entity.name.as_ref().unwrap() == "Zone Work" {
let updates_per_second = entity.get_u32("updates_per_second").unwrap();
Some(Box::new(ZoneWork::new(updates_per_second, self.asset_manager.clone())))
} else {
None
}
}
}
fn main() {
// Print logs at all levels but trace (debug through error) to the console
simple_logger::init_debug().unwrap();
// simple_logger::init_trace().unwrap();
// create a new AssetManager with "examples/assets" as the root folder
let mut asset_manager = AssetManager::new("examples/assets");
// World state assets
asset_manager.load("component_templates/renderable_mesh.json");
asset_manager.load("entity_templates/cube.json");
asset_manager.load("entity_templates/tile.json");
// Work assignment assets
asset_manager.load("entity_templates/work.json");
asset_manager.load("static_entities/zone_work.json");
// Work and Client messages
asset_manager.load("entity_templates/messages/player_input.json");
asset_manager.load("entity_templates/messages/id_assignment.json");
asset_manager.load("entity_templates/messages/get_time.json");
asset_manager.load("entity_templates/messages/time.json");
asset_manager.load("static_entities/messages/forward_input.json");
asset_manager.load("static_entities/messages/back_input.json");
asset_manager.load("static_entities/messages/rotate_counterclockwise_input.json");
asset_manager.load("static_entities/messages/rotate_clockwise_input.json");
// wait for assets to finish loading
asset_manager.wait_for_loading_assets();
// create a new worker with a SimpleWorkIdentifier, connect to the arbiter, and listen for clients
let simple_work_identifier = Box::new(SimpleWorkIdentifier::new(asset_manager.clone()));
let mut worker = Worker::new(asset_manager, simple_work_identifier,
"127.0.0.1:8091", "127.0.0.1:8080");
worker.start();
// intercept all events and print them
for event in worker.events() {
match event {
_ => {info!("{:?}", event)}
}
}
}<|fim▁end|>
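A quick sanity check of move_vector under the conventions above; a sketch, assuming the cgmath-style Quaternion::new(w, x, y, z) constructor that the file itself uses:
// With rotation = 0 the quaternion is the identity, so the displacement is straight
// along +Y: speed * milliseconds. Hypothetical test name; not part of the dataset row.
#[test]
fn move_vector_identity_rotation() {
    let v = move_vector(0.0, 0.01, 500); // 0.01 units/ms for 500 ms
    assert!((v.y - 5.0).abs() < 1e-5);
    assert!(v.x.abs() < 1e-5 && v.z.abs() < 1e-5);
}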
<|file_name|>test-aws-region.js<|end_file_name|><|fim▁begin|>/*global module, process */
module.exports = process.env.AWS_REGION || 'us-east-1';<|fim▁end|>
<|file_name|>_monitors_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class MonitorsOperations(object):
"""MonitorsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~microsoft_logz.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_monitored_resources(
self,
resource_group_name, # type: str
monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.MonitoredResourceListResponse"]
"""List the resources currently being monitored by the Logz monitor resource.
List the resources currently being monitored by the Logz monitor resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param monitor_name: Monitor resource name.
:type monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either MonitoredResourceListResponse or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~microsoft_logz.models.MonitoredResourceListResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.MonitoredResourceListResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_monitored_resources.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('MonitoredResourceListResponse', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_monitored_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}/listMonitoredResources'} # type: ignore
def list_by_subscription(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.LogzMonitorResourceListResponse"]
"""List all monitors under the specified subscription.
List all monitors under the specified subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LogzMonitorResourceListResponse or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~microsoft_logz.models.LogzMonitorResourceListResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LogzMonitorResourceListResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('LogzMonitorResourceListResponse', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Logz/monitors'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.LogzMonitorResourceListResponse"]
"""List all monitors under the specified resource group.
List all monitors under the specified resource group.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LogzMonitorResourceListResponse or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~microsoft_logz.models.LogzMonitorResourceListResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LogzMonitorResourceListResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('LogzMonitorResourceListResponse', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors'} # type: ignore
def get(
self,
resource_group_name, # type: str
monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.LogzMonitorResource"
"""Get the properties of a specific monitor resource.
Get the properties of a specific monitor resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param monitor_name: Monitor resource name.
:type monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LogzMonitorResource, or the result of cls(response)
:rtype: ~microsoft_logz.models.LogzMonitorResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LogzMonitorResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('LogzMonitorResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}'} # type: ignore
def _create_initial(
self,
resource_group_name, # type: str
monitor_name, # type: str
body=None, # type: Optional["_models.LogzMonitorResource"]
**kwargs # type: Any
):
# type: (...) -> "_models.LogzMonitorResource"
cls = kwargs.pop('cls', None) # type: ClsType["_models.LogzMonitorResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
if body is not None:
body_content = self._serialize.body(body, 'LogzMonitorResource')
else:
body_content = None
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('LogzMonitorResource', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('LogzMonitorResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}'} # type: ignore
def begin_create(
self,
resource_group_name, # type: str
monitor_name, # type: str
body=None, # type: Optional["_models.LogzMonitorResource"]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.LogzMonitorResource"]
"""Create a monitor resource. This create operation can take up to 10 minutes to complete.
Create a monitor resource. This create operation can take up to 10 minutes to complete.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param monitor_name: Monitor resource name.
:type monitor_name: str
:param body:
:type body: ~microsoft_logz.models.LogzMonitorResource
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either LogzMonitorResource or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~microsoft_logz.models.LogzMonitorResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.LogzMonitorResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_initial(
resource_group_name=resource_group_name,
monitor_name=monitor_name,
body=body,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('LogzMonitorResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}'} # type: ignore
def update(
self,
resource_group_name, # type: str
monitor_name, # type: str
body=None, # type: Optional["_models.LogzMonitorResourceUpdateParameters"]
**kwargs # type: Any
):
# type: (...) -> "_models.LogzMonitorResource"
"""Update a monitor resource.
Update a monitor resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param monitor_name: Monitor resource name.
:type monitor_name: str
:param body:
:type body: ~microsoft_logz.models.LogzMonitorResourceUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LogzMonitorResource, or the result of cls(response)
:rtype: ~microsoft_logz.models.LogzMonitorResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LogzMonitorResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
if body is not None:
body_content = self._serialize.body(body, 'LogzMonitorResourceUpdateParameters')
else:
body_content = None
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('LogzMonitorResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 202:
response_headers['location']=self._deserialize('str', response.headers.get('location'))
if cls:
return cls(pipeline_response, None, response_headers)
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Delete a monitor resource. This delete operation can take up to 10 minutes to complete.
Delete a monitor resource. This delete operation can take up to 10 minutes to complete.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param monitor_name: Monitor resource name.
:type monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
monitor_name=monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}'} # type: ignore
def list_user_roles(
self,
resource_group_name, # type: str
monitor_name, # type: str
body=None, # type: Optional["_models.UserRoleRequest"]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.UserRoleListResponse"]
"""List the user's roles configured on Logz.io side for the account corresponding to the monitor resource.
List the user's roles configured on Logz.io side for the account corresponding to the monitor
resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param monitor_name: Monitor resource name.
:type monitor_name: str
:param body:
:type body: ~microsoft_logz.models.UserRoleRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either UserRoleListResponse or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~microsoft_logz.models.UserRoleListResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.UserRoleListResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01"
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_user_roles.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
if body is not None:
body_content = self._serialize.body(body, 'UserRoleRequest')
else:
body_content = None
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
if body is not None:
body_content = self._serialize.body(body, 'UserRoleRequest')
else:
body_content = None
body_content_kwargs['content'] = body_content
request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('UserRoleListResponse', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_user_roles.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}/listUserRoles'}  # type: ignore<|fim▁end|>
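A short sketch of how these generated operations are typically driven. The client class and credential wiring below are assumptions for illustration (the real entry point depends on the generated package); only the monitors method names and signatures come from this file:
# Hypothetical client entry point; only the .monitors method names are taken from the file above.
from azure.identity import DefaultAzureCredential
from microsoft_logz import MicrosoftLogz  # assumed package/client name

client = MicrosoftLogz(credential=DefaultAzureCredential(), subscription_id="<subscription-id>")

# list_* methods return ItemPaged iterables; page requests are issued lazily during iteration.
for monitor in client.monitors.list_by_resource_group("my-resource-group"):
    print(monitor.name)

# begin_create returns an LROPoller; result() blocks until the long-running PUT completes.
poller = client.monitors.begin_create("my-resource-group", "my-monitor", body=None)
created = poller.result()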
<|file_name|>SoftwareTools.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
from enigma import eConsoleAppContainer
from Components.Console import Console
from Components.About import about
from Components.PackageInfo import PackageInfoHandler
from Components.Language import language
from Components.Sources.List import List
from Components.Ipkg import IpkgComponent
from Components.Network import iNetwork
from Tools.Directories import pathExists, fileExists, resolveFilename, SCOPE_METADIR
from Tools.HardwareInfo import HardwareInfo
from time import time
from boxbranding import getImageVersion
class SoftwareTools(PackageInfoHandler):
lastDownloadDate = None
NetworkConnectionAvailable = None
list_updating = False
available_updates = 0
available_updatelist = []
available_packetlist = []
installed_packetlist = {}
def __init__(self):
aboutInfo = getImageVersion()
if aboutInfo.startswith("dev-"):
self.ImageVersion = 'Experimental'
else:
self.ImageVersion = 'Stable'
self.language = language.getLanguage()[:2] # getLanguage returns e.g. "fi_FI" for "language_country"
PackageInfoHandler.__init__(self, self.statusCallback, blocking = False, neededTag = 'ALL_TAGS', neededFlag = self.ImageVersion)
self.directory = resolveFilename(SCOPE_METADIR)
self.hardware_info = HardwareInfo()
self.list = List([])
self.NotifierCallback = None
self.Console = Console()
self.UpdateConsole = Console()
self.cmdList = []
self.unwanted_extensions = ('-dbg', '-dev', '-doc', '-staticdev', '-src')
self.ipkg = IpkgComponent()
self.ipkg.addCallback(self.ipkgCallback)
def statusCallback(self, status, progress):
pass
def startSoftwareTools(self, callback = None):
if callback is not None:
self.NotifierCallback = callback
iNetwork.checkNetworkState(self.checkNetworkCB)
def checkNetworkCB(self, data):
if data is not None:
if data <= 2:
self.NetworkConnectionAvailable = True
self.getUpdates()
else:
self.NetworkConnectionAvailable = False
self.getUpdates()
def getUpdates(self, callback = None):
if self.lastDownloadDate is None:
if self.NetworkConnectionAvailable:
self.lastDownloadDate = time()
if self.list_updating is False and callback is None:
self.list_updating = True
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is False and callback is not None:
self.list_updating = True
self.NotifierCallback = callback
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is True and callback is not None:
self.NotifierCallback = callback
else:
self.list_updating = False
if callback is not None:
callback(False)
elif self.NotifierCallback is not None:
self.NotifierCallback(False)
else:
if self.NetworkConnectionAvailable:
self.lastDownloadDate = time()
if self.list_updating is False and callback is None:
self.list_updating = True
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is False and callback is not None:
self.list_updating = True
self.NotifierCallback = callback
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is True and callback is not None:
self.NotifierCallback = callback
else:
if self.list_updating and callback is not None:
self.NotifierCallback = callback
self.startIpkgListAvailable()
else:
self.list_updating = False
if callback is not None:
callback(False)
elif self.NotifierCallback is not None:
self.NotifierCallback(False)
def ipkgCallback(self, event, param):
if event == IpkgComponent.EVENT_ERROR:
self.list_updating = False
if self.NotifierCallback is not None:
self.NotifierCallback(False)
elif event == IpkgComponent.EVENT_DONE:
if self.list_updating:
self.startIpkgListAvailable()
pass
def startIpkgListAvailable(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " list"
self.UpdateConsole.ePopen(cmd, self.IpkgListAvailableCB, callback)
def IpkgListAvailableCB(self, result, retval, extra_args = None):
(callback) = extra_args or None
if result:
if self.list_updating:
self.available_packetlist = []
for x in result.splitlines():
tokens = x.split(' - ')
name = tokens[0].strip()
if not any(name.endswith(x) for x in self.unwanted_extensions):
l = len(tokens)
version = l > 1 and tokens[1].strip() or ""
descr = l > 2 and tokens[2].strip() or ""
self.available_packetlist.append([name, version, descr])
if callback is None:
self.startInstallMetaPackage()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def startInstallMetaPackage(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if self.NetworkConnectionAvailable:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " install enigma2-meta enigma2-plugins-meta enigma2-skins-meta"
self.UpdateConsole.ePopen(cmd, self.InstallMetaPackageCB, callback)
else:
self.InstallMetaPackageCB(True)
def InstallMetaPackageCB(self, result, retval = None, extra_args = None):
(callback) = extra_args or None
if result:
self.fillPackagesIndexList()
if callback is None:
self.startIpkgListInstalled()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def startIpkgListInstalled(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " list_installed"
self.UpdateConsole.ePopen(cmd, self.IpkgListInstalledCB, callback)
def IpkgListInstalledCB(self, result, retval, extra_args = None):
(callback) = extra_args or None
if result:
self.installed_packetlist = {}
for x in result.splitlines():
tokens = x.split(' - ')
name = tokens[0].strip()
if not any(name.endswith(x) for x in self.unwanted_extensions):
l = len(tokens)
version = l > 1 and tokens[1].strip() or ""
self.installed_packetlist[name] = version<|fim▁hole|> for package in self.packagesIndexlist[:]:
attributes = package[0]["attributes"]
if "packagetype" in attributes:
if attributes["packagetype"] == "internal":
self.packagesIndexlist.remove(package)
if callback is None:
self.countUpdates()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def countUpdates(self, callback = None):
self.available_updates = 0
self.available_updatelist = []
for package in self.packagesIndexlist[:]:
attributes = package[0]["attributes"]
packagename = attributes["packagename"]
for x in self.available_packetlist:
if x[0] == packagename:
if packagename in self.installed_packetlist:
if self.installed_packetlist[packagename] != x[1]:
self.available_updates +=1
self.available_updatelist.append([packagename])
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(True)
callback = None
elif self.NotifierCallback is not None:
self.NotifierCallback(True)
self.NotifierCallback = None
def startIpkgUpdate(self, callback = None):
if not self.Console:
self.Console = Console()
cmd = self.ipkg.ipkg + " update"
self.Console.ePopen(cmd, self.IpkgUpdateCB, callback)
def IpkgUpdateCB(self, result, retval, extra_args = None):
(callback) = extra_args or None
if result:
if self.Console:
if len(self.Console.appContainers) == 0:
if callback is not None:
callback(True)
callback = None
def cleanupSoftwareTools(self):
self.list_updating = False
if self.NotifierCallback is not None:
self.NotifierCallback = None
self.ipkg.stop()
if self.Console is not None:
if len(self.Console.appContainers):
for name in self.Console.appContainers.keys():
self.Console.kill(name)
if self.UpdateConsole is not None:
if len(self.UpdateConsole.appContainers):
for name in self.UpdateConsole.appContainers.keys():
self.UpdateConsole.kill(name)
def verifyPrerequisites(self, prerequisites):
if "hardware" in prerequisites:
hardware_found = False
for hardware in prerequisites["hardware"]:
if hardware == self.hardware_info.device_name:
hardware_found = True
if not hardware_found:
return False
return True
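# Example (sketch): a package whose meta contains
#   {"prerequisites": {"hardware": ["dm800"]}}
# passes verifyPrerequisites() only on a box whose device_name is "dm800".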
iSoftwareTools = SoftwareTools()<|fim▁end|> | for package in self.packagesIndexlist[:]:
if not self.verifyPrerequisites(package[0]["prerequisites"]):
self.packagesIndexlist.remove(package) |
<|file_name|>jaglk.py<|end_file_name|><|fim▁begin|>import re
from jaglt import *
from jaglf import *
''' Regexes '''
JRE_Num = [
re.compile(r"[0-8]+o"), #Octal
re.compile(r"[\dA-F]+x"), #Hex
re.compile(r"(?:-?\d+(?:\.(?:\d+)?)?|\.\d+|-?\d+)e-?\d+"), #Scientific
re.compile(r"-?\d+(?:\.(?:\d+)?)?|-?\.\d+"), #Decimal
]
JRE_Str = re.compile(r'"(?:[^\\"]|\\.)*"') #String syntactic sugar
JRE_EStr = re.compile(r"'(?:[^\\/]|\\.)*'") #Escaped string syntactic sugar
''' Preprocessor for shorthands '''
def preprocess(string):
string = re.sub(r'([^\s\d\}orfuwF/%z])([orfuwF/%z])(?=([^"\\]*(\\.|"([^"\\]*\\.)*[^"\\]*"))*[^"]*$)(?=([^\'\\]*(\\.|\'([^\'\\]*\\.)*[^\'\\]*\'))*[^\']*$)', r"{\1}\2", string) #Shorthand for one function map
return string
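# e.g. preprocess("+f") -> "{+}f" (sketch): a lone token directly before a
# map-like operator is wrapped into a one-function block by the regex above.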
''' Make a bracket map for array '''
def makeOuterMap(string, start, end, escape=None):
if string:
q, m = [], []
lst = None
for i, x in enumerate(string):
if (escape and lst != escape) or not escape:
if x == start:
q.append(i)
elif x == end:
if len(q) == 1:
m.append((q.pop(), i))
else:
q.pop()
lst = x
return m
else:
return []
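# e.g. makeOuterMap("(a(b))", "(", ")") -> [(0, 5)] (sketch): only the
# outermost bracket pairs are reported; nested pairs are popped and dropped.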
''' Level Classification '''
def classifyLevel(string):
c = []
lists = map(lambda x: (x[0], x[1], JArray), makeOuterMap(string, "(", ")")) #Extend with arrays
lists.extend(map(lambda x: (x[0], x[1], JBlock), makeOuterMap(string, "{", "}"))) #Extend with blocks
lists.extend(map(lambda x: (x.start(), x.end(), str) , JRE_Str.finditer(string))) #Extend with strings
lists.extend(map(lambda x: (x.start(), x.end(), EStr), JRE_EStr.finditer(string)))
c.extend(lists)
ints = []
for r in JRE_Num:
matches = map(lambda x: (x.start(), x.end(), JNum) ,list(r.finditer(string))) #Get matches for int type
matches = filter(lambda x: not any(y[0] <= x[0] < y[1] for y in ints), matches) #Filter out overlapping int matches
ints.extend(matches)
c.extend(ints)
symbols = [True for i in range(len(string))] #Make a map to detect symbols
for s, e, _ in c: #Filter out all already detected types
if _ in [JArray, JBlock]:
e = e + 1
for x in range(s, e):
symbols[x] = False
for i, v in enumerate(string): #Filter out all whitespace
if re.match(r"\s", v):
symbols[i] = False
for i, s in enumerate(symbols): #Make everything a symbol
if s:
c.append((i, i+1, JFunc))
c = filter(lambda x: not any(y[0] < x[0] < y[1] for y in lists), c) #Filter out any elements in arrays or blocks
return sorted(c)
''' Recursively (possibly) create array '''
def makeArray(string):
inner = string[1:]
lev = classifyLevel(inner)
arr = []
for s, e, clss in lev:
if clss == JNum:
arr.append(JNum(inner[s:e]))
elif clss == JFunc:
arr.append(JFunc(inner[s:e]))
elif clss in [JArray, JBlock]:
arr.append(makeArray(inner[s:e]))
elif clss == str:<|fim▁hole|> elif clss == EStr:
arr.append(JArray(map(lambda x: JNum(ord(x)), inner[s+1:e-1].decode("string_escape"))))
if string[0] == "(":
return JArray(arr)
return JBlock(arr)
''' Tokenizer '''
def tokenize(string):
string = preprocess(string)
il = classifyLevel(string)
tokens = []
for s, e, clss in il:
if clss == JNum:
tokens.append(JNum(string[s:e]))
elif clss == JFunc:
tokens.append(JFunc(string[s:e]))
elif clss in [JArray, JBlock]:
tokens.append(makeArray(string[s:e]))
elif clss == str:
tokens.append(JArray(map(lambda x: JNum(ord(x)), string[s+1:e-1])))
elif clss == EStr:
tokens.append(JArray(map(lambda x: JNum(ord(x)), string[s+1:e-1].decode("string_escape"))))
return tokens<|fim▁end|> | arr.append(JArray(map(lambda x: JNum(ord(x)), inner[s+1:e-1]))) |
<|file_name|>annotations_line2d.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
annotations_line2d module
Created on Thu Sep 10 21:51:23 2015
@author: James Sorenson
"""
import matplotlib
import matplotlib.pyplot as plt
# This is to prevent overlapping annotations from being dragged simultaneously
# due to the multi-threaded nature of the matplotlib gui.
import threading
###########################
# Globals
###########################
attr_name = 'annotations_line2d'
_event= None # Used for debugging
###########################
# Class definitions
###########################
class DraggableAnnotationLine2D(matplotlib.offsetbox.DraggableBase):
"""This class is like Matplotlib’s DraggableAnnotation, but this one actually works.
Apparently, the original class can't handle annotations that are created
using 'offset points' from a data point. This class ONLY works with those.
Left-click to move the annotation without changing the data point.
Middle-click to slide the annotation to a different data point.
Right-click to delete the annotation.
The original annotation artist is in self.ref_artist.
We save additional info in self.line, self.index, and self.formatter.
"""
# Class-level lock to make sure only ONE annotation is moved at a time.
    # Due to Qt's multi-threaded nature, it's best to use a real thread lock.
_drag_lock=threading.Lock()
_counter=0 # Just a counter to give each annotation a unique ID.
def __init__(self, ref_artist, line=None, index=None, formatter=None, use_blit=True):
        # Use the base init (This isn't C++ where the parent is called automatically.)
super().__init__(ref_artist, use_blit=use_blit)
# Store the other parameters
self.line=line
self.index=index
self.formatter=formatter
# Create a unique ID for this annotation (for debugging)
DraggableAnnotationLine2D._counter += 1
DraggableAnnotationLine2D._counter %= 2**31 # Not too big
self.id = DraggableAnnotationLine2D._counter
#print('Init',self.id)
if formatter is not None:
# Get and set the text
self.ref_artist.set_text(self.formatter(line, index))
#Update the canvas to make sure the annotation is visible
self.canvas.draw()
def artist_picker(self, artist, event):
"""
Determines if the artist should enable move for this mouse button event
"""
# Make sure this only happens with a click. Ignore scroll.
# Left or Right click works on all of these annotations
# Middle click (slide) requires that line and index are assigned
if (event.button in (1,3)) or \
(event.button ==2 and self.line is not None and self.index is not None):
# Good action. We only want to drag if the cursor is inside the
# box, not the arrow and the area around it.
# contains(event) returns (bool,attr)
#print('Picked',self.id)
drag = self.ref_artist.get_bbox_patch().contains(event)
if drag[0]:
                #Make sure no other annotations are dragging.
# wait=False means no block. True if a successful lock.
if DraggableAnnotationLine2D._drag_lock.acquire(False):
# Record the mouse button
self.button=event.button
#print('Claim',self.id)
return drag
# If we made it here, then we're not moving
return (False, None)
def save_offset(self):
"""
On button-down, this saves the current location of the annotation.
Annotation object is in self.ref_artist.
"""
#print('Save',self.id)
if self.button == 1:
# Left-click. Move the annotation while pointing at the same data.
# Get the starting position of the artist in points (relative to data point)
self.drag_start_text_points = self.ref_artist.get_position()
# Get the inverted transform so we can convert pixels to paints.
self.drag_trans_mat = self.ref_artist.get_transform().inverted().get_matrix()
elif self.button == 2:
# Middle-click. We need some additional information to slide the data.
self.xydata=self.line.get_xydata() #just makes it easier (this does NOT copy)
# we need the pixels of the starting data point (not the cursor)
self.drag_start_pixels = self.ref_artist.get_axes().transData.transform(self.ref_artist.xy)
# Get the translation from pixels to data for annotation.xy
self.drag_trans_pix2dat = self.ref_artist.get_axes().transData.inverted()
def update_offset(self, dx, dy):
"""
dx and dy is the total pixel offset from the point where the mouse
drag started.
"""
        if self.button == 1: # Left-click
# Scale delta pixels to delta points using parts of annotation transform.
# The full transform includes the data offset, but set position already does that.
new_position=(self.drag_start_text_points[0] + dx * self.drag_trans_mat[0,0],
self.drag_start_text_points[1] + dy * self.drag_trans_mat[1,1])
# Apply as delta points from data point
self.ref_artist.set_position(new_position)
        elif self.button == 2: # Middle-click
            # We may have a logarithmic scale, but update_offset only gives us delta pixels.
# Add the delta to the starting pixels, then convert to data coordinates
pixels_dxy = matplotlib.numpy.array((dx,dy))
new_data_xy = self.drag_trans_pix2dat.transform(self.drag_start_pixels+pixels_dxy)<|fim▁hole|> #Move left
index -= 1
while (index < self.xydata.shape[0] - 1) and (self.xydata[index+1][0] < new_data_xy[0]):
# Move right
index += 1
if index != self.index:
# we moved an index! Update the annotation
self.ref_artist.xy=self.xydata[index,:]
self.index=index
if self.formatter is not None:
# Update the text in the annotation
self.ref_artist.set_text(self.formatter(self.line, index))
def finalize_offset(self):
"""Called when the mouse button is released, if this was picked in the first place."""
#print('Finalize',self.id)
if self.button == 2 and self.formatter is not None:
# Print out annotation text for the user to copy/paste
self.print_annotation()
elif self.button == 3:
# Delete annotation
self.remove()
def on_release(self,event):
"""
Called when the mouse button is released, whether or not this was picked.
We extend this function so that we are guaranteed to release the thread lock.
"""
# Call the original
super().on_release(event)
#Everyone tries to remove the block, just in case the controlling annotation was removed.
try:
DraggableAnnotationLine2D._drag_lock.release()
except RuntimeError:
pass # Already released. Not a concern.
def print_annotation(self):
"""Does exactly what you think it does"""
print('Annotation: {0}, ind={1}\n{2}'.format(self.line.get_label(), self.index, self.ref_artist.get_text()))
def remove(self):
"""Disconnect and delete the annotation."""
#print('Remove',self.id)
self.disconnect() # Disconnect the callbacks
self.ref_artist.remove() # Delete the annotation artist
self.got_artist=False # Tell this class it no longer has an artist
self.canvas.draw() # Update the whole canvas so the annotation disappears
class AnnotationPicker(object):
"""
A class to enable convenient annotations to any plot.
This is meant only for 2D lines.
Left-click to move the annotation without changing the data point.
Middle-click to slide the annotation to a different data point.
Right-click to delete the annotation.
Optional arguments:
artists: (default None) A single or list of artists to attach this to as 'artist annotations'
tolerance : (default 5) Picker tolerance to a line's data point to create an annotation.
formatter : function to generate the string in the annotation. fcn(Line2D artist, index)
    All other keyword arguments will be passed to the annotation.
"""
def __init__(self, artists=None, tolerance=5, formatter=None, button=1, key = 'control', use_blit=True, **kwargs):
# Parse the arguments
self.tolerance = tolerance
self.use_blit = use_blit
self.button = button
self.key=key
if formatter is None: # Use default
self.formatter=self._annotate_line_str
else:
self.formatter = formatter
# Save the annotation parameters
self.annotation_kwargs = dict(xycoords='data', textcoords='offset points',
fontsize=11, picker=True, xytext=(20, 20),
bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
arrowprops=dict(shrink=0.05, headwidth=5, width=1))
# Add in additional/modified user parameters
self.annotation_kwargs.update(kwargs)
# Apply this annotation instance to the given artists and children
if artists is not None:
self.apply(artists)
def apply(self, artists):
"""
Enable picker on lines so that annotations are activated.
This particular Annotation instance will be applied to this artist and
its children (unless the children already have their own instance.
Use 'clear annotaions' if you wish to override children settings.
"""
# This is overly complex, but it allows the user to throw anything at it (figure, axes, line, etc)
# Make it iterable for convenience
artists = _make_iterable(artists)
for artist in artists:
if artist is None:
continue
# Attach this instance to the given artists
setattr(artist, attr_name, self)
# Enable picker to any line contained in this artist that is not already enabled.
if isinstance(artist, matplotlib.lines.Line2D) and not artist.pickable():
lines = [artist]
elif isinstance(artist, matplotlib.axes.Axes):
lines = [line for line in artist.get_lines() if not line.pickable()]
elif isinstance(artist, matplotlib.figure.Figure):
lines = [line for ax in artist.get_axes() for line in ax.get_lines() if not line.pickable()]
else:
lines=[]
for line in lines:
line.set_picker(self.tolerance)
# Make sure the callbacks are enabled for the parent canvas
enable_callbacks(artist)
def annotate(self, line, index, text=None):
"""
Makes a draggable, interactive annotation on the given line,
at the given index, with the given text.
line : Line2D object to annotate
index : The index of the line to put the annotation
text : The text to fill the annotation with. If None, then use default.
Returns a DraggableAnnotationLine2D instance where the annotation artist is in self.ref_artist.
"""
if text is None:
# Get the text from the formatter
formatter=self.formatter
else:
# Manual text is given. Don't use the formatter
formatter = None
# Create the annotation at the designated point
ax=line.get_axes()
annot=ax.annotate(text, line.get_xydata()[index,:], **self.annotation_kwargs)
# Make it draggable using our class, then return the object
return DraggableAnnotationLine2D(annot, line, index, formatter, use_blit=self.use_blit)
def _annotate_line_str(self, line, index):
"""
The default function to take a Line2D artist and index and generate a
string for the annotation box.
"""
xy=line.get_xydata()[index]
return '{0}[{1}]:\nx={2:.9}\ny:{3:.9}'.format(line.get_label(),index,xy[0],xy[1])
def _onpick(self,event):
"""Called by canvas pick event."""
if event.mouseevent.button == self.button and \
event.mouseevent.key == self.key and \
isinstance(event.artist, matplotlib.lines.Line2D):
# More than one index may be in range. Determine the middle index.
ind = event.ind[len(event.ind)//2]
global _event
_event=event
# Generate the annotation
self.annotate(event.artist, ind)
###########################
# Module functions
###########################
def enable_callbacks(artist):
"""
Enable annotation callbacks within this canvas/figure.
This adds the .annotations attribute to the canvas to hold the callbacks.
"""
if isinstance(artist, matplotlib.figure.Figure):
canvas=artist.canvas
elif hasattr(artist, 'get_figure'):
canvas=artist.get_figure().canvas
else:
canvas=artist
if not hasattr(canvas,attr_name):
# Add the callbacks and store as a list in the canvas attribute
callbacks=[]
callbacks.append(canvas.mpl_connect('pick_event', _on_pick_event))
callbacks.append(canvas.mpl_connect('figure_enter_event', _on_figure_enter_event))
setattr(canvas, attr_name, callbacks)
def disable_callbacks(canvas):
"""
Disable all annotation callbacks pertaining to this callback.
We leave the pickers and annotation instances in the artists.
We just get rid of the callback attached to the canvas.
"""
if isinstance(canvas, matplotlib.figure.Figure):
canvas=canvas.canvas # We were given the figure instead
for callback in getattr(canvas, attr_name, []):
canvas.mpl_disconnect(callback)
delattr(canvas, attr_name)
print('AnnotationPicker callback removed from canvas.')
def annotate(line, index, text=None):
"""
Wrapper function around AnnotationPicker.annotate()
This will find the controlling instance of Annotations for the given line
and create an interactive annotation at the given index with the given text.
Input:
line: The matplotlib line object to annotate (plt.figure(1).axes[0].lines[0])
index: The index of the line to annotate.
        text: The annotation text. If None, then the AnnotationPicker.formatter()
is used to generate text at the given line and index.
Returns:
DraggableAnnotationLine2D object
"""
annotations_instance = _find_annotations_instance(line)
if annotations_instance is None:
# Create a default annotation for this line
annotations_instance = AnnotationPicker(line)
setattr(line, attr_name, annotations_instance)
annotations_instance.annotate(line, index, text)
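# Example usage (sketch), given an existing Line2D such as line = plt.gca().lines[0]:
#   annotate(line, 10)               # text comes from the instance's formatter
#   annotate(line, 25, 'manual txt') # explicit annotation text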
def subplots(*args, anno=None, **kwargs):
"""
Identical to plt.subplots(), but also assigns an AnnotationPicker class
to the figure. Use "anno=AnnotationPickerInstance" to use a specific instance
of the AnnotationPicker.
"""
# Since we are using plt.subplots, this will show immediately if interactive.
# gca and gcf will also be updated.
fig,ax_list=plt.subplots(*args, **kwargs)
if anno is None:
# Create default AnnotationPicker that will be connected to the figure
AnnotationPicker(fig)
else:
anno.apply(fig)
return (fig,ax_list)
###########################
# Private Utilites
###########################
def _make_iterable(obj):
"""Return obj as a list if it is not already an iterable object"""
if hasattr(obj,'__iter__'):
return obj
else:
# Make it iterable for consistency
return [obj]
def _find_annotations_instance(artist):
"""
Find the controlling Annotations instance for this artists.
It could be attached to the artist itself, or on the parent axes or figure.
Returns the controlling Annotations instance.
"""
if hasattr(artist, attr_name):
# Instance is attached to the artist itself
return getattr(artist, attr_name)
elif hasattr(artist, 'get_axes' ) and hasattr(artist.get_axes(), attr_name):
# Instance is attached to the axes
return getattr(artist.get_axes(), attr_name)
elif hasattr(artist, 'get_figure') and hasattr(artist.get_figure(), attr_name):
# Instance is attached to the figure
return getattr(artist.get_figure(), attr_name)
# No instance found
return None
def _clear_annotations(artist):
"""
Call this on any artist to clear the annotation instances for that artist
and all of its children. Mostly useful for debugging.
"""
artists = _make_iterable(artist)
for artist in artists:
if hasattr(artist, attr_name):
delattr(artist, attr_name)
        if hasattr(artist, 'get_children'):
_clear_annotations(artist.get_children())
print('All annotations in artist and children deleted.')
###########################
# Canvas Callback functions
###########################
def _on_pick_event(event):
"""
This is what initially gets called when ANY artist in the figure with
picking enabled is picked.
    Starting with the artist itself, this function will determine the closest
AnnotationPicker instance to call. This permits different settings per
line or per axes.
"""
annotations_instance = _find_annotations_instance(event.artist)
if annotations_instance is not None:
# Call the controlling Annotations instance
annotations_instance._onpick(event)
def _on_figure_enter_event(event):
"""
When the mouse enters the figure, this will make sure all lines have
picker enabled so that new lines can be annotated.
"""
fig=event.canvas.figure
# Only lines that are not already pickable will be updated.
lines=[line for ax in fig.axes for line in ax.lines if not line.pickable()]
for line in lines:
# The controlling Annotations instance is either in the axes or figure.
annotations_instance=_find_annotations_instance(line)
if annotations_instance is not None:
line.set_picker(annotations_instance.tolerance)
# We may need to update legends if the user manually plotted or deleted a line.
#legend_update(fig, draw=True) #Draw if a change was detected
###########################
# TEST
###########################
if __name__ == '__main__':
import numpy as np
plt.ion()
# Use our subplots wrapper to make sure annotations are enabled
fig,ax=subplots(2,1)
ax[0].set_title('click on points')
x=np.r_[-5:5:.1]
y=x**2-5*x+3
lines=[]
lines += ax[0].plot(x,x**2-5*x+3, '-.',label='My Line')
lines += ax[1].plot(x,5*x+4,label='Line2')
# Enable Annotations
anno=AnnotationPicker(fig)
an=anno.annotate(ax[0].lines[0],30, 'A manual annotation')
# Add a legend
#leg=legend(ax)
# Add another line and see if moving the mouse in catches it
ax[1].plot(x,2*x+7, label='New line')
# Create custom string for 2nd axes
def custom_text(line,ind):
xy=line.get_xydata()[ind]
custom='Custom text\nData[{0}]: {1:.9}, {2:.9}'.format(ind,xy[0],xy[1])
return custom
anno2=AnnotationPicker(ax[1],formatter=custom_text, key=None)
ax[1].plot(x,y, '.-',label='No picker yet') # See if the picker gets enabled
ax[1].legend()
plt.draw()<|fim▁end|> | # Determine if the new data coordinates reach or exceed the next line data point.
index=self.index
while (index > 0) and (self.xydata[index-1][0] > new_data_xy[0]): |
<|file_name|>recurrence_analysis_speleo_raw.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2014 Jonathan F. Donges
# Author: Jonathan F. Donges <[email protected]>
# URL: <http://www.pik-potsdam.de/members/donges/software>
"""
Performs recurrence analysis of paleoclimate proxy records.
This script provides analyses for this publication:
J.F. Donges, R.V. Donner, N. Marwan, S.F.M. Breitenbach, K. Rehfeld, and J. Kurths,
Nonlinear regime shifts in Holocene Asian monsoon variability: Potential impacts on cultural change and migratory patterns,
Climate of the Past 11, 709-741 (2015),
DOI: 10.5194/cp-11-709-2015
"""
#
# Imports
#
# Import cPickle for loading and saving data
import cPickle
# Import np for fast numerics
import numpy as np
# Import progress bar for easy progress bar handling
import progressbar
# Import class for recurrence network analysis
from pyunicorn.timeseries import RecurrenceNetwork
#
# Settings
#
# Name of data directory
DATA_DIR = "../../data/raw_proxy_data/"
# List of data FILENAMES
FILENAMES = ["Dimarshim_D1.dat", "Qunf_Q5_orig.dat", "Hoti.dat",
"Mawmluh.dat", "Tianmen_TM18_older.dat", "Dongge_DA.dat",
"Lianhua_d18O_d13C.dat", "Heshang_HS4.dat", "Jiuxian.dat",
"Liang-Luar.dat"]
# Names of proxy records / caves
NAMES = ["Dimarshim", "Qunf", "Hoti", "Mawmluh", "Tianmen", "Dongge",
"Lianhua", "Heshang", "Jiuxian", "Liang-Luar"]
# Specify symbol used for commenting in data file
COMMENT_SYMBOL = "%"
# Settings for the time dependent recurrence plot
# Window length [a] / [ka]
T_TIME = [750., 750., 750., 750., 750., 750., 750., 750., 750., 750.]
# Step size [a] / [ka]
DELTA_TIME = 50.
# Settings for the embedding
DIM = 3
TAU = 2 # Only used if ADAPT_DELAY == False
ADAPT_DELAY = True # If true, the delay in units of data points is estimated to match the given DELAY_TIMES entry
# Explicitly set delay times for time-delay embedding
DELAY_TIMES = [100., 216., 57., 146., 90., 185., 193., 60., 73., 135.] # in years [a]
# Settings for the recurrence plot
METRIC = "supremum" # metric for recurrence definition
RR = 0.05 # prescribed recurrence rate
# Settings for significance testing
# Ensemble size
N_ENSEMBLE = 1000
# Choose whether whole embedded state vectors or the scalar time series should be shuffled (Different null-hypothesis!)
SHUFFLE_EMBEDDED = True
# Settings for detrending
DETREND = True
DETRENDING_WINDOW_SIZE = 1000. # measured in [a] / [ka]
#
# Functions
#
def detrend_time_series(data, window_size):
"""
"""
# Get length of data array
n = data.shape[0]
# Initialize a local copy of data array
detrended_data = np.empty(n)
# Detrend data
for j in xrange(n):
# Get distance of sample from boundaries of time series
dist = min(j, n - 1 - j)
if window_size / 2 > dist:
half_size = dist
else:
half_size = window_size / 2
detrended_data[j] = data[j] - data[j - half_size:j + half_size + 1].mean()
return detrended_data
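# Sanity check (sketch): because every window is symmetric around its sample,
# a pure linear ramp detrends to (numerically) zero everywhere, e.g.
#   detrend_time_series(np.arange(100, dtype=float), window_size=10)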
def autocorrelation(data, lag):
"""Return autocorrelation of data at specified lag."""
return np.corrcoef(data[lag:], data[:-lag])[0,1]
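# e.g. autocorrelation(np.sin(np.linspace(0, 20 * np.pi, 1000)), lag=1) is
# close to 1 for such a smooth, densely sampled signal (sketch).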
#
# The main script
#
print "Recurrence network analysis of paleoclimate records"
print "---------------------------------------------------"
#
# Import data
#
time_list = []
data_list = []
sampling_time_list = []
delay_list = []
# Get number of time series
n_time_series = len(FILENAMES)
# Load data
for i in xrange(n_time_series):
time, data = np.loadtxt(DATA_DIR + FILENAMES[i], comments=COMMENT_SYMBOL,
unpack=True, usecols=(0,1,))
average_sampling_time = np.diff(time).mean()
time_list.append(time)
if DETREND:
# Detrend data!
detrended_data = detrend_time_series(data=data,
window_size=DETRENDING_WINDOW_SIZE / average_sampling_time)
data_list.append(detrended_data)
else:
data_list.append(data)
# Get average sampling times
sampling_time_list.append(average_sampling_time)
# Get delay time
delay_list.append(int(DELAY_TIMES[i] / average_sampling_time))
    # Temporary: Get length of time series
n = len(time)
#
# Print some statistics
#
print "Average sampling time:"
for i in xrange(n_time_series):
print FILENAMES[i], ": (", np.diff(time_list[i]).mean(), "pm", np.diff(time_list[i]).std(), ") ka"
#
# Analyze time dependent recurrence networks by moving a window over
# the time series
#
# Initialize list of window mid-points used for estimating time scale of windowed measures
step_sequence = []
# Create dictionary of symbols for each windowed measure to be calculated
symbols = {"Average path length": "$\mathcal{L}$",
"Transitivity": "$\mathcal{T}$"}
#symbols = {"Average path length": "$\mathcal{L}$",
# "n.s.i. average path length": "$\mathcal{L}^*$",
# "Clustering": "$\mathcal{C}$",
# "n.s.i. clustering": "$\mathcal{C}^*$"}
#symbols = {"Determinism": "$DET$",
# "Laminarity": "$LAM$",
# "Mean diagonal line length": "$L_{mean}$",
# "Trapping time": "$TT$",
# "Diagonal line entropy": "$ENTR$",
# "Autocorrelation": "$ACF(1)$",
# "Mean": "Mean",
# "Standard deviation": "STD"}
# Initialize dictionaries
results = {}
surrogate_results = {}
for measure in symbols.keys():
results[measure] = []
surrogate_results[measure] = []
# Run analysis for each time series separately
for i in xrange(n_time_series):
print "Analyzing original data from", FILENAMES[i]
# Get time and data arrays
time = time_list[i]
data = data_list[i]
sampling_time = sampling_time_list[i]
# Set delay
if ADAPT_DELAY:
TAU = delay_list[i]
# Get window and step size in units of samples
T = int(T_TIME[i] / sampling_time)
delta = int(DELTA_TIME / sampling_time)
# Get length of time series
t_max = len(time)
    # Get required time series length before embedding to achieve window length T in the recurrence plot
T_embedded = T + (DIM - 1) * TAU
# Get number of steps
t_steps = int((t_max - T_embedded) / float(delta) + 1)
<|fim▁hole|> print "Embedding dimension:", DIM
print "Embedding delay:", TAU
print "Prescribed link density / recurrence rate:", RR
# Initializations
local_step_sequence = np.empty((t_steps), dtype=int)
local_result = {}
for measure in symbols.keys():
local_result[measure] = np.empty(t_steps)
# Initialize progress bar
progress = progressbar.ProgressBar().start()
# Loop over moving windows
for j in xrange(t_steps):
# Get time series section for current window
time_series = data[j * delta:j * delta + T_embedded]
local_step_sequence[j] = j * delta + T_embedded / 2
# Prepare recurrence network from original data
rec_net = RecurrenceNetwork(time_series.flatten(), dim=DIM, tau=TAU,
metric=METRIC, normalize=False,
silence_level=2, recurrence_rate=RR)
# Calculations for original recurrence network
local_result["Average path length"][j] = rec_net.average_path_length()
local_result["Transitivity"][j] = rec_net.transitivity()
#local_result["Assortativity"][j] = rec_net.assortativity()
#local_result["Diameter"][j] = rec_net.diameter()
# Calculate RQA measures
#local_result["Determinism"][j] = rec_net.determinism()
#local_result["Laminarity"][j] = rec_net.laminarity()
#local_result["Mean diagonal line length"][j] = rec_net.average_diaglength()
#local_result["Trapping time"][j] = rec_net.trapping_time()
#local_result["Diagonal line entropy"][j] = rec_net.diag_entropy()
#local_result["Autocorrelation"][j] = autocorrelation(time_series, lag=1)
#local_result["Mean"][j] = time_series.mean()
#local_result["Standard deviation"][j] = time_series.std()
# Update progress bar every step
progress.update(int(100 * j / float(t_steps)))
# Terminate progress bar
progress.finish()
# Store window mid-point
step_sequence.append(local_step_sequence)
# Store results
for measure in symbols.keys():
results[measure].append(local_result[measure])
#
# Calculate significance levels for network measures
#
print "Calculating significance levels based on", N_ENSEMBLE, "surrogates..."
# Initialize progress bar
progress = progressbar.ProgressBar().start()
# Create a copy of data for generating surrogates from
surrogate_data = data.copy()
if SHUFFLE_EMBEDDED:
# Get embedding of full time series
surrogate_embedding = rec_net.embed_time_series(surrogate_data,
DIM, TAU)
# Prepare stuff
local_surrogate_result = {}
for measure in symbols.keys():
local_surrogate_result[measure] = np.empty(N_ENSEMBLE)
for j in xrange(N_ENSEMBLE):
if SHUFFLE_EMBEDDED:
# Shuffle embedded time series along time axis, that is, whole
# embedded state vectors are shuffled around.
permuted_indices = np.random.permutation(surrogate_embedding.shape[0])
# Use the first T state vectors from the shuffled and embedded
# time series as a surrogate for one window
surrogate_series = surrogate_embedding[permuted_indices[:T],:]
# Prepare recurrence network from surrogate data for shuffled
# embedded time series
rec_net = RecurrenceNetwork(surrogate_series.copy(),
metric=METRIC, normalize=False,
silence_level=2, recurrence_rate=RR)
else:
            # Shuffle the proxy time series
            permuted_indices = np.random.permutation(surrogate_data.shape[0])
            # Use the first T_embedded states from the shuffled proxy time series as a surrogate for one window
surrogate_series = surrogate_data[permuted_indices[:T_embedded]]
# Prepare recurrence network from surrogate data for shuffled time series
rec_net = RecurrenceNetwork(surrogate_series.copy(), dim=DIM,
tau=TAU, metric=METRIC,
normalize=False, silence_level=2,
recurrence_rate=RR)
# Calculate measures for surrogate network
local_surrogate_result["Average path length"][j] = rec_net.average_path_length()
local_surrogate_result["Transitivity"][j] = rec_net.transitivity()
#local_surrogate_result["Assortativity"][j] = rec_net.assortativity()
#local_surrogate_result["Diameter"][j] = rec_net.diameter()
# Calculate RQA measures
#local_surrogate_result["Determinism"][j] = rec_net.determinism()
#local_surrogate_result["Laminarity"][j] = rec_net.laminarity()
#local_surrogate_result["Mean diagonal line length"][j] = rec_net.average_diaglength()
#local_surrogate_result["Trapping time"][j] = rec_net.trapping_time()
#local_surrogate_result["Diagonal line entropy"][j] = rec_net.diag_entropy()
#local_surrogate_result["Autocorrelation"][j] = autocorrelation(data, lag=1)
#local_surrogate_result["Mean"][j] = data.mean()
#local_surrogate_result["Standard deviation"][j] = data.std()
# Update progress bar every step
progress.update(int(100 * j / float(N_ENSEMBLE)))
# Store results
for measure in symbols.keys():
surrogate_results[measure].append(local_surrogate_result[measure])
# Terminate progress bar
progress.finish()
#
# Save results
#
print "Saving results..."
# Initialize storage dictionary
storage = {}
# Store parameters
storage["FILENAMES"] = FILENAMES
storage["NAMES"] = NAMES
storage["T_TIME"] = T_TIME
storage["DELTA_TIME"] = DELTA_TIME
storage["DETRENDING_WINDOW_SIZE"] = DETRENDING_WINDOW_SIZE
storage["DIM"] = DIM
storage["TAU"] = TAU
storage["ADAPT_DELAY"] = ADAPT_DELAY
storage["DELAY_TIMES"] = DELAY_TIMES
storage["METRIC"] = METRIC
storage["RR"] = RR
storage["N_ENSEMBLE"] = N_ENSEMBLE
storage["SHUFFLE_EMBEDDED"] = SHUFFLE_EMBEDDED
# Store symbols
storage["symbols"] = symbols
# Store raw input data
storage["time_list"] = time_list
storage["data_list"] = data_list
# Store axes
storage["step_sequence"] = step_sequence
# Store results
storage["results"] = results
storage["surrogate_results"] = surrogate_results
# Save to file
filename = "results_speleo_comparison_W_" + str(T_TIME[0]) + "y_M_" + str(N_ENSEMBLE) + "_DETREND_" + str(DETREND) + ".pickle"
file = open(filename, 'w')
cPickle.dump(storage, file)
file.close()<|fim▁end|> | print "Length of record:", t_max
print "Size of moving window:", T
print "Step size:", delta
print "Number of steps for moving window:", t_steps |
<|file_name|>db.py<|end_file_name|><|fim▁begin|>import pymysql.cursors
from model.group import Group
from model.contact import Contact
class DbFixture():
def __init__(self, host, name, user, password):
self.host = host
self.name = name
self.user = user
self.password = password
self.connection = pymysql.connect(host=host, database=name, user=user, password=password, autocommit=True)
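    # Usage sketch (hypothetical credentials):
    #   db = DbFixture(host="127.0.0.1", name="addressbook", user="root", password="")
    #   groups = db.get_group_list()
    #   db.destroy()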
def get_group_list(self):
list =[]
cursor = self.connection.cursor()
try:
cursor.execute("select group_id, group_name, group_header, group_footer from group_list")
for row in cursor:
(id, name, header, footer) = row
list.append(Group(id=str(id), name=name, header=header, footer=footer))
finally:
cursor.close()
return list
def get_contact_list(self):
list =[]<|fim▁hole|> cursor = self.connection.cursor()
try:
cursor.execute("select id, firstname, lastname from addressbook where deprecated='0000-00-00 00:00:00' ")
for row in cursor:
(id, firstname, lastname) = row
list.append(Contact(id=str(id), firstname=firstname, lastname=lastname))
finally:
cursor.close()
return list
def destroy(self):
self.connection.close()<|fim▁end|> | |
<|file_name|>bin_xlsx.ts<|end_file_name|><|fim▁begin|>/* xlsx.js (C) 2013-present SheetJS -- http://sheetjs.com */
/* eslint-env node */
/* vim: set ts=2 ft=javascript: */
/// <reference types="../node_modules/@types/node/" />
const n = "xlsx";
import X = require("xlsx");
import 'exit-on-epipe';
import * as fs from 'fs';
import program = require('commander');
program
.version(X.version)
.usage('[options] <file> [sheetname]')
.option('-f, --file <file>', 'use specified workbook')
.option('-s, --sheet <sheet>', 'print specified sheet (default first sheet)')
.option('-N, --sheet-index <idx>', 'use specified sheet index (0-based)')
.option('-p, --password <pw>', 'if file is encrypted, try with specified pw')
.option('-l, --list-sheets', 'list sheet names and exit')
.option('-o, --output <file>', 'output to specified file')
.option('-B, --xlsb', 'emit XLSB to <sheetname> or <file>.xlsb')
.option('-M, --xlsm', 'emit XLSM to <sheetname> or <file>.xlsm')
.option('-X, --xlsx', 'emit XLSX to <sheetname> or <file>.xlsx')
.option('-I, --xlam', 'emit XLAM to <sheetname> or <file>.xlam')
.option('-Y, --ods', 'emit ODS to <sheetname> or <file>.ods')
.option('-8, --xls', 'emit XLS to <sheetname> or <file>.xls (BIFF8)')
.option('-5, --biff5','emit XLS to <sheetname> or <file>.xls (BIFF5)')
.option('-2, --biff2','emit XLS to <sheetname> or <file>.xls (BIFF2)')
.option('-i, --xla', 'emit XLA to <sheetname> or <file>.xla')
.option('-6, --xlml', 'emit SSML to <sheetname> or <file>.xls (2003 XML)')
.option('-T, --fods', 'emit FODS to <sheetname> or <file>.fods (Flat ODS)')
.option('-S, --formulae', 'emit list of values and formulae')
.option('-j, --json', 'emit formatted JSON (all fields text)')
.option('-J, --raw-js', 'emit raw JS object (raw numbers)')
.option('-A, --arrays', 'emit rows as JS objects (raw numbers)')
.option('-H, --html', 'emit HTML to <sheetname> or <file>.html')
.option('-D, --dif', 'emit DIF to <sheetname> or <file>.dif (Lotus DIF)')
.option('-U, --dbf', 'emit DBF to <sheetname> or <file>.dbf (MSVFP DBF)')
.option('-K, --sylk', 'emit SYLK to <sheetname> or <file>.slk (Excel SYLK)')
.option('-P, --prn', 'emit PRN to <sheetname> or <file>.prn (Lotus PRN)')
.option('-E, --eth', 'emit ETH to <sheetname> or <file>.eth (Ethercalc)')
.option('-t, --txt', 'emit TXT to <sheetname> or <file>.txt (UTF-8 TSV)')
.option('-r, --rtf', 'emit RTF to <sheetname> or <file>.txt (Table RTF)')
.option('-z, --dump', 'dump internal representation as JSON')
.option('--props', 'dump workbook properties as CSV')
.option('-F, --field-sep <sep>', 'CSV field separator', ",")
.option('-R, --row-sep <sep>', 'CSV row separator', "\n")
.option('-n, --sheet-rows <num>', 'Number of rows to process (0=all rows)')
.option('--codepage <cp>', 'default to specified codepage when ambiguous')
.option('--req <module>', 'require module before processing')
.option('--sst', 'generate shared string table for XLS* formats')
.option('--compress', 'use compression when writing XLSX/M/B and ODS')
.option('--read', 'read but do not generate output')
.option('--book', 'for single-sheet formats, emit a file per worksheet')
.option('--all', 'parse everything; write as much as possible')
.option('--dev', 'development mode')
.option('--sparse', 'sparse mode')
.option('-q, --quiet', 'quiet mode');
program.on('--help', function() {
console.log(' Default output format is CSV');
console.log(' Support email: [email protected]');
console.log(' Web Demo: http://oss.sheetjs.com/js-'+n+'/');
});
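/* Example invocations (sketch; the actual binary name depends on the install):
     xlsx --list-sheets book.xlsx
     xlsx book.xlsx Sheet1 -o sheet1.csv
     xlsx -B book.xlsx -o book.xlsb */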
/* flag, bookType, default ext */
const workbook_formats = [
['xlsx', 'xlsx', 'xlsx'],
['xlsm', 'xlsm', 'xlsm'],
['xlam', 'xlam', 'xlam'],
['xlsb', 'xlsb', 'xlsb'],
['xls', 'xls', 'xls'],
['xla', 'xla', 'xla'],
['biff5', 'biff5', 'xls'],
['ods', 'ods', 'ods'],
['fods', 'fods', 'fods']
];
const wb_formats_2 = [
['xlml', 'xlml', 'xls']
];
program.parse(process.argv);
let filename = '', sheetname = '';
if(program.args[0]) {
filename = program.args[0];
if(program.args[1]) sheetname = program.args[1];
}
if(program.sheet) sheetname = program.sheet;
if(program.file) filename = program.file;
if(!filename) {<|fim▁hole|>}
if(!fs.existsSync(filename)) {
console.error(n + ": " + filename + ": No such file or directory");
process.exit(2);
}
const opts: X.ParsingOptions = {};
let wb: X.WorkBook;
if(program.listSheets) opts.bookSheets = true;
if(program.sheetRows) opts.sheetRows = program.sheetRows;
if(program.password) opts.password = program.password;
let seen = false;
function wb_fmt() {
seen = true;
opts.cellFormula = true;
opts.cellNF = true;
if(program.output) sheetname = program.output;
}
function isfmt(m: string): boolean {
if(!program.output) return false;
const t = m.charAt(0) === "." ? m : "." + m;
return program.output.slice(-t.length) === t;
}
workbook_formats.forEach(function(m) { if(program[m[0]] || isfmt(m[0])) { wb_fmt(); } });
wb_formats_2.forEach(function(m) { if(program[m[0]] || isfmt(m[0])) { wb_fmt(); } });
if(seen) {
} else if(program.formulae) opts.cellFormula = true;
else opts.cellFormula = false;
const wopts: X.WritingOptions = ({WTF:opts.WTF, bookSST:program.sst}/*:any*/);
if(program.compress) wopts.compression = true;
if(program.all) {
opts.cellFormula = true;
opts.bookVBA = true;
opts.cellNF = true;
opts.cellHTML = true;
opts.cellStyles = true;
opts.sheetStubs = true;
opts.cellDates = true;
wopts.cellStyles = true;
wopts.bookVBA = true;
}
if(program.sparse) opts.dense = false; else opts.dense = true;
if(program.codepage) opts.codepage = +program.codepage;
if(program.dev) {
opts.WTF = true;
wb = X.readFile(filename, opts);
} else try {
wb = X.readFile(filename, opts);
} catch(e) {
let msg = (program.quiet) ? "" : n + ": error parsing ";
msg += filename + ": " + e;
console.error(msg);
process.exit(3);
}
if(program.read) process.exit(0);
if(!wb) { console.error(n + ": error parsing " + filename + ": empty workbook"); process.exit(0); }
/*:: if(!wb) throw new Error("unreachable"); */
if(program.listSheets) {
console.log((wb.SheetNames||[]).join("\n"));
process.exit(0);
}
if(program.dump) {
console.log(JSON.stringify(wb));
process.exit(0);
}
if(program.props) {
dump_props(wb);
process.exit(0);
}
/* full workbook formats */
workbook_formats.forEach(function(m) { if(program[m[0]] || isfmt(m[0])) {
wopts.bookType = <X.BookType>(m[1]);
X.writeFile(wb, program.output || sheetname || ((filename || "") + "." + m[2]), wopts);
process.exit(0);
} });
wb_formats_2.forEach(function(m) { if(program[m[0]] || isfmt(m[0])) {
wopts.bookType = <X.BookType>(m[1]);
X.writeFile(wb, program.output || sheetname || ((filename || "") + "." + m[2]), wopts);
process.exit(0);
} });
let target_sheet = sheetname || '';
if(target_sheet === '') {
if(program.sheetIndex < (wb.SheetNames||[]).length) target_sheet = wb.SheetNames[program.sheetIndex];
else target_sheet = (wb.SheetNames||[""])[0];
}
let ws: X.WorkSheet;
try {
ws = wb.Sheets[target_sheet];
if(!ws) {
console.error("Sheet " + target_sheet + " cannot be found");
process.exit(3);
}
} catch(e) {
console.error(n + ": error parsing "+filename+" "+target_sheet+": " + e);
process.exit(4);
}
if(!program.quiet && !program.book) console.error(target_sheet);
/* single worksheet file formats */
[
['biff2', '.xls'],
['biff3', '.xls'],
['biff4', '.xls'],
['sylk', '.slk'],
['html', '.html'],
['prn', '.prn'],
['eth', '.eth'],
['rtf', '.rtf'],
['txt', '.txt'],
['dbf', '.dbf'],
['dif', '.dif']
].forEach(function(m) { if(program[m[0]] || isfmt(m[1])) {
wopts.bookType = <X.BookType>(m[0]);
X.writeFile(wb, program.output || sheetname || ((filename || "") + m[1]), wopts);
process.exit(0);
} });
let oo = "", strm = false;
if(!program.quiet) console.error(target_sheet);
if(program.formulae) oo = X.utils.sheet_to_formulae(ws).join("\n");
else if(program.json) oo = JSON.stringify(X.utils.sheet_to_json(ws));
else if(program.rawJs) oo = JSON.stringify(X.utils.sheet_to_json(ws,{raw:true}));
else if(program.arrays) oo = JSON.stringify(X.utils.sheet_to_json(ws,{raw:true, header:1}));
else {
strm = true;
const stream: NodeJS.ReadableStream = X.stream.to_csv(ws, {FS:program.fieldSep, RS:program.rowSep});
if(program.output) stream.pipe(fs.createWriteStream(program.output));
else stream.pipe(process.stdout);
}
if(!strm) {
if(program.output) fs.writeFileSync(program.output, oo);
else console.log(oo);
}
/*:: } */
/*:: } */
function dump_props(wb: X.WorkBook) {
let propaoa: any[][] = [];
propaoa = (<any>Object).entries({...wb.Props, ...wb.Custprops});
console.log(X.utils.sheet_to_csv(X.utils.aoa_to_sheet(propaoa)));
}<|fim▁end|> | console.error(n + ": must specify a filename");
process.exit(1); |
<|file_name|>test_appx_gp.py<|end_file_name|><|fim▁begin|>import sys
import matplotlib.pyplot as plt
import numpy as np
import sklearn.gaussian_process
import sklearn.kernel_approximation
import splitter
from appx_gaussian_processes import appx_gp
TRAINING_NUM = 1500
TESTING_NUM = 50000
ALPHA = .003
LENGTH_SCALE = 1
GAMMA = .5 / (LENGTH_SCALE ** 2)
COMPONENTS = 100
def interval_in_box_from_line(box, line):
x_min, x_max, y_min, y_max = box
m, b = line
x_min_y = m * x_min + b
x_max_y = m * x_max + b
y_min_x = (y_min - b) / m
y_max_x = (y_max - b) / m
endpoints = set()
if y_min <= x_min_y <= y_max:
endpoints.add((x_min, x_min_y))
if y_min <= x_max_y <= y_max:
endpoints.add((x_max, x_max_y))
if x_min <= y_min_x <= x_max:
endpoints.add((y_min_x, y_min))
if x_min <= y_max_x <= x_max:
endpoints.add((y_max_x, y_max))
return endpoints
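# e.g. interval_in_box_from_line((0, 1, 0, 1), (1, 0)) == {(0, 0), (1, 1)}:
# the line y = x clipped to the unit box (sketch).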
def approximate_kernel(train_X, test_X):
sampler = sklearn.kernel_approximation.RBFSampler(gamma=GAMMA, n_components=COMPONENTS)
sampler.fit(train_X)
appx_train_X = sampler.transform(train_X)
appx_test_X = sampler.transform(test_X)
return appx_train_X, appx_test_X
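# Note (sketch): RBFSampler approximates k(x, y) = exp(-GAMMA * ||x - y||^2),
# so GAMMA = 0.5 / LENGTH_SCALE**2 matches the GP's RBF kernel above; dot
# products of transformed features approximate kernel values in expectation:
#   np.dot(appx_train_X[0], appx_train_X[1]) ~ k(train_X[0], train_X[1])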
def main(path_in):
print('Loading data...')
data = splitter.load(path_in)
(train_X, train_y), (test_X, test_y) = splitter.split(data, TRAINING_NUM,
TESTING_NUM)
try:
gp_sigmas = np.loadtxt('gp_preds.txt')
assert gp_sigmas.shape == (TESTING_NUM,)
except (FileNotFoundError, AssertionError):
print('Fitting GP...')
kernel = sklearn.gaussian_process.kernels.RBF(
length_scale=LENGTH_SCALE)
gp = sklearn.gaussian_process.GaussianProcessRegressor(
kernel=kernel,
alpha=ALPHA,
copy_X_train=False)
gp.fit(train_X, train_y)
print('Predicting GP...')
_, gp_sigmas = gp.predict(test_X, return_std=True)
np.savetxt('gp_preds.txt', gp_sigmas)
print('Approximating kernel...')
appx_train_X, appx_test_X = approximate_kernel(train_X, test_X)
print('Fitting approximate GP...')
agp = appx_gp.AppxGaussianProcessRegressor(alpha=ALPHA)
agp.fit(appx_train_X, train_y)
print('Predicting approximate GP...')
_, agp_sigmas = agp.predict(appx_test_X, return_std=True)
print('Finding best fit...')
best_fit = np.polyfit(gp_sigmas, agp_sigmas, 1)
best_fit_box = (min(gp_sigmas), max(gp_sigmas),
min(agp_sigmas), max(agp_sigmas))
best_fit_endpoints = interval_in_box_from_line(best_fit_box, best_fit)
best_fit_xs, best_fit_ys = zip(*best_fit_endpoints)
print('Plotting...')
f = plt.figure()
ax = f.add_subplot(111)
sc = plt.scatter(gp_sigmas, agp_sigmas, s=.2, c=list(test_y))
plt.plot(best_fit_xs, best_fit_ys, color='red', label='Linear fit')
plt.title(r'$\gamma = {:.4},$ #components$= {}$'.format(GAMMA,<|fim▁hole|> plt.text(.975, .1, '$y = {:.4}x {:+.4}$'.format(*best_fit),
horizontalalignment='right',
verticalalignment='bottom',
transform = ax.transAxes)
colorbar = plt.colorbar(sc)
colorbar.set_label('Redshift')
plt.legend(loc='lower right')
plt.show()
if __name__ == '__main__':
main(sys.argv[1])<|fim▁end|> | COMPONENTS))
plt.xlabel('GP uncertainty')
plt.ylabel('Approximate GP uncertainty') |
<|file_name|>pycallgraph.py<|end_file_name|><|fim▁begin|>import locale
from .output import Output
from .config import Config
from .tracer import AsyncronousTracer, SyncronousTracer
from .exceptions import PyCallGraphException
class PyCallGraph(object):
def __init__(self, output=None, config=None):
'''output can be a single Output instance or an iterable with many
of them. Example usage:
PyCallGraph(output=GraphvizOutput(), config=Config())
'''
locale.setlocale(locale.LC_ALL, '')
if output is None:
self.output = []
elif isinstance(output, Output):
self.output = [output]
else:
self.output = output
self.config = config or Config()
        configured_output = self.config.get_output()
        if configured_output:
            self.output.append(configured_output)
self.reset()
def __enter__(self):
self.start()
def __exit__(self, type, value, traceback):
self.done()
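    # Typical usage as a context manager (sketch):
    #   with PyCallGraph(output=GraphvizOutput()):
    #       code_to_profile()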
def get_tracer_class(self):
if self.config.threaded:
return AsyncronousTracer
else:
return SyncronousTracer
def reset(self):
'''Resets all collected statistics. This is run automatically by
start(reset=True) and when the class is initialized.
'''
self.tracer = self.get_tracer_class()(self.output, config=self.config)
for output in self.output:
self.prepare_output(output)
def start(self, reset=True):
'''Begins a trace. Setting reset to True will reset all previously
recorded trace data.
'''
if not self.output:
raise PyCallGraphException(<|fim▁hole|>
if reset:
self.reset()
for output in self.output:
output.start()
self.tracer.start()
def stop(self):
'''Stops the currently running trace, if any.'''
self.tracer.stop()
def done(self):
'''Stops the trace and tells the outputters to generate their
output.
'''
self.stop()
self.generate()
def generate(self):
# If in threaded mode, wait for the processor thread to complete
self.tracer.done()
for output in self.output:
output.done()
def add_output(self, output):
self.output.append(output)
self.prepare_output(output)
def prepare_output(self, output):
output.sanity_check()
output.set_processor(self.tracer.processor)
output.reset()<|fim▁end|> | 'No outputs declared. Please see the '
'examples in the online documentation.'
) |
<|file_name|>regex.rs<|end_file_name|><|fim▁begin|>use regex;
use serde_json::Value;<|fim▁hole|>
use super::super::errors;
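// Usage sketch: this impl lets a plain `regex::Regex` act as a validator, so
// (hypothetically) `regex::Regex::new(r"^\d+$").unwrap()` would reject JSON
// values that are not digit-only strings; exact wiring depends on the schema.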
impl super::Validator for regex::Regex {
fn validate(&self, val: &Value, path: &str) -> super::ValidatorResult {
let string = strict_process!(val.as_str(), path, "The value must be a string");
if self.is_match(string) {
Ok(())
} else {
Err(vec![Box::new(errors::WrongValue {
path: path.to_string(),
detail: Some("Value is not matched by required pattern".to_string()),
})])
}
}
}<|fim▁end|> | |
<|file_name|>pop.ts<|end_file_name|><|fim▁begin|>// ==LICENSE-BEGIN==
// Copyright 2017 European Digital Reading Lab. All rights reserved.
// Licensed to the Readium Foundation under one or more contributor license agreements.
// Use of this source code is governed by a BSD-style license
// that can be found in the LICENSE file exposed on Github (readium) in the project repository.
// ==LICENSE-END==
import { Action } from "readium-desktop/common/models/redux";
import { IHighlightBaseState } from "../../../state/highlight";
export const ID = "READER_HIGHLIGHT_HANDLER_POP";
// eslint-disable-next-line @typescript-eslint/no-empty-interface
interface IPayload extends Array<IHighlightBaseState> {
}
export function build(...param: IHighlightBaseState[]):
Action<typeof ID, IPayload> {
return {
type: ID,<|fim▁hole|>}
build.toString = () => ID; // Redux StringableActionCreator
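// Example (sketch; the highlight payload shape is hypothetical):
//   store.dispatch(build({ uuid: "h1" } as unknown as IHighlightBaseState));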
export type TAction = ReturnType<typeof build>;<|fim▁end|> | payload: param,
}; |
<|file_name|>localize.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""extends the standard Python gettext classes
allows multiple simultaneous domains... (makes multiple sessions with different languages easier too)"""
# Copyright 2002, 2003 St James Software
#
# This file is part of jToolkit.
#
# jToolkit is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# jToolkit is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with jToolkit; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import gettext
import locale
import os.path
from errno import ENOENT
from jToolkit import languagenames
class ManyTranslations(gettext.NullTranslations):
"""this proxies to many translations"""
def __init__(self, translations=None):
"""Takes an optional sequence of translations."""
gettext.NullTranslations.__init__(self)
if translations is None:
self.translations = []
else:
self.translations = translations
def gettext(self, message):
"""gets the translation of the message by searching through all the domains"""
for translation in self.translations:
tmsg = translation._catalog.get(message, None)
if tmsg is not None:
return tmsg
return message
def ngettext(self, singular, plural, n):
"""gets the plural translation of the message by searching through all the domains"""
for translation in self.translations:
if not hasattr(translation, "plural"):
continue
      plural_func = translation.plural
      tmsg = translation._catalog.get((singular, plural_func(n)), None)
if tmsg is not None:
return tmsg
if n == 1:
return singular
else:
return plural
def ugettext(self, message):
"""gets the translation of the message by searching through all the domains (unicode version)"""
for translation in self.translations:
tmsg = translation._catalog.get(message, None)
# TODO: we shouldn't set _charset like this. make sure it is set properly
if translation._charset is None: translation._charset = 'UTF-8'
if tmsg is not None:
if isinstance(tmsg, unicode):
return tmsg
else:
return unicode(tmsg, translation._charset)
return unicode(message)
def ungettext(self, singular, plural, n):
"""gets the plural translation of the message by searching through all the domains (unicode version)"""
for translation in self.translations:
if not hasattr(translation, "plural"):
continue
      plural_func = translation.plural
      tmsg = translation._catalog.get((singular, plural_func(n)), None)
# TODO: we shouldn't set _charset like this. make sure it is set properly
if translation._charset is None: translation._charset = 'UTF-8'<|fim▁hole|> return unicode(tmsg, translation._charset)
if n == 1:
return unicode(singular)
else:
return unicode(plural)
def getinstalledlanguages(localedir):
"""looks in localedir and returns a list of languages installed there"""
languages = []
def visit(arg, dirname, names):
if 'LC_MESSAGES' in names:
languages.append(os.path.basename(dirname))
os.path.walk(localedir, visit, None)
return languages
def getlanguagenames(languagecodes):
"""return a dictionary mapping the language code to the language name..."""
return dict([(code, languagenames.languagenames.get(code, code)) for code in languagecodes])
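# e.g. getlanguagenames(["af", "zz"]) -> {"af": "Afrikaans", "zz": "zz"}
# (sketch; names come from jToolkit's languagenames table, falling back to
# the code itself)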
def findmany(domains, localedir=None, languages=None):
"""same as gettext.find, but handles many domains, returns many mofiles (not just one)"""
mofiles = []
if languages is None:
languages = getinstalledlanguages(localedir)
for domain in domains:
mofile = gettext.find(domain, localedir, languages)
mofiles.append(mofile)
return mofiles
def translation(domains, localedir=None, languages=None, class_=None):
"""same as gettext.translation, but handles many domains, returns a ManyTranslations object"""
if class_ is None:
class_ = gettext.GNUTranslations
mofiles = findmany(domains, localedir, languages)
  # we'll just use null translations where domains are missing; the commented-out code below would refuse to continue instead:
# if None in mofiles:
# missingindex = mofiles.index(None)
# raise IOError(ENOENT, 'No translation file found for domain', domains[missingindex])
translations = []
for mofile in mofiles:
if mofile is None:
t = gettext.NullTranslations()
t._catalog = {}
else:
key = os.path.abspath(mofile)
t = gettext._translations.get(key)
if t is None:
t = gettext._translations.setdefault(key, class_(open(mofile, 'rb')))
translations.append(t)
return ManyTranslations(translations)
def getdefaultlanguage(languagelist):
"""tries to work out the default language from a list"""
def reducelocale(locale):
pos = locale.find('_')
if pos == -1:
return locale
else:
return locale[:pos]
currentlocale, currentencoding = locale.getlocale()
try:
defaultlocale, defaultencoding = locale.getdefaultlocale()
except ValueError:
defaultlocale, defaultencoding = None, None
if len(languagelist) > 0:
if currentlocale is not None:
if currentlocale in languagelist:
return currentlocale
elif reducelocale(currentlocale) in languagelist:
return reducelocale(currentlocale)
if defaultlocale is not None:
if defaultlocale in languagelist:
return defaultlocale
elif reducelocale(defaultlocale) in languagelist:
return reducelocale(defaultlocale)
return languagelist[0]
else:
# if our language list is empty, we'll just ignore it
if currentlocale is not None:
return currentlocale
elif defaultlocale is not None:
return defaultlocale
return None<|fim▁end|> | if tmsg is not None:
if isinstance(tmsg, unicode):
return tmsg
else: |
<|file_name|>LibraryFormContents.js<|end_file_name|><|fim▁begin|>import React from 'react';
import PropTypes from 'prop-types';
import FormField from '../../components/FormField';
import TextHelp from '../../components/TextHelp';
import DomainIdField from '../../components/DomainIdField';
import PodcastEdit from './PodcastEdit';
const LibraryFormContents = (props) => {
const { className, errors, formState, session } = props;
const library = formState.object;
let podcast;
if (library.podcast) {
const url = `${window.location.origin}/${library.path || library.id}.rss`;
podcast = [
<p key="loc" className="form__text secondary"><|fim▁hole|> Submit
</a>
</p>,
<PodcastEdit key="podcast"
podcast={library.podcast}
onChange={formState.change('podcast')} />,
];
}
return (
<div className={className}>
<fieldset className="form__fields">
<FormField label="Name" error={errors.name}>
<input name="name"
value={library.name || ''}
onChange={formState.change('name')} />
</FormField>
<FormField name="text" label="Description" help={<TextHelp />}>
<textarea name="text"
value={library.text || ''}
rows={4}
onChange={formState.change('text')} />
</FormField>
<FormField name="path"
label="Url ID"
help="unique url name"
error={errors.path}>
<input name="path"
value={library.path || ''}
onChange={formState.change('path')} />
</FormField>
<DomainIdField formState={formState} session={session} />
<FormField>
<input name="podcast"
type="checkbox"
checked={library.podcast}
onChange={() =>
formState.set('podcast', library.podcast ? undefined : {})} />
<label htmlFor="podcast">podcast</label>
</FormField>
</fieldset>
{podcast}
</div>
);
};
LibraryFormContents.propTypes = {
className: PropTypes.string,
errors: PropTypes.object,
formState: PropTypes.object.isRequired,
session: PropTypes.object.isRequired,
};
LibraryFormContents.defaultProps = {
className: undefined,
errors: {},
};
export default LibraryFormContents;<|fim▁end|> | Published at: {url}<br />
<a href={`https://validator.w3.org/feed/check.cgi?url=${encodeURIComponent(url)}`}>
Test
</a> <a href="https://help.apple.com/itc/podcasts_connect/#/itcd88ea40b9"> |
<|file_name|>transactions.js<|end_file_name|><|fim▁begin|>'use strict'
var _ = require('lodash'),
CrudRoutes = require('../lib/crud-routes'),
transactions = require('../services/transactions'),
log = require('../lib/log');
<|fim▁hole|> parent: {
name: 'account'
},
protected: true
};
let routes = CrudRoutes(opts);
routes.push({
method: 'get',
uri: '/user/:userid/accounts/:accountid/transactions/search/:kind/:search',
protected: true,
handler: (req,res,next) => {
log.info('Search Transactions by ' + req.params.kind + '/' + req.params.search);
return transactions.search(req.params)
.then((data) => {
res.send(200, data);
})
.catch((err) => {
let code = err.type === 'validation' ? 400 : 500;
log.error('Error searching transactions: ' + err.message);
res.send(400, err.message);
});
}
});
routes.push({
method: 'get',
uri: '/user/:userid/accounts/:accountid/transactions/startdate/:startdate/enddate/:enddate',
protected: true,
handler: (req,res,next) => {
log.info('Retrieve Transactions from ' + req.params.startdate + '-' + req.params.enddate);
return transactions.search(req.params)
.then((data) => {
res.send(200, data);
})
.catch((err) => {
let code = err.type === 'validation' ? 400 : 500;
log.error('Error searching transactions: ' + err.message);
res.send(400, err.message);
});
}
});
routes.push({
method: 'get',
uri: '/user/:userid/accounts/:accountid/transactions/startdate/:startdate/enddate/:enddate/:groupby',
protected: true,
handler: (req,res,next) => {
log.info('Retrieve Transactions from ' + req.params.startdate + ' to ' + req.params.enddate);
return transactions.search(req.params)
.then((data) => {
log.debug('Grouping by ' + req.params.groupby);
let a = _.groupBy(data, req.params.groupby);
res.send(200, a);
})
.catch((err) => {
let code = err.type === 'validation' ? 400 : 500;
log.error('Error searching transactions: ' + err.message);
res.send(400, err.message);
});
}
});
module.exports = routes;<|fim▁end|> | let opts = {
entity: 'transactions',
service: transactions,
user: true, |
<|file_name|>CheckField.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author dh2744
*/
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.util.Vector;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author dh2744
*/
public class CheckField {
//check that spike list and exp log exists, are correct and match
public static int checkSpkListExpLog(javax.swing.JTextField spikeListField,
javax.swing.JTextField expLogFileField,
javax.swing.JButton runButton,
javax.swing.JTextArea outputTextArea) {
int exitValue = 0;
runButton.setEnabled(false);
Boolean errorLog = false;
//Vector<String> logResult = new Vector<>();
// to do java 1.6 compatibility
Vector<String> logResult = new Vector<String>();
//experimental log
try {
//read in Experimental log
String csvFile = expLogFileField.getText();
String[] csvCheck1 = csvFile.split("\\.");
String csvText1 = "csv";
System.out.println("end of Exp log"+ csvCheck1[csvCheck1.length - 1] + " equals csv:" );
System.out.println(csvCheck1[csvCheck1.length - 1].equals("csv"));
//two ifs for whether string ends in "]" or not
if ( !csvCheck1[csvCheck1.length - 1].equals("csv") ){
if (!(csvCheck1[csvCheck1.length].equals("csv"))) {
logResult.add( "Exp log file chosen is not a csv file" );
ErrorHandler.errorPanel("Exp log file chosen " + '\n'
+ csvFile + '\n'
+ "Is not a csv file.");
exitValue = -1;
errorLog = true;
}
}
// This will reference one line at a time
String line = null;
Boolean expLogGood = true;
try {
// FileReader reads text files in the default encoding.
FileReader fileReader = new FileReader(csvFile);
// Always wrap FileReader in BufferedReader.
BufferedReader bufferedReader = new BufferedReader(fileReader);
line = bufferedReader.readLine();
String[] expLogParts = line.split(",");
if (!expLogParts[0].toUpperCase().equals("PROJECT")) {
logResult.add( "Error reading in expLog File" );
logResult.add("Exp log file lacks Project column." );
ErrorHandler.errorPanel("Exp log file lacks Project column "
+ '\n'
+ "Please re-enter exp log file");
errorLog = true;
expLogGood = false;
exitValue = -1;
} else if (!expLogParts[1].toUpperCase().equals("EXPERIMENT DATE")) {
logResult.add( "Error reading in expLog File" );
logResult.add("Exp log file lacks Experiment column." );
ErrorHandler.errorPanel("Exp log file lacks Experiment column "
+ '\n'
+ "Please re-enter exp log file");
errorLog = true;
expLogGood = false;
exitValue = -1;
} else {
                    // note: one row is read and then discarded here before the data walk begins
                    line = bufferedReader.readLine();
                    Integer i = 1;
                    line = bufferedReader.readLine();
String[] country = line.split(",");
System.out.println("Start while loop");
while (line != null) {
System.out.println(line);
System.out.println(country[0]);
System.out.println("Current line num " + i);
i++;
line = bufferedReader.readLine();
System.out.println(line);
System.out.println("line = null? " + (line == null));
//System.out.println("line.isEmpty ? " + (line.isEmpty()) );
if (line != null) {
System.out.println("line : " + line);
System.out.println("country : " + country[0]);
country = line.split(",");
}
}
System.out.println("Current line num " + i);
System.out.println("done with while loop");
// Always close files.
bufferedReader.close();
} //end of if-else
} catch (FileNotFoundException ex) {
logResult.add( "ExpLog File not found" );
System.out.println(
"Unable to open file '"
+ csvFile + "'");
errorLog = true;
exitValue = -1;
expLogGood = false;
} catch (IOException ex) {
logResult.add( "Error reading in expLog File" );
logResult.add("IOException." );
System.out.println(
"Error reading file '"
+ csvFile + "'");
errorLog = true;
expLogGood = false;
exitValue = -1;
}
System.out.println("expLogGood : " + expLogGood);
//++++++++++++++++++++++++++++++++++++read in Spk list file
String check = spikeListField.getText();
String[] temp = check.split(",");
System.out.println("spk list chooser: " + temp[0]);
String[] spkCsvFile = temp;
if (temp.length > 1) {
System.out.println("spkCsvFile.length = " + temp.length);
for (int ind = 0; ind < temp.length; ind++) {
if (0 == ind) {
spkCsvFile[ind] = temp[ind].substring(1, temp[ind].length()).trim();
System.out.println("spkCsvFile[ind] " + spkCsvFile[ind]);
File fileCheck = new File(spkCsvFile[ind]);
System.out.println("exists? spkCsvFile[" + ind + "] " + fileCheck.exists());
} else if (ind == (temp.length - 1)) {
spkCsvFile[ind] = temp[ind].substring(0, temp[ind].length() - 1).trim();
System.out.println("spkCsvFile[ind] " + spkCsvFile[ind]);
File fileCheck = new File(spkCsvFile[ind]);
System.out.println("exists? spkCsvFile[" + ind + "] " + fileCheck.exists());
} else {
spkCsvFile[ind] = temp[ind];
System.out.println("spkCsvFile[ind] " + spkCsvFile[ind].trim());
System.out.println("MMMMM" + spkCsvFile[ind].trim() + "MMMMMMM");
File fileCheck = new File(spkCsvFile[ind].trim());
System.out.println("exists? spkCsvFile[" + ind + "] " + fileCheck.exists());
}
}
} else if (temp.length == 1) {
System.out.println("temp.length = " + temp.length);
System.out.println("temp[0].length = " + temp.length);
System.out.println("temp[0].toString().endsWith(]) = " +
temp.toString().endsWith("]") );
System.out.println("temp[0].toString().length() "+temp[0].toString().length());
System.out.println("temp[0].substring(temp[0].toString().length()-1 ) = " +
temp[0].substring(temp[0].toString().length()-1 ).toString() );
System.out.println("temp[0].substring(temp[0].toString().length()-1 ).equals(]) "+
temp[0].substring(temp[0].toString().length()-1 ).equals("]") );
if ( temp[0].substring(temp[0].toString().length()-1 ).equals("]") ){
int len = temp[0].length();
len--;
System.out.println("temp[0].toString().substring(2,3 ) = "+
temp[0].substring(2,3) );
System.out.println("temp[0].toString().substring(1,2 ) = "+
temp[0].toString().substring(1,2 ) );
System.out.println("temp[0].toString().substring(0,1 ) = "+
temp[0].substring(0,1 ) );
if (temp[0].substring(2,3 ).equals("[")){
spkCsvFile[0] = temp[0].substring(3, len).trim();
} else if (temp[0].toString().substring(1,2 ).equals("[")){
spkCsvFile[0] = temp[0].toString().substring(2, len).trim();
} else if (temp[0].toString().substring(0,1 ).equals("[")){
spkCsvFile[0] = temp[0].toString().substring(1, len).trim();
}
System.out.println("spkCsvFile[ind] " + spkCsvFile[0]);
}
File fileCheck = new File(spkCsvFile[0]);
System.out.println("exists? spkCsvFile[" + 0 + "] " + fileCheck.exists());
}
System.out.println("Done with reading in spike-list file names ");
// check that it's csv
for (int j = 0; j < spkCsvFile.length; j++) {
String[] csvCheck = spkCsvFile[j].split("\\.");
String csvText = "csv";
System.out.println(csvCheck[csvCheck.length - 1]);
System.out.println(csvCheck[csvCheck.length - 1].equals("csv"));
            if (!csvCheck[csvCheck.length - 1].equals("csv")) {
logResult.add( "Spike-list csv file chosen " + spkCsvFile[j] );
logResult.add("Is not a csv file." );
ErrorHandler.errorPanel("Spike-list csv file chosen "
+ spkCsvFile[j] + '\n'
+ "Is not a csv file.");
errorLog = true;
exitValue = -1;
}
// spike list file
try {
// FileReader reads text files in the default encoding.
FileReader fileReader = new FileReader(spkCsvFile[j].trim());
System.out.println("file reader " + spkCsvFile[j]);
// Always wrap FileReader in BufferedReader.
BufferedReader bufferedReader = new BufferedReader(fileReader);
line = bufferedReader.readLine();
System.out.println(line);
String[] spkListHeader = line.split(",");
String third = "Time (s)";
System.out.println(spkListHeader[2]);
// check for spike list file
System.out.println(spkListHeader[2]);
if (!(spkListHeader[2].equals("Time (s)") || spkListHeader[2].equals("Time (s) ")
|| spkListHeader[2].equals("\"Time (s)\"")
|| spkListHeader[2].equals("\"Time (s) \""))) {
logResult.add( "Spike-list csv file chosen " + spkCsvFile[j] );
logResult.add("Is not a proper spike-list file" );
logResult.add( "'Time (s)' should be third column header " );
logResult.add("Instead 3rd column is "+ spkListHeader[2].toString() );
ErrorHandler.errorPanel("Spike-list csv file chosen '" + spkCsvFile[j]+"'" + '\n'
+ "Is not a proper spike-list file" + '\n'
+ "'Time (s)' should be third column header");
exitValue = -1;
errorLog=true;
}
for (int counter2 = 1; counter2 < 5; counter2++) {
// content check
line = bufferedReader.readLine();
if (counter2 == 2) {
System.out.println(line);
String[] spkListContents = line.split(",");
//Project,Experiment Date,Plate SN,DIV,Well,Treatment,Size,Dose,Units
System.out.println("Time(s) " + spkListContents[2]
+ " , Electrode =" + spkListContents[3]);
}
}
// Always close files.
bufferedReader.close();
} catch (FileNotFoundException ex) {
logResult.add( " Unable to open file " + spkCsvFile[j] );
logResult.add(" Please select another spike list file " );
ErrorHandler.errorPanel("Unable to open file " + j + " "
+ spkCsvFile[j] + '\n'
+ "Please select another spike list file");
exitValue = -1;
errorLog = true;
} catch (IOException ex) {
logResult.add( "Error reading file " + spkCsvFile[j] );
logResult.add(" IOException " );
ErrorHandler.errorPanel(
"Error reading file '" + j + " "
+ spkCsvFile[j] + "'" + '\n' + "Please select another spike list file");
errorLog = true;
exitValue = -1;
}
} //end of loop through spike List Files
// +++++++++need to compare files
System.out.println("Starting match");
for (int j = 0; j < spkCsvFile.length; j++) {
if (expLogGood && !errorLog) {
try {
// FileReader reads text files in the default encoding.
FileReader fileReader = new FileReader(csvFile);
// Always wrap FileReader in BufferedReader.
BufferedReader bufferedReader = new BufferedReader(fileReader);
line = bufferedReader.readLine();
line = bufferedReader.readLine();
System.out.println(line);
String[] country = line.split(",");
//Project,Experiment Date,Plate SN,DIV,Well,Treatment,Size,Dose,Units
if (country.length>5 ){
System.out.println("Project " + country[0]
+ " , Exp Date=" + country[1]
+ " , Plate SN=" + country[2]
+ " , DIV= " + country[3]
+ " , Well=" + country[4]
+ " , trt=" + country[5]);
} else{
System.out.println("Project " + country[0]
+ " , Exp Date=" + country[1]
+ " , Plate SN=" + country[2]
+ " , DIV= " + country[3]
+ " , Well=" + country[4] );
}
// now compare to name of spk-list file
File spkListFile = new File(spkCsvFile[j]);
System.out.println("spkCsvFile "+spkCsvFile[j]);
System.out.println( "File.separator" + File.separator );
String name1 = spkListFile.getName().toString();
String[] partsName1 = name1.split("_");
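                    // spike-list file names are assumed to look like
                    // <project>_<experiment date>_<plate SN>_... so the parts can be
                    // matched against the exp-log columns below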
System.out.println(partsName1[0]);
System.out.println(partsName1[1]);
System.out.println(partsName1[2]);
if (!(partsName1[0].equals(country[0].trim() ))) {
System.out.println("spkListFileNameParts " + partsName1[0].toString());
System.out.println("country " + country[0]);
logResult.add( "project in spk-list file name: " + partsName1[0] );
logResult.add("& experimental log file project: " + country[0]);
logResult.add("Do not match");
ErrorHandler.errorPanel(
"project in spk-list file name: " + partsName1[0] + '\n'
+ "& experimental log file project: " + country[0] + '\n'
+ "Do not match");
exitValue = -1;
errorLog = true;
} else if (!(partsName1[1].equals(country[1].trim() ))) {
logResult.add( "Experiment date does not match between" + '\n' + spkCsvFile[j] );
logResult.add("and experimental log file. ");
logResult.add("Please re-enter file");
ErrorHandler.errorPanel(
"Experiment date does not match between" + '\n' + spkCsvFile[j]
+ " name and experimental log file. '");
exitValue = -1;
errorLog = true;
} else if (!(partsName1[2].equals(country[2].trim() ))) {
logResult.add("No match between spk list" + spkCsvFile[j]);
logResult.add("and experimental log file. ");
ErrorHandler.errorPanel(
"No match between spk list" + spkCsvFile[j] + '\n'
+ "and experimental log file. '"
+ '\n' + "Project name do not match");
exitValue = -1;
errorLog = true;
}
// Always close files.
bufferedReader.close();
} catch (FileNotFoundException ex) {
System.out.println(
"Unable to open file '"
+ csvFile + "'");
logResult.add(" Unable to open file ");
logResult.add("'" + csvFile + "'");
ErrorHandler.errorPanel(
"Unable to open file " + '\n'
+ "'" + csvFile + "'" + '\n'
);
exitValue = -1;
errorLog = true;
} catch (IOException ex) {
System.out.println(
"Error reading file '"
+ csvFile + "'");
ErrorHandler.errorPanel(
"Error reading file " +
"'" + csvFile + "'" + '\n'
);
logResult.add("Error reading file ");
logResult.add("'" + csvFile + "'");
errorLog = true;
exitValue = -1;
}
System.out.println("expLogGood : " + expLogGood);
System.out.println(" ");
}
System.out.println("done with match ");
System.out.println("NUMER OF TIMES THROUGH LOOP : " + j + 1);
} //end of loop through spike list files
if (!errorLog) {
runButton.setEnabled(true);
logResult.add("Files chosen are without errors");
logResult.add("They match one another");
logResult.add("proceeeding to Analysis");
}
PrintToTextArea.printToTextArea(logResult , outputTextArea);
} catch (Exception e) {
logResult.add("Try at reading the csv file failed");
logResult.add(" ");
logResult.add(" ");
e.printStackTrace();
exitValue = -1;
PrintToTextArea.printToTextArea(logResult , outputTextArea);
}
return(exitValue);
}
public static int checkRObject(javax.swing.JTextField rasterFileChooserField,<|fim▁hole|> javax.swing.JTextArea toolsTextArea) {
int exitValue = 0;
makeRasterButton.setEnabled(false);
Boolean errorLog = false;
//Vector<String> logResult = new Vector<>();
// to do java 1.6 compatibility
Vector<String> logResult = new Vector<String>();
//check Robject
try {
//check extension
String rObject = rasterFileChooserField.getText();
String[] rObjectCheck1 = rObject.split("\\.");
String rObjectText1 = "RData";
System.out.println(
rObject + " (-1) end in .RData? : " );
System.out.println(rObjectCheck1[rObjectCheck1.length-1 ].equals("RData"));
System.out.println(
rObject + " end in .RData? : " );
//System.out.println(rObjectCheck1[rObjectCheck1.length ].equals("RData"));
            // reject the file if its extension is not ".RData"
if ( !rObjectCheck1[rObjectCheck1.length-1 ].equals("RData") ) {
System.out.println("in if statement");
ErrorHandler.errorPanel("Robject file chosen " + '\n'
+ rObject + '\n'
+ "Is not a Robject of .RData extension");
exitValue = -1;
errorLog = true;
return exitValue;
}
// This will reference one line at a time
String line = null;
Boolean expLogGood = true;
try {
// Query R for what's in the Robject
System.out.println(" right before SystemCall.systemCall(cmd0, outputTextArea) " );
Vector<String> envVars = new Vector<String>();
File fileWD = new File(System.getProperty("java.class.path"));
File dashDir = fileWD.getAbsoluteFile().getParentFile();
System.out.println("Dash dir " + dashDir.toString());
File systemPaths = new File( dashDir.toString() +File.separator+"Code" +
File.separator+"systemPaths.txt");
File javaClassPath = new File(System.getProperty("java.class.path"));
File rootPath1 = javaClassPath.getAbsoluteFile().getParentFile();
String rootPath2Slash = rootPath1.toString();
rootPath2Slash = rootPath2Slash.replace("\\", "\\\\");
envVars=GetEnvVars.getEnvVars( systemPaths, toolsTextArea );
String cmd0 = envVars.get(0).toString() + " " + rootPath2Slash +
File.separator +"Code"+ File.separator + "RobjectInfoScript.R "+
"RobjectPath="+rObject.toString();
System.out.println( "cmd0 " + cmd0 );
SystemCall.systemCall(cmd0, toolsTextArea );
System.out.println("After SystemCall in Raster " );
/// here we need code to populate raster parameter object
} catch (Exception i){
logResult.add("Try at running RobjectInfoScript.R");
logResult.add(" ");
logResult.add(" ");
i.printStackTrace();
exitValue = -1;
PrintToTextArea.printToTextArea(logResult , toolsTextArea);
}
} catch (Exception e) {
logResult.add("Try at reading the Robject file failed");
logResult.add(" ");
logResult.add(" ");
e.printStackTrace();
exitValue = -1;
PrintToTextArea.printToTextArea(logResult , toolsTextArea);
}
return(exitValue);
}
public static int checkDistFile(
javax.swing.JTextField distPlotFileField,
javax.swing.JButton plotDistrButton,
javax.swing.JTextArea distPlotTextArea) {
int exitValue = 0;
plotDistrButton.setEnabled(false);
Boolean errorLog = false;
//Vector<String> logResult = new Vector<>();
// to do java 1.6 compatibility
Vector<String> logResult = new Vector<String>();
//check Robject
try {
//check extension
String[] distPlotFile1=RemoveSqBrackets.removeSqBrackets( distPlotFileField );
String[] distPlotFileCheck1 = distPlotFile1[0].split("\\.");
String distPlotFileText1 = "csv";
System.out.println(
distPlotFile1[0] + " (-1) end in .csv? : " );
System.out.println(distPlotFileCheck1[distPlotFileCheck1.length-1 ].equals("csv"));
System.out.println(distPlotFileCheck1[distPlotFileCheck1.length-1 ] +
" =distPlotFileCheck1[distPlotFileCheck1.length-1 ]" );
//System.out.println(rObjectCheck1[rObjectCheck1.length ].equals("RData"));
            // reject the file if its extension is not ".csv"
if ( !distPlotFileCheck1[distPlotFileCheck1.length-1 ].equals("csv") ) {
System.out.println("in if statement");
ErrorHandler.errorPanel("Distribution file chosen " + '\n'
+ distPlotFile1[0] + '\n'
+ "Is not a distribution file of .csv extension");
exitValue = -1;
errorLog = true;
return exitValue;
}
// This will reference one line at a time
String line = null;
Boolean expLogGood = true;
try {
// Query R for what's in the Robject
System.out.println(" right before SystemCall.systemCall(cmd0, distPlotTextArea) " );
Vector<String> envVars = new Vector<String>();
File fileWD = new File(System.getProperty("java.class.path"));
File dashDir = fileWD.getAbsoluteFile().getParentFile();
System.out.println("Dash dir " + dashDir.toString());
File systemPaths = new File( dashDir.toString() +File.separator+"Code" +
File.separator+"systemPaths.txt");
File javaClassPath = new File(System.getProperty("java.class.path"));
File rootPath1 = javaClassPath.getAbsoluteFile().getParentFile();
String rootPath2Slash = rootPath1.toString();
rootPath2Slash = rootPath2Slash.replace("\\", "\\\\");
envVars=GetEnvVars.getEnvVars( systemPaths, distPlotTextArea );
String cmd0 = envVars.get(0).toString() + " " + rootPath2Slash +
File.separator +"Code"+ File.separator + "distFileInfoScript.R "+
"distFilePath="+distPlotFile1[0].toString();
System.out.println( "cmd0 " + cmd0 );
SystemCall.systemCall(cmd0, distPlotTextArea );
System.out.println("After SystemCall in distPlot " );
/// here we need code to populate raster parameter object
} catch (Exception i){
logResult.add("Try at running distFileInfoScript.R");
logResult.add(" ");
logResult.add(" ");
i.printStackTrace();
exitValue = -1;
PrintToTextArea.printToTextArea(logResult , distPlotTextArea);
}
} catch (Exception e) {
logResult.add("Try at reading the Robject file failed");
logResult.add(" ");
logResult.add(" ");
e.printStackTrace();
exitValue = -1;
PrintToTextArea.printToTextArea(logResult ,distPlotTextArea);
}
return(exitValue);
}
}<|fim▁end|> | javax.swing.JButton makeRasterButton, |
<|file_name|>queue.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
"""
The ``queue` utils
==================
Some operation will require a queue. This utils file
"""
<|fim▁hole|>__credits__ = ['Salas']
__license__ = 'MIT'
__version__ = '0.2.0'
__maintainer__ = 'Salas'
__email__ = '[email protected]'
__status__ = 'Pre-Alpha'<|fim▁end|> | __author__ = 'Salas'
__copyright__ = 'Copyright 2014 LTL' |
<|file_name|>view.rs<|end_file_name|><|fim▁begin|>// +--------------------------------------------------------------------------+
// | Copyright 2016 Matthew D. Steele <[email protected]> |
// | |
// | This file is part of System Syzygy. |
// | |
// | System Syzygy is free software: you can redistribute it and/or modify it |
// | under the terms of the GNU General Public License as published by the |
// | Free Software Foundation, either version 3 of the License, or (at your |
// | option) any later version. |
// | |
// | System Syzygy is distributed in the hope that it will be useful, but |
// | WITHOUT ANY WARRANTY; without even the implied warranty of |
// | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
// | General Public License for details. |
// | |
// | You should have received a copy of the GNU General Public License along |
// | with System Syzygy. If not, see <http://www.gnu.org/licenses/>. |
// +--------------------------------------------------------------------------+
use super::scenes;
use crate::elements::memory::{MemoryGridView, NextShapeView, FLIP_SLOWDOWN};
use crate::elements::{
FadeStyle, ProgressBar, PuzzleCmd, PuzzleCore, PuzzleView,
};
use crate::gui::{Action, Canvas, Element, Event, Rect, Resources, Sound};
use crate::modes::SOLVED_INFO_TEXT;
use crate::save::{Direction, Game, PuzzleState, ServesState};
// ========================================================================= //
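// Countdown for removing a flipped symbol: `remove_countdown` starts at
// REMOVE_DELAY and ticks down; the success chime (or the annoyed sound) plays
// when it reaches REMOVE_SOUND_AT, and the tiles are removed when it hits zero.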
const REMOVE_DELAY: i32 = FLIP_SLOWDOWN * 5 + 20;
const REMOVE_SOUND_AT: i32 = 20 + FLIP_SLOWDOWN * 2;
// ========================================================================= //
pub struct View {
core: PuzzleCore<()>,
grid: MemoryGridView,
next: NextShapeView,
progress: ProgressBar,
progress_adjust: u32,
remove_countdown: i32,
show_next: bool,
}
impl View {
pub fn new(
resources: &mut Resources,
visible: Rect,
state: &ServesState,
) -> View {
let mut core = {
let fade = (FadeStyle::LeftToRight, FadeStyle::LeftToRight);
let intro = scenes::compile_intro_scene(resources);
let outro = scenes::compile_outro_scene(resources);
PuzzleCore::new(resources, visible, state, fade, intro, outro)
};
core.add_extra_scene(scenes::compile_argony_midscene(resources));
core.add_extra_scene(scenes::compile_mezure_midscene(resources));
View {
core,
grid: MemoryGridView::new(
resources,
"memory/serves",
(256, 176),
state.grid(),
),
next: NextShapeView::new(resources, "memory/serves", (96, 208)),
progress: ProgressBar::new(
(104, 176),
Direction::East,
80,
(191, 191, 0),
),
progress_adjust: 0,
remove_countdown: 0,
show_next: false,
}
}
}
impl Element<Game, PuzzleCmd> for View {
fn draw(&self, game: &Game, canvas: &mut Canvas) {
let state = &game.if_memory_serves;
self.core.draw_back_layer(canvas);
if !state.is_solved() {
let value = state.current_step() as u32 + self.progress_adjust;
let maximum = state.total_num_steps() as u32;
self.progress.draw(value, maximum, canvas);
}
self.grid.draw(state.grid(), canvas);
self.core.draw_middle_layer(canvas);
if self.show_next {
self.next.draw(&state.next_shape(), canvas);
}
self.core.draw_front_layer(canvas, state);
}
fn handle_event(
&mut self,
event: &Event,
game: &mut Game,
) -> Action<PuzzleCmd> {
let state = &mut game.if_memory_serves;
let mut action = self.core.handle_event(event, state);
if event == &Event::ClockTick && self.remove_countdown > 0 {
self.remove_countdown -= 1;
if self.remove_countdown == REMOVE_SOUND_AT {
let symbol = self.grid.flip_symbol();
let sound = if state.can_remove_symbol(symbol) {
self.progress_adjust = 1;
Sound::mid_puzzle_chime()<|fim▁hole|> Sound::talk_annoyed_hi()
};
action.merge(Action::redraw().and_play_sound(sound));
}
if self.remove_countdown == 0 {
self.progress_adjust = 0;
state.remove_symbol(self.grid.flip_symbol());
self.grid.clear_flip();
if state.is_solved() {
self.core.begin_outro_scene();
action = action.and_return(PuzzleCmd::Save);
}
action.also_redraw();
}
}
if (!action.should_stop() && self.remove_countdown == 0)
|| event == &Event::ClockTick
{
let subaction =
self.next.handle_event(event, &mut state.next_shape());
if let Some(&pt) = subaction.value() {
let (col, row) = self.grid.coords_for_point(pt);
if let Some(symbol) = state.try_place_shape(col, row) {
action.also_play_sound(Sound::device_drop());
self.grid.place_symbol(symbol);
}
}
action.merge(subaction.but_no_value());
}
if (!action.should_stop() && self.remove_countdown == 0)
|| event == &Event::ClockTick
{
let subaction = self.grid.handle_event(event, state.grid_mut());
if let Some(&symbol) = subaction.value() {
action.also_play_sound(Sound::device_rotate());
self.grid.reveal_symbol(symbol);
self.remove_countdown = REMOVE_DELAY;
}
action.merge(subaction.but_no_value());
}
if !action.should_stop() {
self.core.begin_character_scene_on_click(event);
}
action
}
}
impl PuzzleView for View {
fn info_text(&self, game: &Game) -> &'static str {
if game.if_memory_serves.is_solved() {
SOLVED_INFO_TEXT
} else {
INFO_BOX_TEXT
}
}
fn undo(&mut self, _: &mut Game) {}
fn redo(&mut self, _: &mut Game) {}
fn reset(&mut self, game: &mut Game) {
self.core.clear_undo_redo();
game.if_memory_serves.reset();
}
fn solve(&mut self, game: &mut Game) {
game.if_memory_serves.solve();
self.core.begin_outro_scene();
}
fn drain_queue(&mut self) {
for (kind, value) in self.core.drain_queue() {
if kind == 0 {
self.show_next = value != 0;
} else if kind == 1 {
if value >= 0 && (value as usize) < LETTERS.len() {
let (col, row, letter) = LETTERS[value as usize];
self.grid.add_letter(col, row, letter);
}
}
}
}
}
// ========================================================================= //
#[cfg_attr(rustfmt, rustfmt_skip)]
const LETTERS: &[(i32, i32, char)] = &[
(1, 0, 'I'), (1, 1, 'N'), (1, 2, 'T'), (1, 3, 'E'),
(3, 0, 'C'), (3, 1, 'O'), (3, 2, 'N'), (3, 3, 'S'),
(5, 0, 'I'), (5, 1, 'N'), (5, 2, 'D'), (5, 3, 'E'),
];
const INFO_BOX_TEXT: &str = "\
Your goal is to place (and later remove) each group of tiles on
the grid.
When a group of tiles appears on the left, use $M{your finger}{the mouse} to
drag it onto the grid on the right. The tiles will then flip over;
the backs of the tiles will be green.
Tiles will eventually turn from green to gray; once all tiles
with a given symbol are gray, they may be safely removed.
You can remove a group of tiles at any time by $M{tapp}{click}ing any of
the tiles on the grid that had that symbol. However, if you
accidentally remove a tile that's still green, you will have to
start over.
$M{Tap}{Click} on a character in the scene to hear their words of wisdom.";
// ========================================================================= //<|fim▁end|> | } else { |
<|file_name|>monad.ts<|end_file_name|><|fim▁begin|>/**
* Created by edlc on 12/9/16.
* Contains basic `Monad` class and associated methods.
* For 'what is a monad'/back-story
* @see `Maybe` reference: [http://hackage.haskell.org/package/base-4.10.1.0/docs/Data-Maybe.html](http://hackage.haskell.org/package/base-4.10.1.0/docs/Data-Maybe.html)
* @see `Either` reference: [http://hackage.haskell.org/package/base-4.10.1.0/docs/Data-Either.html](http://hackage.haskell.org/package/base-4.10.1.0/docs/Data-Either.html)
* @module monad
*/
import {isset} from "../object/isset";
import {$instanceOf} from '../platform/object';
import {curry, CurryOf1, toFunction, curry2, CurryOf2} from "../function";
import {
Applicative, ApplicativeConstructor, Functor,
FunctorConstructor, FunctorMapFn, Apply, ApplyConstructor, TypeRef
} from "../types";
import {isType} from "../object";
export interface Monad<T> extends Applicative<T> {
join(): T;
flatMap<RetT>(fn: FunctorMapFn<RetT>): Monad<RetT>;
}
export type MonadConstructor<T> = ApplicativeConstructor<T>;
export class MonadBase<T> implements Monad<T> {
/**
* Same as `new Monad(...)` just in 'static' function format.
*/
static of<X>(x: X): Monad<X> {
return new MonadBase(x);
}
static liftA2<A, B, RetT>(fn, appA: Applicative<A>, appB: Applicative<B>): Applicative<RetT> {
return (appA.constructor as ApplicativeConstructor<RetT>).of(
fn(appA.valueOf(), appB.valueOf())
);
}
static apRight<A, B, RetT>(appA: Applicative<A>, appB: Applicative<B>): Applicative<RetT> {
(appA.valueOf() as unknown as CallableFunction)();
return (appB.constructor as ApplicativeConstructor<RetT>).of(
(appB.valueOf() as unknown as CallableFunction)()
);
}
static apLeft<A, B, RetT>(appA: Applicative<A>, appB: Applicative<B>): Applicative<RetT> {
const out = (appA.valueOf() as unknown as CallableFunction)();
(appB.valueOf() as unknown as CallableFunction)();
return (appA.constructor as ApplicativeConstructor<RetT>).of(out);
}
constructor(readonly value?: T) {
}
valueOf(): T {
return this.value;
}
map<MapOpRet>(fn: FunctorMapFn<MapOpRet>): Functor<MapOpRet> | Functor {
return new (this.constructor as FunctorConstructor<MapOpRet>)(fn(this.valueOf()));
}
/**
* Applicative apply operation - applies contained function over passed in functor.
*/
ap<X, RetT>(f: Functor<X>): Apply<RetT> {
return new (this.constructor as ApplyConstructor<T>)(f.map(
toFunction(this.valueOf()) as FunctorMapFn<RetT>
).valueOf()
) as unknown as Apply<RetT>;
}
/**
* Monadic join - Removes one layer of monadic structure from value.
*/
join(): T {
return this.valueOf();
}
/**
* Flat map operation.
*/
flatMap<RetT = any>(fn: FunctorMapFn<RetT>): Monad<RetT> {
const out = unwrapMonadByType(this.constructor as TypeRef, this.map(fn) as Monad<RetT> | RetT);
return (this.constructor as ApplicativeConstructor<RetT>).of(out) as Monad<RetT>;
}
/**
* Simple to string implementation to make instances where this monad is dumped out
* as a string more readable.
*/
toString(): string {
        return `${this.constructor.name}(${this.valueOf()})`;
}
}
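// Illustrative usage (values are made up):
//   const m = MonadBase.of(2).flatMap(x => MonadBase.of(x * 3)); // MonadBase(6)
//   m.join();   // 6
//   isMonad(m); // true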
export const
/**
* Returns boolean indicating whether given value is an
* instance of monad or not.
*/
isMonad = $instanceOf(MonadBase) as CurryOf1<any, boolean>,
/**
* Calls `valueOf` on value (use for functional composition).
*/
valueOf = <T>(x: Monad<T>): T => x.valueOf(),
/**
* Calls `valueOf` on given value. Same as
* monadic `join` operation (extracts inner value of
* container/object).
*/
join = valueOf,
/**
* Maps given function over given functor.
*/
fmap = <T, RetT>(fn: FunctorMapFn<RetT>, x: Functor<T>): Functor<RetT> | Functor => x.map(fn),
/**
* Curried version of `fmap`.
*/
$fmap = curry(fmap),
/**
* Applies function contained by applicative to contents of given functor.
* (Same as functional applicative `apply`). Returns a functor containing the newly
* returned value from the application.
*/
ap = <A, B, RetT>(app: Applicative<A>, functor: Functor<B>): Functor<RetT> => app.ap(functor),
/**
* Curried version of `ap`.
*/
$ap = curry(ap),
/**
* Flat maps a function over given monad's contained value.
*/
flatMap = <T, RetT>(fn: FunctorMapFn<RetT>, monad: Monad<T>): Monad<RetT> => monad.flatMap(fn),
<|fim▁hole|> * Curried version of `flatMap`.
*/
$flatMap = curry(flatMap),
/**
* Unwraps monad by type.
*/
unwrapMonadByType = <T>(Type: TypeRef, monad: Monad<T> | T): any => {
if (!isset(monad) || !isType(Type, monad)) {
return monad;
}
        // keep joining while the result is still a monad of the given type;
        // the previous early return made the loop below unreachable
        let result = (monad as Monad<T>).join() as Monad<T> | T;
        while (isset(result) && result !== monad && isType(Type, result)) {
            result = (result as Monad<T>).join();
        }
        return result;
},
/**
* Curried version of `unwrapMonadByType`.
*/
$unwrapMonadByType = curry2(unwrapMonadByType) as CurryOf2<TypeRef, Monad<any> | any, any>;<|fim▁end|> | /** |
<|file_name|>mitkNavigationDataPassThroughFilter.cpp<|end_file_name|><|fim▁begin|>/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#include "mitkNavigationDataPassThroughFilter.h"
<|fim▁hole|>{
}
mitk::NavigationDataPassThroughFilter::~NavigationDataPassThroughFilter()
{
}
void mitk::NavigationDataPassThroughFilter::GenerateData()
{
// get each input and transfer the data
DataObjectPointerArray inputs = this->GetInputs(); //get all inputs
for ( unsigned int index = 0; index < inputs.size(); ++index )
{
// get the needed variables (input and output)
const mitk::NavigationData* nd = this->GetInput(index);
mitk::NavigationData* output = this->GetOutput(index);
if ( ! nd || ! output )
{
MITK_ERROR("NavigationDataToNavigationDataFilter")("NavigationDataPassThroughFilter")
<< "Input and output must not be null.";
mitkThrow() << "Input and output must not be null.";
}
output->Graft(nd); // copy all information from input to output
output->SetDataValid(nd->IsDataValid());
}
}<|fim▁end|> | mitk::NavigationDataPassThroughFilter::NavigationDataPassThroughFilter() |
<|file_name|>watch-files.js<|end_file_name|><|fim▁begin|>import watch from 'gulp-watch';
import browserSync from 'browser-sync';
import path from 'path';
<|fim▁hole|> * Gulp task to watch files
* @return {function} Function task
*/
export default function watchFilesTask() {
const config = this.config;
const runSequence = require('run-sequence').use(this.gulp);
return () => {
if (config.entryHTML) {
watch(
path.join(
config.basePath,
config.browsersync.server.baseDir,
config.entryHTML
),
() => {
runSequence('build', browserSync.reload);
}
);
}
if (config.postcss) {
watch(path.join(config.sourcePath, '**/*.{css,scss,less}'), () => {
runSequence('postcss');
});
}
if (config.customWatch) {
if (typeof config.customWatch === 'function') {
config.customWatch(config, watch, browserSync);
} else {
watch(config.customWatch, () => {
runSequence('build', browserSync.reload);
});
}
}
};
}<|fim▁end|> | /** |
<|file_name|>paytoedit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qrtextedit import ScanQRTextEdit
import re
from decimal import Decimal
from electrum import bitcoin
import util
RE_ADDRESS = '[1-9A-HJ-NP-Za-km-z]{26,}'
RE_ALIAS = '(.*?)\s*\<([1-9A-HJ-NP-Za-km-z]{26,})\>'
frozen_style = "QWidget { background-color:none; border:none;}"
normal_style = "QPlainTextEdit { }"
class PayToEdit(ScanQRTextEdit):
def __init__(self, win):
ScanQRTextEdit.__init__(self)
self.win = win
self.amount_edit = win.amount_e
self.document().contentsChanged.connect(self.update_size)
self.heightMin = 0
self.heightMax = 150
self.c = None
self.textChanged.connect(self.check_text)
self.outputs = []
self.errors = []
self.is_pr = False
self.is_alias = False
self.scan_f = win.pay_to_URI
self.update_size()
self.payto_address = None
self.previous_payto = ''
def setFrozen(self, b):
self.setReadOnly(b)<|fim▁hole|> for button in self.buttons:
button.setHidden(b)
def setGreen(self):
self.setStyleSheet(util.GREEN_BG)
def setExpired(self):
self.setStyleSheet(util.RED_BG)
def parse_address_and_amount(self, line):
x, y = line.split(',')
out_type, out = self.parse_output(x)
amount = self.parse_amount(y)
return out_type, out, amount
def parse_output(self, x):
try:
address = self.parse_address(x)
return bitcoin.TYPE_ADDRESS, address
except:
script = self.parse_script(x)
return bitcoin.TYPE_SCRIPT, script
def parse_script(self, x):
from electrum.transaction import opcodes, push_script
script = ''
for word in x.split():
if word[0:3] == 'OP_':
assert word in opcodes.lookup
script += chr(opcodes.lookup[word])
else:
script += push_script(word).decode('hex')
return script
def parse_amount(self, x):
if x.strip() == '!':
return '!'
p = pow(10, self.amount_edit.decimal_point())
return int(p * Decimal(x.strip()))
def parse_address(self, line):
r = line.strip()
m = re.match('^'+RE_ALIAS+'$', r)
address = str(m.group(2) if m else r)
assert bitcoin.is_address(address)
return address
def check_text(self):
self.errors = []
if self.is_pr:
return
# filter out empty lines
lines = filter(lambda x: x, self.lines())
outputs = []
total = 0
self.payto_address = None
if len(lines) == 1:
data = lines[0]
if data.startswith("myriadcoin:"):
self.scan_f(data)
return
try:
self.payto_address = self.parse_output(data)
except:
pass
if self.payto_address:
self.win.lock_amount(False)
return
is_max = False
for i, line in enumerate(lines):
try:
_type, to_address, amount = self.parse_address_and_amount(line)
except:
self.errors.append((i, line.strip()))
continue
outputs.append((_type, to_address, amount))
if amount == '!':
is_max = True
else:
total += amount
self.win.is_max = is_max
self.outputs = outputs
self.payto_address = None
if self.win.is_max:
self.win.do_update_fee()
else:
self.amount_edit.setAmount(total if outputs else None)
self.win.lock_amount(total or len(lines)>1)
def get_errors(self):
return self.errors
def get_recipient(self):
return self.payto_address
def get_outputs(self, is_max):
if self.payto_address:
if is_max:
amount = '!'
else:
amount = self.amount_edit.get_amount()
_type, addr = self.payto_address
self.outputs = [(_type, addr, amount)]
return self.outputs[:]
def lines(self):
return unicode(self.toPlainText()).split('\n')
def is_multiline(self):
return len(self.lines()) > 1
def paytomany(self):
self.setText("\n\n\n")
self.update_size()
def update_size(self):
docHeight = self.document().size().height()
h = docHeight*17 + 11
if self.heightMin <= h <= self.heightMax:
self.setMinimumHeight(h)
self.setMaximumHeight(h)
self.verticalScrollBar().hide()
def setCompleter(self, completer):
self.c = completer
self.c.setWidget(self)
self.c.setCompletionMode(QCompleter.PopupCompletion)
self.c.activated.connect(self.insertCompletion)
def insertCompletion(self, completion):
if self.c.widget() != self:
return
tc = self.textCursor()
extra = completion.length() - self.c.completionPrefix().length()
tc.movePosition(QTextCursor.Left)
tc.movePosition(QTextCursor.EndOfWord)
tc.insertText(completion.right(extra))
self.setTextCursor(tc)
def textUnderCursor(self):
tc = self.textCursor()
tc.select(QTextCursor.WordUnderCursor)
return tc.selectedText()
def keyPressEvent(self, e):
if self.isReadOnly():
return
if self.c.popup().isVisible():
if e.key() in [Qt.Key_Enter, Qt.Key_Return]:
e.ignore()
return
if e.key() in [Qt.Key_Tab]:
e.ignore()
return
if e.key() in [Qt.Key_Down, Qt.Key_Up] and not self.is_multiline():
e.ignore()
return
QPlainTextEdit.keyPressEvent(self, e)
ctrlOrShift = e.modifiers() and (Qt.ControlModifier or Qt.ShiftModifier)
if self.c is None or (ctrlOrShift and e.text().isEmpty()):
return
eow = QString("~!@#$%^&*()_+{}|:\"<>?,./;'[]\\-=")
        hasModifier = (e.modifiers() != Qt.NoModifier) and not ctrlOrShift
completionPrefix = self.textUnderCursor()
if hasModifier or e.text().isEmpty() or completionPrefix.length() < 1 or eow.contains(e.text().right(1)):
self.c.popup().hide()
return
if completionPrefix != self.c.completionPrefix():
            self.c.setCompletionPrefix(completionPrefix)
self.c.popup().setCurrentIndex(self.c.completionModel().index(0, 0))
cr = self.cursorRect()
cr.setWidth(self.c.popup().sizeHintForColumn(0) + self.c.popup().verticalScrollBar().sizeHint().width())
self.c.complete(cr)
def qr_input(self):
data = super(PayToEdit,self).qr_input()
if data.startswith("myriadcoin:"):
self.scan_f(data)
# TODO: update fee
def resolve(self):
self.is_alias = False
if self.hasFocus():
return
if self.is_multiline(): # only supports single line entries atm
return
if self.is_pr:
return
key = str(self.toPlainText())
if key == self.previous_payto:
return
self.previous_payto = key
if not (('.' in key) and (not '<' in key) and (not ' ' in key)):
return
try:
data = self.win.contacts.resolve(key)
except:
return
if not data:
return
self.is_alias = True
address = data.get('address')
name = data.get('name')
new_url = key + ' <' + address + '>'
self.setText(new_url)
self.previous_payto = new_url
#if self.win.config.get('openalias_autoadd') == 'checked':
self.win.contacts[key] = ('openalias', name)
self.win.contact_list.on_update()
self.setFrozen(True)
if data.get('type') == 'openalias':
self.validated = data.get('validated')
if self.validated:
self.setGreen()
else:
self.setExpired()
else:
self.validated = None<|fim▁end|> | self.setStyleSheet(frozen_style if b else normal_style) |
<|file_name|>resource_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A thread that takes a URL and streams back the binary data.
use connector::{create_http_connector, create_ssl_client};
use cookie;
use cookie_rs;
use cookie_storage::CookieStorage;
use devtools_traits::DevtoolsControlMsg;
use fetch::cors_cache::CorsCache;
use fetch::methods::{FetchContext, fetch};
use filemanager_thread::{FileManager, TFDProvider};
use hsts::HstsList;
use http_loader::{HttpState, http_redirect_fetch};
use hyper_serde::Serde;
use ipc_channel::ipc::{self, IpcReceiver, IpcReceiverSet, IpcSender};
use net_traits::{CookieSource, CoreResourceThread};
use net_traits::{CoreResourceMsg, FetchResponseMsg};
use net_traits::{CustomResponseMediator, ResourceId};
use net_traits::{ResourceThreads, WebSocketCommunicate, WebSocketConnectData};
use net_traits::request::{Request, RequestInit};
use net_traits::response::{Response, ResponseInit};
use net_traits::storage_thread::StorageThreadMsg;
use profile_traits::time::ProfilerChan;
use serde::{Deserialize, Serialize};
use serde_json;
use servo_config::opts;
use servo_config::resource_files::resources_dir_path;
use servo_url::ServoUrl;
use std::borrow::{Cow, ToOwned};
use std::collections::HashMap;
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock};
use std::sync::mpsc::Sender;
use std::thread;
use storage_thread::StorageThreadFactory;
use websocket_loader;
const TFD_PROVIDER: &'static TFDProvider = &TFDProvider;
/// Returns a tuple of (public, private) senders to the new threads.
pub fn new_resource_threads(user_agent: Cow<'static, str>,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
profiler_chan: ProfilerChan,
config_dir: Option<PathBuf>)
-> (ResourceThreads, ResourceThreads) {
let (public_core, private_core) = new_core_resource_thread(
user_agent,
devtools_chan,
profiler_chan,
config_dir.clone());
let storage: IpcSender<StorageThreadMsg> = StorageThreadFactory::new(config_dir);
(ResourceThreads::new(public_core, storage.clone()),
ResourceThreads::new(private_core, storage))
}
/// Create a CoreResourceThread
pub fn new_core_resource_thread(user_agent: Cow<'static, str>,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
profiler_chan: ProfilerChan,
config_dir: Option<PathBuf>)
-> (CoreResourceThread, CoreResourceThread) {
let (public_setup_chan, public_setup_port) = ipc::channel().unwrap();
let (private_setup_chan, private_setup_port) = ipc::channel().unwrap();
thread::Builder::new().name("ResourceManager".to_owned()).spawn(move || {
let resource_manager = CoreResourceManager::new(
user_agent, devtools_chan, profiler_chan
);
let mut channel_manager = ResourceChannelManager {
resource_manager: resource_manager,
config_dir: config_dir,
};
channel_manager.start(public_setup_port,
private_setup_port);
}).expect("Thread spawning failed");
(public_setup_chan, private_setup_chan)
}
struct ResourceChannelManager {
resource_manager: CoreResourceManager,
config_dir: Option<PathBuf>,
}
fn create_http_states(config_dir: Option<&Path>) -> (Arc<HttpState>, Arc<HttpState>) {
let mut hsts_list = HstsList::from_servo_preload();
let mut auth_cache = AuthCache::new();
let mut cookie_jar = CookieStorage::new(150);
if let Some(config_dir) = config_dir {
read_json_from_file(&mut auth_cache, config_dir, "auth_cache.json");
read_json_from_file(&mut hsts_list, config_dir, "hsts_list.json");
read_json_from_file(&mut cookie_jar, config_dir, "cookie_jar.json");
}
let ca_file = match opts::get().certificate_path {
Some(ref path) => PathBuf::from(path),
None => resources_dir_path()
.expect("Need certificate file to make network requests")
.join("certs"),
};
let ssl_client = create_ssl_client(&ca_file);
let http_state = HttpState {
cookie_jar: RwLock::new(cookie_jar),
auth_cache: RwLock::new(auth_cache),
hsts_list: RwLock::new(hsts_list),
ssl_client: ssl_client.clone(),
connector: create_http_connector(ssl_client),
};
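    // Note: only the public state above is seeded from (and later persisted to)
    // the profile directory; the private/incognito state below starts empty.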
let private_ssl_client = create_ssl_client(&ca_file);
let private_http_state = HttpState::new(private_ssl_client);
(Arc::new(http_state), Arc::new(private_http_state))
}
impl ResourceChannelManager {
#[allow(unsafe_code)]
fn start(&mut self,
public_receiver: IpcReceiver<CoreResourceMsg>,
private_receiver: IpcReceiver<CoreResourceMsg>) {
let (public_http_state, private_http_state) =
create_http_states(self.config_dir.as_ref().map(Deref::deref));
let mut rx_set = IpcReceiverSet::new().unwrap();
let private_id = rx_set.add(private_receiver).unwrap();
let public_id = rx_set.add(public_receiver).unwrap();
loop {
for (id, data) in rx_set.select().unwrap().into_iter().map(|m| m.unwrap()) {
let group = if id == private_id {
&private_http_state
} else {
assert_eq!(id, public_id);
&public_http_state
};
if let Ok(msg) = data.to() {
if !self.process_msg(msg, group) {
return;
}
}
}
}
}
/// Returns false if the thread should exit.
fn process_msg(&mut self,
msg: CoreResourceMsg,
http_state: &Arc<HttpState>) -> bool {
match msg {
CoreResourceMsg::Fetch(req_init, sender) =>
self.resource_manager.fetch(req_init, None, sender, http_state),
CoreResourceMsg::FetchRedirect(req_init, res_init, sender) =>
self.resource_manager.fetch(req_init, Some(res_init), sender, http_state),
CoreResourceMsg::WebsocketConnect(connect, connect_data) =>
self.resource_manager.websocket_connect(connect, connect_data, http_state),
CoreResourceMsg::SetCookieForUrl(request, cookie, source) =>
self.resource_manager.set_cookie_for_url(&request, cookie.into_inner(), source, http_state),
CoreResourceMsg::SetCookiesForUrl(request, cookies, source) => {
for cookie in cookies {
self.resource_manager.set_cookie_for_url(&request, cookie.into_inner(), source, http_state);
}
}
CoreResourceMsg::GetCookiesForUrl(url, consumer, source) => {
let mut cookie_jar = http_state.cookie_jar.write().unwrap();
consumer.send(cookie_jar.cookies_for_url(&url, source)).unwrap();
}
CoreResourceMsg::NetworkMediator(mediator_chan) => {
self.resource_manager.swmanager_chan = Some(mediator_chan)
}
CoreResourceMsg::GetCookiesDataForUrl(url, consumer, source) => {
let mut cookie_jar = http_state.cookie_jar.write().unwrap();
let cookies = cookie_jar.cookies_data_for_url(&url, source).map(Serde).collect();
consumer.send(cookies).unwrap();
}
CoreResourceMsg::Cancel(res_id) => {
if let Some(cancel_sender) = self.resource_manager.cancel_load_map.get(&res_id) {
let _ = cancel_sender.send(());
}
self.resource_manager.cancel_load_map.remove(&res_id);
}
CoreResourceMsg::Synchronize(sender) => {
let _ = sender.send(());
}
CoreResourceMsg::ToFileManager(msg) => self.resource_manager.filemanager.handle(msg, TFD_PROVIDER),
CoreResourceMsg::Exit(sender) => {
if let Some(ref config_dir) = self.config_dir {
match http_state.auth_cache.read() {
Ok(auth_cache) => write_json_to_file(&*auth_cache, config_dir, "auth_cache.json"),
Err(_) => warn!("Error writing auth cache to disk"),
}
match http_state.cookie_jar.read() {
Ok(jar) => write_json_to_file(&*jar, config_dir, "cookie_jar.json"),
Err(_) => warn!("Error writing cookie jar to disk"),
}
match http_state.hsts_list.read() {
Ok(hsts) => write_json_to_file(&*hsts, config_dir, "hsts_list.json"),
Err(_) => warn!("Error writing hsts list to disk"),
}
}
let _ = sender.send(());
return false;
}
}
true
}
}
pub fn read_json_from_file<T>(data: &mut T, config_dir: &Path, filename: &str)
where T: Deserialize
{
let path = config_dir.join(filename);
let display = path.display();
let mut file = match File::open(&path) {
Err(why) => {
warn!("couldn't open {}: {}", display, Error::description(&why));
return;
},
Ok(file) => file,
};
let mut string_buffer: String = String::new();
match file.read_to_string(&mut string_buffer) {
Err(why) => {
panic!("couldn't read from {}: {}", display,
Error::description(&why))
},
Ok(_) => println!("successfully read from {}", display),
}
match serde_json::from_str(&string_buffer) {
Ok(decoded_buffer) => *data = decoded_buffer,
Err(why) => warn!("Could not decode buffer{}", why),
}
}
pub fn write_json_to_file<T>(data: &T, config_dir: &Path, filename: &str)
where T: Serialize
{
let json_encoded: String;
match serde_json::to_string_pretty(&data) {
Ok(d) => json_encoded = d,
Err(_) => return,
}
let path = config_dir.join(filename);
let display = path.display();
let mut file = match File::create(&path) {
Err(why) => panic!("couldn't create {}: {}",
display,
Error::description(&why)),
Ok(file) => file,
};
match file.write_all(json_encoded.as_bytes()) {
Err(why) => {
panic!("couldn't write to {}: {}", display,
Error::description(&why))
},
Ok(_) => println!("successfully wrote to {}", display),
}
}
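/// A stored HTTP auth credential; persisted to `auth_cache.json` between runs.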
#[derive(Clone, Deserialize, Serialize)]
pub struct AuthCacheEntry {
pub user_name: String,
pub password: String,
}
impl AuthCache {
pub fn new() -> AuthCache {
AuthCache {
version: 1,
entries: HashMap::new()
}
}
}
#[derive(Clone, Deserialize, Serialize)]
pub struct AuthCache {
pub version: u32,
pub entries: HashMap<String, AuthCacheEntry>,
}
pub struct CoreResourceManager {
user_agent: Cow<'static, str>,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
swmanager_chan: Option<IpcSender<CustomResponseMediator>>,
filemanager: FileManager,
cancel_load_map: HashMap<ResourceId, Sender<()>>,
}
impl CoreResourceManager {
pub fn new(user_agent: Cow<'static, str>,
devtools_channel: Option<Sender<DevtoolsControlMsg>>,
_profiler_chan: ProfilerChan) -> CoreResourceManager {
CoreResourceManager {
user_agent: user_agent,
devtools_chan: devtools_channel,
swmanager_chan: None,
filemanager: FileManager::new(),
cancel_load_map: HashMap::new(),
}
}
fn set_cookie_for_url(&mut self, request: &ServoUrl,
cookie: cookie_rs::Cookie<'static>,
source: CookieSource,
http_state: &Arc<HttpState>) {
if let Some(cookie) = cookie::Cookie::new_wrapped(cookie, request, source) {
let mut cookie_jar = http_state.cookie_jar.write().unwrap();
cookie_jar.push(cookie, request, source)
}
}
fn fetch(&self,
req_init: RequestInit,
res_init_: Option<ResponseInit>,
mut sender: IpcSender<FetchResponseMsg>,
http_state: &Arc<HttpState>) {
let http_state = http_state.clone();
let ua = self.user_agent.clone();
let dc = self.devtools_chan.clone();
let filemanager = self.filemanager.clone();
thread::Builder::new().name(format!("fetch thread for {}", req_init.url)).spawn(move || {
let mut request = Request::from_init(req_init);
// XXXManishearth: Check origin against pipeline id (also ensure that the mode is allowed)
// todo load context / mimesniff in fetch
// todo referrer policy?
// todo service worker stuff
let context = FetchContext {
state: http_state,
user_agent: ua,
devtools_chan: dc,<|fim▁hole|> match res_init_ {
Some(res_init) => {
let response = Response::from_init(res_init);
http_redirect_fetch(&mut request,
&mut CorsCache::new(),
response,
true,
&mut sender,
&mut None,
&context);
},
None => fetch(&mut request, &mut sender, &context),
};
}).expect("Thread spawning failed");
}
fn websocket_connect(&self,
connect: WebSocketCommunicate,
connect_data: WebSocketConnectData,
http_state: &Arc<HttpState>) {
websocket_loader::init(connect, connect_data, http_state.clone());
}
}<|fim▁end|> | filemanager: filemanager,
};
|
<|file_name|>Upload.py<|end_file_name|><|fim▁begin|>from getpass import getuser
from Job import Job
from datetime import datetime
import os.path
import time
import re
import Database
class Upload( object ):
@classmethod
def CreateUpload( cls, filename=None ):
if not filename:
filename = Upload.defaultNewFilename()
id = Database.executeScalar('elc_CreateUpload',getuser(),filename)
return Upload(id)
@classmethod
def list( cls ):
uploads=[]
for r in Database.execute('select upl_id, created_by, creation_time, filename, n_insert, n_delete from elc_GetUploadDetails(NULL)'):
created_date = r[2].strftime('%d-%b-%Y')
uploads.append(dict(
upl_id=r[0],
created_by=r[1],
creation_time=r[2],
created_date=created_date,
filename=r[3],
n_insert=r[4],
n_delete=r[5]
))
return uploads
def __init__( self, id ):
r = Database.executeRow('select created_by, creation_time, filename, n_insert, n_delete from elc_GetUploadDetails(%s)',id)
self._id = id
self._created_by = r[0]
self._creation_time = r[1]
<|fim▁hole|> self._n_delete = r[4]
@classmethod
def defaultNewFilename( cls, upload_date=None ):
if not isinstance(upload_date,datetime):
upload_date = datetime.now()
return 'sad_'+upload_date.strftime('%d%b%y')+'.sql'
def id( self ): return self._id
def created_by( self ): return self._created_by
def creation_time( self ): return self._creation_time
def filename( self ): return self._filename
def n_insert( self ): return self._n_insert
def n_delete( self ): return self._n_delete
def defaultFilename( self ):
return Upload.defaultNewFilename( self._creation_time )
def addJob( self, job ):
        if isinstance(job, int):
job = Job(job)
job.addToUpload( self )
job.save()
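    # Illustrative flow (hypothetical ids and paths, not from the original file):
    #     upload = Upload.CreateUpload()
    #     upload.addJob(1234)                      # accepts a Job or a job id
    #     upload.writeSql('/tmp/sad_01Jan70.sql')  # writes the .sql and companion .txt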
def writeSql( self, filename ):
sqlfile = open(filename,'w')
basename = os.path.splitext(os.path.basename(filename))[0]
txtfilename = os.path.splitext(filename)[0] + '.txt'
if txtfilename == filename:
txtfilename = txtfilename + '.txt'
txtfile = open(txtfilename,'w')
# Header
sqlfile.write("-- Bulk update of crs_street_address\n")
sqlfile.write("-- Upload id: %d\n" % (self._id,))
sqlfile.write("-- Created by: %s\n" % (self._created_by,))
sqlfile.write("-- Created on: %s\n" %
(self._creation_time.strftime('%d %B %Y at %H:%M'),))
sqlfile.write("\n")
# Insertions
sqlfile.write("\n")
nins = 0
for r in Database.execute('SELECT housenumber, range_low, range_high, status, rcl_id, rna_id, wkt, sufi from elc_UploadNewAddresses(%s)',self._id):
m = re.search(r"(\d+)(\.?\d*)\s+(\-\d+\.?\d*)",r[6])
wkt = '1 POINT(%d%s %s)'%(int(m.group(1))-160,m.group(2),m.group(3))
range_high = r[2] if r[2] != None else 'null'
if r[3] == "DELE": status = "HIST"
else: status = "CURR"
if r[3] == 'NEWA': sufi = 'null'
else: sufi = r[7]
unofficial_flag = "N"
sqlfile.write('''
INSERT INTO crs_street_address_stage(house_number, range_low, range_high, status, unofficial_flag, rcl_id, rna_id, shape, sufi) VALUES
('%s',%s,%s,'%s','%s',%d,%d,'%s', %s);''' % (r[0],r[1], range_high,status,unofficial_flag,r[4],r[5],wkt, sufi))
nins += 1
sqlfile.write("\n")
sqlfile.write("\n")
sqlfile.write(" EXECUTE PROCEDURE cp_cel_AddressStageUpdate();\n")
sqlfile.write("\n")
sqlfile.close()
txtfile.write('''
FTP the attached "%s" file to the production database server (crsprd1).
As the user "crsprd" run the script as follows:
sqf %s
The expected output is:
Database selected.
(constant)
Bulk insert of street addresses: id %d
1 row(s) retrieved.
1 row(s) inserted. ... repeated %d times
(constant)
Bulk update completed: id %d
1 row(s) retrieved.
Database closed.
''' % (basename,basename,self._id,nins,self._id))
txtfile.close()
Database.execute('elc_SetUploadFilename',self._id,basename)<|fim▁end|> | self._filename = r[2]
self._n_insert = r[3]
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/**
* Module dependencies.
*/
var express = require('express');
var routes = require('./routes');
var user = require('./routes/user');
var http = require('http');
var path = require('path');
var app = express();
// all environments
app.set('port', process.env.PORT || 3000);
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');
app.use(express.favicon());<|fim▁hole|>app.use(express.json());
app.use(express.urlencoded());
app.use(express.methodOverride());
app.use(app.router);
app.use(express.static(path.join(__dirname, 'public')));
// development only
if ('development' == app.get('env')) {
app.use(express.errorHandler());
}
app.get('/', routes.index);
app.get('/users', user.list);
var server = http.createServer(app);
var chatServer = require('./chat_server');
chatServer(server);
server.listen(app.get('port'), function () {
console.log('Express server listening on port ' + app.get('port'));
});<|fim▁end|> | app.use(express.logger('dev')); |
<|file_name|>help.py<|end_file_name|><|fim▁begin|># Author: Nick Raptis <[email protected]>
"""
Module for listing commands and help.
"""
from basemodule import BaseModule, BaseCommandContext
from alternatives import _
class HelpContext(BaseCommandContext):
def cmd_list(self, argument):
"""List commands"""
arg = argument.lower()
index = self.bot.help_index
public = "public commands -- %s" % " ".join(index['public'])
private = "private commands -- %s" % " ".join(index['private'])
if 'all' in arg or 'both' in arg:
output = "\n".join((public, private))
elif 'pub' in arg or self.target.startswith('#'):
output = public
elif 'priv' in arg or not self.target.startswith('#'):
output = private
else:
# we shouldn't be here
self.logger.error("cmd_list")
return
self.send(self.target, output)
def cmd_modules(self, argument):
"""List active modules"""
index = self.bot.help_index
output = "active modules -- %s" % " ".join(index['modules'].keys())
self.send(self.target, output)
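    # Assumed shape of self.bot.help_index, inferred from its use in this module:
    #     {'public': {cmd: {'summary': str}}, 'private': {cmd: {'summary': str}},
    #      'modules': {name: {'summary': str}}}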
def cmd_help(self, argument):
"""Get help on a command or module"""
arg = argument.lower()
index = self.bot.help_index
target = self.target
args = arg.split()
if not args:
s = "usage: help <command> [public|private] / help module <module>"
self.send(target, s)
elif args[0] == 'module':
args.pop(0)
if not args:
self.send(target, "usage: help module <module>")
else:
help_item = index['modules'].get(args[0])
if help_item:
self.send(target, help_item['summary'])
else:
self.send(target, _("No help for %s"), args[0])
else:
args.append("")
cmd = args.pop(0)
cmd_type = args.pop(0)
if 'pu' in cmd_type or self.target.startswith('#'):
cmd_type = 'public'
elif 'pr' in cmd_type or not self.target.startswith('#'):
cmd_type = 'private'
else:
# we shouldn't be here
self.logger.error("cmd_list")
return
help_item = index[cmd_type].get(cmd)<|fim▁hole|> self.send(target, _("No help for %s"), cmd)
class HelpModule(BaseModule):
context_class = HelpContext
module = HelpModule<|fim▁end|> | if help_item:
self.send(target, index[cmd_type][cmd]['summary'])
else: |
<|file_name|>validation.js<|end_file_name|><|fim▁begin|>define([
], function () {
'use strict';
return function (req, res, next) {
function filterParams(req, action) {
var paramsWhitelist = action.params,
whitelistParam,
paramValue,
type,
filteredParams = {};
// check all actions params<|fim▁hole|> if (paramsWhitelist.hasOwnProperty(whitelistParam)) {
type = '';
// get param from body or query
if (paramsWhitelist[whitelistParam].query === true) {
paramValue = req.query[whitelistParam];
} else {
paramValue = req.body[whitelistParam];
}
// if defined or not optional -> validate
if (paramValue !== undefined || !paramsWhitelist[whitelistParam].optional) {
// validate missing params
if (paramValue === undefined && !paramsWhitelist[whitelistParam].optional) { // necessary param missing
type = 'missing_parameter';
} else if (paramValue && paramValue.constructor !== paramsWhitelist[whitelistParam].type) { // validate param type
type = 'wrong_type';
} else if (paramsWhitelist[whitelistParam].hasOwnProperty('regex') && !paramsWhitelist[whitelistParam].regex.test(paramValue)) {
type = 'invalid_structure'; // validate param for custom regex
} else if (paramsWhitelist[whitelistParam].hasOwnProperty('validate') && !paramsWhitelist[whitelistParam].validate(paramValue)) {
type = 'custom_validation'; // validate param for custom validate function
}
// if error type is set -> throw error
if (type) {
throw {
error: type,
param: whitelistParam
};
}
// set validated param
filteredParams[whitelistParam] = paramValue;
}
}
}
return filteredParams;
}
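        // Illustrative whitelist shape (hypothetical parameter names):
        //     { limit: { type: Number, query: true, optional: true },
        //       name:  { type: String, regex: /^\w+$/ } }
        // `type` is compared against the value's constructor above.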
if (req.customData && req.customData.action) {
try {
req.customData.params = filterParams(req, req.customData.action);
} catch (e) {
return res.status(400).send(e);
}
}
next();
};
});<|fim▁end|> | for (whitelistParam in paramsWhitelist) { |
<|file_name|>AspireDbFilterConfig.java<|end_file_name|><|fim▁begin|>/*
* The aspiredb project
*
* Copyright (c) 2012 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package ubc.pavlab.aspiredb.shared.query;<|fim▁hole|>
import java.io.Serializable;
import org.directwebremoting.annotations.DataTransferObject;
@DataTransferObject(javascript = "AspireDbFilterConfig")
public abstract class AspireDbFilterConfig implements Serializable {
private static final long serialVersionUID = 2621587187020538685L;
}<|fim▁end|> | |
<|file_name|>arrayMap.js<|end_file_name|><|fim▁begin|>define([], function() {
/**
* A specialized version of `_.map` for arrays without support for callback
* shorthands or `this` binding.
*
* @private
* @param {Array} array The array to iterate over.
* @param {Function} iteratee The function invoked per iteration.
* @returns {Array} Returns the new mapped array.
*/
function arrayMap(array, iteratee) {
var index = -1,
length = array.length,
result = Array(length);
while (++index < length) {
result[index] = iteratee(array[index], index, array);
}<|fim▁hole|> }
return arrayMap;
});<|fim▁end|> | return result; |
<|file_name|>packetParser.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Welcome to Gobbler, the Scapy pcap parser and dump scripts
# Part of the sniffMyPackets suite http://www.sniffmypackets.net
# Written by @catalyst256 / [email protected]
import datetime
from layers.http import *
from layers.BadLayers import *
from auxtools import error_logging
import logging
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import *
from collections import OrderedDict
bind_layers(TCP, HTTP)
def rename_layer(x, n):
n = n.lower().replace(' ', '_').replace('-', '_').replace('.', '_') + '_'
return dict((n+k.lower(), f(v) if hasattr(v, 'keys') else v) for k, v in x.items())
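# Note: `f` above is not defined in this file; it is presumably provided by one
# of the star imports. Illustrative behaviour on a hypothetical field dict:
#     rename_layer({'Flags': 2, 'Window': 8192}, 'TCP')
#     -> {'tcp_flags': 2, 'tcp_window': 8192}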
def find_layers(pkts, pcap, pcap_id, streamid):
packet = OrderedDict()
count = 1
pcap_id = pcap_id.encode('utf-8')
streamid = streamid.encode('utf-8')
try:
for p in pkts:
header = {"Buffer": {"timestamp": datetime.datetime.fromtimestamp(p.time).strftime('%Y-%m-%d %H:%M:%S.%f'),
"packetnumber": count, "PCAP ID": pcap_id, "pcapfile": pcap, "StreamID": streamid}}
packet.update(header)<|fim▁hole|> counter = 0
while True:
layer = p.getlayer(counter)
                if layer is not None:
i = int(counter)
x = p[0][i].fields
t = exclude_layers(x, layer.name)
s = rename_layer(t, layer.name)
v = '{"' + layer.name.replace('.', '_') + '[' + str(i) + ']' + '":' + str(s) + '}'
s = eval(v)
try:
del s['HTTP[3]']
del s['HTTP[5]']
except KeyError:
pass
packet.update(s)
else:
break
counter += 1
count += 1
yield packet
packet.clear()
except Exception as e:
error_logging(str(e), 'PacketParser')
pass<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict'
// dependencies
var path = require('path'),
request = require('request'),
// container connections
ccs = [];
// function to get all containers
function GetAllContainers(host, func) {
request({
json: true,
method: 'GET',
uri: host + '/containers/json'
}, function (err, resp, body) {
var containers = [];
if (err) {
return func(err, containers);
}
<|fim▁hole|>
if (body.length <= 0) {
return func(new Error("You have no containers currently.", containers))
}
body.forEach(function(el) {
containers.push(el)
});
return func(err, containers);
});
};
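// Illustrative usage (the host URL is an assumption, e.g. a Docker remote API):
//     GetAllContainers('http://localhost:2375', function (err, containers) {
//         if (!err) console.log(containers.length + ' containers');
//     });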
function GetStats(host, containerID, statsCB) {
    // tear down any previous stats streams before opening a new one
    while (ccs.length > 0) {
        ccs.pop().destroy();
    }
ccs.push(request({
json: true,
method: 'GET',
uri: host + '/containers/' + containerID + '/stats'
})
.on('data', function(data){
statsCB(JSON.parse(data))
}))
};
exports.GetAllContainers = GetAllContainers;
exports.GetStats = GetStats;<|fim▁end|> | if (resp.statusCode != 200) {
return func(new Error("Status from server was: " + resp.statusCode), containers);
} |
<|file_name|>register.go<|end_file_name|><|fim▁begin|>package v1
import (
"github.com/nginxinc/kubernetes-ingress/pkg/apis/configuration"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
)
// SchemeGroupVersion is the group version used to register these objects.
var SchemeGroupVersion = schema.GroupVersion{Group: configuration.GroupName, Version: "v1"}
// Kind takes an unqualified kind and returns back a Group qualified GroupKind.
func Kind(kind string) schema.GroupKind {
return SchemeGroupVersion.WithKind(kind).GroupKind()
}
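// Illustrative result (an assumption based on the types): Kind("VirtualServer")
// yields the GroupKind "VirtualServer" qualified with configuration.GroupName.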
// Resource takes an unqualified resource and returns a Group qualified GroupResource.
func Resource(resource string) schema.GroupResource {<|fim▁hole|>var (
// SchemeBuilder builds a scheme
SchemeBuilder = runtime.NewSchemeBuilder(addKnownTypes)
// AddToScheme adds to a scheme
AddToScheme = SchemeBuilder.AddToScheme
)
// Adds the list of known types to Scheme.
func addKnownTypes(scheme *runtime.Scheme) error {
scheme.AddKnownTypes(SchemeGroupVersion,
&VirtualServer{},
&VirtualServerList{},
&VirtualServerRoute{},
&VirtualServerRouteList{},
&Policy{},
&PolicyList{},
)
metav1.AddToGroupVersion(scheme, SchemeGroupVersion)
return nil
}<|fim▁end|> | return SchemeGroupVersion.WithResource(resource).GroupResource()
}
|
<|file_name|>error_handler.go<|end_file_name|><|fim▁begin|>package transport
import (
"context"
"github.com/go-kit/kit/log"
)
// ErrorHandler receives a transport error to be processed for diagnostic purposes.
// Usually this means logging the error.
type ErrorHandler interface {
Handle(ctx context.Context, err error)
}
// LogErrorHandler is a transport error handler implementation which logs an error.
type LogErrorHandler struct {
logger log.Logger
}
func NewLogErrorHandler(logger log.Logger) *LogErrorHandler {
return &LogErrorHandler{
logger: logger,
}
}
func (h *LogErrorHandler) Handle(ctx context.Context, err error) {
h.logger.Log("err", err)
}
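// Illustrative usage (logger choice is an assumption):
//     h := NewLogErrorHandler(log.NewNopLogger())
//     h.Handle(context.Background(), err) // logs: err=<err>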
<|fim▁hole|>// ErrorHandler that calls f.
type ErrorHandlerFunc func(ctx context.Context, err error)
// Handle calls f(ctx, err).
func (f ErrorHandlerFunc) Handle(ctx context.Context, err error) {
f(ctx, err)
}<|fim▁end|> | // The ErrorHandlerFunc type is an adapter to allow the use of
// ordinary function as ErrorHandler. If f is a function
// with the appropriate signature, ErrorHandlerFunc(f) is a |
<|file_name|>environment.go<|end_file_name|><|fim▁begin|>package builder
import (
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"strings"
"sync"
"github.com/fatih/color"
)
//
// Description: Builder will build and run the three packages necessary to run the environment.
//
// Example:
// go run environment.go
//
// RunEnvironment starts everything
func RunEnvironment(installPath string, skipPackageBuild bool) {
c := color.New(color.FgRed)
packagesToBuild, err := getListOfPackagesToBuild(installPath)
if err != nil {
c.Println(err)
return
}
goPathSRC := fmt.Sprintf("%s\\src", os.Getenv("GOPATH"))
var wg sync.WaitGroup
// change directory then build then run!
for _, pkg := range packagesToBuild {
wg.Add(1)
packageDir := fmt.Sprintf("%s\\%s", goPathSRC, pkg)
if localErr := buildPackage(packageDir); localErr != nil {
c.Printf("Build:\t%s - Fail: %s\n", pkg, localErr)
break
} else {
fmt.Printf("Build:\t%s - Success\n", pkg)
}
go func(packageDir, pkg string) {
fmt.Printf("Run:\t%s\n", pkg)
if localErr := runProgram(packageDir); localErr != nil {
c.Printf("Fail:\t%s - %s\n", pkg, localErr)
}
wg.Done()
}(packageDir, pkg)
}
wg.Wait()
fmt.Println("Package Builder End")
}
func currentPath() string {
dir, err := os.Executable()
if err != nil {
log.Fatal(err)
}
return dir
}
func getListOfPackagesToBuild(installPath string) (packages []string, err error) {
pkgFilePath := installPath + "/packages_to_build.txt"
dat, err := ioutil.ReadFile(pkgFilePath)
if err != nil {<|fim▁hole|> return packages, err
}
fileContent := string(dat)
if len(fileContent) == 0 {
return packages, fmt.Errorf("file content is empty for %s", pkgFilePath)
}
    // TODO: builder, find a better way to split the file into lines
return strings.Split(fileContent, "\r\n"), nil
}
func buildPackage(path string) (err error) {
if err = os.Chdir(path); err != nil {
return err
}
if err = exec.Command("go", "build").Run(); err != nil {
return err
}
return nil
}
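// Note (assumption): `go build` names the binary after the package directory,
// which runProgram below relies on when it derives "<dir>\\<dir>.exe" from the
// path, e.g. a hypothetical C:\gopath\src\mytool builds mytool.exe.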
func runProgram(path string) (err error) {
elements := strings.Split(path, "\\")
executable := fmt.Sprintf("%s\\%s.exe", path, elements[len(elements)-1])
cmd := exec.Command(executable, "")
if err = cmd.Run(); err != nil {
return err
}
return nil
}<|fim▁end|> | |
<|file_name|>tweenjs-tests.ts<|end_file_name|><|fim▁begin|>/// <reference types="easeljs"/>
var target = new createjs.DisplayObject();
// source : http://www.createjs.com/Docs/TweenJS/modules/TweenJS.html
// Chainable modules :
target.alpha = 1;<|fim▁hole|> //Tween complete
}<|fim▁end|> | createjs.Tween.get(target).wait(500, false).to({ alpha: 0, visible: false }, 1000).call(onComplete);
function onComplete() { |
<|file_name|>webgl.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use euclid::default::{Rect, Size2D};
use ipc_channel::ipc::{IpcBytesReceiver, IpcBytesSender, IpcSharedMemory};
use pixels::PixelFormat;
use sparkle::gl;
use sparkle::gl::Gl;
use std::borrow::Cow;
use std::fmt;
use std::num::NonZeroU32;
use std::ops::Deref;
use webrender_api::{DocumentId, ImageKey, PipelineId};
use webvr_traits::WebVRPoseInformation;
/// Helper function that creates a WebGL channel (WebGLSender, WebGLReceiver) to be used in WebGLCommands.
pub use crate::webgl_channel::webgl_channel;
/// Entry point channel type used for sending WebGLMsg messages to the WebGL renderer.
pub use crate::webgl_channel::WebGLChan;
/// Entry point type used in a Script Pipeline to get the WebGLChan to be used in that thread.
pub use crate::webgl_channel::WebGLPipeline;
/// Receiver type used in WebGLCommands.
pub use crate::webgl_channel::WebGLReceiver;
/// Result type for send()/recv() calls in WebGLCommands.
pub use crate::webgl_channel::WebGLSendResult;
/// Sender type used in WebGLCommands.
pub use crate::webgl_channel::WebGLSender;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct WebGLCommandBacktrace {
#[cfg(feature = "webgl_backtrace")]
pub backtrace: String,
#[cfg(feature = "webgl_backtrace")]
pub js_backtrace: Option<String>,
}
/// WebGL Message API
#[derive(Debug, Deserialize, Serialize)]
pub enum WebGLMsg {
/// Creates a new WebGLContext.
CreateContext(
WebGLVersion,
Size2D<u32>,
GLContextAttributes,
        WebGLSender<Result<WebGLCreateContextResult, String>>,
),
/// Resizes a WebGLContext.
ResizeContext(WebGLContextId, Size2D<u32>, WebGLSender<Result<(), String>>),
/// Drops a WebGLContext.
RemoveContext(WebGLContextId),
/// Runs a WebGLCommand in a specific WebGLContext.
WebGLCommand(WebGLContextId, WebGLCommand, WebGLCommandBacktrace),
/// Runs a WebVRCommand in a specific WebGLContext.
WebVRCommand(WebGLContextId, WebVRCommand),
/// Locks a specific WebGLContext. Lock messages are used for a correct synchronization
/// with WebRender external image API.
    /// WR locks an external texture when it wants to use the shared texture contents.
/// The WR client should not change the shared texture content until the Unlock call.
/// Currently OpenGL Sync Objects are used to implement the synchronization mechanism.
Lock(WebGLContextId, WebGLSender<(u32, Size2D<i32>, usize)>),
/// Unlocks a specific WebGLContext. Unlock messages are used for a correct synchronization
/// with WebRender external image API.
/// The WR unlocks a context when it finished reading the shared texture contents.
/// Unlock messages are always sent after a Lock message.
Unlock(WebGLContextId),
/// Creates or updates the image keys required for WebRender.
UpdateWebRenderImage(WebGLContextId, WebGLSender<ImageKey>),
/// Commands used for the DOMToTexture feature.
DOMToTextureCommand(DOMToTextureCommand),
/// Frees all resources and closes the thread.
Exit,
}
#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
pub enum GlType {
Gl,
Gles,
}
/// Contains the WebGLCommand sender and information about a WebGLContext
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct WebGLCreateContextResult {
/// Sender instance to send commands to the specific WebGLContext
pub sender: WebGLMsgSender,
/// Information about the internal GL Context.
pub limits: GLLimits,
/// How the WebGLContext is shared with WebRender.
pub share_mode: WebGLContextShareMode,
/// The GLSL version supported by the context.
pub glsl_version: WebGLSLVersion,
/// The GL API used by the context.
pub api_type: GlType,
/// The format for creating new offscreen framebuffers for this context.
pub framebuffer_format: GLFormats,
}
#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, Serialize)]
pub enum WebGLContextShareMode {
/// Fast: a shared texture_id is used in WebRender.
SharedTexture,
/// Slow: glReadPixels is used to send pixels to WebRender each frame.
Readback,
}
/// Defines the WebGL version
#[derive(Clone, Copy, Debug, Deserialize, Eq, MallocSizeOf, PartialEq, Serialize)]
pub enum WebGLVersion {
/// https://www.khronos.org/registry/webgl/specs/1.0.2/
/// Conforms closely to the OpenGL ES 2.0 API
WebGL1,
/// https://www.khronos.org/registry/webgl/specs/latest/2.0/
/// Conforms closely to the OpenGL ES 3.0 API
WebGL2,
}
/// Defines the GLSL version supported by the WebGL backend contexts.
#[derive(Clone, Copy, Debug, Deserialize, Eq, MallocSizeOf, PartialEq, Serialize)]
pub struct WebGLSLVersion {
/// Major GLSL version
pub major: u32,
/// Minor GLSL version
pub minor: u32,
}
/// Helper struct to send WebGLCommands to a specific WebGLContext.
#[derive(Clone, Debug, Deserialize, MallocSizeOf, Serialize)]
pub struct WebGLMsgSender {
ctx_id: WebGLContextId,
#[ignore_malloc_size_of = "channels are hard"]
sender: WebGLChan,
}
impl WebGLMsgSender {
pub fn new(id: WebGLContextId, sender: WebGLChan) -> Self {
WebGLMsgSender {
ctx_id: id,
sender: sender,
}
}
/// Returns the WebGLContextId associated to this sender
pub fn context_id(&self) -> WebGLContextId {
self.ctx_id
}
/// Send a WebGLCommand message
#[inline]
pub fn send(&self, command: WebGLCommand, backtrace: WebGLCommandBacktrace) -> WebGLSendResult {
self.sender
.send(WebGLMsg::WebGLCommand(self.ctx_id, command, backtrace))
}
/// Send a WebVRCommand message
#[inline]
pub fn send_vr(&self, command: WebVRCommand) -> WebGLSendResult {
self.sender
.send(WebGLMsg::WebVRCommand(self.ctx_id, command))
}
/// Send a resize message
#[inline]
pub fn send_resize(
&self,
size: Size2D<u32>,
sender: WebGLSender<Result<(), String>>,
) -> WebGLSendResult {
self.sender
.send(WebGLMsg::ResizeContext(self.ctx_id, size, sender))
}
#[inline]
pub fn send_remove(&self) -> WebGLSendResult {
self.sender.send(WebGLMsg::RemoveContext(self.ctx_id))
}
#[inline]
pub fn send_update_wr_image(&self, sender: WebGLSender<ImageKey>) -> WebGLSendResult {
self.sender
.send(WebGLMsg::UpdateWebRenderImage(self.ctx_id, sender))
}
pub fn send_dom_to_texture(&self, command: DOMToTextureCommand) -> WebGLSendResult {
self.sender.send(WebGLMsg::DOMToTextureCommand(command))
}
}
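// Illustrative send through the helper above (a sketch; the channel setup and
// the backtrace value are assumptions):
//     let (sender, ctx_id) = /* obtained from a CreateContext reply */;
//     let msg_sender = WebGLMsgSender::new(ctx_id, sender);
//     msg_sender.send(WebGLCommand::Flush, backtrace)?;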
#[derive(Deserialize, Serialize)]
pub struct TruncatedDebug<T>(T);
impl<T> From<T> for TruncatedDebug<T> {
fn from(v: T) -> TruncatedDebug<T> {
TruncatedDebug(v)
}
}
impl<T: fmt::Debug> fmt::Debug for TruncatedDebug<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut s = format!("{:?}", self.0);
if s.len() > 20 {
s.truncate(20);
s.push_str("...");
}
write!(f, "{}", s)
}
}
impl<T> Deref for TruncatedDebug<T> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
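// Sketch of the truncation behaviour (hypothetical value): formatting a
// TruncatedDebug wrapping a large Vec prints at most 20 characters plus "...",
// which keeps WebGLCommand debug logs readable for big texture payloads.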
/// WebGL Commands for a specific WebGLContext
#[derive(Debug, Deserialize, Serialize)]
pub enum WebGLCommand {
GetContextAttributes(WebGLSender<GLContextAttributes>),
ActiveTexture(u32),
BlendColor(f32, f32, f32, f32),
BlendEquation(u32),
BlendEquationSeparate(u32, u32),
BlendFunc(u32, u32),
BlendFuncSeparate(u32, u32, u32, u32),
AttachShader(WebGLProgramId, WebGLShaderId),
DetachShader(WebGLProgramId, WebGLShaderId),
BindAttribLocation(WebGLProgramId, u32, String),
BufferData(u32, IpcBytesReceiver, u32),
BufferSubData(u32, isize, IpcBytesReceiver),
Clear(u32),
ClearColor(f32, f32, f32, f32),
ClearDepth(f32),
ClearStencil(i32),
ColorMask(bool, bool, bool, bool),
CullFace(u32),
FrontFace(u32),
DepthFunc(u32),
DepthMask(bool),
DepthRange(f32, f32),
Enable(u32),
Disable(u32),
CompileShader(WebGLShaderId, String),
CopyTexImage2D(u32, i32, u32, i32, i32, i32, i32, i32),
CopyTexSubImage2D(u32, i32, i32, i32, i32, i32, i32, i32),
CreateBuffer(WebGLSender<Option<WebGLBufferId>>),
CreateFramebuffer(WebGLSender<Option<WebGLFramebufferId>>),
CreateRenderbuffer(WebGLSender<Option<WebGLRenderbufferId>>),
CreateTexture(WebGLSender<Option<WebGLTextureId>>),
CreateProgram(WebGLSender<Option<WebGLProgramId>>),
CreateShader(u32, WebGLSender<Option<WebGLShaderId>>),
DeleteBuffer(WebGLBufferId),
DeleteFramebuffer(WebGLFramebufferId),
DeleteRenderbuffer(WebGLRenderbufferId),
DeleteTexture(WebGLTextureId),
DeleteProgram(WebGLProgramId),
DeleteShader(WebGLShaderId),
BindBuffer(u32, Option<WebGLBufferId>),
BindFramebuffer(u32, WebGLFramebufferBindingRequest),
BindRenderbuffer(u32, Option<WebGLRenderbufferId>),
BindTexture(u32, Option<WebGLTextureId>),
DisableVertexAttribArray(u32),
EnableVertexAttribArray(u32),
FramebufferRenderbuffer(u32, u32, u32, Option<WebGLRenderbufferId>),
FramebufferTexture2D(u32, u32, u32, Option<WebGLTextureId>, i32),
GetExtensions(WebGLSender<String>),
GetShaderPrecisionFormat(u32, u32, WebGLSender<(i32, i32, i32)>),
GetUniformLocation(WebGLProgramId, String, WebGLSender<i32>),
GetShaderInfoLog(WebGLShaderId, WebGLSender<String>),
GetProgramInfoLog(WebGLProgramId, WebGLSender<String>),
GetFramebufferAttachmentParameter(u32, u32, u32, WebGLSender<i32>),
GetRenderbufferParameter(u32, u32, WebGLSender<i32>),
PolygonOffset(f32, f32),
RenderbufferStorage(u32, u32, i32, i32),
ReadPixels(Rect<u32>, u32, u32, IpcBytesSender),
SampleCoverage(f32, bool),
Scissor(i32, i32, u32, u32),
StencilFunc(u32, i32, u32),
StencilFuncSeparate(u32, u32, i32, u32),
StencilMask(u32),
StencilMaskSeparate(u32, u32),
StencilOp(u32, u32, u32),
StencilOpSeparate(u32, u32, u32, u32),
Hint(u32, u32),
LineWidth(f32),
PixelStorei(u32, i32),
LinkProgram(WebGLProgramId, WebGLSender<ProgramLinkInfo>),
Uniform1f(i32, f32),
Uniform1fv(i32, Vec<f32>),
Uniform1i(i32, i32),
Uniform1iv(i32, Vec<i32>),
Uniform2f(i32, f32, f32),
Uniform2fv(i32, Vec<f32>),
Uniform2i(i32, i32, i32),
Uniform2iv(i32, Vec<i32>),
Uniform3f(i32, f32, f32, f32),
Uniform3fv(i32, Vec<f32>),
Uniform3i(i32, i32, i32, i32),
Uniform3iv(i32, Vec<i32>),
Uniform4f(i32, f32, f32, f32, f32),
Uniform4fv(i32, Vec<f32>),
Uniform4i(i32, i32, i32, i32, i32),
Uniform4iv(i32, Vec<i32>),
UniformMatrix2fv(i32, Vec<f32>),
UniformMatrix3fv(i32, Vec<f32>),
UniformMatrix4fv(i32, Vec<f32>),
UseProgram(Option<WebGLProgramId>),
ValidateProgram(WebGLProgramId),
VertexAttrib(u32, f32, f32, f32, f32),
VertexAttribPointer(u32, i32, u32, bool, i32, u32),
VertexAttribPointer2f(u32, i32, bool, i32, u32),
SetViewport(i32, i32, i32, i32),
TexImage2D {
target: u32,
level: u32,
// FIXME(nox): This should be computed on the WebGL thread.
effective_internal_format: u32,
size: Size2D<u32>,
format: TexFormat,
data_type: TexDataType,
// FIXME(nox): This should be computed on the WebGL thread.
effective_data_type: u32,
unpacking_alignment: u32,
alpha_treatment: Option<AlphaTreatment>,
y_axis_treatment: YAxisTreatment,
pixel_format: Option<PixelFormat>,
data: TruncatedDebug<IpcSharedMemory>,
},
TexSubImage2D {
target: u32,
level: u32,
xoffset: i32,
yoffset: i32,
size: Size2D<u32>,
format: TexFormat,
data_type: TexDataType,
// FIXME(nox): This should be computed on the WebGL thread.
effective_data_type: u32,
unpacking_alignment: u32,
alpha_treatment: Option<AlphaTreatment>,
y_axis_treatment: YAxisTreatment,
pixel_format: Option<PixelFormat>,
data: TruncatedDebug<IpcSharedMemory>,
},
CompressedTexImage2D {
target: u32,
level: u32,
internal_format: u32,
size: Size2D<u32>,
data: TruncatedDebug<IpcSharedMemory>,
},
CompressedTexSubImage2D {
target: u32,
level: i32,
xoffset: i32,
yoffset: i32,
size: Size2D<u32>,
format: u32,
data: TruncatedDebug<IpcSharedMemory>,
},
DrawingBufferWidth(WebGLSender<i32>),
DrawingBufferHeight(WebGLSender<i32>),
Finish(WebGLSender<()>),
Flush,
GenerateMipmap(u32),
CreateVertexArray(WebGLSender<Option<WebGLVertexArrayId>>),
DeleteVertexArray(WebGLVertexArrayId),
BindVertexArray(Option<WebGLVertexArrayId>),
GetParameterBool(ParameterBool, WebGLSender<bool>),
GetParameterBool4(ParameterBool4, WebGLSender<[bool; 4]>),
GetParameterInt(ParameterInt, WebGLSender<i32>),
GetParameterInt2(ParameterInt2, WebGLSender<[i32; 2]>),
GetParameterInt4(ParameterInt4, WebGLSender<[i32; 4]>),
GetParameterFloat(ParameterFloat, WebGLSender<f32>),
GetParameterFloat2(ParameterFloat2, WebGLSender<[f32; 2]>),
GetParameterFloat4(ParameterFloat4, WebGLSender<[f32; 4]>),
GetProgramValidateStatus(WebGLProgramId, WebGLSender<bool>),
GetProgramActiveUniforms(WebGLProgramId, WebGLSender<i32>),
GetCurrentVertexAttrib(u32, WebGLSender<[f32; 4]>),
GetTexParameterFloat(u32, TexParameterFloat, WebGLSender<f32>),
GetTexParameterInt(u32, TexParameterInt, WebGLSender<i32>),
TexParameteri(u32, u32, i32),
TexParameterf(u32, u32, f32),
DrawArrays {
mode: u32,
first: i32,
count: i32,
},
DrawArraysInstanced {
mode: u32,
first: i32,
count: i32,
primcount: i32,
},
DrawElements {
mode: u32,
count: i32,
type_: u32,
offset: u32,
},
DrawElementsInstanced {
mode: u32,
count: i32,
type_: u32,
offset: u32,
primcount: i32,
},
VertexAttribDivisor {
index: u32,
divisor: u32,
},
GetUniformBool(WebGLProgramId, i32, WebGLSender<bool>),
GetUniformBool2(WebGLProgramId, i32, WebGLSender<[bool; 2]>),
GetUniformBool3(WebGLProgramId, i32, WebGLSender<[bool; 3]>),
GetUniformBool4(WebGLProgramId, i32, WebGLSender<[bool; 4]>),
GetUniformInt(WebGLProgramId, i32, WebGLSender<i32>),
GetUniformInt2(WebGLProgramId, i32, WebGLSender<[i32; 2]>),
GetUniformInt3(WebGLProgramId, i32, WebGLSender<[i32; 3]>),
GetUniformInt4(WebGLProgramId, i32, WebGLSender<[i32; 4]>),
GetUniformFloat(WebGLProgramId, i32, WebGLSender<f32>),
GetUniformFloat2(WebGLProgramId, i32, WebGLSender<[f32; 2]>),
GetUniformFloat3(WebGLProgramId, i32, WebGLSender<[f32; 3]>),
GetUniformFloat4(WebGLProgramId, i32, WebGLSender<[f32; 4]>),
GetUniformFloat9(WebGLProgramId, i32, WebGLSender<[f32; 9]>),
GetUniformFloat16(WebGLProgramId, i32, WebGLSender<[f32; 16]>),
InitializeFramebuffer {
color: bool,
depth: bool,
stencil: bool,
},
}
macro_rules! define_resource_id {
($name:ident) => {
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub struct $name(NonZeroU32);
impl $name {
#[allow(unsafe_code)]
#[inline]
pub unsafe fn new(id: u32) -> Self {
$name(NonZeroU32::new_unchecked(id))
}
#[inline]
pub fn get(self) -> u32 {
self.0.get()
}
}
#[allow(unsafe_code)]
impl<'de> ::serde::Deserialize<'de> for $name {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: ::serde::Deserializer<'de>,
{
let id = u32::deserialize(deserializer)?;
if id == 0 {
Err(::serde::de::Error::custom("expected a non-zero value"))
} else {
Ok(unsafe { $name::new(id) })
}
}
}
impl ::serde::Serialize for $name {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ::serde::Serializer,
{
self.get().serialize(serializer)
}
}
impl ::std::fmt::Debug for $name {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
fmt.debug_tuple(stringify!($name))
.field(&self.get())
.finish()
}
}
impl ::std::fmt::Display for $name {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
write!(fmt, "{}", self.get())
}
}
impl ::malloc_size_of::MallocSizeOf for $name {
fn size_of(&self, _ops: &mut ::malloc_size_of::MallocSizeOfOps) -> usize {
0
}
}
};
}
define_resource_id!(WebGLBufferId);
define_resource_id!(WebGLFramebufferId);
define_resource_id!(WebGLRenderbufferId);
define_resource_id!(WebGLTextureId);
define_resource_id!(WebGLProgramId);
define_resource_id!(WebGLShaderId);
define_resource_id!(WebGLVertexArrayId);
#[derive(
Clone, Copy, Debug, Deserialize, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd, Serialize,
)]
pub struct WebGLContextId(pub usize);
#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)]
pub enum WebGLError {
InvalidEnum,
InvalidFramebufferOperation,
InvalidOperation,
InvalidValue,
OutOfMemory,
ContextLost,
}
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
pub enum WebGLFramebufferBindingRequest {
Explicit(WebGLFramebufferId),
Default,
}
pub type WebGLResult<T> = Result<T, WebGLError>;
pub type WebVRDeviceId = u32;
// WebVR commands that must be called in the WebGL render thread.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum WebVRCommand {
/// Start presenting to a VR device.
Create(WebVRDeviceId),
/// Synchronize the pose information to be used in the frame.
SyncPoses(
WebVRDeviceId,
// near
f64,
// far
f64,
// sync gamepads too
bool,
WebGLSender<Result<WebVRPoseInformation, ()>>,
),
/// Submit the frame to a VR device using the specified texture coordinates.
SubmitFrame(WebVRDeviceId, [f32; 4], [f32; 4]),
/// Stop presenting to a VR device
Release(WebVRDeviceId),
}
// Trait object that handles WebVR commands.
// Receives the texture id and size associated to the WebGLContext.
pub trait WebVRRenderHandler: Send {
fn handle(&mut self, gl: &Gl, command: WebVRCommand, texture: Option<(u32, Size2D<i32>)>);
}
/// WebGL commands required to implement DOMToTexture feature.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum DOMToTextureCommand {
/// Attaches a HTMLIFrameElement to a WebGLTexture.
Attach(
WebGLContextId,
WebGLTextureId,
DocumentId,
PipelineId,
Size2D<i32>,
),
/// Releases the HTMLIFrameElement to WebGLTexture attachment.
Detach(WebGLTextureId),
/// Lock message used for a correct synchronization with WebRender GL flow.
Lock(PipelineId, usize, WebGLSender<Option<(u32, Size2D<i32>)>>),
}
/// Information about a WebGL program linking operation.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ProgramLinkInfo {
/// Whether the program was linked successfully.
pub linked: bool,
/// The list of active attributes.
pub active_attribs: Box<[ActiveAttribInfo]>,
/// The list of active uniforms.
pub active_uniforms: Box<[ActiveUniformInfo]>,
}
/// Description of a single active attribute.
#[derive(Clone, Debug, Deserialize, MallocSizeOf, Serialize)]
pub struct ActiveAttribInfo {
/// The name of the attribute.
pub name: String,
/// The size of the attribute.
pub size: i32,
/// The type of the attribute.
pub type_: u32,
/// The location of the attribute.
pub location: i32,
}
/// Description of a single active uniform.
#[derive(Clone, Debug, Deserialize, MallocSizeOf, Serialize)]
pub struct ActiveUniformInfo {
/// The base name of the uniform.
pub base_name: Box<str>,
/// The size of the uniform, if it is an array.
pub size: Option<i32>,
/// The type of the uniform.
pub type_: u32,
}
impl ActiveUniformInfo {
pub fn name(&self) -> Cow<str> {
if self.size.is_some() {
let mut name = String::from(&*self.base_name);
name.push_str("[0]");
Cow::Owned(name)
} else {
Cow::Borrowed(&self.base_name)
}
}
}
macro_rules! parameters {
($name:ident { $(
$variant:ident($kind:ident { $(
$param:ident = gl::$value:ident,
)+ }),
)+ }) => {
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
pub enum $name { $(
$variant($kind),
)+}
$(
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
#[repr(u32)]
pub enum $kind { $(
$param = gl::$value,
)+}
)+
impl $name {
pub fn from_u32(value: u32) -> WebGLResult<Self> {
match value {
$($(gl::$value => Ok($name::$variant($kind::$param)),)+)+
_ => Err(WebGLError::InvalidEnum)
}
}
}
}
}
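// Illustrative mapping produced by the macro (an assumption based on its body):
//     Parameter::from_u32(gl::DEPTH_FUNC)
//         == Ok(Parameter::Int(ParameterInt::DepthFunc))
// and any GLenum not listed below yields Err(WebGLError::InvalidEnum).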
parameters! {
Parameter {
Bool(ParameterBool {
DepthWritemask = gl::DEPTH_WRITEMASK,
SampleCoverageInvert = gl::SAMPLE_COVERAGE_INVERT,
}),
Bool4(ParameterBool4 {
ColorWritemask = gl::COLOR_WRITEMASK,
}),
Int(ParameterInt {
ActiveTexture = gl::ACTIVE_TEXTURE,
AlphaBits = gl::ALPHA_BITS,
BlendDstAlpha = gl::BLEND_DST_ALPHA,
BlendDstRgb = gl::BLEND_DST_RGB,
BlendEquationAlpha = gl::BLEND_EQUATION_ALPHA,
BlendEquationRgb = gl::BLEND_EQUATION_RGB,
BlendSrcAlpha = gl::BLEND_SRC_ALPHA,
BlendSrcRgb = gl::BLEND_SRC_RGB,
BlueBits = gl::BLUE_BITS,
CullFaceMode = gl::CULL_FACE_MODE,
DepthBits = gl::DEPTH_BITS,
DepthFunc = gl::DEPTH_FUNC,
FragmentShaderDerivativeHint = gl::FRAGMENT_SHADER_DERIVATIVE_HINT,
FrontFace = gl::FRONT_FACE,
GenerateMipmapHint = gl::GENERATE_MIPMAP_HINT,
GreenBits = gl::GREEN_BITS,
RedBits = gl::RED_BITS,
SampleBuffers = gl::SAMPLE_BUFFERS,
Samples = gl::SAMPLES,
StencilBackFail = gl::STENCIL_BACK_FAIL,
StencilBackFunc = gl::STENCIL_BACK_FUNC,
StencilBackPassDepthFail = gl::STENCIL_BACK_PASS_DEPTH_FAIL,<|fim▁hole|> StencilBackRef = gl::STENCIL_BACK_REF,
StencilBackValueMask = gl::STENCIL_BACK_VALUE_MASK,
StencilBackWritemask = gl::STENCIL_BACK_WRITEMASK,
StencilBits = gl::STENCIL_BITS,
StencilClearValue = gl::STENCIL_CLEAR_VALUE,
StencilFail = gl::STENCIL_FAIL,
StencilFunc = gl::STENCIL_FUNC,
StencilPassDepthFail = gl::STENCIL_PASS_DEPTH_FAIL,
StencilPassDepthPass = gl::STENCIL_PASS_DEPTH_PASS,
StencilRef = gl::STENCIL_REF,
StencilValueMask = gl::STENCIL_VALUE_MASK,
StencilWritemask = gl::STENCIL_WRITEMASK,
SubpixelBits = gl::SUBPIXEL_BITS,
}),
Int2(ParameterInt2 {
MaxViewportDims = gl::MAX_VIEWPORT_DIMS,
}),
Int4(ParameterInt4 {
ScissorBox = gl::SCISSOR_BOX,
Viewport = gl::VIEWPORT,
}),
Float(ParameterFloat {
DepthClearValue = gl::DEPTH_CLEAR_VALUE,
LineWidth = gl::LINE_WIDTH,
MaxTextureMaxAnisotropyExt = gl::MAX_TEXTURE_MAX_ANISOTROPY_EXT,
PolygonOffsetFactor = gl::POLYGON_OFFSET_FACTOR,
PolygonOffsetUnits = gl::POLYGON_OFFSET_UNITS,
SampleCoverageValue = gl::SAMPLE_COVERAGE_VALUE,
}),
Float2(ParameterFloat2 {
AliasedPointSizeRange = gl::ALIASED_POINT_SIZE_RANGE,
AliasedLineWidthRange = gl::ALIASED_LINE_WIDTH_RANGE,
DepthRange = gl::DEPTH_RANGE,
}),
Float4(ParameterFloat4 {
BlendColor = gl::BLEND_COLOR,
ColorClearValue = gl::COLOR_CLEAR_VALUE,
}),
}
}
parameters! {
TexParameter {
Float(TexParameterFloat {
TextureMaxAnisotropyExt = gl::TEXTURE_MAX_ANISOTROPY_EXT,
}),
Int(TexParameterInt {
TextureWrapS = gl::TEXTURE_WRAP_S,
TextureWrapT = gl::TEXTURE_WRAP_T,
}),
}
}
#[macro_export]
macro_rules! gl_enums {
($(pub enum $name:ident { $($variant:ident = $mod:ident::$constant:ident,)+ })*) => {
$(
#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, MallocSizeOf)]
#[derive(PartialEq, Serialize)]
#[repr(u32)]
pub enum $name { $($variant = $mod::$constant,)+ }
impl $name {
pub fn from_gl_constant(constant: u32) -> Option<Self> {
Some(match constant {
$($mod::$constant => $name::$variant, )+
_ => return None,
})
}
#[inline]
pub fn as_gl_constant(&self) -> u32 {
*self as u32
}
}
)*
}
}
// FIXME: These should come from sparkle
mod gl_ext_constants {
use sparkle::gl::types::GLenum;
pub const COMPRESSED_RGB_S3TC_DXT1_EXT: GLenum = 0x83F0;
pub const COMPRESSED_RGBA_S3TC_DXT1_EXT: GLenum = 0x83F1;
pub const COMPRESSED_RGBA_S3TC_DXT3_EXT: GLenum = 0x83F2;
pub const COMPRESSED_RGBA_S3TC_DXT5_EXT: GLenum = 0x83F3;
pub const COMPRESSED_RGB_ETC1_WEBGL: GLenum = 0x8D64;
pub static COMPRESSIONS: &'static [GLenum] = &[
COMPRESSED_RGB_S3TC_DXT1_EXT,
COMPRESSED_RGBA_S3TC_DXT1_EXT,
COMPRESSED_RGBA_S3TC_DXT3_EXT,
COMPRESSED_RGBA_S3TC_DXT5_EXT,
COMPRESSED_RGB_ETC1_WEBGL,
];
}
gl_enums! {
pub enum TexFormat {
DepthComponent = gl::DEPTH_COMPONENT,
Alpha = gl::ALPHA,
RGB = gl::RGB,
RGBA = gl::RGBA,
Luminance = gl::LUMINANCE,
LuminanceAlpha = gl::LUMINANCE_ALPHA,
CompressedRgbS3tcDxt1 = gl_ext_constants::COMPRESSED_RGB_S3TC_DXT1_EXT,
CompressedRgbaS3tcDxt1 = gl_ext_constants::COMPRESSED_RGBA_S3TC_DXT1_EXT,
CompressedRgbaS3tcDxt3 = gl_ext_constants::COMPRESSED_RGBA_S3TC_DXT3_EXT,
CompressedRgbaS3tcDxt5 = gl_ext_constants::COMPRESSED_RGBA_S3TC_DXT5_EXT,
CompressedRgbEtc1 = gl_ext_constants::COMPRESSED_RGB_ETC1_WEBGL,
}
pub enum TexDataType {
UnsignedByte = gl::UNSIGNED_BYTE,
UnsignedShort4444 = gl::UNSIGNED_SHORT_4_4_4_4,
UnsignedShort5551 = gl::UNSIGNED_SHORT_5_5_5_1,
UnsignedShort565 = gl::UNSIGNED_SHORT_5_6_5,
Float = gl::FLOAT,
HalfFloat = gl::HALF_FLOAT_OES,
}
}
impl TexFormat {
    /// Returns how many components this format needs. For example, RGBA
/// needs 4 components, while RGB requires 3.
pub fn components(&self) -> u32 {
match *self {
TexFormat::DepthComponent => 1,
TexFormat::Alpha => 1,
TexFormat::Luminance => 1,
TexFormat::LuminanceAlpha => 2,
TexFormat::RGB => 3,
TexFormat::RGBA => 4,
_ => 1,
}
}
/// Returns whether this format is a known texture compression format.
pub fn is_compressed(&self) -> bool {
gl_ext_constants::COMPRESSIONS.contains(&self.as_gl_constant())
}
}
impl TexDataType {
/// Returns the size in bytes of each element of data.
pub fn element_size(&self) -> u32 {
use self::*;
match *self {
TexDataType::UnsignedByte => 1,
TexDataType::UnsignedShort4444 |
TexDataType::UnsignedShort5551 |
TexDataType::UnsignedShort565 => 2,
TexDataType::Float => 4,
TexDataType::HalfFloat => 2,
}
}
/// Returns how many components a single element may hold. For example, a
/// UnsignedShort4444 holds four components, each with 4 bits of data.
pub fn components_per_element(&self) -> u32 {
match *self {
TexDataType::UnsignedByte => 1,
TexDataType::UnsignedShort565 => 3,
TexDataType::UnsignedShort5551 => 4,
TexDataType::UnsignedShort4444 => 4,
TexDataType::Float => 1,
TexDataType::HalfFloat => 1,
}
}
}
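// Illustrative relationship (an inference from the helpers above): the byte
// width of one pixel is components() * element_size() / components_per_element(),
// e.g. RGBA + UnsignedByte = 4 * 1 / 1 = 4 bytes, while RGBA + UnsignedShort4444
// packs all four components into one 2-byte element.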
#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, MallocSizeOf, PartialEq, Serialize)]
pub enum AlphaTreatment {
Premultiply,
Unmultiply,
}
#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, MallocSizeOf, PartialEq, Serialize)]
pub enum YAxisTreatment {
AsIs,
Flipped,
}
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
pub struct GLContextAttributes {
pub alpha: bool,
pub depth: bool,
pub stencil: bool,
pub antialias: bool,
pub premultiplied_alpha: bool,
pub preserve_drawing_buffer: bool,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct GLLimits {
pub max_vertex_attribs: u32,
pub max_tex_size: u32,
pub max_cube_map_tex_size: u32,
pub max_combined_texture_image_units: u32,
pub max_fragment_uniform_vectors: u32,
pub max_renderbuffer_size: u32,
pub max_texture_image_units: u32,
pub max_varying_vectors: u32,
pub max_vertex_texture_image_units: u32,
pub max_vertex_uniform_vectors: u32,
}
#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, Serialize)]
pub struct GLFormats {
pub texture_format: u32,
pub texture_type: u32,
}<|fim▁end|> | StencilBackPassDepthPass = gl::STENCIL_BACK_PASS_DEPTH_PASS, |
<|file_name|>estimation_lccm_specification_ub91to95.py<|end_file_name|><|fim▁begin|># Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
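# Structure sketch (an inference from the entries below): top-level keys are
# "from" land-cover type ids, "equation_ids" lists the "to" ids, and each
# variable maps to a tuple of coefficient names aligned with equation_ids,
# where 0 means the variable is excluded from that equation.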
specification = {}
specification = {
2:
{
"equation_ids":(1,2),
"constant":(0, "act_2_2"),
"blmz":("blmz_2_1", 0),
"cd1":("cd1_2_1", 0),
"dag":("dag_2_1", 0),
"dprd":("dprd_2_1", 0),
"hai":("hai_2_1", 0),
# "pcd":("pcd_2_1", 0), - Jeff removed all PCD on 21Feb 2006
"phu":("phu_2_1", 0),
"pmu":("pmu_2_1", 0),
"tiv":("tiv_2_1", 0),
},
3:
{
"equation_ids":(1,2,3),
"constant":(0,0,"act_3_3"),
"blmz":("blmz_3_1",0, 0),
"c750":("c750_3_1","c750_3_2", 0),
"dloc":("dloc_3_1","dloc_3_2", 0),
"dnlr":(0,"dnlr_3_2", 0),
"dprd":("dprd_3_1",0, 0),
"dpub":(0,"dpub_3_2", 0),
"dres":("dres_3_1","dres_3_2", 0),
"dtim":("dtim_3_1","dtim_3_2", 0),
"gmps":("gmps_3_1",0, 0),
"h450":(0,"h450_3_2", 0),
"mmps":("mmps_3_1",0, 0),
# "pcd":("pcd_3_1","pcd_3_2", 0),
"pfld":("pfld_3_1",0, 0),
"phu":("phu_3_1","phu_3_2", 0),
"pmf":("pmf_3_1","pmf_3_2", 0),
"pmu":("pmu_3_1","pmu_3_2", 0),
"pslp":("pslp_3_1","pslp_3_2", 0),
"pwa":("pwa_3_1",0, 0),
"shei":("shei_3_1",0, 0),
"tiv":("tiv_3_1",0, 0),
"ugl":("ugl_3_1",0, 0),
},
#4:
# {
# "equation_ids":(1,2,3), # note: this is the to_id's
# "constant":(0, "act_4_2", "act_4_3"), #there is no constant term in the equation for to_id 1
# "aai":(0, "aai_4_2","aai_4_3"),
# "amps":(0, "amps_4_2","amps_4_3"),
# "blmz":(0, "blmz_4_2","blmz_4_3"),
# "c450":(0, "c450_4_2","c450_4_3"),
# "c750":(0, "c750_4_2","c750_4_3"),
# "cd1":(0, "cd1_4_2","cd1_4_3"),
# "crit":(0, "crit_4_2","crit_4_3"),
# "dag":(0, "dag_4_2","dag_4_3"),
# "dc":(0, "dc_4_2","dc_4_3"),
# "dcbd":(0, "dcbd_4_2","dcbd_4_3"),
# "dcri":(0, "dcri_4_2","dcri_4_3"),
# "ddt1":(0, "ddt1_4_2","ddt1_4_3"),
# "de":(0, "de_4_2","de_4_3"),
# "dfre":(0, "dfre_4_2","dfre_4_3"),
# "di":(0, "di_4_2","di_4_3"),
# "dloc":(0, "dloc_4_2","dloc_4_3"),
# "dmu":(0, "dmu_4_2","dmu_4_3"),
# "dnlr":(0, "dnlr_4_2","dnlr_4_3"),
# "dos":(0, "dos_4_2","dos_4_3"),
# "dprd":(0, "dprd_4_2","dprd_4_3"),
# "dpub":(0, "dpub_4_2","dpub_4_3"),
# "dres":(0, "dres_4_2","dres_4_3"),
# "dt1":(0, "dt1_4_2","dt1_4_3"),
# "dtim":(0, "dtim_4_2","dtim_4_3"),
# "dwat":(0, "dwat_4_2","dwat_4_3"),
# "dwet":(0, "dwet_4_2","dwet_4_3"),
# "fai":(0, "fai_4_2","fai_4_3"),
# "fmps":(0, "fmps_4_2","fmps_4_3"),
# "gai":(0, "gai_4_2","gai_4_3"),
# "gmps":(0, "gmps_4_2","gmps_4_3"),
# "h450":(0, "h450_4_2","h450_4_3"),
# "h750":(0, "h750_4_2","h750_4_3"),
# "hai":(0, "hai_4_2","hai_4_3"),
# "hd1":(0, "hd1_4_2","hd1_4_3"),
# "hmps":(0, "hmps_4_2","hmps_4_3"),
# "mai":(0, "mai_4_2","mai_4_3"),
# "mmps":(0, "mmps_4_2","mmps_4_3"),
# "pag":(0, "pag_4_2","pag_4_3"),
# "pcc":(0, "pcc_4_2","pcc_4_3"),
# "pcd":(0, "pcd_4_2","pcd_4_3"),
# "pcf":(0, "pcf_4_2","pcf_4_3"),
# "pcri":(0, "pcri_4_2","pcri_4_3"),
# "pes":(0, "pes_4_2","pes_4_3"),
# "pfld":(0, "pfld_4_2","pfld_4_3"),
# "pgr":(0, "pgr_4_2","pgr_4_3"),
# "phu":(0, "phu_4_2","phu_4_3"),
# "plu":(0, "plu_4_2","plu_4_3"),
# "pmf":(0, "pmf_4_2","pmf_4_3"),
# "pmu":(0, "pmu_4_2","pmu_4_3"),
# "psg":(0, "psg_4_2","psg_4_3"),
# "pslp":(0, "pslp_4_2","pslp_4_3"),
# "pstr":(0, "pstr_4_2","pstr_4_3"),
# "pub":(0, "pub_4_2","pub_4_3"),
# "pwa":(0, "pwa_4_2","pwa_4_3"),
# "pwet":(0, "pwet_4_2","pwet_4_3"),
# "shei":(0, "shei_4_2","shei_4_3"),
# "sslp":(0, "sslp_4_2","sslp_4_3"),
# "tbl":(0, "tbl_4_2","tbl_4_3"),
# "tiv":(0, "tiv_4_2","tiv_4_3"),
# "ugl":(0, "ugl_4_2","ugl_4_3"),
# },
5:
{
"equation_ids":(1,2,3,5,6,7), # note: this is the to_id's
"constant":("act_5_1","act_5_2","act_5_3","act_5_5",0,0),
"aai":(0,"aai_5_2","aai_5_3", 0,"aai_5_6","aai_5_7"),
"amps":("amps_5_1","amps_5_2",0, 0,0,"amps_5_7"),
# # "blmz":(0,0,0, 0,"blmz_5_6",0),
# # "c750":("c750_5_1",0,0, 0,0,0),
"cd1":("cd1_5_1",0,"cd1_5_3", 0,0,"cd1_5_7"),
"dag":("dag_5_1",0,0, 0,"dag_5_6","dag_5_7"),
# # "dc":(0,0,0, 0,0,"dc_5_7"),
# # "dcbd":("dcbd_5_1",0,0, 0,0,0),
"dcri":("dcri_5_1",0,0, 0,0,0),
"de":("de_5_1","de_5_2","de_5_3", 0,0,0),
"dloc":("dloc_5_1","dloc_5_2","dloc_5_3", 0,0,0),
"dnlr":(0,"dnlr_5_2","dnlr_5_3", 0,0,0),
"dos":("dos_5_1",0,0, 0,0,0),
"dprd":("dprd_5_1",0,0, 0,0,"dprd_5_7"),
"dpub":("dpub_5_1","dpub_5_2",0, 0,0,0),
"dres":(0,0,"dres_5_3", 0,0,0),
"dtim":("dtim_5_1","dtim_5_2","dtim_5_3", 0,0,0),
"dwat":("dwat_5_1",0,0, 0,0,"dwat_5_7"),
"dwet":(0,0,"dwet_5_3", 0,0,0),
"fmps":("fmps_5_1","fmps_5_2",0, 0,0,"fmps_5_7"),
"h450":(0,0,0, 0,0,"h450_5_7"),
"h750":("h750_5_1","h750_5_2",0, 0,0,"h750_5_7"),
"hai":(0,0,0, 0,"hai_5_6",0),
# "pcd":("pcd_5_1","pcd_5_2",0, 0,0,0),
"pcf":(0,0,0, 0,0,"pcf_5_7"),
"pcri":("pcri_5_1",0,0, 0,0,0),
"pes":("pes_5_1",0,0, 0,"pes_5_6","pes_5_7"),
"phu":("phu_5_1",0,"phu_5_3", 0,"phu_5_6","phu_5_7"),
"plu":(0,0,"plu_5_3", 0,"plu_5_6",0),
"pmu":("pmu_5_1","pmu_5_2",0, 0,"pmu_5_6","pmu_5_7"),
"pstr":(0,"pstr_5_2","pstr_5_3", 0,"pstr_5_6",0),
"pub":("pub_5_1",0,0, 0,0,0),
"pwa":(0,0,0, 0,"pwa_5_6",0),
"pwet":(0,0,0, 0,0,"pwet_5_7"),
"shei":("shei_5_1","shei_5_2",0, 0,"shei_5_6",0),
"sslp":("sslp_5_1","sslp_5_2","sslp_5_3", 0,"sslp_5_6",0),
"tiv":("tiv_5_1",0,0, 0,0,0),
"ugl":("ugl_5_1",0,0, 0,0,0),
},
6:
{
"equation_ids":(1,2,3,5,6), # note: this is the to_id's
"constant":("act_6_1","act_6_2","act_6_3",0,"act_6_6"),
"aai":(0,"aai_6_2","aai_6_3",0,0),
"blmz":("blmz_6_1",0,0,0,0),
"c750":("c750_6_1",0,0,0,0),
"dcri":("dcri_6_1","dcri_6_2","dcri_6_3",0, 0),
"di":("di_6_1",0,0,0,0),
"dloc":("dloc_6_1","dloc_6_2","dloc_6_3",0, 0),
"dnlr":(0,"dnlr_6_2","dnlr_6_3",0, 0),
"dos":("dos_6_1",0,0,0,0),
"dprd":("dprd_6_1",0,0,0, 0),
"dres":("dres_6_1","dres_6_2",0,0, 0),
"dtim":("dtim_6_1","dtim_6_2","dtim_6_3",0, 0),
"fmps":("fmps_6_1",0,0,0, 0),
"gai":(0,0,"gai_6_3","gai_6_5", 0),
<|fim▁hole|> # "pcd":("pcd_6_1","pcd_6_2","pcd_6_3","pcd_6_5", 0),
"pcf":("pcf_6_1","pcf_6_2",0,0,0),
"pes":("pes_6_1",0,"pes_6_3",0, 0),
"pgr":("pgr_6_1","pgr_6_2","pgr_6_3","pgr_6_5", 0),
"phu":("phu_6_1","phu_6_2",0,0, 0),
"plu":(0,0,"plu_6_3","plu_6_5", 0),
"pmu":("pmu_6_1","pmu_6_2","pmu_6_3",0, 0),
"pslp":("pslp_6_1","pslp_6_2","pslp_6_3","pslp_6_5", 0),
"pstr":("pstr_6_1","pstr_6_2",0,0, 0),
"pub":("pub_6_1",0,0,0, 0),
"pwa":(0,0,"pwa_6_3",0, 0),
"pwet":("pwet_6_1","pwet_6_2","pwet_6_3",0, 0),
"shei":("shei_6_1",0,"shei_6_3","shei_6_5", 0),
"tiv":("tiv_6_1",0,0,0, 0),
"ugl":("ugl_6_1","ugl_6_2","ugl_6_3",0, 0),
},
7:
{
"equation_ids":(1,2,3,5,7), # note: this is the to_id's
"constant":("act_7_1","act_7_2","act_7_3",0,"act_7_7"),
"aai":(0,0,"aai_7_3",0, 0),
"blmz":(0,"blmz_7_2","blmz_7_3","blmz_7_5", 0),
"crit":(0,"crit_7_2",0,0, 0),
"dc":("dc_7_1",0,0,0,0),
"dcri":("dcri_7_1","dcri_7_2","dcri_7_3",0, 0),
"ddt1":(0,0,"ddt1_7_3","ddt1_7_5", 0),
"dloc":("dloc_7_1","dloc_7_2","dloc_7_3",0, 0),
"dos":("dos_7_1",0,"dos_7_3",0, 0),
"dprd":("dprd_7_1","dprd_7_2",0,0, 0),
"dpub":(0,"dpub_7_2",0,"dpub_7_5", 0),
"dres":("dres_7_1","dres_7_2",0,0, 0),
"dwat":("dwat_7_1","dwat_7_2",0,0, 0),
"fmps":("fmps_7_1","fmps_7_2","fmps_7_3",0, 0),
"gai":(0,0,0,"gai_7_5", 0),
"h750":("h750_7_1","h750_7_2",0,0, 0),
# "pcd":("pcd_7_1","pcd_7_2","pcd_7_3","pcd_7_5", 0),
"pcf":(0,0,0,"pcf_7_5", 0),
"pes":(0,0,0,"pes_7_5", 0),
"pgr":("pgr_7_1","pgr_7_2",0,"pgr_7_5", 0),
"phu":("phu_7_1","phu_7_2",0,0, 0),
"plu":("plu_7_1",0,"plu_7_3",0, 0),
"pmf":("pmf_7_1","pmf_7_2",0,"pmf_7_5", 0),
"pmu":(0,"pmu_7_2",0,0, 0),
"psg":(0,"psg_7_2",0,0, 0),
"pslp":("pslp_7_1","pslp_7_2",0,0, 0),
"pstr":("pstr_7_1","pstr_7_2",0,0, 0),
"pub":("pub_7_1",0,0,"pub_7_5", 0),
"pwa":(0,0,"pwa_7_3",0, 0),
"shei":(0,0,0,"shei_7_5", 0),
"sslp":(0,0,"sslp_7_3",0, 0),
"tiv":(0,"tiv_7_2","tiv_7_3",0, 0),
"ugl":(0,"ugl_7_2",0,0, 0),
},
10:
{
"equation_ids":(1,2,3,10), # note: this is the to_id's
"constant":("constant_10_1","constant_10_2",0, "constant_10_10"),
"blmz":("blmz_10_1","blmz_10_2","blmz_10_3",0),
"c750":("c750_10_1",0,0,0),
"cd1":(0,"cd1_10_2",0,0),
"crit":("crit_10_1","crit_10_2","crit_10_3",0),
"dag":("dag_10_1","dag_10_2","dag_10_3",0),
"dcbd":("dcbd_10_1","dcbd_10_2","dcbd_10_3",0),
"dcri":("dcri_10_1","dcri_10_2","dcri_10_3",0),
"ddt1":("ddt1_10_1",0,0,0),
"de":(0,"de_10_2",0,0),
"dfre":("dfre_10_1",0,0,0),
"dloc":("dloc_10_1",0,0,0),
"dnlr":("dnlr_10_1",0,"dnlr_10_3",0),
"dprd":("dprd_10_1","dprd_10_2",0,0),
"dres":("dres_10_1",0,0,0),
"dtim":(0,0,"dtim_10_3",0),
"gmps":("gmps_10_1","gmps_10_2","gmps_10_3",0),
"h450":("h450_10_1","h450_10_2",0,0),
"h750":("h750_10_1","h750_10_2","h750_10_3",0),
"mmps":("mmps_10_1",0,0,0),
"pag":(0,0,"pag_10_3",0),
# "pcd":("pcd_10_1","pcd_10_2","pcd_10_3",0),
"pes":(0,0,"pes_10_3",0),
"pfld":("pfld_10_1","pfld_10_2","pfld_10_3",0),
"phu":("phu_10_1",0,0,0),
"plu":(0,"plu_10_2","plu_10_3",0),
"pmu":(0,"pmu_10_2",0,0),
"psg":(0,"psg_10_2",0,0),
"pslp":(0,"pslp_10_2",0,0),
"pstr":("pstr_10_1","pstr_10_2",0,0),
"pwet":("pwet_10_1","pwet_10_2",0,0),
"tiv":("tiv_10_1",0,"tiv_10_3",0),
"ugl":("ugl_10_1","ugl_10_2",0,0),
},
8:
{
"equation_ids": (9,), # note the comma after "9"
"constant":("act_8_9",),
},
11:
{
"equation_ids": (11,), # note the comma after "9"
"constant":("act_11_11",),
},
12:
{
"equation_ids": (12,), # note the comma after "9"
"constant":("act_12_12",),
},
13:
{
"equation_ids": (13,), # note the comma after "9"
"constant":("act_13_13",),
},
14:
{
"equation_ids": (14,), # note the comma after "9"
"constant":("act_14_14",),
}
}<|fim▁end|> | "h750":("h750_6_1","h750_6_2",0,0, 0),
"hmps":("hmps_6_1",0,0,0,0),
|
<|file_name|>query.py<|end_file_name|><|fim▁begin|>import datetime
import numbers
from django.db.models.fields import FieldDoesNotExist
from django.utils import tree
from django.core.exceptions import FieldError
try:
from django.db.models.sql.constants import LOOKUP_SEP
except:
from django.db.models.constants import LOOKUP_SEP
class HStoreConstraint():
value_operators = {'exact': '=', 'iexact': '=', 'in': 'IN', 'lt': '<', 'lte': '<=', 'gt': '>', 'gte': '>='}
def __init__(self, alias, field, value, lookup_type, key=None):
self.lvalue = '%s'
self.alias = alias
self.field = field
self.values = [value]
if lookup_type == 'contains':
if isinstance(value, basestring):
self.operator = '?'
elif isinstance(value, (list, tuple)):
self.operator = '?&'
self.values = [list(value)]
else:
raise ValueError('invalid value %r' % value)
elif lookup_type in self.value_operators:
self.operator = self.value_operators[lookup_type]
if self.operator == 'IN':
test_value = value[0] if len(value) > 0 else ''
self.values = [tuple(value)]
else:
test_value = value
if isinstance(test_value, datetime.datetime):
cast_type = 'timestamp'
elif isinstance(test_value, datetime.date):
cast_type = 'date'
elif isinstance(test_value, datetime.time):
cast_type = 'time'
elif isinstance(test_value, int):
cast_type = 'integer'
elif isinstance(test_value, numbers.Number):
cast_type = 'double precision'
elif isinstance(test_value, basestring):
cast_type = None
else:
raise ValueError('invalid value %r' % test_value)
if cast_type:
self.lvalue = "CAST(NULLIF(%%s->'%s','') AS %s)" % (key, cast_type)
elif lookup_type == 'iexact':
self.lvalue = "lower(%%s->'%s')" % key
self.values = [value.lower()]
elif lookup_type == 'in' and not value:
self.operator = '?'
self.values = [key]
else:
self.lvalue = "%%s->'%s'" % key
else:<|fim▁hole|> if self.alias:
return '%s.%s' % (qn(self.alias), qn(self.field))
else:
return qn(self.field)
def as_sql(self, qn=None, connection=None):
lvalue = self.lvalue % self.sql_for_column(qn, connection)
expr = '%s %s %%s' % (lvalue, self.operator)
return (expr, self.values)
class HQ(tree.Node):
AND = 'AND'
OR = 'OR'
default = AND
query_terms = ['exact', 'iexact', 'lt', 'lte', 'gt', 'gte', 'in', 'contains']
def __init__(self, **kwargs):
super(HQ, self).__init__(children=kwargs.items())
def _combine(self, other, conn):
if not isinstance(other, HQ):
raise TypeError(other)
obj = type(self)()
obj.add(self, conn)
obj.add(other, conn)
return obj
def __or__(self, other):
return self._combine(other, self.OR)
def __and__(self, other):
return self._combine(other, self.AND)
def __invert__(self):
obj = type(self)()
obj.add(self, self.AND)
obj.negate()
return obj
def add_to_query(self, query, used_aliases):
self.add_to_node(query.where, query, used_aliases)
def add_to_node(self, where_node, query, used_aliases):
for child in self.children:
if isinstance(child, HQ):
node = query.where_class()
child.add_to_node(node, query, used_aliases)
where_node.add(node, self.connector)
else:
field, value = child
parts = field.split(LOOKUP_SEP)
if not parts:
raise FieldError("Cannot parse keyword query %r" % field)
lookup_type = self.query_terms[0] # Default lookup type
num_parts = len(parts)
if len(parts) > 1 and parts[-1] in self.query_terms:
# Traverse the lookup query to distinguish related fields from
# lookup types.
lookup_model = query.model
for counter, field_name in enumerate(parts):
try:
lookup_field = lookup_model._meta.get_field(field_name)
except FieldDoesNotExist:
# Not a field. Bail out.
lookup_type = parts.pop()
break
# Unless we're at the end of the list of lookups, let's attempt
# to continue traversing relations.
if (counter + 1) < num_parts:
try:
lookup_model = lookup_field.rel.to
except AttributeError:
# Not a related field. Bail out.
lookup_type = parts.pop()
break
if lookup_type == 'contains':
key = None
else:
key = parts[-1]
parts = parts[:-1]
opts = query.get_meta()
alias = query.get_initial_alias()
field, target, opts, join_list, last, extra = query.setup_joins(parts, opts, alias, True)
col, alias, join_list = query.trim_joins(target, join_list, last, False, False)
where_node.add(HStoreConstraint(alias, col, value, lookup_type, key), self.connector)
if self.negated:
where_node.negate()
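# Usage sketch for the helper below (hypothetical queryset and hstore field):
#
#   qs = add_hstore(Book.objects.all(), 'data', 'isbn', name='isbn')
#   qs[0].isbn  # extra column selected as data -> 'isbn'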
def add_hstore(queryset, field, key, name=None):
assert queryset.query.can_filter(), "Cannot change a query once a slice has been taken"
name = name or key
clone = queryset._clone()
clone.query.add_extra({name: "%s -> '%s'" % (field, key)}, None, None, None, None, None)
return clone<|fim▁end|> | raise TypeError('invalid lookup type')
def sql_for_column(self, qn, connection): |
<|file_name|>object_detection_evaluation.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""object_detection_evaluation module.
ObjectDetectionEvaluation is a class which manages ground truth information of an
object detection dataset and computes frequently used detection metrics such as
Precision, Recall, and CorLoc on the provided detection results.
It supports the following operations:
1) Add ground truth information of images sequentially.
2) Add detection result of images sequentially.
3) Evaluate detection metrics on already inserted detection results.
4) Write evaluation result into a pickle file for future processing or
visualization.
Note: This module operates on numpy boxes and box lists.
"""
from abc import ABCMeta
from abc import abstractmethod
import collections
import logging
import unicodedata
import numpy as np
from object_detection.core import standard_fields
from object_detection.utils import label_map_util
from object_detection.utils import metrics
from object_detection.utils import per_image_evaluation
class DetectionEvaluator(object):
"""Interface for object detection evalution classes.
Example usage of the Evaluator:
------------------------------
evaluator = DetectionEvaluator(categories)
# Detections and groundtruth for image 1.
evaluator.add_single_groundtruth_image_info(...)
evaluator.add_single_detected_image_info(...)
# Detections and groundtruth for image 2.
evaluator.add_single_groundtruth_image_info(...)
evaluator.add_single_detected_image_info(...)
metrics_dict = evaluator.evaluate()
"""
__metaclass__ = ABCMeta
def __init__(self, categories):
"""Constructor.
Args:
categories: A list of dicts, each of which has the following keys -
'id': (required) an integer id uniquely identifying this category.
'name': (required) string representing category name e.g., 'cat', 'dog'.
"""
self._categories = categories
@abstractmethod
def add_single_ground_truth_image_info(self, image_id, groundtruth_dict):
"""Adds groundtruth for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
groundtruth_dict: A dictionary of groundtruth numpy arrays required
for evaluations.
"""
pass
@abstractmethod
def add_single_detected_image_info(self, image_id, detections_dict):
"""Adds detections for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
detections_dict: A dictionary of detection numpy arrays required
for evaluation.
"""
pass
def get_estimator_eval_metric_ops(self, eval_dict):
"""Returns dict of metrics to use with `tf.estimator.EstimatorSpec`.
Note that this must only be implemented if performing evaluation with a
`tf.estimator.Estimator`.
Args:
eval_dict: A dictionary that holds tensors for evaluating an object
detection model, returned from
eval_util.result_dict_for_single_example().
Returns:
A dictionary of metric names to tuple of value_op and update_op that can
be used as eval metric ops in `tf.estimator.EstimatorSpec`.
"""
pass
@abstractmethod
def evaluate(self):
"""Evaluates detections and returns a dictionary of metrics."""
pass
@abstractmethod
def clear(self):
"""Clears the state to prepare for a fresh evaluation."""
pass
class ObjectDetectionEvaluator(DetectionEvaluator):
"""A class to evaluate detections."""
def __init__(self,
categories,
matching_iou_threshold=0.5,
evaluate_corlocs=False,
metric_prefix=None,
use_weighted_mean_ap=False,
evaluate_masks=False,
group_of_weight=0.0):
"""Constructor.
Args:
categories: A list of dicts, each of which has the following keys -
'id': (required) an integer id uniquely identifying this category.
'name': (required) string representing category name e.g., 'cat', 'dog'.
matching_iou_threshold: IOU threshold to use for matching groundtruth
boxes to detection boxes.
evaluate_corlocs: (optional) boolean which determines if corloc scores
are to be returned or not.
metric_prefix: (optional) string prefix for metric name; if None, no
prefix is used.
use_weighted_mean_ap: (optional) boolean which determines if the mean
average precision is computed directly from the scores and tp_fp_labels
of all classes.
evaluate_masks: If False, evaluation will be performed based on boxes.
If True, mask evaluation will be performed instead.
group_of_weight: Weight of group-of boxes.If set to 0, detections of the
correct class within a group-of box are ignored. If weight is > 0, then
if at least one detection falls within a group-of box with
matching_iou_threshold, weight group_of_weight is added to true
positives. Consequently, if no detection falls within a group-of box,
weight group_of_weight is added to false negatives.
Raises:
ValueError: If the category ids are not 1-indexed.
"""
super(ObjectDetectionEvaluator, self).__init__(categories)
self._num_classes = max([cat['id'] for cat in categories])
if min(cat['id'] for cat in categories) < 1:
raise ValueError('Classes should be 1-indexed.')
self._matching_iou_threshold = matching_iou_threshold
self._use_weighted_mean_ap = use_weighted_mean_ap
self._label_id_offset = 1
self._evaluate_masks = evaluate_masks
self._group_of_weight = group_of_weight
self._evaluation = ObjectDetectionEvaluation(
num_groundtruth_classes=self._num_classes,
matching_iou_threshold=self._matching_iou_threshold,
use_weighted_mean_ap=self._use_weighted_mean_ap,
label_id_offset=self._label_id_offset,
group_of_weight=self._group_of_weight)
self._image_ids = set([])
self._evaluate_corlocs = evaluate_corlocs
self._metric_prefix = (metric_prefix + '_') if metric_prefix else ''
def add_single_ground_truth_image_info(self, image_id, groundtruth_dict):
"""Adds groundtruth for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
groundtruth_dict: A dictionary containing -
standard_fields.InputDataFields.groundtruth_boxes: float32 numpy array
of shape [num_boxes, 4] containing `num_boxes` groundtruth boxes of
the format [ymin, xmin, ymax, xmax] in absolute image coordinates.
standard_fields.InputDataFields.groundtruth_classes: integer numpy array
of shape [num_boxes] containing 1-indexed groundtruth classes for the
boxes.
standard_fields.InputDataFields.groundtruth_difficult: Optional length
M numpy boolean array denoting whether a ground truth box is a
difficult instance or not. This field is optional to support the case
that no boxes are difficult.
standard_fields.InputDataFields.groundtruth_instance_masks: Optional
numpy array of shape [num_boxes, height, width] with values in {0, 1}.
Raises:
ValueError: On adding groundtruth for an image more than once. Will also
raise error if instance masks are not in groundtruth dictionary.
"""
if image_id in self._image_ids:
raise ValueError('Image with id {} already added.'.format(image_id))
groundtruth_classes = (
groundtruth_dict[standard_fields.InputDataFields.groundtruth_classes] -
self._label_id_offset)
# Use the difficult flags from groundtruth_dict when the key is present and
# the array is non-empty (or when the image has no annotations at all);
# otherwise fall back to None.
if (standard_fields.InputDataFields.groundtruth_difficult in
groundtruth_dict.keys() and
(groundtruth_dict[standard_fields.InputDataFields.groundtruth_difficult]
.size or not groundtruth_classes.size)):
groundtruth_difficult = groundtruth_dict[
standard_fields.InputDataFields.groundtruth_difficult]
else:
groundtruth_difficult = None
if not len(self._image_ids) % 1000:
logging.warn(
'image %s does not have groundtruth difficult flag specified',
image_id)
groundtruth_masks = None
if self._evaluate_masks:
if (standard_fields.InputDataFields.groundtruth_instance_masks not in
groundtruth_dict):
raise ValueError('Instance masks not in groundtruth dictionary.')
groundtruth_masks = groundtruth_dict[
standard_fields.InputDataFields.groundtruth_instance_masks]
self._evaluation.add_single_ground_truth_image_info(
image_key=image_id,
groundtruth_boxes=groundtruth_dict[
standard_fields.InputDataFields.groundtruth_boxes],
groundtruth_class_labels=groundtruth_classes,
groundtruth_is_difficult_list=groundtruth_difficult,
groundtruth_masks=groundtruth_masks)
self._image_ids.update([image_id])
def add_single_detected_image_info(self, image_id, detections_dict):
"""Adds detections for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
detections_dict: A dictionary containing -
standard_fields.DetectionResultFields.detection_boxes: float32 numpy
array of shape [num_boxes, 4] containing `num_boxes` detection boxes
of the format [ymin, xmin, ymax, xmax] in absolute image coordinates.
standard_fields.DetectionResultFields.detection_scores: float32 numpy
array of shape [num_boxes] containing detection scores for the boxes.
standard_fields.DetectionResultFields.detection_classes: integer numpy
array of shape [num_boxes] containing 1-indexed detection classes for
the boxes.
standard_fields.DetectionResultFields.detection_masks: uint8 numpy
array of shape [num_boxes, height, width] containing `num_boxes` masks
of values ranging between 0 and 1.
Raises:
ValueError: If detection masks are not in detections dictionary.
"""
detection_classes = (
detections_dict[standard_fields.DetectionResultFields.detection_classes]
- self._label_id_offset)
detection_masks = None
if self._evaluate_masks:
if (standard_fields.DetectionResultFields.detection_masks not in
detections_dict):
raise ValueError('Detection masks not in detections dictionary.')
detection_masks = detections_dict[
standard_fields.DetectionResultFields.detection_masks]
self._evaluation.add_single_detected_image_info(
image_key=image_id,
detected_boxes=detections_dict[
standard_fields.DetectionResultFields.detection_boxes],
detected_scores=detections_dict[
standard_fields.DetectionResultFields.detection_scores],
detected_class_labels=detection_classes,
detected_masks=detection_masks)
def evaluate(self):
"""Compute evaluation result.
Returns:
A dictionary of metrics with the following fields -<|fim▁hole|> 1. summary_metrics:
'Precision/mAP@<matching_iou_threshold>IOU': mean average precision at
the specified IOU threshold.
2. per_category_ap: category specific results with keys of the form
'PerformanceByCategory/mAP@<matching_iou_threshold>IOU/category'.
"""
(per_class_ap, mean_ap, _, _, per_class_corloc, mean_corloc) = (
self._evaluation.evaluate())
pascal_metrics = {
self._metric_prefix +
'Precision/mAP@{}IOU'.format(self._matching_iou_threshold):
mean_ap
}
if self._evaluate_corlocs:
pascal_metrics[self._metric_prefix + 'Precision/meanCorLoc@{}IOU'.format(
self._matching_iou_threshold)] = mean_corloc
category_index = label_map_util.create_category_index(self._categories)
for idx in range(per_class_ap.size):
if idx + self._label_id_offset in category_index:
category_name = category_index[idx + self._label_id_offset]['name']
try:
category_name = unicode(category_name, 'utf-8')
except TypeError:
pass
category_name = unicodedata.normalize(
'NFKD', category_name).encode('ascii', 'ignore')
display_name = (
self._metric_prefix + 'PerformanceByCategory/AP@{}IOU/{}'.format(
self._matching_iou_threshold, category_name))
pascal_metrics[display_name] = per_class_ap[idx]
# Optionally add CorLoc metrics.
if self._evaluate_corlocs:
display_name = (
self._metric_prefix + 'PerformanceByCategory/CorLoc@{}IOU/{}'
.format(self._matching_iou_threshold, category_name))
pascal_metrics[display_name] = per_class_corloc[idx]
return pascal_metrics
def clear(self):
"""Clears the state to prepare for a fresh evaluation."""
self._evaluation = ObjectDetectionEvaluation(
num_groundtruth_classes=self._num_classes,
matching_iou_threshold=self._matching_iou_threshold,
use_weighted_mean_ap=self._use_weighted_mean_ap,
label_id_offset=self._label_id_offset)
self._image_ids.clear()
class PascalDetectionEvaluator(ObjectDetectionEvaluator):
"""A class to evaluate detections using PASCAL metrics."""
def __init__(self, categories, matching_iou_threshold=0.5):
super(PascalDetectionEvaluator, self).__init__(
categories,
matching_iou_threshold=matching_iou_threshold,
evaluate_corlocs=False,
metric_prefix='PascalBoxes',
use_weighted_mean_ap=False)
class WeightedPascalDetectionEvaluator(ObjectDetectionEvaluator):
"""A class to evaluate detections using weighted PASCAL metrics.
Weighted PASCAL metrics computes the mean average precision as the average
precision given the scores and tp_fp_labels of all classes. In comparison,
PASCAL metrics computes the mean average precision as the mean of the
per-class average precisions.
This definition is very similar to the mean of the per-class average
precisions weighted by class frequency. However, they are typically not the
same as the average precision is not a linear function of the scores and
tp_fp_labels.
"""
def __init__(self, categories, matching_iou_threshold=0.5):
super(WeightedPascalDetectionEvaluator, self).__init__(
categories,
matching_iou_threshold=matching_iou_threshold,
evaluate_corlocs=False,
metric_prefix='WeightedPascalBoxes',
use_weighted_mean_ap=True)
class PascalInstanceSegmentationEvaluator(ObjectDetectionEvaluator):
"""A class to evaluate instance masks using PASCAL metrics."""
def __init__(self, categories, matching_iou_threshold=0.5):
super(PascalInstanceSegmentationEvaluator, self).__init__(
categories,
matching_iou_threshold=matching_iou_threshold,
evaluate_corlocs=False,
metric_prefix='PascalMasks',
use_weighted_mean_ap=False,
evaluate_masks=True)
class WeightedPascalInstanceSegmentationEvaluator(ObjectDetectionEvaluator):
"""A class to evaluate instance masks using weighted PASCAL metrics.
Weighted PASCAL metrics computes the mean average precision as the average
precision given the scores and tp_fp_labels of all classes. In comparison,
PASCAL metrics computes the mean average precision as the mean of the
per-class average precisions.
This definition is very similar to the mean of the per-class average
precisions weighted by class frequency. However, they are typically not the
same as the average precision is not a linear function of the scores and
tp_fp_labels.
"""
def __init__(self, categories, matching_iou_threshold=0.5):
super(WeightedPascalInstanceSegmentationEvaluator, self).__init__(
categories,
matching_iou_threshold=matching_iou_threshold,
evaluate_corlocs=False,
metric_prefix='WeightedPascalMasks',
use_weighted_mean_ap=True,
evaluate_masks=True)
class OpenImagesDetectionEvaluator(ObjectDetectionEvaluator):
"""A class to evaluate detections using Open Images V2 metrics.
Open Images V2 introduce group_of type of bounding boxes and this metric
handles those boxes appropriately.
"""
def __init__(self,
categories,
matching_iou_threshold=0.5,
evaluate_corlocs=False,
metric_prefix='OpenImagesV2',
group_of_weight=0.0):
"""Constructor.
Args:
categories: A list of dicts, each of which has the following keys -
'id': (required) an integer id uniquely identifying this category.
'name': (required) string representing category name e.g., 'cat', 'dog'.
matching_iou_threshold: IOU threshold to use for matching groundtruth
boxes to detection boxes.
evaluate_corlocs: if True, additionally evaluates and returns CorLoc.
metric_prefix: Prefix name of the metric.
group_of_weight: Weight of the group-of bounding box. If set to 0 (default
for Open Images V2 detection protocol), detections of the correct class
within a group-of box are ignored. If weight is > 0, then if at least
one detection falls within a group-of box with matching_iou_threshold,
weight group_of_weight is added to true positives. Consequently, if no
detection falls within a group-of box, weight group_of_weight is added
to false negatives.
"""
super(OpenImagesDetectionEvaluator, self).__init__(
categories,
matching_iou_threshold,
evaluate_corlocs,
metric_prefix=metric_prefix,
group_of_weight=group_of_weight)
def add_single_ground_truth_image_info(self, image_id, groundtruth_dict):
"""Adds groundtruth for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
groundtruth_dict: A dictionary containing -
standard_fields.InputDataFields.groundtruth_boxes: float32 numpy array
of shape [num_boxes, 4] containing `num_boxes` groundtruth boxes of
the format [ymin, xmin, ymax, xmax] in absolute image coordinates.
standard_fields.InputDataFields.groundtruth_classes: integer numpy array
of shape [num_boxes] containing 1-indexed groundtruth classes for the
boxes.
standard_fields.InputDataFields.groundtruth_group_of: Optional length
M numpy boolean array denoting whether a groundtruth box contains a
group of instances.
Raises:
ValueError: On adding groundtruth for an image more than once.
"""
if image_id in self._image_ids:
raise ValueError('Image with id {} already added.'.format(image_id))
groundtruth_classes = (
groundtruth_dict[standard_fields.InputDataFields.groundtruth_classes] -
self._label_id_offset)
# Use the group_of flags from groundtruth_dict when the key is present and
# the array is non-empty (or when the image has no annotations at all);
# otherwise fall back to None.
if (standard_fields.InputDataFields.groundtruth_group_of in
groundtruth_dict.keys() and
(groundtruth_dict[standard_fields.InputDataFields.groundtruth_group_of]
.size or not groundtruth_classes.size)):
groundtruth_group_of = groundtruth_dict[
standard_fields.InputDataFields.groundtruth_group_of]
else:
groundtruth_group_of = None
if not len(self._image_ids) % 1000:
logging.warn(
'image %s does not have groundtruth group_of flag specified',
image_id)
self._evaluation.add_single_ground_truth_image_info(
image_id,
groundtruth_dict[standard_fields.InputDataFields.groundtruth_boxes],
groundtruth_classes,
groundtruth_is_difficult_list=None,
groundtruth_is_group_of_list=groundtruth_group_of)
self._image_ids.update([image_id])
class OpenImagesDetectionChallengeEvaluator(OpenImagesDetectionEvaluator):
"""A class implements Open Images Challenge Detection metrics.
Open Images Challenge Detection metric has two major changes in comparison
with Open Images V2 detection metric:
- a custom weight might be specified for detecting an object contained in
a group-of box.
- verified image-level labels should be explicitly provided for
evaluation: in case an image has neither a positive nor a negative image-level
label of class c, all detections of this class on this image will be
ignored.
"""
def __init__(self,
categories,
matching_iou_threshold=0.5,
evaluate_corlocs=False,
group_of_weight=1.0):
"""Constructor.
Args:
categories: A list of dicts, each of which has the following keys -
'id': (required) an integer id uniquely identifying this category.
'name': (required) string representing category name e.g., 'cat', 'dog'.
matching_iou_threshold: IOU threshold to use for matching groundtruth
boxes to detection boxes.
evaluate_corlocs: if True, additionally evaluates and returns CorLoc.
group_of_weight: weight of a group-of box. If set to 0, detections of the
correct class within a group-of box are ignored. If weight is > 0
(default for Open Images Detection Challenge 2018), then if at least one
detection falls within a group-of box with matching_iou_threshold,
weight group_of_weight is added to true positives. Consequently, if no
detection falls within a group-of box, weight group_of_weight is added
to false negatives.
"""
super(OpenImagesDetectionChallengeEvaluator, self).__init__(
categories,
matching_iou_threshold,
evaluate_corlocs,
metric_prefix='OpenImagesChallenge2018',
group_of_weight=group_of_weight)
self._evaluatable_labels = {}
def add_single_ground_truth_image_info(self, image_id, groundtruth_dict):
"""Adds groundtruth for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
groundtruth_dict: A dictionary containing -
standard_fields.InputDataFields.groundtruth_boxes: float32 numpy array
of shape [num_boxes, 4] containing `num_boxes` groundtruth boxes of
the format [ymin, xmin, ymax, xmax] in absolute image coordinates.
standard_fields.InputDataFields.groundtruth_classes: integer numpy array
of shape [num_boxes] containing 1-indexed groundtruth classes for the
boxes.
standard_fields.InputDataFields.groundtruth_image_classes: integer 1D
numpy array containing all classes for which labels are verified.
standard_fields.InputDataFields.groundtruth_group_of: Optional length
M numpy boolean array denoting whether a groundtruth box contains a
group of instances.
Raises:
ValueError: On adding groundtruth for an image more than once.
"""
super(OpenImagesDetectionChallengeEvaluator,
self).add_single_ground_truth_image_info(image_id, groundtruth_dict)
groundtruth_classes = (
groundtruth_dict[standard_fields.InputDataFields.groundtruth_classes] -
self._label_id_offset)
self._evaluatable_labels[image_id] = np.unique(
np.concatenate(((groundtruth_dict.get(
standard_fields.InputDataFields.groundtruth_image_classes,
np.array([], dtype=int)) - self._label_id_offset),
groundtruth_classes)))
def add_single_detected_image_info(self, image_id, detections_dict):
"""Adds detections for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
detections_dict: A dictionary containing -
standard_fields.DetectionResultFields.detection_boxes: float32 numpy
array of shape [num_boxes, 4] containing `num_boxes` detection boxes
of the format [ymin, xmin, ymax, xmax] in absolute image coordinates.
standard_fields.DetectionResultFields.detection_scores: float32 numpy
array of shape [num_boxes] containing detection scores for the boxes.
standard_fields.DetectionResultFields.detection_classes: integer numpy
array of shape [num_boxes] containing 1-indexed detection classes for
the boxes.
Raises:
ValueError: If detection masks are not in detections dictionary.
"""
if image_id not in self._image_ids:
# The evaluator assumes groundtruth is inserted before detections; if it was
# not, register the image with an empty evaluatable-label set so that all of
# its detections are ignored.
self._image_ids.update([image_id])
self._evaluatable_labels[image_id] = np.array([])
detection_classes = (
detections_dict[standard_fields.DetectionResultFields.detection_classes]
- self._label_id_offset)
allowed_classes = np.where(
np.isin(detection_classes, self._evaluatable_labels[image_id]))
detection_classes = detection_classes[allowed_classes]
detected_boxes = detections_dict[
standard_fields.DetectionResultFields.detection_boxes][allowed_classes]
detected_scores = detections_dict[
standard_fields.DetectionResultFields.detection_scores][allowed_classes]
self._evaluation.add_single_detected_image_info(
image_key=image_id,
detected_boxes=detected_boxes,
detected_scores=detected_scores,
detected_class_labels=detection_classes)
def clear(self):
"""Clears stored data."""
super(OpenImagesDetectionChallengeEvaluator, self).clear()
self._evaluatable_labels.clear()
ObjectDetectionEvalMetrics = collections.namedtuple(
'ObjectDetectionEvalMetrics', [
'average_precisions', 'mean_ap', 'precisions', 'recalls', 'corlocs',
'mean_corloc'
])
class ObjectDetectionEvaluation(object):
"""Internal implementation of Pascal object detection metrics."""
def __init__(self,
num_groundtruth_classes,
matching_iou_threshold=0.5,
nms_iou_threshold=1.0,
nms_max_output_boxes=10000,
use_weighted_mean_ap=False,
label_id_offset=0,
group_of_weight=0.0,
per_image_eval_class=per_image_evaluation.PerImageEvaluation):
"""Constructor.
Args:
num_groundtruth_classes: Number of ground-truth classes.
matching_iou_threshold: IOU threshold used for matching detected boxes
to ground-truth boxes.
nms_iou_threshold: IOU threshold used for non-maximum suppression.
nms_max_output_boxes: Maximum number of boxes returned by non-maximum
suppression.
use_weighted_mean_ap: (optional) boolean which determines if the mean
average precision is computed directly from the scores and tp_fp_labels
of all classes.
label_id_offset: The label id offset.
group_of_weight: Weight of group-of boxes.If set to 0, detections of the
correct class within a group-of box are ignored. If weight is > 0, then
if at least one detection falls within a group-of box with
matching_iou_threshold, weight group_of_weight is added to true
positives. Consequently, if no detection falls within a group-of box,
weight group_of_weight is added to false negatives.
per_image_eval_class: The class that contains functions for computing
per image metrics.
Raises:
ValueError: if num_groundtruth_classes is smaller than 1.
"""
if num_groundtruth_classes < 1:
raise ValueError('Need at least 1 groundtruth class for evaluation.')
self.per_image_eval = per_image_eval_class(
num_groundtruth_classes=num_groundtruth_classes,
matching_iou_threshold=matching_iou_threshold,
nms_iou_threshold=nms_iou_threshold,
nms_max_output_boxes=nms_max_output_boxes,
group_of_weight=group_of_weight)
self.group_of_weight = group_of_weight
self.num_class = num_groundtruth_classes
self.use_weighted_mean_ap = use_weighted_mean_ap
self.label_id_offset = label_id_offset
self.groundtruth_boxes = {}
self.groundtruth_class_labels = {}
self.groundtruth_masks = {}
self.groundtruth_is_difficult_list = {}
self.groundtruth_is_group_of_list = {}
self.num_gt_instances_per_class = np.zeros(self.num_class, dtype=float)
self.num_gt_imgs_per_class = np.zeros(self.num_class, dtype=int)
self._initialize_detections()
def _initialize_detections(self):
"""Initializes internal data structures."""
self.detection_keys = set()
self.scores_per_class = [[] for _ in range(self.num_class)]
self.tp_fp_labels_per_class = [[] for _ in range(self.num_class)]
self.num_images_correctly_detected_per_class = np.zeros(self.num_class)
self.average_precision_per_class = np.empty(self.num_class, dtype=float)
self.average_precision_per_class.fill(np.nan)
self.precisions_per_class = [np.nan] * self.num_class
self.recalls_per_class = [np.nan] * self.num_class
self.corloc_per_class = np.ones(self.num_class, dtype=float)
def clear_detections(self):
self._initialize_detections()
def add_single_ground_truth_image_info(self,
image_key,
groundtruth_boxes,
groundtruth_class_labels,
groundtruth_is_difficult_list=None,
groundtruth_is_group_of_list=None,
groundtruth_masks=None):
"""Adds groundtruth for a single image to be used for evaluation.
Args:
image_key: A unique string/integer identifier for the image.
groundtruth_boxes: float32 numpy array of shape [num_boxes, 4]
containing `num_boxes` groundtruth boxes of the format
[ymin, xmin, ymax, xmax] in absolute image coordinates.
groundtruth_class_labels: integer numpy array of shape [num_boxes]
containing 0-indexed groundtruth classes for the boxes.
groundtruth_is_difficult_list: A length M numpy boolean array denoting
whether a ground truth box is a difficult instance or not. To support
the case that no boxes are difficult, it is by default set as None.
groundtruth_is_group_of_list: A length M numpy boolean array denoting
whether a ground truth box is a group-of box or not. To support
the case that no boxes are groups-of, it is by default set as None.
groundtruth_masks: uint8 numpy array of shape
[num_boxes, height, width] containing `num_boxes` groundtruth masks.
The mask values range from 0 to 1.
"""
if image_key in self.groundtruth_boxes:
logging.warn(
'image %s has already been added to the ground truth database.',
image_key)
return
self.groundtruth_boxes[image_key] = groundtruth_boxes
self.groundtruth_class_labels[image_key] = groundtruth_class_labels
self.groundtruth_masks[image_key] = groundtruth_masks
if groundtruth_is_difficult_list is None:
num_boxes = groundtruth_boxes.shape[0]
groundtruth_is_difficult_list = np.zeros(num_boxes, dtype=bool)
self.groundtruth_is_difficult_list[
image_key] = groundtruth_is_difficult_list.astype(dtype=bool)
if groundtruth_is_group_of_list is None:
num_boxes = groundtruth_boxes.shape[0]
groundtruth_is_group_of_list = np.zeros(num_boxes, dtype=bool)
self.groundtruth_is_group_of_list[
image_key] = groundtruth_is_group_of_list.astype(dtype=bool)
self._update_ground_truth_statistics(
groundtruth_class_labels,
groundtruth_is_difficult_list.astype(dtype=bool),
groundtruth_is_group_of_list.astype(dtype=bool))
def add_single_detected_image_info(self, image_key, detected_boxes,
detected_scores, detected_class_labels,
detected_masks=None):
"""Adds detections for a single image to be used for evaluation.
Args:
image_key: A unique string/integer identifier for the image.
detected_boxes: float32 numpy array of shape [num_boxes, 4]
containing `num_boxes` detection boxes of the format
[ymin, xmin, ymax, xmax] in absolute image coordinates.
detected_scores: float32 numpy array of shape [num_boxes] containing
detection scores for the boxes.
detected_class_labels: integer numpy array of shape [num_boxes] containing
0-indexed detection classes for the boxes.
detected_masks: np.uint8 numpy array of shape [num_boxes, height, width]
containing `num_boxes` detection masks with values ranging
between 0 and 1.
Raises:
ValueError: if the number of boxes, scores and class labels differ in
length.
"""
if (len(detected_boxes) != len(detected_scores) or
len(detected_boxes) != len(detected_class_labels)):
raise ValueError('detected_boxes, detected_scores and '
'detected_class_labels should all have same lengths. Got'
'[%d, %d, %d]' % (len(detected_boxes),
len(detected_scores), len(detected_class_labels)))
if image_key in self.detection_keys:
logging.warn(
'image %s has already been added to the detection result database',
image_key)
return
self.detection_keys.add(image_key)
if image_key in self.groundtruth_boxes:
groundtruth_boxes = self.groundtruth_boxes[image_key]
groundtruth_class_labels = self.groundtruth_class_labels[image_key]
# Masks are popped instead of look up. The reason is that we do not want
# to keep all masks in memory which can cause memory overflow.
groundtruth_masks = self.groundtruth_masks.pop(
image_key)
groundtruth_is_difficult_list = self.groundtruth_is_difficult_list[
image_key]
groundtruth_is_group_of_list = self.groundtruth_is_group_of_list[
image_key]
else:
groundtruth_boxes = np.empty(shape=[0, 4], dtype=float)
groundtruth_class_labels = np.array([], dtype=int)
if detected_masks is None:
groundtruth_masks = None
else:
groundtruth_masks = np.empty(shape=[0, 1, 1], dtype=float)
groundtruth_is_difficult_list = np.array([], dtype=bool)
groundtruth_is_group_of_list = np.array([], dtype=bool)
scores, tp_fp_labels, is_class_correctly_detected_in_image = (
self.per_image_eval.compute_object_detection_metrics(
detected_boxes=detected_boxes,
detected_scores=detected_scores,
detected_class_labels=detected_class_labels,
groundtruth_boxes=groundtruth_boxes,
groundtruth_class_labels=groundtruth_class_labels,
groundtruth_is_difficult_list=groundtruth_is_difficult_list,
groundtruth_is_group_of_list=groundtruth_is_group_of_list,
detected_masks=detected_masks,
groundtruth_masks=groundtruth_masks))
for i in range(self.num_class):
if scores[i].shape[0] > 0:
self.scores_per_class[i].append(scores[i])
self.tp_fp_labels_per_class[i].append(tp_fp_labels[i])
(self.num_images_correctly_detected_per_class
) += is_class_correctly_detected_in_image
def _update_ground_truth_statistics(self, groundtruth_class_labels,
groundtruth_is_difficult_list,
groundtruth_is_group_of_list):
"""Update grouth truth statitistics.
1. Difficult boxes are ignored when counting the number of ground truth
instances as done in Pascal VOC devkit.
2. Difficult boxes are treated as normal boxes when computing CorLoc related
statistics.
Args:
groundtruth_class_labels: An integer numpy array of length M,
representing M class labels of object instances in ground truth
groundtruth_is_difficult_list: A boolean numpy array of length M denoting
whether a ground truth box is a difficult instance or not
groundtruth_is_group_of_list: A boolean numpy array of length M denoting
whether a ground truth box is a group-of box or not
"""
for class_index in range(self.num_class):
num_gt_instances = np.sum(groundtruth_class_labels[
~groundtruth_is_difficult_list
& ~groundtruth_is_group_of_list] == class_index)
num_groupof_gt_instances = self.group_of_weight * np.sum(
groundtruth_class_labels[groundtruth_is_group_of_list] == class_index)
self.num_gt_instances_per_class[
class_index] += num_gt_instances + num_groupof_gt_instances
if np.any(groundtruth_class_labels == class_index):
self.num_gt_imgs_per_class[class_index] += 1
def evaluate(self):
"""Compute evaluation result.
Returns:
A named tuple with the following fields -
average_precision: float numpy array of average precision for
each class.
mean_ap: mean average precision of all classes, float scalar
precisions: List of precisions, each precision is a float numpy
array
recalls: List of recalls, each recall is a float numpy array
corloc: numpy float array
mean_corloc: Mean CorLoc score for each class, float scalar
"""
if (self.num_gt_instances_per_class == 0).any():
logging.warn(
'The following classes have no ground truth examples: %s',
np.squeeze(np.argwhere(self.num_gt_instances_per_class == 0)) +
self.label_id_offset)
if self.use_weighted_mean_ap:
all_scores = np.array([], dtype=float)
all_tp_fp_labels = np.array([], dtype=bool)
for class_index in range(self.num_class):
if self.num_gt_instances_per_class[class_index] == 0:
continue
if not self.scores_per_class[class_index]:
scores = np.array([], dtype=float)
tp_fp_labels = np.array([], dtype=float)
else:
scores = np.concatenate(self.scores_per_class[class_index])
tp_fp_labels = np.concatenate(self.tp_fp_labels_per_class[class_index])
if self.use_weighted_mean_ap:
all_scores = np.append(all_scores, scores)
all_tp_fp_labels = np.append(all_tp_fp_labels, tp_fp_labels)
logging.info('Scores and tpfp per class label: %d', class_index)
logging.info(tp_fp_labels)
logging.info(scores)
precision, recall = metrics.compute_precision_recall(
scores, tp_fp_labels, self.num_gt_instances_per_class[class_index])
self.precisions_per_class[class_index] = precision
self.recalls_per_class[class_index] = recall
average_precision = metrics.compute_average_precision(precision, recall)
self.average_precision_per_class[class_index] = average_precision
self.corloc_per_class = metrics.compute_cor_loc(
self.num_gt_imgs_per_class,
self.num_images_correctly_detected_per_class)
if self.use_weighted_mean_ap:
num_gt_instances = np.sum(self.num_gt_instances_per_class)
precision, recall = metrics.compute_precision_recall(
all_scores, all_tp_fp_labels, num_gt_instances)
mean_ap = metrics.compute_average_precision(precision, recall)
else:
mean_ap = np.nanmean(self.average_precision_per_class)
mean_corloc = np.nanmean(self.corloc_per_class)
return ObjectDetectionEvalMetrics(
self.average_precision_per_class, mean_ap, self.precisions_per_class,
self.recalls_per_class, self.corloc_per_class, mean_corloc)<|fim▁end|> | |
<|file_name|>ChunkInfo.java<|end_file_name|><|fim▁begin|>package derpstream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.PriorityQueue;
import java.util.TimerTask;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author Mads
*/
public final class ChunkInfo extends TimerTask {
private static final Logger LOGGER = Logger.getLogger("derpstream");
private static final int BUFFER_PIECES = 3;
private final StreamInfo streamInfo;
private final DerpStream derpStream;
// Template for generating chunk links
private final String chunkPath;
private final LinkedBlockingDeque<Piece> pieceQueue = new LinkedBlockingDeque<>();
private final PriorityQueue<Piece> bufferedPieces = new PriorityQueue<>();
private long nextPieceTime; // when next piece becomes available
private int maxValidPiece; // maximum valid piece that can be requested
private int writePiece; // next piece to be written to disk.
private final Object waitObj = new Object();
ChunkInfo(DerpStream derpStream, StreamInfo streamInfo) throws IOException {
this.streamInfo = streamInfo;
this.derpStream = derpStream;
// Download chunklist
LOGGER.info("Getting latest chunklist...");
String chunkList = DerpStream.downloadString(streamInfo.getChunkInfoPath());
if(!DerpStream.isM3U(chunkList)) throw new IllegalStateException("Invalid chunklist: " + chunkList);
// Parse current chunk index
String search = "#EXT-X-MEDIA-SEQUENCE:";
int start = chunkList.indexOf(search) + search.length();
int end = chunkList.indexOf("\n", start);
maxValidPiece = Integer.parseInt(chunkList.substring(start, end));
writePiece = maxValidPiece - BUFFER_PIECES;
LOGGER.info("Ok. Stream is at piece " + maxValidPiece + "\n");
// Figure out chunkPath template
String[] lines = chunkList.split("\n");<|fim▁hole|> for (String line : lines) {
if(!line.startsWith("#")) {
if(line.contains(""+maxValidPiece)) {
chunkPath = line.replace("" + maxValidPiece, "%d");
LOGGER.info("Setting chunkpath: " + chunkPath);
break;
}
}
}
if(chunkPath == null) throw new IllegalStateException("Couldn't find chunkPath");
this.chunkPath = chunkPath;
// Enqueue valid pieces
for (int i = 0; i < BUFFER_PIECES; i++) {
pieceQueue.add(makePiece(writePiece+i));
}
// 10 seconds to next piece becomes available
nextPieceTime = System.currentTimeMillis() + 10000;
}
// Adds one newly available piece to the queue for each 10-second interval elapsed.
public void updatePiece() {
long time = System.currentTimeMillis();
while(time >= nextPieceTime) {
nextPieceTime += 10000;
pieceQueue.add(makePiece(maxValidPiece));
DerpStreamCallbacks callbacks = derpStream.getCallbacks();
if(callbacks != null) {
callbacks.pieceAvailable(maxValidPiece);
}
maxValidPiece++;
}
}
public String getChunkPath() {
return String.format(chunkPath, writePiece);
}
private Piece makePiece(int index) {
return new Piece(index, String.format(chunkPath, index));
}
@Override
public void run() {
// Update pieces
updatePiece();
}
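// Called when a piece download fails for good: the piece is buffered with
// null data so startWriting() can skip it instead of stalling the stream.
// (Assumed semantics, based on the null-data branch in startWriting.)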
void lostPiece(Piece p) {
synchronized(bufferedPieces) {
bufferedPieces.add(p);
}
synchronized(waitObj) {
waitObj.notify();
}
}
public void registerPiece(Piece p) {
synchronized(bufferedPieces) {
bufferedPieces.add(p);
}
synchronized(waitObj) {
waitObj.notify();
}
}
public Piece grabWork() throws InterruptedException {
return pieceQueue.takeFirst();
}
void startWriting(FileOutputStream fos) throws IOException {
DerpStreamCallbacks callbacks = derpStream.getCallbacks();
while(derpStream.isRunning()) {
// Write data to the file as it becomes available.
synchronized(bufferedPieces) {
while(bufferedPieces.size() > 0) {
Piece topPiece = bufferedPieces.peek();
// Not what we're looking for?
if(topPiece.pieceIndex != writePiece) break;
// Grab it!
Piece removedPiece = bufferedPieces.poll();
// Check it!
if(removedPiece != topPiece) throw new RuntimeException("Huh?");
if(topPiece.data != null) {
LOGGER.fine("Writing " + topPiece);
// Write it!
fos.getChannel().write(topPiece.data);
if(callbacks != null) {
callbacks.finishedWriting(topPiece.pieceIndex);
}
} else {
LOGGER.warning("Skipping " + topPiece);
if(callbacks != null) {
callbacks.skippedWriting(topPiece.pieceIndex);
}
}
writePiece++;
}
}
synchronized(waitObj) {
try {
waitObj.wait(5000);
} catch (InterruptedException ex) {
}
}
}
}
}<|fim▁end|> | String chunkPath = null; |
<|file_name|>list_retention.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
use super::*;
use crate::{extensions::RequestExt, font_awesome};
use iml_wire_types::snapshot::{ReserveUnit, SnapshotRetention};
#[derive(Clone, Debug)]
pub enum Msg {
Page(paging::Msg),
Delete(Arc<SnapshotRetention>),
DeleteRetentionResp(fetch::ResponseDataResult<Response<snapshot::remove_retention::Resp>>),
}
#[derive(Default, Debug)]
pub struct Model {
pager: paging::Model,
rows: Vec<Arc<SnapshotRetention>>,
take: take::Model,
}
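// Message handling: paging messages are proxied to the paging component, and
// Delete fires a GraphQL `remove_retention` mutation after a confirm dialog;
// the server response comes back as DeleteRetentionResp.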
pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {
match msg {
Msg::Page(msg) => {
paging::update(msg, &mut model.pager, &mut orders.proxy(Msg::Page));
}
Msg::Delete(x) => {
if let Ok(true) = window().confirm_with_message("Are you sure you want to delete this retention policy?") {
let query = snapshot::remove_retention::build(x.id);
let req = fetch::Request::graphql_query(&query);
orders.perform_cmd(req.fetch_json_data(Msg::DeleteRetentionResp));
}
}
Msg::DeleteRetentionResp(x) => match x {
Ok(Response::Data(_)) => {}
Ok(Response::Errors(e)) => {
error!("An error has occurred during Snapshot deletion: ", e);
}
Err(e) => {
error!("An error has occurred during Snapshot deletion: ", e);
}
},
};
}
impl RecordChange<Msg> for Model {
fn update_record(&mut self, _: ArcRecord, cache: &ArcCache, orders: &mut impl Orders<Msg, GMsg>) {
self.rows = cache.snapshot_retention.values().cloned().collect();
orders.proxy(Msg::Page).send_msg(paging::Msg::SetTotal(self.rows.len()));
}
fn remove_record(&mut self, _: RecordId, cache: &ArcCache, orders: &mut impl Orders<Msg, GMsg>) {
self.rows = cache.snapshot_retention.values().cloned().collect();
orders.proxy(Msg::Page).send_msg(paging::Msg::SetTotal(self.rows.len()));
}
fn set_records(&mut self, cache: &ArcCache, orders: &mut impl Orders<Msg, GMsg>) {
self.rows = cache.snapshot_retention.values().cloned().collect();
orders.proxy(Msg::Page).send_msg(paging::Msg::SetTotal(self.rows.len()));
}
}
pub fn view(model: &Model, cache: &ArcCache, session: Option<&Session>) -> Node<Msg> {
panel::view(
h3![class![C.py_4, C.font_normal, C.text_lg], "Snapshot Retention Policies"],
div![
table::wrapper_view(vec![
table::thead_view(vec![
table::th_view(plain!["Filesystem"]),
table::th_view(plain!["Reserve"]),
table::th_view(plain!["Keep"]),
table::th_view(plain!["Last Run"]),<|fim▁hole|> restrict::view(session, GroupType::FilesystemAdministrators, th![]),
]),
tbody![model.rows[model.pager.range()].iter().map(|x| {
tr![
td![
table::td_cls(),
class![C.text_center],
match get_fs_by_name(cache, &x.filesystem_name) {
Some(x) => {
div![resource_links::fs_link(&x)]
}
None => {
plain![x.filesystem_name.to_string()]
}
}
],
table::td_center(plain![format!(
"{} {}",
x.reserve_value,
match x.reserve_unit {
ReserveUnit::Percent => "%",
ReserveUnit::Gibibytes => "GiB",
ReserveUnit::Tebibytes => "TiB",
}
)]),
table::td_center(plain![x.keep_num.to_string()]),
table::td_center(plain![x
.last_run
.map(|x| x.format("%m/%d/%Y %H:%M:%S").to_string())
.unwrap_or_else(|| "---".to_string())]),
td![
class![C.flex, C.justify_center, C.p_4, C.px_3],
restrict::view(
session,
GroupType::FilesystemAdministrators,
button![
class![
C.bg_blue_500,
C.duration_300,
C.flex,
C.hover__bg_blue_400,
C.items_center,
C.px_6,
C.py_2,
C.rounded_sm,
C.text_white,
C.transition_colors,
],
font_awesome(class![C.w_3, C.h_3, C.inline, C.mr_1], "trash"),
"Delete Policy",
simple_ev(Ev::Click, Msg::Delete(Arc::clone(&x)))
]
)
]
]
})]
])
.merge_attrs(class![C.my_6]),
div![
class![C.flex, C.justify_end, C.py_1, C.pr_3],
paging::limit_selection_view(&model.pager).map_msg(Msg::Page),
paging::page_count_view(&model.pager),
paging::next_prev_view(&model.pager).map_msg(Msg::Page)
]
],
)
}<|fim▁end|> | |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2018 [email protected]
# Licensed under the MIT license (http://opensource.org/licenses/MIT)
from keyplus.utility import inverse_map
AES_KEY_LEN = 16
EP_VENDOR_SIZE = 64
VENDOR_REPORT_LEN = 64
FLASH_WRITE_PACKET_LEN = EP_VENDOR_SIZE - 5
SETTINGS_RF_INFO_SIZE = 64
SETTINGS_RF_INFO_HEADER_SIZE = (SETTINGS_RF_INFO_SIZE - AES_KEY_LEN*2)
SETTINGS_SIZE = 512
LAYOUT_HEADER_SIZE = 1
MAX_NUMBER_KEYBOARDS = 64
MAX_NUMBER_LAYOUTS = MAX_NUMBER_KEYBOARDS
MAX_NUMBER_DEVICES = 64
MAX_MATRIX_SIZE = 32
LAYOUT_ID_NONE = 0xfe
LAYOUT_ID_INVALID = 0xff
# Report
KEYBOARD_REPORT_MODE_AUTO = 0 # 6kro -> nkro if more than 6 keys pressed
KEYBOARD_REPORT_MODE_NKRO = 1 # nkro
KEYBOARD_REPORT_MODE_6KRO = 2 # 6kro
REPORT_MODE_STR_MAP = {
KEYBOARD_REPORT_MODE_AUTO: "Auto NKRO",
KEYBOARD_REPORT_MODE_6KRO: "6KRO",
KEYBOARD_REPORT_MODE_NKRO: "NKRO",<|fim▁hole|> if mode in REPORT_MODE_STR_MAP:
return REPORT_MODE_STR_MAP[mode]
else:
return "Unknown({})".format(mode)
# FEATURE_CTRL bit mask values
FEATURE_CTRL_USB_DISABLE = (1 << 0)
FEATURE_CTRL_WIRED_DISABLE = (1 << 1)
FEATURE_CTRL_RF_DISABLE = (1 << 2)
FEATURE_CTRL_RF_MOUSE_DISABLE = (1 << 3)
FEATURE_CTRL_BT_DISABLE = (1 << 4)
FEATURE_CTRL_RESERVED_0 = (1 << 5)
FEATURE_CTRL_RESERVED_1 = (1 << 6)
FEATURE_CTRL_RESERVED_2 = (1 << 7)
###############################################################################
# firmware info constants #
###############################################################################
SUPPORT_SCANNING_MASK = 0x01
SUPPORT_SCANNING_COL_ROW_MASK = 0x02
SUPPORT_SCANNING_ROW_COL_MASK = 0x04
SUPPORT_SCANNING_PINS_MASK = 0x08
SUPPORT_SCANNING_ARBITRARY_MASK = 0x10
SUPPORT_SCANNING_BUILT_IN_MASK = 0x20
SUPPORT_KEY_MEDIA = 0x01
SUPPORT_KEY_MOUSE = 0x02
SUPPORT_KEY_LAYERS = 0x04
SUPPORT_KEY_STICKY = 0x08
SUPPORT_KEY_TAP = 0x10
SUPPORT_KEY_HOLD = 0x20
SUPPORT_KRO_N = 0x01;
SUPPORT_KRO_6 = 0x02;
SUPPORT_LED_INDICATORS = 0x01
SUPPORT_LED_BACKLIGHTING = 0x02
SUPPORT_LED_WS2812 = 0x04
SUPPORT_NRF24 = 0x01
SUPPORT_I2C = 0x02
SUPPORT_UNIFYING = 0x04
SUPPORT_USB = 0x08
SUPPORT_BT = 0x10
VERSION_IS_STABLE = 0x01
VERSION_RESERVED_1 = 0x02
VERSION_RESERVED_2 = 0x04
VERSION_RESERVED_3 = 0x08
SUPPORT_MOUSE = 0x01
SUPPORT_MOUSE_GESTURE = 0x02
MATRIX_SCANNER_INTERNAL_NONE = 0x00
MATRIX_SCANNER_INTERNAL_FAST_ROW_COL = 0x01
MATRIX_SCANNER_INTERNAL_BASIC_SCAN = 0x02
MATRIX_SCANNER_INTERNAL_HARD_CODED = 0x03
MATRIX_SCANNER_INTERNAL_VIRTUAL = 0x04
MATRIX_SCANNER_INTERNAL_CUSTOM = 0xff
INTERNAL_SCAN_METHOD_NAME_TABLE = {
"none": MATRIX_SCANNER_INTERNAL_NONE,
"fast_row_col": MATRIX_SCANNER_INTERNAL_FAST_ROW_COL,
"basic_scan": MATRIX_SCANNER_INTERNAL_BASIC_SCAN,
"hard_coded": MATRIX_SCANNER_INTERNAL_HARD_CODED,
"virtual": MATRIX_SCANNER_INTERNAL_VIRTUAL,
"custom": MATRIX_SCANNER_INTERNAL_CUSTOM,
}
INTERNAL_SCAN_METHOD_TABLE = inverse_map(INTERNAL_SCAN_METHOD_NAME_TABLE)
VIRTUAL_MAP_TABLE_SIZE = 0x300<|fim▁end|> | }
def report_mode_to_str(mode): |
<|file_name|>ScaleClipTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.geom.AffineTransform;
import java.awt.geom.Path2D;
import java.awt.image.BufferedImage;
import java.awt.image.Raster;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
/**
* Scaled Line Clipping rendering test
*/
public class ScaleClipTest {
static final boolean SAVE_IMAGE = false;
static final int SIZE = 50;
enum SCALE_MODE {
ORTHO,
NON_ORTHO,
COMPLEX
};
public static void main(String[] args) {
// First display which renderer is tested:
// JDK9 only:
System.setProperty("sun.java2d.renderer.verbose", "true");
System.out.println("Testing renderer: ");
// Other JDK:
String renderer = "undefined";
try {
renderer = sun.java2d.pipe.RenderingEngine.getInstance().getClass().getName();
System.out.println(renderer);
} catch (Throwable th) {
// may fail with JDK9 jigsaw (jake)
if (false) {
System.err.println("Unable to get RenderingEngine.getInstance()");
th.printStackTrace();
}
}
System.out.println("ScaleClipTest: size = " + SIZE);
final BufferedImage image = new BufferedImage(SIZE, SIZE, BufferedImage.TYPE_INT_ARGB);
boolean fail = false;
// testNegativeScale:
for (SCALE_MODE mode : SCALE_MODE.values()) {
try {
testNegativeScale(image, mode);
} catch (IllegalStateException ise) {
System.err.println("testNegativeScale[" + mode + "] failed:");
ise.printStackTrace();
fail = true;
}
}
// testMarginScale:
for (SCALE_MODE mode : SCALE_MODE.values()) {
try {
testMarginScale(image, mode);
} catch (IllegalStateException ise) {
System.err.println("testMarginScale[" + mode + "] failed:");
ise.printStackTrace();
fail = true;
}
}
// Fail at the end:
if (fail) {
throw new RuntimeException("ScaleClipTest has failures.");
}
}
private static void testNegativeScale(final BufferedImage image, final SCALE_MODE mode) {
final Graphics2D g2d = (Graphics2D) image.getGraphics();
try {
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
g2d.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_PURE);
g2d.setBackground(Color.WHITE);
g2d.clearRect(0, 0, SIZE, SIZE);
g2d.setColor(Color.BLACK);
// Bug in TransformingPathConsumer2D.adjustClipScale()
// non ortho scale only
final double scale = -1.0;
final AffineTransform at;
switch (mode) {
default:
case ORTHO:
at = AffineTransform.getScaleInstance(scale, scale);
break;
case NON_ORTHO:
at = AffineTransform.getScaleInstance(scale, scale + 1e-5);
break;
case COMPLEX:
at = AffineTransform.getScaleInstance(scale, scale);
at.concatenate(AffineTransform.getShearInstance(1e-4, 1e-4));
break;
}
g2d.setTransform(at);
// Set cap/join to reduce clip margin:
g2d.setStroke(new BasicStroke(2f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL));
final Path2D p = new Path2D.Double();
p.moveTo(scale * 10, scale * 10);
p.lineTo(scale * (SIZE - 10), scale * (SIZE - 10));
g2d.draw(p);
if (SAVE_IMAGE) {
try {
final File file = new File("ScaleClipTest-testNegativeScale-" + mode + ".png");
System.out.println("Writing file: " + file.getAbsolutePath());
ImageIO.write(image, "PNG", file);
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
// Check image:
// 25, 25 = black
checkPixel(image.getData(), 25, 25, Color.BLACK.getRGB());
} finally {
g2d.dispose();
}
}
private static void testMarginScale(final BufferedImage image, final SCALE_MODE mode) {
final Graphics2D g2d = (Graphics2D) image.getGraphics();
try {
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
g2d.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_PURE);
g2d.setBackground(Color.WHITE);
g2d.clearRect(0, 0, SIZE, SIZE);
g2d.setColor(Color.BLACK);
// Bug in Stroker.init()
// ortho scale only: scale used twice !
final double scale = 1e-2;
final AffineTransform at;
switch (mode) {
default:
case ORTHO:
at = AffineTransform.getScaleInstance(scale, scale);
break;
case NON_ORTHO:
at = AffineTransform.getScaleInstance(scale, scale + 1e-5);
break;
case COMPLEX:
at = AffineTransform.getScaleInstance(scale, scale);
at.concatenate(AffineTransform.getShearInstance(1e-4, 1e-4));
break;
}
g2d.setTransform(at);
final double invScale = 1.0 / scale;
// Set cap/join to reduce clip margin:
final float w = (float) (3.0 * invScale);
g2d.setStroke(new BasicStroke(w, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL));
final Path2D p = new Path2D.Double();
p.moveTo(invScale * -0.5, invScale * 10);
p.lineTo(invScale * -0.5, invScale * (SIZE - 10));
g2d.draw(p);
if (SAVE_IMAGE) {
try {
final File file = new File("ScaleClipTest-testMarginScale-" + mode + ".png");
System.out.println("Writing file: " + file.getAbsolutePath());
ImageIO.write(image, "PNG", file);
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
// Check image:
// 0, 25 = black
checkPixel(image.getData(), 0, 25, Color.BLACK.getRGB());
} finally {
g2d.dispose();
}
}
private static void checkPixel(final Raster raster,
final int x, final int y,
final int expected) {
<|fim▁hole|> if (rgb[0] != expected) {
throw new IllegalStateException("bad pixel at (" + x + ", " + y
+ ") = " + rgb[0] + " expected: " + expected);
}
}
}<|fim▁end|> | final int[] rgb = (int[]) raster.getDataElements(x, y, null);
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponse, Http404
from django.template.loader import get_template
from django.template import RequestContext
from django.core.paginator import Paginator, EmptyPage
from django.utils.translation import ugettext as _
from tagging.models import Tag
from messages.models import Message
from settings import LANGUAGE_CODE as lang
from qqq.models import Contribution
from qqq.questions.models import Question<|fim▁hole|>from qqq.collections.models import Collection
from qqq.posts.models import Post
import logging
# the number of results to paginate by
RESULTS_PER_PAGE = 25
def home(request):
"""
Serves the home page, which depends on whether the user is logged in or not.
"""
if request.user.is_authenticated():
return participate(request)
else:
c = RequestContext(request)
if lang == "nl":
c['frontpage'] = 'frontpage_nl.html'
else:
c['frontpage'] = 'frontpage_en.html'
t = get_template('home_public.html')
c['tags_list'] = Tag.objects.cloud_for_model(Question, steps=9, min_count=None)
return HttpResponse(t.render(c))
###################################################################################
#################################### MEMBERS ONLY #################################
###################################################################################
def participate(request):
"""
Serves the home page for logged-in users
"""
t = get_template('home_members.html')
c = RequestContext(request)
filter = request.GET.get(_('filter'), False)
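# The query parameter name and its values arrive localized (via ugettext),
# so the comparisons below match against translated strings, not raw keys.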
# behold some serious django-fu!
if filter == _('questions'):
c['filter'] = 'questions'
questions = Question.objects.all()
objects = Contribution.objects.filter(question__in=questions).select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
elif filter == _('improvements'):
c['filter'] = 'improvements'
revisions = Revision.objects.all()
objects = Contribution.objects.filter(revision__in=revisions).select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
elif filter == _('collections'):
c['filter'] = 'collections'
collections = Collection.objects.all()
objects = Contribution.objects.filter(collection__in=collections).select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
elif filter == _('posts'):
c['filter'] = 'posts'
posts = Post.objects.all()
objects = Contribution.objects.filter(post__in=posts).select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
else:
objects = Contribution.objects.all().select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
p = Paginator(objects, RESULTS_PER_PAGE)
c['type'] = {'all': True}
c['paginator'] = p
try:
c['feed'] = p.page(request.GET.get(_('page'), '1'))
except EmptyPage:
raise Http404
c['message_list'] = Message.objects.inbox_for(request.user)
return HttpResponse(t.render(c))<|fim▁end|> | from qqq.revisions.models import Revision |
<|file_name|>fetch_sra_files.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#(c) 2014 Emory University. All Rights Reserved
# Code written by: Michael Sauria ([email protected])
<|fim▁hole|>import sys
import os
import subprocess
file_list, tmp_dir, out_dir, fastq_dump = sys.argv[1:5]
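# Positional CLI arguments, in order: <SRA run list file> <temp dir> <output dir> <path to fastq-dump>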
files = []
for line in open(file_list, 'r'):
line = line.strip()
if not line or line.startswith('#'):
continue
fields = line.split()
srx = fields[1]
for srr in fields[2].split(','):
files.append([srr, srx])
for file in files:
srr, srx = file
if (not os.path.exists("%s/%s_1.fastq" % (out_dir, srr)) or
not os.path.exists("%s/%s_2.fastq" % (out_dir, srr))):
if not os.path.exists("%s/%s.sra" % (tmp_dir, srr)):
subprocess.call('wget ftp://ftp-trace.ncbi.nlm.nih.gov/sra/sra-instant/reads/ByExp/sra/%s/%s/%s/%s/%s.sra -O %s' % (srx[:3], srx[:6], srx, srr, srr, "%s/%s.sra" % (tmp_dir, srr)), shell=True)
for file in files:
srr, srx = file
if (not os.path.exists("%s/%s_1.fastq" % (out_dir, srr)) or
not os.path.exists("%s/%s_2.fastq" % (out_dir, srr))):
subprocess.call('cd %s; %s %s.sra --split-3' % (tmp_dir, fastq_dump, srr), shell=True)
subprocess.call('mv %s/%s_1.fastq %s/' % (tmp_dir, srr, out_dir), shell=True)
subprocess.call('mv %s/%s_2.fastq %s/' % (tmp_dir, srr, out_dir), shell=True)
subprocess.call('rm %s/%s.sra' % (tmp_dir, srr), shell=True)<|fim▁end|> | |
<|file_name|>generic_derive.rs<|end_file_name|><|fim▁begin|>pub trait CustomType: prost::Message + Default {}
impl CustomType for u64 {}
#[derive(Clone, prost::Oneof)]
enum GenericEnum<A: CustomType> {
#[prost(message, tag = "1")]
Data(GenericMessage<A>),
#[prost(uint64, tag = "2")]
#[allow(dead_code)]<|fim▁hole|>struct GenericMessage<A: CustomType> {
#[prost(message, tag = "1")]
data: Option<A>,
}
#[test]
fn generic_enum() {
let msg = GenericMessage { data: Some(100u64) };
let enumeration = GenericEnum::Data(msg);
match enumeration {
GenericEnum::Data(d) => assert_eq!(100, d.data.unwrap()),
GenericEnum::Number(_) => panic!("Not supposed to reach"),
}
}<|fim▁end|> | Number(u64),
}
#[derive(Clone, prost::Message)] |
<|file_name|>manage_team.js<|end_file_name|><|fim▁begin|>angular.module('teamform-manage_team-app', ['firebase'])
.controller('ManageTeamCtrl', ['$scope', '$firebaseObject', '$firebaseArray', function($scope, $firebaseObject, $firebaseArray) {
initalizeFirebase();
var teamleader;
firebase.auth().onAuthStateChanged(function(user) {
if (user) {
var userPath = "/user/" + user.uid;
var userref = firebase.database().ref(userPath);
userref.on("value", function(snapshot) {
console.log(snapshot.val());
teamleader = snapshot.val().name;
console.log(teamleader);
}, function (errorObject) {
console.log("The read failed: " + errorObject.code);
});
}
else {}
});
$scope.teaminfo = {TeamLeader:"", Description:"", Forward:"", Midfield:"", LeftBack:"", RightBack:"", Goalkeeper:""};
$scope.input = {teamLeader: teamleader, forward:"", midfield:"", leftBack:"", rightBack:"", goalkeeper:""};
$scope.teamtaginfo = {Pass_and_move:"", Give_and_go:"", The_long_through_ball:"", Triangular_movement:"", Swapping_of_the_wing_man:"", Strong_side_overloads:"", The_zone_defence:"", Depth_considerations:"", The_man_to_man_defence:""};
$scope.teamtaginput = {pass_and_move:0, give_and_go:0, the_long_through_ball:0, triangular_movement:0, swapping_of_the_wing_man:0, strong_side_overloads:0, the_zone_defence:0, depth_considerations:0, the_man_to_man_defence:0};
var eventName, teamName;
eventName = getURLParameter("q");
teamName = getURLParameter("tn");
var eventPath ="/event/" + eventName +"/param";
var eventref = firebase.database().ref(eventPath);
var current_team;
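    // NOTE: this read-then-update of No_of_Team is not atomic; under concurrent
    // submissions a Firebase transaction would avoid lost updates.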
eventref.once("value",function(snapshot)
{
console.log(snapshot.val());<|fim▁hole|> current_team = snapshot.val().No_of_Team;
current_team = current_team +1;
console.log(current_team);
eventref.update(
{
'No_of_Team' : current_team
}
);
}, function (errorObject) {
console.log("The read failed: " + errorObject.code);
});
var ref, refPath;
$scope.EventName = eventName;
$scope.TeamName = teamName;
var teamtaginit = "NULL";
$scope.TeamTag = teamtaginit;
//Get The team info
refPath = "/event/" + eventName + "/team/" + teamName;
ref = firebase.database().ref(refPath);
$scope.teaminfo = $firebaseObject(ref);
ref.set({
TeamName: teamName,
TeamTag: "NULL",
TeamLeader: "",
Description:"",
Forward:"",
Midfield:"",
LeftBack:"",
RightBack:"",
GoalKeeper:"",
NumMembers: 0
});
$scope.teaminfo.$loaded()
.then( function(data) {
// Fill in some initial values when the DB entry doesn't exist
        // Enable the UI when the data is successfully loaded and synchronized
//$('#manage_team_page_controller').show();
$scope.teaminfo.TeamLeader = $scope.input.teamLeader;
$scope.teaminfo.Description = $scope.input.description;
$scope.teaminfo.Forward = $scope.input.forward;
$scope.teaminfo.Midfield = $scope.input.midfield;
$scope.teaminfo.LeftBack = $scope.input.leftBack;
$scope.teaminfo.RightBack = $scope.input.rightBack;
$scope.teaminfo.Goalkeeper = $scope.input.goalkeeper;
})
.catch(function(error) {
// Database connection error handling...
//console.error("Error:", error);
});
//Get the team tag info
var tagRef, tagRefPath;
tagRefPath = "/event/" + eventName + "/team/" + teamName + "/tag";
tagRef = firebase.database().ref(tagRefPath);
$scope.teamtaginfo = $firebaseObject(tagRef);
tagRef.set({
Pass_and_move:"",
Give_and_go:"",
The_long_through_ball:"",
Triangular_movement:"",
Swapping_of_the_wing_man:"",
Strong_side_overloads:"",
The_zone_defence:"",
Depth_considerations:"",
The_man_to_man_defence:""
})
$scope.teamtaginfo.$loaded()
.then( function(data) {
/*
$scope.teamtaginfo.Pass_and_move = $scope.teamtaginput.pass_and_move;
$scope.teamtaginfo.Give_and_go = $scope.teamtaginput.give_and_go;
$scope.teamtaginfo.The_long_through_ball = $scope.teamtaginput.the_long_through_ball;
$scope.teamtaginfo.Triangular_movement = $scope.teamtaginput.triangular_movement;
$scope.teamtaginfo.Swapping_of_the_wing_man = $scope.teamtaginput.swapping_of_the_wing_man;
$scope.teamtaginfo.Strong_side_overloads = $scope.teamtaginput.strong_side_overloads;
$scope.teamtaginfo.The_zone_defence = $scope.teamtaginput.the_zone_defence;
$scope.teamtaginfo.Depth_considerations = $scope.teamtaginput.depth_considerations;
$scope.teamtaginfo.The_man_to_man_defence = $scope.teamtaginput.the_man_to_man_defence;
*/
$scope.teamtaginfo.Pass_and_move = 0;
$scope.teamtaginfo.Give_and_go = 0;
$scope.teamtaginfo.The_long_through_ball = 0;
$scope.teamtaginfo.Triangular_movement = 0;
$scope.teamtaginfo.Swapping_of_the_wing_man = 0;
$scope.teamtaginfo.Strong_side_overloads = 0;
$scope.teamtaginfo.The_zone_defence = 0;
$scope.teamtaginfo.Depth_considerations = 0;
$scope.teamtaginfo.The_man_to_man_defence = 0;
})
.catch(function(error) {
});
$scope.saveFunc = function() {
$scope.teaminfo.$save();
$scope.teamtaginfo.$save();
// Finally, go back to the front-end
window.location.href= "team.html?q=" + eventName +"&tn=" + teamName;
}
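    // NOTE: the handlers below reference $scope.param, which is never initialized
    // in this controller; they appear to be leftovers from an earlier version.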
$scope.processRequest = function(r) {
//$scope.test = "processRequest: " + r;
if (
$scope.param.teamMembers.indexOf(r) < 0 &&
$scope.param.teamMembers.length < $scope.param.currentTeamSize ) {
// Not exists, and the current number of team member is less than the preferred team size
$scope.param.teamMembers.push(r);
$scope.saveFunc();
$scope.saveFuncTeamTag();
}
}
$scope.removeMember = function(member) {
var index = $scope.param.teamMembers.indexOf(member);
if ( index > -1 ) {
$scope.param.teamMembers.splice(index, 1); // remove that item
$scope.saveFunc();
$scope.saveFuncTeamTag();
}
}
}
]);<|fim▁end|> | |
<|file_name|>test_run.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of hopr: https://github.com/hopr/hopr.
#
# Hopr is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Hopr is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Hopr. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import unittest as ut
import tempfile
from mock import MagicMock, sentinel, call
from hopr.tool.run import * # Run, parse_args, run
# TODO: Suppress log output during tests.
class Test1Misc(ut.TestCase):
def test_timeout(self):
dt = 0.01
e = 0.001
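        # e is the timing tolerance (in seconds) used to absorb scheduler
        # jitter on either side of the dt deadline.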
timeout = Timeout(dt)
t1 = time()
while(True):
a = timeout()
t2 = time()
if t2 - t1 < dt-e:
self.assertEqual(a, False)
else:
break
while(t2 - t1 <= dt + e):
t2 = time()
self.assertEqual(timeout(), True)
class TestParseArgs(ut.TestCase):
def setUp(self):
self.args = {'no_grab': False,
'timeout': 5,
'log_level': 'info',
'print_keymap': False,
'log_file': '',
'config_dir': '',
}
def test2_parse_args(self):
x = parse_args('--no-grab -t 10 --log-level warning'.split())
self.args.update({'no_grab': True,
'timeout': 10,
'log_level': 'warning',
})
self.assertEqual(self.args, vars(x))
def test2_no_timeout(self):
x = parse_args('-x'.split())
self.args.update({'timeout': 0})
self.assertEqual(self.args, vars(x))
def test1_parse_args_defaults(self):
x = parse_args(''.split())
self.assertEqual({'no_grab': False,
'timeout': 5,
'log_level': 'info',
'log_file': '',
'config_dir': '',
'print_keymap': False,
}, vars(x))
    def test1_parse_args_log_file(self):
x = parse_args('--log-file log.txt'.split())
self.assertEqual({'no_grab': False,
'timeout': 5,
'log_level': 'info',
'log_file': 'log.txt',
'config_dir': '',
'print_keymap': False,
}, vars(x))
class TestRun(ut.TestCase):
def setUp(self):
params = dict(event_parser=MagicMock(name='parser'),
event_wrapper=MagicMock(name='event_wrapper'),
find_keyboards=MagicMock(name='find_keyboards'),
read_events=MagicMock(name='read_events'),
grab_keyboards=MagicMock(name='grab_keyboards'))
for k,v in list(params.items()):
setattr(self, k, v)
self.run = partial(run, **params)
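        # Bind the mocked collaborators once; individual tests then only
        # supply runtime flags such as timeout/no_grab.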
def test1_no_events(self):
self.run(timeout=5,
no_grab=True)
def test2_keyboards_are_optionally_grabbed(self):
kbds = [sentinel.kbd1, sentinel.kbd2]
self.find_keyboards.return_value = kbds
self.run(no_grab=True)
self.grab_keyboards.assert_not_called()
self.run(no_grab=False)
self.grab_keyboards.assert_called_once_with(kbds)
def test2_keyboards_events_are_read(self):
kbds = [sentinel.kbd1, sentinel.kbd2]
self.find_keyboards.return_value = kbds
self.run()
self.read_events.assert_called_once_with(kbds)
def test2_events_are_wrapped_before_parsing(self):
events = [sentinel.event]
self.read_events.return_value = events
self.event_wrapper.return_value = sentinel.wrapped_event
self.run()
self.event_wrapper.assert_called_once_with(sentinel.event)
self.event_parser.assert_called_once_with(sentinel.wrapped_event)
def test2_events_are_sent_to_parser(self):
events = [sentinel.event1, sentinel.event2]
self.read_events.return_value = events
self.event_wrapper.side_effect = lambda x : x
self.run()
self.event_parser.assert_has_calls([call(e) for e in events])
def test3_timeout(self):
self.run(timeout=-1)
class TestRunFunction(ut.TestCase):
def test(self):
backend = MagicMock(name='backend')
make_eventparser = MagicMock(name='make_eventparser')
args = '--log-level=error'.split()
run_parse_args(backend=backend,
make_eventparser=make_eventparser,
args=args)
def test_log_file(self):
f = tempfile.NamedTemporaryFile('r')
backend = MagicMock(name='backend')<|fim▁hole|> run_parse_args(backend=backend,
make_eventparser=make_eventparser,
args=args)
logging.getLogger().debug('Test Message')
text = f.read()
self.assertTrue(text.strip().endswith('Test Message'))
if __name__ == "__main__":
# import logging
# logging.getLogger().setLevel('ERROR')
ut.main(failfast=True, exit=False)<|fim▁end|> | make_eventparser = MagicMock(name='make_eventparser')
args = ['--log-level', 'debug', '--log-file', f.name] |
<|file_name|>ShadowMapMasterRenderer.java<|end_file_name|><|fim▁begin|>package shadows;
import java.util.List;
import java.util.Map;
import org.lwjgl.opengl.GL11;
import org.lwjgl.util.vector.Matrix4f;
import org.lwjgl.util.vector.Vector2f;
import org.lwjgl.util.vector.Vector3f;
import entities.Camera;
import entities.Entity;
import entities.Light;
import entities.Player;
import models.TexturedModel;
public class ShadowMapMasterRenderer {
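    // Resolution in pixels of the (square) shadow map texture; larger maps give
    // sharper shadow edges at the cost of memory and fill rate.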
private static final int SHADOW_MAP_SIZE = 5200;
private ShadowFrameBuffer shadowFbo;
private ShadowShader shader;
private ShadowBox shadowBox;
private Matrix4f projectionMatrix = new Matrix4f();
private Matrix4f lightViewMatrix = new Matrix4f();
private Matrix4f projectionViewMatrix = new Matrix4f();
private Matrix4f offset = createOffset();
private ShadowMapEntityRenderer entityRenderer;
/**
* Creates instances of the important objects needed for rendering the scene
* to the shadow map. This includes the {@link ShadowBox} which calculates
* the position and size of the "view cuboid", the simple renderer and
* shader program that are used to render objects to the shadow map, and the
* {@link ShadowFrameBuffer} to which the scene is rendered. The size of the
* shadow map is determined here.
*
* @param camera
* - the camera being used in the scene.
*/
public ShadowMapMasterRenderer(Camera camera) {
shader = new ShadowShader();
shadowBox = new ShadowBox(lightViewMatrix, camera);
shadowFbo = new ShadowFrameBuffer(SHADOW_MAP_SIZE, SHADOW_MAP_SIZE);
entityRenderer = new ShadowMapEntityRenderer(shader, projectionViewMatrix);
}
/**
<|fim▁hole|> * position of the "view cuboid". The light direction is assumed to be
* "-lightPosition" which will be fairly accurate assuming that the light is
* very far from the scene. It then prepares to render, renders the entities
* to the shadow map, and finishes rendering.
*
* @param entities
* - the lists of entities to be rendered. Each list is
* associated with the {@link TexturedModel} that all of the
* entities in that list use.
* @param sun
* - the light acting as the sun in the scene.
*/
public void render(Map<TexturedModel, List<Entity>> entities, Light sun) {
shadowBox.update();
Vector3f sunPosition = sun.getPosition();
Vector3f lightDirection = new Vector3f(-sunPosition.x, -sunPosition.y, -sunPosition.z);
prepare(lightDirection, shadowBox);
entityRenderer.render(entities);
finish();
}
/**
* This biased projection-view matrix is used to convert fragments into
* "shadow map space" when rendering the main render pass. It converts a
* world space position into a 2D coordinate on the shadow map. This is
* needed for the second part of shadow mapping.
*
* @return The to-shadow-map-space matrix.
*/
public Matrix4f getToShadowMapSpaceMatrix() {
return Matrix4f.mul(offset, projectionViewMatrix, null);
}
/**
* Clean up the shader and FBO on closing.
*/
public void cleanUp() {
shader.cleanUp();
shadowFbo.cleanUp();
}
/**
* @return The ID of the shadow map texture. The ID will always stay the
* same, even when the contents of the shadow map texture change
* each frame.
*/
public int getShadowMap() {
return shadowFbo.getShadowMap();
}
/**
* @return The light's "view" matrix.
*/
protected Matrix4f getLightSpaceTransform() {
return lightViewMatrix;
}
/**
* Prepare for the shadow render pass. This first updates the dimensions of
* the orthographic "view cuboid" based on the information that was
     * calculated in the {@link ShadowBox} class. The light's "view" matrix is
* also calculated based on the light's direction and the center position of
* the "view cuboid" which was also calculated in the {@link ShadowBox}
* class. These two matrices are multiplied together to create the
* projection-view matrix. This matrix determines the size, position, and
* orientation of the "view cuboid" in the world. This method also binds the
* shadows FBO so that everything rendered after this gets rendered to the
* FBO. It also enables depth testing, and clears any data that is in the
* FBOs depth attachment from last frame. The simple shader program is also
* started.
*
* @param lightDirection
* - the direction of the light rays coming from the sun.
* @param box
* - the shadow box, which contains all the info about the
* "view cuboid".
*/
private void prepare(Vector3f lightDirection, ShadowBox box) {
updateOrthoProjectionMatrix(box.getWidth(), box.getHeight(), box.getLength());
updateLightViewMatrix(lightDirection, box.getCenter());
Matrix4f.mul(projectionMatrix, lightViewMatrix, projectionViewMatrix);
shadowFbo.bindFrameBuffer();
GL11.glEnable(GL11.GL_DEPTH_TEST);
GL11.glClear(GL11.GL_DEPTH_BUFFER_BIT);
shader.start();
}
/**
* Finish the shadow render pass. Stops the shader and unbinds the shadow
* FBO, so everything rendered after this point is rendered to the screen,
* rather than to the shadow FBO.
*/
private void finish() {
shader.stop();
shadowFbo.unbindFrameBuffer();
}
/**
* Updates the "view" matrix of the light. This creates a view matrix which
* will line up the direction of the "view cuboid" with the direction of the
* light. The light itself has no position, so the "view" matrix is centered
* at the center of the "view cuboid". The created view matrix determines
* where and how the "view cuboid" is positioned in the world. The size of
* the view cuboid, however, is determined by the projection matrix.
*
* @param direction
* - the light direction, and therefore the direction that the
* "view cuboid" should be pointing.
* @param center
* - the center of the "view cuboid" in world space.
*/
private void updateLightViewMatrix(Vector3f direction, Vector3f center) {
direction.normalise();
center.negate();
lightViewMatrix.setIdentity();
float pitch = (float) Math.acos(new Vector2f(direction.x, direction.z).length());
Matrix4f.rotate(pitch, new Vector3f(1, 0, 0), lightViewMatrix, lightViewMatrix);
float yaw = (float) Math.toDegrees(((float) Math.atan(direction.x / direction.z)));
yaw = direction.z > 0 ? yaw - 180 : yaw;
Matrix4f.rotate((float) -Math.toRadians(yaw), new Vector3f(0, 1, 0), lightViewMatrix,
lightViewMatrix);
Matrix4f.translate(center, lightViewMatrix, lightViewMatrix);
}
/**
* Creates the orthographic projection matrix. This projection matrix
* basically sets the width, length and height of the "view cuboid", based
* on the values that were calculated in the {@link ShadowBox} class.
*
* @param width
* - shadow box width.
* @param height
* - shadow box height.
* @param length
* - shadow box length.
*/
private void updateOrthoProjectionMatrix(float width, float height, float length) {
projectionMatrix.setIdentity();
projectionMatrix.m00 = 2f / width;
projectionMatrix.m11 = 2f / height;
projectionMatrix.m22 = -2f / length;
projectionMatrix.m33 = 1;
}
/**
* Create the offset for part of the conversion to shadow map space. This
* conversion is necessary to convert from one coordinate system to the
* coordinate system that we can use to sample to shadow map.
*
* @return The offset as a matrix (so that it's easy to apply to other matrices).
*/
private static Matrix4f createOffset() {
Matrix4f offset = new Matrix4f();
offset.translate(new Vector3f(0.5f, 0.5f, 0.5f));
offset.scale(new Vector3f(0.5f, 0.5f, 0.5f));
return offset;
}
}<|fim▁end|> | * Carries out the shadow render pass. This renders the entities to the
* shadow map. First the shadow box is updated to calculate the size and
|
<|file_name|>lv.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2011, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
/**
* @fileOverview Defines the {@link CKEDITOR.lang} object, for the
* Latvian language.
*/
/**#@+
@type String
@example
*/
/**
* Constains the dictionary of language entries.
* @namespace
*/
CKEDITOR.lang['lv'] =
{
/**
* The language reading direction. Possible values are "rtl" for
* Right-To-Left languages (like Arabic) and "ltr" for Left-To-Right
* languages (like English).
* @default 'ltr'
*/
dir : 'ltr',
/*
* Screenreader titles. Please note that screenreaders are not always capable
* of reading non-English words. So be careful while translating it.
*/
editorTitle : 'Rich text editor, %1, press ALT 0 for help.', // MISSING
// ARIA descriptions.
toolbars : 'Editor toolbars', // MISSING
editor : 'Rich Text Editor', // MISSING
// Toolbar buttons without dialogs.
source : 'HTML kods',
newPage : 'Jauna lapa',
save : 'Saglabāt',
preview : 'Pārskatīt',
cut : 'Izgriezt',
copy : 'Kopēt',
paste : 'Ievietot',
print : 'Drukāt',
underline : 'Apakšsvītra',
bold : 'Treknu šriftu',
italic : 'Slīprakstā',
selectAll : 'Iezīmēt visu',
removeFormat : 'Noņemt stilus',
strike : 'Pārsvītrots',
subscript : 'Zemrakstā',
superscript : 'Augšrakstā',
horizontalrule : 'Ievietot horizontālu Atdalītājsvītru',
pagebreak : 'Ievietot lapas pārtraukumu',
pagebreakAlt : 'Page Break', // MISSING
unlink : 'Noņemt hipersaiti',
undo : 'Atcelt',
redo : 'Atkārtot',
// Common messages and labels.
common :
{
browseServer : 'Skatīt servera saturu',
url : 'URL',
protocol : 'Protokols',
upload : 'Augšupielādēt',
uploadSubmit : 'Nosūtīt serverim',
image : 'Attēls',
flash : 'Flash',
form : 'Forma',
checkbox : 'Atzīmēšanas kastīte',
radio : 'Izvēles poga',
textField : 'Teksta rinda',
textarea : 'Teksta laukums',
hiddenField : 'Paslēpta teksta rinda',
button : 'Poga',
select : 'Iezīmēšanas lauks',
imageButton : 'Attēlpoga',
notSet : '<nav iestatīts>',
id : 'Id',
name : 'Nosaukums',
langDir : 'Valodas lasīšanas virziens',
langDirLtr : 'No kreisās uz labo (LTR)',
langDirRtl : 'No labās uz kreiso (RTL)',
langCode : 'Valodas kods',
longDescr : 'Gara apraksta Hipersaite',
cssClass : 'Stilu saraksta klases',
advisoryTitle : 'Konsultatīvs virsraksts',
cssStyle : 'Stils',
ok : 'Darīts!',
cancel : 'Atcelt',
close : 'Close', // MISSING
preview : 'Preview', // MISSING
generalTab : 'General', // MISSING
advancedTab : 'Izvērstais',
validateNumberFailed : 'This value is not a number.', // MISSING
confirmNewPage : 'Any unsaved changes to this content will be lost. Are you sure you want to load new page?', // MISSING
confirmCancel : 'Some of the options have been changed. Are you sure to close the dialog?', // MISSING
options : 'Options', // MISSING
target : 'Target', // MISSING
targetNew : 'New Window (_blank)', // MISSING
targetTop : 'Topmost Window (_top)', // MISSING
targetSelf : 'Same Window (_self)', // MISSING
targetParent : 'Parent Window (_parent)', // MISSING
langDirLTR : 'Left to Right (LTR)', // MISSING
langDirRTL : 'Right to Left (RTL)', // MISSING
styles : 'Style', // MISSING
cssClasses : 'Stylesheet Classes', // MISSING
width : 'Platums',
height : 'Augstums',
align : 'Nolīdzināt',
alignLeft : 'Pa kreisi',
alignRight : 'Pa labi',
alignCenter : 'Centrēti',
alignTop : 'Augšā',
alignMiddle : 'Vertikāli centrēts',
alignBottom : 'Apakšā',
invalidHeight : 'Height must be a number.', // MISSING
invalidWidth : 'Width must be a number.', // MISSING
invalidCssLength : 'Value specified for the "%1" field must be a positive number with or without a valid CSS measurement unit (px, %, in, cm, mm, em, ex, pt, or pc).', // MISSING
invalidHtmlLength : 'Value specified for the "%1" field must be a positive number with or without a valid HTML measurement unit (px or %).', // MISSING
// Put the voice-only part of the label in the span.
unavailable : '%1<span class="cke_accessibility">, unavailable</span>' // MISSING
},
contextmenu :
{
options : 'Context Menu Options' // MISSING
},
// Special char dialog.
specialChar :
{
toolbar : 'Ievietot speciālo simbolu',
title : 'Ievietot īpašu simbolu',
options : 'Special Character Options' // MISSING
},
// Link dialog.
link :
{
toolbar : 'Ievietot/Labot hipersaiti',
other : '<cits>',
menu : 'Labot hipersaiti',
title : 'Hipersaite',
info : 'Hipersaites informācija',
target : 'Mērķis',
upload : 'Augšupielādēt',
advanced : 'Izvērstais',
type : 'Hipersaites tips',
toUrl : 'URL', // MISSING
toAnchor : 'Iezīme šajā lapā',
toEmail : 'E-pasts',
targetFrame : '<ietvars>',
targetPopup : '<uznirstošā logā>',
targetFrameName : 'Mērķa ietvara nosaukums',
targetPopupName : 'Uznirstošā loga nosaukums',
popupFeatures : 'Uznirstošā loga nosaukums īpašības',
popupResizable : 'Resizable', // MISSING
popupStatusBar : 'Statusa josla',
popupLocationBar: 'Atrašanās vietas josla',
popupToolbar : 'Rīku josla',
popupMenuBar : 'Izvēlnes josla',
popupFullScreen : 'Pilnā ekrānā (IE)',
popupScrollBars : 'Ritjoslas',
popupDependent : 'Atkarīgs (Netscape)',
popupLeft : 'Kreisā koordināte',
popupTop : 'Augšējā koordināte',
id : 'Id', // MISSING
langDir : 'Valodas lasīšanas virziens',
langDirLTR : 'No kreisās uz labo (LTR)',
langDirRTL : 'No labās uz kreiso (RTL)',
acccessKey : 'Pieejas kods',
name : 'Nosaukums',
langCode : 'Valodas lasīšanas virziens',
tabIndex : 'Ciļņu indekss',
advisoryTitle : 'Konsultatīvs virsraksts',
advisoryContentType : 'Konsultatīvs satura tips',
cssClasses : 'Stilu saraksta klases',
charset : 'Pievienotā resursa kodu tabula',
styles : 'Stils',
rel : 'Relationship', // MISSING
selectAnchor : 'Izvēlēties iezīmi',
anchorName : 'Pēc iezīmes nosaukuma',
anchorId : 'Pēc elementa ID',
emailAddress : 'E-pasta adrese',
emailSubject : 'Ziņas tēma',
emailBody : 'Ziņas saturs',
noAnchors : '(Šajā dokumentā nav iezīmju)',
noUrl : 'Lūdzu norādi hipersaiti',
noEmail : 'Lūdzu norādi e-pasta adresi'
},
// Anchor dialog
anchor :
{
toolbar : 'Ievietot/Labot iezīmi',
menu : 'Iezīmes īpašības',
title : 'Iezīmes īpašības',
name : 'Iezīmes nosaukums',
errorName : 'Lūdzu norādiet iezīmes nosaukumu',
remove : 'Remove Anchor' // MISSING
},
// List style dialog
list:
{
numberedTitle : 'Numbered List Properties', // MISSING
bulletedTitle : 'Bulleted List Properties', // MISSING
type : 'Type', // MISSING
start : 'Start', // MISSING
validateStartNumber :'List start number must be a whole number.', // MISSING
circle : 'Circle', // MISSING
disc : 'Disc', // MISSING
square : 'Square', // MISSING
none : 'None', // MISSING
notset : '<not set>', // MISSING
armenian : 'Armenian numbering', // MISSING
georgian : 'Georgian numbering (an, ban, gan, etc.)', // MISSING
lowerRoman : 'Lower Roman (i, ii, iii, iv, v, etc.)', // MISSING
upperRoman : 'Upper Roman (I, II, III, IV, V, etc.)', // MISSING
lowerAlpha : 'Lower Alpha (a, b, c, d, e, etc.)', // MISSING
upperAlpha : 'Upper Alpha (A, B, C, D, E, etc.)', // MISSING
lowerGreek : 'Lower Greek (alpha, beta, gamma, etc.)', // MISSING
decimal : 'Decimal (1, 2, 3, etc.)', // MISSING
decimalLeadingZero : 'Decimal leading zero (01, 02, 03, etc.)' // MISSING
},
// Find And Replace Dialog
findAndReplace :
{
title : 'Find and Replace', // MISSING
find : 'Meklēt',
replace : 'Nomainīt',
findWhat : 'Meklēt:',
replaceWith : 'Nomainīt uz:',
notFoundMsg : 'Norādītā frāze netika atrasta.',
matchCase : 'Reģistrjūtīgs',
matchWord : 'Jāsakrīt pilnībā',
matchCyclic : 'Match cyclic', // MISSING
replaceAll : 'Aizvietot visu',
replaceSuccessMsg : '%1 occurrence(s) replaced.' // MISSING
},
// Table Dialog
table :
{
toolbar : 'Tabula',
title : 'Tabulas īpašības',
menu : 'Tabulas īpašības',
deleteTable : 'Dzēst tabulu',
rows : 'Rindas',
columns : 'Kolonnas',
border : 'Rāmja izmērs',
widthPx : 'pikseļos',
widthPc : 'procentuāli',
widthUnit : 'width unit', // MISSING
cellSpace : 'Rūtiņu atstatums',
cellPad : 'Rūtiņu nobīde',
caption : 'Leģenda',
summary : 'Anotācija',
headers : 'Headers', // MISSING
headersNone : 'None', // MISSING
headersColumn : 'First column', // MISSING
headersRow : 'First Row', // MISSING
headersBoth : 'Both', // MISSING
invalidRows : 'Number of rows must be a number greater than 0.', // MISSING
invalidCols : 'Number of columns must be a number greater than 0.', // MISSING
invalidBorder : 'Border size must be a number.', // MISSING
invalidWidth : 'Table width must be a number.', // MISSING
invalidHeight : 'Table height must be a number.', // MISSING
invalidCellSpacing : 'Cell spacing must be a positive number.', // MISSING
invalidCellPadding : 'Cell padding must be a positive number.', // MISSING
cell :
{
menu : 'Šūna',
insertBefore : 'Insert Cell Before', // MISSING
insertAfter : 'Insert Cell After', // MISSING
deleteCell : 'Dzēst rūtiņas',
merge : 'Apvienot rūtiņas',
mergeRight : 'Merge Right', // MISSING
mergeDown : 'Merge Down', // MISSING
splitHorizontal : 'Split Cell Horizontally', // MISSING
splitVertical : 'Split Cell Vertically', // MISSING
title : 'Cell Properties', // MISSING
cellType : 'Cell Type', // MISSING
rowSpan : 'Rows Span', // MISSING
colSpan : 'Columns Span', // MISSING
wordWrap : 'Word Wrap', // MISSING
hAlign : 'Horizontal Alignment', // MISSING
vAlign : 'Vertical Alignment', // MISSING
alignBaseline : 'Baseline', // MISSING
bgColor : 'Background Color', // MISSING
borderColor : 'Border Color', // MISSING
data : 'Data', // MISSING
header : 'Header', // MISSING
yes : 'Yes', // MISSING
no : 'No', // MISSING
invalidWidth : 'Cell width must be a number.', // MISSING
invalidHeight : 'Cell height must be a number.', // MISSING
invalidRowSpan : 'Rows span must be a whole number.', // MISSING
invalidColSpan : 'Columns span must be a whole number.', // MISSING
chooseColor : 'Choose' // MISSING
},
row :
{
menu : 'Rinda',
insertBefore : 'Insert Row Before', // MISSING
insertAfter : 'Insert Row After', // MISSING
deleteRow : 'Dzēst rindas'
},
column :
{
menu : 'Kolonna',
insertBefore : 'Insert Column Before', // MISSING
insertAfter : 'Insert Column After', // MISSING
deleteColumn : 'Dzēst kolonnas'
}
},
// Button Dialog.
button :
{
title : 'Pogas īpašības',
text : 'Teksts (vērtība)',
type : 'Tips',
typeBtn : 'Button', // MISSING
typeSbm : 'Submit', // MISSING
typeRst : 'Reset' // MISSING
},
// Checkbox and Radio Button Dialogs.
checkboxAndRadio :
{
checkboxTitle : 'Atzīmēšanas kastītes īpašības',
radioTitle : 'Izvēles poga īpašības',
value : 'Vērtība',
selected : 'Iezīmēts'
},
// Form Dialog.
form :
{
title : 'Formas īpašības',
menu : 'Formas īpašības',
action : 'Darbība',
method : 'Metode',
encoding : 'Encoding' // MISSING
},
// Select Field Dialog.
select :
{
title : 'Iezīmēšanas lauka īpašības',
selectInfo : 'Informācija',
opAvail : 'Pieejamās iespējas',
value : 'Vērtība',
size : 'Izmērs',
lines : 'rindas',
chkMulti : 'Atļaut vairākus iezīmējumus',
opText : 'Teksts',
opValue : 'Vērtība',
btnAdd : 'Pievienot',
btnModify : 'Veikt izmaiņas',
btnUp : 'Augšup',
btnDown : 'Lejup',
btnSetValue : 'Noteikt kā iezīmēto vērtību',
btnDelete : 'Dzēst'
},
<|fim▁hole|> title : 'Teksta laukuma īpašības',
cols : 'Kolonnas',
rows : 'Rindas'
},
// Text Field Dialog.
textfield :
{
title : 'Teksta rindas īpašības',
name : 'Nosaukums',
value : 'Vērtība',
charWidth : 'Simbolu platums',
maxChars : 'Simbolu maksimālais daudzums',
type : 'Tips',
typeText : 'Teksts',
typePass : 'Parole'
},
// Hidden Field Dialog.
hidden :
{
title : 'Paslēptās teksta rindas īpašības',
name : 'Nosaukums',
value : 'Vērtība'
},
// Image Dialog.
image :
{
title : 'Attēla īpašības',
titleButton : 'Attēlpogas īpašības',
menu : 'Attēla īpašības',
infoTab : 'Informācija par attēlu',
btnUpload : 'Nosūtīt serverim',
upload : 'Augšupielādēt',
alt : 'Alternatīvais teksts',
lockRatio : 'Nemainīga Augstuma/Platuma attiecība',
resetSize : 'Atjaunot sākotnējo izmēru',
border : 'Rāmis',
hSpace : 'Horizontālā telpa',
vSpace : 'Vertikālā telpa',
alertUrl : 'Lūdzu norādīt attēla hipersaiti',
linkTab : 'Hipersaite',
button2Img : 'Do you want to transform the selected image button on a simple image?', // MISSING
img2Button : 'Do you want to transform the selected image on a image button?', // MISSING
urlMissing : 'Image source URL is missing.', // MISSING
validateBorder : 'Border must be a whole number.', // MISSING
validateHSpace : 'HSpace must be a whole number.', // MISSING
validateVSpace : 'VSpace must be a whole number.' // MISSING
},
// Flash Dialog
flash :
{
properties : 'Flash īpašības',
propertiesTab : 'Properties', // MISSING
title : 'Flash īpašības',
chkPlay : 'Automātiska atskaņošana',
chkLoop : 'Nepārtraukti',
chkMenu : 'Atļaut Flash izvēlni',
chkFull : 'Allow Fullscreen', // MISSING
scale : 'Mainīt izmēru',
scaleAll : 'Rādīt visu',
scaleNoBorder : 'Bez rāmja',
scaleFit : 'Precīzs izmērs',
access : 'Script Access', // MISSING
accessAlways : 'Always', // MISSING
accessSameDomain: 'Same domain', // MISSING
accessNever : 'Never', // MISSING
alignAbsBottom : 'Absolūti apakšā',
alignAbsMiddle : 'Absolūti vertikāli centrēts',
alignBaseline : 'Pamatrindā',
alignTextTop : 'Teksta augšā',
quality : 'Quality', // MISSING
qualityBest : 'Best', // MISSING
qualityHigh : 'High', // MISSING
qualityAutoHigh : 'Auto High', // MISSING
qualityMedium : 'Medium', // MISSING
qualityAutoLow : 'Auto Low', // MISSING
qualityLow : 'Low', // MISSING
windowModeWindow: 'Window', // MISSING
windowModeOpaque: 'Opaque', // MISSING
windowModeTransparent : 'Transparent', // MISSING
windowMode : 'Window mode', // MISSING
flashvars : 'Variables for Flash', // MISSING
bgcolor : 'Fona krāsa',
hSpace : 'Horizontālā telpa',
vSpace : 'Vertikālā telpa',
validateSrc : 'Lūdzu norādi hipersaiti',
validateHSpace : 'HSpace must be a number.', // MISSING
validateVSpace : 'VSpace must be a number.' // MISSING
},
// Speller Pages Dialog
spellCheck :
{
toolbar : 'Pareizrakstības pārbaude',
title : 'Spell Check', // MISSING
notAvailable : 'Sorry, but service is unavailable now.', // MISSING
errorLoading : 'Error loading application service host: %s.', // MISSING
notInDic : 'Netika atrasts vārdnīcā',
changeTo : 'Nomainīt uz',
btnIgnore : 'Ignorēt',
btnIgnoreAll : 'Ignorēt visu',
btnReplace : 'Aizvietot',
btnReplaceAll : 'Aizvietot visu',
btnUndo : 'Atcelt',
noSuggestions : '- Nav ieteikumu -',
progress : 'Notiek pareizrakstības pārbaude...',
noMispell : 'Pareizrakstības pārbaude pabeigta: kļūdas netika atrastas',
noChanges : 'Pareizrakstības pārbaude pabeigta: nekas netika labots',
oneChange : 'Pareizrakstības pārbaude pabeigta: 1 vārds izmainīts',
manyChanges : 'Pareizrakstības pārbaude pabeigta: %1 vārdi tika mainīti',
ieSpellDownload : 'Pareizrakstības pārbaudītājs nav pievienots. Vai vēlaties to lejupielādēt tagad?'
},
smiley :
{
toolbar : 'Smaidiņi',
title : 'Ievietot smaidiņu',
options : 'Smiley Options' // MISSING
},
elementsPath :
{
eleLabel : 'Elements path', // MISSING
eleTitle : '%1 element' // MISSING
},
numberedlist : 'Numurēts saraksts',
bulletedlist : 'Izcelts saraksts',
indent : 'Palielināt atkāpi',
outdent : 'Samazināt atkāpi',
justify :
{
left : 'Izlīdzināt pa kreisi',
center : 'Izlīdzināt pret centru',
right : 'Izlīdzināt pa labi',
block : 'Izlīdzināt malas'
},
blockquote : 'Block Quote', // MISSING
clipboard :
{
title : 'Ievietot',
cutError : 'Jūsu pārlūkprogrammas drošības iestatījumi nepieļauj editoram automātiski veikt izgriešanas darbību. Lūdzu, izmantojiet (Ctrl/Cmd+X, lai veiktu šo darbību.',
copyError : 'Jūsu pārlūkprogrammas drošības iestatījumi nepieļauj editoram automātiski veikt kopēšanas darbību. Lūdzu, izmantojiet (Ctrl/Cmd+C), lai veiktu šo darbību.',
pasteMsg : 'Lūdzu, ievietojiet tekstu šajā laukumā, izmantojot klaviatūru (<STRONG>Ctrl/Cmd+V</STRONG>) un apstipriniet ar <STRONG>Darīts!</STRONG>.',
securityMsg : 'Because of your browser security settings, the editor is not able to access your clipboard data directly. You are required to paste it again in this window.', // MISSING
pasteArea : 'Paste Area' // MISSING
},
pastefromword :
{
confirmCleanup : 'The text you want to paste seems to be copied from Word. Do you want to clean it before pasting?', // MISSING
toolbar : 'Ievietot no Worda',
title : 'Ievietot no Worda',
error : 'It was not possible to clean up the pasted data due to an internal error' // MISSING
},
pasteText :
{
button : 'Ievietot kā vienkāršu tekstu',
title : 'Ievietot kā vienkāršu tekstu'
},
templates :
{
button : 'Sagataves',
title : 'Satura sagataves',
options : 'Template Options', // MISSING
insertOption : 'Replace actual contents', // MISSING
selectPromptMsg : 'Lūdzu, norādiet sagatavi, ko atvērt editorā<br>(patreizējie dati tiks zaudēti):',
emptyListMsg : '(Nav norādītas sagataves)'
},
showBlocks : 'Show Blocks', // MISSING
stylesCombo :
{
label : 'Stils',
panelTitle : 'Formatting Styles', // MISSING
panelTitle1 : 'Block Styles', // MISSING
panelTitle2 : 'Inline Styles', // MISSING
panelTitle3 : 'Object Styles' // MISSING
},
format :
{
label : 'Formāts',
panelTitle : 'Formāts',
tag_p : 'Normāls teksts',
tag_pre : 'Formatēts teksts',
tag_address : 'Adrese',
tag_h1 : 'Virsraksts 1',
tag_h2 : 'Virsraksts 2',
tag_h3 : 'Virsraksts 3',
tag_h4 : 'Virsraksts 4',
tag_h5 : 'Virsraksts 5',
tag_h6 : 'Virsraksts 6',
tag_div : 'Rindkopa (DIV)'
},
div :
{
title : 'Create Div Container', // MISSING
toolbar : 'Create Div Container', // MISSING
cssClassInputLabel : 'Stylesheet Classes', // MISSING
styleSelectLabel : 'Style', // MISSING
IdInputLabel : 'Id', // MISSING
languageCodeInputLabel : ' Language Code', // MISSING
inlineStyleInputLabel : 'Inline Style', // MISSING
advisoryTitleInputLabel : 'Advisory Title', // MISSING
langDirLabel : 'Language Direction', // MISSING
langDirLTRLabel : 'Left to Right (LTR)', // MISSING
langDirRTLLabel : 'Right to Left (RTL)', // MISSING
edit : 'Edit Div', // MISSING
remove : 'Remove Div' // MISSING
},
iframe :
{
title : 'IFrame Properties', // MISSING
toolbar : 'IFrame', // MISSING
noUrl : 'Please type the iframe URL', // MISSING
scrolling : 'Enable scrollbars', // MISSING
border : 'Show frame border' // MISSING
},
font :
{
label : 'Šrifts',
voiceLabel : 'Font', // MISSING
panelTitle : 'Šrifts'
},
fontSize :
{
label : 'Izmērs',
voiceLabel : 'Font Size', // MISSING
panelTitle : 'Izmērs'
},
colorButton :
{
textColorTitle : 'Teksta krāsa',
bgColorTitle : 'Fona krāsa',
panelTitle : 'Colors', // MISSING
auto : 'Automātiska',
more : 'Plašāka palete...'
},
colors :
{
'000' : 'Black', // MISSING
'800000' : 'Maroon', // MISSING
'8B4513' : 'Saddle Brown', // MISSING
'2F4F4F' : 'Dark Slate Gray', // MISSING
'008080' : 'Teal', // MISSING
'000080' : 'Navy', // MISSING
'4B0082' : 'Indigo', // MISSING
'696969' : 'Dark Gray', // MISSING
'B22222' : 'Fire Brick', // MISSING
'A52A2A' : 'Brown', // MISSING
'DAA520' : 'Golden Rod', // MISSING
'006400' : 'Dark Green', // MISSING
'40E0D0' : 'Turquoise', // MISSING
'0000CD' : 'Medium Blue', // MISSING
'800080' : 'Purple', // MISSING
'808080' : 'Gray', // MISSING
'F00' : 'Red', // MISSING
'FF8C00' : 'Dark Orange', // MISSING
'FFD700' : 'Gold', // MISSING
'008000' : 'Green', // MISSING
'0FF' : 'Cyan', // MISSING
'00F' : 'Blue', // MISSING
'EE82EE' : 'Violet', // MISSING
'A9A9A9' : 'Dim Gray', // MISSING
'FFA07A' : 'Light Salmon', // MISSING
'FFA500' : 'Orange', // MISSING
'FFFF00' : 'Yellow', // MISSING
'00FF00' : 'Lime', // MISSING
'AFEEEE' : 'Pale Turquoise', // MISSING
'ADD8E6' : 'Light Blue', // MISSING
'DDA0DD' : 'Plum', // MISSING
'D3D3D3' : 'Light Grey', // MISSING
'FFF0F5' : 'Lavender Blush', // MISSING
'FAEBD7' : 'Antique White', // MISSING
'FFFFE0' : 'Light Yellow', // MISSING
'F0FFF0' : 'Honeydew', // MISSING
'F0FFFF' : 'Azure', // MISSING
'F0F8FF' : 'Alice Blue', // MISSING
'E6E6FA' : 'Lavender', // MISSING
'FFF' : 'White' // MISSING
},
scayt :
{
title : 'Spell Check As You Type', // MISSING
opera_title : 'Not supported by Opera', // MISSING
enable : 'Enable SCAYT', // MISSING
disable : 'Disable SCAYT', // MISSING
about : 'About SCAYT', // MISSING
toggle : 'Toggle SCAYT', // MISSING
options : 'Options', // MISSING
langs : 'Languages', // MISSING
moreSuggestions : 'More suggestions', // MISSING
ignore : 'Ignore', // MISSING
ignoreAll : 'Ignore All', // MISSING
addWord : 'Add Word', // MISSING
emptyDic : 'Dictionary name should not be empty.', // MISSING
optionsTab : 'Options', // MISSING
allCaps : 'Ignore All-Caps Words', // MISSING
ignoreDomainNames : 'Ignore Domain Names', // MISSING
mixedCase : 'Ignore Words with Mixed Case', // MISSING
mixedWithDigits : 'Ignore Words with Numbers', // MISSING
languagesTab : 'Languages', // MISSING
dictionariesTab : 'Dictionaries', // MISSING
dic_field_name : 'Dictionary name', // MISSING
dic_create : 'Create', // MISSING
dic_restore : 'Restore', // MISSING
dic_delete : 'Delete', // MISSING
dic_rename : 'Rename', // MISSING
dic_info : 'Initially the User Dictionary is stored in a Cookie. However, Cookies are limited in size. When the User Dictionary grows to a point where it cannot be stored in a Cookie, then the dictionary may be stored on our server. To store your personal dictionary on our server you should specify a name for your dictionary. If you already have a stored dictionary, please type its name and click the Restore button.', // MISSING
aboutTab : 'About' // MISSING
},
about :
{
title : 'About CKEditor', // MISSING
dlgTitle : 'About CKEditor', // MISSING
help : 'Check $1 for help.', // MISSING
userGuide : 'CKEditor User\'s Guide', // MISSING
moreInfo : 'For licensing information please visit our web site:', // MISSING
copy : 'Copyright © $1. All rights reserved.' // MISSING
},
maximize : 'Maximize', // MISSING
minimize : 'Minimize', // MISSING
fakeobjects :
{
anchor : 'Anchor', // MISSING
flash : 'Flash Animation', // MISSING
iframe : 'IFrame', // MISSING
hiddenfield : 'Hidden Field', // MISSING
unknown : 'Unknown Object' // MISSING
},
resize : 'Drag to resize', // MISSING
colordialog :
{
title : 'Select color', // MISSING
options : 'Color Options', // MISSING
highlight : 'Highlight', // MISSING
selected : 'Selected Color', // MISSING
clear : 'Clear' // MISSING
},
toolbarCollapse : 'Collapse Toolbar', // MISSING
toolbarExpand : 'Expand Toolbar', // MISSING
toolbarGroups :
{
document : 'Document', // MISSING
clipboard : 'Clipboard/Undo', // MISSING
editing : 'Editing', // MISSING
forms : 'Forms', // MISSING
basicstyles : 'Basic Styles', // MISSING
paragraph : 'Paragraph', // MISSING
links : 'Links', // MISSING
insert : 'Insert', // MISSING
styles : 'Styles', // MISSING
colors : 'Colors', // MISSING
tools : 'Tools' // MISSING
},
bidi :
{
ltr : 'Text direction from left to right', // MISSING
rtl : 'Text direction from right to left' // MISSING
},
docprops :
{
label : 'Dokumenta īpašības',
title : 'Dokumenta īpašības',
design : 'Design', // MISSING
meta : 'META dati',
chooseColor : 'Choose', // MISSING
other : '<cits>',
docTitle : 'Dokumenta virsraksts <Title>',
charset : 'Simbolu kodējums',
charsetOther : 'Cits simbolu kodējums',
charsetASCII : 'ASCII', // MISSING
charsetCE : 'Central European', // MISSING
charsetCT : 'Chinese Traditional (Big5)', // MISSING
charsetCR : 'Cyrillic', // MISSING
charsetGR : 'Greek', // MISSING
charsetJP : 'Japanese', // MISSING
charsetKR : 'Korean', // MISSING
charsetTR : 'Turkish', // MISSING
charsetUN : 'Unicode (UTF-8)', // MISSING
charsetWE : 'Western European', // MISSING
docType : 'Dokumenta tips',
docTypeOther : 'Cits dokumenta tips',
xhtmlDec : 'Ietvert XHTML deklarācijas',
bgColor : 'Fona krāsa',
bgImage : 'Fona attēla hipersaite',
bgFixed : 'Fona attēls ir fiksēts',
txtColor : 'Teksta krāsa',
margin : 'Lapas robežas',
marginTop : 'Augšā',
marginLeft : 'Pa kreisi',
marginRight : 'Pa labi',
marginBottom : 'Apakšā',
metaKeywords : 'Dokumentu aprakstoši atslēgvārdi (atdalīti ar komatu)',
metaDescription : 'Dokumenta apraksts',
metaAuthor : 'Autors',
metaCopyright : 'Autortiesības',
previewHtml : '<p>This is some <strong>sample text</strong>. You are using <a href="javascript:void(0)">CKEditor</a>.</p>' // MISSING
}
};<|fim▁end|> | // Textarea Dialog.
textarea :
{
|
<|file_name|>test_student_batch_name.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
<|fim▁hole|><|fim▁end|> | # test_records = frappe.get_test_records('Student Batch Name')
class TestStudentBatchName(unittest.TestCase):
pass |
<|file_name|>extern-calling-convention-test.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:extern_calling_convention.rs
// pretty-expanded FIXME #23616
extern crate extern_calling_convention;
use extern_calling_convention::foo;
pub fn main() {
foo(1, 2, 3, 4);
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
const fs = require('fs'),
_ = require('lodash');
let config = require('./_default');
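// Start from the default config; environment-specific overrides are merged in below.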
const ENV_NAME = process.env.NODE_ENV || process.env.ENV;<|fim▁hole|>
// Merge with ENV file if it exists.
if (fs.existsSync(ENVIRONMENT_FILE)) {
const env = require(ENVIRONMENT_FILE);
config = _.mergeWith(config, env);
}
module.exports = config;<|fim▁end|> | const ENVIRONMENT_FILE = `${__dirname}/_${ENV_NAME}.js`; |
<|file_name|>lasdiff.py<|end_file_name|><|fim▁begin|>try:
import traceback
import argparse
import textwrap
import glob
import os
import logging<|fim▁hole|>except ImportError as err:
print('Error {0} import module: {1}'.format(__name__, err))
traceback.print_exc()
exit(128)
script_path = __file__
header = textwrap.dedent('''LAS Diff''')
class LasPyParameters:
def __init__(self):
# predefinied paths
self.parser = argparse.ArgumentParser(prog="lasdiff",
formatter_class=argparse.RawDescriptionHelpFormatter,
description='',
epilog=textwrap.dedent('''
example:
'''))
# reguired parameters
self.parser.add_argument('-i', type=str, dest='input', required=True,
help='required: input file or folder')
self.parser.add_argument('-o', type=str, dest='output', required=True,
help='required: output file or folder (d:\lasfiles\\tests\\results)')
# optional parameters
self.parser.add_argument('-input_format', type=str, dest='input_format', required=False, choices=['las', 'laz'],
help='optional: input format (default=las, laz is not implemented (yet))')
self.parser.add_argument('-cores', type=int, dest='cores', required=False, default=1,
help='optional: cores (default=1)')
self.parser.add_argument('-v', dest='verbose', required=False,
help='optional: verbose toggle (-v=on, nothing=off)', action='store_true')
self.parser.add_argument('-version', action='version', version=self.parser.prog)
def parse(self):
self.args = self.parser.parse_args()
##defaults
if self.args.verbose:
self.args.verbose = ' -v'
else:
self.args.verbose = ''
        if self.args.input_format is None:
            self.args.input_format = 'las'
        if self.args.cores is None:
            self.args.cores = 1
# ---------PUBLIC METHODS--------------------
def get_output(self):
return self.args.output
def get_input(self):
return self.args.input
def get_input_format(self):
return self.args.input_format
def get_verbose(self):
return self.args.verbose
def get_cores(self):
return self.args.cores
def DiffLas(parameters):
# Parse incoming parameters
source_file = parameters[0]
destination_file = parameters[1]
# Get name for this process
    current = multiprocessing.current_process()
proc_name = current.name
logging.info('[%s] Starting ...' % (proc_name))
logging.info(
'[%s] Creating diff of %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
# Opening source LAS files for read and write
lasFiles = LasPyConverter.LasPyCompare(source_file, destination_file)
# Opening destination LAS file
logging.info('[%s] Opening %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.OpenReanOnly()
logging.info('[%s] Comparing %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.ComparePointCloud()
logging.info('[%s] Closing %s LAS PointCloud.' % (proc_name, destination_file))
lasFiles.Close()
logging.info('[%s] %s LAS PointCloud has closed.' % (proc_name, destination_file))
return 0
def SetLogging(logfilename):
logging.basicConfig(
filename=logfilename,
filemode='w',
format='%(asctime)s %(name)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S',
level=logging.DEBUG)
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
def main():
logfilename = 'lasdiff_' + datetime.datetime.today().strftime('%Y%m%d_%H%M%S') + '.log'
SetLogging(logfilename)
logging.info(header)
lasconverterworkflow = LasPyParameters()
lasconverterworkflow.parse()
# File/Directory handler
inputfiles = lasconverterworkflow.get_input()
inputformat = lasconverterworkflow.get_input_format()
outputfiles = lasconverterworkflow.get_output()
outputpath = os.path.normpath(outputfiles)
cores = lasconverterworkflow.get_cores()
inputisdir = False
doing = []
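    # Build the list of (source, destination) LAS file pairs to compare.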
if os.path.isdir(inputfiles):
inputisdir = True
inputfiles = glob.glob(os.path.join(inputfiles, '*' + inputformat))
if not os.path.exists(outputfiles):
os.makedirs(outputfiles)
for workfile in inputfiles:
if os.path.isfile(workfile) and os.path.isfile(os.path.join(outputpath, os.path.basename(workfile))):
logging.info('Adding %s to the queue.' % (workfile))
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
else:
                logging.info('%s is not a file or a pair of comparable files. Skipping.' % (workfile))
elif os.path.isfile(inputfiles):
inputisdir = False
workfile = inputfiles
        if os.path.basename(outputfiles) != "":
doing.append([workfile, outputfiles])
else:
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
logging.info('Adding %s to the queue.' % (workfile))
else:
# Not a file, not a dir
logging.error('Cannot found input LAS PointCloud file: %s' % (inputfiles))
exit(1)
# If we got one file, start only one process
if inputisdir is False:
cores = 1
if cores != 1:
pool = multiprocessing.Pool(processes=cores)
results = pool.map_async(DiffLas, doing)
pool.close()
pool.join()
else:
for d in doing:
DiffLas(d)
logging.info('Finished, exiting and go home ...')
if __name__ == '__main__':
main()<|fim▁end|> | import datetime
import multiprocessing
from libs import LasPyConverter |
<|file_name|>subnet-cal.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
<|fim▁hole|>#my_subnet = input("Enter Subnet address:")
my_ip = "192.168.1.100"
my_subnet = "255.255.255.0"
ip = my_ip.split(".")
print ip
# Check the validity of the ip address
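# NOTE: my_ip is hard-coded above, so an invalid address would loop forever here;
# re-enabling the commented input() prompt is assumed for interactive use.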
while True:
#my_ip = input("Enter a ip address:")
if int(ip[0]) <= 223 and (int(ip[1]) != 169 or int(ip[2]) != 254) and (int(ip[1]) <= 254 and int(ip[2]) <= 254 and int(ip[3]) <= 254):
print "You entered a valid ip"
break
else:
print "You entered a wrong ip"
continue
bin_ip = ""
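# Convert each octet to an 8-bit, zero-padded binary string and concatenate.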
for a in ip:
print bin(int(a)).split("b")[1].zfill(8)
bin_ip += bin(int(a)).split("b")[1].zfill(8)
print bin_ip
print type(bin_ip)<|fim▁end|> | |
<|file_name|>Capsule.py<|end_file_name|><|fim▁begin|>## @file
# generate capsule
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
from __future__ import absolute_import
from .GenFdsGlobalVariable import GenFdsGlobalVariable, FindExtendTool
from CommonDataClass.FdfClass import CapsuleClassObject
import Common.LongFilePathOs as os
from io import BytesIO
from Common.Misc import SaveFileOnChange, PackRegistryFormatGuid
import uuid
from struct import pack
from Common import EdkLogger
from Common.BuildToolError import GENFDS_ERROR
from Common.DataType import TAB_LINE_BREAK
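# WIN_CERTIFICATE revision/type values and certificate-type GUIDs as defined
# by the UEFI specification.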
WIN_CERT_REVISION = 0x0200
WIN_CERT_TYPE_EFI_GUID = 0x0EF1
EFI_CERT_TYPE_PKCS7_GUID = uuid.UUID('{4aafd29d-68df-49ee-8aa9-347d375665a7}')
EFI_CERT_TYPE_RSA2048_SHA256_GUID = uuid.UUID('{a7717414-c616-4977-9420-844712a735bf}')
## create inf file describes what goes into capsule and call GenFv to generate capsule
#
#
class Capsule (CapsuleClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
CapsuleClassObject.__init__(self)
# For GenFv
self.BlockSize = None
# For GenFv
self.BlockNum = None
self.CapsuleName = None
## Generate FMP capsule
#
# @retval string Generated Capsule file path
#
def GenFmpCapsule(self):
#
# Generate capsule header
# typedef struct {
# EFI_GUID CapsuleGuid;
# UINT32 HeaderSize;
# UINT32 Flags;
# UINT32 CapsuleImageSize;
# } EFI_CAPSULE_HEADER;
#
Header = BytesIO()
#
# Use FMP capsule GUID: 6DCBD5ED-E82D-4C44-BDA1-7194199AD92A
#
Header.write(PackRegistryFormatGuid('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A'))
HdrSize = 0
if 'CAPSULE_HEADER_SIZE' in self.TokensDict:
Header.write(pack('=I', int(self.TokensDict['CAPSULE_HEADER_SIZE'], 16)))
HdrSize = int(self.TokensDict['CAPSULE_HEADER_SIZE'], 16)
else:
Header.write(pack('=I', 0x20))
HdrSize = 0x20
Flags = 0
if 'CAPSULE_FLAGS' in self.TokensDict:
for flag in self.TokensDict['CAPSULE_FLAGS'].split(','):
flag = flag.strip()
if flag == 'PopulateSystemTable':
Flags |= 0x00010000 | 0x00020000
elif flag == 'PersistAcrossReset':
Flags |= 0x00010000
elif flag == 'InitiateReset':
Flags |= 0x00040000
Header.write(pack('=I', Flags))
#
# typedef struct {
# UINT32 Version;
# UINT16 EmbeddedDriverCount;
# UINT16 PayloadItemCount;
# // UINT64 ItemOffsetList[];
# } EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER;
#
FwMgrHdr = BytesIO()
if 'CAPSULE_HEADER_INIT_VERSION' in self.TokensDict:
FwMgrHdr.write(pack('=I', int(self.TokensDict['CAPSULE_HEADER_INIT_VERSION'], 16)))
else:
FwMgrHdr.write(pack('=I', 0x00000001))
FwMgrHdr.write(pack('=HH', len(self.CapsuleDataList), len(self.FmpPayloadList)))
FwMgrHdrSize = 4+2+2+8*(len(self.CapsuleDataList)+len(self.FmpPayloadList))
#
# typedef struct _WIN_CERTIFICATE {
# UINT32 dwLength;
# UINT16 wRevision;
# UINT16 wCertificateType;
# //UINT8 bCertificate[ANYSIZE_ARRAY];
# } WIN_CERTIFICATE;
#
# typedef struct _WIN_CERTIFICATE_UEFI_GUID {
# WIN_CERTIFICATE Hdr;
# EFI_GUID CertType;
# //UINT8 CertData[ANYSIZE_ARRAY];
# } WIN_CERTIFICATE_UEFI_GUID;
#
# typedef struct {
# UINT64 MonotonicCount;
# WIN_CERTIFICATE_UEFI_GUID AuthInfo;
# } EFI_FIRMWARE_IMAGE_AUTHENTICATION;
#
# typedef struct _EFI_CERT_BLOCK_RSA_2048_SHA256 {
# EFI_GUID HashType;
# UINT8 PublicKey[256];
# UINT8 Signature[256];
# } EFI_CERT_BLOCK_RSA_2048_SHA256;
#
PreSize = FwMgrHdrSize
Content = BytesIO()
<|fim▁hole|> File = open(FileName, 'rb')
Content.write(File.read())
File.close()
for fmp in self.FmpPayloadList:
if fmp.Existed:
FwMgrHdr.write(pack('=Q', PreSize))
PreSize += len(fmp.Buffer)
Content.write(fmp.Buffer)
continue
if fmp.ImageFile:
for Obj in fmp.ImageFile:
fmp.ImageFile = Obj.GenCapsuleSubItem()
if fmp.VendorCodeFile:
for Obj in fmp.VendorCodeFile:
fmp.VendorCodeFile = Obj.GenCapsuleSubItem()
if fmp.Certificate_Guid:
ExternalTool, ExternalOption = FindExtendTool([], GenFdsGlobalVariable.ArchList, fmp.Certificate_Guid)
CmdOption = ''
CapInputFile = fmp.ImageFile
if not os.path.isabs(fmp.ImageFile):
CapInputFile = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, fmp.ImageFile)
CapOutputTmp = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.tmp'
if ExternalTool is None:
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % fmp.Certificate_Guid)
else:
CmdOption += ExternalTool
if ExternalOption:
CmdOption = CmdOption + ' ' + ExternalOption
CmdOption += ' -e ' + ' --monotonic-count ' + str(fmp.MonotonicCount) + ' -o ' + CapOutputTmp + ' ' + CapInputFile
CmdList = CmdOption.split()
GenFdsGlobalVariable.CallExternalTool(CmdList, "Failed to generate FMP auth capsule")
if uuid.UUID(fmp.Certificate_Guid) == EFI_CERT_TYPE_PKCS7_GUID:
dwLength = 4 + 2 + 2 + 16 + os.path.getsize(CapOutputTmp) - os.path.getsize(CapInputFile)
else:
dwLength = 4 + 2 + 2 + 16 + 16 + 256 + 256
fmp.ImageFile = CapOutputTmp
AuthData = [fmp.MonotonicCount, dwLength, WIN_CERT_REVISION, WIN_CERT_TYPE_EFI_GUID, fmp.Certificate_Guid]
fmp.Buffer = fmp.GenCapsuleSubItem(AuthData)
else:
fmp.Buffer = fmp.GenCapsuleSubItem()
FwMgrHdr.write(pack('=Q', PreSize))
PreSize += len(fmp.Buffer)
Content.write(fmp.Buffer)
BodySize = len(FwMgrHdr.getvalue()) + len(Content.getvalue())
Header.write(pack('=I', HdrSize + BodySize))
        #
        # Pad out to the declared HeaderSize; the EFI_CAPSULE_HEADER structure
        # itself is 28 bytes (16-byte GUID plus three UINT32 fields).
        #
        Header.write(b'\x00' * (HdrSize - 28))
Header.write(FwMgrHdr.getvalue())
Header.write(Content.getvalue())
#
# Generate FMP capsule file
#
CapOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.Cap'
SaveFileOnChange(CapOutputFile, Header.getvalue(), True)
return CapOutputFile
## Generate capsule
#
# @param self The object pointer
# @retval string Generated Capsule file path
#
def GenCapsule(self):
if self.UiCapsuleName.upper() + 'cap' in GenFdsGlobalVariable.ImageBinDict:
return GenFdsGlobalVariable.ImageBinDict[self.UiCapsuleName.upper() + 'cap']
GenFdsGlobalVariable.InfLogger( "\nGenerate %s Capsule" %self.UiCapsuleName)
if ('CAPSULE_GUID' in self.TokensDict and
uuid.UUID(self.TokensDict['CAPSULE_GUID']) == uuid.UUID('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A')):
return self.GenFmpCapsule()
CapInfFile = self.GenCapInf()
CapInfFile.writelines("[files]" + TAB_LINE_BREAK)
CapFileList = []
for CapsuleDataObj in self.CapsuleDataList:
CapsuleDataObj.CapsuleName = self.CapsuleName
FileName = CapsuleDataObj.GenCapsuleSubItem()
CapsuleDataObj.CapsuleName = None
CapFileList.append(FileName)
CapInfFile.writelines("EFI_FILE_NAME = " + \
FileName + \
TAB_LINE_BREAK)
SaveFileOnChange(self.CapInfFileName, CapInfFile.getvalue(), False)
CapInfFile.close()
#
# Call GenFv tool to generate capsule
#
CapOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName)
CapOutputFile = CapOutputFile + '.Cap'
GenFdsGlobalVariable.GenerateFirmwareVolume(
CapOutputFile,
[self.CapInfFileName],
Capsule=True,
FfsList=CapFileList
)
GenFdsGlobalVariable.VerboseLogger( "\nGenerate %s Capsule Successfully" %self.UiCapsuleName)
GenFdsGlobalVariable.SharpCounter = 0
GenFdsGlobalVariable.ImageBinDict[self.UiCapsuleName.upper() + 'cap'] = CapOutputFile
return CapOutputFile
## Generate inf file for capsule
#
# @param self The object pointer
# @retval file inf file object
#
def GenCapInf(self):
self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
self.UiCapsuleName + "_Cap" + '.inf')
CapInfFile = BytesIO() #open (self.CapInfFileName , 'w+')
CapInfFile.writelines("[options]" + TAB_LINE_BREAK)
for Item in self.TokensDict:
CapInfFile.writelines("EFI_" + \
Item + \
' = ' + \
self.TokensDict[Item] + \
TAB_LINE_BREAK)
return CapInfFile<|fim▁end|> | for driver in self.CapsuleDataList:
FileName = driver.GenCapsuleSubItem()
FwMgrHdr.write(pack('=Q', PreSize))
PreSize += os.path.getsize(FileName)
|
<|file_name|>project.config.ts<|end_file_name|><|fim▁begin|>import { join } from 'path';
import { SeedConfig } from './seed.config';
import { ExtendPackages } from './seed.config.interfaces';
/**
* This class extends the basic seed configuration, allowing for project specific overrides. A few examples can be found
* below.
*/<|fim▁hole|>
PROJECT_TASKS_DIR = join(process.cwd(), this.TOOLS_DIR, 'tasks', 'project');
constructor() {
super();
// this.APP_TITLE = 'Put name of your app here';
/* Enable typeless compiler runs (faster) between typed compiler runs. */
// this.TYPED_COMPILE_INTERVAL = 5;
// Add `NPM` third-party libraries to be injected/bundled.
this.NPM_DEPENDENCIES = [
...this.NPM_DEPENDENCIES,
// {src: 'jquery/dist/jquery.min.js', inject: 'libs'},
// {src: 'lodash/lodash.min.js', inject: 'libs'},
{ src: 'bootstrap/dist/js/bootstrap.min.js', inject: 'libs' },
{ src: 'bootstrap/dist/css/bootstrap.min.css', inject: true }, // inject into css section
{ src: 'bootstrap/dist/css/bootstrap-theme.min.css', inject: true }, // inject into css section
{ src: 'bootstrap/dist/css/bootstrap-theme.min.css.map', inject: true }, // inject into css section
];
// Add `local` third-party libraries to be injected/bundled.
this.APP_ASSETS = [
...this.APP_ASSETS,
{ src: `${this.CSS_SRC}/theme.min.css`, inject: true, vendor: false },
{ src: `${this.CSS_SRC}/main.css`, inject: true, vendor: false },
{ src: `${this.CSS_SRC}/custom.css`, inject: true, vendor: false }
// {src: `${this.APP_SRC}/your-path-to-lib/libs/jquery-ui.js`, inject: true, vendor: false}
// {src: `${this.CSS_SRC}/path-to-lib/test-lib.css`, inject: true, vendor: false},
];
// Add packages (e.g. ng2-translate)
// let additionalPackages: ExtendPackages[] = [{
// name: 'ng2-translate',
// // Path to the package's bundle
// path: 'node_modules/ng2-translate/bundles/ng2-translate.umd.js'
// }];
//
// this.addPackagesBundles(additionalPackages);
/* Add to or override NPM module configurations: */
// this.mergeObject(this.PLUGIN_CONFIGS['browser-sync'], { ghostMode: false });
let additionalPackages: ExtendPackages[] = [
// required for dev build
{
name:'ng2-bootstrap',
path:'node_modules/ng2-bootstrap/bundles/ng2-bootstrap.umd.min.js'
},
// required for prod build
{
name:'ng2-bootstrap/*',
path:'node_modules/ng2-bootstrap/bundles/ng2-bootstrap.umd.min.js'
},
// mandatory dependency for ng2-bootstrap datepicker
{
name:'moment',
path:'node_modules/moment',
packageMeta:{
main: 'moment.js',
defaultExtension: 'js'
}
}
];
this.addPackagesBundles(additionalPackages);
}
}<|fim▁end|> | export class ProjectConfig extends SeedConfig { |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![cfg_attr(feature = "benchmark", feature(test))]
//! The module `seq` provides the lightweight, generic sequence container [`Seq`] for unmovable data.
//!
//! The container `Seq` is linking data of hierarchical function-scopes on top of each other,
//! forming sequences. A sequence can be embedded into the program during compile time.
//!
//! Initially a sequence is empty. A longer sequence is constructed (see [`ConsRef`]) attaching a
//! new _head_ to the existing sequence, representing the _tail_. The _head_ element has a shorter
//! lifetime, than all elements of the _tail_.
//!
//! Multiple sequences may share the same _tail_, permitting memory-efficient organisation of
//! hierarchical data.
//!
//! The associated methods [`head`] and [`tail`] have been defined for convenience reasons only.
//! The construction and deconstruction of a sequence is realized by the algebraic data-types of Rust
//! solely.
//!
//! Put this in your Cargo.toml:
//! ```toml
//! ## Cargo.toml file
//! [dependencies]
//! seq = "0.5"
//! ```
//! The "default" usage of this type as a queue is to use [`Empty`] or [`ConsRef`] to construct a
//! queue, and [`head`] and [`tail`] to deconstruct a queue into head and remaining
//! tail of a sequence.
//!
//! # Examples<|fim▁hole|>//! Constructing two sequences seq1 as `[1,0]` and seq2 as `[2,1,0]`, sharing data with `seq1`
//! ```rust
//! use seq::Seq;
//!
//! // constructing the sequence 'seq1'
//! const seq1: Seq<i32> = Seq::ConsRef(1, &Seq::ConsRef(0, &Seq::Empty));
//!
//! // construction the sequence 'seq2' sharing data with 'seq1'
//! const seq2: Seq<i32> = Seq::ConsRef(2, &seq1);
//! ```
//! Deconstructing a sequence
//! ```rust
//! use seq::Seq;
//!
//! fn print_head<'a>(seq: &'a Seq<i32>) {
//! println!("head {}", seq.head().unwrap());
//! }
//! ```
//! Extend an existing sequence. Note the lifetime of the return type matches the one of the tail.
//! ```rust
//! use seq::Seq;
//!
//! fn extend<'a>(head: i32, tail: &'a Seq<i32>) -> Seq<'a, i32> {
//! return Seq::ConsRef(head, tail);
//! }
//! ```
//! Extend an existing sequence with dynamic element residing in heap-memory
//! ```rust
//! use seq::Seq;
//!
//! fn extend_boxed<'a>(head: i32, tail: &'a Seq<i32>) -> Box<Seq<'a, i32>> {
//! return Box::new(Seq::ConsRef(head, tail));
//! }
//! ```
//! Iterate a sequence
//! ```rust
//! use seq::Seq;
//!
//! fn sum_up(seq: &Seq<i32>) -> i32 {
//! return seq.into_iter().fold(0, |x, y| x + y);
//! }
//! ```
//! [`Empty`]: enum.Seq.html#variant.Empty
//! [`ConsRef`]: enum.Seq.html#variant.ConsRef
//! [`tail`]: #method.tail
//! [`head`]: #method.head
//! [`Seq`]: enum.Seq.html
use std::fmt;
use std::iter::Iterator;
/// A single-ended, growable, unmovable queue of data, linking constant data with dynamic data.
///
/// The "default" usage of this type as a queue is to use [`Empty`] or [`ConsRef`] to construct a
/// queue, and [`head`] and [`tail`] to deconstruct a queue into head and remaining
/// tail of a sequence.
///
/// # Examples
///
/// Constructing two sequences seq1 as `[1,0]` and seq2 as `[2,1,0]`, sharing data with `seq1`
/// ```rust
/// use seq::Seq;
/// // constructing the sequence 'seq1'
/// const seq1: Seq<i32> = Seq::ConsRef(1, &Seq::ConsRef(0, &Seq::Empty));
///
/// // construction the sequence 'seq2' sharing data with 'seq1'
/// const seq2: Seq<i32> = Seq::ConsRef(2, &seq1);
/// ```
/// Deconstructing a sequence into the [`head`] and [`tail`]
/// ```rust
/// use seq::Seq;
///
/// fn deconstruct<'a>(seq: &'a Seq<i32>) {
/// let head = seq.head().unwrap();
/// let tail = seq.tail().unwrap();
/// // more code here
/// // ...
/// }
/// ```
/// Extend an existing sequence. Note the lifetime of the return type matches the one of the tail.
/// ```rust
/// use seq::Seq;
///
/// fn extend<'a>(head: i32, tail: &'a Seq<i32>) -> Seq<'a, i32> {
/// return Seq::ConsRef(head, tail);
/// }
/// ```
/// Extend an existing sequence with dynamic element residing in heap-memory
/// ```rust
/// use seq::Seq;
///
/// fn extend_boxed<'a>(head: i32, tail: &'a Seq<i32>) -> Box<Seq<'a, i32>> {
/// return Box::new(Seq::ConsRef(head, tail));
/// }
/// ```
/// Iterate a sequence
/// ```rust
/// use seq::Seq;
///
/// fn sum_up(seq: &Seq<i32>) -> i32 {
/// return seq.into_iter().fold(0, |x, y| x + y);
/// }
/// ```
/// [`Empty`]: enum.Seq.html#variant.Empty
/// [`ConsRef`]: enum.Seq.html#variant.ConsRef
/// [`tail`]: #method.tail
/// [`head`]: #method.head
#[derive(Clone)]
pub enum Seq<'a, T: 'a> {
/// The empty sequence
Empty,
/// Constructing a sequence with head data and reference to a tail
ConsRef(T, &'a Seq<'a, T>),
/// Constructing a sequence with head data and reference to boxed tail
#[cfg(not(feature = "lite-seq"))]
ConsOwn(T, Box<Seq<'a, T>>),
}
/// Seq method implementations
impl<'a, T: 'a> Seq<'a, T> {
/// Returns a reference to the head-element
pub fn head(&'a self) -> Option<&'a T> {
match self {
&Seq::Empty => Option::None,
&Seq::ConsRef(ref ft1, _) => Option::Some(&*ft1),
#[cfg(not(feature = "lite-seq"))]
&Seq::ConsOwn(ref ft1, _) => Option::Some(&*ft1),
}
}
    /// Returns a reference to the tail
pub fn tail(&'a self) -> Option<&'a Seq<T>> {
match self {
&Seq::Empty => Option::None,
&Seq::ConsRef(_, ref rt1) => Option::Some(*rt1),
#[cfg(not(feature = "lite-seq"))]
&Seq::ConsOwn(_, ref rt1) => Option::Some(&**rt1),
}
}
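    /// Returns the number of elements in the sequence.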
pub fn len(&'a self) -> usize {
match self {
&Seq::Empty => 0,
&Seq::ConsRef(_, ref rt1) => 1 + rt1.len(),
#[cfg(not(feature = "lite-seq"))]
&Seq::ConsOwn(_, ref rt1) => 1 + rt1.len(),
}
}
}
/// The seqdef! macro defines a stack-allocated sequence variable for the specified data list,
/// the last data item in the list will be the top most in the sequence.
///
/// Example 1) Creating a seq variable s where 2 is the top most data item
/// `seqdef!(s; empty() => 0, 1, 2);`
///
/// Example 2) Creating a seq variable t without explicit empty(). Seq is identical to `s`.
/// `seqdef!(t; 0, 1, 2);`
///
/// Example 3) Creating a seq variable u, using Seq `s` as tail of example 1.
/// `seqdef!(u; &s => 3, 4, 5);`
#[macro_export]
macro_rules! seqdef {
($id:ident; $($ftx:expr),* ) => {
let $id = $crate::Seq::Empty;
$(
let $id = $crate::Seq::ConsRef( $ftx, & $id );
)*
};
($id:ident; $rt:expr => $ft:expr ) => {
let $id = $crate::Seq::ConsRef( $ft, $rt );
};
($id:ident; $rt:expr => $ft0:expr, $($ftx:expr),* ) => {
let $id = $crate::Seq::ConsRef( $ft0, $rt );
$(
let $id = $crate::Seq::ConsRef( $ftx, & $id );
)*
};
}
/// Function returns static reference to empty list
pub fn empty<T>() -> &'static Seq<'static, T> { &Seq::Empty }
/// By default a sequence is empty
impl<'a, T> Default for Seq<'a, T> {
fn default() -> Seq<'a, T> { Seq::Empty }
}
/// Two sequences of type T are equal in case of identical length and sequence of equal data elements.
impl<'a, T: PartialEq> PartialEq for Seq<'a, T> {
fn eq(&self, other: &Seq<'a, T>) -> bool {
match (self, other) {
(&Seq::Empty, &Seq::Empty) => true,
(&Seq::ConsRef(ref ft1, ref rt1), &Seq::ConsRef(ref ft2, ref rt2))
=> ft1 == ft2 && rt1 == rt2,
#[cfg(not(feature = "lite-seq"))]
(&Seq::ConsRef(ref ft1, ref rt1), &Seq::ConsOwn(ref ft2, ref rt2))
=> ft1 == ft2 && *rt1 == &**rt2,
#[cfg(not(feature = "lite-seq"))]
(&Seq::ConsOwn(ref ft1, ref rt1), &Seq::ConsRef(ref ft2, ref rt2))
=> ft1 == ft2 && &**rt1 == *rt2,
#[cfg(not(feature = "lite-seq"))]
(&Seq::ConsOwn(ref ft1, ref rt1), &Seq::ConsOwn(ref ft2, ref rt2))
=> ft1 == ft2 && rt1 == rt2,
_ => false,
}
}
}
/// Debug format of a sequence prints the head element only
impl<'a, T: fmt::Debug> fmt::Debug for Seq<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Seq::Empty => write!(f, "<>"),
&Seq::ConsRef(ref ft, _) => write!(f, "<{:?},...>", ft),
#[cfg(not(feature = "lite-seq"))]
&Seq::ConsOwn(ref ft, _) => write!(f, "<{:?},...>", ft),
}
}
}
/// A sequence implements the `IntoIterator` trait
///
/// # Example
/// ```rust
/// use seq::Seq;
///
/// fn sum_up(seq: &Seq<i32>) -> i32 {
/// return seq.into_iter().fold(0, |x, y| x + y);
/// }
/// ```
impl<'a, T: 'a> IntoIterator for &'a Seq<'a, T> {
type Item = &'a T;
type IntoIter = SeqIterator<'a, T>;
fn into_iter(self) -> Self::IntoIter {
SeqIterator { cur: &self }
}
}
/// The sequence iterator representation
pub struct SeqIterator<'a, T: 'a> {
cur: &'a Seq<'a, T>,
}
/// The sequence iterator behavior implementation
impl<'a, T: 'a> Iterator for SeqIterator<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
match self.cur {
&Seq::Empty => Option::None,
&Seq::ConsRef(ref ft, ref rt) => {
self.cur = &*rt;
Option::Some(&*ft)
}
#[cfg(not(feature = "lite-seq"))]
&Seq::ConsOwn(ref ft, ref rt) => {
self.cur = &**rt; // deref boxed rest
Option::Some(&*ft)
}
}
}
}
#[cfg(test)]
mod tests {
use super::Seq;
#[cfg(not(feature = "lite-seq"))]
use super::SeqIterator;
use super::empty;
#[cfg(not(feature = "lite-seq"))]
use std::ops;
struct MyData(&'static str);
// this static ring has 4 elements only
static CYC_A: Seq<MyData> = Seq::ConsRef(MyData("Forever"), &CYC_D); // len()==7
    static CYC_B: Seq<MyData> = Seq::ConsRef(MyData("Round"), &CYC_A); // len()==5
    static CYC_C: Seq<MyData> = Seq::ConsRef(MyData("And"), &CYC_B); // len()==3
    static CYC_D: Seq<MyData> = Seq::ConsRef(MyData("Round"), &CYC_C); // len()==5
#[test]
fn test_cyclic() {
// take first 12 elements from cyclic ring and count the characters
        let sum = CYC_A.into_iter().take(3 * 4).fold(0, |x, y| x + y.0.len());
assert_eq!(3*20, sum);
}
#[test]
fn test_consref() {
let s = Seq::ConsRef(1, &Seq::ConsRef(0, &Seq::Empty));
assert_ne!(&s, empty());
}
fn recurs(val: u32, max: u32, base: &Seq<u32>) {
let ext = Seq::ConsRef(val, base);
if val < max {
recurs(val + 1, max, &ext);
}
}
#[test]
fn test_empty() {
let s0: &Seq<u32> = empty();
let s1 = Seq::ConsRef(1u32, s0);
assert_eq!(s0.len(), 0);
assert_eq!(s1.len(), 1);
assert_eq!(s0, empty());
assert_ne!(&s1, empty());
}
#[cfg(not(feature = "lite-seq"))]
#[test]
fn test_shared() {
let s0: &Seq<u32> = empty();
// shared sequence with head s2: <2,1>
let s1 = Seq::ConsRef(1u32, s0);
let s2 = Seq::ConsRef(2u32, &s1);
// t-branch prepending elements to s2: <4,3,2,1>
let t3 = Seq::ConsRef(3u32, &s2);
let t4 = Seq::ConsRef(4u32, &t3);
// r-branch prepending elements to s2: <4,3,2,1>
let r3 = Seq::ConsRef(3u32, &s2);
let r4 = Seq::ConsRef(4u32, &r3);
// z-branch prepending elements to s2: <33,2,1>
let z = Seq::ConsRef(33u32, &s2);
assert_eq!(s0.len(), 0);
assert_eq!(s1.len(), 1);
assert_eq!(s2.len(), 2);
assert_eq!(s0, empty());
assert_eq!(s1, s1);
assert_eq!(s2, s2);
assert_eq!(t3, r3);
assert_eq!(t4, r4);
// z-branch must not equal to t3 or t4
assert_ne!(z, t3);
assert_ne!(z, t4);
}
#[test]
fn test_match() {
let s0: &Seq<u32> = empty();
let s1 = Seq::ConsRef(1u32, s0);
let s2 = Seq::ConsRef(2u32, &s1);
match &s2 {
&Seq::Empty => assert!(false, "seq was not empty!"),
&Seq::ConsRef(h, ref tail) => {
let t: &Seq<u32> = &*tail;
assert_eq!(h, 2u32);
match t {
&Seq::Empty => assert!(false, "seq was not empty!"),
&Seq::ConsRef(h2, _) => {
assert_eq!(h2, 1u32);
}
#[cfg(not(feature = "lite-seq"))]
_ => assert!(false, "seq was not owned!"),
}
}
#[cfg(not(feature = "lite-seq"))]
_ => assert!(false, "seq was not owned!"),
}
println!("seq: {:?}", &s2);
}
#[test]
fn test_printformat() {
let s0: &Seq<u32> = empty();
let s1 = Seq::ConsRef(1u32, s0);
println!("seq: {:?}, {:?}", s0, &s1);
}
#[test]
fn test_recursion() {
recurs(0, 9, empty());
}
#[cfg(not(feature = "lite-seq"))]
fn prepend_boxed<'a>(start: u32, seq: &'a Seq<u32>) -> Box<Seq<'a, u32>> {
Box::new(
Seq::ConsOwn(
start + 3,
Box::new(
Seq::ConsOwn(
start + 2,
Box::new(
Seq::ConsOwn(
start + 1,
Box::new(
Seq::ConsRef(
start,
seq))))))))
}
#[cfg(not(feature = "lite-seq"))]
#[test]
fn test_box() {
let s0: &Seq<u32> = empty();
let s1: Box<Seq<u32>> = prepend_boxed(1, s0);
assert_eq!(s0, empty());
assert_ne!(&*s1, empty());
}
#[derive(PartialEq, PartialOrd, Debug)]
struct Data([u32; 8]);
#[cfg(not(feature = "lite-seq"))]
#[test]
fn test_box_struct() {
let s0: &Seq<Data> = empty();
let s1: Seq<Data> = Seq::ConsRef(Data([0; 8]), s0);
let s2: Box<Seq<Data>> = Box::new(Seq::ConsRef(Data([1; 8]), &s1));
let s3: Box<Seq<Data>> = Box::new(Seq::ConsOwn(Data([2; 8]), s2));
let s4: Seq<Data> = Seq::ConsOwn(Data([3; 8]), s3);
assert_eq!(s4.len(), 4);
assert_eq!(&s4, &s4);
}
#[cfg(not(feature = "lite-seq"))]
#[test]
fn test_iter() {
let s0: &Seq<u32> = empty();
let s1 = Seq::ConsRef(1u32, s0);
let s2 = Seq::ConsRef(2u32, &s1);
let s3 = Seq::ConsRef(3u32, &s2);
let s4 = Seq::ConsRef(4u32, &s3);
let iter: SeqIterator<u32> = s4.into_iter();
let sum = iter.fold(0, ops::Add::add);
assert_eq!(sum, 10);
}
#[cfg(not(feature = "lite-seq"))]
#[test]
fn test_iter_boxed() {
let seq: Box<Seq<u32>> = prepend_boxed(1, empty());
let iter: SeqIterator<u32> = seq.into_iter();
let sum = iter.fold(0, ops::Add::add);
assert_eq!(sum, 10);
}
#[test]
fn test_macro() {
seqdef!(s; empty() => 0);
assert_ne!(&s, empty());
seqdef!(t; &s => 1, 2, 3);
assert_ne!(&t, empty());
seqdef!(u; empty() => 0, 1, 2, 3);
assert_ne!(&u, empty());
assert_eq!(&u, &t);
seqdef!(v; 0);
assert_eq!(&v, &s);
seqdef!(w; 0, 1, 2, 3);
assert_eq!(&w, &u);
}
#[test]
fn test_head_tail() {
let s: &Seq<u32> = empty();
let s = Seq::ConsRef(1u32, s);
let s = Seq::ConsRef(2u32, &s);
let s = Seq::ConsRef(3u32, &s);
let ft = s.head();
let rt = s.tail();
assert_eq!(ft.unwrap(), &3);
assert_eq!(rt.unwrap().head().unwrap(), &2);
}
}
#[cfg(all(feature = "benchmark", test))]
mod benchmark {
extern crate test;
use super::Seq;
use super::empty;
use std::ops;
use std::vec;
use std::collections::LinkedList;
    // Returns the total of the partial sums 0, 0+1, 0+1+2, ..., 0+1+...+n,
    // e.g. sum_of_sums(3) == 0 + 1 + 3 + 6 == 10.
fn sum_of_sums(n: u32) -> u32
{
let cumulated = (n * (n + 1) * ((2 * n) + 1) / 6) + ((n * (n + 1)) / 2);
cumulated / 2
}
// Recursive function, adding an element and cumulate the sums, until N-1 is reached.
fn recurs_stack_list(l: &mut LinkedList<u32>, cnt: u32, n: u32) -> u32 {
if cnt < n {
l.push_back(cnt);
let sum = l.iter().fold(0u32, ops::Add::add);
let r = sum + recurs_stack_list(l, cnt + 1, n);
l.pop_back();
r
} else {
0
}
}
#[bench]
fn bench_list_008(b: &mut test::Bencher) {
const N: u32 = 8;
b.iter(|| {
let mut l = LinkedList::new();
let sum = recurs_stack_list(&mut l, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_list_016(b: &mut test::Bencher) {
const N: u32 = 16;
b.iter(|| {
let mut l = LinkedList::new();
let sum = recurs_stack_list(&mut l, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_list_032(b: &mut test::Bencher) {
const N: u32 = 32;
b.iter(|| {
let mut l = LinkedList::new();
let sum = recurs_stack_list(&mut l, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_list_064(b: &mut test::Bencher) {
const N: u32 = 64;
b.iter(|| {
let mut l = LinkedList::new();
let sum = recurs_stack_list(&mut l, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_list_128(b: &mut test::Bencher) {
const N: u32 = 128;
b.iter(|| {
let mut l = LinkedList::new();
let sum = recurs_stack_list(&mut l, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_list_256(b: &mut test::Bencher) {
const N: u32 = 256;
b.iter(|| {
let mut l = LinkedList::new();
let sum = recurs_stack_list(&mut l, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_list_512(b: &mut test::Bencher) {
const N: u32 = 512;
b.iter(|| {
let mut l = LinkedList::new();
let sum = recurs_stack_list(&mut l, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
// Recursive function, adding an element and cumulate the sums, until N-1 is reached.
fn recurs_stack_vec(v: &mut vec::Vec<u32>, cnt: u32, n: u32) -> u32 {
if cnt < n {
v.push(cnt);
let sum = v.iter().fold(0u32, ops::Add::add);
let r = sum + recurs_stack_vec(v, cnt + 1, n);
v.truncate(cnt as usize);
r
} else {
0
}
}
#[bench]
fn bench_vec_008(b: &mut test::Bencher) {
const N: u32 = 8;
b.iter(|| {
let mut v = vec::Vec::new();
let sum = recurs_stack_vec(&mut v, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_vec_016(b: &mut test::Bencher) {
const N: u32 = 16;
b.iter(|| {
let mut v = vec::Vec::new();
let sum = recurs_stack_vec(&mut v, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_vec_032(b: &mut test::Bencher) {
const N: u32 = 32;
b.iter(|| {
let mut v = vec::Vec::new();
let sum = recurs_stack_vec(&mut v, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_vec_064(b: &mut test::Bencher) {
const N: u32 = 64;
b.iter(|| {
let mut v = vec::Vec::new();
let sum = recurs_stack_vec(&mut v, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_vec_128(b: &mut test::Bencher) {
const N: u32 = 128;
b.iter(|| {
let mut v = vec::Vec::new();
let sum = recurs_stack_vec(&mut v, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_vec_256(b: &mut test::Bencher) {
const N: u32 = 256;
b.iter(|| {
let mut v = vec::Vec::new();
let sum = recurs_stack_vec(&mut v, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_vec_512(b: &mut test::Bencher) {
const N: u32 = 512;
b.iter(|| {
let mut v = vec::Vec::new();
let sum = recurs_stack_vec(&mut v, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
// Recursive function, adding an element and cumulate the sums, until N-1 is reached.
fn recurs_stack_seq(s: &Seq<u32>, cnt: u32, n: u32) -> u32 {
if cnt < n {
let ext_s = Seq::ConsRef(cnt, s);
let sum = ext_s.into_iter().fold(0u32, ops::Add::add);
sum + recurs_stack_seq(&ext_s, cnt + 1, n)
} else {
0
}
}
#[bench]
fn bench_seq_008(b: &mut test::Bencher) {
const N: u32 = 8;
b.iter(|| {
let sum = recurs_stack_seq(empty(), 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_seq_016(b: &mut test::Bencher) {
const N: u32 = 16;
b.iter(|| {
let sum = recurs_stack_seq(empty(), 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_seq_032(b: &mut test::Bencher) {
const N: u32 = 32;
b.iter(|| {
let sum = recurs_stack_seq(empty(), 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_seq_064(b: &mut test::Bencher) {
const N: u32 = 64;
b.iter(|| {
let sum = recurs_stack_seq(empty(), 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_seq_128(b: &mut test::Bencher) {
const N: u32 = 128;
b.iter(|| {
let sum = recurs_stack_seq(empty(), 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_seq_256(b: &mut test::Bencher) {
const N: u32 = 256;
b.iter(|| {
let sum = recurs_stack_seq(empty(), 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_seq_512(b: &mut test::Bencher) {
const N: u32 = 512;
b.iter(|| {
let sum = recurs_stack_seq(empty(), 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
// Recursive function, adding an element and cumulate the sums, until N-1 is reached.
fn recurs_stack_array(a: &mut [u32], cnt: u32, n: u32) -> u32 {
if cnt < n {
a[cnt as usize] = cnt;
let sum = a[..(cnt + 1) as usize].into_iter().fold(0u32, ops::Add::add);
sum + recurs_stack_array(a, cnt + 1, n)
} else {
0
}
}
#[bench]
fn bench_array_008(b: &mut test::Bencher) {
const N: u32 = 8;
b.iter(|| {
let mut a: [u32; N as usize] = [0; N as usize];
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_array_016(b: &mut test::Bencher) {
const N: u32 = 16;
b.iter(|| {
let mut a: [u32; N as usize] = [0; N as usize];
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_array_032(b: &mut test::Bencher) {
const N: u32 = 32;
b.iter(|| {
let mut a: [u32; N as usize] = [0; N as usize];
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_array_064(b: &mut test::Bencher) {
const N: u32 = 64;
b.iter(|| {
let mut a: [u32; N as usize] = [0; N as usize];
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_array_128(b: &mut test::Bencher) {
const N: u32 = 128;
b.iter(|| {
let mut a: [u32; N as usize] = [0; N as usize];
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_array_256(b: &mut test::Bencher) {
const N: u32 = 256;
b.iter(|| {
let mut a: [u32; N as usize] = [0; N as usize];
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
#[bench]
fn bench_array_512(b: &mut test::Bencher) {
const N: u32 = 512;
b.iter(|| {
let mut a: [u32; N as usize] = [0; N as usize];
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N - 1));
sum
});
}
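    // NOTE: the uninit benchmarks below skip zero-initialization of the array
    // via `mem::uninitialized`; that function is deprecated (and undefined
    // behavior for most types) in later Rust, where `MaybeUninit` replaces it.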
#[bench]
fn bench_uninit_008(b: &mut test::Bencher) {
const N: u32 = 8;
b.iter(|| unsafe {
let mut a: [u32; N as usize] = ::std::mem::uninitialized();
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N-1));
sum
});
}
#[bench]
fn bench_uninit_016(b: &mut test::Bencher) {
const N: u32 = 16;
b.iter(|| unsafe {
let mut a: [u32; N as usize] = ::std::mem::uninitialized();
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N-1));
sum
});
}
#[bench]
fn bench_uninit_032(b: &mut test::Bencher) {
const N: u32 = 32;
b.iter(|| unsafe {
let mut a: [u32; N as usize] = ::std::mem::uninitialized();
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N-1));
sum
});
}
#[bench]
fn bench_uninit_064(b: &mut test::Bencher) {
const N: u32 = 64;
b.iter(|| unsafe {
let mut a: [u32; N as usize] = ::std::mem::uninitialized();
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N-1));
sum
});
}
#[bench]
fn bench_uninit_128(b: &mut test::Bencher) {
const N: u32 = 128;
b.iter(|| unsafe {
let mut a: [u32; N as usize] = ::std::mem::uninitialized();
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N-1));
sum
});
}
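    #[bench]
    fn bench_uninit_256(b: &mut test::Bencher) {
        const N: u32 = 256;
        b.iter(|| unsafe {
            let mut a: [u32; N as usize] = ::std::mem::uninitialized();
            let sum = recurs_stack_array(&mut a, 0, N);
            assert_eq!(sum, sum_of_sums(N-1));
            sum
        });
    }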
#[bench]
fn bench_uninit_512(b: &mut test::Bencher) {
const N: u32 = 512;
b.iter(|| unsafe {
let mut a: [u32; N as usize] = ::std::mem::uninitialized();
let sum = recurs_stack_array(&mut a, 0, N);
assert_eq!(sum, sum_of_sums(N-1));
sum
});
}
}<|fim▁end|> | //! |
<|file_name|>vera.py<|end_file_name|><|fim▁begin|>"""
Support for Vera sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.vera/
"""
import logging
from homeassistant.const import (
TEMP_CELSIUS, TEMP_FAHRENHEIT)
from homeassistant.helpers.entity import Entity
from homeassistant.components.sensor import ENTITY_ID_FORMAT
from homeassistant.components.vera import (
VERA_CONTROLLER, VERA_DEVICES, VeraDevice)
DEPENDENCIES = ['vera']
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Perform the setup for Vera controller devices."""
add_devices(
VeraSensor(device, VERA_CONTROLLER)
for device in VERA_DEVICES['sensor'])
class VeraSensor(VeraDevice, Entity):
"""Representation of a Vera Sensor."""
def __init__(self, vera_device, controller):
"""Initialize the sensor."""
self.current_value = None
self._temperature_units = None
VeraDevice.__init__(self, vera_device, controller)
self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id)
@property
def state(self):
"""Return the name of the sensor."""
return self.current_value
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
if self.vera_device.category == "Temperature Sensor":
return self._temperature_units<|fim▁hole|> elif self.vera_device.category == "Light Sensor":
return 'lux'
elif self.vera_device.category == "Humidity Sensor":
return '%'
def update(self):
"""Update the state."""
if self.vera_device.category == "Temperature Sensor":
self.current_value = self.vera_device.temperature
vera_temp_units = (
self.vera_device.vera_controller.temperature_units)
if vera_temp_units == 'F':
self._temperature_units = TEMP_FAHRENHEIT
else:
self._temperature_units = TEMP_CELSIUS
elif self.vera_device.category == "Light Sensor":
self.current_value = self.vera_device.light
elif self.vera_device.category == "Humidity Sensor":
self.current_value = self.vera_device.humidity
elif self.vera_device.category == "Sensor":
tripped = self.vera_device.is_tripped
self.current_value = 'Tripped' if tripped else 'Not Tripped'
else:
self.current_value = 'Unknown'<|fim▁end|> | |
<|file_name|>environments.js<|end_file_name|><|fim▁begin|>// Here is where you can define configuration overrides based on the execution environment.
// Supply a key to the default export matching the NODE_ENV that you wish to target, and
// the base configuration will apply your overrides before exporting itself.
export default {
// ======================================================
// Overrides when NODE_ENV === 'development'<|fim▁hole|> // NOTE: In development, we use an explicit public path when the assets
  // are served by webpack to fix this issue:
// http://stackoverflow.com/questions/34133808/webpack-ots-parsing-error-loading-fonts/34133809#34133809
development: (config) => ({
compiler_public_path: `http://${config.server_host}:${config.server_port}/`,
proxy: {
enabled: false,
options: {
host: 'http://localhost:8000',
match: /^\/api\/.*/
}
}
}),
// ======================================================
// Overrides when NODE_ENV === 'production'
// ======================================================
production: (config) => ({
compiler_public_path: '/',
compiler_fail_on_warning: false,
compiler_hash_type: 'chunkhash',
compiler_devtool: null,
compiler_stats: {
chunks: true,
chunkModules: true,
colors: true
}
})
};<|fim▁end|> | // ====================================================== |
<|file_name|>mne_browse_raw.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
r"""Browse raw data.
This uses :func:`mne.io.read_raw` so it supports the same formats
(without keyword arguments).
Examples
--------
.. code-block:: console
$ mne browse_raw sample_audvis_raw.fif \
--proj sample_audvis_ecg-proj.fif \
--eve sample_audvis_raw-eve.fif
"""
# Authors : Eric Larson, PhD
import sys
import mne
def run():
"""Run command."""
import matplotlib.pyplot as plt
from mne.commands.utils import get_optparser, _add_verbose_flag
from mne.viz import _RAW_CLIP_DEF
parser = get_optparser(__file__, usage='usage: %prog raw [options]')
parser.add_option("--raw", dest="raw_in",
help="Input raw FIF file (can also be specified "
"directly as an argument without the --raw prefix)",
metavar="FILE")
parser.add_option("--proj", dest="proj_in",
help="Projector file", metavar="FILE",
default='')
parser.add_option("--eve", dest="eve_in",
help="Events file", metavar="FILE",
default='')
parser.add_option("-d", "--duration", dest="duration", type="float",
help="Time window for plotting (sec)",
default=10.0)
parser.add_option("-t", "--start", dest="start", type="float",
help="Initial start time for plotting",
default=0.0)
parser.add_option("-n", "--n_channels", dest="n_channels", type="int",
help="Number of channels to plot at a time",
default=20)
parser.add_option("-o", "--order", dest="group_by",
help="Order to use for grouping during plotting "
"('type' or 'original')", default='type')
parser.add_option("-p", "--preload", dest="preload",
help="Preload raw data (for faster navigaton)",
default=False, action="store_true")
parser.add_option("-s", "--show_options", dest="show_options",
help="Show projection options dialog",
default=False)
parser.add_option("--allowmaxshield", dest="maxshield",
help="Allow loading MaxShield processed data",
action="store_true")
parser.add_option("--highpass", dest="highpass", type="float",
help="Display high-pass filter corner frequency",
default=-1)
parser.add_option("--lowpass", dest="lowpass", type="float",
help="Display low-pass filter corner frequency",
default=-1)
parser.add_option("--filtorder", dest="filtorder", type="int",
help="Display filtering IIR order (or 0 to use FIR)",
default=4)
parser.add_option("--clipping", dest="clipping",
help="Enable trace clipping mode, either 'clamp' or "
"'transparent'", default=_RAW_CLIP_DEF)
parser.add_option("--filterchpi", dest="filterchpi",
help="Enable filtering cHPI signals.", default=None,
action="store_true")
_add_verbose_flag(parser)
options, args = parser.parse_args()
if len(args):
raw_in = args[0]
else:
raw_in = options.raw_in
duration = options.duration
start = options.start
n_channels = options.n_channels
group_by = options.group_by
preload = options.preload
show_options = options.show_options
proj_in = options.proj_in
eve_in = options.eve_in
maxshield = options.maxshield
highpass = options.highpass
lowpass = options.lowpass
filtorder = options.filtorder
clipping = options.clipping
if isinstance(clipping, str):
if clipping.lower() == 'none':
clipping = None
else:
try:
clipping = float(clipping) # allow float and convert it
except ValueError:
pass
filterchpi = options.filterchpi
verbose = options.verbose
if raw_in is None:
parser.print_help()
sys.exit(1)
kwargs = dict(preload=preload)
if maxshield:
kwargs.update(allow_maxshield='yes')
raw = mne.io.read_raw(raw_in, **kwargs)
if len(proj_in) > 0:
projs = mne.read_proj(proj_in)
raw.info['projs'] = projs
if len(eve_in) > 0:
events = mne.read_events(eve_in)
else:
events = None
if filterchpi:
if not preload:
raise RuntimeError(
'Raw data must be preloaded for chpi, use --preload')
raw = mne.chpi.filter_chpi(raw)
highpass = None if highpass < 0 or filtorder < 0 else highpass
lowpass = None if lowpass < 0 or filtorder < 0 else lowpass
raw.plot(duration=duration, start=start, n_channels=n_channels,
group_by=group_by, show_options=show_options, events=events,
highpass=highpass, lowpass=lowpass, filtorder=filtorder,
clipping=clipping, verbose=verbose)<|fim▁hole|>mne.utils.run_command_if_main()<|fim▁end|> | plt.show(block=True)
|
<|file_name|>test_views_item_delete.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Delete record tests."""
from __future__ import absolute_import, print_function
from flask import url_for
from helpers import get_json, record_url
from invenio_pidstore.models import PersistentIdentifier
from mock import patch
from sqlalchemy.exc import SQLAlchemyError
def test_valid_delete(app, indexed_records):
"""Test VALID record delete request (DELETE .../records/<record_id>)."""
# Test with and without headers
for i, headers in enumerate([[], [('Accept', 'video/mp4')]]):
pid, record = indexed_records[i]
with app.test_client() as client:
res = client.delete(record_url(pid), headers=headers)
assert res.status_code == 204
res = client.get(record_url(pid))
assert res.status_code == 410
def test_delete_deleted(app, indexed_records):
"""Test deleting a perviously deleted record."""
pid, record = indexed_records[0]
with app.test_client() as client:
res = client.delete(record_url(pid))
assert res.status_code == 204<|fim▁hole|> res = client.delete(record_url(pid))
assert res.status_code == 410
data = get_json(res)
assert 'message' in data
assert data['status'] == 410
def test_delete_notfound(app, indexed_records):
"""Test INVALID record delete request (DELETE .../records/<record_id>)."""
with app.test_client() as client:
# Check that GET with non existing id will return 404
res = client.delete(url_for(
'invenio_records_rest.recid_item', pid_value=0))
assert res.status_code == 404
def test_delete_with_sqldatabase_error(app, indexed_records):
"""Test VALID record delete request (GET .../records/<record_id>)."""
pid, record = indexed_records[0]
with app.test_client() as client:
def raise_error():
raise SQLAlchemyError()
# Force an SQLAlchemy error that will rollback the transaction.
with patch.object(PersistentIdentifier, 'delete',
side_effect=raise_error):
res = client.delete(record_url(pid))
assert res.status_code == 500
with app.test_client() as client:
res = client.get(record_url(pid))
assert res.status_code == 200<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2005
# The President and Fellows of Harvard College.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright<|fim▁hole|># may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# Author: Geoffrey Mainland <[email protected]>
#
__all__ = ["message", "packet", "utils", "tossim"]<|fim▁end|> | # notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the University nor the names of its contributors |
<|file_name|>relay_benchmark_test.go<|end_file_name|><|fim▁begin|>package tchannel_test
import (
"fmt"
"sync"
"testing"
"time"
"github.com/bmizerany/perks/quantile"
"github.com/stretchr/testify/require"
. "github.com/uber/tchannel-go"
"github.com/uber/tchannel-go/benchmark"
"github.com/uber/tchannel-go/relay"
"github.com/uber/tchannel-go/testutils"
)
type benchmarkParams struct {
servers, clients int
requestSize int
appends []relay.KeyVal
}
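// workerControl coordinates benchmark workers: every worker blocks in
// WorkerStart until WaitForStart releases them all at once, and WaitForEnd
// returns only after each worker has called WorkerDone.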
type workerControl struct {
start sync.WaitGroup
unblockStart chan struct{}
done sync.WaitGroup
}
func init() {
benchmark.BenchmarkDir = "./benchmark/"
}
func newWorkerControl(numWorkers int) *workerControl {
wc := &workerControl{
unblockStart: make(chan struct{}),
}
wc.start.Add(numWorkers)
wc.done.Add(numWorkers)
return wc
}
func (c *workerControl) WaitForStart(f func()) {
c.start.Wait()
f()
close(c.unblockStart)
}
func (c *workerControl) WaitForEnd() {
c.done.Wait()
}
func (c *workerControl) WorkerStart() {
c.start.Done()
<-c.unblockStart
}
func (c *workerControl) WorkerDone() {
c.done.Done()
}
func defaultParams() benchmarkParams {<|fim▁hole|> return benchmarkParams{
servers: 2,
clients: 2,
requestSize: 1024,
}
}
func closeAndVerify(b *testing.B, ch *Channel) {
ch.Close()
isChanClosed := func() bool {
return ch.State() == ChannelClosed
}
if !testutils.WaitFor(time.Second, isChanClosed) {
b.Errorf("Timed out waiting for channel to close, state: %v", ch.State())
}
}
func benchmarkRelay(b *testing.B, p benchmarkParams) {
b.SetBytes(int64(p.requestSize))
b.ReportAllocs()
services := make(map[string][]string)
servers := make([]benchmark.Server, p.servers)
for i := range servers {
servers[i] = benchmark.NewServer(
benchmark.WithServiceName("svc"),
benchmark.WithRequestSize(p.requestSize),
benchmark.WithExternalProcess(),
)
defer servers[i].Close()
services["svc"] = append(services["svc]"], servers[i].HostPort())
}
relay, err := benchmark.NewRealRelay(services, p.appends)
require.NoError(b, err, "Failed to create relay")
defer relay.Close()
clients := make([]benchmark.Client, p.clients)
for i := range clients {
clients[i] = benchmark.NewClient([]string{relay.HostPort()},
benchmark.WithServiceName("svc"),
benchmark.WithRequestSize(p.requestSize),
benchmark.WithExternalProcess(),
benchmark.WithTimeout(10*time.Second),
)
defer clients[i].Close()
require.NoError(b, clients[i].Warmup(), "Warmup failed")
}
quantileVals := []float64{0.50, 0.95, 0.99, 1.0}
quantiles := make([]*quantile.Stream, p.clients)
for i := range quantiles {
quantiles[i] = quantile.NewTargeted(quantileVals...)
}
wc := newWorkerControl(p.clients)
dec := testutils.Decrementor(b.N)
var wg sync.WaitGroup
	errC := make(chan error, len(clients))
defer close(errC)
for i, c := range clients {
wg.Add(1)
go func(i int, c benchmark.Client) {
defer wg.Done()
// Do a warm up call.
c.RawCall(1)
wc.WorkerStart()
defer wc.WorkerDone()
for {
tokens := dec.Multiple(200)
if tokens == 0 {
break
}
durations, err := c.RawCall(tokens)
if err != nil {
errC <- err
return
}
for _, d := range durations {
quantiles[i].Insert(float64(d))
}
}
}(i, c)
}
	var started time.Time
	wc.WaitForStart(func() {
		b.ResetTimer()
		started = time.Now()
	})
	wc.WaitForEnd()
	wg.Wait()

	// All workers are done; drain any reported error without blocking, since
	// the channel stays empty when every call succeeded.
	select {
	case err := <-errC:
		b.Fatalf("Call failed: %v", err)
	default:
	}
duration := time.Since(started)
fmt.Printf("\nb.N: %v Duration: %v RPS = %0.0f\n", b.N, duration, float64(b.N)/duration.Seconds())
// Merge all the quantiles into 1
for _, q := range quantiles[1:] {
quantiles[0].Merge(q.Samples())
}
for _, q := range quantileVals {
fmt.Printf(" %0.4f = %v\n", q, time.Duration(quantiles[0].Query(q)))
}
fmt.Println()
}
func BenchmarkRelayNoLatencies(b *testing.B) {
server := benchmark.NewServer(
benchmark.WithServiceName("svc"),
benchmark.WithExternalProcess(),
benchmark.WithNoLibrary(),
)
defer server.Close()
hostMapping := map[string][]string{"svc": {server.HostPort()}}
relay, err := benchmark.NewRealRelay(hostMapping, nil)
require.NoError(b, err, "NewRealRelay failed")
defer relay.Close()
client := benchmark.NewClient([]string{relay.HostPort()},
benchmark.WithServiceName("svc"),
benchmark.WithExternalProcess(),
benchmark.WithNoLibrary(),
benchmark.WithNumClients(10),
benchmark.WithNoChecking(),
benchmark.WithNoDurations(),
benchmark.WithTimeout(10*time.Second),
)
defer client.Close()
require.NoError(b, client.Warmup(), "client.Warmup failed")
b.ResetTimer()
started := time.Now()
for _, calls := range testutils.Batch(b.N, 10000) {
if _, err := client.RawCall(calls); err != nil {
b.Fatalf("Calls failed: %v", err)
}
}
duration := time.Since(started)
fmt.Printf("\nb.N: %v Duration: %v RPS = %0.0f\n", b.N, duration, float64(b.N)/duration.Seconds())
}
func BenchmarkRelay2Servers5Clients1k(b *testing.B) {
p := defaultParams()
p.clients = 5
p.servers = 2
benchmarkRelay(b, p)
}
func BenchmarkRelay4Servers20Clients1k(b *testing.B) {
p := defaultParams()
p.clients = 20
p.servers = 4
benchmarkRelay(b, p)
}
func BenchmarkRelay2Servers5Clients4k(b *testing.B) {
p := defaultParams()
p.requestSize = 4 * 1024
p.clients = 5
p.servers = 2
benchmarkRelay(b, p)
}
func BenchmarkRelayAppends(b *testing.B) {
for _, n := range []int{0, 1, 2, 5, 10} {
b.Run(fmt.Sprintf("%v appends", n), func(b *testing.B) {
p := defaultParams()
for i := 0; i < n; i++ {
p.appends = append(p.appends, relay.KeyVal{Key: []byte("foo"), Val: []byte("bar")})
}
b.ResetTimer()
benchmarkRelay(b, p)
})
}
}<|fim▁end|> | |
<|file_name|>settings_default.py<|end_file_name|><|fim▁begin|>'''
Created on Jun 20, 2016
@author: ionut
'''
import logging
logging.basicConfig(level=logging.DEBUG,
format='[%(asctime)s] - %(name)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("spotify.search").setLevel(logging.WARNING)
logging.getLogger("spotify.session").setLevel(logging.WARNING)
#Database connection - Postgres / Amazon RDS
DSN = "dbname='spotlipy' user='postgres' host='127.0.0.1' password='password'"
#dogstarradio search URL
SEARCH_URL = 'http://www.dogstarradio.com/search_playlist.php'
JUNK_INDICATORS = ['@', '#', '.com', 'Hip Hop Nation', 'SiriusXM']
#for stations numbers and names see stations.txt
STATIONS = [<|fim▁hole|>#if MONTH or DATE are None we will use yesterday for searching
MONTH = None
DATE = None
#Spotify settings
SPOTIFY = {
'username': 'username',
'client_id': 'client_id',
'client_secret': 'client_secret',
'redirect_url': 'redirect_url',
'api_scopes': 'playlist-read-private playlist-modify-public playlist-modify-private'
}<|fim▁end|> | 34, 44
]
|
<|file_name|>test.py<|end_file_name|><|fim▁begin|>###
# Copyright (c) 2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"<|fim▁hole|># IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
class MyChannelLoggerTestCase(PluginTestCase):
plugins = ('MyChannelLogger',)
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:<|fim▁end|> | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import ComplianceStatus
from ._models_py3 import ErrorDefinition
from ._models_py3 import ErrorResponse
from ._models_py3 import HelmOperatorProperties
from ._models_py3 import ProxyResource
from ._models_py3 import Resource
from ._models_py3 import ResourceProviderOperation
from ._models_py3 import ResourceProviderOperationDisplay
from ._models_py3 import ResourceProviderOperationList
from ._models_py3 import Result
from ._models_py3 import SourceControlConfiguration
from ._models_py3 import SourceControlConfigurationList
from ._models_py3 import SystemData
except (SyntaxError, ImportError):
from ._models import ComplianceStatus # type: ignore
from ._models import ErrorDefinition # type: ignore
from ._models import ErrorResponse # type: ignore
from ._models import HelmOperatorProperties # type: ignore
from ._models import ProxyResource # type: ignore
from ._models import Resource # type: ignore
from ._models import ResourceProviderOperation # type: ignore
from ._models import ResourceProviderOperationDisplay # type: ignore
from ._models import ResourceProviderOperationList # type: ignore
from ._models import Result # type: ignore
from ._models import SourceControlConfiguration # type: ignore
from ._models import SourceControlConfigurationList # type: ignore
from ._models import SystemData # type: ignore
from ._source_control_configuration_client_enums import (
ComplianceStateType,
CreatedByType,
Enum0,
Enum1,
MessageLevelType,
OperatorScopeType,
OperatorType,
ProvisioningStateType,
)
__all__ = [
'ComplianceStatus',
'ErrorDefinition',<|fim▁hole|> 'ResourceProviderOperation',
'ResourceProviderOperationDisplay',
'ResourceProviderOperationList',
'Result',
'SourceControlConfiguration',
'SourceControlConfigurationList',
'SystemData',
'ComplianceStateType',
'CreatedByType',
'Enum0',
'Enum1',
'MessageLevelType',
'OperatorScopeType',
'OperatorType',
'ProvisioningStateType',
]<|fim▁end|> | 'ErrorResponse',
'HelmOperatorProperties',
'ProxyResource',
'Resource', |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright 2015 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import operator
import functools
import datetime
import pytz
import re
import gevent
import gevent.lock
import gevent.event
EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.UTC)
def utc_millisec():
return int(time.time()*1000)
def dt_to_millisec(dt):
    if dt.tzinfo is None:
dt = dt.replace(tzinfo=pytz.UTC)
delta = dt - EPOCH
return int(delta.total_seconds()*1000)
def interval_in_sec(val):
if isinstance(val, int):
return val
multipliers = {
'': 1,
'm': 60,
'h': 3600,
'd': 86400
}
mo = re.match("([0-9]+)([dmh]?)", val)
if mo is None:
return None
return int(mo.group(1))*multipliers[mo.group(2)]
def age_out_in_millisec(val):
multipliers = {
'': 1000,
'm': 60000,
'h': 3600000,
'd': 86400000
}
mo = re.match("([0-9]+)([dmh]?)", val)
if mo is None:
return None
return int(mo.group(1))*multipliers[mo.group(2)]
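# Illustrative conversions: age_out_in_millisec('30') == 30000,
# age_out_in_millisec('2h') == 7200000 and age_out_in_millisec('1d') == 86400000.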
def _merge_atomic_values(op, v1, v2):
if op(v1, v2):
return v2
return v1
def _merge_array(v1, v2):
for e in v2:
if e not in v1:
v1.append(e)
return v1
RESERVED_ATTRIBUTES = {
'sources': _merge_array,
'first_seen': functools.partial(_merge_atomic_values, operator.gt),
'last_seen': functools.partial(_merge_atomic_values, operator.lt),
'type': functools.partial(_merge_atomic_values, operator.eq),
'direction': functools.partial(_merge_atomic_values, operator.eq),
'confidence': functools.partial(_merge_atomic_values, operator.lt),
'country': functools.partial(_merge_atomic_values, operator.eq),
'AS': functools.partial(_merge_atomic_values, operator.eq)
}
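# Merge semantics (illustrative): 'first_seen' keeps the earliest timestamp,
# 'last_seen' the latest, and 'confidence' the highest value; 'type',
# 'direction', 'country' and 'AS' are only replaced by an equal value, i.e.
# they never change on merge, while 'sources' accumulates unique entries.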
class RWLock(object):
def __init__(self):
self.num_readers = 0
self.num_writers = 0
self.m1 = gevent.lock.Semaphore(1)
self.m2 = gevent.lock.Semaphore(1)
self.m3 = gevent.lock.Semaphore(1)
self.w = gevent.lock.Semaphore(1)
self.r = gevent.lock.Semaphore(1)
def lock(self):
self.m2.acquire()
self.num_writers += 1
if self.num_writers == 1:
self.r.acquire()
self.m2.release()
self.w.acquire()
def unlock(self):
self.w.release()
self.m2.acquire()
self.num_writers -= 1
if self.num_writers == 0:
self.r.release()
self.m2.release()
def rlock(self):
self.m3.acquire()
self.r.acquire()
self.m1.acquire()
self.num_readers += 1
if self.num_readers == 1:
self.w.acquire()
self.m1.release()
self.r.release()
self.m3.release()
def runlock(self):
self.m1.acquire()
self.num_readers -= 1
if self.num_readers == 0:
self.w.release()
self.m1.release()
def __enter__(self):
self.rlock()
def __exit__(self, type, value, traceback):
self.runlock()
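# Writers take the lock with lock()/unlock(); readers may either call
# rlock()/runlock() directly or use the context manager. Illustrative usage:
#
#     rwlock = RWLock()
#     with rwlock:          # shared (read) access
#         value = shared_state.get(key)
#     rwlock.lock()         # exclusive (write) access
#     try:
#         shared_state[key] = value
#     finally:
#         rwlock.unlock()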
_AGE_OUT_BASES = ['last_seen', 'first_seen']
def parse_age_out(s, age_out_bases=None, default_base=None):
if s is None:
return None
if age_out_bases is None:
age_out_bases = _AGE_OUT_BASES
if default_base is None:
default_base = 'first_seen'
if default_base not in age_out_bases:
raise ValueError('%s not in %s' % (default_base, age_out_bases))
result = {}
toks = s.split('+', 1)
if len(toks) == 1:
t = toks[0].strip()
if t in age_out_bases:
result['base'] = t
result['offset'] = 0
else:
result['base'] = default_base
result['offset'] = age_out_in_millisec(t)
if result['offset'] is None:
raise ValueError('Invalid age out offset %s' % t)
else:
base = toks[0].strip()
if base not in age_out_bases:
raise ValueError('Invalid age out base %s' % base)
result['base'] = base
result['offset'] = age_out_in_millisec(toks[1].strip())
if result['offset'] is None:
            raise ValueError('Invalid age out offset %s' % toks[1].strip())
return result
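# Illustrative results (offsets in milliseconds):
#   parse_age_out('30d')          == {'base': 'first_seen', 'offset': 2592000000}
#   parse_age_out('last_seen+2h') == {'base': 'last_seen', 'offset': 7200000}
#   parse_age_out('last_seen')    == {'base': 'last_seen', 'offset': 0}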
class GThrottled(object):
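    """Throttle calls to `f`: invoke it at most once every `wait` milliseconds,
    coalescing calls that arrive inside the window into a single trailing
    invocation that uses the most recent arguments."""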
def __init__(self, f, wait):
self._timeout = None
self._previous = 0
self._cancelled = False
self._args = []
self._kwargs = {}
<|fim▁hole|> self.wait = wait
def later(self):
self._previous = utc_millisec()
self._timeout = None
self.f(*self._args, **self._kwargs)
def __call__(self, *args, **kwargs):
now = utc_millisec()
remaining = self.wait - (now - self._previous)
if self._cancelled:
return
if remaining <= 0 or remaining > self.wait:
if self._timeout is not None:
self._timeout.join(timeout=5)
self._timeout = None
self._previous = now
self.f(*args, **kwargs)
elif self._timeout is None:
self._args = args
self._kwargs = kwargs
self._timeout = gevent.spawn_later(remaining/1000.0, self.later)
else:
self._args = args
self._kwargs = kwargs
def cancel(self):
self._cancelled = True
if self._timeout:
self._timeout.join(timeout=5)
if self._timeout is not None:
self._timeout.kill()
self._previous = 0
self._timeout = None
self._args = []
self._kwargs = {}<|fim▁end|> | self.f = f |
<|file_name|>uploader_test.go<|end_file_name|><|fim▁begin|>package s3util
import (
"io"
"io/ioutil"
"net/http"
"runtime"
"strings"
"testing"
)
func runUpload(t *testing.T, makeCloser func(io.Reader) io.ReadCloser) *uploader {
c := *DefaultConfig
c.Client = &http.Client{
Transport: RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
var s string
switch q := req.URL.Query(); {
case req.Method == "PUT":
case req.Method == "POST" && q["uploads"] != nil:
s = `<UploadId>foo</UploadId>`
case req.Method == "POST" && q["uploadId"] != nil:
default:
t.Fatal("unexpected request", req)
}
resp := &http.Response{
StatusCode: 200,
Body: makeCloser(strings.NewReader(s)),
Header: http.Header{
"Etag": {`"foo"`},
},
}
return resp, nil
}),
}
u, err := newUploader("https://s3.amazonaws.com/foo/bar", nil, &c)
if err != nil {
t.Fatal("unexpected err", err)
}
const size = minPartSize + minPartSize/3
n, err := io.Copy(u, io.LimitReader(devZero, size))
if err != nil {
t.Fatal("unexpected err", err)
}
if n != size {
t.Fatal("wrote %d bytes want %d", n, size)
}
err = u.Close()
if err != nil {
t.Fatal("unexpected err", err)
}
return u
}
func TestUploaderCloseRespBody(t *testing.T) {
want := make(chan int, 100)
got := make(closeCounter, 100)
f := func(r io.Reader) io.ReadCloser {
want <- 1
return readClose{r, got}
}
runUpload(t, f)
if len(want) != len(got) {
t.Errorf("closes = %d want %d", len(got), len(want))
}
}
// Used in TestUploaderFreesBuffers to force liveness.
var DummyUploader *uploader
func TestUploaderFreesBuffers(t *testing.T) {
var m0, m1 runtime.MemStats
runtime.GC()
runtime.ReadMemStats(&m0)
u := runUpload(t, ioutil.NopCloser)
runtime.GC()
runtime.ReadMemStats(&m1)
// Make sure everything reachable from u is still live while reading m1.
// (Very aggressive cross-package optimization could hypothetically
// break this, rendering the test ineffective.)
DummyUploader = u
// The uploader never allocates buffers smaller than minPartSize,
// so if the increase is < minPartSize we know none are reachable.
inc := m1.Alloc - m0.Alloc
if m1.Alloc > m0.Alloc && inc >= minPartSize {
t.Errorf("inc = %d want <%d", inc, minPartSize)
}
}
type RoundTripperFunc func(*http.Request) (*http.Response, error)
func (f RoundTripperFunc) RoundTrip(req *http.Request) (*http.Response, error) {
return f(req)
}
type closeCounter chan int
func (c closeCounter) Close() error {
c <- 1
return nil
}
type readClose struct {
io.Reader
io.Closer
}<|fim▁hole|>var devZero io.Reader = repeatReader(0)
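// repeatReader satisfies io.Reader by filling every buffer with one repeated
// byte; devZero above uses it as an endless stream of zeros for test input.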
type repeatReader byte
func (r repeatReader) Read(p []byte) (int, error) {
for i := range p {
p[i] = byte(r)
}
return len(p), nil
}<|fim▁end|> | |
<|file_name|>resource_aws_elasticsearch_domain_policy.go<|end_file_name|><|fim▁begin|>package aws
import (
"fmt"
"log"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
elasticsearch "github.com/aws/aws-sdk-go/service/elasticsearchservice"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/helper/schema"
)
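// resourceAwsElasticSearchDomainPolicy manages the access policy of an
// existing Elasticsearch domain as its own Terraform resource, so the policy
// can be created, updated, and deleted independently of the domain itself.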
func resourceAwsElasticSearchDomainPolicy() *schema.Resource {
return &schema.Resource{
Create: resourceAwsElasticSearchDomainPolicyUpsert,
Read: resourceAwsElasticSearchDomainPolicyRead,
Update: resourceAwsElasticSearchDomainPolicyUpsert,
Delete: resourceAwsElasticSearchDomainPolicyDelete,
Schema: map[string]*schema.Schema{
"domain_name": {
Type: schema.TypeString,
Required: true,
},
"access_policies": {
Type: schema.TypeString,
Required: true,
DiffSuppressFunc: suppressEquivalentAwsPolicyDiffs,
},
},
}
}
func resourceAwsElasticSearchDomainPolicyRead(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*AWSClient).esconn
name := d.Get("domain_name").(string)
out, err := conn.DescribeElasticsearchDomain(&elasticsearch.DescribeElasticsearchDomainInput{
DomainName: aws.String(name),
})
if err != nil {
if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() == "ResourceNotFoundException" {
log.Printf("[WARN] ElasticSearch Domain %q not found, removing", name)
d.SetId("")
return nil
}
return err
}<|fim▁hole|>
ds := out.DomainStatus
d.Set("access_policies", ds.AccessPolicies)
return nil
}
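// Create and Update share one implementation: push the policy, then poll
// DescribeElasticsearchDomain until Processing flips back to false, because
// the ES API applies configuration changes asynchronously.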
func resourceAwsElasticSearchDomainPolicyUpsert(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*AWSClient).esconn
domainName := d.Get("domain_name").(string)
_, err := conn.UpdateElasticsearchDomainConfig(&elasticsearch.UpdateElasticsearchDomainConfigInput{
DomainName: aws.String(domainName),
AccessPolicies: aws.String(d.Get("access_policies").(string)),
})
if err != nil {
return err
}
d.SetId("esd-policy-" + domainName)
err = resource.Retry(50*time.Minute, func() *resource.RetryError {
out, err := conn.DescribeElasticsearchDomain(&elasticsearch.DescribeElasticsearchDomainInput{
DomainName: aws.String(d.Get("domain_name").(string)),
})
if err != nil {
return resource.NonRetryableError(err)
}
if *out.DomainStatus.Processing == false {
return nil
}
return resource.RetryableError(
fmt.Errorf("%q: Timeout while waiting for changes to be processed", d.Id()))
})
if err != nil {
return err
}
return resourceAwsElasticSearchDomainPolicyRead(d, meta)
}
func resourceAwsElasticSearchDomainPolicyDelete(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*AWSClient).esconn
_, err := conn.UpdateElasticsearchDomainConfig(&elasticsearch.UpdateElasticsearchDomainConfigInput{
DomainName: aws.String(d.Get("domain_name").(string)),
AccessPolicies: aws.String(""),
})
if err != nil {
return err
}
log.Printf("[DEBUG] Waiting for ElasticSearch domain policy %q to be deleted", d.Get("domain_name").(string))
err = resource.Retry(60*time.Minute, func() *resource.RetryError {
out, err := conn.DescribeElasticsearchDomain(&elasticsearch.DescribeElasticsearchDomainInput{
DomainName: aws.String(d.Get("domain_name").(string)),
})
if err != nil {
return resource.NonRetryableError(err)
}
if *out.DomainStatus.Processing == false {
return nil
}
return resource.RetryableError(
fmt.Errorf("%q: Timeout while waiting for policy to be deleted", d.Id()))
})
if err != nil {
return err
}
d.SetId("")
return nil
}<|fim▁end|> |
log.Printf("[DEBUG] Received ElasticSearch domain: %s", out) |
<|file_name|>prism-puppet.js<|end_file_name|><|fim▁begin|>(function (Prism) {
Prism.languages.puppet = {
'heredoc': [
// Matches the content of a quoted heredoc string (subject to interpolation)
{
pattern: /(@\("([^"\r\n\/):]+)"(?:\/[nrts$uL]*)?\).*(?:\r?\n|\r))(?:.*(?:\r?\n|\r))*?[ \t]*\|?[ \t]*-?[ \t]*\2/,
lookbehind: true,
alias: 'string',
inside: {
// Matches the end tag
'punctuation': /(?=\S).*\S(?= *$)/
// See interpolation below<|fim▁hole|> },
// Matches the content of an unquoted heredoc string (no interpolation)
{
pattern: /(@\(([^"\r\n\/):]+)(?:\/[nrts$uL]*)?\).*(?:\r?\n|\r))(?:.*(?:\r?\n|\r))*?[ \t]*\|?[ \t]*-?[ \t]*\2/,
lookbehind: true,
alias: 'string',
inside: {
// Matches the end tag
'punctuation': /(?=\S).*\S(?= *$)/
}
},
// Matches the start tag of heredoc strings
{
pattern: /@\("?(?:[^"\r\n\/):]+)"?(?:\/[nrts$uL]*)?\)/,
alias: 'string',
inside: {
'punctuation': {
pattern: /(\().+?(?=\))/,
lookbehind: true
}
}
}
],
'multiline-comment': {
pattern: /(^|[^\\])\/\*[\s\S]*?\*\//,
lookbehind: true,
alias: 'comment'
},
'regex': {
// Must be prefixed with the keyword "node" or a non-word char
pattern: /((?:\bnode\s+|[~=\(\[\{,]\s*|[=+]>\s*|^\s*))\/(?:[^\/\\]|\\[\s\S])+\/(?:[imx]+\b|\B)/,
lookbehind: true,
inside: {
// Extended regexes must have the x flag. They can contain single-line comments.
'extended-regex': {
pattern: /^\/(?:[^\/\\]|\\[\s\S])+\/[im]*x[im]*$/,
inside: {
'comment': /#.*/
}
}
}
},
'comment': {
pattern: /(^|[^\\])#.*/,
lookbehind: true
},
'string': {
// Allow for one nested level of double quotes inside interpolation
pattern: /(["'])(?:\$\{(?:[^'"}]|(["'])(?:(?!\2)[^\\]|\\[\s\S])*\2)+\}|(?!\1)[^\\]|\\[\s\S])*\1/,
inside: {
'double-quoted': {
pattern: /^"[\s\S]*"$/,
inside: {
// See interpolation below
}
}
}
},
'variable': {
pattern: /\$(?:::)?\w+(?:::\w+)*/,
inside: {
'punctuation': /::/
}
},
'attr-name': /(?:\w+|\*)(?=\s*=>)/,
'function': [
{
pattern: /(\.)(?!\d)\w+/,
lookbehind: true
},
/\b(?:contain|debug|err|fail|include|info|notice|realize|require|tag|warning)\b|\b(?!\d)\w+(?=\()/
],
'number': /\b(?:0x[a-f\d]+|\d+(?:\.\d+)?(?:e-?\d+)?)\b/i,
'boolean': /\b(?:true|false)\b/,
// Includes words reserved for future use
'keyword': /\b(?:application|attr|case|class|consumes|default|define|else|elsif|function|if|import|inherits|node|private|produces|type|undef|unless)\b/,
'datatype': {
pattern: /\b(?:Any|Array|Boolean|Callable|Catalogentry|Class|Collection|Data|Default|Enum|Float|Hash|Integer|NotUndef|Numeric|Optional|Pattern|Regexp|Resource|Runtime|Scalar|String|Struct|Tuple|Type|Undef|Variant)\b/,
alias: 'symbol'
},
'operator': /=[=~>]?|![=~]?|<(?:<\|?|[=~|-])?|>[>=]?|->?|~>|\|>?>?|[*\/%+?]|\b(?:and|in|or)\b/,
'punctuation': /[\[\]{}().,;]|:+/
};
var interpolation = [
{
// Allow for one nested level of braces inside interpolation
pattern: /(^|[^\\])\$\{(?:[^'"{}]|\{[^}]*\}|(["'])(?:(?!\2)[^\\]|\\[\s\S])*\2)+\}/,
lookbehind: true,
inside: {
'short-variable': {
					// Negative look-ahead prevents wrong highlighting of functions
pattern: /(^\$\{)(?!\w+\()(?:::)?\w+(?:::\w+)*/,
lookbehind: true,
alias: 'variable',
inside: {
'punctuation': /::/
}
},
'delimiter': {
pattern: /^\$/,
alias: 'variable'
},
rest: Prism.languages.puppet
}
},
{
pattern: /(^|[^\\])\$(?:::)?\w+(?:::\w+)*/,
lookbehind: true,
alias: 'variable',
inside: {
'punctuation': /::/
}
}
];
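	// The interpolation patterns are attached after the grammar definition so
	// that heredocs and double-quoted strings can share the same array.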
Prism.languages.puppet['heredoc'][0].inside.interpolation = interpolation;
Prism.languages.puppet['string'].inside['double-quoted'].inside.interpolation = interpolation;
}(Prism));<|fim▁end|> | } |
<|file_name|>FineFilter.java<|end_file_name|><|fim▁begin|>package com.kitsu.medievalcraft.item.craftingtools.filters;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;<|fim▁hole|>
import cpw.mods.fml.common.registry.GameRegistry;
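// Single-stack crafting tool with 300 points of durability; it registers
// itself with the GameRegistry on construction and cannot be repaired.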
public class FineFilter extends Item {
private String name = "fineFilter";
private Item item;
public FineFilter() {
setMaxStackSize(1);
setUnlocalizedName(name);
setCreativeTab(CustomTab.MedievalCraftTab);
setTextureName(Main.MODID + ":" + name);
setMaxDamage(300);
setNoRepair();
item = this;
GameRegistry.registerItem(this, name);
}
public boolean getIsRepairable(ItemStack p_82789_1_, ItemStack p_82789_2_)
{
return false;
}
}<|fim▁end|> |
import com.kitsu.medievalcraft.Main;
import com.kitsu.medievalcraft.util.CustomTab; |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::fs::File;
use std::io::{BufRead, BufReader};
fn is_ordered(s: &str) -> bool {
let mut prev = '\x00';
for c in s.chars() {
if c < prev {
return false;
}
prev = c;
}<|fim▁hole|>
true
}
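/// Keeps every word of maximal length whose characters never decrease from
/// left to right (e.g. "abbot" qualifies, "banana" does not).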
fn find_longest_ordered_words(dict: Vec<String>) -> Vec<String> {
let mut result = Vec::new();
let mut longest_length = 0;
for s in dict {
if is_ordered(&s) {
let n = s.len();
if n > longest_length {
longest_length = n;
                result.clear();
}
if n == longest_length {
                result.push(s);
}
}
}
result
}
fn main() {
let lines = BufReader::new(File::open("unixdict.txt").unwrap())
.lines()
.map(|l| l.unwrap())
.collect();
let longest_ordered = find_longest_ordered_words(lines);
for s in &longest_ordered {
println!("{}", s.to_string());
}
}<|fim▁end|> | |
<|file_name|>lineNumbers.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'vs/css!./lineNumbers';
import * as platform from 'vs/base/common/platform';
import { DynamicViewOverlay } from 'vs/editor/browser/view/dynamicViewOverlay';
import { RenderLineNumbersType } from 'vs/editor/common/config/editorOptions';
import { Position } from 'vs/editor/common/core/position';
import { editorActiveLineNumber, editorLineNumbers } from 'vs/editor/common/view/editorColorRegistry';
import { RenderingContext } from 'vs/editor/common/view/renderingContext';
import { ViewContext } from 'vs/editor/common/view/viewContext';
import * as viewEvents from 'vs/editor/common/view/viewEvents';
import { registerThemingParticipant } from 'vs/platform/theme/common/themeService';
export class LineNumbersOverlay extends DynamicViewOverlay {
public static readonly CLASS_NAME = 'line-numbers';
private _context: ViewContext;
private _lineHeight: number;
private _renderLineNumbers: RenderLineNumbersType;
private _renderCustomLineNumbers: ((lineNumber: number) => string) | null;
private _lineNumbersLeft: number;
private _lineNumbersWidth: number;
private _lastCursorModelPosition: Position;
private _renderResult: string[] | null;
constructor(context: ViewContext) {
super();
this._context = context;
this._readConfig();
this._lastCursorModelPosition = new Position(1, 1);
this._renderResult = null;
this._context.addEventHandler(this);
}
private _readConfig(): void {
const config = this._context.configuration.editor;
this._lineHeight = config.lineHeight;
this._renderLineNumbers = config.viewInfo.renderLineNumbers;
this._renderCustomLineNumbers = config.viewInfo.renderCustomLineNumbers;
this._lineNumbersLeft = config.layoutInfo.lineNumbersLeft;
this._lineNumbersWidth = config.layoutInfo.lineNumbersWidth;
}
public dispose(): void {
this._context.removeEventHandler(this);
this._renderResult = null;
super.dispose();
}
// --- begin event handlers
public onConfigurationChanged(e: viewEvents.ViewConfigurationChangedEvent): boolean {
this._readConfig();
return true;
}
public onCursorStateChanged(e: viewEvents.ViewCursorStateChangedEvent): boolean {
const primaryViewPosition = e.selections[0].getPosition();
this._lastCursorModelPosition = this._context.model.coordinatesConverter.convertViewPositionToModelPosition(primaryViewPosition);
if (this._renderLineNumbers === RenderLineNumbersType.Relative || this._renderLineNumbers === RenderLineNumbersType.Interval) {
return true;
}
return false;
}
public onFlushed(e: viewEvents.ViewFlushedEvent): boolean {
return true;
}
public onLinesChanged(e: viewEvents.ViewLinesChangedEvent): boolean {
return true;
}
public onLinesDeleted(e: viewEvents.ViewLinesDeletedEvent): boolean {
return true;
}
public onLinesInserted(e: viewEvents.ViewLinesInsertedEvent): boolean {
return true;
}
public onScrollChanged(e: viewEvents.ViewScrollChangedEvent): boolean {
return e.scrollTopChanged;
}
public onZonesChanged(e: viewEvents.ViewZonesChangedEvent): boolean {
return true;
}
// --- end event handlers
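	// Resolves the label for a view line: the custom renderer's output when one
	// is configured, the distance to the cursor line in relative mode, every
	// 10th line plus the cursor line in interval mode, and the plain model line
	// number otherwise. Wrapped continuations (column !== 1) get no label.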
private _getLineRenderLineNumber(viewLineNumber: number): string {
const modelPosition = this._context.model.coordinatesConverter.convertViewPositionToModelPosition(new Position(viewLineNumber, 1));
if (modelPosition.column !== 1) {
return '';
}
let modelLineNumber = modelPosition.lineNumber;
if (this._renderCustomLineNumbers) {
return this._renderCustomLineNumbers(modelLineNumber);
}
if (this._renderLineNumbers === RenderLineNumbersType.Relative) {
let diff = Math.abs(this._lastCursorModelPosition.lineNumber - modelLineNumber);
if (diff === 0) {
return '<span class="relative-current-line-number">' + modelLineNumber + '</span>';
}
return String(diff);
}
if (this._renderLineNumbers === RenderLineNumbersType.Interval) {
if (this._lastCursorModelPosition.lineNumber === modelLineNumber) {
return String(modelLineNumber);
}
if (modelLineNumber % 10 === 0) {
return String(modelLineNumber);
}
return '';
}
return String(modelLineNumber);
}
public prepareRender(ctx: RenderingContext): void {
if (this._renderLineNumbers === RenderLineNumbersType.Off) {
this._renderResult = null;
return;
}
let lineHeightClassName = (platform.isLinux ? (this._lineHeight % 2 === 0 ? ' lh-even' : ' lh-odd') : '');
let visibleStartLineNumber = ctx.visibleRange.startLineNumber;
let visibleEndLineNumber = ctx.visibleRange.endLineNumber;
let common = '<div class="' + LineNumbersOverlay.CLASS_NAME + lineHeightClassName + '" style="left:' + this._lineNumbersLeft.toString() + 'px;width:' + this._lineNumbersWidth.toString() + 'px;">';<|fim▁hole|> let lineIndex = lineNumber - visibleStartLineNumber;
let renderLineNumber = this._getLineRenderLineNumber(lineNumber);
if (renderLineNumber) {
output[lineIndex] = (
common
+ renderLineNumber
+ '</div>'
);
} else {
output[lineIndex] = '';
}
}
this._renderResult = output;
}
public render(startLineNumber: number, lineNumber: number): string {
if (!this._renderResult) {
return '';
}
let lineIndex = lineNumber - startLineNumber;
if (lineIndex < 0 || lineIndex >= this._renderResult.length) {
return '';
}
return this._renderResult[lineIndex];
}
}
// theming
registerThemingParticipant((theme, collector) => {
const lineNumbers = theme.getColor(editorLineNumbers);
if (lineNumbers) {
collector.addRule(`.monaco-editor .line-numbers { color: ${lineNumbers}; }`);
}
const activeLineNumber = theme.getColor(editorActiveLineNumber);
if (activeLineNumber) {
collector.addRule(`.monaco-editor .current-line ~ .line-numbers { color: ${activeLineNumber}; }`);
}
});<|fim▁end|> |
let output: string[] = [];
for (let lineNumber = visibleStartLineNumber; lineNumber <= visibleEndLineNumber; lineNumber++) { |
<|file_name|>classes.js<|end_file_name|><|fim▁begin|>var classes = [
{
"name": "Hal\\Report\\Html\\Reporter",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "generate",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "renderPage",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getTrend",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 4,
"nbMethods": 4,
"nbMethodsPrivate": 1,
"nbMethodsPublic": 3,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 14,
"externals": [
"Hal\\Application\\Config\\Config",
"Hal\\Component\\Output\\Output",
"Hal\\Metric\\Metrics",
"Hal\\Metric\\Consolidated",
"Hal\\Metric\\Consolidated"
],
"lcom": 1,
"length": 360,
"vocabulary": 90,
"volume": 2337.07,
"difficulty": 12.54,
"effort": 29298.84,
"level": 0.08,
"bugs": 0.78,
"time": 1628,
"intelligentContent": 186.42,
"number_operators": 103,
"number_operands": 257,
"number_operators_unique": 8,
"number_operands_unique": 82,
"cloc": 30,
"loc": 151,
"lloc": 124,
"mi": 60.71,
"mIwoC": 28.86,
"commentWeight": 31.85,
"kanDefect": 1.36,
"relativeStructuralComplexity": 81,
"relativeDataComplexity": 0.68,
"relativeSystemComplexity": 81.68,
"totalStructuralComplexity": 324,
"totalDataComplexity": 2.7,
"totalSystemComplexity": 326.7,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 5,
"instability": 0.83,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Report\\Violations\\Xml\\Reporter",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "generate",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 7,
"externals": [
"Hal\\Application\\Config\\Config",
"Hal\\Component\\Output\\Output",
"Hal\\Metric\\Metrics",
"DOMDocument"
],
"lcom": 1,
"length": 96,
"vocabulary": 40,
"volume": 510.91,
"difficulty": 5.71,
"effort": 2919.46,
"level": 0.18,
"bugs": 0.17,
"time": 162,
"intelligentContent": 89.41,
"number_operators": 16,
"number_operands": 80,
"number_operators_unique": 5,
"number_operands_unique": 35,
"cloc": 15,
"loc": 61,
"lloc": 47,
"mi": 78.36,
"mIwoC": 43.62,
"commentWeight": 34.74,
"kanDefect": 0.75,
"relativeStructuralComplexity": 100,
"relativeDataComplexity": 0.23,
"relativeSystemComplexity": 100.23,
"totalStructuralComplexity": 200,
"totalDataComplexity": 0.45,
"totalSystemComplexity": 200.45,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 4,
"instability": 0.8,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Report\\Cli\\Reporter",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "generate",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 9,
"externals": [
"Hal\\Application\\Config\\Config",
"Hal\\Component\\Output\\Output",
"Hal\\Metric\\Metrics",
"Hal\\Metric\\Consolidated"
],
"lcom": 1,
"length": 168,
"vocabulary": 68,
"volume": 1022.69,
"difficulty": 7.75,
"effort": 7921.69,
"level": 0.13,
"bugs": 0.34,
"time": 440,
"intelligentContent": 132.03,
"number_operators": 33,
"number_operands": 135,
"number_operators_unique": 7,
"number_operands_unique": 61,
"cloc": 14,
"loc": 105,
"lloc": 85,
"mi": 62.43,
"mIwoC": 35.63,
"commentWeight": 26.8,
"kanDefect": 1.03,
"relativeStructuralComplexity": 36,
"relativeDataComplexity": 0.36,
"relativeSystemComplexity": 36.36,
"totalStructuralComplexity": 72,
"totalDataComplexity": 0.71,
"totalSystemComplexity": 72.71,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 4,
"instability": 0.8,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\Consolidated",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getAvg",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getSum",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getClasses",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getFiles",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getProject",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 6,
"nbMethods": 1,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 1,
"nbMethodsGetter": 5,
"nbMethodsSetters": 0,
"ccn": 9,
"externals": [
"Hal\\Metric\\Metrics"
],
"lcom": 1,
"length": 181,
"vocabulary": 53,
"volume": 1036.75,
"difficulty": 10.5,
"effort": 10885.91,
"level": 0.1,
"bugs": 0.35,
"time": 605,
"intelligentContent": 98.74,
"number_operators": 43,
"number_operands": 138,
"number_operators_unique": 7,
"number_operands_unique": 46,
"cloc": 37,
"loc": 123,
"lloc": 86,
"mi": 73.03,
"mIwoC": 35.47,
"commentWeight": 37.55,
"kanDefect": 1.67,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 1.29,
"relativeSystemComplexity": 10.29,
"totalStructuralComplexity": 54,
"totalDataComplexity": 7.75,
"totalSystemComplexity": 61.75,
"pageRank": 0.01,
"afferentCoupling": 3,
"efferentCoupling": 1,
"instability": 0.25,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\InterfaceMetric",
"interface": false,
"methods": [],
"nbMethodsIncludingGettersSetters": 0,
"nbMethods": 0,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 0,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"Hal\\Metric\\ClassMetric"
],
"lcom": 0,
"length": 0,
"vocabulary": 0,
"volume": 0,
"difficulty": 0,
"effort": 0,
"level": 0,
"bugs": 0,
"time": 0,
"intelligentContent": 0,
"number_operators": 0,
"number_operands": 0,
"number_operators_unique": 0,
"number_operands_unique": 0,
"cloc": 0,
"loc": 4,
"lloc": 4,
"mi": 171,
"mIwoC": 171,
"commentWeight": 0,
"kanDefect": 0.15,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 0,
"relativeSystemComplexity": 0,
"totalStructuralComplexity": 0,
"totalDataComplexity": 0,
"totalSystemComplexity": 0,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 1,
"instability": 0.5,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\FunctionMetric",
"interface": false,
"methods": [],
"nbMethodsIncludingGettersSetters": 0,
"nbMethods": 0,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 0,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"Hal\\Metric\\Metric",
"JsonSerializable"
],
"lcom": 0,
"length": 0,
"vocabulary": 0,
"volume": 0,
"difficulty": 0,
"effort": 0,
"level": 0,
"bugs": 0,
"time": 0,
"intelligentContent": 0,
"number_operators": 0,
"number_operands": 0,
"number_operators_unique": 0,
"number_operands_unique": 0,
"cloc": 0,
"loc": 5,
"lloc": 5,
"mi": 171,
"mIwoC": 171,
"commentWeight": 0,
"kanDefect": 0.15,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 0,
"relativeSystemComplexity": 0,
"totalStructuralComplexity": 0,
"totalDataComplexity": 0,
"totalSystemComplexity": 0,
"pageRank": 0.01,
"afferentCoupling": 4,
"efferentCoupling": 2,
"instability": 0.33,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\FileMetric",
"interface": false,
"methods": [],
"nbMethodsIncludingGettersSetters": 0,
"nbMethods": 0,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 0,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"Hal\\Metric\\Metric",
"JsonSerializable"
],
"lcom": 0,
"length": 0,
"vocabulary": 0,
"volume": 0,
"difficulty": 0,
"effort": 0,
"level": 0,
"bugs": 0,
"time": 0,
"intelligentContent": 0,
"number_operators": 0,
"number_operands": 0,
"number_operators_unique": 0,
"number_operands_unique": 0,
"cloc": 0,
"loc": 5,
"lloc": 5,
"mi": 171,
"mIwoC": 171,
"commentWeight": 0,
"kanDefect": 0.15,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 0,
"relativeSystemComplexity": 0,
"totalStructuralComplexity": 0,
"totalDataComplexity": 0,
"totalSystemComplexity": 0,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 2,
"instability": 0.67,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\Metrics",
"interface": false,
"methods": [
{
"name": "attach",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "get",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "has",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "all",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "jsonSerialize",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 5,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 1,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"JsonSerializable"
],
"lcom": 1,
"length": 21,
"vocabulary": 5,
"volume": 48.76,
"difficulty": 5,
"effort": 243.8,
"level": 0.2,
"bugs": 0.02,
"time": 14,
"intelligentContent": 9.75,
"number_operators": 6,
"number_operands": 15,
"number_operators_unique": 2,
"number_operands_unique": 3,
"cloc": 25,
"loc": 51,
"lloc": 26,
"mi": 101.39,
"mIwoC": 57.18,
"commentWeight": 44.21,
"kanDefect": 0.15,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 1.4,
"relativeSystemComplexity": 10.4,
"totalStructuralComplexity": 45,
"totalDataComplexity": 7,
"totalSystemComplexity": 52,
"pageRank": 0.02,
"afferentCoupling": 18,
"efferentCoupling": 1,
"instability": 0.05,
"numberOfUnitTests": 12,
"violations": {}
},
{
"name": "Hal\\Metric\\ProjectMetric",
"interface": false,
"methods": [],
"nbMethodsIncludingGettersSetters": 0,
"nbMethods": 0,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 0,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"Hal\\Metric\\Metric",
"JsonSerializable"
],
"lcom": 0,
"length": 0,
"vocabulary": 0,
"volume": 0,
"difficulty": 0,
"effort": 0,
"level": 0,
"bugs": 0,
"time": 0,
"intelligentContent": 0,
"number_operators": 0,
"number_operands": 0,
"number_operators_unique": 0,
"number_operands_unique": 0,
"cloc": 0,
"loc": 5,
"lloc": 5,
"mi": 171,
"mIwoC": 171,
"commentWeight": 0,
"kanDefect": 0.15,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 0,
"relativeSystemComplexity": 0,
"totalStructuralComplexity": 0,
"totalDataComplexity": 0,
"totalSystemComplexity": 0,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 2,
"instability": 0.67,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\Helper\\RoleOfMethodDetector",
"interface": false,
"methods": [
{
"name": "detects",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 1,
"nbMethods": 1,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 4,
"externals": [],
"lcom": 1,
"length": 52,
"vocabulary": 21,
"volume": 228.4,
"difficulty": 8.75,
"effort": 1998.5,
"level": 0.11,
"bugs": 0.08,
"time": 111,
"intelligentContent": 26.1,
"number_operators": 17,
"number_operands": 35,
"number_operators_unique": 7,
"number_operands_unique": 14,
"cloc": 15,
"loc": 44,
"lloc": 29,
"mi": 90.35,
"mIwoC": 51.05,
"commentWeight": 39.31,
"kanDefect": 0.66,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 6,
"relativeSystemComplexity": 6,
"totalStructuralComplexity": 0,
"totalDataComplexity": 6,
"totalSystemComplexity": 6,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 0,
"instability": 0,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\Component\\MaintainabilityIndexVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 10,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node",
"Hal\\Metric\\FunctionMetric",
"LogicException",
"LogicException",
"LogicException",
"LogicException",
"LogicException"
],
"lcom": 1,
"length": 111,
"vocabulary": 36,
"volume": 573.86,
"difficulty": 10.14,
"effort": 5820.6,
"level": 0.1,
"bugs": 0.19,
"time": 323,
"intelligentContent": 56.58,
"number_operators": 40,
"number_operands": 71,
"number_operators_unique": 8,
"number_operands_unique": 28,
"cloc": 31,
"loc": 77,
"lloc": 46,
"mi": 84.67,
"mIwoC": 43.07,
"commentWeight": 41.61,
"kanDefect": 0.78,
"relativeStructuralComplexity": 16,
"relativeDataComplexity": 0.2,
"relativeSystemComplexity": 16.2,
"totalStructuralComplexity": 32,
"totalDataComplexity": 0.4,
"totalSystemComplexity": 32.4,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 9,
"instability": 0.9,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\Coupling\\ExternalsVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 20,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node"
],
"lcom": 1,
"length": 102,
"vocabulary": 25,
"volume": 473.67,
"difficulty": 14.97,
"effort": 7091.94,
"level": 0.07,
"bugs": 0.16,
"time": 394,
"intelligentContent": 31.64,
"number_operators": 25,
"number_operands": 77,
"number_operators_unique": 7,
"number_operands_unique": 18,
"cloc": 27,
"loc": 102,
"lloc": 75,
"mi": 73.44,
"mIwoC": 37.67,
"commentWeight": 35.77,
"kanDefect": 2.66,
"relativeStructuralComplexity": 25,
"relativeDataComplexity": 0.17,
"relativeSystemComplexity": 25.17,
"totalStructuralComplexity": 50,
"totalDataComplexity": 0.33,
"totalSystemComplexity": 50.33,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 3,
"instability": 0.75,
"numberOfUnitTests": 2,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\Text\\HalsteadVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 4,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node",
"Hal\\Metric\\FunctionMetric"
],
"lcom": 1,
"length": 218,
"vocabulary": 50,
"volume": 1230.36,
"difficulty": 11.97,
"effort": 14721.41,
"level": 0.08,
"bugs": 0.41,
"time": 818,
"intelligentContent": 102.83,
"number_operators": 71,
"number_operands": 147,
"number_operators_unique": 7,
"number_operands_unique": 43,
"cloc": 29,
"loc": 88,
"lloc": 59,
"mi": 78.03,
"mIwoC": 39.2,
"commentWeight": 38.83,
"kanDefect": 0.57,
"relativeStructuralComplexity": 16,
"relativeDataComplexity": 0.2,
"relativeSystemComplexity": 16.2,
"totalStructuralComplexity": 32,
"totalDataComplexity": 0.4,
"totalSystemComplexity": 32.4,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 4,
"instability": 0.8,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\Text\\LengthVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 5,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node",
"Hal\\Metric\\FunctionMetric",
"PhpParser\\PrettyPrinter\\Standard"
],
"lcom": 1,
"length": 80,
"vocabulary": 25,
"volume": 371.51,
"difficulty": 7.75,
"effort": 2879.19,
"level": 0.13,
"bugs": 0.12,
"time": 160,
"intelligentContent": 47.94,
"number_operators": 18,
"number_operands": 62,
"number_operators_unique": 5,
"number_operands_unique": 20,
"cloc": 20,
"loc": 55,
"lloc": 36,
"mi": 87.59,
"mIwoC": 47.38,
"commentWeight": 40.21,
"kanDefect": 0.59,
"relativeStructuralComplexity": 25,
"relativeDataComplexity": 0.17,
"relativeSystemComplexity": 25.17,
"totalStructuralComplexity": 50,
"totalDataComplexity": 0.33,
"totalSystemComplexity": 50.33,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 5,
"instability": 0.83,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\Complexity\\KanDefectVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 2,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node"
],
"lcom": 1,
"length": 50,
"vocabulary": 21,
"volume": 219.62,
"difficulty": 7,
"effort": 1537.31,
"level": 0.14,
"bugs": 0.07,
"time": 85,
"intelligentContent": 31.37,
"number_operators": 15,
"number_operands": 35,
"number_operators_unique": 6,
"number_operands_unique": 15,
"cloc": 15,
"loc": 48,
"lloc": 33,
"mi": 88.3,
"mIwoC": 50.21,
"commentWeight": 38.09,
"kanDefect": 0.44,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 0.25,
"relativeSystemComplexity": 9.25,
"totalStructuralComplexity": 18,
"totalDataComplexity": 0.5,
"totalSystemComplexity": 18.5,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 3,
"instability": 0.75,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\Complexity\\CyclomaticComplexityVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 4,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node"
],
"lcom": 1,
"length": 68,
"vocabulary": 19,
"volume": 288.86,
"difficulty": 18.91,
"effort": 5462.06,
"level": 0.05,
"bugs": 0.1,
"time": 303,
"intelligentContent": 15.28,
"number_operators": 16,
"number_operands": 52,
"number_operators_unique": 8,
"number_operands_unique": 11,
"cloc": 27,
"loc": 80,
"lloc": 53,
"mi": 83.79,
"mIwoC": 44.62,
"commentWeight": 39.17,
"kanDefect": 1.04,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 0.5,
"relativeSystemComplexity": 9.5,
"totalStructuralComplexity": 18,
"totalDataComplexity": 1,
"totalSystemComplexity": 19,
"pageRank": 0,
"afferentCoupling": 2,
"efferentCoupling": 3,
"instability": 0.6,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\ClassEnumVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 8,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node",
"Hal\\Metric\\InterfaceMetric",
"Hal\\Metric\\ClassMetric",
"Hal\\Metric\\Helper\\RoleOfMethodDetector",
"Hal\\Metric\\FunctionMetric"
],
"lcom": 1,
"length": 113,
"vocabulary": 35,
"volume": 579.61,
"difficulty": 12.59,
"effort": 7298.78,
"level": 0.08,
"bugs": 0.19,
"time": 405,
"intelligentContent": 46.03,
"number_operators": 28,
"number_operands": 85,
"number_operators_unique": 8,
"number_operands_unique": 27,
"cloc": 8,
"loc": 73,
"lloc": 65,
"mi": 64.56,
"mIwoC": 40.03,
"commentWeight": 24.53,
"kanDefect": 1.09,
"relativeStructuralComplexity": 49,
"relativeDataComplexity": 0.13,
"relativeSystemComplexity": 49.13,
"totalStructuralComplexity": 98,
"totalDataComplexity": 0.25,
"totalSystemComplexity": 98.25,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 7,
"instability": 0.88,
"numberOfUnitTests": 11,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\Structural\\SystemComplexityVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 4,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node"
],
"lcom": 1,
"length": 98,
"vocabulary": 27,
"volume": 465.98,
"difficulty": 8.3,
"effort": 3865.51,
"level": 0.12,
"bugs": 0.16,
"time": 215,
"intelligentContent": 56.17,
"number_operators": 25,
"number_operands": 73,
"number_operators_unique": 5,
"number_operands_unique": 22,
"cloc": 23,
"loc": 63,
"lloc": 40,
"mi": 86.09,
"mIwoC": 45.83,
"commentWeight": 40.26,
"kanDefect": 0.74,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 0.25,
"relativeSystemComplexity": 9.25,
"totalStructuralComplexity": 18,
"totalDataComplexity": 0.5,
"totalSystemComplexity": 18.5,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 3,
"instability": 0.75,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Metric\\Class_\\Structural\\LcomVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "traverse",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 3,
"nbMethods": 3,
"nbMethodsPrivate": 1,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 8,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"Hal\\Metric\\Metrics",
"PhpParser\\Node",
"Hal\\Component\\Tree\\Graph",
"Hal\\Component\\Tree\\Node",
"Hal\\Component\\Tree\\Node",
"Hal\\Component\\Tree\\Node",
"Hal\\Component\\Tree\\Node"
],
"lcom": 1,
"length": 111,
"vocabulary": 23,
"volume": 502.12,
"difficulty": 20.53,
"effort": 10310.1,
"level": 0.05,
"bugs": 0.17,
"time": 573,
"intelligentContent": 24.45,
"number_operators": 34,
"number_operands": 77,
"number_operators_unique": 8,
"number_operands_unique": 15,
"cloc": 27,
"loc": 89,
"lloc": 62,
"mi": 78.59,
"mIwoC": 40.91,
"commentWeight": 37.67,
"kanDefect": 1.47,
"relativeStructuralComplexity": 81,
"relativeDataComplexity": 0.5,
"relativeSystemComplexity": 81.5,
"totalStructuralComplexity": 243,
"totalDataComplexity": 1.5,
"totalSystemComplexity": 244.5,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 8,
"instability": 0.89,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Metric\\System\\Changes\\GitChanges",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "calculate",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "doesThisFileShouldBeCounted",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 3,
"nbMethods": 3,
"nbMethodsPrivate": 1,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 14,
"externals": [
"Hal\\Application\\Config\\Config",
"Hal\\Metric\\Metrics",
"Hal\\Application\\Config\\ConfigException",
"DateTime",
"Hal\\Metric\\ProjectMetric",
"Hal\\Metric\\FileMetric"
],
"lcom": 1,
"length": 256,
"vocabulary": 49,
"volume": 1437.37,
"difficulty": 16.17,
"effort": 23237.41,
"level": 0.06,
"bugs": 0.48,
"time": 1291,
"intelligentContent": 88.91,
"number_operators": 62,
"number_operands": 194,
"number_operators_unique": 7,
"number_operands_unique": 42,
"cloc": 36,
"loc": 142,
"lloc": 106,
"mi": 66.99,
"mIwoC": 31.83,
"commentWeight": 35.17,
"kanDefect": 1.82,
"relativeStructuralComplexity": 49,
"relativeDataComplexity": 0.54,
"relativeSystemComplexity": 49.54,
"totalStructuralComplexity": 147,
"totalDataComplexity": 1.63,
"totalSystemComplexity": 148.63,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 6,
"instability": 0.86,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\System\\Coupling\\PageRank",
"interface": false,
"methods": [
{
"name": "calculate",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "calculatePageRank",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 1,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 12,
"externals": [
"Hal\\Metric\\Metrics"
],
"lcom": 1,
"length": 136,
"vocabulary": 40,
"volume": 723.78,
"difficulty": 24.31,
"effort": 17598.63,
"level": 0.04,
"bugs": 0.24,
"time": 978,
"intelligentContent": 29.77,
"number_operators": 35,
"number_operands": 101,
"number_operators_unique": 13,
"number_operands_unique": 27,
"cloc": 20,
"loc": 75,
"lloc": 55,
"mi": 76.27,
"mIwoC": 40.4,
"commentWeight": 35.87,
"kanDefect": 2.13,
"relativeStructuralComplexity": 16,
"relativeDataComplexity": 0.5,
"relativeSystemComplexity": 16.5,
"totalStructuralComplexity": 32,
"totalDataComplexity": 1,
"totalSystemComplexity": 33,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 1,
"instability": 0.5,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\System\\Coupling\\Coupling",
"interface": false,
"methods": [
{
"name": "calculate",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 1,
"nbMethods": 1,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 14,
"externals": [
"Hal\\Metric\\Metrics",
"Hal\\Component\\Tree\\Graph",
"Hal\\Component\\Tree\\Node",
"Hal\\Component\\Tree\\Node"
],
"lcom": 1,
"length": 84,
"vocabulary": 23,
"volume": 379.98,
"difficulty": 11.12,
"effort": 4224.47,
"level": 0.09,
"bugs": 0.13,
"time": 235,
"intelligentContent": 34.18,
"number_operators": 21,
"number_operands": 63,
"number_operators_unique": 6,
"number_operands_unique": 17,
"cloc": 12,
"loc": 56,
"lloc": 44,
"mi": 77.06,
"mIwoC": 44.2,
"commentWeight": 32.86,
"kanDefect": 1.56,
"relativeStructuralComplexity": 100,
"relativeDataComplexity": 0.09,
"relativeSystemComplexity": 100.09,
"totalStructuralComplexity": 100,
"totalDataComplexity": 0.09,
"totalSystemComplexity": 100.09,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 4,
"instability": 0.8,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Metric\\ClassMetric",
"interface": false,
"methods": [],
"nbMethodsIncludingGettersSetters": 0,
"nbMethods": 0,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 0,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"Hal\\Metric\\Metric",
"JsonSerializable"
],
"lcom": 0,
"length": 0,
"vocabulary": 0,
"volume": 0,
"difficulty": 0,
"effort": 0,
"level": 0,
"bugs": 0,
"time": 0,
"intelligentContent": 0,
"number_operators": 0,
"number_operands": 0,
"number_operators_unique": 0,
"number_operands_unique": 0,
"cloc": 0,
"loc": 5,
"lloc": 5,
"mi": 171,
"mIwoC": 171,
"commentWeight": 0,
"kanDefect": 0.15,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 0,
"relativeSystemComplexity": 0,
"totalStructuralComplexity": 0,
"totalDataComplexity": 0,
"totalSystemComplexity": 0,
"pageRank": 0.01,
"afferentCoupling": 2,
"efferentCoupling": 2,
"instability": 0.5,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Component\\Ast\\NodeTraverser",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "traverseArray",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 1,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 6,
"externals": [
"PhpParser\\NodeTraverser",
"parent"
],
"lcom": 1,
"length": 93,
"vocabulary": 19,
"volume": 395.06,
"difficulty": 17.79,
"effort": 7028.73,
"level": 0.06,
"bugs": 0.13,
"time": 390,
"intelligentContent": 22.2,
"number_operators": 32,
"number_operands": 61,
"number_operators_unique": 7,
"number_operands_unique": 12,
"cloc": 5,
"loc": 65,
"lloc": 60,
"mi": 63.05,
"mIwoC": 42.22,
"commentWeight": 20.83,
"kanDefect": 1.63,
"relativeStructuralComplexity": 25,
"relativeDataComplexity": 0.75,
"relativeSystemComplexity": 25.75,
"totalStructuralComplexity": 50,
"totalDataComplexity": 1.5,
"totalSystemComplexity": 51.5,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 2,
"instability": 0.67,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Component\\Output\\CliOutput",
"interface": false,
"methods": [
{
"name": "writeln",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "write",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "err",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "clearln",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "setQuietMode",
"role": "setter",
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 5,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 0,
"nbMethodsSetters": 1,
"ccn": 2,
"externals": [
"Hal\\Component\\Output\\Output"
],
"lcom": 2,
"length": 30,
"vocabulary": 11,
"volume": 103.78,
"difficulty": 6.29,
"effort": 652.35,
"level": 0.16,
"bugs": 0.03,
"time": 36,
"intelligentContent": 16.51,
"number_operators": 8,
"number_operands": 22,
"number_operators_unique": 4,
"number_operands_unique": 7,
"cloc": 25,
"loc": 54,
"lloc": 31,
"mi": 96.55,
"mIwoC": 53.08,
"commentWeight": 43.47,
"kanDefect": 0.15,
"relativeStructuralComplexity": 4,
"relativeDataComplexity": 1.93,
"relativeSystemComplexity": 5.93,
"totalStructuralComplexity": 20,
"totalDataComplexity": 9.67,
"totalSystemComplexity": 29.67,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 1,
"instability": 0.5,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Component\\Output\\ProgressBar",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "start",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "advance",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "clear",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "hasAnsi",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 5,
"nbMethods": 5,
"nbMethodsPrivate": 1,
"nbMethodsPublic": 4,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 4,
"externals": [
"Hal\\Component\\Output\\Output"
],
"lcom": 1,
"length": 66,
"vocabulary": 29,
"volume": 320.63,
"difficulty": 12.83,
"effort": 4114.71,
"level": 0.08,
"bugs": 0.11,
"time": 229,
"intelligentContent": 24.98,
"number_operators": 24,
"number_operands": 42,
"number_operators_unique": 11,
"number_operands_unique": 18,
"cloc": 40,
"loc": 83,
"lloc": 43,
"mi": 90.27,
"mIwoC": 46.28,
"commentWeight": 43.99,
"kanDefect": 0.36,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 0.6,
"relativeSystemComplexity": 9.6,
"totalStructuralComplexity": 45,
"totalDataComplexity": 3,
"totalSystemComplexity": 48,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 1,
"instability": 0.5,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Component\\Issue\\Issuer",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "onError",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "enable",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "disable",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "terminate",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "log",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "set",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "clear",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 8,
"nbMethods": 8,
"nbMethodsPrivate": 2,
"nbMethodsPublic": 6,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 7,
"externals": [
"Hal\\Component\\Output\\Output",
"PhpParser\\PrettyPrinter\\Standard"
],
"lcom": 3,
"length": 123,
"vocabulary": 49,
"volume": 690.61,
"difficulty": 6.77,
"effort": 4673.66,
"level": 0.15,
"bugs": 0.23,
"time": 260,
"intelligentContent": 102.05,
"number_operators": 26,
"number_operands": 97,
"number_operators_unique": 6,
"number_operands_unique": 43,
"cloc": 44,
"loc": 152,
"lloc": 95,
"mi": 73.05,
"mIwoC": 36.04,
"commentWeight": 37.01,
"kanDefect": 0.89,
"relativeStructuralComplexity": 16,
"relativeDataComplexity": 1.48,
"relativeSystemComplexity": 17.48,
"totalStructuralComplexity": 128,
"totalDataComplexity": 11.8,
"totalSystemComplexity": 139.8,
"pageRank": 0,
"afferentCoupling": 2,
"efferentCoupling": 2,
"instability": 0.5,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Component\\Tree\\Edge",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getFrom",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getTo",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "asString",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 4,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 2,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"Hal\\Component\\Tree\\Node",
"Hal\\Component\\Tree\\Node"
],
"lcom": 1,
"length": 16,
"vocabulary": 6,
"volume": 41.36,
"difficulty": 2.75,
"effort": 113.74,
"level": 0.36,
"bugs": 0.01,
"time": 6,
"intelligentContent": 15.04,
"number_operators": 5,
"number_operands": 11,
"number_operators_unique": 2,
"number_operands_unique": 4,
"cloc": 23,
"loc": 47,
"lloc": 24,
"mi": 102.62,
"mIwoC": 58.44,
"commentWeight": 44.19,
"kanDefect": 0.15,
"relativeStructuralComplexity": 1,
"relativeDataComplexity": 1.75,
"relativeSystemComplexity": 2.75,
"totalStructuralComplexity": 4,
"totalDataComplexity": 7,
"totalSystemComplexity": 11,
"pageRank": 0.37,
"afferentCoupling": 2,
"efferentCoupling": 2,
"instability": 0.5,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Component\\Tree\\Node",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getKey",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getAdjacents",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getEdges",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "addEdge",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getData",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "setData",
"role": "setter",
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 7,
"nbMethods": 3,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 3,
"nbMethodsGetter": 3,
"nbMethodsSetters": 1,
"ccn": 4,
"externals": [
"Hal\\Component\\Tree\\Edge"
],
"lcom": 1,
"length": 47,
"vocabulary": 9,
"volume": 148.99,
"difficulty": 12.4,
"effort": 1847.43,
"level": 0.08,
"bugs": 0.05,
"time": 103,
"intelligentContent": 12.02,
"number_operators": 16,
"number_operands": 31,
"number_operators_unique": 4,
"number_operands_unique": 5,
"cloc": 40,
"loc": 89,
"lloc": 49,
"mi": 90.46,
"mIwoC": 47.38,
"commentWeight": 43.08,
"kanDefect": 0.52,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 1.64,
"relativeSystemComplexity": 10.64,
"totalStructuralComplexity": 63,
"totalDataComplexity": 11.5,
"totalSystemComplexity": 74.5,
"pageRank": 0.35,
"afferentCoupling": 13,
"efferentCoupling": 1,
"instability": 0.07,
"numberOfUnitTests": 42,
"violations": {}
},
{
"name": "Hal\\Component\\Tree\\Graph",
"interface": false,
"methods": [
{
"name": "insert",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "addEdge",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "asString",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getEdges",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "get",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "has",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "count",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "all",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 8,
"nbMethods": 6,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 6,
"nbMethodsGetter": 2,
"nbMethodsSetters": 0,
"ccn": 6,
"externals": [
"Countable",
"Hal\\Component\\Tree\\Node",
"Hal\\Component\\Tree\\GraphException",
"Hal\\Component\\Tree\\Node",
"Hal\\Component\\Tree\\Node",
"Hal\\Component\\Tree\\GraphException",
"Hal\\Component\\Tree\\GraphException",
"Hal\\Component\\Tree\\Edge"
],
"lcom": 1,
"length": 67,
"vocabulary": 16,
"volume": 268,
"difficulty": 8.5,
"effort": 2278,
"level": 0.12,
"bugs": 0.09,
"time": 127,
"intelligentContent": 31.53,
"number_operators": 16,
"number_operands": 51,
"number_operators_unique": 4,
"number_operands_unique": 12,
"cloc": 35,
"loc": 94,
"lloc": 59,
"mi": 84.1,
"mIwoC": 43.56,
"commentWeight": 40.53,
"kanDefect": 0.82,
"relativeStructuralComplexity": 36,
"relativeDataComplexity": 1.23,
"relativeSystemComplexity": 37.23,
"totalStructuralComplexity": 288,
"totalDataComplexity": 9.86,
"totalSystemComplexity": 297.86,
"pageRank": 0.01,
"afferentCoupling": 3,
"efferentCoupling": 8,
"instability": 0.73,
"numberOfUnitTests": 10,
"violations": {}
},
{
"name": "Hal\\Component\\Tree\\Operator\\CycleDetector",
"interface": false,
"methods": [
{
"name": "isCyclic",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "detectCycle",
"role": null,
"public": false,
"private": true,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 1,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 9,
"externals": [
"Hal\\Component\\Tree\\Graph",
"Hal\\Component\\Tree\\Node"
],
"lcom": 1,
"length": 64,
"vocabulary": 12,
"volume": 229.44,
"difficulty": 14.29,
"effort": 3277.68,
"level": 0.07,
"bugs": 0.08,
"time": 182,
"intelligentContent": 16.06,
"number_operators": 24,
"number_operands": 40,
"number_operators_unique": 5,
"number_operands_unique": 7,
"cloc": 23,
"loc": 64,
"lloc": 41,
"mi": 87.12,
"mIwoC": 47.08,
"commentWeight": 40.04,
"kanDefect": 1.12,
"relativeStructuralComplexity": 36,
"relativeDataComplexity": 0.79,
"relativeSystemComplexity": 36.79,
"totalStructuralComplexity": 72,
"totalDataComplexity": 1.57,
"totalSystemComplexity": 73.57,
"pageRank": 0,
"afferentCoupling": 0,
"efferentCoupling": 2,
"instability": 1,
"numberOfUnitTests": 4,
"violations": {}
},
{
"name": "Hal\\Component\\Tree\\GraphException",
"interface": false,
"methods": [],
"nbMethodsIncludingGettersSetters": 0,
"nbMethods": 0,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 0,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"LogicException"
],
"lcom": 0,
"length": 0,
"vocabulary": 0,
"volume": 0,
"difficulty": 0,
"effort": 0,
"level": 0,
"bugs": 0,
"time": 0,
"intelligentContent": 0,
"number_operators": 0,
"number_operands": 0,
"number_operators_unique": 0,
"number_operands_unique": 0,
"cloc": 0,
"loc": 4,
"lloc": 4,
"mi": 171,
"mIwoC": 171,
"commentWeight": 0,
"kanDefect": 0.15,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 0,
"relativeSystemComplexity": 0,
"totalStructuralComplexity": 0,
"totalDataComplexity": 0,
"totalSystemComplexity": 0,
"pageRank": 0.01,
"afferentCoupling": 3,
"efferentCoupling": 1,
"instability": 0.25,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Component\\Tree\\HashMap",
"interface": false,
"methods": [
{
"name": "attach",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "get",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "has",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "count",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getIterator",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 5,
"nbMethods": 5,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 5,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"Countable",
"IteratorAggregate",
"Hal\\Component\\Tree\\Node",
"ArrayIterator"
],
"lcom": 1,
"length": 21,
"vocabulary": 5,
"volume": 48.76,
"difficulty": 5,
"effort": 243.8,
"level": 0.2,
"bugs": 0.02,
"time": 14,
"intelligentContent": 9.75,
"number_operators": 6,
"number_operands": 15,
"number_operators_unique": 2,
"number_operands_unique": 3,
"cloc": 21,
"loc": 47,
"lloc": 26,
"mi": 100.19,
"mIwoC": 57.18,
"commentWeight": 43.01,
"kanDefect": 0.15,
"relativeStructuralComplexity": 4,
"relativeDataComplexity": 1.87,
"relativeSystemComplexity": 5.87,
"totalStructuralComplexity": 20,
"totalDataComplexity": 9.33,
"totalSystemComplexity": 29.33,
"pageRank": 0,
"afferentCoupling": 0,
"efferentCoupling": 4,
"instability": 1,
"numberOfUnitTests": 3,
"violations": {}
},
{
"name": "Hal\\Component\\File\\Finder",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "fetch",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 4,
"externals": [
"RecursiveDirectoryIterator",
"RecursiveIteratorIterator",
"RegexIterator"
],
"lcom": 1,
"length": 64,
"vocabulary": 25,
"volume": 297.21,
"difficulty": 4.38,
"effort": 1302.05,
"level": 0.23,
"bugs": 0.1,
"time": 72,
"intelligentContent": 67.84,
"number_operators": 18,
"number_operands": 46,
"number_operators_unique": 4,
"number_operands_unique": 21,
"cloc": 35,
"loc": 68,
"lloc": 33,
"mi": 93.84,
"mIwoC": 49.02,
"commentWeight": 44.82,
"kanDefect": 0.68,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 3,
"relativeSystemComplexity": 3,
"totalStructuralComplexity": 0,
"totalDataComplexity": 6,
"totalSystemComplexity": 6,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 3,
"instability": 0.75,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Violation\\Violations",
"interface": false,
"methods": [
{
"name": "getIterator",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "add",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "count",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "__toString",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 4,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 2,
"externals": [
"IteratorAggregate",
"Countable",
"ArrayIterator",
"Hal\\Violation\\Violation"
],
"lcom": 1,
"length": 20,
"vocabulary": 9,
"volume": 63.4,
"difficulty": 5.2,
"effort": 329.67,
"level": 0.19,
"bugs": 0.02,
"time": 18,
"intelligentContent": 12.19,
"number_operators": 7,
"number_operands": 13,
"number_operators_unique": 4,
"number_operands_unique": 5,
"cloc": 19,
"loc": 44,
"lloc": 25,
"mi": 99.17,
"mIwoC": 56.62,
"commentWeight": 42.55,
"kanDefect": 0.38,
"relativeStructuralComplexity": 1,
"relativeDataComplexity": 1.63,
"relativeSystemComplexity": 2.63,
"totalStructuralComplexity": 4,
"totalDataComplexity": 6.5,
"totalSystemComplexity": 10.5,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 4,
"instability": 0.8,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Violation\\Class_\\Blob",
"interface": false,
"methods": [
{
"name": "getName",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "apply",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getLevel",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getDescription",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 4,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 6,
"externals": [
"Hal\\Violation\\Violation",
"Hal\\Metric\\Metric"
],
"lcom": 3,
"length": 47,
"vocabulary": 19,
"volume": 199.65,
"difficulty": 4.27,
"effort": 851.85,
"level": 0.23,
"bugs": 0.07,
"time": 47,
"intelligentContent": 46.79,
"number_operators": 15,
"number_operands": 32,
"number_operators_unique": 4,
"number_operands_unique": 15,
"cloc": 12,
"loc": 56,
"lloc": 42,
"mi": 80.54,
"mIwoC": 47.68,
"commentWeight": 32.86,
"kanDefect": 0.5,
"relativeStructuralComplexity": 4,
"relativeDataComplexity": 1.42,
"relativeSystemComplexity": 5.42,
"totalStructuralComplexity": 16,
"totalDataComplexity": 5.67,
"totalSystemComplexity": 21.67,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 2,
"instability": 0.67,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Violation\\Class_\\TooComplexCode",
"interface": false,
"methods": [
{
"name": "getName",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "apply",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getLevel",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getDescription",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 4,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 3,
"externals": [
"Hal\\Violation\\Violation",
"Hal\\Metric\\Metric"
],
"lcom": 3,
"length": 28,
"vocabulary": 15,
"volume": 109.39,
"difficulty": 3.45,
"effort": 377.9,
"level": 0.29,
"bugs": 0.04,
"time": 21,
"intelligentContent": 31.67,
"number_operators": 9,
"number_operands": 19,
"number_operators_unique": 4,
"number_operands_unique": 11,
"cloc": 12,
"loc": 46,
"lloc": 32,
"mi": 88.05,
"mIwoC": 52.49,
"commentWeight": 35.56,
"kanDefect": 0.29,
"relativeStructuralComplexity": 4,
"relativeDataComplexity": 1.75,
"relativeSystemComplexity": 5.75,
"totalStructuralComplexity": 16,
"totalDataComplexity": 7,
"totalSystemComplexity": 23,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 2,
"instability": 0.67,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Violation\\Class_\\TooDependent",
"interface": false,
"methods": [
{
"name": "getName",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "apply",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getLevel",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getDescription",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 4,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 3,
"externals": [
"Hal\\Violation\\Violation",
"Hal\\Metric\\Metric"
],
"lcom": 3,
"length": 28,
"vocabulary": 14,
"volume": 106.61,
"difficulty": 3.8,
"effort": 405.1,
"level": 0.26,
"bugs": 0.04,
"time": 23,
"intelligentContent": 28.05,
"number_operators": 9,
"number_operands": 19,
"number_operators_unique": 4,
"number_operands_unique": 10,
"cloc": 12,
"loc": 45,
"lloc": 31,
"mi": 88.73,
"mIwoC": 52.87,
"commentWeight": 35.87,
"kanDefect": 0.29,
"relativeStructuralComplexity": 4,
"relativeDataComplexity": 1.75,
"relativeSystemComplexity": 5.75,
"totalStructuralComplexity": 16,
"totalDataComplexity": 7,
"totalSystemComplexity": 23,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 2,
"instability": 0.67,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Violation\\Class_\\TooLong",
"interface": false,
"methods": [
{
"name": "getName",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "apply",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getLevel",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getDescription",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 4,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 3,
"externals": [
"Hal\\Violation\\Violation",
"Hal\\Metric\\Metric"
],
"lcom": 3,
"length": 27,
"vocabulary": 15,
"volume": 105.49,
"difficulty": 3.45,
"effort": 364.41,
"level": 0.29,
"bugs": 0.04,
"time": 20,
"intelligentContent": 30.54,
"number_operators": 8,
"number_operands": 19,
"number_operators_unique": 4,
"number_operands_unique": 11,
"cloc": 12,
"loc": 45,
"lloc": 31,
"mi": 88.77,
"mIwoC": 52.9,
"commentWeight": 35.87,
"kanDefect": 0.29,
"relativeStructuralComplexity": 4,
"relativeDataComplexity": 1.42,
"relativeSystemComplexity": 5.42,
"totalStructuralComplexity": 16,
"totalDataComplexity": 5.67,
"totalSystemComplexity": 21.67,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 2,
"instability": 0.67,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Violation\\Class_\\ProbablyBugged",
"interface": false,
"methods": [
{
"name": "getName",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "apply",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getLevel",
"role": null,<|fim▁hole|> "private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "getDescription",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 4,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 3,
"externals": [
"Hal\\Violation\\Violation",
"Hal\\Metric\\Metric"
],
"lcom": 3,
"length": 31,
"vocabulary": 17,
"volume": 126.71,
"difficulty": 3.23,
"effort": 409.38,
"level": 0.31,
"bugs": 0.04,
"time": 23,
"intelligentContent": 39.22,
"number_operators": 10,
"number_operands": 21,
"number_operators_unique": 4,
"number_operands_unique": 13,
"cloc": 13,
"loc": 48,
"lloc": 34,
"mi": 87.55,
"mIwoC": 51.46,
"commentWeight": 36.08,
"kanDefect": 0.29,
"relativeStructuralComplexity": 4,
"relativeDataComplexity": 1.75,
"relativeSystemComplexity": 5.75,
"totalStructuralComplexity": 16,
"totalDataComplexity": 7,
"totalSystemComplexity": 23,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 2,
"instability": 0.67,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Violation\\ViolationParser",
"interface": false,
"methods": [
{
"name": "apply",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 1,
"nbMethods": 1,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 3,
"externals": [
"Hal\\Metric\\Metrics",
"Hal\\Violation\\Class_\\Blob",
"Hal\\Violation\\Class_\\TooComplexCode",
"Hal\\Violation\\Class_\\ProbablyBugged",
"Hal\\Violation\\Class_\\TooLong",
"Hal\\Violation\\Class_\\TooDependent",
"Hal\\Violation\\Violations"
],
"lcom": 1,
"length": 13,
"vocabulary": 7,
"volume": 36.5,
"difficulty": 2.2,
"effort": 80.29,
"level": 0.45,
"bugs": 0.01,
"time": 4,
"intelligentContent": 16.59,
"number_operators": 2,
"number_operands": 11,
"number_operators_unique": 2,
"number_operands_unique": 5,
"cloc": 4,
"loc": 19,
"lloc": 15,
"mi": 95.62,
"mIwoC": 63,
"commentWeight": 32.62,
"kanDefect": 0.61,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 0.5,
"relativeSystemComplexity": 9.5,
"totalStructuralComplexity": 9,
"totalDataComplexity": 0.5,
"totalSystemComplexity": 9.5,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 7,
"instability": 0.88,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Application\\Analyze",
"interface": false,
"methods": [
{
"name": "__construct",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "run",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 2,
"externals": [
"Hal\\Application\\Config\\Config",
"Hal\\Component\\Output\\Output",
"Hal\\Component\\Issue\\Issuer",
"Hal\\Metric\\Metrics",
"PhpParser\\ParserFactory",
"Hal\\Component\\Ast\\NodeTraverser",
"PhpParser\\NodeVisitor\\NameResolver",
"Hal\\Metric\\Class_\\ClassEnumVisitor",
"Hal\\Metric\\Class_\\Complexity\\CyclomaticComplexityVisitor",
"Hal\\Metric\\Class_\\Coupling\\ExternalsVisitor",
"Hal\\Metric\\Class_\\Structural\\LcomVisitor",
"Hal\\Metric\\Class_\\Text\\HalsteadVisitor",
"Hal\\Metric\\Class_\\Text\\LengthVisitor",
"Hal\\Metric\\Class_\\Complexity\\CyclomaticComplexityVisitor",
"Hal\\Metric\\Class_\\Component\\MaintainabilityIndexVisitor",
"Hal\\Metric\\Class_\\Complexity\\KanDefectVisitor",
"Hal\\Metric\\Class_\\Structural\\SystemComplexityVisitor",
"Hal\\Component\\Output\\ProgressBar",
"Hal\\Metric\\System\\Coupling\\PageRank",
"Hal\\Metric\\System\\Coupling\\Coupling",
"Hal\\Metric\\System\\Changes\\GitChanges",
"Hal\\Metric\\System\\UnitTesting\\UnitTesting"
],
"lcom": 1,
"length": 88,
"vocabulary": 21,
"volume": 386.52,
"difficulty": 6.25,
"effort": 2415.77,
"level": 0.16,
"bugs": 0.13,
"time": 134,
"intelligentContent": 61.84,
"number_operators": 13,
"number_operands": 75,
"number_operators_unique": 3,
"number_operands_unique": 18,
"cloc": 27,
"loc": 86,
"lloc": 59,
"mi": 81.14,
"mIwoC": 42.99,
"commentWeight": 38.15,
"kanDefect": 0.38,
"relativeStructuralComplexity": 100,
"relativeDataComplexity": 0.36,
"relativeSystemComplexity": 100.36,
"totalStructuralComplexity": 200,
"totalDataComplexity": 0.73,
"totalSystemComplexity": 200.73,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 22,
"instability": 0.96,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Application\\Application",
"interface": false,
"methods": [
{
"name": "run",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 1,
"nbMethods": 1,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 2,
"externals": [
"Hal\\Component\\Output\\CliOutput",
"Hal\\Component\\Issue\\Issuer",
"Hal\\Application\\Config\\Parser",
"Hal\\Application\\Config\\Validator",
"Hal\\Application\\Config\\Validator",
"Hal\\Application\\Config\\Validator",
"Hal\\Component\\File\\Finder",
"Hal\\Application\\Analyze",
"Hal\\Violation\\ViolationParser",
"Hal\\Report\\Cli\\Reporter",
"Hal\\Report\\Html\\Reporter",
"Hal\\Report\\Violations\\Xml\\Reporter"
],
"lcom": 1,
"length": 71,
"vocabulary": 23,
"volume": 321.17,
"difficulty": 4.5,
"effort": 1445.28,
"level": 0.22,
"bugs": 0.11,
"time": 80,
"intelligentContent": 71.37,
"number_operators": 11,
"number_operands": 60,
"number_operators_unique": 3,
"number_operands_unique": 20,
"cloc": 13,
"loc": 55,
"lloc": 43,
"mi": 80.74,
"mIwoC": 46.55,
"commentWeight": 34.2,
"kanDefect": 0.36,
"relativeStructuralComplexity": 144,
"relativeDataComplexity": 0.08,
"relativeSystemComplexity": 144.08,
"totalStructuralComplexity": 144,
"totalDataComplexity": 0.08,
"totalSystemComplexity": 144.08,
"pageRank": 0,
"afferentCoupling": 0,
"efferentCoupling": 12,
"instability": 1,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Application\\Config\\Validator",
"interface": false,
"methods": [
{
"name": "validate",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "help",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 2,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 2,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 8,
"externals": [
"Hal\\Application\\Config\\Config",
"Hal\\Application\\Config\\ConfigException",
"Hal\\Application\\Config\\ConfigException",
"Hal\\Application\\Config\\ConfigException"
],
"lcom": 2,
"length": 57,
"vocabulary": 23,
"volume": 257.84,
"difficulty": 8.12,
"effort": 2093.08,
"level": 0.12,
"bugs": 0.09,
"time": 116,
"intelligentContent": 31.76,
"number_operators": 11,
"number_operands": 46,
"number_operators_unique": 6,
"number_operands_unique": 17,
"cloc": 15,
"loc": 81,
"lloc": 54,
"mi": 75.17,
"mIwoC": 44.25,
"commentWeight": 30.92,
"kanDefect": 0.96,
"relativeStructuralComplexity": 9,
"relativeDataComplexity": 0.38,
"relativeSystemComplexity": 9.38,
"totalStructuralComplexity": 18,
"totalDataComplexity": 0.75,
"totalSystemComplexity": 18.75,
"pageRank": 0,
"afferentCoupling": 3,
"efferentCoupling": 4,
"instability": 0.57,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Application\\Config\\ConfigException",
"interface": false,
"methods": [],
"nbMethodsIncludingGettersSetters": 0,
"nbMethods": 0,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 0,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 1,
"externals": [
"Exception"
],
"lcom": 0,
"length": 0,
"vocabulary": 0,
"volume": 0,
"difficulty": 0,
"effort": 0,
"level": 0,
"bugs": 0,
"time": 0,
"intelligentContent": 0,
"number_operators": 0,
"number_operands": 0,
"number_operators_unique": 0,
"number_operands_unique": 0,
"cloc": 0,
"loc": 4,
"lloc": 4,
"mi": 171,
"mIwoC": 171,
"commentWeight": 0,
"kanDefect": 0.15,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 0,
"relativeSystemComplexity": 0,
"totalStructuralComplexity": 0,
"totalDataComplexity": 0,
"totalSystemComplexity": 0,
"pageRank": 0.01,
"afferentCoupling": 4,
"efferentCoupling": 1,
"instability": 0.2,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "Hal\\Application\\Config\\Parser",
"interface": false,
"methods": [
{
"name": "parse",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 1,
"nbMethods": 1,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 0,
"ccn": 8,
"externals": [
"Hal\\Application\\Config\\Config"
],
"lcom": 1,
"length": 67,
"vocabulary": 23,
"volume": 303.08,
"difficulty": 9.18,
"effort": 2781.19,
"level": 0.11,
"bugs": 0.1,
"time": 155,
"intelligentContent": 33.03,
"number_operators": 15,
"number_operands": 52,
"number_operators_unique": 6,
"number_operands_unique": 17,
"cloc": 3,
"loc": 36,
"lloc": 33,
"mi": 70.05,
"mIwoC": 48.42,
"commentWeight": 21.62,
"kanDefect": 0.96,
"relativeStructuralComplexity": 1,
"relativeDataComplexity": 1.5,
"relativeSystemComplexity": 2.5,
"totalStructuralComplexity": 1,
"totalDataComplexity": 1.5,
"totalSystemComplexity": 2.5,
"pageRank": 0,
"afferentCoupling": 1,
"efferentCoupling": 1,
"instability": 0.5,
"numberOfUnitTests": 1,
"violations": {}
},
{
"name": "Hal\\Application\\Config\\Config",
"interface": false,
"methods": [
{
"name": "set",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "has",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "get",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "all",
"role": "getter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "fromArray",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 5,
"nbMethods": 4,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 4,
"nbMethodsGetter": 1,
"nbMethodsSetters": 0,
"ccn": 2,
"externals": [],
"lcom": 1,
"length": 29,
"vocabulary": 6,
"volume": 74.96,
"difficulty": 5.75,
"effort": 431.04,
"level": 0.17,
"bugs": 0.02,
"time": 24,
"intelligentContent": 13.04,
"number_operators": 6,
"number_operands": 23,
"number_operators_unique": 2,
"number_operands_unique": 4,
"cloc": 23,
"loc": 52,
"lloc": 29,
"mi": 97.58,
"mIwoC": 54.7,
"commentWeight": 42.87,
"kanDefect": 0.38,
"relativeStructuralComplexity": 4,
"relativeDataComplexity": 2,
"relativeSystemComplexity": 6,
"totalStructuralComplexity": 20,
"totalDataComplexity": 10,
"totalSystemComplexity": 30,
"pageRank": 0.01,
"afferentCoupling": 7,
"efferentCoupling": 0,
"instability": 0,
"numberOfUnitTests": 0,
"violations": {}
},
{
"name": "MyVisitor",
"interface": false,
"methods": [
{
"name": "__construct",
"role": "setter",
"_type": "Hal\\Metric\\FunctionMetric"
},
{
"name": "leaveNode",
"role": null,
"public": true,
"private": false,
"_type": "Hal\\Metric\\FunctionMetric"
}
],
"nbMethodsIncludingGettersSetters": 2,
"nbMethods": 1,
"nbMethodsPrivate": 0,
"nbMethodsPublic": 1,
"nbMethodsGetter": 0,
"nbMethodsSetters": 1,
"ccn": 1,
"externals": [
"PhpParser\\NodeVisitorAbstract",
"PhpParser\\Node"
],
"lcom": 1,
"length": 7,
"vocabulary": 4,
"volume": 14,
"difficulty": 1,
"effort": 14,
"level": 1,
"bugs": 0,
"time": 1,
"intelligentContent": 14,
"number_operators": 1,
"number_operands": 6,
"number_operators_unique": 1,
"number_operands_unique": 3,
"cloc": 13,
"loc": 26,
"lloc": 13,
"mi": 112,
"mIwoC": 67.54,
"commentWeight": 44.46,
"kanDefect": 0.15,
"relativeStructuralComplexity": 0,
"relativeDataComplexity": 1,
"relativeSystemComplexity": 1,
"totalStructuralComplexity": 0,
"totalDataComplexity": 2,
"totalSystemComplexity": 2,
"pageRank": 0,
"afferentCoupling": 0,
"efferentCoupling": 2,
"instability": 1,
"numberOfUnitTests": 0,
"violations": {}
}
]<|fim▁end|> | "public": true, |
<|file_name|>0008_auto_20151028_1154.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('videos', '0007_auto_20151027_2338'),
]
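    # Replaces the relational `account` and `events` fields on Video with a
    # single schemaless JSON `extra` field.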
operations = [
migrations.RemoveField(
model_name='video',
name='account',
),
migrations.RemoveField(
model_name='video',
name='events',
),
migrations.AddField(
model_name='video',
name='extra',<|fim▁hole|><|fim▁end|> | field=jsonfield.fields.JSONField(default={}),
),
] |
<|file_name|>test_commands.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
from os import path as op
import shutil
import glob
import warnings
from nose.tools import assert_true, assert_raises
from numpy.testing import assert_equal, assert_allclose
from mne import concatenate_raws, read_bem_surfaces
from mne.commands import (mne_browse_raw, mne_bti2fiff, mne_clean_eog_ecg,
mne_compute_proj_ecg, mne_compute_proj_eog,
mne_coreg, mne_kit2fiff,
mne_make_scalp_surfaces, mne_maxfilter,
mne_report, mne_surf2bem, mne_watershed_bem,
mne_compare_fiff, mne_flash_bem, mne_show_fiff,
mne_show_info)
from mne.datasets import testing, sample
from mne.io import read_raw_fif
from mne.utils import (run_tests_if_main, _TempDir, requires_mne, requires_PIL,
requires_mayavi, requires_tvtk, requires_freesurfer,
ArgvSetter, slow_test, ultra_slow_test)
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
subjects_dir = op.join(testing.data_path(download=False), 'subjects')
warnings.simplefilter('always')
def check_usage(module, force_help=False):
"""Helper to ensure we print usage"""
args = ('--help',) if force_help else ()
with ArgvSetter(args) as out:
try:
module.run()
except SystemExit:
pass
assert_true('Usage: ' in out.stdout.getvalue())
@slow_test
def test_browse_raw():
"""Test mne browse_raw."""
check_usage(mne_browse_raw)
def test_bti2fiff():
"""Test mne bti2fiff."""
check_usage(mne_bti2fiff)
def test_compare_fiff():
"""Test mne compare_fiff."""
check_usage(mne_compare_fiff)
def test_show_fiff():
"""Test mne compare_fiff."""
check_usage(mne_show_fiff)
with ArgvSetter((raw_fname,)):
mne_show_fiff.run()
@requires_mne
def test_clean_eog_ecg():
"""Test mne clean_eog_ecg."""
check_usage(mne_clean_eog_ecg)
tempdir = _TempDir()
raw = concatenate_raws([read_raw_fif(f)
for f in [raw_fname, raw_fname, raw_fname]])
raw.info['bads'] = ['MEG 2443']
use_fname = op.join(tempdir, op.basename(raw_fname))
raw.save(use_fname)
with ArgvSetter(('-i', use_fname, '--quiet')):
mne_clean_eog_ecg.run()
fnames = glob.glob(op.join(tempdir, '*proj.fif'))
assert_true(len(fnames) == 2) # two projs
fnames = glob.glob(op.join(tempdir, '*-eve.fif'))
assert_true(len(fnames) == 3) # raw plus two projs
@slow_test
def test_compute_proj_ecg_eog():
"""Test mne compute_proj_ecg/eog."""
for fun in (mne_compute_proj_ecg, mne_compute_proj_eog):
check_usage(fun)
tempdir = _TempDir()
use_fname = op.join(tempdir, op.basename(raw_fname))
bad_fname = op.join(tempdir, 'bads.txt')
with open(bad_fname, 'w') as fid:
fid.write('MEG 2443\n')
shutil.copyfile(raw_fname, use_fname)
with ArgvSetter(('-i', use_fname, '--bad=' + bad_fname,
'--rej-eeg', '150')):
fun.run()
fnames = glob.glob(op.join(tempdir, '*proj.fif'))
assert_true(len(fnames) == 1)
fnames = glob.glob(op.join(tempdir, '*-eve.fif'))
assert_true(len(fnames) == 1)
def test_coreg():
"""Test mne coreg."""
assert_true(hasattr(mne_coreg, 'run'))
def test_kit2fiff():
"""Test mne kit2fiff."""
    # Can't check normal usage (running without arguments would open the GUI),
    # so only verify the --help output.
check_usage(mne_kit2fiff, force_help=True)
@requires_tvtk
@testing.requires_testing_data
def test_make_scalp_surfaces():
"""Test mne make_scalp_surfaces."""
check_usage(mne_make_scalp_surfaces)
# Copy necessary files to avoid FreeSurfer call
tempdir = _TempDir()
surf_path = op.join(subjects_dir, 'sample', 'surf')
surf_path_new = op.join(tempdir, 'sample', 'surf')
os.mkdir(op.join(tempdir, 'sample'))
os.mkdir(surf_path_new)
subj_dir = op.join(tempdir, 'sample', 'bem')
os.mkdir(subj_dir)
shutil.copy(op.join(surf_path, 'lh.seghead'), surf_path_new)
orig_fs = os.getenv('FREESURFER_HOME', None)
if orig_fs is not None:
del os.environ['FREESURFER_HOME']
cmd = ('-s', 'sample', '--subjects-dir', tempdir)
os.environ['_MNE_TESTING_SCALP'] = 'true'
dense_fname = op.join(subj_dir, 'sample-head-dense.fif')
medium_fname = op.join(subj_dir, 'sample-head-medium.fif')
try:
with ArgvSetter(cmd, disable_stdout=False, disable_stderr=False):
assert_raises(RuntimeError, mne_make_scalp_surfaces.run)
os.environ['FREESURFER_HOME'] = tempdir # don't actually use it
mne_make_scalp_surfaces.run()
assert_true(op.isfile(dense_fname))
assert_true(op.isfile(medium_fname))
assert_raises(IOError, mne_make_scalp_surfaces.run) # no overwrite
finally:
if orig_fs is not None:
os.environ['FREESURFER_HOME'] = orig_fs
else:<|fim▁hole|> del os.environ['_MNE_TESTING_SCALP']
# actually check the outputs
head_py = read_bem_surfaces(dense_fname)
assert_equal(len(head_py), 1)
head_py = head_py[0]
head_c = read_bem_surfaces(op.join(subjects_dir, 'sample', 'bem',
'sample-head-dense.fif'))[0]
assert_allclose(head_py['rr'], head_c['rr'])
def test_maxfilter():
"""Test mne maxfilter."""
check_usage(mne_maxfilter)
with ArgvSetter(('-i', raw_fname, '--st', '--movecomp', '--linefreq', '60',
'--trans', raw_fname)) as out:
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
os.environ['_MNE_MAXFILTER_TEST'] = 'true'
try:
mne_maxfilter.run()
finally:
del os.environ['_MNE_MAXFILTER_TEST']
assert_true(len(w) == 1)
for check in ('maxfilter', '-trans', '-movecomp'):
assert_true(check in out.stdout.getvalue(), check)
@slow_test
@requires_mayavi
@requires_PIL
@testing.requires_testing_data
def test_report():
"""Test mne report."""
check_usage(mne_report)
tempdir = _TempDir()
use_fname = op.join(tempdir, op.basename(raw_fname))
shutil.copyfile(raw_fname, use_fname)
with ArgvSetter(('-p', tempdir, '-i', use_fname, '-d', subjects_dir,
'-s', 'sample', '--no-browser', '-m', '30')):
mne_report.run()
fnames = glob.glob(op.join(tempdir, '*.html'))
assert_true(len(fnames) == 1)
def test_surf2bem():
"""Test mne surf2bem."""
check_usage(mne_surf2bem)
@ultra_slow_test
@requires_freesurfer
@testing.requires_testing_data
def test_watershed_bem():
"""Test mne watershed bem."""
check_usage(mne_watershed_bem)
# Copy necessary files to tempdir
tempdir = _TempDir()
mridata_path = op.join(subjects_dir, 'sample', 'mri')
mridata_path_new = op.join(tempdir, 'sample', 'mri')
os.mkdir(op.join(tempdir, 'sample'))
os.mkdir(mridata_path_new)
if op.exists(op.join(mridata_path, 'T1')):
shutil.copytree(op.join(mridata_path, 'T1'), op.join(mridata_path_new,
'T1'))
if op.exists(op.join(mridata_path, 'T1.mgz')):
shutil.copyfile(op.join(mridata_path, 'T1.mgz'),
op.join(mridata_path_new, 'T1.mgz'))
with ArgvSetter(('-d', tempdir, '-s', 'sample', '-o'),
disable_stdout=False, disable_stderr=False):
mne_watershed_bem.run()
@ultra_slow_test
@requires_freesurfer
@sample.requires_sample_data
def test_flash_bem():
"""Test mne flash_bem."""
check_usage(mne_flash_bem, force_help=True)
# Using the sample dataset
subjects_dir = op.join(sample.data_path(download=False), 'subjects')
# Copy necessary files to tempdir
tempdir = _TempDir()
mridata_path = op.join(subjects_dir, 'sample', 'mri')
mridata_path_new = op.join(tempdir, 'sample', 'mri')
os.makedirs(op.join(mridata_path_new, 'flash'))
os.makedirs(op.join(tempdir, 'sample', 'bem'))
shutil.copyfile(op.join(mridata_path, 'T1.mgz'),
op.join(mridata_path_new, 'T1.mgz'))
shutil.copyfile(op.join(mridata_path, 'brain.mgz'),
op.join(mridata_path_new, 'brain.mgz'))
# Copy the available mri/flash/mef*.mgz files from the dataset
files = glob.glob(op.join(mridata_path, 'flash', 'mef*.mgz'))
for infile in files:
shutil.copyfile(infile, op.join(mridata_path_new, 'flash',
op.basename(infile)))
# Test mne flash_bem with --noconvert option
# (since there are no DICOM Flash images in dataset)
currdir = os.getcwd()
with ArgvSetter(('-d', tempdir, '-s', 'sample', '-n'),
disable_stdout=False, disable_stderr=False):
mne_flash_bem.run()
os.chdir(currdir)
def test_show_info():
"""Test mne show_info."""
check_usage(mne_show_info)
with ArgvSetter((raw_fname,)):
mne_show_info.run()
run_tests_if_main()<|fim▁end|> | del os.environ['FREESURFER_HOME'] |
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import os
import os.path
from raiden.constants import RAIDEN_DB_VERSION
def database_from_privatekey(base_dir, app_number):
""" Format a database path based on the private key and app number. """
dbpath = os.path.join(base_dir, f"app{app_number}", f"v{RAIDEN_DB_VERSION}_log.db")
os.makedirs(os.path.dirname(dbpath))
<|fim▁hole|><|fim▁end|> | return dbpath |
<|file_name|>test_relation_model_models.py<|end_file_name|><|fim▁begin|># Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes for code snippet for modeling article."""
from appengine.ndb.modeling import relation_model_models as models
from google.appengine.ext import ndb
from tests import AppEngineTestbedCase
class ContactTestCase(AppEngineTestbedCase):
"""A test case for the Contact model with relationship model."""
def setUp(self):
"""Creates 1 contact and 1 company.
Assuming the contact belongs to tmatsuo's addressbook.
"""
super(ContactTestCase, self).setUp()
self.myaddressbook_key = ndb.Key('AddressBook', 'tmatsuo')
mary = models.Contact(parent=self.myaddressbook_key, name='Mary')
mary.put()
self.mary_key = mary.key
google = models.Company(name='Google')
google.put()
self.google_key = google.key
candit = models.Company(name='Candit')
candit.put()
self.candit_key = candit.key
def test_relationship(self):
"""Two companies hire Mary."""
mary = self.mary_key.get()
google = self.google_key.get()
candit = self.candit_key.get()
# first google hires Mary
models.ContactCompany(parent=self.myaddressbook_key,
contact=mary.key,
company=google.key,
title='engineer').put()
# then another company named 'candit' hires Mary too
models.ContactCompany(parent=self.myaddressbook_key,<|fim▁hole|> # get the list of companies that Mary belongs to
self.assertEqual(len(mary.companies), 2)<|fim▁end|> | contact=mary.key,
company=candit.key,
title='president').put() |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|>const gulp = require('gulp');
const env = require('../index');
// Call it when necessary.
gulpRequireTasks({
    // Pass any options here; they are documented in the gulp-require-tasks README linked above.
    path: env.inConfigs('gulp', 'tasks') // this matches the module's default tasks path
});
gulp.task('default', ['scripts:build', 'json-copy:build']);<|fim▁end|> | // https://github.com/betsol/gulp-require-tasks
// Require the module.
const gulpRequireTasks = require('gulp-require-tasks'); |
<|file_name|>out_response.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR<|fim▁hole|>// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
'use strict';
var EventEmitter = require('./lib/event_emitter');
var stat = require('./lib/stat');
var inherits = require('util').inherits;
var errors = require('./errors');
var States = require('./reqres_states');
function TChannelOutResponse(id, options) {
options = options || {};
var self = this;
EventEmitter.call(self);
self.errorEvent = self.defineEvent('error');
self.spanEvent = self.defineEvent('span');
self.finishEvent = self.defineEvent('finish');
self.channel = options.channel;
self.inreq = options.inreq;
self.logger = options.logger;
self.random = options.random;
self.timers = options.timers;
self.start = 0;
self.end = 0;
self.state = States.Initial;
self.id = id || 0;
self.code = options.code || 0;
self.tracing = options.tracing || null;
self.headers = options.headers || {};
self.checksumType = options.checksumType || 0;
self.checksum = options.checksum || null;
self.ok = self.code === 0;
self.span = options.span || null;
self.streamed = false;
self._argstream = null;
self.arg1 = null;
self.arg2 = null;
self.arg3 = null;
self.codeString = null;
self.message = null;
}
inherits(TChannelOutResponse, EventEmitter);
TChannelOutResponse.prototype.type = 'tchannel.outgoing-response';
TChannelOutResponse.prototype._sendCallResponse = function _sendCallResponse(args, isLast) {
var self = this;
throw errors.UnimplementedMethod({
className: self.constructor.name,
methodName: '_sendCallResponse'
});
};
TChannelOutResponse.prototype._sendCallResponseCont = function _sendCallResponseCont(args, isLast) {
var self = this;
throw errors.UnimplementedMethod({
className: self.constructor.name,
methodName: '_sendCallResponseCont'
});
};
TChannelOutResponse.prototype._sendError = function _sendError(codeString, message) {
var self = this;
throw errors.UnimplementedMethod({
className: self.constructor.name,
methodName: '_sendError'
});
};
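// Lifecycle sketch (inferred from the state switches below, not part of the
// original source): Initial --frame--> Streaming, Initial/Streaming --last
// frame--> Done; sendError() moves any non-terminal state to Error.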
TChannelOutResponse.prototype.sendParts = function sendParts(parts, isLast) {
var self = this;
switch (self.state) {
case States.Initial:
self.sendCallResponseFrame(parts, isLast);
break;
case States.Streaming:
self.sendCallResponseContFrame(parts, isLast);
break;
case States.Done:
self.errorEvent.emit(self, errors.ResponseFrameState({
attempted: 'arg parts',
state: 'Done'
}));
break;
case States.Error:
// TODO: log warn
break;
default:
self.channel.logger.error('TChannelOutResponse is in a wrong state', {
state: self.state
});
break;
}
};
TChannelOutResponse.prototype.sendCallResponseFrame = function sendCallResponseFrame(args, isLast) {
var self = this;
switch (self.state) {
case States.Initial:
self.start = self.timers.now();
self._sendCallResponse(args, isLast);
if (self.span) {
self.span.annotate('ss');
}
if (isLast) self.state = States.Done;
else self.state = States.Streaming;
break;
case States.Streaming:
self.errorEvent.emit(self, errors.ResponseFrameState({
attempted: 'call response',
state: 'Streaming'
}));
break;
case States.Done:
case States.Error:
var arg2 = args[1] || '';
var arg3 = args[2] || '';
self.errorEvent.emit(self, errors.ResponseAlreadyDone({
attempted: 'call response',
state: self.state,
method: 'sendCallResponseFrame',
bufArg2: arg2.slice(0, 50),
arg2: String(arg2).slice(0, 50),
bufArg3: arg3.slice(0, 50),
arg3: String(arg3).slice(0, 50)
}));
}
};
TChannelOutResponse.prototype.sendCallResponseContFrame = function sendCallResponseContFrame(args, isLast) {
var self = this;
switch (self.state) {
case States.Initial:
self.errorEvent.emit(self, errors.ResponseFrameState({
attempted: 'call response continuation',
state: 'Initial'
}));
break;
case States.Streaming:
self._sendCallResponseCont(args, isLast);
if (isLast) self.state = States.Done;
break;
case States.Done:
case States.Error:
self.errorEvent.emit(self, errors.ResponseAlreadyDone({
attempted: 'call response continuation',
state: self.state,
method: 'sendCallResponseContFrame'
}));
}
};
TChannelOutResponse.prototype.sendError = function sendError(codeString, message) {
var self = this;
if (self.state === States.Done || self.state === States.Error) {
self.errorEvent.emit(self, errors.ResponseAlreadyDone({
attempted: 'send error frame: ' + codeString + ': ' + message,
currentState: self.state,
method: 'sendError',
codeString: codeString,
errMessage: message
}));
} else {
if (self.span) {
self.span.annotate('ss');
}
self.state = States.Error;
self.codeString = codeString;
self.message = message;
self.channel.inboundCallsSystemErrorsStat.increment(1, {
'calling-service': self.inreq.headers.cn,
'service': self.inreq.serviceName,
'endpoint': String(self.inreq.arg1),
'type': self.codeString
});
self._sendError(codeString, message);
self.emitFinish();
}
};
TChannelOutResponse.prototype.emitFinish = function emitFinish() {
var self = this;
var now = self.timers.now();
if (self.end) {
self.logger.warn('out response double emitFinish', {
end: self.end,
now: now,
serviceName: self.inreq.serviceName,
cn: self.inreq.headers.cn,
endpoint: String(self.inreq.arg1),
codeString: self.codeString,
errorMessage: self.message,
remoteAddr: self.inreq.connection.socketRemoteAddr,
state: self.state,
isOk: self.ok
});
return;
}
self.end = now;
var latency = self.end - self.inreq.start;
self.channel.emitFastStat(self.channel.buildStat(
'tchannel.inbound.calls.latency',
'timing',
latency,
new stat.InboundCallsLatencyTags(
self.inreq.headers.cn,
self.inreq.serviceName,
self.inreq.endpoint
)
));
if (self.span) {
self.spanEvent.emit(self, self.span);
}
self.finishEvent.emit(self);
};
TChannelOutResponse.prototype.setOk = function setOk(ok) {
var self = this;
if (self.state !== States.Initial) {
self.errorEvent.emit(self, errors.ResponseAlreadyStarted({
state: self.state,
method: 'setOk',
ok: ok
}));
return false;
}
self.ok = ok;
self.code = ok ? 0 : 1; // TODO: too coupled to v2 specifics?
return true;
};
TChannelOutResponse.prototype.sendOk = function sendOk(res1, res2) {
var self = this;
self.setOk(true);
self.send(res1, res2);
};
TChannelOutResponse.prototype.sendNotOk = function sendNotOk(res1, res2) {
var self = this;
if (self.state === States.Error) {
self.logger.error('cannot send application error, already sent error frame', {
res1: res1,
res2: res2
});
} else {
self.setOk(false);
self.send(res1, res2);
}
};
TChannelOutResponse.prototype.send = function send(res1, res2) {
var self = this;
/* send calls after finish() should be swallowed */
if (self.end) {
var logOptions = {
serviceName: self.inreq.serviceName,
cn: self.inreq.headers.cn,
endpoint: self.inreq.endpoint,
remoteAddr: self.inreq.remoteAddr,
end: self.end,
codeString: self.codeString,
errorMessage: self.message,
isOk: self.ok,
hasResponse: !!self.arg3,
state: self.state
};
if (self.inreq && self.inreq.timedOut) {
self.logger.info('OutResponse.send() after inreq timed out', logOptions);
} else {
self.logger.warn('OutResponse called send() after end', logOptions);
}
return;
}
self.arg2 = res1;
self.arg3 = res2;
if (self.ok) {
self.channel.emitFastStat(self.channel.buildStat(
'tchannel.inbound.calls.success',
'counter',
1,
new stat.InboundCallsSuccessTags(
self.inreq.headers.cn,
self.inreq.serviceName,
self.inreq.endpoint
)
));
} else {
// TODO: add outResponse.setErrorType()
self.channel.emitFastStat(self.channel.buildStat(
'tchannel.inbound.calls.app-errors',
'counter',
1,
new stat.InboundCallsAppErrorsTags(
self.inreq.headers.cn,
self.inreq.serviceName,
self.inreq.endpoint,
'unknown'
)
));
}
self.sendCallResponseFrame([self.arg1, res1, res2], true);
self.emitFinish();
return self;
};
module.exports = TChannelOutResponse;<|fim▁end|> | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
<|file_name|>XmlParserJhove.java<|end_file_name|><|fim▁begin|>package output;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import javax.swing.JOptionPane;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.w3c.dom.Node;
public class XmlParserJhove {
public static void main(String args[]) throws Exception {
JOptionPane.showMessageDialog(null, "Please choose the XML File to analyse", "XmlParsing", JOptionPane.QUESTION_MESSAGE);
String xmlfile = utilities.BrowserDialogs.chooseFile();
parseXmlFile(xmlfile);
}
public static void parseXmlFile(String xmlfile) {
try {
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document doc = dBuilder.parse(xmlfile);
PrintWriter xmlsummary = new PrintWriter(new FileWriter((jhoveValidations.JhoveGuiStarterDialog.jhoveExaminationFolder + "//" + "JhoveExaminationSummary" + ".xml")));
String xmlVersion = "xml version='1.0'";
String xmlEncoding = "encoding='ISO-8859-1'";
String xmlxslStyleSheet = "<?xml-stylesheet type=\"text/xsl\" href=\"JhoveCustomized.xsl\"?>";
xmlsummary.println("<?" + xmlVersion + " " + xmlEncoding + "?>");
xmlsummary.println(xmlxslStyleSheet);
xmlsummary.println("<JhoveFindingsSummary>");
output.XslStyleSheetsJhove.JhoveCustomizedXsl();
ArrayList<String> errormessages = new ArrayList<String>();
doc.getDocumentElement().normalize();
NodeList nList = doc.getElementsByTagName("item");
for (int temp = 0; temp < nList.getLength(); temp++) {
Node nNode = nList.item(temp);
if (nNode.getNodeType() == Node.ELEMENT_NODE) {
Element eElement = (Element) nNode;
xmlsummary.println("<File>");
String testutf8 = eElement.getElementsByTagName("filename").item(0).getTextContent();
if (testutf8.contains("&")) {
String sub = utilities.GenericUtilities.normaliseToUtf8(testutf8);
xmlsummary.println("<FileName>" + sub + "</FileName>");
} else {
xmlsummary.println("<FileName>" + eElement.getElementsByTagName("filename").item(0).getTextContent() + "</FileName>");
<|fim▁hole|> }
if (eElement.getElementsByTagName("creationsoftware").item(0)!= null) {
xmlsummary.println("<CreationSoftware>" + eElement.getElementsByTagName("creationsoftware").item(0).getTextContent() + "</CreationSoftware>");
}
if (eElement.getElementsByTagName("encryption").item(0)!= null) {
xmlsummary.println("<Encryption>" + eElement.getElementsByTagName("encryption").item(0).getTextContent() + "</Encryption>");
}
if (eElement.getElementsByTagName("PdfType").item(0)!= null) {
xmlsummary.println("<PdfType>" + eElement.getElementsByTagName("PdfType").item(0).getTextContent() + "</PdfType>");
}
xmlsummary.println("<Module>" + eElement.getElementsByTagName("reportingModule").item(0).getTextContent() + "</Module>");
xmlsummary.println("<Status>" + eElement.getElementsByTagName("status").item(0).getTextContent() + "</Status>");
String status = eElement.getElementsByTagName("status").item(0).getTextContent();
if ((status.contains("Not")) || (status.contains("not"))) {
System.out.println(eElement.getElementsByTagName("filename").item(0).getTextContent());
int lenmessages = eElement.getElementsByTagName("message").getLength();
xmlsummary.println("<JhoveMessages>" + lenmessages + "</JhoveMessages>");
for (int temp3 = 0; temp3 < lenmessages; temp3++) {
String error = eElement.getElementsByTagName("message").item(temp3).getTextContent();
int writtenmessage = temp3 + 1;
                            // escape XML special characters; '&' must be replaced first so the
                            // ampersands introduced by the other entities are not escaped twice
                            error = error.replace("&", "&amp;");
                            error = error.replace("\"", "&quot;");
                            error = error.replace("\'", "&apos;");
                            error = error.replace("<", "&lt;");
                            error = error.replace(">", "&gt;");
xmlsummary.println("<Message" + writtenmessage + ">" + error + "</Message" + writtenmessage + ">");
errormessages.add(error);
}
}
xmlsummary.println("</File>"); //TODO: should be changed to File, but as well in XSLT
}
}
Collections.sort(errormessages);
int i;
// copy ErrorList because later the no. of entries of each
// element will be counted
ArrayList<String> originerrors = new ArrayList<String>();
            for (i = 0; i < errormessages.size(); i++) { // a predefined copy utility could replace this loop
originerrors.add(errormessages.get(i));
}
            // get rid of redundant entries (adjacent duplicates; this relies on the sort above)
i = 0;
while (i < errormessages.size() - 1) {
if (errormessages.get(i).equals(errormessages.get(i + 1))) {
errormessages.remove(i);
} else {
i++;
}
}
xmlsummary.println("<SampleSummary>");
xmlsummary.println("<ExaminedPdfFiles>" + nList.getLength() + "</ExaminedPdfFiles>");
xmlsummary.println("<DifferentJhoveMessages>" + errormessages.size() + "</DifferentJhoveMessages>");
// how often does each JHOVE error occur?
int j = 0;
int temp1;
for (i = 0; i < errormessages.size(); i++) {
temp1 = 0;
for (j = 0; j < originerrors.size(); j++) {
if (errormessages.get(i).equals(originerrors.get(j))) {
temp1++;
}
}
xmlsummary.println("<JhoveMessage>");
xmlsummary.println("<MessageText>" + errormessages.get(i) + "</MessageText>");
xmlsummary.println("<Occurance>" + temp1 + "</Occurance>");
xmlsummary.println("</JhoveMessage>");
}
xmlsummary.println("</SampleSummary>");
xmlsummary.println("</JhoveFindingsSummary>");
xmlsummary.close();
}
catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(null, e, "error message", JOptionPane.ERROR_MESSAGE);
}
}
}<|fim▁end|> | }
if (eElement.getElementsByTagName("creationyear").item(0)!= null) {
xmlsummary.println("<CreationYear>" + eElement.getElementsByTagName("creationyear").item(0).getTextContent() + "</CreationYear>");
|
<|file_name|>service_update_test.go<|end_file_name|><|fim▁begin|>package client
import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"strings"
"testing"
"golang.org/x/net/context"
"github.com/docker/engine-api/types"
"github.com/docker/engine-api/types/swarm"
)
func TestServiceUpdateError(t *testing.T) {
client := &Client{
transport: newMockClient(nil, errorMock(http.StatusInternalServerError, "Server error")),
}
err := client.ServiceUpdate(context.Background(), "service_id", swarm.Version{}, swarm.ServiceSpec{}, types.ServiceUpdateOptions{})
if err == nil || err.Error() != "Error response from daemon: Server error" {
t.Fatalf("expected a Server Error, got %v", err)
}
}
func TestServiceUpdate(t *testing.T) {
expectedURL := "/services/service_id/update"
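	// Each case checks that the swarm.Version index is forwarded as the
	// "version" query parameter (the zero value serializes as "0").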
updateCases := []struct {
swarmVersion swarm.Version
expectedVersion string
}{
{
expectedVersion: "0",
},
{
swarmVersion: swarm.Version{
Index: 0,
},
expectedVersion: "0",
},
{
swarmVersion: swarm.Version{
Index: 10,
},
expectedVersion: "10",
},
}
for _, updateCase := range updateCases {
client := &Client{
transport: newMockClient(nil, func(req *http.Request) (*http.Response, error) {
if !strings.HasPrefix(req.URL.Path, expectedURL) {
return nil, fmt.Errorf("Expected URL '%s', got '%s'", expectedURL, req.URL)
}
if req.Method != "POST" {
return nil, fmt.Errorf("expected POST method, got %s", req.Method)
}
version := req.URL.Query().Get("version")
if version != updateCase.expectedVersion {
return nil, fmt.Errorf("version not set in URL query properly, expected '%s', got %s", updateCase.expectedVersion, version)
}<|fim▁hole|> Body: ioutil.NopCloser(bytes.NewReader([]byte("body"))),
}, nil
}),
}
err := client.ServiceUpdate(context.Background(), "service_id", updateCase.swarmVersion, swarm.ServiceSpec{}, types.ServiceUpdateOptions{})
if err != nil {
t.Fatal(err)
}
}
}<|fim▁end|> | return &http.Response{
StatusCode: http.StatusOK, |
<|file_name|>Base.py<|end_file_name|><|fim▁begin|>"""
$Id: Base.py,v 1.12.2.10 2008/08/01 03:58:03 customdesigned Exp $
This file is part of the pydns project.
Homepage: http://pydns.sourceforge.net
This code is covered by the standard Python License.
Base functionality. Request and Response classes, that sort of thing.
"""
import socket, string, types, time, select
import Type,Class,Opcode
import asyncore
#
# This random generator is used for transaction ids and port selection. This
# is important to prevent spurious results from lost packets, and malicious
# cache poisoning. This doesn't matter if you are behind a caching nameserver
# or your app is a primary DNS server only. To install your own generator,
# replace DNS.Base.random. SystemRandom uses /dev/urandom or similar source.
#
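# For example (an illustrative sketch, not part of the original module), an
# application could install its own generator with:
#   import DNS, random
#   DNS.Base.random = random.SystemRandom()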
try:
from random import SystemRandom
random = SystemRandom()
except:
import random
class DNSError(Exception): pass
# Lib uses DNSError, so import after defining.
import Lib
defaults = { 'protocol':'udp', 'port':53, 'opcode':Opcode.QUERY,
'qtype':Type.A, 'rd':1, 'timing':1, 'timeout': 30 }
defaults['server']=[]
def ParseResolvConf(resolv_path="/etc/resolv.conf"):
"parses the /etc/resolv.conf file and sets defaults for name servers"
global defaults
lines=open(resolv_path).readlines()
for line in lines:
line = string.strip(line)
if not line or line[0]==';' or line[0]=='#':
continue
fields=string.split(line)
if len(fields) < 2:
continue
if fields[0]=='domain' and len(fields) > 1:
defaults['domain']=fields[1]
if fields[0]=='search':
pass
if fields[0]=='options':
pass
if fields[0]=='sortlist':
pass
if fields[0]=='nameserver':
if fields[1].count(':'):
""" Ignore IPv6 nameservers as we currently do not support querying them. """
pass
else:
defaults['server'].append(fields[1])
def DiscoverNameServers():
import sys
if sys.platform in ('win32', 'nt'):
import win32dns
defaults['server']=win32dns.RegistryResolve()
else:
return ParseResolvConf()
class DnsRequest:
""" high level Request object """
def __init__(self,*name,**args):
self.donefunc=None
self.async=None
self.defaults = {}
self.argparse(name,args)
self.defaults = self.args
self.tid = 0
def argparse(self,name,args):
if not name and self.defaults.has_key('name'):
args['name'] = self.defaults['name']
if type(name) is types.StringType:
args['name']=name
else:
if len(name) == 1:
if name[0]:
args['name']=name[0]
for i in defaults.keys():
if not args.has_key(i):
if self.defaults.has_key(i):
args[i]=self.defaults[i]
else:
args[i]=defaults[i]
if type(args['server']) == types.StringType:
args['server'] = [args['server']]
self.args=args
def socketInit(self,a,b):
self.s = socket.socket(a,b)
def processUDPReply(self):
if self.timeout > 0:
r,w,e = select.select([self.s],[],[],self.timeout)
if not len(r):
raise DNSError, 'Timeout'
(self.reply, self.from_address) = self.s.recvfrom(65535)
self.time_finish=time.time()
self.args['server']=self.ns
return self.processReply()
def processTCPReply(self):
if self.timeout > 0:
r,w,e = select.select([self.s],[],[],self.timeout)
if not len(r):
raise DNSError, 'Timeout'
f = self.s.makefile('r')
header = f.read(2)
if len(header) < 2:
raise DNSError,'EOF'
count = Lib.unpack16bit(header)
self.reply = f.read(count)
if len(self.reply) != count:
# FIXME: Since we are non-blocking, it could just be a large reply
# that we need to loop and wait for.
raise DNSError,'incomplete reply'
self.time_finish=time.time()
self.args['server']=self.ns
return self.processReply()
def processReply(self):
self.args['elapsed']=(self.time_finish-self.time_start)*1000
u = Lib.Munpacker(self.reply)
r=Lib.DnsResult(u,self.args)
r.args=self.args
#self.args=None # mark this DnsRequest object as used.
return r
#### TODO TODO TODO ####
# if protocol == 'tcp' and qtype == Type.AXFR:
# while 1:
# header = f.read(2)
# if len(header) < 2:
# print '========== EOF =========='
# break
# count = Lib.unpack16bit(header)
# if not count:
# print '========== ZERO COUNT =========='
# break
# print '========== NEXT =========='
# reply = f.read(count)
# if len(reply) != count:
# print '*** Incomplete reply ***'
# break
# u = Lib.Munpacker(reply)
# Lib.dumpM(u)
def getSource(self):
"Pick random source port to avoid DNS cache poisoning attack."
while True:
try:
source_port = random.randint(1024,65535)
self.s.bind(('', source_port))
break
except socket.error, msg:
# Error 98, 'Address already in use'
if msg[0] != 98: raise
def conn(self):
self.getSource()
self.s.connect((self.ns,self.port))
def req(self,*name,**args):
" needs a refactoring "
self.argparse(name,args)
#if not self.args:
# raise DNSError,'reinitialize request before reuse'
protocol = self.args['protocol']
self.port = self.args['port']
self.tid = random.randint(0,65535)
self.timeout = self.args['timeout'];
opcode = self.args['opcode']
rd = self.args['rd']
server=self.args['server']
if type(self.args['qtype']) == types.StringType:
try:
qtype = getattr(Type, string.upper(self.args['qtype']))
except AttributeError:
raise DNSError,'unknown query type'
else:
qtype=self.args['qtype']
if not self.args.has_key('name'):
print self.args
raise DNSError,'nothing to lookup'
qname = self.args['name']
if qtype == Type.AXFR:
print 'Query type AXFR, protocol forced to TCP'
protocol = 'tcp'
#print 'QTYPE %d(%s)' % (qtype, Type.typestr(qtype))
m = Lib.Mpacker()
# jesus. keywords and default args would be good. TODO.
m.addHeader(self.tid,
0, opcode, 0, 0, rd, 0, 0, 0,
1, 0, 0, 0)
m.addQuestion(qname, qtype, Class.IN)
self.request = m.getbuf()
try:
if protocol == 'udp':
self.sendUDPRequest(server)
else:
self.sendTCPRequest(server)
except socket.error, reason:<|fim▁hole|> return None
else:
if not self.response:
raise DNSError,'no working nameservers found'
return self.response
def sendUDPRequest(self, server):
"refactor me"
self.response=None
for self.ns in server:
#print "trying udp",self.ns
try:
if self.ns.count(':'):
if hasattr(socket,'has_ipv6') and socket.has_ipv6:
self.socketInit(socket.AF_INET6, socket.SOCK_DGRAM)
else: continue
else:
self.socketInit(socket.AF_INET, socket.SOCK_DGRAM)
try:
# TODO. Handle timeouts &c correctly (RFC)
self.time_start=time.time()
self.conn()
if not self.async:
self.s.send(self.request)
r=self.processUDPReply()
# Since we bind to the source port and connect to the
# destination port, we don't need to check that here,
# but do make sure it's actually a DNS request that the
# packet is in reply to.
while r.header['id'] != self.tid \
or self.from_address[1] != self.port:
r=self.processUDPReply()
self.response = r
# FIXME: check waiting async queries
finally:
if not self.async:
self.s.close()
except socket.error:
continue
break
def sendTCPRequest(self, server):
" do the work of sending a TCP request "
self.response=None
for self.ns in server:
#print "trying tcp",self.ns
try:
if self.ns.count(':'):
if hasattr(socket,'has_ipv6') and socket.has_ipv6:
self.socketInit(socket.AF_INET6, socket.SOCK_STREAM)
else: continue
else:
self.socketInit(socket.AF_INET, socket.SOCK_STREAM)
try:
# TODO. Handle timeouts &c correctly (RFC)
self.time_start=time.time()
self.conn()
buf = Lib.pack16bit(len(self.request))+self.request
# Keep server from making sendall hang
self.s.setblocking(0)
# FIXME: throws WOULDBLOCK if request too large to fit in
# system buffer
self.s.sendall(buf)
self.s.shutdown(socket.SHUT_WR)
r=self.processTCPReply()
if r.header['id'] == self.tid:
self.response = r
break
finally:
self.s.close()
except socket.error:
continue
#class DnsAsyncRequest(DnsRequest):
class DnsAsyncRequest(DnsRequest,asyncore.dispatcher_with_send):
" an asynchronous request object. out of date, probably broken "
def __init__(self,*name,**args):
DnsRequest.__init__(self, *name, **args)
# XXX todo
if args.has_key('done') and args['done']:
self.donefunc=args['done']
else:
self.donefunc=self.showResult
#self.realinit(name,args) # XXX todo
self.async=1
def conn(self):
self.getSource()
self.connect((self.ns,self.port))
self.time_start=time.time()
if self.args.has_key('start') and self.args['start']:
asyncore.dispatcher.go(self)
def socketInit(self,a,b):
self.create_socket(a,b)
asyncore.dispatcher.__init__(self)
self.s=self
def handle_read(self):
if self.args['protocol'] == 'udp':
self.response=self.processUDPReply()
if self.donefunc:
apply(self.donefunc,(self,))
def handle_connect(self):
self.send(self.request)
def handle_write(self):
pass
def showResult(self,*s):
self.response.show()
#
# $Log: Base.py,v $
# Revision 1.12.2.10 2008/08/01 03:58:03 customdesigned
# Don't try to close socket when never opened.
#
# Revision 1.12.2.9 2008/08/01 03:48:31 customdesigned
# Fix more breakage from port randomization patch. Support Ipv6 queries.
#
# Revision 1.12.2.8 2008/07/31 18:22:59 customdesigned
# Wait until tcp response at least starts coming in.
#
# Revision 1.12.2.7 2008/07/28 01:27:00 customdesigned
# Check configured port.
#
# Revision 1.12.2.6 2008/07/28 00:17:10 customdesigned
# Randomize source ports.
#
# Revision 1.12.2.5 2008/07/24 20:10:55 customdesigned
# Randomize tid in requests, and check in response.
#
# Revision 1.12.2.4 2007/05/22 20:28:31 customdesigned
# Missing import Lib
#
# Revision 1.12.2.3 2007/05/22 20:25:52 customdesigned
# Use socket.inetntoa,inetaton.
#
# Revision 1.12.2.2 2007/05/22 20:21:46 customdesigned
# Trap socket error
#
# Revision 1.12.2.1 2007/05/22 20:19:35 customdesigned
# Skip bogus but non-empty lines in resolv.conf
#
# Revision 1.12 2002/04/23 06:04:27 anthonybaxter
# attempt to refactor the DNSRequest.req method a little. after doing a bit
# of this, I've decided to bite the bullet and just rewrite the puppy. will
# be checkin in some design notes, then unit tests and then writing the sod.
#
# Revision 1.11 2002/03/19 13:05:02 anthonybaxter
# converted to class based exceptions (there goes the python1.4 compatibility :)
#
# removed a quite gross use of 'eval()'.
#
# Revision 1.10 2002/03/19 12:41:33 anthonybaxter
# tabnannied and reindented everything. 4 space indent, no tabs.
# yay.
#
# Revision 1.9 2002/03/19 12:26:13 anthonybaxter
# death to leading tabs.
#
# Revision 1.8 2002/03/19 10:30:33 anthonybaxter
# first round of major bits and pieces. The major stuff here (summarised
# from my local, off-net CVS server :/ this will cause some oddities with
# the
#
# tests/testPackers.py:
# a large slab of unit tests for the packer and unpacker code in DNS.Lib
#
# DNS/Lib.py:
# placeholder for addSRV.
# added 'klass' to addA, make it the same as the other A* records.
# made addTXT check for being passed a string, turn it into a length 1 list.
# explicitly check for adding a string of length > 255 (prohibited).
# a bunch of cleanups from a first pass with pychecker
# new code for pack/unpack. the bitwise stuff uses struct, for a smallish
# (disappointly small, actually) improvement, while addr2bin is much
# much faster now.
#
# DNS/Base.py:
# added DiscoverNameServers. This automatically does the right thing
# on unix/ win32. No idea how MacOS handles this. *sigh*
# Incompatible change: Don't use ParseResolvConf on non-unix, use this
# function, instead!
# a bunch of cleanups from a first pass with pychecker
#
# Revision 1.5 2001/08/09 09:22:28 anthonybaxter
# added what I hope is win32 resolver lookup support. I'll need to try
# and figure out how to get the CVS checkout onto my windows machine to
# make sure it works (wow, doing something other than games on the
# windows machine :)
#
# Code from [email protected]
# win32dns.py from
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66260
#
# Really, ParseResolvConf() should be renamed "FindNameServers" or
# some such.
#
# Revision 1.4 2001/08/09 09:08:55 anthonybaxter
# added identifying header to top of each file
#
# Revision 1.3 2001/07/19 07:20:12 anthony
# Handle blank resolv.conf lines.
# Patch from Bastian Kleineidam
#
# Revision 1.2 2001/07/19 06:57:07 anthony
# cvs keywords added
#
#<|fim▁end|> | raise DNSError, reason
if self.async: |
<|file_name|>BubbleChart.tsx<|end_file_name|><|fim▁begin|>import React from 'react'
import {
StyleSheet,
View,
processColor
} from 'react-native'
import { BubbleChart } from 'react-native-charts-wrapper'
class BubbleChartScreen extends React.Component<any, any> {
constructor(props) {
super(props)
const { modeInfo } = props
const temp = props.value.weekLoc.sort((a, b) => {
const day = a.x - b.x
return day || (a.y - b.y)
})
const valueFormatter = props.value.daysMapper.slice()
valueFormatter.unshift()
valueFormatter.push(props.value.daysMapper[0])
const isX = item => item.x === 6
const values = [...temp.filter(isX), ...temp.filter(item => item.x !== 6)]
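// Note: unshift() with no arguments is a no-op, so the block above only
// appends daysMapper[0] to the end of the copied formatter; the spread then
// rotates the data so entries with x === 6 come first, presumably to keep
// the series aligned with that formatter.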
this.state = {
data: {
dataSets: [{
values,
label: '奖杯数比例',
config: {
color: processColor(modeInfo.deepColor),
highlightCircleWidth: 2,
drawValues: false,
valueTextColor: processColor(modeInfo.titleTextColor)
}
}]
},
legend: {
enabled: true,
textSize: 14,
form: 'CIRCLE',
wordWrapEnabled: true,
textColor: processColor(props.modeInfo.standardTextColor)
},
xAxis: {
valueFormatter,
position: 'BOTTOM',
drawGridLines: false,
granularityEnabled: true,
granularity: 1,
textColor: processColor(props.modeInfo.standardTextColor)
// avoidFirstLastClipping: true
// labelCountForce: true,
// labelCount: 12
},
yAxis: {
left: {
axisMinimum: 0,
axisMaximum: 23,
textColor: processColor(props.modeInfo.standardTextColor)
},
right: {
axisMinimum: 0,
axisMaximum: 23,
textColor: processColor(props.modeInfo.standardTextColor)
}
}
}
}
handleSelect = () => {
}
render() {
// console.log(this.state.data.dataSets[0].values.filter(item => item.x === 6))
return (
<View style={{ height: 250 }}>
<BubbleChart
style={styles.chart}
data={this.state.data}
legend={this.state.legend}
chartDescription={{text: ''}}
xAxis={this.state.xAxis}
yAxis={this.state.yAxis}<|fim▁hole|> onSelect={this.handleSelect}
/>
</View>
)
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#F5FCFF'
},
chart: {
flex: 1
}
})
export default BubbleChartScreen<|fim▁end|> | entryLabelColor={processColor(this.props.modeInfo.titleTextColor)} |
<|file_name|>diff.js<|end_file_name|><|fim▁begin|>function diff( now, props ) { //noinspection FallthroughInSwitchStatementJS
switch ( util.ntype( now ) ) {
case 'number' : case 'string' :
if ( valid( new Type( now ) ) )
now = new Type( now );
else {
if ( !props ) props = now;
now = Type.now();
break;
} // allow [specific] fall-through
case 'array' : case 'object' :
props = now;
now = Type.now();
break;
case 'date' : if ( valid( new Type( +now ) ) ) break; // allow [conditional] fall-through if not a valid date
default : now = Type.now();
}
var diff,
ms = +now - +this,
tense = ms < 0 ? 1 : ms > 0 ? -1 : 0;
if ( !tense ) {
diff = util.obj();
diff.value = 0;
}
else
diff = diff_get( Math.abs( ms ), diff_get_exclusions( props ) );
diff.tense = tense;
return diff;
}
function diff_eval( diff, calc, i, calcs ) {
var time;
if ( diff.__ms__ ) {
if ( !diff.excl[calc[0]] ) {
if ( diff.__ms__ >= calc[1] ) {
time = diff.__ms__ / calc[1];
if ( !( calc[0] in diff.val ) ) {
diff.__ms__ = ( time % 1 ) * calc[1];
diff.val[calc[0]] = Math.floor( time );
}
else {
time = Math.floor( time );
diff.__ms__ -= time * calc[1];
diff.val[calc[0]] += time;
}
}
return diff;
}
// round up or down depending on what's available
if ( ( !calcs[i + 1] || diff.excl[calcs[i + 1][0]] ) && ( calc = calcs[i - 1] ) ) {
time = diff.__ms__ / calc[1];
diff.__ms__ = ( Math.round( time ) * calc[1] ) + ( ( ( diff.__ms__ / calcs[i][1] ) % 1 ) * calcs[i][1] );
return diff_eval( diff, calc, i - 1, [] );
}
return diff;
}
return diff;
}
function diff_get( ms, excl ) {
var diff = time_map.reduce( diff_eval, {
__ms__ : ms, excl : excl, val : util.obj()
} ).val;
diff.value = ms;
return diff;
}
function diff_get_exclusions( props ) {
var excl = util.obj(), incl_remaining = true;
if ( props ) { //noinspection FallthroughInSwitchStatementJS
switch ( util.ntype( props ) ) {
case 'object' : incl_remaining = false; break;
case 'string' : props = props.split( ' ' ); // allow fall-through
case 'array' : props = props.reduce( diff_excl, excl );
incl_remaining = !!util.len( excl );
}
}
time_props.map( function( prop ) {
if ( !( prop in this ) )
this[prop] = !incl_remaining;
}, excl );
return excl;
}
function diff_excl( excl, val ) {
var prop = ( val = String( val ).toLowerCase() ).substring( 1 );<|fim▁hole|> case '+' : excl[prop] = false; break;
case '>' :
time_map.map( diff_excl_iter, { excl : excl, prop : prop, val : true } );
break;
case '<' :
time_map.slice().reverse().map( diff_excl_iter, { excl : excl, prop : prop, val : false } );
break;
default : excl[val] = false;
}
return excl;
}
function diff_excl_iter( calc ) {
if ( calc[0] === this.prop )
this.SET_VALID = true;
if ( this.SET_VALID )
this.excl[calc[0]] = this.val;
}
// this ensures a diff's keys are always in descending order of
// number of milliseconds per unit of time, i.e. year, ..., millisecond
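// e.g. (illustrative) diff_keys({ hour: 5, day: 2, tense: -1, value: 0 })
// returns ['day', 'hour']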
function diff_keys( diff ) {
diff = util.copy( diff ); util.remove( diff, 'tense', 'value' );
// while this may seem like overkill, only having to run `indexOf` once for each sort item means that
// the overall performance is dramatically improved
return Object.keys( diff ).map( function( k ) {
return [time_props.indexOf( k ), k];
} ).sort( function( a, b ) {
a = a[0]; b = b[0];
return a > b ? 1 : -1; // skipping `===` check as we know all indexes are unique
} ).pluck( 1 );
}<|fim▁end|> |
switch ( val.charAt( 0 ) ) {
case '-' : excl[prop] = true; break; |
<|file_name|>test_linalg.py<|end_file_name|><|fim▁begin|>import unittest
import test_dot
import test_math
import test_trans
import test_lapack<|fim▁hole|>suite = unittest.TestSuite([test_dot.suite(),
test_math.suite(),
test_trans.suite(),
test_lapack.suite
])
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=2).run(suite)<|fim▁end|> |
# linear algebra group suite
|
<|file_name|>Sandbox.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 University of Dundee & Open Microscopy Environment
# All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from builtins import str
from builtins import range
from builtins import object
import os
import uuid
import shutil
import logging
import tempfile
from scc.git import get_github, get_token_or_user
from subprocess import Popen
sandbox_url = "https://github.com/ome/snoopys-sandbox.git"
class SandboxTest(object):
def setup_method(self, method):
# Basic logging configuration so if a test fails we can see
# the statements at WARN or ERROR at least.
logging.basicConfig()
self.method = method.__name__
self.cwd = os.getcwd()
self.token = get_token_or_user(local=False)
self.gh = get_github(self.token, dont_ask=True)
self.user = self.gh.get_login()
self.path = tempfile.mkdtemp("", "sandbox-", ".")
self.path = os.path.abspath(self.path)
try:
with open(os.devnull, 'w') as dev_null:
p = Popen(["git", "clone", "-q", sandbox_url, self.path],
stdout=dev_null, stderr=dev_null)
assert p.wait() == 0
self.sandbox = self.gh.git_repo(self.path)
self.origin_remote = "origin"
except Exception:
try:
shutil.rmtree(self.path)
finally:
# Return to cwd regardless.
os.chdir(self.cwd)
raise
# If we succeed, then we change to this dir.
os.chdir(self.path)
def shortDescription(self):
return None
def init_submodules(self):
"""
Fetch submodules after cloning the repository
"""
try:
with open(os.devnull, 'w') as dev_null:
p = Popen(["git", "submodule", "update", "--init"],
stdout=dev_null, stderr=dev_null)
assert p.wait() == 0
except Exception:
os.chdir(self.path)
raise
def uuid(self):
"""
Return a string representing a uuid.uuid4
"""
return str(uuid.uuid4())
def fake_branch(self, head="master", commits=None):
"""
Return a local branch with a list of commits, defaults to a single
commit adding a unique file
"""
name = self.uuid()
if commits is None:
commits = [(name, "hi")]
self.sandbox.new_branch(name, head=head)
for n in range(len(commits)):
fname, txt = commits[n]
fname = os.path.join(self.path, fname)
with open(fname, 'w') as f:
f.write(txt)
self.sandbox.add(fname)
self.sandbox.commit("%d: Writing %s" % (n, name))
self.sandbox.get_status()
return name
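# Hypothetical call from a test, per the docstring above:
#   branch = self.fake_branch(commits=[("a.txt", "one"), ("b.txt", "two")])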
def add_remote(self):
"""
Add the remote of the authenticated Github user
"""
if self.user not in self.sandbox.list_remotes():
remote_url = "https://%s:[email protected]/%s/%s.git" \
% (self.token, self.user, self.sandbox.origin.name)
self.sandbox.add_remote(self.user, remote_url)
def rename_origin_remote(self, new_name):
"""
Rename the remote used for the upstream repository
"""
self.sandbox.call("git", "remote", "rename", self.origin_remote,
new_name)
self.origin_remote = new_name
def push_branch(self, branch):
"""
Push a local branch to GitHub
"""
self.add_remote()
self.sandbox.push_branch(branch, remote=self.user)
def open_pr(self, branch, base, description=None):
"""
Push a local branch and open a PR against the selected base
"""
self.push_branch(branch)
if description is None:
description = ("This is a call to Sandbox.open_pr by %s" %
self.method)
new_pr = self.sandbox.origin.open_pr(
title="test %s" % branch,
description=description,
base=base,
head="%s:%s" % (self.user, branch))
return new_pr<|fim▁hole|> finally:
try:
shutil.rmtree(self.path)
finally:
# Return to cwd regardless.
os.chdir(self.cwd)<|fim▁end|> |
def teardown_method(self, method):
try:
self.sandbox.cleanup() |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
#![feature(never_type)]
mod cachelib_utils;
mod memcache_utils;
mod mock_store;
use std::borrow::Borrow;
use std::collections::{HashMap, HashSet};
use std::future::Future;
use std::hash::Hash;
use std::time::Duration;
use abomonation::Abomonation;
use anyhow::{Context as _, Error};
use async_trait::async_trait;
use auto_impl::auto_impl;
use bytes::Bytes;
use cloned::cloned;
use futures::stream::{self, StreamExt, TryStreamExt};
use itertools::Itertools;
use memcache::{KeyGen, MEMCACHE_VALUE_MAX_SIZE};
use stats::prelude::*;
pub use crate::cachelib_utils::CachelibHandler;
pub use crate::memcache_utils::MemcacheHandler;
pub use crate::mock_store::MockStoreStats;
pub mod macro_reexport {
pub use once_cell;
}
define_stats_struct! {
CacheStats("mononoke.cache.{}", label: String),
cachelib_hit: timeseries("cachelib.hit"; Rate, Sum),
cachelib_miss: timeseries("cachelib.miss"; Rate, Sum),
memcache_hit: timeseries("memcache.hit"; Rate, Sum),
memcache_miss: timeseries("memcache.miss"; Rate, Sum),
memcache_internal_err: timeseries("memcache.internal_err"; Rate, Sum),
memcache_deserialize_err: timeseries("memcache.deserialize_err"; Rate, Sum),
origin_hit: timeseries("origin.hit"; Rate, Sum),
origin_miss: timeseries("origin.miss"; Rate, Sum),
}
#[macro_export]
macro_rules! impl_singleton_stats {
( $name:literal ) => {
fn stats(&self) -> &$crate::CacheStats {
use $crate::macro_reexport::once_cell::sync::Lazy;
static STATS: Lazy<$crate::CacheStats> =
Lazy::new(|| $crate::CacheStats::new(String::from($name)));<|fim▁hole|> };
}
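// Call-site sketch: inside an `EntityStore` impl the macro expands to the
// required `stats()` method, e.g. `impl_singleton_stats!("my_cache");`
// ("my_cache" is an illustrative label; see the test impl below).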
/// Error type to help with proper reporting of memcache errors
pub enum McErrorKind {
/// error came from calling memcache API
MemcacheInternal,
/// value returned from memcache was None
Missing,
/// deserialization of memcache data to Rust structures failed
Deserialization,
}
const MEMCACHE_CONCURRENCY: usize = 100;
pub type McResult<T> = Result<T, McErrorKind>;
struct CachelibKey(String);
struct MemcacheKey(String);
/// TTL for caching an item
#[derive(Copy, Clone)]
pub enum CacheTtl {
/// The item is valid forever, and can be cached indefinitely
NoTtl,
/// Fetch from backing store once the duration given expires
Ttl(Duration),
}
/// Whether or not to cache an item
#[derive(Copy, Clone)]
pub enum CacheDisposition {
/// Cache this item with the given TTL
Cache(CacheTtl),
/// Do not cache this item; re-fetch from backing store if it's requested again
Ignore,
}
/// Implement this for a data item that can be cached. You will also need
/// #[derive(Abomonation)] on the data item.
pub trait MemcacheEntity: Sized {
/// Convert the item to bytes that can live in Memcache and be deserialized
/// in another process
fn serialize(&self) -> Bytes;
/// Deserialize the item from bytes into an object, or fail to do so
fn deserialize(bytes: Bytes) -> Result<Self, ()>;
}
/// Implement this trait to indicate that you can cache values retrieved through you
#[auto_impl(&)]
pub trait EntityStore<V> {
/// Get the cachelib handler. This can be created with `.into()` on a `VolatileLruCachePool`
fn cachelib(&self) -> &CachelibHandler<V>;
/// Get the Memcache KeyGen, for creating Memcache keys. This has both code and site versions,
/// as well as a prefix.
fn keygen(&self) -> &KeyGen;
/// Get the Memcache handler. This can be created with `into()` on a `MemcacheClient`.
fn memcache(&self) -> &MemcacheHandler;
/// Given a value `v`, decide whether or not to cache it.
fn cache_determinator(&self, v: &V) -> CacheDisposition;
/// Finds the cache stats for this handler
///
/// Implement this method with `caching_ext::impl_singleton_stats!` macro, instead of by hand
fn stats(&self) -> &CacheStats;
/// Whether Memcache writes should run in the background. This is normally the desired behavior
/// so this defaults to true, but for tests it's useful to run them synchronously to get
/// consistent outcomes.
fn spawn_memcache_writes(&self) -> bool {
true
}
}
/// Implement this to make it possible to fetch keys via the cache
#[async_trait]
#[auto_impl(&)]
pub trait KeyedEntityStore<K, V>: EntityStore<V> {
/// Given an item key, return the cachelib key to use.
fn get_cache_key(&self, key: &K) -> String;
/// Given a set of keys to fetch from backing store, return a map from keys to fetched values
///
/// If a key has no value in the backing store, omit it from the result map. Only use an
/// Error for a failure to fetch, not absence
async fn get_from_db(&self, keys: HashSet<K>) -> Result<HashMap<K, V>, Error>;
}
/// Utility function to fetch all keys in a single chunk without parallelism
pub fn get_or_fill<K, V>(
store: impl KeyedEntityStore<K, V>,
keys: HashSet<K>,
) -> impl Future<Output = Result<HashMap<K, V>, Error>>
where
K: Hash + Eq + Clone,
// TODO: We should relax the bounds on cachelib's set_cached. We don't need all of this:
V: Abomonation + MemcacheEntity + Send + Clone + 'static,
{
get_or_fill_chunked(store, keys, usize::MAX, 1)
}
/// The core of caching with this module. Takes a store that implements
/// `KeyedEntityStore`, and a set of keys to fetch. Returns a map
/// of fetched values.
///
/// Your accessor functions for consumers should call this to get values
/// from cache or backing store, as this will do the job of keeping
/// cachelib filled from memcache, and memcache filled from your backing store
///
/// fetch_chunk and parallel_chunks are used to implement chunked
/// and parallel fetching. Keys to fetch from the backing store
/// will be split into `fetch_chunk` size groups, and at most `parallel_chunks`
/// groups will be in flight at once.
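///
/// Call-site sketch (`my_store` is a hypothetical `KeyedEntityStore`):
///
/// ```ignore
/// // at most 4 concurrent backing-store calls of up to 100 keys each
/// let found = get_or_fill_chunked(&my_store, keys, 100, 4).await?;
/// ```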
pub async fn get_or_fill_chunked<K, V>(
store: impl KeyedEntityStore<K, V>,
keys: HashSet<K>,
fetch_chunk: usize,
parallel_chunks: usize,
) -> Result<HashMap<K, V>, Error>
where
K: Hash + Eq + Clone,
// TODO: We should relax the bounds on cachelib's set_cached. We don't need all of this:
V: Abomonation + MemcacheEntity + Send + Clone + 'static,
{
let mut ret = HashMap::<K, V>::with_capacity(keys.len());
let stats = store.stats();
let cachelib_keys: Vec<_> = keys
.into_iter()
.map(|key| {
let cachelib_key = CachelibKey(store.get_cache_key(&key));
(key, cachelib_key)
})
.collect();
let (fetched_from_cachelib, to_fetch_from_memcache) = store
.cachelib()
.get_multiple_from_cachelib::<K>(cachelib_keys)
.with_context(|| "Error reading from cachelib")?;
stats
.cachelib_hit
.add_value(fetched_from_cachelib.len() as i64);
stats
.cachelib_miss
.add_value(to_fetch_from_memcache.len() as i64);
ret.extend(fetched_from_cachelib);
let to_fetch_from_memcache: Vec<(K, CachelibKey, MemcacheKey)> = to_fetch_from_memcache
.into_iter()
.map(|(key, cachelib_key)| {
let memcache_key = MemcacheKey(store.keygen().key(&cachelib_key.0));
(key, cachelib_key, memcache_key)
})
.collect();
let to_fetch_from_store = {
let (fetched_from_memcache, to_fetch_from_store) =
get_multiple_from_memcache(store.memcache(), to_fetch_from_memcache, stats).await;
stats
.memcache_hit
.add_value(fetched_from_memcache.len() as i64);
stats
.memcache_miss
.add_value(to_fetch_from_store.len() as i64);
fill_multiple_cachelib(
store.cachelib(),
fetched_from_memcache
.values()
.filter_map(|(v, k)| match store.cache_determinator(v) {
CacheDisposition::Cache(ttl) => Some((k, ttl, v)),
_ => None,
}),
);
ret.extend(fetched_from_memcache.into_iter().map(|(k, (v, _))| (k, v)));
to_fetch_from_store
};
if !to_fetch_from_store.is_empty() {
let to_fetch_from_store: Vec<_> = to_fetch_from_store
.into_iter()
.chunks(fetch_chunk)
.into_iter()
.map(|chunk| {
let mut keys = HashSet::new();
let mut key_mapping = HashMap::new();
for (key, cachelib_key, memcache_key) in chunk {
keys.insert(key.clone());
key_mapping.insert(key.clone(), (cachelib_key, memcache_key));
}
fill_one_chunk(&store, keys, key_mapping)
})
.collect();
stream::iter(to_fetch_from_store)
.buffer_unordered(parallel_chunks)
.try_fold(&mut ret, |ret, chunk| async move {
ret.extend(chunk);
Ok::<_, Error>(ret)
})
.await?;
}
Ok(ret)
}
async fn fill_one_chunk<K, V>(
store: &impl KeyedEntityStore<K, V>,
keys: HashSet<K>,
mut key_mapping: HashMap<K, (CachelibKey, MemcacheKey)>,
) -> Result<HashMap<K, V>, Error>
where
K: Hash + Eq + Clone,
// TODO: We should relax the bounds on cachelib's set_cached. We don't need all of this:
V: Abomonation + MemcacheEntity + Send + Clone + 'static,
{
let n_keys = keys.len();
let stats = store.stats();
let data = store
.get_from_db(keys)
.await
.with_context(|| "Error reading from store")?;
stats.origin_hit.add_value(data.len() as i64);
stats.origin_miss.add_value((n_keys - data.len()) as i64);
fill_caches_by_key(
store,
data.iter().map(|(key, v)| {
let (cachelib_key, memcache_key) = key_mapping
.remove(key)
.expect("caching_ext: Missing entry in key_mapping, this should not happen");
(cachelib_key, memcache_key, v)
}),
)
.await;
Ok(data)
}
/// Directly fill a cache from data you've prefetched outside the caching system
/// Allows things like microwave to avoid any backing store fetches
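///
/// Sketch (hypothetical warm-up path): `fill_cache(&store, warm.iter()).await;`
/// where `warm: HashMap<K, V>` was prefetched elsewhere.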
pub async fn fill_cache<'a, K, V>(
store: impl KeyedEntityStore<K, V>,
data: impl IntoIterator<Item = (&'a K, &'a V)>,
) where
K: Hash + Eq + Clone + 'a,
V: Abomonation + MemcacheEntity + Send + Clone + 'static,
{
fill_caches_by_key(
&store,
data.into_iter().map(|(k, v)| {
let cachelib_key = CachelibKey(store.get_cache_key(&k));
let memcache_key = MemcacheKey(store.keygen().key(&cachelib_key.0));
(cachelib_key, memcache_key, v)
}),
)
.await;
}
async fn fill_caches_by_key<'a, V>(
store: impl EntityStore<V>,
data: impl IntoIterator<Item = (CachelibKey, MemcacheKey, &'a V)>,
) where
V: Abomonation + MemcacheEntity + Send + Clone + 'static,
{
let mut cachelib_keys = Vec::new();
let mut memcache_keys = Vec::new();
for (cachelib_key, memcache_key, v) in data.into_iter() {
let ttl = match store.cache_determinator(v) {
CacheDisposition::Cache(ttl) => ttl,
CacheDisposition::Ignore => continue,
};
memcache_keys.push((memcache_key, ttl, v));
cachelib_keys.push((cachelib_key, ttl, v));
}
fill_multiple_cachelib(store.cachelib(), cachelib_keys);
fill_multiple_memcache(
store.memcache(),
memcache_keys,
store.spawn_memcache_writes(),
)
.await;
}
async fn get_multiple_from_memcache<K, V>(
memcache: &MemcacheHandler,
keys: Vec<(K, CachelibKey, MemcacheKey)>,
stats: &CacheStats,
) -> (
HashMap<K, (V, CachelibKey)>,
Vec<(K, CachelibKey, MemcacheKey)>,
)
where
K: Eq + Hash,
V: MemcacheEntity,
{
let mc_fetch_futs = keys
.into_iter()
.map(move |(key, cachelib_key, memcache_key)| {
cloned!(memcache);
async move {
let res = memcache
.get(memcache_key.0.clone())
.await
.map_err(|_| McErrorKind::MemcacheInternal)
.and_then(|maybe_bytes| maybe_bytes.ok_or(McErrorKind::Missing))
.and_then(|bytes| {
V::deserialize(bytes).map_err(|()| McErrorKind::Deserialization)
});
(key, cachelib_key, memcache_key, res)
}
});
let mut entries = stream::iter(mc_fetch_futs).buffered(MEMCACHE_CONCURRENCY);
let mut fetched = HashMap::new();
let mut left_to_fetch = Vec::new();
while let Some((key, cachelib_key, memcache_key, res)) = entries.next().await {
match res {
Ok(entity) => {
fetched.insert(key, (entity, cachelib_key));
}
Err(e) => {
match e {
McErrorKind::MemcacheInternal => stats.memcache_internal_err.add_value(1),
McErrorKind::Deserialization => stats.memcache_deserialize_err.add_value(1),
McErrorKind::Missing => {} // no op, we record missing at a higher level anyway.
};
left_to_fetch.push((key, cachelib_key, memcache_key));
}
}
}
(fetched, left_to_fetch)
}
fn fill_multiple_cachelib<'a, V>(
cachelib: &'a CachelibHandler<V>,
data: impl IntoIterator<Item = (impl Borrow<CachelibKey> + 'a, CacheTtl, &'a V)>,
) where
V: Abomonation + Clone + Send + 'static,
{
for (cachelib_key, ttl, v) in data {
let cachelib_key = cachelib_key.borrow();
let ttl = match ttl {
CacheTtl::NoTtl => None,
CacheTtl::Ttl(ttl) => Some(ttl),
};
// NOTE: We ignore failures to cache individual entries here.
let _ = cachelib.set_cached(&cachelib_key.0, v, ttl);
}
}
async fn fill_multiple_memcache<'a, V: 'a>(
memcache: &'a MemcacheHandler,
data: impl IntoIterator<Item = (MemcacheKey, CacheTtl, &'a V)>,
spawn: bool,
) where
V: MemcacheEntity,
{
let futs = data
.into_iter()
.filter_map(|(memcache_key, ttl, v)| {
let bytes = v.serialize();
if bytes.len() >= MEMCACHE_VALUE_MAX_SIZE {
return None;
}
cloned!(memcache);
Some(async move {
match ttl {
CacheTtl::NoTtl => {
let _ = memcache.set(memcache_key.0, bytes).await;
}
CacheTtl::Ttl(ttl) => {
let _ = memcache.set_with_ttl(memcache_key.0, bytes, ttl).await;
}
}
})
})
.collect::<Vec<_>>();
let fut = stream::iter(futs).for_each_concurrent(MEMCACHE_CONCURRENCY, |fut| fut);
if spawn {
tokio::task::spawn(fut);
} else {
fut.await;
}
}
#[cfg(test)]
mod test {
use super::*;
use abomonation_derive::Abomonation;
use maplit::{hashmap, hashset};
use std::sync::atomic::{AtomicUsize, Ordering};
#[derive(Abomonation, Clone, Debug, PartialEq, Eq)]
struct TestEntity(Vec<u8>);
impl MemcacheEntity for TestEntity {
fn serialize(&self) -> Bytes {
Bytes::from(self.0.clone())
}
fn deserialize(bytes: Bytes) -> Result<Self, ()> {
Ok(Self(bytes.to_vec()))
}
}
struct TestStore {
keygen: KeyGen,
cachelib: CachelibHandler<TestEntity>,
memcache: MemcacheHandler,
calls: AtomicUsize,
keys: AtomicUsize,
data: HashMap<String, TestEntity>,
}
impl TestStore {
pub fn new() -> Self {
Self {
keygen: KeyGen::new("", 0, 0),
cachelib: CachelibHandler::create_mock(),
memcache: MemcacheHandler::create_mock(),
calls: AtomicUsize::new(0),
keys: AtomicUsize::new(0),
data: HashMap::new(),
}
}
}
impl EntityStore<TestEntity> for TestStore {
fn cachelib(&self) -> &CachelibHandler<TestEntity> {
&self.cachelib
}
fn keygen(&self) -> &KeyGen {
&self.keygen
}
fn memcache(&self) -> &MemcacheHandler {
&self.memcache
}
fn cache_determinator(&self, _: &TestEntity) -> CacheDisposition {
CacheDisposition::Cache(CacheTtl::NoTtl)
}
impl_singleton_stats!("test");
fn spawn_memcache_writes(&self) -> bool {
false
}
}
#[async_trait]
impl KeyedEntityStore<String, TestEntity> for TestStore {
fn get_cache_key(&self, key: &String) -> String {
format!("key:{}", key)
}
async fn get_from_db(
&self,
keys: HashSet<String>,
) -> Result<HashMap<String, TestEntity>, Error> {
self.calls.fetch_add(1, Ordering::Relaxed);
self.keys.fetch_add(keys.len(), Ordering::Relaxed);
Ok(keys
.into_iter()
.filter_map(|k| {
let v = self.data.get(&k).cloned();
v.map(|v| (k, v))
})
.collect())
}
}
#[tokio::test]
async fn simple() -> Result<(), Error> {
let store = TestStore::new();
let res = get_or_fill(&store, hashset! {}).await?;
assert_eq!(res.len(), 0);
assert_eq!(store.cachelib.gets_count(), 0);
assert_eq!(store.memcache.gets_count(), 0);
let res = get_or_fill(&store, hashset! {"key".into()}).await?;
assert_eq!(res.len(), 0);
assert_eq!(store.cachelib.gets_count(), 1);
assert_eq!(store.memcache.gets_count(), 1);
assert_eq!(store.keys.load(Ordering::Relaxed), 1);
Ok(())
}
#[tokio::test]
async fn fetch_from_db_cachelib_memcache() -> Result<(), Error> {
let mut store = TestStore::new();
let e = TestEntity(vec![0]);
store.data.insert("key".into(), e.clone());
// Fetch from db
let res = get_or_fill(&store, hashset! {"key".into()}).await?;
assert_eq!(res, hashmap! { "key".into() => e.clone() });
assert_eq!(store.cachelib.gets_count(), 1);
assert_eq!(store.memcache.gets_count(), 1);
assert_eq!(store.keys.load(Ordering::Relaxed), 1);
// Now fetch from cachelib
let res = get_or_fill(&store, hashset! {"key".into()}).await?;
assert_eq!(res, hashmap! { "key".into() => e.clone() });
assert_eq!(store.cachelib.gets_count(), 2);
assert_eq!(store.memcache.gets_count(), 1);
assert_eq!(store.keys.load(Ordering::Relaxed), 1);
// Reset cachelib, fetch from memcache
store.cachelib = CachelibHandler::create_mock();
let res = get_or_fill(&store, hashset! {"key".into()}).await?;
assert_eq!(res, hashmap! { "key".into() => e.clone() });
assert_eq!(store.cachelib.gets_count(), 1);
assert_eq!(store.memcache.gets_count(), 2);
assert_eq!(store.keys.load(Ordering::Relaxed), 1);
Ok(())
}
#[tokio::test]
async fn fetch_from_db() -> Result<(), Error> {
let mut store = TestStore::new();
let e0 = TestEntity(vec![0]);
let e1 = TestEntity(vec![1]);
let e2 = TestEntity(vec![2]);
store.data.insert("key0".into(), e0.clone());
store.data.insert("key1".into(), e1.clone());
store.data.insert("key2".into(), e2.clone());
let res = get_or_fill(
&store,
hashset! { "key0".into(), "key1".into(), "key2".into() },
)
.await?;
assert_eq!(
res,
hashmap! { "key0".into() => e0, "key1".into() => e1, "key2".into() => e2 }
);
assert_eq!(store.calls.load(Ordering::Relaxed), 1);
assert_eq!(store.cachelib.gets_count(), 3);
assert_eq!(store.memcache.gets_count(), 3);
assert_eq!(store.keys.load(Ordering::Relaxed), 3);
Ok(())
}
#[tokio::test]
async fn fetch_from_db_chunked() -> Result<(), Error> {
let mut store = TestStore::new();
let e0 = TestEntity(vec![0]);
let e1 = TestEntity(vec![1]);
let e2 = TestEntity(vec![2]);
store.data.insert("key0".into(), e0.clone());
store.data.insert("key1".into(), e1.clone());
store.data.insert("key2".into(), e2.clone());
let res = get_or_fill_chunked(
&store,
hashset! { "key0".into(), "key1".into(), "key2".into() },
1,
3,
)
.await?;
assert_eq!(
res,
hashmap! { "key0".into() => e0, "key1".into() => e1, "key2".into() => e2 }
);
assert_eq!(store.calls.load(Ordering::Relaxed), 3);
assert_eq!(store.cachelib.gets_count(), 3);
assert_eq!(store.memcache.gets_count(), 3);
assert_eq!(store.keys.load(Ordering::Relaxed), 3);
Ok(())
}
#[tokio::test]
async fn fetch_from_all() -> Result<(), Error> {
let mut store = TestStore::new();
let e0 = TestEntity(vec![0]);
let e1 = TestEntity(vec![1]);
let e2 = TestEntity(vec![2]);
store.data.insert("key0".into(), e0.clone());
store.data.insert("key1".into(), e1.clone());
store.data.insert("key2".into(), e2.clone());
let res = get_or_fill(&store, hashset! { "key1".into() }).await?;
assert_eq!(res, hashmap! { "key1".into() => e1.clone() });
assert_eq!(store.cachelib.gets_count(), 1);
assert_eq!(store.memcache.gets_count(), 1);
assert_eq!(store.calls.load(Ordering::Relaxed), 1);
// Reset cachelib
store.cachelib = CachelibHandler::create_mock();
let res = get_or_fill(&store, hashset! { "key0".into() }).await?;
assert_eq!(res, hashmap! { "key0".into() => e0.clone() });
assert_eq!(store.cachelib.gets_count(), 1);
assert_eq!(store.memcache.gets_count(), 2);
assert_eq!(store.calls.load(Ordering::Relaxed), 2);
let res = get_or_fill(
&store,
hashset! { "key0".into(), "key1".into(), "key2".into() },
)
.await?;
assert_eq!(
res,
hashmap! { "key0".into() => e0.clone(), "key1".into() => e1.clone(), "key2".into() => e2.clone() }
);
assert_eq!(store.cachelib.gets_count(), 1 + 3); // 3 new fetches from cachelib, 2 misses
assert_eq!(store.memcache.gets_count(), 2 + 2); // 2 new fetches from memcache, 1 miss
assert_eq!(store.calls.load(Ordering::Relaxed), 2 + 1); // 1 fetch from db
// Only from cachelib
let res = get_or_fill(
&store,
hashset! { "key0".into(), "key1".into(), "key2".into() },
)
.await?;
assert_eq!(
res,
hashmap! { "key0".into() => e0.clone(), "key1".into() => e1.clone(), "key2".into() => e2.clone() }
);
assert_eq!(store.cachelib.gets_count(), 7);
assert_eq!(store.memcache.gets_count(), 4);
assert_eq!(store.calls.load(Ordering::Relaxed), 3);
// // Reset cachelib, only from memcache
store.cachelib = CachelibHandler::create_mock();
let res = get_or_fill(
&store,
hashset! { "key0".into(), "key1".into(), "key2".into() },
)
.await?;
assert_eq!(
res,
hashmap! { "key0".into() => e0.clone(), "key1".into() => e1.clone(), "key2".into() => e2.clone() }
);
assert_eq!(store.cachelib.gets_count(), 3); // 3 misses
assert_eq!(store.memcache.gets_count(), 4 + 3); // 3 hits
assert_eq!(store.calls.load(Ordering::Relaxed), 3);
Ok(())
}
#[tokio::test]
async fn get_from_db_elision() -> Result<(), Error> {
let store = TestStore::new();
get_or_fill(&store, hashset! {}).await?;
assert_eq!(store.calls.load(Ordering::Relaxed), 0);
Ok(())
}
#[tokio::test]
async fn test_fill_cache() -> Result<(), Error> {
let store = TestStore::new();
let e0 = TestEntity(vec![0]);
fill_cache(&store, hashmap! { "key0".into() => e0.clone() }.iter()).await;
let res = get_or_fill(&store, hashset! { "key0".into() }).await?;
assert_eq!(res, hashmap! { "key0".into() => e0.clone() });
assert_eq!(store.cachelib.gets_count(), 1);
assert_eq!(store.memcache.gets_count(), 0);
assert_eq!(store.calls.load(Ordering::Relaxed), 0);
Ok(())
}
}<|fim▁end|> | &*STATS
} |
<|file_name|>OsgiBundlePacking.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ivy.core.pack;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.ivy.util.FileUtil;
/**
* Packaging which handles OSGi bundles with an inner packed jar
*/<|fim▁hole|>public class OsgiBundlePacking extends ZipPacking {
private static final String[] NAMES = {"bundle"};
@Override
public String[] getNames() {
return NAMES;
}
@Override
protected void writeFile(InputStream zip, File f) throws IOException {
// XXX maybe we should only unpack file listed by the 'Bundle-ClassPath' MANIFEST header ?
if (f.getName().endsWith(".jar.pack.gz")) {
zip = FileUtil.unwrapPack200(zip);
f = new File(f.getParentFile(), f.getName().substring(0, f.getName().length() - 8));
}
super.writeFile(zip, f);
}
}<|fim▁end|> | |
<|file_name|>rpc-tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Run Regression Test Suite
This module calls down into individual test cases via subprocess. It will
forward all unrecognized arguments onto the individual test scripts, other
than:
- `-extended`: run the "extended" test suite in addition to the basic one.
- `-win`: signal that this is running in a Windows environment, and we
should run the tests.
- `--coverage`: this generates a basic coverage report for the RPC
interface.
For a description of arguments recognized by test scripts, see
`qa/pull-tester/test_framework/test_framework.py:BitcoinTestFramework.main`.
<|fim▁hole|>"""
import os
import time
import shutil
import sys
import subprocess
import tempfile
import re
from tests_config import *
#If imported values are not defined then set to zero (or disabled)
if 'ENABLE_WALLET' not in vars():
ENABLE_WALLET=0
if 'ENABLE_BITCOIND' not in vars():
ENABLE_BITCOIND=0
if 'ENABLE_UTILS' not in vars():
ENABLE_UTILS=0
if 'ENABLE_ZMQ' not in vars():
ENABLE_ZMQ=0
ENABLE_COVERAGE=0
#Create a set to store arguments and create the passOn string
opts = set()
passOn = ""
p = re.compile("^--")
bold = ("","")
if (os.name == 'posix'):
bold = ('\033[0m', '\033[1m')
for arg in sys.argv[1:]:
if arg == '--coverage':
ENABLE_COVERAGE = 1
elif (p.match(arg) or arg == "-h"):
passOn += " " + arg
else:
opts.add(arg)
#Set env vars
buildDir = BUILDDIR
if "DASHD" not in os.environ:
os.environ["DASHD"] = buildDir + '/src/dashd' + EXEEXT
if "DASHCLI" not in os.environ:
os.environ["DASHCLI"] = buildDir + '/src/dash-cli' + EXEEXT
if EXEEXT == ".exe" and "-win" not in opts:
# https://github.com/bitcoin/bitcoin/commit/d52802551752140cf41f0d9a225a43e84404d3e9
# https://github.com/bitcoin/bitcoin/pull/5677#issuecomment-136646964
print "Win tests currently disabled by default. Use -win option to enable"
sys.exit(0)
if not (ENABLE_WALLET == 1 and ENABLE_UTILS == 1 and ENABLE_BITCOIND == 1):
print "No rpc tests to run. Wallet, utils, and bitcoind must all be enabled"
sys.exit(0)
# python-zmq may not be installed. Handle this gracefully and with some helpful info
if ENABLE_ZMQ:
try:
import zmq
except ImportError as e:
print("ERROR: \"import zmq\" failed. Set ENABLE_ZMQ=0 or " \
"to run zmq tests, see dependency info in /qa/README.md.")
raise e
#Tests
testScripts = [
'bip68-112-113-p2p.py',
'wallet.py',
'wallet-hd.py',
'listtransactions.py',
'receivedby.py',
'mempool_resurrect_test.py',
'txn_doublespend.py --mineblock',
'txn_clone.py',
'getchaintips.py',
'rawtransactions.py',
'rest.py',
'mempool_spendcoinbase.py',
'mempool_reorg.py',
'mempool_limit.py',
'httpbasics.py',
'multi_rpc.py',
'zapwallettxes.py',
'proxy_test.py',
'merkle_blocks.py',
'fundrawtransaction.py',
'signrawtransactions.py',
'walletbackup.py',
'nodehandling.py',
'reindex.py',
'addressindex.py',
'timestampindex.py',
'spentindex.py',
'decodescript.py',
'p2p-fullblocktest.py', # NOTE: needs dash_hash to pass
'blockchain.py',
'disablewallet.py',
'sendheaders.py', # NOTE: needs dash_hash to pass
'keypool.py',
'prioritise_transaction.py',
'invalidblockrequest.py', # NOTE: needs dash_hash to pass
'invalidtxrequest.py', # NOTE: needs dash_hash to pass
'abandonconflict.py',
'p2p-versionbits-warning.py',
]
if ENABLE_ZMQ:
testScripts.append('zmq_test.py')
testScriptsExt = [
'bip9-softforks.py',
'bip65-cltv.py',
'bip65-cltv-p2p.py', # NOTE: needs dash_hash to pass
'bip68-sequence.py',
'bipdersig-p2p.py', # NOTE: needs dash_hash to pass
'bipdersig.py',
'getblocktemplate_longpoll.py', # FIXME: "socket.error: [Errno 54] Connection reset by peer" on my Mac, same as https://github.com/bitcoin/bitcoin/issues/6651
'getblocktemplate_proposals.py',
'txn_doublespend.py',
'txn_clone.py --mineblock',
# 'pruning.py', # Prune mode is incompatible with -txindex.
'forknotify.py',
'invalidateblock.py',
# 'rpcbind_test.py', #temporary, bug in libevent, see #6655
'smartfees.py',
'maxblocksinflight.py',
'p2p-acceptblock.py', # NOTE: needs dash_hash to pass
'mempool_packages.py',
'maxuploadtarget.py',
# 'replace-by-fee.py', # RBF is disabled in Dash Core
]
def runtests():
coverage = None
if ENABLE_COVERAGE:
coverage = RPCCoverage()
print("Initializing coverage directory at %s\n" % coverage.dir)
rpcTestDir = buildDir + '/qa/rpc-tests/'
run_extended = '-extended' in opts
cov_flag = coverage.flag if coverage else ''
flags = " --srcdir %s/src %s %s" % (buildDir, cov_flag, passOn)
#Run Tests
for i in range(len(testScripts)):
if (len(opts) == 0
or (len(opts) == 1 and "-win" in opts )
or run_extended
or testScripts[i] in opts
or re.sub(".py$", "", testScripts[i]) in opts ):
print("Running testscript %s%s%s ..." % (bold[1], testScripts[i], bold[0]))
time0 = time.time()
subprocess.check_call(
rpcTestDir + testScripts[i] + flags, shell=True)
print("Duration: %s s\n" % (int(time.time() - time0)))
# exit if help is called so we print just one set of
# instructions
p = re.compile(" -h| --help")
if p.match(passOn):
sys.exit(0)
# Run Extended Tests
for i in range(len(testScriptsExt)):
if (run_extended or testScriptsExt[i] in opts
or re.sub(".py$", "", testScriptsExt[i]) in opts):
print(
"Running 2nd level testscript "
+ "%s%s%s ..." % (bold[1], testScriptsExt[i], bold[0]))
time0 = time.time()
subprocess.check_call(
rpcTestDir + testScriptsExt[i] + flags, shell=True)
print("Duration: %s s\n" % (int(time.time() - time0)))
if coverage:
coverage.report_rpc_coverage()
print("Cleaning up coverage data")
coverage.cleanup()
class RPCCoverage(object):
"""
Coverage reporting utilities for pull-tester.
Coverage calculation works by having each test script subprocess write
coverage files into a particular directory. These files contain the RPC
commands invoked during testing, as well as a complete listing of RPC
commands per `bitcoin-cli help` (`rpc_interface.txt`).
After all tests complete, the commands run are combined and diff'd against
the complete list to calculate uncovered RPC commands.
See also: qa/rpc-tests/test_framework/coverage.py
"""
def __init__(self):
self.dir = tempfile.mkdtemp(prefix="coverage")
self.flag = '--coveragedir %s' % self.dir
def report_rpc_coverage(self):
"""
Print out RPC commands that were unexercised by tests.
"""
uncovered = self._get_uncovered_rpc_commands()
if uncovered:
print("Uncovered RPC commands:")
print("".join((" - %s\n" % i) for i in sorted(uncovered)))
else:
print("All RPC commands covered.")
def cleanup(self):
return shutil.rmtree(self.dir)
def _get_uncovered_rpc_commands(self):
"""
Return a set of currently untested RPC commands.
"""
# This is shared from `qa/rpc-tests/test-framework/coverage.py`
REFERENCE_FILENAME = 'rpc_interface.txt'
COVERAGE_FILE_PREFIX = 'coverage.'
coverage_ref_filename = os.path.join(self.dir, REFERENCE_FILENAME)
coverage_filenames = set()
all_cmds = set()
covered_cmds = set()
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
with open(coverage_ref_filename, 'r') as f:
all_cmds.update([i.strip() for i in f.readlines()])
for root, dirs, files in os.walk(self.dir):
for filename in files:
if filename.startswith(COVERAGE_FILE_PREFIX):
coverage_filenames.add(os.path.join(root, filename))
for filename in coverage_filenames:
with open(filename, 'r') as f:
covered_cmds.update([i.strip() for i in f.readlines()])
return all_cmds - covered_cmds
if __name__ == '__main__':
runtests()<|fim▁end|> | |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for stormtrooper project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
<|fim▁hole|>
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "stormtrooper.settings")
application = get_wsgi_application()<|fim▁end|> | from django.core.wsgi import get_wsgi_application |
<|file_name|>test_validators.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name,len-as-condition
from functools import partial
from rebulk.pattern import StringPattern
from ..validators import chars_before, chars_after, chars_surround, validators
chars = ' _.'
left = partial(chars_before, chars)
right = partial(chars_after, chars)
surrounding = partial(chars_surround, chars)
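# In the tests below a match survives only when the character on the
# validated side is one of ' ', '_', '.' or the string boundary.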
def test_left_chars():
matches = list(StringPattern("word", validator=left).matches("xxxwordxxx"))
assert len(matches) == 0
matches = list(StringPattern("word", validator=left).matches("xxx_wordxxx"))<|fim▁hole|>
def test_right_chars():
matches = list(StringPattern("word", validator=right).matches("xxxwordxxx"))
assert len(matches) == 0
matches = list(StringPattern("word", validator=right).matches("xxxword.xxx"))
assert len(matches) == 1
matches = list(StringPattern("word", validator=right).matches("xxxword"))
assert len(matches) == 1
def test_surrounding_chars():
matches = list(StringPattern("word", validator=surrounding).matches("xxxword xxx"))
assert len(matches) == 0
matches = list(StringPattern("word", validator=surrounding).matches("xxx.wordxxx"))
assert len(matches) == 0
matches = list(StringPattern("word", validator=surrounding).matches("xxx word_xxx"))
assert len(matches) == 1
matches = list(StringPattern("word", validator=surrounding).matches("word"))
assert len(matches) == 1
def test_chain():
matches = list(StringPattern("word", validator=validators(left, right)).matches("xxxword xxx"))
assert len(matches) == 0
matches = list(StringPattern("word", validator=validators(left, right)).matches("xxx.wordxxx"))
assert len(matches) == 0
matches = list(StringPattern("word", validator=validators(left, right)).matches("xxx word_xxx"))
assert len(matches) == 1
matches = list(StringPattern("word", validator=validators(left, right)).matches("word"))
assert len(matches) == 1<|fim▁end|> | assert len(matches) == 1
matches = list(StringPattern("word", validator=left).matches("wordxxx"))
assert len(matches) == 1 |
<|file_name|>plugin.ts<|end_file_name|><|fim▁begin|>/**
* Created by johan on 11/11/2015.
*/
var router = require('express').Router();
var pwd = require('path');
router.get('/:name', function (req, res) {
var pluginName = req.params.name;
var data = req.query;
console.log(data);
var plugin: any = getModule(data.path, data.script);
console.log('plugin : ' + pluginName);
console.log('params : ' + JSON.stringify(req.body));
if (!plugin) {
console.log('Error in load plugin :' + pluginName);
res.send(400);
return;
}
plugin = new plugin.Index();
plugin.action(data, function (response) {
//console.log(response);
res.send(response);
//_actionPluginResponse(res, response);
}, JSON.parse(data.config));
});
var getModule = function (pathPlugin, script) {
var module = false;
var path = false;
path = pwd.normalize(pwd.join(__dirname, '..', pathPlugin, script));
//delete require.cache[require.resolve(module)]
//return require(module)
module = requireUncached(path);
//module = require(path);
//initModule(module);
//if (!module) {
// return false;
//}
return module;
};
var initModule = function (module, name) {
try {
if (!module) {
return;
}
if (module.initialized) {
return;
}
module.initialized = true;
console.log('info', 'initModule: ', name);
if (!module.init) {
return;
}
} catch (ex) {
console.log('warn', 'initModule: ' + ex.message);
}<|fim▁hole|>
function requireUncached(module) {
delete require.cache[require.resolve(module)]
return require(module)
}
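// Note: requireUncached drops the module from require.cache first, so every
// request re-reads the plugin from disk; plugins can be edited without a
// server restart, at the cost of one uncached require() per call.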
module.exports = router;<|fim▁end|> | }; |
<|file_name|>vec4_test.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package f32_test
import (
"testing"
"github.com/google/gapid/core/assert"
"github.com/google/gapid/core/math/f32"
)
func TestV4DSqrMagnitude(t *testing.T) {
assert := assert.To(t)
for _, test := range []struct {
v f32.Vec4
r float32
}{
{f32.Vec4{0, 0, 0, 0}, 0},
{f32.Vec4{1, 0, 0, 0}, 1},
{f32.Vec4{0, 2, 0, 0}, 4},
{f32.Vec4{0, 0, -3, 0}, 9},
{f32.Vec4{0, 0, 0, -4}, 16},
{f32.Vec4{1, 1, 1, 1}, 4},
} {
assert.For("%v.SqrMagnitude", test.v).That(test.v.SqrMagnitude()).Equals(test.r)
}
}
func TestV4DMagnitude(t *testing.T) {
assert := assert.To(t)
for _, test := range []struct {
v f32.Vec4
r float32
}{
{f32.Vec4{0, 0, 0, 0}, 0},
{f32.Vec4{1, 0, 0, 0}, 1},
{f32.Vec4{0, 2, 0, 0}, 2},
{f32.Vec4{0, 0, -3, 0}, 3},
{f32.Vec4{0, 0, 0, -4}, 4},
{f32.Vec4{1, 1, 1, 1}, 2},
} {
assert.For("%v.Magnitude", test.v).That(test.v.Magnitude()).Equals(test.r)
}
}
func TestV4DScale(t *testing.T) {
assert := assert.To(t)
for _, test := range []struct {
v f32.Vec4
s float32
r f32.Vec4
}{
{f32.Vec4{1, 0, 0, 0}, -1, f32.Vec4{-1, 0, 0, 0}},
{f32.Vec4{0, 2, 0, 0}, -2, f32.Vec4{0, -4, 0, 0}},
{f32.Vec4{0, 0, 3, 0}, -3, f32.Vec4{0, 0, -9, 0}},
{f32.Vec4{0, 0, 0, 4}, -4, f32.Vec4{0, 0, 0, -16}},
{f32.Vec4{1, 1, 1, 1}, 0, f32.Vec4{0, 0, 0, 0}},
} {
assert.For("%v.Scale", test.v).That(test.v.Scale(test.s)).Equals(test.r)
}
}
func TestV4DNormalize(t *testing.T) {
assert := assert.To(t)
for _, test := range []struct {
v f32.Vec4
r f32.Vec4
}{
{f32.Vec4{1, 0, 0, 0}, f32.Vec4{1, 0, 0, 0}},
{f32.Vec4{0, -2, 0, 0}, f32.Vec4{0, -1, 0, 0}},
{f32.Vec4{0, 0, 3, 0}, f32.Vec4{0, 0, 1, 0}},
{f32.Vec4{0, 0, 0, -4}, f32.Vec4{0, 0, 0, -1}},
{f32.Vec4{1, 2, -2, 4}, f32.Vec4{1. / 5, 2. / 5, -2. / 5, 4. / 5}},
} {
assert.For("%v.Normalize", test.v).That(test.v.Normalize()).Equals(test.r)
}
}
func TestV4DXYZ(t *testing.T) {
assert := assert.To(t)
for _, test := range []struct {
v f32.Vec4
r f32.Vec3
}{
{f32.Vec4{0, 0, 0, 0}, f32.Vec3{0, 0, 0}},
{f32.Vec4{1, 2, 3, 4}, f32.Vec3{1, 2, 3}},
} {
assert.For("%v.V3D", test.v).That(test.v.XYZ()).Equals(test.r)
}
}
func TestAdd4D(t *testing.T) {
assert := assert.To(t)
for _, test := range []struct {
a f32.Vec4
b f32.Vec4
r f32.Vec4
}{
{f32.Vec4{0, 0, 0, 0}, f32.Vec4{0, 0, 0, 0}, f32.Vec4{0, 0, 0, 0}},
{f32.Vec4{1, 2, 3, 4}, f32.Vec4{0, 0, 0, 0}, f32.Vec4{1, 2, 3, 4}},
{f32.Vec4{0, 0, 0, 0}, f32.Vec4{4, 3, 2, 1}, f32.Vec4{4, 3, 2, 1}},
{f32.Vec4{1, 2, 3, 4}, f32.Vec4{-1, -2, -3, -4}, f32.Vec4{0, 0, 0, 0}},
} {
assert.For("Add4D(%v, %v)", test.a, test.b).
That(f32.Add4D(test.a, test.b)).Equals(test.r)
}
}
func TestSub4D(t *testing.T) {
assert := assert.To(t)
for _, test := range []struct {
a f32.Vec4
b f32.Vec4
r f32.Vec4
}{
{f32.Vec4{0, 0, 0, 0}, f32.Vec4{0, 0, 0, 0}, f32.Vec4{0, 0, 0, 0}},
{f32.Vec4{1, 2, 3, 4}, f32.Vec4{0, 0, 0, 0}, f32.Vec4{1, 2, 3, 4}},
{f32.Vec4{0, 0, 0, 0}, f32.Vec4{4, 3, 2, 1}, f32.Vec4{-4, -3, -2, -1}},<|fim▁hole|> That(f32.Sub4D(test.a, test.b)).Equals(test.r)
}
}<|fim▁end|> | {f32.Vec4{1, 2, 3, 4}, f32.Vec4{-1, -2, -3, -4}, f32.Vec4{2, 4, 6, 8}},
} {
assert.For("Sub4D(%v, %v)", test.a, test.b). |
<|file_name|>Canon_1D.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import bpy
camera = bpy.context.edit_movieclip.tracking.camera
camera.sensor_width = 27.9
camera.units = 'MILLIMETERS'
camera.focal_length = 24.0
camera.pixel_aspect = 1
camera.k1 = 0.0
camera.k2 = 0.0
camera.k3 = 0.0<|fim▁end|> | |
<|file_name|>sample_input_reader.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""Sample Input Reader for map job."""
import random
import string
import time
from mapreduce import context
from mapreduce import errors
from mapreduce import operation
from mapreduce.api import map_job
# pylint: disable=invalid-name
# Counter name for number of bytes read.
COUNTER_IO_READ_BYTES = "io-read-bytes"
# Counter name for milliseconds spent reading data.
COUNTER_IO_READ_MSEC = "io-read-msec"
class SampleInputReader(map_job.InputReader):
"""A sample InputReader that generates random strings as output.
Primary usage is as an example InputReader that can be used for test
purposes.
"""
# Total number of entries this reader should generate.
COUNT = "count"
# Length of the generated strings.
STRING_LENGTH = "string_length"
# The default string length if one is not specified.
_DEFAULT_STRING_LENGTH = 10
def __init__(self, count, string_length):
"""Initialize input reader.
Args:
count: number of entries this shard should generate.
string_length: the length of generated random strings.
"""
self._count = count
self._string_length = string_length
def __iter__(self):
ctx = context.get()
while self._count:
self._count -= 1
start_time = time.time()
content = "".join(random.choice(string.ascii_lowercase)
for _ in range(self._string_length))
if ctx:
operation.counters.Increment(
COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)
operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)
yield content
@classmethod
def from_json(cls, state):
"""Inherit docs."""
return cls(state[cls.COUNT], state[cls.STRING_LENGTH])
def to_json(self):
"""Inherit docs."""
return {self.COUNT: self._count, self.STRING_LENGTH: self._string_length}
@classmethod
def split_input(cls, job_config):
"""Inherit docs."""
params = job_config.input_reader_params
count = params[cls.COUNT]
string_length = params.get(cls.STRING_LENGTH, cls._DEFAULT_STRING_LENGTH)
shard_count = job_config.shard_count
count_per_shard = count // shard_count
mr_input_readers = [
cls(count_per_shard, string_length) for _ in range(shard_count)]
left = count - count_per_shard*shard_count
if left > 0:
mr_input_readers.append(cls(left, string_length))
return mr_input_readers
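# Worked example of the split above: count=10, shard_count=3 gives
# count_per_shard=3 and left=1, i.e. shards generating [3, 3, 3, 1].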
<|fim▁hole|> super(SampleInputReader, cls).validate(job_config)
params = job_config.input_reader_params
# Validate count.
if cls.COUNT not in params:
raise errors.BadReaderParamsError("Must specify %s" % cls.COUNT)
if not isinstance(params[cls.COUNT], int):
raise errors.BadReaderParamsError("%s should be an int but is %s" %
(cls.COUNT, type(params[cls.COUNT])))
if params[cls.COUNT] <= 0:
raise errors.BadReaderParamsError("%s should be a positive int")
# Validate string length.
if cls.STRING_LENGTH in params and not (
isinstance(params[cls.STRING_LENGTH], int) and
params[cls.STRING_LENGTH] > 0):
raise errors.BadReaderParamsError("%s should be a positive int "
"but is %s" %
(cls.STRING_LENGTH,
params[cls.STRING_LENGTH]))<|fim▁end|> | @classmethod
def validate(cls, job_config):
"""Inherit docs.""" |