prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>application.js<|end_file_name|><|fim▁begin|>import Ember from "ember";
const { Route } = Ember;
const set = Ember.set;
export default Route.extend({
setupController() {
this.controllerFor('mixinStack').set('model', []);
let port = this.get('port');
port.on('objectInspector:updateObject', this, this.updateObject);
port.on('objectInspector:updateProperty', this, this.updateProperty);
port.on('objectInspector:updateErrors', this, this.updateErrors);
port.on('objectInspector:droppedObject', this, this.droppedObject);
port.on('deprecation:count', this, this.setDeprecationCount);
port.send('deprecation:getCount');
},
deactivate() {
let port = this.get('port');
port.off('objectInspector:updateObject', this, this.updateObject);
port.off('objectInspector:updateProperty', this, this.updateProperty);
port.off('objectInspector:updateErrors', this, this.updateErrors);
port.off('objectInspector:droppedObject', this, this.droppedObject);
port.off('deprecation:count', this, this.setDeprecationCount);
},
updateObject(options) {<|fim▁hole|> name = options.name,
property = options.property,
objectId = options.objectId,
errors = options.errors;
Ember.NativeArray.apply(details);
details.forEach(arrayize);
let controller = this.get('controller');
if (options.parentObject) {
controller.pushMixinDetails(name, property, objectId, details);
} else {
controller.activateMixinDetails(name, objectId, details, errors);
}
this.send('expandInspector');
},
setDeprecationCount(message) {
this.controller.set('deprecationCount', message.count);
},
updateProperty(options) {
const detail = this.controllerFor('mixinDetails').get('model.mixins').objectAt(options.mixinIndex);
const property = Ember.get(detail, 'properties').findProperty('name', options.property);
set(property, 'value', options.value);
},
updateErrors(options) {
const mixinDetails = this.controllerFor('mixinDetails');
if (mixinDetails.get('model.objectId') === options.objectId) {
mixinDetails.set('model.errors', options.errors);
}
},
droppedObject(message) {
let controller = this.get('controller');
controller.droppedObject(message.objectId);
},
actions: {
expandInspector() {
this.set("controller.inspectorExpanded", true);
},
toggleInspector() {
this.toggleProperty("controller.inspectorExpanded");
},
inspectObject(objectId) {
if (objectId) {
this.get('port').send('objectInspector:inspectById', { objectId: objectId });
}
},
setIsDragging(isDragging) {
this.set('controller.isDragging', isDragging);
},
refreshPage() {
// If the adapter defined a `reloadTab` method, it means
// they prefer to handle the reload themselves
if (typeof this.get('adapter').reloadTab === 'function') {
this.get('adapter').reloadTab();
} else {
// inject ember_debug as quickly as possible in chrome
// so that promises created on dom ready are caught
this.get('port').send('general:refresh');
this.get('adapter').willReload();
}
}
}
});
function arrayize(mixin) {
Ember.NativeArray.apply(mixin.properties);
}<|fim▁end|> | const details = options.details, |
<|file_name|>interfaces.js<|end_file_name|><|fim▁begin|><|fim▁hole|>"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=interfaces.js.map<|fim▁end|> | // @ag-grid-community/react v25.1.0 |
<|file_name|>ResearcherLink.java<|end_file_name|><|fim▁begin|>/**
Copyright 2008 University of Rochester
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package edu.ur.ir.researcher;
import edu.ur.ir.FileSystem;
import edu.ur.ir.FileSystemType;
import edu.ur.persistent.CommonPersistent;
/**
* This is a link in the researcher folder. It
* associates a URL link with a researcher
* folder.
*
* @author Sharmila Ranganathan
*
*/
public class ResearcherLink extends CommonPersistent implements FileSystem{
/** Eclipse generated id */
private static final long serialVersionUID = 3144484183634385274L;
/** Link */
private String url;
/** researcher folder the link belongs to. */
private ResearcherFolder parentFolder;
/** Researcher the link belongs to */
private Researcher researcher;
/** represents the file system type for this researcher link */
private FileSystemType fileSystemType = FileSystemType.RESEARCHER_LINK;
/**
* Package protected constructor.
*/
ResearcherLink(){};
/**
* Create a researcher link with a null researcher folder. This means this
* is a root researcher link.
*
* @param linkVersion
*/
ResearcherLink(Researcher researcher, String link)
{
setResearcher(researcher);
setUrl(link);
}
/**
* Create a link between a folder and link.
*
* @param link - link to create a link with
* @param parentFolder - folder the link is in.
*/
ResearcherLink(Researcher researcher, ResearcherFolder parentFolder, String link)
{
if(link == null)
{
throw new IllegalStateException("link cannot be null");
}
setResearcher(researcher);
setUrl(link);
setParentFolder(parentFolder);
}
/**
* Returns the path for this linkVersion.
*
* The path is the path of the parent folder
*
* @return
*/
public String getPath()
{
String path = null;
if(parentFolder == null)
{
path = PATH_SEPERATOR;
}
else
{
path = parentFolder.getFullPath();
}
return path;
}
/**
* Overridden to string method.
*
* @see java.lang.Object#toString()
*/
public String toString()
{
StringBuffer sb = new StringBuffer("[ id = ");
sb.append(id);
sb.append( " path = ");
sb.append(getPath());
sb.append( " parent Folder = ");
sb.append(parentFolder);
sb.append(" name = ");
sb.append(name);
sb.append(" link = ");
sb.append(url);
sb.append("]");
return sb.toString();
}
/**
* Get the full path of this linkVersion. If there is
* no parent folder the path is just the name of
* the link.
*
* @return the full path.
*/
public String getFullPath()
{
return getPath() + getName();
}
/**
* Hash code for a researcher link.
*
* @see java.lang.Object#hashCode()
*/
public int hashCode()
{
int value = 0;
value += parentFolder == null ? 0 : parentFolder.hashCode();
value += getName() == null ? 0 : getName().hashCode();
value += researcher == null ? 0 : researcher.hashCode();
return value;
}
/**
* Equals method for a researcher link.
*
* @see java.lang.Object#equals(java.lang.Object)
*/
public boolean equals(Object o)
{
if (this == o) return true;
if (!(o instanceof ResearcherLink)) return false;
final ResearcherLink other = (ResearcherLink) o;
if( (other.getName() != null && !other.getName().equals(getName())) ||
(other.getName() == null && getName() != null ) ) return false;
if( (other.getResearcher() != null && !other.getResearcher().equals(getResearcher())) ||
(other.getResearcher() == null && getResearcher() != null ) ) return false;
if( (other.getFullPath() != null && !other.getFullPath().equals(getFullPath())) ||
(other.getFullPath() == null && getFullPath() != null ) ) return false;
return true;
}
/**
* Returns the name of the link.
*
* @see edu.ur.simple.type.NameAware#getName()
*/
public String getName() {
return name;
}
/**
* Returns the description of the link.
*
* @see edu.ur.simple.type.DescriptionAware#getDescription()
*/
public String getDescription() {
return description;
}
/* (non-Javadoc)
* @see edu.ur.ir.FileSystem#getFileSystemType()
*/
public FileSystemType getFileSystemType() {
return fileSystemType;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public ResearcherFolder getParentFolder() {
return parentFolder;
}
<|fim▁hole|>
public Researcher getResearcher() {
return researcher;
}
public void setResearcher(Researcher researcher) {
this.researcher = researcher;
}
}<|fim▁end|> | public void setParentFolder(ResearcherFolder parentFolder) {
this.parentFolder = parentFolder;
}
|
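A short illustration of the path rules that getPath/getFullPath describe above, as a sketch in Python (folder names hypothetical): a root link resolves to the separator itself, a nested link to its parent folder's full path plus the link name.

PATH_SEPERATOR = "/"  # spelling follows the identifier used in the source

def full_path(parent_full_path, name):
    # getFullPath() = getPath() + getName(); getPath() falls back to the
    # separator when there is no parent folder.
    path = parent_full_path if parent_full_path else PATH_SEPERATOR
    return path + name

assert full_path(None, "myLink") == "/myLink"
assert full_path("/research/links/", "myLink") == "/research/links/myLink"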
<|file_name|>test_segmentgroup.py<|end_file_name|><|fim▁begin|># -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- http://www.mdanalysis.org
# Copyright (c) 2006-2016 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
from __future__ import absolute_import
from numpy.testing import (
dec,
assert_,
assert_equal,
)
from unittest import skip
import MDAnalysis as mda
from MDAnalysisTests.datafiles import PSF, DCD
from MDAnalysisTests import parser_not_found
class TestSegmentGroup(object):
# Legacy tests from before 363
@dec.skipif(parser_not_found('DCD'),
'DCD parser not available. Are you using python 3?')
def setUp(self):
"""Set up the standard AdK system in implicit solvent."""
self.universe = mda.Universe(PSF, DCD)
self.g = self.universe.atoms.segments
def test_newSegmentGroup(self):
"""test that slicing a SegmentGroup returns a new SegmentGroup (Issue 135)"""
g = self.universe.atoms.segments
newg = g[:]
assert_(isinstance(newg, mda.core.groups.SegmentGroup))
assert_equal(len(newg), len(g))
def test_n_atoms(self):
assert_equal(self.g.n_atoms, 3341)
def test_n_residues(self):
assert_equal(self.g.n_residues, 214)
def test_resids_dim(self):
assert_equal(len(self.g.resids), len(self.g))
for seg, resids in zip(self.g, self.g.resids):
assert_(len(resids) == len(seg.residues))
assert_equal(seg.residues.resids, resids)
def test_resnums_dim(self):
assert_equal(len(self.g.resnums), len(self.g))
for seg, resnums in zip(self.g, self.g.resnums):
assert_(len(resnums) == len(seg.residues))
assert_equal(seg.residues.resnums, resnums)
def test_segids_dim(self):
assert_equal(len(self.g.segids), len(self.g))
def test_set_segids(self):
s = self.universe.select_atoms('all').segments<|fim▁hole|> err_msg="failed to set_segid on segments")
def test_set_segid_updates_self(self):
g = self.universe.select_atoms("resid 10:18").segments
g.segids = 'ADK'
assert_equal(g.segids, ['ADK'],
err_msg="old selection was not changed in place after set_segid")
def test_atom_order(self):
assert_equal(self.universe.segments.atoms.indices,
sorted(self.universe.segments.atoms.indices))<|fim▁end|> | s.segids = 'ADK'
assert_equal(self.universe.segments.segids, ['ADK'], |
<|file_name|>runtime_test.rs<|end_file_name|><|fim▁begin|>// use std::env;
// #[test]
// fn trybuild_tests()
// {
// println!( "current_dir : {:?}", env::current_dir().unwrap() );
// // let t = trybuild::TestCases::new();<|fim▁hole|>// // t.pass( "rust/test/former/test/basic_runtime.rs" );
// }
mod basic_runtime { include!( "./all/basic_runtime.rs" ); }<|fim▁end|> | |
<|file_name|>align.rs<|end_file_name|><|fim▁begin|>s_no_extra_traits! {
#[allow(missing_debug_implementations)]
#[repr(align(8))]
pub struct max_align_t {
priv_: [f64; 3]<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(box_syntax)]
#![feature(fnbox)]
#![feature(drain)]
#![feature(catch_panic)]
#![deny(missing_docs, dead_code)]<|fim▁hole|>//! when attempting to do concurrent/parallel programming.
//!
//! This includes a thread pool, a multi-producer/multi-consumer queue, a task runner, and
//! a publish/subscribe queue.
extern crate canal;
pub mod task_runner;
pub mod thread_pool;<|fim▁end|> |
//! Ferrous Threads Crate
//!
//! This crate contains a number of different structs and functions that are of use |
<|file_name|>demo_satpy_fog.py<|end_file_name|><|fim▁begin|>from __future__ import division
from __future__ import print_function
from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw
from PIL import ImageEnhance
import nwcsaf
import numpy as np
from satpy import Scene, find_files_and_readers
from datetime import datetime, timedelta
from copy import deepcopy
import netCDF4
import subprocess
import sys
import inspect
import logging
LOG = logging.getLogger(__name__)
LOG.setLevel(50)
#CRITICAL 50 #ERROR 40 #WARNING 30 #INFO 20 #DEBUG 10 #NOTSET 0
import matplotlib.pyplot as plt
#from satpy.utils import debug_on
#debug_on()
##import warnings
#warnings.filterwarnings("ignore")
def get_last_SEVIRI_date(RSS, delay=0, time_slot=None):
'''
input: RSS
logical variable True or False
specifies if you like get
(RSS=True) the last rapid scan observation date (every 5 min)
(RSS=False) the last full disk observation date (every 15 min)
(delay=INT) number of minutes to substract before finding the date (good if data needs a few min before arriving)
(time_slot) If not given, take last time
otherwise search scanning time of SEVIRI before given time_slot
output:
date structure with the date of the last SEVIRI observation
'''
from time import gmtime
LOG.info("*** start get_last_SEVIRI_date ("+inspect.getfile(inspect.currentframe())+")")
# if rapid scan service than 5min otherwise 15
if RSS:
nmin = 5
else:
nmin = 15
if (time_slot is None):
# get the current time
gmt = gmtime()
#print ("GMT time: "+ str(gmt))
# or alternatively
# utc = datetime.utcnow()
# convert to datetime format
t0 = datetime(gmt.tm_year, gmt.tm_mon, gmt.tm_mday, gmt.tm_hour, gmt.tm_min, 0)
LOG.debug(" current time = "+str(t0))
else:
t0 = time_slot + timedelta(seconds=nmin*60) # we substract one scanning time later, so we can add it here
LOG.debug(" reference time = "+str(t0))
# apply delay (if it usually takes 5 min for the data to arrive, use delay 5min)
if delay != 0:
t0 -= timedelta(minutes=delay)
LOG.debug(" applying delay "+str(delay)+" min delay, time = "+ str(t0))
LOG.debug(" round by scanning time "+str(nmin)+" min, RSS = "+str(RSS))
#tm_min2 = gmt.tm_min - (gmt.tm_min % nmin)
minute1 = t0.minute - (t0.minute % nmin)
# define current date rounded by one scan time
#date1 = datetime(gmt.tm_year, gmt.tm_mon, gmt.tm_mday, gmt.tm_hour, tm_min2 , 0)
t1 = datetime(t0.year, t0.month, t0.day, t0.hour, minute1, 0)
LOG.debug(" end time of last scan: "+str(t1))
# substracting one scan time (as the start time of scan is returned)
t1 -= timedelta(seconds=nmin*60)
LOG.info(" start time of last scan: "+str(t1))
return t1
def rewrite_xy_axis(netCDF_file):
print("... re-place values on the x and y axis with lon/lat values in "+netCDF_file)
ds = netCDF4.Dataset(netCDF_file, 'r+')
lat = ds["latitude"][:,0]
ds["y"][:] = lat.data
ds["y"].units = 'Degrees North'
lon = ds["longitude"][0,:]
ds["x"][:] = lon.data
ds["x"].units = 'Degrees East'
ds.close()
###############################################################################################
###############################################################################################
if __name__ == '__main__':
sat='MSG4'
if len(sys.argv) == 1:
start_time = get_last_SEVIRI_date(False, delay=6)
base_dir_sat = "/data/cinesat/in/eumetcast1/"<|fim▁hole|> month = int(sys.argv[2])
day = int(sys.argv[3])
hour = int(sys.argv[4])
minute = int(sys.argv[5])
start_time = datetime(year, month, day, hour, minute)
base_dir_sat = start_time.strftime("/data/COALITION2/database/meteosat/radiance_HRIT/case-studies/%Y/%m/%d/")
#base_dir_sat = start_time.strftime("/data/COALITION2/database/meteosat/radiance_HRIT/%Y/%m/%d/")
base_dir_nwc = start_time.strftime("/data/OWARNA/hau/database/meteosat/SAFNWC/%Y/%m/%d/CT/")
else:
start_time = datetime(2020, 10, 7, 16, 0)
base_dir_sat = start_time.strftime("/data/COALITION2/database/meteosat/radiance_HRIT/%Y/%m/%d/")
base_dir_nwc = start_time.strftime("/data/COALITION2/database/meteosat/SAFNWC_v2016/%Y/%m/%d/CT/")
print("... processing time ", start_time)
show_interactively=False
save_black_white_png=False
print("")
print("")
print("*** Creating LSCL (low stratus confidence level) product")
print("")
# read MSG (full disk service) L2
#################################
print("... read "+sat+" L1.5 data")
print(" search for HRIT files in "+base_dir_sat)
files_sat = find_files_and_readers(sensor='seviri',
start_time=start_time, end_time=start_time,
base_dir=base_dir_sat,
reader='seviri_l1b_hrit')
files = deepcopy(files_sat['seviri_l1b_hrit'])
#print(" found SEVIRI files: ", files_sat)
for f in files:
if not (sat in f):
files_sat['seviri_l1b_hrit'].remove(f)
continue
if ("HRV" in f) or ("VIS006" in f) or ("VIS008" in f) or ("IR_016" in f) or ("IR_039" in f):
files_sat['seviri_l1b_hrit'].remove(f)
continue
if ("WV_062" in f) or ("WV_073" in f) or ("IR_097" in f) or ("IR_108" in f) or ("IR_134" in f):
files_sat['seviri_l1b_hrit'].remove(f)
continue
global_scene = Scene(reader="seviri_l1b_hrit", filenames=files_sat)
global_scene.load(['IR_087','IR_120'])
# read NWCSAF files
########################
print("... read "+sat+" NWCSAF CTTH")
print(" search for NWCSAF files in "+base_dir_nwc)
files_nwc = find_files_and_readers(sensor='seviri',
start_time=start_time, end_time=start_time,
base_dir=base_dir_nwc, reader='nwcsaf-geo')
print(" found NWCSAF files: ", files_nwc)
files = deepcopy(files_nwc['nwcsaf-geo'])
for f in files:
# remove files from other satellites
if not (sat in f):
files_nwc['nwcsaf-geo'].remove(f)
continue
# remove CTTH files
if ("CTTH" in f):
files_nwc['nwcsaf-geo'].remove(f)
continue
global_nwc = Scene(filenames=files_nwc)
global_nwc.load(['ct']) # "CT"
# loop over areas, resample and create products
# create netCDF file for area cosmo1
# create png file for area cosmo1_150 (50% more pixels)
############################################################
#for area in ['SeviriDisk00Cosmo',"cosmo1x150"]:
#for area in ['cosmo1', 'cosmo1eqc3km']:
for area in ['cosmo1eqc3km']:
#for area in ['cosmo1x150', 'cosmo1eqc3km']:
# resample MSG L2
##################
print("")
print("=======================")
print("resample to "+area)
local_scene = global_scene.resample(area)
# fake a new channel
print("fake a new channel")
local_scene['lscl'] = deepcopy(local_scene['IR_120'])
#local_scene['lscl'].wavelength=""
#local_scene['lscl'].standard_name="low_stratus_confidence_level"
#local_scene['lscl'].calibration="brightness_temperature_difference"
#print(local_scene['IR_120'])
#print(dir(local_scene['IR_120']))
#print(local_scene['IR_120'].standard_name)
#print(type(local_scene['IR_120'].standard_name))
#local_scene['lscl'].standard_name = "toa_brightness_temperature_difference"
#print(local_scene['lscl'])
##############################################
# calculate lscl "low stratus confidence level
# see MSc Thesis of Anna Ehrler (chapter 3.2.1 to 3.2.2)
##############################################
th_liquid_cloud = 1.8 # K
# cloud_confidence_range
ccr = 1.0 # K
local_scene['lscl'].values = (th_liquid_cloud - (local_scene['IR_120']-local_scene['IR_087']) - ccr) / (-2. * ccr)
#local_scene['lscl'].area_def = local_scene['IR_120'].area_def
# print(global_nwc)
local_nwc = global_nwc.resample(area)
# delete values for high clouds
###########################################
# !!! ONLY NWCSAF VERSION 2016 and 2018 !!!
# !!! Numbers are different for v2013
# ct:comment = "1: Cloud-free land; 2: Cloud-free sea; 3: Snow over land; 4: Sea ice; 5: Very low clouds;
# 6: Low clouds; 7: Mid-level clouds; 8: High opaque clouds; 9: Very high opaque clouds;
# 10: Fractional clouds; 11: High semitransparent thin clouds; 12: High semitransparent meanly thick clouds;
# 13: High semitransparent thick clouds; 14: High semitransparent above low or medium clouds; 15: High semitransparent above snow/ice" ;
for _ct_ in [7,8,9,10,11,12,13,14,15]:
print("replace cloud type",_ct_)
local_scene['lscl'].values = np.where(local_nwc['ct'].values==_ct_, np.nan, local_scene['lscl'].values)
if show_interactively:
fig, ax = plt.subplots(figsize=(13, 7))
pos = plt.imshow(local_scene['lscl'].values, vmin=0, vmax=1)
fig.colorbar(pos)
plt.title(start_time.strftime('low stratus confidence level, %y-%m-%d %H:%MUTC'))
plt.show()
if save_black_white_png:
local_scene.save_dataset('lscl', './lscl_'+area+'.png')
print(dir(local_scene.save_dataset))
print('display ./lscl_'+area+'.png &')
# save png file for SATLive
##############################
if area=="cosmo1x150" or area=="cosmo1":
png_file = start_time.strftime('/data/cinesat/out/MSG_lscl-'+area+'_%y%m%d%H%M.png')
from trollimage.colormap import spectral, greys, ylorrd, rdgy
imgarr = np.array(local_scene['lscl'].data)
from trollimage.image import Image as Timage
img = Timage(imgarr, mode="L")
img.colorize( rdgy.reverse() )
img.save(png_file)
# local_scene.save_dataset( 'lscl', png_file )
from pyresample.utils import load_area
swiss = load_area("/opt/users/hau/monti-pytroll/etc/areas.def", area)
from pycoast import ContourWriterAGG
cw = ContourWriterAGG('/opt/users/common/shapes')
cw.add_borders_to_file(png_file, swiss, outline="green", resolution='i', level=3, width=2)
img = Image.open(png_file)
draw = ImageDraw.Draw(img)
draw.rectangle([(0, 0), (img.size[0]*0.7, 25)], fill=(0,0,0,200))
font = ImageFont.truetype("/usr/openv/java/jre/lib/fonts/LucidaTypewriterBold.ttf", 18)
title = start_time.strftime(" "+sat[0:3]+"-"+sat[3]+', %y-%m-%d %H:%MUTC, low stratus confidence level')
draw.text( (1, 1), title, "green" , font=font) # (255,255,255)
img.save(png_file)
print("display " + png_file +" &")
if area=="cosmo1x150":
scpID="-i ~/.ssh/id_rsa_las"
scpOutputDir="las@zueub241:/srn/las/www/satellite/DATA/MSG_"+"lscl"+"-"+area+"_/"
scp_command = "/usr/bin/scp "+scpID+" "+png_file+" "+scpOutputDir+" 2>&1 &"
print(scp_command)
subprocess.call(scp_command, shell=True)
elif area=="cosmo1":
scpID="-i ~/.ssh/id_rsa_tsa"
scpOutputDir="[email protected]:/scratch/hamann/DayNightFog/"
print("... scp "+png_file+" to "+scpOutputDir)
subprocess.call("/usr/bin/scp "+scpID+" "+png_file+" "+scpOutputDir+" 2>&1 &", shell=True)
# save netCDF file for APN
##############################
if area=='cosmo1eqc3km':
netCDF_file = start_time.strftime('/data/cinesat/out/MSG_lscl-'+area+'_%y%m%d%H%M.nc')
print("... save result in: "+ netCDF_file)
print("include_lonlats=True")
local_scene.save_dataset('lscl', netCDF_file, include_lonlats=True, writer='cf',
exclude_attrs=['raw_metadata'], epoch='seconds since 1970-01-01 00:00:00') #, writer='cf'
#import netCDF4 as nc
#file_input = nc.Dataset(netCDF_file, 'r+')
#print(file_input.variables.keys())
#lonlats = local_scene['lscl'].area.get_lonlats()
#lons = file_input.createVariable('longitues', 'single', ('y', 'x'))
#lats = file_input.createVariable('latitudes', 'single', ('y', 'x'))
#lons[:] = lonlats[0][:,:]
#lats[:] = lonlats[1][:,:]
#local_scene.save_datasets(['lscl'], filename=netCDF_file, include_lonlats=True) #, writer='cf'
print("... ncview " + netCDF_file +" &")
rewrite_xy_axis(netCDF_file)
scpID="-i ~/.ssh/id_rsa_tsa"
#scpOutputDir="[email protected]:/scratch/hamann/DayNightFog/"
scpOutputDir="[email protected]:/scratch/hamann/DayNightFog_Filter-CT-7-15/"
print("... scp "+netCDF_file+" to "+scpOutputDir)
subprocess.call("/usr/bin/scp "+scpID+" "+netCDF_file+" "+scpOutputDir+" 2>&1 &", shell=True)<|fim▁end|> | base_dir_nwc = "/data/cinesat/in/eumetcast1/"
#base_dir_nwc = "/data/cinesat/in/safnwc_v2016/"
elif len(sys.argv) == 6:
year = int(sys.argv[1]) |
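A small worked sketch of the scan-time rounding performed by get_last_SEVIRI_date above, assuming the full-disk service (nmin = 15) and a hypothetical reference time:

from datetime import datetime, timedelta

nmin = 15                                # full-disk scanning period in minutes
t0 = datetime(2020, 10, 7, 16, 7)        # reference time
# round down to the scan grid: end time of the last completed scan
t1 = t0.replace(minute=t0.minute - t0.minute % nmin, second=0, microsecond=0)
t1 -= timedelta(minutes=nmin)            # subtract one scan to get its start time
print(t1)                                # 2020-10-07 15:45:00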
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015-2020 The Sipwise Team - http://sipwise.com
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT<|fim▁hole|>#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from .gri import GerritRepoInfo # noqa
from .jbi import JenkinsBuildInfo # noqa
from .wni import WorkfrontNoteInfo # noqa<|fim▁end|> | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# libmypaint documentation build configuration file, created by
# sphinx-quickstart2 on Wed Jun 13 23:40:45 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Breathe setup, for integrating doxygen content
extensions.append('breathe')
doxyxml_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../doxygen')
print(doxyxml_dir)
breathe_projects = {"libmypaint": doxyxml_dir}
breathe_default_project = "libmypaint"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'libmypaint'
copyright = u'2012, MyPaint Development Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
<|fim▁hole|># -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'libmypaintdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'libmypaint.tex', u'libmypaint Documentation',
u'MyPaint Development Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'libmypaint', u'libmypaint Documentation',
[u'MyPaint Development Team'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'libmypaint', u'libmypaint Documentation',
u'MyPaint Development Team', 'libmypaint', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}<|fim▁end|> | |
<|file_name|>1417_Weighing_Problem.py<|end_file_name|><|fim▁begin|># 1417. Weighing Problem
# Given n coins, each weighing 10g, except one coin that weighs 11g. You
# have a balance scale that weighs accurately. Find the minimum number of
# weighings needed, in the worst case, to be sure of finding the 11g coin.
#
# Example
# Given n = 3, return 1.
#
# Explanation:
# Place two of the gold coins on the two ends of the balance. If the two
# ends are level, the third gold coin is the 11g one; otherwise the
# heavier one is 11g.
# Given n = 4, return 2.
#
# Explanation:
# Four gold coins can be divided into two groups and placed on both ends of
# the scale. According to the weighing results, select the two heavy gold<|fim▁hole|># class Solution:
# """
# @param n: The number of coins
# @return: The Minimum weighing times int worst case
# """
# def minimumtimes(self, n):
# # Write your code here<|fim▁end|> | # coins and place them on the two ends of the balance for the second
# weighing. The gold coin at the heavy end is 11g gold coins.
|
<|file_name|>EquippedBadgeSettings_pb2.py<|end_file_name|><|fim▁begin|># Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Settings/Master/EquippedBadgeSettings.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message<|fim▁hole|>from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Settings/Master/EquippedBadgeSettings.proto',
package='POGOProtos.Settings.Master',
syntax='proto3',
serialized_pb=_b('\n6POGOProtos/Settings/Master/EquippedBadgeSettings.proto\x12\x1aPOGOProtos.Settings.Master\"y\n\x15\x45quippedBadgeSettings\x12\x1f\n\x17\x65quip_badge_cooldown_ms\x18\x01 \x01(\x03\x12\x1f\n\x17\x63\x61tch_probability_bonus\x18\x02 \x03(\x02\x12\x1e\n\x16\x66lee_probability_bonus\x18\x03 \x03(\x02\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_EQUIPPEDBADGESETTINGS = _descriptor.Descriptor(
name='EquippedBadgeSettings',
full_name='POGOProtos.Settings.Master.EquippedBadgeSettings',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='equip_badge_cooldown_ms', full_name='POGOProtos.Settings.Master.EquippedBadgeSettings.equip_badge_cooldown_ms', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='catch_probability_bonus', full_name='POGOProtos.Settings.Master.EquippedBadgeSettings.catch_probability_bonus', index=1,
number=2, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='flee_probability_bonus', full_name='POGOProtos.Settings.Master.EquippedBadgeSettings.flee_probability_bonus', index=2,
number=3, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=86,
serialized_end=207,
)
DESCRIPTOR.message_types_by_name['EquippedBadgeSettings'] = _EQUIPPEDBADGESETTINGS
EquippedBadgeSettings = _reflection.GeneratedProtocolMessageType('EquippedBadgeSettings', (_message.Message,), dict(
DESCRIPTOR = _EQUIPPEDBADGESETTINGS,
__module__ = 'POGOProtos.Settings.Master.EquippedBadgeSettings_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Settings.Master.EquippedBadgeSettings)
))
_sym_db.RegisterMessage(EquippedBadgeSettings)
# @@protoc_insertion_point(module_scope)<|fim▁end|> | from google.protobuf import reflection as _reflection |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>"use strict";
$(function() {
$(document).ajaxError((event, xhr) => {
console.error(xhr.responseJSON.error_message);
});
});
function basename(str) {
let base;
base = str.substring(str.lastIndexOf("\/") + 1);
base = str.substring(str.lastIndexOf("\\") + 1);
return base;
}
function go_home() {
window.location.replace("/");
}
function update_upload_text() {
let upload_filename = basename($("#file-upload").val());
$("#file-upload-text").val(upload_filename);
}
function show_file_contents(element) {
window.location.replace(window.location.pathname + "/" + $(element).text());
}
function analyze(element) {
let filename = $(element).prev().text();
$.when($.ajax(`analyze/${filename}`)).then(data => {
console.log(data);<|fim▁hole|><|fim▁end|> | });
} |
<|file_name|>SpinnerNumberField.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2014 Bruno Medeiros and other Contributors.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Bruno Medeiros - initial API and implementation
*******************************************************************************/
package melnorme.util.swt.components.fields;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Spinner;
import melnorme.util.swt.SWTLayoutUtil;
import melnorme.util.swt.SWTUtil;
import melnorme.util.swt.components.FieldComponent;
import melnorme.util.swt.components.LabelledFieldComponent;
public class SpinnerNumberField extends LabelledFieldComponent<Integer> {
protected Spinner spinner;
public SpinnerNumberField(String labelText) {
super(labelText, Option_AllowNull.NO, 0);
}
@Override
public int getPreferredLayoutColumns() {
return 2;
}
@Override
protected void createContents_all(Composite topControl) {
createContents_Label(topControl);
createContents_Spinner(topControl);
}
@Override
protected void createContents_layout() {
SWTLayoutUtil.layout2Controls_spanLast(label, spinner);
}
protected void createContents_Spinner(Composite parent) {
spinner = createFieldSpinner(this, parent, SWT.BORDER);
}
public Spinner getSpinner() {
return spinner;
}
@Override
public Spinner getFieldControl() {
return spinner;
}
@Override
protected void doUpdateComponentFromValue() {
spinner.setSelection(getFieldValue());
}
public SpinnerNumberField setValueMinimum(int minimum) {
spinner.setMinimum(minimum);
return this;
<|fim▁hole|> return this;
}
public SpinnerNumberField setValueIncrement(int increment) {
spinner.setIncrement(increment);
return this;
}
@Override
public void setEnabled(boolean enabled) {
SWTUtil.setEnabledIfOk(label, enabled);
SWTUtil.setEnabledIfOk(spinner, enabled);
}
/* ----------------- ----------------- */
public static Spinner createFieldSpinner(FieldComponent<Integer> field, Composite parent, int style) {
final Spinner spinner = new Spinner(parent, style);
spinner.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
field.setFieldValueFromControl(spinner.getSelection());
}
});
return spinner;
}
}<|fim▁end|> | }
public SpinnerNumberField setValueMaximum(int maximum) {
spinner.setMaximum(maximum);
|
<|file_name|>new_data_to_atlas_space.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import os
import os.path
from nipype.interfaces.utility import IdentityInterface, Function
from nipype.interfaces.io import SelectFiles, DataSink, DataGrabber
from nipype.pipeline.engine import Workflow, Node, MapNode
from nipype.interfaces.minc import Resample, BigAverage, VolSymm
import argparse
def create_workflow(
xfm_dir,
xfm_pattern,
atlas_dir,
atlas_pattern,
source_dir,
source_pattern,
work_dir,
out_dir,
name="new_data_to_atlas_space"
):
wf = Workflow(name=name)
wf.base_dir = os.path.join(work_dir)
datasource_source = Node(
interface=DataGrabber(
sort_filelist=True
),
name='datasource_source'
)
datasource_source.inputs.base_directory = os.path.abspath(source_dir)
datasource_source.inputs.template = source_pattern
<|fim▁hole|> datasource_xfm = Node(
interface=DataGrabber(
sort_filelist=True
),
name='datasource_xfm'
)
datasource_xfm.inputs.base_directory = os.path.abspath(xfm_dir)
datasource_xfm.inputs.template = xfm_pattern
datasource_atlas = Node(
interface=DataGrabber(
sort_filelist=True
),
name='datasource_atlas'
)
datasource_atlas.inputs.base_directory = os.path.abspath(atlas_dir)
datasource_atlas.inputs.template = atlas_pattern
resample = MapNode(
interface=Resample(
sinc_interpolation=True
),
name='resample_',
iterfield=['input_file', 'transformation']
)
wf.connect(datasource_source, 'outfiles', resample, 'input_file')
wf.connect(datasource_xfm, 'outfiles', resample, 'transformation')
wf.connect(datasource_atlas, 'outfiles', resample, 'like')
bigaverage = Node(
interface=BigAverage(
output_float=True,
robust=False
),
name='bigaverage',
iterfield=['input_file']
)
wf.connect(resample, 'output_file', bigaverage, 'input_files')
datasink = Node(
interface=DataSink(
base_directory=out_dir,
container=out_dir
),
name='datasink'
)
wf.connect([(bigaverage, datasink, [('output_file', 'average')])])
wf.connect([(resample, datasink, [('output_file', 'atlas_space')])])
wf.connect([(datasource_xfm, datasink, [('outfiles', 'transforms')])])
return wf
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--name",
type=str,
required=True
)
parser.add_argument(
"--xfm_dir",
type=str,
required=True
)
parser.add_argument(
"--xfm_pattern",
type=str,
required=True
)
parser.add_argument(
"--source_dir",
type=str,
required=True
)
parser.add_argument(
"--source_pattern",
type=str,
required=True
)
parser.add_argument(
"--atlas_dir",
type=str,
required=True
)
parser.add_argument(
"--atlas_pattern",
type=str,
required=True
)
parser.add_argument(
"--work_dir",
type=str,
required=True
)
parser.add_argument(
"--out_dir",
type=str,
required=True
)
parser.add_argument(
'--debug',
dest='debug',
action='store_true',
help='debug mode'
)
args = parser.parse_args()
if args.debug:
from nipype import config
config.enable_debug_mode()
config.set('execution', 'stop_on_first_crash', 'true')
config.set('execution', 'remove_unnecessary_outputs', 'false')
config.set('execution', 'keep_inputs', 'true')
config.set('logging', 'workflow_level', 'DEBUG')
config.set('logging', 'interface_level', 'DEBUG')
config.set('logging', 'utils_level', 'DEBUG')
wf = create_workflow(
xfm_dir=os.path.abspath(args.xfm_dir),
xfm_pattern=args.xfm_pattern,
atlas_dir=os.path.abspath(args.atlas_dir),
atlas_pattern=args.atlas_pattern,
source_dir=os.path.abspath(args.source_dir),
source_pattern=args.source_pattern,
work_dir=os.path.abspath(args.work_dir),
out_dir=os.path.abspath(args.out_dir),
name=args.name
)
wf.run(
plugin='MultiProc',
plugin_args={
'n_procs': int(
os.environ["NCPUS"] if "NCPUS" in os.environ else os.cpu_count
)
}
)<|fim▁end|> | |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# EnergiScore Web Documentation documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 9 11:19:34 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'EnergiStream API Client'
copyright = u'2015, MelRok LLC'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1'
# The full version, including alpha/beta/rc tags.
release = '0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.<|fim▁hole|>
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.
import sphinx_rtd_theme
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'EnergiScoreWebDocumentationdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'EnergiScoreWebDocumentation.tex', u'EnergiScore Web Documentation Documentation',
u'Harrison Fross', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'energiscorewebdocumentation', u'EnergiScore Web Documentation Documentation',
[u'Harrison Fross'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'EnergiScoreWebDocumentation', u'EnergiScore Web Documentation Documentation',
u'Harrison Fross', 'EnergiScoreWebDocumentation', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}<|fim▁end|> | pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = [] |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import decimal
import json
from datetime import date, datetime, time
import dateutil.parser
from django.conf import settings
from django.db.models import Model
from versions.models import Versionable
DEFAULTS = {
'user_mail_required': {
'default': 'False',
'type': bool
},
'max_items_per_order': {
'default': '10',
'type': int
},
'attendee_names_asked': {
'default': 'True',
'type': bool
},
'attendee_names_required': {
'default': 'False',
'type': bool
},
'reservation_time': {
'default': '30',
'type': int
},
'payment_term_days': {
'default': '14',
'type': int
},
'payment_term_last': {
'default': None,
'type': datetime,
},
'payment_term_accept_late': {
'default': 'True',
'type': bool
},
'presale_start_show_date': {
'default': 'True',
'type': bool
},
'show_items_outside_presale_period': {
'default': 'True',
'type': bool
},
'timezone': {
'default': settings.TIME_ZONE,
'type': str
},
'locales': {
'default': json.dumps([settings.LANGUAGE_CODE]),
'type': list
},
'locale': {
'default': settings.LANGUAGE_CODE,
'type': str
},
'show_date_to': {
'default': 'True',
'type': bool
},
'show_times': {
'default': 'True',
'type': bool
},
'ticket_download': {
'default': 'False',
'type': bool
},
'ticket_download_date': {
'default': None,
'type': datetime
},
'last_order_modification_date': {
'default': None,
'type': datetime
},
'contact_mail': {
'default': None,
'type': str
},
'imprint_url': {
'default': None,
'type': str
},
'mail_prefix': {
'default': None,
'type': str
},
'mail_from': {
'default': settings.MAIL_FROM,
'type': str
}
}
class SettingsProxy:
"""
This objects allows convenient access to settings stored in the
EventSettings/OrganizerSettings database model. It exposes all settings as
properties and it will do all the nasty inheritance and defaults stuff for
you. It will return None for non-existing properties.
"""
def __init__(self, obj, parent=None, type=None):
self._obj = obj
self._parent = parent
self._cached_obj = None
self._type = type
def _cache(self):
if self._cached_obj is None:
self._cached_obj = {}
for setting in self._obj.setting_objects.current.all():
self._cached_obj[setting.key] = setting
return self._cached_obj
def _flush(self):
self._cached_obj = None
def _unserialize(self, value, as_type):
if as_type is not None and isinstance(value, as_type):
return value
elif value is None:
return None
elif as_type == int or as_type == float or as_type == decimal.Decimal:
return as_type(value)
elif as_type == dict or as_type == list:
return json.loads(value)
elif as_type == bool or value in ('True', 'False'):
return value == 'True'
elif as_type == datetime:
return dateutil.parser.parse(value)
elif as_type == date:
return dateutil.parser.parse(value).date()
elif as_type == time:
return dateutil.parser.parse(value).time()
elif as_type is not None and issubclass(as_type, Versionable):
return as_type.objects.current.get(identity=value)<|fim▁hole|> def _serialize(self, value):
if isinstance(value, str):
return value
elif isinstance(value, int) or isinstance(value, float) \
or isinstance(value, bool) or isinstance(value, decimal.Decimal):
return str(value)
elif isinstance(value, list) or isinstance(value, dict):
return json.dumps(value)
elif isinstance(value, datetime) or isinstance(value, date) or isinstance(value, time):
return value.isoformat()
elif isinstance(value, Versionable):
return value.identity
elif isinstance(value, Model):
return value.pk
raise TypeError('Unable to serialize %s into a setting.' % str(type(value)))
def get(self, key, default=None, as_type=None):
"""
Get a setting specified by key 'key'. Normally, settings are strings, but
if you put non-strings into the settings object, you can request unserialization
by specifying 'as_type'
"""
if as_type is None and key in DEFAULTS:
as_type = DEFAULTS[key]['type']
if key in self._cache():
return self._unserialize(self._cache()[key].value, as_type)
value = None
if self._parent:
value = self._parent.settings.get(key)
if value is None and key in DEFAULTS:
return self._unserialize(DEFAULTS[key]['default'], as_type)
if value is None and default is not None:
return self._unserialize(default, as_type)
return self._unserialize(value, as_type)
def __getitem__(self, key):
return self.get(key)
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
if key.startswith('_'):
return super().__setattr__(key, value)
self.set(key, value)
def __setitem__(self, key, value):
self.set(key, value)
def set(self, key, value):
if key in self._cache():
s = self._cache()[key]
s = s.clone()
else:
s = self._type(object=self._obj, key=key)
s.value = self._serialize(value)
s.save()
self._cache()[key] = s
def __delattr__(self, key):
if key.startswith('_'):
return super().__delattr__(key)
return self.__delitem__(key)
def __delitem__(self, key):
if key in self._cache():
self._cache()[key].delete()
del self._cache()[key]
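# Illustrative usage sketch (hypothetical `event` object exposing a
# SettingsProxy as `event.settings`; the names below are assumptions, not
# part of this module):
#
#     event.settings.set('payment_term_days', 30)  # stored serialized as '30'
#     event.settings.get('payment_term_days')      # -> 30, typed via DEFAULTS
#     event.settings.payment_term_days             # same lookup via __getattr__
#     del event.settings.payment_term_days         # reverts to the default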
class SettingsSandbox:
"""
Transparently proxied access to event settings, handling your domain-
prefixes for you.
"""
def __init__(self, type, key, event):
self._event = event
self._type = type
self._key = key
def _convert_key(self, key):
return '%s_%s_%s' % (self._type, self._key, key)
def __setitem__(self, key, value):
self.set(key, value)
def __setattr__(self, key, value):
if key.startswith('_'):
return super().__setattr__(key, value)
self.set(key, value)
def __getattr__(self, item):
return self.get(item)
def __getitem__(self, item):
return self.get(item)
def __delitem__(self, key):
del self._event.settings[self._convert_key(key)]
def __delattr__(self, key):
del self._event.settings[self._convert_key(key)]
def get(self, key, default=None, as_type=str):
return self._event.settings.get(self._convert_key(key), default=default, as_type=as_type)
def set(self, key, value):
self._event.settings.set(self._convert_key(key), value)<|fim▁end|> | elif as_type is not None and issubclass(as_type, Model):
return as_type.objects.get(pk=value)
return value
|
<|file_name|>dns_2.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>#[test]
fn parse_query() {
let data = include_bytes!("../assets/captures/dns_2_query.bin");
let question = Question::new("google.com.", QType::ByType(Type::AAAA), Class::Internet)
.unwrap();
let msg = Message::parse(&data[..]).unwrap();
assert!(msg.is_query());
assert_eq!(msg.id(), 3);
assert_eq!(msg.opcode(), Opcode::Query);
assert_eq!(msg.questions, vec![question]);
}
#[test]
fn parse_response() {
let data = include_bytes!("../assets/captures/dns_2_response.bin");
let question = Question::new("google.com.", QType::ByType(Type::AAAA), Class::Internet)
.unwrap();
let rr = ResourceRecord::AAAA {
name: "google.com.".parse().unwrap(),
class: Class::Internet,
ttl: 299,
addr: "2607:f8b0:400a:809::200e".parse().unwrap(),
};
let msg = Message::parse(&data[..]).unwrap();
assert!(msg.is_response());
assert_eq!(msg.id(), 3);
assert_eq!(msg.opcode(), Opcode::Query);
assert_eq!(msg.questions, vec![question]);
assert_eq!(msg.answers, vec![rr]);
}<|fim▁end|> | extern crate martin;
use martin::*;
|
<|file_name|>ExternalEventAttendee.java<|end_file_name|><|fim▁begin|>package se.tmeit.app.model;
import com.google.auto.value.AutoValue;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
<|fim▁hole|>public abstract class ExternalEventAttendee {
public static ExternalEventAttendee fromJson(JSONObject json) throws JSONException {
return builder()
.setDateOfBirth(json.optString(Keys.DOB, ""))
.setDrinkPreferences(json.optString(Keys.DRINK_PREFS, ""))
.setFoodPreferences(json.optString(Keys.FOOD_PREFS, ""))
.setName(json.optString(Keys.NAME, ""))
.setNotes(json.optString(Keys.NOTES, ""))
.setId(json.optInt(Keys.ID))
.build();
}
public static List<ExternalEventAttendee> fromJsonArray(JSONArray json) throws JSONException {
ArrayList<ExternalEventAttendee> result = new ArrayList<>(json.length());
for (int i = 0; i < json.length(); i++) {
result.add(fromJson(json.getJSONObject(i)));
}
return result;
}
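  // Illustrative input this mapper accepts (assumed JSON shape, not taken
  // from the project):
  //   [{"id": 7, "user_name": "Ada", "dob": "1990-01-01",
  //     "food_prefs": "vegetarian", "drink_prefs": "", "notes": ""}]
  // Missing keys fall back to the optString/optInt defaults used in fromJson.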
public static Builder builder() {
return new AutoValue_ExternalEventAttendee.Builder()
.setId(0)
.setName("");
}
public abstract String dateOfBirth();
public abstract String drinkPreferences();
public abstract String foodPreferences();
public abstract String name();
public abstract String notes();
public abstract int id();
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder setDateOfBirth(String value);
public abstract Builder setDrinkPreferences(String value);
public abstract Builder setFoodPreferences(String value);
abstract Builder setName(String value);
public abstract Builder setNotes(String value);
abstract Builder setId(int id);
public abstract ExternalEventAttendee build();
}
public static class Keys {
public static final String DOB = "dob";
public static final String DRINK_PREFS = "drink_prefs";
public static final String FOOD_PREFS = "food_prefs";
public static final String ID = "id";
public static final String NAME = "user_name";
public static final String NOTES = "notes";
private Keys() {
}
}
}<|fim▁end|> | /**
* Model object for attendees of an external event.
*/
@AutoValue |
<|file_name|>resource_osc_api_gateway_method_response_test.go<|end_file_name|><|fim▁begin|>package osc
import (
"fmt"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/apigateway"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
func TestAccAWSAPIGatewayMethodResponse_basic(t *testing.T) {
var conf apigateway.MethodResponse<|fim▁hole|> resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSAPIGatewayMethodResponseDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccAWSAPIGatewayMethodResponseConfig,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayMethodResponseExists("aws_api_gateway_method_response.error", &conf),
testAccCheckAWSAPIGatewayMethodResponseAttributes(&conf),
resource.TestCheckResourceAttr(
"aws_api_gateway_method_response.error", "status_code", "400"),
resource.TestCheckResourceAttr(
"aws_api_gateway_method_response.error", "response_models.application/json", "Error"),
),
},
resource.TestStep{
Config: testAccAWSAPIGatewayMethodResponseConfigUpdate,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSAPIGatewayMethodResponseExists("aws_api_gateway_method_response.error", &conf),
testAccCheckAWSAPIGatewayMethodResponseAttributesUpdate(&conf),
resource.TestCheckResourceAttr(
"aws_api_gateway_method_response.error", "status_code", "400"),
resource.TestCheckResourceAttr(
"aws_api_gateway_method_response.error", "response_models.application/json", "Empty"),
),
},
},
})
}
func testAccCheckAWSAPIGatewayMethodResponseAttributes(conf *apigateway.MethodResponse) resource.TestCheckFunc {
return func(s *terraform.State) error {
if *conf.StatusCode == "" {
return fmt.Errorf("empty StatusCode")
}
if val, ok := conf.ResponseModels["application/json"]; !ok {
return fmt.Errorf("missing application/json ResponseModel")
} else {
if *val != "Error" {
return fmt.Errorf("wrong application/json ResponseModel")
}
}
if val, ok := conf.ResponseParameters["method.response.header.Content-Type"]; !ok {
return fmt.Errorf("missing Content-Type ResponseParameters")
} else {
if *val != true {
return fmt.Errorf("wrong ResponseParameters value")
}
}
return nil
}
}
func testAccCheckAWSAPIGatewayMethodResponseAttributesUpdate(conf *apigateway.MethodResponse) resource.TestCheckFunc {
return func(s *terraform.State) error {
if *conf.StatusCode == "" {
return fmt.Errorf("empty StatusCode")
}
if val, ok := conf.ResponseModels["application/json"]; !ok {
return fmt.Errorf("missing application/json ResponseModel")
} else {
if *val != "Empty" {
return fmt.Errorf("wrong application/json ResponseModel")
}
}
if conf.ResponseParameters["method.response.header.Content-Type"] != nil {
return fmt.Errorf("Content-Type ResponseParameters shouldn't exist")
}
return nil
}
}
func testAccCheckAWSAPIGatewayMethodResponseExists(n string, res *apigateway.MethodResponse) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("Not found: %s", n)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No API Gateway Method ID is set")
}
conn := testAccProvider.Meta().(*AWSClient).apigateway
req := &apigateway.GetMethodResponseInput{
HttpMethod: aws.String("GET"),
ResourceId: aws.String(s.RootModule().Resources["aws_api_gateway_resource.test"].Primary.ID),
RestApiId: aws.String(s.RootModule().Resources["aws_api_gateway_rest_api.test"].Primary.ID),
StatusCode: aws.String(rs.Primary.Attributes["status_code"]),
}
describe, err := conn.GetMethodResponse(req)
if err != nil {
return err
}
*res = *describe
return nil
}
}
func testAccCheckAWSAPIGatewayMethodResponseDestroy(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).apigateway
for _, rs := range s.RootModule().Resources {
if rs.Type != "aws_api_gateway_method_response" {
continue
}
req := &apigateway.GetMethodResponseInput{
HttpMethod: aws.String("GET"),
ResourceId: aws.String(s.RootModule().Resources["aws_api_gateway_resource.test"].Primary.ID),
RestApiId: aws.String(s.RootModule().Resources["aws_api_gateway_rest_api.test"].Primary.ID),
StatusCode: aws.String(rs.Primary.Attributes["status_code"]),
}
_, err := conn.GetMethodResponse(req)
if err == nil {
return fmt.Errorf("API Gateway Method still exists")
}
aws2err, ok := err.(awserr.Error)
if !ok {
return err
}
if aws2err.Code() != "NotFoundException" {
return err
}
return nil
}
return nil
}
const testAccAWSAPIGatewayMethodResponseConfig = `
resource "aws_api_gateway_rest_api" "test" {
name = "test"
}
resource "aws_api_gateway_resource" "test" {
rest_api_id = "${aws_api_gateway_rest_api.test.id}"
parent_id = "${aws_api_gateway_rest_api.test.root_resource_id}"
path_part = "test"
}
resource "aws_api_gateway_method" "test" {
rest_api_id = "${aws_api_gateway_rest_api.test.id}"
resource_id = "${aws_api_gateway_resource.test.id}"
http_method = "GET"
authorization = "NONE"
request_models = {
"application/json" = "Error"
}
}
resource "aws_api_gateway_method_response" "error" {
rest_api_id = "${aws_api_gateway_rest_api.test.id}"
resource_id = "${aws_api_gateway_resource.test.id}"
http_method = "${aws_api_gateway_method.test.http_method}"
status_code = "400"
response_models = {
"application/json" = "Error"
}
response_parameters = {
"method.response.header.Content-Type" = true
}
}
`
const testAccAWSAPIGatewayMethodResponseConfigUpdate = `
resource "aws_api_gateway_rest_api" "test" {
name = "test"
}
resource "aws_api_gateway_resource" "test" {
rest_api_id = "${aws_api_gateway_rest_api.test.id}"
parent_id = "${aws_api_gateway_rest_api.test.root_resource_id}"
path_part = "test"
}
resource "aws_api_gateway_method" "test" {
rest_api_id = "${aws_api_gateway_rest_api.test.id}"
resource_id = "${aws_api_gateway_resource.test.id}"
http_method = "GET"
authorization = "NONE"
request_models = {
"application/json" = "Error"
}
}
resource "aws_api_gateway_method_response" "error" {
rest_api_id = "${aws_api_gateway_rest_api.test.id}"
resource_id = "${aws_api_gateway_resource.test.id}"
http_method = "${aws_api_gateway_method.test.http_method}"
status_code = "400"
response_models = {
"application/json" = "Empty"
}
response_parameters = {
"method.response.header.Host" = true
}
}
`<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![recursion_limit = "1024"] // for error_chain!
extern crate rand;
extern crate scopeguard;
#[macro_use]<|fim▁hole|>extern crate toml;
extern crate download;
extern crate semver;
#[cfg(windows)]
extern crate winapi;
#[cfg(windows)]
extern crate winreg;
#[cfg(windows)]
extern crate shell32;
#[cfg(windows)]
extern crate ole32;
#[cfg(windows)]
extern crate kernel32;
#[cfg(windows)]
extern crate advapi32;
#[cfg(windows)]
extern crate userenv;
#[cfg(unix)]
extern crate libc;
pub mod errors;
pub mod notifications;
pub mod raw;
pub mod tty;
pub mod utils;
pub mod toml_utils;
pub use errors::*;
pub use notifications::{Notification};
pub mod notify;<|fim▁end|> | extern crate error_chain;
extern crate rustc_serialize;
extern crate sha2;
extern crate url; |
<|file_name|>test_hb.py<|end_file_name|><|fim▁begin|>from __future__ import division, print_function, absolute_import
from io import StringIO
import tempfile
import numpy as np
from numpy.testing import assert_equal, \
assert_array_almost_equal_nulp
from scipy.sparse import coo_matrix, csc_matrix, rand
from scipy.io import hb_read, hb_write
SIMPLE = """\
No Title |No Key
9 4 1 4
RUA 100 100 10 0
(26I3) (26I3) (3E23.15)
1 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3
3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3
3 3 3 3 3 3 3 4 4 4 6 6 6 6 6 6 6 6 6 6 6 8 9 9 9 9
9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 11
37 71 89 18 30 45 70 19 25 52
2.971243799687726e-01 3.662366682877375e-01 4.786962174699534e-01
6.490068647991184e-01 6.617490424831662e-02 8.870370343191623e-01
4.196478590163001e-01 5.649603072111251e-01 9.934423887087086e-01
6.912334991524289e-01
"""
SIMPLE_MATRIX = coo_matrix(
((0.297124379969, 0.366236668288, 0.47869621747, 0.649006864799,
0.0661749042483, 0.887037034319, 0.419647859016,
0.564960307211, 0.993442388709, 0.691233499152,),<|fim▁hole|>
def assert_csc_almost_equal(r, l):
r = csc_matrix(r)
l = csc_matrix(l)
assert_equal(r.indptr, l.indptr)
assert_equal(r.indices, l.indices)
assert_array_almost_equal_nulp(r.data, l.data, 10000)
class TestHBReader(object):
def test_simple(self):
m = hb_read(StringIO(SIMPLE))
assert_csc_almost_equal(m, SIMPLE_MATRIX)
class TestHBReadWrite(object):
def check_save_load(self, value):
with tempfile.NamedTemporaryFile(mode='w+t') as file:
hb_write(file, value)
file.file.seek(0)
value_loaded = hb_read(file)
assert_csc_almost_equal(value, value_loaded)
def test_simple(self):
random_matrix = rand(10, 100, 0.1)
for matrix_format in ('coo', 'csc', 'csr', 'bsr', 'dia', 'dok', 'lil'):
matrix = random_matrix.asformat(matrix_format, copy=False)
self.check_save_load(matrix)<|fim▁end|> | (np.array([[36, 70, 88, 17, 29, 44, 69, 18, 24, 51],
[0, 4, 58, 61, 61, 72, 72, 73, 99, 99]])))) |
<|file_name|>foot.component.spec.ts<|end_file_name|><|fim▁begin|>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { FootComponent } from './foot.component';
describe('FootComponent', () => {
let component: FootComponent;
let fixture: ComponentFixture<FootComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ FootComponent ]
})
.compileComponents();
}));
beforeEach(() => {<|fim▁hole|> component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|> | fixture = TestBed.createComponent(FootComponent); |
<|file_name|>mechonMamre.py<|end_file_name|><|fim▁begin|>from bs4 import BeautifulSoup as Soup
import json
import re
import requests
from common import *
from nltk.corpus import words
entries = []
WEBSITE = 'http://www.mechon-mamre.org/jewfaq/glossary.htm'
SITE_TITLE = "Mechon Mamre"
source_object = {"site":WEBSITE, "title":SITE_TITLE}
def main():
parseMechonMamre()
def parseMechonMamre():
response = requests.get(WEBSITE)
page = Soup(response.content, "lxml")
stack = [page]
while(len(stack)>0):
node = stack.pop()
for child in node.contents:
if child.name == 'dl':
parseList(child)
elif child.name:
stack.append(child)
print("Done")<|fim▁hole|>def parseList(node):
entry = {"language":"Hebrew", "english":[]}
entryDone = False
foundTerm = False
for line in [line for line in node.contents if line.name or len(line.strip())>0]:
if line.name == "dt":
parseTerm(entry, line.text)
else:
breaklineCount = 0
if entryDone:
if len(entry["english"])>0 and not entry["english"][0].endswith("CSULB") and not entry["english"][0].startswith("email"):
addEntry(entry)
entry = {"language":"Yiddish", "english":[]}
entryDone = False
foundTerm = False
if not foundTerm:
split = line.split(":", 1)
term = split[0]
foundTerm = True
for t in term.split("/"):
entry["english"].append(t.strip().lower())
if len(split) > 1:
entry["definition"] = {"text":split[1].strip(),"source":source_object}
else:
pass
else:
if "definition" in entry:
entry["definition"]["text"] += " "+line.strip()
else:
entry["definition"] = {"text":line.strip(),"source":source_object}
def parseTerm(entry, term):
if(term.startswith("Kohein")):
pass
else:
matches = re.findall("([a-zA-Z-'\d][a-zA-Z- '\d]+)(?: \(([^;\)]*)(;[^;\)]*)*\))?(;|$)",term)
return matches[0][0]
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>DeploymentReadyAction.cpp<|end_file_name|><|fim▁begin|>/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/codedeploy/model/DeploymentReadyAction.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>
using namespace Aws::Utils;
namespace Aws
{
namespace CodeDeploy
{
namespace Model
{
namespace DeploymentReadyActionMapper
{
static const int CONTINUE_DEPLOYMENT_HASH = HashingUtils::HashString("CONTINUE_DEPLOYMENT");
static const int STOP_DEPLOYMENT_HASH = HashingUtils::HashString("STOP_DEPLOYMENT");
DeploymentReadyAction GetDeploymentReadyActionForName(const Aws::String& name)
{
int hashCode = HashingUtils::HashString(name.c_str());
if (hashCode == CONTINUE_DEPLOYMENT_HASH)
{
return DeploymentReadyAction::CONTINUE_DEPLOYMENT;
}
else if (hashCode == STOP_DEPLOYMENT_HASH)
{
return DeploymentReadyAction::STOP_DEPLOYMENT;
}
EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
if(overflowContainer)
{
overflowContainer->StoreOverflow(hashCode, name);
return static_cast<DeploymentReadyAction>(hashCode);
}
return DeploymentReadyAction::NOT_SET;
}
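// Illustrative lookups (sketch): GetDeploymentReadyActionForName("STOP_DEPLOYMENT")
// yields DeploymentReadyAction::STOP_DEPLOYMENT, while an unrecognized name is
// stored in the overflow container and echoed back later by
// GetNameForDeploymentReadyAction's default branch.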
Aws::String GetNameForDeploymentReadyAction(DeploymentReadyAction enumValue)
{
switch(enumValue)
{<|fim▁hole|> case DeploymentReadyAction::STOP_DEPLOYMENT:
return "STOP_DEPLOYMENT";
default:
EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
if(overflowContainer)
{
return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
}
return {};
}
}
} // namespace DeploymentReadyActionMapper
} // namespace Model
} // namespace CodeDeploy
} // namespace Aws<|fim▁end|> | case DeploymentReadyAction::CONTINUE_DEPLOYMENT:
return "CONTINUE_DEPLOYMENT"; |
<|file_name|>KafkaCP.py<|end_file_name|><|fim▁begin|>__author__ = 'asifj'
import logging
from kafka import KafkaConsumer
import json
import traceback
from bson.json_util import dumps
from kafka import SimpleProducer, KafkaClient
from utils import Utils
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
inputs = []
consumer = KafkaConsumer("SAPEvent", bootstrap_servers=['172.22.147.242:9092', '172.22.147.232:9092', '172.22.147.243:9092'], auto_commit_enable=False, auto_offset_reset="smallest")
message_no = 1
<|fim▁hole|> topic = message.topic
partition = message.partition
offset = message.offset
key = message.key
message = message.value
print "================================================================================================================="
if message is not None:
try:
document = json.loads(message)
collection = document.keys()[0]
if collection == "customerMaster":
print "customerMaster"
elif collection == "srAttachements":
#print dumps(document, sort_keys=True)
inputs.append(document)
except Exception, err:
print "CustomException"
print "Kafka Message: "+str(message)
print(traceback.format_exc())
print "================================================================================================================="
print "\n"
message_no += 1
'''
# To send messages synchronously
kafka = KafkaClient('172.22.147.232:9092,172.22.147.242:9092,172.22.147.243:9092')
producer = SimpleProducer(kafka)
for i in inputs:
try:
#producer.send_messages(b'SAPEvent', json.dumps(input))
document = json.loads(str(i.value))
type = document.keys()[0]
if type == "srDetails":
print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
row = []
utils = Utils()
row = utils.validate_sr_details( document['srDetails'], row)
print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
print "\n\n"
except Exception:
print "Kafka: "+str(document)
print Exception.message
print(traceback.format_exc())<|fim▁end|> | inputs = consumer.fetch_messages()
'''for message in consumer:
|
<|file_name|>commands.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <Aclapi.h>
#include <windows.h>
#include <stddef.h>
#include <string>
#include "sandbox/win/tests/validation_tests/commands.h"
#include "sandbox/win/tests/common/controller.h"
namespace {
// Returns the HKEY corresponding to name. If there is no HKEY corresponding
// to the name it returns NULL.
HKEY GetHKEYFromString(const base::string16 &name) {
if (name == L"HKLM")
return HKEY_LOCAL_MACHINE;
if (name == L"HKCR")
return HKEY_CLASSES_ROOT;
if (name == L"HKCC")
return HKEY_CURRENT_CONFIG;
if (name == L"HKCU")
return HKEY_CURRENT_USER;
if (name == L"HKU")
return HKEY_USERS;
return NULL;
}
// Modifies string to remove the leading and trailing quotes.
void trim_quote(base::string16* string) {
base::string16::size_type pos1 = string->find_first_not_of(L'"');
base::string16::size_type pos2 = string->find_last_not_of(L'"');
if (pos1 == base::string16::npos || pos2 == base::string16::npos)
string->clear();
else
(*string) = string->substr(pos1, pos2 + 1);
}
int TestOpenFile(base::string16 path, bool for_write) {
wchar_t path_expanded[MAX_PATH + 1] = {0};
DWORD size = ::ExpandEnvironmentStrings(path.c_str(), path_expanded,
MAX_PATH);
if (!size)
return sandbox::SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
HANDLE file;
file = ::CreateFile(path_expanded,
for_write ? GENERIC_READ | GENERIC_WRITE : GENERIC_READ,
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
NULL, // No security attributes.
OPEN_EXISTING,
FILE_FLAG_BACKUP_SEMANTICS,
NULL); // No template.
if (file != INVALID_HANDLE_VALUE) {
::CloseHandle(file);
return sandbox::SBOX_TEST_SUCCEEDED;
}
return (::GetLastError() == ERROR_ACCESS_DENIED) ?
sandbox::SBOX_TEST_DENIED : sandbox::SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
}
} // namespace
namespace sandbox {
SBOX_TESTS_COMMAND int ValidWindow(int argc, wchar_t **argv) {
return (argc == 1) ?
TestValidWindow(
reinterpret_cast<HWND>(static_cast<ULONG_PTR>(_wtoi(argv[0])))) :
SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
}
int TestValidWindow(HWND window) {
return ::IsWindow(window) ? SBOX_TEST_SUCCEEDED : SBOX_TEST_DENIED;
}
SBOX_TESTS_COMMAND int OpenProcessCmd(int argc, wchar_t **argv) {
return (argc == 2) ?
TestOpenProcess(_wtol(argv[0]), _wtol(argv[1])) :
SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
}
int TestOpenProcess(DWORD process_id, DWORD access_mask) {
HANDLE process = ::OpenProcess(access_mask,
FALSE, // Do not inherit handle.
process_id);
if (process != NULL) {
::CloseHandle(process);
return SBOX_TEST_SUCCEEDED;
}
return (::GetLastError() == ERROR_ACCESS_DENIED) ?
sandbox::SBOX_TEST_DENIED : sandbox::SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
}
SBOX_TESTS_COMMAND int OpenThreadCmd(int argc, wchar_t **argv) {
return (argc == 1) ?
TestOpenThread(_wtoi(argv[0])) : SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
}
int TestOpenThread(DWORD thread_id) {
HANDLE thread = ::OpenThread(THREAD_QUERY_INFORMATION,
FALSE, // Do not inherit handles.
thread_id);
if (thread != NULL) {
::CloseHandle(thread);
return SBOX_TEST_SUCCEEDED;
}
return (::GetLastError() == ERROR_ACCESS_DENIED) ?
sandbox::SBOX_TEST_DENIED : sandbox::SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
}
SBOX_TESTS_COMMAND int OpenFileCmd(int argc, wchar_t **argv) {
if (1 != argc)
return SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
base::string16 path = argv[0];
trim_quote(&path);
return TestOpenReadFile(path);
}
int TestOpenReadFile(const base::string16& path) {
return TestOpenFile(path, false);
}
int TestOpenWriteFile(int argc, wchar_t **argv) {
if (argc != 1)
return SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
base::string16 path = argv[0];
trim_quote(&path);
return TestOpenWriteFile(path);
}
int TestOpenWriteFile(const base::string16& path) {
return TestOpenFile(path, true);
}
SBOX_TESTS_COMMAND int OpenKey(int argc, wchar_t **argv) {
if (argc != 1 && argc != 2)
return SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
// Get the hive.
HKEY base_key = GetHKEYFromString(argv[0]);
// Get the subkey.
base::string16 subkey;
if (argc == 2) {
subkey = argv[1];
trim_quote(&subkey);
}
return TestOpenKey(base_key, subkey);
}
int TestOpenKey(HKEY base_key, base::string16 subkey) {
HKEY key;
LONG err_code = ::RegOpenKeyEx(base_key,
subkey.c_str(),
0, // Reserved, must be 0.
MAXIMUM_ALLOWED,
&key);
if (err_code == ERROR_SUCCESS) {
::RegCloseKey(key);
return SBOX_TEST_SUCCEEDED;
}
return (err_code == ERROR_INVALID_HANDLE || err_code == ERROR_ACCESS_DENIED) ?
SBOX_TEST_DENIED : SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
}
// Returns true if the current's thread desktop is the interactive desktop.
// In Vista there is a more direct test but for XP and w2k we need to check
// the object name.
bool IsInteractiveDesktop(bool* is_interactive) {
HDESK current_desk = ::GetThreadDesktop(::GetCurrentThreadId());
if (current_desk == NULL)
return false;
wchar_t current_desk_name[256] = {0};
if (!::GetUserObjectInformationW(current_desk, UOI_NAME, current_desk_name,
sizeof(current_desk_name), NULL))
return false;
*is_interactive = (0 == _wcsicmp(L"default", current_desk_name));
return true;
}
SBOX_TESTS_COMMAND int OpenInteractiveDesktop(int, wchar_t **) {
return TestOpenInputDesktop();
}
int TestOpenInputDesktop() {
bool is_interactive = false;
if (IsInteractiveDesktop(&is_interactive) && is_interactive)
return SBOX_TEST_SUCCEEDED;
HDESK desk = ::OpenInputDesktop(0, FALSE, DESKTOP_CREATEWINDOW);
if (desk) {
::CloseDesktop(desk);
return SBOX_TEST_SUCCEEDED;
}
return SBOX_TEST_DENIED;
}
SBOX_TESTS_COMMAND int SwitchToSboxDesktop(int, wchar_t **) {
return TestSwitchDesktop();
}
int TestSwitchDesktop() {
HDESK desktop = ::GetThreadDesktop(::GetCurrentThreadId());
if (desktop == NULL)
return SBOX_TEST_FAILED;
return ::SwitchDesktop(desktop) ? SBOX_TEST_SUCCEEDED : SBOX_TEST_DENIED;
}
SBOX_TESTS_COMMAND int OpenAlternateDesktop(int, wchar_t **argv) {
return TestOpenAlternateDesktop(argv[0]);
}
int TestOpenAlternateDesktop(wchar_t *desktop_name) {
// Test for WRITE_DAC permission on the handle.
HDESK desktop = ::GetThreadDesktop(::GetCurrentThreadId());
if (desktop) {
HANDLE test_handle;
if (::DuplicateHandle(::GetCurrentProcess(), desktop,
::GetCurrentProcess(), &test_handle,
WRITE_DAC, FALSE, 0)) {
DWORD result = ::SetSecurityInfo(test_handle, SE_WINDOW_OBJECT,
DACL_SECURITY_INFORMATION, NULL, NULL,
NULL, NULL);
::CloseHandle(test_handle);
if (result == ERROR_SUCCESS)
return SBOX_TEST_SUCCEEDED;
} else if (::GetLastError() != ERROR_ACCESS_DENIED) {
return SBOX_TEST_FAILED;
}
}
// Open by name with WRITE_DAC.
desktop = ::OpenDesktop(desktop_name, 0, FALSE, WRITE_DAC);
if (!desktop && ::GetLastError() == ERROR_ACCESS_DENIED)
return SBOX_TEST_DENIED;
::CloseDesktop(desktop);
return SBOX_TEST_SUCCEEDED;<|fim▁hole|>}
BOOL CALLBACK DesktopTestEnumProc(LPTSTR desktop_name, LPARAM result) {
return TRUE;
}
SBOX_TESTS_COMMAND int EnumAlternateWinsta(int, wchar_t **) {
return TestEnumAlternateWinsta();
}
int TestEnumAlternateWinsta() {
// Try to enumerate the destops on the alternate windowstation.
return ::EnumDesktopsW(NULL, DesktopTestEnumProc, 0) ?
SBOX_TEST_SUCCEEDED : SBOX_TEST_DENIED;
}
SBOX_TESTS_COMMAND int SleepCmd(int argc, wchar_t **argv) {
if (argc != 1)
return SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
::Sleep(_wtoi(argv[0]));
return SBOX_TEST_SUCCEEDED;
}
SBOX_TESTS_COMMAND int AllocateCmd(int argc, wchar_t **argv) {
if (argc != 1)
return SBOX_TEST_FAILED_TO_EXECUTE_COMMAND;
size_t mem_size = static_cast<size_t>(_wtoll(argv[0]));
void* memory = ::VirtualAlloc(NULL, mem_size, MEM_COMMIT | MEM_RESERVE,
PAGE_READWRITE);
if (!memory) {
// We need to give the broker a chance to kill our process on failure.
::Sleep(5000);
return SBOX_TEST_DENIED;
}
return ::VirtualFree(memory, 0, MEM_RELEASE) ?
SBOX_TEST_SUCCEEDED : SBOX_TEST_FAILED;
}
} // namespace sandbox<|fim▁end|> | |
<|file_name|>uStripDesign.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# @Author: Marco Benzi <[email protected]>
# @Date: 2015-06-07 19:44:12
# @Last Modified 2015-06-09
# @Last Modified time: 2015-06-09 16:07:05
# ==========================================================================
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ==========================================================================
import math
"""
Speed of light constant
"""
c = 3E8
"""
Vacuum permittivity
"""
e0 = 8.8541E-12
"""
Vacuum permeability
"""
u0 = 4E-7*math.pi
def getEffectivePermitivity(WHratio, er):
"""
Returns the effective permitivity for a given W/H ratio.
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `WHratio` : W/H ratio.
- `er` : Relative permitivity of the dielectric.
"""
if WHratio <= 1:
return (er + 1)/2 + ((1 + 12/WHratio)**(-0.5) + 0.04*(1-WHratio)**2)*(er -1)/2
else:
return (er + 1)/2 + ((1 + 12/WHratio)**(-0.5))*(er -1)/2
def getAuxVarA(Zo,er):
"""
Returns the auxiliary variable
A = (Zo)/60 * math.sqrt((er + 1)/2) + (er-1)/(er+1)*(0.23+0.11/er)
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `Zo` : Real impedance of the line.
- `er` : Relative permitivity of the dielectric.
"""
return (Zo)/60 * math.sqrt((er + 1)/2) + (er-1)/(er+1)*(0.23+0.11/er)
def getAuxVarB(Zo,er):
"""
Returns the auxiliary variable
B = (377*math.pi)/(2*Zo*math.sqrt(er))
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `Zo` : Real impedance of the line.
- `er` : Relative permitivity of the dielectric.
"""
return (377*math.pi)/(2*Zo*math.sqrt(er))
def getWHRatioA(Zo,er):
"""
Returns the W/H ratio for W/H < 2. If the result is > 2, then other method
should be used.
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `Zo` : Real impedance of the line.
- `er` : Relative permitivity of the dielectric.
"""
A = getAuxVarA(Zo,er)
return (8*math.e**A)/(math.e**(2*A) - 2)
def getWHRatioB(Zo,er):
"""
Returns the W/H ratio for W/H > 2. If the result is < 2, then other method
should be used.
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `Zo` : Real impedance of the line.
- `er` : Relative permitivity of the dielectric.
"""
B = getAuxVarB(Zo,er)
return (2/math.pi)*(B-1 - math.log(2*B - 1) + (er - 1)*(math.log(B-1) + 0.39 - 0.61/er)/(2*er))
def getCharacteristicImpedance(WHratio, ef):
"""
Returns the characteristic impedance of the medium, based on the effective
permitivity and W/H ratio.
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `WHratio` : W/H ratio.
- `ef` : Effective permitivity of the dielectric.
"""
if WHratio <= 1:
return (60/math.sqrt(ef))*math.log(8/WHratio + WHratio/4)
else:
return (120*math.pi/math.sqrt(ef))/(WHratio + 1.393 + 0.667*math.log(WHratio +1.444))
def getWHRatio(Zo,er):
"""
Returns the W/H ratio, after trying with the two possible set of solutions,
for when W/H < 2 or else. When no solution, returns zero.
This function assumes that the thickenss of conductors is insignificant.
Parameters:
- `Zo` : Real impedance of the line.
- `er` : Relative permitivity of the dielectric.
"""
efa = er
efb = er
Zoa = Zo
Zob = Zo
while 1:
rA = getWHRatioA(Zoa,efa)
rB = getWHRatioB(Zob,efb)
if rA < 2:
return rA
if rB > 2:
return rB
Zoa = math.sqrt(efa)*Zoa
Zob = math.sqrt(efb)*Zob
def getCorrectedWidth(W,H,t):
"""
For significant conductor thickness, this returns the corrected width.
Paramenters:
- `W` : Width
- `H` : Height
- `t` : Conductor thickness
"""
if t < H and t < W/2:
if W/H <= math.pi/2:
return W + (1 + math.log(2*H/t))*(t/math.pi)
else:
return W + (1 + math.log(4*math.pi*H/t))*(t/math.pi)
else:
print "The conductor is too thick!!"
def getConductorLoss(W,H,t,sigma,f,Zo):
"""
Returns the conductor loss in [Np/m].
Parameters:
- `W` : Width
- `H` : Height
- `t` : Conductor thickness
- `sigma` : Conductance of medium
- `f` : Operating frequency
- `Zo` : Characteristic impedance
"""
We = getCorrectedWidth(W,H,t)
P = 1 - (We/4/H)**2
Rs = math.sqrt((math.pi*f*u0)/sigma)
Q = 1 + H/We + (math.log((2*H)/t)-t/W)*H/(We*math.pi)
if W/H <= 1/(2*math.pi):
return (1 + H/We + (math.log(4*pi*W/t) + t/W)*H/(math.pi*We))*(8.68*Rs*P)/(2*pi*Zo*H)
elif W/H <= 2:
return (8.68*Rs*P*Q)/(2*math.pi*Zo*H)
else:
return ((8.68*Rs*Q)/(Zo*H))*(We/H + (We/math.pi/H)/(We/2/H)+0.94)*((H/We + 2*math.log(We/2/H + 0.94)/math.pi)**(-2))
def getDielectricLoss(er,ef,tanD,f):
"""<|fim▁hole|> Returns the dielectric loss in [dB/cm].
Paramenters:
- `er` : Relative permitivity of the dielectric
- `ef` : Effective permitivity
- `tanD` : tan \delta
- `f` : Operating frequency
"""
lam = c/math.sqrt(ef)/f
return 27.3*(er*(ef-1)*tanD)/(lam*math.sqrt(er)*(er-1))<|fim▁end|> | |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
|jedi| is mostly being tested by what I would call "Blackbox Tests". These
tests are just testing the interface and do input/output testing. This makes a
lot of sense for |jedi|. Jedi supports so many different code structures, that
it is just stupid to write 200'000 unittests in the manner of
``regression.py``. Also, it is impossible to do doctests/unittests on most of
the internal data structures. That's why |jedi| uses mostly these kind of
tests.
There are different kind of tests:
- completions / goto_definitions ``#?``
- goto_assignments: ``#!``
- usages: ``#<``
How to run tests?
+++++++++++++++++
Jedi uses pytest_ to run unit and integration tests. To run tests,
simply run ``py.test``. You can also use tox_ to run tests for
multiple Python versions.
.. _pytest: http://pytest.org
.. _tox: http://testrun.org/tox
Integration test cases are located in ``test/completion`` directory
and each test cases are indicated by the comment ``#?`` (completions /
definitions), ``#!`` (assignments) and ``#<`` (usages). There is also
support for third party libraries. In a normal test run they are not
being executed, you have to provide a ``--thirdparty`` option.
In addition to standard `-k` and `-m` options in py.test, you can use
`-T` (`--test-files`) option to specify integration test cases to run.
It takes the format of ``FILE_NAME[:LINE[,LINE[,...]]]`` where
``FILE_NAME`` is a file in ``test/completion`` and ``LINE`` is a line
number of the test comment. Here is some recipes:
Run tests only in ``basic.py`` and ``imports.py``::
py.test test/test_integration.py -T basic.py -T imports.py
Run test at line 4, 6, and 8 in ``basic.py``::
py.test test/test_integration.py -T basic.py:4,6,8
See ``py.test --help`` for more information.
If you want to debug a test, just use the ``--pdb`` option.
Alternate Test Runner
+++++++++++++++++++++
If you don't like the output of ``py.test``, there's an alternate test runner
that you can start by running ``./run.py``. The above example could be run by::
./run.py basic 4 6 8 50-80
The advantage of this runner is simplicity and more customized error reports.
Using both runners will help you to have a quicker overview of what's
happening.
Auto-Completion
+++++++++++++++
Uses comments to specify a test in the next line. The comment says, which
results are expected. The comment always begins with `#?`. The last row
symbolizes the cursor.
For example::
#? ['real']
a = 3; a.rea
Because it follows ``a.rea`` and a is an ``int``, which has a ``real``
property.
Goto Definitions
++++++++++++++++
Definition tests use the same symbols like completion tests. This is
possible because the completion tests are defined with a list::
#? int()
ab = 3; ab
Goto Assignments
++++++++++++++++
Tests look like this::
abc = 1
#! ['abc=1']
abc
Additionally it is possible to add a number which describes to position of
the test (otherwise it's just end of line)::
#! 2 ['abc=1']
abc
Usages
++++++
Tests look like this::
abc = 1
#< abc@1,0 abc@3,0
abc
"""
import os
import re
import sys
import operator
from ast import literal_eval
from io import StringIO
from functools import reduce
import jedi
from jedi._compatibility import unicode, is_py3
from jedi.parser import Parser, load_grammar
from jedi.api.classes import Definition<|fim▁hole|>TEST_ASSIGNMENTS = 2
TEST_USAGES = 3
class IntegrationTestCase(object):
def __init__(self, test_type, correct, line_nr, column, start, line,
path=None, skip=None):
self.test_type = test_type
self.correct = correct
self.line_nr = line_nr
self.column = column
self.start = start
self.line = line
self.path = path
self.skip = skip
@property
def module_name(self):
return os.path.splitext(os.path.basename(self.path))[0]
@property
def line_nr_test(self):
"""The test is always defined on the line before."""
return self.line_nr - 1
def __repr__(self):
return '<%s: %s:%s:%s>' % (self.__class__.__name__, self.module_name,
self.line_nr_test, self.line.rstrip())
def script(self):
return jedi.Script(self.source, self.line_nr, self.column, self.path)
def run(self, compare_cb):
testers = {
TEST_COMPLETIONS: self.run_completion,
TEST_DEFINITIONS: self.run_goto_definitions,
TEST_ASSIGNMENTS: self.run_goto_assignments,
TEST_USAGES: self.run_usages,
}
return testers[self.test_type](compare_cb)
def run_completion(self, compare_cb):
completions = self.script().completions()
#import cProfile; cProfile.run('script.completions()')
comp_str = set([c.name for c in completions])
return compare_cb(self, comp_str, set(literal_eval(self.correct)))
def run_goto_definitions(self, compare_cb):
script = self.script()
evaluator = script._evaluator
def comparison(definition):
suffix = '()' if definition.type == 'instance' else ''
return definition.desc_with_module + suffix
def definition(correct, correct_start, path):
should_be = set()
for match in re.finditer('(?:[^ ]+)', correct):
string = match.group(0)
parser = Parser(load_grammar(), string, start_symbol='eval_input')
parser.position_modifier.line = self.line_nr
element = parser.get_parsed_node()
element.parent = jedi.api.completion.get_user_scope(
script._get_module(),
(self.line_nr, self.column)
)
results = evaluator.eval_element(element)
if not results:
raise Exception('Could not resolve %s on line %s'
% (match.string, self.line_nr - 1))
should_be |= set(Definition(evaluator, r) for r in results)
# Because the objects have different ids, `repr`, then compare.
should = set(comparison(r) for r in should_be)
return should
should = definition(self.correct, self.start, script.path)
result = script.goto_definitions()
is_str = set(comparison(r) for r in result)
return compare_cb(self, is_str, should)
def run_goto_assignments(self, compare_cb):
result = self.script().goto_assignments()
comp_str = str(sorted(str(r.description) for r in result))
return compare_cb(self, comp_str, self.correct)
def run_usages(self, compare_cb):
result = self.script().usages()
self.correct = self.correct.strip()
compare = sorted((r.module_name, r.line, r.column) for r in result)
wanted = []
if not self.correct:
positions = []
else:
positions = literal_eval(self.correct)
for pos_tup in positions:
if type(pos_tup[0]) == str:
# this means that there is a module specified
wanted.append(pos_tup)
else:
line = pos_tup[0]
if pos_tup[0] is not None:
line += self.line_nr
wanted.append((self.module_name, line, pos_tup[1]))
return compare_cb(self, compare, sorted(wanted))
def skip_python_version(line):
comp_map = {
'==': 'eq',
'<=': 'le',
'>=': 'ge',
'<': 'gk',
'>': 'lt',
}
# check for python minimal version number
match = re.match(r" *# *python *([<>]=?|==) *(\d+(?:\.\d+)?)$", line)
if match:
minimal_python_version = tuple(
map(int, match.group(2).split(".")))
operation = getattr(operator, comp_map[match.group(1)])
if not operation(sys.version_info, minimal_python_version):
return "Minimal python version %s %s" % (match.group(1), match.group(2))
return None
def collect_file_tests(path, lines, lines_to_execute):
def makecase(t):
return IntegrationTestCase(t, correct, line_nr, column,
start, line, path=path, skip=skip)
start = None
correct = None
test_type = None
skip = None
for line_nr, line in enumerate(lines, 1):
if correct is not None:
r = re.match('^(\d+)\s*(.*)$', correct)
if r:
column = int(r.group(1))
correct = r.group(2)
start += r.regs[2][0] # second group, start index
else:
column = len(line) - 1 # -1 for the \n
if test_type == '!':
yield makecase(TEST_ASSIGNMENTS)
elif test_type == '<':
yield makecase(TEST_USAGES)
elif correct.startswith('['):
yield makecase(TEST_COMPLETIONS)
else:
yield makecase(TEST_DEFINITIONS)
correct = None
else:
skip = skip or skip_python_version(line)
try:
r = re.search(r'(?:^|(?<=\s))#([?!<])\s*([^\n]*)', line)
# test_type is ? for completion and ! for goto_assignments
test_type = r.group(1)
correct = r.group(2)
# Quick hack to make everything work (not quite a bloody unicorn hack though).
if correct == '':
correct = ' '
start = r.start()
except AttributeError:
correct = None
else:
# Skip the test, if this is not specified test.
for l in lines_to_execute:
if isinstance(l, tuple) and l[0] <= line_nr <= l[1] \
or line_nr == l:
break
else:
if lines_to_execute:
correct = None
def collect_dir_tests(base_dir, test_files, check_thirdparty=False):
for f_name in os.listdir(base_dir):
files_to_execute = [a for a in test_files.items() if f_name.startswith(a[0])]
lines_to_execute = reduce(lambda x, y: x + y[1], files_to_execute, [])
if f_name.endswith(".py") and (not test_files or files_to_execute):
skip = None
if check_thirdparty:
lib = f_name.replace('_.py', '')
try:
# there is always an underline at the end.
# It looks like: completion/thirdparty/pylab_.py
__import__(lib)
except ImportError:
skip = 'Thirdparty-Library %s not found.' % lib
path = os.path.join(base_dir, f_name)
if is_py3:
source = open(path, encoding='utf-8').read()
else:
source = unicode(open(path).read(), 'UTF-8')
for case in collect_file_tests(path, StringIO(source),
lines_to_execute):
case.source = source
if skip:
case.skip = skip
yield case
docoptstr = """
Using run.py to make debugging easier with integration tests.
An alternative testing format, which is much more hacky, but very nice to
work with.
Usage:
run.py [--pdb] [--debug] [--thirdparty] [<rest>...]
run.py --help
Options:
-h --help Show this screen.
--pdb Enable pdb debugging on fail.
-d, --debug Enable text output debugging (please install ``colorama``).
--thirdparty Also run thirdparty tests (in ``completion/thirdparty``).
"""
if __name__ == '__main__':
import docopt
arguments = docopt.docopt(docoptstr)
import time
t_start = time.time()
# Sorry I didn't use argparse here. It's because argparse is not in the
# stdlib in 2.5.
import sys
if arguments['--debug']:
jedi.set_debug_function()
# get test list, that should be executed
test_files = {}
last = None
for arg in arguments['<rest>']:
match = re.match('(\d+)-(\d+)', arg)
if match:
start, end = match.groups()
test_files[last].append((int(start), int(end)))
elif arg.isdigit():
if last is None:
continue
test_files[last].append(int(arg))
else:
test_files[arg] = []
last = arg
# completion tests:
dir_ = os.path.dirname(os.path.realpath(__file__))
completion_test_dir = os.path.join(dir_, '../test/completion')
completion_test_dir = os.path.abspath(completion_test_dir)
summary = []
tests_fail = 0
# execute tests
cases = list(collect_dir_tests(completion_test_dir, test_files))
if test_files or arguments['--thirdparty']:
completion_test_dir += '/thirdparty'
cases += collect_dir_tests(completion_test_dir, test_files, True)
def file_change(current, tests, fails):
if current is not None:
current = os.path.basename(current)
print('%s \t\t %s tests and %s fails.' % (current, tests, fails))
def report(case, actual, desired):
if actual == desired:
return 0
else:
print("\ttest fail @%d, actual = %s, desired = %s"
% (case.line_nr - 1, actual, desired))
return 1
import traceback
current = cases[0].path if cases else None
count = fails = 0
for c in cases:
if c.skip:
continue
if current != c.path:
file_change(current, count, fails)
current = c.path
count = fails = 0
try:
if c.run(report):
tests_fail += 1
fails += 1
except Exception:
traceback.print_exc()
print("\ttest fail @%d" % (c.line_nr - 1))
tests_fail += 1
fails += 1
if arguments['--pdb']:
import pdb
pdb.post_mortem()
count += 1
file_change(current, count, fails)
print('\nSummary: (%s fails of %s tests) in %.3fs'
% (tests_fail, len(cases), time.time() - t_start))
for s in summary:
print(s)
exit_code = 1 if tests_fail else 0
sys.exit(exit_code)<|fim▁end|> |
TEST_COMPLETIONS = 0
TEST_DEFINITIONS = 1 |
<|file_name|>a5.rs<|end_file_name|><|fim▁begin|>fn main() { // Ciclos loop es un iterador infinito al menos que se ponga "break;"
// uncomment the lines below to see the break statement end the loop when x reaches 5.
// let mut x: i32 = 0;
loop {
println!("Precione Ctrl + C para terminar");
// if x == 5{<|fim▁hole|>// break;
// }else{
// x += 1;
// };
// println!("x es {}",x);
};
}<|fim▁end|> | |
<|file_name|>signrawtransactions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction signing using the signrawtransaction RPC."""
from test_framework.test_framework import IonTestFramework
from test_framework.util import *
class SignRawTransactionsTest(IonTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
def successful_signing_test(self):<|fim▁hole|>
Expected results:
1) The transaction has a complete set of signatures
2) No script verification error occurred"""
privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA']
inputs = [
# Valid pay-to-pubkey scripts
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
{'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0,
'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'},
]
outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
rawTxSigned = self.nodes[0].signrawtransaction(rawTx, inputs, privKeys)
# 1) The transaction has a complete set of signatures
assert 'complete' in rawTxSigned
assert_equal(rawTxSigned['complete'], True)
# 2) No script verification error occurred
assert 'errors' not in rawTxSigned
# Check that signrawtransaction doesn't blow up on garbage merge attempts
dummyTxInconsistent = self.nodes[0].createrawtransaction([inputs[0]], outputs)
rawTxUnsigned = self.nodes[0].signrawtransaction(rawTx + dummyTxInconsistent, inputs)
assert 'complete' in rawTxUnsigned
assert_equal(rawTxUnsigned['complete'], False)
# Check that signrawtransaction properly merges unsigned and signed txn, even with garbage in the middle
rawTxSigned2 = self.nodes[0].signrawtransaction(rawTxUnsigned["hex"] + dummyTxInconsistent + rawTxSigned["hex"], inputs)
assert 'complete' in rawTxSigned2
assert_equal(rawTxSigned2['complete'], True)
assert 'errors' not in rawTxSigned2
def script_verification_error_test(self):
"""Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.
Expected results:
3) The transaction has no complete set of signatures
4) Two script verification errors occurred
5) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error")
6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)"""
privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']
inputs = [
# Valid pay-to-pubkey script
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0},
# Invalid script
{'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7},
# Missing scriptPubKey
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 1},
]
scripts = [
# Valid pay-to-pubkey script
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
# Invalid script
{'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7,
'scriptPubKey': 'badbadbadbad'}
]
outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
# Make sure decoderawtransaction is at least marginally sane
decodedRawTx = self.nodes[0].decoderawtransaction(rawTx)
for i, inp in enumerate(inputs):
assert_equal(decodedRawTx["vin"][i]["txid"], inp["txid"])
assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"])
# Make sure decoderawtransaction throws if there is extra data
assert_raises(JSONRPCException, self.nodes[0].decoderawtransaction, rawTx + "00")
rawTxSigned = self.nodes[0].signrawtransaction(rawTx, scripts, privKeys)
# 3) The transaction has no complete set of signatures
assert 'complete' in rawTxSigned
assert_equal(rawTxSigned['complete'], False)
# 4) Two script verification errors occurred
assert 'errors' in rawTxSigned
assert_equal(len(rawTxSigned['errors']), 2)
# 5) Script verification errors have certain properties
assert 'txid' in rawTxSigned['errors'][0]
assert 'vout' in rawTxSigned['errors'][0]
assert 'witness' in rawTxSigned['errors'][0]
assert 'scriptSig' in rawTxSigned['errors'][0]
assert 'sequence' in rawTxSigned['errors'][0]
assert 'error' in rawTxSigned['errors'][0]
# 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)
assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid'])
assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout'])
assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid'])
assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
assert not rawTxSigned['errors'][0]['witness']
# Now test signing failure for transaction with input witnesses
p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000"
rawTxSigned = self.nodes[0].signrawtransaction(p2wpkh_raw_tx)
# 7) The transaction has no complete set of signatures
assert 'complete' in rawTxSigned
assert_equal(rawTxSigned['complete'], False)
# 8) Two script verification errors occurred
assert 'errors' in rawTxSigned
assert_equal(len(rawTxSigned['errors']), 2)
# 9) Script verification errors have certain properties
assert 'txid' in rawTxSigned['errors'][0]
assert 'vout' in rawTxSigned['errors'][0]
assert 'witness' in rawTxSigned['errors'][0]
assert 'scriptSig' in rawTxSigned['errors'][0]
assert 'sequence' in rawTxSigned['errors'][0]
assert 'error' in rawTxSigned['errors'][0]
# Non-empty witness checked here
assert_equal(rawTxSigned['errors'][1]['witness'], ["304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01", "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"])
assert not rawTxSigned['errors'][0]['witness']
def run_test(self):
self.successful_signing_test()
self.script_verification_error_test()
if __name__ == '__main__':
SignRawTransactionsTest().main()<|fim▁end|> | """Create and sign a valid raw transaction with one input. |
<|file_name|>acl.py<|end_file_name|><|fim▁begin|>import struct
import uuid
from enum import IntEnum
from typing import List, Optional, Set
from .sid import SID
class ACEFlag(IntEnum):
""" ACE type-specific control flags. """
OBJECT_INHERIT = 0x01
CONTAINER_INHERIT = 0x02
NO_PROPAGATE_INHERIT = 0x04
INHERIT_ONLY = 0x08
INHERITED = 0x10
SUCCESSFUL_ACCESS = 0x40
FAILED_ACCESS = 0x80
@property
def short_name(self) -> str:
""" The SDDL short name of the flag. """
short_names = {
"OBJECT_INHERIT": "OI",
"CONTAINER_INHERIT": "CI",
"NO_PROPAGATE_INHERIT": "NP",
"INHERIT_ONLY": "IO",
"INHERITED": "ID",
"SUCCESSFUL_ACCESS": "SA",
"FAILED_ACCESS": "FA",
}
return short_names[self.name]
class ACEType(IntEnum):
""" Type of the ACE. """
ACCESS_ALLOWED = 0
ACCESS_DENIED = 1
SYSTEM_AUDIT = 2
SYSTEM_ALARM = 3
ACCESS_ALLOWED_COMPOUND = 4
ACCESS_ALLOWED_OBJECT = 5
ACCESS_DENIED_OBJECT = 6
SYSTEM_AUDIT_OBJECT = 7
SYSTEM_ALARM_OBJECT = 8
ACCESS_ALLOWED_CALLBACK = 9
ACCESS_DENIED_CALLBACK = 10
ACCESS_ALLOWED_CALLBACK_OBJECT = 11
ACCESS_DENIED_CALLBACK_OBJECT = 12
SYSTEM_AUDIT_CALLBACK = 13
SYSTEM_ALARM_CALLBACK = 14
SYSTEM_AUDIT_CALLBACK_OBJECT = 15
SYSTEM_ALARM_CALLBACK_OBJECT = 16
SYSTEM_MANDATORY_LABEL = 17
SYSTEM_RESOURCE_ATTRIBUTE = 18
SYSTEM_SCOPED_POLICY_ID = 19
@property
def short_name(self) -> str:
""" The SDDL short name of the type. """
short_names = {
"ACCESS_ALLOWED": "A",
"ACCESS_DENIED": "D",
"SYSTEM_AUDIT": "AU",
"SYSTEM_ALARM": "AL",
"ACCESS_ALLOWED_COMPOUND": "",
"ACCESS_ALLOWED_OBJECT": "OA",
"ACCESS_DENIED_OBJECT": "OD",
"SYSTEM_AUDIT_OBJECT": "OU",
"SYSTEM_ALARM_OBJECT": "OL",
"ACCESS_ALLOWED_CALLBACK": "XA",
"ACCESS_DENIED_CALLBACK": "XD",
"ACCESS_ALLOWED_CALLBACK_OBJECT": "ZA",
"ACCESS_DENIED_CALLBACK_OBJECT": "ZD",
"SYSTEM_AUDIT_CALLBACK": "XU",
"SYSTEM_ALARM_CALLBACK": "XL",
"SYSTEM_AUDIT_CALLBACK_OBJECT": "ZU",
"SYSTEM_ALARM_CALLBACK_OBJECT": "ZL",
"SYSTEM_MANDATORY_LABEL": "ML",
"SYSTEM_RESOURCE_ATTRIBUTE": "RA",
"SYSTEM_SCOPED_POLICY_ID": "SP",
}
return short_names[self.name]
@property
def is_object_type(self) -> bool:
""" Flag for ACE types with objects. """
return self in (<|fim▁hole|> ACEType.ACCESS_ALLOWED_OBJECT,
ACEType.ACCESS_DENIED_OBJECT,
ACEType.SYSTEM_AUDIT_OBJECT,
ACEType.SYSTEM_ALARM_OBJECT,
ACEType.ACCESS_ALLOWED_CALLBACK_OBJECT,
ACEType.ACCESS_DENIED_CALLBACK_OBJECT,
ACEType.SYSTEM_AUDIT_CALLBACK_OBJECT,
ACEType.SYSTEM_ALARM_CALLBACK_OBJECT,
)
class ACERight(IntEnum):
""" The rights of the ACE. """
GENERIC_READ = 0x80000000
GENERIC_WRITE = 0x4000000
GENERIC_EXECUTE = 0x20000000
GENERIC_ALL = 0x10000000
MAXIMUM_ALLOWED = 0x02000000
ACCESS_SYSTEM_SECURITY = 0x01000000
SYNCHRONIZE = 0x00100000
WRITE_OWNER = 0x00080000
WRITE_DACL = 0x00040000
READ_CONTROL = 0x00020000
DELETE = 0x00010000
DS_CONTROL_ACCESS = 0x00000100
DS_CREATE_CHILD = 0x00000001
DS_DELETE_CHILD = 0x00000002
ACTRL_DS_LIST = 0x00000004
DS_SELF = 0x00000008
DS_READ_PROP = 0x00000010
DS_WRITE_PROP = 0x00000020
DS_DELETE_TREE = 0x00000040
DS_LIST_OBJECT = 0x00000080
@property
def short_name(self) -> str:
""" The SDDL short name of the access right. """
short_names = {
"GENERIC_READ": "GR",
"GENERIC_WRITE": "GW",
"GENERIC_EXECUTE": "GX",
"GENERIC_ALL": "GA",
"MAXIMUM_ALLOWED": "MA",
"ACCESS_SYSTEM_SECURITY": "AS",
"SYNCHRONIZE": "SY",
"WRITE_OWNER": "WO",
"WRITE_DACL": "WD",
"READ_CONTROL": "RC",
"DELETE": "SD",
"DS_CONTROL_ACCESS": "CR",
"DS_CREATE_CHILD": "CC",
"DS_DELETE_CHILD": "DC",
"ACTRL_DS_LIST": "LC",
"DS_SELF": "SW",
"DS_READ_PROP": "RP",
"DS_WRITE_PROP": "WP",
"DS_DELETE_TREE": "DT",
"DS_LIST_OBJECT": "LO",
}
return short_names[self.name]
class ACLRevision(IntEnum):
""" The ACL revision. """
ACL_REVISION = 0x02
ACL_REVISION_DS = 0x04
class ACE:
"""
    A class representing an access control entry (ACE), which encodes the
    rights afforded to a principal.
:param ACEType ace_type: the type of the ACE.
:param Set[ACEFlag] flags: the set of flags for the ACE.
:param int mask: the access mask to encode the user rights as an int.
:param SID trustee_sid: the SID of the trustee.
:param uuid.UUID|None object_type: a UUID that identifies a property
set, property, extended right, or type of child object.
:param uuid.UUID|None inherited_object_type: a UUID that identifies the
type of child object that can inherit the ACE.
:param bytes application_data: optional application data.
"""
def __init__(
self,
ace_type: ACEType,
flags: Set[ACEFlag],
mask: int,
trustee_sid: SID,
object_type: Optional[uuid.UUID],
inherited_object_type: Optional[uuid.UUID],
application_data: bytes,
) -> None:
self.__type = ace_type
self.__flags = flags
self.__mask = mask
self.__object_type = object_type
self.__inherited_object_type = inherited_object_type
self.__trustee_sid = trustee_sid
self.__application_data = application_data
@classmethod
def from_binary(cls, data: bytes) -> "ACE":
"""
Create an ACE object from a binary blob.
:param bytes data: a little-endian byte ordered byte input.
:returns: A new ACE instance.
:rtype: ACE
:raises TypeError: when the parameter is not bytes.
:raises ValueError: when the input cannot be parsed as an ACE
object.
"""
try:
if not isinstance(data, bytes):
raise TypeError("The `data` parameter must be bytes")
object_type = None
inherited_object_type = None
application_data = None
ace_type, flags, size, mask = struct.unpack("<BBHL", data[:8])
pos = 8
if ACEType(ace_type).is_object_type:
obj_flag = struct.unpack("<I", data[8:12])[0]
pos += 4
if obj_flag & 0x00000001:
object_type = uuid.UUID(bytes_le=data[pos : pos + 16])
pos += 16
if obj_flag & 0x00000002:
inherited_object_type = uuid.UUID(bytes_le=data[pos : pos + 16])
pos += 16
trustee_sid = SID(bytes_le=data[pos:])
pos += trustee_sid.size
application_data = data[pos:size]
this = cls(
ACEType(ace_type),
{flg for flg in ACEFlag if flags & flg},
mask,
trustee_sid,
object_type,
inherited_object_type,
application_data,
)
return this
except struct.error as err:
raise ValueError(f"Not a valid binary ACE, {err}")
def __str__(self):
""" Return the SDDL string representation of the ACE object. """
flags = "".join(
flg.short_name for flg in sorted(self.flags, key=lambda f: f.value)
)
rights = "".join(
rgt.short_name for rgt in sorted(self.rights, key=lambda r: r.value)
)
object_guid = self.object_type if self.object_type else ""
inherit_object_guid = (
self.inherited_object_type if self.inherited_object_type else ""
)
sid = (
self.trustee_sid.sddl_alias
if self.trustee_sid.sddl_alias
else str(self.trustee_sid)
)
return f"({self.type.short_name};{flags};{rights};{object_guid};{inherit_object_guid};{sid})"
def to_binary(self) -> bytes:
"""
Convert ACE object to binary form with little-endian byte order.
:returns: Bytes of the binary ACE instance
:rtype: bytes
"""
size = self.size
data = bytearray(size)
struct.pack_into(
"<BBHL", data, 0, self.type.value, sum(self.flags), size, self.mask
)
pos = 8
if self.type.is_object_type:
obj_flag = 0x00000001 if self.object_type else 0
obj_flag |= 0x00000002 if self.inherited_object_type else 0
struct.pack_into("<L", data, pos, obj_flag)
pos += 4
if self.object_type:
data[pos : pos + 16] = self.object_type.bytes_le
pos += 16
if self.inherited_object_type:
data[pos : pos + 16] = self.inherited_object_type.bytes_le
pos += 16
data[pos : pos + self.trustee_sid.size] = self.trustee_sid.bytes_le
pos += self.trustee_sid.size
        data[pos : pos + len(self.application_data)] = self.application_data
return bytes(data)
@property
def type(self) -> ACEType:
""" The type of the ACE. """
return self.__type
@property
def flags(self) -> Set[ACEFlag]:
""" The flags of the ACE. """
return self.__flags
@property
def size(self) -> int:
""" The binary size of ACE in bytes. """
size = 8
if self.type.is_object_type:
size += 4
if self.object_type:
size += 16
if self.inherited_object_type:
size += 16
size += self.trustee_sid.size
size += len(self.application_data)
return size
@property
def mask(self) -> int:
""" The acces mask """
return self.__mask
@property
def rights(self) -> Set[ACERight]:
""" The set of ACERights based on the access mask."""
return {rgt for rgt in ACERight if self.mask & rgt}
@property
def object_type(self) -> Optional[uuid.UUID]:
""" The uuid of the object type. """
return self.__object_type
@property
def inherited_object_type(self) -> Optional[uuid.UUID]:
""" The uuid of the inherited object type. """
return self.__inherited_object_type
@property
def trustee_sid(self) -> SID:
""" The sid of the trustee. """
return self.__trustee_sid
@property
def application_data(self) -> bytes:
""" The possible application data. """
return self.__application_data
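# A minimal usage sketch (illustrative only, not part of the module). The
# string-based SID construction is an assumption; only ``SID(bytes_le=...)``
# is exercised above.
#   everyone = SID("S-1-1-0")  # hypothetical ctor for the "Everyone" SID
#   ace = ACE(ACEType.ACCESS_ALLOWED, set(), ACERight.GENERIC_READ.value,
#             everyone, None, None, b"")
#   str(ace)  # would yield e.g. "(A;;GR;;;WD)" if the SDDL alias is known
#   ACE.from_binary(ace.to_binary())  # round-trips through the binary form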
class ACL:
"""
The access control list (ACL) is used to specify a list of individual
access control entries (ACEs). An ACL and an array of ACEs comprise a
complete access control list.
:param ACLRevision revision: the revision of the ACL.
:param List[ACE] aces: list of :class:`ACE`.
"""
def __init__(self, revision: ACLRevision, aces: List[ACE]) -> None:
self.__revision = revision
self.__aces = aces
@classmethod
def from_binary(cls, data: bytes) -> "ACL":
"""
Create an ACL object from a binary blob.
:param bytes data: a little-endian byte ordered byte input.
:returns: A new ACL instance.
:rtype: ACL
:raises TypeError: when the parameter is not bytes.
:raises ValueError: when the input cannot be parsed as an ACL
object.
"""
try:
if not isinstance(data, bytes):
raise TypeError("The `data` parameter must be bytes")
# Unwanted values are the reserved sbz1, size and sbz2.
rev, _, _, count, _ = struct.unpack("<BBHHH", data[:8])
pos = 8
aces = []
for _ in range(count):
ace = ACE.from_binary(data[pos:])
aces.append(ace)
pos += ace.size
this = cls(ACLRevision(rev), aces)
return this
except struct.error as err:
raise ValueError(f"Not a valid binary ACL, {err}")
def to_binary(self) -> bytes:
"""
Convert ACL object to binary form with little-endian byte order.
:returns: Bytes of the binary ACL instance
:rtype: bytes
"""
size = self.size
data = bytearray(8)
struct.pack_into("<BBHHH", data, 0, self.revision, 0, size, len(self.aces), 0)
for ace in self.aces:
data.extend(ace.to_binary())
return bytes(data)
@property
def revision(self) -> ACLRevision:
""" The revision of ACL. """
return self.__revision
@property
def size(self) -> int:
""" The binary size in bytes. """
return 8 + sum(ace.size for ace in self.aces)
@property
def aces(self) -> List[ACE]:
""" The list of :class:`ACE` objects. """
return self.__aces<|fim▁end|> | |
<|file_name|>client.js<|end_file_name|><|fim▁begin|>"use strict";
var React = require("react");
var MainView = require("../views/MainView");
var MainElement = document.getElementById("main");
var socket = require("socket.io-client")();
var Store = require("../views/Store");
var omit = require("lodash/object/omit");
socket.on("initialize", function (data) {
Store.league = data.league;
Store.adp = data.adp;
var i = 0,
players = data.players,
total = players.length;
for (; i < total; ++i) {
Store.players[players[i].id] = omit(players[i], ["_id", "id"]);
}
var franchises = data.league.franchises.franchise;
total = franchises.length;
i = 0;
for (; i < total; ++i) {
Store.franchises[franchises[i].id] = omit(franchises[i], ["_id", "id"]);
}
var handleClick = function handleClick(event, callback) {
console.log("deeper click");
var typeStr = event.target.getAttribute("data-target");
socket.emit("client-pull", typeStr, callback);
};
React.render(React.createElement(MainView, { onClick: handleClick }), MainElement);
});
socket.on("data-change", function (data) {
console.log("data-change:", data);<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>test_purchase_request.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2016 Eficent Business and IT Consulting Services S.L.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl-3.0).
from openerp.tests import common
from openerp.tools import SUPERUSER_ID<|fim▁hole|> def setUp(self):
super(TestPurchaseRequest, self).setUp()
self.purchase_request = self.env['purchase.request']
self.purchase_request_line = self.env['purchase.request.line']
def test_purchase_request_status(self):
vals = {
'picking_type_id': self.env.ref('stock.picking_type_in').id,
'requested_by': SUPERUSER_ID,
}
purchase_request = self.purchase_request.create(vals)
vals = {
'request_id': purchase_request.id,
'product_id': self.env.ref('product.product_product_13').id,
'product_uom_id': self.env.ref('product.product_uom_unit').id,
'product_qty': 5.0,
}
self.purchase_request_line.create(vals)
self.assertEqual(
purchase_request.is_editable, True,
'Should be editable')
purchase_request.button_to_approve()
self.assertEqual(
purchase_request.state, 'to_approve',
'Should be in state to_approve')
self.assertEqual(
purchase_request.is_editable, False,
'Should not be editable')
purchase_request.button_draft()
self.assertEqual(
purchase_request.is_editable, True,
'Should be editable')
self.assertEqual(
purchase_request.state, 'draft',
'Should be in state draft')
self.purchase_request_line.unlink()<|fim▁end|> |
class TestPurchaseRequest(common.TransactionCase):
|
<|file_name|>DefaultSeverityFacet.tsx<|end_file_name|><|fim▁begin|>/*
* SonarQube
* Copyright (C) 2009-2022 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import * as React from 'react';
import SeverityHelper from '../../../components/shared/SeverityHelper';
import { SEVERITIES } from '../../../helpers/constants';
import { translate } from '../../../helpers/l10n';
import Facet, { BasicProps } from './Facet';
export default class DefaultSeverityFacet extends React.PureComponent<BasicProps> {
renderName = (severity: string) => <SeverityHelper severity={severity} />;
renderTextName = (severity: string) => translate('severity', severity);
render() {<|fim▁hole|> return (
<Facet
{...this.props}
halfWidth={true}
options={SEVERITIES}
property="severities"
renderName={this.renderName}
renderTextName={this.renderTextName}
/>
);
}
}<|fim▁end|> | |
<|file_name|>prework--numpy-ultraquick-tutorial.py<|end_file_name|><|fim▁begin|>"""
## NumPy UltraQuick Tutorial
[source](https://colab.research.google.com/github/google/eng-edu/blob/main/ml/cc/exercises/numpy_ultraquick_tutorial.ipynb?utm_source=mlcc)
> create/manipulate vectors and matrices
"""
## import module as
import numpy as np
## populate array with specific numbers
### 'np.array' to create NumPy matrix with hand-picked values
one_dim_array = np.array([1.3, 3.7, 4.3, 5.6, 7.9])
print(one_dim_array)
two_dim_array = np.array([[1.3, 3.7], [4.3, 5.6], [6.4, 7.9]])
print(two_dim_array)
### can populate matrix with all zeros or one using 'np.zeros' or 'np.ones'
## populate arrays with number sequences using 'np.arange'
seq_int = np.arange(3, 9)
print(seq_int)
<|fim▁hole|>### 'random' for floats between 0.0 & 1.0
rand_floats_between_0_and_1 = np.random.random([5])
print(rand_floats_between_0_and_1)
## math operations on NumPy operands
### 'broadcasting' is expanding shape of an operand in matrix math operation
### to dimensions compatible for that operation
rand_floats_between_1_and_2 = rand_floats_between_0_and_1 + 1.0
rand_floats_between_100_and_200 = rand_floats_between_1_and_2 * 100.0
"""
Task.1 Create a Linear Dataset
to create a simple dataset consisting single feature and label
* assign int sequence from 6 to 20 to a NumPy array name 'feature'
* assign 15 values to NumPy array named 'label' as: 'label = (3) (feature) + 4'; as first value be '(3) (6) + 4 = 22'
"""
feature = np.arange(6, 21)
print(feature)
label = (feature * 3) + 4.0
print(label)
"""
Task.2 Add some noise to the dataset
to make the dataset realistic, insert random noise into each element of the 'label' array
* modify each value assigned to 'label' by adding a different random float between -2 and +2; do this without 'broadcasting',
instead create a noise array having the same dimension
"""
noise = (np.random.random([15]) * 4) - 2
print(noise)
label = label + noise
print(label)<|fim▁end|> | ## populate arrays with random numbers
### 'randint' for integers
rand_ints_between_10_and_50 = np.random.randint(low=10, high=51, size=(5))
print(rand_ints_between_10_and_50) |
<|file_name|>Nodelink.java<|end_file_name|><|fim▁begin|>/**
Eubrazil Scientific Gateway
Copyright (C) 2015 CMCC
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
**/
package it.cmcc.ophidiaweb.utils.deserialization;
import javax.annotation.Generated;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("org.jsonschema2pojo")
<|fim▁hole|> "description"
})
public class Nodelink {
@JsonProperty("node")
private String node;
@JsonProperty("description")
private String description;
@JsonProperty("node")
public String getNode() {
return node;
}
@JsonProperty("node")
public void setNode(String node) {
this.node = node;
}
@JsonProperty("description")
public String getDescription() {
return description;
}
@JsonProperty("description")
public void setDescription(String description) {
this.description = description;
}
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this);
}
@Override
public int hashCode() {
return HashCodeBuilder.reflectionHashCode(this);
}
@Override
public boolean equals(Object other) {
return EqualsBuilder.reflectionEquals(this, other);
}
}<|fim▁end|> | @JsonPropertyOrder({
"node",
|
<|file_name|>EventTypes.js<|end_file_name|><|fim▁begin|>/* Copyright (C) 2011-2014 Mattias Ekendahl. Used under MIT license, see full details at https://github.com/developedbyme/dbm/blob/master/LICENSE.txt */
dbm.registerClass("dbm.thirdparty.facebook.constants.EventTypes", null, function(objectFunctions, staticFunctions, ClassReference) {
//console.log("dbm.thirdparty.facebook.constants.EventTypes");
//REFERENCE: http://developers.facebook.com/docs/reference/javascript/FB.Event.subscribe/
var EventTypes = dbm.importClass("dbm.thirdparty.facebook.constants.EventTypes");
<|fim▁hole|> staticFunctions.AUTH_PROMPT = "auth.prompt";
staticFunctions.XFBML_RENDER = "xfbml.render";
staticFunctions.EDGE_CREATE = "edge.create";
staticFunctions.EDGE_REMOVE = "edge.remove";
staticFunctions.COMMENT_CREATE = "comment.create";
staticFunctions.COMMENT_REMOVE = "comment.remove";
staticFunctions.MESSAGE_SEND = "message.send";
});<|fim▁end|> | staticFunctions.AUTH_LOGIN = "auth.login";
staticFunctions.AUTH_RESPONSE_CHANGE = "auth.authResponseChange";
staticFunctions.AUTH_STATUS_CHANGE = "auth.statusChange";
staticFunctions.AUTH_LOGOUT = "auth.logout"; |
<|file_name|>MapEvictAllCodec.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* eslint-disable max-len */<|fim▁hole|>
// hex: 0x011F00
const REQUEST_MESSAGE_TYPE = 73472;
// hex: 0x011F01
// RESPONSE_MESSAGE_TYPE = 73473
const REQUEST_INITIAL_FRAME_SIZE = PARTITION_ID_OFFSET + BitsUtil.INT_SIZE_IN_BYTES;
/** @internal */
export class MapEvictAllCodec {
static encodeRequest(name: string): ClientMessage {
const clientMessage = ClientMessage.createForEncode();
clientMessage.setRetryable(false);
const initialFrame = Frame.createInitialFrame(REQUEST_INITIAL_FRAME_SIZE);
clientMessage.addFrame(initialFrame);
clientMessage.setMessageType(REQUEST_MESSAGE_TYPE);
clientMessage.setPartitionId(-1);
StringCodec.encode(clientMessage, name);
return clientMessage;
}
}<|fim▁end|> | import {BitsUtil} from '../util/BitsUtil';
import {ClientMessage, Frame, PARTITION_ID_OFFSET} from '../protocol/ClientMessage';
import {StringCodec} from './builtin/StringCodec'; |
<|file_name|>config.go<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright 2018 Authors of Cilium
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package ipcache
import (
"github.com/cilium/cilium/pkg/logging"
"github.com/cilium/cilium/pkg/logging/logfields"
)
var (
log = logging.DefaultLogger.WithField(logfields.LogSubsys, "ipcache")
)<|fim▁end|> | |
<|file_name|>post.py<|end_file_name|><|fim▁begin|>class Post(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
<|fim▁hole|><|fim▁end|> | def __iter__(self):
return iter(self.__dict__) |
<|file_name|>client.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Google BigQuery API."""
from google.cloud.client import ClientWithProject
from google.cloud.bigquery._http import Connection
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.job import CopyJob
from google.cloud.bigquery.job import ExtractTableToStorageJob
from google.cloud.bigquery.job import LoadTableFromStorageJob
from google.cloud.bigquery.job import QueryJob
from google.cloud.bigquery.query import QueryResults
from google.cloud.iterator import HTTPIterator
class Project(object):
"""Wrapper for resource describing a BigQuery project.
:type project_id: str
:param project_id: Opaque ID of the project
:type numeric_id: int
:param numeric_id: Numeric ID of the project
:type friendly_name: str
:param friendly_name: Display name of the project
"""
def __init__(self, project_id, numeric_id, friendly_name):
self.project_id = project_id
self.numeric_id = numeric_id
self.friendly_name = friendly_name
@classmethod
def from_api_repr(cls, resource):
"""Factory: construct an instance from a resource dict."""
return cls(
resource['id'], resource['numericId'], resource['friendlyName'])
class Client(ClientWithProject):
"""Client to bundle configuration needed for API requests.
:type project: str
:param project: the project which the client acts on behalf of. Will be
passed when creating a dataset / job. If not passed,
falls back to the default inferred from the environment.
:type credentials: :class:`~google.auth.credentials.Credentials`
:param credentials: (Optional) The OAuth2 Credentials to use for this
client. If not passed (and if no ``_http`` object is
passed), falls back to the default inferred from the
environment.
:type _http: :class:`~httplib2.Http`
:param _http: (Optional) HTTP object to make requests. Can be any object
that defines ``request()`` with the same interface as
:meth:`~httplib2.Http.request`. If not passed, an
``_http`` object is created that is bound to the
``credentials`` for the current object.
This parameter should be considered private, and could
change in the future.
"""
SCOPE = ('https://www.googleapis.com/auth/bigquery',
'https://www.googleapis.com/auth/cloud-platform')
"""The scopes required for authenticating as a BigQuery consumer."""
def __init__(self, project=None, credentials=None, _http=None):
super(Client, self).__init__(
project=project, credentials=credentials, _http=_http)
self._connection = Connection(self)
def list_projects(self, max_results=None, page_token=None):
"""List projects for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list
:type max_results: int
:param max_results: maximum number of projects to return, If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of projects. If
not passed, the API will return the first page of
projects.
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterator of :class:`~google.cloud.bigquery.client.Project`
accessible to the current client.
"""
return HTTPIterator(
client=self, path='/projects', item_to_value=_item_to_project,
items_key='projects', page_token=page_token,
max_results=max_results)
def list_datasets(self, include_all=False, max_results=None,
page_token=None):
"""List datasets for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list
:type include_all: bool
:param include_all: True if results include hidden datasets.
:type max_results: int
:param max_results: maximum number of datasets to return, If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of datasets. If
not passed, the API will return the first page of
datasets.
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterator of :class:`~google.cloud.bigquery.dataset.Dataset`.
accessible to the current client.
"""
extra_params = {}
if include_all:
extra_params['all'] = True
path = '/projects/%s/datasets' % (self.project,)
return HTTPIterator(
client=self, path=path, item_to_value=_item_to_dataset,
items_key='datasets', page_token=page_token,
max_results=max_results, extra_params=extra_params)
def dataset(self, dataset_name, project=None):
"""Construct a dataset bound to this client.
:type dataset_name: str
:param dataset_name: Name of the dataset.
:type project: str
:param project: (Optional) project ID for the dataset (defaults to
the project of the client).
:rtype: :class:`google.cloud.bigquery.dataset.Dataset`
:returns: a new ``Dataset`` instance
"""
return Dataset(dataset_name, client=self, project=project)
def job_from_resource(self, resource):
"""Detect correct job type from resource and instantiate.
:type resource: dict
:param resource: one job resource from API response
:rtype: One of:
:class:`google.cloud.bigquery.job.LoadTableFromStorageJob`,
:class:`google.cloud.bigquery.job.CopyJob`,
:class:`google.cloud.bigquery.job.ExtractTableToStorageJob`,
:class:`google.cloud.bigquery.job.QueryJob`,
:class:`google.cloud.bigquery.job.RunSyncQueryJob`
:returns: the job instance, constructed via the resource
"""
config = resource['configuration']
if 'load' in config:
return LoadTableFromStorageJob.from_api_repr(resource, self)<|fim▁hole|> elif 'query' in config:
return QueryJob.from_api_repr(resource, self)
raise ValueError('Cannot parse job resource')
def list_jobs(self, max_results=None, page_token=None, all_users=None,
state_filter=None):
"""List jobs for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list
:type max_results: int
:param max_results: maximum number of jobs to return, If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of jobs. If
not passed, the API will return the first page of
jobs.
:type all_users: bool
:param all_users: if true, include jobs owned by all users in the
project.
:type state_filter: str
:param state_filter: if passed, include only jobs matching the given
state. One of
* ``"done"``
* ``"pending"``
* ``"running"``
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterable of job instances.
"""
extra_params = {'projection': 'full'}
if all_users is not None:
extra_params['allUsers'] = all_users
if state_filter is not None:
extra_params['stateFilter'] = state_filter
path = '/projects/%s/jobs' % (self.project,)
return HTTPIterator(
client=self, path=path, item_to_value=_item_to_job,
items_key='jobs', page_token=page_token,
max_results=max_results, extra_params=extra_params)
def load_table_from_storage(self, job_name, destination, *source_uris):
"""Construct a job for loading data into a table from CloudStorage.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load
:type job_name: str
:param job_name: Name of the job.
:type destination: :class:`google.cloud.bigquery.table.Table`
:param destination: Table into which data is to be loaded.
:type source_uris: sequence of string
:param source_uris: URIs of data files to be loaded; in format
``gs://<bucket_name>/<object_name_or_glob>``.
:rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
:returns: a new ``LoadTableFromStorageJob`` instance
"""
return LoadTableFromStorageJob(job_name, destination, source_uris,
client=self)
def copy_table(self, job_name, destination, *sources):
"""Construct a job for copying one or more tables into another table.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy
:type job_name: str
:param job_name: Name of the job.
:type destination: :class:`google.cloud.bigquery.table.Table`
:param destination: Table into which data is to be copied.
:type sources: sequence of :class:`google.cloud.bigquery.table.Table`
:param sources: tables to be copied.
:rtype: :class:`google.cloud.bigquery.job.CopyJob`
:returns: a new ``CopyJob`` instance
"""
return CopyJob(job_name, destination, sources, client=self)
def extract_table_to_storage(self, job_name, source, *destination_uris):
"""Construct a job for extracting a table into Cloud Storage files.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract
:type job_name: str
:param job_name: Name of the job.
:type source: :class:`google.cloud.bigquery.table.Table`
:param source: table to be extracted.
:type destination_uris: sequence of string
:param destination_uris: URIs of CloudStorage file(s) into which
table data is to be extracted; in format
``gs://<bucket_name>/<object_name_or_glob>``.
:rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
:returns: a new ``ExtractTableToStorageJob`` instance
"""
return ExtractTableToStorageJob(job_name, source, destination_uris,
client=self)
def run_async_query(self, job_name, query,
udf_resources=(), query_parameters=()):
"""Construct a job for running a SQL query asynchronously.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query
:type job_name: str
:param job_name: Name of the job.
:type query: str
:param query: SQL query to be executed
:type udf_resources: tuple
:param udf_resources: An iterable of
:class:`google.cloud.bigquery._helpers.UDFResource`
(empty by default)
:type query_parameters: tuple
:param query_parameters:
An iterable of
:class:`google.cloud.bigquery._helpers.AbstractQueryParameter`
(empty by default)
:rtype: :class:`google.cloud.bigquery.job.QueryJob`
:returns: a new ``QueryJob`` instance
"""
return QueryJob(job_name, query, client=self,
udf_resources=udf_resources,
query_parameters=query_parameters)
def run_sync_query(self, query, udf_resources=(), query_parameters=()):
"""Run a SQL query synchronously.
:type query: str
:param query: SQL query to be executed
:type udf_resources: tuple
:param udf_resources: An iterable of
:class:`google.cloud.bigquery._helpers.UDFResource`
(empty by default)
:type query_parameters: tuple
:param query_parameters:
An iterable of
:class:`google.cloud.bigquery._helpers.AbstractQueryParameter`
(empty by default)
:rtype: :class:`google.cloud.bigquery.query.QueryResults`
:returns: a new ``QueryResults`` instance
"""
return QueryResults(query, client=self,
udf_resources=udf_resources,
query_parameters=query_parameters)
# pylint: disable=unused-argument
def _item_to_project(iterator, resource):
"""Convert a JSON project to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a project.
:rtype: :class:`.Project`
:returns: The next project in the page.
"""
return Project.from_api_repr(resource)
# pylint: enable=unused-argument
def _item_to_dataset(iterator, resource):
"""Convert a JSON dataset to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a dataset.
:rtype: :class:`.Dataset`
:returns: The next dataset in the page.
"""
return Dataset.from_api_repr(resource, iterator.client)
def _item_to_job(iterator, resource):
"""Convert a JSON job to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a job.
:rtype: job instance.
:returns: The next job in the page.
"""
return iterator.client.job_from_resource(resource)<|fim▁end|> | elif 'copy' in config:
return CopyJob.from_api_repr(resource, self)
elif 'extract' in config:
return ExtractTableToStorageJob.from_api_repr(resource, self) |
<|file_name|>mock_test.go<|end_file_name|><|fim▁begin|>package evoli
import "math/rand"
type crosserMock struct {
}
func (c crosserMock) Cross(parent1, parent2 Individual) (child1, child2 Individual, err error) {<|fim▁hole|> return NewIndividual(w*parent1.Fitness() + (1-w)*parent2.Fitness()),
NewIndividual((1-w)*parent1.Fitness() + w*parent2.Fitness()),
nil
}
type evaluaterMock struct {
}
func (e evaluaterMock) Evaluate(individual Individual) (Fitness float64, err error) {
return individual.Fitness(), nil
}
type mutaterMock struct {
}
func (m mutaterMock) Mutate(individual Individual, p float64) (Individual, error) {
return individual, nil
}
type positionerMock struct {
}
func (p positionerMock) Position(indiv, pBest, gBest Individual, c1, c2 float64) (Individual, error) {
return NewIndividual((indiv.Fitness() + pBest.Fitness() + gBest.Fitness()) / 3), nil
}<|fim▁end|> | w := 0.1 + 0.8*rand.Float64() |
<|file_name|>io.rs<|end_file_name|><|fim▁begin|>use core::cmp::PartialEq;
use core::ops::{BitAnd, BitOr, Not};
pub trait Io {
type Value: Copy + PartialEq + BitAnd<Output = Self::Value> + BitOr<Output = Self::Value> + Not<Output = Self::Value>;
fn read(&self) -> Self::Value;
fn write(&mut self, value: Self::Value);
#[inline(always)]
fn readf(&self, flags: Self::Value) -> bool {
(self.read() & flags) as Self::Value == flags
}
#[inline(always)]
fn writef(&mut self, flags: Self::Value, value: bool) {
let tmp: Self::Value = match value {
true => self.read() | flags,
false => self.read() & !flags,
};
self.write(tmp);
}
}
pub struct ReadOnly<I: Io> {
inner: I
}
impl<I: Io> ReadOnly<I> {
pub const fn new(inner: I) -> ReadOnly<I> {
ReadOnly {
inner: inner
}
}
#[inline(always)]
pub fn read(&self) -> I::Value {
self.inner.read()
}
#[inline(always)]
pub fn readf(&self, flags: I::Value) -> bool {
self.inner.readf(flags)
}
}
pub struct WriteOnly<I: Io> {
inner: I
}<|fim▁hole|> WriteOnly {
inner: inner
}
}
#[inline(always)]
pub fn write(&mut self, value: I::Value) {
self.inner.write(value)
}
#[inline(always)]
pub fn writef(&mut self, flags: I::Value, value: bool) {
self.inner.writef(flags, value)
}
}<|fim▁end|> |
impl<I: Io> WriteOnly<I> {
pub const fn new(inner: I) -> WriteOnly<I> { |
<|file_name|>globalize.culture.zh-HK.js<|end_file_name|><|fim▁begin|>/*
* Globalize Culture zh-HK
*
* http://github.com/jquery/globalize
*
* Copyright Software Freedom Conservancy, Inc.
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* This file was generated by the Globalize Culture Generator
* Translation: bugs found in this file need to be fixed in the generator
*/
(function( window, undefined ) {
var Globalize;
if ( typeof require !== "undefined"
&& typeof exports !== "undefined"<|fim▁hole|> && typeof module !== "undefined" ) {
// Assume CommonJS
Globalize = require( "globalize" );
} else {
// Global variable
Globalize = window.Globalize;
}
Globalize.addCultureInfo( "zh-HK", "default", {
name: "zh-HK",
englishName: "Chinese (Traditional, Hong Kong S.A.R.)",
nativeName: "中文(香港特別行政區)",
language: "zh-CHT",
numberFormat: {
NaN: "非數字",
negativeInfinity: "負無窮大",
positiveInfinity: "正無窮大",
percent: {
pattern: ["-n%","n%"]
},
currency: {
symbol: "HK$"
}
},
calendars: {
standard: {
days: {
names: ["星期日","星期一","星期二","星期三","星期四","星期五","星期六"],
namesAbbr: ["週日","週一","週二","週三","週四","週五","週六"],
namesShort: ["日","一","二","三","四","五","六"]
},
months: {
names: ["一月","二月","三月","四月","五月","六月","七月","八月","九月","十月","十一月","十二月",""],
namesAbbr: ["一月","二月","三月","四月","五月","六月","七月","八月","九月","十月","十一月","十二月",""]
},
AM: ["上午","上午","上午"],
PM: ["下午","下午","下午"],
eras: [{"name":"公元","start":null,"offset":0}],
patterns: {
d: "d/M/yyyy",
D: "yyyy'年'M'月'd'日'",
t: "H:mm",
T: "H:mm:ss",
f: "yyyy'年'M'月'd'日' H:mm",
F: "yyyy'年'M'月'd'日' H:mm:ss",
M: "M'月'd'日'",
Y: "yyyy'年'M'月'"
}
}
}
});
}( this ));<|fim▁end|> | |
<|file_name|>parse-complex-macro-invoc-op.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>
// Test parsing binary operators after macro invocations.
// pretty-expanded FIXME #23616
#![feature(macro_rules)]
macro_rules! id {
($e: expr) => { $e }
}
fn foo() {
id!(1) + 1;
id![1] - 1;
id!(1) * 1;
id![1] / 1;
id!(1) % 1;
id!(1) & 1;
id![1] | 1;
id!(1) ^ 1;
let mut x = 1;
id![x] = 2;
id!(x) += 1;
id!(1f64).clone();
id!([1, 2, 3])[1];
id;
id!(true) && true;
id![true] || true;
}
fn main() {}<|fim▁end|> | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms. |
<|file_name|>menus.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Module handling report menus contents"""
from contextlib import contextmanager
import attr
from navmazing import NavigateToAttribute
from widgetastic.widget import Text
from widgetastic_patternfly import Button
from . import CloudIntelReportsView
from . import ReportsMultiBoxSelect
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.utils.appliance.implementations.ui import CFMENavigateStep
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.appliance.implementations.ui import navigator
from widgetastic_manageiq import FolderManager
from widgetastic_manageiq import ManageIQTree
class EditReportMenusView(CloudIntelReportsView):
title = Text("#explorer_title_text")
reports_tree = ManageIQTree("menu_roles_treebox")
# Buttons
save_button = Button("Save")
reset_button = Button("Reset")
default_button = Button("Default")
cancel_button = Button("Cancel")
commit_button = Button("Commit")
discard_button = Button("Discard")
manager = FolderManager(".//div[@id='folder_lists']/table")
report_select = ReportsMultiBoxSelect(
move_into="Move selected reports right",
move_from="Move selected reports left",
available_items="available_reports",
chosen_items="selected_reports"
)
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == 'Editing EVM Group "{}"'.format(self.context["object"].group) and
self.edit_report_menus.is_opened and
self.edit_report_menus.tree.currently_selected == [
"All EVM Groups",
self.context["object"].group
]
)
@attr.s
class ReportMenu(BaseEntity):
"""
This is a fake class mainly needed for navmazing navigation.
"""
group = None
def go_to_group(self, group_name):
self.group = group_name
view = navigate_to(self, "EditReportMenus")
assert view.is_displayed
return view
def get_folders(self, group):
"""Returns list of folders for given user group.
Args:
group: User group to check.
"""
view = self.go_to_group(group)
view.reports_tree.click_path("Top Level")
fields = view.manager.fields
view.discard_button.click()
return fields
def get_subfolders(self, group, folder):
"""Returns list of sub-folders for given user group and folder.
Args:
group: User group to check.
folder: Folder to read.
"""
view = self.go_to_group(group)
view.reports_tree.click_path("Top Level", folder)
fields = view.manager.fields
view.discard_button.click()
return fields
def add_folder(self, group, folder):
"""Adds a folder under top-level.
Args:
group: User group.
folder: Name of the new folder.
"""
        with self.manage_folder(group) as top_level:
top_level.add(folder)
def add_subfolder(self, group, folder, subfolder):
"""Adds a subfolder under specified folder.
Args:
group: User group.
folder: Name of the folder.
            subfolder: Name of the new subfolder.
"""
        with self.manage_folder(group, folder) as fldr:
fldr.add(subfolder)
def reset_to_default(self, group):
"""Clicks the `Default` button.
Args:
group: Group to set to Default
"""
view = self.go_to_group(group)
view.default_button.click()
view.save_button.click()
@contextmanager
def manage_subfolder(self, group, folder, subfolder):
"""Context manager to use when modifying the subfolder contents.
You can use manager's :py:meth:`FolderManager.bail_out` classmethod to end and discard the
changes done inside the with block.
Args:
group: User group.
folder: Parent folder name.
subfolder: Subfolder name to manage.
        Returns: Context-managed :py:class:`widgetastic_manageiq.MultiBoxSelect` instance
"""<|fim▁hole|> try:
yield view.report_select
except FolderManager._BailOut:
view.discard_button.click()
except Exception:
# In case of any exception, nothing will be saved
view.discard_button.click()
raise # And reraise the exception
else:
# If no exception happens, save!
view.commit_button.click()
view.save_button.click()
@contextmanager
def manage_folder(self, group, folder=None):
"""Context manager to use when modifying the folder contents.
You can use manager's :py:meth:`FolderManager.bail_out` classmethod to end and discard the
changes done inside the with block. This context manager does not give the manager as a
value to the with block so you have to import and use the :py:class:`FolderManager` class
manually.
Args:
group: User group.
folder: Which folder to manage. If None, top-level will be managed.
Returns: Context-managed :py:class:`widgetastic_manageiq.FolderManager` instance
"""
view = self.go_to_group(group)
if folder is None:
view.reports_tree.click_path("Top Level")
else:
view.reports_tree.click_path("Top Level", folder)
try:
yield view.manager
except FolderManager._BailOut:
view.manager.discard()
except Exception:
# In case of any exception, nothing will be saved
view.manager.discard()
raise # And reraise the exception
else:
# If no exception happens, save!
view.manager.commit()
view.save_button.click()
@attr.s
class ReportMenusCollection(BaseCollection):
"""Collection object for the :py:class:'cfme.intelligence.reports.ReportMenu'."""
ENTITY = ReportMenu
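# Illustrative usage sketch (names are assumptions, including the collection
# attribute, which depends on how the appliance registers this collection):
#   menus = appliance.collections.intel_report_menus.instantiate()
#   with menus.manage_folder("EvmGroup-administrator") as folder:
#       folder.add("My Folder")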
@navigator.register(ReportMenu)
class EditReportMenus(CFMENavigateStep):
VIEW = EditReportMenusView
prerequisite = NavigateToAttribute("appliance.server", "CloudIntelReports")
def step(self, *args, **kwargs):
self.view.edit_report_menus.tree.click_path(
"All EVM Groups",
self.obj.group
)<|fim▁end|> | view = self.go_to_group(group)
view.reports_tree.click_path("Top Level", folder, subfolder) |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/*jslint browser: true*/
/*global Tangram, gui */
map = (function () {
'use strict';
var locations = {
'Yangon': [16.8313077,96.2187007,7]
};
var map_start_location = locations['Yangon'];
/*** Map ***/
var map = L.map('map',
{"keyboardZoomOffset" : .05, maxZoom: 20 }<|fim▁hole|>
var layer = Tangram.leafletLayer({
scene: 'cinnabar-style-more-labels.yaml?r=2',
attribution: '<a href="https://mapzen.com/tangram" target="_blank">Tangram</a> | © OSM contributors | <a href="https://mapzen.com/" target="_blank">Mapzen</a>'
});
window.layer = layer;
var scene = layer.scene;
window.scene = scene;
// setView expects format ([lat, long], zoom)
map.setView(map_start_location.slice(0, 3), map_start_location[2]);
function long2tile(lon,zoom) { return (Math.floor((lon+180)/360*Math.pow(2,zoom))); }
function lat2tile(lat,zoom) { return (Math.floor((1-Math.log(Math.tan(lat*Math.PI/180) + 1/Math.cos(lat*Math.PI/180))/Math.PI)/2 *Math.pow(2,zoom))); }
/***** Render loop *****/
function addGUI () {
// Link to edit in OSM - hold 'e' and click
}
// Feature selection
function initFeatureSelection () {
// Selection info shown on hover
var selection_info = document.createElement('div');
selection_info.setAttribute('class', 'label');
selection_info.style.display = 'block';
// Show selected feature on hover
scene.container.addEventListener('mousemove', function (event) {
var pixel = { x: event.clientX, y: event.clientY };
scene.getFeatureAt(pixel).then(function(selection) {
if (!selection) {
return;
}
var feature = selection.feature;
if (feature != null) {
// console.log("selection map: " + JSON.stringify(feature));
var label = '';
if (feature.properties.name != null) {
label = feature.properties.name;
}
if (label != '') {
selection_info.style.left = (pixel.x + 5) + 'px';
selection_info.style.top = (pixel.y + 15) + 'px';
selection_info.innerHTML = '<span class="labelInner">' + label + '</span>';
scene.container.appendChild(selection_info);
}
else if (selection_info.parentNode != null) {
selection_info.parentNode.removeChild(selection_info);
}
}
else if (selection_info.parentNode != null) {
selection_info.parentNode.removeChild(selection_info);
}
});
// Don't show labels while panning
if (scene.panning == true) {
if (selection_info.parentNode != null) {
selection_info.parentNode.removeChild(selection_info);
}
}
});
// Show selected feature on hover
scene.container.addEventListener('click', function (event) {
var pixel = { x: event.clientX, y: event.clientY };
scene.getFeatureAt(pixel).then(function(selection) {
if (!selection) {
return;
}
var feature = selection.feature;
if (feature != null) {
// console.log("selection map: " + JSON.stringify(feature));
var label = '';
if (feature.properties != null) {
// console.log(feature.properties);
var obj = JSON.parse(JSON.stringify(feature.properties));
for (var x in feature.properties) {
var val = feature.properties[x]
label += "<span class='labelLine' key="+x+" value="+val+" onclick='setValuesFromSpan(this)'>"+x+" : "+val+"</span><br>"
}
}
if (label != '') {
selection_info.style.left = (pixel.x + 5) + 'px';
selection_info.style.top = (pixel.y + 15) + 'px';
selection_info.innerHTML = '<span class="labelInner">' + label + '</span>';
scene.container.appendChild(selection_info);
}
else if (selection_info.parentNode != null) {
selection_info.parentNode.removeChild(selection_info);
}
}
else if (selection_info.parentNode != null) {
selection_info.parentNode.removeChild(selection_info);
}
});
// Don't show labels while panning
if (scene.panning == true) {
if (selection_info.parentNode != null) {
selection_info.parentNode.removeChild(selection_info);
}
}
});
}
window.addEventListener('load', function () {
// Scene initialized
layer.on('init', function() {
addGUI();
//initFeatureSelection();
});
layer.addTo(map);
});
return map;
}());<|fim▁end|> | ); |
<|file_name|>widgets.py<|end_file_name|><|fim▁begin|>"""
HTML Widget classes
"""
try:
set
except NameError:
from sets import Set as set # Python 2.3 fallback
import copy
from itertools import chain
from django.conf import settings
from django.utils.datastructures import MultiValueDict, MergeDict
from django.utils.html import escape, conditional_escape
from django.utils.translation import ugettext
from django.utils.encoding import StrAndUnicode, force_unicode
from django.utils.safestring import mark_safe
from django.utils import datetime_safe
from datetime import time
from util import flatatt
from urlparse import urljoin
__all__ = (
'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'PasswordInput',
'HiddenInput', 'MultipleHiddenInput',
'FileInput', 'DateTimeInput', 'TimeInput', 'Textarea', 'CheckboxInput',
'Select', 'NullBooleanSelect', 'SelectMultiple', 'RadioSelect',
'CheckboxSelectMultiple', 'MultiWidget',
'SplitDateTimeWidget',
)
MEDIA_TYPES = ('css','js')
class Media(StrAndUnicode):
def __init__(self, media=None, **kwargs):
if media:
media_attrs = media.__dict__
else:
media_attrs = kwargs
self._css = {}
self._js = []
for name in MEDIA_TYPES:
getattr(self, 'add_' + name)(media_attrs.get(name, None))
# Any leftover attributes must be invalid.
# if media_attrs != {}:
# raise TypeError, "'class Media' has invalid attribute(s): %s" % ','.join(media_attrs.keys())
def __unicode__(self):
return self.render()
def render(self):
return mark_safe(u'\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES])))
def render_js(self):
return [u'<script type="text/javascript" src="%s"></script>' % self.absolute_path(path) for path in self._js]
def render_css(self):
# To keep rendering order consistent, we can't just iterate over items().
# We need to sort the keys, and iterate over the sorted list.
media = self._css.keys()
media.sort()
return chain(*[
[u'<link href="%s" type="text/css" media="%s" rel="stylesheet" />' % (self.absolute_path(path), medium)
for path in self._css[medium]]
for medium in media])
def absolute_path(self, path):
if path.startswith(u'http://') or path.startswith(u'https://') or path.startswith(u'/'):
return path
return urljoin(settings.MEDIA_URL,path)
def __getitem__(self, name):
"Returns a Media object that only contains media of the given type"
if name in MEDIA_TYPES:
return Media(**{name: getattr(self, '_' + name)})
raise KeyError('Unknown media type "%s"' % name)
def add_js(self, data):
if data:
self._js.extend([path for path in data if path not in self._js])
def add_css(self, data):
if data:
for medium, paths in data.items():
self._css.setdefault(medium, []).extend([path for path in paths if path not in self._css[medium]])
def __add__(self, other):
combined = Media()
for name in MEDIA_TYPES:
getattr(combined, 'add_' + name)(getattr(self, '_' + name, None))
getattr(combined, 'add_' + name)(getattr(other, '_' + name, None))
return combined
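# Illustrative sketch: adding two Media objects merges their asset lists in
# order while dropping duplicates (the file names below are made up):
#   combined = Media(js=['a.js']) + Media(js=['b.js', 'a.js'])
#   combined._js == ['a.js', 'b.js']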
def media_property(cls):
def _media(self):
# Get the media property of the superclass, if it exists
if hasattr(super(cls, self), 'media'):
base = super(cls, self).media
else:
base = Media()
# Get the media definition for this class
definition = getattr(cls, 'Media', None)
if definition:
extend = getattr(definition, 'extend', True)
if extend:
if extend == True:
m = base
else:
m = Media()
for medium in extend:
m = m + base[medium]
return m + Media(definition)
else:
return Media(definition)
else:
return base
return property(_media)
class MediaDefiningClass(type):
"Metaclass for classes that can have media definitions"
def __new__(cls, name, bases, attrs):
new_class = super(MediaDefiningClass, cls).__new__(cls, name, bases,
attrs)
if 'media' not in attrs:
new_class.media = media_property(new_class)
return new_class
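# Illustrative sketch: widget subclasses declare assets with an inner Media
# class, which the metaclass above exposes as a combined ``media`` property:
#   class CalendarWidget(TextInput):
#       class Media:
#           css = {'all': ('pretty.css',)}
#           js = ('animations.js', 'actions.js')
#   CalendarWidget().media  # renders the <link> and <script> tags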
class Widget(object):
__metaclass__ = MediaDefiningClass
is_hidden = False # Determines whether this corresponds to an <input type="hidden">.
    needs_multipart_form = False # Determines whether this widget needs a multipart-encoded form.
def __init__(self, attrs=None):
if attrs is not None:
self.attrs = attrs.copy()
else:
self.attrs = {}
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
memo[id(self)] = obj
return obj
def render(self, name, value, attrs=None):
"""
Returns this Widget rendered as HTML, as a Unicode string.
The 'value' given is not guaranteed to be valid input, so subclass
implementations should program defensively.
"""
raise NotImplementedError
def build_attrs(self, extra_attrs=None, **kwargs):
"Helper function for building an attribute dictionary."
attrs = dict(self.attrs, **kwargs)
if extra_attrs:
attrs.update(extra_attrs)
return attrs
def value_from_datadict(self, data, files, name):
"""
Given a dictionary of data and this widget's name, returns the value
of this widget. Returns None if it's not provided.
"""
return data.get(name, None)
def _has_changed(self, initial, data):
"""
Return True if data differs from initial.
"""
        # For purposes of seeing whether something has changed, None is
        # the same as an empty string. If the data or initial value we get
        # is None, replace it with u''.
if data is None:
data_value = u''
else:
data_value = data
if initial is None:
initial_value = u''
else:
initial_value = initial
if force_unicode(initial_value) != force_unicode(data_value):
return True
return False
def id_for_label(self, id_):
"""
Returns the HTML ID attribute of this Widget for use by a <label>,
given the ID of the field. Returns None if no ID is available.
This hook is necessary because some widgets have multiple HTML
elements and, thus, multiple IDs. In that case, this method should
return an ID value that corresponds to the first ID in the widget's
tags.
"""
return id_
id_for_label = classmethod(id_for_label)<|fim▁hole|>
class Input(Widget):
"""
Base class for all <input> widgets (except type='checkbox' and
type='radio', which are special).
"""
input_type = None # Subclasses must define this.
def render(self, name, value, attrs=None):
if value is None: value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % flatatt(final_attrs))
class TextInput(Input):
input_type = 'text'
class PasswordInput(Input):
input_type = 'password'
def __init__(self, attrs=None, render_value=True):
super(PasswordInput, self).__init__(attrs)
self.render_value = render_value
def render(self, name, value, attrs=None):
if not self.render_value: value=None
return super(PasswordInput, self).render(name, value, attrs)
class HiddenInput(Input):
input_type = 'hidden'
is_hidden = True
class MultipleHiddenInput(HiddenInput):
"""
A widget that handles <input type="hidden"> for fields that have a list
of values.
"""
def __init__(self, attrs=None, choices=()):
super(MultipleHiddenInput, self).__init__(attrs)
# choices can be any iterable
self.choices = choices
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
return mark_safe(u'\n'.join([(u'<input%s />' %
flatatt(dict(value=force_unicode(v), **final_attrs)))
for v in value]))
def value_from_datadict(self, data, files, name):
if isinstance(data, (MultiValueDict, MergeDict)):
return data.getlist(name)
return data.get(name, None)
class FileInput(Input):
input_type = 'file'
needs_multipart_form = True
def render(self, name, value, attrs=None):
return super(FileInput, self).render(name, None, attrs=attrs)
def value_from_datadict(self, data, files, name):
"File widgets take data from FILES, not POST"
return files.get(name, None)
def _has_changed(self, initial, data):
if data is None:
return False
return True
class Textarea(Widget):
def __init__(self, attrs=None):
# The 'rows' and 'cols' attributes are required for HTML correctness.
self.attrs = {'cols': '40', 'rows': '10'}
if attrs:
self.attrs.update(attrs)
def render(self, name, value, attrs=None):
if value is None: value = ''
value = force_unicode(value)
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(u'<textarea%s>%s</textarea>' % (flatatt(final_attrs),
conditional_escape(force_unicode(value))))
class DateTimeInput(Input):
input_type = 'text'
format = '%Y-%m-%d %H:%M:%S' # '2006-10-25 14:30:59'
def __init__(self, attrs=None, format=None):
super(DateTimeInput, self).__init__(attrs)
if format:
self.format = format
def render(self, name, value, attrs=None):
if value is None:
value = ''
elif hasattr(value, 'strftime'):
value = datetime_safe.new_datetime(value)
value = value.strftime(self.format)
return super(DateTimeInput, self).render(name, value, attrs)
class TimeInput(Input):
input_type = 'text'
def render(self, name, value, attrs=None):
if value is None:
value = ''
elif isinstance(value, time):
value = value.replace(microsecond=0)
return super(TimeInput, self).render(name, value, attrs)
class CheckboxInput(Widget):
def __init__(self, attrs=None, check_test=bool):
super(CheckboxInput, self).__init__(attrs)
# check_test is a callable that takes a value and returns True
# if the checkbox should be checked for that value.
self.check_test = check_test
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs, type='checkbox', name=name)
try:
result = self.check_test(value)
except: # Silently catch exceptions
result = False
if result:
final_attrs['checked'] = 'checked'
if value not in ('', True, False, None):
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % flatatt(final_attrs))
def value_from_datadict(self, data, files, name):
if name not in data:
# A missing value means False because HTML form submission does not
# send results for unselected checkboxes.
return False
return super(CheckboxInput, self).value_from_datadict(data, files, name)
def _has_changed(self, initial, data):
# Sometimes data or initial could be None or u'' which should be the
# same thing as False.
return bool(initial) != bool(data)
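# Illustrative sketch: ``check_test`` lets callers decide when the box is
# rendered checked, e.g. treating only the literal string 'on' as checked:
#   CheckboxInput(check_test=lambda value: value == 'on')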
class Select(Widget):
def __init__(self, attrs=None, choices=()):
super(Select, self).__init__(attrs)
# choices can be any iterable, but we may need to render this widget
# multiple times. Thus, collapse it into a list so it can be consumed
# more than once.
self.choices = list(choices)
def render(self, name, value, attrs=None, choices=()):
if value is None: value = ''
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<select%s>' % flatatt(final_attrs)]
options = self.render_options(choices, [value])
if options:
output.append(options)
output.append('</select>')
return mark_safe(u'\n'.join(output))
def render_options(self, choices, selected_choices):
def render_option(option_value, option_label):
option_value = force_unicode(option_value)
selected_html = (option_value in selected_choices) and u' selected="selected"' or ''
return u'<option value="%s"%s>%s</option>' % (
escape(option_value), selected_html,
conditional_escape(force_unicode(option_label)))
# Normalize to strings.
selected_choices = set([force_unicode(v) for v in selected_choices])
output = []
for option_value, option_label in chain(self.choices, choices):
if isinstance(option_label, (list, tuple)):
output.append(u'<optgroup label="%s">' % escape(force_unicode(option_value)))
for option in option_label:
output.append(render_option(*option))
output.append(u'</optgroup>')
else:
output.append(render_option(option_value, option_label))
return u'\n'.join(output)
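# Illustrative sketch: a (group, choices) pair in ``choices`` renders as an
# <optgroup>, e.g.
#   Select(choices=[('Audio', [('vinyl', 'Vinyl'), ('cd', 'CD')])])
# wraps the two <option> tags in <optgroup label="Audio">...</optgroup>.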
class NullBooleanSelect(Select):
"""
A Select Widget intended to be used with NullBooleanField.
"""
def __init__(self, attrs=None):
choices = ((u'1', ugettext('Unknown')), (u'2', ugettext('Yes')), (u'3', ugettext('No')))
super(NullBooleanSelect, self).__init__(attrs, choices)
def render(self, name, value, attrs=None, choices=()):
try:
value = {True: u'2', False: u'3', u'2': u'2', u'3': u'3'}[value]
except KeyError:
value = u'1'
return super(NullBooleanSelect, self).render(name, value, attrs, choices)
def value_from_datadict(self, data, files, name):
value = data.get(name, None)
return {u'2': True, u'3': False, True: True, False: False}.get(value, None)
def _has_changed(self, initial, data):
# Sometimes data or initial could be None or u'' which should be the
# same thing as False.
return bool(initial) != bool(data)
class SelectMultiple(Select):
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe(u'\n'.join(output))
def value_from_datadict(self, data, files, name):
if isinstance(data, (MultiValueDict, MergeDict)):
return data.getlist(name)
return data.get(name, None)
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
for value1, value2 in zip(initial, data):
if force_unicode(value1) != force_unicode(value2):
return True
return False
class RadioInput(StrAndUnicode):
"""
An object used by RadioFieldRenderer that represents a single
<input type='radio'>.
"""
def __init__(self, name, value, attrs, choice, index):
self.name, self.value = name, value
self.attrs = attrs
self.choice_value = force_unicode(choice[0])
self.choice_label = force_unicode(choice[1])
self.index = index
def __unicode__(self):
if 'id' in self.attrs:
label_for = ' for="%s_%s"' % (self.attrs['id'], self.index)
else:
label_for = ''
choice_label = conditional_escape(force_unicode(self.choice_label))
return mark_safe(u'<label%s>%s %s</label>' % (label_for, self.tag(), choice_label))
def is_checked(self):
return self.value == self.choice_value
def tag(self):
if 'id' in self.attrs:
self.attrs['id'] = '%s_%s' % (self.attrs['id'], self.index)
final_attrs = dict(self.attrs, type='radio', name=self.name, value=self.choice_value)
if self.is_checked():
final_attrs['checked'] = 'checked'
return mark_safe(u'<input%s />' % flatatt(final_attrs))
class RadioFieldRenderer(StrAndUnicode):
"""
An object used by RadioSelect to enable customization of radio widgets.
"""
def __init__(self, name, value, attrs, choices):
self.name, self.value, self.attrs = name, value, attrs
self.choices = choices
def __iter__(self):
for i, choice in enumerate(self.choices):
yield RadioInput(self.name, self.value, self.attrs.copy(), choice, i)
def __getitem__(self, idx):
        choice = self.choices[idx] # Let the IndexError propagate
return RadioInput(self.name, self.value, self.attrs.copy(), choice, idx)
def __unicode__(self):
return self.render()
def render(self):
"""Outputs a <ul> for this set of radio fields."""
return mark_safe(u'<ul>\n%s\n</ul>' % u'\n'.join([u'<li>%s</li>'
% force_unicode(w) for w in self]))
class RadioSelect(Select):
renderer = RadioFieldRenderer
def __init__(self, *args, **kwargs):
# Override the default renderer if we were passed one.
renderer = kwargs.pop('renderer', None)
if renderer:
self.renderer = renderer
super(RadioSelect, self).__init__(*args, **kwargs)
def get_renderer(self, name, value, attrs=None, choices=()):
"""Returns an instance of the renderer."""
if value is None: value = ''
str_value = force_unicode(value) # Normalize to string.
final_attrs = self.build_attrs(attrs)
choices = list(chain(self.choices, choices))
return self.renderer(name, str_value, final_attrs, choices)
def render(self, name, value, attrs=None, choices=()):
return self.get_renderer(name, value, attrs, choices).render()
def id_for_label(self, id_):
# RadioSelect is represented by multiple <input type="radio"> fields,
# each of which has a distinct ID. The IDs are made distinct by a "_X"
# suffix, where X is the zero-based index of the radio field. Thus,
# the label for a RadioSelect should reference the first one ('_0').
if id_:
id_ += '_0'
return id_
id_for_label = classmethod(id_for_label)
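# Illustrative sketch (not part of Django): the `renderer` kwarg above lets
# callers swap the <ul> markup out. The subclass below is an assumed example
# that wraps each radio input in a <div> instead.
class DivRadioFieldRenderer(RadioFieldRenderer):
    def render(self):
        return mark_safe(u'\n'.join([u'<div>%s</div>'
            % force_unicode(w) for w in self]))
# Usage sketch: RadioSelect(renderer=DivRadioFieldRenderer)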
class CheckboxSelectMultiple(SelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<ul>']
# Normalize to strings
str_values = set([force_unicode(v) for v in value])
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = u' for="%s"' % final_attrs['id']
else:
label_for = ''
cb = CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_unicode(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_unicode(option_label))
output.append(u'<li><label%s>%s %s</label></li>' % (label_for, rendered_cb, option_label))
output.append(u'</ul>')
return mark_safe(u'\n'.join(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
id_for_label = classmethod(id_for_label)
class MultiWidget(Widget):
"""
A widget that is composed of multiple widgets.
Its render() method is different than other widgets', because it has to
figure out how to split a single value for display in multiple widgets.
The ``value`` argument can be one of two things:
* A list.
* A normal value (e.g., a string) that has been "compressed" from
a list of values.
In the second case -- i.e., if the value is NOT a list -- render() will
first "decompress" the value into a list before rendering it. It does so by
calling the decompress() method, which MultiWidget subclasses must
implement. This method takes a single "compressed" value and returns a
list.
When render() does its HTML rendering, each value in the list is rendered
with the corresponding widget -- the first value is rendered in the first
widget, the second value is rendered in the second widget, etc.
Subclasses may implement format_output(), which takes the list of rendered
widgets and returns a string of HTML that formats them any way you'd like.
You'll probably want to use this class with MultiValueField.
"""
def __init__(self, widgets, attrs=None):
self.widgets = [isinstance(w, type) and w() or w for w in widgets]
super(MultiWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
# value is a list of values, each corresponding to a widget
# in self.widgets.
if not isinstance(value, list):
value = self.decompress(value)
output = []
final_attrs = self.build_attrs(attrs)
id_ = final_attrs.get('id', None)
for i, widget in enumerate(self.widgets):
try:
widget_value = value[i]
except IndexError:
widget_value = None
if id_:
final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
output.append(widget.render(name + '_%s' % i, widget_value, final_attrs))
return mark_safe(self.format_output(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
id_for_label = classmethod(id_for_label)
def value_from_datadict(self, data, files, name):
return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)]
def _has_changed(self, initial, data):
if initial is None:
initial = [u'' for x in range(0, len(data))]
else:
if not isinstance(initial, list):
initial = self.decompress(initial)
for widget, initial, data in zip(self.widgets, initial, data):
if widget._has_changed(initial, data):
return True
return False
def format_output(self, rendered_widgets):
"""
Given a list of rendered widgets (as strings), returns a Unicode string
representing the HTML for the whole lot.
This hook allows you to format the HTML design of the widgets, if
needed.
"""
return u''.join(rendered_widgets)
def decompress(self, value):
"""
Returns a list of decompressed values for the given compressed value.
The given value can be assumed to be valid, but not necessarily
non-empty.
"""
raise NotImplementedError('Subclasses must implement this method.')
def _get_media(self):
"Media for a multiwidget is the combination of all media of the subwidgets"
media = Media()
for w in self.widgets:
media = media + w.media
return media
media = property(_get_media)
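# Illustrative sketch (not part of Django): a minimal MultiWidget subclass
# demonstrating the decompress() contract from the docstring above. It splits
# a single "first last" string across two TextInputs.
class _ExampleNameWidget(MultiWidget):
    def __init__(self, attrs=None):
        widgets = (TextInput(attrs=attrs), TextInput(attrs=attrs))
        super(_ExampleNameWidget, self).__init__(widgets, attrs)
    def decompress(self, value):
        if value:
            return value.split(u' ', 1)  # "compressed" value -> [first, last]
        return [None, None]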
class SplitDateTimeWidget(MultiWidget):
"""
A Widget that splits datetime input into two <input type="text"> boxes.
"""
def __init__(self, attrs=None):
widgets = (TextInput(attrs=attrs), TextInput(attrs=attrs))
super(SplitDateTimeWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if value:
return [value.date(), value.time().replace(microsecond=0)]
return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
"""
A Widget that splits datetime input into two <input type="hidden"> inputs.
"""
def __init__(self, attrs=None):
widgets = (HiddenInput(attrs=attrs), HiddenInput(attrs=attrs))
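        # Deliberately bypass SplitDateTimeWidget.__init__ (which would build
        # TextInputs) and call MultiWidget.__init__ with the hidden widgets.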
super(SplitDateTimeWidget, self).__init__(widgets, attrs)<|fim▁end|> | |
<|file_name|>TestROIStencil.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# A script to test the vtkROIStencilSource
reader = vtk.vtkPNGReader()
reader.SetDataSpacing(0.8,0.8,1.5)
reader.SetDataOrigin(0.0,0.0,0.0)
reader.SetFileName(VTK_DATA_ROOT + "/Data/fullhead15.png")
shiftScale = vtk.vtkImageShiftScale()
shiftScale.SetInputConnection(reader.GetOutputPort())
shiftScale.SetScale(0.2)
shiftScale.Update()
roiStencil1 = vtk.vtkROIStencilSource()
roiStencil1.SetShapeToEllipsoid()
roiStencil1.SetBounds(20,300,80,150,0,0)
roiStencil1.SetInformationInput(reader.GetOutput())
roiStencil2 = vtk.vtkROIStencilSource()
roiStencil2.SetShapeToCylinderX()
roiStencil2.SetBounds(20,300,80,150,0,0)
roiStencil2.SetInformationInput(reader.GetOutput())
roiStencil3 = vtk.vtkROIStencilSource()
roiStencil3.SetShapeToCylinderZ()
roiStencil3.SetBounds(20,300,80,150,0,0)
roiStencil3.SetInformationInput(reader.GetOutput())
roiStencil4 = vtk.vtkROIStencilSource()
roiStencil4.SetShapeToBox()
roiStencil4.SetBounds(20,300,80,150,0,0)
roiStencil4.SetInformationInput(reader.GetOutput())
stencil1 = vtk.vtkImageStencil()
stencil1.SetInputConnection(reader.GetOutputPort())
stencil1.SetBackgroundInputData(shiftScale.GetOutput())
stencil1.SetStencilConnection(roiStencil1.GetOutputPort())
stencil2 = vtk.vtkImageStencil()
stencil2.SetInputConnection(reader.GetOutputPort())
stencil2.SetBackgroundInputData(shiftScale.GetOutput())
stencil2.SetStencilConnection(roiStencil2.GetOutputPort())
stencil3 = vtk.vtkImageStencil()
stencil3.SetInputConnection(reader.GetOutputPort())
stencil3.SetBackgroundInputData(shiftScale.GetOutput())
stencil3.SetStencilConnection(roiStencil3.GetOutputPort())
stencil4 = vtk.vtkImageStencil()
stencil4.SetInputConnection(reader.GetOutputPort())
stencil4.SetBackgroundInputData(shiftScale.GetOutput())
stencil4.SetStencilConnection(roiStencil4.GetOutputPort())
mapper1 = vtk.vtkImageMapper()
mapper1.SetInputConnection(stencil1.GetOutputPort())
mapper1.SetColorWindow(2000)
mapper1.SetColorLevel(1000)
mapper1.SetZSlice(0)
mapper2 = vtk.vtkImageMapper()
mapper2.SetInputConnection(stencil2.GetOutputPort())
mapper2.SetColorWindow(2000)
mapper2.SetColorLevel(1000)
mapper2.SetZSlice(0)
mapper3 = vtk.vtkImageMapper()
mapper3.SetInputConnection(stencil3.GetOutputPort())
mapper3.SetColorWindow(2000)
mapper3.SetColorLevel(1000)
mapper3.SetZSlice(0)
mapper4 = vtk.vtkImageMapper()
mapper4.SetInputConnection(stencil4.GetOutputPort())
mapper4.SetColorWindow(2000)
mapper4.SetColorLevel(1000)
mapper4.SetZSlice(0)
actor1 = vtk.vtkActor2D()
actor1.SetMapper(mapper1)
actor2 = vtk.vtkActor2D()
actor2.SetMapper(mapper2)
actor3 = vtk.vtkActor2D()
actor3.SetMapper(mapper3)
actor4 = vtk.vtkActor2D()
actor4.SetMapper(mapper4)
imager1 = vtk.vtkRenderer()
imager1.AddActor2D(actor1)
imager1.SetViewport(0.5,0.0,1.0,0.5)
imager2 = vtk.vtkRenderer()
imager2.AddActor2D(actor2)
imager2.SetViewport(0.0,0.0,0.5,0.5)
imager3 = vtk.vtkRenderer()
imager3.AddActor2D(actor3)
imager3.SetViewport(0.5,0.5,1.0,1.0)
imager4 = vtk.vtkRenderer()
imager4.AddActor2D(actor4)
imager4.SetViewport(0.0,0.5,0.5,1.0)
imgWin = vtk.vtkRenderWindow()
imgWin.AddRenderer(imager1)
imgWin.AddRenderer(imager2)
imgWin.AddRenderer(imager3)
<|fim▁hole|># --- end of script --<|fim▁end|> | imgWin.AddRenderer(imager4)
imgWin.SetSize(512,512)
imgWin.Render()
|
<|file_name|>twitter.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import absolute_import
import flask
import auth
import config
import model
import util
from main import app
twitter_config = dict(
access_token_url='https://api.twitter.com/oauth/access_token',
authorize_url='https://api.twitter.com/oauth/authorize',<|fim▁hole|> consumer_key=config.CONFIG_DB.twitter_consumer_key,
consumer_secret=config.CONFIG_DB.twitter_consumer_secret,
request_token_url='https://api.twitter.com/oauth/request_token',
)
twitter = auth.create_oauth_app(twitter_config, 'twitter')
@app.route('/api/auth/callback/twitter/')
def twitter_authorized():
response = twitter.authorized_response()
if response is None:
flask.flash('You denied the request to sign in.')
return flask.redirect(util.get_next_url())
flask.session['oauth_token'] = (
response['oauth_token'],
response['oauth_token_secret'],
)
user_db = retrieve_user_from_twitter(response)
return auth.signin_user_db(user_db)
@twitter.tokengetter
def get_twitter_token():
return flask.session.get('oauth_token')
@app.route('/signin/twitter/')
def signin_twitter():
return auth.signin_oauth(twitter)
def retrieve_user_from_twitter(response):
auth_id = 'twitter_%s' % response['user_id']
user_db = model.User.get_by('auth_ids', auth_id)
return user_db or auth.create_user_db(
auth_id=auth_id,
name=response['screen_name'],
username=response['screen_name'],
)<|fim▁end|> | base_url='https://api.twitter.com/1.1/', |
<|file_name|>decoder.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Seq2seq layer operations for use in neural networks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.util import nest
__all__ = ["Decoder", "dynamic_decode"]
_transpose_batch_time = rnn._transpose_batch_time # pylint: disable=protected-access
@six.add_metaclass(abc.ABCMeta)
class Decoder(object):
"""An RNN Decoder abstract interface object.
Concepts used by this interface:
- `inputs`: (structure of) tensors and TensorArrays that is passed as input to
the RNNCell composing the decoder, at each time step.
- `state`: (structure of) tensors and TensorArrays that is passed to the
RNNCell instance as the state.
- `finished`: boolean tensor telling whether each sequence in the batch is
finished.
- `outputs`: Instance of BasicDecoderOutput. Result of the decoding, at each
time step.
"""
@property
def batch_size(self):
"""The batch size of input values."""
raise NotImplementedError
@property
def output_size(self):
"""A (possibly nested tuple of...) integer[s] or `TensorShape` object[s]."""
raise NotImplementedError
@property
def output_dtype(self):
"""A (possibly nested tuple of...) dtype[s]."""
raise NotImplementedError
@abc.abstractmethod
def initialize(self, name=None):
"""Called before any decoding iterations.
This methods must compute initial input values and initial state.
Args:
name: Name scope for any created operations.
Returns:
`(finished, initial_inputs, initial_state)`: initial values of
'finished' flags, inputs and state.
"""
raise NotImplementedError
@abc.abstractmethod
def step(self, time, inputs, state, name=None):
"""Called per step of decoding (but only once for dynamic decoding).
Args:
time: Scalar `int32` tensor. Current step number.
inputs: RNNCell input (possibly nested tuple of) tensor[s] for this time
step.
state: RNNCell state (possibly nested tuple of) tensor[s] from previous
time step.
name: Name scope for any created operations.
Returns:
`(outputs, next_state, next_inputs, finished)`: `outputs` is an instance
of BasicDecoderOutput, `next_state` is a (structure of) state tensors and
TensorArrays, `next_inputs` is the tensor that should be used as input for
the next step, `finished` is a boolean tensor telling whether the sequence
is complete, for each sequence in the batch.
"""
raise NotImplementedError
def finalize(self, outputs, final_state, sequence_lengths):
raise NotImplementedError
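# Illustrative sketch (assumed names, not part of this module): a concrete
# Decoder typically wires an RNNCell, an embedding and a sampling rule
# together, e.g.
#
#     class GreedyDecoder(Decoder):
#         # batch_size/output_size/output_dtype describe the emitted outputs;
#         # initialize() returns (finished, first_inputs, cell_state);
#         # step() runs the cell once and picks the next input ids.
#         ...
#
# Such an instance is then driven by dynamic_decode() below.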
def _create_zero_outputs(size, dtype, batch_size):
"""Create a zero outputs Tensor structure."""
def _t(s):
return (s if isinstance(s, ops.Tensor) else constant_op.constant(
tensor_shape.TensorShape(s).as_list(),
dtype=dtypes.int32,
name="zero_suffix_shape"))
def _create(s, d):
return array_ops.zeros(
array_ops.concat(
([batch_size], _t(s)), axis=0), dtype=d)
return nest.map_structure(_create, size, dtype)
def dynamic_decode(decoder,
output_time_major=False,
impute_finished=False,
maximum_iterations=None,
parallel_iterations=32,
swap_memory=False,
scope=None):
"""Perform dynamic decoding with `decoder`.
Calls initialize() once and step() repeatedly on the Decoder object.
Args:
decoder: A `Decoder` instance.
output_time_major: Python boolean. Default: `False` (batch major). If
`True`, outputs are returned as time major tensors (this mode is faster).
Otherwise, outputs are returned as batch major tensors (this adds extra
time to the computation).
impute_finished: Python boolean. If `True`, then states for batch
entries which are marked as finished get copied through and the
corresponding outputs get zeroed out. This causes some slowdown at
each time step, but ensures that the final state and outputs have
the correct values and that backprop ignores time steps that were
marked as finished.
maximum_iterations: `int32` scalar, maximum allowed number of decoding
steps. Default is `None` (decode until the decoder is fully done).
parallel_iterations: Argument passed to `tf.while_loop`.
swap_memory: Argument passed to `tf.while_loop`.
scope: Optional variable scope to use.
Returns:
`(final_outputs, final_state, final_sequence_lengths)`.
Raises:
TypeError: if `decoder` is not an instance of `Decoder`.
ValueError: if `maximum_iterations` is provided but is not a scalar.
"""
if not isinstance(decoder, Decoder):
raise TypeError("Expected decoder to be type Decoder, but saw: %s" %
type(decoder))
with variable_scope.variable_scope(scope, "decoder") as varscope:
# Properly cache variable values inside the while_loop
if varscope.caching_device is None:
varscope.set_caching_device(lambda op: op.device)
if maximum_iterations is not None:
maximum_iterations = ops.convert_to_tensor(
maximum_iterations, dtype=dtypes.int32, name="maximum_iterations")
if maximum_iterations.get_shape().ndims != 0:
raise ValueError("maximum_iterations must be a scalar")
initial_finished, initial_inputs, initial_state = decoder.initialize()
zero_outputs = _create_zero_outputs(decoder.output_size,
decoder.output_dtype,
decoder.batch_size)
if maximum_iterations is not None:
initial_finished = math_ops.logical_or(
initial_finished, 0 >= maximum_iterations)
initial_sequence_lengths = array_ops.zeros_like(
initial_finished, dtype=dtypes.int32)
initial_time = constant_op.constant(0, dtype=dtypes.int32)
def _shape(batch_size, from_shape):
if not isinstance(from_shape, tensor_shape.TensorShape):
return tensor_shape.TensorShape(None)
else:
batch_size = tensor_util.constant_value(
ops.convert_to_tensor(
batch_size, name="batch_size"))
return tensor_shape.TensorShape([batch_size]).concatenate(from_shape)
def _create_ta(s, d):
return tensor_array_ops.TensorArray(
dtype=d,
size=0,
dynamic_size=True,
element_shape=_shape(decoder.batch_size, s))
initial_outputs_ta = nest.map_structure(_create_ta, decoder.output_size,
decoder.output_dtype)
def condition(unused_time, unused_outputs_ta, unused_state, unused_inputs,
finished, unused_sequence_lengths):
return math_ops.logical_not(math_ops.reduce_all(finished))
def body(time, outputs_ta, state, inputs, finished, sequence_lengths):
"""Internal while_loop body.
Args:
time: scalar int32 tensor.
outputs_ta: structure of TensorArray.
state: (structure of) state tensors and TensorArrays.
inputs: (structure of) input tensors.
finished: bool tensor (keeping track of what's finished).
sequence_lengths: int32 tensor (keeping track of time of finish).
Returns:
`(time + 1, outputs_ta, next_state, next_inputs, next_finished,
next_sequence_lengths)`.
"""
(next_outputs, decoder_state, next_inputs,
decoder_finished) = decoder.step(time, inputs, state)
next_finished = math_ops.logical_or(decoder_finished, finished)
if maximum_iterations is not None:
next_finished = math_ops.logical_or(
next_finished, time + 1 >= maximum_iterations)
next_sequence_lengths = array_ops.where(
math_ops.logical_and(math_ops.logical_not(finished), next_finished),
array_ops.fill(array_ops.shape(sequence_lengths), time + 1),
sequence_lengths)
nest.assert_same_structure(state, decoder_state)
nest.assert_same_structure(outputs_ta, next_outputs)
nest.assert_same_structure(inputs, next_inputs)
# Zero out output values past finish
if impute_finished:
emit = nest.map_structure(
lambda out, zero: array_ops.where(finished, zero, out),
next_outputs,
zero_outputs)
else:
emit = next_outputs
# Copy through states past finish
def _maybe_copy_state(new, cur):
# TensorArrays and scalar states get passed through.
if isinstance(cur, tensor_array_ops.TensorArray):
pass_through = True
else:
new.set_shape(cur.shape)<|fim▁hole|> next_state = nest.map_structure(
_maybe_copy_state, decoder_state, state)
else:
next_state = decoder_state
outputs_ta = nest.map_structure(lambda ta, out: ta.write(time, out),
outputs_ta, emit)
return (time + 1, outputs_ta, next_state, next_inputs, next_finished,
next_sequence_lengths)
res = control_flow_ops.while_loop(
condition,
body,
loop_vars=[
initial_time, initial_outputs_ta, initial_state, initial_inputs,
initial_finished, initial_sequence_lengths,
],
parallel_iterations=parallel_iterations,
swap_memory=swap_memory)
final_outputs_ta = res[1]
final_state = res[2]
final_sequence_lengths = res[5]
final_outputs = nest.map_structure(lambda ta: ta.stack(), final_outputs_ta)
try:
final_outputs, final_state = decoder.finalize(
final_outputs, final_state, final_sequence_lengths)
except NotImplementedError:
pass
if not output_time_major:
final_outputs = nest.map_structure(_transpose_batch_time, final_outputs)
return final_outputs, final_state, final_sequence_lengths<|fim▁end|> | pass_through = (new.shape.ndims == 0)
return new if pass_through else array_ops.where(finished, cur, new)
if impute_finished: |
<|file_name|>RootContainer.js<|end_file_name|><|fim▁begin|>// @flow
import React, { Component } from 'react'
import { View, StatusBar } from 'react-native'
import NavigationRouter from '../Navigation/NavigationRouter'
import { connect } from 'react-redux'
import StartupActions from '../Redux/StartupRedux'
import ReduxPersist from '../Config/ReduxPersist'
// Styles
import styles from './Styles/RootContainerStyle'
class RootContainer extends Component {
componentDidMount () {
// if redux persist is not active fire startup action
if (!ReduxPersist.active) {
this.props.startup()
}
}
render () {
return (
<View style={styles.applicationView}>
<StatusBar barStyle='light-content' />
<NavigationRouter />
</View>
)<|fim▁hole|>}
const mapStateToDispatch = (dispatch) => ({
startup: () => dispatch(StartupActions.startup())
})
export default connect(null, mapStateToDispatch)(RootContainer)<|fim▁end|> | } |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Generated by Django 1.11.5 on 2017-09-19 16:34
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Beer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField()),
],
),
migrations.CreateModel(
name='Brewery',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField()),
('location', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Style',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Venue',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField()),
('venue_type', models.CharField(choices=[('bar', 'Bar'), ('brew', 'Brewery'), ('truck', 'Food Truck')], max_length=5)),
('beers', models.ManyToManyField(related_name='venues', to='tracker.Beer')),
],
),
migrations.AddField(
model_name='beer',
name='brewery',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tracker.Brewery'),
),
migrations.AddField(
model_name='beer',
name='style',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tracker.Style'),
),
]<|fim▁end|> | # -*- coding: utf-8 -*- |
<|file_name|>LockingFileTools.java<|end_file_name|><|fim▁begin|>package net.sourceforge.seqware.common.util.filetools.lock;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.channels.FileLock;
import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* LockingFileTools class.
* </p>
*
* @author boconnor
* @version $Id: $Id
*/
public class LockingFileTools {
private static final Logger LOGGER = LoggerFactory.getLogger(LockingFileTools.class);
private static final int RETRIES = 100;
public static boolean lockAndAppend(File file, String output) {
return lockAndWrite(file, output, true);
}<|fim▁hole|>
/**
* Try to acquire lock. If we can, write the String to file and then release the lock
*
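     * <pre>
     * // usage sketch: append one line to a shared file, retrying on contention
     * LockingFileTools.lockAndWrite(new File("status.txt"), "done\n", true);
     * </pre>
     *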
* @param file
* a {@link java.io.File} object.
* @param output
* a {@link java.lang.String} object.
* @param append
* @return a boolean.
*/
public static boolean lockAndWrite(File file, String output, boolean append) {
for (int i = 0; i < RETRIES; i++) {
try {
try (FileOutputStream fos = new FileOutputStream(file, append)) {
FileLock fl = fos.getChannel().tryLock();
if (fl != null) {
try (OutputStreamWriter fw = new OutputStreamWriter(fos, StandardCharsets.UTF_8)) {
fw.append(output);
fl.release();
}
// LOGGER.info("LockingFileTools.lockAndWrite Locked, appended, and released for file: "+file.getAbsolutePath()+" value: "+output);
return true;
} else {
LOGGER.info("LockingFileTools.lockAndWrite Can't get lock for " + file.getAbsolutePath() + " try number " + i + " of " + RETRIES);
// sleep for 2 seconds before trying again
Thread.sleep(2000);
}
}
} catch (IOException | InterruptedException e) {
LOGGER.error("LockingFileTools.lockAndWrite Attempt " + i + " Exception with LockingFileTools: " + e.getMessage(), e);
}
}
LOGGER.error("LockingFileTools.lockAndWrite Unable to get lock for " + file.getAbsolutePath() + " gave up after " + RETRIES + " tries");
return false;
}
}<|fim▁end|> | |
<|file_name|>refs.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use std::ops::Deref;
use serde::ser::{Serialize, Serializer};
use crate::types::*;
macro_rules! integer_id_impls {
($name: ident) => {
impl $name {
pub fn new(inner: Integer) -> Self {
$name(inner)
}
}
impl ::std::fmt::Display for $name {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
self.0.fmt(f)
}
}
impl From<Integer> for $name {
fn from(inner: Integer) -> Self {
$name::new(inner)
}
}
impl From<$name> for Integer {
fn from(from: $name) -> Self {
from.0
}
}
impl<'de> ::serde::de::Deserialize<'de> for $name {
fn deserialize<D>(deserializer: D) -> Result<$name, D::Error>
where
D: ::serde::de::Deserializer<'de>,
{
let inner = ::serde::de::Deserialize::deserialize(deserializer)?;
Ok($name::new(inner))
}
}
impl ::serde::ser::Serialize for $name {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ::serde::ser::Serializer,
{
serializer.serialize_i64(self.0)
}
}
};
}
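// Usage sketch: a single `integer_id_impls!(MessageId)` invocation (see the
// id types below) derives Display, the From<Integer>/Into<Integer>
// conversions and the serde impls for the newtype in one stroke.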
/// Get source `ChatId` from the type reference.
pub trait ToSourceChat {
fn to_source_chat(&self) -> ChatId;
}
impl<S> ToSourceChat for S
where
S: Deref,
S::Target: ToSourceChat,
{
fn to_source_chat(&self) -> ChatId {
self.deref().to_source_chat()
}
}
impl ToSourceChat for Message {
fn to_source_chat(&self) -> ChatId {
self.chat.id()
}
}
impl ToSourceChat for ChannelPost {
fn to_source_chat(&self) -> ChatId {
self.chat.id.into()
}
}
impl ToSourceChat for MessageOrChannelPost {
fn to_source_chat(&self) -> ChatId {
match self {
&MessageOrChannelPost::Message(ref message) => message.to_source_chat(),
&MessageOrChannelPost::ChannelPost(ref channel_post) => channel_post.to_source_chat(),
}
}
}
/// Unique identifier for the target chat or username of the
/// target channel (in the format @channelusername)
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ChatRef {
Id(ChatId),
#[doc(hidden)]
ChannelUsername(String),
}
impl ChatRef {
pub fn from_chat_id(chat_id: ChatId) -> ChatRef {
ChatRef::Id(chat_id)
}
}
/// Get `ChatRef` from the type reference.
pub trait ToChatRef {
fn to_chat_ref(&self) -> ChatRef;
}
impl<S> ToChatRef for S
where
S: Deref,
S::Target: ToChatRef,
{
fn to_chat_ref(&self) -> ChatRef {
self.deref().to_chat_ref()
}
}
impl ToChatRef for ChatRef {
fn to_chat_ref(&self) -> ChatRef {
self.clone()
}
}
impl ToChatRef for Chat {
fn to_chat_ref(&self) -> ChatRef {
self.id().to_chat_ref()
}
}
impl ToChatRef for MessageChat {
fn to_chat_ref(&self) -> ChatRef {
self.id().to_chat_ref()
}
}
impl ToChatRef for ChatMember {
fn to_chat_ref(&self) -> ChatRef {
self.user.to_chat_ref()
}
}
impl ToChatRef for ForwardFrom {
fn to_chat_ref(&self) -> ChatRef {
match *self {
ForwardFrom::User { ref user, .. } => user.to_chat_ref(),
ForwardFrom::Channel { ref channel, .. } => channel.to_chat_ref(),
ForwardFrom::ChannelHiddenUser { ref sender_name } => {
ChatRef::ChannelUsername(sender_name.clone())
}
}
}
}
impl ToChatRef for Forward {
fn to_chat_ref(&self) -> ChatRef {
self.from.to_chat_ref()
}
}
impl Serialize for ChatRef {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match *self {
ChatRef::Id(id) => serializer.serialize_i64(id.into()),
ChatRef::ChannelUsername(ref username) => serializer.serialize_str(&username),
}
}
}
impl fmt::Display for ChatRef {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ChatRef::Id(id) => write!(f, "{}", id),
ChatRef::ChannelUsername(ref username) => write!(f, "{}", username),
}
}
}
macro_rules! chat_id_impls {
($id: ident) => {
integer_id_impls!($id);
impl ToChatRef for $id {
fn to_chat_ref(&self) -> ChatRef {
ChatRef::from_chat_id((*self).into())
}
}
};
}
macro_rules! specific_chat_id_impls {
($id: ident, $typ: ident) => {
chat_id_impls!($id);
impl From<$id> for ChatId {
fn from(c: $id) -> Self {
ChatId::new(c.into())
}
}
impl ToChatRef for $typ {
fn to_chat_ref(&self) -> ChatRef {
self.id.to_chat_ref()
}
}
};
}
/// Get `UserId` from the type reference.
pub trait ToUserId {
fn to_user_id(&self) -> UserId;
}
impl<S> ToUserId for S
where
S: Deref,
S::Target: ToUserId,
{
fn to_user_id(&self) -> UserId {
self.deref().to_user_id()
}
}
impl ToUserId for UserId {
fn to_user_id(&self) -> UserId {
*self
}
}
impl ToUserId for ChatMember {
fn to_user_id(&self) -> UserId {
self.user.id
}
}
impl ToUserId for User {
fn to_user_id(&self) -> UserId {
self.id
}
}
/// Unique user identifier.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct UserId(Integer);
specific_chat_id_impls!(UserId, User);
/// Unique group identifier.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct GroupId(Integer);
specific_chat_id_impls!(GroupId, Group);
/// Unique supergroup identifier.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SupergroupId(Integer);
specific_chat_id_impls!(SupergroupId, Supergroup);
/// Unique channel identifier.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ChannelId(Integer);
specific_chat_id_impls!(ChannelId, Channel);
/// Unique chat identifier.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ChatId(Integer);
chat_id_impls!(ChatId);
/// Get `MessageId` from the type reference.
pub trait ToMessageId {
fn to_message_id(&self) -> MessageId;
}
impl<S> ToMessageId for S
where
S: Deref,
S::Target: ToMessageId,
{
fn to_message_id(&self) -> MessageId {
self.deref().to_message_id()
}
}
impl ToMessageId for MessageId {
fn to_message_id(&self) -> MessageId {
*self
}
}
impl ToMessageId for Message {
fn to_message_id(&self) -> MessageId {
self.id
}
}
impl ToMessageId for ChannelPost {
fn to_message_id(&self) -> MessageId {
self.id
}
}
impl ToMessageId for MessageOrChannelPost {
fn to_message_id(&self) -> MessageId {
match self {
&MessageOrChannelPost::Message(ref message) => message.to_message_id(),
&MessageOrChannelPost::ChannelPost(ref channel_post) => channel_post.to_message_id(),
}
}
}
/// Unique message identifier inside a chat.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MessageId(Integer);
integer_id_impls!(MessageId);
/// Get `FileRef` from the type reference.
pub trait ToFileRef {
fn to_file_ref(&self) -> FileRef;
}
impl<S> ToFileRef for S
where
S: Deref,
S::Target: ToFileRef,
{
fn to_file_ref(&self) -> FileRef {
self.deref().to_file_ref()
}
}
macro_rules! file_id_impls {
($name: ident) => {
impl ToFileRef for $name {
fn to_file_ref(&self) -> FileRef {
self.file_id.clone().into()
}
}
};
}
file_id_impls!(PhotoSize);
file_id_impls!(Audio);
file_id_impls!(Document);
file_id_impls!(Sticker);
file_id_impls!(Video);
file_id_impls!(Voice);
file_id_impls!(VideoNote);
/// Unique file identifier reference.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileRef {
pub(crate) inner: String,
}
impl<'a> From<&'a str> for FileRef {
fn from(s: &'a str) -> Self {
FileRef {
inner: s.to_string(),
}
}
}
impl From<String> for FileRef {
    fn from(s: String) -> Self {
        FileRef { inner: s }
}
}
impl Serialize for FileRef {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&self.inner)
}
}
/// Get `CallbackQueryId` from the type reference.
pub trait ToCallbackQueryId {
fn to_callback_query_id(&self) -> CallbackQueryId;
}
impl<S> ToCallbackQueryId for S
where
S: Deref,
S::Target: ToCallbackQueryId,
{
fn to_callback_query_id(&self) -> CallbackQueryId {
self.deref().to_callback_query_id()
}
}
impl ToCallbackQueryId for CallbackQuery {
fn to_callback_query_id(&self) -> CallbackQueryId {
self.id.clone()
}<|fim▁hole|>/// Unique identifier for CallbackQuery.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct CallbackQueryId(String);
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct InlineQueryId(String);<|fim▁end|> | }
|
<|file_name|>outlines.js<|end_file_name|><|fim▁begin|>/**
* libjass
*
* https://github.com/Arnavion/libjass
*
* Copyright 2013 Arnav Singh
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
define(["intern!tdd", "require", "tests/support/test-page"], function (tdd, require, TestPage) {
tdd.suite("Outlines", function () {
tdd.test("Basic", function () {
var testPage = new TestPage(this.remote, require.toUrl("tests/support/browser-test-page.html"), "/tests/functional/outlines/outlines.ass", 1280, 720);
return testPage
.prepare()
.then(function (testPage) { return testPage.seekAndCompareScreenshot(0.5, require.toUrl("./outlines-1.png")); })
.then(function (testPage) { return testPage.seekAndCompareScreenshot(1.5, require.toUrl("./outlines-2.png")); })
.then(function (testPage) { return testPage.seekAndCompareScreenshot(2.5, require.toUrl("./outlines-3.png")); })
.then(function (testPage) { return testPage.seekAndCompareScreenshot(3.5, require.toUrl("./outlines-4.png")); })
.then(function (testPage) { return testPage.done(); });<|fim▁hole|> });
});
});<|fim▁end|> | |
<|file_name|>RegexParser.py<|end_file_name|><|fim▁begin|>from NfaBuilder import NfaBuilder
def enum(**enums):
return type('Enum', (), enums)
nodeTypes = enum(CHARACTER = 1, STAR = 2, QUESTION = 3, ALTER = 4, CONCAT = 5, PLUS = 6)
specialTransitions = enum(EPSILON = -1)
<|fim▁hole|>
class ParseNode():
"""
Represents a node in the resulted binary tree
"""
def __init__(self, nodetype, data, left, right):
self.nodeType = nodetype
self.data = data #Actual input character found in tree leaves
self.left = left
self.right = right
class RegexParser():
"""
Parses the given regular expression into a binary tree of ParseNodes
"""
def __init__(self):
self.builder = NfaBuilder()
def getCharacter(self):
"""
Returns:
a ParseNode with input character, a leaf node
"""
return ParseNode(nodeTypes.CHARACTER, self.scanner.pop(), 0, 0)
def getAtomicNode(self):
"""
Either parses a part of regex inside parenthesis, or a single input character
Returns:
The parsed part of regex (string)
"""
atomicNode = None
if self.scanner.peek() == '(':
self.scanner.pop()
atomicNode = self.getExpressionNode()
if self.scanner.pop() != ')':
raise ParseError('Expected )')
else:
atomicNode = self.getCharacter()
return atomicNode
def getRepetitionNode(self):
"""
Returns:
a repeating node, or atomic node if no repetition found
"""
atomicNode = self.getAtomicNode()
if self.scanner.peek() == '*':
self.scanner.pop()
return ParseNode(nodeTypes.STAR, 0, atomicNode, 0)
elif self.scanner.peek() == '?':
self.scanner.pop()
return ParseNode(nodeTypes.QUESTION, 0, atomicNode, 0)
elif self.scanner.peek() == '+':
self.scanner.pop()
return ParseNode(nodeTypes.PLUS, 0, atomicNode, 0)
else:
return atomicNode
def getConcatNode(self):
"""
Returns:
a concat node
"""
left = self.getRepetitionNode()
if self.scanner.peek() == '.':
self.scanner.pop()
right = self.getConcatNode()
return ParseNode(nodeTypes.CONCAT, 0, left, right)
else:
return left
def getExpressionNode(self):
"""
Returns:
            an expression (alternation) node; this is the grammar's top-level rule
"""
left = self.getConcatNode()
if self.scanner.peek() == '|':
self.scanner.pop()
right = self.getExpressionNode()
return ParseNode(nodeTypes.ALTER, 0, left, right)
else:
return left
def prepare(self, inputData):
"""
Adds concat dots to the input regex. This helps parsing
Args:
* inputData (string): The regex in which the dots are added to
Returns:
The modified regex
"""
output = []
for c in range(0, len(inputData) - 1):
currentSymbol = inputData[c]
output.append(currentSymbol)
nextSymbol = inputData[c+1]
if ((currentSymbol.isalnum() or currentSymbol in [')', '*', '?','+']) and
nextSymbol not in [')', '|', '*', '?','+']):
output.append('.')
output.append(inputData[len(inputData)-1])
self.scanner = RegexScanner(output)
return output
def printNode(self, node, offset):
if not node:
return
if node.nodeType == nodeTypes.CHARACTER:
print node.data.rjust(offset)
elif node.nodeType == nodeTypes.ALTER:
print '|'.rjust(offset)
elif node.nodeType == nodeTypes.CONCAT:
print '.'.rjust(offset)
elif node.nodeType == nodeTypes.QUESTION:
print '?'.rjust(offset)
elif node.nodeType == nodeTypes.STAR:
print '*'.rjust(offset)
elif node.nodeType == nodeTypes.PLUS:
print '+'.rjust(offset)
else:
print ''.rjust(offset)
self.printNode(node.left, offset - 4)
self.printNode(node.right, offset + 4)
def getNfaFromParseTree(self, tree):
"""
Generates a nfa from regex which has been parsed to a tree
"""
if tree.nodeType == nodeTypes.CHARACTER:
return self.builder.getSingleInputNfa(tree.data)
elif tree.nodeType == nodeTypes.ALTER:
return self.builder.getAlterNfa(self.getNfaFromParseTree(tree.left),
self.getNfaFromParseTree(tree.right))
elif tree.nodeType == nodeTypes.CONCAT:
return self.builder.getConcatNfa(self.getNfaFromParseTree(tree.left),
self.getNfaFromParseTree(tree.right))
elif tree.nodeType == nodeTypes.STAR:
return self.builder.getStarNfa(self.getNfaFromParseTree(tree.left))
elif tree.nodeType == nodeTypes.PLUS:
return self.builder.getPlusNfa(self.getNfaFromParseTree(tree.left))
elif tree.nodeType == nodeTypes.QUESTION:
return self.builder.getAlterNfa(self.getNfaFromParseTree(tree.left),
self.builder.getSingleInputNfa(specialTransitions.EPSILON))
else:
return
def regexToNfa(self, regex):
"""
Constructs a NFA from Regex
Args:
* regex (string): NFA is constructed from this regex
Returns:
NFA created from the parse tree using :func:`getNfaFromParseTree`
"""
if not regex:
raise ParseError('Empty expression')
self.prepare(regex)
self.treeRoot = self.getExpressionNode()
return self.getNfaFromParseTree(self.treeRoot)
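# Usage sketch (illustrative; NfaBuilder supplies the get*Nfa factories
# imported at the top of this module):
#
#     parser = RegexParser()
#     nfa = parser.regexToNfa("a(b|c)*")
#     parser.printNode(parser.treeRoot, 40)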
class RegexScanner():
def __init__(self, inputData):
self.data = inputData
self.next = 0
def peek(self):
"""
Peeks character from regex string
Returns:
char or 0 if no characters are available
"""
return self.data[self.next] if self.next < len(self.data) else 0
def pop(self):
"""
Pops character from regex string
Returns:
Popped character
"""
nextChar = self.peek()
if self.next < len(self.data):
self.next += 1
return nextChar
def current_position(self):
"""
Returns:
int
"""
return self.next<|fim▁end|> | class ParseError( Exception ): pass
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::io;<|fim▁hole|>fn read_input() -> io::Result<()> {
let mut input = String::new();
io::stdin().read_line(&mut input)?;
println!("You typed: {}", input.trim());
Ok(())
}
fn main() {
read_input().unwrap();
}<|fim▁end|> | |
<|file_name|>Sphere.js<|end_file_name|><|fim▁begin|>/**
* @module og/bv/Sphere
*/
'use strict';
import { Vec3 } from '../math/Vec3.js';
/**
<|fim▁hole|> * @param {og.Vec3} [center] - Bounding sphere coordiantes.
*/
class Sphere {
constructor(radius, center) {
/**
* Sphere radius.
* @public
* @type {Number}
*/
this.radius = radius || 0;
/**
* Sphere coordiantes.
* @public
* @type {og.Vec3}
*/
this.center = center ? center.clone() : new Vec3();
}
/**
* Sets bounding sphere coordinates by the bounds array.
* @param {Array.<number>} bounds - Bounds is an array where [minX, minY, minZ, maxX, maxY, maxZ]
*/
setFromBounds(bounds) {
let m = new Vec3(bounds[0], bounds[1], bounds[2]);
this.center.set(m.x + (bounds[3] - m.x) * 0.5, m.y + (bounds[3] - m.y) * 0.5, m.z + (bounds[5] - m.z) * 0.5);
this.radius = this.center.distance(m);
}
/**
* Sets bounding sphere coordiantes by ellipsoid geodetic extend.
* @param {og.Ellipsoid} ellipsoid - Ellipsoid.
* @param {og.Extent} extent - Geodetic extent.
*/
setFromExtent(ellipsoid, extent) {
this.setFromBounds(extent.getCartesianBounds(ellipsoid));
}
};
export { Sphere };<|fim▁end|> | * Bounding sphere class.
* @class
* @param {Number} [radius] - Bounding sphere radius.
|
<|file_name|>client_go_adapter.go<|end_file_name|><|fim▁begin|>/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package metrics
import (
"net/url"
"time"
"github.com/prometheus/client_golang/prometheus"
reflectormetrics "k8s.io/client-go/tools/cache"
clientmetrics "k8s.io/client-go/tools/metrics"
)
// this file contains setup logic to initialize the myriad of places
// that client-go registers metrics. We copy the names and formats
// from Kubernetes so that we match the core controllers.
var (
// client metrics
requestLatency = prometheus.NewHistogramVec(
prometheus.HistogramOpts{
Name: "rest_client_request_latency_seconds",
Help: "Request latency in seconds. Broken down by verb and URL.",
Buckets: prometheus.ExponentialBuckets(0.001, 2, 10),
},
[]string{"verb", "url"},
)
requestResult = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "rest_client_requests_total",
Help: "Number of HTTP requests, partitioned by status code, method, and host.",
},
[]string{"code", "method", "host"},
)
// reflector metrics
// TODO(directxman12): update these to be histograms once the metrics overhaul KEP
// PRs start landing.
reflectorSubsystem = "reflector"
listsTotal = prometheus.NewCounterVec(prometheus.CounterOpts{
Subsystem: reflectorSubsystem,
Name: "lists_total",
Help: "Total number of API lists done by the reflectors",
}, []string{"name"})
listsDuration = prometheus.NewSummaryVec(prometheus.SummaryOpts{
Subsystem: reflectorSubsystem,
Name: "list_duration_seconds",
Help: "How long an API list takes to return and decode for the reflectors",
}, []string{"name"})
itemsPerList = prometheus.NewSummaryVec(prometheus.SummaryOpts{
Subsystem: reflectorSubsystem,
Name: "items_per_list",
Help: "How many items an API list returns to the reflectors",
}, []string{"name"})
watchesTotal = prometheus.NewCounterVec(prometheus.CounterOpts{
Subsystem: reflectorSubsystem,
Name: "watches_total",
Help: "Total number of API watches done by the reflectors",
}, []string{"name"})
shortWatchesTotal = prometheus.NewCounterVec(prometheus.CounterOpts{
Subsystem: reflectorSubsystem,
Name: "short_watches_total",
Help: "Total number of short API watches done by the reflectors",
}, []string{"name"})
watchDuration = prometheus.NewSummaryVec(prometheus.SummaryOpts{
Subsystem: reflectorSubsystem,
Name: "watch_duration_seconds",
Help: "How long an API watch takes to return and decode for the reflectors",
}, []string{"name"})
itemsPerWatch = prometheus.NewSummaryVec(prometheus.SummaryOpts{
Subsystem: reflectorSubsystem,
Name: "items_per_watch",
Help: "How many items an API watch returns to the reflectors",
}, []string{"name"})
lastResourceVersion = prometheus.NewGaugeVec(prometheus.GaugeOpts{
Subsystem: reflectorSubsystem,
Name: "last_resource_version",
Help: "Last resource version seen for the reflectors",
}, []string{"name"})
)
func init() {
registerClientMetrics()
registerReflectorMetrics()
}
// registerClientMetrics sets up the client latency metrics from client-go
func registerClientMetrics() {
// register the metrics with our registry
Registry.MustRegister(requestLatency)
Registry.MustRegister(requestResult)
// register the metrics with client-go
clientmetrics.Register(&latencyAdapter{metric: requestLatency}, &resultAdapter{metric: requestResult})
}
// registerReflectorMetrics sets up reflector (reconcile) loop metrics
func registerReflectorMetrics() {
Registry.MustRegister(listsTotal)
Registry.MustRegister(listsDuration)
Registry.MustRegister(itemsPerList)
Registry.MustRegister(watchesTotal)
Registry.MustRegister(shortWatchesTotal)
Registry.MustRegister(watchDuration)
Registry.MustRegister(itemsPerWatch)
Registry.MustRegister(lastResourceVersion)
reflectormetrics.SetReflectorMetricsProvider(reflectorMetricsProvider{})
}
// this section contains adapters, implementations, and other sundry organic, artisanally
// hand-crafted syntax trees required to convince client-go that it actually wants to let
// someone use its metrics.
// Client metrics adapters (method #1 for client-go metrics),
// copied (more-or-less directly) from k8s.io/kubernetes setup code
// (which isn't anywhere in an easily-importable place).
type latencyAdapter struct {
metric *prometheus.HistogramVec
}
func (l *latencyAdapter) Observe(verb string, u url.URL, latency time.Duration) {
l.metric.WithLabelValues(verb, u.String()).Observe(latency.Seconds())
}
type resultAdapter struct {
metric *prometheus.CounterVec
}
func (r *resultAdapter) Increment(code, method, host string) {
r.metric.WithLabelValues(code, method, host).Inc()
}
// Reflector metrics provider (method #2 for client-go metrics),
// copied (more-or-less directly) from k8s.io/kubernetes setup code
// (which isn't anywhere in an easily-importable place).
type reflectorMetricsProvider struct{}
func (reflectorMetricsProvider) NewListsMetric(name string) reflectormetrics.CounterMetric {
return listsTotal.WithLabelValues(name)
}
<|fim▁hole|>func (reflectorMetricsProvider) NewListDurationMetric(name string) reflectormetrics.SummaryMetric {
return listsDuration.WithLabelValues(name)
}
func (reflectorMetricsProvider) NewItemsInListMetric(name string) reflectormetrics.SummaryMetric {
return itemsPerList.WithLabelValues(name)
}
func (reflectorMetricsProvider) NewWatchesMetric(name string) reflectormetrics.CounterMetric {
return watchesTotal.WithLabelValues(name)
}
func (reflectorMetricsProvider) NewShortWatchesMetric(name string) reflectormetrics.CounterMetric {
return shortWatchesTotal.WithLabelValues(name)
}
func (reflectorMetricsProvider) NewWatchDurationMetric(name string) reflectormetrics.SummaryMetric {
return watchDuration.WithLabelValues(name)
}
func (reflectorMetricsProvider) NewItemsInWatchMetric(name string) reflectormetrics.SummaryMetric {
return itemsPerWatch.WithLabelValues(name)
}
func (reflectorMetricsProvider) NewLastResourceVersionMetric(name string) reflectormetrics.GaugeMetric {
return lastResourceVersion.WithLabelValues(name)
}<|fim▁end|> | |
<|file_name|>rebuildSchemata.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
@file rebuildSchemata.py
@author Michael Behrisch
@date 2011-07-11
@version $Id: rebuildSchemata.py 11671 2012-01-07 20:14:30Z behrisch $
Let all SUMO binarie write the schema for their config
SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/
Copyright (C) 2011-2012 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""<|fim▁hole|> subprocess.call([os.path.join(binDir, exe), "--save-schema", os.path.join(homeDir, "docs", "internet", "xsd" , exe+"Configuration.xsd")])<|fim▁end|> | import os, sys, subprocess
homeDir = os.environ.get("SUMO_HOME", os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
binDir = os.environ.get("SUMO_BINDIR", os.path.join(homeDir, "bin"))
for exe in "activitygen dfrouter duarouter jtrrouter netconvert netgen od2trips polyconvert sumo".split(): |
<|file_name|>lc069-sqrtx.py<|end_file_name|><|fim▁begin|># coding=utf-8
import unittest
"""69. Sqrt(x)
https://leetcode.com/problems/sqrtx/description/
Implement `int sqrt(int x)`.
Compute and return the square root of _x_ , where _x_ is guaranteed to be a
non-negative integer.
Since the return type is an integer, the decimal digits are truncated and only
the integer part of the result is returned.
**Example 1:**
**Input:** 4
**Output:** 2
**Example 2:**
**Input:** 8
**Output:** 2
**Explanation:** The square root of 8 is 2.82842..., and since
the decimal part is truncated, 2 is returned.
Similar Questions:
Pow(x, n) (powx-n)
Valid Perfect Square (valid-perfect-square)
"""
class Solution(object):
def mySqrt(self, x):
"""
:type x: int
:rtype: int
"""
    def test(self):
        assert self.mySqrt(0) == 0
        assert self.mySqrt(4) == 2
        assert self.mySqrt(8) == 2
<|fim▁hole|>if __name__ == "__main__":
unittest.main()<|fim▁end|> | |
<|file_name|>testtermopi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"""
title : testtermopi.py
description : This program runs the termopi.py
: Displays the status of the resources (cpu load and memory usage) consumed by a Raspberry Pi
computer and the resources consumed by one or more containers instantiated in the Pi.
source :
author : Carlos Molina-Jimenez ([email protected])
date : 27 Mar 2017
institution : Computer Laboratory, University of Cambridge
version : 1.0
usage :
notes :
compile and run : % python termopi.py
: It imports pidict.py, dockerctl.py and picheck.py which are found in
: ./modules.
: You need to include "./modules" in the PYTHONPATH environment variable to
: indicate python where to find the pidict.py, dockerctl.py and picheck.py.
: For example, in a bash shell, you need to include the following lines
: in your .bash_profile file located in you home directory (you can see it with
: (# ls -la).
:
: PYTHONPATH="./modules"
: export PYTHONPATH
python_version : Python 2.7.12
====================================================
"""
from modules.tools.termopi import termopi # class with dictionary data structure
# Threshold of cpu exhaustion
cpuUsageThreshold= 50
cpuLoadThreshold= 3
termo= termopi()
termo.prt_pi_resources()<|fim▁hole|><|fim▁end|> | termo.create_jsonfile_with_pi_status()
#termo.check_pi_resource_status(cpuUsageThreshold) |
<|file_name|>FormLabel.js<|end_file_name|><|fim▁begin|>var React = require('react/addons');
var blacklist = require('blacklist');
var classNames = require('classnames');
module.exports = React.createClass({<|fim▁hole|> id: React.PropTypes.string,
style: React.PropTypes.object,
verticalAlign: React.PropTypes.oneOf(['baseline', 'bottom', 'inherit', 'initial', 'middle', 'sub', 'super', 'text-bottom', 'text-top', 'top'])
},
render() {
// classes
var className = classNames('form-label', this.props.className);
// props
var props = blacklist(this.props, 'htmlFor', 'id', 'className', 'style');
// style
var style;
if (this.props.verticalAlign) {
style = {
verticalAlign: this.props.verticalAlign
};
}
return (
<label className={className} htmlFor={this.props.htmlFor || this.props.id} style={style || this.props.style} {...props}>
{this.props.children}
</label>
);
}
});<|fim▁end|> | displayName: 'FormLabel',
propTypes: {
className: React.PropTypes.string,
htmlFor: React.PropTypes.string, |
<|file_name|>mesos_monitor.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from magnum.common import urlfetch
from magnum.conductor.monitors import MonitorBase
class MesosMonitor(MonitorBase):
def __init__(self, context, bay):
super(MesosMonitor, self).__init__(context, bay)
self.data = {}
@property
def metrics_spec(self):
return {<|fim▁hole|> 'memory_util': {
'unit': '%',
'func': 'compute_memory_util',
},
'cpu_util': {
'unit': '%',
'func': 'compute_cpu_util',
},
}
def _build_url(self, url, protocol='http', port='80', path='/'):
return protocol + '://' + url + ':' + port + path
def _is_leader(self, state):
return state['leader'] == state['pid']
def pull_data(self):
self.data['mem_total'] = 0
self.data['mem_used'] = 0
self.data['cpu_total'] = 0
self.data['cpu_used'] = 0
for master_addr in self.bay.master_addresses:
mesos_master_url = self._build_url(master_addr, port='5050',
path='/state')
master = jsonutils.loads(urlfetch.get(mesos_master_url))
if self._is_leader(master):
for slave in master['slaves']:
self.data['mem_total'] += slave['resources']['mem']
self.data['mem_used'] += slave['used_resources']['mem']
self.data['cpu_total'] += slave['resources']['cpus']
self.data['cpu_used'] += slave['used_resources']['cpus']
break
def compute_memory_util(self):
if self.data['mem_total'] == 0 or self.data['mem_used'] == 0:
return 0
else:
return self.data['mem_used'] * 100 / self.data['mem_total']
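# e.g. mem_used=512, mem_total=2048 -> 25 (percent; added worked example)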
def compute_cpu_util(self):
if self.data['cpu_used'] == 0:
return 0
else:
return self.data['cpu_used'] * 100 / self.data['cpu_total']<|fim▁end|> | |
<|file_name|>transport_internal_test.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// White-box tests for transport.go (in package req instead of an external _test package).
package req
import (
"bytes"
"context"
"crypto/tls"
"errors"
"github.com/imroc/req/v3/internal/testcert"
"io"
"net"
"net/http"
"strings"
"testing"
)
func withT(r *http.Request, t *testing.T) *http.Request {
return r.WithContext(context.WithValue(r.Context(), tLogKey{}, t.Logf))
}
// Issue 15446: incorrect wrapping of errors when server closes an idle connection.
func TestTransportPersistConnReadLoopEOF(t *testing.T) {
ln := newLocalListener(t)
defer ln.Close()
connc := make(chan net.Conn, 1)
go func() {
defer close(connc)
c, err := ln.Accept()
if err != nil {
t.Error(err)
return
}
connc <- c
}()
tr := new(Transport)
req, _ := http.NewRequest("GET", "http://"+ln.Addr().String(), nil)
req = withT(req, t)
treq := &transportRequest{Request: req}
cm := connectMethod{targetScheme: "http", targetAddr: ln.Addr().String()}
pc, err := tr.getConn(treq, cm)
if err != nil {
t.Fatal(err)
}
defer pc.close(errors.New("test over"))
conn := <-connc
if conn == nil {
// Already called t.Error in the accept goroutine.
return
}
conn.Close() // simulate the server hanging up on the client
_, err = pc.roundTrip(treq)
if !isTransportReadFromServerError(err) && err != errServerClosedIdle {
t.Errorf("roundTrip = %#v, %v; want errServerClosedIdle or transportReadFromServerError", err, err)
}
<-pc.closech
err = pc.closed
if !isTransportReadFromServerError(err) && err != errServerClosedIdle {
t.Errorf("pc.closed = %#v, %v; want errServerClosedIdle or transportReadFromServerError", err, err)
}
}
func isTransportReadFromServerError(err error) bool {
_, ok := err.(transportReadFromServerError)
return ok
}
func newLocalListener(t *testing.T) net.Listener {
ln, err := net.Listen("tcp", "127.0.0.1:0")
if err != nil {
ln, err = net.Listen("tcp6", "[::1]:0")
}
if err != nil {
t.Fatal(err)
}
return ln
}
func dummyRequest(method string) *http.Request {
req, err := http.NewRequest(method, "http://fake.tld/", nil)
if err != nil {
panic(err)
}
return req
}
func dummyRequestWithBody(method string) *http.Request {
req, err := http.NewRequest(method, "http://fake.tld/", strings.NewReader("foo"))
if err != nil {
panic(err)
}
return req
}
func dummyRequestWithBodyNoGetBody(method string) *http.Request {
req := dummyRequestWithBody(method)
req.GetBody = nil
return req
}
// issue22091Error acts like a golang.org/x/net/http2.ErrNoCachedConn.
type issue22091Error struct{}
func (issue22091Error) IsHTTP2NoCachedConnError() {}
func (issue22091Error) Error() string { return "issue22091Error" }
func TestTransportShouldRetryRequest(t *testing.T) {
tests := []struct {
pc *persistConn
req *http.Request
err error
want bool
}{
0: {
pc: &persistConn{reused: false},
req: dummyRequest("POST"),
err: nothingWrittenError{},
want: false,
},
1: {
pc: &persistConn{reused: true},
req: dummyRequest("POST"),
err: nothingWrittenError{},
want: true,
},
2: {
pc: &persistConn{reused: true},
req: dummyRequest("POST"),
err: http2ErrNoCachedConn,
want: true,
},
3: {
pc: nil,
req: nil,
err: issue22091Error{}, // like an external http2ErrNoCachedConn
want: true,
},
4: {
pc: &persistConn{reused: true},
req: dummyRequest("POST"),
err: errMissingHost,
want: false,
},
5: {
pc: &persistConn{reused: true},
req: dummyRequest("POST"),
err: transportReadFromServerError{},
want: false,
},
6: {
pc: &persistConn{reused: true},
req: dummyRequest("GET"),
err: transportReadFromServerError{},
want: true,
},
7: {
pc: &persistConn{reused: true},
req: dummyRequest("GET"),
err: errServerClosedIdle,
want: true,
},
8: {
pc: &persistConn{reused: true},
req: dummyRequestWithBody("POST"),
err: nothingWrittenError{},
want: true,
},
9: {
pc: &persistConn{reused: true},
req: dummyRequestWithBodyNoGetBody("POST"),
err: nothingWrittenError{},
want: false,
},
}
for i, tt := range tests {
got := tt.pc.shouldRetryRequest(tt.req, tt.err)
if got != tt.want {
t.Errorf("%d. shouldRetryRequest = %v; want %v", i, got, tt.want)
}
}
}
<|fim▁hole|>
func (f roundTripFunc) RoundTrip(r *http.Request) (*http.Response, error) {
return f(r)
}
// Issue 25009
func TestTransportBodyAltRewind(t *testing.T) {
cert, err := tls.X509KeyPair(testcert.LocalhostCert, testcert.LocalhostKey)
if err != nil {
t.Fatal(err)
}
ln := newLocalListener(t)
defer ln.Close()
go func() {
tln := tls.NewListener(ln, &tls.Config{
NextProtos: []string{"foo"},
Certificates: []tls.Certificate{cert},
})
for i := 0; i < 2; i++ {
sc, err := tln.Accept()
if err != nil {
t.Error(err)
return
}
if err := sc.(TLSConn).Handshake(); err != nil {
t.Error(err)
return
}
sc.Close()
}
}()
addr := ln.Addr().String()
req, _ := http.NewRequest("POST", "https://example.org/", bytes.NewBufferString("request"))
roundTripped := false
tr := &Transport{
DisableKeepAlives: true,
TLSNextProto: map[string]func(string, TLSConn) http.RoundTripper{
"foo": func(authority string, c TLSConn) http.RoundTripper {
return roundTripFunc(func(r *http.Request) (*http.Response, error) {
n, _ := io.Copy(io.Discard, r.Body)
if n == 0 {
t.Error("body length is zero")
}
if roundTripped {
return &http.Response{
Body: NoBody,
StatusCode: 200,
}, nil
}
roundTripped = true
return nil, http2noCachedConnError{}
})
},
},
DialTLSContext: func(_ context.Context, _, _ string) (net.Conn, error) {
tc, err := tls.Dial("tcp", addr, &tls.Config{
InsecureSkipVerify: true,
NextProtos: []string{"foo"},
})
if err != nil {
return nil, err
}
if err := tc.Handshake(); err != nil {
return nil, err
}
return tc, nil
},
}
c := &http.Client{Transport: tr}
_, err = c.Do(req)
if err != nil {
t.Error(err)
}
}<|fim▁end|> | type roundTripFunc func(r *http.Request) (*http.Response, error) |
<|file_name|>withOneTarget_after.py<|end_file_name|><|fim▁begin|><|fim▁hole|> print(42)<|fim▁end|> | def main():
with open('file.txt'): |
<|file_name|>source_map.rs<|end_file_name|><|fim▁begin|>use std::slice::Iter;
use crate::base::pos::Line;
use crate::base::symbol::Symbol;
use crate::base::types::ArcType;
use crate::types::VmIndex;
#[derive(Debug, Default, Eq, PartialEq, Clone, Hash)]
#[cfg_attr(feature = "serde_derive", derive(Deserialize, Serialize))]
pub struct SourceMap {
/// The index of the first instruction for each line
map: Vec<(usize, Line)>,
}
impl SourceMap {
pub fn new() -> SourceMap {
SourceMap { map: Vec::new() }
}
/// Defines the instruction at `instruction_index` to be at `current_line`.
/// This function must be called with indexes in increasing order
pub fn emit(&mut self, instruction_index: usize, current_line: Line) {
let last_emitted_line = self.map.last().map(|&(_, x)| x);
if last_emitted_line != Some(current_line) {
self.map.push((instruction_index, current_line));
}
}
pub fn close(&mut self, instruction_index: usize, current_line: Option<Line>) {
// Push one final item to indicate the end of the function
if let Some(current_line) = current_line.or_else(|| self.map.last().map(|t| t.1)) {
self.map.push((instruction_index, current_line));
}
}
/// Returns the line where the instruction at `instruction_index` were defined
pub fn line(&self, instruction_index: usize) -> Option<Line> {
// The line for `instruction_index` is at the last index still larger than
// the index in `map`
let p = self
.map
.iter()
.position(|&(index, _)| index > instruction_index)
.unwrap_or(self.map.len());
if p == 0
|| (p == self.map.len()
&& instruction_index >= self.map.last().expect("Empty source_map").0)
{
// instruction_index is not valid in the function
None
} else {
Some(self.map[p - 1].1)
}
}
}
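// Added sketch (not from the original crate) of how `emit`, `close` and
// `line` pair up: after emit(0, line 1), emit(4, line 2) and close(8, None),
// line(5) resolves to line 2, while line(9) (past the end) resolves to None.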
#[derive(Debug, Eq, PartialEq, Clone, Hash)]
#[cfg_attr(feature = "serde_derive", derive(DeserializeState, SerializeState))]
#[cfg_attr(
feature = "serde_derive",
serde(
deserialize_state = "crate::serialization::DeSeed<'gc>",
de_parameters = "'gc"
)
)]
#[cfg_attr(
feature = "serde_derive",
serde(serialize_state = "crate::serialization::SeSeed")
)]
pub struct Local {
start: usize,
end: usize,
pub index: VmIndex,
#[cfg_attr(
feature = "serde_derive",
serde(state_with = "crate::serialization::symbol")
)]
pub name: Symbol,
#[cfg_attr(
feature = "serde_derive",
serde(state_with = "crate::serialization::borrow")
)]
pub typ: ArcType,
}
#[derive(Debug, Default, Eq, PartialEq, Clone, Hash)]<|fim▁hole|> feature = "serde_derive",
serde(
deserialize_state = "crate::serialization::DeSeed<'gc>",
de_parameters = "'gc"
)
)]
#[cfg_attr(
feature = "serde_derive",
serde(serialize_state = "crate::serialization::SeSeed")
)]
pub struct LocalMap {
// Instruction indexes marking [start, end) where the local variable `Symbol` exists
#[cfg_attr(feature = "serde_derive", serde(state))]
map: Vec<Local>,
}
impl LocalMap {
pub fn new() -> LocalMap {
LocalMap { map: Vec::new() }
}
/// Emits a local which is available starting from `instruction_index`. The end of each local's
/// scope must be defined by calling `close`
pub fn emit(&mut self, instruction_index: usize, index: VmIndex, name: Symbol, typ: ArcType) {
self.map.push(Local {
start: instruction_index,
end: instruction_index,
index: index,
name: name,
typ: typ,
});
}
/// `close` marks the end of a variable's span and should be called for each variable inserted with
/// `emit` but in reverse order
pub fn close(&mut self, instruction_index: usize) {
if let Some(local) = self.map.iter_mut().rev().find(|t| t.start == t.end) {
local.end = instruction_index;
}
}
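// Added sketch: emit(0, ..) followed by close(10) leaves that local visible
// for instruction indexes in the half-open range [0, 10).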
/// Returns an iterator over the variables in scope at `instruction_index`
pub fn locals(&self, instruction_index: usize) -> LocalIter {
LocalIter {
locals: self.map.iter(),
instruction_index: instruction_index,
}
}
}
pub struct LocalIter<'a> {
locals: Iter<'a, Local>,
instruction_index: usize,
}
impl<'a> LocalIter<'a> {
pub fn empty() -> LocalIter<'a> {
LocalIter {
locals: [].iter(),
instruction_index: 0,
}
}
}
impl<'a> Iterator for LocalIter<'a> {
type Item = &'a Local;
fn next(&mut self) -> Option<&'a Local> {
while let Some(local) = self.locals.next() {
if local.start <= self.instruction_index && self.instruction_index < local.end {
return Some(local);
}
}
None
}
}<|fim▁end|> | #[cfg_attr(feature = "serde_derive", derive(DeserializeState, SerializeState))]
#[cfg_attr( |
<|file_name|>redundant_semicolon.rs<|end_file_name|><|fim▁begin|>use crate::{EarlyContext, EarlyLintPass, LintContext};
use rustc_ast::{Block, StmtKind};
use rustc_errors::Applicability;
use rustc_span::Span;
declare_lint! {
/// The `redundant_semicolons` lint detects unnecessary trailing
/// semicolons.
///
/// ### Example
///
/// ```rust
/// let _ = 123;;
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Extra semicolons are not needed, and may be removed to avoid confusion
/// and visual clutter.
pub REDUNDANT_SEMICOLONS,
Warn,
"detects unnecessary trailing semicolons"
}
declare_lint_pass!(RedundantSemicolons => [REDUNDANT_SEMICOLONS]);
impl EarlyLintPass for RedundantSemicolons {
fn check_block(&mut self, cx: &EarlyContext<'_>, block: &Block) {
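// `seq` tracks the span of the current run of empty (`;`) statements and
// whether the run contains more than one semicolon (added explanatory comment).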
let mut seq = None;
for stmt in block.stmts.iter() {
match (&stmt.kind, &mut seq) {
(StmtKind::Empty, None) => seq = Some((stmt.span, false)),
(StmtKind::Empty, Some(seq)) => *seq = (seq.0.to(stmt.span), true),
(_, seq) => maybe_lint_redundant_semis(cx, seq),
}
}
maybe_lint_redundant_semis(cx, &mut seq);
}
}
fn maybe_lint_redundant_semis(cx: &EarlyContext<'_>, seq: &mut Option<(Span, bool)>) {
if let Some((span, multiple)) = seq.take() {
// FIXME: Find a better way of ignoring the trailing<|fim▁hole|> // semicolon from macro expansion
if span == rustc_span::DUMMY_SP {
return;
}
cx.struct_span_lint(REDUNDANT_SEMICOLONS, span, |lint| {
let (msg, rem) = if multiple {
("unnecessary trailing semicolons", "remove these semicolons")
} else {
("unnecessary trailing semicolon", "remove this semicolon")
};
lint.build(msg)
.span_suggestion(span, rem, String::new(), Applicability::MaybeIncorrect)
.emit();
});
}
}<|fim▁end|> | |
<|file_name|>test_createkilldye.py<|end_file_name|><|fim▁begin|># ------------------------------------------------------------
# Developping with MicroPython in an async way
#
# ------------------------------------------------------------
# === asyncio tests ===
# ------------------------------------------------------------
print("==== /test/asyncio/test_createkilldye.py")
import logging
log = logging.getlogger("test")
logs = logging.getlogger("scheduler")
logs.setLevel(logging.TRACE)
logging.setGlobal(logging.DEBUG)
loge = logging.getlogger("esp")
loge.setLevel(logging.INFO)
import utime as time
import sys
import asyncio
from neopixels import Neopixels
neo = Neopixels(13,4)
neo.brightness = 50
neo.clearBuffer()
# ------------------------------------------------------------
# === Example ===
# ------------------------------------------------------------
total = 0
# 4 tasks
def led0():
log.info("Task led0 created!")
yield<|fim▁hole|> while True:
neo.toggleR( 0,80 )
neo.writeBuffer()
yield
log.info("Task led0 dies!")
def led1():
yield
while True:
neo.toggleG( 1,80 )
neo.writeBuffer()
yield
log.info("Task led1 dies!")
def led2():
yield
while True:
neo.toggleB( 2,80 )
neo.writeBuffer()
yield
log.info("Task led2 dies!")
def led3():
yield
while True:
neo.toggleR( 3,80 )
neo.writeBuffer()
yield
log.info("Task led3 dies!")
def master_of_universe():
yield
log.info("Creating task led0. Red led goes flashing fast!")
tid = yield asyncio.CreateTask( led0(), period = 100, prio = 11 )
log.info("Kill task led0 with tid %d. Red led stops flashing!",tid)
yield asyncio.KillTask(tid)
log.info("Kill the os itself!")
yield asyncio.KillOs()
log.info("Task master_of_universe is ready!")
now = time.ticks_ms()
print (now)
# Run them
sched = asyncio.sched
sched.task(led1(), period = 300, time2run = 200)
sched.task(led2(), period = 700, time2run = 300)
sched.task(led3(), period = 4000, time2run = 4000)
sched.task(master_of_universe(), period = 4000, time2run = 4000 )
log.info("test creating killing tasks")
sched.mainloop()<|fim▁end|> | |
<|file_name|>Map.py<|end_file_name|><|fim▁begin|># by amounra 0216 : http://www.aumhaa.com
"""
Codec_Map.py
Created by amounra on 2010-10-05.
Copyright (c) 2010 __artisia__. All rights reserved.
This file allows the reassignment of the controls from their default arrangement. The order is from left to right;
Buttons are Note #'s and Faders/Rotaries are Controller #'s
"""
CHANNEL = 0 #main channel (0 - 15)
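# Added note (inferred from the tables below): the top-left button presumably
# sends note number CODE_BUTTONS[0][0] == 1 and the top-left dial CC 1, on CHANNEL.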
CODE_BUTTONS = [[1, 5, 9, 13, 17, 21, 25, 29],
[2, 6, 10, 14, 18, 22, 26, 30],
[3, 7, 11, 15, 19, 23, 27, 31],
[4, 8, 12, 16, 20, 24, 28, 32]]
CODE_DIALS = [[1, 5, 9, 13, 17, 21, 25, 29],
[2, 6, 10, 14, 18, 22, 26, 30],
[3, 7, 11, 15, 19, 23, 27, 31],
[4, 8, 12, 16, 20, 24, 28, 32]]
CODE_COLUMN_BUTTONS = [38, 39, 40, 41, 42, 43, 44, 45]
CODE_ROW_BUTTONS = [33, 34, 35, 36]
LIVID = 37 #single
FOLLOW = True #this sets whether or not the last selected device on a track is selected for editing when you select a new track
<|fim▁hole|>
USE_DEVICE_SELECTOR = True
FACTORY_RESET = False
SHIFT_LATCHING = True
from aumhaa.v2.livid.colors import *
class CodecColors:
ResetSendsColor = LividRGB.WHITE
class Mod:
ShiftOff = LividRGB.OFF
ShiftOn = LividRGB.WHITE
class Mode:
Main = LividRGB.WHITE
Main_shifted = LividRGB.BlinkFast.WHITE
class ShiftMode:
Enabled = LividRGB.BlinkFast.WHITE
Disabled = LividRGB.OFF
class DefaultButton:
On = LividRGB.WHITE
Off = LividRGB.OFF
Disabled = LividRGB.OFF
Alert = LividRGB.BlinkFast.WHITE
class Session:
StopClipTriggered = LividRGB.BlinkFast.BLUE
StopClip = LividRGB.BLUE
Scene = LividRGB.CYAN
NoScene = LividRGB.OFF
SceneTriggered = LividRGB.BlinkFast.BLUE
ClipTriggeredPlay = LividRGB.BlinkFast.GREEN
ClipTriggeredRecord = LividRGB.BlinkFast.RED
RecordButton = LividRGB.OFF
ClipStopped = LividRGB.WHITE
ClipStarted = LividRGB.GREEN
ClipRecording = LividRGB.RED
NavigationButtonOn = LividRGB.BLUE
class Mixer:
SoloOn = LividRGB.CYAN
SoloOff = LividRGB.OFF
MuteOn = LividRGB.YELLOW
MuteOff = LividRGB.OFF
ArmSelected = LividRGB.GREEN
ArmUnselected = LividRGB.RED
ArmOff = LividRGB.OFF
StopClip = LividRGB.BLUE
SelectedOn = LividRGB.BLUE
SelectedOff = LividRGB.OFF
class Recording:
Transition = LividRGB.BlinkSlow.GREEN
class Recorder:
On = LividRGB.WHITE
Off = LividRGB.BLUE
NewOn = LividRGB.BlinkFast.YELLOW
NewOff = LividRGB.YELLOW
FixedOn = LividRGB.BlinkFast.CYAN
FixedOff = LividRGB.CYAN
RecordOn = LividRGB.BlinkFast.GREEN
RecordOff = LividRGB.GREEN
FixedAssigned = LividRGB.MAGENTA
FixedNotAssigned = LividRGB.OFF
class Transport:
OverdubOn = LividRGB.BlinkFast.RED
OverdubOff = LividRGB.RED
StopOn = LividRGB.BLUE
StopOff = LividRGB.BLUE
class Device:
NavOn = LividRGB.MAGENTA
NavOff = LividRGB.OFF
BankOn = LividRGB.YELLOW
BankOff = LividRGB.OFF
ChainNavOn = LividRGB.RED
ChainNavOff = LividRGB.OFF
ContainNavOn = LividRGB.CYAN
ContainNavOff = LividRGB.OFF
## a<|fim▁end|> | COLOR_MAP = [127, 127, 127, 127, 127, 127, 127] |
<|file_name|>bookr.controller.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('shoprApp')
.controller('BookrCtrl', function ($scope, localStorageService, Auth, $http, $routeParams, $location) {
$scope.Auth = Auth;
//$scope.books = Auth.getCurrentUser().books;
$scope.mySubs = Auth.getCurrentUser().books;
function getById(books, id) {
for(var b in books) {
if(books[b]._id === id)
return books[b];
}
}
function loadMore(book) {
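// Added comment: lazily fetches the page at the current index plus its
// immediate neighbours (one forward, one back) so page flips stay responsive.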
var page = book.recent[book.recent.length-book.pageIndex-1];
if(page && !page.src) {
$http.get('/api/pages/'+book._id+'/'+page.issue).success(function(fullPage) {
for(var k in fullPage[0]) page[k]=fullPage[0][k];
setTimeout(function(){ $scope.$digest(); }, 500);
});
}
var forward = book.recent[book.recent.length-book.pageIndex];
if(forward && !forward.src) {
$http.get('/api/pages/'+book._id+'/'+forward.issue).success(function(fullPage) {
for(var k in fullPage[0]) forward[k]=fullPage[0][k];
setTimeout(function(){ $scope.$digest(); }, 500);
});
}
var back = book.recent[book.recent.length-book.pageIndex-2];
if(back && !back.src) {
$http.get('/api/pages/'+book._id+'/'+back.issue).success(function(fullPage) {
for(var k in fullPage[0]) back[k]=fullPage[0][k];
setTimeout(function(){ $scope.$digest(); }, 500);
});
}
}
function init(book, pageIndex) {
book.pageIndex = pageIndex;
$scope.$watch(function(){ return this.pageIndex; }.bind(book), function(newValue, oldValue){
console.log(this);
loadMore(this);
// $location.path('/bookr/'+this._id+'/'+this.pageIndex, false);
var elm = $('book[ng-id="'+this._id+'"]')[0];
elm && $('html, body').animate({ scrollTop: elm.offsetTop }, 450);
}.bind(book));
$scope.books.push(book);<|fim▁hole|>
function pad(books) {
for(var b in books) {
var book = books[b];
console.log(book);
var recentLength = book.recent.length;
for(var i = 0; i < book.count-recentLength; i++)
book.recent.push({_id:book._id+'_'+(book.count-(recentLength+1)-i), issue:book.count-(recentLength+1)-i});
console.log(book);
}
}
var books = [$routeParams];
$scope.books = [];
for(var b in books) {
var book = getById($scope.mySubs, books[b].book);
if(!book) {
var page = books[b].page;
$http.get('/api/books/'+books[b].book).success(function(book){
pad([book]);
console.log(book);
$scope.mySubs.push(book);
init(book, page);
console.log('wtf?');
});
continue;
}
init(book, parseInt(books[b].page));
}
});<|fim▁end|> |
} |
<|file_name|>network.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
The NetworkCollector class collects metrics on network interface usage
using /proc/net/dev.
#### Dependencies
* /proc/net/dev
"""
import diamond.collector
from diamond.collector import str_to_bool
import diamond.convertor<|fim▁hole|>
try:
import psutil
except ImportError:
psutil = None
class NetworkCollector(diamond.collector.Collector):
PROC = '/proc/net/dev'
def get_default_config_help(self):
config_help = super(NetworkCollector, self).get_default_config_help()
config_help.update({
'interfaces': 'List of interface types to collect',
'greedy': 'Greedy match interfaces',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(NetworkCollector, self).get_default_config()
config.update({
'path': 'network',
'interfaces': ['eth', 'bond', 'em', 'p1p', 'eno', 'enp', 'ens',
'enx'],
'byte_unit': ['bit', 'byte'],
'greedy': 'true',
})
return config
def collect(self):
"""
Collect network interface stats.
"""
# Initialize results
results = {}
if os.access(self.PROC, os.R_OK):
# Open File
file = open(self.PROC)
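# Added note: each /proc/net/dev data line is "<iface>:" followed by
# 8 receive and 8 transmit counters, e.g. (values abbreviated):
#   eth0: 1234567 8901 0 0 0 0 0 0 7654321 4321 0 0 0 0 0 0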
# Build Regular Expression
greed = ''
if str_to_bool(self.config['greedy']):
greed = '\S*'
exp = (('^(?:\s*)((?:%s)%s):(?:\s*)' +
'(?P<rx_bytes>\d+)(?:\s*)' +
'(?P<rx_packets>\w+)(?:\s*)' +
'(?P<rx_errors>\d+)(?:\s*)' +
'(?P<rx_drop>\d+)(?:\s*)' +
'(?P<rx_fifo>\d+)(?:\s*)' +
'(?P<rx_frame>\d+)(?:\s*)' +
'(?P<rx_compressed>\d+)(?:\s*)' +
'(?P<rx_multicast>\d+)(?:\s*)' +
'(?P<tx_bytes>\d+)(?:\s*)' +
'(?P<tx_packets>\w+)(?:\s*)' +
'(?P<tx_errors>\d+)(?:\s*)' +
'(?P<tx_drop>\d+)(?:\s*)' +
'(?P<tx_fifo>\d+)(?:\s*)' +
'(?P<tx_colls>\d+)(?:\s*)' +
'(?P<tx_carrier>\d+)(?:\s*)' +
'(?P<tx_compressed>\d+)(?:.*)$') %
(('|'.join(self.config['interfaces'])), greed))
reg = re.compile(exp)
# Match Interfaces
for line in file:
match = reg.match(line)
if match:
device = match.group(1)
results[device] = match.groupdict()
# Close File
file.close()
else:
if not psutil:
self.log.error('Unable to import psutil')
self.log.error('No network metrics retrieved')
return None
network_stats = psutil.network_io_counters(True)
for device in network_stats.keys():
network_stat = network_stats[device]
results[device] = {}
results[device]['rx_bytes'] = network_stat.bytes_recv
results[device]['tx_bytes'] = network_stat.bytes_sent
results[device]['rx_packets'] = network_stat.packets_recv
results[device]['tx_packets'] = network_stat.packets_sent
for device in results:
stats = results[device]
for s, v in stats.items():
# Get Metric Name
metric_name = '.'.join([device, s])
# Get Metric Value
metric_value = self.derivative(metric_name,
long(v),
diamond.collector.MAX_COUNTER)
# Convert rx_bytes and tx_bytes
if s == 'rx_bytes' or s == 'tx_bytes':
convertor = diamond.convertor.binary(value=metric_value,
unit='byte')
for u in self.config['byte_unit']:
# Publish Converted Metric
self.publish(metric_name.replace('bytes', u),
convertor.get(unit=u), 2)
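# e.g. 'eth0.rx_bytes' is published as 'eth0.rx_bit' and 'eth0.rx_byte'
# with the default byte_unit list (added example)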
else:
# Publish Metric Derivative
self.publish(metric_name, metric_value)
return None<|fim▁end|> | import os
import re |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# (c) 2015 Incaser Informatica S.L. - Sergio Teruel<|fim▁hole|><|fim▁end|> | # (c) 2015 Incaser Informatica S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import main |
<|file_name|>vowel.py<|end_file_name|><|fim▁begin|>vowels = ['a','e','i','o','u']
s = 'sudeep'
count = 0
for i in s:<|fim▁hole|> print 'vowel found'
count = count + 1
print "number of vowels:",str(count)<|fim▁end|> | for q in vowels:
if i == q:
<|file_name|>Pesawat.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package delay.model;
/**
*
* @author M3 New
*/
public class Pesawat {
private String namaPesawat;
private String idPesawat;
private String idMaskapai;
public Pesawat(){
}
public Pesawat(String namaPesawat, String idPesawat, String idMaskapai) {
this.namaPesawat = namaPesawat;
this.idPesawat = idPesawat;
this.idMaskapai = idMaskapai;
}
public String getNamaPesawat() {
return namaPesawat;
}
public String getIdMaskapai() {
return idMaskapai;
}
public void setIdMaskapai(String idMaskapai) {<|fim▁hole|> public void setNamaPesawat(String namaPesawat) {
this.namaPesawat = namaPesawat;
}
public String getIdPesawat() {
return idPesawat;
}
public void setIdPesawat(String idPesawat) {
this.idPesawat = idPesawat;
}
}<|fim▁end|> | this.idMaskapai = idMaskapai;
}
|
<|file_name|>ng_form.ts<|end_file_name|><|fim▁begin|>import {
PromiseWrapper,
ObservableWrapper,
EventEmitter,
PromiseCompleter
} from 'angular2/src/facade/async';
import {StringMapWrapper, List, ListWrapper} from 'angular2/src/facade/collection';
import {isPresent, isBlank, CONST_EXPR} from 'angular2/src/facade/lang';
import {Directive} from 'angular2/metadata';
import {forwardRef, Binding} from 'angular2/di';
import {NgControl} from './ng_control';
import {Form} from './form_interface';
import {NgControlGroup} from './ng_control_group';
import {ControlContainer} from './control_container';
import {AbstractControl, ControlGroup, Control} from '../model';
import {setUpControl} from './shared';
const formDirectiveBinding =
CONST_EXPR(new Binding(ControlContainer, {toAlias: forwardRef(() => NgForm)}));
/**
* Creates and binds a form object to a DOM element.
*
* # Example
*
* ```
* @Component({selector: "signup-comp"})
* @View({
* directives: [FORM_DIRECTIVES],
* template: `
* <form #f="form" (submit)='onSignUp(f.value)'>
* <div ng-control-group='credentials' #credentials="form">
* Login <input type='text' ng-control='login'>
* Password <input type='password' ng-control='password'>
* </div><|fim▁hole|> * <div *ng-if="!credentials.valid">Credentials are invalid</div>
*
* <div ng-control-group='personal'>
* Name <input type='text' ng-control='name'>
* </div>
* <button type='submit'>Sign Up!</button>
* </form>
* `})
* class SignupComp {
* onSignUp(value) {
* // value === {personal: {name: 'some name'},
* // credentials: {login: 'some login', password: 'some password'}}
* }
* }
*
* ```
*/
@Directive({
selector: 'form:not([ng-no-form]):not([ng-form-model]),ng-form,[ng-form]',
bindings: [formDirectiveBinding],
host: {
'(submit)': 'onSubmit()',
},
events: ['ngSubmit'],
exportAs: 'form'
})
export class NgForm extends ControlContainer implements Form {
form: ControlGroup;
ngSubmit = new EventEmitter();
constructor() {
super();
this.form = new ControlGroup({});
}
get formDirective(): Form { return this; }
get control(): ControlGroup { return this.form; }
get path(): List<string> { return []; }
get controls(): StringMap<string, AbstractControl> { return this.form.controls; }
addControl(dir: NgControl): void {
this._later(_ => {
var container = this._findContainer(dir.path);
var c = new Control();
setUpControl(c, dir);
container.addControl(dir.name, c);
c.updateValidity();
});
}
getControl(dir: NgControl): Control { return <Control>this.form.find(dir.path); }
removeControl(dir: NgControl): void {
this._later(_ => {
var container = this._findContainer(dir.path);
if (isPresent(container)) {
container.removeControl(dir.name);
container.updateValidity();
}
});
}
addControlGroup(dir: NgControlGroup): void {
this._later(_ => {
var container = this._findContainer(dir.path);
var c = new ControlGroup({});
container.addControl(dir.name, c);
c.updateValidity();
});
}
removeControlGroup(dir: NgControlGroup): void {
this._later(_ => {
var container = this._findContainer(dir.path);
if (isPresent(container)) {
container.removeControl(dir.name);
container.updateValidity();
}
});
}
getControlGroup(dir: NgControlGroup): ControlGroup {
return <ControlGroup>this.form.find(dir.path);
}
updateModel(dir: NgControl, value: any): void {
this._later(_ => {
var c = <Control>this.form.find(dir.path);
c.updateValue(value);
});
}
onSubmit(): boolean {
ObservableWrapper.callNext(this.ngSubmit, null);
return false;
}
_findContainer(path: List<string>): ControlGroup {
ListWrapper.removeLast(path);
return ListWrapper.isEmpty(path) ? this.form : <ControlGroup>this.form.find(path);
}
_later(fn) {
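// Added comment: defers `fn` to run asynchronously by resolving an
// already-completed promise, so it executes after the current turn.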
var c: PromiseCompleter<any> = PromiseWrapper.completer();
PromiseWrapper.then(c.promise, fn, (_) => {});
c.resolve(null);
}
}<|fim▁end|> | |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models<|fim▁hole|> def forwards(self, orm):
# Adding model 'Result'
db.create_table('taxonomy_result', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('taxonomy', ['Result'])
# Adding model 'Tag'
db.create_table('taxonomy_tag', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50, db_index=True)),
))
db.send_create_signal('taxonomy', ['Tag'])
# Adding model 'Category'
db.create_table('taxonomy_category', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('parent', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='children', null=True, to=orm['taxonomy.Category'])),
('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50, db_index=True)),
))
db.send_create_signal('taxonomy', ['Category'])
# Adding model 'Vote'
db.create_table('taxonomy_vote', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='poll_votes', to=orm['auth.User'])),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal('taxonomy', ['Vote'])
# Adding unique constraint on 'Vote', fields ['owner', 'content_type', 'object_id']
db.create_unique('taxonomy_vote', ['owner_id', 'content_type_id', 'object_id'])
def backwards(self, orm):
# Removing unique constraint on 'Vote', fields ['owner', 'content_type', 'object_id']
db.delete_unique('taxonomy_vote', ['owner_id', 'content_type_id', 'object_id'])
# Deleting model 'Result'
db.delete_table('taxonomy_result')
# Deleting model 'Tag'
db.delete_table('taxonomy_tag')
# Deleting model 'Category'
db.delete_table('taxonomy_category')
# Deleting model 'Vote'
db.delete_table('taxonomy_vote')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'taxonomy.category': {
'Meta': {'ordering': "('title',)", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['taxonomy.Category']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'taxonomy.result': {
'Meta': {'object_name': 'Result'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'taxonomy.tag': {
'Meta': {'ordering': "('title',)", 'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'taxonomy.vote': {
'Meta': {'unique_together': "(('owner', 'content_type', 'object_id'),)", 'object_name': 'Vote'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_votes'", 'to': "orm['auth.User']"})
}
}
complete_apps = ['taxonomy']<|fim▁end|> |
class Migration(SchemaMigration):
|
<|file_name|>OFFSIDE.cpp<|end_file_name|><|fim▁begin|>#include<iostream>
using namespace std;
int main()
{
int a,d,x[20],y[20],a1,temp,i,j;
while(1)
{
a1=20000;
cin>>a>>d;
if(a==0 && d==0)
break;
for(i=0;i<a;i++)
{<|fim▁hole|> a1=x[i];
}
for(i=0;i<d;i++)
{
cin>>y[i];
}
for(i=0;i<2;i++)
{
for(j=i+1;j<d;j++)
{
if(y[i]>y[j])
{
temp=y[i];
y[i]=y[j];
y[j]=temp;
}
}
}
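// Added comment: y[0] and y[1] now hold the two defenders nearest the goal
// line, so the attacker is onside when a1 is not closer than y[1].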
if(a1>=y[1])
{
cout<<"N\n";
}
else
cout<<"Y\n";
}
}<|fim▁end|> | cin>>x[i];
if(x[i]<a1) |
<|file_name|>connection-button.js<|end_file_name|><|fim▁begin|>import BarButton from '../bar-button/bar-button';
import { useRef, useEffect } from 'react';
export default function ConnectionButton() {
const buttonRef = useRef();
const isUserConnected = elementorAdminTopBarConfig.is_user_connected;
useEffect( () => {
if ( ! buttonRef.current || isUserConnected ) {
return;
}
<|fim▁hole|> jQuery( buttonRef.current ).elementorConnect();
}, [] );
let tooltipText = __( 'Connect your account to get access to Elementor\'s Template Library & more.', 'elementor' ),
connectUrl = elementorAdminTopBarConfig.connect_url,
buttonText = __( 'Connect Account', 'elementor' ),
targetUrl = '_self';
if ( isUserConnected ) {
tooltipText = '';
connectUrl = 'https://go.elementor.com/wp-dash-admin-bar-account/';
buttonText = __( 'My Elementor', 'elementor' );
targetUrl = '_blank';
}
return (
<BarButton icon="eicon-user-circle-o" buttonRef={buttonRef} dataInfo={tooltipText} href={connectUrl} target={targetUrl}>{ buttonText }</BarButton>
);
}<|fim▁end|> | |
<|file_name|>manageindexes.js<|end_file_name|><|fim▁begin|>ManageIndexes = {
GridPanel : function(config) {
config.autoExpandColumn = 'columns';
config.viewConfig = { forceFit: true };
ManageIndexes.GridPanel.superclass.constructor.call(this, config);
},
gridPanelStore : function(data) {
var store = new Ext.data.SimpleStore( {
fields : data.fields
});
store.loadData(data.data);
return store;
},
gridPanelColumnModel : function(data) {
for ( var i = 0; i < data.models.length; i++) {
var curr = data.models[i];
if (curr.id == 'unique' || curr.id == 'fullText') {
curr.renderer = function(v) {
//return '<div class="x-grid3-check-col' + (v ? '-on' : '') + '"> </div>';
return '<input type="checkbox" '+(v?'checked="checked"':'')+' disabled="disabled" />';
};
}
}
var cm = new Ext.grid.ColumnModel( {
columns : data.models
});
return cm;
},
closeManageIndexesWindow : function() {
Ext.getCmp('manage_indexes_window').close();
},
showDeleteIndexConfirmation : function() {
// Get the selected row(s)
var indexesGrid = Ext.getCmp('manage_indexes_grid');
var selModel = indexesGrid.getSelectionModel();
var rows = selModel.getSelections();
// If no rows are selected, alert the user.
if (!rows.length) {
var msg = "Please select index(es) to delete!";
Dbl.Utils.showErrorMsg(msg, '');
// Ext.MessageBox.alert('No Index(es) Selected',
// 'Please select the index(es) to delete');
return;
}
ManageIndexes.indexesForDeletion = [];
for ( var i = 0; i < rows.length; i++) {
ManageIndexes.indexesForDeletion.push(rows[i].data.indexName);
}
// Get confirmation from the user
var title = 'Are you sure?';
var message = 'Are you sure you want to delete the selected index(es)?';
var handleConfirmation = function(btn) {
if (btn == "yes") {
// Send the delete index command to server
Server.sendCommand('delete_indexes', {
indexes : ManageIndexes.indexesForDeletion,
table : Explorer.selectedDbTable
}, function(data) {
// var msg = "Index(s) deleted successfully";
Dbl.Utils.showInfoMsg(Messages.getMsg('index_deletion_success'));
// Ext.MessageBox.alert('Success!!',
// 'Index(es) deleted successfully');
ManageIndexes.refreshGrid();
// Server.sendCommand('get_min_table_indexes', {
// parent_database : Explorer.selectedDatabase,
// table : Explorer.selectedDbTable
// }, function(data) {
// var store = ManageIndexes.gridPanelStore(data);
// var cm = ManageIndexes.gridPanelColumnModel(data);
// Ext.getCmp('manage_indexes_grid')
// .reconfigure(store, cm);
//
// });
});
}
};
Ext.MessageBox.confirm(title, message, handleConfirmation);
},
indexesForDeletion : [],
refreshGrid: function()
{
Server.sendCommand('get_min_table_indexes', {
parent_database : Explorer.selectedDatabase,
table : Explorer.selectedDbTable
}, function(data) {
var store = ManageIndexes.gridPanelStore(data);
var cm = ManageIndexes.gridPanelColumnModel(data);
Ext.getCmp('manage_indexes_grid').reconfigure(store, cm);
});
},
showEditIndexWindow: function()
{
var selectionCount = Ext.getCmp('manage_indexes_grid').getSelectionModel().getCount();
if(!selectionCount) {
// var msg = "Please select an index to edit!";
Dbl.Utils.showErrorMsg(Messages.getMsg('edit_index_required'));
} else if(selectionCount > 1) {
// var msg = "Please select a single index to edit!";
Dbl.Utils.showErrorMsg(Messages.getMsg('edit_index_single'));
} else {
Server.sendCommand('get_min_table_columns', {
table : Explorer.selectedDbTable
}, function(data) {
data.editMode = true;
ManageIndexes.addIndexWin = new ManageIndexes.AddIndexWindow(data);
ManageIndexes.addIndexWin.show();
});
}
},
showAddIndexWindow : function(editMode) {
Server.sendCommand('get_min_table_columns', {
table : Explorer.selectedDbTable
}, function(data) {
ManageIndexes.addIndexWin = new ManageIndexes.AddIndexWindow(data);
ManageIndexes.addIndexWin.show();
});
},
AddIndexWindow : function(data) {
var gridPanel = new ManageIndexes.AddIndexGrid(data);
var form = new ManageIndexes.AddIndexForm();
if(data.editMode)
{
var index = Ext.getCmp('manage_indexes_grid').getSelectionModel().getSelected().data;
var indexName = index.indexName;
var formObj = form.getForm();
formObj.findField('add_index_form_index_name').setValue(indexName);
formObj.findField('add_index_form_original_name').setValue(indexName);
//form.originalIndexName = indexName;
var indexType;
if(indexName == 'PRIMARY')
indexType = 'primary';
else if(index.unique == true)
indexType = 'unique';
else if(index.fullText == true)
indexType = 'fullText';
else
indexType = 'none';
var cmpId = 'add_index_form_index_type_'+indexType;
Ext.getCmp('options_group').setValue(cmpId,true);
var columns = index.columns.split(',').reverse();
for(var i=0; i<columns.length; i++)
{
var recIndex = gridPanel.getStore().find('Name',columns[i]);
var rec = gridPanel.getStore().getAt(recIndex);
rec.set('included', true);
gridPanel.getStore().remove(rec);
gridPanel.getStore().insert(0, rec);
}
}
ManageIndexes.AddIndexWindow.superclass.constructor.call(this, {
title : "Add New Index",
id : "add_index_window",
headerAsText : true,
width : 350,
resizable : false,
modal : true,
plain : true,
stateful : true,
shadow : false,
onEsc : function() {
},
closeAction : 'destroy',
items : [ form, gridPanel ],
buttons : [
{
text: data.editMode ? 'submit' : 'add',
handler: data.editMode?ManageIndexes.editIndex:ManageIndexes.createAndAddIndex
},
{
text: 'cancel',
handler: ManageIndexes.closeAddIndexWindow
}]
});
},
AddIndexGrid : function(data) {
var includedModel = new Ext.ux.CheckColumn({
header: ' ',
checkOnly: true,
dataIndex: 'included',
width: 20});
for(var i=0; i<data.fields.length; i++)
{
if(data.fields[i] == "included") {
data.fields[i].type = 'bool';
data.models[i] = includedModel;
}
}
ManageIndexes.AddIndexGrid.superclass.constructor.call(this, {
fields : data.fields,
data : data.data,
models : data.models,
autoExpandColumn: 'Name',
viewConfig: { forceFit: true },
id : 'add_index_grid',
height: 180,
//width: 333,
autoScroll: true,
fbar: [Messages.getMsg('edit_index_footer')],
enableDragDrop: true,
ddGroup: 'mygridDD',
plugins: [includedModel],
listeners: {
"render": {
scope: this,
fn: function(grid) {
var ddrow = new Ext.dd.DropTarget(grid.container, {
ddGroup : 'mygridDD',
copy: false,
notifyDrop : function(dd, e, data){
//Ext.getCmp('reorder_columns_window').reorderButton.enable();
var ds = grid.store;
var sm = grid.getSelectionModel();
var rows = sm.getSelections();
//var rows = this.currentRowEl;
if(dd.getDragData(e)) {
var cindex=dd.getDragData(e).rowIndex;
if(typeof(cindex) != "undefined") {
for(i = 0; i < rows.length; i++) {
ds.remove(ds.getById(rows[i].id));
}
ds.insert(cindex,data.selections);
sm.clearSelections();
}
}
}
});
}
}
}
});
},
AddIndexForm: function(data) {
var radioGroupItems = [
{
boxLabel: 'Unique',
name: 'add_index_form_index_type',
id: 'add_index_form_index_type_unique',
inputValue: 'unique'
},
{
boxLabel: 'Full Text',
name: 'add_index_form_index_type',
id: 'add_index_form_index_type_fullText',
inputValue: 'fullText'
},
{
boxLabel: 'Primary',
name: 'add_index_form_index_type',
id: 'add_index_form_index_type_primary',
inputValue: 'primary',
listeners:
{
'check': {
fn: function()
{
var form = Ext.getCmp('add_index_form').getForm().getValues(false);
var indexName = Ext.getCmp('add_index_form_index_name');
if(form.add_index_form_index_type == 'primary')
{
indexName.prevValue = form.add_index_form_index_name;
indexName.setValue('PRIMARY');
indexName.disable();
}
else
{
indexName.setValue(indexName.prevValue);
indexName.enable();
}
<|fim▁hole|> {
boxLabel: 'None',
name: 'add_index_form_index_type',
id: 'add_index_form_index_type_none',
inputValue: 'none',
checked: true
}];
ManageIndexes.AddIndexForm.superclass.constructor.call(this, {
id: 'add_index_form',
labelAlign: 'top',
frame: true,
bodyStyle: "padding: 5px",
defaults: {
anchor: '100%'
},
items:[
{
xtype: 'textfield',
fieldLabel: 'Index Name',
name: 'add_index_form_index_name',
id: 'add_index_form_index_name',
blankText: 'Index name is required',
allowBlank: false
},
{
xtype: 'hidden',
name: 'add_index_form_original_name',
id: 'add_index_form_original_name'
},
{
xtype: 'radiogroup',
rows: 1,
id: 'options_group',
defaults: {
anchor: '100%'
},
bodyStyle: "padding: 0px; margin: 0px",
items: radioGroupItems,
fieldLabel: 'Index Options'
}]
});
},
editIndex: function()
{
ManageIndexes.createAndAddIndex(true);
},
createAndAddIndex: function(editMode)
{
var form = Ext.getCmp('add_index_form').getForm();
if(!form.isValid())
{
return;
}
var values = form.getValues();
var store = Ext.getCmp('add_index_grid').getStore();
var indexes = [];
var selectedRows = 0;
for(var i=0; i<store.getCount(); i++)
{
var record = store.getAt(i);
if(record.get('included') == true)
{
indexes.push(record.get('Name'));
selectedRows++;
}
}
if(selectedRows < 1)
{
// var msg = 'Please select at least one column';
Dbl.Utils.showErrorMsg(Messages.getMsg('add_index_column_req'));
//Ext.MessageBox.alert('No Columns Selected', 'Please Select atleast one column');
return;
}
Server.sendCommand(
'create_indexes',
{
table: Explorer.selectedDbTable,
type: values.add_index_form_index_type,
name: values.add_index_form_index_name,
indexes: indexes,
originalName: values.add_index_form_original_name
},
function(data) {
if(data.success) {
ManageIndexes.refreshGrid();
ManageIndexes.closeAddIndexWindow();
// var msg = 'Index added successfully';
Dbl.Utils.showInfoMsg(Messages.getMsg('index_addition_success'));
} else if(!data.success) {
var msg = data.msg ? data.msg : data;
Dbl.Utils.showErrorMsg(data.msg, '');
}
}, function(data){
Dbl.Utils.showErrorMsg(data.msg, '');
});
},
closeAddIndexWindow: function() {
Ext.getCmp('add_index_window').close();
}
};
Ext.onReady(function() {
Ext.extend(ManageIndexes.GridPanel, Dbl.ListViewPanel, {
hello : function(str) {
}
});
Ext.extend(ManageIndexes.AddIndexWindow, Ext.Window, {
});
Ext.extend(ManageIndexes.AddIndexGrid, Dbl.ListViewPanel, {});
Ext.extend(ManageIndexes.AddIndexForm, Ext.FormPanel, {});
});<|fim▁end|> | }
}
}
},
|
<|file_name|>invocation.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 Richard Diamond & contributors.
//
// This file is part of Rust Rocket.
//
// Rust Rocket is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Rust Rocket is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Rust Rocket. If not, see <http://www.gnu.org/licenses/>.
use toolchain::tool::{Tool, Compiler, Cc, Cxx, Ar, Ld};<|fim▁hole|>
}
pub struct Invocation<'a> {
state_file: Path,
print_invocation: bool,
// are we under a configure script? if so we don't need to resolve addresses.
configure: bool,
tool: Tool,
opts: &'a [String],
}
impl<'a> Invocation<'a> {
pub fn new(state: &str,
print_invocation: bool,
tool: &str,
opts: &'a [String]) -> Invocation<'a> {
Invocation {
state_file: Path::new(state),
print_invocation: print_invocation,
// `configure` is not settable from this constructor; default to false so the
// struct literal is complete (added so the snippet compiles).
configure: false,
tool: from_str(tool).expect("unknown tool specified; this is more than likely a bug"),
opts: opts,
}
}
pub fn run(&self) {
use std::io::fs::File;
use serialize::ebml::reader::Decoder;
use serialize::ebml::Doc;
// don't try-block this; if we can't read the state file, we really do need to fail!().
let state = {
let state_bytes = try!({try!(File::open(self.state_file))}.read_to_end());
let mut decoder = Decoder::new(Doc::new(state_bytes));
decode(&mut decoder)
};
match self.tool {
Cc => {
}
Cxx => {
}
Ar => {
}
Ld => {
}
}
}
}<|fim▁end|> |
pub struct State { |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>mod acc;
mod palalpha;
mod pal;
mod prewitt;
mod undither;
pub use crate::undither::Undither;
pub use crate::pal::Pal;
pub use crate::palalpha::PalAlpha;<|fim▁hole|>pub use crate::palalpha::PixAlphaAble;<|fim▁end|> | |
<|file_name|>route.js<|end_file_name|><|fim▁begin|>Router.route('/users', {<|fim▁hole|>});<|fim▁end|> | name: 'menu.users',
template: 'users',
parent: 'menu',
title: 'Users', |
<|file_name|>TsMuxeRVideo.java<|end_file_name|><|fim▁begin|>/*
* PS3 Media Server, for streaming any medias to your PS3.
* Copyright (C) 2008 A.Brochard<|fim▁hole|> * modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 2
* of the License only.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package net.pms.encoders;
import com.jgoodies.forms.builder.PanelBuilder;
import com.jgoodies.forms.factories.Borders;
import com.jgoodies.forms.layout.CellConstraints;
import com.jgoodies.forms.layout.FormLayout;
import java.awt.ComponentOrientation;
import java.awt.Font;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Locale;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JPanel;
import net.pms.Messages;
import net.pms.PMS;
import net.pms.configuration.DeviceConfiguration;
import net.pms.configuration.PmsConfiguration;
import net.pms.configuration.RendererConfiguration;
import net.pms.dlna.*;
import net.pms.formats.Format;
import net.pms.io.*;
import net.pms.newgui.GuiUtil;
import net.pms.util.CodecUtil;
import net.pms.util.FormLayoutUtil;
import net.pms.util.PlayerUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TsMuxeRVideo extends Player {
private static final Logger LOGGER = LoggerFactory.getLogger(TsMuxeRVideo.class);
private static final String COL_SPEC = "left:pref, 0:grow";
private static final String ROW_SPEC = "p, 3dlu, p, 3dlu, p, 3dlu, p, 3dlu, p, 3dlu, 0:grow";
public static final String ID = "tsmuxer";
@Deprecated
public TsMuxeRVideo(PmsConfiguration configuration) {
this();
}
public TsMuxeRVideo() {
}
@Override
public boolean excludeFormat(Format format) {
String extension = format.getMatchedExtension();
return extension != null
&& !extension.equals("mp4")
&& !extension.equals("mkv")
&& !extension.equals("ts")
&& !extension.equals("tp")
&& !extension.equals("m2ts")
&& !extension.equals("m2t")
&& !extension.equals("mpg")
&& !extension.equals("evo")
&& !extension.equals("mpeg")
&& !extension.equals("vob")
&& !extension.equals("m2v")
&& !extension.equals("mts")
&& !extension.equals("mov");
}
@Override
public int purpose() {
return VIDEO_SIMPLEFILE_PLAYER;
}
@Override
public String id() {
return ID;
}
@Override
public boolean isTimeSeekable() {
return true;
}
@Override
public String[] args() {
return null;
}
@Override
public String executable() {
return configuration.getTsmuxerPath();
}
@Override
public ProcessWrapper launchTranscode(
DLNAResource dlna,
DLNAMediaInfo media,
OutputParams params
) throws IOException {
// Use device-specific pms conf
PmsConfiguration prev = configuration;
configuration = (DeviceConfiguration) params.mediaRenderer;
final String filename = dlna.getSystemName();
setAudioAndSubs(filename, media, params);
PipeIPCProcess ffVideoPipe;
ProcessWrapperImpl ffVideo;
PipeIPCProcess ffAudioPipe[] = null;
ProcessWrapperImpl ffAudio[] = null;
String fps = media.getValidFps(false);
int width = media.getWidth();
int height = media.getHeight();
if (width < 320 || height < 240) {
width = -1;
height = -1;
}
String videoType = "V_MPEG4/ISO/AVC";
if (media.getCodecV() != null && media.getCodecV().startsWith("mpeg2")) {
videoType = "V_MPEG-2";
}
boolean aacTranscode = false;
String[] ffmpegCommands;
if (this instanceof TsMuxeRAudio && media.getFirstAudioTrack() != null) {
ffVideoPipe = new PipeIPCProcess(System.currentTimeMillis() + "fakevideo", System.currentTimeMillis() + "videoout", false, true);
String timeEndValue1 = "-t";
String timeEndValue2 = "" + params.timeend;
if (params.timeend < 1) {
timeEndValue1 = "-y";
timeEndValue2 = "-y";
}
ffmpegCommands = new String[] {
configuration.getFfmpegPath(),
timeEndValue1, timeEndValue2,
"-loop", "1",
"-i", "DummyInput.jpg",
"-f", "h264",
"-c:v", "libx264",
"-level", "31",
"-tune", "zerolatency",
"-pix_fmt", "yuv420p",
"-an",
"-y",
ffVideoPipe.getInputPipe()
};
videoType = "V_MPEG4/ISO/AVC";
OutputParams ffparams = new OutputParams(configuration);
ffparams.maxBufferSize = 1;
ffVideo = new ProcessWrapperImpl(ffmpegCommands, ffparams);
if (
filename.toLowerCase().endsWith(".flac") &&
media.getFirstAudioTrack().getBitsperSample() >= 24 &&
media.getFirstAudioTrack().getSampleRate() % 48000 == 0
) {
ffAudioPipe = new PipeIPCProcess[1];
ffAudioPipe[0] = new PipeIPCProcess(System.currentTimeMillis() + "flacaudio", System.currentTimeMillis() + "audioout", false, true);
String[] flacCmd = new String[] {
configuration.getFlacPath(),
"--output-name=" + ffAudioPipe[0].getInputPipe(),
"-d",
"-f",
"-F",
filename
};
ffparams = new OutputParams(configuration);
ffparams.maxBufferSize = 1;
ffAudio = new ProcessWrapperImpl[1];
ffAudio[0] = new ProcessWrapperImpl(flacCmd, ffparams);
} else {
ffAudioPipe = new PipeIPCProcess[1];
ffAudioPipe[0] = new PipeIPCProcess(System.currentTimeMillis() + "mlpaudio", System.currentTimeMillis() + "audioout", false, true);
String depth = "pcm_s16le";
String rate = "48000";
if (media.getFirstAudioTrack().getBitsperSample() >= 24) {
depth = "pcm_s24le";
}
if (media.getFirstAudioTrack().getSampleRate() > 48000) {
rate = "" + media.getFirstAudioTrack().getSampleRate();
}
String[] flacCmd = new String[] {
configuration.getFfmpegPath(),
"-i", filename,
"-ar", rate,
"-f", "wav",
"-acodec", depth,
"-y",
ffAudioPipe[0].getInputPipe()
};
ffparams = new OutputParams(configuration);
ffparams.maxBufferSize = 1;
ffAudio = new ProcessWrapperImpl[1];
ffAudio[0] = new ProcessWrapperImpl(flacCmd, ffparams);
}
} else {
params.waitbeforestart = 5000;
params.manageFastStart();
ffVideoPipe = new PipeIPCProcess(System.currentTimeMillis() + "ffmpegvideo", System.currentTimeMillis() + "videoout", false, true);
ffmpegCommands = new String[] {
configuration.getFfmpegPath(),
"-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
"-i", filename,
"-c", "copy",
"-f", "rawvideo",
"-y",
ffVideoPipe.getInputPipe()
};
InputFile newInput = new InputFile();
newInput.setFilename(filename);
newInput.setPush(params.stdin);
/**
* Note: This logic is weird; on one hand we check if the renderer requires videos to be Level 4.1 or below, but then
* the other function allows the video to exceed those limits.
* In reality this won't cause problems since renderers typically don't support above 4.1 anyway - nor are many
* videos encoded higher than that either - but it's worth acknowledging the logic discrepancy.
*/
if (!media.isVideoWithinH264LevelLimits(newInput, params.mediaRenderer) && params.mediaRenderer.isH264Level41Limited()) {
LOGGER.info("The video will not play or will show a black screen");
}
if (media.getH264AnnexB() != null && media.getH264AnnexB().length > 0) {
StreamModifier sm = new StreamModifier();
sm.setHeader(media.getH264AnnexB());
sm.setH264AnnexB(true);
ffVideoPipe.setModifier(sm);
}
OutputParams ffparams = new OutputParams(configuration);
ffparams.maxBufferSize = 1;
ffparams.stdin = params.stdin;
ffVideo = new ProcessWrapperImpl(ffmpegCommands, ffparams);
int numAudioTracks = 1;
if (media.getAudioTracksList() != null && media.getAudioTracksList().size() > 1 && configuration.isMuxAllAudioTracks()) {
numAudioTracks = media.getAudioTracksList().size();
}
boolean singleMediaAudio = media.getAudioTracksList().size() <= 1;
if (params.aid != null) {
boolean ac3Remux;
boolean dtsRemux;
boolean encodedAudioPassthrough;
boolean pcm;
if (numAudioTracks <= 1) {
ffAudioPipe = new PipeIPCProcess[numAudioTracks];
ffAudioPipe[0] = new PipeIPCProcess(System.currentTimeMillis() + "ffmpegaudio01", System.currentTimeMillis() + "audioout", false, true);
encodedAudioPassthrough = configuration.isEncodedAudioPassthrough() && params.aid.isNonPCMEncodedAudio() && params.mediaRenderer.isWrapEncodedAudioIntoPCM();
ac3Remux = params.aid.isAC3() && configuration.isAudioRemuxAC3() && !encodedAudioPassthrough && !params.mediaRenderer.isTranscodeToAAC();
dtsRemux = configuration.isAudioEmbedDtsInPcm() && params.aid.isDTS() && params.mediaRenderer.isDTSPlayable() && !encodedAudioPassthrough;
pcm = configuration.isAudioUsePCM() &&
media.isValidForLPCMTranscoding() &&
(
params.aid.isLossless() ||
(params.aid.isDTS() && params.aid.getAudioProperties().getNumberOfChannels() <= 6) ||
params.aid.isTrueHD() ||
(
!configuration.isMencoderUsePcmForHQAudioOnly() &&
(
params.aid.isAC3() ||
params.aid.isMP3() ||
params.aid.isAAC() ||
params.aid.isVorbis() ||
// params.aid.isWMA() ||
params.aid.isMpegAudio()
)
)
) && params.mediaRenderer.isLPCMPlayable();
int channels;
if (ac3Remux) {
channels = params.aid.getAudioProperties().getNumberOfChannels(); // AC-3 remux
} else if (dtsRemux || encodedAudioPassthrough) {
channels = 2;
} else if (pcm) {
channels = params.aid.getAudioProperties().getNumberOfChannels();
} else {
channels = configuration.getAudioChannelCount(); // 5.1 max for AC-3 encoding
}
if (!ac3Remux && (dtsRemux || pcm || encodedAudioPassthrough)) {
// DTS remux or LPCM
StreamModifier sm = new StreamModifier();
sm.setPcm(pcm);
sm.setDtsEmbed(dtsRemux);
sm.setEncodedAudioPassthrough(encodedAudioPassthrough);
sm.setNbChannels(channels);
sm.setSampleFrequency(params.aid.getSampleRate() < 48000 ? 48000 : params.aid.getSampleRate());
sm.setBitsPerSample(16);
ffmpegCommands = new String[] {
configuration.getFfmpegPath(),
"-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
"-i", filename,
"-ac", "" + sm.getNbChannels(),
"-f", "ac3",
"-c:a", sm.isDtsEmbed() || sm.isEncodedAudioPassthrough() ? "copy" : "pcm",
"-y",
ffAudioPipe[0].getInputPipe()
};
// Use PCM trick when media renderer does not support DTS in MPEG
if (!params.mediaRenderer.isMuxDTSToMpeg()) {
ffAudioPipe[0].setModifier(sm);
}
} else if (!ac3Remux && params.mediaRenderer.isTranscodeToAAC()) {
// AAC audio
ffmpegCommands = new String[] {
configuration.getFfmpegPath(),
"-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
"-i", filename,
"-ac", "" + channels,
"-f", "adts",
"-c:a", "aac",
"-strict", "experimental",
"-ab", Math.min(configuration.getAudioBitrate(), 320) + "k",
"-y",
ffAudioPipe[0].getInputPipe()
};
aacTranscode = true;
} else {
// AC-3 audio
ffmpegCommands = new String[] {
configuration.getFfmpegPath(),
"-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
"-i", filename,
"-ac", "" + channels,
"-f", "ac3",
"-c:a", (ac3Remux) ? "copy" : "ac3",
"-ab", String.valueOf(CodecUtil.getAC3Bitrate(configuration, params.aid)) + "k",
"-y",
ffAudioPipe[0].getInputPipe()
};
}
ffparams = new OutputParams(configuration);
ffparams.maxBufferSize = 1;
ffparams.stdin = params.stdin;
ffAudio = new ProcessWrapperImpl[numAudioTracks];
ffAudio[0] = new ProcessWrapperImpl(ffmpegCommands, ffparams);
} else {
ffAudioPipe = new PipeIPCProcess[numAudioTracks];
ffAudio = new ProcessWrapperImpl[numAudioTracks];
for (int i = 0; i < media.getAudioTracksList().size(); i++) {
DLNAMediaAudio audio = media.getAudioTracksList().get(i);
ffAudioPipe[i] = new PipeIPCProcess(System.currentTimeMillis() + "ffmpeg" + i, System.currentTimeMillis() + "audioout" + i, false, true);
encodedAudioPassthrough = configuration.isEncodedAudioPassthrough() && params.aid.isNonPCMEncodedAudio() && params.mediaRenderer.isWrapEncodedAudioIntoPCM();
ac3Remux = audio.isAC3() && configuration.isAudioRemuxAC3() && !encodedAudioPassthrough && !params.mediaRenderer.isTranscodeToAAC();
dtsRemux = configuration.isAudioEmbedDtsInPcm() && audio.isDTS() && params.mediaRenderer.isDTSPlayable() && !encodedAudioPassthrough;
pcm = configuration.isAudioUsePCM() &&
media.isValidForLPCMTranscoding() &&
(
audio.isLossless() ||
(audio.isDTS() && audio.getAudioProperties().getNumberOfChannels() <= 6) ||
audio.isTrueHD() ||
(
!configuration.isMencoderUsePcmForHQAudioOnly() &&
(
audio.isAC3() ||
audio.isMP3() ||
audio.isAAC() ||
audio.isVorbis() ||
// audio.isWMA() ||
audio.isMpegAudio()
)
)
) && params.mediaRenderer.isLPCMPlayable();
int channels;
if (ac3Remux) {
channels = audio.getAudioProperties().getNumberOfChannels(); // AC-3 remux
} else if (dtsRemux || encodedAudioPassthrough) {
channels = 2;
} else if (pcm) {
channels = audio.getAudioProperties().getNumberOfChannels();
} else {
channels = configuration.getAudioChannelCount(); // 5.1 max for AC-3 encoding
}
if (!ac3Remux && (dtsRemux || pcm || encodedAudioPassthrough)) {
// DTS remux or LPCM
StreamModifier sm = new StreamModifier();
sm.setPcm(pcm);
sm.setDtsEmbed(dtsRemux);
sm.setEncodedAudioPassthrough(encodedAudioPassthrough);
sm.setNbChannels(channels);
sm.setSampleFrequency(audio.getSampleRate() < 48000 ? 48000 : audio.getSampleRate());
sm.setBitsPerSample(16);
if (!params.mediaRenderer.isMuxDTSToMpeg()) {
ffAudioPipe[i].setModifier(sm);
}
ffmpegCommands = new String[] {
configuration.getFfmpegPath(),
"-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
"-i", filename,
"-ac", "" + sm.getNbChannels(),
"-f", "ac3",
singleMediaAudio ? "-y" : "-map", singleMediaAudio ? "-y" : ("0:a:" + (media.getAudioTracksList().indexOf(audio))),
"-c:a", sm.isDtsEmbed() || sm.isEncodedAudioPassthrough() ? "copy" : "pcm",
"-y",
ffAudioPipe[i].getInputPipe()
};
} else if (!ac3Remux && params.mediaRenderer.isTranscodeToAAC()) {
// AAC audio
ffmpegCommands = new String[] {
configuration.getFfmpegPath(),
"-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
"-i", filename,
"-ac", "" + channels,
"-f", "adts",
singleMediaAudio ? "-y" : "-map", singleMediaAudio ? "-y" : ("0:a:" + (media.getAudioTracksList().indexOf(audio))),
"-c:a", "aac",
"-strict", "experimental",
"-ab", Math.min(configuration.getAudioBitrate(), 320) + "k",
"-y",
ffAudioPipe[i].getInputPipe()
};
aacTranscode = true;
} else {
// AC-3 remux or encoding
ffmpegCommands = new String[] {
configuration.getFfmpegPath(),
"-ss", params.timeseek > 0 ? "" + params.timeseek : "0",
"-i", filename,
"-ac", "" + channels,
"-f", "ac3",
singleMediaAudio ? "-y" : "-map", singleMediaAudio ? "-y" : ("0:a:" + (media.getAudioTracksList().indexOf(audio))),
"-c:a", (ac3Remux) ? "copy" : "ac3",
"-ab", String.valueOf(CodecUtil.getAC3Bitrate(configuration, audio)) + "k",
"-y",
ffAudioPipe[i].getInputPipe()
};
}
ffparams = new OutputParams(configuration);
ffparams.maxBufferSize = 1;
ffparams.stdin = params.stdin;
ffAudio[i] = new ProcessWrapperImpl(ffmpegCommands, ffparams);
}
}
}
}
File f = new File(configuration.getTempFolder(), "pms-tsmuxer.meta");
params.log = false;
try (PrintWriter pw = new PrintWriter(f)) {
pw.print("MUXOPT --no-pcr-on-video-pid");
pw.print(" --new-audio-pes");
pw.print(" --no-asyncio");
pw.print(" --vbr");
pw.println(" --vbv-len=500");
String sei = "insertSEI";
if (
params.mediaRenderer.isPS3() &&
media.isWebDl(filename, params)
) {
sei = "forceSEI";
}
String videoparams = "level=4.1, " + sei + ", contSPS, track=1";
if (this instanceof TsMuxeRAudio) {
videoparams = "track=224";
}
if (configuration.isFix25FPSAvMismatch()) {
fps = "25";
}
pw.println(videoType + ", \"" + ffVideoPipe.getOutputPipe() + "\", " + (fps != null ? ("fps=" + fps + ", ") : "") + (width != -1 ? ("video-width=" + width + ", ") : "") + (height != -1 ? ("video-height=" + height + ", ") : "") + videoparams);
if (ffAudioPipe != null && ffAudioPipe.length == 1) {
String timeshift = "";
boolean ac3Remux;
boolean dtsRemux;
boolean encodedAudioPassthrough;
boolean pcm;
encodedAudioPassthrough = configuration.isEncodedAudioPassthrough() && params.aid.isNonPCMEncodedAudio() && params.mediaRenderer.isWrapEncodedAudioIntoPCM();
ac3Remux = params.aid.isAC3() && configuration.isAudioRemuxAC3() && !encodedAudioPassthrough && !params.mediaRenderer.isTranscodeToAAC();
dtsRemux = configuration.isAudioEmbedDtsInPcm() && params.aid.isDTS() && params.mediaRenderer.isDTSPlayable() && !encodedAudioPassthrough;
pcm = configuration.isAudioUsePCM() &&
media.isValidForLPCMTranscoding() &&
(
params.aid.isLossless() ||
(params.aid.isDTS() && params.aid.getAudioProperties().getNumberOfChannels() <= 6) ||
params.aid.isTrueHD() ||
(
!configuration.isMencoderUsePcmForHQAudioOnly() &&
(
params.aid.isAC3() ||
params.aid.isMP3() ||
params.aid.isAAC() ||
params.aid.isVorbis() ||
// params.aid.isWMA() ||
params.aid.isMpegAudio()
)
)
) && params.mediaRenderer.isLPCMPlayable();
String type = "A_AC3";
if (ac3Remux) {
// AC-3 remux takes priority
type = "A_AC3";
} else if (aacTranscode) {
type = "A_AAC";
} else {
if (pcm || this instanceof TsMuxeRAudio) {
type = "A_LPCM";
}
if (encodedAudioPassthrough || this instanceof TsMuxeRAudio) {
type = "A_LPCM";
}
if (dtsRemux || this instanceof TsMuxeRAudio) {
type = "A_LPCM";
if (params.mediaRenderer.isMuxDTSToMpeg()) {
type = "A_DTS";
}
}
}
if (params.aid != null && params.aid.getAudioProperties().getAudioDelay() != 0 && params.timeseek == 0) {
timeshift = "timeshift=" + params.aid.getAudioProperties().getAudioDelay() + "ms, ";
}
pw.println(type + ", \"" + ffAudioPipe[0].getOutputPipe() + "\", " + timeshift + "track=2");
} else if (ffAudioPipe != null) {
for (int i = 0; i < media.getAudioTracksList().size(); i++) {
DLNAMediaAudio lang = media.getAudioTracksList().get(i);
String timeshift = "";
boolean ac3Remux;
boolean dtsRemux;
boolean encodedAudioPassthrough;
boolean pcm;
encodedAudioPassthrough = configuration.isEncodedAudioPassthrough() && params.aid.isNonPCMEncodedAudio() && params.mediaRenderer.isWrapEncodedAudioIntoPCM();
ac3Remux = lang.isAC3() && configuration.isAudioRemuxAC3() && !encodedAudioPassthrough;
dtsRemux = configuration.isAudioEmbedDtsInPcm() && lang.isDTS() && params.mediaRenderer.isDTSPlayable() && !encodedAudioPassthrough;
pcm = configuration.isAudioUsePCM() &&
media.isValidForLPCMTranscoding() &&
(
lang.isLossless() ||
(lang.isDTS() && lang.getAudioProperties().getNumberOfChannels() <= 6) ||
lang.isTrueHD() ||
(
!configuration.isMencoderUsePcmForHQAudioOnly() &&
(
params.aid.isAC3() ||
params.aid.isMP3() ||
params.aid.isAAC() ||
params.aid.isVorbis() ||
// params.aid.isWMA() ||
params.aid.isMpegAudio()
)
)
) && params.mediaRenderer.isLPCMPlayable();
String type = "A_AC3";
if (ac3Remux) {
// AC-3 remux takes priority
type = "A_AC3";
} else {
if (pcm) {
type = "A_LPCM";
}
if (encodedAudioPassthrough) {
type = "A_LPCM";
}
if (dtsRemux) {
type = "A_LPCM";
if (params.mediaRenderer.isMuxDTSToMpeg()) {
type = "A_DTS";
}
}
}
if (lang.getAudioProperties().getAudioDelay() != 0 && params.timeseek == 0) {
timeshift = "timeshift=" + lang.getAudioProperties().getAudioDelay() + "ms, ";
}
pw.println(type + ", \"" + ffAudioPipe[i].getOutputPipe() + "\", " + timeshift + "track=" + (2 + i));
}
}
}
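		/*
		 * The generated meta file ends up looking roughly like this
		 * (values below are illustrative, not captured from a real run):
		 *
		 *   MUXOPT --no-pcr-on-video-pid --new-audio-pes --no-asyncio --vbr --vbv-len=500
		 *   V_MPEG4/ISO/AVC, "<videoout pipe>", fps=23.976, level=4.1, insertSEI, contSPS, track=1
		 *   A_AC3, "<audioout pipe>", track=2
		 */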
PipeProcess tsPipe = new PipeProcess(System.currentTimeMillis() + "tsmuxerout.ts");
/**
* Use the newer version of tsMuxeR on PS3 since other renderers
* like Panasonic TVs don't always recognize the new output
*/
String executable = executable();
if (params.mediaRenderer.isPS3()) {
executable = configuration.getTsmuxerNewPath();
}
String[] cmdArray = new String[]{
executable,
f.getAbsolutePath(),
tsPipe.getInputPipe()
};
cmdArray = finalizeTranscoderArgs(
filename,
dlna,
media,
params,
cmdArray
);
ProcessWrapperImpl p = new ProcessWrapperImpl(cmdArray, params);
params.maxBufferSize = 100;
params.input_pipes[0] = tsPipe;
params.stdin = null;
ProcessWrapper pipe_process = tsPipe.getPipeProcess();
p.attachProcess(pipe_process);
pipe_process.runInNewThread();
try {
Thread.sleep(50);
} catch (InterruptedException e) {
}
tsPipe.deleteLater();
ProcessWrapper ff_pipe_process = ffVideoPipe.getPipeProcess();
p.attachProcess(ff_pipe_process);
ff_pipe_process.runInNewThread();
try {
Thread.sleep(50);
} catch (InterruptedException e) {
}
ffVideoPipe.deleteLater();
p.attachProcess(ffVideo);
ffVideo.runInNewThread();
try {
Thread.sleep(50);
} catch (InterruptedException e) {
}
if (ffAudioPipe != null && params.aid != null) {
for (int i = 0; i < ffAudioPipe.length; i++) {
ff_pipe_process = ffAudioPipe[i].getPipeProcess();
p.attachProcess(ff_pipe_process);
ff_pipe_process.runInNewThread();
try {
Thread.sleep(50);
} catch (InterruptedException e) {
}
ffAudioPipe[i].deleteLater();
p.attachProcess(ffAudio[i]);
ffAudio[i].runInNewThread();
}
}
try {
Thread.sleep(100);
} catch (InterruptedException e) {
}
p.runInNewThread();
configuration = prev;
return p;
}
@Override
public String mimeType() {
return "video/mpeg";
}
@Override
public String name() {
return "tsMuxeR";
}
@Override
public int type() {
return Format.VIDEO;
}
private JCheckBox tsmuxerforcefps;
private JCheckBox muxallaudiotracks;
@Override
public JComponent config() {
// Apply the orientation for the locale
ComponentOrientation orientation = ComponentOrientation.getOrientation(PMS.getLocale());
String colSpec = FormLayoutUtil.getColSpec(COL_SPEC, orientation);
FormLayout layout = new FormLayout(colSpec, ROW_SPEC);
PanelBuilder builder = new PanelBuilder(layout);
builder.border(Borders.EMPTY);
builder.opaque(false);
CellConstraints cc = new CellConstraints();
JComponent cmp = builder.addSeparator(Messages.getString("NetworkTab.5"), FormLayoutUtil.flip(cc.xyw(2, 1, 1), colSpec, orientation));
cmp = (JComponent) cmp.getComponent(0);
cmp.setFont(cmp.getFont().deriveFont(Font.BOLD));
tsmuxerforcefps = new JCheckBox(Messages.getString("TsMuxeRVideo.2"), configuration.isTsmuxerForceFps());
tsmuxerforcefps.setContentAreaFilled(false);
tsmuxerforcefps.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
configuration.setTsmuxerForceFps(e.getStateChange() == ItemEvent.SELECTED);
}
});
builder.add(GuiUtil.getPreferredSizeComponent(tsmuxerforcefps), FormLayoutUtil.flip(cc.xy(2, 3), colSpec, orientation));
muxallaudiotracks = new JCheckBox(Messages.getString("TsMuxeRVideo.19"), configuration.isMuxAllAudioTracks());
muxallaudiotracks.setContentAreaFilled(false);
muxallaudiotracks.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
configuration.setMuxAllAudioTracks(e.getStateChange() == ItemEvent.SELECTED);
}
});
builder.add(GuiUtil.getPreferredSizeComponent(muxallaudiotracks), FormLayoutUtil.flip(cc.xy(2, 5), colSpec, orientation));
JPanel panel = builder.getPanel();
// Apply the orientation to the panel and all components in it
panel.applyComponentOrientation(orientation);
return panel;
}
@Override
public boolean isInternalSubtitlesSupported() {
return false;
}
@Override
public boolean isExternalSubtitlesSupported() {
return false;
}
@Override
public boolean isPlayerCompatible(RendererConfiguration mediaRenderer) {
return mediaRenderer != null && mediaRenderer.isMuxH264MpegTS();
}
/**
* {@inheritDoc}
*/
@Override
public boolean isCompatible(DLNAResource resource) {
DLNAMediaSubtitle subtitle = resource.getMediaSubtitle();
// Check whether the subtitle actually has a language defined,
// uninitialized DLNAMediaSubtitle objects have a null language.
if (subtitle != null && subtitle.getLang() != null) {
// The resource needs a subtitle, but PMS does not support subtitles for tsMuxeR.
return false;
}
try {
String audioTrackName = resource.getMediaAudio().toString();
String defaultAudioTrackName = resource.getMedia().getAudioTracksList().get(0).toString();
if (!audioTrackName.equals(defaultAudioTrackName)) {
// PMS only supports playback of the default audio track for tsMuxeR
return false;
}
} catch (NullPointerException e) {
LOGGER.trace("tsMuxeR cannot determine compatibility based on audio track for " + resource.getSystemName());
} catch (IndexOutOfBoundsException e) {
LOGGER.trace("tsMuxeR cannot determine compatibility based on default audio track for " + resource.getSystemName());
}
		return PlayerUtil.isVideo(resource, Format.Identifier.MKV) ||
			PlayerUtil.isVideo(resource, Format.Identifier.MPG);
}
}<|fim▁end|> | *
* This program is free software; you can redistribute it and/or |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
A sub-package for efficiently dealing with polynomials.
Within the documentation for this sub-package, a "finite power series,"
i.e., a polynomial (also referred to simply as a "series") is represented
by a 1-D numpy array of the polynomial's coefficients, ordered from lowest
order term to highest. For example, array([1,2,3]) represents
``P_0 + 2*P_1 + 3*P_2``, where P_n is the n-th order basis polynomial
applicable to the specific module in question, e.g., `polynomial` (which
"wraps" the "standard" basis) or `chebyshev`. For optimal performance,
all operations on polynomials, including evaluation at an argument, are
implemented as operations on the coefficients. Additional (module-specific)
information can be found in the docstring for the module of interest.
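For example, evaluating the series above at ``x = 2`` under the standard
basis gives ``1 + 2*2 + 3*2**2 = 17``; a quick sketch using this
sub-package's `polynomial` module::

    >>> import numpy.polynomial.polynomial as P
    >>> P.polyval(2, [1, 2, 3])
    17.0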
"""
from polynomial import *
from chebyshev import *
from polyutils import *
<|fim▁hole|><|fim▁end|> | from numpy.testing import Tester
test = Tester(__file__).test
bench = Tester(__file__).bench |
<|file_name|>navbarDirective.js<|end_file_name|><|fim▁begin|>(function(angular) {
'use strict';
angular<|fim▁hole|> return {
restrict: 'E',
templateUrl: 'nav/_navbar.html',
controller: 'NavbarController',
controllerAs: 'nav'
};
}
} (window.angular));<|fim▁end|> | .module('jstube.chromeExtensionCleaner.popup')
.directive('jstubeNavbar', jstubeNavbar);
function jstubeNavbar() { |
<|file_name|>kpiHelper.ts<|end_file_name|><|fim▁begin|>/*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../_references.ts"/>
module powerbitests.kpiHelper {
import ValueType = powerbi.ValueType;
import DataViewTransform = powerbi.data.DataViewTransform;
export function buildDataViewForRedTrend(): powerbi.DataView {
let dataViewMetadata: powerbi.DataViewMetadata = buildDefaultDataViewMetadata();
let dataViewCategorical: powerbi.DataViewCategorical = buildDataViewCategoricalForRedTrend();
let dataView: powerbi.DataView = {
metadata: dataViewMetadata,
categorical: dataViewCategorical
};
return dataView;
}
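    // Typical use in a spec (a sketch; "visualBuilder" stands in for
    // whatever harness the test uses to feed data views to the KPI visual):
    //   let dataView = kpiHelper.buildDataViewForRedTrend();
    //   visualBuilder.update(dataView);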
function buildDefaultDataViewMetadata(): powerbi.DataViewMetadata {
return {
columns: [
{ displayName: "TrendLine", type: ValueType.fromDescriptor({ text: true }) },
{ displayName: "Indicator", type: ValueType.fromDescriptor({ numeric: true }), isMeasure: true, roles: { "Indicator": true } },
{ displayName: "Goal", isMeasure: true }]
};
}
function buildDataViewCategoricalForRedTrend(): powerbi.DataViewCategorical {
let dataViewMetadata = buildDefaultDataViewMetadata();
let dataViewCategorical = {
categories: [{
source: dataViewMetadata.columns[0],
values: ["Apple", "Orange", "Kiwi", "Grapes", "Banana"],
identity: [
mocks.dataViewScopeIdentity("Apple"),
mocks.dataViewScopeIdentity("Orange"),
mocks.dataViewScopeIdentity("Kiwi"),
mocks.dataViewScopeIdentity("Grapes"),
mocks.dataViewScopeIdentity("Banana")
],
}],
values: DataViewTransform.createValueColumns([{
source: dataViewMetadata.columns[1],
values: [20, 10, 30, 15, 12]
},
{
source: dataViewMetadata.columns[2],
values: [20, 20, 20, 20, 20]
}])
};
return dataViewCategorical;
}
<|fim▁hole|> export function buildDataViewForGreenTrend(): powerbi.DataView {
let dataView = buildDataViewForRedTrend();
dataView.categorical.values[0].values = [20, 10, 30, 15, 25];
return dataView;
}
export function buildDataViewForNoGoalTrend(): powerbi.DataView {
let dataViewMetadata: powerbi.DataViewMetadata = buildDataViewMetadataForNoGoal();
let dataViewCategorical: powerbi.DataViewCategorical = buildDataViewCategoricalForNoGoal();
let dataView: powerbi.DataView = {
metadata: dataViewMetadata,
categorical: dataViewCategorical
};
return dataView;
}
function buildDataViewMetadataForNoGoal(): powerbi.DataViewMetadata {
return {
columns: [
{ displayName: "Fruit", type: ValueType.fromDescriptor({ text: true }) },
{ displayName: "Indicator", isMeasure: true, type: ValueType.fromDescriptor({ numeric: true }), roles: { "Indicator": true } },]
};
}
function buildDataViewCategoricalForNoGoal(): powerbi.DataViewCategorical {
let dataViewMetadata = buildDataViewMetadataForNoGoal();
let dataViewCategorical = {
categories: [{
source: dataViewMetadata.columns[0],
values: ["Apple", "Orange", "Kiwi", "Grapes", "Banana"],
identity: [
mocks.dataViewScopeIdentity("Apple"),
mocks.dataViewScopeIdentity("Orange"),
mocks.dataViewScopeIdentity("Kiwi"),
mocks.dataViewScopeIdentity("Grapes"),
mocks.dataViewScopeIdentity("Banana")
],
}],
values: DataViewTransform.createValueColumns([{
source: dataViewMetadata.columns[1],
values: [20, 10, 30, 15, 18]
}])
};
return dataViewCategorical;
}
export function buildDataViewForGreenNoTrend(): powerbi.DataView {
let dataViewMetadata: powerbi.DataViewMetadata = buildDataViewMetadataForGreenNoTrend();
let dataViewCategorical: powerbi.DataViewCategorical = buildDataViewCategoricalForGreenNoTrend();
let dataView: powerbi.DataView = {
metadata: dataViewMetadata,
categorical: dataViewCategorical
};
return dataView;
}
function buildDataViewMetadataForGreenNoTrend(): powerbi.DataViewMetadata {
return {
columns: [
{ displayName: "Indicator", isMeasure: true, type: ValueType.fromDescriptor({ numeric: true }), roles: { "Indicator": true } },
{ displayName: "Goal", isMeasure: true, roles: { "Goal": true }}]
};
}
function buildDataViewCategoricalForGreenNoTrend(): powerbi.DataViewCategorical {
let dataViewMetadata = buildDataViewMetadataForGreenNoTrend();
let dataViewCategorical = {
values: DataViewTransform.createValueColumns([{
source: dataViewMetadata.columns[0],
values: [20]
},
{
source: dataViewMetadata.columns[1],
values: [15]
}])
};
return dataViewCategorical;
}
export function buildDataViewForRedNoTrend(): powerbi.DataView {
let dataView = buildDataViewForGreenNoTrend();
dataView.categorical.values[0].values = [10];
return dataView;
}
}<|fim▁end|> | |
<|file_name|>porter_segment.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import json
import requests
import sys, os
import time
import re
class Helper:
initheaders = {
"Host": "segmentfault.com",
"Connection": "keep-alive",
"Content-Length": "55",
"Accept": "*/*",
"Origin": "https://segmentfault.com",
"X-Requested-With": "XMLHttpRequest",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"DNT": "1",
"Referer": "https://segmentfault.com/",
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.8,en;q=0.6,ja;q=0.4,zh-TW;q=0.2",
"Pragma": "no-cache",
"Cache-Control": "no-cache",
"Cookie": "PHPSESSID=web3~fdf535b2518f7f061780d987bb65934a; _gat=1; io=onpREhr-L-d7pRxJHvSF; Hm_lvt_e23800c454aa573c0ccb16b52665ac26=1508383051,1508500169,1508563643,1508565378; Hm_lpvt_e23800c454aa573c0ccb16b52665ac26=1508569683; _ga=GA1.2.613128477.1495522770; _gid=GA1.2.1217955936.1508498183"
}
def __init__(self):
self.loadConfig()
self._session = requests.session()
self._session.headers = Helper.initheaders
self._session.max_redirects = 60
if(self.initHeader() != None):
print 'use cached headers'
self._session.headers = self.initHeader()
print self._session.headers
self.filenameList = {}
def loadConfig(self):
        # Load the configuration file
currentProject = os.path.dirname(sys.path[0])
configStr = os.path.abspath( os.path.join(currentProject,'config.json'))
data_file = open(configStr)
data = json.load(data_file)
self.loginUrl = data["segment"]["login-url"]
self.loginPage = data["segment"]["login-page"]
self.postUrl = data["segment"]["post-url"]
self.username = data["segment"]["username"]
self.password = data["segment"]["password"]
self.draftUrl = data["segment"]["draft-url"]
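        # Expected shape of config.json (a sketch; the key names come from
        # the lookups above, the values are placeholders):
        #
        # {
        #     "segment": {
        #         "login-url": "...",
        #         "login-page": "...",
        #         "post-url": "...",
        #         "username": "...",
        #         "password": "...",
        #         "draft-url": "..."
        #     }
        # }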
def initHeader(self):
try:
cookiepath = os.path.abspath(os.path.join(os.path.dirname('.'), 'cookie/segment_cookies'))
data_file = open(cookiepath,'r')
data = json.load(data_file)
return data
except ValueError, e:
print 'cache-cookie is None'
return None
except IOError , e:
print 'file is not found'
return None
def login(self):
        # Log in using the cached session //TODO token
# try:
# print self._session.headers
# res = self._session.post(self.loginUrl + '?_=b56c39ea0c0d50b3dd9e5fa11d9e2f00', timeout=10)
# except requests.exceptions.ReadTimeout,e:
        #     print 'login with cached session failed'
res = '';
while(res == ''):
try:
data = self._prepare_login_form_data()
res = self._session.post(self.loginUrl,data=data,timeout=10)
print res
if(res.status_code == 200):
self.saveHeader()
print 'login succ'
return 0
else:
print 'login fail'
except ValueError,e:
print e
                print 'cached login succeeded'
return 'succ'
except requests.exceptions.ConnectionError:
print 'requests.exceptions.ConnectionError try again'
time.sleep(5)
print 'sleep over'
continue
def _prepare_login_form_data(self):
        # Build and return the login form payload
form = {
'username': str(self.username),
'password': str(self.password),
'remember': "1"
}
print form<|fim▁hole|>
def postArticle(self,filename):
print '--submit post--'
self._session.headers['Referer'] = 'https://segmentfault.com/write?freshman=1'
formdata = self._prepare_post_form_data(filename)
res = ''
while (res == ''):
try:
res = self._session.post(self.postUrl,data=formdata,timeout=10)
print res
if(res.json()['status'] == 0):
                    print 'Article published: ' + formdata['title']
                else:
                    print 'Failed to publish article: ' + formdata['title']
            except:
                print 'Exception while publishing'
continue
print '-- post end --'
def _prepare_post_form_data(self,filename):
draftData = self.extractFile(filename)
print draftData
print '-- save draft --'
artId = ''
res = ''
while (res == ''):
try:
res = self._session.post(self.draftUrl,data=draftData,timeout=10)
status = res.json()['status']
if(status == 0):
artId = res.json()['data']
print '保存草稿成功'
else:
print '保存草稿失败'
return None
except:
print '保存草稿出现异常'
time.sleep(5)
continue
del draftData['do']
del draftData['aticleId']
draftData['license'] = '1'
draftData['draftId'] = artId
draftData['createId'] = ''
draftData['newsType'] = '1490000006201495'
return draftData
def saveHeader(self):
cookiepath = os.path.abspath(os.path.join(os.path.dirname('.'), 'cookie/segment_cookies'))
file = open(cookiepath, 'w')
cookies = self._session.headers
json.dump(cookies, file)
file.close()
def dirCb(self,dirname):
for line in os.listdir(dirname):
filename = os.path.abspath(os.path.join(dirname, line))
if(os.path.isdir(filename)):
self.dirCb(filename)
else:
pattern = re.compile(r"(\d+)-(\d+)-(\d+)-(\S+.md)")
result = pattern.findall(filename)
if (len(result) != 0):
tags = filename.split('_posts')[1]
# print tags
tagname = ''
for tag in tags.split(os.sep):
if (tag != '' and len(pattern.findall(tag)) == 0):
tagname = tagname + '|' + tag
tagname = tagname[1:]
self.filenameList[filename] = tagname
# for fn in self.filenameList:
# print fn +' -t- '+self.filenameList[fn]
def destroy(self):
self._session.close()
def extractFile(self,filename):
data = {}
        ## Enforce the length limit
file = open(filename)
filecontent = file.read()
print len(filecontent)
if (len(filecontent) >= 75000):
filecontent = filecontent[0:75000]
        ## Build the canonical source link
pattern = re.compile(r"(\d+)-(\d+)-(\d+)-(\S+).md")
print filename
result = pattern.findall(filename)
print result
href = 'www.paraller.com/' + result[0][0] + '/' + result[0][1] + '/' + result[0][2] + '/' + result[0][
3] + '/'
        lience = 'Please credit the source when reposting: [http://www.paraller.com](http://www.paraller.com) \r\n Original formatted post at [' + href + '](' + href + ')\r\n'
print lience
        ## Replace the front-matter header with the attribution
pattern = re.compile(r"---(\n(.{0,}))*---")
filecontent = re.sub(pattern, lience, filecontent)
        ## Assemble the post payload
data = {
"do": "saveArticle",
"type": "1",
"title": result[0][3],
"text": filecontent,
"weibo": "0",
"blogId": "0",
"aticleId": "",
"id": "",
"url": ""
}
print self.filenameList[filename]
        # Resolve tag ids
tags = self.filenameList[filename].split('|')
tagsDict = []
for tag in tags:
print 'tag is :'
print tag
data['tags[]'] = self.getTags(tag)
return data
def getTags(self,tagname):
        ## Tag lookup
self._session.headers['Referer'] = 'https://segmentfault.com/write?freshman=1'
if(self._session.headers.has_key('Origin')):
del self._session.headers['Origin']
del self._session.headers['Content-Length']
del self._session.headers['Content-Type']
res = ''
while res == '':
try:
print 'https://segmentfault.com/api/tags/search?q='+tagname+'&_=7ee3470a9132cf004a134938342f4b35'
res = self._session.get('https://segmentfault.com/api/tags/search?q='+tagname+'&_=7ee3470a9132cf004a134938342f4b35',timeout=5)
except:
time.sleep(5)
                print 'retrying'
continue
print res.text
if( len(res.json()['data']) == 0):
                print 'could not find tag, retrying with a shorter name'
                print tagname[0:len(tagname)-1]
                return self.getTags(tagname[0:len(tagname)-1])
else:
print res.json()['data'][0]['name']
return res.json()['data'][0]['id']
if __name__ == '__main__':
_helper = Helper()
code = _helper.login()
if (code == 0):
path = os.path.abspath(os.path.join(sys.path[0],'../../'))
_helper.dirCb(path)
for filename in _helper.filenameList:
_helper.postArticle(filename)
else:
        print 'Login failed'
_helper.destroy()
# _helper.extractFile('/Users/zhidaliao/Desktop/zhida_blog/_posts/运维 & 主机 & 系统搭建/2016-05-22-gitlab-runner-maven卡死的情况.md')
# _helper.postArticle('/Users/zhidaliao/Desktop/zhida_blog/_posts/运维 & 主机 & 系统搭建/2016-05-22-gitlab-runner-maven卡死的情况.md')
# _helper._prepare_post_form_data('/Users/zhidaliao/Desktop/zhida_blog/_posts/运维 & 主机 & 系统搭建/2016-05-22-gitlab-runner-maven卡死的情况.md')
# 遍历文章
# _helper.loopDir()
# _helper.dirCb('docker')
# if(code == 0):
# _helper.postArticle()
# _helper.destroy()
else:
print 'being imported as module'<|fim▁end|> | return form
|
<|file_name|>binary_buffer.cpp<|end_file_name|><|fim▁begin|>#include "binary_buffer.hpp"
#include <iterator>
#include <algorithm>
#include <sstream>
#include <boost/endian/conversion.hpp>
using boost::endian::native_to_big;
using boost::endian::big_to_native;
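// Round-trip example (a sketch; uses only the reader/writer API defined
// in this translation unit):
//
//   aria::internal::binary_buffer_writer w;
//   w.write_uint32(42);
//   w.write_string("hello");
//   std::vector<aria::byte> buf = w.take_buffer();
//
//   aria::internal::binary_buffer_reader r(&buf);
//   assert(r.read_uint32() == 42);
//   assert(r.read_string() == "hello");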
namespace {
using aria::byte;
template <typename P>
void append_bytes_to_vector(std::vector<byte> & vec, P primitive)
{
auto * begin = reinterpret_cast<byte *>(&primitive);
auto * end = begin + sizeof(primitive);<|fim▁hole|> P read_primitive_and_advance(const byte * buffer, size_t size, size_t & offset, const std::string & name)
{
size_t stride = sizeof(P);
if (offset + stride <= size) {
auto i = reinterpret_cast<const P *>(buffer + offset);
offset += stride;
return big_to_native(*i);
} else {
throw aria::internal::buffer_error("Insufficient bytes available to read " + name + ".");
}
}
}
aria::internal::buffer_error::buffer_error(const char *what)
: std::runtime_error(what)
{
}
aria::internal::buffer_error::buffer_error(const std::string &what)
: std::runtime_error(what)
{
}
void aria::internal::binary_buffer_writer::write_uint8(uint8_t i)
{
_bytes.push_back(static_cast<byte>(i));
}
void aria::internal::binary_buffer_writer::write_uint16(uint16_t i)
{
append_bytes_to_vector(_bytes, native_to_big(i));
}
void aria::internal::binary_buffer_writer::write_uint32(uint32_t i)
{
append_bytes_to_vector(_bytes, native_to_big(i));
}
void aria::internal::binary_buffer_writer::write_uint64(uint64_t i)
{
append_bytes_to_vector(_bytes, native_to_big(i));
}
void aria::internal::binary_buffer_writer::write_string(const std::string &str)
{
write_uint32(str.size());
for (auto c : str) {
_bytes.push_back(static_cast<byte>(c));
}
}
void aria::internal::binary_buffer_writer::write_bytes(const std::vector<aria::byte> &bytes)
{
write_uint32(bytes.size());
std::copy(bytes.begin(), bytes.end(), std::back_inserter(_bytes));
}
std::vector<aria::byte> aria::internal::binary_buffer_writer::take_buffer()
{
std::vector<byte> buffer;
_bytes.swap(buffer);
return buffer;
}
aria::internal::binary_buffer_reader::binary_buffer_reader(const std::vector<byte> * buffer)
: _buffer_start(buffer->data()), _buffer_size(buffer->size()), _offset(0)
{
}
uint8_t aria::internal::binary_buffer_reader::read_uint8()
{
return read_primitive_and_advance<uint8_t>(_buffer_start, _buffer_size, _offset, "uint8");
}
uint16_t aria::internal::binary_buffer_reader::read_uint16()
{
return read_primitive_and_advance<uint16_t>(_buffer_start, _buffer_size, _offset, "uint16");
}
uint32_t aria::internal::binary_buffer_reader::read_uint32()
{
return read_primitive_and_advance<uint32_t>(_buffer_start, _buffer_size, _offset, "uint32");
}
uint64_t aria::internal::binary_buffer_reader::read_uint64()
{
return read_primitive_and_advance<uint64_t>(_buffer_start, _buffer_size, _offset, "uint64");
}
std::string aria::internal::binary_buffer_reader::read_string()
{
uint32_t size;
try {
size = read_uint32();
} catch (buffer_error) {
throw buffer_error("Insufficient bytes available to read string size.");
}
if (_offset + size <= _buffer_size) {
auto data = reinterpret_cast<const char *>(_buffer_start + _offset);
_offset += size;
    return std::string(data, size);
} else {
assert(_offset <= _buffer_size);
auto available = _buffer_size - _offset;
std::stringstream ss;
ss << "Expected " << size << " bytes of string data, but only " << available
<< " available bytes in buffer.";
throw buffer_error(ss.str());
}
}
std::vector<byte> aria::internal::binary_buffer_reader::read_bytes()
{
uint32_t size;
try {
size = read_uint32();
} catch (buffer_error) {
throw buffer_error("Insufficient bytes available to read data size.");
}
if (_offset + size <= _buffer_size) {
auto data = _buffer_start + _offset;
_offset += size;
return std::vector<byte>(data, data + size);
} else {
assert(_offset <= _buffer_size);
auto available = _buffer_size - _offset;
std::stringstream ss;
ss << "Expected " << size << " bytes of data, but only " << available
<< " available bytes in buffer.";
throw buffer_error(ss.str());
}
}<|fim▁end|> | std::copy(begin, end, std::back_inserter(vec));
}
template <typename P> |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import AnswerRating from './answerRating';
import FeedBackResults from './feedbackResults';
import './less/feedback.less';
// Check if bar rating should be initialized
const ratingWrapper = document.querySelector('.rating-wrapper');
if (ratingWrapper !== null) {
AnswerRating();
}<|fim▁hole|>if (feedBackResultsElement !== null) {
FeedBackResults();
}<|fim▁end|> |
// Check if feed back results charts should be initialized
const feedBackResultsElement = document.getElementById('feedback-results'); |
<|file_name|>tok.rs<|end_file_name|><|fim▁begin|>use std::str::FromStr;
#[derive(Debug)]
pub enum Tok {
Num(i32),
LParen,
RParen,
Minus,
Plus,
Times,
Div,
Comma,
}
// simplest and stupidest possible tokenizer
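// Example (a sketch): tokenize("12+3") yields
//   [(0, Num(12), 1), (2, Plus, 3), (4, Num(3), 5)]
// since each token is assigned the synthetic span (i*2, i*2+1).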
pub fn tokenize(s: &str) -> Vec<(usize, Tok, usize)> {
let mut tokens = vec![];
let mut chars = s.chars();
let mut lookahead = chars.next();
while let Some(c) = lookahead {
// skip whitespace characters
if !c.is_whitespace() {
match c {
'(' => tokens.push(Tok::LParen),
')' => tokens.push(Tok::RParen),
'-' => tokens.push(Tok::Minus),
'+' => tokens.push(Tok::Plus),
'*' => tokens.push(Tok::Times),
',' => tokens.push(Tok::Comma),
'/' => tokens.push(Tok::Div),
_ if c.is_digit(10) => {
let (tmp, next) = take_while(c, &mut chars, |c| c.is_digit(10));
lookahead = next;
tokens.push(Tok::Num(i32::from_str(&tmp).unwrap()));
continue;
}
_ => {
panic!("invalid character: {:?}", c);
}
}
}
// advance to next character by default
lookahead = chars.next();
}
tokens.into_iter()
.enumerate()
.map(|(i, tok)| (i*2, tok, i*2+1))<|fim▁hole|> where C: Iterator<Item=char>, F: Fn(char) -> bool
{
let mut buf = String::new();
buf.push(c0);
while let Some(c) = chars.next() {
if !f(c) {
return (buf, Some(c));
}
buf.push(c);
}
return (buf, None);
}<|fim▁end|> | .collect()
}
fn take_while<C,F>(c0: char, chars: &mut C, f: F) -> (String, Option<char>) |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
@file Export all functions in yuv-video to user
@author Gilson Varghese<[email protected]>
@date 13 Oct, 2016
**/
/**
Module includes
*/
var frameReader = require('./lib/framereader');
var frameWriter = require('./lib/framewriter');
var frameConverter = require('./lib/frameconverter');
/**
Global variables
*/
var version = "1.0.0";
/**
Export all the functions to global namespace
*/
module.exports = {
/**
    Returns the current library version
*/
version: function() {
return version;
},
/**
Frame reader to read frame according to the given options
*/
frameReader: frameReader,
/**
Frame Writer to write frame according to the options
*/
    frameWriter: frameWriter,
/**
    Frame Converter to convert frame into various formats
Currently only YV21 and V210 are supported
*/
frameConverter: frameConverter<|fim▁hole|><|fim▁end|> |
}; |
<|file_name|>response.py<|end_file_name|><|fim▁begin|>from datetime import timedelta
from propeller.cookie import Cookie
from propeller.options import Options
from propeller.template import Template
from propeller.util.dict import MultiDict
from urllib import quote
import httplib
import propeller
class Response(object):
def __init__(self, body='', status_code=200, content_type='text/html'):
self.body = body
self.status_code = status_code
self.headers = MultiDict()
self.cookies = []
self.headers['Content-Type'] = content_type
def _get_status_code(self):
return self._status_code
def _set_status_code(self, status_code):
assert status_code >= 200 and status_code <= 500, \
'status_code must be an int between 200 and 500'
self._status_code = status_code
def _get_body(self):
return self._body
def _set_body(self, body):
assert isinstance(body, basestring) or isinstance(body, Template), \
'body must be an instance of basestring or Template'
if isinstance(body, basestring):
self._body = body
elif isinstance(body, Template):
self._body = str(body)
def _build_headers(self):
self.headers['Content-Length'] = len(self.body)
if 'Content-Type' not in self.headers or not \
self.headers['Content-Type'][0]:
self.headers['Content-Type'] = 'text/html; charset=utf-8'
status = 'HTTP/1.0 %d %s' % (self.status_code,
httplib.responses[self.status_code])
headers = ['%s: %s' % (k, v) for k, v in self.headers.items()]
headers += ['Set-Cookie: %s' % str(c) for c in self.cookies]
headers = '\r\n'.join([status] + headers) + '\r\n\r\n'
return headers
def _error_page(self, title, subtitle='', traceback=None):
t = Options.tpl_env.get_template('error.html')
return t.render(
title=title,
subtitle=subtitle,
traceback=traceback,
version=propeller.__version__
)
def set_cookie(self, name, value, domain=None, expires=None, path=None,
secure=False):
self.cookies.append(Cookie(name=name, value=value, domain=domain,
expires=expires, path=path, secure=secure))
def __str__(self):
return self._build_headers() + self.body
status_code = property(_get_status_code, _set_status_code)
body = property(_get_body, _set_body)
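# Example usage (a sketch; a handler would normally return the object and
# let the framework serialize it):
#
#   resp = Response(body='<h1>Hi</h1>', status_code=200)
#   resp.set_cookie('session', 'abc123', path='/')
#   raw = str(resp)  # status line + headers + '\r\n\r\n' + body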
class RedirectResponse(Response):
def __init__(self, redirect_url, permanent=False, *args, **kwargs):
status_code = 301 if permanent else 302
super(RedirectResponse, self).__init__(status_code=status_code, *args,
**kwargs)
self.redirect_url = redirect_url
def __str__(self):
if 'Location' not in self.headers:
self.headers['Location'] = self.redirect_url
return super(RedirectResponse, self).__str__()
class BadRequestResponse(Response):
def __init__(self, *args, **kwargs):
super(BadRequestResponse, self).__init__(status_code=400, *args,
**kwargs)
def __str__(self):
if not self.body and Options.debug:
self.body = self._error_page(httplib.responses[self.status_code])
return super(BadRequestResponse, self).__str__()<|fim▁hole|>
class NotFoundResponse(Response):
def __init__(self, url=None, *args, **kwargs):
super(NotFoundResponse, self).__init__(status_code=404, *args,
**kwargs)
self.url = url
def __str__(self):
if not self.body and Options.debug:
self.body = self._error_page(httplib.responses[self.status_code],
self.url)
return super(NotFoundResponse, self).__str__()
class InternalServerErrorResponse(Response):
def __init__(self, title, subtitle, traceback, *args, **kwargs):
super(InternalServerErrorResponse, self).__init__(status_code=500,
*args, **kwargs)
self.title = title
self.subtitle = subtitle
self.traceback = traceback
def __str__(self):
if not self.body and Options.debug:
self.body = self._error_page(self.title,
self.subtitle,
self.traceback)
return super(InternalServerErrorResponse, self).__str__()<|fim▁end|> | |
<|file_name|>path-type-bounds.rs<|end_file_name|><|fim▁begin|>// pp-exact<|fim▁hole|>
trait Tr { }
impl Tr for int;
fn foo(x: ~Tr: Freeze) -> ~Tr: Freeze { x }
fn main() {
let x: ~Tr: Freeze;
~1 as ~Tr: Freeze;
}<|fim▁end|> | |
<|file_name|>styles.js<|end_file_name|><|fim▁begin|>'use strict';
var React = require('react-native');
var {StyleSheet, PixelRatio, Platform} = React;
var styles = StyleSheet.create({
container: {
flex:1,
paddingTop: 70,
backgroundColor: '#F7F7F7',
},
row: {
flexDirection: 'row',
backgroundColor:'white',
borderRadius: 0,
borderWidth: 0,
borderTopWidth: 1 / PixelRatio.get(),
borderColor: '#d6d7da',
padding:10,<|fim▁hole|> categoryLabel: {
fontSize: 15,
textAlign: 'left',
left: 10,
padding:10,
fontWeight:'bold',
},
lastRow: {
flexDirection: 'row',
backgroundColor:'white',
borderRadius: 0,
borderWidth: 0,
borderTopWidth: 1 / PixelRatio.get(),
borderBottomWidth: 1 / PixelRatio.get(),
borderColor: '#d6d7da',
padding:10,
alignItems: 'center'
},
rowLabel: {
left:10,
flex:1,
fontSize:15,
},
rowInput: {
fontSize:15,
flex:1,
height:(Platform.OS=='ios') ? 30 : 50
},
messageItem: {
padding:10,
paddingRight:20,
fontSize:15
},
messageBar: {
backgroundColor:'white',
flexDirection:'row',
left:0,
right:0,
height:55
},
message: {
left:10,
right:10,
fontSize:15,
flex:1,
height:30
},
button: {
backgroundColor: 'white',
padding: 15,
borderColor: '#eeeeee',
borderWidth:1,
borderBottomWidth: 1 / PixelRatio.get(),
marginTop:20,
borderRadius:10,
width:300,
marginRight:20,
marginLeft:20,
alignSelf: 'center'
},
sendButton: {
justifyContent: 'center',
width:80
},
navBar: {
backgroundColor: '#0db0d9'
},
loadingContainer: {
position: 'absolute',
top:0,
bottom:0,
left:0,
right:0,
backgroundColor:'black',
opacity:0.7,
justifyContent: 'center',
alignItems: 'center'
},
loading: {
width:70,
borderRadius: 6,
height:70,
justifyContent: 'center',
alignItems: 'center',
backgroundColor:'white'
}
});
module.exports = styles;<|fim▁end|> | alignItems: 'center'
}, |
<|file_name|>bv_feature_selection.py<|end_file_name|><|fim▁begin|>import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
import numpy
from pyqtgraph.parametertree import Parameter, ParameterTree, ParameterItem, registerParameterType
class FeatureSelectionDialog(QtGui.QDialog):
def __init__(self,viewer, parent):
super(FeatureSelectionDialog, self).__init__(parent)
self.resize(800,600)
self.viewer = viewer
self.layout = QtGui.QVBoxLayout()
self.setLayout(self.layout)
self.buttonBox = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok|QtGui.QDialogButtonBox.Cancel)
self.buttonBox.accepted.connect(self.onPressAccepted)
def makeCheckBox(name, val=True):
return {
'name': name,
'type': 'bool',
'value': val,
#'tip': "This is a checkbox",
}
sigmaOpt = {'name': 'sigma', 'type': 'str', 'value': '[0.0, 1.0, 2.0, 4.0]' }
wardOpts = {'name': 'wardness', 'type': 'str', 'value': '[0.0, 0.1, 0.2]' }
filterChild = [
makeCheckBox("computeFilter"),
sigmaOpt,
{
'name':'UCM',
'children': [
makeCheckBox("ucmFilters"),
wardOpts,
{'name': 'meanSign', 'type': 'float', 'value': '1.0' }
]
}
]
params = [
{
'name' : "RawData",
'type' : 'group',
'children' : [
{
'name': 'Compute Features On Raw Data',
'type': 'bool',
'value': True,
'tip': "This is a checkbox",
},
{
'name' : "0-Order Filter",
'type' : 'group',
'children' : filterChild
},
{
'name' : "1-Order Filter",
'type' : 'group',
'children' : filterChild
},
{
'name' : "2-Order Filter",
'type' : 'group',
'children' : filterChild
}
]
},
#ComplexParameter(name='Custom parameter group (reciprocal values)'),
#ScalableGroup(name="Expandable Parameter Group", children=[
# {'name': 'ScalableParam 1', 'type': 'str', 'value': "default param 1"},
# {'name': 'ScalableParam 2', 'type': 'str', 'value': "default param 2"},
#]),
]
## Create tree of Parameter objects
self.p = Parameter.create(name='params', type='group', children=params)
self.t = ParameterTree()
self.t.setParameters(self.p, showTop=False)
self.layout.addWidget(self.t)
self.layout.addWidget(self.buttonBox)
## If anything changes in the tree, print a message
def change(param, changes):
print("tree changes:")
for param, change, data in changes:
path = self.p.childPath(param)
if path is not None:
childName = '.'.join(path)
else:
childName = param.name()
print(' parameter: %s'% childName)
print(' change: %s'% change)
print(' data: %s'% str(data))
print(' ----------')
self.p.sigTreeStateChanged.connect(change)
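        # Reading a value back out of the tree later (a sketch; param()
        # takes the path of names used in the declaration above):
        #   sigma = self.p.param('RawData', '0-Order Filter', 'sigma').value()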
<|fim▁hole|>
def onPressAccepted(self):
self.hide()
self.viewer.onClickedComputeFeaturesImpl(self.p)
def keyPressEvent(self, event):
if event.key() == QtCore.Qt.Key_Escape:
self.hide()
event.accept()
else:
super(QtGui.QDialog, self).keyPressEvent(event)<|fim▁end|> | |
<|file_name|>angular-enhance-text.min.js<|end_file_name|><|fim▁begin|>(function(angular){
'use strict';
var app = angular.module('bernhardposselt.enhancetext', ['ngSanitize']);
app.factory('TextEnhancer',
["SmileyEnhancer", "VideoEnhancer", "NewLineEnhancer", "ImageEnhancer", "YouTubeEnhancer", "LinkEnhancer", function (SmileyEnhancer, VideoEnhancer, NewLineEnhancer, ImageEnhancer,
YouTubeEnhancer, LinkEnhancer) {
return function (text, options) {
text = escapeHtml(text);
text = SmileyEnhancer(text, options.smilies);
if (options.embedImages) {
text = ImageEnhancer(text, options.embeddedImagesHeight,
options.embeddedVideosWidth,
options.embeddedLinkTarget);
}
if (options.embedVideos) {
text = VideoEnhancer(text, options.embeddedImagesHeight,
options.embeddedVideosWidth);
}
if (options.embedYoutube) {
text = YouTubeEnhancer(text, options.embeddedYoutubeHeight,
options.embeddedYoutubeWidth);
}
if (options.newLineToBr) {
text = NewLineEnhancer(text);
}
if (options.embedLinks) {
text = LinkEnhancer(text, options.embeddedLinkTarget);
}
return text;
};
}]);
app.factory('ImageEnhancer', function () {
return function (text, height, width, target) {
if(target === undefined) {
target = '_blank';
}
var imgRegex = /((?:https?):\/\/\S*\.(?:gif|jpg|jpeg|tiff|png|svg|webp))(\s<\1>){0,1}/gi;
var imgDimensions = getDimensionsHtml(height, width);
var img = '<a href="$1" target="' + target +
'">' + '<img ' + imgDimensions + 'alt="image" src="$1"/>$1</a>';
return text.replace(imgRegex, img);
};
});
app.factory('LinkEnhancer', function () {
return function (text, target) {
if(target === undefined) {
target = '_blank';
}
var regex = /((href|src)=["']|)(\b(https?|ftp|file):\/\/((?!>)[-A-Z0-9+&@#\/%?=~_|!:,.;])*((?!>)[-A-Z0-9+&@#\/%=~_|]))/ig;
return text.replace(regex, function() {
return arguments[1] ?
arguments[0] :
'<a target="' + target + '" href="'+ arguments[3] + '">' + arguments[3] + '</a>';
});
};
});
app.factory('NewLineEnhancer', function () {
return function (text) {
        return text.replace(/\n/g, '<br/>').replace(/&#10;/g, '<br/>');
};
});
app.factory('SmileyEnhancer', function () {
return function(text, smilies) {
var smileyKeys = Object.keys(smilies);
// split input into lines to avoid dealing with tons of
// additional complexity/combinations arising from new lines
var lines = text.split('\n');
var smileyReplacer = function (smiley, replacement, line) {
// four possibilities: at the beginning, at the end, in the
// middle or only the smiley
var startSmiley = "^" + escapeRegExp(smiley) + " ";
var endSmiley = " " + escapeRegExp(smiley) + "$";
var middleSmiley = " " + escapeRegExp(smiley) + " ";
var onlySmiley = "^" + escapeRegExp(smiley) + "$";
return line.
replace(new RegExp(startSmiley), replacement + " ").
replace(new RegExp(endSmiley), " " + replacement).
replace(new RegExp(middleSmiley), " " + replacement + " ").
replace(new RegExp(onlySmiley), replacement);
};
// loop over smilies and replace them in the text
for (var i=0; i<smileyKeys.length; i++) {
var smiley = smileyKeys[i];
var replacement = '<img alt="' + smiley + '" src="' +
smilies[smiley] + '"/>';
// partially apply the replacer function to set the replacement
// string
var replacer = smileyReplacer.bind(null, smiley, replacement);
lines = lines.map(replacer);
}
return lines.join('\n');
};
});
app.factory('VideoEnhancer', function () {
return function (text, height, width) {
var regex = /((?:https?):\/\/\S*\.(?:ogv|webm))/gi;
var dimensions = getDimensionsHtml(height, width);
var vid = '<video ' + dimensions + 'src="$1" controls preload="none"></video>';
return text.replace(regex, vid);
};
});
app.factory('YouTubeEnhancer', function () {
return function (text, height, width) {
var regex = /https?:\/\/(?:[0-9A-Z-]+\.)?(?:youtu\.be\/|youtube\.com(?:\/embed\/|\/v\/|\/watch\?v=|\/ytscreeningroom\?v=|\/feeds\/api\/videos\/|\/user\S*[^\w\-\s]|\S*[^\w\-\s]))([\w\-]{11})[?=&+%\w-]*/gi;
var dimensions = getDimensionsHtml(height, width);
var html = '<iframe ' + dimensions +
'src="https://www.youtube.com/embed/$1" ' +
'frameborder="0" allowfullscreen></iframe>';
return text.replace(regex, html);
};
});
app.provider('enhanceTextFilter', function () {
var options = {
cache: true,
newLineToBr: true,
embedLinks: true,
embeddedLinkTarget: '_blank',
embedImages: true,
embeddedImagesHeight: undefined,
embeddedImagesWidth: undefined,
embedVideos: true,
embeddedVideosHeight: undefined,
embeddedVideosWidth: undefined,
embedYoutube: true,
embeddedYoutubeHeight: undefined,
embeddedYoutubeWidth: undefined,
smilies: {}
},
textCache = {};
this.setOptions = function (customOptions) {
angular.extend(options, customOptions);
};
/* @ngInject */
this.$get = function ($sce, TextEnhancer) {
return function (text) {
var originalText = text;
// hit cache first before replacing
if (options.cache) {
var cachedResult = textCache[text];
if (angular.isDefined(cachedResult)) {
return cachedResult;
}
}
text = TextEnhancer(text, options);<|fim▁hole|>
// cache result
if (options.cache) {
textCache[originalText] = text;
}
return text;
};
};
this.$get.$inject = ["$sce", "TextEnhancer"];
});
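// Example configuration (a sketch; the option names mirror the defaults
// above and the module name comes from the declaration at the top):
//
//   angular.module('myApp', ['bernhardposselt.enhancetext'])
//       .config(function (enhanceTextFilterProvider) {
//           enhanceTextFilterProvider.setOptions({
//               embedYoutube: false,
//               smilies: {':)': 'smile.png'}
//           });
//       });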
function escapeHtml(str) {
var div = document.createElement('div');
div.appendChild(document.createTextNode(str));
return div.innerHTML;
}
// taken from https://developer.mozilla.org/en/docs/Web/JavaScript/Guide/Regular_Expressions
function escapeRegExp (str) {
return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&");
}
function getDimensionsHtml (height, width) {
var dimensions = '';
if (angular.isDefined(height)) {
dimensions += 'height="' + height + '" ';
}
if (angular.isDefined(width)) {
dimensions += 'width="' + width + '" ';
}
return dimensions;
}
})(angular, undefined);<|fim▁end|> |
// trust result to able to use it in ng-bind-html
text = $sce.trustAsHtml(text); |
<|file_name|>Testdb.java<|end_file_name|><|fim▁begin|>/* Copyright (c) 2001-2008, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.sample;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import org.hsqldb.jdbc.jdbcDataSource;
/**
* Title: Testdb
* Description: simple hello world db example of a
* standalone persistent db application
*
* every time it runs it adds four more rows to sample_table
* it does a query and prints the results to standard out
*
* Author: Karl Meissner [email protected]
*/
public class Testdb {
    Connection conn;                                                // our connection to the db - persists for the life of the program
                                                                    // we don't want this garbage collected until we are done
public Testdb(String db_file_name_prefix) throws Exception { // note more general exception
// connect to the database. This will load the db files and start the
        // database if it is not already running.
// db_file_name_prefix is used to open or create files that hold the state
// of the db.
// It can contain directory names relative to the
// current working directory
jdbcDataSource dataSource = new jdbcDataSource();
dataSource.setDatabase("jdbc:hsqldb:" + db_file_name_prefix);
        conn = dataSource.getConnection("sa", "");    // store in the field used by query/update/shutdown
}
public void shutdown() throws SQLException {
Statement st = conn.createStatement();
<|fim▁hole|> // otherwise there will be an unclean shutdown
// when program ends
st.execute("SHUTDOWN");
conn.close(); // if there are no other open connection
}
//use for SQL command SELECT
public synchronized void query(String expression) throws SQLException {
Statement st = null;
ResultSet rs = null;
st = conn.createStatement(); // statement objects can be reused with
// repeated calls to execute but we
// choose to make a new one each time
rs = st.executeQuery(expression); // run the query
// do something with the result set.
dump(rs);
st.close(); // NOTE!! if you close a statement the associated ResultSet is
// closed too
// so you should copy the contents to some other object.
// the result set is invalidated also if you recycle an Statement
// and try to execute some other query before the result set has been
// completely examined.
}
//use for SQL commands CREATE, DROP, INSERT and UPDATE
public synchronized void update(String expression) throws SQLException {
Statement st = null;
st = conn.createStatement(); // statements
int i = st.executeUpdate(expression); // run the query
if (i == -1) {
System.out.println("db error : " + expression);
}
st.close();
} // void update()
public static void dump(ResultSet rs) throws SQLException {
// the order of the rows in a cursor
// are implementation dependent unless you use the SQL ORDER statement
ResultSetMetaData meta = rs.getMetaData();
int colmax = meta.getColumnCount();
int i;
Object o = null;
// the result set is a cursor into the data. You can only
// point to one row at a time
// assume we are pointing to BEFORE the first row
// rs.next() points to next row and returns true
// or false if there is no next row, which breaks the loop
for (; rs.next(); ) {
for (i = 0; i < colmax; ++i) {
                o = rs.getObject(i + 1); // In SQL the first column is indexed
// with 1 not 0
System.out.print(o.toString() + " ");
}
System.out.println(" ");
}
} //void dump( ResultSet rs )
public static void main(String[] args) {
Testdb db = null;
try {
db = new Testdb("db_file");
} catch (Exception ex1) {
ex1.printStackTrace(); // could not start db
return; // bye bye
}
try {
//make an empty table
//
// by declaring the id column IDENTITY, the db will automatically
// generate unique values for new rows- useful for row keys
db.update(
"CREATE TABLE sample_table ( id INTEGER IDENTITY, str_col VARCHAR(256), num_col INTEGER)");
} catch (SQLException ex2) {
//ignore
//ex2.printStackTrace(); // second time we run program
// should throw execption since table
// already there
//
// this will have no effect on the db
}
try {
            // add some rows - will create duplicates if run more than once
// the id column is automatically generated
db.update(
"INSERT INTO sample_table(str_col,num_col) VALUES('Ford', 100)");
db.update(
"INSERT INTO sample_table(str_col,num_col) VALUES('Toyota', 200)");
db.update(
"INSERT INTO sample_table(str_col,num_col) VALUES('Honda', 300)");
db.update(
"INSERT INTO sample_table(str_col,num_col) VALUES('GM', 400)");
// do a query
db.query("SELECT * FROM sample_table WHERE num_col < 250");
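            // expected output on a fresh database (illustrative):
            //   0 Ford 100
            //   1 Toyota 200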
// at end of program
db.shutdown();
} catch (SQLException ex3) {
ex3.printStackTrace();
}
} // main()
} // class Testdb<|fim▁end|> | // db writes out to files and performs clean shuts down |