prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|>from collections import OrderedDict
from django.core.cache import cache<|fim▁hole|>from django.conf import settings
import jingo
import jinja2
from bedrock.firefox.models import FirefoxOSFeedLink
from bedrock.firefox.firefox_details import firefox_desktop, firefox_android
from bedrock.base.urlresolvers import reverse
from lib.l10n_utils import get_locale
def android_builds(channel, builds=None):
    """Return download info dicts for Firefox for Android builds.

    :param channel: release channel name: 'alpha', 'beta' or 'release'.
    :param builds: optional list to append the build dicts to; a new
        list is created when omitted.
    :return: the list of build-info dicts for the download template.
    """
    builds = builds or []
    variations = OrderedDict([
        ('api-9', 'Gingerbread'),
        ('api-11', 'Honeycomb+ ARMv7+'),
        ('x86', 'x86'),
    ])
    if channel == 'alpha':
        # Alpha ships one build per Android API level / architecture.
        # (`items()` instead of `iteritems()` works on Python 2 and 3;
        # `build_type` avoids shadowing the builtin `type`.)
        for build_type, arch_pretty in variations.items():
            link = firefox_android.get_download_url('alpha', build_type)
            builds.append({'os': 'android',
                           'os_pretty': 'Android',
                           'os_arch_pretty': 'Android %s' % arch_pretty,
                           'arch': 'x86' if build_type == 'x86'
                                   else 'armv7up %s' % build_type,
                           'arch_pretty': arch_pretty,
                           'download_link': link})
    else:
        # Other channels have a single build.
        link = firefox_android.get_download_url(channel)
        builds.append({'os': 'android',
                       'os_pretty': 'Android',
                       'download_link': link})
    return builds
@jingo.register.function
@jinja2.contextfunction
def download_firefox(ctx, channel='release', small=False, icon=True,
                     platform='all', dom_id=None, locale=None, simple=False,
                     force_direct=False, force_full_installer=False,
                     force_funnelcake=False, check_old_fx=False):
    """ Output a "download firefox" button.
    :param ctx: context from calling template.
    :param channel: name of channel: 'release', 'beta' or 'alpha'.
    :param small: Display the small button if True.
    :param icon: Display the Fx icon on the button if True.
    :param platform: Target platform: 'desktop', 'android' or 'all'.
    :param dom_id: Use this string as the id attr on the element.
    :param locale: The locale of the download. Default to locale of request.
    :param simple: Display button with text only if True. Will not display
        icon or privacy/what's new/systems & languages links. Can be used
        in conjunction with 'small'.
    :param force_direct: Force the download URL to be direct.
    :param force_full_installer: Force the installer download to not be
        the stub installer (for aurora).
    :param force_funnelcake: Force the download version for en-US Windows to be
        'latest', which bouncer will translate to the funnelcake build.
    :param check_old_fx: Checks to see if the user is on an old version of
        Firefox and, if true, changes the button text from 'Free Download'
        to 'Update your Firefox'. Must be used in conjunction with
        'simple' param being true.
    :return: The button html.
    """
    show_desktop = platform in ['all', 'desktop']
    show_android = platform in ['all', 'android']
    alt_channel = '' if channel == 'release' else channel
    locale = locale or get_locale(ctx['request'])
    funnelcake_id = ctx.get('funnelcake_id', False)
    # Default DOM id, e.g. 'download-button-desktop-release'.
    dom_id = dom_id or 'download-button-%s-%s' % (
        'desktop' if platform == 'all' else platform, channel)
    l_version = firefox_desktop.latest_builds(locale, channel)
    if l_version:
        version, platforms = l_version
    else:
        # No builds for this locale on this channel: fall back to en-US.
        locale = 'en-US'
        version, platforms = firefox_desktop.latest_builds('en-US', channel)
    # Gather data about the build for each platform
    builds = []
    if show_desktop:
        for plat_os, plat_os_pretty in firefox_desktop.platform_labels.iteritems():
            # Windows 64-bit builds are currently available only on the Aurora
            # and Beta channel
            if plat_os == 'win64' and channel not in ['alpha', 'beta']:
                continue
            # Fallback to en-US if this plat_os/version isn't available
            # for the current locale
            _locale = locale if plat_os_pretty in platforms else 'en-US'
            # And generate all the info
            download_link = firefox_desktop.get_download_url(
                channel, version, plat_os, _locale,
                force_direct=force_direct,
                force_full_installer=force_full_installer,
                force_funnelcake=force_funnelcake,
                funnelcake_id=funnelcake_id,
            )
            # If download_link_direct is False the data-direct-link attr
            # will not be output, and the JS won't attempt the IE popup.
            if force_direct:
                # no need to run get_download_url again with the same args
                download_link_direct = False
            else:
                download_link_direct = firefox_desktop.get_download_url(
                    channel, version, plat_os, _locale,
                    force_direct=True,
                    force_full_installer=force_full_installer,
                    force_funnelcake=force_funnelcake,
                    funnelcake_id=funnelcake_id,
                )
                # Identical URLs mean there is no separate direct link.
                if download_link_direct == download_link:
                    download_link_direct = False
            builds.append({'os': plat_os,
                           'os_pretty': plat_os_pretty,
                           'download_link': download_link,
                           'download_link_direct': download_link_direct})
    if show_android:
        builds = android_builds(channel, builds)
    # Get the native name for current locale
    langs = firefox_desktop.languages
    locale_name = langs[locale]['native'] if locale in langs else locale
    data = {
        'locale_name': locale_name,
        'version': version,
        'product': 'firefox-android' if platform == 'android' else 'firefox',
        'builds': builds,
        'id': dom_id,
        'small': small,
        'simple': simple,
        'channel': alt_channel,
        'show_android': show_android,
        'show_desktop': show_desktop,
        'icon': icon,
        # Only honored when 'simple' is also set (see docstring).
        'check_old_fx': check_old_fx and simple,
    }
    html = jingo.render_to_string(ctx['request'],
                                  'firefox/includes/download-button.html',
                                  data)
    return jinja2.Markup(html)
@jingo.register.function
def firefox_url(platform, page, channel=None):
    """
    Return a product-related URL like /firefox/all/ or /mobile/beta/notes/.
    Examples
    ========
    In Template
    -----------
    {{ firefox_url('desktop', 'all', 'organizations') }}
    {{ firefox_url('desktop', 'sysreq', channel) }}
    {{ firefox_url('android', 'notes') }}
    """
    kwargs = {}
    # Translate channel aliases into the names used by the URL
    # patterns in urls.py.
    if channel == 'release':
        channel = None
    elif channel == 'alpha':
        aliases = {'desktop': 'developer', 'android': 'aurora'}
        channel = aliases.get(platform, channel)
    elif channel == 'esr':
        channel = 'organizations'
    if channel:
        kwargs['channel'] = channel
    # Non-desktop release-notes URLs carry the platform segment.
    if page == 'notes' and platform != 'desktop':
        kwargs['platform'] = platform
    return reverse('firefox.%s' % page, kwargs=kwargs)
@jingo.register.function
def firefox_os_feed_links(locale, force_cache_refresh=False):
    """Return up to 10 cached (link, title) pairs for `locale`'s
    Firefox OS feed.

    Falls back to the base language (e.g. 'pt' for 'pt-BR') when the
    exact locale is not in FIREFOX_OS_FEED_LOCALES; returns None when
    no feed exists at all.

    :param locale: locale code to fetch feed links for.
    :param force_cache_refresh: bypass the cache and re-query the DB.
    """
    if locale in settings.FIREFOX_OS_FEED_LOCALES:
        cache_key = 'firefox-os-feed-links-' + locale
        if not force_cache_refresh:
            links = cache.get(cache_key)
            if links:
                return links
        links = list(
            FirefoxOSFeedLink.objects.filter(locale=locale).order_by(
                '-id').values_list('link', 'title')[:10])
        cache.set(cache_key, links)
        return links
    elif '-' in locale:
        # Fall back to the base language, propagating the caller's
        # refresh request (previously dropped, so a stale cache entry
        # for the fallback locale could never be refreshed).
        return firefox_os_feed_links(locale.split('-')[0],
                                     force_cache_refresh)
@jingo.register.function
def firefox_os_blog_link(locale):
    """Return the Firefox OS press-blog URL for `locale`, falling back
    to the base language (e.g. 'es' for 'es-MX'); None when unknown."""
    if locale in settings.FXOS_PRESS_BLOG_LINKS:
        return settings.FXOS_PRESS_BLOG_LINKS[locale]
    if '-' in locale:
        return firefox_os_blog_link(locale.split('-')[0])
    return None
| |
<|file_name|>character.rs<|end_file_name|><|fim▁begin|>use session::game::{Session, GameState};
use session::game::chunk::{Ref, ChunkImpl};
use std::io;
use protocol::{Protocol, VarInt, VarShort};
use protocol::messages::game::character::choice::*;
use protocol::messages::game::character::creation::*;
use protocol::messages::game::inventory::items::*;
use protocol::messages::game::character::stats::*;
use protocol::messages::game::context::notification::*;
use protocol::messages::game::chat::channel::EnabledChannelsMessage;
use protocol::types::game::look::*;
use protocol::enums::{character_creation_result, chat_channels_multi};
use session::game::handlers::error::Error;
use protocol::variants::{FriendInformationsVariant, IgnoredInformationsVariant};
use shared::net::{Token, Msg};
use diesel::*;
use character::{CharacterMinimal, Character, SqlCharacter};
use std::sync::atomic::{ATOMIC_ISIZE_INIT, AtomicIsize, Ordering};
use shared::database;
use shared::database::schema::character_counts;
use server::{self, SERVER};
use std::collections::HashMap;
use protocol::messages::queues::*;
use std::mem;
pub static QUEUE_SIZE: AtomicIsize = ATOMIC_ISIZE_INIT;
pub static QUEUE_COUNTER: AtomicIsize = ATOMIC_ISIZE_INIT;
fn validate_name(name: &str) -> bool {
    // A valid name is at least 4 bytes long and consists of ASCII
    // letters plus at most one interior '-'. Uppercase letters are only
    // allowed at the very start or directly after the dash.
    if name.len() < 4 {
        return false;
    }
    let chars: Vec<char> = name.chars().collect();
    let last = chars.len() - 1;
    let mut seen_dash = false;
    for (i, &c) in chars.iter().enumerate() {
        if c == '-' {
            // Only one dash, never first or last.
            if seen_dash || i == 0 || i == last {
                return false;
            }
            seen_dash = true;
        } else if !((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')) {
            // Anything that is not an ASCII letter is rejected.
            return false;
        }
        // Uppercase must begin the name or follow the dash.
        if c >= 'A' && c <= 'Z' && i != 0 && chars[i - 1] != '-' {
            return false;
        }
    }
    true
}
// Row inserted into the auth database's `character_counts` table,
// recording one character slot for a (server, account) pair.
#[insertable_into(character_counts)]
struct CharacterCount(
    #[column_name="server_id"]
    i16,
    #[column_name="account_id"]
    i32,
);
<|fim▁hole|> auth_uri: String, server_id: i16)
-> Result<CharacterMinimal, Error> {
use shared::database::schema::{lower, characters, character_minimals};
let lower_name = name.to_lowercase();
let name_exists: Option<i32> = try!(
character_minimals::table.filter(lower(character_minimals::name).eq(&lower_name))
.select_sql::<types::Integer>("1")
.first(conn)
.optional()
);
if name_exists.is_some() {
return Err(Error::Other);
}
let auth_conn = try!(Connection::establish(&auth_uri));
auth_conn.transaction(|| {
try!(
insert(&CharacterCount(server_id, account_id)).into(character_counts::table)
.execute(&auth_conn)
);
let new_char = SqlCharacter::default(spawn_map);
let res: SqlCharacter = try!(
insert(&new_char).into(characters::table)
.get_result(conn)
);
let id = res.id();
let base = CharacterMinimal::new(id, account_id, nickname, 1, name, breed, sex, look);
try!(
insert(&base).into(character_minimals::table)
.execute(conn)
);
Ok(base)
}).map_err(From::from)
}
// Load the persisted `SqlCharacter` row matching `base` and combine the
// two into an in-game `Character`, returned together with its map id.
fn load_character(conn: &Connection, tok: Token, base: CharacterMinimal)
                  -> Result<(Character, i32), Error> {
    use shared::database::schema::characters;
    let ch_id = base.id();
    // `.optional()` turns a missing row into Ok(None) instead of an error.
    let character: Option<SqlCharacter> = try!(
        characters::table.filter(characters::id.eq(&ch_id))
                         .first(conn)
                         .optional()
    );
    match character {
        Some(character) => {
            let map_id = character.map_id;
            match Character::new(tok, base, character) {
                Some(character) => Ok((character, map_id)),
                None => {
                    // `Character::new` returned None: the stored cell was
                    // rejected (see error message).
                    error!("invalid cell for character {}", ch_id);
                    return Err(Error::Other);
                }
            }
        },
        // No row in `characters` for this id.
        None => return Err(Error::Other),
    }
}
// How `select_character` obtains the character: create a new one from
// creation parameters, or load an existing one from its minimal record.
enum SelectionType {
    // (name, breed, look, sex, spawn_map, account_id, nickname,
    //  auth_uri, server_id) -- matches the destructuring in
    //  `select_character`.
    Creation(String, i16, EntityLook, bool, i32, i32, String, String, i16),
    Default(CharacterMinimal),
}
// RAII guard: dropped on every exit path of the queued database job,
// shrinking the queue size and advancing the served-ticket counter.
struct ScopeExit;
impl Drop for ScopeExit {
    fn drop(&mut self) {
        let _ = QUEUE_SIZE.fetch_sub(1, Ordering::Relaxed);
        let _ = QUEUE_COUNTER.fetch_add(1, Ordering::Relaxed);
    }
}
impl Session {
    // Restore the character-selection state after a failed creation or
    // selection, and send a zeroed queue status so the client leaves
    // the game queue.
    fn character_selection_error(&mut self, chars: HashMap<i64, CharacterMinimal>) {
        self.state = GameState::CharacterSelection(chars);
        let buf = QueueStatusMessage {
            position: 0,
            total: 0,
        }.unwrap();
        write!(SERVER, self.base.token, buf);
    }

    // Finalize a successful selection: stream the initial game-state
    // messages (queue exit, inventory, stats, notifications, chat
    // channels) into one buffer, then switch toward the character's map.
    fn character_selection_success(&mut self, _: &mut ChunkImpl, mut ch: Character, map_id: i32,
                                   friends: HashMap<i32, FriendInformationsVariant>,
                                   ignored: HashMap<i32, IgnoredInformationsVariant>) {
        log_info!(self, "selected character id = {}", ch.minimal().id());
        let mut buf = CharacterSelectedSuccessMessage {
            infos: ch.minimal().as_character_base(),
            is_collecting_stats: false,
        }.unwrap();
        QueueStatusMessage {
            position: 0,
            total: 0,
        }.unwrap_with_buf(&mut buf);
        // Inventory is sent empty here; only kamas are filled in.
        InventoryContentMessage {
            objects: Vec::new(),
            kamas: VarInt(ch.kamas()),
        }.unwrap_with_buf(&mut buf);
        InventoryWeightMessage {
            weight: VarInt(0),
            weight_max: VarInt(0),
        }.unwrap_with_buf(&mut buf);
        NotificationListMessage {
            flags: Vec::new(),
        }.unwrap_with_buf(&mut buf);
        CharacterStatsListMessage {
            stats: ch.get_character_characteristics(),
        }.unwrap_with_buf(&mut buf);
        // Scoped borrow of `self.account` so `ch` can be mutated after.
        let has_global_channel = {
            let account = self.account.as_ref().unwrap();
            EnabledChannelsMessage {
                channels: account.channels.iter().cloned().collect(),
                disallowed: Vec::new(),
            }.unwrap_with_buf(&mut buf);
            account.channels.contains(&(chat_channels_multi::GLOBAL as u8))
        };
        ch.set_has_global_channel(has_global_channel);
        write!(SERVER, self.base.token, buf);
        self.state = GameState::SwitchingContext(map_id, ch);
        self.friends_cache = friends;
        self.ignored_cache = ignored;
    }

    // Put the session into the game queue and run creation and/or
    // loading of the character on the database executor; results are
    // routed back to the session via server callbacks.
    fn select_character(&mut self, ty: SelectionType) {
        let account = self.account.as_ref().unwrap();
        let token = self.base.token;
        let (server, io_loop) = SERVER.with(|s| {
            (s.server.clone(), s.io_loop.clone())
        });
        let account_id = account.id;
        let social = account.social.clone();
        // Take a queue ticket; ScopeExit below releases it.
        let state = GameState::GameQueue(
            QUEUE_SIZE.fetch_add(1, Ordering::Relaxed) + 1,
            QUEUE_COUNTER.load(Ordering::Relaxed)
        );
        let state = mem::replace(&mut self.state, state);
        let characters_list = match state {
            GameState::CharacterSelection(characters) => characters,
            _ => unreachable!(),
        };
        SERVER.with(|s| database::execute(&s.db, move |conn| {
            // Dropped on every return path of this closure.
            let decrease_queue = ScopeExit;
            let base = match ty {
                SelectionType::Creation(
                    name, breed, look, sex, spawn_map,
                    account_id, account_nickname, auth_uri, server_id
                ) => {
                    let res = conn.transaction(|| {
                        create_character(
                            conn, token,
                            name, breed, look, sex, spawn_map,
                            account_id, account_nickname, auth_uri, server_id
                        )
                    }).map_err(From::from);
                    match res {
                        Err(err) => {
                            // Sql errors are unexpected; any other error
                            // means the name was already taken.
                            let result = if let Error::Sql(err) = err {
                                error!("create_character sql error: {}", err);
                                character_creation_result::ERR_NO_REASON
                            } else {
                                character_creation_result::ERR_NAME_ALREADY_EXISTS
                            };
                            let buf = CharacterCreationResultMessage {
                                result: result,
                            }.unwrap();
                            let _ = io_loop.send(Msg::Write(token, buf));
                            server::session_callback(&server, token, move |session, _| {
                                session.character_selection_error(characters_list)
                            });
                            return;
                        },
                        Ok(base) => {
                            let buf = CharacterCreationResultMessage {
                                result: character_creation_result::OK,
                            }.unwrap();
                            let _ = io_loop.send(Msg::Write(token, buf));
                            server::insert_character_minimal(&server, base.clone());
                            base
                        }
                    }
                },
                SelectionType::Default(base) => base,
            };
            let res = conn.transaction(|| {
                load_character(conn, token, base)
            }).map_err(From::from);
            match res {
                Err(err) => {
                    if let Error::Sql(err) = err {
                        error!("load_character sql error: {}", err);
                    }
                    let buf = CharacterSelectedErrorMessage.unwrap();
                    let _ = io_loop.send(Msg::Write(token, buf));
                    server::session_callback(&server, token, move |session, _| {
                        session.character_selection_error(characters_list)
                    });
                    return;
                }
                Ok((ch, map_id)) => {
                    let ch_id = ch.minimal().id();
                    server::character_selection_success(
                        &server,
                        token,
                        account_id,
                        ch_id,
                        social,
                        move |session, chunk, friends, ignored| {
                            session.character_selection_success(
                                chunk,
                                ch,
                                map_id,
                                friends,
                                ignored
                            )
                        }
                    );
                }
            }
        }));
    }
}
#[register_handlers]
impl Session {
    // Send the account's character list; only valid while the session
    // is in the character-selection state.
    pub fn handle_characters_list_request<'a>(&mut self, _: Ref<'a>,
                                              _: CharactersListRequestMessage) -> io::Result<()> {
        let characters = match self.state {
            GameState::CharacterSelection(ref characters) => characters,
            _ => return Ok(()),
        };
        let buf = CharactersListMessage {
            base: BasicCharactersListMessage {
                characters: characters.iter()
                                      .map(|ch| ch.1.as_character_base().into())
                                      .collect(),
            },
            has_startup_actions: false,
        }.unwrap();
        write!(SERVER, self.base.token, buf);
        Ok(())
    }

    // Validate a character-creation request (slots, name, breed, head,
    // colors), build the final look and hand off to `select_character`.
    pub fn handle_character_creation_request<'a>(&mut self, chunk: Ref<'a>,
                                                 msg: CharacterCreationRequestMessage)
                                                 -> io::Result<()> {
        match self.state {
            GameState::CharacterSelection(_) => (),
            _ => return Ok(()),
        };
        // Reject when the account already has its maximum of characters.
        let characters_count = self.account.as_ref().unwrap().characters_count;
        let max_characters_count = self.account.as_ref().unwrap().max_characters_count;
        if characters_count >= max_characters_count {
            let buf = CharacterCreationResultMessage {
                result: character_creation_result::ERR_TOO_MANY_CHARACTERS,
            }.unwrap();
            write!(SERVER, self.base.token, buf);
            return Ok(());
        }
        if !validate_name(&msg.name) {
            let buf = CharacterCreationResultMessage {
                result: character_creation_result::ERR_INVALID_NAME,
            }.unwrap();
            write!(SERVER, self.base.token, buf);
            return Ok(());
        }
        // Base look and spawn map come from the breed definition.
        let look_and_map = SERVER.with(|s| {
            s.breeds.get(&(msg.breed as i16)).map(|b| {
                (
                    if msg.sex { b.female_look().clone() } else { b.male_look().clone() },
                    b.spawn_map()
                )
            })
        });
        let (mut look, map) = match look_and_map {
            Some((look, map)) => (look, map),
            None => {
                // Unknown breed id.
                let buf = CharacterCreationResultMessage {
                    result: character_creation_result::ERR_NO_REASON,
                }.unwrap();
                write!(SERVER, self.base.token, buf);
                return Ok(());
            }
        };
        let head = SERVER.with(|s| {
            s.heads.get(&msg.cosmetic_id.0).map(|h| h.clone())
        });
        // The chosen head must exist and match the breed and sex.
        if head.is_none() || head.as_ref().unwrap().breed_id() != msg.breed as i16
           || head.as_ref().unwrap().gender() != msg.sex {
            let buf = CharacterCreationResultMessage {
                result: character_creation_result::ERR_NO_REASON,
            }.unwrap();
            write!(SERVER, self.base.token, buf);
            return Ok(());
        }
        // Indexed colors pack an index in the top byte and an RGB value
        // in the low 24 bits; overlay the client's choices onto the
        // breed defaults, ignoring -1 (keep default) and unknown slots.
        let mut colors = HashMap::new();
        for &c in &look.indexed_colors {
            let _ = colors.insert((c >> 24) & 255, c & 16777215);
        }
        for i in 0 .. msg.colors.0.len() {
            let ind = (i + 1) as i32;
            if msg.colors.0[i] != -1 && colors.contains_key(&ind) {
                let _ = colors.insert(ind, msg.colors.0[i]);
            }
        }
        look.indexed_colors.clear();
        for p in &colors {
            look.indexed_colors.push((p.0 & 255) << 24 | p.1 & 16777215);
        }
        look.skins.push(VarShort(head.unwrap().skin()));
        let account_id = self.account.as_ref().unwrap().id;
        let account_nickname = self.account.as_ref().unwrap().nickname.clone();
        let (auth_uri, server_id) = SERVER.with(|s| {
            (s.cnf.auth_database_uri.clone(), s.cnf.server_id)
        });
        self.select_character(SelectionType::Creation(
            msg.name, msg.breed as i16, look, msg.sex, map,
            account_id, account_nickname, auth_uri, server_id
        ));
        Ok(())
    }

    // Select an existing character by id from the selection list.
    pub fn handle_character_selection<'a>(&mut self, _: Ref<'a>, msg: CharacterSelectionMessage)
                                          -> io::Result<()> {
        let ch = {
            let characters = match self.state {
                GameState::CharacterSelection(ref characters) => characters,
                _ => return Ok(()),
            };
            match characters.get(&msg.id.0) {
                Some(ch) => ch.clone(),
                None => {
                    // Unknown character id for this account.
                    let buf = CharacterSelectedErrorMessage.unwrap();
                    write!(SERVER, self.base.token, buf);
                    return Ok(());
                }
            }
        };
        self.select_character(SelectionType::Default(ch));
        Ok(())
    }
}
|
fn create_character(conn: &Connection, tok: Token, name: String, breed: i16, look: EntityLook,
sex: bool, spawn_map: i32, account_id: i32, nickname: String,
|
<|file_name|>test_offline.py<|end_file_name|><|fim▁begin|>#Import Libraries
import eventmaster as EM
from time import sleep
import random
import sys
""" Create new Instance of EventMasterSwitcher and turn off logging """
# Create the switcher instance with logging disabled, then load its
# state from a saved settings file instead of a live connection.
s3 = EM.EventMasterSwitcher()
s3.setVerbose(0)
with open('example_settings_.xml', 'r') as content_file:
    content = content_file.read()
s3.loadFromXML(content)
""" Enumerate all Inputs and print known information for each """
print("# Inputs")
for input_id, input_inst in s3.getInputs().items():
    input_name = input_inst.getName()
    frozen_string = "is Frozen" if input_inst.getFreeze() else "is not Frozen"
    print(" ({0!s}) {1!s} {2!s}".format(input_id, input_name, frozen_string))
""" Enumerate all Outputs and print known information for each """
print("\r\n# Outputs")
for output_id, output_inst in s3.getOutputs().items():
output_name = output_inst.getName()
print(" ({0!s}) {1!s}".format(output_id, output_name))<|fim▁hole|>""" Enumerate all Presets and print known information for each """
# Enumerate all Presets and print known information for each.
print("\r\n# Presets")
for preset_id, preset_inst in s3.getPresets().items():
    preset_name = preset_inst.getName()
    print(" ({0!s}) {1!s}".format(preset_id, preset_name))
""" Enumerate all Destinations and print known information for each """
print("\r\n# Destinations:")
for dest_id, dest_inst in s3.getScreenDests().items():
    dest_numoflayers = len(dest_inst.getLayers())
    dest_name = dest_inst.getName()
    dest_size = dest_inst.getSize()
    # Format index 0 (dest_id) is deliberately unused in the template.
    print("\n {1!s} is {2!s} x {3!s} & has {4!s} layer(s)".format(dest_id,
                                                                  dest_name,
                                                                  dest_size["HSize"],
                                                                  dest_size["VSize"],
                                                                  dest_numoflayers))
    """ Enumerate all Layers for Destination and print known information for each """
    for layer_number, layer_inst in dest_inst.getLayers().items():
        # Preview (PVW) side of the layer, if present.
        if "Pvw" in layer_inst:
            layer_name_pvw = layer_inst["Pvw"].getName()
            layer_owin_pvw = layer_inst["Pvw"].getOWIN()
            layer_hpos_pvw = layer_owin_pvw["HPos"]
            layer_hsize_pvw = layer_owin_pvw["HSize"]
            layer_vpos_pvw = layer_owin_pvw["VPos"]
            layer_vsize_pvw = layer_owin_pvw["VSize"]
            if layer_inst["Pvw"].getSource() is not None:
                layer_source_name_pvw = layer_inst["Pvw"].getSource().getName()
            else:
                layer_source_name_pvw = "(Unknown)"
            size_string_pvw = " {4!s} is on PVW - {0!s}x{1!s} at {2!s},{3!s}".format(layer_hsize_pvw, layer_vsize_pvw, layer_hpos_pvw, layer_vpos_pvw, layer_name_pvw)
            source_string_pvw = " current source is {0!s}".format(layer_source_name_pvw)
        else:
            size_string_pvw = " Layer is not on PVW "
            source_string_pvw = ""
        # Program (PGM) side of the layer, if present.
        if "Pgm" in layer_inst:
            layer_name_pgm = layer_inst["Pgm"].getName()
            layer_owin_pgm = layer_inst["Pgm"].getOWIN()
            layer_hpos_pgm = layer_owin_pgm["HPos"]
            layer_hsize_pgm = layer_owin_pgm["HSize"]
            layer_vpos_pgm = layer_owin_pgm["VPos"]
            layer_vsize_pgm = layer_owin_pgm["VSize"]
            if layer_inst["Pgm"].getSource() is not None:
                layer_source_name_pgm = layer_inst["Pgm"].getSource().getName()
            else:
                layer_source_name_pgm = "(Unknown)"
            size_string_pgm = " {4!s} is on PGM - {0!s}x{1!s} at {2!s},{3!s}".format(layer_hsize_pgm, layer_vsize_pgm, layer_hpos_pgm, layer_vpos_pgm, layer_name_pgm)
            source_string_pgm = " current source is {0!s}".format(layer_source_name_pgm)
        else:
            size_string_pgm = " Layer is not on PGM "
            source_string_pgm = ""
        # The old unconditional size_string/source_string assignments were
        # removed here: they referenced PGM-only variables (NameError when
        # a layer was not on PGM, stale values otherwise) and their
        # results were never used.
        print(" ({0!s}) {1!s}\n {2!s}\n {3!s}\n {4!s}".format(layer_number+1, size_string_pgm, source_string_pgm, size_string_pvw, source_string_pvw))
sys.exit()<|fim▁end|>
| |
<|file_name|>MC_12_MagDown_kstar_rho_kpipipi.py<|end_file_name|><|fim▁begin|>#-- GAUDI jobOptions generated on Wed Jun 10 17:31:51 2015
#-- Contains event types :
#-- 11104041 - 117 files - 2010995 events - 432.61 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-124834
<|fim▁hole|>#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p7
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124620
#-- StepId : 124620
#-- StepName : Digi13 with G4 dE/dx
#-- ApplicationName : Boole
#-- ApplicationVersion : v26r3
#-- OptionFiles : $APPCONFIGOPTS/Boole/Default.py;$APPCONFIGOPTS/Boole/DataType-2012.py;$APPCONFIGOPTS/Boole/Boole-SiG4EnergyDeposit.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124632
#-- StepId : 124632
#-- StepName : TCK-0x409f0045 Flagged for Sim08 2012
#-- ApplicationName : Moore
#-- ApplicationVersion : v14r8p1
#-- OptionFiles : $APPCONFIGOPTS/Moore/MooreSimProductionWithL0Emulation.py;$APPCONFIGOPTS/Conditions/TCK-0x409f0045.py;$APPCONFIGOPTS/Moore/DataType-2012.py;$APPCONFIGOPTS/L0/L0TCK-0x0045.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124630
#-- StepId : 124630
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-125577
#-- StepId : 125577
#-- StepName : Sim08a - 2012 - MD - Pythia8
#-- ApplicationName : Gauss
#-- ApplicationVersion : v45r3
#-- OptionFiles : $APPCONFIGOPTS/Gauss/Sim08-Beam4000GeV-md100-2012-nu2.5.py;$DECFILESROOT/options/@{eventType}.py;$LBPYTHIA8ROOT/options/Pythia8.py;$APPCONFIGOPTS/Gauss/G4PL_FTFP_BERT_EmNoCuts.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : Sim08-20130503-1
#-- CONDDB : Sim08-20130503-1-vc-md100
#-- ExtraPackages : AppConfig.v3r171;DecFiles.v27r11
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000001_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000002_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000003_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000004_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000005_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000006_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000007_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000008_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000009_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000010_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000011_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000012_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000013_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000014_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000015_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000016_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000017_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000018_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000019_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000020_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000021_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000022_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000023_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000024_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000025_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000026_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000027_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000028_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000029_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000030_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000031_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000032_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000033_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000034_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000035_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000036_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000037_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000038_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000039_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000040_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000041_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000042_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000043_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000044_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000045_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000046_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000047_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000048_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000049_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000050_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000051_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000052_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000053_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000054_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000055_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000056_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000057_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000058_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000059_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000060_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000061_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000062_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000063_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000064_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000065_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000066_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000067_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000068_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000069_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000070_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000071_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000072_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000073_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000074_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000075_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000076_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000077_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000078_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000079_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000080_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000081_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000082_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000083_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000084_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000085_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000086_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000087_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000088_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000089_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000090_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000091_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000092_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000093_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000094_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000095_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000096_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000097_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000098_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000099_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000100_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000101_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000102_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000103_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000104_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000105_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000106_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000108_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000109_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000110_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000111_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000112_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000113_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000114_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000115_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000116_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000117_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00030282/0000/00030282_00000118_1.allstreams.dst'
], clear=True)<|fim▁end|>
|
#-- StepId : 124834
#-- StepName : Reco14a for MC
|
<|file_name|>containers.rs<|end_file_name|><|fim▁begin|>use std::default::Default;
use std::collections::BTreeMap;
use quire::validate as V;
use super::builders::{Builder, builder_validator};
use super::Range;
#[derive(Decodable, Clone, PartialEq, Eq)]
pub enum Volume {
Tmpfs(TmpfsInfo),
BindRW(Path),
VaggaBin,
}
#[derive(Decodable, Clone, PartialEq, Eq)]
pub struct TmpfsInfo {
pub size: usize,
pub mode: u32,
}
#[derive(Decodable, Clone)]
pub struct Container {
pub setup: Vec<Builder>,
pub auto_clean: bool,
pub uids: Vec<Range>,
pub gids: Vec<Range>,
pub environ_file: Option<Path>,
pub environ: BTreeMap<String, String>,
pub resolv_conf_path: Option<Path>,
pub volumes: BTreeMap<Path, Volume>,
}
impl PartialEq for Container {
fn eq(&self, _other: &Container) -> bool { false }
}
pub fn volume_validator<'a>() -> Box<V::Validator + 'a> {
return box V::Enum { options: vec!(
("Tmpfs".to_string(), box V::Structure { members: vec!(
("size".to_string(), box V::Numeric {
min: Some(0us),
default: Some(100*1024*1024),
.. Default::default()} as Box<V::Validator>),
("mode".to_string(), box V::Numeric {
min: Some(0u32),
max: Some(0o1777u32),
default: Some(0o766),
.. Default::default()} as Box<V::Validator>),
),.. Default::default()} as Box<V::Validator>),
("VaggaBin".to_string(), box V::Nothing),
("BindRW".to_string(), box V::Scalar {
.. Default::default()}),
), .. Default::default()} as Box<V::Validator>;
}
pub fn container_validator<'a>() -> Box<V::Validator + 'a> {
return box V::Structure { members: vec!(
("setup".to_string(), box V::Sequence {
element: builder_validator(),
.. Default::default()} as Box<V::Validator>),
("auto_clean".to_string(), box V::Scalar {
default: Some("false".to_string()),
.. Default::default()} as Box<V::Validator>),
("environ".to_string(), box V::Mapping {
key_element: box V::Scalar {
.. Default::default()} as Box<V::Validator>,
value_element: box V::Scalar {
.. Default::default()} as Box<V::Validator>,
.. Default::default()} as Box<V::Validator>),
("environ_file".to_string(), box V::Scalar {
optional: true,
.. Default::default()} as Box<V::Validator>),
("resolv_conf_path".to_string(), box V::Directory {
absolute: Some(true),
optional: true,
default: Some(Path::new("/etc/resolv.conf")),
.. Default::default()} as Box<V::Validator>),
("uids".to_string(), box V::Sequence {
element: box V::Scalar {
.. Default::default()} as Box<V::Validator>,
.. Default::default()} as Box<V::Validator>),
("gids".to_string(), box V::Sequence {
element: box V::Scalar {<|fim▁hole|> absolute: Some(true),
.. Default::default()} as Box<V::Validator>,
value_element: volume_validator(),
.. Default::default()} as Box<V::Validator>),
), .. Default::default()} as Box<V::Validator>;
}<|fim▁end|>
|
.. Default::default()} as Box<V::Validator>,
.. Default::default()} as Box<V::Validator>),
("volumes".to_string(), box V::Mapping {
key_element: box V::Directory {
|
<|file_name|>interface.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> getImport(): string;
}<|fim▁end|>
|
interface bugfix {
|
<|file_name|>proxy_socks4.go<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014, Yawning Angel <yawning at torproject dot org><|fim▁hole|> * modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* This is inspired by go.net/proxy/socks5.go:
*
* Copyright 2011 The Go Authors. All rights reserved.
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file.
*/
package main
import (
"errors"
"fmt"
"io"
"net"
"net/url"
"strconv"
"golang.org/x/net/proxy"
)
// socks4Proxy is a SOCKS4 proxy.
type socks4Proxy struct {
hostPort string
username string
forward proxy.Dialer
}
const (
socks4Version = 0x04
socks4CommandConnect = 0x01
socks4Null = 0x00
socks4ReplyVersion = 0x00
socks4Granted = 0x5a
socks4Rejected = 0x5b
socks4RejectedIdentdFailed = 0x5c
socks4RejectedIdentdMismatch = 0x5d
)
func newSOCKS4(uri *url.URL, forward proxy.Dialer) (proxy.Dialer, error) {
s := new(socks4Proxy)
s.hostPort = uri.Host
s.forward = forward
if uri.User != nil {
s.username = uri.User.Username()
}
return s, nil
}
func (s *socks4Proxy) Dial(network, addr string) (net.Conn, error) {
if network != "tcp" && network != "tcp4" {
return nil, errors.New("invalid network type")
}
// Deal with the destination address/string.
ipStr, portStr, err := net.SplitHostPort(addr)
if err != nil {
return nil, err
}
ip := net.ParseIP(ipStr)
if ip == nil {
return nil, errors.New("failed to parse destination IP")
}
ip4 := ip.To4()
if ip4 == nil {
return nil, errors.New("destination address is not IPv4")
}
port, err := strconv.ParseUint(portStr, 10, 16)
if err != nil {
return nil, err
}
// Connect to the proxy.
c, err := s.forward.Dial("tcp", s.hostPort)
if err != nil {
return nil, err
}
// Make/write the request:
// +----+----+----+----+----+----+----+----+----+----+....+----+
// | VN | CD | DSTPORT | DSTIP | USERID |NULL|
// +----+----+----+----+----+----+----+----+----+----+....+----+
req := make([]byte, 0, 9+len(s.username))
req = append(req, socks4Version)
req = append(req, socks4CommandConnect)
req = append(req, byte(port>>8), byte(port))
req = append(req, ip4...)
if s.username != "" {
req = append(req, s.username...)
}
req = append(req, socks4Null)
_, err = c.Write(req)
if err != nil {
c.Close()
return nil, err
}
// Read the response:
// +----+----+----+----+----+----+----+----+
// | VN | CD | DSTPORT | DSTIP |
// +----+----+----+----+----+----+----+----+
var resp [8]byte
_, err = io.ReadFull(c, resp[:])
if err != nil {
c.Close()
return nil, err
}
if resp[0] != socks4ReplyVersion {
c.Close()
return nil, errors.New("proxy returned invalid SOCKS4 version")
}
if resp[1] != socks4Granted {
c.Close()
return nil, fmt.Errorf("proxy error: %s", socks4ErrorToString(resp[1]))
}
return c, nil
}
func socks4ErrorToString(code byte) string {
switch code {
case socks4Rejected:
return "request rejected or failed"
case socks4RejectedIdentdFailed:
return "request rejected becasue SOCKS server cannot connect to identd on the client"
case socks4RejectedIdentdMismatch:
return "request rejected because the client program and identd report different user-ids"
default:
return fmt.Sprintf("unknown failure code %x", code)
}
}
func init() {
// Despite the scheme name, this really is SOCKS4.
proxy.RegisterDialerType("socks4a", newSOCKS4)
}<|fim▁end|>
|
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
|
<|file_name|>osutil_test.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2014 The Syncthing Authors.
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this file,
// You can obtain one at http://mozilla.org/MPL/2.0/.
package osutil_test
import (
"os"
"runtime"
"testing"
"github.com/syncthing/syncthing/lib/osutil"
)
func TestInWriteableDir(t *testing.T) {
err := os.RemoveAll("testdata")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll("testdata")
os.Mkdir("testdata", 0700)
os.Mkdir("testdata/rw", 0700)
os.Mkdir("testdata/ro", 0500)
create := func(name string) error {
fd, err := os.Create(name)
if err != nil {
return err
}
fd.Close()
return nil
}
// These should succeed
err = osutil.InWritableDir(create, "testdata/file")
if err != nil {
t.Error("testdata/file:", err)
}
err = osutil.InWritableDir(create, "testdata/rw/foo")
if err != nil {
t.Error("testdata/rw/foo:", err)
}
err = osutil.InWritableDir(os.Remove, "testdata/rw/foo")
if err != nil {
t.Error("testdata/rw/foo:", err)
}
<|fim▁hole|> err = osutil.InWritableDir(create, "testdata/ro/foo")
if err != nil {
t.Error("testdata/ro/foo:", err)
}
err = osutil.InWritableDir(os.Remove, "testdata/ro/foo")
if err != nil {
t.Error("testdata/ro/foo:", err)
}
// These should not
err = osutil.InWritableDir(create, "testdata/nonexistent/foo")
if err == nil {
t.Error("testdata/nonexistent/foo returned nil error")
}
err = osutil.InWritableDir(create, "testdata/file/foo")
if err == nil {
t.Error("testdata/file/foo returned nil error")
}
}
func TestInWritableDirWindowsRemove(t *testing.T) {
if runtime.GOOS != "windows" {
t.Skipf("Tests not required")
return
}
err := os.RemoveAll("testdata")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll("testdata")
create := func(name string) error {
fd, err := os.Create(name)
if err != nil {
return err
}
fd.Close()
return nil
}
os.Mkdir("testdata", 0700)
os.Mkdir("testdata/windows", 0500)
os.Mkdir("testdata/windows/ro", 0500)
create("testdata/windows/ro/readonly")
os.Chmod("testdata/windows/ro/readonly", 0500)
for _, path := range []string{"testdata/windows/ro/readonly", "testdata/windows/ro", "testdata/windows"} {
err := os.Remove(path)
if err == nil {
t.Errorf("Expected error %s", path)
}
}
for _, path := range []string{"testdata/windows/ro/readonly", "testdata/windows/ro", "testdata/windows"} {
err := osutil.InWritableDir(osutil.Remove, path)
if err != nil {
t.Errorf("Unexpected error %s: %s", path, err)
}
}
}
func TestInWritableDirWindowsRename(t *testing.T) {
if runtime.GOOS != "windows" {
t.Skipf("Tests not required")
return
}
err := os.RemoveAll("testdata")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll("testdata")
create := func(name string) error {
fd, err := os.Create(name)
if err != nil {
return err
}
fd.Close()
return nil
}
os.Mkdir("testdata", 0700)
os.Mkdir("testdata/windows", 0500)
os.Mkdir("testdata/windows/ro", 0500)
create("testdata/windows/ro/readonly")
os.Chmod("testdata/windows/ro/readonly", 0500)
for _, path := range []string{"testdata/windows/ro/readonly", "testdata/windows/ro", "testdata/windows"} {
err := os.Rename(path, path+"new")
if err == nil {
t.Skipf("seem like this test doesn't work here")
return
}
}
rename := func(path string) error {
return osutil.Rename(path, path+"new")
}
for _, path := range []string{"testdata/windows/ro/readonly", "testdata/windows/ro", "testdata/windows"} {
err := osutil.InWritableDir(rename, path)
if err != nil {
t.Errorf("Unexpected error %s: %s", path, err)
}
_, err = os.Stat(path + "new")
if err != nil {
t.Errorf("Unexpected error %s: %s", path, err)
}
}
}
func TestDiskUsage(t *testing.T) {
free, err := osutil.DiskFreePercentage(".")
if err != nil {
if runtime.GOOS == "netbsd" ||
runtime.GOOS == "openbsd" ||
runtime.GOOS == "solaris" {
t.Skip()
}
t.Errorf("Unexpected error: %s", err)
}
if free < 1 {
t.Error("Disk is full?", free)
}
}<|fim▁end|>
| |
<|file_name|>lexicographically-comparison.js<|end_file_name|><|fim▁begin|>function solve(args) {
var text = args[0].split('\n');
var textA = text[0];
var textB = text[1];
var state = 0;
for (var i = 0; i < Math.min(textA.length, textB.length); i += 1) {
if (textA[i] > textB[i]) {
state = 1;
break;
}
if (textA[i] < textB[i]) {
state = 2;
break;
}
}
if (state === 0 && textA.length > textB.length) {
state = 1;
}
if (state === 0 && textB.length > textA.length) {
state = 2;
}
switch (state) {
case 0:
console.log('=');
break;
case 1:
console.log('>');
break;
case 2:
console.log('<');<|fim▁hole|> break;
}
}<|fim▁end|>
|
break;
default:
|
<|file_name|>core.js<|end_file_name|><|fim▁begin|>/* public/core.js */
var angTodo = angular.module('angTodo', []);
function mainController($scope, $http) {
$scope.formData = {};
/* when landing on the page, get all todos and show them */
$http.get('/api/todos')
.success(function(data) {
$scope.todos = data;
console.log(data);
})
.error(function(data) {
console.log('Error: ' + data);
});
/* when submitting the add form, send the text to the node API */
$scope.createTodo = function() {
$http.post('/api/todos', $scope.formData)
.success(function(data) {
$scope.formData = {}; // clear the form so another todo can be entered
$scope.todos = data;
console.log(data);
})
.error(function(data) {
console.log('Error: ' + data);
});
};
/* delete a todo after checking it */
$scope.deleteTodo = function(id) {
$http.delete('/api/todos/' + id)<|fim▁hole|> $scope.todos = data;
console.log(data);
})
.error(function(data) {
console.log('Error: ' + data);
});
};
} /* function mainController */<|fim▁end|>
|
.success(function(data) {
|
<|file_name|>PanelHeroi.java<|end_file_name|><|fim▁begin|>package com.limagiran.hearthstone.heroi.view;
import com.limagiran.hearthstone.heroi.control.Heroi;
import com.limagiran.hearthstone.util.AbsolutesConstraints;
import com.limagiran.hearthstone.util.Images;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;
import org.netbeans.lib.awtextra.AbsoluteConstraints;
import org.netbeans.lib.awtextra.AbsoluteLayout;
/**<|fim▁hole|> * @author Vinicius
*/
public class PanelHeroi extends JPanel {
private Animacao animacao;
private Congelado congelado;
private JLabel heroi;
private final ImageIcon imagemHeroi;
private final Heroi hero;
public PanelHeroi(Heroi hero, ImageIcon image) {
super(new AbsoluteLayout());
this.hero = hero;
imagemHeroi = image;
init();
}
private void init() {
setOpaque(false);
congelado = new Congelado();
heroi = new JLabel(imagemHeroi, JLabel.CENTER);
animacao = new Animacao(hero);
add(animacao, new AbsoluteConstraints(0, 0, imagemHeroi.getIconWidth(), imagemHeroi.getIconHeight()));
add(congelado, AbsolutesConstraints.ZERO);
add(heroi, AbsolutesConstraints.ZERO);
}
public void atualizar() {
congelado.repaint();
heroi.repaint();
}
public void setFreeze(boolean flag) {
congelado.setVisible(flag);
}
public Animacao getAnimacao() {
return animacao;
}
@Override
public String toString() {
return hero.getToString();
}
}
class Congelado extends JLabel {
public Congelado() {
super(Images.HEROI_CONGELADO, JLabel.CENTER);
init();
}
private void init() {
setOpaque(false);
setVisible(false);
}
}<|fim▁end|>
|
*
|
<|file_name|>Shape.js<|end_file_name|><|fim▁begin|>/**
* @class Ext.sparkline.Shape
* @private
*/
Ext.define('Ext.sparkline.Shape', {
constructor: function (target, id, type, args) {
this.target = target;
this.id = id;
this.type = type;
this.args = args;
},
append: function () {
this.target.appendShape(this);<|fim▁hole|> }
});<|fim▁end|>
|
return this;
|
<|file_name|>server2.cc<|end_file_name|><|fim▁begin|>#include "muduo/net/TcpServer.h"
#include "muduo/base/Atomic.h"
#include "muduo/base/FileUtil.h"
#include "muduo/base/Logging.h"
#include "muduo/base/ProcessInfo.h"
#include "muduo/base/Thread.h"
#include "muduo/net/EventLoop.h"
#include "muduo/net/InetAddress.h"
#include <utility>
#include <stdio.h>
#include <unistd.h>
using namespace muduo;
using namespace muduo::net;
int numThreads = 0;
class EchoServer
{
public:
EchoServer(EventLoop* loop, const InetAddress& listenAddr)
: server_(loop, listenAddr, "EchoServer"),
startTime_(Timestamp::now())
{
server_.setConnectionCallback(
std::bind(&EchoServer::onConnection, this, _1));
server_.setMessageCallback(
std::bind(&EchoServer::onMessage, this, _1, _2, _3));
server_.setThreadNum(numThreads);
loop->runEvery(5.0, std::bind(&EchoServer::printThroughput, this));
}
void start()
{
LOG_INFO << "starting " << numThreads << " threads.";
server_.start();
}
<|fim▁hole|> << conn->localAddress().toIpPort() << " is "
<< (conn->connected() ? "UP" : "DOWN");
conn->setTcpNoDelay(true);
if (conn->connected())
{
connections_.increment();
}
else
{
connections_.decrement();
}
}
void onMessage(const TcpConnectionPtr& conn, Buffer* buf, Timestamp)
{
size_t len = buf->readableBytes();
transferredBytes_.addAndGet(len);
receivedMessages_.incrementAndGet();
conn->send(buf);
}
void printThroughput()
{
Timestamp endTime = Timestamp::now();
double bytes = static_cast<double>(transferredBytes_.getAndSet(0));
int msgs = receivedMessages_.getAndSet(0);
double bytesPerMsg = msgs > 0 ? bytes/msgs : 0;
double time = timeDifference(endTime, startTime_);
printf("%.3f MiB/s %.2f Kilo Msgs/s %.2f bytes per msg, ",
bytes/time/1024/1024,
static_cast<double>(msgs)/time/1000,
bytesPerMsg);
printConnection();
fflush(stdout);
startTime_ = endTime;
}
void printConnection()
{
string procStatus = ProcessInfo::procStatus();
printf("%d conn, files %d , VmSize %ld KiB, RSS %ld KiB, ",
connections_.get(),
ProcessInfo::openedFiles(),
getLong(procStatus, "VmSize:"),
getLong(procStatus, "VmRSS:"));
string meminfo;
FileUtil::readFile("/proc/meminfo", 65536, &meminfo);
long total_kb = getLong(meminfo, "MemTotal:");
long free_kb = getLong(meminfo, "MemFree:");
long buffers_kb = getLong(meminfo, "Buffers:");
long cached_kb = getLong(meminfo, "Cached:");
printf("system memory used %ld KiB\n",
total_kb - free_kb - buffers_kb - cached_kb);
}
long getLong(const string& procStatus, const char* key)
{
long result = 0;
size_t pos = procStatus.find(key);
if (pos != string::npos)
{
result = ::atol(procStatus.c_str() + pos + strlen(key));
}
return result;
}
TcpServer server_;
AtomicInt32 connections_;
AtomicInt32 receivedMessages_;
AtomicInt64 transferredBytes_;
Timestamp startTime_;
};
int main(int argc, char* argv[])
{
LOG_INFO << "pid = " << getpid()
<< ", tid = " << CurrentThread::tid()
<< ", max files = " << ProcessInfo::maxOpenFiles();
Logger::setLogLevel(Logger::WARN);
if (argc > 1)
{
numThreads = atoi(argv[1]);
}
EventLoop loop;
InetAddress listenAddr(2007);
EchoServer server(&loop, listenAddr);
server.start();
loop.loop();
}<|fim▁end|>
|
private:
void onConnection(const TcpConnectionPtr& conn)
{
LOG_TRACE << conn->peerAddress().toIpPort() << " -> "
|
<|file_name|>model_protobuf_null_value.go<|end_file_name|><|fim▁begin|>/*
* Echo Service
*<|fim▁hole|> * Echo Service API consists of a single service which returns a message.
*
* API version: version not set
* Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git)
*/
package echo
// ProtobufNullValue : `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value.
type ProtobufNullValue string
// List of protobufNullValue
const (
NULL_VALUE_ProtobufNullValue ProtobufNullValue = "NULL_VALUE"
)<|fim▁end|>
| |
<|file_name|>fs.py<|end_file_name|><|fim▁begin|>import os
import sys
import errno
import itertools
import logging
import stat
import threading
from fuse import FuseOSError, Operations
from . import exceptions, utils
from .keys import Key
from .logs import Log
from .views import View
logger = logging.getLogger('basefs.fs')
class ViewToErrno():
def __enter__(self):
return self
def __exit__(self, exc_type, exc, exc_tb):
if exc_type is exceptions.PermissionDenied:
raise FuseOSError(errno.EACCES)
if exc_type is exceptions.DoesNotExist:
raise FuseOSError(errno.ENOENT)
if exc_type is exceptions.Exists:
raise FuseOSError(errno.EEXIST)
class FileSystem(Operations):
def __init__(self, view, serf=None, serf_agent=None, init_function=None):
self.view = view
self.cache = {}
self.dirty = {}
self.loaded = view.log.loaded
self.init_function = init_function
self.serf = serf
self.serf_agent = serf_agent
def __call__(self, op, path, *args):
logger.debug('-> %s %s %s', op, path, repr(args))
ret = '[Unhandled Exception]'
try:
ret = getattr(self, op)(path, *args)
return ret
except OSError as e:
ret = str(e)
raise
finally:
logger.debug('<- %s %s', op, repr(ret))
def init(self, path):
""" threads should start here, otherwise will not run when fuse is backgrounded """
if self.init_function:
self.init_function()
def destroy(self, path):
super().destroy(path)
if self.serf_agent:
self.serf_agent.stop()
def get_node(self, path):
# check if logfile has been modified
if self.loaded != self.view.log.loaded:
logger.debug('-> %s rebuild', path)
self.view.build()
self.loaded = self.view.log.loaded
with ViewToErrno():
node = self.view.get(path)
if node.entry.action == node.entry.DELETE:
raise FuseOSError(errno.ENOENT)
return node
def send(self, node):
if self.serf:
entry = node.entry
logger.debug("Sending entry %s '%s'", entry.hash, entry.name)
self.serf.send(node.entry)
# def access(self, path, mode):
# return super(FileSystem, self).access(path, mode)
# full_path = self._full_path(path)
# if not os.access(full_path, mode):
# raise FuseOSError(errno.EACCES)
# def chmod(self, path, mode):
# full_path = self._full_path(path)
# return os.chmod(full_path, mode)
# def chown(self, path, uid, gid):
# full_path = self._full_path(path)
# return os.chown(full_path, uid, gid)
def getattr(self, path, fh=None):
try:
content = self.cache[path]
except KeyError:
node = self.get_node(path)
has_perm = bool(self.view.get_key(path))
if node.entry.action == node.entry.MKDIR:
mode = stat.S_IFDIR | (0o0750 if has_perm else 0o0550)
else:
mode = stat.S_IFREG | (0o0640 if has_perm else 0o0440)
return {
'st_atime': node.entry.timestamp,
'st_ctime': node.entry.ctime,
'st_gid': os.getgid(),
'st_mode': mode,
'st_mtime': node.entry.timestamp,
'st_nlink': 1,
'st_size': len(node.content),
'st_uid': os.getuid(),
}
else:
import time
return {
'st_atime': time.time(),
'st_ctime': time.time(),
'st_gid': os.getgid(),
'st_mode': stat.S_IFREG | 0o0640,
'st_mtime': time.time(),
'st_nlink': 1,
'st_size': len(content),
'st_uid': os.getuid(),
}
# full_path = self._full_path(path)
# st = os.lstat(full_path)
# return dict((key, getattr(st, key)) for key in ())
def readdir(self, path, fh):
node = self.get_node(path)
entry = node.entry
dirs = ['.', '..']
for d in itertools.chain(dirs, [child.entry.name for child in node.childs if child.entry.action not in (entry.DELETE, entry.GRANT, entry.REVOKE)]):
yield d
# def readlink(self, path):
# pathname = os.readlink(self._full_path(path))
# if pathname.startswith("/"):
# # Path name is absolute, sanitize it.
# return os.path.relpath(pathname, self.root)
# else:
# return pathname
def mknod(self, path, mode, dev):
raise NotImplementedError
def rmdir(self, path):
with ViewToErrno():
node = self.view.delete(path)
self.send(node)
def mkdir(self, path, mode):
with ViewToErrno():
node = self.view.mkdir(path)
self.send(node)
return 0
# def statfs(self, path):
# full_path = self._full_path(path)
# stv = os.statvfs(full_path)
# return dict((key, getattr(stv, key)) for key in ('f_bavail', 'f_bfree',
# 'f_blocks', 'f_bsize', 'f_favail', 'f_ffree', 'f_files', 'f_flag',
# 'f_frsize', 'f_namemax'))
def unlink(self, path):
with ViewToErrno():
node = self.view.delete(path)
self.send(node)
# return os.unlink(self._full_path(path))
# def symlink(self, name, target):<|fim▁hole|>
# def link(self, target, name):
# return os.link(self._full_path(target), self._full_path(name))
# def utimens(self, path, times=None):
# return os.utime(self._full_path(path), times)
# # File methods
# # ============
def open(self, path, flags):
node = self.get_node(path)
id = int(node.entry.hash, 16)
if path not in self.cache:
self.cache[path] = node.content
self.dirty[path] = False
return id
def create(self, path, mode, fi=None):
self.cache[path] = b''
self.dirty[path] = True
return id(path)
def read(self, path, length, offset, fh):
try:
content = self.cache[path]
except KeyError:
node = self.get_node(path)
content = node.content
return content[offset:offset+length]
def write(self, path, buf, offset, fh):
# TODO check write perissions
try:
content = self.cache[path]
except KeyError:
node = self.get_node(path)
content = node.content
size = len(buf)
new_content = content[:offset] + buf + content[offset+size:]
if content != new_content:
self.dirty[path] = True
self.cache[path] = new_content
return size
def truncate(self, path, length, fh=None):
self.cache[path] = self.cache[path][:length]
self.dirty[path] = True
# def flush(self, path, fh):
# # TODO Filesystems shouldn't assume that flush will always be called after some writes, or that if will be called at all.
# content = self.cache.pop(path, None)
# dirty = self.dirty.pop(path, False)
# if content is not None and dirty:
# print('write')
# node = self.view.write(path, content)
## self.send(node)
def release(self, path, fh):
content = self.cache.pop(path, None)
dirty = self.dirty.pop(path, False)
if content is not None and dirty:
# TODO raise permission denied should happen in write() create().... not here
with ViewToErrno():
node = self.view.write(path, content)
self.send(node)
# def fsync(self, path, fdatasync, fh):
# return self.flush(path, fh)
# return None<|fim▁end|>
|
# return os.symlink(name, self._full_path(target))
def rename(self, old, new):
raise NotImplementedError
|
<|file_name|>test.py<|end_file_name|><|fim▁begin|># --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Xinlei Chen
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
#import cv2
from scipy.misc import imresize
from scipy.misc import imread
import numpy as np
try:
import cPickle as pickle
except ImportError:
import pickle
import os
import math
import tensorflow as tf
from utils.timer import Timer
from utils.cython_nms import nms, nms_new
from utils.boxes_grid import get_boxes_grid
from utils.blob import im_list_to_blob
from model.config import cfg, get_output_dir
from model.bbox_transform import clip_boxes, bbox_transform_inv
def _get_image_blob(im):
"""Converts an image into a network input.
Arguments:
im (ndarray): a color image in BGR order
Returns:
blob (ndarray): a data blob holding an image pyramid
im_scale_factors (list): list of image scales (relative to im) used
in the image pyramid
"""
im_orig = im.astype(np.float32, copy=True)
im_orig -= cfg.PIXEL_MEANS
im_shape = im_orig.shape
im_size_min = np.min(im_shape[0:2])
im_size_max = np.max(im_shape[0:2])
processed_ims = []
im_scale_factors = []
for target_size in cfg.TEST.SCALES:
im_scale = float(target_size) / float(im_size_min)
# Prevent the biggest axis from being more than MAX_SIZE
if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE:
im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max)
im_row,im_col,_ = im.shape
im = imresize(im_orig, (int(im_row*im_scale), int(im_col*im_scale)))
im_scale_factors.append(im_scale)
processed_ims.append(im)<|fim▁hole|>
# Create a blob to hold the input images
blob = im_list_to_blob(processed_ims)
return blob, np.array(im_scale_factors)
def _get_blobs(im):
"""Convert an image and RoIs within that image into network inputs."""
blobs = {}
blobs['data'], im_scale_factors = _get_image_blob(im)
return blobs, im_scale_factors
def _clip_boxes(boxes, im_shape):
"""Clip boxes to image boundaries."""
# x1 >= 0
boxes[:, 0::4] = np.maximum(boxes[:, 0::4], 0)
# y1 >= 0
boxes[:, 1::4] = np.maximum(boxes[:, 1::4], 0)
# x2 < im_shape[1]
boxes[:, 2::4] = np.minimum(boxes[:, 2::4], im_shape[1] - 1)
# y2 < im_shape[0]
boxes[:, 3::4] = np.minimum(boxes[:, 3::4], im_shape[0] - 1)
return boxes
def _rescale_boxes(boxes, inds, scales):
"""Rescale boxes according to image rescaling."""
for i in range(boxes.shape[0]):
boxes[i,:] = boxes[i,:] / scales[int(inds[i])]
return boxes
def im_detect(sess, net, im):
blobs, im_scales = _get_blobs(im)
assert len(im_scales) == 1, "Only single-image batch implemented"
im_blob = blobs['data']
# seems to have height, width, and image scales
# still not sure about the scale, maybe full image it is 1.
blobs['im_info'] = \
np.array([[im_blob.shape[1], im_blob.shape[2], im_scales[0]]], dtype=np.float32)
_, scores, bbox_pred, rois = \
net.test_image(sess, blobs['data'], blobs['im_info'])
boxes = rois[:, 1:5] / im_scales[0]
# print(scores.shape, bbox_pred.shape, rois.shape, boxes.shape)
scores = np.reshape(scores, [scores.shape[0], -1])
bbox_pred = np.reshape(bbox_pred, [bbox_pred.shape[0], -1])
if cfg.TEST.BBOX_REG:
# Apply bounding-box regression deltas
box_deltas = bbox_pred
pred_boxes = bbox_transform_inv(boxes, box_deltas)
pred_boxes = _clip_boxes(pred_boxes, im.shape)
else:
# Simply repeat the boxes, once for each class
pred_boxes = np.tile(boxes, (1, scores.shape[1]))
return scores, pred_boxes
def apply_nms(all_boxes, thresh):
"""Apply non-maximum suppression to all predicted boxes output by the
test_net method.
"""
num_classes = len(all_boxes)
num_images = len(all_boxes[0])
nms_boxes = [[[] for _ in range(num_images)] for _ in range(num_classes)]
for cls_ind in range(num_classes):
for im_ind in range(num_images):
dets = all_boxes[cls_ind][im_ind]
if dets == []:
continue
x1 = dets[:, 0]
y1 = dets[:, 1]
x2 = dets[:, 2]
y2 = dets[:, 3]
scores = dets[:, 4]
inds = np.where((x2 > x1) & (y2 > y1) & (scores > cfg.TEST.DET_THRESHOLD))[0]
dets = dets[inds,:]
if dets == []:
continue
keep = nms(dets, thresh)
if len(keep) == 0:
continue
nms_boxes[cls_ind][im_ind] = dets[keep, :].copy()
return nms_boxes
def test_net(sess, net, imdb, weights_filename, experiment_setup=None,
max_per_image=100, thresh=0.05):
np.random.seed(cfg.RNG_SEED)
"""Test a Fast R-CNN network on an image database."""
num_images = len(imdb.image_index)
# num_images = 2
# all detections are collected into:
# all_boxes[cls][image] = N x 5 array of detections in
# (x1, y1, x2, y2, score)
all_boxes = [[[] for _ in range(num_images)]
for _ in range(imdb.num_classes)]
output_dir = get_output_dir(imdb, weights_filename)
print('using output_dir: ', output_dir)
# timers
_t = {'im_detect' : Timer(), 'misc' : Timer()}
# define a writer to write the histogram of summaries
# test_tbdir = '/home/shuang/projects/tf-faster-rcnn/tensorboard/'
# if not os.path.exists(test_tbdir):
# print('making directory for test tensorboard result')
# os.mkdir(test_tbdir)
# writer = tf.summary.FileWriter(test_tbdir,sess.graph)
# define a folder for activation results
test_actdir = '../activations_retrained'
if not os.path.exists(test_actdir):
os.mkdir(test_actdir)
# define a folder for zero fractions
test_zerodir = './zero_fractions'
if not os.path.exists(test_zerodir):
os.mkdir(test_zerodir)
for i in range(num_images):
im = imread(imdb.image_path_at(i))
_t['im_detect'].tic()
scores, boxes = im_detect(sess, net, im)
_t['im_detect'].toc()
# write act summaries to tensorboard
# writer.add_summary(act_summaries)
# record the zero fraction -> only for vgg16
# zero_frac = []
# for layer_ind in range(13):
# batch_num,row,col,filter_num = acts[layer_ind].shape
# zero_frac.append([])
# for j in range(filter_num):
# # print(acts[0][:,:,:,i].shape)
# fraction = 1-np.count_nonzero(acts[layer_ind][:,:,:,j])/(batch_num*row*col)
# zero_frac[layer_ind].append(fraction)
_t['misc'].tic()
# skip j = 0, because it's the background class
chosen_classes = []
for j in range(1, imdb.num_classes):
# for j, clas in enumerate(imdb._classes[1:]):
inds = np.where(scores[:, j] > thresh)[0]
cls_scores = scores[inds, j]
cls_boxes = boxes[inds, j*4:(j+1)*4]
cls_dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \
.astype(np.float32, copy=False)
keep = nms(cls_dets, cfg.TEST.NMS)
cls_dets = cls_dets[keep, :]
all_boxes[j][i] = cls_dets
# if len(cls_dets)!=0: # only for recording activations_res
# chosen_classes.append(imdb._classes[j])
# Limit to max_per_image detections *over all classes*
if max_per_image > 0:
image_scores = np.hstack([all_boxes[j][i][:, -1]
for j in range(1, imdb.num_classes)])
if len(image_scores) > max_per_image:
image_thresh = np.sort(image_scores)[-max_per_image]
for j in range(1, imdb.num_classes):
keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0]
all_boxes[j][i] = all_boxes[j][i][keep, :]
_t['misc'].toc()
# write acts to a seperate text file for each seprate image file -> only vgg
# f_name = '{}/{}.txt'.format(test_actdir,i)
# act_file = open(f_name,'w')
# act_file.write('\n'.join(chosen_classes))
# act_file.write('\n')
# sum_act = []
# for arr in acts:
# temp = np.sum(arr,axis = (0,1,2))
# sum_act.append(temp)
# for item in sum_act:
# act_file.write('{}\n'.format(str(item)))
# act_file.close()
# chosen_classes = []
# write zero fractions to text files -> only vgg
# file_name = '{}/{}.txt'.format(test_zerodir,i)
# zero_file = open(file_name,'w')
# zero_file.write('\n'.join(chosen_classes))
# zero_file.write('\n')
# for arr in zero_frac:
# zero_file.write('{}\n'.format(str(arr)))
# zero_file.close()
# chosen_classes = []
if i%1000==0:
print('im_detect: {:d}/{:d} {:.3f}s {:.3f}s' \
.format(i + 1, num_images, _t['im_detect'].average_time,
_t['misc'].average_time))
# writer.close()
det_file = os.path.join(output_dir, 'detections.pkl')
with open(det_file, 'wb') as f:
pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)
print('Evaluating detections')
imdb.evaluate_detections(all_boxes, output_dir, experiment_setup)<|fim▁end|>
| |
<|file_name|>activation-sink-default-value.js<|end_file_name|><|fim▁begin|>var n = 10000000;
// Call the provided function with the constant argument 10.
function bar(f) {
    f(10);
}
function foo(b) {
var result = 0;
var imUndefined;
var baz;
var set = function (x) { result = x; return (imUndefined, baz); }
baz = 40;
if (b) {
bar(set);
if (result != 10)
throw "Error: bad: " + result;
if (baz !== 40)
throw "Error: bad: " + baz;
if (imUndefined !== void 0)
throw "Error: bad value: " + imUndefined;
return 0;
}<|fim▁hole|>
noInline(bar);
noInline(foo);
for (var i = 0; i < n; i++) {
var result = foo(!(i % 100));
if (result != 0)
throw "Error: bad result: " + result;
}<|fim▁end|>
|
return result;
}
|
<|file_name|>logutil.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#
# logutil.py
# A module containing means of interacting with log files.
#
import logging
import logging.handlers
import os
import time
from data_structures import enum
from config import get_config_value
# Enumeration of the logical subsystems a logger can be attributed to.
# (`enum` comes from the project-local data_structures module; it also
# provides the reverse_mapping used by get_logger below.)
LoggingSection = enum(
    'CLIENT',
    'CRAWLER',
    'DATA',
    'FRONTIER',
    'TEST',
    'UTILITIES',
)

#region Setup
# Root logging configuration: INFO level, bracketed timestamp/level prefix.
logging.basicConfig(level=logging.INFO,
                    format='[%(asctime)s %(levelname)s] %(name)s::%(funcName)s - %(message)s',
                    datefmt='%x %X %Z')

# Resolve log file and directory relative to this module, from config keys
# LOG/path and LOG/dir; create the directory on first import if missing.
module_dir = os.path.dirname(__file__)
logfile = os.path.join(module_dir, get_config_value('LOG', 'path'))
logdir = os.path.join(module_dir, get_config_value('LOG', 'dir'))
if not os.path.exists(logdir):
    os.mkdir(logdir)

# Single shared rotating-file handler: rolls over at 8 KiB, keeping up to
# 10 backups. Timestamps are formatted in GMT (time.gmtime converter).
handler = logging.handlers.RotatingFileHandler(logfile,
                                               maxBytes=8192,
                                               backupCount=10, )
formatter = logging.Formatter('[%(asctime)s %(levelname)s] %(name)s::%(funcName)s - %(message)s')
formatter.datefmt = '%x %X %Z'
formatter.converter = time.gmtime
handler.setFormatter(formatter)
#endregion
def get_logger(section, name):
    """
    Fetches a logger.

    Arguments:
        section (string): The section the logger is attributed to
            (a LoggingSection value).
        name (string): The name of the logger.

    Returns:
        The logger corresponding to the section and name provided.
    """
    section_name = LoggingSection.reverse_mapping[section].lower()
    logger = logging.getLogger('htresearch.{0}.{1}'.format(section_name, name))
    # logging.getLogger returns the same logger object for the same name, so
    # guard against attaching the shared rotating-file handler more than once:
    # repeated get_logger() calls would otherwise duplicate every log record.
    if handler not in logger.handlers:
        logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    return logger
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! LES Protocol Version 1 implementation.
//!
//! This uses a "Provider" to answer requests.
//! See https://github.com/ethcore/parity/wiki/Light-Ethereum-Subprotocol-(LES)
use ethcore::transaction::UnverifiedTransaction;
use ethcore::receipt::Receipt;
use io::TimerToken;
use network::{NetworkProtocolHandler, NetworkContext, PeerId};
use rlp::{RlpStream, Stream, UntrustedRlp, View};
use util::hash::H256;
use util::{Bytes, Mutex, RwLock, U256};
use time::{Duration, SteadyTime};
use std::collections::HashMap;
use std::fmt;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use provider::Provider;
use request::{self, HashOrNumber, Request};
use self::buffer_flow::{Buffer, FlowParams};
use self::context::{Ctx, TickCtx};
use self::error::Punishment;
use self::request_set::RequestSet;
use self::id_guard::IdGuard;
mod context;
mod error;
mod status;
mod request_set;
#[cfg(test)]
mod tests;
pub mod buffer_flow;
pub use self::error::Error;
pub use self::context::{BasicContext, EventContext, IoContext};
pub use self::status::{Status, Capabilities, Announcement};
// I/O timer token and polling interval for request/handshake timeouts
// (presumably registered with the I/O layer; registration is not in this
// file section — see timeout_check for the work done on each firing).
const TIMEOUT: TimerToken = 0;
const TIMEOUT_INTERVAL_MS: u64 = 1000;

// I/O timer token and interval for the periodic handler tick
// (see tick_handlers).
const TICK_TIMEOUT: TimerToken = 1;
const TICK_TIMEOUT_INTERVAL_MS: u64 = 5000;

// minimum interval between updates: throttles per-peer announcements
// in make_announcement.
const UPDATE_INTERVAL_MS: i64 = 5000;

/// Supported protocol versions.
pub const PROTOCOL_VERSIONS: &'static [u8] = &[1];

/// Max protocol version.
pub const MAX_PROTOCOL_VERSION: u8 = 1;

/// Packet count for LES.
pub const PACKET_COUNT: u8 = 15;
// packet ID definitions. Request/response pairs use adjacent even/odd IDs.
mod packet {
    // the status packet (handshake).
    pub const STATUS: u8 = 0x00;

    // announcement of new block hashes or capabilities.
    pub const ANNOUNCE: u8 = 0x01;

    // request and response for block headers
    pub const GET_BLOCK_HEADERS: u8 = 0x02;
    pub const BLOCK_HEADERS: u8 = 0x03;

    // request and response for block bodies
    pub const GET_BLOCK_BODIES: u8 = 0x04;
    pub const BLOCK_BODIES: u8 = 0x05;

    // request and response for transaction receipts.
    pub const GET_RECEIPTS: u8 = 0x06;
    pub const RECEIPTS: u8 = 0x07;

    // request and response for merkle proofs.
    pub const GET_PROOFS: u8 = 0x08;
    pub const PROOFS: u8 = 0x09;

    // request and response for contract code.
    pub const GET_CONTRACT_CODES: u8 = 0x0a;
    pub const CONTRACT_CODES: u8 = 0x0b;

    // relay transactions to peers.
    pub const SEND_TRANSACTIONS: u8 = 0x0c;

    // request and response for header proofs in a CHT.
    pub const GET_HEADER_PROOFS: u8 = 0x0d;
    pub const HEADER_PROOFS: u8 = 0x0e;
}
// timeouts for different kinds of requests. all values are in milliseconds.
// HANDSHAKE bounds how long a pending peer may take to reply to our status;
// the rest bound the corresponding in-flight request kinds (enforcement of
// per-request timeouts lives in RequestSet::check_timeout).
// TODO: variable timeouts based on request count.
mod timeout {
    pub const HANDSHAKE: i64 = 2500;
    pub const HEADERS: i64 = 5000;
    pub const BODIES: i64 = 5000;
    pub const RECEIPTS: i64 = 3500;
    pub const PROOFS: i64 = 4000;
    pub const CONTRACT_CODES: i64 = 5000;
    pub const HEADER_PROOFS: i64 = 3500;
}
/// A request id.
///
/// Wraps the sequential counter value handed out by `LightProtocol`
/// (see `request_from`, which allocates these from an `AtomicUsize`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub struct ReqId(usize);

impl fmt::Display for ReqId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Request #{}", self.0)
    }
}
// A pending peer: one we've sent our status to but
// may not have received one for. Tracked so the handshake can be timed out
// (timeout::HANDSHAKE) and so the head we advertised can be carried over
// into the full `Peer` record once the handshake completes.
struct PendingPeer {
    // chain head hash we sent in our status packet.
    sent_head: H256,
    // when we sent it; used for handshake timeout checks.
    last_update: SteadyTime,
}
/// Relevant data to each peer. Not accessible publicly, only `pub` due to
/// limitations of the privacy system.
pub struct Peer {
    local_buffer: Buffer, // their buffer relative to us
    // status from their handshake, updated by subsequent announcements.
    status: Status,
    capabilities: Capabilities,
    // their buffer relative to them, plus their flow params; `None` means
    // the peer is not serving requests (we treat it as client-only).
    remote_flow: Option<(Buffer, FlowParams)>,
    sent_head: H256, // last chain head we've given them.
    // last time we sent them an announcement; throttled by UPDATE_INTERVAL_MS.
    last_update: SteadyTime,
    // requests we have in flight to this peer.
    pending_requests: RequestSet,
    // requests whose responses failed pre-verification or decoding
    // (populated by IdGuard on drop); reported on disconnect as unfulfilled.
    failed_requests: Vec<ReqId>,
}
impl Peer {
    // check the maximum cost of a request, returning an error if there's
    // not enough buffer left.
    // returns the calculated maximum cost.
    //
    // The buffer is recharged first, then the worst-case cost for `max`
    // items of `kind` is deducted up front; the caller refunds the
    // difference once the actual response size is known (see `refund`).
    fn deduct_max(&mut self, flow_params: &FlowParams, kind: request::Kind, max: usize) -> Result<U256, Error> {
        flow_params.recharge(&mut self.local_buffer);

        let max_cost = flow_params.compute_cost(kind, max);

        self.local_buffer.deduct_cost(max_cost)?;
        Ok(max_cost)
    }

    // refund buffer for a request. returns new buffer amount.
    fn refund(&mut self, flow_params: &FlowParams, amount: U256) -> U256 {
        flow_params.refund(&mut self.local_buffer, amount);

        self.local_buffer.current()
    }
}
/// An LES event handler.
///
/// Each handler function takes a context which describes the relevant peer
/// and gives references to the IO layer and protocol structure so new messages
/// can be dispatched immediately.
///
/// Request responses are not guaranteed to be complete or valid, but passed IDs will be correct.
/// Response handlers are not given a copy of the original request; it is assumed
/// that relevant data will be stored by interested handlers.
///
/// All methods have empty default implementations, so implementors override
/// only the events they care about.
pub trait Handler: Send + Sync {
    /// Called when a peer connects.
    fn on_connect(&self, _ctx: &EventContext, _status: &Status, _capabilities: &Capabilities) { }
    /// Called when a peer disconnects, with a list of unfulfilled request IDs as
    /// of yet.
    fn on_disconnect(&self, _ctx: &EventContext, _unfulfilled: &[ReqId]) { }
    /// Called when a peer makes an announcement.
    fn on_announcement(&self, _ctx: &EventContext, _announcement: &Announcement) { }
    /// Called when a peer requests relay of some transactions.
    fn on_transactions(&self, _ctx: &EventContext, _relay: &[UnverifiedTransaction]) { }
    /// Called when a peer responds with block bodies.
    fn on_block_bodies(&self, _ctx: &EventContext, _req_id: ReqId, _bodies: &[Bytes]) { }
    /// Called when a peer responds with block headers.
    fn on_block_headers(&self, _ctx: &EventContext, _req_id: ReqId, _headers: &[Bytes]) { }
    /// Called when a peer responds with block receipts.
    fn on_receipts(&self, _ctx: &EventContext, _req_id: ReqId, _receipts: &[Vec<Receipt>]) { }
    /// Called when a peer responds with state proofs. Each proof should be a series of trie
    /// nodes in ascending order by distance from the root.
    fn on_state_proofs(&self, _ctx: &EventContext, _req_id: ReqId, _proofs: &[Vec<Bytes>]) { }
    /// Called when a peer responds with contract code.
    fn on_code(&self, _ctx: &EventContext, _req_id: ReqId, _codes: &[Bytes]) { }
    /// Called when a peer responds with header proofs. Each proof should be a block header coupled
    /// with a series of trie nodes is ascending order by distance from the root.
    fn on_header_proofs(&self, _ctx: &EventContext, _req_id: ReqId, _proofs: &[(Bytes, Vec<Bytes>)]) { }
    /// Called to "tick" the handler periodically.
    fn tick(&self, _ctx: &BasicContext) { }
    /// Called on abort. This signals to handlers that they should clean up
    /// and ignore peers.
    // TODO: corresponding `on_activate`?
    fn on_abort(&self) { }
}
/// Protocol parameters supplied when constructing a `LightProtocol`.
pub struct Params {
    /// Network id.
    pub network_id: u64,
    /// Buffer flow parameters. These are assumed static and applied
    /// identically to every connected peer.
    pub flow_params: FlowParams,
    /// Initial capabilities.
    pub capabilities: Capabilities,
}

/// Type alias for convenience: peer id -> per-peer state, each entry
/// individually locked so a read lock on the map suffices for most access.
pub type PeerMap = HashMap<PeerId, Mutex<Peer>>;
mod id_guard {
    use network::PeerId;
    use util::RwLockReadGuard;

    use super::{PeerMap, ReqId};

    // Guards success or failure of given request.
    // On drop, inserts the req_id into the "failed requests"
    // set for the peer unless defused. In separate module to enforce correct usage.
    //
    // This makes failure the default outcome: any early return (e.g. a
    // decoding error propagated with `?`) records the request as failed,
    // and only an explicit `defuse()` marks it successful.
    pub struct IdGuard<'a> {
        peers: RwLockReadGuard<'a, PeerMap>,
        peer_id: PeerId,
        req_id: ReqId,
        // whether the guard is still "armed"; cleared by defuse().
        active: bool,
    }

    impl<'a> IdGuard<'a> {
        /// Create a new `IdGuard`, which will prevent access of the inner ReqId
        /// (for forming responses, triggering handlers) until defused
        pub fn new(peers: RwLockReadGuard<'a, PeerMap>, peer_id: PeerId, req_id: ReqId) -> Self {
            IdGuard {
                peers: peers,
                peer_id: peer_id,
                req_id: req_id,
                active: true,
            }
        }

        /// Defuse the guard, signalling that the request has been successfully decoded.
        pub fn defuse(mut self) -> ReqId {
            // can't use the mem::forget trick here since we need the
            // read guard to drop.
            self.active = false;
            self.req_id
        }
    }

    impl<'a> Drop for IdGuard<'a> {
        fn drop(&mut self) {
            if !self.active { return }
            // still armed: record the request as failed for this peer
            // (the peer may have been removed concurrently, hence the get).
            if let Some(p) = self.peers.get(&self.peer_id) {
                p.lock().failed_requests.push(self.req_id);
            }
        }
    }
}
/// This is an implementation of the light ethereum network protocol, abstracted
/// over a `Provider` of data and a p2p network.
///
/// This is simply designed for request-response purposes. Higher level uses
/// of the protocol, such as synchronization, will function as wrappers around
/// this system.
//
// LOCK ORDER:
// Locks must be acquired in the order declared, and when holding a read lock
// on the peers, only one peer may be held at a time.
pub struct LightProtocol {
    // source of chain data served to peers.
    provider: Arc<Provider>,
    genesis_hash: H256,
    network_id: u64,
    // peers mid-handshake (status sent, reply awaited).
    pending_peers: RwLock<HashMap<PeerId, PendingPeer>>,
    // fully connected peers.
    peers: RwLock<PeerMap>,
    // our advertised capabilities; updated by make_announcement.
    capabilities: RwLock<Capabilities>,
    flow_params: FlowParams, // assumed static and same for every peer.
    handlers: Vec<Arc<Handler>>,
    // monotonically increasing counter for allocating request ids.
    req_id: AtomicUsize,
}
impl LightProtocol {
    /// Create a new instance of the protocol manager.
    pub fn new(provider: Arc<Provider>, params: Params) -> Self {
        debug!(target: "les", "Initializing LES handler");

        let genesis_hash = provider.chain_info().genesis_hash;
        LightProtocol {
            provider: provider,
            genesis_hash: genesis_hash,
            network_id: params.network_id,
            pending_peers: RwLock::new(HashMap::new()),
            peers: RwLock::new(HashMap::new()),
            capabilities: RwLock::new(params.capabilities),
            flow_params: params.flow_params,
            handlers: Vec::new(),
            req_id: AtomicUsize::new(0),
        }
    }

    /// Attempt to get peer status. Returns `None` if the peer is unknown.
    pub fn peer_status(&self, peer: &PeerId) -> Option<Status> {
        self.peers.read().get(&peer)
            .map(|peer| peer.lock().status.clone())
    }

    /// Check the maximum amount of requests of a specific type
    /// which a peer would be able to serve. Returns zero if the
    /// peer is unknown or has no buffer flow parameters.
    fn max_requests(&self, peer: PeerId, kind: request::Kind) -> usize {
        self.peers.read().get(&peer).and_then(|peer| {
            let mut peer = peer.lock();
            match peer.remote_flow {
                Some((ref mut buf, ref flow)) => {
                    // recharge first so the estimate reflects time elapsed
                    // since the last accounting update.
                    flow.recharge(buf);
                    Some(flow.max_amount(&*buf, kind))
                }
                None => None,
            }
        }).unwrap_or(0)
    }

    /// Make a request to a peer.
    ///
    /// Fails on: nonexistent peer, network error, peer not server,
    /// insufficient buffer. Does not check capabilities before sending.
    /// On success, returns a request id which can later be coordinated
    /// with an event.
    pub fn request_from(&self, io: &IoContext, peer_id: &PeerId, request: Request) -> Result<ReqId, Error> {
        let peers = self.peers.read();
        let peer = peers.get(peer_id).ok_or_else(|| Error::UnknownPeer)?;
        let mut peer = peer.lock();

        // charge the peer's remote buffer the worst-case cost up front;
        // the peer reports its actual buffer back in the response.
        match peer.remote_flow {
            Some((ref mut buf, ref flow)) => {
                flow.recharge(buf);
                let max = flow.compute_cost(request.kind(), request.amount());
                buf.deduct_cost(max)?;
            }
            None => return Err(Error::NotServer),
        }

        // allocate a fresh, process-wide-unique request id.
        let req_id = self.req_id.fetch_add(1, Ordering::SeqCst);
        let packet_data = encode_request(&request, req_id);

        trace!(target: "les", "Dispatching request {} to peer {}", req_id, peer_id);

        let packet_id = match request.kind() {
            request::Kind::Headers => packet::GET_BLOCK_HEADERS,
            request::Kind::Bodies => packet::GET_BLOCK_BODIES,
            request::Kind::Receipts => packet::GET_RECEIPTS,
            request::Kind::StateProofs => packet::GET_PROOFS,
            request::Kind::Codes => packet::GET_CONTRACT_CODES,
            request::Kind::HeaderProofs => packet::GET_HEADER_PROOFS,
        };

        io.send(*peer_id, packet_id, packet_data);

        // track the request so the response can be matched and timed out.
        peer.pending_requests.insert(ReqId(req_id), request, SteadyTime::now());

        Ok(ReqId(req_id))
    }

    /// Make an announcement of new chain head and capabilities to all peers.
    /// The announcement is expected to be valid.
    pub fn make_announcement(&self, io: &IoContext, mut announcement: Announcement) {
        // cache of reorg depths keyed by the head we last sent a peer,
        // so the (potentially expensive) computation runs once per
        // distinct previous head rather than once per peer.
        let mut reorgs_map = HashMap::new();
        let now = SteadyTime::now();

        // update stored capabilities
        self.capabilities.write().update_from(&announcement);

        // calculate reorg info and send packets
        for (peer_id, peer_info) in self.peers.read().iter() {
            let mut peer_info = peer_info.lock();

            // TODO: "urgent" announcements like new blocks?
            // the timer approach will skip 1 (possibly 2) in rare occasions.
            // skip peers already at (or past) this head, and throttle to at
            // most one announcement per UPDATE_INTERVAL_MS per peer.
            if peer_info.sent_head == announcement.head_hash ||
                peer_info.status.head_num >= announcement.head_num ||
                now - peer_info.last_update < Duration::milliseconds(UPDATE_INTERVAL_MS) {
                continue
            }

            peer_info.last_update = now;

            let reorg_depth = reorgs_map.entry(peer_info.sent_head)
                .or_insert_with(|| {
                    match self.provider.reorg_depth(&announcement.head_hash, &peer_info.sent_head) {
                        Some(depth) => depth,
                        None => {
                            // both values will always originate locally -- this means something
                            // has gone really wrong
                            debug!(target: "les", "couldn't compute reorganization depth between {:?} and {:?}",
                                &announcement.head_hash, &peer_info.sent_head);
                            0
                        }
                    }
                });

            peer_info.sent_head = announcement.head_hash;
            announcement.reorg_depth = *reorg_depth;

            io.send(*peer_id, packet::ANNOUNCE, status::write_announcement(&announcement));
        }
    }

    /// Add an event handler.
    ///
    /// These are intended to be added when the protocol structure
    /// is initialized as a means of customizing its behavior,
    /// and dispatching requests immediately upon events.
    pub fn add_handler(&mut self, handler: Arc<Handler>) {
        self.handlers.push(handler);
    }

    /// Signal to handlers that network activity is being aborted
    /// and clear peer data.
    pub fn abort(&self) {
        for handler in &self.handlers {
            handler.on_abort();
        }

        // acquire in order and hold.
        let mut pending_peers = self.pending_peers.write();
        let mut peers = self.peers.write();

        pending_peers.clear();
        peers.clear();
    }

    // Does the common pre-verification of responses before the response itself
    // is actually decoded:
    // - check whether peer exists
    // - check whether request was made
    // - check whether request kinds match
    //
    // On success, returns an armed IdGuard: unless the caller defuses it
    // after decoding, the request is recorded as failed on drop.
    fn pre_verify_response(&self, peer: &PeerId, kind: request::Kind, raw: &UntrustedRlp) -> Result<IdGuard, Error> {
        // responses lead with [req_id, updated_buffer, ...].
        let req_id = ReqId(raw.val_at(0)?);
        let cur_buffer: U256 = raw.val_at(1)?;

        trace!(target: "les", "pre-verifying response from peer {}, kind={:?}", peer, kind);

        let mut had_req = false;
        let peers = self.peers.read();
        let maybe_err = match peers.get(peer) {
            Some(peer_info) => {
                let mut peer_info = peer_info.lock();
                let req_info = peer_info.pending_requests.remove(&req_id, SteadyTime::now());
                let flow_info = peer_info.remote_flow.as_mut();

                match (req_info, flow_info) {
                    (Some(request), Some(flow_info)) => {
                        had_req = true;

                        let &mut (ref mut buf, ref mut flow) = flow_info;
                        // trust the peer's reported buffer only up to the
                        // advertised limit.
                        let actual_buffer = ::std::cmp::min(cur_buffer, *flow.limit());
                        buf.update_to(actual_buffer);

                        if request.kind() != kind {
                            Some(Error::UnsolicitedResponse)
                        } else {
                            None
                        }
                    }
                    (None, _) => Some(Error::UnsolicitedResponse),
                    (_, None) => Some(Error::NotServer), // really should be impossible.
                }
            }
            None => Some(Error::UnknownPeer), // probably only occurs in a race of some kind.
        };

        if had_req {
            let id_guard = IdGuard::new(peers, *peer, req_id);
            match maybe_err {
                Some(err) => Err(err),
                None => Ok(id_guard)
            }
        } else {
            Err(maybe_err.expect("every branch without a request leads to error; qed"))
        }
    }

    /// Handle an LES packet using the given io context.
    /// Packet data is _untrusted_, which means that invalid data won't lead to
    /// issues.
    pub fn handle_packet(&self, io: &IoContext, peer: &PeerId, packet_id: u8, data: &[u8]) {
        let rlp = UntrustedRlp::new(data);

        trace!(target: "les", "Incoming packet {} from peer {}", packet_id, peer);

        // handle the packet
        let res = match packet_id {
            packet::STATUS => self.status(peer, io, rlp),
            packet::ANNOUNCE => self.announcement(peer, io, rlp),

            packet::GET_BLOCK_HEADERS => self.get_block_headers(peer, io, rlp),
            packet::BLOCK_HEADERS => self.block_headers(peer, io, rlp),

            packet::GET_BLOCK_BODIES => self.get_block_bodies(peer, io, rlp),
            packet::BLOCK_BODIES => self.block_bodies(peer, io, rlp),

            packet::GET_RECEIPTS => self.get_receipts(peer, io, rlp),
            packet::RECEIPTS => self.receipts(peer, io, rlp),

            packet::GET_PROOFS => self.get_proofs(peer, io, rlp),
            packet::PROOFS => self.proofs(peer, io, rlp),

            packet::GET_CONTRACT_CODES => self.get_contract_code(peer, io, rlp),
            packet::CONTRACT_CODES => self.contract_code(peer, io, rlp),

            packet::GET_HEADER_PROOFS => self.get_header_proofs(peer, io, rlp),
            packet::HEADER_PROOFS => self.header_proofs(peer, io, rlp),

            packet::SEND_TRANSACTIONS => self.relay_transactions(peer, io, rlp),

            other => {
                Err(Error::UnrecognizedPacket(other))
            }
        };

        // errors are translated into peer punishment (see `punish`).
        if let Err(e) = res {
            punish(*peer, io, e);
        }
    }

    // check timeouts and punish peers.
    fn timeout_check(&self, io: &IoContext) {
        let now = SteadyTime::now();

        // handshake timeout
        {
            let mut pending = self.pending_peers.write();
            // collect ids first so the map isn't mutated while iterating.
            let slowpokes: Vec<_> = pending.iter()
                .filter(|&(_, ref peer)| {
                    peer.last_update + Duration::milliseconds(timeout::HANDSHAKE) <= now
                })
                .map(|(&p, _)| p)
                .collect();

            for slowpoke in slowpokes {
                debug!(target: "les", "Peer {} handshake timed out", slowpoke);
                pending.remove(&slowpoke);
                io.disconnect_peer(slowpoke);
            }
        }

        // request timeouts
        {
            for (peer_id, peer) in self.peers.read().iter() {
                if peer.lock().pending_requests.check_timeout(now) {
                    debug!(target: "les", "Peer {} request timeout", peer_id);
                    io.disconnect_peer(*peer_id);
                }
            }
        }
    }

    /// called when a peer connects. Sends our status packet and records
    /// the peer as pending until its own status arrives.
    pub fn on_connect(&self, peer: &PeerId, io: &IoContext) {
        let proto_version = match io.protocol_version(*peer).ok_or(Error::WrongNetwork) {
            Ok(pv) => pv,
            Err(e) => { punish(*peer, io, e); return }
        };

        if PROTOCOL_VERSIONS.iter().find(|x| **x == proto_version).is_none() {
            punish(*peer, io, Error::UnsupportedProtocolVersion(proto_version));
            return;
        }

        let chain_info = self.provider.chain_info();

        let status = Status {
            head_td: chain_info.total_difficulty,
            head_hash: chain_info.best_block_hash,
            head_num: chain_info.best_block_number,
            genesis_hash: chain_info.genesis_hash,
            protocol_version: proto_version as u32, // match peer proto version
            network_id: self.network_id,
            last_head: None,
        };

        let capabilities = self.capabilities.read().clone();
        let status_packet = status::write_handshake(&status, &capabilities, Some(&self.flow_params));

        self.pending_peers.write().insert(*peer, PendingPeer {
            sent_head: chain_info.best_block_hash,
            last_update: SteadyTime::now(),
        });

        io.send(*peer, packet::STATUS, status_packet);
    }

    /// called when a peer disconnects. Notifies handlers with the ids of
    /// all requests left unfulfilled (both still-pending and failed).
    pub fn on_disconnect(&self, peer: PeerId, io: &IoContext) {
        trace!(target: "les", "Peer {} disconnecting", peer);

        self.pending_peers.write().remove(&peer);
        let unfulfilled = match self.peers.write().remove(&peer) {
            None => return,
            Some(peer_info) => {
                let peer_info = peer_info.into_inner();
                let mut unfulfilled: Vec<_> = peer_info.pending_requests.collect_ids();
                unfulfilled.extend(peer_info.failed_requests);

                unfulfilled
            }
        };

        for handler in &self.handlers {
            handler.on_disconnect(&Ctx {
                peer: peer,
                io: io,
                proto: self,
            }, &unfulfilled)
        }
    }

    /// Execute the given closure with a basic context derived from the I/O context.
    pub fn with_context<F, T>(&self, io: &IoContext, f: F) -> T
        where F: FnOnce(&BasicContext) -> T
    {
        f(&TickCtx {
            io: io,
            proto: self,
        })
    }

    // run each registered handler's periodic tick.
    fn tick_handlers(&self, io: &IoContext) {
        for handler in &self.handlers {
            handler.tick(&TickCtx {
                io: io,
                proto: self,
            })
        }
    }
}
impl LightProtocol {
// Handle status message from peer: completes the handshake, promoting a
// pending peer to a full `Peer` entry and notifying handlers.
fn status(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
    // a status is only valid from a peer we sent our own status to and are
    // still awaiting a reply from.
    let pending = match self.pending_peers.write().remove(peer) {
        Some(pending) => pending,
        None => {
            return Err(Error::UnexpectedHandshake);
        }
    };

    let (status, capabilities, flow_params) = status::parse_handshake(data)?;

    trace!(target: "les", "Connected peer with chain head {:?}", (status.head_hash, status.head_num));

    // reject peers on a different network or chain.
    if (status.network_id, status.genesis_hash) != (self.network_id, self.genesis_hash) {
        return Err(Error::WrongNetwork);
    }

    if Some(status.protocol_version as u8) != io.protocol_version(*peer) {
        return Err(Error::BadProtocolVersion);
    }

    // a peer advertising flow parameters is a server we may request from.
    let remote_flow = flow_params.map(|params| (params.create_buffer(), params));

    self.peers.write().insert(*peer, Mutex::new(Peer {
        local_buffer: self.flow_params.create_buffer(),
        status: status.clone(),
        capabilities: capabilities.clone(),
        remote_flow: remote_flow,
        sent_head: pending.sent_head,
        last_update: pending.last_update,
        pending_requests: RequestSet::default(),
        failed_requests: Vec::new(),
    }));

    for handler in &self.handlers {
        handler.on_connect(&Ctx {
            peer: *peer,
            io: io,
            proto: self,
        }, &status, &capabilities)
    }

    Ok(())
}
// Handle an announcement: update the stored status/capabilities for the
// peer, then notify handlers.
fn announcement(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
    if !self.peers.read().contains_key(peer) {
        debug!(target: "les", "Ignoring announcement from unknown peer");
        return Ok(())
    }

    let announcement = status::parse_announcement(data)?;

    // scope to ensure locks are dropped before moving into handler-space.
    {
        let peers = self.peers.read();
        let peer_info = match peers.get(peer) {
            Some(info) => info,
            None => return Ok(()),
        };

        let mut peer_info = peer_info.lock();

        // update status.
        {
            // TODO: punish peer if they've moved backwards.
            let status = &mut peer_info.status;
            let last_head = status.head_hash;
            status.head_hash = announcement.head_hash;
            status.head_td = announcement.head_td;
            status.head_num = announcement.head_num;
            // remember the previous head plus the reorg depth from it.
            status.last_head = Some((last_head, announcement.reorg_depth));
        }

        // update capabilities.
        peer_info.capabilities.update_from(&announcement);
    }

    for handler in &self.handlers {
        handler.on_announcement(&Ctx {
            peer: *peer,
            io: io,
            proto: self,
        }, &announcement);
    }

    Ok(())
}
// Handle a request for block headers: charge the peer's buffer the
// worst-case cost, serve from the provider, refund the unused portion,
// and respond with [req_id, cur_buffer, [headers...]].
fn get_block_headers(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
    const MAX_HEADERS: usize = 512;

    let peers = self.peers.read();
    let peer = match peers.get(peer) {
        Some(peer) => peer,
        None => {
            debug!(target: "les", "Ignoring request from unknown peer");
            return Ok(())
        }
    };

    let mut peer = peer.lock();

    let req_id: u64 = data.val_at(0)?;
    let data = data.at(1)?;

    // the start block may be encoded as a 32-byte hash or as a number.
    let start_block = {
        if data.at(0)?.size() == 32 {
            HashOrNumber::Hash(data.val_at(0)?)
        } else {
            HashOrNumber::Number(data.val_at(0)?)
        }
    };

    let req = request::Headers {
        start: start_block,
        max: ::std::cmp::min(MAX_HEADERS, data.val_at(1)?),
        skip: data.val_at(2)?,
        reverse: data.val_at(3)?,
    };

    let max_cost = peer.deduct_max(&self.flow_params, request::Kind::Headers, req.max)?;

    let response = self.provider.block_headers(req);
    let actual_cost = self.flow_params.compute_cost(request::Kind::Headers, response.len());
    assert!(max_cost >= actual_cost, "Actual cost exceeded maximum computed cost.");

    // refund the difference between the worst-case and actual cost.
    let cur_buffer = peer.refund(&self.flow_params, max_cost - actual_cost);
    io.respond(packet::BLOCK_HEADERS, {
        let mut stream = RlpStream::new_list(3);
        stream.append(&req_id).append(&cur_buffer).begin_list(response.len());

        for header in response {
            stream.append_raw(&header.into_inner(), 1);
        }

        stream.out()
    });

    Ok(())
}
// Receive a response for block headers: pre-verify, extract the raw header
// bytes, and dispatch to handlers.
fn block_headers(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
    let id_guard = self.pre_verify_response(peer, request::Kind::Headers, &raw)?;
    let raw_headers: Vec<_> = raw.at(2)?.iter().map(|x| x.as_raw().to_owned()).collect();

    // decoding succeeded: mark the request fulfilled rather than failed.
    let req_id = id_guard.defuse();
    for handler in &self.handlers {
        handler.on_block_headers(&Ctx {
            peer: *peer,
            io: io,
            proto: self,
        }, req_id, &raw_headers);
    }

    Ok(())
}
// Handle a request for block bodies: charge up front, serve, refund, and
// respond with [req_id, cur_buffer, [body-or-empty...]].
fn get_block_bodies(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
    const MAX_BODIES: usize = 256;

    let peers = self.peers.read();
    let peer = match peers.get(peer) {
        Some(peer) => peer,
        None => {
            debug!(target: "les", "Ignoring request from unknown peer");
            return Ok(())
        }
    };
    let mut peer = peer.lock();

    let req_id: u64 = data.val_at(0)?;

    let req = request::Bodies {
        block_hashes: data.at(1)?.iter()
            .take(MAX_BODIES)
            .map(|x| x.as_val())
            .collect::<Result<_, _>>()?
    };

    let max_cost = peer.deduct_max(&self.flow_params, request::Kind::Bodies, req.block_hashes.len())?;

    let response = self.provider.block_bodies(req);
    // only bodies actually found count toward the real cost.
    let response_len = response.iter().filter(|x| x.is_some()).count();
    let actual_cost = self.flow_params.compute_cost(request::Kind::Bodies, response_len);
    assert!(max_cost >= actual_cost, "Actual cost exceeded maximum computed cost.");

    let cur_buffer = peer.refund(&self.flow_params, max_cost - actual_cost);
    io.respond(packet::BLOCK_BODIES, {
        let mut stream = RlpStream::new_list(3);
        stream.append(&req_id).append(&cur_buffer).begin_list(response.len());

        for body in response {
            match body {
                Some(body) => stream.append_raw(&body.into_inner(), 1),
                None => stream.append_empty_data(),
            };
        }

        stream.out()
    });

    Ok(())
}
// Receive a response for block bodies: pre-verify, extract raw body bytes,
// and dispatch to handlers.
fn block_bodies(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
    let id_guard = self.pre_verify_response(peer, request::Kind::Bodies, &raw)?;
    let raw_bodies: Vec<Bytes> = raw.at(2)?.iter().map(|x| x.as_raw().to_owned()).collect();

    // decoding succeeded: mark the request fulfilled rather than failed.
    let req_id = id_guard.defuse();
    for handler in &self.handlers {
        handler.on_block_bodies(&Ctx {
            peer: *peer,
            io: io,
            proto: self,
        }, req_id, &raw_bodies);
    }

    Ok(())
}
// Handle a request for receipts: charge up front, serve, refund, and
// respond with [req_id, cur_buffer, [receipts...]].
fn get_receipts(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
    const MAX_RECEIPTS: usize = 256;

    let peers = self.peers.read();
    let peer = match peers.get(peer) {
        Some(peer) => peer,
        None => {
            debug!(target: "les", "Ignoring request from unknown peer");
            return Ok(())
        }
    };
    let mut peer = peer.lock();

    let req_id: u64 = data.val_at(0)?;

    let req = request::Receipts {
        block_hashes: data.at(1)?.iter()
            .take(MAX_RECEIPTS)
            .map(|x| x.as_val())
            .collect::<Result<_,_>>()?
    };

    let max_cost = peer.deduct_max(&self.flow_params, request::Kind::Receipts, req.block_hashes.len())?;

    let response = self.provider.receipts(req);
    // empty-list entries (nothing found for that hash) don't count toward cost.
    let response_len = response.iter().filter(|x| &x[..] != &::rlp::EMPTY_LIST_RLP).count();
    let actual_cost = self.flow_params.compute_cost(request::Kind::Receipts, response_len);
    assert!(max_cost >= actual_cost, "Actual cost exceeded maximum computed cost.");

    let cur_buffer = peer.refund(&self.flow_params, max_cost - actual_cost);
    io.respond(packet::RECEIPTS, {
        let mut stream = RlpStream::new_list(3);
        stream.append(&req_id).append(&cur_buffer).begin_list(response.len());

        for receipts in response {
            stream.append_raw(&receipts, 1);
        }

        stream.out()
    });

    Ok(())
}
// Receive a response for receipts: pre-verify, decode receipt lists, and
// dispatch to handlers.
fn receipts(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
    let id_guard = self.pre_verify_response(peer, request::Kind::Receipts, &raw)?;
    let raw_receipts: Vec<Vec<Receipt>> = raw.at(2)?
        .iter()
        .map(|x| x.as_val())
        .collect::<Result<_,_>>()?;

    // decoding succeeded: mark the request fulfilled rather than failed.
    let req_id = id_guard.defuse();
    for handler in &self.handlers {
        handler.on_receipts(&Ctx {
            peer: *peer,
            io: io,
            proto: self,
        }, req_id, &raw_receipts);
    }

    Ok(())
}
// Handle a request for state proofs: charge up front, serve, refund, and
// respond with [req_id, cur_buffer, [proof...]].
fn get_proofs(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
    const MAX_PROOFS: usize = 128;

    let peers = self.peers.read();
    let peer = match peers.get(peer) {
        Some(peer) => peer,
        None => {
            debug!(target: "les", "Ignoring request from unknown peer");
            return Ok(())
        }
    };
    let mut peer = peer.lock();

    let req_id: u64 = data.val_at(0)?;

    let req = {
        let requests: Result<Vec<_>, Error> = data.at(1)?.iter().take(MAX_PROOFS).map(|x| {
            Ok(request::StateProof {
                block: x.val_at(0)?,
                key1: x.val_at(1)?,
                // key2 is optional on the wire: an empty item means absent.
                key2: if x.at(2)?.is_empty() { None } else { Some(x.val_at(2)?) },
                from_level: x.val_at(3)?,
            })
        }).collect();

        request::StateProofs {
            requests: requests?,
        }
    };

    let max_cost = peer.deduct_max(&self.flow_params, request::Kind::StateProofs, req.requests.len())?;

    let response = self.provider.proofs(req);
    // empty-list entries (no proof produced) don't count toward cost.
    let response_len = response.iter().filter(|x| &x[..] != &::rlp::EMPTY_LIST_RLP).count();
    let actual_cost = self.flow_params.compute_cost(request::Kind::StateProofs, response_len);
    assert!(max_cost >= actual_cost, "Actual cost exceeded maximum computed cost.");

    let cur_buffer = peer.refund(&self.flow_params, max_cost - actual_cost);

    io.respond(packet::PROOFS, {
        let mut stream = RlpStream::new_list(3);
        stream.append(&req_id).append(&cur_buffer).begin_list(response.len());

        for proof in response {
            stream.append_raw(&proof, 1);
        }

        stream.out()
    });

    Ok(())
}
// Receive a response for state proofs: pre-verify, extract each proof as a
// list of raw trie-node byte strings, and dispatch to handlers.
fn proofs(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
    let id_guard = self.pre_verify_response(peer, request::Kind::StateProofs, &raw)?;

    let raw_proofs: Vec<Vec<Bytes>> = raw.at(2)?.iter()
        .map(|x| x.iter().map(|node| node.as_raw().to_owned()).collect())
        .collect();

    // decoding succeeded: mark the request fulfilled rather than failed.
    let req_id = id_guard.defuse();
    for handler in &self.handlers {
        handler.on_state_proofs(&Ctx {
            peer: *peer,
            io: io,
            proto: self,
        }, req_id, &raw_proofs);
    }

    Ok(())
}
// Handle a request for contract code: charge up front, serve, refund, and
// respond with [req_id, cur_buffer, [code...]].
fn get_contract_code(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
    const MAX_CODES: usize = 256;

    let peers = self.peers.read();
    let peer = match peers.get(peer) {
        Some(peer) => peer,
        None => {
            debug!(target: "les", "Ignoring request from unknown peer");
            return Ok(())
        }
    };
    let mut peer = peer.lock();

    let req_id: u64 = data.val_at(0)?;

    let req = {
        let requests: Result<Vec<_>, Error> = data.at(1)?.iter().take(MAX_CODES).map(|x| {
            Ok(request::ContractCode {
                block_hash: x.val_at(0)?,
                account_key: x.val_at(1)?,
            })
        }).collect();

        request::ContractCodes {
            code_requests: requests?,
        }
    };

    let max_cost = peer.deduct_max(&self.flow_params, request::Kind::Codes, req.code_requests.len())?;

    let response = self.provider.contract_codes(req);
    // empty byte strings (code not found) don't count toward cost.
    let response_len = response.iter().filter(|x| !x.is_empty()).count();
    let actual_cost = self.flow_params.compute_cost(request::Kind::Codes, response_len);
    assert!(max_cost >= actual_cost, "Actual cost exceeded maximum computed cost.");

    let cur_buffer = peer.refund(&self.flow_params, max_cost - actual_cost);

    io.respond(packet::CONTRACT_CODES, {
        let mut stream = RlpStream::new_list(3);
        stream.append(&req_id).append(&cur_buffer).begin_list(response.len());

        for code in response {
            stream.append(&code);
        }

        stream.out()
    });

    Ok(())
}
// Receive a response for contract code.
fn contract_code(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
	let id_guard = self.pre_verify_response(peer, request::Kind::Codes, &raw)?;

	// Item 2 holds one code blob per requested account; bail out on the
	// first malformed entry (before defusing the request-id guard).
	let mut codes: Vec<Bytes> = Vec::new();
	for item in raw.at(2)?.iter() {
		codes.push(item.as_val()?);
	}

	// Decoding succeeded: mark the pending request id as answered.
	let req_id = id_guard.defuse();
	for handler in self.handlers.iter() {
		let ctx = Ctx {
			peer: *peer,
			io: io,
			proto: self,
		};
		handler.on_code(&ctx, req_id, &codes);
	}

	Ok(())
}
// Handle a request for header proofs
fn get_header_proofs(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
	// Cap on the number of proofs served from a single packet.
	const MAX_PROOFS: usize = 256;

	let peers = self.peers.read();
	let peer = match peers.get(peer) {
		Some(peer) => peer,
		None => {
			debug!(target: "les", "Ignoring request from unknown peer");
			return Ok(())
		}
	};
	let mut peer = peer.lock();

	let req_id: u64 = data.val_at(0)?;

	// Decode up to MAX_PROOFS (cht_number, block_number, from_level) triples.
	let req = {
		let requests: Result<Vec<_>, Error> = data.at(1)?.iter().take(MAX_PROOFS).map(|x| {
			Ok(request::HeaderProof {
				cht_number: x.val_at(0)?,
				block_number: x.val_at(1)?,
				from_level: x.val_at(2)?,
			})
		}).collect();

		request::HeaderProofs {
			requests: requests?,
		}
	};

	// Flow control: charge up-front for the worst case, then refund based
	// on how many proofs were actually produced (empty lists are misses).
	let max_cost = peer.deduct_max(&self.flow_params, request::Kind::HeaderProofs, req.requests.len())?;

	let response = self.provider.header_proofs(req);
	// Compare slice-to-reference, consistent with the StateProofs handler.
	let response_len = response.iter().filter(|x| &x[..] != &::rlp::EMPTY_LIST_RLP).count();
	let actual_cost = self.flow_params.compute_cost(request::Kind::HeaderProofs, response_len);
	assert!(max_cost >= actual_cost, "Actual cost exceeded maximum computed cost.");

	let cur_buffer = peer.refund(&self.flow_params, max_cost - actual_cost);

	// Response layout: [req_id, cur_buffer, [proof, ...]].
	io.respond(packet::HEADER_PROOFS, {
		let mut stream = RlpStream::new_list(3);
		stream.append(&req_id).append(&cur_buffer).begin_list(response.len());

		for proof in response {
			stream.append_raw(&proof, 1);
		}

		stream.out()
	});

	Ok(())
}
// Receive a response for header proofs
fn header_proofs(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
	// Each entry decodes to (value, proof nodes) — item 0 is an opaque
	// byte payload, item 1 a list of raw RLP trie nodes.
	fn decode_res(raw: UntrustedRlp) -> Result<(Bytes, Vec<Bytes>), ::rlp::DecoderError> {
		Ok((
			raw.val_at(0)?,
			raw.at(1)?.iter().map(|x| x.as_raw().to_owned()).collect(),
		))
	}

	let id_guard = self.pre_verify_response(peer, request::Kind::HeaderProofs, &raw)?;
	let raw_proofs: Vec<_> = raw.at(2)?.iter()
		.map(decode_res)
		.collect::<Result<_,_>>()?;

	// Decoding succeeded: mark the pending request id as answered.
	let req_id = id_guard.defuse();
	for handler in &self.handlers {
		handler.on_header_proofs(&Ctx {
			peer: *peer,
			io: io,
			proto: self,
		}, req_id, &raw_proofs);
	}

	Ok(())
}
// Receive a set of transactions to relay.
fn relay_transactions(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
	const MAX_TRANSACTIONS: usize = 256;

	// Decode at most MAX_TRANSACTIONS entries, failing the whole packet
	// on the first malformed transaction.
	let mut txs: Vec<UnverifiedTransaction> = Vec::new();
	for item in data.iter().take(MAX_TRANSACTIONS) {
		txs.push(item.as_val()?);
	}

	debug!(target: "les", "Received {} transactions to relay from peer {}", txs.len(), peer);

	for handler in self.handlers.iter() {
		let ctx = Ctx {
			peer: *peer,
			io: io,
			proto: self,
		};
		handler.on_transactions(&ctx, &txs);
	}

	Ok(())
}
}
// if something went wrong, figure out how much to punish the peer.
fn punish(peer: PeerId, io: &IoContext, e: Error) {
	match e.punishment() {
		Punishment::Disconnect => {
			debug!(target: "les", "Disconnecting peer {}: {}", peer, e);
			io.disconnect_peer(peer)
		}
		Punishment::Disable => {
			debug!(target: "les", "Disabling peer {}: {}", peer, e);
			io.disable_peer(peer)
		}
		// Benign errors warrant no action against the peer.
		Punishment::None => {}
	}
}
// Glue between the network layer and the light protocol: each callback
// forwards to the corresponding LightProtocol entry point.
impl NetworkProtocolHandler for LightProtocol {
	fn initialize(&self, io: &NetworkContext) {
		// Register the two recurring timers: request timeout checks and
		// the periodic handler tick (dispatched in `timeout` below).
		io.register_timer(TIMEOUT, TIMEOUT_INTERVAL_MS)
			.expect("Error registering sync timer.");
		io.register_timer(TICK_TIMEOUT, TICK_TIMEOUT_INTERVAL_MS)
			.expect("Error registering sync timer.");
	}

	fn read(&self, io: &NetworkContext, peer: &PeerId, packet_id: u8, data: &[u8]) {
		self.handle_packet(io, peer, packet_id, data);
	}

	fn connected(&self, io: &NetworkContext, peer: &PeerId) {
		self.on_connect(peer, io);
	}

	fn disconnected(&self, io: &NetworkContext, peer: &PeerId) {
		self.on_disconnect(*peer, io);
	}

	fn timeout(&self, io: &NetworkContext, timer: TimerToken) {
		// Dispatch on the timer tokens registered in `initialize`.
		match timer {
			TIMEOUT => self.timeout_check(io),
			TICK_TIMEOUT => self.tick_handlers(io),
			_ => warn!(target: "les", "received timeout on unknown token {}", timer),
		}
	}
}
// Helper for encoding the request to RLP with the given ID.
fn encode_request(req: &Request, req_id: usize) -> Vec<u8> {
match *req {
Request::Headers(ref headers) => {
let mut stream = RlpStream::new_list(2);
stream.append(&req_id).begin_list(4);
match headers.start {
HashOrNumber::Hash(ref hash) => stream.append(hash),
HashOrNumber::Number(ref num) => stream.append(num),
};
stream
.append(&headers.max)
.append(&headers.skip)
.append(&headers.reverse);
stream.out()
}
Request::Bodies(ref request) => {
let mut stream = RlpStream::new_list(2);
stream.append(&req_id).begin_list(request.block_hashes.len());
for hash in &request.block_hashes {
stream.append(hash);
}
stream.out()
}
Request::Receipts(ref request) => {
let mut stream = RlpStream::new_list(2);
stream.append(&req_id).begin_list(request.block_hashes.len());
for hash in &request.block_hashes {
stream.append(hash);
}
stream.out()
}
Request::StateProofs(ref request) => {
let mut stream = RlpStream::new_list(2);
stream.append(&req_id).begin_list(request.requests.len());
for proof_req in &request.requests {
stream.begin_list(4)
.append(&proof_req.block)
.append(&proof_req.key1);
match proof_req.key2 {
Some(ref key2) => stream.append(key2),
None => stream.append_empty_data(),
};
stream.append(&proof_req.from_level);
}
stream.out()
}
Request::Codes(ref request) => {
let mut stream = RlpStream::new_list(2);
stream.append(&req_id).begin_list(request.code_requests.len());
for code_req in &request.code_requests {
stream.begin_list(2)
.append(&code_req.block_hash)
.append(&code_req.account_key);
}
stream.out()
}
Request::HeaderProofs(ref request) => {
let mut stream = RlpStream::new_list(2);
stream.append(&req_id).begin_list(request.requests.len());
for proof_req in &request.requests {
stream.begin_list(3)
.append(&proof_req.cht_number)
.append(&proof_req.block_number)
.append(&proof_req.from_level);
}
stream.out()<|fim▁hole|> }
}<|fim▁end|>
|
}
|
<|file_name|>quickstart.js<|end_file_name|><|fim▁begin|>//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ** This file is automatically generated by gapic-generator-typescript. **
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
'use strict';
const path = require('path');
const {assert} = require('chai');
const cp = require('child_process');
const {describe, it} = require('mocha');
const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
const cwd = path.join(__dirname, '..');
const project = process.env.GCLOUD_PROJECT;
describe('Quickstart', () => {
it('should run quickstart', async () => {
const output = execSync(`node ./quickstart.js ${project} us-west1-b`, {
cwd,
});
assert.match(output, /instance: projects/);<|fim▁hole|>});<|fim▁end|>
|
});
|
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "veterinario.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()<|fim▁end|>
|
"""
WSGI config for veterinario project.
It exposes the WSGI callable as a module-level variable named ``application``.
|
<|file_name|>sftp_input_service_tests.py<|end_file_name|><|fim▁begin|>import unittest
import json
from bitmovin import Bitmovin, Response, SFTPInput
from bitmovin.errors import BitmovinApiError
from tests.bitmovin import BitmovinTestCase
class SFTPInputTests(BitmovinTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
@classmethod
def tearDownClass(cls):
super().tearDownClass()
def setUp(self):
super().setUp()
self.bitmovin = Bitmovin(self.api_key)
self.assertIsNotNone(self.bitmovin)
self.assertTrue(isinstance(self.bitmovin, Bitmovin))
def tearDown(self):
super().tearDown()
def test_create_sftp_input(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
input_resource_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(input_resource_response)
self.assertIsNotNone(input_resource_response.resource)
self.assertIsNotNone(input_resource_response.resource.id)
self._compare_sftp_inputs(sample_input, input_resource_response.resource)
def test_create_sftp_input_without_name(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
sample_input.name = None
input_resource_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(input_resource_response)
self.assertIsNotNone(input_resource_response.resource)
self.assertIsNotNone(input_resource_response.resource.id)
self._compare_sftp_inputs(sample_input, input_resource_response.resource)
def test_create_sftp_input_custom(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
sample_input.port = 9921
input_resource_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(input_resource_response)
self.assertIsNotNone(input_resource_response.resource)
self.assertIsNotNone(input_resource_response.resource.id)
self._compare_sftp_inputs(sample_input, input_resource_response.resource)
self.assertEqual(sample_input.port, input_resource_response.resource.port)
def test_retrieve_sftp_input(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
created_input_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(created_input_response)
self.assertIsNotNone(created_input_response.resource)
self.assertIsNotNone(created_input_response.resource.id)
self._compare_sftp_inputs(sample_input, created_input_response.resource)
retrieved_input_response = self.bitmovin.inputs.SFTP.retrieve(created_input_response.resource.id)
self.assertIsNotNone(retrieved_input_response)
self.assertIsNotNone(retrieved_input_response.resource)
self._compare_sftp_inputs(created_input_response.resource, retrieved_input_response.resource)
def test_delete_sftp_input(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
created_input_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(created_input_response)
self.assertIsNotNone(created_input_response.resource)
self.assertIsNotNone(created_input_response.resource.id)
self._compare_sftp_inputs(sample_input, created_input_response.resource)
deleted_minimal_resource = self.bitmovin.inputs.SFTP.delete(created_input_response.resource.id)
self.assertIsNotNone(deleted_minimal_resource)
self.assertIsNotNone(deleted_minimal_resource.resource)
self.assertIsNotNone(deleted_minimal_resource.resource.id)
try:
self.bitmovin.inputs.SFTP.retrieve(created_input_response.resource.id)
self.fail(
'Previous statement should have thrown an exception. ' +
'Retrieving input after deleting it shouldn\'t be possible.'
)
except BitmovinApiError:
pass
def test_list_sftp_inputs(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
created_input_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(created_input_response)
self.assertIsNotNone(created_input_response.resource)
self.assertIsNotNone(created_input_response.resource.id)
self._compare_sftp_inputs(sample_input, created_input_response.resource)
inputs = self.bitmovin.inputs.SFTP.list()
self.assertIsNotNone(inputs)
self.assertIsNotNone(inputs.resource)
self.assertIsNotNone(inputs.response)
self.assertIsInstance(inputs.resource, list)<|fim▁hole|>
def test_retrieve_sftp_input_custom_data(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
sample_input.customData = '<pre>my custom data</pre>'
created_input_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(created_input_response)
self.assertIsNotNone(created_input_response.resource)
self.assertIsNotNone(created_input_response.resource.id)
self._compare_sftp_inputs(sample_input, created_input_response.resource)
custom_data_response = self.bitmovin.inputs.SFTP.retrieve_custom_data(created_input_response.resource.id)
custom_data = custom_data_response.resource
self.assertEqual(sample_input.customData, json.loads(custom_data.customData))
def _compare_sftp_inputs(self, first: SFTPInput, second: SFTPInput):
"""
:param first: SFTPInput
:param second: SFTPInput
:return: bool
"""
self.assertEqual(first.host, second.host)
self.assertEqual(first.name, second.name)
self.assertEqual(first.description, second.description)
#self.assertEqual(first.username, second.username) # issue 574
def _get_sample_sftp_input(self):
sftp_input_settings = self.settings.get('sampleObjects').get('inputs').get('sftp')\
.get('3945fee9-5e0f-48ce-8f3d-d451c0bf1071')
files = sftp_input_settings.get('files')
sftp_input = SFTPInput(
host=sftp_input_settings.get('host'),
username=sftp_input_settings.get('username'),
password=sftp_input_settings.get('password'),
name='Sample SFTP input'
)
self.assertIsNotNone(sftp_input.host)
self.assertIsNotNone(sftp_input.username)
self.assertIsNotNone(sftp_input.password)
return sftp_input, files
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
self.assertIsInstance(inputs.response, Response)
self.assertGreater(inputs.resource.__sizeof__(), 1)
|
<|file_name|>core.py<|end_file_name|><|fim▁begin|>#
# core.py
#
# Copyright (C) 2009 Andrew Resch <[email protected]>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
#
import os
import time
import hashlib
import logging
from subprocess import Popen, PIPE
from deluge.plugins.pluginbase import CorePluginBase
import deluge.component as component
from deluge.configmanager import ConfigManager
from deluge.core.rpcserver import export
from deluge.event import DelugeEvent
log = logging.getLogger(__name__)
DEFAULT_CONFIG = {
"commands": []
}
EXECUTE_ID = 0
EXECUTE_EVENT = 1<|fim▁hole|>EXECUTE_COMMAND = 2
EVENT_MAP = {
"complete": "TorrentFinishedEvent",
"added": "TorrentAddedEvent"
}
class ExecuteCommandAddedEvent(DelugeEvent):
"""
Emitted when a new command is added.
"""
def __init__(self, command_id, event, command):
self._args = [command_id, event, command]
class ExecuteCommandRemovedEvent(DelugeEvent):
"""
Emitted when a command is removed.
"""
def __init__(self, command_id):
self._args = [command_id]
class Core(CorePluginBase):
    def enable(self):
        """Plugin entry point: load config and hook torrent event handlers.

        Registers one handler per distinct event name found in the
        configured commands (see EVENT_MAP for the Deluge event names).
        """
        self.config = ConfigManager("execute.conf", DEFAULT_CONFIG)
        event_manager = component.get("EventManager")
        self.registered_events = {}

        # Go through the commands list and register event handlers
        for command in self.config["commands"]:
            event = command[EXECUTE_EVENT]
            if event in self.registered_events:
                continue

            # Closure factory: binds `event` by value so each handler calls
            # execute_commands with its own event name (avoids the classic
            # late-binding-in-a-loop bug).
            def create_event_handler(event):
                def event_handler(torrent_id):
                    self.execute_commands(torrent_id, event)
                return event_handler
            event_handler = create_event_handler(event)
            event_manager.register_event_handler(EVENT_MAP[event], event_handler)
            self.registered_events[event] = event_handler

        log.debug("Execute core plugin enabled!")
def execute_commands(self, torrent_id, event):
torrent = component.get("TorrentManager").torrents[torrent_id]
info = torrent.get_status(["name", "save_path", "move_on_completed", "move_on_completed_path"])
# Grab the torrent name and save path
torrent_name = info["name"]
if event == "complete":
save_path = info["move_on_completed_path"] if info ["move_on_completed"] else info["save_path"]
else:
save_path = info["save_path"]
log.debug("[execute] Running commands for %s", event)
# Go through and execute all the commands
for command in self.config["commands"]:
if command[EXECUTE_EVENT] == event:
command = os.path.expandvars(command[EXECUTE_COMMAND])
command = os.path.expanduser(command)
log.debug("[execute] running %s", command)
p = Popen([command, torrent_id, torrent_name, save_path], stdin=PIPE, stdout=PIPE, stderr=PIPE)
if p.wait() != 0:
log.warn("Execute command failed with exit code %d", p.returncode)
def disable(self):
self.config.save()
event_manager = component.get("EventManager")
for event, handler in self.registered_events.iteritems():
event_manager.deregister_event_handler(event, handler)
log.debug("Execute core plugin disabled!")
### Exported RPC methods ###
@export
def add_command(self, event, command):
command_id = hashlib.sha1(str(time.time())).hexdigest()
self.config["commands"].append((command_id, event, command))
self.config.save()
component.get("EventManager").emit(ExecuteCommandAddedEvent(command_id, event, command))
@export
def get_commands(self):
return self.config["commands"]
@export
def remove_command(self, command_id):
for command in self.config["commands"]:
if command[EXECUTE_ID] == command_id:
self.config["commands"].remove(command)
component.get("EventManager").emit(ExecuteCommandRemovedEvent(command_id))
break
self.config.save()
@export
def save_command(self, command_id, event, cmd):
for i, command in enumerate(self.config["commands"]):
if command[EXECUTE_ID] == command_id:
self.config["commands"][i] = (command_id, event, cmd)
break
self.config.save()<|fim▁end|>
| |
<|file_name|>match-value-binding-in-guard-3291.rs<|end_file_name|><|fim▁begin|>// run-pass
// pretty-expanded FIXME #23616
#![feature(box_syntax)]
fn foo(x: Option<Box<isize>>, b: bool) -> isize {
match x {
None => { 1 }
Some(ref x) if b => { *x.clone() }
Some(_) => { 0 }
}<|fim▁hole|> foo(Some(box 22), false);
foo(None, true);
foo(None, false);
}<|fim▁end|>
|
}
pub fn main() {
foo(Some(box 22), true);
|
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|># Copyright 2020 The Pigweed Authors<|fim▁hole|># the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Runs the main function in detokenize.py."""
from pw_tokenizer import detokenize
detokenize.main()<|fim▁end|>
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate rust_web_boilerplate;
fn main() {<|fim▁hole|><|fim▁end|>
|
let (rocket, _) = rust_web_boilerplate::rocket_factory();
rocket.launch();
}
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>// TypeScript Version: 2.8
// Generated from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/lodash/scripts/generate-modules.ts
import { zip } from "lodash";
export = zip;<|fim▁end|>
|
// Type definitions for lodash.zip 4.2
// Project: https://lodash.com
// Definitions by: Brian Zengel <https://github.com/bczengel>, Ilya Mochalov <https://github.com/chrootsu>, Stepan Mikhaylyuk <https://github.com/stepancar>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
<|file_name|>WordInfo.java<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.speech.v1.model;<|fim▁hole|> * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Speech-to-Text API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class WordInfo extends com.google.api.client.json.GenericJson {
/**
* The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated greater
* likelihood that the recognized words are correct. This field is set only for the top
* alternative of a non-streaming result or, of a streaming result where `is_final=true`. This
* field is not guaranteed to be accurate and users should not rely on it to be always provided.
* The default of 0.0 is a sentinel value indicating `confidence` was not set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float confidence;
/**
* Time offset relative to the beginning of the audio, and corresponding to the end of the spoken
* word. This field is only set if `enable_word_time_offsets=true` and only in the top hypothesis.
* This is an experimental feature and the accuracy of the time offset can vary.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String endTime;
/**
* Output only. A distinct integer value is assigned for every speaker within the audio. This
* field specifies which one of those speakers was detected to have spoken this word. Value ranges
* from '1' to diarization_speaker_count. speaker_tag is set if enable_speaker_diarization =
* 'true' and only in the top alternative.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer speakerTag;
/**
* Time offset relative to the beginning of the audio, and corresponding to the start of the
* spoken word. This field is only set if `enable_word_time_offsets=true` and only in the top
* hypothesis. This is an experimental feature and the accuracy of the time offset can vary.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String startTime;
/**
* The word corresponding to this set of information.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String word;
/**
* The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated greater
* likelihood that the recognized words are correct. This field is set only for the top
* alternative of a non-streaming result or, of a streaming result where `is_final=true`. This
* field is not guaranteed to be accurate and users should not rely on it to be always provided.
* The default of 0.0 is a sentinel value indicating `confidence` was not set.
* @return value or {@code null} for none
*/
public java.lang.Float getConfidence() {
return confidence;
}
/**
* The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated greater
* likelihood that the recognized words are correct. This field is set only for the top
* alternative of a non-streaming result or, of a streaming result where `is_final=true`. This
* field is not guaranteed to be accurate and users should not rely on it to be always provided.
* The default of 0.0 is a sentinel value indicating `confidence` was not set.
* @param confidence confidence or {@code null} for none
*/
public WordInfo setConfidence(java.lang.Float confidence) {
this.confidence = confidence;
return this;
}
/**
* Time offset relative to the beginning of the audio, and corresponding to the end of the spoken
* word. This field is only set if `enable_word_time_offsets=true` and only in the top hypothesis.
* This is an experimental feature and the accuracy of the time offset can vary.
* @return value or {@code null} for none
*/
public String getEndTime() {
return endTime;
}
/**
* Time offset relative to the beginning of the audio, and corresponding to the end of the spoken
* word. This field is only set if `enable_word_time_offsets=true` and only in the top hypothesis.
* This is an experimental feature and the accuracy of the time offset can vary.
* @param endTime endTime or {@code null} for none
*/
public WordInfo setEndTime(String endTime) {
this.endTime = endTime;
return this;
}
/**
* Output only. A distinct integer value is assigned for every speaker within the audio. This
* field specifies which one of those speakers was detected to have spoken this word. Value ranges
* from '1' to diarization_speaker_count. speaker_tag is set if enable_speaker_diarization =
* 'true' and only in the top alternative.
* @return value or {@code null} for none
*/
public java.lang.Integer getSpeakerTag() {
return speakerTag;
}
/**
* Output only. A distinct integer value is assigned for every speaker within the audio. This
* field specifies which one of those speakers was detected to have spoken this word. Value ranges
* from '1' to diarization_speaker_count. speaker_tag is set if enable_speaker_diarization =
* 'true' and only in the top alternative.
* @param speakerTag speakerTag or {@code null} for none
*/
public WordInfo setSpeakerTag(java.lang.Integer speakerTag) {
this.speakerTag = speakerTag;
return this;
}
/**
* Time offset relative to the beginning of the audio, and corresponding to the start of the
* spoken word. This field is only set if `enable_word_time_offsets=true` and only in the top
* hypothesis. This is an experimental feature and the accuracy of the time offset can vary.
* @return value or {@code null} for none
*/
public String getStartTime() {
return startTime;
}
/**
* Time offset relative to the beginning of the audio, and corresponding to the start of the
* spoken word. This field is only set if `enable_word_time_offsets=true` and only in the top
* hypothesis. This is an experimental feature and the accuracy of the time offset can vary.
* @param startTime startTime or {@code null} for none
*/
public WordInfo setStartTime(String startTime) {
this.startTime = startTime;
return this;
}
/**
* The word corresponding to this set of information.
* @return value or {@code null} for none
*/
public java.lang.String getWord() {
return word;
}
/**
* The word corresponding to this set of information.
* @param word word or {@code null} for none
*/
public WordInfo setWord(java.lang.String word) {
this.word = word;
return this;
}
@Override
public WordInfo set(String fieldName, Object value) {
return (WordInfo) super.set(fieldName, value);
}
@Override
public WordInfo clone() {
return (WordInfo) super.clone();
}
}<|fim▁end|>
|
/**
* Word-specific information for recognized words.
*
|
<|file_name|>mod.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division
from sympy.core.numbers import nan
from .function import Function
class Mod(Function):
"""Represents a modulo operation on symbolic expressions.
Receives two arguments, dividend p and divisor q.
The convention used is the same as Python's: the remainder always has the
same sign as the divisor.
Examples
========
>>> from sympy.abc import x, y
>>> x**2 % y
Mod(x**2, y)
>>> _.subs({x: 5, y: 6})
1
"""
@classmethod
def eval(cls, p, q):
from sympy.core.add import Add
from sympy.core.mul import Mul
from sympy.core.singleton import S
from sympy.core.exprtools import gcd_terms
from sympy.polys.polytools import gcd
def doit(p, q):
"""Try to return p % q if both are numbers or +/-p is known
to be less than or equal q.
"""
if p.is_infinite or q.is_infinite or p is nan or q is nan:
return nan
if (p == q or p == -q or
p.is_Pow and p.exp.is_Integer and p.base == q or
p.is_integer and q == 1):
return S.Zero
if q.is_Number:
if p.is_Number:
return (p % q)
if q == 2:
if p.is_even:
return S.Zero
elif p.is_odd:
return S.One
# by ratio
r = p/q
try:
d = int(r)
except TypeError:
pass
else:
if type(d) is int:
rv = p - d*q
if (rv*q < 0) == True:
rv += q
return rv
# by difference
d = p - q
if d.is_negative:
if q.is_negative:
return d
elif q.is_positive:
return p
rv = doit(p, q)
if rv is not None:
return rv
# denest
if p.func is cls:
# easy
qinner = p.args[1]
if qinner == q:
return p
# XXX other possibilities?
# extract gcd; any further simplification should be done by the user
G = gcd(p, q)<|fim▁hole|> pwas, qwas = p, q
# simplify terms
# (x + y + 2) % x -> Mod(y + 2, x)
if p.is_Add:
args = []
for i in p.args:
a = cls(i, q)
if a.count(cls) > i.count(cls):
args.append(i)
else:
args.append(a)
if args != list(p.args):
p = Add(*args)
else:
# handle coefficients if they are not Rational
# since those are not handled by factor_terms
# e.g. Mod(.6*x, .3*y) -> 0.3*Mod(2*x, y)
cp, p = p.as_coeff_Mul()
cq, q = q.as_coeff_Mul()
ok = False
if not cp.is_Rational or not cq.is_Rational:
r = cp % cq
if r == 0:
G *= cq
p *= int(cp/cq)
ok = True
if not ok:
p = cp*p
q = cq*q
# simple -1 extraction
if p.could_extract_minus_sign() and q.could_extract_minus_sign():
G, p, q = [-i for i in (G, p, q)]
# check again to see if p and q can now be handled as numbers
rv = doit(p, q)
if rv is not None:
return rv*G
# put 1.0 from G on inside
if G.is_Float and G == 1:
p *= G
return cls(p, q, evaluate=False)
elif G.is_Mul and G.args[0].is_Float and G.args[0] == 1:
p = G.args[0]*p
G = Mul._from_args(G.args[1:])
return G*cls(p, q, evaluate=(p, q) != (pwas, qwas))
def _eval_is_integer(self):
from sympy.core.logic import fuzzy_and, fuzzy_not
p, q = self.args
if fuzzy_and([p.is_integer, q.is_integer, fuzzy_not(q.is_zero)]):
return True
def _eval_is_nonnegative(self):
if self.args[1].is_positive:
return True
def _eval_is_nonpositive(self):
if self.args[1].is_negative:
return True<|fim▁end|>
|
if G != 1:
p, q = [
gcd_terms(i/G, clear=False, fraction=False) for i in (p, q)]
|
<|file_name|>assign27.09.py<|end_file_name|><|fim▁begin|>n=int(input('Enter any number: '))
if n%2!=0:
n=n+1
<|fim▁hole|> else:
print(' ',end='')
print()<|fim▁end|>
|
for i in range(n):
for j in range(n):
if (i==int(n/2)) or j==int(n/2) or ((i==0)and (j>=int(n/2))) or ((j==0)and (i<=int(n/2))) or ((j==n-1)and (i>=int(n/2))) or ((i==n-1)and (j<=int(n/2))):
print('*',end='')
|
<|file_name|>article.rs<|end_file_name|><|fim▁begin|>use chrono::naive::NaiveDateTime;
use serde::{Deserialize, Serialize};
use validator::Validate;
<|fim▁hole|>pub struct Article {
pub id: i32,
pub author_id: i32,
pub in_reply_to: Option<String>,
pub title: String,
pub slug: String,
pub guid: String,
pub article_format: String,
pub excerpt: Option<String>,
pub body: String,
pub published: bool,
pub inserted_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
pub posse: bool,
pub lang: String,
}
#[derive(Deserialize, Serialize, Debug, Insertable, Clone, Validate, Default)]
#[table_name = "articles"]
pub struct NewArticle {
pub author_id: Option<i32>,
pub in_reply_to: Option<String>,
#[validate(length(min = 3, max = 255))]
pub title: String,
#[validate(length(min = 3, max = 255))]
pub slug: String,
pub guid: Option<String>,
pub article_format: Option<String>,
pub excerpt: Option<String>,
#[validate(length(min = 3, max = 255))]
pub body: String,
#[serde(default)]
pub published: bool,
#[serde(default)]
pub posse: bool,
#[validate(length(min = 2, max = 2))]
pub lang: String,
pub inserted_at: Option<NaiveDateTime>,
pub updated_at: Option<NaiveDateTime>,
}<|fim▁end|>
|
use crate::schema::articles;
#[derive(Debug, Clone, Queryable, Insertable, Serialize, Deserialize)]
|
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-<|fim▁hole|>
def runtests():
    """Run every tst_*.py script in the current directory.

    Returns 0 if all scripts exit successfully, 1 if any failed
    (suitable for use as a process exit status).
    """
    fail = 0
    for test in os.listdir("."):
        if test.startswith("tst_") and test.endswith(".py"):
            # A non-zero exit status marks the script as failed.
            if subprocess.call(["./" + test]) != 0:
                fail += 1
                # Parenthesized single-argument print behaves identically
                # under Python 2 and Python 3 (the original used the
                # Python-2-only statement form `print test, "failed!"`).
                print("%s failed!" % test)
    if not fail:
        return 0
    return 1
if __name__ == "__main__":
    # Propagate the aggregate test result as the process exit code.
    # (Also removes stray "<|fim - end|>"-style extraction residue that was
    # fused onto this line.)
    sys.exit(runtests())
|
import sys
import os
import subprocess
|
<|file_name|>echoserver.py<|end_file_name|><|fim▁begin|>import argparse
import asyncio
import gc
import os.path
import pathlib
import socket
import ssl
PRINT = 0
async def echo_server(loop, address, unix):
    """Accept connections forever on *address*, spawning one echo_client
    task per peer.  *unix* selects AF_UNIX over AF_INET."""
    family = socket.AF_UNIX if unix else socket.AF_INET
    server_sock = socket.socket(family, socket.SOCK_STREAM)
    server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_sock.bind(address)
    server_sock.listen(5)
    # Non-blocking is required for loop.sock_accept().
    server_sock.setblocking(False)
    if PRINT:
        print('Server listening at', address)
    with server_sock:
        while True:
            client, addr = await loop.sock_accept(server_sock)
            if PRINT:
                print('Connection from', addr)
            loop.create_task(echo_client(loop, client))
async def echo_client(loop, client):
    """Echo everything received on *client* back to it until EOF."""
    # Best effort: disable Nagle's algorithm.  Unix sockets raise OSError
    # here; NameError guards exotic platforms lacking the constants.
    try:
        client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    except (OSError, NameError):
        pass
    with client:
        while True:
            chunk = await loop.sock_recv(client, 1000000)
            if not chunk:
                # Empty read means the peer shut down its side.
                break
            await loop.sock_sendall(client, chunk)
    if PRINT:
        print('Connection closed')
async def echo_client_streams(reader, writer):
    """Streams-API echo handler: copy *reader* to *writer* until EOF."""
    sock = writer.get_extra_info('socket')
    # Best effort: disable Nagle's algorithm (fails on Unix sockets).
    try:
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    except (OSError, NameError):
        pass
    if PRINT:
        print('Connection from', sock.getpeername())
    while True:
        chunk = await reader.read(1000000)
        if not chunk:
            break
        writer.write(chunk)
    if PRINT:
        print('Connection closed')
    writer.close()
class EchoProtocol(asyncio.Protocol):
    """Callback-style protocol that writes every received chunk straight back."""
    def connection_made(self, transport):
        # Keep the transport so data_received can write the echo.
        self.transport = transport
    def connection_lost(self, exc):
        # Drop the reference; the transport is no longer usable.
        self.transport = None
    def data_received(self, data):
        self.transport.write(data)
class EchoBufferedProtocol(asyncio.BufferedProtocol):
    """Buffered-protocol echo: asyncio reads directly into our preallocated
    buffer, avoiding one copy on the receive path."""
    def connection_made(self, transport):
        self.transport = transport
        # Here the buffer is intended to be copied, so that the outgoing buffer
        # won't be wrongly updated by next read
        self.buffer = bytearray(256 * 1024)
    def connection_lost(self, exc):
        self.transport = None
    def get_buffer(self, sizehint):
        # sizehint is ignored; asyncio reports how much it filled via
        # buffer_updated(nbytes).
        return self.buffer
    def buffer_updated(self, nbytes):
        # The slice copies the received bytes before handing them to the
        # transport (see the note in connection_made).
        self.transport.write(self.buffer[:nbytes])
async def print_debug(loop):
    """Every 0.5 s, clear the terminal and dump the loop's debug counters
    (uvloop-only: requires loop.print_debug_info)."""
    clear_screen = chr(27) + "[2J"  # ANSI escape: erase entire display
    while True:
        print(clear_screen)
        loop.print_debug_info()
        await asyncio.sleep(0.5)
if __name__ == '__main__':
    # Benchmark CLI: choose the loop implementation (asyncio vs uvloop),
    # the serving style (streams / protocol / raw sock_* methods), the
    # address (TCP host:port or file:<path> Unix socket) and optional SSL.
    parser = argparse.ArgumentParser()
    parser.add_argument('--uvloop', default=False, action='store_true')
    parser.add_argument('--streams', default=False, action='store_true')
    parser.add_argument('--proto', default=False, action='store_true')
    parser.add_argument('--addr', default='127.0.0.1:25000', type=str)
    parser.add_argument('--print', default=False, action='store_true')
    parser.add_argument('--ssl', default=False, action='store_true')
    parser.add_argument('--buffered', default=False, action='store_true')
    args = parser.parse_args()
    if args.uvloop:
        import uvloop
        loop = uvloop.new_event_loop()
        print('using UVLoop')
    else:
        loop = asyncio.new_event_loop()
        print('using asyncio loop')
    asyncio.set_event_loop(loop)
    loop.set_debug(False)
    # PRINT is the module-level flag read by the connection handlers.
    # When the loop exposes print_debug_info (uvloop), the periodic debug
    # task owns the screen, so per-connection printing is switched back off.
    if args.print:
        PRINT = 1
    if hasattr(loop, 'print_debug_info'):
        loop.create_task(print_debug(loop))
        PRINT = 0
    # 'file:<path>' selects a Unix socket (removing any stale socket file);
    # otherwise parse 'host:port' into a (host, int_port) tuple.
    unix = False
    if args.addr.startswith('file:'):
        unix = True
        addr = args.addr[5:]
        if os.path.exists(addr):
            os.remove(addr)
    else:
        addr = args.addr.split(':')
        addr[1] = int(addr[1])
        addr = tuple(addr)
    print('serving on: {}'.format(addr))
    server_context = None
    if args.ssl:
        print('with SSL')
        # PROTOCOL_TLS replaced PROTOCOL_SSLv23; fall back for old Pythons.
        if hasattr(ssl, 'PROTOCOL_TLS'):
            server_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
        else:
            server_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        # Test certificates shipped in the repository, three levels up.
        server_context.load_cert_chain(
            (pathlib.Path(__file__).parent.parent.parent /
             'tests' / 'certs' / 'ssl_cert.pem'),
            (pathlib.Path(__file__).parent.parent.parent /
             'tests' / 'certs' / 'ssl_key.pem'))
        if hasattr(server_context, 'check_hostname'):
            server_context.check_hostname = False
        server_context.verify_mode = ssl.CERT_NONE
    if args.streams:
        if args.proto:
            print('cannot use --stream and --proto simultaneously')
            exit(1)
        if args.buffered:
            print('cannot use --stream and --buffered simultaneously')
            exit(1)
        print('using asyncio/streams')
        if unix:
            coro = asyncio.start_unix_server(echo_client_streams,
                                             addr,
                                             ssl=server_context)
        else:
            coro = asyncio.start_server(echo_client_streams,
                                        *addr,
                                        ssl=server_context)
        srv = loop.run_until_complete(coro)
    elif args.proto:
        if args.streams:
            print('cannot use --stream and --proto simultaneously')
            exit(1)
        if args.buffered:
            print('using buffered protocol')
            protocol = EchoBufferedProtocol
        else:
            print('using simple protocol')
            protocol = EchoProtocol
        if unix:
            coro = loop.create_unix_server(protocol, addr,
                                           ssl=server_context)
        else:
            coro = loop.create_server(protocol, *addr,
                                      ssl=server_context)
        srv = loop.run_until_complete(coro)
    else:
        # Default mode: raw loop.sock_recv/sock_sendall (no SSL support).
        if args.ssl:
            print('cannot use SSL for loop.sock_* methods')
            exit(1)
        print('using sock_recv/sock_sendall')
        loop.create_task(echo_server(loop, addr, unix))
    try:
        loop.run_forever()
    finally:
        # On shutdown, dump final uvloop debug counters before closing.
        if hasattr(loop, 'print_debug_info'):
            gc.collect()
            print(chr(27) + "[2J")
            loop.print_debug_info()
        loop.close()
| |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>import './init'
import React from 'react'
import ReactDom from 'react-dom'
import Root from './root'
import {APP_THEMES_LIGHT, APP_THEMES_DARK} from 'reducers/settings/constants'
import LocalStorage from 'lib/localStorage'
import {initializeStore} from './redux/store'
import {initServiceWorker} from './swWindow'
// render app
const renderApp = (Component, appRoot, store) => {
initServiceWorker(store)
ReactDom.render(
<Component store={store} />,
appRoot, () => {
// need to make this for feature tests - application ready for testing
window.__isAppReady = true
})
}
// Build the initial store state.  (Reconstructed: the `return` block of
// this function had been displaced further down the file by extraction.)
const prepareStoreData = () => {
  // Stored preference wins; otherwise fall back to the OS-level
  // dark-mode media query, defaulting to the light theme.
  let theme = LocalStorage.getItem('theme')
  if (!theme) {
    if (window.matchMedia('(prefers-color-scheme: dark)')?.matches) {
      theme = APP_THEMES_DARK
    }
  }

  return {
    settings: {
      theme: theme || APP_THEMES_LIGHT
    }
  }
}

// init store and start app
const appRoot = document.getElementById('app-root')
const store = initializeStore(prepareStoreData())
renderApp(Root, appRoot, store)
<|file_name|>range.js<|end_file_name|><|fim▁begin|>import { ChangeDetectorRef, Component, ElementRef, Input, Optional, Renderer, ViewChild, ViewEncapsulation } from '@angular/core';
import { NG_VALUE_ACCESSOR } from '@angular/forms';
import { clamp, isTrueProperty } from '../../util/util';
import { Config } from '../../config/config';
import { DomController } from '../../platform/dom-controller';
import { Form } from '../../util/form';
import { Haptic } from '../../tap-click/haptic';
import { BaseInput } from '../../util/base-input';
import { Item } from '../item/item';
import { Platform } from '../../platform/platform';
import { pointerCoord } from '../../util/dom';
import { UIEventManager } from '../../gestures/ui-event-manager';
/**
* \@name Range
* \@description
* The Range slider lets users select from a range of values by moving
* the slider knob. It can accept dual knobs, but by default one knob
* controls the value of the range.
*
* ### Range Labels
* Labels can be placed on either side of the range by adding the
* `range-left` or `range-right` property to the element. The element
* doesn't have to be an `ion-label`, it can be added to any element
* to place it to the left or right of the range. See [usage](#usage)
* below for examples.
*
*
* ### Minimum and Maximum Values
* Minimum and maximum values can be passed to the range through the `min`
* and `max` properties, respectively. By default, the range sets the `min`
* to `0` and the `max` to `100`.
*
*
* ### Steps and Snaps
* The `step` property specifies the value granularity of the range's value.
* It can be useful to set the `step` when the value isn't in increments of `1`.
* Setting the `step` property will show tick marks on the range for each step.
* The `snaps` property can be set to automatically move the knob to the nearest
* tick mark based on the step property value.
*
*
* ### Dual Knobs
* Setting the `dualKnobs` property to `true` on the range component will
* enable two knobs on the range. If the range has two knobs, the value will
* be an object containing two properties: `lower` and `upper`.
*
*
* \@usage
* ```html
* <ion-list>
* <ion-item>
* <ion-range [(ngModel)]="singleValue" color="danger" pin="true"></ion-range>
* </ion-item>
*
* <ion-item>
* <ion-range min="-200" max="200" [(ngModel)]="saturation" color="secondary">
* <ion-label range-left>-200</ion-label>
* <ion-label range-right>200</ion-label>
* </ion-range>
* </ion-item>
*
* <ion-item>
* <ion-range min="20" max="80" step="2" [(ngModel)]="brightness">
* <ion-icon small range-left name="sunny"></ion-icon>
* <ion-icon range-right name="sunny"></ion-icon>
* </ion-range>
* </ion-item>
*
* <ion-item>
* <ion-label>step=100, snaps, {{singleValue4}}</ion-label>
* <ion-range min="1000" max="2000" step="100" snaps="true" color="secondary" [(ngModel)]="singleValue4"></ion-range>
* </ion-item>
*
* <ion-item>
* <ion-label>dual, step=3, snaps, {{dualValue2 | json}}</ion-label>
* <ion-range dualKnobs="true" [(ngModel)]="dualValue2" min="21" max="72" step="3" snaps="true"></ion-range>
* </ion-item>
* </ion-list>
* ```
*
*
* \@demo /docs/demos/src/range/
*/
export class Range extends BaseInput {
/**
* @param {?} form
* @param {?} _haptic
* @param {?} item
* @param {?} config
* @param {?} _plt
* @param {?} elementRef
* @param {?} renderer
* @param {?} _dom
* @param {?} _cd
*/
constructor(form, _haptic, item, config, _plt, elementRef, renderer, _dom, _cd) {
super(config, elementRef, renderer, 'range', 0, form, item, null);
this._haptic = _haptic;
this._plt = _plt;
this._dom = _dom;
this._cd = _cd;
this._min = 0;
this._max = 100;
this._step = 1;
this._valA = 0;
this._valB = 0;
this._ratioA = 0;
this._ratioB = 0;
this._events = new UIEventManager(_plt);
}
/**
* \@input {number} Minimum integer value of the range. Defaults to `0`.
* @return {?}
*/
get min() {
return this._min;
}
/**
* @param {?} val
* @return {?}
*/
set min(val) {
val = Math.round(val);
if (!isNaN(val)) {
this._min = val;
this._inputUpdated();
}
}
/**
* \@input {number} Maximum integer value of the range. Defaults to `100`.
* @return {?}
*/
get max() {
return this._max;
}
/**
* @param {?} val
* @return {?}
*/
set max(val) {
val = Math.round(val);
if (!isNaN(val)) {
this._max = val;
this._inputUpdated();
}
}
/**
* \@input {number} Specifies the value granularity. Defaults to `1`.
* @return {?}
*/
get step() {
return this._step;
}
/**
* @param {?} val
* @return {?}
*/
set step(val) {
val = Math.round(val);
if (!isNaN(val) && val > 0) {
this._step = val;
}
}
/**
* \@input {boolean} If true, the knob snaps to tick marks evenly spaced based
* on the step property value. Defaults to `false`.
* @return {?}
*/
get snaps() {
return this._snaps;
}
/**
* @param {?} val
* @return {?}
*/
set snaps(val) {
this._snaps = isTrueProperty(val);
}
/**
* \@input {boolean} If true, a pin with integer value is shown when the knob
* is pressed. Defaults to `false`.
* @return {?}
*/
get pin() {
return this._pin;
}
/**
* @param {?} val
* @return {?}
*/
set pin(val) {
this._pin = isTrueProperty(val);
}
/**
* \@input {number} How long, in milliseconds, to wait to trigger the
* `ionChange` event after each change in the range value. Default `0`.
* @return {?}
*/
get debounce() {
return this._debouncer.wait;
}
/**
* @param {?} val
* @return {?}
*/
set debounce(val) {
this._debouncer.wait = val;
}
/**
* \@input {boolean} Show two knobs. Defaults to `false`.
* @return {?}
*/
get dualKnobs() {
return this._dual;
}
/**
* @param {?} val
* @return {?}
*/
set dualKnobs(val) {
this._dual = isTrueProperty(val);
}
/**
* Returns the ratio of the knob's is current location, which is a number
* between `0` and `1`. If two knobs are used, this property represents
* the lower value.
* @return {?}
*/
get ratio() {
if (this._dual) {
return Math.min(this._ratioA, this._ratioB);
}
return this._ratioA;
}
/**
* Returns the ratio of the upper value's is current location, which is
* a number between `0` and `1`. If there is only one knob, then this
* will return `null`.
* @return {?}
*/
get ratioUpper() {
if (this._dual) {
return Math.max(this._ratioA, this._ratioB);
}
return null;
}
/**
* @hidden
* @return {?}
*/
ngAfterContentInit() {
this._initialize();
// add touchstart/mousedown listeners
this._events.pointerEvents({
element: this._slider.nativeElement,
pointerDown: this._pointerDown.bind(this),
pointerMove: this._pointerMove.bind(this),
pointerUp: this._pointerUp.bind(this),
zone: true
});
// build all the ticks if there are any to show
this._createTicks();
}
/**
* \@internal
* @param {?} ev
* @return {?}
*/
_pointerDown(ev) {
// TODO: we could stop listening for events instead of checking this._disabled.
// since there are a lot of events involved, this solution is
// enough for the moment
if (this._disabled) {
return false;
}
// trigger ionFocus event
this._fireFocus();
// prevent default so scrolling does not happen
ev.preventDefault();
ev.stopPropagation();
// get the start coordinates
const /** @type {?} */ current = pointerCoord(ev);
// get the full dimensions of the slider element
const /** @type {?} */ rect = this._rect = this._plt.getElementBoundingClientRect(this._slider.nativeElement);
// figure out which knob they started closer to
const /** @type {?} */ ratio = clamp(0, (current.x - rect.left) / (rect.width), 1);
this._activeB = this._dual && (Math.abs(ratio - this._ratioA) > Math.abs(ratio - this._ratioB));
// update the active knob's position
this._update(current, rect, true);
// trigger a haptic start
this._haptic.gestureSelectionStart();
// return true so the pointer events
// know everything's still valid
return true;
}
/**
* \@internal
* @param {?} ev
* @return {?}
*/
_pointerMove(ev) {
if (this._disabled) {
return;
}
// prevent default so scrolling does not happen
ev.preventDefault();
ev.stopPropagation();
// update the active knob's position
const /** @type {?} */ hasChanged = this._update(pointerCoord(ev), this._rect, true);
if (hasChanged && this._snaps) {
// trigger a haptic selection changed event
// if this is a snap range
this._haptic.gestureSelectionChanged();
}
}
/**
* \@internal
* @param {?} ev
* @return {?}
*/
_pointerUp(ev) {
if (this._disabled) {
return;
}
// prevent default so scrolling does not happen
ev.preventDefault();
ev.stopPropagation();
// update the active knob's position
this._update(pointerCoord(ev), this._rect, false);
// trigger a haptic end
this._haptic.gestureSelectionEnd();
// trigger ionBlur event
this._fireBlur();
}
/**
* \@internal
* @param {?} current
* @param {?} rect
* @param {?} isPressed
* @return {?}
*/
_update(current, rect, isPressed) {
// figure out where the pointer is currently at
// update the knob being interacted with
let /** @type {?} */ ratio = clamp(0, (current.x - rect.left) / (rect.width), 1);
let /** @type {?} */ val = this._ratioToValue(ratio);
if (this._snaps) {
// snaps the ratio to the current value
ratio = this._valueToRatio(val);
}
// update which knob is pressed
this._pressed = isPressed;
let /** @type {?} */ valChanged = false;
if (this._activeB) {
// when the pointer down started it was determined
// that knob B was the one they were interacting with
this._pressedB = isPressed;
this._pressedA = false;
this._ratioB = ratio;
valChanged = val === this._valB;
this._valB = val;
}
else {
// interacting with knob A
this._pressedA = isPressed;
this._pressedB = false;
this._ratioA = ratio;
valChanged = val === this._valA;
this._valA = val;
}
this._updateBar();
if (valChanged) {
return false;
}
// value has been updated
let /** @type {?} */ value;
if (this._dual) {
// dual knobs have an lower and upper value
value = {
lower: Math.min(this._valA, this._valB),
upper: Math.max(this._valA, this._valB)
};
(void 0) /* console.debug */;
}
else {
// single knob only has one value
value = this._valA;
(void 0) /* console.debug */;
}
// Update input value
this.value = value;
return true;
}
/**
* \@internal
* @return {?}
*/
_updateBar() {
const /** @type {?} */ ratioA = this._ratioA;
const /** @type {?} */ ratioB = this._ratioB;
if (this._dual) {
this._barL = `${(Math.min(ratioA, ratioB) * 100)}%`;
this._barR = `${100 - (Math.max(ratioA, ratioB) * 100)}%`;
}
else {
this._barL = '';
this._barR = `${100 - (ratioA * 100)}%`;
}
this._updateTicks();
}
/**
* \@internal
* @return {?}
*/
_createTicks() {
if (this._snaps) {
this._dom.write(() => {
// TODO: Fix to not use RAF
this._ticks = [];
for (var /** @type {?} */ value = this._min; value <= this._max; value += this._step) {
var /** @type {?} */ ratio = this._valueToRatio(value);
this._ticks.push({
ratio: ratio,
left: `${ratio * 100}%`,
});
}
this._updateTicks();
});
}
}
/**
* \@internal
* @return {?}
*/
_updateTicks() {
const /** @type {?} */ ticks = this._ticks;
const /** @type {?} */ ratio = this.ratio;
if (this._snaps && ticks) {
if (this._dual) {
var /** @type {?} */ upperRatio = this.ratioUpper;
ticks.forEach(t => {
t.active = (t.ratio >= ratio && t.ratio <= upperRatio);
});
}
else {
ticks.forEach(t => {
t.active = (t.ratio <= ratio);
});
}
}
}
/**
* @hidden
* @param {?} isIncrease
* @param {?} isKnobB
* @return {?}
*/
_keyChg(isIncrease, isKnobB) {
const /** @type {?} */ step = this._step;
if (isKnobB) {
if (isIncrease) {
this._valB += step;
}
else {
this._valB -= step;
}
this._valB = clamp(this._min, this._valB, this._max);
this._ratioB = this._valueToRatio(this._valB);
}
else {
if (isIncrease) {
this._valA += step;
}
else {
this._valA -= step;
}
this._valA = clamp(this._min, this._valA, this._max);
this._ratioA = this._valueToRatio(this._valA);
}
this._updateBar();
}
/**
* \@internal
* @param {?} ratio
* @return {?}
*/
_ratioToValue(ratio) {
ratio = Math.round(((this._max - this._min) * ratio));
ratio = Math.round(ratio / this._step) * this._step + this._min;
return clamp(this._min, ratio, this._max);
}
/**
* \@internal
* @param {?} value
* @return {?}
*/
_valueToRatio(value) {
value = Math.round((value - this._min) / this._step) * this._step;
value = value / (this._max - this._min);
return clamp(0, value, 1);
}
/**
* @param {?} val
* @return {?}
*/
_inputNormalize(val) {
if (this._dual) {
return val;
}
else {
val = parseFloat(val);
return isNaN(val) ? undefined : val;
}
}
/**
* @hidden
* @return {?}
*/
_inputUpdated() {
const /** @type {?} */ val = this.value;
if (this._dual) {
this._valA = val.lower;
this._valB = val.upper;
this._ratioA = this._valueToRatio(val.lower);
this._ratioB = this._valueToRatio(val.upper);
}
else {
this._valA = val;
this._ratioA = this._valueToRatio(val);
}
this._updateBar();
this._cd.detectChanges();
}
/**
* @hidden
* @return {?}
*/
ngOnDestroy() {
super.ngOnDestroy();
this._events.destroy();
}
}
Range.decorators = [
{ type: Component, args: [{
selector: 'ion-range',
template: '<ng-content select="[range-left]"></ng-content>' +
'<div class="range-slider" #slider>' +
'<div class="range-tick" *ngFor="let t of _ticks" [style.left]="t.left" [class.range-tick-active]="t.active" role="presentation"></div>' +
'<div class="range-bar" role="presentation"></div>' +
'<div class="range-bar range-bar-active" [style.left]="_barL" [style.right]="_barR" #bar role="presentation"></div>' +
'<div class="range-knob-handle" (ionIncrease)="_keyChg(true, false)" (ionDecrease)="_keyChg(false, false)" [ratio]="_ratioA" [val]="_valA" [pin]="_pin" [pressed]="_pressedA" [min]="_min" [max]="_max" [disabled]="_disabled" [labelId]="_labelId"></div>' +
'<div class="range-knob-handle" (ionIncrease)="_keyChg(true, true)" (ionDecrease)="_keyChg(false, true)" [ratio]="_ratioB" [val]="_valB" [pin]="_pin" [pressed]="_pressedB" [min]="_min" [max]="_max" [disabled]="_disabled" [labelId]="_labelId" *ngIf="_dual"></div>' +
'</div>' +
'<ng-content select="[range-right]"></ng-content>',
host: {
'[class.range-disabled]': '_disabled',
'[class.range-pressed]': '_pressed',
'[class.range-has-pin]': '_pin'
},
providers: [{ provide: NG_VALUE_ACCESSOR, useExisting: Range, multi: true }],
encapsulation: ViewEncapsulation.None,
},] },
];
/**
* @nocollapse
*/
Range.ctorParameters = () => [
{ type: Form, },
{ type: Haptic, },
{ type: Item, decorators: [{ type: Optional },] },
{ type: Config, },
{ type: Platform, },
{ type: ElementRef, },
{ type: Renderer, },
{ type: DomController, },
{ type: ChangeDetectorRef, },
];
Range.propDecorators = {
'_slider': [{ type: ViewChild, args: ['slider',] },],
'min': [{ type: Input },],
'max': [{ type: Input },],
'step': [{ type: Input },],
'snaps': [{ type: Input },],
'pin': [{ type: Input },],
'debounce': [{ type: Input },],
'dualKnobs': [{ type: Input },],
};
function Range_tsickle_Closure_declarations() {
/** @type {?} */
Range.decorators;
/**
* @nocollapse
* @type {?}
*/
Range.ctorParameters;
/** @type {?} */
Range.propDecorators;
/** @type {?} */
Range.prototype._dual;
/** @type {?} */
Range.prototype._pin;
/** @type {?} */
Range.prototype._pressed;
/** @type {?} */
Range.prototype._activeB;
/** @type {?} */
Range.prototype._rect;
/** @type {?} */
Range.prototype._ticks;
/** @type {?} */
Range.prototype._min;
/** @type {?} */
Range.prototype._max;
/** @type {?} */
Range.prototype._step;
/** @type {?} */
Range.prototype._snaps;
/** @type {?} */
Range.prototype._valA;
/** @type {?} */
Range.prototype._valB;
/** @type {?} */
Range.prototype._ratioA;
/** @type {?} */
Range.prototype._ratioB;
/** @type {?} */
Range.prototype._pressedA;
/** @type {?} */
Range.prototype._pressedB;
/** @type {?} */
Range.prototype._barL;
/** @type {?} */
Range.prototype._barR;
/** @type {?} */
Range.prototype._events;
/** @type {?} */
Range.prototype._slider;
/** @type {?} */
Range.prototype._haptic;<|fim▁hole|> /** @type {?} */
Range.prototype._plt;
/** @type {?} */
Range.prototype._dom;
/** @type {?} */
Range.prototype._cd;
}
//# sourceMappingURL=range.js.map<|fim▁end|>
| |
<|file_name|>stream.rs<|end_file_name|><|fim▁begin|>use crate::internal::streamname::{
self, DIGITAL_SIGNATURE_STREAM_NAME, MSI_DIGITAL_SIGNATURE_EX_STREAM_NAME,
SUMMARY_INFO_STREAM_NAME,
};
use cfb;
use std::io::{self, Read, Seek, SeekFrom, Write};
// ========================================================================= //
/// An IO reader for an embedded binary stream in a package.
pub struct StreamReader<'a, F: 'a> {
    // Underlying CFB stream; reads and seeks delegate straight to it.
    stream: cfb::Stream<'a, F>,
}
impl<'a, F> StreamReader<'a, F> {
    /// Crate-internal constructor wrapping a raw CFB stream.
    pub(crate) fn new(stream: cfb::Stream<'a, F>) -> StreamReader<'a, F> {
        StreamReader { stream }
    }
}
impl<'a, F: Read + Seek> Read for StreamReader<'a, F> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.stream.read(buf)
    }
}
impl<'a, F: Read + Seek> Seek for StreamReader<'a, F> {
    fn seek(&mut self, from: SeekFrom) -> io::Result<u64> {
        self.stream.seek(from)
    }
}
// ========================================================================= //
/// An IO writer for an embedded binary stream in a package.
pub struct StreamWriter<'a, F: 'a> {
    // Underlying CFB stream; writes, flushes and seeks delegate to it.
    stream: cfb::Stream<'a, F>,
}
impl<'a, F> StreamWriter<'a, F> {
    /// Crate-internal constructor wrapping a raw CFB stream.
    pub(crate) fn new(stream: cfb::Stream<'a, F>) -> StreamWriter<'a, F> {
        StreamWriter { stream }
    }
}
impl<'a, F: Read + Seek + Write> Write for StreamWriter<'a, F> {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.stream.write(buf)
    }
    fn flush(&mut self) -> io::Result<()> {
        self.stream.flush()
    }
}
impl<'a, F: Read + Seek + Write> Seek for StreamWriter<'a, F> {
    fn seek(&mut self, from: SeekFrom) -> io::Result<u64> {
        self.stream.seek(from)
    }
}
// ========================================================================= //
/// An iterator over the names of the binary streams in a package.
///
/// No guarantees are made about the order in which items are returned.
pub struct Streams<'a> {
    // Raw CFB directory entries; next() filters these down to data streams.
    entries: cfb::Entries<'a>,
}
impl<'a> Streams<'a> {
    /// Crate-internal constructor wrapping a CFB entry iterator.
    pub(crate) fn new(entries: cfb::Entries<'a>) -> Streams<'a> {
        Streams { entries }
    }
}
impl<'a> Iterator for Streams<'a> {
    type Item = String;

    /// Yield the next user-visible stream name, skipping directory
    /// entries, the reserved digital-signature / summary-info streams,
    /// and streams whose decoded name marks them as tables.
    fn next(&mut self) -> Option<String> {
        while let Some(entry) = self.entries.next() {
            if !entry.is_stream() {
                continue;
            }
            let raw_name = entry.name();
            if raw_name == DIGITAL_SIGNATURE_STREAM_NAME
                || raw_name == MSI_DIGITAL_SIGNATURE_EX_STREAM_NAME
                || raw_name == SUMMARY_INFO_STREAM_NAME
            {
                continue;
            }
            let (name, is_table) = streamname::decode(raw_name);
            if !is_table {
                return Some(name);
            }
        }
        None
    }
}
// ========================================================================= //<|fim▁end|>
| |
<|file_name|>pulsarspout.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Spout for Apache Pulsar: """
import os
import tempfile
import pulsar
import heronpy.api.src.python.api_constants as api_constants
from heronpy.api.src.python.spout.spout import Spout
from heronpy.streamlet.src.python.streamletboltbase import StreamletBoltBase
def GenerateLogConfContents(logFileName):
  """Return log4j properties text that routes INFO+ logs to *logFileName*."""
  return """
# Define the root logger with appender file
log4j.rootLogger = INFO, FILE
# Define the file appender
log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
log4j.appender.FILE.File=%s""" % logFileName + """
log4j.appender.FILE.Threshold=INFO
log4j.appender.FILE.DatePattern='.' yyyy-MM-dd-a
log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
log4j.appender.FILE.layout.ConversionPattern=%d{yy-MM-dd HH:mm:ss.SSS} %X{pname}:%X{pid} %-5p %l- %m%n
"""
def GenerateLogConfig(context):
  """Write a log4j config file for this spout instance into the current
  working directory and return its path.

  The file is named <component>-<task>XXXX.conf and is deliberately kept
  on disk (delete=False) because the pulsar client reads it after close.
  """
  namePrefix = str(context.get_component_id()) + "-" + str(context.get_task_id())
  logFileName = os.getcwd() + "/" + namePrefix
  # mode='w' is the fix: NamedTemporaryFile defaults to binary ('w+b'),
  # which makes write() reject the str returned by GenerateLogConfContents
  # on Python 3 (TypeError: a bytes-like object is required).
  flHandler = tempfile.NamedTemporaryFile(mode='w', prefix=namePrefix, suffix='.conf',
                                          dir=os.getcwd(), delete=False)
  flHandler.write(GenerateLogConfContents(logFileName))
  flHandler.flush()
  flHandler.close()
  return flHandler.name
class PulsarSpout(Spout, StreamletBoltBase):
  """PulsarSpout: reads from a pulsar topic"""
  # pylint: disable=too-many-instance-attributes
  # pylint: disable=no-self-use
  def default_deserializer(self, msg):
    # Fallback deserializer: one-element tuple holding the stringified
    # message payload.
    return [str(msg)]
  # TopologyBuilder uses these constants to set
  # cluster/topicname
  serviceUrl = "PULSAR_SERVICE_URL"
  topicName = "PULSAR_TOPIC"
  receiveTimeoutMs = "PULSAR_RECEIVE_TIMEOUT_MS"
  deserializer = "PULSAR_MESSAGE_DESERIALIZER"
  def initialize(self, config, context):
    """Implements Pulsar Spout's initialize method

    Reads cluster/topic/timeout/deserializer settings from *config*,
    writes a log4j config file, then creates the pulsar client and a
    Failover-mode subscription named after the topology.
    """
    self.logger.info("Initializing PulsarSpout with the following")
    self.logger.info("Component-specific config: \n%s" % str(config))
    self.logger.info("Context: \n%s" % str(context))
    # Simple instrumentation counters, bumped in next_tuple/ack/fail.
    self.emit_count = 0
    self.ack_count = 0
    self.fail_count = 0
    if not PulsarSpout.serviceUrl in config or not PulsarSpout.topicName in config:
      self.logger.fatal("Need to specify both serviceUrl and topicName")
    self.pulsar_cluster = str(config[PulsarSpout.serviceUrl])
    self.topic = str(config[PulsarSpout.topicName])
    # At-least-once topologies redeliver unacked messages after the
    # topology message timeout (converted to ms); otherwise use 30 s.
    mode = config[api_constants.TOPOLOGY_RELIABILITY_MODE]
    if mode == api_constants.TopologyReliabilityMode.ATLEAST_ONCE:
      self.acking_timeout = 1000 * int(config[api_constants.TOPOLOGY_MESSAGE_TIMEOUT_SECS])
    else:
      self.acking_timeout = 30000
    if PulsarSpout.receiveTimeoutMs in config:
      self.receive_timeout_ms = config[PulsarSpout.receiveTimeoutMs]
    else:
      self.receive_timeout_ms = 10
    if PulsarSpout.deserializer in config:
      self.deserializer = config[PulsarSpout.deserializer]
      if not callable(self.deserializer):
        self.logger.fatal("Pulsar Message Deserializer needs to be callable")
    else:
      self.deserializer = self.default_deserializer
    # First generate the config
    self.logConfFileName = GenerateLogConfig(context)
    self.logger.info("Generated LogConf at %s" % self.logConfFileName)
    # We currently use the high level consumer API
    # For supporting effectively once, we will need to switch
    # to using lower level Reader API, when it becomes
    # available in python
    self.client = pulsar.Client(self.pulsar_cluster, log_conf_file_path=self.logConfFileName)
    self.logger.info("Setup Client with cluster %s" % self.pulsar_cluster)
    try:
      self.consumer = self.client.subscribe(self.topic, context.get_topology_name(),
                                            consumer_type=pulsar.ConsumerType.Failover,
                                            unacked_messages_timeout_ms=self.acking_timeout)
    except Exception as e:
      self.logger.fatal("Pulsar client subscription failed: %s" % str(e))
    self.logger.info("Subscribed to topic %s" % self.topic)
  def next_tuple(self):
    """Poll the consumer once and emit the deserialized message.

    A receive timeout is normal (logged at debug) and simply yields no
    tuple this round.  The pulsar message id is used as the tuple id so
    ack/fail can be routed back to the broker.
    """
    try:
      msg = self.consumer.receive(timeout_millis=self.receive_timeout_ms)
    except Exception as e:
      self.logger.debug("Exception during recieve: %s" % str(e))
      return
    try:
      self.emit(self.deserializer(msg.data()), tup_id=msg.message_id())
      self.emit_count += 1
    except Exception as e:
      self.logger.info("Exception during emit: %s" % str(e))
  def ack(self, tup_id):
    """Acknowledge the pulsar message identified by *tup_id*."""
    self.ack_count += 1
    self.consumer.acknowledge(tup_id)
  def fail(self, tup_id):
    """Count the failure; the broker redelivers after acking_timeout."""
    self.fail_count += 1
    self.logger.debug("Failed tuple %s" % str(tup_id))
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import artistools.makemodel.botyanski2017<|fim▁end|>
|
# import .1dslicefrom3d
|
<|file_name|>rk4.rs<|end_file_name|><|fim▁begin|>use libc::c_double;
/// Given a differential function dx(x, t),
/// initial condition x0,
/// and a list of times t,
/// find x(t) at each point in t
pub fn odeint(dx: (&Fn(c_double, c_double) -> c_double),
    x0: c_double, t_vec: &Vec<c_double>) -> Vec<c_double> {
    // Need at least one (t_i, t_{i+1}) interval to take a step.
    assert!(t_vec.len() >= 2);
    let mut xs = Vec::with_capacity(t_vec.len());
    xs.push(x0);
    // Each adjacent pair of times yields one RK4 step of size h.
    for pair in t_vec.windows(2) {
        let (t0, t1) = (pair[0], pair[1]);
        let h = t1 - t0;
        let x = *xs.last().unwrap();
        // Classic fourth-order Runge-Kutta slope estimates.
        let k1 = dx(x, t0);
        let k2 = dx(x + h/2.0*k1, t0 + h/2.0);
        let k3 = dx(x + h/2.0*k2, t0 + h/2.0);
        let k4 = dx(x + h*k3, t0 + h);
        xs.push(x + h/6.0*(k1 + 2.0*k2 + 2.0*k3 + k4));
    }
    xs
}
/// FFI version of `odeint`.
///
/// # Safety
/// `t_vec` must be a valid, non-null pointer to a live `Vec<c_double>`;
/// it is dereferenced without checks.  The result is returned through the
/// `unsafe_alloc_vec_f64!` macro (defined elsewhere), which presumably
/// heap-allocates the vector and hands ownership to the caller -- confirm
/// the deallocation contract at the macro's definition.
#[no_mangle]
pub extern "C" fn odeint_64(dx: (&Fn(c_double, c_double) -> c_double),
        x0: c_double, t_vec: *mut Vec<c_double>) -> *mut Vec<c_double> {
    unsafe { unsafe_alloc_vec_f64!(odeint(dx, x0, &*t_vec)) }
}
#[cfg(test)]
mod tests_rk4 {
use super::*;
const THRESHOLD: f64 = 0.0000001;
// Test differential to give to odeint
#[allow(unused_variables)]
fn velocity_one(x: f64, t: f64) -> f64 {
1.0 // Velocity of particle is 1
}
#[allow(unused_variables)]
fn free_fall(x: f64, t: f64) -> f64 {
let g = -9.81;
g*t
}
<|fim▁hole|> #[test]
fn test_velocity_one() {
let ref t = vec![0.0, 1.0];
let x0 = 0.0;
let mut result = odeint(&velocity_one, x0, t);
assert!((result.pop().unwrap() - 1.0).abs() < THRESHOLD);
}
#[test]
fn test_length() {
let ref t = vec![0.0, 1.0, 2.0];
let x0 = 0.0;
let result = odeint(&velocity_one, x0, t);
assert_eq!(result.len(), 3);
}
#[test]
fn test_free_fall() {
let mut times = Vec::<f64>::new();
let mut i = 0.0;
while i <= 10.0 {
times.push(i);
i += 0.1;
}
let x0 = 0.0;
let mut result = odeint(&free_fall, x0, ×);
let expected_value = -490.5;
//println!("printing a result vector: {:?}", result);
assert!((result.pop().unwrap() - expected_value).abs() < THRESHOLD);
}
}<|fim▁end|>
|
#[test]
fn rk4_compiles() {
}
|
<|file_name|>Dir.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2001 - 2014 The SCons Foundation<|fim▁hole|># "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Scanner/Dir.py 2014/09/27 12:51:43 garyo"
import SCons.Node.FS
import SCons.Scanner
def only_dirs(nodes):
    """Filter *nodes* down to those that disambiguate to Dir nodes."""
    return [node for node in nodes
            if isinstance(node.disambiguate(), SCons.Node.FS.Dir)]
def DirScanner(**kw):
    """Return a prototype Scanner instance for scanning
    directories for on-disk files"""
    # Look entries up generically (file or directory)...
    kw['node_factory'] = SCons.Node.FS.Entry
    # ...but only recurse into the ones that turn out to be directories.
    kw['recursive'] = only_dirs
    return SCons.Scanner.Base(scan_on_disk, "DirScanner", **kw)
def DirEntryScanner(**kw):
    """Return a prototype Scanner instance for "scanning"
    directory Nodes for their in-memory entries"""
    kw['node_factory'] = SCons.Node.FS.Entry
    # Non-recursive: report only the directory's own in-memory entries.
    kw['recursive'] = None
    return SCons.Scanner.Base(scan_in_memory, "DirEntryScanner", **kw)
# Directory entries that must never be scanned: the '.'/'..' links plus
# every on-disk form of the .sconsign signature database.
skip_entry = {}
skip_entry_list = [
    '.',
    '..',
    '.sconsign',
    # Used by the native dblite.py module.
    '.sconsign.dblite',
    # Used by dbm and dumbdbm.
    '.sconsign.dir',
    # Used by dbm.
    '.sconsign.pag',
    # Used by dumbdbm.
    '.sconsign.dat',
    '.sconsign.bak',
    # Used by some dbm emulations using Berkeley DB.
    '.sconsign.db',
]
for skip in skip_entry_list:
    skip_entry[skip] = 1
    # Also register the platform-normalized spelling so lookups succeed on
    # case-insensitive file systems.
    skip_entry[SCons.Node.FS._my_normcase(skip)] = 1
# Predicate: True for entry names that SHOULD be scanned.
do_not_scan = lambda k: k not in skip_entry
def scan_on_disk(node, env, path=()):
    """
    Scans a directory for on-disk files and directories therein.

    Merely looking the names up adds them to the in-memory Node tree
    representation of the file system, so after registering every
    scannable entry we can delegate to the in-memory scanning function.
    """
    try:
        names = node.fs.listdir(node.abspath)
    except (IOError, OSError):
        return []
    for name in filter(do_not_scan, names):
        # Prefix './' so a name beginning with '#' is not looked up
        # relative to the top-level directory.
        node.Entry('./' + name)
    return scan_in_memory(node, env, path)
def scan_in_memory(node, env, path=()):
    """
    "Scans" a Node.FS.Dir for its in-memory entries.
    """
    try:
        entries = node.entries
    except AttributeError:
        # Not a Node.FS.Dir (or not enough like one for our purposes),
        # which can happen when a target list of mixed Node types (Dirs
        # and Files, for example) has a Dir as its first entry.
        return []
    return [entries[name]
            for name in sorted(k for k in entries if do_not_scan(k))]
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|>
|
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
|
<|file_name|>test_table.py<|end_file_name|><|fim▁begin|>from http import HTTPStatus
from json import loads
from unittest.mock import Mock, patch
from uuid import uuid4
from tornado.testing import gen_test
from pokerserver.database import PlayerState, UUIDsRelation
from pokerserver.models import InvalidTurnError, NotYourTurnError, Player, PositionOccupiedError
from tests.utils import IntegrationHttpTestCase, create_table, return_done_future
class TestTableController(IntegrationHttpTestCase):
async def async_setup(self):
self.table_id = 1
self.uuid = uuid4()
self.uuid2 = uuid4()
self.player_name = 'c'
self.player_name2 = 'd'
await UUIDsRelation.add_uuid(self.uuid, self.player_name)
await UUIDsRelation.add_uuid(self.uuid2, self.player_name2)
players = [
Player(self.table_id, 1, 'a', 0, ['Ah', 'Ac'], 0),
Player(self.table_id, 2, 'b', 0, ['Kh', 'Kc'], 0),
Player(self.table_id, 5, 'c', 0, ['Qh', 'Qc'], 0)
]
table = await create_table(table_id=self.table_id, players=players)
self.table_name = table.name
@gen_test
async def test_get_for_player_at_table(self):
await self.async_setup()
response = await self.fetch_async('/table/{}?uuid={}'.format(self.table_name, self.uuid))
self.assertEqual(response.code, HTTPStatus.OK.value)
table = loads(response.body.decode())
self.assertEqual(table, {
'big_blind': 2,
'can_join': False,
'current_player': None,
'dealer': None,
'state': 'waiting for players',
'round': 'preflop',
'open_cards': [],
'players': [{
'table_id': 1,
'balance': 0,
'cards': [],
'name': 'a',
'bet': 0,
'position': 1,
'state': PlayerState.PLAYING.value
}, {
'table_id': 1,
'balance': 0,
'cards': [],
'name': 'b',
'bet': 0,
'position': 2,
'state': PlayerState.PLAYING.value
}, {
'table_id': 1,
'balance': 0,
'cards': ['Qh', 'Qc'],
'name': 'c',
'bet': 0,
'position': 5,
'state': PlayerState.PLAYING.value
}],
'pots': [{
'bets': {}
}],
'small_blind': 1
})
@gen_test
async def test_get_for_player_not_at_table(self):
await self.async_setup()
response = await self.fetch_async('/table/{}?uuid={}'.format(self.table_name, self.uuid2))
self.assertEqual(response.code, HTTPStatus.OK.value)
table = loads(response.body.decode())
self.assertEqual(table, {
'big_blind': 2,
'can_join': True,
'current_player': None,
'dealer': None,
'state': 'waiting for players',
'round': 'preflop',
'open_cards': [],
'players': [{
'table_id': 1,
'balance': 0,
'cards': [],
'name': 'a',
'bet': 0,
'position': 1,
'state': PlayerState.PLAYING.value
}, {
'table_id': 1,
'balance': 0,
'cards': [],
'name': 'b',
'bet': 0,
'position': 2,
'state': PlayerState.PLAYING.value
}, {
'table_id': 1,
'balance': 0,
'cards': [],
'name': 'c',
'bet': 0,
'position': 5,
'state': PlayerState.PLAYING.value
}],
'pots': [{
'bets': {}
}],
'small_blind': 1
})
@gen_test
async def test_get_for_unauthorized_player(self):
await self.async_setup()
response = await self.fetch_async('/table/{}'.format(self.table_name))
self.assertEqual(response.code, HTTPStatus.OK.value)
table = loads(response.body.decode())
self.assertEqual(table, {
'big_blind': 2,
'can_join': True,
'current_player': None,
'dealer': None,
'state': 'waiting for players',
'round': 'preflop',
'open_cards': [],
'players': [{
'table_id': 1,<|fim▁hole|> 'position': 1,
'state': PlayerState.PLAYING.value
}, {
'table_id': 1,
'balance': 0,
'cards': [],
'name': 'b',
'bet': 0,
'position': 2,
'state': PlayerState.PLAYING.value
}, {
'table_id': 1,
'balance': 0,
'cards': [],
'name': 'c',
'bet': 0,
'position': 5,
'state': PlayerState.PLAYING.value
}],
'pots': [{
'bets': {}
}],
'small_blind': 1
})
class TestJoinController(IntegrationHttpTestCase):
    """Integration tests for POST /table/<name>/actions/join."""
    async def async_setup(self):
        # One authenticated player and a fresh two-seat table per test.
        self.uuid = uuid4()
        self.player_name = 'player'
        await UUIDsRelation.add_uuid(self.uuid, self.player_name)
        table = await create_table(max_player_count=2)
        self.table_name = table.name
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_join(self, load_mock):
        # Happy path: HTTP 200 and Match.join called with (player, seat).
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.join.side_effect = return_done_future()
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/join'.format(self.table_name),
            self.uuid,
            body={'position': 1}
        )
        self.assertEqual(response.code, HTTPStatus.OK.value)
        load_mock.assert_called_once_with(self.table_name)
        match_mock.join.assert_called_once_with(self.player_name, 1)
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_join_occupied_position(self, load_mock):
        # PositionOccupiedError from the match maps to HTTP 409 Conflict.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.join.side_effect = return_done_future(exception=PositionOccupiedError)
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/join'.format(self.table_name),
            self.uuid,
            body={'position': 1},
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.CONFLICT.value)
    @gen_test
    async def test_join_missing_parameter(self):
        # A join request without a 'position' is a 400 Bad Request.
        await self.async_setup()
        response = await self.post_with_uuid(
            '/table/{}/actions/join'.format(self.table_name),
            self.uuid,
            body={},
            raise_error=False)
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
    @gen_test
    async def test_join_invalid_parameter(self):
        # Negative positions are rejected with 400 Bad Request.
        await self.async_setup()
        response = await self.post_with_uuid(
            '/table/{}/actions/join'.format(self.table_name),
            self.uuid,
            body={'position': -1},
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
class TestFoldController(IntegrationHttpTestCase):
    """Integration tests for POST /table/<name>/actions/fold."""
    async def async_setup(self):
        # One authenticated player and a fresh two-seat table per test.
        self.uuid = uuid4()
        self.player_name = 'player'
        await UUIDsRelation.add_uuid(self.uuid, self.player_name)
        table = await create_table(max_player_count=2)
        self.table_name = table.name
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_fold(self, load_mock):
        # Happy path: HTTP 200 and Match.fold called for the player.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.fold.side_effect = return_done_future()
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid('/table/{}/actions/fold'.format(self.table_name), self.uuid)
        self.assertEqual(response.code, HTTPStatus.OK.value)
        load_mock.assert_called_once_with(self.table_name)
        match_mock.fold.assert_called_once_with(self.player_name)
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_fold_invalid_turn(self, load_mock):
        # Folding out of turn (NotYourTurnError) maps to 400 Bad Request.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.fold.side_effect = return_done_future(exception=NotYourTurnError)
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/fold'.format(self.table_name),
            self.uuid,
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
class TestCallController(IntegrationHttpTestCase):
    """Integration tests for POST /table/<name>/actions/call."""
    async def async_setup(self):
        # One authenticated player and a fresh two-seat table per test.
        self.uuid = uuid4()
        self.player_name = 'player'
        await UUIDsRelation.add_uuid(self.uuid, self.player_name)
        table = await create_table(max_player_count=2)
        self.table_name = table.name
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_call(self, load_mock):
        # Happy path: HTTP 200 and Match.call invoked for the player.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.call.side_effect = return_done_future()
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid('/table/{}/actions/call'.format(self.table_name), self.uuid)
        self.assertEqual(response.code, HTTPStatus.OK.value)
        load_mock.assert_called_once_with(self.table_name)
        match_mock.call.assert_called_once_with(self.player_name)
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_call_invalid_turn(self, load_mock):
        # Calling out of turn (NotYourTurnError) maps to 400 Bad Request.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.call.side_effect = return_done_future(exception=NotYourTurnError)
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/call'.format(self.table_name),
            self.uuid,
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
class TestCheckController(IntegrationHttpTestCase):
    """Integration tests for POST /table/<name>/actions/check."""
    async def async_setup(self):
        # One authenticated player and a fresh two-seat table per test.
        self.uuid = uuid4()
        self.player_name = 'player'
        await UUIDsRelation.add_uuid(self.uuid, self.player_name)
        table = await create_table(max_player_count=2)
        self.table_name = table.name
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_check(self, load_mock):
        # Happy path: HTTP 200 and Match.check invoked for the player.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.check.side_effect = return_done_future()
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid('/table/{}/actions/check'.format(self.table_name), self.uuid)
        self.assertEqual(response.code, HTTPStatus.OK.value)
        load_mock.assert_called_once_with(self.table_name)
        match_mock.check.assert_called_once_with(self.player_name)
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_check_invalid_turn(self, load_mock):
        # Note: unlike fold/call, this exercises InvalidTurnError; it is
        # likewise surfaced as a 400 Bad Request.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.check.side_effect = return_done_future(exception=InvalidTurnError)
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/check'.format(self.table_name),
            self.uuid,
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
class TestRaiseController(IntegrationHttpTestCase):
    """Integration tests for POST /table/<name>/actions/raise."""
    async def async_setup(self):
        # One authenticated player and a fresh two-seat table per test.
        self.uuid = uuid4()
        self.player_name = 'player'
        await UUIDsRelation.add_uuid(self.uuid, self.player_name)
        table = await create_table(max_player_count=2)
        self.table_name = table.name
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_raise(self, load_mock):
        # Happy path: HTTP 200 and Match.raise_bet called with the amount.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.raise_bet.side_effect = return_done_future()
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/raise'.format(self.table_name),
            self.uuid,
            body={'amount': 17}
        )
        self.assertEqual(response.code, HTTPStatus.OK.value)
        load_mock.assert_called_once_with(self.table_name)
        match_mock.raise_bet.assert_called_once_with(self.player_name, 17)
    @patch('pokerserver.controllers.base.BaseController.load_match')
    @gen_test
    async def test_raise_invalid_turn(self, load_mock):
        # Raising out of turn (NotYourTurnError) maps to 400 Bad Request.
        await self.async_setup()
        match_mock = Mock()
        match_mock.table.players = []
        match_mock.raise_bet.side_effect = return_done_future(exception=NotYourTurnError)
        load_mock.side_effect = return_done_future(match_mock)
        response = await self.post_with_uuid(
            '/table/{}/actions/raise'.format(self.table_name),
            self.uuid,
            body={'amount': 3},
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
    @gen_test
    async def test_raise_missing_parameter(self):
        # A raise without an 'amount' is a 400 Bad Request.
        await self.async_setup()
        response = await self.post_with_uuid(
            '/table/{}/actions/raise'.format(self.table_name),
            self.uuid,
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
    @gen_test
    async def test_raise_invalid_parameter(self):
        # A non-numeric amount (here via query string) is a 400 Bad Request.
        await self.async_setup()
        response = await self.post_with_uuid(
            '/table/{}/actions/raise?amount=googol'.format(self.table_name),
            self.uuid,
            raise_error=False
        )
        self.assertEqual(response.code, HTTPStatus.BAD_REQUEST.value)
|
'balance': 0,
'cards': [],
'name': 'a',
'bet': 0,
|
<|file_name|>tpl_ceph_install.go<|end_file_name|><|fim▁begin|>/*
** Copyright [2013-2015] [Megam Systems]
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
package ubuntu
import (
"fmt"
"os"
"strings"
"github.com/megamsys/megdc/templates"
"github.com/megamsys/urknall"
//"github.com/megamsys/libgo/cmd"
)
const (
CephUser = "CephUser"
Osd = "Osd"
Phydev = "PhyDev"
UserHomePrefix = "/home/"
StrictHostKey = `
ConnectTimeout 5
Host *
StrictHostKeyChecking no
`
SSHHostConfig = `
Host %s
Hostname %s
User %s
`
CephConf = `osd crush chooseleaf type = 0
osd_pool_default_size = %d
public network = %s
cluster network = %s
mon_pg_warn_max_per_osd = 0
`
)
var ubuntucephinstall *UbuntuCephInstall
// init registers this template under "UbuntuCephInstall" with the megdc
// template registry so it can be selected by name at deploy time.
func init() {
	ubuntucephinstall = &UbuntuCephInstall{}
	templates.Register("UbuntuCephInstall", ubuntucephinstall)
}
type UbuntuCephInstall struct {
osds []string
cephuser string
phydev string
}
// Options copies the user-supplied template settings -- the OSD list, the
// ceph user name, and the physical network device -- onto the receiver.
// Keys that are absent leave the corresponding field at its zero value.
func (tpl *UbuntuCephInstall) Options(t *templates.Template) {
	if osds, ok := t.Maps[Osd]; ok {
		tpl.osds = osds
	}
	if cephuser, ok := t.Options[CephUser]; ok {
		tpl.cephuser = cephuser
	}
	if phydev, ok := t.Options[Phydev]; ok {
		tpl.phydev = phydev
	}
}
// Render adds the concrete install template to the urknall package under
// the key "ceph", deriving the ceph user's home directory from the
// configured user name.
func (tpl *UbuntuCephInstall) Render(p urknall.Package) {
	p.AddTemplate("ceph", &UbuntuCephInstallTemplate{
		osds:     tpl.osds,
		cephuser: tpl.cephuser,
		cephhome: UserHomePrefix + tpl.cephuser,
		phydev:   tpl.phydev,
	})
}
// Run executes the template against the given urknall target, passing a
// fresh UbuntuCephInstall that carries the same settings as the receiver.
func (tpl *UbuntuCephInstall) Run(target urknall.Target) error {
	return urknall.Run(target, &UbuntuCephInstall{
		osds:     tpl.osds,
		cephuser: tpl.cephuser,
		phydev:   tpl.phydev,
	})
}
type UbuntuCephInstallTemplate struct {
osds []string
cephuser string
cephhome string
phydev string
}
func (m *UbuntuCephInstallTemplate) Render(pkg urknall.Package) {
host, _ := os.Hostname()
ip := IP(m.phydev)
osddir := ArraytoString("/","/osd",m.osds)
hostosd := ArraytoString(host+":/","/osd",m.osds)
CephUser := m.cephuser
CephHome := m.cephhome
pkg.AddCommands("cephinstall",
Shell("echo deb https://download.ceph.com/debian-infernalis/ $(lsb_release -sc) main | tee /etc/apt/sources.list.d/ceph.list"),
Shell("wget -q -O- 'https://download.ceph.com/keys/release.asc' | apt-key add -"),
InstallPackages("apt-transport-https sudo"),
UpdatePackagesOmitError(),
InstallPackages("ceph-deploy ceph-common ceph-mds dnsmasq openssh-server ntp sshpass ceph ceph-mds ceph-deploy radosgw"),
)
pkg.AddCommands("cephuser_add",
AddUser(CephUser,false),
)
pkg.AddCommands("cephuser_sudoer",
Shell("echo '"+CephUser+" ALL = (root) NOPASSWD:ALL' | sudo tee /etc/sudoers.d/"+CephUser+""),
)
pkg.AddCommands("chmod_sudoer",
Shell("sudo chmod 0440 /etc/sudoers.d/"+CephUser+""),
)
pkg.AddCommands("etchost",
Shell("echo '"+ip+" "+host+"' >> /etc/hosts"),
)
pkg.AddCommands("ssh-keygen",
Mkdir(CephHome+"/.ssh", CephUser, 0700),
AsUser(CephUser, Shell("ssh-keygen -N '' -t rsa -f "+CephHome+"/.ssh/id_rsa")),
AsUser(CephUser, Shell("cp "+CephHome+"/.ssh/id_rsa.pub "+CephHome+"/.ssh/authorized_keys")),
)
pkg.AddCommands("ssh_known_hosts",
WriteFile(CephHome+"/.ssh/ssh_config", StrictHostKey, CephUser, 0755),
WriteFile(CephHome+"/.ssh/config", fmt.Sprintf(SSHHostConfig, host, host, CephUser), CephUser, 0755),
)
pkg.AddCommands("mkdir_osd",
Mkdir(osddir,"", 0755),
Shell("sudo chown -R "+CephUser+":"+CephUser+" "+osddir ),
)
pkg.AddCommands("write_cephconf",
AsUser(CephUser, Shell("mkdir "+CephHome+"/ceph-cluster")),
AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster")),
AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy new "+host+" ")),
AsUser(CephUser, Shell("echo 'osd crush chooseleaf type = 0' >> "+CephHome+"/ceph-cluster/ceph.conf")),
AsUser(CephUser,Shell("echo 'osd_pool_default_size = 2' >> "+CephHome+"/ceph-cluster/ceph.conf")),
AsUser(CephUser,Shell("echo 'mon_pg_warn_max_per_osd = 0' >> "+CephHome+"/ceph-cluster/ceph.conf")),
AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy install "+host+"")),
AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy mon create-initial")),
AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy osd prepare "+ hostosd )),<|fim▁hole|> AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy osd activate "+ hostosd )),
AsUser(CephUser, Shell("cd "+CephHome+"/ceph-cluster;ceph-deploy admin "+host+"")),
AsUser(CephUser, Shell("sudo chmod +r /etc/ceph/ceph.client.admin.keyring")),
AsUser(CephUser, Shell("sleep 180")),
AsUser(CephUser, Shell("ceph osd pool set rbd pg_num 100")),
AsUser(CephUser, Shell("sleep 180")),
AsUser(CephUser, Shell("ceph osd pool set rbd pgp_num 100")),
)
pkg.AddCommands("copy_keyring",
Shell("cp "+CephHome+"/ceph-cluster/*.keyring /etc/ceph/"),
)
}
// noOfIpsFromMask returns the prefix length (count of leading one bits)
// of the netmask on the configured physical device.  Despite its name it
// is a CIDR prefix size, not a number of addresses.
func (m *UbuntuCephInstallTemplate) noOfIpsFromMask() int {
	si, _ := IPNet(m.phydev).Mask.Size() // ones bits of the device's netmask
	return si
}
// slashIp builds the device subnet in CIDR notation by replacing the last
// octet of the device IP with "0", e.g. "192.168.1.7" -> "192.168.1.0/24".
// NOTE(review): assumes an IPv4 address whose host bits all live in the
// final octet -- verify behaviour for masks other than /24.
func (m *UbuntuCephInstallTemplate) slashIp() string {
	s := strings.Split(IP(m.phydev), ".")
	p := s[0 : len(s)-1]
	p = append(p, "0")
	return fmt.Sprintf("%s/%d", strings.Join(p, "."), m.noOfIpsFromMask())
}
// osdPoolSize reports the number of OSD entries passed in.
// NOTE(review): appears unreferenced within this file -- confirm callers
// before relying on its semantics.
func (m *UbuntuCephInstallTemplate) osdPoolSize(osds ...string) int {
	return len(osds)
}
| |
<|file_name|>KillAura.java<|end_file_name|><|fim▁begin|>package zeonClient.mods;
import java.util.Iterator;
import org.lwjgl.input.Keyboard;
import net.minecraft.client.entity.EntityOtherPlayerMP;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.network.play.client.CPacketPlayerDigging;
import net.minecraft.network.play.client.CPacketPlayerDigging.Action;
import net.minecraft.network.play.client.CPacketUseEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.BlockPos;
import zeonClient.main.Category;
public class KillAura extends Mod {
private int ticks = 0;
public KillAura() {
super("KillAura", "KillAura", Keyboard.KEY_R, Category.COMBAT);
}
<|fim▁hole|> public void onUpdate() {
if(this.isToggled()) {
ticks++;
if(ticks >= 20 - speed()) {
ticks = 0;
mc.player.rotationYaw +=0.2F;
for(Iterator<Entity> entities = mc.world.loadedEntityList.iterator(); entities.hasNext();) {
Object object = entities.next();
if(object instanceof EntityLivingBase) {
EntityLivingBase e = (EntityLivingBase) object;
if(e instanceof EntityPlayerSP) continue;
if(mc.player.getDistanceToEntity(e) <= 7F) {
if(e.isInvisible()) {
break;
}
if(e.isEntityAlive()) {
if(mc.player.getHeldItemMainhand() != null) {
mc.player.attackTargetEntityWithCurrentItem(e);
}
if(mc.player.isActiveItemStackBlocking()) {
mc.player.connection.sendPacket(new CPacketPlayerDigging(Action.RELEASE_USE_ITEM, new BlockPos(0, 0, 0), EnumFacing.UP));
}
mc.player.connection.sendPacket(new CPacketUseEntity(e));
mc.player.swingArm(EnumHand.MAIN_HAND);
break;
}
}
}
}
}
}
}
private int speed() {
return 18;
}
}<|fim▁end|>
| |
<|file_name|>fleet_route_stop.py<|end_file_name|><|fim▁begin|># Copyright 2019 Mentxu Isuskitza - AvanzOSC
# Copyright 2019 Oihana Larrañaga - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, fields, models
from odoo.models import expression
from odoo.tools.safe_eval import safe_eval
class FleetRouteStop(models.Model):
_name = 'fleet.route.stop'
_description = 'Route Stop'
_order = 'route_id, sequence, estimated_time'
name = fields.Char(string='Description', required=True)
location_id = fields.Many2one(
string='Location', comodel_name='res.partner',
domain=lambda self: [
('category_id', 'in',
self.env.ref('fleet_route.stop_location_partner_cat').id)])
street = fields.Char(
string='Street', related='location_id.street')
city = fields.Char(
string='City', related='location_id.city')
state_id = fields.Many2one(
string='State', comodel_name='res.country.state',
related='location_id.state_id')
country_id = fields.Many2one(
string='Country', comodel_name='res.country',
related='location_id.country_id')
comment = fields.Text(
string='Internal notes', related='location_id.comment')
estimated_time = fields.Float(string='Estimated time')
sequence = fields.Integer(string="Sequence", default=1)
route_id = fields.Many2one(
string='Route', comodel_name='fleet.route', required=True,
ondelete='cascade')
manager_id = fields.Many2one(
string="Manager", comodel_name="hr.employee",
related="route_id.manager_id", store=True)
manager_phone_mobile = fields.Char(
string="Phone/mobile", related="route_id.manager_phone_mobile",
store=True)
<|fim▁hole|> self.ensure_one()
if not self.name:
self.name = self.location_id.display_name
    @api.multi
    def open_map(self):
        """Open this stop's location on a map (delegates to the partner)."""
        self.ensure_one()
        return self.location_id.open_map()
    @api.multi
    def button_open_form(self):
        """Return an act-window dict that opens this stop in its form view.

        The generic stop action is narrowed to the current record: its
        configured domain is AND-ed with ``id = self.id`` and the dedicated
        form view is forced.
        """
        self.ensure_one()
        action = self.env.ref("fleet_route.action_fleet_route_stop")
        form_view = self.env.ref("fleet_route.fleet_route_stop_view_form")
        action_dict = action.read()[0] if action else {}
        # Restrict the action's own (string) domain to just this record.
        domain = expression.AND([
            [("id", "=", self.id)],
            safe_eval(action.domain or "[]")])
        action_dict.update({
            "domain": domain,
            "view_id": form_view.id,
            "view_mode": "form",
            "res_id": self.id,
            "views": [],
        })
        return action_dict
    @api.multi
    def name_get(self):
        """ name_get() -> [(id, name), ...]

        Returns a textual representation for the records in ``self``;
        here ``"<stop> [<route> (<direction>)]"`` unless the context flag
        ``hide_route`` asks for the plain default representation.

        :return: list of pairs ``(id, text_repr)`` for each records
        :rtype: list(tuple)
        """
        result = []
        # Callers may request the bare stop name via context.
        if self.env.context.get("hide_route"):
            return super(FleetRouteStop, self).name_get()
        for record in self:
            # Run the route's direction value through the field's export
            # conversion so the human-readable label is rendered.
            field = record.route_id._fields["direction"]
            direction = field.convert_to_export(
                record.route_id["direction"], record.route_id)
            result.append((record.id, "{} [{} ({})]".format(
                record.name, record.route_id.name_id.name, direction)))
        return result
|
@api.onchange("location_id")
def _onchange_location_id(self):
|
<|file_name|>94-mobile_actuator.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
// If you use this as a template, update the copyright with your own name.
// Sample Node-RED node file
module.exports = function(RED) {
"use strict";
// require any external libraries we may need....
//var foo = require("foo-library");
<|fim▁hole|> function mobileactuatorNode(n) {
// Create a RED node
RED.nodes.createNode(this,n);
// Store local copies of the node configuration (as defined in the .html)
this.topic = n.topic;
this.mobile_type = n.mobile_type;
this.mobile_port = n.mobile_port;
// copy "this" object in case we need it in context of callbacks of other functions.
var node = this;
// Do whatever you need to do in here - declare callbacks etc
// Note: this sample doesn't do anything much - it will only send
// this message once at startup...
// Look at other real nodes for some better ideas of what to do....
var msg = {};
msg.topic = this.topic;
msg.payload = "Hello world !"
// send out the message to the rest of the workspace.
// ... this message will get sent at startup so you may not see it in a debug node.
//this.send(msg);
// respond to inputs....
this.on('input', function (msg) {
//node.warn("I saw a payload: "+msg.payload);
// in this example just send it straight on... should process it here really
if (msg.payload[0] == this.mobile_type && msg.payload[1] == 0){
msg.payload = msg.payload.substr(2)
node.send(msg);
}
});
this.on("close", function() {
// Called when the node is shutdown - eg on redeploy.
// Allows ports to be closed, connections dropped etc.
// eg: node.client.disconnect();
});
}
RED.nodes.registerType("mobile actuator",mobileactuatorNode);
}<|fim▁end|>
|
// The main node definition - most things happen in here
|
<|file_name|>build_index_page.py<|end_file_name|><|fim▁begin|>import glob
json_files = glob.glob("tests/**/output/**/*.json", recursive=True)
html_files = glob.glob("tests/**/output/**/*.html", recursive=True)
html_list = ""
for f_ in html_files:
html_list += '\t<li><a href="{}">{}</li>\n'.format(
f_[6:],
f_.split(".")[-2],
)
json_list = ""
for f_ in json_files:
json_list += '\t<li><a href="{}">{}</li>\n'.format(
f_[6:],
f_.split(".")[-2],
)
html_file = """
<html>
<body>
<h3>HTML</h3>
<ul>
{}
</ul>
<br/><br/><|fim▁hole|> </ul>
</body>
</html>
""".format(
html_list, json_list
)
print(html_file)<|fim▁end|>
|
<h3>JSON</h3>
<ul>
{}
|
<|file_name|>autoderef-method.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait double {
fn double(self: Box<Self>) -> uint;
}
impl double for uint {<|fim▁hole|> fn double(self: Box<uint>) -> uint { *self * 2u }
}
pub fn main() {
let x = box 3u;
assert_eq!(x.double(), 6u);
}<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import AssetServiceTransport
from .grpc import AssetServiceGrpcTransport
from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]]
_transport_registry["grpc"] = AssetServiceGrpcTransport
_transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport
__all__ = (
"AssetServiceTransport",<|fim▁hole|><|fim▁end|>
|
"AssetServiceGrpcTransport",
"AssetServiceGrpcAsyncIOTransport",
)
|
<|file_name|>goodbye.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
print('goodbye')<|fim▁end|>
|
__author__ = 'las3wh'
|
<|file_name|>ApplicationLocation.java<|end_file_name|><|fim▁begin|>/**
*
*/
package com.velocity.enums;<|fim▁hole|> * This Enum defines the values for ApplicationLocation
*
* @author Vimal Kumar
* @date 12-March-2015
*/
public enum ApplicationLocation {
    // Possible application-location values; presumably describes where the
    // payment application runs relative to the merchant premises — TODO confirm
    // against the Velocity API documentation.
    // (Removed a FIM corruption marker that trailed the closing brace.)
    HomeInternet, NotSet, OffPremises, OnPremises, Unknown
}
|
/**
|
<|file_name|>JSR88_Util.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.console.configcreator;
import java.io.File;
import javax.enterprise.deploy.shared.factories.DeploymentFactoryManager;
import javax.enterprise.deploy.spi.DeploymentManager;
import javax.enterprise.deploy.spi.Target;
import javax.enterprise.deploy.spi.status.ProgressObject;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.portlet.PortletException;
import javax.portlet.PortletRequest;
import org.apache.geronimo.deployment.plugin.jmx.CommandContext;
import org.apache.geronimo.deployment.plugin.jmx.JMXDeploymentManager;
import org.apache.geronimo.deployment.plugin.local.DistributeCommand;
import org.apache.geronimo.j2ee.deployment.ApplicationInfo;
import org.apache.geronimo.j2ee.deployment.EARConfigBuilder;
import org.apache.geronimo.kernel.Kernel;
import org.apache.geronimo.kernel.KernelRegistry;
/**
* Util class for JSR-88 related functions
*
* @version $Rev$ $Date$
*/
public class JSR88_Util {
/*private static List getEjbClassLoaders(PortletRequest request) {
List deployedEjbs = JSR77_Util.getDeployedEJBs(request);
List configurations = new ArrayList();
for (int i = 0; i < deployedEjbs.size(); i++) {
String ejbPatternName = ((ReferredData) deployedEjbs.get(i)).getPatternName();
configurations.add(getDependencyString(ejbPatternName));
}
return getConfigClassLoaders(configurations);
}
private static List getConfigClassLoaders(List configurationNames) {
List classLoaders = new ArrayList();
ConfigurationManager configurationManager = PortletManager.getConfigurationManager();
for (int i = 0; i < configurationNames.size(); i++) {
Artifact configurationId = Artifact.create((String) configurationNames.get(i));
classLoaders.add(configurationManager.getConfiguration(configurationId).getConfigurationClassLoader());
}
return classLoaders;<|fim▁hole|> }*/
public static ApplicationInfo createApplicationInfo(PortletRequest actionRequest, File moduleFile) {
ApplicationInfo applicationInfo = null;
EARConfigBuilder.createPlanMode.set(Boolean.TRUE);
try {
DeploymentFactoryManager dfm = DeploymentFactoryManager.getInstance();
DeploymentManager mgr = dfm.getDeploymentManager("deployer:geronimo:inVM", null, null);
if (mgr instanceof JMXDeploymentManager) {
((JMXDeploymentManager) mgr).setLogConfiguration(false, true);
}
Target[] targets = mgr.getTargets();
if (null == targets) {
throw new IllegalStateException("No target to distribute to");
}
targets = new Target[] { targets[0] };
DistributeCommand command = new DistributeCommand(getKernel(), targets, moduleFile, null);
CommandContext commandContext = new CommandContext(true, true, null, null, false);
commandContext.setUsername("system");
commandContext.setPassword("manager");
command.setCommandContext(commandContext);
command.doDeploy(targets[0], true);
} catch (Exception e) {
// Any better ideas?
if(EARConfigBuilder.appInfo.get() == null) throw new RuntimeException(e);
} finally {
EARConfigBuilder.createPlanMode.set(Boolean.FALSE);
applicationInfo = EARConfigBuilder.appInfo.get();
EARConfigBuilder.appInfo.set(null);
}
return applicationInfo;
}
private static Kernel getKernel() {
// todo: consider making this configurable; we could easily connect to a remote kernel if we wanted to
Kernel kernel = null;
try {
kernel = (Kernel) new InitialContext().lookup("java:comp/GeronimoKernel");
} catch (NamingException e) {
// log.error("Unable to look up kernel in JNDI", e);
}
if (kernel == null) {
// log.debug("Unable to find kernel in JNDI; using KernelRegistry instead");
kernel = KernelRegistry.getSingleKernel();
}
return kernel;
}
public static String[] deploy(PortletRequest actionRequest, File moduleFile, File planFile)
throws PortletException {
// TODO this is a duplicate of the code from
// org.apache.geronimo.console.configmanager.DeploymentPortlet.processAction()
// TODO need to eliminate this duplicate code
DeploymentFactoryManager dfm = DeploymentFactoryManager.getInstance();
String[] statusMsgs = new String[2];
try {
DeploymentManager mgr = dfm.getDeploymentManager("deployer:geronimo:inVM", null, null);
try {
if (mgr instanceof JMXDeploymentManager) {
((JMXDeploymentManager) mgr).setLogConfiguration(false, true);
}
Target[] targets = mgr.getTargets();
if (null == targets) {
throw new IllegalStateException("No target to distribute to");
}
targets = new Target[] { targets[0] };
ProgressObject progress = mgr.distribute(targets, moduleFile, planFile);
while (progress.getDeploymentStatus().isRunning()) {
Thread.sleep(100);
}
if (progress.getDeploymentStatus().isCompleted()) {
progress = mgr.start(progress.getResultTargetModuleIDs());
while (progress.getDeploymentStatus().isRunning()) {
Thread.sleep(100);
}
statusMsgs[0] = "infoMsg01";
} else {
statusMsgs[0] = "errorMsg02";
statusMsgs[1] = progress.getDeploymentStatus().getMessage();
}
} finally {
mgr.release();
}
} catch (Exception e) {
throw new PortletException(e);
}
return statusMsgs;
}
}<|fim▁end|>
| |
<|file_name|>test_checks.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from py.test import mark
from translate.filters import checks
from translate.lang import data
from translate.storage import po, xliff
def strprep(str1, str2, message=None):
    """Normalise the two strings and the optional message to normalized unicode."""
    norm = data.normalized_unicode
    return norm(str1), norm(str2), norm(message)
def passes(filterfunction, str1, str2):
"""returns whether the given strings pass on the given test, handling FilterFailures"""
str1, str2, no_message = strprep(str1, str2)
try:
filterresult = filterfunction(str1, str2)
except checks.FilterFailure, e:
filterresult = False
return filterresult
def fails(filterfunction, str1, str2, message=None):
"""returns whether the given strings fail on the given test, handling only FilterFailures"""
str1, str2, message = strprep(str1, str2, message)
try:
filterresult = filterfunction(str1, str2)
except checks.SeriousFilterFailure, e:
filterresult = True
except checks.FilterFailure, e:
if message:
exc_message = e.messages[0]
filterresult = exc_message != message
print exc_message.encode('utf-8')
else:
filterresult = False
return not filterresult
def fails_serious(filterfunction, str1, str2, message=None):
"""returns whether the given strings fail on the given test, handling only SeriousFilterFailures"""
str1, str2, message = strprep(str1, str2, message)
try:
filterresult = filterfunction(str1, str2)
except checks.SeriousFilterFailure, e:
if message:
exc_message = e.messages[0]
filterresult = exc_message != message
print exc_message.encode('utf-8')
else:
filterresult = False
return not filterresult
def test_defaults():
    """tests default setup and that checks aren't altered by other constructions"""
    stdchecker = checks.StandardChecker()
    assert stdchecker.config.varmatches == []
    # Constructing another checker type must not mutate StandardChecker defaults.
    mozillachecker = checks.MozillaChecker()
    stdchecker = checks.StandardChecker()
    assert stdchecker.config.varmatches == []
def test_construct():
    """tests that the checkers can be constructed"""
    # Smoke test: each checker class must construct without raising.
    stdchecker = checks.StandardChecker()
    mozillachecker = checks.MozillaChecker()
    ooochecker = checks.OpenOfficeChecker()
    gnomechecker = checks.GnomeChecker()
    kdechecker = checks.KdeChecker()
def test_accelerator_markers():
    """test that we have the correct accelerator marker for the various default configs"""
    stdchecker = checks.StandardChecker()
    assert stdchecker.config.accelmarkers == []
    mozillachecker = checks.MozillaChecker()
    assert mozillachecker.config.accelmarkers == ["&"]
    ooochecker = checks.OpenOfficeChecker()
    assert ooochecker.config.accelmarkers == ["~"]
    gnomechecker = checks.GnomeChecker()
    assert gnomechecker.config.accelmarkers == ["_"]
    kdechecker = checks.KdeChecker()
    assert kdechecker.config.accelmarkers == ["&"]
def test_messages():
    """test that our helpers can check for messages and that these error messages can contain Unicode"""
    stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'))
    assert fails(stdchecker.validchars, "Some unexpected characters", "©", "Invalid characters: '©' (\\u00a9)")
    stdchecker = checks.StandardChecker()
    assert fails_serious(stdchecker.escapes, r"A tab", r"'n Ṱab\t", r"""Escapes in original () don't match escapes in translation ('Ṱab\t')""")
def test_accelerators():
    """tests accelerators"""
    stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&"))
    assert passes(stdchecker.accelerators, "&File", "&Fayile")
    assert fails(stdchecker.accelerators, "&File", "Fayile")
    assert fails(stdchecker.accelerators, "File", "&Fayile")
    # "&&" is a literal ampersand, not an accelerator.
    assert passes(stdchecker.accelerators, "Mail && News", "Pos en Nuus")
    assert fails(stdchecker.accelerators, "Mail & News", "Pos en Nuus")
    assert passes(stdchecker.accelerators, "&Allow", u'&\ufeb2\ufee3\ufe8e\ufea3')
    assert fails(stdchecker.accelerators, "Open &File", "Vula& Ifayile")
    kdechecker = checks.KdeChecker()
    assert passes(kdechecker.accelerators, "&File", "&Fayile")
    assert fails(kdechecker.accelerators, "&File", "Fayile")
    assert fails(kdechecker.accelerators, "File", "&Fayile")
    gnomechecker = checks.GnomeChecker()
    assert passes(gnomechecker.accelerators, "_File", "_Fayile")
    assert fails(gnomechecker.accelerators, "_File", "Fayile")
    assert fails(gnomechecker.accelerators, "File", "_Fayile")
    assert fails(gnomechecker.accelerators, "_File", "_Fayil_e")
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.accelerators, "&File", "&Fayile")
    assert passes(mozillachecker.accelerators, "Warn me if this will disable any of my add&-ons", "&Waarsku my as dit enige van my byvoegings sal deaktiveer")
    # Mozilla accelerator problems are serious failures, not plain failures.
    assert fails_serious(mozillachecker.accelerators, "&File", "Fayile")
    assert fails_serious(mozillachecker.accelerators, "File", "&Fayile")
    assert passes(mozillachecker.accelerators, "Mail & News", "Pos en Nuus")
    assert fails_serious(mozillachecker.accelerators, "Mail & News", "Pos en &Nuus")
    assert fails_serious(mozillachecker.accelerators, "&File", "Fayile")
    ooochecker = checks.OpenOfficeChecker()
    assert passes(ooochecker.accelerators, "~File", "~Fayile")
    assert fails(ooochecker.accelerators, "~File", "Fayile")
    assert fails(ooochecker.accelerators, "File", "~Fayile")
    # We don't want an accelerator for letters with a diacritic
    assert fails(ooochecker.accelerators, "F~ile", "L~êer")
    # Bug 289: accept accented accelerator characters
    afchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&", targetlanguage="fi"))
    assert passes(afchecker.accelerators, "&Reload Frame", "P&äivitä kehys")
    # Problems:
    # Accelerator before variable - see test_acceleratedvariables
@mark.xfail(reason="Accelerated variables needs a better implementation")
def test_acceleratedvariables():
    """test for accelerated variables"""
    # FIXME: disabled since acceleratedvariables has been removed, but these checks are still needed
    mozillachecker = checks.MozillaChecker()
    assert fails(mozillachecker.acceleratedvariables, "%S &Options", "&%S Ikhetho")
    assert passes(mozillachecker.acceleratedvariables, "%S &Options", "%S &Ikhetho")
    ooochecker = checks.OpenOfficeChecker()
    assert fails(ooochecker.acceleratedvariables, "%PRODUCTNAME% ~Options", "~%PRODUCTNAME% Ikhetho")
    assert passes(ooochecker.acceleratedvariables, "%PRODUCTNAME% ~Options", "%PRODUCTNAME% ~Ikhetho")
def test_acronyms():
    """tests acronyms"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.acronyms, "An HTML file", "'n HTML leer")
    assert fails(stdchecker.acronyms, "An HTML file", "'n LMTH leer")
    assert passes(stdchecker.acronyms, "It is HTML.", "Dit is HTML.")
    # We don't mind if you add an acronym to correct bad capitalisation in the original
    assert passes(stdchecker.acronyms, "An html file", "'n HTML leer")
    # We shouldn't worry about acronyms that appear in a musttranslate file
    stdchecker = checks.StandardChecker(checks.CheckerConfig(musttranslatewords=["OK"]))
    assert passes(stdchecker.acronyms, "OK", "Kulungile")
    # Assert punctuation should not hide accronyms
    assert fails(stdchecker.acronyms, "Location (URL) not found", "Blah blah blah")
    # Test '-W' (bug 283)
    assert passes(stdchecker.acronyms, "%s: option `-W %s' is ambiguous", "%s: opsie '-W %s' is dubbelsinnig")
def test_blank():
    """tests blank"""
    stdchecker = checks.StandardChecker()
    # A whitespace-only translation must be flagged.
    assert fails(stdchecker.blank, "Save as", " ")
    assert fails(stdchecker.blank, "_: KDE comment\\n\nSimple string", " ")
def test_brackets():
    """tests brackets"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.brackets, "N number(s)", "N getal(le)")
    # Missing brackets of any kind in the translation must be flagged.
    assert fails(stdchecker.brackets, "For {sic} numbers", "Vier getalle")
    assert fails(stdchecker.brackets, "For }sic{ numbers", "Vier getalle")
    assert fails(stdchecker.brackets, "For [sic] numbers", "Vier getalle")
    assert fails(stdchecker.brackets, "For ]sic[ numbers", "Vier getalle")
    # Only the counts matter, not the ordering.
    assert passes(stdchecker.brackets, "{[(", "[({")
def test_compendiumconflicts():
    """tests compendiumconflicts"""
    stdchecker = checks.StandardChecker()
    # msgcat conflict markers (#-#-#-#-#) left in a translation must be flagged.
    assert fails(stdchecker.compendiumconflicts, "File not saved", r"""#-#-#-#-# file1.po #-#-#-#-#\n
Leer nie gestoor gestoor nie\n
#-#-#-#-# file1.po #-#-#-#-#\n
Leer nie gestoor""")
def test_doublequoting():
    """tests double quotes"""
    stdchecker = checks.StandardChecker()
    assert fails(stdchecker.doublequoting, "Hot plate", "\"Ipuleti\" elishisa")
    assert passes(stdchecker.doublequoting, "\"Hot\" plate", "\"Ipuleti\" elishisa")
    assert fails(stdchecker.doublequoting, "'Hot' plate", "\"Ipuleti\" elishisa")
    assert passes(stdchecker.doublequoting, "\\\"Hot\\\" plate", "\\\"Ipuleti\\\" elishisa")
    # We don't want the filter to complain about "untranslated" quotes in xml attributes
    frchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="fr"))
    assert passes(frchecker.doublequoting, "Click <a href=\"page.html\">", "Clique <a href=\"page.html\">")
    # French is expected to convert "..." to guillemets.
    assert fails(frchecker.doublequoting, "Do \"this\"", "Do \"this\"")
    assert passes(frchecker.doublequoting, "Do \"this\"", "Do « this »")
    assert fails(frchecker.doublequoting, "Do \"this\"", "Do « this » « this »")
    # This used to fail because we strip variables, and was left with an empty quotation that was not converted
    assert passes(frchecker.doublequoting, u"Copying `%s' to `%s'", u"Copie de « %s » vers « %s »")
    vichecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="vi"))
    assert passes(vichecker.doublequoting, 'Save "File"', u"Lưu « Tập tin »")
    # Had a small exception with such a case:
    eschecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="es"))
    assert passes(eschecker.doublequoting, "<![CDATA[ Enter the name of the Windows workgroup that this server should appear in. ]]>",
                  "<![CDATA[ Ingrese el nombre del grupo de trabajo de Windows en el que debe aparecer este servidor. ]]>")
def test_doublespacing():
    """tests double spacing"""
    stdchecker = checks.StandardChecker()
    # NOTE(review): the double spaces these fixtures depend on appear to have
    # been collapsed to single spaces somewhere — verify against upstream.
    assert passes(stdchecker.doublespacing, "Sentence. Another sentence.", "Sin. 'n Ander sin.")
    assert passes(stdchecker.doublespacing, "Sentence. Another sentence.", "Sin. No double spacing.")
    assert fails(stdchecker.doublespacing, "Sentence. Another sentence.", "Sin. Missing the double space.")
    assert fails(stdchecker.doublespacing, "Sentence. Another sentence.", "Sin. Uneeded double space in translation.")
    ooochecker = checks.OpenOfficeChecker()
    assert passes(ooochecker.doublespacing, "Execute %PROGRAMNAME Calc", "Blah %PROGRAMNAME Calc")
    assert passes(ooochecker.doublespacing, "Execute %PROGRAMNAME Calc", "Blah % PROGRAMNAME Calc")
def test_doublewords():
    """tests doublewords"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.doublewords, "Save the rhino", "Save the rhino")
    assert fails(stdchecker.doublewords, "Save the rhino", "Save the the rhino")
    # Double variables are not an error
    stdchecker = checks.StandardChecker(checks.CheckerConfig(varmatches=[("%", 1)]))
    assert passes(stdchecker.doublewords, "%s %s installation", "tsenyo ya %s %s")
    # Double XML tags are not an error
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.doublewords, "Line one <br> <br> line two", "Lyn een <br> <br> lyn twee")
    # In some language certain double words are not errors
    st_checker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="st"))
    assert passes(st_checker.doublewords, "Color to draw the name of a message you sent.", "Mmala wa ho taka bitso la molaetsa oo o o rometseng.")
    assert passes(st_checker.doublewords, "Ten men", "Banna ba ba leshome")
    assert passes(st_checker.doublewords, "Give SARS the tax", "Lekgetho le le fe SARS")
def test_endpunc():
    """tests endpunc"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.endpunc, "Question?", "Correct?")
    assert fails(stdchecker.endpunc, " Question?", "Wrong ?")
    # Newlines must not mask end punctuation
    assert fails(stdchecker.endpunc, "Exit change recording mode?\n\n", "Phuma esimeni sekugucula kubhalisa.\n\n")
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.endpunc, "Upgrades an existing $ProductShortName$ installation.", "Ku antswisiwa ka ku nghenisiwa ka $ProductShortName$.")
    # Real examples
    assert passes(stdchecker.endpunc, "A nickname that identifies this publishing site (e.g.: 'MySite')", "Vito ro duvulela leri tirhisiwaka ku kuma sayiti leri ro kandziyisa (xik.: 'Sayiti ra Mina')")
    assert fails(stdchecker.endpunc, "Question", u"Wrong\u2026")
    # Making sure singlequotes don't confuse things
    assert passes(stdchecker.endpunc, "Pseudo-elements can't be negated '%1$S'.", "Pseudo-elemente kan nie '%1$S' ontken word nie.")
    # Language-specific end punctuation must be accepted.
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='km'))
    assert passes(stdchecker.endpunc, "In this new version, there are some minor conversion improvements on complex style in Openoffice.org Writer.", u"នៅក្នុងកំណែថ្មីនេះ មានការកែសម្រួលមួយចំនួនតូចទាក់ទងនឹងការបំលែងពុម្ពអក្សរខ្មែរ ក្នុងកម្មវិធីការិយាល័យ ស្លឹករឹត ដែលមានប្រើប្រាស់រចនាប័ទ្មស្មុគស្មាញច្រើន\u00a0។")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='zh'))
    assert passes(stdchecker.endpunc, "To activate your account, follow this link:\n", u"要啟用戶口,請瀏覽這個鏈結:\n")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='vi'))
    assert passes(stdchecker.endpunc, "Do you want to delete the XX dialog?", u"Bạn có muốn xoá hộp thoại XX không?")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='fr'))
    assert passes(stdchecker.endpunc, "Header:", u"En-tête :")
    assert passes(stdchecker.endpunc, "Header:", u"En-tête\u00a0:")
def test_endwhitespace():
    """tests endwhitespace"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.endwhitespace, "A setence.", "I'm correct.")
    assert passes(stdchecker.endwhitespace, "A setence. ", "I'm correct. ")
    assert fails(stdchecker.endwhitespace, "A setence. ", "'I'm incorrect.")
    assert passes(stdchecker.endwhitespace, "Problem with something: %s\n", "Probleem met iets: %s\n")
    zh_checker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='zh'))
    # This should pass since the space is not needed in Chinese
    assert passes(zh_checker.endwhitespace, "Init. Limit: ", "起始时间限制:")
def test_escapes():
    """tests escapes"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.escapes, r"""A sentence""", "I'm correct.")
    assert passes(stdchecker.escapes, "A file\n", "'n Leer\n")
    # Escape mismatches are serious failures, hence fails_serious below.
    assert fails_serious(stdchecker.escapes, r"blah. A file", r"bleah.\n'n leer")
    assert passes(stdchecker.escapes, r"A tab\t", r"'n Tab\t")
    assert fails_serious(stdchecker.escapes, r"A tab\t", r"'n Tab")
    assert passes(stdchecker.escapes, r"An escape escape \\", r"Escape escape \\")
    assert fails_serious(stdchecker.escapes, r"An escape escape \\", "Escape escape")
    assert passes(stdchecker.escapes, r"A double quote \"", r"Double quote \"")
    assert fails_serious(stdchecker.escapes, r"A double quote \"", "Double quote")
    # Escaped escapes
    assert passes(stdchecker.escapes, "An escaped newline \\n", "Escaped newline \\n")
    assert fails_serious(stdchecker.escapes, "An escaped newline \\n", "Escaped newline \n")
    # Real example
    ooochecker = checks.OpenOfficeChecker()
    assert passes(ooochecker.escapes, ",\t44\t;\t59\t:\t58\t{Tab}\t9\t{space}\t32", ",\t44\t;\t59\t:\t58\t{Tab}\t9\t{space}\t32")
def test_newlines():
    """tests newlines"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.newlines, "Nothing to see", "Niks te sien")
    assert passes(stdchecker.newlines, "Correct\n", "Korrek\n")
    assert passes(stdchecker.newlines, "Correct\r", "Korrek\r")
    assert passes(stdchecker.newlines, "Correct\r\n", "Korrek\r\n")
    # Any mismatch in leading/trailing newline style must be flagged.
    assert fails(stdchecker.newlines, "A file\n", "'n Leer")
    assert fails(stdchecker.newlines, "A file", "'n Leer\n")
    assert fails(stdchecker.newlines, "A file\r", "'n Leer")
    assert fails(stdchecker.newlines, "A file", "'n Leer\r")
    assert fails(stdchecker.newlines, "A file\n", "'n Leer\r\n")
    assert fails(stdchecker.newlines, "A file\r\n", "'n Leer\n")
    assert fails(stdchecker.newlines, "blah.\nA file", "bleah. 'n leer")
    # msgfmt errors
    assert fails(stdchecker.newlines, "One two\n", "Een\ntwee")
    assert fails(stdchecker.newlines, "\nOne two", "Een\ntwee")
    # Real example
    ooochecker = checks.OpenOfficeChecker()
    assert fails(ooochecker.newlines, "The arrowhead was modified without saving.\nWould you like to save the arrowhead now?", "Ṱhoho ya musevhe yo khwinifhadzwa hu si na u seiva.Ni khou ṱoda u seiva thoho ya musevhe zwino?")
def test_tabs():
    """tests tabs"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.tabs, "Nothing to see", "Niks te sien")
    assert passes(stdchecker.tabs, "Correct\t", "Korrek\t")
    assert passes(stdchecker.tabs, "Correct\tAA", "Korrek\tAA")
    # Missing or added tabs are serious failures.
    assert fails_serious(stdchecker.tabs, "A file\t", "'n Leer")
    assert fails_serious(stdchecker.tabs, "A file", "'n Leer\t")
    ooochecker = checks.OpenOfficeChecker()
    assert passes(ooochecker.tabs, ",\t44\t;\t59\t:\t58\t{Tab}\t9\t{space}\t32", ",\t44\t;\t59\t:\t58\t{Tab}\t9\t{space}\t32")
def test_filepaths():
    """tests filepaths"""
    stdchecker = checks.StandardChecker()
    # File system paths must be left untranslated.
    assert passes(stdchecker.filepaths, "%s to the file /etc/hosts on your system.", "%s na die leer /etc/hosts op jou systeem.")
    assert fails(stdchecker.filepaths, "%s to the file /etc/hosts on your system.", "%s na die leer /etc/gasheer op jou systeem.")
def test_kdecomments():
    """tests kdecomments"""
    stdchecker = checks.StandardChecker()
    # KDE-style "_:" context comments must never be copied into the translation.
    assert passes(stdchecker.kdecomments, r"""_: I am a comment\n
A string to translate""", "'n String om te vertaal")
    assert fails(stdchecker.kdecomments, r"""_: I am a comment\n
A string to translate""", r"""_: Ek is 'n commment\n
'n String om te vertaal""")
    assert fails(stdchecker.kdecomments, """_: I am a comment\\n\n""", """_: I am a comment\\n\n""")
def test_long():
    """tests long messages"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.long, "I am normal", "Ek is ook normaal")
    # A translation disproportionately longer than the source must be flagged.
    assert fails(stdchecker.long, "Short.", "Kort.......................................................................................")
    assert fails(stdchecker.long, "a", "bc")
def test_musttranslatewords():
    """tests stopwords"""
    stdchecker = checks.StandardChecker(checks.CheckerConfig(musttranslatewords=[]))
    assert passes(stdchecker.musttranslatewords, "This uses Mozilla of course", "hierdie gebruik le mozille natuurlik")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(musttranslatewords=["Mozilla"]))
    assert passes(stdchecker.musttranslatewords, "This uses Mozilla of course", "hierdie gebruik le mozille natuurlik")
    assert fails(stdchecker.musttranslatewords, "This uses Mozilla of course", "hierdie gebruik Mozilla natuurlik")
    assert passes(stdchecker.musttranslatewords, "This uses Mozilla. Don't you?", "hierdie gebruik le mozille soos jy")
    assert fails(stdchecker.musttranslatewords, "This uses Mozilla. Don't you?", "hierdie gebruik Mozilla soos jy")
    # should always pass if there are no stopwords in the original
    assert passes(stdchecker.musttranslatewords, "This uses something else. Don't you?", "hierdie gebruik Mozilla soos jy")
    # check that we can find words surrounded by punctuation
    assert passes(stdchecker.musttranslatewords, "Click 'Mozilla' button", "Kliek 'Motzille' knoppie")
    assert fails(stdchecker.musttranslatewords, "Click 'Mozilla' button", "Kliek 'Mozilla' knoppie")
    assert passes(stdchecker.musttranslatewords, 'Click "Mozilla" button', 'Kliek "Motzille" knoppie')
    assert fails(stdchecker.musttranslatewords, 'Click "Mozilla" button', 'Kliek "Mozilla" knoppie')
    assert fails(stdchecker.musttranslatewords, 'Click "Mozilla" button', u'Kliek «Mozilla» knoppie')
    assert passes(stdchecker.musttranslatewords, "Click (Mozilla) button", "Kliek (Motzille) knoppie")
    assert fails(stdchecker.musttranslatewords, "Click (Mozilla) button", "Kliek (Mozilla) knoppie")
    assert passes(stdchecker.musttranslatewords, "Click Mozilla!", "Kliek Motzille!")
    assert fails(stdchecker.musttranslatewords, "Click Mozilla!", "Kliek Mozilla!")
    ## We need to define more word separators to allow us to find those hidden untranslated items
    #assert fails(stdchecker.musttranslatewords, "Click OK", "Blah we-OK")
    # Don't get confused when variables are the same as a musttranslate word
    stdchecker = checks.StandardChecker(checks.CheckerConfig(varmatches=[("%", None), ], musttranslatewords=["OK"]))
    assert passes(stdchecker.musttranslatewords, "Click %OK to start", "Kliek %OK om te begin")
    # Unicode
    assert fails(stdchecker.musttranslatewords, "Click OK", u"Kiḽikani OK")
def test_notranslatewords():
    """tests stopwords"""
    stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=[]))
    assert passes(stdchecker.notranslatewords, "This uses Mozilla of course", "hierdie gebruik le mozille natuurlik")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Mozilla", "Opera"]))
    assert fails(stdchecker.notranslatewords, "This uses Mozilla of course", "hierdie gebruik le mozille natuurlik")
    assert passes(stdchecker.notranslatewords, "This uses Mozilla of course", "hierdie gebruik Mozilla natuurlik")
    assert fails(stdchecker.notranslatewords, "This uses Mozilla. Don't you?", "hierdie gebruik le mozille soos jy")
    assert passes(stdchecker.notranslatewords, "This uses Mozilla. Don't you?", "hierdie gebruik Mozilla soos jy")
    # should always pass if there are no stopwords in the original
    assert passes(stdchecker.notranslatewords, "This uses something else. Don't you?", "hierdie gebruik Mozilla soos jy")
    # Cope with commas
    assert passes(stdchecker.notranslatewords, "using Mozilla Task Manager", u"šomiša Selaola Mošomo sa Mozilla, gomme")
    # Find words even if they are embedded in punctuation
    assert fails(stdchecker.notranslatewords, "Click 'Mozilla' button", "Kliek 'Motzille' knoppie")
    assert passes(stdchecker.notranslatewords, "Click 'Mozilla' button", "Kliek 'Mozilla' knoppie")
    assert fails(stdchecker.notranslatewords, "Click Mozilla!", "Kliek Motzille!")
    assert passes(stdchecker.notranslatewords, "Click Mozilla!", "Kliek Mozilla!")
    assert fails(stdchecker.notranslatewords, "Searches (From Opera)", "adosako (kusukela ku- Ophera)")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Sun", "NeXT"]))
    assert fails(stdchecker.notranslatewords, "Sun/NeXT Audio", "Odio dza Ḓuvha/TeVHELAHO")
    assert passes(stdchecker.notranslatewords, "Sun/NeXT Audio", "Odio dza Sun/NeXT")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["sendmail"]))
    assert fails(stdchecker.notranslatewords, "because 'sendmail' could", "ngauri 'rumelameiḽi' a yo")
    assert passes(stdchecker.notranslatewords, "because 'sendmail' could", "ngauri 'sendmail' a yo")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Base"]))
    assert fails(stdchecker.notranslatewords, " - %PRODUCTNAME Base: Relation design", " - %PRODUCTNAME Sisekelo: Umsiko wekuhlobana")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Writer"]))
    assert fails(stdchecker.notranslatewords, "&[ProductName] Writer/Web", "&[ProductName] Umbhali/iWebhu")
    # Unicode - different decompositions
    stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=[u"\u1e3cike"]))
    assert passes(stdchecker.notranslatewords, u"You \u1e3cike me", u"Ek \u004c\u032dike jou")
def test_numbers():
    """test numbers"""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.numbers, "Netscape 4 was not as good as Netscape 7.", "Netscape 4 was nie so goed soos Netscape 7 nie.")
    # Check for correct detection of degree. Also check that we aren't getting confused with 1 and 2 byte UTF-8 characters
    assert fails(stdchecker.numbers, "180° turn", "180 turn")
    assert passes(stdchecker.numbers, "180° turn", "180° turn")
    assert fails(stdchecker.numbers, "180° turn", "360 turn")
    assert fails(stdchecker.numbers, "180° turn", "360° turn")
    assert passes(stdchecker.numbers, "180~ turn", "180 turn")
    assert passes(stdchecker.numbers, "180¶ turn", "180 turn")
    # Numbers with multiple decimal points
    assert passes(stdchecker.numbers, "12.34.56", "12.34.56")
    assert fails(stdchecker.numbers, "12.34.56", "98.76.54")
    # Currency
    # FIXME we should probably be able to handle currency checking with locale inteligence
    assert passes(stdchecker.numbers, "R57.60", "R57.60")
    # FIXME - again locale intelligence should allow us to use other decimal seperators
    assert fails(stdchecker.numbers, "R57.60", "R57,60")
    assert fails(stdchecker.numbers, "1,000.00", "1 000,00")
    # You should be able to reorder numbers
    assert passes(stdchecker.numbers, "40-bit RC2 encryption with RSA and an MD5", "Umbhalo ocashile i-RC2 onamabhithi angu-40 one-RSA ne-MD5")
    # Don't fail the numbers check if the entry is a dialogsize entry
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.numbers, 'width: 12em;', 'width: 20em;')
def test_options():
    """tests command line options e.g. --option"""
    # The checker construction was missing from this function (the local
    # `stdchecker` was used without being defined, raising NameError);
    # a plain StandardChecker matches every other test in this module.
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.options, "--help", "--help")
    assert fails(stdchecker.options, "--help", "--hulp")
    # The option value (after '=') must be translated, the option name must not.
    assert fails(stdchecker.options, "--input=FILE", "--input=FILE")
    assert passes(stdchecker.options, "--input=FILE", "--input=LÊER")
    assert fails(stdchecker.options, "--input=FILE", "--tovoer=LÊER")
    # We don't want just any '--' to trigger this test - the error will be confusing
    assert passes(stdchecker.options, "Hello! -- Hi", "Hallo! — Haai")
    assert passes(stdchecker.options, "--blank--", "--vide--")
def test_printf():
    """Test printf-style variables: %s/%d style, positional %1$s reordering,
    and Python named %(name)s placeholders must survive translation intact."""
    # This should really be a subset of the variable checks
    # Ideally we should be able to adapt based on #, directives also
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.printf, "I am %s", "Ek is %s")
    assert fails(stdchecker.printf, "I am %s", "Ek is %d")
    assert passes(stdchecker.printf, "I am %#100.50hhf", "Ek is %#100.50hhf")
    assert fails(stdchecker.printf, "I am %#100s", "Ek is %10s")
    assert fails(stdchecker.printf, "... for user %.100s on %.100s:", "... lomuntu osebenzisa i-%. I-100s e-100s:")
    assert passes(stdchecker.printf, "%dMB", "%d MG")
    # Reordering: positional (%1$s) forms may rearrange the specifiers,
    # as long as each one maps back to a matching specifier in the source
    assert passes(stdchecker.printf, "String %s and number %d", "String %1$s en nommer %2$d")
    assert passes(stdchecker.printf, "String %1$s and number %2$d", "String %1$s en nommer %2$d")
    assert passes(stdchecker.printf, "String %s and number %d", "Nommer %2$d and string %1$s")
    assert passes(stdchecker.printf, "String %s and real number %f and number %d", "String %1$s en nommer %3$d en reële getal %2$f")
    assert passes(stdchecker.printf, "String %1$s and real number %2$f and number %3$d", "String %1$s en nommer %3$d en reële getal %2$f")
    assert passes(stdchecker.printf, "Real number %2$f and string %1$s and number %3$d", "String %1$s en nommer %3$d en reële getal %2$f")
    assert fails(stdchecker.printf, "String %s and number %d", "Nommer %1$d and string %2$s")
    assert fails(stdchecker.printf, "String %s and real number %f and number %d", "String %1$s en nommer %3$d en reële getal %2$d")
    assert fails(stdchecker.printf, "String %s and real number %f and number %d", "String %1$s en nommer %3$d en reële getal %4$f")
    assert fails(stdchecker.printf, "String %s and real number %f and number %d", "String %2$s en nommer %3$d en reële getal %2$f")
    assert fails(stdchecker.printf, "Real number %2$f and string %1$s and number %3$d", "String %1$f en nommer %3$d en reële getal %2$f")
    # checking python format strings
    assert passes(stdchecker.printf, "String %(1)s and number %(2)d", "Nommer %(2)d en string %(1)s")
    assert passes(stdchecker.printf, "String %(str)s and number %(num)d", "Nommer %(num)d en string %(str)s")
    assert fails(stdchecker.printf, "String %(str)s and number %(num)d", "Nommer %(nommer)d en string %(str)s")
    assert fails(stdchecker.printf, "String %(str)s and number %(num)d", "Nommer %(num)d en string %s")
    # checking omitted plural format string placeholder %.0s
    stdchecker.hasplural = 1
    assert passes(stdchecker.printf, "%d plurals", "%.0s plural")
def test_puncspacing():
    """Test spacing after punctuation; French-style padded punctuation
    (no-break space inside « ») must be accepted for fr."""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.puncspacing, "One, two, three.", "Kunye, kubili, kuthathu.")
    assert passes(stdchecker.puncspacing, "One, two, three. ", "Kunye, kubili, kuthathu.")
    assert fails(stdchecker.puncspacing, "One, two, three. ", "Kunye, kubili,kuthathu.")
    assert passes(stdchecker.puncspacing, "One, two, three!?", "Kunye, kubili, kuthathu?")
    # Some languages have padded puntuation marks
    frchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="fr"))
    assert passes(frchecker.puncspacing, "Do \"this\"", "Do « this »")
    # U+00A0 (no-break space) padding is equally valid for French
    assert passes(frchecker.puncspacing, u"Do \"this\"", u"Do «\u00a0this\u00a0»")
    assert fails(frchecker.puncspacing, "Do \"this\"", "Do «this»")
def test_purepunc():
    """Test messages containing only punctuation: they must be copied
    unchanged, not 'translated' into different punctuation or text."""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.purepunc, ".", ".")
    assert passes(stdchecker.purepunc, "", "")
    assert fails(stdchecker.purepunc, ".", " ")
    assert fails(stdchecker.purepunc, "Find", "'")
    assert fails(stdchecker.purepunc, "'", "Find")
    # KDE-style context prefix ("...|2000") is stripped before comparison
    assert passes(stdchecker.purepunc, "year measurement template|2000", "2000")
def test_sentencecount():
    """Test that source and target contain the same number of sentences,
    without being fooled by abbreviations like 'i.e.'."""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.sentencecount, "One. Two. Three.", "Een. Twee. Drie.")
    assert passes(stdchecker.sentencecount, "One two three", "Een twee drie.")
    assert fails(stdchecker.sentencecount, "One. Two. Three.", "Een Twee. Drie.")
    assert passes(stdchecker.sentencecount, "Sentence with i.e. in it.", "Sin met d.w.s. in dit.")  # bug 178, description item 8
    # NOTE(review): expected to fail for Greek — presumably language-specific
    # sentence segmentation (second sentence starts lowercase); confirm in checks
    el_checker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='el'))
    assert fails(el_checker.sentencecount, "First sentence. Second sentence.", "Πρώτη πρόταση. δεύτερη πρόταση.")
def test_short():
    """Verify the short check: translations that are suspiciously brief
    relative to the source must be flagged."""
    checker = checks.StandardChecker()
    # Comparable lengths are fine
    assert passes(checker.short, "I am normal", "Ek is ook normaal")
    # Drastically shorter targets are flagged
    assert fails(checker.short, "I am a very long sentence", "Ek")
    assert fails(checker.short, "abcde", "c")
def test_singlequoting():
    """Test single-quote matching between source and target, allowing for
    apostrophes (it's, Afrikaans 'n) and language-specific quote styles."""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.singlequoting, "A 'Hot' plate", "Ipuleti 'elishisa' kunye")
    # FIXME this should pass but doesn't probably to do with our logic that got confused at the end of lines
    assert passes(stdchecker.singlequoting, "'Hot' plate", "Ipuleti 'elishisa'")
    # FIXME newlines also confuse our algorithm for single quotes
    assert passes(stdchecker.singlequoting, "File '%s'\n", "'%s' Faele\n")
    assert fails(stdchecker.singlequoting, "'Hot' plate", "Ipuleti \"elishisa\"")
    # Apostrophes are not quotes
    assert passes(stdchecker.singlequoting, "It's here.", "Dit is hier.")
    # Don't get confused by punctuation that touches a single quote
    assert passes(stdchecker.singlequoting, "File '%s'.", "'%s' Faele.")
    assert passes(stdchecker.singlequoting, "Blah 'format' blah.", "Blah blah 'sebopego'.")
    assert passes(stdchecker.singlequoting, "Blah 'format' blah!", "Blah blah 'sebopego'!")
    assert passes(stdchecker.singlequoting, "Blah 'format' blah?", "Blah blah 'sebopego'?")
    # Real examples
    assert passes(stdchecker.singlequoting, "A nickname that identifies this publishing site (e.g.: 'MySite')", "Vito ro duvulela leri tirhisiwaka ku kuma sayiti leri ro kandziyisa (xik.: 'Sayiti ra Mina')")
    assert passes(stdchecker.singlequoting, "isn't", "ayikho")
    assert passes(stdchecker.singlequoting, "Required (can't send message unless all recipients have certificates)", "Verlang (kan nie boodskappe versend tensy al die ontvangers sertifikate het nie)")
    # Afrikaans 'n
    assert passes(stdchecker.singlequoting, "Please enter a different site name.", "Tik 'n ander werfnaam in.")
    assert passes(stdchecker.singlequoting, "\"%name%\" already exists. Please enter a different site name.", "\"%name%\" bestaan reeds. Tik 'n ander werfnaam in.")
    # Check that accelerators don't mess with removing singlequotes
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.singlequoting, "&Don't import anything", "&Moenie enigiets invoer nie")
    ooochecker = checks.OpenOfficeChecker()
    assert passes(ooochecker.singlequoting, "~Don't import anything", "~Moenie enigiets invoer nie")
    # Vietnamese uses guillemets « » in place of English quotes
    vichecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="vi"))
    assert passes(vichecker.singlequoting, "Save 'File'", u"Lưu « Tập tin »")
    assert passes(vichecker.singlequoting, "Save `File'", u"Lưu « Tập tin »")
def test_simplecaps():
    """Test the simple capital-letter heuristic comparing capitalisation
    patterns between source and target."""
    # Simple caps is a very vague test so the checks here are mostly for obviously fixable problems
    # or for checking obviously correct situations that are triggering a failure.
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.simplecaps, "MB of disk space for the cache.", "MB yendzawo yediski etsala.")
    # We should squash 'I' in the source text as it messes with capital detection
    assert passes(stdchecker.simplecaps, "if you say I want", "as jy se ek wil")
    assert passes(stdchecker.simplecaps, "sentence. I want more.", "sin. Ek wil meer he.")
    assert passes(stdchecker.simplecaps, "Where are we? I can't see where we are going.", "Waar is ons? Ek kan nie sien waar ons gaan nie.")
    ## We should remove variables before checking
    stdchecker = checks.StandardChecker(checks.CheckerConfig(varmatches=[("%", 1)]))
    assert passes(stdchecker.simplecaps, "Could not load %s", "A swi koteki ku panga %S")
    assert passes(stdchecker.simplecaps, "The element \"%S\" is not recognized.", "Elemente \"%S\" a yi tiveki.")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(varmatches=[("&", ";")]))
    assert passes(stdchecker.simplecaps, "Determine how &brandShortName; connects to the Internet.", "Kuma &brandShortName; hlanganisa eka Internete.")
    ## If source is ALL CAPS then we should just check that target is also ALL CAPS
    assert passes(stdchecker.simplecaps, "COUPDAYS", "COUPMALANGA")
    # Just some that at times have failed but should always pass
    assert passes(stdchecker.simplecaps, "Create a query entering an SQL statement directly.", "Yakha sibuti singena SQL inkhomba yesitatimende.")
    ooochecker = checks.OpenOfficeChecker()
    assert passes(ooochecker.simplecaps, "SOLK (%PRODUCTNAME Link)", "SOLK (%PRODUCTNAME Thumanyo)")
    assert passes(ooochecker.simplecaps, "%STAROFFICE Image", "Tshifanyiso tsha %STAROFFICE")
    assert passes(stdchecker.simplecaps, "Flies, flies, everywhere! Ack!", u"Vlieë, oral vlieë! Jig!")
def test_spellcheck():
    """Test spell checking of target text, including accelerator filtering
    and the notranslatewords exemption."""
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="af"))
    assert passes(stdchecker.spellcheck, "Great trek", "Groot trek")
    assert fails(stdchecker.spellcheck, "Final deadline", "End of the road")
    # Bug 289: filters accelerators before spell checking
    stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&", targetlanguage="fi"))
    assert passes(stdchecker.spellcheck, "&Reload Frame", "P&äivitä kehys")
    # Without notranslatewords configured, a misspelled product name is flagged
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="af"))
    assert fails(stdchecker.spellcheck, "Mozilla is wonderful", "Mozillaaa is wonderlik")
    # We should pass the test if the "error" occurs in the English
    assert passes(stdchecker.spellcheck, "Mozilla is wonderful", "Mozilla is wonderlik")
    # With "Mozilla" declared as a notranslateword, it is exempt from spell checking
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="af", notranslatewords=["Mozilla"]))
    assert passes(stdchecker.spellcheck, "Mozilla is wonderful", "Mozilla is wonderlik")
def test_startcaps():
    """Test that source and target agree on starting capitalisation,
    including Unicode letters, accelerators, and the Afrikaans 'n article."""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.startcaps, "Find", "Vind")
    assert passes(stdchecker.startcaps, "find", "vind")
    assert fails(stdchecker.startcaps, "Find", "vind")
    assert fails(stdchecker.startcaps, "find", "Vind")
    assert passes(stdchecker.startcaps, "'", "'")
    assert passes(stdchecker.startcaps, "\\.,/?!`'\"[]{}()@#$%^&*_-;:<>Find", "\\.,/?!`'\"[]{}()@#$%^&*_-;:<>Vind")
    # With leading whitespace
    assert passes(stdchecker.startcaps, " Find", " Vind")
    assert passes(stdchecker.startcaps, " find", " vind")
    assert fails(stdchecker.startcaps, " Find", " vind")
    assert fails(stdchecker.startcaps, " find", " Vind")
    # Leading punctuation
    assert passes(stdchecker.startcaps, "'Find", "'Vind")
    assert passes(stdchecker.startcaps, "'find", "'vind")
    assert fails(stdchecker.startcaps, "'Find", "'vind")
    assert fails(stdchecker.startcaps, "'find", "'Vind")
    # Unicode
    assert passes(stdchecker.startcaps, "Find", u"Šind")
    assert passes(stdchecker.startcaps, "find", u"šind")
    assert fails(stdchecker.startcaps, "Find", u"šind")
    assert fails(stdchecker.startcaps, "find", u"Šind")
    # Unicode further down the Unicode tables
    assert passes(stdchecker.startcaps, "A text enclosed...", u"Ḽiṅwalwa ḽo katelwaho...")
    assert fails(stdchecker.startcaps, "A text enclosed...", u"ḽiṅwalwa ḽo katelwaho...")
    # Accelerators
    stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&"))
    assert passes(stdchecker.startcaps, "&Find", "Vi&nd")
    # Language specific stuff: Afrikaans capitalises the word after 'n
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='af'))
    assert passes(stdchecker.startcaps, "A cow", "'n Koei")
    assert passes(stdchecker.startcaps, "A list of ", "'n Lys van ")
    # should pass:
    #assert passes(stdchecker.startcaps, "A 1k file", u"'n 1k-lêer")
    assert passes(stdchecker.startcaps, "'Do it'", "'Doen dit'")
    assert fails(stdchecker.startcaps, "'Closer than'", "'nader as'")
    assert passes(stdchecker.startcaps, "List", "Lys")
    assert passes(stdchecker.startcaps, "a cow", "'n koei")
    assert fails(stdchecker.startcaps, "a cow", "'n Koei")
    assert passes(stdchecker.startcaps, "(A cow)", "('n Koei)")
    assert fails(stdchecker.startcaps, "(a cow)", "('n Koei)")
def test_startpunc():
    """Test that leading punctuation matches between source and target,
    with language-specific exceptions (Spanish ¿, Afrikaans 'n)."""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.startpunc, "<< Previous", "<< Correct")
    assert fails(stdchecker.startpunc, " << Previous", "Wrong")
    # U+2026 is an ellipsis; unexpected leading ellipsis in the target fails
    assert fails(stdchecker.startpunc, "Question", u"\u2026Wrong")
    assert passes(stdchecker.startpunc, "<fish>hello</fish> world", "world <fish>hello</fish>")
    # The inverted Spanish question mark should be accepted
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='es'))
    assert passes(stdchecker.startpunc, "Do you want to reload the file?", u"¿Quiere recargar el archivo?")
    # The Afrikaans indefinite article should be accepted
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='af'))
    assert passes(stdchecker.startpunc, "A human?", u"'n Mens?")
def test_startwhitespace():
    """Verify that leading whitespace in the source must be mirrored in
    the target."""
    checker = checks.StandardChecker()
    # No leading whitespace on either side: fine
    assert passes(checker.startwhitespace, "A setence.", "I'm correct.")
    # Source starts with a space but the target doesn't: flagged
    assert fails(checker.startwhitespace, " A setence.", "I'm incorrect.")
def test_unchanged():
    """Test detection of untranslated (identical) entries, with exemptions
    for numbers, variables, acronyms and notranslatewords."""
    stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&"))
    assert fails(stdchecker.unchanged, "Unchanged", "Unchanged")
    # Moving only the accelerator marker does not count as a translation
    assert fails(stdchecker.unchanged, "&Unchanged", "Un&changed")
    assert passes(stdchecker.unchanged, "Unchanged", "Changed")
    assert passes(stdchecker.unchanged, "1234", "1234")
    assert passes(stdchecker.unchanged, "2×2", "2×2")  # bug 178, description item 14
    assert passes(stdchecker.unchanged, "I", "I")
    assert passes(stdchecker.unchanged, "   ", "   ")  # bug 178, description item 5
    assert passes(stdchecker.unchanged, "???", "???")  # bug 178, description item 15
    assert passes(stdchecker.unchanged, "&ACRONYM", "&ACRONYM")  # bug 178, description item 7
    assert passes(stdchecker.unchanged, "F1", "F1")  # bug 178, description item 20
    assert fails(stdchecker.unchanged, "Two words", "Two words")
    #TODO: this still fails
#    assert passes(stdchecker.unchanged, "NOMINAL", "NOMİNAL")
    gnomechecker = checks.GnomeChecker()
    assert fails(gnomechecker.unchanged, "Entity references, such as &amp; and &#169;", "Entity references, such as &amp; and &#169;")
    # Variable only and variable plus punctuation messages should be ignored
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.unchanged, "$ProgramName$", "$ProgramName$")
    assert passes(mozillachecker.unchanged, "$file$ : $dir$", "$file$ : $dir$")  # bug 178, description item 13
    assert fails(mozillachecker.unchanged, "$file$ in $dir$", "$file$ in $dir$")
    assert passes(mozillachecker.unchanged, "&brandShortName;", "&brandShortName;")
    # Don't translate words should be ignored
    stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Mozilla"]))
    assert passes(stdchecker.unchanged, "Mozilla", "Mozilla")  # bug 178, description item 10
    # Don't fail unchanged if the entry is a dialogsize, quite plausible that you won't change it
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.unchanged, 'width: 12em;', 'width: 12em;')
def test_untranslated():
    """Verify that empty targets are reported as untranslated."""
    checker = checks.StandardChecker()
    # An empty target is untranslated
    assert fails(checker.untranslated, "I am untranslated", "")
    assert passes(checker.untranslated, "I am translated", "Ek is vertaal")
    # A target consisting of only a KDE comment must still count as untranslated
    assert fails(checker.untranslated, "_: KDE comment\\n\nI am untranslated", "_: KDE comment\\n\n")
def test_validchars():
    """Test the valid-characters check: when a character set is configured,
    the target may only use characters from that set."""
    stdchecker = checks.StandardChecker(checks.CheckerConfig())
    assert passes(stdchecker.validchars, "The check always passes if you don't specify chars", "Die toets sal altyd werk as jy nie karacters specifisier")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'))
    assert passes(stdchecker.validchars, "This sentence contains valid characters", "Hierdie sin bevat ware karakters")
    assert fails(stdchecker.validchars, "Some unexpected characters", "©®°±÷¼½¾")
    # A fully non-ASCII valid set (braille) also works
    stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars='⠁⠂⠃⠄⠅⠆⠇⠈⠉⠊⠋⠌⠍⠎⠏⠐⠑⠒⠓⠔⠕⠖⠗⠘⠙⠚⠛⠜⠝⠞⠟⠠⠡⠢⠣⠤⠥⠦⠧⠨⠩⠪⠫⠬⠭⠮⠯⠰'))
    assert passes(stdchecker.validchars, "Our target language is all non-ascii", "⠁⠂⠃⠄⠆⠇⠈⠉⠜⠝⠞⠟⠠⠡⠢⠣⠤⠥⠦⠧⠨⠩⠪⠫")
    assert fails(stdchecker.validchars, "Our target language is all non-ascii", "Some ascii⠁⠂⠃⠄⠆⠇⠈⠉⠜⠝⠞⠟⠠⠡⠢⠣⠤⠥⠦⠧⠨⠩⠪⠫")
    # Combining sequences and their precomposed equivalents are treated alike
    stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars=u'\u004c\u032d'))
    assert passes(stdchecker.validchars, "This sentence contains valid chars", u"\u004c\u032d")
    assert passes(stdchecker.validchars, "This sentence contains valid chars", u"\u1e3c")
    stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars=u'\u1e3c'))
    assert passes(stdchecker.validchars, "This sentence contains valid chars", u"\u1e3c")
    assert passes(stdchecker.validchars, "This sentence contains valid chars", u"\u004c\u032d")
def test_variables_kde():
    """Test that printf-style variables survive in KDE translations."""
    # KDE variables (%d, %s)
    kdechecker = checks.KdeChecker()
    assert passes(kdechecker.variables, "%d files of type %s saved.", "%d leers van %s tipe gestoor.")
    assert fails_serious(kdechecker.variables, "%d files of type %s saved.", "%s leers van %s tipe gestoor.")
def test_variables_gnome():
    """Test that printf-style and $(name) variables survive in GNOME translations."""
    # GNOME variables
    gnomechecker = checks.GnomeChecker()
    assert passes(gnomechecker.variables, "%d files of type %s saved.", "%d leers van %s tipe gestoor.")
    assert fails_serious(gnomechecker.variables, "%d files of type %s saved.", "%s leers van %s tipe gestoor.")
    assert passes(gnomechecker.variables, "Save $(file)", "Stoor $(file)")
    assert fails_serious(gnomechecker.variables, "Save $(file)", "Stoor $(leer)")
def test_variables_mozilla():
    """Test Mozilla variable styles: &entity;, %var%, $var$, $var, $_VAR,
    $(^Name) and printf-style specifiers must all survive translation."""
    # Mozilla variables
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.variables, "Use the &brandShortname; instance.", "Gebruik die &brandShortname; weergawe.")
    assert fails_serious(mozillachecker.variables, "Use the &brandShortname; instance.", "Gebruik die &brandKortnaam; weergawe.")
    assert passes(mozillachecker.variables, "Save %file%", "Stoor %file%")
    assert fails_serious(mozillachecker.variables, "Save %file%", "Stoor %leer%")
    assert passes(mozillachecker.variables, "Save $file$", "Stoor $file$")
    assert fails_serious(mozillachecker.variables, "Save $file$", "Stoor $leer$")
    assert passes(mozillachecker.variables, "%d files of type %s saved.", "%d leers van %s tipe gestoor.")
    assert fails_serious(mozillachecker.variables, "%d files of type %s saved.", "%s leers van %s tipe gestoor.")
    assert passes(mozillachecker.variables, "Save $file", "Stoor $file")
    assert fails_serious(mozillachecker.variables, "Save $file", "Stoor $leer")
    assert passes(mozillachecker.variables, "About $ProgramName$", "Oor $ProgramName$")
    assert fails_serious(mozillachecker.variables, "About $ProgramName$", "Oor $NaamVanProgam$")
    assert passes(mozillachecker.variables, "About $_CLICK", "Oor $_CLICK")
    assert fails_serious(mozillachecker.variables, "About $_CLICK", "Oor $_KLIK")
    assert passes(mozillachecker.variables, "About $_CLICK and more", "Oor $_CLICK en meer")
    assert fails_serious(mozillachecker.variables, "About $_CLICK and more", "Oor $_KLIK en meer")
    assert passes(mozillachecker.variables, "About $(^NameDA)", "Oor $(^NameDA)")
    assert fails_serious(mozillachecker.variables, "About $(^NameDA)", "Oor $(^NaamDA)")
    # Double variable problem
    assert fails_serious(mozillachecker.variables, "Create In &lt;&lt;", "Etsa ka Ho &lt;lt;")
    # Variables at the end of a sentence
    assert fails_serious(mozillachecker.variables, "...time you start &brandShortName;.", "...lekgetlo le latelang ha o qala &LebitsoKgutshwane la kgwebo;.")
    # Ensure that we can detect two variables of the same name with one faulty
    assert fails_serious(mozillachecker.variables, "&brandShortName; successfully downloaded and installed updates. You will have to restart &brandShortName; to complete the update.", "&brandShortName; ḽo dzhenisa na u longela khwinifhadzo zwavhuḓi. Ni ḓo tea u thoma hafhu &DzinaḼipfufhi ḽa pfungavhuṇe; u itela u fhedzisa khwinifha dzo.")
    # We must detect entities in their fullform, ie with fullstop in the middle.
    assert fails_serious(mozillachecker.variables, "Welcome to the &pluginWizard.title;", "Wamkelekile kwi&Sihloko Soncedo lwe-plugin;")
    # Variables that are missing in quotes should be detected
    assert fails_serious(mozillachecker.variables, "\"%S\" is an executable file.... Are you sure you want to launch \"%S\"?", ".... Uyaqiniseka ukuthi ufuna ukuqalisa I\"%S\"?")
    # False positive $ style variables
    assert passes(mozillachecker.variables, "for reporting $ProductShortName$ crash information", "okokubika ukwaziswa kokumosheka kwe-$ProductShortName$")
    # We shouldn't mask variables within variables.  This should highlight &brandShortName as missing and & as extra
    assert fails_serious(mozillachecker.variables, "&brandShortName;", "&brandShortName;")
def test_variables_openoffice():
    """Test OpenOffice.org variable styles: %var%, %1, $(var), $var$, ${var},
    #var#, #1, $[var], [var], $var and @VAR@ must all survive translation."""
    # OpenOffice.org variables
    ooochecker = checks.OpenOfficeChecker()
    assert passes(ooochecker.variables, "Use the &brandShortname; instance.", "Gebruik die &brandShortname; weergawe.")
    assert fails_serious(ooochecker.variables, "Use the &brandShortname; instance.", "Gebruik die &brandKortnaam; weergawe.")
    assert passes(ooochecker.variables, "Save %file%", "Stoor %file%")
    assert fails_serious(ooochecker.variables, "Save %file%", "Stoor %leer%")
    assert passes(ooochecker.variables, "Save %file", "Stoor %file")
    assert fails_serious(ooochecker.variables, "Save %file", "Stoor %leer")
    assert passes(ooochecker.variables, "Save %1", "Stoor %1")
    assert fails_serious(ooochecker.variables, "Save %1", "Stoor %2")
    assert passes(ooochecker.variables, "Save %", "Stoor %")
    assert fails_serious(ooochecker.variables, "Save %", "Stoor")
    assert passes(ooochecker.variables, "Save $(file)", "Stoor $(file)")
    assert fails_serious(ooochecker.variables, "Save $(file)", "Stoor $(leer)")
    assert passes(ooochecker.variables, "Save $file$", "Stoor $file$")
    assert fails_serious(ooochecker.variables, "Save $file$", "Stoor $leer$")
    assert passes(ooochecker.variables, "Save ${file}", "Stoor ${file}")
    assert fails_serious(ooochecker.variables, "Save ${file}", "Stoor ${leer}")
    assert passes(ooochecker.variables, "Save #file#", "Stoor #file#")
    assert fails_serious(ooochecker.variables, "Save #file#", "Stoor #leer#")
    assert passes(ooochecker.variables, "Save #1", "Stoor #1")
    assert fails_serious(ooochecker.variables, "Save #1", "Stoor #2")
    assert passes(ooochecker.variables, "Save #", "Stoor #")
    assert fails_serious(ooochecker.variables, "Save #", "Stoor")
    assert passes(ooochecker.variables, "Save ($file)", "Stoor ($file)")
    assert fails_serious(ooochecker.variables, "Save ($file)", "Stoor ($leer)")
    assert passes(ooochecker.variables, "Save $[file]", "Stoor $[file]")
    assert fails_serious(ooochecker.variables, "Save $[file]", "Stoor $[leer]")
    assert passes(ooochecker.variables, "Save [file]", "Stoor [file]")
    assert fails_serious(ooochecker.variables, "Save [file]", "Stoor [leer]")
    assert passes(ooochecker.variables, "Save $file", "Stoor $file")
    assert fails_serious(ooochecker.variables, "Save $file", "Stoor $leer")
    assert passes(ooochecker.variables, "Use @EXTENSION@", "Gebruik @EXTENSION@")
    assert fails_serious(ooochecker.variables, "Use @EXTENSUION@", "Gebruik @UITBRUIDING@")
    # Same variable name twice
    assert fails_serious(ooochecker.variables, r"""Start %PROGRAMNAME% as %PROGRAMNAME%""", "Begin %PROGRAMNAME%")
def test_variables_cclicense():
    """Test that @var@ placeholders survive in Creative Commons translations."""
    checker = checks.CCLicenseChecker()
    assert passes(checker.variables, "CC-GNU @license_code@.", "CC-GNU @license_code@.")
    assert fails_serious(checker.variables, "CC-GNU @license_code@.", "CC-GNU @lisensie_kode@.")
    assert passes(checker.variables, "Deed to the @license_name_full@", "Akte vir die @license_name_full@")
    assert fails_serious(checker.variables, "Deed to the @license_name_full@", "Akte vir die @volle_lisensie@")
    assert passes(checker.variables, "The @license_name_full@ is", "Die @license_name_full@ is")
    assert fails_serious(checker.variables, "The @license_name_full@ is", "Die @iiilicense_name_full@ is")
    assert fails_serious(checker.variables, "A @ccvar@", "'n @ccvertaaldeveranderlike@")
def test_xmltags():
    """Test XML/HTML tag matching: tag names and translatable-vs-fixed
    attributes must be preserved between source and target."""
    stdchecker = checks.StandardChecker()
    assert fails(stdchecker.xmltags, "Do it <b>now</b>", "Doen dit <v>nou</v>")
    assert passes(stdchecker.xmltags, "Do it <b>now</b>", "Doen dit <b>nou</b>")
    assert passes(stdchecker.xmltags, "Click <img src=\"img.jpg\">here</img>", "Klik <img src=\"img.jpg\">hier</img>")
    # src must not be translated...
    assert fails(stdchecker.xmltags, "Click <img src=\"image.jpg\">here</img>", "Klik <img src=\"prent.jpg\">hier</img>")
    # ...but human-readable attributes such as alt and title may be
    assert passes(stdchecker.xmltags, "Click <img src=\"img.jpg\" alt=\"picture\">here</img>", "Klik <img src=\"img.jpg\" alt=\"prentjie\">hier</img>")
    assert passes(stdchecker.xmltags, "Click <a title=\"tip\">here</a>", "Klik <a title=\"wenk\">hier</a>")
    assert passes(stdchecker.xmltags, "Click <div title=\"tip\">here</div>", "Klik <div title=\"wenk\">hier</div>")
    assert passes(stdchecker.xmltags, "Start with the <start> tag", "Begin met die <begin>")
    assert fails(stdchecker.xmltags, "Click <a href=\"page.html\">", "Klik <a hverw=\"page.html\">")
    # Language/direction attributes are expected to change
    assert passes(stdchecker.xmltags, "Click <a xml-lang=\"en\" href=\"page.html\">", "Klik <a xml-lang=\"af\" href=\"page.html\">")
    assert passes(stdchecker.xmltags, "Click <div lang=\"en\" dir=\"ltr\">", "Klik <div lang=\"ar\" dir=\"rtl\">")
    assert fails(stdchecker.xmltags, "Click <a href=\"page.html\" target=\"koei\">", "Klik <a href=\"page.html\">")
    assert fails(stdchecker.xmltags, "<b>Current Translation</b>", "<b>Traducción Actual:<b>")
    assert passes(stdchecker.xmltags, "<Error>", "<Fout>")
    assert fails(stdchecker.xmltags, "%d/%d translated\n(%d blank, %d fuzzy)", "<br>%d/%d μεταφρασμένα\n<br>(%d κενά, %d ασαφή)")
    assert fails(stdchecker.xmltags, '(and <a href="http://www.schoolforge.net/education-software" class="external">other open source software</a>)', '(en <a href="http://www.schoolforge.net/education-software" class="external">ander Vry Sagteware</a)')
    assert fails(stdchecker.xmltags, 'Because Tux Paint (and <a href="http://www.schoolforge.net/education-software" class="external">other open source software</a>) is free of cost and not limited in any way, a school can use it <i>today</i>, without waiting for procurement or a budget!', 'Omdat Tux Paint (en <a href="http://www.schoolforge.net/education-software" class="external">ander Vry Sagteware</a)gratis is en nie beperk is op enige manier nie, kan \'n skool dit vandag</i> gebruik sonder om te wag vir goedkeuring of \'n begroting!')
    assert fails(stdchecker.xmltags, "test <br />", "test <br>")
    assert fails(stdchecker.xmltags, "test <img src='foo.jpg'/ >", "test <img src='foo.jpg' >")
    frchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="fr"))
    assert fails(frchecker.xmltags, "Click <a href=\"page.html\">", "Klik <a href=« page.html »>")
def test_ooxmltags():
    """Test XML tag quality in OpenOffice.org translations, mirroring the
    gsicheck rules for which attributes may change or be dropped."""
    ooochecker = checks.OpenOfficeChecker()
    #some attributes can be changed or removed
    assert fails(ooochecker.xmltags, "<img src=\"a.jpg\" width=\"400\">", "<img src=\"b.jpg\" width=\"500\">")
    assert passes(ooochecker.xmltags, "<img src=\"a.jpg\" width=\"400\">", "<img src=\"a.jpg\" width=\"500\">")
    assert passes(ooochecker.xmltags, "<img src=\"a.jpg\" width=\"400\">", "<img src=\"a.jpg\">")
    assert passes(ooochecker.xmltags, "<img src=\"a.jpg\">", "<img src=\"a.jpg\" width=\"400\">")
    assert passes(ooochecker.xmltags, "<alt xml-lang=\"ab\">text</alt>", "<alt>teks</alt>")
    assert passes(ooochecker.xmltags, "<ahelp visibility=\"visible\">bla</ahelp>", "<ahelp>blu</ahelp>")
    assert fails(ooochecker.xmltags, "<ahelp visibility=\"visible\">bla</ahelp>", "<ahelp visibility=\"invisible\">blu</ahelp>")
    assert fails(ooochecker.xmltags, "<ahelp visibility=\"invisible\">bla</ahelp>", "<ahelp>blu</ahelp>")
    #some attributes can be changed, but not removed
    assert passes(ooochecker.xmltags, "<link name=\"John\">", "<link name=\"Jan\">")
    assert fails(ooochecker.xmltags, "<link name=\"John\">", "<link naam=\"Jan\">")
    # Reported OOo error
    ## Bug 1910: closing tag misspelled as </emp> in the French translation
    assert fails(ooochecker.xmltags, u"""<variable id="FehlendesElement">In a database file window, click the <emph>Queries</emph> icon, then choose <emph>Edit - Edit</emph>. When referenced fields no longer exist, you see this dialog</variable>""", u"""<variable id="FehlendesElement">Dans une fenêtre de fichier de base de données, cliquez sur l'icône <emph>Requêtes</emph>, puis choisissez <emph>Éditer - Éditer</emp>. Lorsque les champs de référence n'existent plus, vous voyez cette boîte de dialogue</variable>""")
    assert fails(ooochecker.xmltags, "<variable> <emph></emph> <emph></emph> </variable>", "<variable> <emph></emph> <emph></emp> </variable>")
def test_functions():
    """Test that function names like rgb() are not translated."""
    stdchecker = checks.StandardChecker()
    assert fails(stdchecker.functions, "blah rgb() blah", "blee brg() blee")
    assert passes(stdchecker.functions, "blah rgb() blah", "blee rgb() blee")
    assert fails(stdchecker.functions, "percentage in rgb()", "phesenthe kha brg()")
    assert passes(stdchecker.functions, "percentage in rgb()", "phesenthe kha rgb()")
    assert fails(stdchecker.functions, "rgb() in percentage", "brg() kha phesenthe")
    assert passes(stdchecker.functions, "rgb() in percentage", "rgb() kha phesenthe")
    # Dotted names (string.rgb()) must be preserved whole
    assert fails(stdchecker.functions, "blah string.rgb() blah", "blee bleeb.rgb() blee")
    assert passes(stdchecker.functions, "blah string.rgb() blah", "blee string.rgb() blee")
    assert passes(stdchecker.functions, "or domain().", "domain() verwag.")
    assert passes(stdchecker.functions, "Expected url(), url-prefix(), or domain().", "url(), url-prefix() of domain() verwag.")
def test_emails():
    """Verify that e-mail addresses are carried over untranslated."""
    checker = checks.StandardChecker()
    # A 'translated' address is flagged; an identical one passes
    assert fails(checker.emails, "blah [email protected] blah", "blee [email protected] blee")
    assert passes(checker.emails, "blah [email protected] blah", "blee [email protected] blee")
def test_urls():
    """Verify that URLs are carried over untranslated."""
    checker = checks.StandardChecker()
    # A 'translated' URL is flagged; an identical one passes
    assert fails(checker.urls, "blah http://translate.org.za blah", "blee http://vertaal.org.za blee")
    assert passes(checker.urls, "blah http://translate.org.za blah", "blee http://translate.org.za blee")
def test_simpleplurals():
    """Test detection of English-style bracketed plurals like 'file(s)',
    which should not be copied into translations."""
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.simpleplurals, "computer(s)", "rekenaar(s)")
    assert fails(stdchecker.simpleplurals, "plural(s)", "meervoud(e)")
    assert fails(stdchecker.simpleplurals, "Ungroup Metafile(s)...", "Kuvhanganyululani Metafaela(dzi)...")
    # Test a language that doesn't use plurals
    stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='vi'))
    assert passes(stdchecker.simpleplurals, "computer(s)", u"Máy tính")
    assert fails(stdchecker.simpleplurals, "computer(s)", u"Máy tính(s)")
def test_nplurals():
    """Test that we can find the wrong number of plural forms. Note that this
    test uses a UnitChecker, not a translation checker, because plural
    counts live on the unit (af expects 2 forms, km expects 1)."""
    checker = checks.StandardUnitChecker()
    unit = po.pounit("")
    # With no target language configured, any number of forms is accepted
    unit.source = ["%d file", "%d files"]
    unit.target = [u"%d lêer", u"%d lêers"]
    assert checker.nplurals(unit)
    checker = checks.StandardUnitChecker(checks.CheckerConfig(targetlanguage='af'))
    unit.source = "%d files"
    unit.target = "%d lêer"
    assert checker.nplurals(unit)
    unit.source = ["%d file", "%d files"]
    unit.target = [u"%d lêer", u"%d lêers"]
    assert checker.nplurals(unit)
    # Afrikaans has 2 plural forms: 3 or 1 targets must be rejected
    unit.source = ["%d file", "%d files"]
    unit.target = [u"%d lêer", u"%d lêers", u"%d lêeeeers"]
    assert not checker.nplurals(unit)
    unit.source = ["%d file", "%d files"]
    unit.target = [u"%d lêer"]
    assert not checker.nplurals(unit)
    # Khmer has a single plural form
    checker = checks.StandardUnitChecker(checks.CheckerConfig(targetlanguage='km'))
    unit.source = "%d files"
    unit.target = "%d ឯកសារ"
    assert checker.nplurals(unit)
    unit.source = ["%d file", "%d files"]
    unit.target = [u"%d ឯកសារ"]
    assert checker.nplurals(unit)
    unit.source = ["%d file", "%d files"]
    unit.target = [u"%d ឯកសារ", u"%d lêers"]
    assert not checker.nplurals(unit)
def test_credits():
    """Check that translator-credit strings are flagged only by the right checker."""
    # Ordinary strings never trigger the credits check, for any checker.
    # The standard checker additionally ignores all credit markers.
    stdchecker = checks.StandardChecker()
    assert passes(stdchecker.credits, "File", "iFayile")
    assert passes(stdchecker.credits, "&File", "&Fayile")
    for marker in ("translator-credits", "Your names", "ROLES_OF_TRANSLATORS"):
        assert passes(stdchecker.credits, marker, "Ekke, ekke!")
    # The KDE checker flags KDE-style markers but not the GNOME one.
    kdechecker = checks.KdeChecker()
    assert passes(kdechecker.credits, "File", "iFayile")
    assert passes(kdechecker.credits, "&File", "&Fayile")
    assert passes(kdechecker.credits, "translator-credits", "Ekke, ekke!")
    assert fails(kdechecker.credits, "Your names", "Ekke, ekke!")
    assert fails(kdechecker.credits, "ROLES_OF_TRANSLATORS", "Ekke, ekke!")
    # The GNOME checker flags the GNOME marker but not the KDE ones.
    gnomechecker = checks.GnomeChecker()
    assert passes(gnomechecker.credits, "File", "iFayile")
    assert passes(gnomechecker.credits, "&File", "&Fayile")
    assert fails(gnomechecker.credits, "translator-credits", "Ekke, ekke!")
    assert passes(gnomechecker.credits, "Your names", "Ekke, ekke!")
    assert passes(gnomechecker.credits, "ROLES_OF_TRANSLATORS", "Ekke, ekke!")
def test_gconf():
    """test GNOME gconf errors"""
    gnomechecker = checks.GnomeChecker()
    # Let's cheat a bit and prepare the checker as the run_filters() method
    # would do by adding locations needed by the gconf test
    gnomechecker.locations = []
    # Without a schema-file location the gconf check does not apply, so even
    # a mangled setting name passes.
    assert passes(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_setting"')
    assert passes(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_steling"')
    # Old-style schema location: quoted setting names must be copied verbatim.
    gnomechecker.locations = ['file.schemas.in.h:24']
    assert passes(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_setting"')
    assert fails(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_steling"')
    # redo the same, but with the new location comment:
    gnomechecker.locations = ['file.gschema.xml.in.in.h:24']
    assert passes(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_setting"')
    assert fails(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_steling"')
def test_hassuggestion():
    """test that hassuggestion() works"""
    checker = checks.StandardUnitChecker()
    # A plain PO unit with no alternative translations passes the check.
    po_store = po.pofile()
    po_store.addsourceunit("koeie")
    assert checker.hassuggestion(po_store.units[-1])
    # An XLIFF unit carrying an <alt-trans> suggestion must fail the check.
    xliff_store = xliff.xlifffile.parsestring('''
<xliff version='1.2'
       xmlns='urn:oasis:names:tc:xliff:document:1.2'>
<file original='hello.txt' source-language='en' target-language='fr' datatype='plaintext'>
<body>
<trans-unit id='hi'>
<source>Hello world</source>
<target>Bonjour le monde</target>
<alt-trans>
<target xml:lang='es'>Hola mundo</target>
</alt-trans>
</trans-unit>
</body>
</file>
</xliff>
''')
    assert not checker.hassuggestion(xliff_store.units[0])
def test_dialogsizes():
    """test Mozilla dialog sizes"""
    mozillachecker = checks.MozillaChecker()
    assert passes(mozillachecker.dialogsizes, 'width: 12em;', 'width: 12em;')
    assert passes(mozillachecker.dialogsizes, 'width: 12em; height: 36em', 'width: 12em; height: 36em')
    # The dimension keyword itself must not be translated.
    assert fails(mozillachecker.dialogsizes, 'height: 12em;', 'hoogde: 12em;')
    # Changing the value or unit is allowed, but the unit must be a valid one.
    assert passes(mozillachecker.dialogsizes, 'height: 12em;', 'height: 24px;')
    assert fails(mozillachecker.dialogsizes, 'height: 12em;', 'height: 24xx;')
    # The decimal separator must stay a period.
    assert fails(mozillachecker.dialogsizes, 'height: 12.5em;', 'height: 12,5em;')
|
stdchecker = checks.StandardChecker()
|
<|file_name|>asm.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
# Translation of inline assembly.
*/
use lib;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::common::*;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::expr;
use middle::trans::type_of;
use middle::trans::type_::Type;
use std::c_str::ToCStr;
use std::string::String;
use syntax::ast;
// Take an inline assembly expression and splat it out via LLVM
pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
-> &'a Block<'a> {
let fcx = bcx.fcx;
let mut bcx = bcx;
let mut constraints = Vec::new();
let mut output_types = Vec::new();
let temp_scope = fcx.push_custom_cleanup_scope();
// Prepare the output operands
let outputs = ia.outputs.iter().map(|&(ref c, ref out)| {
constraints.push((*c).clone());
let out_datum = unpack_datum!(bcx, expr::trans(bcx, &**out));
output_types.push(type_of::type_of(bcx.ccx(), out_datum.ty));
out_datum.val
}).collect::<Vec<_>>();
// Now the input operands
let inputs = ia.inputs.iter().map(|&(ref c, ref input)| {
constraints.push((*c).clone());
let in_datum = unpack_datum!(bcx, expr::trans(bcx, &**input));
unpack_result!(bcx, {
callee::trans_arg_datum(bcx,
expr_ty(bcx, &**input),<|fim▁hole|> callee::DontAutorefArg)
})
}).collect::<Vec<_>>();
// no failure occurred preparing operands, no need to cleanup
fcx.pop_custom_cleanup_scope(temp_scope);
let mut constraints =
String::from_str(constraints.iter()
.map(|s| s.get().to_string())
.collect::<Vec<String>>()
.connect(",")
.as_slice());
let mut clobbers = get_clobbers();
if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
clobbers = format!("{},{}", ia.clobbers.get(), clobbers);
} else {
clobbers.push_str(ia.clobbers.get());
}
// Add the clobbers to our constraints list
if clobbers.len() != 0 && constraints.len() != 0 {
constraints.push_char(',');
constraints.push_str(clobbers.as_slice());
} else {
constraints.push_str(clobbers.as_slice());
}
debug!("Asm Constraints: {:?}", constraints.as_slice());
let num_outputs = outputs.len();
// Depending on how many outputs we have, the return type is different
let output_type = if num_outputs == 0 {
Type::void(bcx.ccx())
} else if num_outputs == 1 {
*output_types.get(0)
} else {
Type::struct_(bcx.ccx(), output_types.as_slice(), false)
};
let dialect = match ia.dialect {
ast::AsmAtt => lib::llvm::AD_ATT,
ast::AsmIntel => lib::llvm::AD_Intel
};
let r = ia.asm.get().with_c_str(|a| {
constraints.as_slice().with_c_str(|c| {
InlineAsmCall(bcx,
a,
c,
inputs.as_slice(),
output_type,
ia.volatile,
ia.alignstack,
dialect)
})
});
// Again, based on how many outputs we have
if num_outputs == 1 {
Store(bcx, r, *outputs.get(0));
} else {
for (i, o) in outputs.iter().enumerate() {
let v = ExtractValue(bcx, r, i);
Store(bcx, v, *o);
}
}
return bcx;
}
// Default per-arch clobbers
// Basically what clang does
//
// NOTE(review): stacked `#[cfg]` attributes behaved as OR in this era of
// Rust (any of the listed arches enables the item) — confirm against the
// toolchain this file targets.
#[cfg(target_arch = "arm")]
#[cfg(target_arch = "mips")]
#[cfg(target_arch = "mipsel")]
fn get_clobbers() -> String {
    // No default clobber list on these architectures.
    "".to_string()
}
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
fn get_clobbers() -> String {
    // x86/x86_64: clobber the direction flag, FPU status word and condition
    // flags by default, mirroring clang (see the comment above).
    "~{dirflag},~{fpsr},~{flags}".to_string()
}
|
in_datum,
cleanup::CustomScope(temp_scope),
|
<|file_name|>mem.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Memory profiling functions.
use ipc_channel::ipc::{self, IpcReceiver};
use ipc_channel::router::ROUTER;
use profile_traits::mem::ReportsChan;
use profile_traits::mem::{ProfilerChan, ProfilerMsg, ReportKind, Reporter, ReporterRequest};
use std::borrow::ToOwned;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::thread::sleep_ms;
use util::task::spawn_named;
pub struct Profiler {
/// The port through which messages are received.
pub port: IpcReceiver<ProfilerMsg>,
/// Registered memory reporters.
reporters: HashMap<String, Reporter>,
}
const JEMALLOC_HEAP_ALLOCATED_STR: &'static str = "jemalloc-heap-allocated";
const SYSTEM_HEAP_ALLOCATED_STR: &'static str = "system-heap-allocated";
impl Profiler {
    /// Spawns the memory-profiler thread and returns a channel for sending it
    /// `ProfilerMsg`s. If `period` (in seconds) is supplied, a companion timer
    /// thread posts a `Print` message at that interval.
    pub fn create(period: Option<f64>) -> ProfilerChan {
        let (chan, port) = ipc::channel().unwrap();

        // Create the timer thread if a period was provided.
        if let Some(period) = period {
            let period_ms = (period * 1000.) as u32;
            let chan = chan.clone();
            spawn_named("Memory profiler timer".to_owned(), move || {
                loop {
                    sleep_ms(period_ms);
                    if chan.send(ProfilerMsg::Print).is_err() {
                        // The profiler has gone away; stop the timer thread.
                        break;
                    }
                }
            });
        }

        // Always spawn the memory profiler. If there is no timer thread it won't receive regular
        // `Print` events, but it will still receive the other events.
        spawn_named("Memory profiler".to_owned(), move || {
            let mut mem_profiler = Profiler::new(port);
            mem_profiler.start();
        });

        let mem_profiler_chan = ProfilerChan(chan);

        // Register the system memory reporter, which will run on its own thread. It never needs to
        // be unregistered, because as long as the memory profiler is running the system memory
        // reporter can make measurements.
        let (system_reporter_sender, system_reporter_receiver) = ipc::channel().unwrap();
        ROUTER.add_route(system_reporter_receiver.to_opaque(), box |message| {
            let request: ReporterRequest = message.to().unwrap();
            system_reporter::collect_reports(request)
        });
        mem_profiler_chan.send(ProfilerMsg::RegisterReporter("system".to_owned(),
                                                             Reporter(system_reporter_sender)));

        mem_profiler_chan
    }
    /// Constructs a profiler that reads `ProfilerMsg`s from `port`, with no
    /// reporters registered yet.
    pub fn new(port: IpcReceiver<ProfilerMsg>) -> Profiler {
        Profiler {
            port: port,
            reporters: HashMap::new(),
        }
    }
pub fn start(&mut self) {
loop {
match self.port.recv() {
Ok(msg) => {
if !self.handle_msg(msg) {
break
}
}
_ => break
}
}
}
    /// Processes one message. Returns `true` to keep the message loop running,
    /// or `false` (on `Exit`) to stop it.
    fn handle_msg(&mut self, msg: ProfilerMsg) -> bool {
        match msg {
            ProfilerMsg::RegisterReporter(name, reporter) => {
                // Panic if it has already been registered.
                let name_clone = name.clone();
                match self.reporters.insert(name, reporter) {
                    None => true,
                    Some(_) => panic!(format!("RegisterReporter: '{}' name is already in use",
                                              name_clone)),
                }
            },
            ProfilerMsg::UnregisterReporter(name) => {
                // Panic if it hasn't previously been registered.
                match self.reporters.remove(&name) {
                    Some(_) => true,
                    None =>
                        panic!(format!("UnregisterReporter: '{}' name is unknown", &name)),
                }
            },
            ProfilerMsg::Print => {
                self.handle_print_msg();
                true
            },
            ProfilerMsg::Exit => false
        }
    }
    /// Gathers a report from every registered reporter and prints the
    /// aggregated results as sorted trees, deriving the special
    /// "heap-unclassified" entries from the recorded heap totals.
    fn handle_print_msg(&self) {
        println!("Begin memory reports");
        println!("|");

        // Collect reports from memory reporters.
        //
        // This serializes the report-gathering. It might be worth creating a new scoped thread for
        // each reporter once we have enough of them.
        //
        // If anything goes wrong with a reporter, we just skip it.
        //
        // We also track the total memory reported on the jemalloc heap and the system heap, and
        // use that to compute the special "jemalloc-heap-unclassified" and
        // "system-heap-unclassified" values.
        let mut forest = ReportsForest::new();
        let mut jemalloc_heap_reported_size = 0;
        let mut system_heap_reported_size = 0;
        let mut jemalloc_heap_allocated_size: Option<usize> = None;
        let mut system_heap_allocated_size: Option<usize> = None;
        for reporter in self.reporters.values() {
            let (chan, port) = ipc::channel().unwrap();
            reporter.collect_reports(ReportsChan(chan));
            if let Ok(mut reports) = port.recv() {
                for report in &mut reports {
                    // Add "explicit" to the start of the path, when appropriate.
                    match report.kind {
                        ReportKind::ExplicitJemallocHeapSize |
                        ReportKind::ExplicitSystemHeapSize |
                        ReportKind::ExplicitNonHeapSize |
                        ReportKind::ExplicitUnknownLocationSize =>
                            report.path.insert(0, String::from("explicit")),
                        ReportKind::NonExplicitSize => {},
                    }

                    // Update the reported fractions of the heaps, when appropriate.
                    match report.kind {
                        ReportKind::ExplicitJemallocHeapSize =>
                            jemalloc_heap_reported_size += report.size,
                        ReportKind::ExplicitSystemHeapSize =>
                            system_heap_reported_size += report.size,
                        _ => {},
                    }

                    // Record total size of the heaps, when we see them.
                    if report.path.len() == 1 {
                        if report.path[0] == JEMALLOC_HEAP_ALLOCATED_STR {
                            assert!(jemalloc_heap_allocated_size.is_none());
                            jemalloc_heap_allocated_size = Some(report.size);
                        } else if report.path[0] == SYSTEM_HEAP_ALLOCATED_STR {
                            assert!(system_heap_allocated_size.is_none());
                            system_heap_allocated_size = Some(report.size);
                        }
                    }

                    // Insert the report.
                    forest.insert(&report.path, report.size);
                }
            }
        }

        // Compute and insert the heap-unclassified values.
        if let Some(jemalloc_heap_allocated_size) = jemalloc_heap_allocated_size {
            forest.insert(&path!["explicit", "jemalloc-heap-unclassified"],
                          jemalloc_heap_allocated_size - jemalloc_heap_reported_size);
        }
        if let Some(system_heap_allocated_size) = system_heap_allocated_size {
            forest.insert(&path!["explicit", "system-heap-unclassified"],
                          system_heap_allocated_size - system_heap_reported_size);
        }

        forest.print();

        println!("|");
        println!("End memory reports");
        println!("");
    }
}
/// A collection of one or more reports with the same initial path segment. A ReportsTree
/// containing a single node is described as "degenerate".
struct ReportsTree {
/// For leaf nodes, this is the sum of the sizes of all reports that mapped to this location.
/// For interior nodes, this is the sum of the sizes of all its child nodes.
size: usize,
/// For leaf nodes, this is the count of all reports that mapped to this location.
/// For interor nodes, this is always zero.
count: u32,
/// The segment from the report path that maps to this node.
path_seg: String,
/// Child nodes.
children: Vec<ReportsTree>,
}
impl ReportsTree {
fn new(path_seg: String) -> ReportsTree {
ReportsTree {
size: 0,
count: 0,
path_seg: path_seg,
children: vec![]
}
}
// Searches the tree's children for a path_seg match, and returns the index if there is a
// match.
fn find_child(&self, path_seg: &String) -> Option<usize> {
for (i, child) in self.children.iter().enumerate() {
if child.path_seg == *path_seg {
return Some(i);
}
}
None
}
    // Insert the path and size into the tree, adding any nodes as necessary.
    fn insert(&mut self, path: &[String], size: usize) {
        let mut t: &mut ReportsTree = self;
        // Walk down the tree one path segment at a time, creating any
        // missing intermediate nodes along the way.
        for path_seg in path {
            let i = match t.find_child(&path_seg) {
                Some(i) => i,
                None => {
                    let new_t = ReportsTree::new(path_seg.clone());
                    t.children.push(new_t);
                    // The node we just pushed is the match.
                    t.children.len() - 1
                },
            };
            let tmp = t;    // this temporary is needed to satisfy the borrow checker
            t = &mut tmp.children[i];
        }

        // `t` is now the leaf for `path`; accumulate this report into it.
        t.size += size;
        t.count += 1;
    }
    // Fill in sizes for interior nodes and sort sub-trees accordingly. Should only be done once
    // all the reports have been inserted.
    //
    // Returns this node's (possibly derived) size so parents can accumulate it.
    fn compute_interior_node_sizes_and_sort(&mut self) -> usize {
        if !self.children.is_empty() {
            // Interior node. Derive its size from its children.
            if self.size != 0 {
                // This will occur if e.g. we have paths ["a", "b"] and ["a", "b", "c"].
                panic!("one report's path is a sub-path of another report's path");
            }
            for child in &mut self.children {
                self.size += child.compute_interior_node_sizes_and_sort();
            }

            // Now that child sizes have been computed, we can sort the children
            // (largest first).
            self.children.sort_by(|t1, t2| t2.size.cmp(&t1.size));
        }
        self.size
    }
    // Print this node and its children, indented by `depth` levels, with
    // sizes shown in MiB and leaf report counts in brackets.
    fn print(&self, depth: i32) {
        if !self.children.is_empty() {
            // Only leaf nodes accumulate a report count.
            assert_eq!(self.count, 0);
        }

        let mut indent_str = String::new();
        for _ in 0..depth {
            indent_str.push_str(" ");
        }

        let mebi = 1024f64 * 1024f64;
        let count_str = if self.count > 1 { format!(" [{}]", self.count) } else { "".to_owned() };
        println!("|{}{:8.2} MiB -- {}{}",
                 indent_str, (self.size as f64) / mebi, self.path_seg, count_str);

        for child in &self.children {
            child.print(depth + 1);
        }
    }
}
/// A collection of ReportsTrees. It represents the data from multiple memory reports in a form
/// that's good to print.
struct ReportsForest {
trees: HashMap<String, ReportsTree>,
}
impl ReportsForest {
fn new() -> ReportsForest {
ReportsForest {
trees: HashMap::new(),
}
}
    // Insert the path and size into the forest, adding any trees and nodes as necessary.
    //
    // Panics if `path` is empty (`split_first` on an empty slice).
    fn insert(&mut self, path: &[String], size: usize) {
        let (head, tail) = path.split_first().unwrap();
        // Get the right tree, creating it if necessary.
        if !self.trees.contains_key(head) {
            self.trees.insert(head.clone(), ReportsTree::new(head.clone()));
        }
        let t = self.trees.get_mut(head).unwrap();
        // Use tail because the 0th path segment was used to find the right tree in the forest.
        t.insert(tail, size);
    }
fn print(&mut self) {
// Fill in sizes of interior nodes, and recursively sort the sub-trees.
for (_, tree) in &mut self.trees {
tree.compute_interior_node_sizes_and_sort();
}
// Put the trees into a sorted vector. Primary sort: degenerate trees (those containing a
// single node) come after non-degenerate trees. Secondary sort: alphabetical order of the
// root node's path_seg.
let mut v = vec![];
for (_, tree) in &self.trees {
v.push(tree);
}
v.sort_by(|a, b| {
if a.children.is_empty() && !b.children.is_empty() {
Ordering::Greater
} else if !a.children.is_empty() && b.children.is_empty() {
Ordering::Less
} else {
a.path_seg.cmp(&b.path_seg)
}
});
// Print the forest.
for tree in &v {
tree.print(0);
// Print a blank line after non-degenerate trees.
if !tree.children.is_empty() {
println!("|");
}
}
}
}
//---------------------------------------------------------------------------
mod system_reporter {
use libc::{c_char, c_int, c_void, size_t};
use profile_traits::mem::{Report, ReportKind, ReporterRequest};
use std::borrow::ToOwned;
use std::ffi::CString;
use std::mem::size_of;
use std::ptr::null_mut;
use super::{JEMALLOC_HEAP_ALLOCATED_STR, SYSTEM_HEAP_ALLOCATED_STR};
#[cfg(target_os = "macos")]
use task_info::task_basic_info::{virtual_size, resident_size};
/// Collects global measurements from the OS and heap allocators.
pub fn collect_reports(request: ReporterRequest) {
let mut reports = vec![];
{
let mut report = |path, size| {
if let Some(size) = size {
reports.push(Report {
path: path,
kind: ReportKind::NonExplicitSize,
size: size,
});
}
};
// Virtual and physical memory usage, as reported by the OS.
report(path!["vsize"], vsize());
report(path!["resident"], resident());
// Memory segments, as reported by the OS.
for seg in resident_segments() {
report(path!["resident-according-to-smaps", seg.0], Some(seg.1));
}
// Total number of bytes allocated by the application on the system
// heap.
report(path![SYSTEM_HEAP_ALLOCATED_STR], system_heap_allocated());
// The descriptions of the following jemalloc measurements are taken
// directly from the jemalloc documentation.
// "Total number of bytes allocated by the application."
report(path![JEMALLOC_HEAP_ALLOCATED_STR], jemalloc_stat("stats.allocated"));
// "Total number of bytes in active pages allocated by the application.
// This is a multiple of the page size, and greater than or equal to<|fim▁hole|> report(path!["jemalloc-heap-active"], jemalloc_stat("stats.active"));
// "Total number of bytes in chunks mapped on behalf of the application.
// This is a multiple of the chunk size, and is at least as large as
// |stats.active|. This does not include inactive chunks."
report(path!["jemalloc-heap-mapped"], jemalloc_stat("stats.mapped"));
}
request.reports_channel.send(reports);
}
#[cfg(target_os = "linux")]
extern {
fn mallinfo() -> struct_mallinfo;
}
#[cfg(target_os = "linux")]
#[repr(C)]
pub struct struct_mallinfo {
arena: c_int,
ordblks: c_int,
smblks: c_int,
hblks: c_int,
hblkhd: c_int,
usmblks: c_int,
fsmblks: c_int,
uordblks: c_int,
fordblks: c_int,
keepcost: c_int,
}
    /// Total bytes allocated by the application on the system (glibc) heap,
    /// or `None` if the counters have overflowed.
    #[cfg(target_os = "linux")]
    fn system_heap_allocated() -> Option<usize> {
        let info: struct_mallinfo = unsafe { mallinfo() };
        // The documentation in the glibc man page makes it sound like |uordblks| would suffice,
        // but that only gets the small allocations that are put in the brk heap. We need |hblkhd|
        // as well to get the larger allocations that are mmapped.
        //
        // These fields are unfortunately |int| and so can overflow (becoming negative) if memory
        // usage gets high enough. So don't report anything in that case. In the non-overflow case
        // we cast the two values to usize before adding them to make sure the sum also doesn't
        // overflow.
        if info.hblkhd < 0 || info.uordblks < 0 {
            None
        } else {
            Some(info.hblkhd as usize + info.uordblks as usize)
        }
    }
#[cfg(not(target_os = "linux"))]
fn system_heap_allocated() -> Option<usize> {
None
}
extern {
fn je_mallctl(name: *const c_char, oldp: *mut c_void, oldlenp: *mut size_t,
newp: *mut c_void, newlen: size_t) -> c_int;
}
    /// Reads a single numeric statistic (e.g. "stats.allocated") from
    /// jemalloc via `mallctl`. Returns `None` if either `mallctl` call fails.
    fn jemalloc_stat(value_name: &str) -> Option<usize> {
        // Before we request the measurement of interest, we first send an "epoch"
        // request. Without that jemalloc gives cached statistics(!) which can be
        // highly inaccurate.
        let epoch_name = "epoch";
        let epoch_c_name = CString::new(epoch_name).unwrap();
        let mut epoch: u64 = 0;
        let epoch_ptr = &mut epoch as *mut _ as *mut c_void;
        let mut epoch_len = size_of::<u64>() as size_t;

        let value_c_name = CString::new(value_name).unwrap();
        let mut value: size_t = 0;
        let value_ptr = &mut value as *mut _ as *mut c_void;
        let mut value_len = size_of::<size_t>() as size_t;

        // Using the same values for the `old` and `new` parameters is enough
        // to get the statistics updated.
        let rv = unsafe {
            je_mallctl(epoch_c_name.as_ptr(), epoch_ptr, &mut epoch_len, epoch_ptr,
                       epoch_len)
        };
        if rv != 0 {
            return None;
        }

        let rv = unsafe {
            je_mallctl(value_c_name.as_ptr(), value_ptr, &mut value_len, null_mut(), 0)
        };
        if rv != 0 {
            return None;
        }

        Some(value as usize)
    }
// Like std::macros::try!, but for Option<>.
macro_rules! option_try(
($e:expr) => (match $e { Some(e) => e, None => return None })
);
#[cfg(target_os = "linux")]
fn page_size() -> usize {
unsafe {
::libc::sysconf(::libc::_SC_PAGESIZE) as usize
}
}
#[cfg(target_os = "linux")]
fn proc_self_statm_field(field: usize) -> Option<usize> {
use std::fs::File;
use std::io::Read;
let mut f = option_try!(File::open("/proc/self/statm").ok());
let mut contents = String::new();
option_try!(f.read_to_string(&mut contents).ok());
let s = option_try!(contents.split_whitespace().nth(field));
let npages = option_try!(s.parse::<usize>().ok());
Some(npages * page_size())
}
#[cfg(target_os = "linux")]
fn vsize() -> Option<usize> {
proc_self_statm_field(0)
}
#[cfg(target_os = "linux")]
fn resident() -> Option<usize> {
proc_self_statm_field(1)
}
#[cfg(target_os = "macos")]
fn vsize() -> Option<usize> {
virtual_size()
}
#[cfg(target_os = "macos")]
fn resident() -> Option<usize> {
resident_size()
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
fn vsize() -> Option<usize> {
None
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
fn resident() -> Option<usize> {
None
}
#[cfg(target_os = "linux")]
fn resident_segments() -> Vec<(String, usize)> {
use regex::Regex;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::fs::File;
use std::io::{BufReader, BufRead};
// The first line of an entry in /proc/<pid>/smaps looks just like an entry
// in /proc/<pid>/maps:
//
// address perms offset dev inode pathname
// 02366000-025d8000 rw-p 00000000 00:00 0 [heap]
//
// Each of the following lines contains a key and a value, separated
// by ": ", where the key does not contain either of those characters.
// For example:
//
// Rss: 132 kB
let f = match File::open("/proc/self/smaps") {
Ok(f) => BufReader::new(f),
Err(_) => return vec![],
};
let seg_re = Regex::new(
r"^[:xdigit:]+-[:xdigit:]+ (....) [:xdigit:]+ [:xdigit:]+:[:xdigit:]+ \d+ +(.*)").unwrap();
let rss_re = Regex::new(r"^Rss: +(\d+) kB").unwrap();
// We record each segment's resident size.
let mut seg_map: HashMap<String, usize> = HashMap::new();
#[derive(PartialEq)]
enum LookingFor { Segment, Rss }
let mut looking_for = LookingFor::Segment;
let mut curr_seg_name = String::new();
// Parse the file.
for line in f.lines() {
let line = match line {
Ok(line) => line,
Err(_) => continue,
};
if looking_for == LookingFor::Segment {
// Look for a segment info line.
let cap = match seg_re.captures(&line) {
Some(cap) => cap,
None => continue,
};
let perms = cap.at(1).unwrap();
let pathname = cap.at(2).unwrap();
// Construct the segment name from its pathname and permissions.
curr_seg_name.clear();
if pathname == "" || pathname.starts_with("[stack:") {
// Anonymous memory. Entries marked with "[stack:nnn]"
// look like thread stacks but they may include other
// anonymous mappings, so we can't trust them and just
// treat them as entirely anonymous.
curr_seg_name.push_str("anonymous");
} else {
curr_seg_name.push_str(pathname);
}
curr_seg_name.push_str(" (");
curr_seg_name.push_str(perms);
curr_seg_name.push_str(")");
looking_for = LookingFor::Rss;
} else {
// Look for an "Rss:" line.
let cap = match rss_re.captures(&line) {
Some(cap) => cap,
None => continue,
};
let rss = cap.at(1).unwrap().parse::<usize>().unwrap() * 1024;
if rss > 0 {
// Aggregate small segments into "other".
let seg_name = if rss < 512 * 1024 {
"other".to_owned()
} else {
curr_seg_name.clone()
};
match seg_map.entry(seg_name) {
Entry::Vacant(entry) => { entry.insert(rss); },
Entry::Occupied(mut entry) => *entry.get_mut() += rss,
}
}
looking_for = LookingFor::Segment;
}
}
// Note that the sum of all these segments' RSS values differs from the "resident"
// measurement obtained via /proc/<pid>/statm in resident(). It's unclear why this
// difference occurs; for some processes the measurements match, but for Servo they do not.
let segs: Vec<(String, usize)> = seg_map.into_iter().collect();
segs
}
#[cfg(not(target_os = "linux"))]
fn resident_segments() -> Vec<(String, usize)> {
vec![]
}
}<|fim▁end|>
|
// |stats.allocated|."
|
<|file_name|>GetOrderViewsMethod.java<|end_file_name|><|fim▁begin|>package com.notronix.lw.impl.method.orders;
import com.google.gson.Gson;
import com.notronix.lw.api.model.UserOrderView;
import com.notronix.lw.impl.method.AbstractLinnworksAPIMethod;
import java.util.Arrays;
<|fim▁hole|>{
@Override
public String getURI() {
return "Orders/GetOrderViews";
}
@Override
public List<UserOrderView> getResponse(Gson gson, String jsonPayload) {
return Arrays.asList(gson.fromJson(jsonPayload, UserOrderView[].class));
}
}<|fim▁end|>
|
import java.util.List;
public class GetOrderViewsMethod extends AbstractLinnworksAPIMethod<List<UserOrderView>>
|
<|file_name|>binprocess.rs<|end_file_name|><|fim▁begin|>use std::process::exit;
use std::io::ErrorKind;
use std::path::PathBuf;
use std::ffi::CString;
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
use shell::Shell;
use posix;
use opts;
use posix::ReadPipe;
use posix::WritePipe;
use posix::Pgid;
use exec::Arg;
use exec::Process;
use exec::ProcessInner;
use exec::Child;
pub struct BinProcess {
to_exec: PathBuf,
argv: Vec<*const i8>,
m_args: Vec<CString>,
inner: ProcessInner,
}
fn os2c(s: &OsStr) -> CString {
CString::new(s.as_bytes()).unwrap_or_else(|_e| CString::new("<string-with-nul>").unwrap())
}
fn pb2c(pb: PathBuf) -> CString {
os2c(pb.as_os_str())
}
/// Converts an owned `String` into a `CString`; a string containing an
/// interior NUL byte is replaced with a placeholder instead of panicking.
fn str2c(s: String) -> CString {
    match CString::new(s.as_bytes()) {
        Ok(c) => c,
        Err(_) => CString::new("<string-with-nul>").unwrap(),
    }
}
impl BinProcess {
    /// Creates a process that will exec the binary at `b`, using `cmd` as
    /// its argv[0].
    pub fn new(cmd: &String, b: PathBuf) -> Self {
        let cb = str2c(cmd.to_owned());
        BinProcess {
            // `argv` is NULL-terminated for execv(); `m_args` owns the
            // CStrings so the raw pointers in `argv` stay valid.
            argv: vec![cb.as_ptr(), 0 as *const _],
            to_exec: b,
            m_args: vec![cb],
            inner: ProcessInner::new(),
        }
    }
}
impl Process for BinProcess {
    /// Forks and execs the binary. In the parent, returns a handle to the
    /// child (or `None` if the fork failed); in the child, never returns —
    /// it either execs or exits with an error code.
    fn exec(self, _sh: &mut Shell, pgid: Option<Pgid>) -> Option<Child> {
        match posix::fork(opts::is_set("__tin_inter"), pgid) {
            Err(e) => {
                // oops. gotta bail.
                warn!("Could not fork child: {}", e);
                None
            }
            Ok(None) => {
                // Child process: apply redirections, then replace ourselves
                // with the target binary. Any failure must exit rather than
                // return into the shell.
                if let Err(e) = self.inner.redirect(false) {
                    warn!("Could not redirect: {}", e);
                    exit(e.raw_os_error().unwrap_or(7));
                }

                // execv only returns on failure.
                let e = posix::execv(pb2c(self.to_exec).as_ptr(), self.argv.as_ptr());
                if e.kind() == ErrorKind::NotFound {
                    // TODO: custom handler function
                    warn!("Command '{}' not found.", self.m_args[0].to_str().unwrap());
                } else {
                    warn!("Could not exec: {}", e);
                }
                exit(e.raw_os_error().unwrap_or(22)); // EINVAL
            }
            Ok(Some(ch_pid)) => Some(Child::new(ch_pid)),
        }
    }
// A BinProcess always has at least its first argument -- the executable
// to be run.
fn has_args(&self) -> bool {
true
}
    /// Appends an argument (or redirection) to the pending argv.
    fn push_arg(&mut self, new_arg: Arg) -> &Process {
        // downconvert args to Strings as we add them
        let mut v = Vec::new();
        match new_arg {
            Arg::Str(s) => {
                v.push(s);
            }
            Arg::Bl(lines) => {
                // A block argument contributes one argv entry per line.
                for l in lines {
                    v.push(l);
                }
            }
            Arg::Rd(rd) => {
                // Redirections are not argv entries; stash them on the
                // inner process state instead.
                self.inner.rds.push(rd);
            }
        }

        // TODO: this is not a perfect way to do this
        for new_arg in v {
            let arg = str2c(new_arg);
            // Overwrite the trailing NULL with the new arg, then re-append
            // the NULL terminator that execv() expects.
            self.argv[self.m_args.len()] = arg.as_ptr();
            self.argv.push(0 as *const _);

            // for memory correctness
            self.m_args.push(arg);
        }
        self
    }
fn stdin(&mut self, read: ReadPipe) -> &Process {
self.inner.ch_stdin = Some(read);
self<|fim▁hole|> self.inner.ch_stdout = Some(write);
self
}
}<|fim▁end|>
|
}
fn stdout(&mut self, write: WritePipe) -> &Process {
|
<|file_name|>regex.rs<|end_file_name|><|fim▁begin|>// Copyright 2022 Collabora, Ltd.
//
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT.
use std::{error::Error, fmt, str::FromStr};
use onig::{Regex, RegexOptions, Syntax};
use super::Matcher;
#[derive(Debug)]
pub struct ParseRegexTypeError(String);
impl Error for ParseRegexTypeError {}
impl fmt::Display for ParseRegexTypeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Invalid regex type: {} (must be one of {})",
self.0,
RegexType::VALUES
.iter()
.map(|t| format!("'{}'", t))
.collect::<Vec<_>>()
.join(", ")
)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RegexType {
Emacs,
Grep,
PosixBasic,
PosixExtended,
}
impl RegexType {
pub const VALUES: &'static [Self] = &[
Self::Emacs,
Self::Grep,
Self::PosixBasic,
Self::PosixExtended,
];
}
impl fmt::Display for RegexType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
RegexType::Emacs => write!(f, "emacs"),
RegexType::Grep => write!(f, "grep"),
RegexType::PosixBasic => write!(f, "posix-basic"),
RegexType::PosixExtended => write!(f, "posix-extended"),
}
}
}
impl FromStr for RegexType {
    type Err = ParseRegexTypeError;

    /// Parses a user-supplied regex-type name; an unknown name yields a
    /// `ParseRegexTypeError` carrying the offending string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "emacs" => Ok(Self::Emacs),
            "grep" => Ok(Self::Grep),
            "posix-basic" => Ok(Self::PosixBasic),
            "posix-extended" => Ok(Self::PosixExtended),
            _ => Err(ParseRegexTypeError(s.to_owned())),
        }
    }
}
impl Default for RegexType {
fn default() -> Self {
Self::Emacs
}
}
pub struct RegexMatcher {
regex: Regex,
}
impl RegexMatcher {
    /// Compiles `pattern` under the syntax selected by `regex_type`,
    /// optionally case-insensitively. Returns an error if the pattern is
    /// invalid for that syntax.
    pub fn new(
        regex_type: RegexType,
        pattern: &str,
        ignore_case: bool,
    ) -> Result<Self, Box<dyn Error>> {
        // Map our regex-type flag onto the corresponding oniguruma syntax.
        let syntax = match regex_type {
            RegexType::Emacs => Syntax::emacs(),
            RegexType::Grep => Syntax::grep(),
            RegexType::PosixBasic => Syntax::posix_basic(),
            RegexType::PosixExtended => Syntax::posix_extended(),
        };
        let regex = Regex::with_options(
            pattern,
            if ignore_case {
                RegexOptions::REGEX_OPTION_IGNORECASE
            } else {
                RegexOptions::REGEX_OPTION_NONE
            },
            syntax,
        )?;
        Ok(Self { regex })
    }
}
impl Matcher for RegexMatcher {
    /// Matches the regex against the entry's full path (lossily converted
    /// to UTF-8), not just its file name.
    fn matches(&self, file_info: &walkdir::DirEntry, _: &mut super::MatcherIO) -> bool {
        self.regex
            .is_match(file_info.path().to_string_lossy().as_ref())
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::find::matchers::tests::get_dir_entry_for;
use crate::find::matchers::Matcher;
use crate::find::tests::FakeDependencies;
const POSIX_BASIC_INTERVALS_RE: &str = r".*/ab\{1,3\}c";
const POSIX_EXTENDED_INTERVALS_RE: &str = r".*/ab{1,3}c";
const EMACS_AND_POSIX_EXTENDED_KLEENE_PLUS: &str = r".*/ab+c";
// Variants of fix_up_slashes that properly escape the forward slashes for
// being in a regex.
#[cfg(windows)]
fn fix_up_regex_slashes(re: &str) -> String {
re.replace("/", r"\\")
}
#[cfg(not(windows))]
fn fix_up_regex_slashes(re: &str) -> String {
re.to_owned()
}
#[test]
fn case_sensitive_matching() {
let abbbc = get_dir_entry_for("test_data/simple", "abbbc");
let matcher =
RegexMatcher::new(RegexType::Emacs, &fix_up_regex_slashes(".*/ab.BC"), false).unwrap();
let deps = FakeDependencies::new();
assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io()));
}
#[test]
fn case_insensitive_matching() {
    let abbbc = get_dir_entry_for("test_data/simple", "abbbc");
    // Same pattern as the case-sensitive test, but ignore_case = true makes
    // "BC" match "bc".
    let matcher =
        RegexMatcher::new(RegexType::Emacs, &fix_up_regex_slashes(".*/ab.BC"), true).unwrap();
    let deps = FakeDependencies::new();
    assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io()));
}
#[test]
fn emacs_regex() {
    // Emacs syntax is mostly the same as POSIX extended but with escaped
    // brace intervals.
    let abbbc = get_dir_entry_for("test_data/simple", "abbbc");
    // "ab+c" (Kleene plus) behaves identically in Emacs and POSIX extended.
    let matcher = RegexMatcher::new(
        RegexType::Emacs,
        &fix_up_regex_slashes(EMACS_AND_POSIX_EXTENDED_KLEENE_PLUS),
        true,
    )
    .unwrap();
    let deps = FakeDependencies::new();
    assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io()));
    // An unescaped "{1,3}" is not an interval in Emacs syntax, so the
    // POSIX-extended intervals pattern must not match here.
    let matcher = RegexMatcher::new(
        RegexType::Emacs,
        &fix_up_regex_slashes(POSIX_EXTENDED_INTERVALS_RE),
        true,
    )
    .unwrap();
    let deps = FakeDependencies::new();
    assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io()));
}
#[test]
fn posix_basic_regex() {
let abbbc = get_dir_entry_for("test_data/simple", "abbbc");
let matcher = RegexMatcher::new(
RegexType::PosixBasic,
&fix_up_regex_slashes(POSIX_BASIC_INTERVALS_RE),
true,
)
.unwrap();
let deps = FakeDependencies::new();
assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io()));
let matcher = RegexMatcher::new(
RegexType::PosixBasic,
&fix_up_regex_slashes(POSIX_EXTENDED_INTERVALS_RE),
true,
)
.unwrap();<|fim▁hole|> assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io()));
}
#[test]
fn posix_extended_regex() {
    let abbbc = get_dir_entry_for("test_data/simple", "abbbc");
    // Unescaped "{1,3}" is an interval in POSIX extended syntax and matches
    // the three 'b's of the fixture.
    let matcher = RegexMatcher::new(
        RegexType::PosixExtended,
        &fix_up_regex_slashes(POSIX_EXTENDED_INTERVALS_RE),
        true,
    )
    .unwrap();
    let deps = FakeDependencies::new();
    assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io()));
    // The escaped "\{1,3\}" form belongs to POSIX *basic* syntax and must
    // not be treated as an interval here.
    let matcher = RegexMatcher::new(
        RegexType::PosixExtended,
        &fix_up_regex_slashes(POSIX_BASIC_INTERVALS_RE),
        true,
    )
    .unwrap();
    let deps = FakeDependencies::new();
    assert!(!matcher.matches(&abbbc, &mut deps.new_matcher_io()));
}
}<|fim▁end|>
|
let deps = FakeDependencies::new();
|
<|file_name|>test_source_api.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import wavefront_api_client
from wavefront_api_client.api.source_api import SourceApi # noqa: E501
from wavefront_api_client.rest import ApiException<|fim▁hole|>
class TestSourceApi(unittest.TestCase):
"""SourceApi unit test stubs"""
def setUp(self):
self.api = wavefront_api_client.api.source_api.SourceApi() # noqa: E501
def tearDown(self):
pass
def test_add_source_tag(self):
"""Test case for add_source_tag
Add a tag to a specific source # noqa: E501
"""
pass
def test_create_source(self):
"""Test case for create_source
Create metadata (description or tags) for a specific source # noqa: E501
"""
pass
def test_delete_source(self):
"""Test case for delete_source
Delete metadata (description and tags) for a specific source # noqa: E501
"""
pass
def test_get_all_source(self):
"""Test case for get_all_source
Get all sources for a customer # noqa: E501
"""
pass
def test_get_source(self):
"""Test case for get_source
Get a specific source for a customer # noqa: E501
"""
pass
def test_get_source_tags(self):
"""Test case for get_source_tags
Get all tags associated with a specific source # noqa: E501
"""
pass
def test_remove_description(self):
"""Test case for remove_description
Remove description from a specific source # noqa: E501
"""
pass
def test_remove_source_tag(self):
"""Test case for remove_source_tag
Remove a tag from a specific source # noqa: E501
"""
pass
def test_set_description(self):
"""Test case for set_description
Set description associated with a specific source # noqa: E501
"""
pass
def test_set_source_tags(self):
"""Test case for set_source_tags
Set all tags associated with a specific source # noqa: E501
"""
pass
def test_update_source(self):
"""Test case for update_source
Update metadata (description or tags) for a specific source. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()<|fim▁end|>
| |
<|file_name|>Skin.js<|end_file_name|><|fim▁begin|>(function($, Translator) {
Skin = function Skin(code, skinData) {
    // Template engine: expands {#variable|filters#} and {=key|args=}
    // placeholders plus skif/skloop attributes against the data object.
    this.variableRegex = /\{#([\w \|]*)\#\}/;
    this.translateRegex = /\{=([\w \|,]*)\=\}/;
    this.code = code;
    this.extractData(skinData);
}
Skin.prototype.scanChildren = function(code, skinData) {
    // Expands one node: conditions first, then loops, then translation and
    // variable placeholders on the resulting markup.
    if(skinData)
        this.extractData(skinData);
    var variables = this.data.variableBlocks;
    var loops = this.data.loopBlocks;
    code = this.checkCondition(code, variables);
    code = this.checkLoop(code, loops, skinData);
    return this.executeVariableBlocks(this.executeTranslateBlocks(code, variables), variables);
}
/*
* Gets code, parses with jQuery, checks and returns code
*/
Skin.prototype.checkCondition = function(code, variables) {
    // Honours a skif="name" attribute on the root element: the element is
    // kept (with the attribute stripped) only when variables[name] exists
    // and is truthy; otherwise it is dropped entirely.
    // NOTE(review): hasAttr, outerHTML and $.toBoolean are custom jQuery
    // extensions defined elsewhere in the project -- confirm availability.
    var checkCondition = $(code);
    if(checkCondition.hasAttr('skif')) {
        var skif = checkCondition.attr('skif');
        if(variables[skif]) {
            if($.toBoolean(variables[skif])) {
                checkCondition.removeAttr('skif');
                return checkCondition.outerHTML();
            } else
                return '';
        } else
            return '';
    }
    // No condition attribute: pass the markup through untouched.
    return code;
}
/*
* Gets code, parses with jQuery, checks and returns code
*/
Skin.prototype.checkLoop = function(code, loops, skinData) {
var checkLoop = $(code);
var newNodeHTML = '';
var children = checkLoop.contents();
var $this = this;
if(checkLoop.hasAttr('skloop')) {
var skloop = checkLoop.attr('skloop');
if(loops[skloop]) {
checkLoop.removeAttr('skloop');
<|fim▁hole|> } else
return '';
} else {
$.each(children, function(key, child) {
if(child.nodeType === 1)
newNodeHTML += $this.scanChildren($(child).outerHTML(), skinData);
else if(child.nodeType === 3)
newNodeHTML += child.textContent;
});
}
if(newNodeHTML !== '')
code = newNodeHTML;
checkLoop.html(code);
return checkLoop.outerHTML();
}
Skin.prototype.extractData = function(skinData) {
    // Splits the skin data into loop blocks (arrays/objects, used by
    // skloop) and scalar variable blocks (used by {#...#} placeholders).
    var $this = this;
    this.data = {
        'loopBlocks':{},
        'variableBlocks':{}
    };
    $.each(skinData, function(key, value) {
        // typeof never returns 'array' in JavaScript (arrays report
        // 'object'), so the old switch's 'array' case was dead code; a
        // single 'object' test is behaviorally identical.
        if (typeof(value) === 'object')
            $this.data.loopBlocks[key] = value;
        else
            $this.data.variableBlocks[key] = value;
    });
}
Skin.prototype.executeVariableBlocks = function(code, variableBlocks) {
    // Substitutes a {#property|filter ...#} placeholder in `code` with its
    // value from variableBlocks, optionally piped through filter helpers.
    // NOTE(review): both `return` statements sit inside the for loop, so at
    // most the FIRST match is processed per call -- confirm this relies on
    // callers re-scanning, otherwise the loop is effectively an `if`.
    if (variableBlocks) {
        var matchVariable = code.matchAll(this.variableRegex);
        if (matchVariable) {
            for (var variables = 0; variables < matchVariable.length; ++variables) {
                // "property|func1 func2" -> name plus optional filter chain.
                var explodeVar = matchVariable[variables][1].split('|');
                var property = explodeVar[0].replace(' ', '');
                if (variableBlocks[property]) {
                    if(explodeVar[1]) {
                        var explodeFunc = explodeVar[1].split(' ');
                        for(var i = 0; i < explodeFunc.length; ++i) {
                            // Filters are Skin prototype helpers (e, df, l, ...).
                            // NOTE(review): the filtered value is written back
                            // into variableBlocks, so filters accumulate
                            // across calls -- confirm this is intended.
                            var $func = this[explodeFunc[i]];
                            if(explodeFunc[i] && typeof $func === 'function')
                                variableBlocks[property] = $func(variableBlocks[property]);
                        }
                    }
                    return code.replace(matchVariable[variables][0], variableBlocks[property]);
                } else
                    // Unknown properties are blanked out of the markup.
                    return code.replace(matchVariable[variables][0], '');
            }
        }
    }
    return code;
}
Skin.prototype.executeTranslateBlocks = function(code, variableBlocks) {
    // Resolves a {=key|arg1,arg2=} placeholder through the Translator.
    // Arguments are looked up in variableBlocks first; names that are not
    // variables are passed through as quoted literals.
    if(variableBlocks) {
        var matchTranslate = code.matchAll(this.translateRegex);
        if(matchTranslate) {
            for(var translates = 0; translates < matchTranslate.length; ++translates) {
                var explodeTranslate = matchTranslate[translates][1].replace(/ /g,'').split('|');
                if(explodeTranslate[1]) {
                    // Renamed from `arguments`, which shadowed the implicit
                    // arguments object.
                    var argNames = explodeTranslate[1].split(',');
                    var variables = [];
                    for(var i = 0; i < argNames.length; ++i)
                        variables[i] = variableBlocks[argNames[i]] ? variableBlocks[argNames[i]] : '"' + argNames[i] + '"';
                    // BUG FIX: was matchTranslate[0][translates] -- indices
                    // transposed. [translates][0] is the full placeholder
                    // text, matching executeVariableBlocks' convention.
                    return code.replace(matchTranslate[translates][0], Translator._(explodeTranslate[0], variables));
                } else
                    // BUG FIX: same index transposition as above.
                    return code.replace(matchTranslate[translates][0], Translator._(explodeTranslate[0]));
            }
        }
    }
    return code;
}
Skin.prototype.render = function() {
    // Wraps the template in a temporary container so its top-level nodes
    // can be iterated, then concatenates the expansion of each child.
    var domNode = $('<div id="skinTemporaryDiv">' + this.code + '</div>')
    var children = domNode.contents();
    var $this = this;
    var outHTML = '';
    $.each(children, function(key, child) {
        outHTML += $this.scanChildren($(child).outerHTML());
    });
    return outHTML;
}
/*
 * Skin functions & aliases
 */
Skin.prototype.e = Skin.prototype.escape = function(string) {
    // 'e' filter: HTML-escape a value (htmlentities is a custom String
    // helper defined elsewhere in the project).
    return string.htmlentities();
}
Skin.prototype.df = Skin.prototype.date = function(date) {
    // 'df' filter: format a value as dd/mm/yyyy.
    // NOTE(review): `var date` shadows the parameter; it works because the
    // parameter's value is read before the reassignment, but a distinct
    // local name would be clearer. `format` is a custom Date extension.
    var date = new Date(date);
    return date.format('dd/mm/yyyy')
}
Skin.prototype.l = Skin.prototype.link = function(string) {
    // 'l' filter: not implemented yet; returns undefined.
}
return Skin;
}($, Translator));<|fim▁end|>
|
for(var i = 0; i < loops[skloop].length; ++i) {
if(typeof(loops[skloop]) === 'array' || typeof(loops[skloop]) === 'object')
newNodeHTML += $this.scanChildren(checkLoop.outerHTML(), loops[skloop][i]);
}
|
<|file_name|>purejslider.js<|end_file_name|><|fim▁begin|><script type="text/javascript">
//set the interval temporary variable
var setIntrVal = null;
var intrVal = 3000;
//lets get the slider elements
var slides = document.getElementById('slides'); //get the <ul id="slides">
var slide = document.getElementsByClassName('slide'); //get the <li class="slide">
var active = document.getElementsByClassName('active'); //get the <li class="slide active">
//lets set z-index properties to the slides
var j = 99; //lets initialize a higher value, change this if you need to
for (var i = 0; i < slide.length; i++) {
slide[i].style.zIndex = j;
j--;
}
var ywtSlider = {
init: function (newIntrVal) {
//pass the new interval value into the intrVal variable
if(newIntrVal) intrVal = newIntrVal;
//start cycle on init
ywtSlider.cycle();
},
cycle: function() {
//check if cycle is already started then clear the cycle
//this will clear the current interval
if(setIntrVal) clearInterval(setIntrVal);
//ok lets start another cycle
setIntrVal = setInterval(function () {
ywtSlider.slide('next');
}, intrVal);
//console.log(interVal);
},
slide: function (dir) {
//lets get the slide index number so we can set this to the slide
var nodeList = Array.prototype.slice.call(slides.children);
var itemIndex = nodeList.indexOf(active[active.length - 1]);
if (dir == 'back') {
//check and run if the direction is back
//if the direction is back<|fim▁hole|> for (k = itemIndex; k < slide.length; k++) {
slide[k].className = 'slide';
}
} else if (dir == 'next') {
//check and run if the direction is next
//lets check first the position of the current item
if (itemIndex + 1 < slide.length - 1) {
//if the next item index is not the last item lets set the 'active' class
//to the next item
slide[itemIndex + 1].className += ' active';
} else {
//if the next item supposed to be the last item, lets remove the 'active' class
//from all slide elements
for (var k = 0; k < slide.length; k++) {
slide[k].className = 'slide';
}
}
}
//continue the cycle
ywtSlider.cycle();
}
};
window.onload = function() {
ywtSlider.init(5000);
}
</script><|fim▁end|>
|
//lets remove the class starting from the current item to the last
|
<|file_name|>PRESUBMIT.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for Chromium theme resources.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools, and see
http://www.chromium.org/developers/web-development-style-guide for the rules
we're checking against here.
"""
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run by depot_tools when a change is uploaded for review."""
  return _CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run by depot_tools when a change is committed."""
  return _CommonChecks(input_api, output_api)
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
resources = input_api.os_path.join(input_api.PresubmitLocalPath(),
'../../../ui/resources')
# List of paths with their associated scale factor. This is used to verify
# that the images modified in one are the correct scale of the other.
path_scales = [
[(100, 'default_100_percent/'), (200, 'default_200_percent/')],
]
<|fim▁hole|>
try:
sys.path = [resources] + old_path
from resource_check import resource_scale_factors
for paths in path_scales:
results.extend(resource_scale_factors.ResourceScaleFactors(
input_api, output_api, paths).RunChecks())
finally:
sys.path = old_path
return results<|fim▁end|>
|
import sys
old_path = sys.path
|
<|file_name|>camera.rs<|end_file_name|><|fim▁begin|>//! Camera structure and manipulation functions.
use gl;
use gl::types::*;<|fim▁hole|>use yaglw::gl_context::GLContext;
use yaglw::shader::Shader;
/// T representation as 3 distinct matrices, as well as a position + two rotations.
pub struct T {
    /// World-space eye position (kept in sync by `translate_to`).
    #[allow(missing_docs)]
    pub position: Point3<f32>,
    /// Accumulated yaw, in radians.
    #[allow(missing_docs)]
    lateral_rotation: f32,
    /// Accumulated pitch, in radians; `rotate_vertical` keeps it inside
    /// (-pi/2, pi/2).
    #[allow(missing_docs)]
    vertical_rotation: f32,
    // Projection matrix components
    #[allow(missing_docs)]
    pub translation: Matrix4<GLfloat>,
    #[allow(missing_docs)]
    pub rotation: Matrix4<GLfloat>,
    #[allow(missing_docs)]
    pub fov: Matrix4<GLfloat>,
}
/// This T sits at (0, 0, 0),
/// maps [-1, 1] in x horizontally,
/// maps [-1, 1] in y vertically,
/// and [0, -1] in z in depth.
pub fn unit() -> T {
T {
position : Point3::new(0.0, 0.0, 0.0),
lateral_rotation : 0.0,
vertical_rotation : 0.0,
translation : Matrix4::one(),
rotation : Matrix4::one(),
fov : Matrix4::one(),
}
}
impl T {
    /// Combined projection matrix: fov * rotation * translation.
    #[allow(missing_docs)]
    pub fn projection_matrix(&self) -> Matrix4<GLfloat> {
        self.fov * self.rotation * self.translation
    }
    /// Move the camera to `p`; the view translation is the inverse (-p).
    #[allow(missing_docs)]
    pub fn translate_to(&mut self, p: Point3<f32>) {
        self.position = p;
        self.translation = Matrix4::from_translation(-p.to_vec());
    }
    /// Rotate about a given vector, by `r` radians.
    fn rotate(&mut self, v: &Vector3<f32>, r: f32) {
        // View rotation uses the negated angle (the world moves opposite
        // the eye).
        let mat = Matrix3::from_axis_angle(*v, -cgmath::Rad(r));
        // Promote the 3x3 rotation to a 4x4 homogeneous matrix.
        let mat =
            Matrix4::new(
                mat.x.x, mat.x.y, mat.x.z, 0.0,
                mat.y.x, mat.y.y, mat.y.z, 0.0,
                mat.z.x, mat.z.y, mat.z.z, 0.0,
                0.0, 0.0, 0.0, 1.0,
            );
        // Order matters: the new rotation composes on the right.
        self.rotation = self.rotation * mat;
    }
    /// Rotate the camera around the y axis, by `r` radians. Positive is counterclockwise.
    pub fn rotate_lateral(&mut self, r: GLfloat) {
        self.lateral_rotation = self.lateral_rotation + r;
        self.rotate(&Vector3::new(0.0, 1.0, 0.0), r);
    }
    /// Changes the camera pitch by `r` radians. Positive is up.
    /// Angles that "flip around" (i.e. looking too far up or down)
    /// are silently rejected.
    pub fn rotate_vertical(&mut self, r: GLfloat) {
        let new_rotation = self.vertical_rotation + r;
        // Reject (rather than clamp) pitches outside (-pi/2, pi/2).
        if new_rotation < -PI / 2.0
        || new_rotation > PI / 2.0 {
            return
        }
        self.vertical_rotation = new_rotation;
        // The pitch axis is the camera's local x axis: world x rotated by
        // the current yaw about y.
        let axis =
            Matrix3::from_axis_angle(
                Vector3::new(0.0, 1.0, 0.0),
                cgmath::Rad(self.lateral_rotation),
            );
        let axis = axis * (&Vector3::new(1.0, 0.0, 0.0));
        self.rotate(&axis, r);
    }
}
/// Set a shader's projection matrix to match that of a camera.
pub fn set_camera(shader: &mut Shader, gl: &mut GLContext, c: &T) {
let projection_matrix = shader.get_uniform_location("projection_matrix");
shader.use_shader(gl);
unsafe {
let val = c.projection_matrix();
let ptr = &val as *const _ as *const _;
gl::UniformMatrix4fv(projection_matrix, 1, 0, ptr);
}
}<|fim▁end|>
|
use cgmath;
use cgmath::{Matrix3, Matrix4, One, Vector3, Point3, EuclideanSpace};
use std::f32::consts::PI;
|
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>use std::env;
fn main() {
let target = env::var("TARGET").unwrap_or("".to_string());
let tundra_dir = env::var("TUNDRA_OBJECTDIR").unwrap_or("".to_string());
let libs = env::var("TUNDRA_STATIC_LIBS").unwrap_or("".to_string());
let native_libs = libs.split(" ");
println!("cargo:rustc-link-search=native={}", tundra_dir);
for lib in native_libs {<|fim▁hole|> println!("cargo:rustc-link-lib=static={}", lib);
println!("cargo:rerun-if-changed={}", lib);
}
if target.contains("darwin") {
println!("cargo:rustc-flags=-l dylib=stdc++");
println!("cargo:rustc-flags=-l framework=Cocoa");
println!("cargo:rustc-flags=-l framework=Metal");
println!("cargo:rustc-flags=-l framework=OpenGL");
println!("cargo:rustc-flags=-l framework=QuartzCore");
} else if target.contains("windows") {
} else {
println!("cargo:rustc-flags=-l dylib=stdc++");
println!("cargo:rustc-flags=-l dylib=X11");
println!("cargo:rustc-flags=-l dylib=GL");
println!("cargo:rustc-flags=-l dylib=dl");
}
}<|fim▁end|>
| |
<|file_name|>line.ts<|end_file_name|><|fim▁begin|>import { Atom } from '../core/atom-class';
import { Box } from '../core/box';
import { VBox } from '../core/v-box';
import { Context } from '../core/context';
import { Style } from '../public/core';
export class LineAtom extends Atom {
private readonly position: 'overline' | 'underline';
constructor(
    command: string,
    body: Atom[],
    options: { position: 'overline' | 'underline'; style: Style }
) {
    // Both \overline and \underline share the 'line' atom type; which rule
    // is drawn is decided by options.position at render time.
    super('line', { command, style: options.style });
    // NOTE(review): presumably keeps navigation from stopping at this
    // atom's boundary -- confirm against the Atom base class.
    this.skipBoundary = true;
    this.body = body;
    this.position = options.position;
}
render(parentContext: Context): Box | null {
// TeXBook:443. Rule 9 and 10
const context = new Context(parentContext, this.style, 'cramp');
const inner = Atom.createBox(context, this.body);
if (!inner) return null;
const ruleWidth =
context.metrics.defaultRuleThickness / context.scalingFactor;
const line = new Box(null, { classes: this.position + '-line' });
line.height = ruleWidth;
line.maxFontSize = ruleWidth * 1.125 * context.scalingFactor;
let stack: Box;
if (this.position === 'overline') {
stack = new VBox({
shift: 0,
children: [{ box: inner }, 3 * ruleWidth, { box: line }, ruleWidth],
});
} else {
stack = new VBox({
top: inner.height,
children: [ruleWidth, { box: line }, 3 * ruleWidth, { box: inner }],
});
}
if (this.caret) stack.caret = this.caret;
return new Box(stack, {<|fim▁hole|> });
}
}<|fim▁end|>
|
classes: this.position,
type: 'mord',
|
<|file_name|>htmlbaseelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLBaseElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLBaseElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLBaseElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLBaseElement {
    // <base> adds no state of its own; it only wraps the generic element.
    pub htmlelement: HTMLElement
}
impl HTMLBaseElementDerived for EventTarget {
    /// True when this event target is the DOM node of a <base> element,
    /// as recorded in its type_id.
    fn is_htmlbaseelement(&self) -> bool {
        self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLBaseElementTypeId))
    }
}
impl HTMLBaseElement {
pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLBaseElement {
    // Delegates construction to the HTMLElement base, tagged with this
    // element's type id so dynamic casts (is_htmlbaseelement) work.
    HTMLBaseElement {
        htmlelement: HTMLElement::new_inherited(HTMLBaseElementTypeId, localName, document)
    }
}
pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLBaseElement> {
let element = HTMLBaseElement::new_inherited(localName, document);<|fim▁hole|>}
pub trait HTMLBaseElementMethods {
    // No WebIDL methods are implemented for <base> yet.
}
impl Reflectable for HTMLBaseElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
    // Reflection (the JS wrapper object) lives on the wrapped HTMLElement.
    self.htmlelement.reflector()
}
}<|fim▁end|>
|
Node::reflect_node(box element, document, HTMLBaseElementBinding::Wrap)
}
|
<|file_name|>atm_log.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Janice Cheng
import logging
from s13.Day5.conf import setting
logger = logging.getLogger('ATM-TRANSACTION-LOG')
logger.setLevel(logging.INFO) #configure a global logging level
console_handler = logging.StreamHandler() #print the log on the console
file_handler = logging.FileHandler("{}/log/access.log".format(setting.APP_DIR))
formatter = logging.Formatter('%(asctime)s - %(name)s - %(filename)s - %(levelname)s - %(message)s')
console_handler.setFormatter(formatter)
<|fim▁hole|>logger.addHandler(file_handler)<|fim▁end|>
|
file_handler.setFormatter(formatter)
logger.addHandler(console_handler)
|
<|file_name|>pyminer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
    """Minimal JSON-RPC 1.1 client for a bitcoind-compatible server (Python 2)."""
    OBJID = 1
    def __init__(self, host, port, username, password):
        # HTTP basic auth header, reused on every request.
        authpair = "%s:%s" % (username, password)
        self.authhdr = "Basic %s" % (base64.b64encode(authpair))
        # Persistent connection, 30 second timeout.
        self.conn = httplib.HTTPConnection(host, port, False, 30)
    def rpc(self, method, params=None):
        """POST one JSON-RPC call; returns the result, the error object, or None.

        NOTE(review): `self.OBJID += 1` creates a per-instance counter that
        shadows the class attribute -- confirm a shared id was not intended.
        """
        self.OBJID += 1
        obj = { 'version' : '1.1',
            'method' : method,
            'id' : self.OBJID }
        if params is None:
            obj['params'] = []
        else:
            obj['params'] = params
        self.conn.request('POST', '/', json.dumps(obj),
            { 'Authorization' : self.authhdr,
              'Content-type' : 'application/json' })
        resp = self.conn.getresponse()
        if resp is None:
            print "JSON-RPC: no response"
            return None
        body = resp.read()
        resp_obj = json.loads(body)
        if resp_obj is None:
            print "JSON-RPC: cannot JSON-decode body"
            return None
        # JSON-RPC errors are returned to the caller rather than raised.
        if 'error' in resp_obj and resp_obj['error'] != None:
            return resp_obj['error']
        if 'result' not in resp_obj:
            print "JSON-RPC: no result in object"
            return None
        return resp_obj['result']
    def getblockcount(self):
        """Convenience wrapper for the getblockcount RPC."""
        return self.rpc('getblockcount')
    def getwork(self, data=None):
        """Fetch work (data=None) or submit a solved block (data given)."""
        return self.rpc('getwork', data)
def uint32(x):
    """Truncate *x* to an unsigned 32-bit value.

    The old ``0xffffffffL`` literal was Python-2-only syntax; a plain int
    mask behaves identically on Python 2 (ints auto-promote to long) and
    is also valid Python 3.
    """
    return x & 0xffffffff
def bytereverse(x):
    """Swap the byte order of a 32-bit word (endianness flip)."""
    b0 = x << 24
    b1 = (x << 8) & 0x00ff0000
    b2 = (x >> 8) & 0x0000ff00
    b3 = x >> 24
    return uint32(b0 | b1 | b2 | b3)
def bufreverse(in_buf):
    """Byte-swap each aligned 32-bit word of a buffer in place order.

    NOTE(review): Python 2 only -- struct.unpack/pack operate on the str
    buffer and ''.join reassembles str chunks; under Python 3 this would
    need bytes handling.
    """
    out_words = []
    for i in range(0, len(in_buf), 4):
        word = struct.unpack('@I', in_buf[i:i+4])[0]
        out_words.append(struct.pack('@I', bytereverse(word)))
    return ''.join(out_words)
def wordreverse(in_buf):
out_words = []<|fim▁hole|> for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
class Miner:
    """One mining worker: fetches work over RPC, scans nonces, submits shares."""
    def __init__(self, id):
        self.id = id
        # Retuned every iterate() so one scan takes ~settings['scantime'] s.
        self.max_nonce = MAX_NONCE
    def work(self, datastr, targetstr):
        """Scan nonces over one work unit.

        Returns (hashes_done, nonce_bin) where nonce_bin is the packed
        winning nonce, or (hashes_done, None) when the scan is exhausted.
        """
        # decode work data hex string to binary
        static_data = datastr.decode('hex')
        static_data = bufreverse(static_data)
        # the first 76b of 80b do not change
        blk_hdr = static_data[:76]
        # decode 256-bit target value
        targetbin = targetstr.decode('hex')
        targetbin = targetbin[::-1]	# byte-swap and dword-swap
        targetbin_str = targetbin.encode('hex')
        target = long(targetbin_str, 16)
        # pre-hash first 76b of block header (shared by every nonce)
        static_hash = hashlib.sha256()
        static_hash.update(blk_hdr)
        for nonce in xrange(self.max_nonce):
            # encode 32-bit nonce value
            nonce_bin = struct.pack("<I", nonce)
            # hash final 4b, the nonce value
            hash1_o = static_hash.copy()
            hash1_o.update(nonce_bin)
            hash1 = hash1_o.digest()
            # sha256 hash of sha256 hash
            hash_o = hashlib.sha256()
            hash_o.update(hash1)
            hash = hash_o.digest()
            # quick test for winning solution: high 32 bits zero?
            if hash[-4:] != '\0\0\0\0':
                continue
            # convert binary hash to 256-bit Python long
            hash = bufreverse(hash)
            hash = wordreverse(hash)
            hash_str = hash.encode('hex')
            l = long(hash_str, 16)
            # proof-of-work test: hash < target
            if l < target:
                print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
                return (nonce + 1, nonce_bin)
            else:
                print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
                # return (nonce + 1, nonce_bin)
        return (nonce + 1, None)
    def submit_work(self, rpc, original_data, nonce_bin):
        """Splice the winning nonce into the work data and send it upstream."""
        nonce_bin = bufreverse(nonce_bin)
        nonce = nonce_bin.encode('hex')
        # Nonce occupies hex chars [152:160) of the 256-char data field.
        solution = original_data[:152] + nonce + original_data[160:256]
        param_arr = [ solution ]
        result = rpc.getwork(param_arr)
        print time.asctime(), "--> Upstream RPC result:", result
    def iterate(self, rpc):
        """One getwork/scan/submit cycle; retunes max_nonce to scantime."""
        work = rpc.getwork()
        if work is None:
            time.sleep(ERR_SLEEP)
            return
        if 'data' not in work or 'target' not in work:
            time.sleep(ERR_SLEEP)
            return
        time_start = time.time()
        (hashes_done, nonce_bin) = self.work(work['data'],
            work['target'])
        time_end = time.time()
        time_diff = time_end - time_start
        # Scale the next scan so it lasts ~scantime seconds, capped below
        # the 32-bit nonce space.
        self.max_nonce = long(
            (hashes_done * settings['scantime']) / time_diff)
        if self.max_nonce > 0xfffffffaL:
            self.max_nonce = 0xfffffffaL
        if settings['hashmeter']:
            print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
                self.id, hashes_done,
                (hashes_done / 1000.0) / time_diff)
        if nonce_bin is not None:
            self.submit_work(rpc, work['data'], nonce_bin)
    def loop(self):
        """Mine forever against the configured upstream RPC server."""
        rpc = BitcoinRPC(settings['host'], settings['port'],
            settings['rpcuser'], settings['rpcpass'])
        if rpc is None:
            return
        while True:
            self.iterate(rpc)
def miner_thread(id):
    """Process entry point: run a single Miner instance forever."""
    Miner(id).loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 45888
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])<|fim▁end|>
| |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from rest_framework import serializers
from rest_framework import pagination
<|fim▁hole|>from .models import Airport
class AirportSerializer(serializers.ModelSerializer):
    # NOTE(review): DRF reads read_only_fields from the inner Meta class; as
    # a serializer class attribute it is ignored -- confirm intent and move
    # it into Meta if these fields should actually be read-only.
    read_only_fields = ('id','name','city','country','country_code','iata','icao')
    class Meta:
        model = Airport
class PaginationAirportSerializer(pagination.PaginationSerializer):
class Meta:
object_serializer_class = AirportSerializer<|fim▁end|>
| |
<|file_name|>metrics.go<|end_file_name|><|fim▁begin|>package server
import (
"net/http"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
const (
metricNotificationsTotalName = "service_notifications_total"
metricSuccessName = "service_success_total"
metricSuccessWithAckName = "service_success_with_ack_total"
metricDeliveredName = "service_delivered_total"
metricFailureName = "service_failure_total"
metricFailureWithReasonName = "service_failure_with_reason_total"
metricRemovalName = "service_removal_total"
metricBadRequestName = "service_bad_request_total"
metricFCMResponseName = "service_fcm_request_duration_seconds"
metricAPNSResponseName = "service_apns_request_duration_seconds"
metricServiceResponseName = "service_request_duration_seconds"
metricNotificationResponseName = "service_notification_duration_seconds"
)
// NewPrometheusHandler returns the http.Handler to expose Prometheus metrics.
func NewPrometheusHandler() http.Handler {
	// Serves the default prometheus registry, which newMetrics registers
	// its collectors into via prometheus.MustRegister.
	return promhttp.Handler()
}
// metrics bundles every Prometheus collector used by the push service.
type metrics struct {
	// Counters, labelled by platform/type (plus reason where noted).
	metricNotificationsTotal *prometheus.CounterVec
	metricSuccess            *prometheus.CounterVec
	metricSuccessWithAck     *prometheus.CounterVec
	metricDelivered          *prometheus.CounterVec
	metricFailure            *prometheus.CounterVec
	metricFailureWithReason  *prometheus.CounterVec // extra "reason" label
	metricRemoval            *prometheus.CounterVec // labelled platform/reason
	metricBadRequest         prometheus.Counter
	// Latency histograms, in seconds.
	metricAPNSResponse         prometheus.Histogram
	metricFCMResponse          prometheus.Histogram
	metricNotificationResponse *prometheus.HistogramVec // per platform
	metricServiceResponse      prometheus.Histogram
}
// newMetrics initializes the metrics and registers them with the default
// prometheus registry.
func newMetrics() *metrics {
	m := &metrics{
		metricNotificationsTotal: prometheus.NewCounterVec(prometheus.CounterOpts{
			Name: metricNotificationsTotalName,
			Help: "Number of notifications sent"},
			[]string{"platform", "type"}),
		metricSuccess: prometheus.NewCounterVec(prometheus.CounterOpts{
			Name: metricSuccessName,
			Help: "Number of push success."},
			[]string{"platform", "type"}),
		metricSuccessWithAck: prometheus.NewCounterVec(prometheus.CounterOpts{
			Name: metricSuccessWithAckName,
			Help: "Number of push success that contains ackId."},
			[]string{"platform", "type"},
		),
		metricDelivered: prometheus.NewCounterVec(prometheus.CounterOpts{
			Name: metricDeliveredName,
			Help: "Number of push delivered."},
			[]string{"platform", "type"},
		),
		metricFailure: prometheus.NewCounterVec(prometheus.CounterOpts{
			Name: metricFailureName,
			Help: "Number of push errors."},
			[]string{"platform", "type"}),
		metricFailureWithReason: prometheus.NewCounterVec(prometheus.CounterOpts{
			Name: metricFailureWithReasonName,
			Help: "Number of push errors with reasons."},
			[]string{"platform", "type", "reason"}),
		metricRemoval: prometheus.NewCounterVec(prometheus.CounterOpts{
			Name: metricRemovalName,
			Help: "Number of device token errors."},
			[]string{"platform", "reason"}),
		metricBadRequest: prometheus.NewCounter(prometheus.CounterOpts{
			Name: metricBadRequestName,
			Help: "Request to push proxy was a bad request",
		}),
		metricAPNSResponse: prometheus.NewHistogram(prometheus.HistogramOpts{
			Name: metricAPNSResponseName,
			Help: "Request latency distribution",
		}),
		metricFCMResponse: prometheus.NewHistogram(prometheus.HistogramOpts{
			Name: metricFCMResponseName,
			Help: "Request latency distribution",
		}),
		metricNotificationResponse: prometheus.NewHistogramVec(prometheus.HistogramOpts{
			Name: metricNotificationResponseName,
			Help: "Notifiction request latency distribution"},
			[]string{"platform"}),
		metricServiceResponse: prometheus.NewHistogram(prometheus.HistogramOpts{
			Name: metricServiceResponseName,
			Help: "Request latency distribution",
		}),
	}
	prometheus.MustRegister(
		m.metricNotificationsTotal,
		m.metricSuccess,
		m.metricSuccessWithAck,
		// BUG FIX: metricDelivered was created and incremented but never
		// registered, so it was never exposed to Prometheus.
		m.metricDelivered,
		m.metricFailure,
		m.metricFailureWithReason,
		m.metricRemoval,
		m.metricBadRequest,
		m.metricAPNSResponse,
		m.metricFCMResponse,
		m.metricServiceResponse,
		m.metricNotificationResponse,
	)
	return m
}
// shutdown unregisters every collector this struct owns so a later
// newMetrics call cannot panic on duplicate registration.
func (m *metrics) shutdown() {
	func(cs ...prometheus.Collector) {
		for _, c := range cs {
			prometheus.Unregister(c)
		}
	}(
		m.metricNotificationsTotal,
		m.metricSuccess,
		m.metricSuccessWithAck,
		// BUG FIX: metricDelivered was previously omitted from this list.
		// Unregister is a no-op when the collector was never registered,
		// so this is safe in either case.
		m.metricDelivered,
		m.metricFailure,
		m.metricFailureWithReason,
		m.metricRemoval,
		m.metricBadRequest,
		m.metricAPNSResponse,
		m.metricFCMResponse,
		m.metricServiceResponse,
		m.metricNotificationResponse,
	)
}
func (m *metrics) incrementNotificationTotal(platform, pushType string) {
m.metricNotificationsTotal.WithLabelValues(platform, pushType).Inc()
}
func (m *metrics) incrementSuccess(platform, pushType string) {
m.metricSuccess.WithLabelValues(platform, pushType).Inc()
}
func (m *metrics) incrementSuccessWithAck(platform, pushType string) {
m.incrementSuccess(platform, pushType)
m.metricSuccessWithAck.WithLabelValues(platform, pushType).Inc()
}
func (m *metrics) incrementDelivered(platform, pushType string) {
m.metricDelivered.WithLabelValues(platform, pushType).Inc()
}
func (m *metrics) incrementFailure(platform, pushType, reason string) {
m.metricFailure.WithLabelValues(platform, pushType).Inc()
if len(reason) > 0 {
m.metricFailureWithReason.WithLabelValues(platform, pushType, reason).Inc()
}
}
func (m *metrics) incrementRemoval(platform, pushType, reason string) {
m.metricRemoval.WithLabelValues(platform, reason).Inc()
m.incrementFailure(platform, pushType, reason)
}
func (m *metrics) incrementBadRequest() {
m.metricBadRequest.Inc()
}
// observeAPNSResponse records one APNS request latency observation.
func (m *metrics) observeAPNSResponse(dur float64) {
	m.metricAPNSResponse.Observe(dur)
}
// observeFCMResponse records one FCM request latency observation.
func (m *metrics) observeFCMResponse(dur float64) {
	m.metricFCMResponse.Observe(dur)
}
// observeServiceResponse records one service request latency observation.
func (m *metrics) observeServiceResponse(dur float64) {
	m.metricServiceResponse.Observe(dur)
}
func (m *metrics) observerNotificationResponse(platform string, dur float64) {
m.metricNotificationResponse.WithLabelValues(platform).Observe(dur)
switch platform {
case PushNotifyApple:
m.observeAPNSResponse(dur)<|fim▁hole|><|fim▁end|>
|
case PushNotifyAndroid:
m.observeFCMResponse(dur)
}
}
|
<|file_name|>ScriptEditorDialog.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
***************************************************************************
    ScriptEditorDialog.py
---------------------
Date : December 2012
Copyright : (C) 2012 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from processing.modeler.ModelerUtils import ModelerUtils
__author__ = 'Alexander Bruy'
__date__ = 'December 2012'
__copyright__ = '(C) 2012, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import codecs
import sys
import json
from PyQt4.QtCore import *<|fim▁hole|>from PyQt4.QtGui import *
from PyQt4.Qsci import *
from qgis.core import *
from qgis.utils import iface
from processing.gui.ParametersDialog import ParametersDialog
from processing.gui.HelpEditionDialog import HelpEditionDialog
from processing.algs.r.RAlgorithm import RAlgorithm
from processing.algs.r.RUtils import RUtils
from processing.script.ScriptAlgorithm import ScriptAlgorithm
from processing.script.ScriptUtils import ScriptUtils
from processing.ui.ui_DlgScriptEditor import Ui_DlgScriptEditor
import processing.resources_rc
class ScriptEditorDialog(QDialog, Ui_DlgScriptEditor):
    """Editor dialog for Processing user scripts.

    Handles two script flavours, selected by ``algType``: plain Python
    scripts (``SCRIPT_PYTHON``) and R scripts (``SCRIPT_R``).  The dialog can
    edit an existing algorithm (``alg``) or start from an empty buffer.
    """

    # Script-type discriminators; they select the lexer, the file filter on
    # save and which algorithm class wraps the editor text.
    SCRIPT_PYTHON = 0
    SCRIPT_R = 1

    # Class-level default; per-instance state is maintained via
    # setHasChanged() below.
    hasChanged = False

    def __init__(self, algType, alg):
        """Build the dialog.

        :param algType: one of SCRIPT_PYTHON / SCRIPT_R.
        :param alg: algorithm being edited, or None for a new script.
        """
        QDialog.__init__(self)
        self.setupUi(self)

        self.setWindowFlags(Qt.WindowMinimizeButtonHint |
                            Qt.WindowMaximizeButtonHint |
                            Qt.WindowCloseButtonHint)

        # Set icons
        self.btnSave.setIcon(
            QgsApplication.getThemeIcon('/mActionFileSave.svg'))
        self.btnSaveAs.setIcon(
            QgsApplication.getThemeIcon('/mActionFileSaveAs.svg'))
        self.btnEditHelp.setIcon(QIcon(':/processing/images/edithelp.png'))
        self.btnRun.setIcon(QIcon(':/processing/images/runalgorithm.png'))
        self.btnCut.setIcon(QgsApplication.getThemeIcon('/mActionEditCut.png'))
        self.btnCopy.setIcon(
            QgsApplication.getThemeIcon('/mActionEditCopy.png'))
        self.btnPaste.setIcon(
            QgsApplication.getThemeIcon('/mActionEditPaste.png'))
        self.btnUndo.setIcon(QgsApplication.getThemeIcon('/mActionUndo.png'))
        self.btnRedo.setIcon(QgsApplication.getThemeIcon('/mActionRedo.png'))

        # Connect signals and slots
        self.btnSave.clicked.connect(self.save)
        self.btnSaveAs.clicked.connect(self.saveAs)
        self.btnEditHelp.clicked.connect(self.editHelp)
        self.btnRun.clicked.connect(self.runAlgorithm)
        self.btnCut.clicked.connect(self.editor.cut)
        self.btnCopy.clicked.connect(self.editor.copy)
        self.btnPaste.clicked.connect(self.editor.paste)
        self.btnUndo.clicked.connect(self.editor.undo)
        self.btnRedo.clicked.connect(self.editor.redo)
        # Any edit marks the buffer dirty and re-enables the Save button.
        self.editor.textChanged.connect(lambda: self.setHasChanged(True))

        self.alg = alg
        self.algType = algType

        if self.alg is not None:
            # Editing an existing algorithm: preload its file and source.
            self.filename = self.alg.descriptionFile
            self.editor.setText(self.alg.script)
        else:
            self.filename = None

        self.update = False  # becomes True once the script has been saved
        # Help descriptions edited before the first save are buffered here
        # (there is no .help file to write them to yet).
        self.help = None

        self.setHasChanged(False)

        self.editor.setLexerType(self.algType)

    def editHelp(self):
        """Open the help-edition dialog for the script in the editor.

        For an unsaved script a temporary algorithm is built from the editor
        text so the help editor has something to describe.
        """
        if self.alg is None:
            if self.algType == self.SCRIPT_PYTHON:
                alg = ScriptAlgorithm(None, unicode(self.editor.text()))
            elif self.algType == self.SCRIPT_R:
                alg = RAlgorithm(None, unicode(self.editor.text()))
            # NOTE(review): if algType were neither constant, `alg` would be
            # unbound below — confirm only the two known types are passed in.
        else:
            alg = self.alg

        dlg = HelpEditionDialog(alg)
        dlg.exec_()

        # We store the description string in case there were not saved
        # because there was no filename defined yet
        if self.alg is None and dlg.descriptions:
            self.help = dlg.descriptions

    def save(self):
        """Save to the current file (prompting only if there is none)."""
        self.saveScript(False)

    def saveAs(self):
        """Always prompt for a target file, then save."""
        self.saveScript(True)

    def saveScript(self, saveAs):
        """Write the editor contents to disk.

        :param saveAs: when True, always ask for a file name; otherwise only
            ask if the script has never been saved.
        """
        if self.filename is None or saveAs:
            # Pick the scripts folder and file filter for the script flavour.
            if self.algType == self.SCRIPT_PYTHON:
                scriptDir = ScriptUtils.scriptsFolder()
                filterName = self.tr('Python scripts (*.py)')
            elif self.algType == self.SCRIPT_R:
                scriptDir = RUtils.RScriptsFolder()
                filterName = self.tr('Processing R script (*.rsx)')

            self.filename = unicode(QFileDialog.getSaveFileName(self,
                                    self.tr('Save script'), scriptDir,
                                    filterName))

        if self.filename:
            # Enforce the expected extension for the script flavour.
            if self.algType == self.SCRIPT_PYTHON \
                    and not self.filename.lower().endswith('.py'):
                self.filename += '.py'
            if self.algType == self.SCRIPT_R \
                    and not self.filename.lower().endswith('.rsx'):
                self.filename += '.rsx'

            text = unicode(self.editor.text())
            if self.alg is not None:
                self.alg.script = text
            try:
                with codecs.open(self.filename, 'w', encoding='utf-8') as fout:
                    fout.write(text)
            except IOError:
                QMessageBox.warning(self, self.tr('I/O error'),
                                    self.tr('Unable to save edits. Reason:\n %s')
                                    % unicode(sys.exc_info()[1]))
                return
            # Presumably read by the caller to refresh the toolbox — confirm.
            self.update = True

            # If help strings were defined before saving the script for
            # the first time, we do it here
            if self.help:
                with open(self.filename + '.help', 'w') as f:
                    json.dump(self.help, f)
                self.help = None
            self.setHasChanged(False)
        else:
            # Dialog was cancelled: getSaveFileName returned an empty string.
            self.filename = None

    def setHasChanged(self, hasChanged):
        """Track the dirty flag and enable Save only when there are edits."""
        self.hasChanged = hasChanged
        self.btnSave.setEnabled(hasChanged)

    def runAlgorithm(self):
        """Execute the script currently in the editor.

        A throw-away algorithm is built from the editor text, its parameters
        dialog is shown, and the previous map tool is restored afterwards.
        """
        if self.algType == self.SCRIPT_PYTHON:
            alg = ScriptAlgorithm(None, unicode(self.editor.text()))
            alg.provider = ModelerUtils.providers['script']
        if self.algType == self.SCRIPT_R:
            alg = RAlgorithm(None, unicode(self.editor.text()))
            alg.provider = ModelerUtils.providers['r']

        dlg = alg.getCustomParametersDialog()
        if not dlg:
            dlg = ParametersDialog(alg)

        # Remember the active map tool: executing the algorithm may change it.
        canvas = iface.mapCanvas()
        prevMapTool = canvas.mapTool()

        dlg.show()
        dlg.exec_()

        if canvas.mapTool() != prevMapTool:
            try:
                canvas.mapTool().reset()
            except:
                pass
            canvas.setMapTool(prevMapTool)
| |
<|file_name|>system_device.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class SystemDevice(object):
    """Base interface for a device that can be programmed.

    Both operations are abstract here and only raise ``NotImplementedError``;
    concrete device classes are expected to override them.
    """

    def upload_project(self, project: Project, **kwargs):
        """Upload a conductor project to the device.

        :raises NotImplementedError: always, in this base class.
        """
        raise NotImplementedError

    def write_program(self, file: typing.BinaryIO, quirk: int = 0, **kwargs):
        """Write a program binary to the device.

        ``quirk`` selects a device-specific upload variant (semantics are
        defined by subclasses — TODO confirm).

        :raises NotImplementedError: always, in this base class.
        """
        raise NotImplementedError
|
import typing
from pros.conductor import Project
|
<|file_name|>GitModificationParser.java<|end_file_name|><|fim▁begin|>/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.domain.materials.git;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.util.DateUtils;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class GitModificationParser {
private LinkedList<Modification> modifications = new LinkedList<>();
private static final String SPACES = "\\s+";
private static final String COMMENT_INDENT = "\\s{4}";
private static final String COMMENT_TEXT = "(.*)";
private static final String HASH = "(\\w+)";
private static final String DATE = "(.+)";
private static final String AUTHOR = "(.+)";
private static final Pattern COMMIT_PATTERN = Pattern.compile("^commit" + SPACES + HASH + "$");
private static final Pattern AUTHOR_PATTERN = Pattern.compile("^Author:"+ SPACES + AUTHOR + "$");
private static final Pattern DATE_PATTERN = Pattern.compile("^Date:" + SPACES + DATE + "$");
private static final Pattern COMMENT_PATTERN = Pattern.compile("^" + COMMENT_INDENT + COMMENT_TEXT + "$");
<|fim▁hole|> }
return modifications;
}
public List<Modification> getModifications() {
return modifications;
}
public void processLine(String line) {
Matcher matcher = COMMIT_PATTERN.matcher(line);
if (matcher.matches()) {
modifications.add(new Modification("", "", null, null, matcher.group(1)));
}
Matcher authorMatcher = AUTHOR_PATTERN.matcher(line);
if (authorMatcher.matches()) {
modifications.getLast().setUserName(authorMatcher.group(1));
}
Matcher dateMatcher = DATE_PATTERN.matcher(line);
if (dateMatcher.matches()) {
modifications.getLast().setModifiedTime(DateUtils.parseISO8601(dateMatcher.group(1)));
}
Matcher commentMatcher = COMMENT_PATTERN.matcher(line);
if (commentMatcher.matches()) {
Modification last = modifications.getLast();
String comment = Optional.ofNullable(last.getComment()).orElse("");
if (!comment.isEmpty()) comment += "\n";
last.setComment(comment + commentMatcher.group(1));
}
}
}<|fim▁end|>
|
public List<Modification> parse(List<String> output) {
for (String line : output) {
processLine(line);
|
<|file_name|>population_builder.rs<|end_file_name|><|fim▁begin|>//! This module defines helper functions (builder pattern) to create a valid population.
//!
//! darwin-rs: evolutionary algorithms with Rust
//!
//! Written by Willi Kappler, Version 0.4 (2017.06.26)
//!
//! Repository: https://github.com/willi-kappler/darwin-rs
//!
//! License: MIT
//!
//! This library allows you to write evolutionary algorithms (EA) in Rust.
//! Examples provided: TSP, Sudoku, Queens Problem, OCR
//!
//!
use std;
use individual::{Individual, IndividualWrapper};
use population::Population;
/// This is a helper struct in order to build (configure) a valid population.
/// See builder pattern: https://en.wikipedia.org/wiki/Builder_pattern
///
/// The builder is consumed by `finalize()`, which validates the
/// configuration and yields the `Population`.
///
/// Maybe use phantom types, see https://github.com/willi-kappler/darwin-rs/issues/9
pub struct PopulationBuilder<T: Individual> {
    /// The actual simulation
    population: Population<T>,
}
// Error types returned by `PopulationBuilder::finalize()` (generated by the
// `error_chain!` macro).
error_chain! {
    errors {
        // Fewer than three individuals were supplied to the builder.
        IndividualsTooLow
        // reset_limit_start >= reset_limit_end while the end limit is > 0.
        LimitEndTooLow
    }
}
/// This implementation contains all the helper method to build (configure) a valid population.
impl<T: Individual + Clone> PopulationBuilder<T> {
/// Start with this method, it must always be called as the first one.
/// It creates a default population with some dummy (but invalid) values.
pub fn new() -> PopulationBuilder<T> {
PopulationBuilder {
population: Population {
num_of_individuals: 0,
population: Vec::new(),
reset_limit: 0,
reset_limit_start: 1000,
reset_limit_end: 10000,
reset_limit_increment: 1000,
reset_counter: 0,
id: 1,
fitness_counter: 0
}
}
}
/// Sets the initial population provided inside a vector, length must be >= 3
pub fn initial_population(mut self, individuals: &[T]) -> PopulationBuilder<T> {
self.population.num_of_individuals = individuals.len() as u32;
for individual in individuals {
self.population.population.push(IndividualWrapper {
individual: (*individual).clone(),
fitness: std::f64::MAX,
num_of_mutations: 1,
id: self.population.id,
});
}
self
}
/// Configures the mutation rates (number of mutation runs) for all the individuals
/// in the population: The first individual will mutate once, the second will mutate twice,
/// the nth individual will Mutate n-times per iteration.
pub fn increasing_mutation_rate(mut self) -> PopulationBuilder<T> {
let mut mutation_rate = 1;
for wrapper in &mut self.population.population {
wrapper.num_of_mutations = mutation_rate;
mutation_rate += 1;
}
self
}
/// Configures the mutation rates (number of mutation runs) for all the individuals in the
/// population: Instead of a linear growing mutation rate like in the
/// `increasing_mutation_rate` function above this sets an exponention mutation rate for
/// all the individuals. The first individual will mutate base^1 times, the second will
/// mutate base^2 times, and nth will mutate base^n times per iteration.
pub fn increasing_exp_mutation_rate(mut self, base: f64) -> PopulationBuilder<T> {
let mut mutation_rate = 1;
for wrapper in &mut self.population.population {
wrapper.num_of_mutations = base.powi(mutation_rate).floor() as u32;
mutation_rate += 1;
}
self
}
/// Configures the mutation rates (number of mutation runs) for all the individuals in the
/// population: This allows to specify an arbitrary mutation scheme for each individual.
/// The number of rates must be equal to the number of individuals.
pub fn mutation_rate(mut self, mutation_rate: Vec<u32>) -> PopulationBuilder<T> {
// TODO: better error handling
assert!(self.population.population.len() == mutation_rate.len());
for (individual, mutation_rate) in self.population
.population
.iter_mut()
.zip(mutation_rate.into_iter()) {
individual.num_of_mutations = mutation_rate;
}
self
}
/// Configures the reset limit for the population. If reset_limit_end is greater than zero
/// then a reset counter is increased each iteration. If that counter is greater than the
/// limit, all individuals will be resetted, the limit will be increased by 1000 and the
/// counter is set back to zero. Default value for reset_limit_start is 1000.
pub fn reset_limit_start(mut self, reset_limit_start: u32) -> PopulationBuilder<T> {
self.population.reset_limit_start = reset_limit_start;
self.population.reset_limit = reset_limit_start;
self
}
/// Configures the end value for the reset_limit. If the reset_limit >= reset_limit_end
/// then the reset_limit will be resetted to the start value reset_limit_start.
/// Default value for reset_limit_end is 100000.
/// If reset_limit_end == 0 then the reset limit feature will be disabled.
pub fn reset_limit_end(mut self, reset_limit_end: u32) -> PopulationBuilder<T> {
self.population.reset_limit_end = reset_limit_end;
self
}
/// Configure the increment for the reset_limit. If the reset_limit is reached, its value
/// is incrementet by the amount of reset_limit_increment.
pub fn reset_limit_increment(mut self, reset_limit_increment: u32) -> PopulationBuilder<T> {
self.population.reset_limit_increment = reset_limit_increment;
self
}
/// Set the population id. Currently this is only used for statistics.
pub fn set_id(mut self, id: u32) -> PopulationBuilder<T> {
for individual in &mut self.population.population {
individual.id = id;
}
self.population.id = id;
self
}
/// This checks the configuration of the simulation and returns an PopError or Ok if no PopErrors
/// where found.
pub fn finalize(self) -> Result<Population<T>> {
match self.population {
Population { num_of_individuals: 0...2, ..} => {
Err(ErrorKind::IndividualsTooLow.into())
}
Population { reset_limit_start: start,<|fim▁hole|> _ => Ok(self.population)
}
}
}<|fim▁end|>
|
reset_limit_end: end, ..} if (end > 0) && (start >= end) => {
Err(ErrorKind::LimitEndTooLow.into())
}
|
<|file_name|>cipherTWO.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************
** Copyright (C) 2016 Yakup Ates <[email protected]>
** This program is free software: you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation, either version 3 of the License, or
** any later version.
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
** You should have received a copy of the GNU General Public License
** along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
#include "cipherTWO.h"
// 4-bit substitution box used by both encryption rounds.
const char cipherTWO::SBox[] = {
    6, 4, 12, 5, 0, 7, 2, 14,
    1, 15, 3, 13, 8, 10, 9, 11
};

// Inverse S-box: SBox_inv[SBox[x]] == x for every 4-bit x.
const char cipherTWO::SBox_inv[] = {
    4, 8, 6, 10, 1, 3, 0, 5,
    12, 14, 13, 15, 2, 11, 7, 9
};

// Version string reported by usage().
const char* cipherTWO::version = "1.0";
// Look up the S-box value for the low 4 bits of `index`.
//
// Fix: the original indexed SBox[index % 16]; for negative `index` the C++
// remainder is negative, so the access read out of bounds.  The index is now
// normalized into [0, 16); behaviour for non-negative input is unchanged.
char cipherTWO::getSBox(int index){
    return SBox[((index % 16) + 16) % 16];
}
// Look up the inverse S-box value for the low 4 bits of `index`.
//
// Fix: as in getSBox(), a negative `index` made `index % 16` negative and
// the access read out of bounds; the index is normalized into [0, 16).
char cipherTWO::getSBox_inv(int index){
    return SBox_inv[((index % 16) + 16) % 16];
}
// Number of round keys the cipher uses (k0..k2).
int cipherTWO::get_k_cnt(){
    return k_cnt;
}
// Version string of this implementation.
const char* cipherTWO::get_version(){
    return version;
}
// Return a pointer to the internal key array.
// NOTE(review): this exposes mutable internal state to the caller — confirm
// that is intended.
int* cipherTWO::getKey(){
    return k;
}
// Copy k_cnt key values, reduced mod 16, into the internal key array.
// NOTE(review): for negative inputs `% 16` keeps the sign — presumably keys
// are expected to be non-negative; confirm at the call sites.
void cipherTWO::setKey(int* k_new){
    for(int i=0; i<k_cnt; ++i){
        k[i] = k_new[i] % 16;
    }
}
// Encrypt one 4-bit block: c = S(S(m ^ k0) ^ k1) ^ k2 (everything mod 16).
// All intermediate round values are printed.
// NOTE: the caller's key array is normalized in place (k[i] %= 16).
int cipherTWO::encrypt(int m, int k[k_cnt]){
    char u, v, w, x, c;

    // Reduce inputs to their low 4 bits (key buffer is modified).
    for(int i=0; i<k_cnt; ++i){
        k[i] %= 16;
    }
    m %= 16;

    u = m ^ k[0];    // whitening with k0
    u %= 16;
    v = getSBox(u);  // first substitution
    v %= 16;
    w = v ^ k[1];    // second round key
    w %= 16;
    x = getSBox(w);  // second substitution
    x %= 16;
    c = x ^ k[2];    // final whitening with k2
    c %= 16;

    std::cout << "Encrypting with:"
              << std::endl
              << "---"
              << std::endl
              << "m : " << int(m)
              << std::endl
              << "k0: " << int(k[0])
              << std::endl
              << "k1: " << int(k[1])
              << std::endl
              << "k2: " << int(k[2])
              << std::endl
              << "---"
              << std::endl << std::endl
              << "Results:"
              << std::endl
              << "---"
              << std::endl
              << "u: " << int(u)
              << std::endl
              << "v: " << int(v)
              << std::endl
              << "w: " << int(w)
              << std::endl
              << "x: " << int(x)
              << std::endl
              << "c: " << int(c)
              << std::endl
              << "---"
              << std::endl;

    return int(c);
}
// Decrypt one 4-bit block by undoing encrypt():
// m = Sinv(Sinv(c ^ k2) ^ k1) ^ k0 (everything mod 16).
// silent == 0 prints all intermediate values.
// NOTE: the caller's key array is normalized in place (k[i] %= 16).
int cipherTWO::decrypt(int c, int k[k_cnt], int silent=0){
    char u, v, w, x, m;

    // Reduce inputs to their low 4 bits (key buffer is modified).
    for(int i=0; i<k_cnt; ++i){
        k[i] %= 16;
    }
    c %= 16;

    x = c ^ k[2];        // undo final whitening
    x %= 16;
    w = getSBox_inv(x);  // undo second substitution
    w %= 16;
    v = w ^ k[1];        // undo second round key
    v %= 16;
    u = getSBox_inv(v);  // undo first substitution
    u %= 16;
    m = u ^ k[0];        // undo whitening with k0
    m %= 16;

    if(silent == 0){
        std::cout << "Decrypting with:"
                  << std::endl
                  << "---"
                  << std::endl
                  << "c : " << int(c)
                  << std::endl
                  << "k0: " << int(k[0])
                  << std::endl
                  << "k1: " << int(k[1])
                  << std::endl
                  << "k2: " << int(k[2])
                  << std::endl
                  << "---"
                  << std::endl << std::endl
                  << "Results:"
                  << std::endl
                  << "---"
                  << std::endl
                  << "u: " << int(u)
                  << std::endl
                  << "v: " << int(v)
                  << std::endl
                  << "w: " << int(w)
                  << std::endl
                  << "x: " << int(x)
                  << std::endl
                  << "m: " << int(m)
                  << std::endl
                  << "---"
                  << std::endl;
    }

    return int(m);
}
// Print the S-box difference table for a fixed input difference `diff`:
// for every 4-bit u0 it lists u1 = u0 ^ diff and the output difference
// S(u0) ^ S(u1).  Returns the vector of output differences and also feeds
// it to sbox_statistics() for a frequency summary.
// NOTE(review): the printed column header says "u1=u0^15" even when `diff`
// is not 15 — confirm whether the header should use the actual diff.
std::vector<char> cipherTWO::get_influence_SBox(char diff=15){
    // Preserve std::cout formatting flags; they are changed below.
    std::ios state(NULL);
    state.copyfmt(std::cout);

    const int table_size = 16;
    const char sep = ' ';
    const int entry_width = 12;
    char v0[table_size], v1[table_size], u0[table_size], u1[table_size];
    std::vector<char> v_diff(table_size);

    // Build the table: u0, its partner u1, both S-box outputs and their XOR.
    for(int i=0; i<table_size; ++i){
        u0[i] = i;
        u0[i] %= 16;
        u1[i] = i ^ diff;
        u1[i] %= 16;
        v0[i] = getSBox(i);
        v1[i] = getSBox(u1[i]);
        v_diff[i] = v0[i] ^ v1[i];
    }

    std::cout << std::left << std::setw(entry_width) << std::setfill(sep)
              << "u0";
    std::cout << std::left << std::setw(entry_width) << std::setfill(sep)
              << "u1=u0^15";
    std::cout << std::left << std::setw(entry_width) << std::setfill(sep)
              << "v0=S(u0)";
    std::cout << std::left << std::setw(entry_width) << std::setfill(sep)
              << "v1=S(u1)";
    std::cout << std::left << std::setw(entry_width) << std::setfill(sep)
              << "v1^v2" << std::endl;
    std::cout << "-------------------------------------------------------"
              << std::endl;

    for(int i=0; i<table_size; ++i){
        std::cout << std::left << std::setw(entry_width)
                  << std::setfill(sep) << int(u0[i]);
        std::cout << std::left << std::setw(entry_width)
                  << std::setfill(sep) << int(u1[i]);
        std::cout << std::left << std::setw(entry_width)
                  << std::setfill(sep) << int(v0[i]);
        std::cout << std::left << std::setw(entry_width)
                  << std::setfill(sep) << int(v1[i]);
        std::cout << std::left << std::setw(entry_width)
                  << std::setfill(sep) << int(v_diff[i])
                  << std::endl;
    }

    // reset std::cout state
    std::cout.copyfmt(state);

    //std::vector<int> v_diff_copy(v_diff);
    sbox_statistics(v_diff, table_size, 0);

    return v_diff;
}
// Count how often each 4-bit value occurs in v_diff_copy and return
// {most_frequent_value, its_count}.  With silent == 0 the result is also
// printed.
//
// Fixes: the original declared `int max_occ[table_size] = {0}` — an
// initialized variable-length array, which is not valid ISO C++; a
// std::vector is used instead.  The sort of the copied vector was
// unnecessary for counting and has been dropped (results are unchanged).
std::vector<int> cipherTWO::sbox_statistics(std::vector<char> v_diff_copy, int table_size, int silent=1){
    std::vector<int> max_occ(table_size, 0);
    int max = 0;
    int element = 0;
    std::vector<int> result(2);

    // Tally each 4-bit value (values are always < table_size).
    for(int i=0; i<table_size; ++i){
        max_occ[int(v_diff_copy[i])] += 1;
    }

    // Find the value with the highest count.
    for(int i=0; i<table_size; ++i){
        if(max_occ[i] > max){
            max = max_occ[i];
            element = i;
        }
    }

    if(silent == 0){
        std::cout << std::endl << "[+] NOTE: Element "
                  << element << " occurs in " << max << "/"
                  << table_size << " times."
                  << std::endl;
    }

    result[0] = element;
    result[1] = max;
    return result;
}
// Differential attack on the last round key k2, using two chosen
// plaintext/ciphertext pairs: for every key guess the final key addition and
// the second S-box are peeled off both ciphertexts; guesses whose w
// difference matches the expected difference are kept.
//
// Fixes relative to the original:
//  * the pair count was derived via sizeof(pointer)/sizeof(int), which
//    yields the pointer size, not the array length — the attack operates on
//    exactly two pairs (see main()), so the count is now explicit;
//  * the inverse S-box was applied to the never-initialized array v_
//    instead of x_;
//  * a null input now returns an empty key list instead of a one-element
//    sentinel vector.
std::vector<int> cipherTWO::attack(int* m, int* c, int silent=0){
    const int table_size = 16;
    // Expected XOR difference of the w values for the correct key guess.
    int v_diff = 0;
    const int pair_cnt = 2;
    std::vector<int> possible_keys;

    if(m == 0 || c == 0){
        return possible_keys;
    }

    if(silent == 0){
        std::cout << "Attacking with:"
                  << std::endl
                  << "---"
                  << std::endl;
        for(int i=0; i<pair_cnt; ++i){
            std::cout << "m" << i << ": "
                      << int(m[i]) << std::endl;
            std::cout << "c" << i << ": " << int(c[i])
                      << std::endl;
        }
        std::cout << "---"
                  << std::endl;
    }

    // Reduce all blocks to their low 4 bits.
    for(int i=0; i<pair_cnt; ++i){
        m[i] %= 16;
        c[i] %= 16;
    }

    int w_[pair_cnt];
    int x_[pair_cnt];

    for(int i=0; i<table_size; ++i){
        int c_diff = 0;
        for(int j=0; j<pair_cnt; ++j){
            x_[j] = c[j] ^ i;            // undo the guessed k2
            w_[j] = getSBox_inv(x_[j]);  // undo the second substitution
        }
        for(int j=0; j<pair_cnt; ++j){
            c_diff ^= w_[j];
        }
        if(c_diff == v_diff)
            possible_keys.push_back(i);
    }

    if(silent == 0){
        int keys_size = possible_keys.size();
        if(keys_size > 0){
            std::cout << std::endl
                      << "Results:"
                      << std::endl
                      << "---"
                      << std::endl;
            if(keys_size == 1){
                std::cout << "Possible key: ";
            } else {
                std::cout << "Possible keys: ";
            }
            for(int i=0; i<keys_size; ++i){
                if(i != keys_size-1)
                    std::cout << possible_keys[i] << ", ";
                else
                    std::cout << possible_keys[i] << std::endl;
            }
            std::cout << "---" << std::endl;
        } else {
            std::cout << "[-]"
                      << " Warning: "
                      << "I found no possible keys... :-("
                      << std::endl;
        }
    }

    return possible_keys;
}
/*
 * Print a usage/warning/error message for the given condition.
 *
 * Signals:
 * 0 - too few arguments
 * 1 - too many arguments, run if possible but return warning.
 * 2 - weird character type. Stop and return error.
 */
void cipherTWO::error_handler(char* programname, int signal){
    switch(signal){
    case 0:
        usage(programname);
        std::cout << std::endl;
        std::cout << "[-] Error:"
                  << " You gave me too few arguments."
                  << std::endl;
        break;
    case 1:
        //usage(programname);
        std::cout << std::endl;
        std::cout << "[-] Warning:"
                  << " You entered more arguments than expected."
                  << " I will only choose the first few..."
                  << std::endl << std::endl;
        break;
    case 2:
        usage(programname);
        std::cout << std::endl;
        std::cout << "[-] Error:"
                  << " You gave me weird characters."
                  << " I can't handle these characters. Stopping."
                  << std::endl;
        break;
    }
}
// Print the command line help text for all supported modes.
void cipherTWO::usage(char* programname){
    std::cout << programname << " " << get_version()
              << " usage: "
              << std::endl
              << "###"
              << std::endl
              << "----"
              << std::endl
              << " [-e|--encrypt] <message byte> <key byte 0> <key byte 1> <key byte 2>:"
              << " Encrypt plaintext-byte with cipherTWO."
              << std::endl
              << " [-d|--decrypt] <cipher byte> <key byte 0> <key byte 1> <key byte 2>:"
              << " Decrypt cipher-byte with cipherTWO."
              << std::endl
              << " [-a|--attack] <message byte 0> <message byte 1> <cipher byte 0> <cipher byte 1>:"
              << " Attack cipherTWO via a differential attack. (Chosen-Plaintext Attack)"
              << std::endl
              << " [-as|--analyse-sbox] <character>:"
              << " Analyse S-Box for given character"
              << std::endl
              << " [-h|--help]:"
              << " Print this help message."
              << std::endl
              << "----"
              << std::endl;
    // << std::endl << std::endl
    // << "License"
    // << std::endl
    // << "###"
    // << std::endl
    // << "Copyright (C) 2016 Yakup Ates <Yakup [email protected]>"
    // << std::endl
    // << programname << " " << get_version()
    // << " comes with ABSOLUTELY NO WARRANTY."
    // << std::endl
    // << "You may redistribute copies of "
    // << programname << " " << get_version()
    // << " under the terms of the GNU General Public License."
    // << std::endl;
}
// Command line entry point: parse the mode flag and dispatch to the
// encrypt / decrypt / attack / S-box analysis helpers.
//
// Fixes relative to the original:
//  * `int key[cipher.get_k_cnt()] = {...}` was an initialized variable
//    length array (not valid ISO C++); a plain initializer list is used;
//  * in the "-as" branch the argc checks were ordered so that the
//    "too many arguments" warning could never trigger (the argc > 2 branch
//    shadowed it) and the argc < 2 check was unreachable.
int main(int argc, char** argv){
    cipherTWO cipher;

    if(argc == 1){
        cipher.usage(argv[0]);
        return 1;
    }

    const std::string mode(argv[1]);

    if(mode == "-h" || mode == "--help"){
        cipher.usage(argv[0]);
    } else if(mode == "-e" || mode == "--encrypt"){
        // Expected: program, flag, message, k0, k1, k2
        const int expect_arg_cnt = cipher.get_k_cnt() + 3;
        if(argc < expect_arg_cnt){
            cipher.error_handler(argv[0], 0);
            return 1;
        } else if(argc > expect_arg_cnt){
            cipher.error_handler(argv[0], 1);
        }

        int key[] = {atoi(argv[3]), atoi(argv[4]), atoi(argv[5])};
        cipher.encrypt(atoi(argv[2]), key);
    } else if(mode == "-d" || mode == "--decrypt"){
        const int expect_arg_cnt = cipher.get_k_cnt() + 3;
        if(argc < expect_arg_cnt){
            cipher.error_handler(argv[0], 0);
            return 1;
        } else if(argc > expect_arg_cnt){
            cipher.error_handler(argv[0], 1);
        }

        int key[] = {atoi(argv[3]), atoi(argv[4]), atoi(argv[5])};
        cipher.decrypt(atoi(argv[2]), key);
    } else if(mode == "-a" || mode == "--attack"){
        // Expected: program, flag, m0, m1, c0, c1
        const int expect_arg_cnt = 6;
        const int count_pair = 2;
        if(argc < expect_arg_cnt){
            cipher.error_handler(argv[0], 0);
            return 1;
        } else if(argc > expect_arg_cnt){
            cipher.error_handler(argv[0], 1);
        }

        int m[count_pair] = {atoi(argv[2]), atoi(argv[3])};
        int c[count_pair] = {atoi(argv[4]), atoi(argv[5])};
        cipher.attack(m, c);
    } else if(mode == "-as" || mode == "--analyse-sbox"){
        if(argc == 2){
            // No character supplied: analyse with the default difference.
            cipher.get_influence_SBox();
        } else {
            if(argc > 3){
                cipher.error_handler(argv[0], 1);  // extra arguments: warn
            }
            cipher.get_influence_SBox(atoi(argv[2]));
        }
    } else {
        cipher.usage(argv[0]);
    }

    return 0;
}
| |
<|file_name|>core.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = function() {
// Occupy the global variable of Chart, and create a simple base class
var Chart = function(item, config) {
this.construct(item, config);
return this;
};
// Globally expose the defaults to allow for user updating/changing
Chart.defaults = {
global: {
responsive: true,
responsiveAnimationDuration: 0,
maintainAspectRatio: true,<|fim▁hole|> mode: 'nearest',
intersect: true,
animationDuration: 400
},
onClick: null,
defaultColor: 'rgba(0,0,0,0.1)',
defaultFontColor: '#666',
defaultFontFamily: "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif",
defaultFontSize: 12,
defaultFontStyle: 'normal',
showLines: true,
// Element defaults defined in element extensions
elements: {},
// Layout options such as padding
layout: {
padding: {
top: 0,
right: 0,
bottom: 0,
left: 0
}
},
// Legend callback string
legendCallback: function(chart) {
var text = [];
text.push('<ul class="' + chart.id + '-legend">');
for (var i = 0; i < chart.data.datasets.length; i++) {
text.push('<li><span style="background-color:' + chart.data.datasets[i].backgroundColor + '"></span>');
if (chart.data.datasets[i].label) {
text.push(chart.data.datasets[i].label);
}
text.push('</li>');
}
text.push('</ul>');
return text.join('');
}
}
};
Chart.Chart = Chart;
return Chart;
};<|fim▁end|>
|
events: ['mousemove', 'mouseout', 'click', 'touchstart', 'touchmove'],
hover: {
onHover: null,
|
<|file_name|>datagrid.js<|end_file_name|><|fim▁begin|>/*
* Datagrid.js
*/
/*
 * Submit the datagrid form via AJAX and replace the grid's HTML with the
 * server response.  Paging and sorting state is persisted in hidden fields.
 *
 * Fix: dg_send's error-handler tail had been displaced to the end of the
 * file by extraction-marker residue; the three functions are reassembled
 * here with no behavioural change.
 */
function dg_send(url, grid, form, page, tamano, orderby, orderdirection)
{
    var oForm = $("#"+form);
    var oGrid = $("#"+grid);
    var oLoader = $("#loader_"+grid);

    // Persist paging/sorting state in the hidden form fields.
    $('#Form_'+grid+'_page').val(page);
    $('#Form_'+grid+'_tamano').val(tamano);
    $('#Form_'+grid+'_orderby').val(orderby);
    $('#Form_'+grid+'_orderdirection').val(orderdirection);

    // Size the loader overlay to cover the grid while the request runs.
    // NOTE(review): the loader is only hidden explicitly on error —
    // presumably replacing the grid HTML removes it on success; confirm.
    $(oLoader).width($(oGrid).width())
    $(oLoader).height($(oGrid).height())
    jQuery(oLoader).show();

    $.ajax({
        type: "POST",
        url: Routing.generate(url),
        data: ($(oForm).serialize()+'&datagrid='+grid),
        success: function(data){
            data = data.replace(/^\s*|\s*$/g,"");
            $(oGrid).html(data);
        },
        error: function(XMLHttpRequest, textStatus, errorThrown){
            jQuery(oLoader).hide();
            $("#dialog-modal").html("Error: "+XMLHttpRequest.status + ' ' + XMLHttpRequest.statusText);
            $("#dialog-modal").dialog("open");
        }
    });
}

/*
 * Delete a row (after user confirmation) and reload the grid on success;
 * on failure the server message is shown in the modal dialog.
 */
function dg_delitem(urlreload, urldelete, grid, form, deletekey)
{
    if (confirm("¿Está seguro que desea eliminar?")) {
        var oForm = $("#"+form);
        var oGrid = $("#"+grid);
        var oLoader = $("#loader_"+grid);

        $('#Form_'+grid+'_deletekey').val(deletekey);

        $(oLoader).width($(oGrid).width())
        $(oLoader).height($(oGrid).height())
        jQuery(oLoader).show();

        $.ajax({
            type: "POST",
            url: Routing.generate(urldelete),
            data: $(oForm).serialize(),
            success: function(data){
                data = data.replace(/^\s*|\s*$/g,"");
                if(data == 'OK'){
                    dg_reload(urlreload, grid, form);
                }else{
                    jQuery(oLoader).hide();
                    $("#dialog-modal").html(data);
                    $("#dialog-modal").dialog("open");
                }
            },
            error: function(XMLHttpRequest, textStatus, errorThrown){
                jQuery(oLoader).hide();
                $("#dialog-modal").html("Error: "+XMLHttpRequest.status + ' ' + XMLHttpRequest.statusText);
                $("#dialog-modal").dialog("open");
            }
        });
    }
}

/*
 * Re-send the grid request with the paging/sorting state currently stored
 * in the hidden form fields.
 */
function dg_reload(url, grid, form)
{
    dg_send(url, grid, form,
        $('#Form_'+grid+'_page').val(),
        $('#Form_'+grid+'_tamano').val(),
        $('#Form_'+grid+'_orderby').val(),
        $('#Form_'+grid+'_orderdirection').val()
    );
}
|
<|file_name|>fakes.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package backends
import (
"fmt"
compute "google.golang.org/api/compute/v1"
api_v1 "k8s.io/client-go/pkg/api/v1"
"k8s.io/client-go/tools/cache"
"k8s.io/ingress/controllers/gce/utils"
)
// NewFakeBackendServices creates a new fake backend services manager.
// Services are stored in a cache keyed by their Name; ef (optional) lets
// tests inject errors per operation.
func NewFakeBackendServices(ef func(op int, be *compute.BackendService) error) *FakeBackendServices {
	keyFunc := func(obj interface{}) (string, error) {
		return obj.(*compute.BackendService).Name, nil
	}
	return &FakeBackendServices{
		errFunc:         ef,
		backendServices: cache.NewStore(keyFunc),
	}
}
// FakeBackendServices fakes out GCE backend services.
type FakeBackendServices struct {
	backendServices cache.Store // services keyed by Name
	calls           []int       // record of utils.* operations performed
	// errFunc, when non-nil, is consulted before Create/Update to let
	// tests inject failures.
	errFunc func(op int, be *compute.BackendService) error
}
// GetBackendService fakes getting a backend service from the cloud.
// The lookup is recorded in f.calls; a missing entry yields a "not found"
// error.
func (f *FakeBackendServices) GetBackendService(name string) (*compute.BackendService, error) {
	f.calls = append(f.calls, utils.Get)

	item, exists, err := f.backendServices.GetByKey(name)
	if !exists {
		return nil, fmt.Errorf("backend service %v not found", name)
	}
	if err != nil {
		return nil, err
	}

	svc := item.(*compute.BackendService)
	if svc.Name != name {
		return nil, fmt.Errorf("backend service %v not found", name)
	}
	return svc, nil
}
// CreateBackendService fakes backend service creation.  The injected error
// function (if any) runs first; on success the service's SelfLink is set to
// its name and it is stored.
func (f *FakeBackendServices) CreateBackendService(be *compute.BackendService) error {
	if ef := f.errFunc; ef != nil {
		if err := ef(utils.Create, be); err != nil {
			return err
		}
	}
	f.calls = append(f.calls, utils.Create)
	be.SelfLink = be.Name
	return f.backendServices.Update(be)
}
// DeleteBackendService fakes backend service deletion; deleting an unknown
// name yields a "not found" error.
func (f *FakeBackendServices) DeleteBackendService(name string) error {
	f.calls = append(f.calls, utils.Delete)

	item, exists, err := f.backendServices.GetByKey(name)
	if !exists {
		return fmt.Errorf("backend service %v not found", name)
	}
	if err != nil {
		return err
	}

	return f.backendServices.Delete(item)
}
// ListBackendServices fakes backend service listing.
func (f *FakeBackendServices) ListBackendServices() (*compute.BackendServiceList, error) {
	var svcs []*compute.BackendService
	for _, item := range f.backendServices.List() {
		svcs = append(svcs, item.(*compute.BackendService))
	}
	return &compute.BackendServiceList{Items: svcs}, nil
}
// UpdateBackendService fakes updating a backend service.
// The errFunc hook, when set, may reject the update before it is
// recorded; on success the stored copy is replaced.
func (f *FakeBackendServices) UpdateBackendService(be *compute.BackendService) error {
	if f.errFunc != nil {
		if err := f.errFunc(utils.Update, be); err != nil {
			return err
		}
	}
	f.calls = append(f.calls, utils.Update)
	return f.backendServices.Update(be)
}
// GetHealth fakes getting backend service health.
// It always reports a single HEALTHY endpoint on the service's port;
// instanceGroupLink is ignored by the fake.
func (f *FakeBackendServices) GetHealth(name, instanceGroupLink string) (*compute.BackendServiceGroupHealth, error) {
	be, err := f.GetBackendService(name)
	if err != nil {
		return nil, err
	}
	states := []*compute.HealthStatus{
		{
			HealthState: "HEALTHY",
			IpAddress:   "",
			Port:        be.Port,
		},
	}
	return &compute.BackendServiceGroupHealth{
		HealthStatus: states}, nil
}
// FakeProbeProvider implements the probeProvider interface for tests.
type FakeProbeProvider struct {
	probes map[ServicePort]*api_v1.Probe // canned probes, keyed by service port
}
// NewFakeProbeProvider returns a struct which satisfies probeProvider interface<|fim▁hole|>
// GetProbe returns the probe registered for the given port, or (nil, nil)
// when no probe is registered or the registered probe has no HTTPGet
// action.
func (pp *FakeProbeProvider) GetProbe(port ServicePort) (*api_v1.Probe, error) {
	if probe, exists := pp.probes[port]; exists && probe.HTTPGet != nil {
		return probe, nil
	}
	return nil, nil
}
|
// NewFakeProbeProvider returns a FakeProbeProvider serving probes from
// the supplied port-to-probe map.
func NewFakeProbeProvider(probes map[ServicePort]*api_v1.Probe) *FakeProbeProvider {
	return &FakeProbeProvider{probes: probes}
}
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod config;
pub mod common;<|fim▁hole|><|fim▁end|>
|
mod init;
pub use drivers::pci::init::pci_init;
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>#encoding=utf-8
from django.utils.safestring import mark_safe
from django.db import models
from django.utils.six import python_2_unicode_compatible
from mptt.models import MPTTModel
from mptt.fields import TreeForeignKey, TreeManyToManyField
from datetime import date, timedelta
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import gettext_lazy as __
from decimal import Decimal
from itertools import chain
# Create your models here.
a_strG = "<a onclick='return showRelatedObjectLookupPopup(this);' href='/admin/general/"
a_strW = "<a onclick='return showRelatedObjectLookupPopup(this);' href='/admin/Welcome/"
#a_str2 = "?_popup=1&_changelist_filters=_popup=1&t=human' target='_blank' style='margin-left:-100px'>"
a_str2 = "?_popup=1&t=human' target='_blank' >"
a_str3 = "?_popup=1&t=human' target='_blank'>"
a_edit = '<b>Edit</b>'
ul_tag1 = '<ul style="margin-left:-10em;">'
ul_tag = '<ul>'
str_none = __('(none)')
str_remove = 'erase'
def erase_id_link(field, id):
    """Build the admin "erase" anchor for a related object.

    ``field`` is the relation's field name and ``id`` the related object's
    pk; both are joined into the anchor's ``name`` attribute, which the
    admin-side JavaScript reads to know what to remove.  Returns the HTML
    string.
    """
    # The debug print() that used to echo the markup on every render was
    # removed; the returned string is unchanged.
    return '<a class="erase_id_on_box" name="'+str(field)+','+str(id)+'" href="javascript:;">'+str_remove+'</a>'
# C O N C E P T S - (Concepts, Ideas...)
@python_2_unicode_compatible
class Concept(MPTTModel): # Abstract
    """Abstract MPTT base for concept hierarchies (e.g. Type).

    Supplies a unique name, a free-text description and a self-referential
    parent link so concrete subclasses form a tree.
    """
    name = models.CharField(unique=True, verbose_name=_("Name"), max_length=200, help_text=_("The name of the Concept"), default="")
    description = models.TextField(blank=True, verbose_name=_("Description"))
    # Tree edge; deleting a parent cascades to its children.
    parent = TreeForeignKey('self', null=True, blank=True, related_name='children', on_delete=models.CASCADE)
    def __str__(self):
        return self.name
    class Meta:
        abstract = True
        verbose_name = _("Concept")
        verbose_name_plural = _("c- Concepts")
@python_2_unicode_compatible
class Type(Concept): # Create own ID's (TREE)
    """Concrete concept tree used to classify other models.

    ``clas`` optionally names the Django model / python class associated
    with a node; when present it is appended to the display name.
    """
    #concept = models.OneToOneField('Concept', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    clas = models.CharField(blank=True, verbose_name=_("Class"), max_length=200,
        help_text=_("Django model or python class associated to the Type"))
    #types = TreeManyToManyField('self', through='rel_Type_Types', verbose_name=_(u"Related Types"), blank=True)
    class Meta:
        verbose_name = _("c- Type")
        #verbose_name_plural = _(u"c- Types")
    def __str__(self):
        # Append the associated class name, when one is set.
        if self.clas is None or self.clas == '':
            return self.name
        else:
            return self.name+' ('+self.clas+')'
    def save(self, *args, **kwargs):
        # NOTE(review): name_ca / name_es / name_en are not declared on this
        # model -- presumably injected by django-modeltranslation; confirm.
        # Missing Catalan/Spanish translations are backfilled from English.
        if not self.name_ca:
            print("save: name_ca:"+self.name_en)
            self.name_ca = self.name_en
        if not self.name_es:
            print("save: name_es:"+self.name_en)
            self.name_es = self.name_en
        super(Type, self).save(*args, **kwargs)
"""
class rel_Type_Types(models.Model):
typ = TreeForeignKey('Type', on_delete=models.CASCADE)
typ2 = TreeForeignKey('Type', verbose_name=_(u"related Type"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='ty_typ+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"T_type")
verbose_name_plural = _(u"Types related the Type")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.typ2.__str__()
else:
return self.relation.gerund+' > '+self.typ2.__str__()
"""
# B E I N G S - (Éssers, Entitats, Projectes...)
"""
@python_2_unicode_compatible
class Being(models.Model): # Abstract
name = models.CharField(verbose_name=_(u"Name"), max_length=200, help_text=_(u"The name of the Entity"))
#being_type = TreeForeignKey('Being_Type', blank=True, null=True, verbose_name="Type of entity", on_delete=models.SET_NULL)
birth_date = models.DateField(blank=True, null=True, verbose_name=_(u"Born date"), help_text=_(u"The day of starting existence"))
death_date = models.DateField(blank=True, null=True, verbose_name=_(u"Die date"), help_text=_(u"The day of ceasing existence"))
class Meta:
abstract = True
def __str__(self):
return self.name.encode("utf-8")
"""
class Being_Type(Type):
    # Typed classifier for entities; the one-to-one parent link reuses the
    # shared Type row's id as this table's primary key (multi-table inheritance).
    typ = models.OneToOneField('Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name= _("Type of entity")
        verbose_name_plural = _("e--> Types of entities")
"""
@python_2_unicode_compatible
class Human(Being): # Create own ID's
nickname = models.CharField(max_length=50, blank=True, verbose_name=_(u"Nickname"), help_text=_(u"The nickname most used of the human entity"))
email = models.EmailField(max_length=100, blank=True, verbose_name=_(u"Email"), help_text=_(u"The main email address of the human entity"))
telephone_cell = models.CharField(max_length=20, blank=True, verbose_name=_(u"Mobile phone"), help_text=_(u"The main telephone of the human entity"))
telephone_land = models.CharField(max_length=20, blank=True, verbose_name=_(u"Land phone"))
website = models.CharField(max_length=100, blank=True, verbose_name=_(u"Web"), help_text=_(u"The main web url of the human entity"))
description = models.TextField(blank=True, null=True, verbose_name=_(u"Entity description"))
jobs = TreeManyToManyField('Job', through='rel_Human_Jobs', verbose_name=_(u"Activities, Jobs, Skills"), blank=True)
addresses = models.ManyToManyField('Address', through='rel_Human_Addresses', verbose_name=_(u"Addresses"), blank=True)
regions = models.ManyToManyField('Region', through='rel_Human_Regions', verbose_name=_(u"Regions"), blank=True)
records = models.ManyToManyField('Record', through='rel_Human_Records', verbose_name=_(u"Records"), blank=True)
materials = models.ManyToManyField('Material', through='rel_Human_Materials', verbose_name=_(u"Material artworks"), blank=True)
nonmaterials = models.ManyToManyField('Nonmaterial', through='rel_Human_Nonmaterials', verbose_name=_(u"Non-material artworks"), blank=True)
persons = models.ManyToManyField('Person', through='rel_Human_Persons', related_name='hum_persons', verbose_name=_(u"Persons"), blank=True)
projects = models.ManyToManyField('Project', through='rel_Human_Projects', related_name='hum_projects', verbose_name=_(u"Projects"), blank=True)
companies = models.ManyToManyField('Company', through='rel_Human_Companies', related_name='hum_companies', verbose_name=_(u"Companies"), blank=True)
class Meta:
verbose_name = _(u"Human")
verbose_name_plural = _(u"e- Humans")
def __str__(self):
if self.nickname is None or self.nickname == '':
return self.name
else:
return self.nickname+' ('+self.name+')'
def _my_accounts(self):
return list(chain(self.accountsCes.all(), self.accountsCrypto.all(), self.accountsBank.all()))
#_my_accounts.list = []
accounts = property(_my_accounts)
def _selflink(self):
if self.id:<|fim▁hole|> if hasattr(self, 'person'):
return mark_safe( a_strG + "person/" + str(self.person.id) + a_str2 + a_edit + "</a>") # % str(self.id))
elif hasattr(self, 'project'):
return mark_safe( a_strG + "project/" + str(self.project.id) + a_str2 + a_edit + "</a>")# % str(self.id) )
else:
return "Not present"
_selflink.allow_tags = True
_selflink.short_description = ''
self_link = property (_selflink)
def _ic_membership(self):
try:
#print(self.ic_membership_set.all())
if hasattr(self, 'ic_person_membership_set'):
ic_ms = self.ic_person_membership_set.all()
out = ul_tag
for ms in ic_ms:
out += '<li>'+a_strW + "ic_person_membership/" + str(ms.id) + a_str3 + '<b>'+ms.name +"</b></a></li>"
return out+'</ul>'
elif hasattr(self, 'ic_project_membership_set'):
ic_ms = self.ic_project_membership_set.all()
out = ul_tag
for ms in ic_ms:
out += '<li>'+a_strW + "ic_project_membership/" + str(ms.id) + a_str3 + '<b>'+ms.name +"</b></a></li>"
if out == ul_tag:
return str_none
return out+'</ul>'
return str_none
except:
return str_none
_ic_membership.allow_tags = True
_ic_membership.short_description = _(u"IC Membership")
def _fees_to_pay(self):
try:
if self.out_fees.all().count() > 0:
out = ul_tag
for fe in self.out_fees.all():
if not fe.payed:
out += '<li>'+a_strW + "fee/" + str(fe.id) + a_str3 +'<b>'+ fe.name + "</b></a></li>"
if out == ul_tag:
return str_none
return out+'</ul>'
return str_none
except:
return str_none
_fees_to_pay.allow_tags = True
_fees_to_pay.short_description = _(u"Fees to pay")
def __init__(self, *args, **kwargs):
super(Human, self).__init__(*args, **kwargs)
if not 'rel_tit' in globals():
rel_tit = Relation.objects.get(clas='holder')
#print('I N I T H U M A N : '+self.name)
'''if hasattr(self, 'accountsCes') and self.accountsCes.count() > 0:
recrels = rel_Human_Records.objects.filter(human=self, record__in=self.accountsCes.all())
if recrels.count() == 0:
for acc in self.accountsCes.all():
newrec, created = rel_Human_Records.objects.get_or_create(human=self, record=acc, relation=rel_tit)
print('- new_REC acc_Ces: CREATED:' + str(created) + ' :: ' + str(newrec))
if hasattr(self, 'accountsBank') and self.accountsBank.count() > 0:
recrels = rel_Human_Records.objects.filter(human=self, record__in=self.accountsBank.all())
if recrels.count() == 0:
for acc in self.accountsBank.all():
newrec, created = rel_Human_Records.objects.get_or_create(human=self, record=acc, relation=rel_tit)
print('- new_REC acc_Bank: CREATED:' + str(created) + ' :: ' + str(newrec))
if hasattr(self, 'accountsCrypto') and self.accountsCrypto.count() > 0:
recrels = rel_Human_Records.objects.filter(human=self, record__in=self.accountsCrypto.all())
if recrels.count() == 0:
for acc in self.accountsCrypto.all():
newrec, created = rel_Human_Records.objects.get_or_create(human=self, record=acc, relation=rel_tit)
print('- new_REC acc_Crypto: CREATED:'+str(created)+' :: '+str(newrec))
'''
@python_2_unicode_compatible
class Person(Human):
human = models.OneToOneField('Human', primary_key=True, parent_link=True, on_delete=models.CASCADE)
surnames = models.CharField(max_length=200, blank=True, verbose_name=_(u"Surnames"), help_text=_(u"The surnames of the Person"))
id_card = models.CharField(max_length=9, blank=True, verbose_name=_(u"ID/DNI/NIE"))
email2 = models.EmailField(blank=True, verbose_name=_(u"Alternate email"))
nickname2 = models.CharField(max_length=50, blank=True, verbose_name=_(u"Nickname in FairNetwork"))
class Meta:
verbose_name= _(u'Person')
verbose_name_plural= _(u'e- Persons')
def __str__(self):
if self.nickname is None or self.nickname == '':
if self.surnames is None or self.surnames == '':
return self.name+' '+self.nickname2
else:
return self.name+' '+self.surnames
else:
#return self.nickname
if self.surnames is None or self.surnames == '':
return self.name+' ('+self.nickname+')'
else:
return self.name+' '+self.surnames+' ('+self.nickname+')'
@python_2_unicode_compatible
class Project(MPTTModel, Human):
human = models.OneToOneField('Human', primary_key=True, parent_link=True, on_delete=models.CASCADE)
project_type = TreeForeignKey('Project_Type', blank=True, null=True, verbose_name=_(u"Type of project"), on_delete=models.SET_NULL)
parent = TreeForeignKey('self', null=True, blank=True, related_name='subprojects', verbose_name=_(u"Parent project"), on_delete=models.SET_NULL)
socialweb = models.CharField(max_length=100, blank=True, verbose_name=_(u"Social website"))
email2 = models.EmailField(blank=True, verbose_name=_(u"Alternate email"))
ecommerce = models.BooleanField(default=False, verbose_name=_(u"E-commerce?"))
#images = models.ManyToManyField('Image', blank=True, null=True, verbose_name=_(u"Images"))
def _is_collective(self):
if self.persons.count() < 2 and self.projects.count() < 2:
return False
else:
return True
_is_collective.boolean = True
_is_collective.short_description = _(u"is collective?")
collective = property(_is_collective)
#ref_persons = models.ManyToManyField('Person', blank=True, null=True, verbose_name=_(u"Reference Persons"))
class Meta:
verbose_name= _(u'Project')
verbose_name_plural= _(u'e- Projects')
def _get_ref_persons(self):
return self.human_persons.filter(relation__clas='reference')
def _ref_persons(self):
prs = self._get_ref_persons()
if prs.count() > 0:
out = ul_tag
for pr in prs:
out += '<li>'+str(pr)+'</li>'
return out+'</ul>'
return str_none
_ref_persons.allow_tags = True
_ref_persons.short_description = _(u"Reference person?")
def __str__(self):
if self.nickname is None or self.nickname == '':
if self.project_type:
return self.name+' ('+self.project_type.name+')'
else:
return self.name
else:
return self.nickname+' ('+self.name+')'
"""
class Project_Type(Being_Type):
    # Classifier for projects; parent link keeps the Being_Type id as pk.
    projectType_being_type = models.OneToOneField('Being_Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name = _("Type of Project")
        verbose_name_plural = _("e-> Types of Projects")
"""
class Company(Human):
human = models.OneToOneField('Human', primary_key=True, parent_link=True, on_delete=models.CASCADE)
company_type = TreeForeignKey('Company_Type', null=True, blank=True, verbose_name=_(u"Type of company"), on_delete=models.SET_NULL)
legal_name = models.CharField(max_length=200, blank=True, null=True, verbose_name=_(u"Legal name"))
vat_number = models.CharField(max_length=20, blank=True, null=True, verbose_name=_(u"VAT/CIF"))
class Meta:
verbose_name = _(u"Company")
verbose_name_plural = _(u"e- Companies")
"""
class Company_Type(Being_Type):
    # Classifier for companies; parent link keeps the Being_Type id as pk.
    companyType_being_type = models.OneToOneField('Being_Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name = _("Type of Company")
        verbose_name_plural = _("e-> Types of Companies")
"""
@python_2_unicode_compatible
class rel_Human_Jobs(models.Model):
human = models.ForeignKey('Human', on_delete=models.CASCADE)
job = TreeForeignKey('Job', verbose_name=_(u"Job"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='hu_job+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"H_job")
verbose_name_plural = _(u"Skills of the entity")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.job.__str__()
else:
return self.relation.gerund+' > '+self.job.__str__()
@python_2_unicode_compatible
class rel_Human_Addresses(models.Model):
human = models.ForeignKey('Human', on_delete=models.CASCADE)
address = models.ForeignKey('Address', related_name='rel_human', verbose_name=_(u"Address"), on_delete=models.CASCADE,
help_text=_(u"Once choosed the address, save the profile to see the changes."))
relation = TreeForeignKey('Relation', related_name='hu_adr+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.CASCADE)
main_address = models.BooleanField(default=False, verbose_name=_(u"Main address?"))
class Meta:
verbose_name = _(u"H_addr")
verbose_name_plural = _(u"Addresses of the entity")
def __str__(self):
if self.relation is None or self.relation.gerund is None or self.relation.gerund == '':
return self.address.__str__()
else:
return self.relation.gerund+' > '+self.address.__str__()
def _is_main(self):
return self.main_address
_is_main.boolean = True
is_main = property(_is_main)
def _selflink(self):
if self.address:
return self.address._selflink()
_selflink.allow_tags = True
_selflink.short_description = ''
@python_2_unicode_compatible
class rel_Human_Regions(models.Model):
human = models.ForeignKey('Human', on_delete=models.CASCADE)
region = TreeForeignKey('Region', verbose_name=_(u"Region"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='hu_reg+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"H_reg")
verbose_name_plural = _(u"Related regions")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.region.__str__()
else:
return self.relation.gerund+' > '+self.region.__str__()
@python_2_unicode_compatible
class rel_Human_Records(models.Model):
human = models.ForeignKey('Human', on_delete=models.CASCADE)
record = models.ForeignKey('Record', verbose_name=_(u"Record"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='hu_rec+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"H_rec")
verbose_name_plural = _(u"Related records")
def __str__(self):
if not hasattr(self.relation, 'gerund') or self.relation.gerund is None or self.relation.gerund == '':
return self.record.__str__()
else:
if not hasattr(self.record, 'record_type') or self.record.record_type is None or self.record.record_type == '':
return self.relation.gerund+' > '+self.record.__str__()
return self.record.record_type.name+': '+self.relation.gerund+' > '+self.record.__str__()
def _selflink(self):
return self.record._selflink()
_selflink.allow_tags = True
_selflink.short_description = ''
@python_2_unicode_compatible
class rel_Human_Materials(models.Model):
human = models.ForeignKey('Human', on_delete=models.CASCADE)
material = models.ForeignKey('Material', verbose_name=_(u"Material artwork"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='hu_mat+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"H_mat")
verbose_name_plural = _(u"Material Artworks")
def __str__(self):
if not hasattr(self.relation, 'gerund') or self.relation.gerund is None or self.relation.gerund == '':
return self.material.__str__()
else:
return self.relation.gerund+' > '+self.material.__str__()
@python_2_unicode_compatible
class rel_Human_Nonmaterials(models.Model):
human = models.ForeignKey('Human', on_delete=models.CASCADE)
nonmaterial = models.ForeignKey('Nonmaterial', verbose_name=_(u"Non-material artwork"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='hu_non+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"H_inm")
verbose_name_plural = _(u"Non-material Artworks")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.nonmaterial.__str__()
else:
return self.relation.gerund+' > '+self.nonmaterial.__str__()
@python_2_unicode_compatible
class rel_Human_Persons(models.Model):
human = models.ForeignKey('Human', related_name='human_persons', on_delete=models.CASCADE)
person = models.ForeignKey('Person', related_name='rel_humans', verbose_name=_(u"Related person"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='hu_hum+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"H_per")
verbose_name_plural = _(u"Related persons")
def __str__(self):
if self.relation is None or self.relation.gerund is None or self.relation.gerund == '':
return self.person.__str__()
else:
return self.relation.gerund+' > '+self.person.__str__()
def _selflink(self):
return self.person._selflink()
_selflink.allow_tags = True
_selflink.short_description = ''
@python_2_unicode_compatible
class rel_Human_Projects(models.Model):
human = models.ForeignKey('Human', related_name='human_projects', on_delete=models.CASCADE)
project = TreeForeignKey('Project', related_name='rel_humans', verbose_name=_(u"Related project"), on_delete=models.CASCADE,
help_text=_(u"Once choosed the project, save the profile to see the changes."))
relation = TreeForeignKey('Relation', related_name='hu_hum+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"H_pro")
verbose_name_plural = _(u"Related projects")
def __str__(self):
if self.project.project_type is None or self.project.project_type == '':
if self.relation.gerund is None or self.relation.gerund == '':
return self.project.__str__()
else:
return self.relation.gerund+' > '+self.project.__str__()
else:
if not self.relation or self.relation.gerund is None or self.relation.gerund == '':
return '('+self.project.project_type.being_type.name+') rel? > '+self.project.name
else:
return '('+self.project.project_type.being_type.name+') '+self.relation.gerund+' > '+self.project.name
@python_2_unicode_compatible
class rel_Human_Companies(models.Model):
human= models.ForeignKey('Human', related_name='human_companies', on_delete=models.CASCADE)
company = models.ForeignKey('Company', verbose_name=_(u"related Company"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='hu_hum+', blank=True, null=True, verbose_name=_(u"relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"H_emp")
verbose_name_plural = _(u"Related companies")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.company.__str__()
else:
return '('+self.company.company_type.being_type.name+') '+self.relation.gerund+' > '+self.company.__str__()
"""
'''
class rel_Address_Jobs(models.Model):
address = models.ForeignKey('Address')
job = models.ForeignKey('Job', verbose_name=_(u"related Art/Job"))
relation = TreeForeignKey('Relation', related_name='ad_job+', blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"job")
verbose_name_plural = _(u"related Arts/Jobs")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.job.__str__()
else:
return self.relation.gerund+' > '+self.job.__str__()
'''
# A R T S - (Verbs, Relacions, Arts, Oficis, Sectors...)
@python_2_unicode_compatible
class Art(MPTTModel): # Abstract
    """Abstract MPTT base for verb-like concepts (Relation, Job).

    Each node carries a name plus optional infinitive (``verb``) and
    gerund forms used when rendering relations.
    """
    name = models.CharField(unique=True, max_length=200, verbose_name=_("Name"), help_text=_("The name of the Art"))
    verb = models.CharField(max_length=200, blank=True, verbose_name=_("Verb"), help_text=_("The verb of the action, infinitive"))
    gerund = models.CharField(max_length=200, blank=True, verbose_name=_("Gerund"), help_text=_("The verb in gerund, present"))
    description = models.TextField(blank=True, verbose_name=_("Description"))
    parent = TreeForeignKey('self', null=True, blank=True, related_name='subarts', on_delete=models.SET_NULL)
    def __str__(self):
        # "name, verb" when an infinitive is set, else just the name.
        if self.verb:
            return self.name+', '+self.verb
        else:
            return self.name
    class Meta:
        abstract = True
        verbose_name = _("Art")
        verbose_name_plural = _("a- Arts")
@python_2_unicode_compatible
class Relation(Art): # Create own ID's (TREE)
    """Named relation between records, optionally tied to a python class."""
    #art = models.OneToOneField('Art', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    clas = models.CharField(blank=True, verbose_name=_("Class"), max_length=50,
        help_text=_("Django model or python class associated to the Relation"))
    class Meta:
        verbose_name= _('Relation')
        verbose_name_plural= _('a- Relations')
    def __str__(self):
        # Prefer the infinitive verb as the label, falling back to the
        # name; append the associated class when one is set.
        label = self.verb if self.verb else self.name
        if self.clas is None or self.clas == '':
            return label
        return label+' ('+self.clas+')'
@python_2_unicode_compatible
class Job(Art): # Create own ID's (TREE)
    """Skill/occupation node; may be linked to other Jobs through rel_Job_Jobs."""
    #art = models.OneToOneField('Art', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    clas = models.CharField(blank=True, verbose_name=_("Clas"), max_length=50, help_text=_("Django model or python class associated to the Job"))
    # Directed self-relation through an explicit intermediate model.
    jobs = models.ManyToManyField(
        'self',
        through='rel_Job_Jobs',
        through_fields=('job1', 'job2'),
        symmetrical=False,
        blank=True,
        verbose_name=_("related Skills"))
    class Meta:
        verbose_name= _('Skill')
        verbose_name_plural= _('a- Skills')
    def __str__(self):
        # Append the associated class name, when one is set.
        if self.clas is None or self.clas == '':
            return self.name#+', '+self.verb
        else:
            return self.name+' ('+self.clas+')'
@python_2_unicode_compatible
class rel_Job_Jobs(models.Model):
    """Through model linking a Job (job1) to a related Job (job2), optionally qualified by a Relation."""
    job1 = models.ForeignKey('Job', on_delete=models.CASCADE, related_name="rel_jobs1")
    job2 = TreeForeignKey('Job', on_delete=models.CASCADE, related_name="rel_jobs2") #, verbose_name=_(u"related Jobs")
    relation = TreeForeignKey('Relation', on_delete=models.SET_NULL, related_name='jo_job+', blank=True, null=True)
    class Meta:
        verbose_name = _("J_job")
        verbose_name_plural = _("Related jobs")
    def __str__(self):
        # Guard the nullable relation before dereferencing it (the field is
        # null=True and SET_NULL), and show the *target* job (job2) in the
        # no-gerund branch, consistent with the other rel_* __str__ helpers;
        # the previous code returned job1 here, which reads as a slip.
        if self.relation is None or self.relation.gerund is None or self.relation.gerund == '':
            return self.job2.__str__()
        else:
            return '('+self.job1.name+') '+self.relation.gerund+' > '+self.job2.__str__()
#rel_tit = Relation.objects.get(clas='holder')
# S P A C E S - (Regions, Places, Addresses...)
@python_2_unicode_compatible
class Space(models.Model): # Abstract
    """Abstract base for localizable things (Address, Region): just a name."""
    name = models.CharField(verbose_name=_("Name"), max_length=100, help_text=_("The name of the Space"))
    #space_type = TreeForeignKey('Space_Type', blank=True, null=True, verbose_name=_(u"Type of space"), on_delete=models.SET_NULL)
    #m2 = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
    def __str__(self):
        return self.name
    class Meta:
        abstract = True
class Space_Type(Type):
    # Classifier for spaces; the parent link reuses the Type id as pk.
    typ = models.OneToOneField('Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name= _("Type of Space")
        verbose_name_plural= _("s--> Types of Spaces")
@python_2_unicode_compatible
class Address(Space): # Create own ID's
    """Postal address with geo fields, related Jobs, and admin display helpers."""
    #space = models.OneToOneField('Space', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    address_type = TreeForeignKey('Address_Type', blank=True, null=True, verbose_name=_("Type of address"), on_delete=models.SET_NULL)
    p_address = models.CharField(max_length=200, verbose_name=_("Address"), help_text=_("Postal address able to receive by post"))
    town = models.CharField(max_length=150, verbose_name=_("Town"), help_text=_("Town or City"))
    postalcode = models.CharField(max_length=5, blank=True, null=True, verbose_name=_("Postal/Zip code"))
    region = TreeForeignKey('Region', blank=True, null=True, related_name='rel_addresses', verbose_name=_("Region"), on_delete=models.SET_NULL)
    #telephone = models.CharField(max_length=20, blank=True, verbose_name=_(u"Telephone"))
    ic_larder = models.BooleanField(default=False, verbose_name=_("Is a Larder?"))
    #main_address = models.BooleanField(default=False, verbose_name=_(u"Main address?"))
    size = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True, verbose_name=_('Size'), help_text=_("Number of units (accept 2 decimals)"))
    size_unit = models.ForeignKey('Unit', blank=True, null=True, verbose_name=_("Unit of measure"), on_delete=models.SET_NULL)
    # NOTE(review): longitude/latitude as IntegerField loses precision for
    # geographic coordinates -- confirm whether a DecimalField was intended.
    longitude = models.IntegerField(blank=True, null=True, verbose_name=_("Longitude (geo)"))
    latitude = models.IntegerField(blank=True, null=True, verbose_name=_("Latitude (geo)"))
    jobs = models.ManyToManyField('Job', related_name='addresses', blank=True, verbose_name=_("Related Jobs"))
    description = models.TextField(blank=True, null=True, verbose_name=_("Description of the Address"), help_text=_("Exact localization, indications to arrive or comments"))
    def _main_addr_of(self):
        # Original rel_Human_Addresses lookup is disabled (the Human models
        # are commented out above); currently always answers "nobody".
        '''rel = rel_Human_Addresses.objects.filter(address=self, main_address=True).first() #TODO accept various and make a list
        if rel:
            return rel.human
        else:'''
        return _('nobody')
    _main_addr_of.allow_tags = True
    _main_addr_of.short_description = _("Main address of")
    main_addr_of = property(_main_addr_of)
    class Meta:
        verbose_name= _('Address')
        verbose_name_plural= _('s- Addresses')
    def __str__(self):
        return self.name+' ('+self.p_address+' - '+self.town+')'
    def _jobs_list(self):
        # HTML <ul> of related jobs with per-item erase links, for the admin.
        out = ul_tag
        for jo in self.jobs.all():
            out += '<li><b>'+jo.verb+'</b> - '+erase_id_link('jobs', str(jo.id))+'</li>'
        if out == ul_tag:
            return str_none
        return out+'</ul>'
    _jobs_list.allow_tags = True
    _jobs_list.short_description = ''
    def _selflink(self):
        # Admin popup edit link; unlike other _selflink helpers this one is
        # not wrapped in mark_safe and relies on allow_tags only.
        if self.id:
            return a_strG + "address/" + str(self.id) + a_str2 + a_edit +"</a>"# % str(self.id)
        else:
            return "Not present"
    _selflink.allow_tags = True
class Address_Type(Space_Type):
    # Classifier for addresses; parent link keeps the Space_Type id as pk.
    addrTypeSpace_type = models.OneToOneField('Space_Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name = _("Type of Address")
        verbose_name_plural = _("s-> Types of Addresses")
class Region(MPTTModel, Space): # Create own ID's (TREE)
    """Hierarchical geographic region (tree via MPTT parent links)."""
    #space = models.OneToOneField('Space', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    region_type = TreeForeignKey('Region_Type', blank=True, null=True, verbose_name=_("Type of region"), on_delete=models.SET_NULL)
    parent = TreeForeignKey('self', null=True, blank=True, related_name='subregions', on_delete=models.SET_NULL)
    description = models.TextField(blank=True, null=True, verbose_name=_("Description of the Region"))
    class Meta:
        verbose_name= _('Region')
        verbose_name_plural= _('s- Regions')
class Region_Type(Space_Type):
    # Classifier for regions; parent link keeps the Space_Type id as pk.
    regionType_space_type = models.OneToOneField('Space_Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name = _("Type of Region")
        verbose_name_plural = _("s-> Types of Regions")
# A R T W O R K S - (Obres, Coses, Registres, Documents...)
@python_2_unicode_compatible
class Artwork(models.Model): # Abstract
    """Abstract base for produced things (records, documents...): name + description."""
    name = models.CharField(verbose_name=_("Name"), max_length=200, blank=True, null=True) #, help_text=_(u"The name of the artwork (Record, Unit, Thing)"))
    #artwork_type = TreeForeignKey('Artwork_Type', blank=True, verbose_name=_(u"Type of Artwork"), on_delete=models.SET_NULL)
    description = models.TextField(blank=True, null=True, verbose_name=_("Description"))
    def __str__(self):
        return self.name
    class Meta:
        abstract = True
class Artwork_Type(Type):
    # Classifier for artworks; parent link keeps the Type id as pk.
    typ = models.OneToOneField('Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name = _("Type of Artwork")
        verbose_name_plural = _("o--> Types of Artworks")
# - - - - - N O N - M A T E R I A L
"""
@python_2_unicode_compatible
class rel_Nonmaterial_Records(models.Model):
nonmaterial = models.ForeignKey('Nonmaterial')
record = models.ForeignKey('Record', verbose_name=_(u"related Record"))
relation = TreeForeignKey('Relation', related_name='no_reg+', blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"N_rec")
verbose_name_plural = _(u"Related records")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.record.__str__()
else:
return '('+self.record.record_type.name+') '+self.relation.gerund+' > '+self.record.__str__()
@python_2_unicode_compatible
class rel_Nonmaterial_Addresses(models.Model):
nonmaterial = models.ForeignKey('Nonmaterial')
address = models.ForeignKey('Address', verbose_name=_(u"related Address"))
relation = TreeForeignKey('Relation', related_name='no_adr+', blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"N_adr")
verbose_name_plural = _(u"Related addresses")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.address.__str__()
else:
return '('+self.address.address_type.name+') '+self.relation.gerund+' > '+self.address.__str__()
@python_2_unicode_compatible
class rel_Nonmaterial_Jobs(models.Model):
nonmaterial = models.ForeignKey('Nonmaterial')
job = models.ForeignKey('Job', related_name='nonmaterials', verbose_name=_(u"related Arts/Jobs"))
relation = TreeForeignKey('Relation', related_name='no_job+', blank=True, null=True, verbose_name=_(u"Relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"N_ofi")
verbose_name_plural = _(u"Related Arts/Jobs")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.job.__str__()
else:
return self.relation.gerund+' > '+self.job.__str__()
@python_2_unicode_compatible
class rel_Nonmaterial_Nonmaterials(models.Model):
nonmaterial = models.ForeignKey('Nonmaterial')
nonmaterial2 = models.ForeignKey('Nonmaterial', related_name='subnonmaterials', verbose_name=_(u"related Non-material Artworks"))
relation = TreeForeignKey('Relation', related_name='ma_mat+', blank=True, null=True, verbose_name=_(u"Relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"N_mat")
verbose_name_plural = _(u"related Non-material artworks")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.nonmaterial2.__str__()
else:
return '('+self.nonmaterial2.material_type.name+') '+self.relation.gerund+' > '+self.nonmaterial2.__str__()
class Nonmaterial(Artwork): # Create own ID's
nonmaterial_type = TreeForeignKey('Nonmaterial_Type', blank=True, null=True, verbose_name=_(u"Type of non-material artwork"), on_delete=models.SET_NULL)
records = models.ManyToManyField('Record', through='rel_Nonmaterial_Records', blank=True, verbose_name=_(u"related Records"))
addresses = models.ManyToManyField('Address', through='rel_Nonmaterial_Addresses', blank=True, verbose_name=_(u"related Addresses"))
jobs = models.ManyToManyField('Job', through='rel_Nonmaterial_Jobs', blank=True, verbose_name=_(u"related Arts/Jobs"))
nonmaterials = models.ManyToManyField('self', through='rel_Nonmaterial_Nonmaterials', symmetrical=False, blank=True, verbose_name=_(u"related Non-material artworks"))
class Meta:
verbose_name = _(u"Non-material Artwork")
verbose_name_plural = _(u"o- Non-material Artworks")
"""
class Nonmaterial_Type(Artwork_Type):
    """Artwork_Type subtree for non-material artworks."""
    # Parent link into Artwork_Type (multi-table inheritance).
    nonmaterialType_artwork_type = models.OneToOneField('Artwork_Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name= _("Type of Non-material artwork")
        verbose_name_plural= _("o-> Types of Non-material artworks")
"""
class Image(Nonmaterial):
image_nonmaterial = models.OneToOneField('Nonmaterial', primary_key=True, parent_link=True, on_delete=models.CASCADE)
image_image = models.ImageField(upload_to='files/images', height_field='height', width_field='width',
blank=True, null=True,
verbose_name=_(u"Image (jpg/png)"))
#footer = models.TextField(blank=True, null=True, verbose_name=_(u"Image caption"))
url = models.URLField(blank=True, null=True, verbose_name=_(u"Url of the image"))
height = models.IntegerField(blank=True, null=True, verbose_name=_(u"Height"))
width = models.IntegerField(blank=True, null=True, verbose_name=_(u"Width"))
class Meta:
verbose_name = _(u"Image")
verbose_name_plural = _(u"o- Images")
"""
# - - - - - M A T E R I A L
"""
@python_2_unicode_compatible
class rel_Material_Nonmaterials(models.Model):
material = models.ForeignKey('Material', on_delete=models.CASCADE)
nonmaterial = models.ForeignKey('Nonmaterial', verbose_name=_(u"related Non-material"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='ma_non+', blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"M_inm")
verbose_name_plural = _(u"related Non-materials")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.nonmaterial.__str__()
else:
return '('+self.nonmaterial.nonmaterial_type.name+') '+self.relation.gerund+' > '+self.nonmaterial.__str__()
@python_2_unicode_compatible
class rel_Material_Records(models.Model):
material = models.ForeignKey('Material', on_delete=models.CASCADE)
record = models.ForeignKey('Record', verbose_name=_(u"related Record"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='ma_reg+', blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"M_rec")
verbose_name_plural = _(u"related Records")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.record.__str__()
else:
return '('+self.record.record_type.name+') '+self.relation.gerund+' > '+self.record.__str__()
@python_2_unicode_compatible
class rel_Material_Addresses(models.Model):
material = models.ForeignKey('Material', on_delete=models.CASCADE)
address = models.ForeignKey('Address', related_name='materials', verbose_name=_(u"related Address"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='ma_adr+', blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"M_adr")
verbose_name_plural = _(u"related Addresses")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.address.__str__()
else:
return '('+self.address.address_type.name+') '+self.relation.gerund+' > '+self.address.__str__()
@python_2_unicode_compatible
class rel_Material_Materials(models.Model):
material = models.ForeignKey('Material', on_delete=models.CASCADE)
material2 = models.ForeignKey('Material', related_name='submaterials', verbose_name=_(u"related Material artworks"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='ma_mat+', blank=True, null=True, verbose_name=_(u"Relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"M_mat")
verbose_name_plural = _(u"related Material artworks")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.material2.__str__()
else:
return '('+self.material2.material_type.name+') '+self.relation.gerund+' > '+self.material2.__str__()
@python_2_unicode_compatible
class rel_Material_Jobs(models.Model):
    material = models.ForeignKey('Material', on_delete=models.CASCADE)
job = models.ForeignKey('Job', related_name='materials', verbose_name=_(u"related Arts/Jobs"), on_delete=models.CASCADE)
relation = TreeForeignKey('Relation', related_name='ma_job+', blank=True, null=True, verbose_name=_(u"Relation"), on_delete=models.SET_NULL)
class Meta:
verbose_name = _(u"M_ofi")
verbose_name_plural = _(u"related Arts/Jobs")
def __str__(self):
if self.relation.gerund is None or self.relation.gerund == '':
return self.job.__str__()
else:
return self.relation.gerund+' > '+self.job.__str__()
class Material(Artwork): # Create own ID's
material_type = TreeForeignKey('Material_Type', blank=True, null=True, verbose_name=_(u"Type of physical artwork"), on_delete=models.SET_NULL)
nonmaterials = models.ManyToManyField('Nonmaterial', through='rel_Material_Nonmaterials', blank=True, verbose_name=_(u"related Non-materials"))
records = models.ManyToManyField('Record', through='rel_Material_Records', blank=True, verbose_name=_(u"related Records"))
addresses = models.ManyToManyField('Address', through='rel_Material_Addresses', blank=True, verbose_name=_(u"related Addresses"))
materials = models.ManyToManyField('self', through='rel_Material_Materials', symmetrical=False, blank=True, verbose_name=_(u"related Material artworks"))
jobs = models.ManyToManyField('Job', through='rel_Material_Jobs', blank=True, verbose_name=_(u"related Arts/Jobs"))
class Meta:
verbose_name = _(u"Material Artwork")
verbose_name_plural = _(u"o- Material Artworks")
def _addresses_list(self):
out = ul_tag
print(self.addresses.all())
if self.addresses.all().count() > 0:
for add in self.addresses.all():
rel = add.materials.filter(material=self).first().relation
out += '<li>'+rel.gerund+': <b>'+add.__str__()+'</b></li>'
return out+'</ul>'
return str_none
_addresses_list.allow_tags = True
_addresses_list.short_description = _(u"related Addresses?")
def _jobs_list(self):
out = ul_tag
print(self.jobs.all())
if self.jobs.all().count() > 0:
for job in self.jobs.all():
rel = job.materials.filter(material=self).first().relation
out += '<li>'+rel.gerund+': <b>'+job.__str__()+'</b></li>'
return out+'</ul>'
return str_none
_jobs_list.allow_tags = True
_jobs_list.short_description = _(u"related Arts/Jobs?")
"""
class Material_Type(Artwork_Type):
    """Artwork_Type subtree for material (physical) artworks."""
    # Parent link into Artwork_Type (multi-table inheritance).
    materialType_artwork_type = models.OneToOneField('Artwork_Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name= _("Type of Material artwork")
        verbose_name_plural= _("o-> Types of Material artworks")
"""
@python_2_unicode_compatible
class Asset(Material):
asset_material = models.OneToOneField('Material', primary_key=True, parent_link=True, on_delete=models.CASCADE)
asset_human = models.ForeignKey('Human', verbose_name=_(u"Entity"), on_delete=models.CASCADE)
reciprocity = models.TextField(blank=True, verbose_name=_(u"Reciprocity"))
class Meta:
verbose_name = _(u"Asset")
verbose_name_plural = _(u"o- Assets")
def __str__(self):
return '('+self.material_type.name+') '+self.material.name
def _selflink(self):
if self.id:
return a_strG + "asset/" + str(self.id) + a_str2 + a_edit +"</a>"# % str(self.id)
else:
return "Not present"
_selflink.allow_tags = True
_selflink.short_description = ''
"""
# - - - - - U N I T S
from valuenetwork.valueaccounting.models import Unit as Ocp_Unit
@python_2_unicode_compatible
class Unit(Artwork): # Create own ID's
    """A unit of measure/currency, optionally bridged to an OCP Unit."""
    unit_type = TreeForeignKey('Unit_Type', blank=True, null=True, verbose_name=_("Type of Unit"), on_delete=models.SET_NULL)
    # Short code or symbol (max 4 chars), e.g. a currency code.
    code = models.CharField(max_length=4, verbose_name=_("Code or Symbol"))
    region = TreeForeignKey('Region', blank=True, null=True, verbose_name=_("related use Region"), on_delete=models.SET_NULL)
    #human = models.ForeignKey('Human', blank=True, null=True, verbose_name=_(u"related Entity"))
    # Optional one-to-one bridge to valuenetwork's Unit (imported as Ocp_Unit).
    ocp_unit = models.OneToOneField(Ocp_Unit, blank=True, null=True, verbose_name=_("OCP Unit"), related_name="gen_unit", on_delete=models.SET_NULL)
    class Meta:
        verbose_name= _('Unit')
        verbose_name_plural= _('o- Units')
    def __str__(self):
        # Units linked to an OCP unit are tagged with a trailing ' <'.
        if hasattr(self, 'ocp_unit') and self.ocp_unit:
            return self.name+' <'
        else:
            # NOTE(review): unit_type is nullable; this raises
            # AttributeError for untyped units — confirm data guarantees.
            return self.unit_type.name+': '+self.name
class Unit_Type(Artwork_Type):
    """Artwork_Type subtree for units (currencies, measures...)."""
    # Parent link into Artwork_Type (multi-table inheritance).
    unitType_artwork_type = models.OneToOneField('Artwork_Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name = _("Type of Unit")
        verbose_name_plural = _("o-> Types of Units")
# - - - - - R E C O R D
@python_2_unicode_compatible
class Record(Artwork): # Create own ID's
    """A generic record artwork, optionally typed via Record_Type."""
    record_type = TreeForeignKey('Record_Type', blank=True, null=True, verbose_name=_("Type of Record"), on_delete=models.SET_NULL)
    # Auto-updated timestamp of the last save (hidden from forms).
    changed_date = models.DateTimeField(auto_now=True, blank=True, null=True, editable=False)
    class Meta:
        verbose_name= _('Record')
        verbose_name_plural= _('o- Records')
    def __str__(self):
        # Prefix the name with the type when one is set.
        if self.record_type is None or self.record_type == '':
            return self.name
        else:
            return self.record_type.name+': '+self.name
    def _selflink(self):
        # Admin helper: builds an HTML edit link from the module-level
        # a_strG / a_str2 / a_edit fragments (defined elsewhere in this file).
        if self.id:
            return a_strG + "record/" + str(self.id) + a_str2 + a_edit +"</a>"# % str(self.id)
        else:
            return "Not present"
    _selflink.allow_tags = True
class Record_Type(Artwork_Type):
    """Artwork_Type subtree for records."""
    # Parent link into Artwork_Type (multi-table inheritance).
    recordType_artwork_type = models.OneToOneField('Artwork_Type', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    class Meta:
        verbose_name= _('Type of Record')
        verbose_name_plural= _('o-> Types of Records')
@python_2_unicode_compatible
class UnitRatio(Record):
    """Conversion record between two Units: in_unit * rate = out_unit."""
    record = models.OneToOneField('Record', primary_key=True, parent_link=True, on_delete=models.CASCADE)
    in_unit = models.ForeignKey('Unit', related_name='ratio_in', verbose_name=_("in Unit"), on_delete=models.CASCADE)
    # Multiplier applied to in_unit amounts to express them in out_unit.
    rate = models.DecimalField(max_digits=50, decimal_places=9, verbose_name=_("Ratio multiplier"), default=Decimal("0.0"))
    out_unit = models.ForeignKey('Unit', related_name='ratio_out', verbose_name=_("out Unit"), on_delete=models.CASCADE)
    class Meta:
        verbose_name = _("Equivalence between Units")
        verbose_name_plural = _("o- Equivalences between Units")
    def __str__(self):
        return self.in_unit.name+' * '+str(self.rate)+' = '+self.out_unit.name
"""
@python_2_unicode_compatible
class AccountCes(Record):
record = models.OneToOneField('Record', primary_key=True, parent_link=True, on_delete=models.CASCADE)
accCes_human = models.ForeignKey('Human', related_name='accountsCes', verbose_name=_(u"Owner human entity"), on_delete=models.CASCADE)
entity = models.ForeignKey('Project', verbose_name=_(u"Network of the account"), on_delete=models.CASCADE)
unit = models.ForeignKey('Unit', verbose_name=_(u"Unit (currency)"), on_delete=models.CASCADE)
code = models.CharField(max_length=10, blank=True, null=True, verbose_name=_(u"Network code"))
number = models.CharField(max_length=4, blank=True, null=True, verbose_name=_(u"Account number"))
class Meta:
verbose_name= _(u'CES Account')
verbose_name_plural= _(u'o- CES Accounts')
def __str__(self):
return '('+self.unit.code+') '+self.accCes_human.nickname + ' ' + self.code + self.number#+' '+self.name
@python_2_unicode_compatible
class AccountBank(Record):
record = models.OneToOneField('Record', primary_key=True, parent_link=True, on_delete=models.CASCADE)
accBnk_human = models.ForeignKey('Human', related_name='accountsBank', verbose_name=_(u"Owner human entity"), on_delete=models.CASCADE)
company = models.ForeignKey('Company', blank=True, null=True, verbose_name=_(u"Bank entity"), on_delete=models.SET_NULL)
unit = models.ForeignKey('Unit', blank=True, null=True, verbose_name=_(u"Unit (currency)"), on_delete=models.SET_NULL)
code = models.CharField(max_length=11, blank=True, null=True, verbose_name=_(u"SWIFT/BIC Code"))
number = models.CharField(max_length=34, blank=True, null=True, verbose_name=_(u"IBAN Account number"))
bankcard = models.BooleanField(default=False, verbose_name=_(u"with bank Card?"))
class Meta:
verbose_name= _(u'Bank Account')
verbose_name_plural= _(u'o- Bank Accounts')
def __str__(self):
try:
return '('+self.unit.code+') '+self.company.nickname+': '+self.accBnk_human.nickname + ' ' + self.number
except:
return "<projecte buit>"
@python_2_unicode_compatible
class AccountCrypto(Record):
record = models.OneToOneField('Record', primary_key=True, parent_link=True, on_delete=models.CASCADE)
accCrypt_human = models.ForeignKey('Human', related_name='accountsCrypto', verbose_name=_(u"Owner human entity"), on_delete=models.CASCADE)
unit = models.ForeignKey('Unit', verbose_name=_(u"Unit (currency)"), on_delete=models.CASCADE)
number = models.CharField(max_length=34, blank=True, verbose_name=_(u"Address of the wallet"))
class Meta:
verbose_name = _(u"Cryptocurrency Account")
verbose_name_plural = _(u"o- Cryptocurrency Accounts")
def __str__(self):
return '('+self.unit.code+') '+self.accCrypt_human.nickname + ' ' + self.number # +' '+self.name
"""
# B A S I C D B R E C O R D S ##
from django.db.models.signals import post_migrate
#from general.apps import GeneralAppConfig
def create_general_types(**kwargs):
    """Seed the base Type taxonomy (Being/Artwork/Space plus key subtypes).

    Accepts **kwargs so it can be wired as a signal receiver (see the
    commented-out post_migrate.connect below).  Idempotent via
    get_or_create.  Returns (and prints) an HTML summary string listing
    only the types that were newly created.
    """
    sep = ", "
    out = "Initial basic types created: <br>"
    # Root nodes of the taxonomy.
    being, created = Type.objects.get_or_create(name_en='Being', clas='Being')
    if created: out += str(being)+sep
    artwork, created = Type.objects.get_or_create(name_en='Artwork', clas='Artwork')
    if created: out += str(artwork)+sep
    space, created = Type.objects.get_or_create(name_en='Space', clas='Space')
    if created: out += str(space)+'<br>'
    # NOTE(review): the triple-quoted blocks below are disabled code kept as
    # no-op string literals (Being_Type seeding for Human/Person/Project/
    # Company and Material/Non-material artwork types).
    """human, created = Being_Type.objects.get_or_create(name_en='Human', clas='Human', parent=being)
    if created: out += str(human)+": "
    persons = Being_Type.objects.filter(name_en="Person")
    if persons:
        if len(persons) > 1:
            out += "ERROR there's more than one 'Person' as a Being_Type ?"+'<br>'
            return out
        else:
            person = persons[0]
    else:
        person, created = Being_Type.objects.get_or_create(name_en='Person', parent=human)
        if created: out += str(person)+sep
    person.clas = 'Person'
    person.parent = human
    person.save()
    projects = Being_Type.objects.filter(name_en="Project")
    if projects:
        if len(projects) > 1:
            out += "ERROR there's more than one 'Project' as a Being_Type ?"+'<br>'
            return out
        else:
            project = projects[0]
    else:
        project, created = Being_Type.objects.get_or_create(name_en='Project', parent=human)
        if created: out += str(project)+sep
    project.clas = 'Project'
    project.parent = human
    project.save()
    companys = Being_Type.objects.filter(name_en="Company")
    if companys:
        if len(companys) > 1:
            out += "ERROR there's more than one 'Company' as a Being_Type ?"+'<br>'
            return out
        else:
            company = companys[0]
    else:
        company, created = Being_Type.objects.get_or_create(name_en='Company', parent=human)
        if created: out += str(company)+'<br>'
    company.clas = 'Company'
    company.parent = human
    company.save()
    material, created = Artwork_Type.objects.get_or_create(name_en='Material', clas='Material', parent=artwork)
    if created: out += str(material)+sep
    nonmaterial, created = Artwork_Type.objects.get_or_create(name_en='Non-material', clas='Nonmaterial', parent=artwork)
    if created: out += str(nonmaterial)+sep"""
    record, created = Artwork_Type.objects.get_or_create(name_en='Record', clas='Record', parent=artwork)
    if created: out += str(record)+sep
    unit, created = Artwork_Type.objects.get_or_create(name_en='Unit', clas='Unit', parent=artwork)
    if created: out += str(unit)+sep
    # Disabled: currency subtype seeding.  NOTE(review): its last line
    # appends str(crypto) where str(fiat) was probably intended.
    """currency, created = Unit_Type.objects.get_or_create(name_en='Currency', parent=unit)
    if created: out += str(currency)+sep
    social, created = Unit_Type.objects.get_or_create(name_en='MutualCredit currency', parent=currency)
    if created: out += str(social)+sep
    crypto, created = Unit_Type.objects.get_or_create(name_en='Cryptocurrency', parent=currency)
    if created: out += str(crypto)+sep
    fiat, created = Unit_Type.objects.get_or_create(name_en='Fiat currency', parent=currency)
    if created: out += str(crypto)+'<br>'
    """
    region, created = Space_Type.objects.get_or_create(name_en='Region', clas='Region', parent=space)
    if created: out += str(region)+sep
    address, created = Space_Type.objects.get_or_create(name_en='Address', clas='Address', parent=space)
    if created: out += str(address)+'<br>'
    unitratio, created = Record_Type.objects.get_or_create(name_en='Unit Ratio', clas='UnitRatio', parent=record)
    if created: out += str(unitratio)+sep
    # Disabled: CES/bank account record types.
    """ces, created = Record_Type.objects.get_or_create(name_en='Account Ces', clas='AccountCes', parent=record)
    if created: out += str(ces)+sep
    bank, created = Record_Type.objects.get_or_create(name_en='Account Bank', clas='AccountBank', parent=record)
    if created: out += str(bank)+sep"""
    print(out)
    return out
#post_migrate.connect(create_general_types, sender=GeneralAppConfig)<|fim▁end|>
| |
<|file_name|>gen_events.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
'''
This script is used to generate a set of random-ish events to
simulate log data from a Juniper Netscreen FW. It was built<|fim▁hole|>command to populate data:
./gen_events.py 2>&1 | nc 127.0.0.1 9999
'''
import random
from netaddr import *
from time import sleep
# Protocol numbers used in the generated events: 6 = TCP, 17 = UDP.
protocols = ['6', '17']
common_ports = ['20','21','22','23','25','80','109','110','119','143','156','161','389','443']
action_list = ['Deny', 'Accept', 'Drop', 'Reject'];
# Source addresses come from a /24, destinations from a /16.
src_network = IPNetwork('192.168.1.0/24')
dest_network = IPNetwork('172.35.0.0/16')
# Every event is also appended to this replay file for later reuse.
fo = open("replay_log.txt", "w")
# Endless generation loop: pick random protocol/ports/action/addresses,
# format one Netscreen-style syslog line, write it to the replay file and
# stdout (Python 2 print statement), then throttle to ~3 events/second.
while (1 == 1):
    proto_index = random.randint(0,1)
    protocol = protocols[proto_index]
    src_port_index = random.randint(0,13)
    dest_port_index = random.randint(0,13)
    src_port = common_ports[src_port_index]
    dest_port = common_ports[dest_port_index]
    action_index = random.randint(0,3)
    action = action_list[action_index]
    # Host part 1..254 keeps the source inside the /24.
    src_ip_index = random.randint(1,254)
    src_ip = src_network[src_ip_index]
    dest_ip_index = random.randint(1,65535)
    dest_ip = dest_network[dest_ip_index]
    event = "192.168.1.3 Netscreen-FW1: NetScreen device_id=Netscreen-FW1 [Root]system-notification-00257(traffic): start_time=\"YYYY-MM-DD HH:MM:SS\" duration=0 policy_id=125 service=syslog proto=%s src zone=Untrust dst zone=Trust action=%s sent=0 rcvd=0 src=%s dst=%s src_port=%s dst_port=%s session_id=0" % (protocol, action, src_ip, dest_ip, src_port, dest_port)
    fo.write(event + "\n")
    print event
    sleep(0.3)
fo.close()<|fim▁end|>
|
around using netcat to feed data into Flume for ingestion
into a Hadoop cluster.
Once you have Flume configured you would use the following
|
<|file_name|>test_package_squash.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import functools
import imp
import mock
import os
from oslotest import base
from testtools.matchers import Mismatch
installs_squash_src = (os.path.dirname(os.path.realpath(__file__)) +
'/../bin/package-installs-squash')
installs_squash = imp.load_source('installs_squash', installs_squash_src)
class IsMatchingInstallList(object):
<|fim▁hole|> def __init__(self, expected):
self.expected = expected
def match(self, actual):
for phase, ops in self.expected.items():
if phase not in actual:
# missing the phase
return Mismatch(
"Phase %d does not exist in %s" % (phase, actual))
for op, pkgs in ops.items():
if op not in actual[phase]:
# missing op (install/uninstall)
return Mismatch(
"Operation %s does not exist in %s" % (op, ops))
# on py2 these can be out of order, we just want a match
expected_phase_ops = sorted(self.expected[phase][op])
actual_phase_ops = sorted(actual[phase][op])
if expected_phase_ops != actual_phase_ops:
return Mismatch(
"Operation list %s does not match expected %s" %
(actual[phase][op], self.expected[phase][op]))
class TestPackageInstall(base.BaseTestCase):
    """Unit tests for package-installs-squash's collect_data()."""

    def setUp(self):
        super(TestPackageInstall, self).setUp()
        # collect_data() accumulates {phase: {operation: [(pkg, element)]}};
        # the nested defaultdict lets every test start from an empty tree.
        self.final_dict = collections.defaultdict(
            functools.partial(collections.defaultdict, list))

    def test_simple(self):
        '''Test a basic package install'''
        objs = {
            'test_package': ''
        }
        result = installs_squash.collect_data(
            self.final_dict, objs, 'test_element')
        expected = {
            'install.d': {
                'install': [('test_package', 'test_element')]
            }
        }
        self.assertThat(result, IsMatchingInstallList(expected))

    @mock.patch.object(os, 'environ', dict(ARCH='arm64'))
    def test_arch(self):
        '''Exercise the arch and not-arch flags'''
        objs = {
            'test_package': '',
            'test_arm64_package': {
                'arch': 'arm64'
            },
            'do_not_install': {
                'not-arch': 'arm64'
            }
        }
        result = installs_squash.collect_data(
            self.final_dict, objs, 'test_element')
        expected = {
            'install.d': {
                'install': [('test_package', 'test_element'),
                            ('test_arm64_package', 'test_element')]
            }
        }
        self.assertThat(result, IsMatchingInstallList(expected))

    # Package table combining per-arch and conditional ("when") selection of
    # the Ubuntu kernel package; shared by the test_param_list_* tests.
    kernel_objs = {
        'linux-image-generic': [
            {
                'not-arch': 'arm64',
                'when': 'DIB_UBUNTU_KERNEL = linux-image-generic',
            },
            {
                'arch': 'arm64',
                'when': (
                    'DIB_RELEASE != xenial',
                    'DIB_UBUNTU_KERNEL = linux-image-generic',
                )
            },
        ],
        'linux-generic-hwe-16.04': {
            'arch': 'arm64',
            'when': (
                'DIB_RELEASE = xenial',
                'DIB_UBUNTU_KERNEL = linux-image-generic',
            )
        },
    }

    def _test_kernel_objs_match(self, arch, release, expected):
        """Assert kernel_objs resolves to the single *expected* package
        under the given ARCH / DIB_RELEASE environment."""
        with mock.patch.object(os, 'environ',
                               dict(ARCH=arch,
                                    DIB_UBUNTU_KERNEL='linux-image-generic',
                                    DIB_RELEASE=release)):
            result = installs_squash.collect_data(
                self.final_dict, self.kernel_objs, 'test_element')
            # Rebind the parameter to the full expected structure; the RHS
            # still reads the original package-name argument.
            expected = {
                'install.d': {
                    'install': [(expected, 'test_element')]
                }
            }
            self.assertThat(result, IsMatchingInstallList(expected))

    def test_param_list_x86(self):
        self._test_kernel_objs_match('x86_64', 'focal', 'linux-image-generic')

    def test_param_list_arm64_xenial(self):
        self._test_kernel_objs_match('arm64', 'xenial',
                                     'linux-generic-hwe-16.04')

    def test_param_list_arm64_focal(self):
        self._test_kernel_objs_match('arm64', 'focal', 'linux-image-generic')

    @mock.patch.object(os, 'environ', dict(DIB_FEATURE='1', **os.environ))
    def test_skip_when(self):
        '''Exercise the when flag'''
        objs = {
            'skipped_package': {
                'when': 'DIB_FEATURE=0'
            },
            'not_skipped_package': {
                'when': 'DIB_FEATURE=1'
            },
            'not_equal_package': {
                'when': 'DIB_FEATURE!=0'
            },
            'not_equal_skipped_package': {
                'when': 'DIB_FEATURE!=1'
            },
        }
        result = installs_squash.collect_data(
            self.final_dict, objs, 'test_element')
        expected = {
            'install.d': {
                'install': [('not_skipped_package', 'test_element'),
                            ('not_equal_package', 'test_element')]
            }
        }
        self.assertThat(result, IsMatchingInstallList(expected))

    def test_skip_no_var(self):
        '''Exercise the skip_when missing variable failure case'''
        objs = {
            'package': {
                'when': 'MISSING_VAR=1'
            },
        }
        self.assertRaises(RuntimeError, installs_squash.collect_data,
                          self.final_dict, objs, 'test_element')

    @mock.patch.object(os, 'environ',
                       dict(
                           DIB_A_FEATURE='1',
                           DIB_B_FEATURE='1',
                           DIB_C_FEATURE='1'))
    def test_skip_when_list(self):
        '''Exercise the when flag with lists'''
        objs = {
            'not_skipped_package': {
                'when': [
                    'DIB_A_FEATURE=1',
                    'DIB_B_FEATURE=1',
                    'DIB_C_FEATURE=1'
                ]
            },
            'skipped_package': {
                'when': [
                    'DIB_A_FEATURE=1',
                    'DIB_B_FEATURE=0',
                    'DIB_C_FEATURE=1',
                ]
            },
        }
        result = installs_squash.collect_data(
            self.final_dict, objs, 'test_element')
        expected = {
            'install.d': {
                'install': [('not_skipped_package', 'test_element')]
            }
        }
        self.assertThat(result, IsMatchingInstallList(expected))
| |
<|file_name|>ItemDescription.java<|end_file_name|><|fim▁begin|>package io.github.dousha.randomCraft.randomcraft;
import java.util.HashMap;
// WHY DOESN'T JAVA HAVE A STRUCT
public class ItemDescription implements Cloneable{
public boolean type; // false = in, true = out, I would use #define in c/c++!
public String itemname;
@Deprecated
public int itemid;
public boolean isMajor;
public int leastAmount;
public double base;
public String formula;
public double arg1, arg2, arg3;
public boolean isMagic;
// TODO: make it works
public HashMap<String, String> enchantments; // <name, level>
// ----^-----------------------^-----------
public String giveMethod;
@Override
public Object clone(){
ItemDescription o = null;
try{
o = (ItemDescription) super.clone();
}
catch(CloneNotSupportedException ex){<|fim▁hole|> return o;
}
}<|fim▁end|>
|
ex.printStackTrace();
}
|
<|file_name|>sodium_type.rs<|end_file_name|><|fim▁begin|>// This macro allows to wrap Sodimoxide type to libindy type keeping the same behaviour
macro_rules! sodium_type (($newtype:ident, $sodiumtype:path, $len:ident) => (
pub struct $newtype(pub(super) $sodiumtype);
impl $newtype {
#[allow(dead_code)]
pub fn new(bytes: [u8; $len]) -> $newtype {
$newtype($sodiumtype(bytes))
}
#[allow(dead_code)]
pub fn from_slice(bs: &[u8]) -> Result<$newtype, ::errors::IndyError> {
let inner = <$sodiumtype>::from_slice(bs)
.ok_or(::errors::err_msg(::errors::IndyErrorKind::InvalidStructure, format!("Invalid bytes for {:?}", stringify!($newtype))))?;
Ok($newtype(inner))
}
}
<|fim▁hole|> $newtype(self.0.clone())
}
}
impl ::std::fmt::Debug for $newtype {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
self.0.fmt(f)
}
}
impl ::std::cmp::PartialEq for $newtype {
fn eq(&self, other: &$newtype) -> bool {
self.0.eq(&other.0)
}
}
impl ::std::cmp::Eq for $newtype {}
impl ::serde::Serialize for $newtype {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: ::serde::Serializer
{
serializer.serialize_bytes(&self.0[..])
}
}
impl<'de> ::serde::Deserialize<'de> for $newtype {
fn deserialize<D>(deserializer: D) -> Result<$newtype, D::Error> where D: ::serde::Deserializer<'de>
{
<$sodiumtype>::deserialize(deserializer).map($newtype)
}
}
impl ::std::ops::Index<::std::ops::Range<usize>> for $newtype {
type Output = [u8];
fn index(&self, _index: ::std::ops::Range<usize>) -> &[u8] {
self.0.index(_index)
}
}
impl ::std::ops::Index<::std::ops::RangeTo<usize>> for $newtype {
type Output = [u8];
fn index(&self, _index: ::std::ops::RangeTo<usize>) -> &[u8] {
self.0.index(_index)
}
}
impl ::std::ops::Index<::std::ops::RangeFrom<usize>> for $newtype {
type Output = [u8];
fn index(&self, _index: ::std::ops::RangeFrom<usize>) -> &[u8] {
self.0.index(_index)
}
}
impl ::std::ops::Index<::std::ops::RangeFull> for $newtype {
type Output = [u8];
fn index(&self, _index: ::std::ops::RangeFull) -> &[u8] {
self.0.index(_index)
}
}
impl AsRef<[u8]> for $newtype {
#[inline]
fn as_ref(&self) -> &[u8] {
&self[..]
}
}
));<|fim▁end|>
|
impl Clone for $newtype {
fn clone(&self) -> $newtype {
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from machina.models.fields import ExtendedImageField, MarkupTextField
RESIZED_IMAGE_WIDTH = 100
RESIZED_IMAGE_HEIGHT = 100
VALIDATED_IMAGE_MIN_WIDTH = 100
VALIDATED_IMAGE_MAX_WIDTH = 120
VALIDATED_IMAGE_MIN_HEIGHT = 100
VALIDATED_IMAGE_MAX_HEIGHT = 120
VALIDATED_IMAGE_MAX_SIZE = 12000
class DummyModel(models.Model):
    """
    This model will be used for testing purposes only.
    """
    # Markup-aware rich-text content; fully optional.
    content = MarkupTextField(null=True, blank=True)
    # Image automatically resized to RESIZED_IMAGE_WIDTH x RESIZED_IMAGE_HEIGHT;
    # the resulting dimensions are stored via height/width field callbacks.
    resized_image = ExtendedImageField(
        upload_to='machina/test_images', width=RESIZED_IMAGE_WIDTH, height=RESIZED_IMAGE_HEIGHT,
        null=True, blank=True)
    # Image validated against the VALIDATED_IMAGE_* dimension and size bounds.
    validated_image = ExtendedImageField(
        upload_to='machina/test_images', min_width=VALIDATED_IMAGE_MIN_WIDTH,
        max_width=VALIDATED_IMAGE_MAX_WIDTH, min_height=VALIDATED_IMAGE_MIN_HEIGHT,
        max_height=VALIDATED_IMAGE_MAX_HEIGHT, max_upload_size=VALIDATED_IMAGE_MAX_SIZE, null=True,
        blank=True)

    class Meta:
        app_label = 'tests'
<|file_name|>resource_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use ipc_channel::ipc;
use msg::constellation_msg::{PipelineId, ReferrerPolicy};
use net::resource_thread::new_core_resource_thread;
use net_traits::hosts::{parse_hostsfile, host_replacement};
use net_traits::{CoreResourceMsg, LoadData, LoadConsumer, LoadContext};
use net_traits::{NetworkError, ProgressMsg, LoadOrigin, RequestSource};
use profile_traits::time::ProfilerChan;
use std::borrow::ToOwned;
use std::collections::HashMap;
use std::net::IpAddr;
use std::sync::mpsc::channel;
use url::Url;
/// Parse a textual IP address; panics on malformed input (test helper).
fn ip(addr: &str) -> IpAddr {
    addr.parse::<IpAddr>().unwrap()
}
// Minimal LoadOrigin implementation with no referrer/pipeline information;
// used below to build LoadData values for the resource-thread tests.
struct ResourceTest;
impl LoadOrigin for ResourceTest {
    fn referrer_url(&self) -> Option<Url> {
        None
    }
    fn referrer_policy(&self) -> Option<ReferrerPolicy> {
        None
    }
    fn request_source(&self) -> RequestSource {
        RequestSource::None
    }
    fn pipeline_id(&self) -> Option<PipelineId> {
        None
    }
}
#[test]
// Spawning a core resource thread and sending Exit should complete the
// shutdown handshake (the thread acks on `sender` before terminating).
fn test_exit() {
    let (tx, _rx) = ipc::channel().unwrap();
    let (sender, receiver) = ipc::channel().unwrap();
    let resource_thread = new_core_resource_thread("".to_owned(), None, ProfilerChan(tx));
    resource_thread.send(CoreResourceMsg::Exit(sender)).unwrap();
    // Blocks until the resource thread confirms it has exited.
    receiver.recv().unwrap();
}
#[test]
// Loading a URL with an unsupported scheme must produce an error result
// on the progress channel rather than any data.
fn test_bad_scheme() {
    let (tx, _rx) = ipc::channel().unwrap();
    let (sender, receiver) = ipc::channel().unwrap();
    let resource_thread = new_core_resource_thread("".to_owned(), None, ProfilerChan(tx));
    let (start_chan, start) = ipc::channel().unwrap();
    let url = Url::parse("bogus://whatever").unwrap();
    resource_thread.send(CoreResourceMsg::Load(LoadData::new(LoadContext::Browsing, url, &ResourceTest),
                    LoadConsumer::Channel(start_chan), None)).unwrap();
    let response = start.recv().unwrap();
    // The very first progress message should be Done(Err(..)).
    match response.progress_port.recv().unwrap() {
      ProgressMsg::Done(result) => { assert!(result.is_err()) }
      _ => panic!("bleh")
    }
    resource_thread.send(CoreResourceMsg::Exit(sender)).unwrap();
    receiver.recv().unwrap();
}
#[test]
// Plain two-entry hosts file: both mappings are parsed.
fn test_parse_hostsfile() {
    let mock_hosts_file_content = "127.0.0.1 foo.bar.com\n127.0.0.2 servo.test.server";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(2, hosts_table.len());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("foo.bar.com").unwrap());
    assert_eq!(ip("127.0.0.2"), *hosts_table.get("servo.test.server").unwrap());
}
#[test]
fn test_parse_malformed_hostsfile() {<|fim▁hole|>}
// parse_hostsfile: comment handling (full-line and end-of-line), multiple
// hostnames mapping to one address, and tab-separated fields.
#[test]
fn test_parse_hostsfile_with_line_comment() {
    let mock_hosts_file_content = "# this is a line comment\n127.0.0.1 foo.bar.com\n# anothercomment";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(1, hosts_table.len());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("foo.bar.com").unwrap());
}
#[test]
fn test_parse_hostsfile_with_end_of_line_comment() {
    let mock_hosts_file_content = "127.0.0.1 foo.bar.com # line ending comment\n127.0.0.2 servo.test.server #comment";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(2, hosts_table.len());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("foo.bar.com").unwrap());
    assert_eq!(ip("127.0.0.2"), *hosts_table.get("servo.test.server").unwrap());
}
#[test]
fn test_parse_hostsfile_with_2_hostnames_for_1_address() {
    let mock_hosts_file_content = "127.0.0.1 foo.bar.com baz.bar.com";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(2, hosts_table.len());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("foo.bar.com").unwrap());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("baz.bar.com").unwrap());
}
#[test]
fn test_parse_hostsfile_with_4_hostnames_for_1_address() {
    let mock_hosts_file_content = "127.0.0.1 moz.foo.com moz.bar.com moz.baz.com moz.moz.com";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(4, hosts_table.len());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("moz.foo.com").unwrap());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("moz.bar.com").unwrap());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("moz.baz.com").unwrap());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("moz.moz.com").unwrap());
}
#[test]
fn test_parse_hostsfile_with_tabs_instead_spaces() {
    let mock_hosts_file_content = "127.0.0.1\tfoo.bar.com\n127.0.0.2\tservo.test.server";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(2, hosts_table.len());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("foo.bar.com").unwrap());
    assert_eq!(ip("127.0.0.2"), *hosts_table.get("servo.test.server").unwrap());
}
// parse_hostsfile: address validation — well-formed IPv4/IPv6 entries are
// kept, malformed ones are dropped wholesale, and trailing whitespace on a
// line does not break parsing.
#[test]
fn test_parse_hostsfile_with_valid_ipv4_addresses()
{
    let mock_hosts_file_content =
        "255.255.255.255 foo.bar.com\n169.0.1.201 servo.test.server\n192.168.5.0 servo.foo.com";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(3, hosts_table.len());
}
#[test]
fn test_parse_hostsfile_with_invalid_ipv4_addresses()
{
    let mock_hosts_file_content = "256.255.255.255 foo.bar.com\n169.0.1000.201 servo.test.server \
                                   \n192.168.5.500 servo.foo.com\n192.abc.100.2 test.servo.com";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(0, hosts_table.len());
}
#[test]
fn test_parse_hostsfile_with_valid_ipv6_addresses()
{
    let mock_hosts_file_content = "2001:0db8:0000:0000:0000:ff00:0042:8329 foo.bar.com\n\
                                   2001:db8:0:0:0:ff00:42:8329 moz.foo.com\n\
                                   2001:db8::ff00:42:8329 foo.moz.com moz.moz.com\n\
                                   0000:0000:0000:0000:0000:0000:0000:0001 bar.moz.com\n\
                                   ::1 foo.bar.baz baz.foo.com\n\
                                   2001:0DB8:85A3:0042:1000:8A2E:0370:7334 baz.bar.moz\n\
                                   :: unspecified.moz.com";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(9, hosts_table.len());
}
#[test]
fn test_parse_hostsfile_with_invalid_ipv6_addresses()
{
    let mock_hosts_file_content = "12001:0db8:0000:0000:0000:ff00:0042:8329 foo.bar.com\n\
                                   2001:zdb8:0:0:0:gg00:42:t329 moz.foo.com\n\
                                   2002:0DB8:85A3:0042:1000:8A2E:0370:7334/1289 baz3.bar.moz";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(0, hosts_table.len());
}
#[test]
fn test_parse_hostsfile_with_end_of_line_whitespace()
{
    let mock_hosts_file_content = "127.0.0.1 foo.bar.com \n\
                                   2001:db8:0:0:0:ff00:42:8329 moz.foo.com\n \
                                   127.0.0.2 servo.test.server ";
    let hosts_table = parse_hostsfile(mock_hosts_file_content);
    assert_eq!(3, hosts_table.len());
    assert_eq!(ip("127.0.0.1"), *hosts_table.get("foo.bar.com").unwrap());
    assert_eq!(ip("2001:db8:0:0:0:ff00:42:8329"), *hosts_table.get("moz.foo.com").unwrap());
    assert_eq!(ip("127.0.0.2"), *hosts_table.get("servo.test.server").unwrap());
}
// host_replacement substitutes only exact hostname matches; subdomains of a
// mapped host (a.foo.bar.com) are left untouched.
#[test]
fn test_replace_hosts() {
    let mut host_table = HashMap::new();
    host_table.insert("foo.bar.com".to_owned(), ip("127.0.0.1"));
    host_table.insert("servo.test.server".to_owned(), ip("127.0.0.2"));
    let url = Url::parse("http://foo.bar.com:8000/foo").unwrap();
    assert_eq!(host_replacement(&host_table, &url).host_str().unwrap(), "127.0.0.1");
    let url = Url::parse("http://servo.test.server").unwrap();
    assert_eq!(host_replacement(&host_table, &url).host_str().unwrap(), "127.0.0.2");
    let url = Url::parse("http://a.foo.bar.com").unwrap();
    assert_eq!(host_replacement(&host_table, &url).host_str().unwrap(), "a.foo.bar.com");
}
// A load cancelled mid-flight (headers received, body not yet sent) must
// finish with ProgressMsg::Done(Err(LoadCancelled)). A local TCP server is
// hand-driven via channels so header delivery, the cancel, and body delivery
// happen in a strictly controlled order.
#[test]
fn test_cancelled_listener() {
    use std::io::Write;
    use std::net::TcpListener;
    use std::thread;
    // http_loader always checks for headers in the response
    let header = vec!["HTTP/1.1 200 OK",
                      "Server: test-server",
                      "Content-Type: text/plain",
                      "\r\n"];
    let body = vec!["Yay!", "We're doomed!"];
    // Setup a TCP server to which requests are made
    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
    let port = listener.local_addr().unwrap().port();
    let (body_sender, body_receiver) = channel();
    thread::spawn(move || {
        if let Ok((mut stream, _)) = listener.accept() {
            // immediately stream the headers once the connection has been established
            let _ = stream.write(header.join("\r\n").as_bytes());
            // wait for the main thread to send the body, so as to ensure that we're
            // doing everything sequentially
            let body_vec: Vec<&str> = body_receiver.recv().unwrap();
            let _ = stream.write(body_vec.join("\r\n").as_bytes());
        }
    });
    let (tx, _rx) = ipc::channel().unwrap();
    let (exit_sender, exit_receiver) = ipc::channel().unwrap();
    let resource_thread = new_core_resource_thread("".to_owned(), None, ProfilerChan(tx));
    let (sender, receiver) = ipc::channel().unwrap();
    let (id_sender, id_receiver) = ipc::channel().unwrap();
    let (sync_sender, sync_receiver) = ipc::channel().unwrap();
    let url = Url::parse(&format!("http://127.0.0.1:{}", port)).unwrap();
    resource_thread.send(CoreResourceMsg::Load(LoadData::new(LoadContext::Browsing, url, &ResourceTest),
                                               LoadConsumer::Channel(sender),
                                               Some(id_sender))).unwrap();
    // get the `ResourceId` and send a cancel message, which should stop the loading loop
    let res_id = id_receiver.recv().unwrap();
    resource_thread.send(CoreResourceMsg::Cancel(res_id)).unwrap();
    // synchronize with the resource_thread loop, so that we don't simply send everything at once!
    resource_thread.send(CoreResourceMsg::Synchronize(sync_sender)).unwrap();
    let _ = sync_receiver.recv();
    // now, let's send the body, because the connection is still active and data would be loaded
    // (but, the loading has been cancelled)
    let _ = body_sender.send(body);
    let response = receiver.recv().unwrap();
    assert_eq!(response.progress_port.recv().unwrap(),
               ProgressMsg::Done(Err(NetworkError::LoadCancelled)));
    resource_thread.send(CoreResourceMsg::Exit(exit_sender)).unwrap();
    exit_receiver.recv().unwrap();
}
|
let mock_hosts_file_content = "malformed file\n127.0.0.1 foo.bar.com\nservo.test.server 127.0.0.1";
let hosts_table = parse_hostsfile(mock_hosts_file_content);
assert_eq!(1, hosts_table.len());
assert_eq!(ip("127.0.0.1"), *hosts_table.get("foo.bar.com").unwrap());
|
<|file_name|>gpio_handlers.py<|end_file_name|><|fim▁begin|>"""
/*
* Custom handlers for the BBB
*
*/
"""
import Adafruit_BBIO.GPIO as GPIO
GPIO.setup("P9_12", GPIO.OUT)
def alexaHandler(client, userdata, message):
print "Received payload: " + str(message.payload.decode())
# Assume only 1 and 0 are send here.
if message.payload == "1":
GPIO.output("P9_12", GPIO.HIGH)
print "Turned christmas tree On"<|fim▁hole|>
def cleanUp():
    # Release all GPIO pins claimed by this process via Adafruit_BBIO.
    GPIO.cleanup()
|
elif message.payload == "0":
GPIO.output("P9_12", GPIO.LOW)
print "Turned christmas tree Off"
|
<|file_name|>error_log.js<|end_file_name|><|fim▁begin|>function error_log(message, message_type, destination, extra_headers) {
// http://kevin.vanzonneveld.net
// + original by: Paul Hutchinson (http://restaurantthing.com/)
// + revised by: Brett Zamir (http://brett-zamir.me)
// % note 1: The dependencies, mail(), syslog(), and file_put_contents()
// % note 1: are either not fullly implemented or implemented at all
// - depends on: mail
// - depends on: syslog
// - depends on: file_put_contents
// * example 1: error_log('Oops!');
// * returns 1: true
var that = this,
_sapi = function() { // SAPI logging (we treat console as the "server" logging; the
// syslog option could do so potentially as well)
if (!that.window.console || !that.window.console.log) {
return false;
}
that.window.console.log(message);
return true;
};
message_type = message_type || 0;
switch (message_type) {
case 1: // Email
var subject = 'PHP error_log message'; // Apparently no way to customize the subject<|fim▁hole|> case 0: // syslog or file depending on ini
var log = this.php_js && this.php_js.ini && this.php_js.ini.error_log && this.php_js.ini.error_log.local_value;
if (!log) {
return _sapi();
}
if (log === 'syslog') {
return this.syslog(4, message); // presumably 'LOG_ERR' (4) is correct?
}
destination = log;
// Fall-through
case 3: // File logging
var ret = this.file_put_contents(destination, message, 8); // FILE_APPEND (8)
return ret === false ? false : true;
case 4: // SAPI logging
return _sapi();
default: // Unrecognized value
return false;
}
return false; // Shouldn't get here
}<|fim▁end|>
|
return this.mail(destination, subject, message, extra_headers);
case 2: // No longer an option in PHP, but had been to send via TCP/IP to 'destination' (name or IP:port)
// use socket_create() and socket_send()?
return false;
|
<|file_name|>link_popup.js<|end_file_name|><|fim▁begin|>var xForRefIcon = 0;
var yForRefIcon = 0;
var xForSubDiagramIcon = 0;
var yForSubDiagramIcon = 0;
var xForTransIcon = 0;
var yForTransIcon = 0;
var fileLinks = [];
var folderLinks = [];
var urlLinks = [];
var diagramLinks = [];
var shapeLinks = [];
var subdiagramLinks = [];
var fromTransitorLinks = [];
var toTransitorLinks = [];
// vplink
var vpLinkProjectLink;
var vpLinkPageUrl;
var vpLinkProjectLinkWithName;
var vpLinkPageUrlWithName;
// Position and show a reference-icon overlay div at the bottom-left corner
// of a shape.
// @param imageId      id of the diagram <img> the shape belongs to
// @param modelValues  "x1,y1,x2,y2,url" shape bounds in diagram coordinates
// @param objectId     id of the overlay div to position and show
function showDefaultReferenceIcon(imageId, modelValues, objectId) {
    if (modelValues != ''){
        var xyValueArray = modelValues.split(",");
        var shapeWidth = xyValueArray[2]*1 - xyValueArray[0]*1;
        // if (shapeWidth > 24){
            // translate shape bounds into page coordinates via the image offset
            var diagram = document.getElementById(imageId);
            var xOffset = findPosX(diagram);
            var yOffset = findPosY(diagram);
            var shapeX = xyValueArray[0]*1;
            var shapeY = xyValueArray[1]*1;
            var x = shapeX + xOffset*1;
            var y = shapeY + yOffset*1 - 13;
            var h = xyValueArray[3]*1 - xyValueArray[1]*1;
            var url = xyValueArray[4];
            var referenceIconLayer = document.getElementById(objectId);
            // N==1: standards browsers (style.left/top); N==0: legacy IE (posLeft/posTop)
            N = (document.all) ? 0 : 1;
            if (N) {
                referenceIconLayer.style.left = x - 3;
                referenceIconLayer.style.top = y + h;
            } else {
                referenceIconLayer.style.posLeft = x - 3;
                referenceIconLayer.style.posTop = y + h;
            }
            referenceIconLayer.style.visibility="visible"
        // }
    }
}
// Position and show the shared reference icon ("referenceIconLayer") for a
// shape. The body was a verbatim copy of showDefaultReferenceIcon with the
// layer id hard-coded; delegate instead of duplicating the computation.
// @param imageId      id of the diagram <img> the shape belongs to
// @param modelValues  "x1,y1,x2,y2,url" shape bounds in diagram coordinates
function showReferenceIcon(imageId, modelValues) {
    showDefaultReferenceIcon(imageId, modelValues, "referenceIconLayer");
}
// Hide the shared reference icon overlay, if it exists on this page.
function hideReferenceIcon() {
    var referenceIconLayer = document.getElementById("referenceIconLayer");
    if (referenceIconLayer != null) {
        referenceIconLayer.style.visibility="hidden"
    }
}
// Position and show a sub-diagram icon overlay div at the bottom-right
// corner of a shape.
// @param imageId      id of the diagram <img> the shape belongs to
// @param modelValues  "x1,y1,x2,y2,url" shape bounds in diagram coordinates
// @param objectId     id of the overlay div to position and show
function showDefaultSubdiagramIcon(imageId, modelValues, objectId) {
    if (modelValues != ''){
        var xyValueArray = modelValues.split(",");
        var shapeWidth = xyValueArray[2]*1 - xyValueArray[0]*1;
        // if (shapeWidth > 24){
            // translate shape bounds into page coordinates via the image offset
            var diagram = document.getElementById(imageId);
            var xOffset = findPosX(diagram);
            var yOffset = findPosY(diagram);
            var shapeRightX = xyValueArray[2]*1;
            var shapeRightY = xyValueArray[1]*1;
            var x = shapeRightX + xOffset*1 - 10;
            var y = shapeRightY + yOffset*1 - 13;
            var h = xyValueArray[3]*1 - xyValueArray[1]*1;
            var url = xyValueArray[4];
            var subdiagramIconLayer = document.getElementById(objectId);
            // N==1: standards browsers (style.left/top); N==0: legacy IE (posLeft/posTop)
            N = (document.all) ? 0 : 1;
            if (N) {
                subdiagramIconLayer.style.left = x - 3;
                subdiagramIconLayer.style.top = y + h;
            } else {
                subdiagramIconLayer.style.posLeft = x - 3;
                subdiagramIconLayer.style.posTop = y + h;
            }
            subdiagramIconLayer.style.visibility="visible"
        // }
    }
}
// Position and show the shared sub-diagram icon ("subdiagramIconLayer") for
// a shape. The body was a verbatim copy of showDefaultSubdiagramIcon with
// the layer id hard-coded; delegate instead of duplicating the computation.
// @param imageId      id of the diagram <img> the shape belongs to
// @param modelValues  "x1,y1,x2,y2,url" shape bounds in diagram coordinates
function showSubdiagramIcon(imageId, modelValues) {
    showDefaultSubdiagramIcon(imageId, modelValues, "subdiagramIconLayer");
}
// Hide the shared sub-diagram icon overlay, if it exists on this page.
function hideSubdiagramIcon() {
    var subdiagramIconLayer = document.getElementById("subdiagramIconLayer");
    if (subdiagramIconLayer != null) {
        subdiagramIconLayer.style.visibility="hidden"
    }
}
// Position and show a transitor icon overlay div just below the bottom-right
// corner of a shape (13px lower than the sub-diagram icon).
// @param imageId      id of the diagram <img> the shape belongs to
// @param modelValues  "x1,y1,x2,y2,url" shape bounds in diagram coordinates
// @param objectId     id of the overlay div to position and show
function showDefaultTransitorIcon(imageId, modelValues, objectId) {
    if (modelValues != ''){
        var xyValueArray = modelValues.split(",");
        var shapeWidth = xyValueArray[2]*1 - xyValueArray[0]*1;
        // if (shapeWidth > 24){
            // translate shape bounds into page coordinates via the image offset
            var diagram = document.getElementById(imageId);
            var xOffset = findPosX(diagram);
            var yOffset = findPosY(diagram);
            var shapeRightX = xyValueArray[2]*1;
            var shapeRightY = xyValueArray[1]*1;
            var x = shapeRightX + xOffset*1 - 10;
            var y = shapeRightY + yOffset*1;
            var h = xyValueArray[3]*1 - xyValueArray[1]*1;
            var url = xyValueArray[4];
            var transitorIconLayer = document.getElementById(objectId);
            // N==1: standards browsers (style.left/top); N==0: legacy IE (posLeft/posTop)
            N = (document.all) ? 0 : 1;
            if (N) {
                transitorIconLayer.style.left = x - 3;
                transitorIconLayer.style.top = y + h;
            } else {
                transitorIconLayer.style.posLeft = x - 3;
                transitorIconLayer.style.posTop = y + h;
            }
            transitorIconLayer.style.visibility="visible"
        // }
    }
}
// Position and show the shared transitor icon ("transitorIconLayer") for a
// shape. The body was a verbatim copy of showDefaultTransitorIcon with the
// layer id hard-coded; delegate instead of duplicating the computation.
// @param imageId      id of the diagram <img> the shape belongs to
// @param modelValues  "x1,y1,x2,y2,url" shape bounds in diagram coordinates
function showTransitorIcon(imageId, modelValues) {
    showDefaultTransitorIcon(imageId, modelValues, "transitorIconLayer");
}
// Hide the shared transitor icon overlay, if it exists on this page.
function hideTransitorIcon() {
    var transitorIconLayer = document.getElementById("transitorIconLayer");
    if (transitorIconLayer != null) {
        transitorIconLayer.style.visibility="hidden"
    }
}
// Position and show a documentation icon overlay div just below the
// bottom-left corner of a shape.
// @param imageId      id of the diagram <img> the shape belongs to
// @param modelValues  "x1,y1,x2,y2,url" shape bounds in diagram coordinates
// @param objectId     id of the overlay div to position and show
function showDefaultDocumentationIcon(imageId, modelValues, objectId) {
    if (modelValues != ''){
        var xyValueArray = modelValues.split(",");
        var shapeWidth = xyValueArray[2]*1 - xyValueArray[0]*1;
        // if (shapeWidth > 24){
            // translate shape bounds into page coordinates via the image offset
            var diagram = document.getElementById(imageId);
            var xOffset = findPosX(diagram);
            var yOffset = findPosY(diagram);
            var shapeX = xyValueArray[0]*1;
            var shapeY = xyValueArray[1]*1;
            var x = shapeX + xOffset*1;
            var y = shapeY + yOffset*1;
            var h = xyValueArray[3]*1 - xyValueArray[1]*1;
            var url = xyValueArray[4];
            var documentationIconLayer = document.getElementById(objectId);
            // N==1: standards browsers (style.left/top); N==0: legacy IE (posLeft/posTop)
            N = (document.all) ? 0 : 1;
            if (N) {
                documentationIconLayer.style.left = x - 3;
                documentationIconLayer.style.top = y + h;
            } else {
                documentationIconLayer.style.posLeft = x - 3;
                documentationIconLayer.style.posTop = y + h;
            }
            documentationIconLayer.style.visibility="visible"
        // }
    }
}
// Cache the link data of the shape just hovered and compute the page-pixel
// anchors used later by movePopupPositionTo*IconPosition(). Populates the
// module-level *Links arrays (and the implicitly-global modelElementLinks)
// plus the xFor*/yFor* anchor globals.
// @param coords  "x1,y1,x2,y2,..." shape bounds in diagram coordinates
function storeReferenceAndSubdiagramInfos(imageId, coords, fileRefs, folderRefs, urlRefs, diagramRefs, shapeRefs, subdiagrams, modelElementRefs, fromTransitors, toTransitors) {
    if (coords == '') {
        return;
    }
    var bounds = coords.split(",");
    // reset popup width to 250; the Zachman views may have widened it to 500
    var popup = document.getElementById("linkPopupMenuTable");
    popup.width = 250;
    // snapshot every link array for this shape
    fileLinks = fileRefs.slice(0);
    folderLinks = folderRefs.slice(0);
    urlLinks = urlRefs.slice(0);
    diagramLinks = diagramRefs.slice(0);
    shapeLinks = shapeRefs.slice(0);
    subdiagramLinks = subdiagrams.slice(0);
    modelElementLinks = modelElementRefs.slice(0);
    fromTransitorLinks = fromTransitors.slice(0);
    toTransitorLinks = toTransitors.slice(0);
    // translate shape bounds into page coordinates via the image offset
    var diagram = document.getElementById(imageId);
    var xOffset = findPosX(diagram);
    var yOffset = findPosY(diagram);
    var leftX = bounds[0]*1;
    var topY = bounds[1]*1;
    var rightX = bounds[2]*1;
    var height = bounds[3]*1 - topY;
    // reference popup anchors under the shape's bottom-left corner
    xForRefIcon = leftX + xOffset*1;
    yForRefIcon = topY + yOffset*1 + 2 + height;
    // sub-diagram popup anchors under the bottom-right corner
    xForSubDiagramIcon = rightX + xOffset*1 - 12;
    yForSubDiagramIcon = topY + yOffset*1 + 2 + height;
    // transitor popup sits one icon height (12px) below the sub-diagram anchor
    xForTransIcon = xForSubDiagramIcon;
    yForTransIcon = yForSubDiagramIcon + 12;
}
// Rebuild the link popup table with every reference of the current shape:
// file, folder and URL references, then diagram, shape and model-element
// references, using the arrays cached by storeReferenceAndSubdiagramInfos().
// Fixes: the shape-reference and model-element sections each inserted a row
// TWICE (copy-paste duplication), producing a spurious empty row before
// every entry; the duplicate insertions are removed.
function resetPopupForReference() {
    clearLinkPopupContent();
    var popup = document.getElementById("linkPopupMenuTable");
    // Insert one row with an icon+label cell and return the cell.
    function addCell(imgSrc, name) {
        var row = popup.insertRow(popup.rows.length);
        var cell = row.insertCell(0);
        cell.innerHTML = "<div style=\"float: left; width: 18px !important;height: 18px !important;background-image:url(" + imgSrc + ") !important; background-image:url(''); filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + imgSrc + "'); background-repeat: no-repeat;\"></div> " + name;
        cell.valign = "middle";
        return cell;
    }
    // Give a cell the standard hover highlight behaviour.
    function makeSelectable(cell) {
        cell.className = "PopupMenuRowDeselected";
        cell.onmouseover = function (event) { this.className = "PopupMenuRowSelected"; };
        cell.onmouseout = function (event) { this.className = "PopupMenuRowDeselected"; };
    }
    // file references: "name*url"; url may be absent -> non-selectable row
    for (var i = 0; i < fileLinks.length; i++) {
        var fileNameUrl = fileLinks[i].split("*");
        var fileCell = addCell('../images/icons/FileReference.png', fileNameUrl[0]);
        if (fileNameUrl[1] == null) {
            fileCell.className = "PopupMenuRowNonSelectable";
        } else {
            fileCell.destination = fileNameUrl[1];
            makeSelectable(fileCell);
            fileCell.onclick = function (event) { window.open(this.destination); hideLinkPopup(); };
        }
    }
    // folder references: same format and behaviour as file references
    for (var i = 0; i < folderLinks.length; i++) {
        var folderNameUrl = folderLinks[i].split("*");
        var folderCell = addCell('../images/icons/FolderReference.png', folderNameUrl[0]);
        if (folderNameUrl[1] == null) {
            folderCell.className = "PopupMenuRowNonSelectable";
        } else {
            folderCell.destination = folderNameUrl[1];
            makeSelectable(folderCell);
            folderCell.onclick = function (event) { window.open(this.destination); hideLinkPopup(); };
        }
    }
    // url references: [destination, displayName]; name falls back to the url
    for (var i = 0; i < urlLinks.length; i++) {
        var destination = urlLinks[i][0];
        var urlName = urlLinks[i][1];
        if (urlName == null || urlName == '') {
            urlName = destination;
        }
        var urlCell = addCell('../images/icons/UrlReference.png', urlName);
        urlCell.destination = destination;
        makeSelectable(urlCell);
        urlCell.onclick = function (event) { window.open(this.destination); hideLinkPopup(); };
    }
    // diagram references: "url/name/type[/vpLink/vpLinkWithName]"
    for (var j = 0; j < diagramLinks.length; j++) {
        var diagramUrlNameType = diagramLinks[j].split("/");
        var diagramUrl = diagramUrlNameType[0];
        var diagramCell = addCell('../images/icons/' + diagramUrlNameType[2] + '.png', diagramUrlNameType[1]);
        makeSelectable(diagramCell);
        if (diagramUrl == 'vplink') {
            // Visual Paradigm project links encode '/' as '@' in the exported data
            diagramCell.destination = diagramUrlNameType[3].replace('@', '/');
            diagramCell.vpLinkWithName = diagramUrlNameType[4].replace('@', '/');
            diagramCell.onclick = function (event) { showVpLink(this.destination, this.vpLinkWithName, null, this) };
        } else {
            diagramCell.destination = diagramUrl;
            if (diagramUrl != null && diagramUrl != '') {
                diagramCell.onclick = function (event) { window.open(this.destination, '_self') };
            }
        }
    }
    // shape references: "url/name/iconFileName"; clickable only when an icon exists
    for (var j = 0; j < shapeLinks.length; j++) {
        var shapeUrlNameType = shapeLinks[j].split("/");
        var shapeCell = addCell('../images/icons/' + shapeUrlNameType[2] + '.png', shapeUrlNameType[1]);
        shapeCell.destination = shapeUrlNameType[0];
        makeSelectable(shapeCell);
        if (shapeUrlNameType[2].length > 0) {
            shapeCell.onclick = function (event) { window.open(this.destination, '_self') };
        }
    }
    // model element references: same format and behaviour as shape references
    for (var j = 0; j < modelElementLinks.length; j++) {
        var modelElementUrlNameType = modelElementLinks[j].split("/");
        var modelCell = addCell('../images/icons/' + modelElementUrlNameType[2] + '.png', modelElementUrlNameType[1]);
        modelCell.destination = modelElementUrlNameType[0];
        makeSelectable(modelCell);
        if (modelElementUrlNameType[2].length > 0) {
            modelCell.onclick = function (event) { window.open(this.destination, '_self') };
        }
    }
}
// Rebuild the link popup with the sub-diagrams ("url/name/type" entries)
// cached for the current shape.
function resetPopupForSubdiagram() {
    clearLinkPopupContent();
    var popup = document.getElementById("linkPopupMenuTable");
    for (var idx = 0; idx < subdiagramLinks.length; idx++) {
        var parts = subdiagramLinks[idx].split("/");
        var target = parts[0];
        var label = parts[1];
        var iconSrc = '../images/icons/' + parts[2] + '.png';
        var cell = popup.insertRow(popup.rows.length).insertCell(0);
        cell.innerHTML = "<div style=\"float: left; width: 18px !important;height: 18px !important;background-image:url(" + iconSrc + ") !important; background-image:url(''); filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + iconSrc + "'); background-repeat: no-repeat;\"></div> " + label;
        cell.valign = "middle";
        cell.destination = target;
        cell.className = "PopupMenuRowDeselected";
        cell.onmouseover = function (event) { this.className = "PopupMenuRowSelected"; };
        cell.onmouseout = function (event) { this.className = "PopupMenuRowDeselected"; };
        // only rows with a real target are clickable
        if (target != null && target != '') {
            cell.onclick = function (event) { window.open(this.destination, '_self') };
        }
    }
}
// Move the popup to the cached reference-icon anchor (shape bottom-left).
function movePopupPositionToReferenceIconPosition() {
    movePopupPositionToSpecificPosition(xForRefIcon, yForRefIcon);
}
// Move the popup to the cached sub-diagram-icon anchor (shape bottom-right).
function movePopupPositionToSubdiagramIconPosition() {
    movePopupPositionToSpecificPosition(xForSubDiagramIcon, yForSubDiagramIcon);
}
// Move the popup to the current mouse position, converting the event's
// viewport coordinates to document coordinates (IE uses body/documentElement
// scroll offsets, other browsers pageX/YOffset).
function movePopupPositionToCursorPosition(imageId, event) {
    var diagram = document.getElementById(imageId);
    var xOffset = 0;
    var yOffset = 0;
    // IE exposes the event globally; others pass it as an argument
    var e = (window.event) ? window.event : event;
    xOffset = e.clientX;
    yOffset = e.clientY;
    if (document.all) {
        if (!document.documentElement.scrollLeft)
            xOffset += document.body.scrollLeft;
        else
            xOffset += document.documentElement.scrollLeft;
        if (!document.documentElement.scrollTop)
            yOffset += document.body.scrollTop;
        else
            yOffset += document.documentElement.scrollTop;
    }else{
        xOffset += window.pageXOffset;
        yOffset += window.pageYOffset;
    }
    var nX = xOffset*1;
    var nY = yOffset*1;
    movePopupPositionToSpecificPosition(nX, nY);
}
// Place the popup layer at document coordinates (x, y).
// N==1: standards browsers (style.left/top); N==0: legacy IE (posLeft/posTop).
function movePopupPositionToSpecificPosition(x, y) {
    var popupLayer = document.getElementById("linkPopupMenuLayer");
    N = (document.all) ? 0 : 1;
    if (N) {
        popupLayer.style.left = x;
        popupLayer.style.top = y;
    } else {
        popupLayer.style.posLeft = x;
        popupLayer.style.posTop = y;
    }
}
// Toggle the link popup between shown and hidden.
function switchPopupShowHideStatus(){
    var menuTable = document.getElementById("linkPopupMenuTable");
    if (menuTable.style.visibility != "visible") {
        showLinkPopup();
    } else {
        hideLinkPopup();
    }
}
// Toggle the popup for a Zachman cell. Clicking the same cell kind again
// hides the popup; clicking a different kind keeps it open and just retags
// it (the caller then refills the content).
function switchPopupShowHideStatusForZachman(aForZachmanKind) {
    var popup = document.getElementById("linkPopupMenuTable");
    if (popup.style.visibility=="visible") {
        if (aForZachmanKind == popup.forZachmanKind) {
            popup.forZachmanKind = null;
            hideLinkPopup();
        }
        else {
            // keep popup shown, just need change its forZachmanKind
            popup.forZachmanKind = aForZachmanKind;
        }
    }else{
        popup.forZachmanKind = aForZachmanKind;
        showLinkPopup();
    }
}
// Move the popup to the tracked cursor position.
// NOTE(review): cursorX/cursorY are not defined in this file — presumably
// maintained by a mouse-tracking script elsewhere in the export; confirm.
function adjustPopupPositionForSpotLightTable() {
    movePopupPositionToSpecificPosition(cursorX,cursorY);
}
// Show the link popup (closing any vplink / referenced-by popups first).
// Fixed for consistency with hideLinkPopup(): guard against the popup table
// being absent from this page instead of throwing.
function showLinkPopup(){
    hideVpLink();
    hideReferencedBys();
    var popup = document.getElementById("linkPopupMenuTable");
    if (popup != null) {
        popup.style.visibility="visible"
        document.getElementById("linkPopupMenuLayer").style.visibility="visible";
    }
}
// Hide the link popup and its backing layer, if present on this page.
function hideLinkPopup(){
    var popup = document.getElementById("linkPopupMenuTable");
    if (popup == null) {
        return;
    }
    popup.style.visibility = "hidden";
    document.getElementById("linkPopupMenuLayer").style.visibility = "hidden";
}
// Remove every row from the link popup table.
function clearLinkPopupContent(){
    var popup = document.getElementById("linkPopupMenuTable");
    while (popup.rows.length > 0) {
        popup.deleteRow(0);
    }
}
// Move the popup to the cached transitor-icon anchor (below the sub-diagram icon).
function movePopupPositionToTransitorIconPosition() {
    movePopupPositionToSpecificPosition(xForTransIcon, yForTransIcon);
}
// Rebuild the popup with the shape's transitor links, split into a "From:"
// section and a "To:" section; each entry is rendered by addPopupItem().
function resetPopupForTransitor() {
    clearLinkPopupContent();
    var popup = document.getElementById("linkPopupMenuTable");
    // transitor
    var row = popup.insertRow(popup.rows.length);
    var popupCell = row.insertCell(0);
    popupCell.innerHTML="<div style=\"font-size:11px\">From:</div>";
    for (j = 0 ; j < fromTransitorLinks.length ; j++) {
        var shapeUrlNameType = fromTransitorLinks[j].split("/");
        addPopupItem(popup, shapeUrlNameType);
    }
    row = popup.insertRow(popup.rows.length);
    popupCell = row.insertCell(0);
    popupCell.innerHTML="<div style=\"font-size:11px\">To:</div>";
    for (j = 0 ; j < toTransitorLinks.length ; j++) {
        var shapeUrlNameType = toTransitorLinks[j].split("/");
        addPopupItem(popup, shapeUrlNameType);
    }
}
// for From/To Transitor
// Append one clickable row to the transitor popup.
// @param shapeUrlNameType  [url, displayName, iconFileName]
function addPopupItem(popup, shapeUrlNameType) {
    var url = shapeUrlNameType[0];
    var name = shapeUrlNameType[1];
    var iconFileName = shapeUrlNameType[2];
    var imgSrc = '../images/icons/'+iconFileName+'.png';
    var row = popup.insertRow(popup.rows.length)
    var imgPopupCell = row.insertCell(0);
    imgPopupCell.innerHTML="<div style=\"float: left; width: 18px !important;height: 18px !important;background-image:url(" + imgSrc + ") !important; background-image:url(''); filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + imgSrc + "'); background-repeat: no-repeat;\"></div> "+name
    imgPopupCell.valign="middle";
    imgPopupCell.destination=url;
    imgPopupCell.className="PopupMenuRowDeselected";
    imgPopupCell.onmouseover= function onmouseover(event) { this.className="PopupMenuRowSelected"; };
    imgPopupCell.onmouseout= function onmouseover(event) { this.className="PopupMenuRowDeselected"; };
    imgPopupCell.onclick= function onclick(event) { window.open(this.destination,'_self') };
}
// for Zachman
// @param format: url/name/type, url/name/type, ...
// For a Zachman framework cell: rebuild the popup with the cell's diagrams
// and position it overlapping the cell, on the left or right depending on
// available horizontal space.
// @param lValues  array of "url/name/type" strings
function resetPopupForZachmanCellDiagrams(lCellId, lValues) {
    clearLinkPopupContent();
    var popup = document.getElementById("linkPopupMenuTable");
    // NOTE(review): a unitless style.width is ignored by standards-mode
    // browsers; the rest of the file sets the width *attribute*
    // (popup.width = 250). Presumably this export targets quirks-mode IE
    // where a bare number works — confirm before changing.
    popup.style.width = 250;
    var lZachmanCell = document.getElementById(lCellId);
    var lZachmanCellX = findPosX(lZachmanCell);
    var lZachmanCellY = findPosY(lZachmanCell);
    if (lZachmanCellX > 250) {
        // show on left
        movePopupPositionToSpecificPosition(lZachmanCellX+lZachmanCell.offsetWidth-popup.offsetWidth-5, lZachmanCellY+lZachmanCell.offsetHeight-5);
    }
    else {
        // show on right
        // x+5 & y-5 to let the popup overlap with current cell
        movePopupPositionToSpecificPosition(lZachmanCellX+5, lZachmanCellY+lZachmanCell.offsetHeight-5);
    }
    // ZachmanCell.diagrams
    for (j = 0 ; j < lValues.length ; j++) {
        var diagramUrlNameType = lValues[j].split("/");
        var url = diagramUrlNameType[0];
        var name = diagramUrlNameType[1];
        var type = diagramUrlNameType[2];
        var imgSrc = '../images/icons/'+type+'.png';
        var row = popup.insertRow(popup.rows.length);
        var imgPopupCell = row.insertCell(0);
        imgPopupCell.innerHTML="<div style=\"float: left; width: 18px !important;height: 18px !important;background-image:url(" + imgSrc + ") !important; background-image:url(''); filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + imgSrc + "'); background-repeat: no-repeat;\"></div> "+name;
        imgPopupCell.valign="middle";
        imgPopupCell.destination=url;
        imgPopupCell.className="PopupMenuRowDeselected";
        imgPopupCell.onmouseout= function onmouseover(event) { this.className="PopupMenuRowDeselected"; };
        // rows without a target url stay non-selectable and non-clickable
        if (url != null && url != '') {
            imgPopupCell.onmouseover= function onmouseover(event) { this.className="PopupMenuRowSelected"; };
            imgPopupCell.onclick= function onclick(event) { window.open(this.destination,'_self') };
        }
        else {
            imgPopupCell.onmouseover= function onmouseover(event) { this.className="PopupMenuRowNonSelectable"; };
        }
    }
}
// @param format: url/name/aliases/labels/documentation, url/name/aliases/labels/documentation, ...
function resetPopupForZachmanCellTerms(lCellId, lValues) {
clearLinkPopupContent();
var popup = document.getElementById("linkPopupMenuTable");
popup.style.width = 500;
var lZachmanCell = document.getElementById(lCellId);
var lZachmanCellX = findPosX(lZachmanCell);
var lZachmanCellY = findPosY(lZachmanCell);
if (lZachmanCellX > 500) {
// show on left
movePopupPositionToSpecificPosition(lZachmanCellX+lZachmanCell.offsetWidth-popup.offsetWidth-5, lZachmanCellY+lZachmanCell.offsetHeight-5);
}
else {
// show on right
// x+5 & y-5 to let the popup overlap with current cell
movePopupPositionToSpecificPosition(lZachmanCellX+5, lZachmanCellY+lZachmanCell.offsetHeight-5);
}
// ZachmanCell.terms
{
var row = popup.insertRow(popup.rows.length);
row.className="PopupMenuHeaderRow";
{
var lPopupCell = row.insertCell(0);
lPopupCell.innerHTML="Name";
lPopupCell.valign="middle";
}
{
var lPopupCell = row.insertCell(1);
lPopupCell.innerHTML="Aliases";
lPopupCell.valign="middle";
}
{
var lPopupCell = row.insertCell(2);
lPopupCell.innerHTML="Labels";
lPopupCell.valign="middle";
}
{
var lPopupCell = row.insertCell(3);
lPopupCell.innerHTML="Documentation";
lPopupCell.valign="middle";
}
}
for (j = 0 ; j < lValues.length ; j++) {
var lValue = lValues[j].split("/");
var url = lValue[0];
var name = lValue[1];
var aliases = lValue[2];
var labels = lValue[3];
var documentation = lValue[4];
var row = popup.insertRow(popup.rows.length);
for (lCellIndex = 1; lCellIndex < lValue.length; lCellIndex++) {
var lPopupCell = row.insertCell(lCellIndex-1);
lPopupCell.id="cell"+j+","+lCellIndex-1;
lPopupCell.innerHTML=lValue[lCellIndex];
lPopupCell.valign="middle";
}
if (url != null && url != '') {
row.destination=url;
row.className="PopupMenuRowDeselected";
row.onmouseover= function onmouseover(event) { this.className="PopupMenuRowSelected"; };
row.onmouseout= function onmouseover(event) { this.className="PopupMenuRowDeselected"; };
row.onclick= function onclick(event) { window.open(this.destination,'_self') };
}
else {
row.className="PopupMenuRowNonSelectable";
}
}
}
// @param format: url/id/name/ruleText, url/id/name/ruleText, ...
function resetPopupForZachmanCellRules(lCellId, lValues) {
clearLinkPopupContent();
var popup = document.getElementById("linkPopupMenuTable");
popup.style.width = 500;
var lZachmanCell = document.getElementById(lCellId);
var lZachmanCellX = findPosX(lZachmanCell);
var lZachmanCellY = findPosY(lZachmanCell);
if (lZachmanCellX > 500) {
// show on left
movePopupPositionToSpecificPosition(lZachmanCellX+lZachmanCell.offsetWidth-popup.offsetWidth-5, lZachmanCellY+lZachmanCell.offsetHeight-5);
}
else {
// show on right
// x+5 & y-5 to let the popup overlap with current cell
movePopupPositionToSpecificPosition(lZachmanCellX+5, lZachmanCellY+lZachmanCell.offsetHeight-5);
}
// ZachmanCell.rules
{
var row = popup.insertRow(popup.rows.length);
row.className="PopupMenuHeaderRow";
{
var lPopupCell = row.insertCell(0);
lPopupCell.innerHTML="ID";
lPopupCell.valign="middle";
}
{
var lPopupCell = row.insertCell(1);
lPopupCell.innerHTML="Name";
lPopupCell.valign="middle";
}
{
var lPopupCell = row.insertCell(2);
lPopupCell.innerHTML="Rule";
lPopupCell.valign="middle";
}
}
for (j = 0 ; j < lValues.length ; j++) {
var lValue = lValues[j].split("/");
var url = lValue[0];
var id = lValue[1];
var name = lValue[2];
var ruleText = lValue[3];
var row = popup.insertRow(popup.rows.length);
for (lCellIndex = 1; lCellIndex < lValue.length; lCellIndex++) {
var lPopupCell = row.insertCell(lCellIndex-1);
lPopupCell.id="cell"+j+","+lCellIndex-1;
lPopupCell.innerHTML=lValue[lCellIndex];
lPopupCell.valign="middle";
}
if (url != null && url != '') {
row.destination=url;
row.className="PopupMenuRowDeselected";
row.onmouseover= function onmouseover(event) { this.className="PopupMenuRowSelected"; };
row.onmouseout= function onmouseover(event) { this.className="PopupMenuRowDeselected"; };
row.onclick= function onclick(event) { window.open(this.destination,'_self') };
}
else {
row.className="PopupMenuRowNonSelectable";
}
}
}
function showVpLink(link, linkWithName, pageUrlElementName, linkElem) {
// getting absolute location in page
var lLeft = 0;
var lTop = 0;
var lParent = linkElem;
while (lParent != null) {
lLeft += lParent.offsetLeft;
lTop += lParent.offsetTop;
lParent = lParent.offsetParent;
}
showVpLinkAt(link, linkWithName, pageUrlElementName, lLeft, lTop + linkElem.offsetHeight);
}
function showVpLinkAtDiagram(link, linkWithName, pageUrlElementName, aLeft, aTop) {
var lLeft = 0;
var lTop = 0;
var diagramElem = document.getElementById('diagram');
var lParent = diagramElem;
while (lParent != null) {
lLeft += lParent.offsetLeft;
lTop += lParent.offsetTop;
lParent = lParent.offsetParent;
}
showVpLinkAt(link, linkWithName, pageUrlElementName, lLeft + aLeft, lTop + aTop);
}
function showVpLinkAt(link, linkWithName, pageUrlElementName, aLeft, aTop) {
var popup = document.getElementById("vplink");
if (popup.style.visibility == "visible") {
popup.style.visibility="hidden";
} else {
var linktext = document.getElementById("vplink-text");
var withName = document.getElementById("vplink-checkbox");
var lLinkType = document.getElementById("vplink-linkType")
// read from cookies (https://earth.space/#issueId=81262)
var lWithNameValue = getCookie("vpProjectPublisher_vpLink_withName");
if (lWithNameValue != null) {
if (lWithNameValue == "true") {
withName.checked = true;
}
else {
withName.checked = false;
}
}
var lLinkTypeValue = getCookie("vpProjectPublisher_vpLink_type");
if (lLinkTypeValue != null) {
if (lLinkTypeValue == "Page URL") {
lLinkType.selectedIndex = 1; // 1 should be "Page URL"
}
else {
lLinkType.selectedIndex = 0; // 0 should be "Project Link"
}
}
vpLinkProjectLink = link;
vpLinkProjectLinkWithName = linkWithName;
if (pageUrlElementName != null) {
vpLinkPageUrl = document.location.href;
vpLinkPageUrlWithName = pageUrlElementName+"\n"+vpLinkPageUrl;
lLinkType.disabled = false;
}
<|fim▁hole|> vpLinkPageUrlWithName = null;
lLinkType.selectedIndex = 0; // 0 should be "Project Link"
lLinkType.disabled = true;
}
if (withName.checked) {
if (lLinkType.disabled == false && lLinkType.options[lLinkType.selectedIndex].value == "Page URL") {
// Page URL
linktext.value = vpLinkPageUrlWithName;
}
else {
// Project Link
linktext.value = vpLinkProjectLinkWithName;
}
} else {
if (lLinkType.disabled == false && lLinkType.options[lLinkType.selectedIndex].value == "Page URL") {
// Page URL
linktext.value = vpLinkPageUrl;
}
else {
// Project Link
linktext.value = vpLinkProjectLink;
}
}
N = (document.all) ? 0 : 1;
if (N) {
popup.style.left = aLeft;
popup.style.top = aTop;
} else {
popup.style.posLeft = aLeft;
popup.style.posTop = aTop;
}
hideLinkPopup();
hideReferencedBys();
popup.style.visibility="visible"
linktext.focus();
linktext.select();
}
}
function hideVpLink() {
var popupLayer = document.getElementById("vplink");
if (popupLayer != null && popupLayer.style.visibility == "visible") {
popupLayer.style.visibility="hidden";
}
}
function vpLinkToggleName() {
var linktext = document.getElementById("vplink-text");
var withName = document.getElementById("vplink-checkbox");
var lLinkType = document.getElementById("vplink-linkType")
// write to cookies (https://earth.space/#issueId=81262)
setCookie("vpProjectPublisher_vpLink_withName", withName.checked);
setCookie("vpProjectPublisher_vpLink_type", lLinkType.options[lLinkType.selectedIndex].value);
if (withName.checked) {
if (lLinkType.disabled == false && lLinkType.options[lLinkType.selectedIndex].value == "Page URL") {
// Page URL
linktext.value = vpLinkPageUrlWithName;
}
else {
// Project Link
linktext.value = vpLinkProjectLinkWithName;
}
} else {
if (lLinkType.disabled == false && lLinkType.options[lLinkType.selectedIndex].value == "Page URL") {
// Page URL
linktext.value = vpLinkPageUrl;
}
else {
// Project Link
linktext.value = vpLinkProjectLink;
}
}
linktext.focus();
linktext.select();
}
function showReferencedBys(invokerId, refByDiagrams, refByModels) {
var popupLayer = document.getElementById("referencedBys");
if (popupLayer.style.visibility == "visible") {
popupLayer.style.visibility="hidden";
} else {
var popup = document.getElementById("referencedBysTable");
for (i = popup.rows.length ; i >0 ; i--) {
popup.deleteRow(0);
}
var refByDiagramLinks = [];
var refByModelLinks = [];
{
popup.width = 250; // reset to 250 first (forZachman may changed it to 500)
for (i = 0 ; i < refByDiagrams.length ; i++) {
refByDiagramLinks[i] = refByDiagrams[i];
}
for (i = 0 ; i < refByModels.length ; i++) {
refByModelLinks[i] = refByModels[i];
}
}
{
// ref by diagrams
for (j = 0 ; j < refByDiagramLinks.length ; j++) {
var diagramUrlNameType = refByDiagramLinks[j].split("/");
var url = diagramUrlNameType[0];
var name = diagramUrlNameType[1];
var type = diagramUrlNameType[2];
var imgSrc = '../images/icons/'+type+'.png';
var row = popup.insertRow(popup.rows.length)
var imgPopupCell = row.insertCell(0);
imgPopupCell.innerHTML="<div style=\"float: left; width: 18px !important;height: 18px !important;background-image:url(" + imgSrc + ") !important; background-image:url(''); filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + imgSrc + "'); background-repeat: no-repeat;\"></div> "+name
imgPopupCell.valign="middle"
imgPopupCell.className="PopupMenuRowDeselected";
imgPopupCell.onmouseover= function onmouseover(event) { this.className="PopupMenuRowSelected"; };
imgPopupCell.onmouseout= function onmouseover(event) { this.className="PopupMenuRowDeselected"; };
if (url == 'vplink') {
imgPopupCell.destination= diagramUrlNameType[3].replace('@','/');
imgPopupCell.vpLinkWithName= diagramUrlNameType[4].replace('@','/');
imgPopupCell.onclick= function onclick(event) { showVpLink(this.destination, this.vpLinkWithName, null, this) };
} else {
imgPopupCell.destination=url
if (url != null && url != '') {
imgPopupCell.onclick= function onclick(event) { window.open(this.destination,'_self') };
}
}
}
// ref by models
for (j = 0 ; j < refByModelLinks.length ; j++) {
var modelElementUrlNameType = refByModelLinks[j].split("/");
var url = modelElementUrlNameType[0];
var name = modelElementUrlNameType[1];
var iconFileName = modelElementUrlNameType[2];
var imgSrc = '../images/icons/'+iconFileName+'.png';
var row = popup.insertRow(popup.rows.length)
var row = popup.insertRow(popup.rows.length)
var imgPopupCell = row.insertCell(0);
imgPopupCell.innerHTML="<div style=\"float: left; width: 18px !important;height: 18px !important;background-image:url(" + imgSrc + ") !important; background-image:url(''); filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + imgSrc + "'); background-repeat: no-repeat;\"></div> "+name
imgPopupCell.valign="middle"
imgPopupCell.destination=url
imgPopupCell.className="PopupMenuRowDeselected";
imgPopupCell.onmouseover= function onmouseover(event) { this.className="PopupMenuRowSelected"; };
imgPopupCell.onmouseout= function onmouseover(event) { this.className="PopupMenuRowDeselected"; };
if (iconFileName.length > 0) {
imgPopupCell.onclick= function onclick(event) { window.open(this.destination,'_self') };
}
}
}
var invoker = document.getElementById(invokerId);
var xOffset = findPosX(invoker);
var yOffset = findPosY(invoker);
yOffset = yOffset+18;
N = (document.all) ? 0 : 1;
if (N) {
popupLayer.style.left = xOffset;
popupLayer.style.top = yOffset;
} else {
popupLayer.style.posLeft = xOffset;
popupLayer.style.posTop = yOffset;
}
hideLinkPopup();
hideVpLink();
popupLayer.style.visibility = "visible";
}
}
function hideReferencedBys() {
var popupLayer = document.getElementById("referencedBys");
if (popupLayer != null && popupLayer.style.visibility == "visible") {
popupLayer.style.visibility="hidden";
}
}
function setCookie(c_name, value) {
var c_value = escape(value);
document.cookie = c_name + "=" + c_value;
}
function getCookie(c_name) {
var c_value = document.cookie;
var c_start = c_value.indexOf(" " + c_name + "=");
if (c_start == -1) {
c_start = c_value.indexOf(c_name + "=");
}
if (c_start == -1) {
c_value = null;
}
else {
c_start = c_value.indexOf("=", c_start)+1;
var c_end = c_value.indexOf(";", c_start);
if (c_end == -1) {
c_end = c_value.length;
}
c_value = unescape(c_value.substring(c_start, c_end));
}
return c_value;
}<|fim▁end|>
|
else {
vpLinkPageUrl = null;
|
<|file_name|>INotificationView.ts<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> * `changedLecture` and `changedUnit` only represent the optional IDs (`_id`s) of the corresponding full interfaces of `INotification`.
* The `user` is omitted, because the notifications currently can only be requested by the owner (i.e. `@CurrentUser`) anyway.
* (Also, the `_id` is of type `string` instead of `any`, since the `any` in `INotification` is only there for compatibility
* with `mongoose.Document` to form `INotificationModel`.)
*/
export interface INotificationView {
_id: string;
changedCourse?: string;
changedLecture?: string;
changedUnit?: string;
text: string;
isOld: boolean;
}<|fim▁end|>
|
* Defines what the `{get} /api/notification/` aka `getNotifications` route returns as response (in form of an array).
* It is the sanitized version of `INotification` interface: The property names are identical, but `changedCourse`,
|
<|file_name|>FiredriveCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from pyload.plugin.internal.DeadCrypter import DeadCrypter
class FiredriveCom(DeadCrypter):
__name = "FiredriveCom"
__type = "crypter"
__version = "0.03"
<|fim▁hole|> __pattern = r'https?://(?:www\.)?(firedrive|putlocker)\.com/share/.+'
__config = [] #@TODO: Remove in 0.4.10
__description = """Firedrive.com folder decrypter plugin"""
__license = "GPLv3"
__authors = [("Walter Purcaro", "[email protected]")]<|fim▁end|>
| |
<|file_name|>MobGrinder.java<|end_file_name|><|fim▁begin|>package com.avrgaming.civcraft.structure;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.concurrent.locks.ReentrantLock;
import org.bukkit.Location;
import com.avrgaming.civcraft.config.CivSettings;
import com.avrgaming.civcraft.exception.CivException;
import com.avrgaming.civcraft.exception.InvalidConfiguration;
import com.avrgaming.civcraft.object.Buff;
import com.avrgaming.civcraft.object.Town;
public class MobGrinder extends Structure {
private static final double T1_CHANCE = CivSettings.getDoubleStructure("mobGrinder.t1_chance"); //1%
private static final double T2_CHANCE = CivSettings.getDoubleStructure("mobGrinder.t2_chance"); //2%
private static final double T3_CHANCE = CivSettings.getDoubleStructure("mobGrinder.t3_chance"); //1%
private static final double T4_CHANCE = CivSettings.getDoubleStructure("mobGrinder.t4_chance"); //0.25%
private static final double PACK_CHANCE = CivSettings.getDoubleStructure("mobGrinder.pack_chance"); //0.10%
private static final double BIGPACK_CHANCE = CivSettings.getDoubleStructure("mobGrinder.bigpack_chance");
private static final double HUGEPACK_CHANCE = CivSettings.getDoubleStructure("mobGrinder.hugepack_chance");
public int skippedCounter = 0;
public ReentrantLock lock = new ReentrantLock();
public enum Crystal {
T1,
T2,
T3,
T4,
PACK,
BIGPACK,
HUGEPACK
}
protected MobGrinder(Location center, String id, Town town) throws CivException {
super(center, id, town);
}
public MobGrinder(ResultSet rs) throws SQLException, CivException {
super(rs);
}
@Override
public String getDynmapDescription() {
return null;
}
@Override
public String getMarkerIconName() {
return "minecart";
}
public double getMineralChance(Crystal crystal) {
double chance = 0;<|fim▁hole|> case T1:
chance = T1_CHANCE;
break;
case T2:
chance = T2_CHANCE;
break;
case T3:
chance = T3_CHANCE;
break;
case T4:
chance = T4_CHANCE;
break;
case PACK:
chance = PACK_CHANCE;
break;
case BIGPACK:
chance = BIGPACK_CHANCE;
break;
case HUGEPACK:
chance = HUGEPACK_CHANCE;
}
double increase = chance*this.getTown().getBuffManager().getEffectiveDouble(Buff.EXTRACTION);
chance += increase;
try {
if (this.getTown().getGovernment().id.equals("gov_tribalism")) {
chance *= CivSettings.getDouble(CivSettings.structureConfig, "mobGrinder.tribalism_rate");
} else {
chance *= CivSettings.getDouble(CivSettings.structureConfig, "mobGrinder.penalty_rate");
}
} catch (InvalidConfiguration e) {
e.printStackTrace();
}
return chance;
}
}<|fim▁end|>
|
switch (crystal) {
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Lcdplate',
version=get_version('mopidy_lcdplate/__init__.py'),
url='https://github.com/gimunu/mopidy-lcdplate',
license='Apache License, Version 2.0',
author='Umberto De Giovannini',
author_email='[email protected]',
description='Modipy extension for Adafruit lcd plate',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 0.18',
'Pykka >= 1.1',
],
test_suite='nose.collector',
tests_require=[
'nose',
'mock >= 1.0',
],
entry_points={
'mopidy.ext': [
'lcdplate = mopidy_lcdplate:Extension',<|fim▁hole|> },
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)<|fim▁end|>
|
],
|
<|file_name|>repl.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var server = repl.start({});
var con = server.context;
con.name='zfpx';
con.age = 5;
con.grow = function(){
return ++con.age;
}<|fim▁end|>
|
var repl = require('repl');
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::CR2 {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = "Possible values of the field `MMS`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MMSR {
#[doc = "the UG bit from the TIMx_EGR register is used as trigger output (TRGO). If the reset is generated by the trigger input (slave mode controller configured in reset mode) then the signal on TRGO is delayed compared to the actual reset."] RESET,
#[doc = "the Counter Enable signal CNT_EN is used as trigger output (TRGO)."] ENABLE,
#[doc = "The update event is selected as trigger output (TRGO)."] UPDATE,
#[doc = "The trigger output send a positive pulse when the CC1IF flag is to be set (even if it was already high), as soon as a capture or a compare match occurred.(TRGO)."] COMPAREPULSE,
#[doc = "OC1REF signal is used as trigger output (TRGO)"] COMPAREOC1REF,
#[doc = "OC2REF signal is used as trigger output (TRGO)"] COMPAREOC2REF,
#[doc = "OC3REF signal is used as trigger output (TRGO)"] COMPAREOC3REF,
#[doc = "OC4REF signal is used as trigger output (TRGO)"] COMPAREOC4REF,
}
impl MMSR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
match *self {
MMSR::RESET => 0,
MMSR::ENABLE => 1,
MMSR::UPDATE => 2,
MMSR::COMPAREPULSE => 3,
MMSR::COMPAREOC1REF => 4,
MMSR::COMPAREOC2REF => 5,
MMSR::COMPAREOC3REF => 6,
MMSR::COMPAREOC4REF => 7,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline(always)]
pub fn _from(value: u8) -> MMSR {
match value {
0 => MMSR::RESET,
1 => MMSR::ENABLE,
2 => MMSR::UPDATE,
3 => MMSR::COMPAREPULSE,
4 => MMSR::COMPAREOC1REF,
5 => MMSR::COMPAREOC2REF,
6 => MMSR::COMPAREOC3REF,
7 => MMSR::COMPAREOC4REF,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `RESET`"]
#[inline(always)]
pub fn is_reset(&self) -> bool {
*self == MMSR::RESET
}
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool {
*self == MMSR::ENABLE
}<|fim▁hole|> }
#[doc = "Checks if the value of the field is `COMPAREPULSE`"]
#[inline(always)]
pub fn is_compare_pulse(&self) -> bool {
*self == MMSR::COMPAREPULSE
}
#[doc = "Checks if the value of the field is `COMPAREOC1REF`"]
#[inline(always)]
pub fn is_compare_oc1ref(&self) -> bool {
*self == MMSR::COMPAREOC1REF
}
#[doc = "Checks if the value of the field is `COMPAREOC2REF`"]
#[inline(always)]
pub fn is_compare_oc2ref(&self) -> bool {
*self == MMSR::COMPAREOC2REF
}
#[doc = "Checks if the value of the field is `COMPAREOC3REF`"]
#[inline(always)]
pub fn is_compare_oc3ref(&self) -> bool {
*self == MMSR::COMPAREOC3REF
}
#[doc = "Checks if the value of the field is `COMPAREOC4REF`"]
#[inline(always)]
pub fn is_compare_oc4ref(&self) -> bool {
*self == MMSR::COMPAREOC4REF
}
}
#[doc = "Values that can be written to the field `MMS`"]
pub enum MMSW {
#[doc = "the UG bit from the TIMx_EGR register is used as trigger output (TRGO). If the reset is generated by the trigger input (slave mode controller configured in reset mode) then the signal on TRGO is delayed compared to the actual reset."] RESET,
#[doc = "the Counter Enable signal CNT_EN is used as trigger output (TRGO)."] ENABLE,
#[doc = "The update event is selected as trigger output (TRGO)."] UPDATE,
#[doc = "The trigger output send a positive pulse when the CC1IF flag is to be set (even if it was already high), as soon as a capture or a compare match occurred.(TRGO)."] COMPAREPULSE,
#[doc = "OC1REF signal is used as trigger output (TRGO)"] COMPAREOC1REF,
#[doc = "OC2REF signal is used as trigger output (TRGO)"] COMPAREOC2REF,
#[doc = "OC3REF signal is used as trigger output (TRGO)"] COMPAREOC3REF,
#[doc = "OC4REF signal is used as trigger output (TRGO)"] COMPAREOC4REF,
}
impl MMSW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline(always)]
pub fn _bits(&self) -> u8 {
match *self {
MMSW::RESET => 0,
MMSW::ENABLE => 1,
MMSW::UPDATE => 2,
MMSW::COMPAREPULSE => 3,
MMSW::COMPAREOC1REF => 4,
MMSW::COMPAREOC2REF => 5,
MMSW::COMPAREOC3REF => 6,
MMSW::COMPAREOC4REF => 7,
}
}
}
#[doc = r" Proxy"]
pub struct _MMSW<'a> {
w: &'a mut W,
}
impl<'a> _MMSW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: MMSW) -> &'a mut W {
{
self.bits(variant._bits())
}
}
#[doc = "the UG bit from the TIMx_EGR register is used as trigger output (TRGO). If the reset is generated by the trigger input (slave mode controller configured in reset mode) then the signal on TRGO is delayed compared to the actual reset."]
#[inline(always)]
pub fn reset(self) -> &'a mut W {
self.variant(MMSW::RESET)
}
#[doc = "the Counter Enable signal CNT_EN is used as trigger output (TRGO)."]
#[inline(always)]
pub fn enable(self) -> &'a mut W {
self.variant(MMSW::ENABLE)
}
#[doc = "The update event is selected as trigger output (TRGO)."]
#[inline(always)]
pub fn update(self) -> &'a mut W {
self.variant(MMSW::UPDATE)
}
#[doc = "The trigger output send a positive pulse when the CC1IF flag is to be set (even if it was already high), as soon as a capture or a compare match occurred.(TRGO)."]
#[inline(always)]
pub fn compare_pulse(self) -> &'a mut W {
self.variant(MMSW::COMPAREPULSE)
}
#[doc = "OC1REF signal is used as trigger output (TRGO)"]
#[inline(always)]
pub fn compare_oc1ref(self) -> &'a mut W {
self.variant(MMSW::COMPAREOC1REF)
}
#[doc = "OC2REF signal is used as trigger output (TRGO)"]
#[inline(always)]
pub fn compare_oc2ref(self) -> &'a mut W {
self.variant(MMSW::COMPAREOC2REF)
}
#[doc = "OC3REF signal is used as trigger output (TRGO)"]
#[inline(always)]
pub fn compare_oc3ref(self) -> &'a mut W {
self.variant(MMSW::COMPAREOC3REF)
}
#[doc = "OC4REF signal is used as trigger output (TRGO)"]
#[inline(always)]
pub fn compare_oc4ref(self) -> &'a mut W {
self.variant(MMSW::COMPAREOC4REF)
}
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 7;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 4:6 - Master mode selection"]
#[inline(always)]
pub fn mms(&self) -> MMSR {
MMSR::_from({
const MASK: u8 = 7;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 4:6 - Master mode selection"]
#[inline(always)]
pub fn mms(&mut self) -> _MMSW {
_MMSW { w: self }
}
}<|fim▁end|>
|
#[doc = "Checks if the value of the field is `UPDATE`"]
#[inline(always)]
pub fn is_update(&self) -> bool {
*self == MMSR::UPDATE
|
<|file_name|>todoListItem.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
export interface TodoListItemProps {
task: string;
isCompleted: boolean;
toggleTask: Function;
saveTask: Function;
deleteTask: Function;
}
export interface TodoListItemState {
isEditing: boolean
}
export class TodoListItem extends React.Component<TodoListItemProps, TodoListItemState> {
public refs: {
[string: string]: any;
editTask: HTMLInputElement;
}
constructor(props) {
super(props);
this.state = {
isEditing: false
}
this.bindMethods();
}
public render() {
var actionsSection = this.renderActionsSection();
var taskSection = this.renderTaskSection();
return (
<tr>
{taskSection}
{actionsSection}
</tr>
);
};
private bindMethods() {
this.renderActionsSection = this.renderActionsSection.bind(this);
this.renderTaskSection = this.renderTaskSection.bind(this);
this.onEditClick = this.onEditClick.bind(this);
this.onSaveClick = this.onSaveClick.bind(this);
this.onCancelClick = this.onCancelClick.bind(this);
}
private renderActionsSection() {
return (this.state.isEditing) ? (
<td>
<button className="todo-edit-btn"
onClick={this.onSaveClick}>Save</button>
<button className="todo-delete-btn"
onClick={this.onCancelClick}>Cancel</button>
</td>
)
: (
<td>
<button className="todo-edit-btn"
onClick={this.onEditClick}>Edit</button>
<button className="todo-delete-btn"
onClick={this.onDeleteClick.bind(this, this.props.task)}>Delete</button>
</td>
)
}
private renderTaskSection() {
var isCompleted = this.props.isCompleted;
var taskStyle = {
textDecorationLine: isCompleted ? "line-through" : "",
color: isCompleted ? "grey" : "blue",
cursor: "pointer"
};
return (this.state.isEditing) ? (
<td>
<form onSubmit={this.onSaveClick}>
<input type="text" defaultValue={this.props.task}
ref="editTask" />
</form>
</td>
)
: (
<td style={taskStyle}
onClick={this.onTaskClick.bind(this, this.props.task)}>
{this.props.task}
</td>
)
}
private onTaskClick(task) {
this.props.toggleTask(task);
}
private onEditClick() {
this.setState({
isEditing: true
})
}
private onDeleteClick(task) {
this.props.deleteTask(task)
}
<|fim▁hole|> var oldTask = this.props.task;
this.props.saveTask(oldTask, newTask);
this.setState({
isEditing: false
});
}
}
private onCancelClick() {
this.setState({
isEditing: false
})
}
}<|fim▁end|>
|
private onSaveClick(event) {
event.preventDefault();
var newTask = this.refs.editTask.value;
if (newTask) {
|
<|file_name|>keras_vectorizer.py<|end_file_name|><|fim▁begin|>import sys
import numpy as np
from normalization import tokenize
from helpers import ahash
class KerasVectorizer():
'''
Convert list of documents to numpy array for input into Keras model
'''
def __init__(self, n_features=100000, maxlen=None, maxper=100, hash_function=ahash):
self.maxlen = maxlen
self.maxper = maxper
self.n_features = n_features
self.hash_function = hash_function
def _exact_hash(self, word, n_features):
return self.token_lookup.get(word, 0)
def fit_transform(self, raw_documents, y=None, suffix='', verbose=True):
if verbose:
print >> sys.stderr, 'splitting raw documents'
# Some way to print progress?
tokens = map(self._split_function, raw_documents)
if self.maxlen:
maxlen = self.maxlen
else:
maxlen = int(np.percentile(map(len, tokens), self.maxper))
self.maxlen = maxlen
X = np.zeros((len(tokens), maxlen))
for i,t in enumerate(tokens):
if verbose:
if not i % 10000:
print >> sys.stderr, 'processed %d tokens' % i
if len(t) > 0:
X[i,-len(t):] = map(lambda x: self.hash_function(x + suffix, self.n_features), t[:maxlen])
return X
class KerasCharacterVectorizer(KerasVectorizer):
'''
Split a string into characters
'''
def _split_function(self, doc):
return list(doc)
class KerasTokenVectorizer(KerasVectorizer):
'''
Split a string into words,
'''
def _split_function(self, doc):
return tokenize(doc, keep_punctuation=True)
<|fim▁hole|> def _split_function(self, doc):
return doc
'''
from keras_vectorizer import KerasTokenVectorizer, KerasCharacterVectorizer
ktv = KerasTokenVectorizer()
ktv.fit_transform(['this is a test'])
ktv.fit_transform(['this is a test', 'this is a another test'])
ktv = KerasTokenVectorizer(maxlen=2)
ktv.fit_transform(['this is a test', 'this is a another test'])
kcv = KerasCharacterVectorizer()
kcv.fit_transform(['something', 'else'])
'''<|fim▁end|>
|
class KerasPretokenizedVectorizer(KerasVectorizer):
|
<|file_name|>0003_add_pootle_user_store_score.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-14 19:45
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the per-store user score table (pootle_user_store_score) with a
    # uniqueness constraint on (date, store, user).
    # Reconstructed: dataset fill-in-middle tokens had split the CreateModel
    # `options` dict ('db_table' lines were displaced past the end marker).

    dependencies = [
        ('pootle_store', '0045_remove_suggestion_tmp_state'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('pootle_score', '0002_set_user_scores'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserStoreScore',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(db_index=True)),
                ('score', models.FloatField(db_index=True)),
                ('reviewed', models.IntegerField(db_index=True, default=0)),
                ('suggested', models.IntegerField(db_index=True, default=0)),
                ('translated', models.IntegerField(db_index=True, default=0)),
                ('store', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_scores', to='pootle_store.Store')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='store_scores', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
                'db_table': 'pootle_user_store_score',
            },
        ),
        migrations.AlterUniqueTogether(
            name='userstorescore',
            unique_together=set([('date', 'store', 'user')]),
        ),
    ]
# Jumpscript: report the host's network information.
# Reconstructed: dataset fill-in-middle tokens had displaced the module
# header (import and `descr`) past the end marker.
from JumpScale import j

descr = """
This jumpscript returns network info
"""

category = "monitoring"
organization = "jumpscale"
author = "[email protected]"
license = "bsd"
version = "1.0"
roles = []


def action():
    # Delegate to JumpScale's network helper; `j` is the JumpScale root
    # object imported above.
    return j.sal.nettools.getNetworkInfo()


if __name__ == "__main__":
    # Allow running the jumpscript directly for debugging.
    print(action())
<|file_name|>test_msvc9compiler.py<|end_file_name|><|fim▁begin|>"""Tests for distutils.msvc9compiler."""
import sys
import unittest
import os
from distutils.errors import DistutilsPlatformError
from distutils.tests import support
from test.support import run_unittest
_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX">
</assemblyIdentity>
</dependentAssembly>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>
"""
_CLEANED_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>"""
# Decide whether this module's tests can run at all: they require Windows
# and an MSVC build toolchain of version 8.0 or newer.  SKIP_MESSAGE is
# None when the tests may run; otherwise it holds the reason consumed by
# the skipUnless decorator on the TestCase below.
if sys.platform=="win32":
    from distutils.msvccompiler import get_build_version
    if get_build_version()>=8.0:
        SKIP_MESSAGE = None
    else:
        SKIP_MESSAGE = "These tests are only for MSVC8.0 or above"
else:
    SKIP_MESSAGE = "These tests are only for win32"
@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE)
class msvc9compilerTestCase(support.TempdirManager,
unittest.TestCase):
def test_no_compiler(self):
# makes sure query_vcvarsall throws
# a DistutilsPlatformError if the compiler
# is not found
from distutils.msvc9compiler import query_vcvarsall
def _find_vcvarsall(version):
return None
from distutils import msvc9compiler
old_find_vcvarsall = msvc9compiler.find_vcvarsall
msvc9compiler.find_vcvarsall = _find_vcvarsall
try:
self.assertRaises(DistutilsPlatformError, query_vcvarsall,
'wont find this version')
finally:
msvc9compiler.find_vcvarsall = old_find_vcvarsall
    def test_reg_class(self):
        """Exercise the Reg registry-reading helper (Windows only; the
        whole TestCase is skipped elsewhere when not applicable)."""
        from distutils.msvc9compiler import Reg
        # An unknown path/value must raise rather than return a default.
        self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx')
        # looking for values that should exist on all
        # windows registeries versions.
        path = r'Control Panel\Desktop'
        v = Reg.get_value(path, 'dragfullwindows')
        self.assertTrue(v in ('0', '1', '2'))
        import winreg
        HKCU = winreg.HKEY_CURRENT_USER
        # A nonexistent key yields None rather than raising.
        keys = Reg.read_keys(HKCU, 'xxxx')
        self.assertEqual(keys, None)
        keys = Reg.read_keys(HKCU, r'Control Panel')
        self.assertTrue('Desktop' in keys)
def test_remove_visual_c_ref(self):
from distutils.msvc9compiler import MSVCCompiler
tempdir = self.mkdtemp()
manifest = os.path.join(tempdir, 'manifest')
f = open(manifest, 'w')
try:
f.write(_MANIFEST)
finally:
f.close()
compiler = MSVCCompiler()
compiler._remove_visual_c_ref(manifest)
# see what we got
f = open(manifest)
try:
# removing trailing spaces
content = '\n'.join([line.rstrip() for line in f.readlines()])
finally:
f.close()
# makes sure the manifest was properly cleaned
self.assertEqual(content, _CLEANED_MANIFEST)<|fim▁hole|>def test_suite():
return unittest.makeSuite(msvc9compilerTestCase)
if __name__ == "__main__":
run_unittest(test_suite())<|fim▁end|>
| |
<|file_name|>debug.rs<|end_file_name|><|fim▁begin|>use core::slice;
use drivers::io::{Io, Pio};
use system::error::Result;
pub fn do_sys_debug(ptr: *const u8, len: usize) -> Result<usize> {
let bytes = unsafe { slice::from_raw_parts(ptr, len) };
if unsafe { ::ENV_PTR.is_some() } {
::env().console.lock().write(bytes);
} else {
let serial_status = Pio::<u8>::new(0x3F8 + 5);
let mut serial_data = Pio::<u8>::new(0x3F8);
for byte in bytes.iter() {
while !serial_status.readf(0x20) {}
serial_data.write(*byte);
if *byte == 8 {
while !serial_status.readf(0x20) {}
serial_data.write(0x20);<|fim▁hole|> }
}
}
Ok(len)
}<|fim▁end|>
|
while !serial_status.readf(0x20) {}
serial_data.write(8);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.